status-go/protocol/messenger.go

package protocol
import (
"bytes"
"context"
"crypto/ecdsa"
"database/sql"
"encoding/json"
"fmt"
"math/rand"
"os"
"strconv"
"strings"
"sync"
"time"
"github.com/golang/protobuf/proto"
"github.com/google/uuid"
"github.com/libp2p/go-libp2p/core/peer"
"github.com/pkg/errors"
"go.uber.org/zap"
"golang.org/x/time/rate"
datasyncnode "github.com/status-im/mvds/node"
gethcommon "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/p2p"
"github.com/status-im/status-go/account"
"github.com/status-im/status-go/appmetrics"
gocommon "github.com/status-im/status-go/common"
utils "github.com/status-im/status-go/common"
"github.com/status-im/status-go/connection"
"github.com/status-im/status-go/contracts"
"github.com/status-im/status-go/deprecation"
"github.com/status-im/status-go/eth-node/crypto"
"github.com/status-im/status-go/eth-node/types"
"github.com/status-im/status-go/images"
multiaccountscommon "github.com/status-im/status-go/multiaccounts/common"
"github.com/status-im/status-go/multiaccounts"
"github.com/status-im/status-go/multiaccounts/accounts"
"github.com/status-im/status-go/multiaccounts/settings"
"github.com/status-im/status-go/protocol/anonmetrics"
"github.com/status-im/status-go/protocol/common"
"github.com/status-im/status-go/protocol/common/shard"
"github.com/status-im/status-go/protocol/communities"
"github.com/status-im/status-go/protocol/encryption"
"github.com/status-im/status-go/protocol/encryption/multidevice"
"github.com/status-im/status-go/protocol/encryption/sharedsecret"
"github.com/status-im/status-go/protocol/ens"
"github.com/status-im/status-go/protocol/identity/alias"
"github.com/status-im/status-go/protocol/identity/identicon"
"github.com/status-im/status-go/protocol/peersyncing"
"github.com/status-im/status-go/protocol/protobuf"
"github.com/status-im/status-go/protocol/pushnotificationclient"
"github.com/status-im/status-go/protocol/pushnotificationserver"
"github.com/status-im/status-go/protocol/requests"
"github.com/status-im/status-go/protocol/sqlite"
"github.com/status-im/status-go/protocol/storenodes"
"github.com/status-im/status-go/protocol/transport"
v1protocol "github.com/status-im/status-go/protocol/v1"
"github.com/status-im/status-go/protocol/verification"
"github.com/status-im/status-go/server"
"github.com/status-im/status-go/services/browsers"
ensservice "github.com/status-im/status-go/services/ens"
"github.com/status-im/status-go/services/ext/mailservers"
localnotifications "github.com/status-im/status-go/services/local-notifications"
mailserversDB "github.com/status-im/status-go/services/mailservers"
"github.com/status-im/status-go/services/wallet"
"github.com/status-im/status-go/services/wallet/community"
"github.com/status-im/status-go/services/wallet/token"
"github.com/status-im/status-go/signal"
"github.com/status-im/status-go/telemetry"
)
const (
PubKeyStringLength = 132
transactionSentTxt = "Transaction sent"
publicChat ChatContext = "public-chat"
privateChat ChatContext = "private-chat"
)
// errors
var (
ErrChatNotFoundError = errors.New("Chat not found")
)
const communityAdvertiseIntervalSecond int64 = 24 * 60 * 60
// messageCacheIntervalMs is how long we should keep processed messages in the cache, in ms
var messageCacheIntervalMs uint64 = 1000 * 60 * 60 * 48
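// For reference, 1000 * 60 * 60 * 48 = 172,800,000 ms, i.e. 48 hours (two days).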
// Messenger is an entity managing chats and messages.
// It acts as a bridge between the application and encryption
// layers.
// It needs to expose an interface to manage installations
// because installations are managed by the user.
// Similarly, it needs to expose an interface to manage
// mailservers because they can also be managed by the user.
type Messenger struct {
node types.Node
server *p2p.Server
peerStore *mailservers.PeerStore
config *config
identity *ecdsa.PrivateKey
persistence *sqlitePersistence
transport *transport.Transport
encryptor *encryption.Protocol
sender *common.MessageSender
ensVerifier *ens.Verifier
anonMetricsClient *anonmetrics.Client
anonMetricsServer *anonmetrics.Server
pushNotificationClient *pushnotificationclient.Client
pushNotificationServer *pushnotificationserver.Server
communitiesManager *communities.Manager
archiveManager communities.ArchiveService
communitiesKeyDistributor communities.KeyDistributor
accountsManager account.Manager
mentionsManager *MentionManager
storeNodeRequestsManager *StoreNodeRequestManager
logger *zap.Logger
outputCSV bool
csvFile *os.File
verifyTransactionClient EthClient
featureFlags common.FeatureFlags
shutdownTasks []func() error
shouldPublishContactCode bool
systemMessagesTranslations *systemMessageTranslationsMap
allChats *chatMap
selfContact *Contact
selfContactSubscriptions []chan *SelfContactChangeEvent
allContacts *contactMap
allInstallations *installationMap
modifiedInstallations *stringBoolMap
installationID string
communityStorenodes *storenodes.CommunityStorenodes
database *sql.DB
multiAccounts *multiaccounts.Database
settings *accounts.Database
account *multiaccounts.Account
mailserversDatabase *mailserversDB.Database
browserDatabase *browsers.Database
httpServer *server.MediaServer
started bool
quit chan struct{}
ctx context.Context
cancel context.CancelFunc
shutdownWaitGroup sync.WaitGroup
importingCommunities map[string]bool
importingChannels map[string]bool
importRateLimiter *rate.Limiter
importDelayer struct {
wait chan struct{}
once sync.Once
}
connectionState connection.State
telemetryClient *telemetry.Client
contractMaker *contracts.ContractMaker
verificationDatabase *verification.Persistence
savedAddressesManager *wallet.SavedAddressesManager
walletAPI *wallet.API
// TODO(samyoul) Determine if/how the remaining usage of this mutex can be removed
mutex sync.Mutex
handleMessagesMutex sync.Mutex
handleImportMessagesMutex sync.Mutex
// flag to disable checking #hasPairedDevices
localPairing bool
// flag to enable backedup messages processing, false by default
processBackedupMessages bool
communityTokensService communities.CommunityTokensServiceInterface
// used to track dispatched messages
dispatchMessageTestCallback func(common.RawMessage)
// used to track unhandled messages
unhandledMessagesTracker func(*v1protocol.StatusMessage, error)
// enables control over chat messages iteration
retrievedMessagesIteratorFactory func(map[transport.Filter][]*types.Message) MessagesIterator
peersyncing *peersyncing.PeerSyncing
peersyncingOffers map[string]uint64
peersyncingRequests map[string]uint64
mvdsStatusChangeEvent chan datasyncnode.PeerStatusChangeEvent
}
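// A minimal construction sketch, illustrative only: the key material, node,
// peer store, and Option values all come from the embedding application.
//
//	m, err := NewMessenger(nodeName, identityKey, node, installationID, peerStore, version, opts...)
//	if err != nil {
//		// handle the error
//	}
//	response, err := m.Start()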
type EnvelopeEventsInterceptor struct {
EnvelopeEventsHandler transport.EnvelopeEventsHandler
Messenger *Messenger
}
type LatestContactRequest struct {
MessageID string
ContactRequestState common.ContactRequestState
ContactID string
}
func (m *Messenger) GetOwnPrimaryName() (string, error) {
ensName, err := m.settings.ENSName()
if err != nil {
return "", err
}
if ensName != "" {
return ensName, nil
}
return m.settings.DisplayName()
}
func (m *Messenger) ResolvePrimaryName(mentionID string) (string, error) {
if mentionID == m.myHexIdentity() {
return m.GetOwnPrimaryName()
}
contact, ok := m.allContacts.Load(mentionID)
if !ok {
var err error
contact, err = buildContactFromPkString(mentionID)
if err != nil {
return mentionID, err
}
}
return contact.PrimaryName(), nil
}
// EnvelopeSent is triggered when an envelope is delivered to at least one peer.
func (interceptor EnvelopeEventsInterceptor) EnvelopeSent(identifiers [][]byte) {
if interceptor.Messenger != nil {
signalIDs := make([][]byte, 0, len(identifiers))
for _, identifierBytes := range identifiers {
messageID := types.EncodeHex(identifierBytes)
err := interceptor.Messenger.processSentMessage(messageID)
if err != nil {
interceptor.Messenger.logger.Info("messenger failed to process sent messages", zap.Error(err))
}
message, err := interceptor.Messenger.MessageByID(messageID)
if err != nil {
interceptor.Messenger.logger.Error("failed to query message outgoing status", zap.Error(err))
continue
}
if message.OutgoingStatus == common.OutgoingStatusDelivered {
// We don't want to send the signal if the message was already marked as delivered
continue
}
signalIDs = append(signalIDs, identifierBytes)
}
interceptor.EnvelopeEventsHandler.EnvelopeSent(signalIDs)
} else {
// NOTE(rasom): if interceptor.Messenger is not nil and some error occurred
// while processing a sent message, we don't want to send the envelope.sent
// signal to the client; hence the `else` clause is necessary.
interceptor.EnvelopeEventsHandler.EnvelopeSent(identifiers)
}
}
// EnvelopeExpired is triggered when an envelope expires without having been delivered to any peer.
func (interceptor EnvelopeEventsInterceptor) EnvelopeExpired(identifiers [][]byte, err error) {
//we don't track expired events in Messenger, so just redirect to handler
interceptor.EnvelopeEventsHandler.EnvelopeExpired(identifiers, err)
}
// MailServerRequestCompleted is triggered when the mailserver sends a message to notify that the request has been completed
func (interceptor EnvelopeEventsInterceptor) MailServerRequestCompleted(requestID types.Hash, lastEnvelopeHash types.Hash, cursor []byte, err error) {
//we don't track mailserver requests in Messenger, so just redirect to handler
interceptor.EnvelopeEventsHandler.MailServerRequestCompleted(requestID, lastEnvelopeHash, cursor, err)
}
// MailServerRequestExpired is triggered when the mailserver request expires
func (interceptor EnvelopeEventsInterceptor) MailServerRequestExpired(hash types.Hash) {
//we don't track mailserver requests in Messenger, so just redirect to handler
interceptor.EnvelopeEventsHandler.MailServerRequestExpired(hash)
}
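// Wiring sketch: this mirrors what NewMessenger does further down in this file
// when an envelopesMonitorConfig is provided.
//
//	interceptor := EnvelopeEventsInterceptor{envelopesMonitorConfig.EnvelopeEventsHandler, messenger}
//	if err := messenger.transport.SetEnvelopeEventsHandler(interceptor); err != nil {
//		logger.Info("Unable to set envelopes event handler", zap.Error(err))
//	}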
func NewMessenger(
nodeName string,
identity *ecdsa.PrivateKey,
node types.Node,
installationID string,
peerStore *mailservers.PeerStore,
version string,
opts ...Option,
) (*Messenger, error) {
var messenger *Messenger
c := messengerDefaultConfig()
for _, opt := range opts {
if err := opt(&c); err != nil {
return nil, err
}
}
logger := c.logger
if c.logger == nil {
var err error
if logger, err = zap.NewDevelopment(); err != nil {
return nil, errors.Wrap(err, "failed to create a logger")
}
}
if c.systemMessagesTranslations == nil {
c.systemMessagesTranslations = defaultSystemMessagesTranslations
}
// Configure the database.
if c.appDb == nil {
return nil, errors.New("database instance or database path needs to be provided")
}
database := c.appDb
// Apply any post database creation changes to the database
for _, opt := range c.afterDbCreatedHooks {
if err := opt(&c); err != nil {
return nil, err
}
}
// Apply migrations for all components.
err := sqlite.Migrate(database)
if err != nil {
return nil, errors.Wrap(err, "failed to apply migrations")
}
// Initialize transport layer.
var transp *transport.Transport
var peerId peer.ID
if waku, err := node.GetWaku(nil); err == nil && waku != nil {
transp, err = transport.NewTransport(
waku,
identity,
database,
"waku_keys",
nil,
c.envelopesMonitorConfig,
logger,
)
if err != nil {
return nil, errors.Wrap(err, "failed to create Transport")
}
} else {
logger.Info("failed to find Waku service; trying WakuV2", zap.Error(err))
wakuV2, err := node.GetWakuV2(nil)
if err != nil || wakuV2 == nil {
return nil, errors.Wrap(err, "failed to find Whisper and Waku V1/V2 services")
}
peerId = wakuV2.PeerID()
transp, err = transport.NewTransport(
wakuV2,
identity,
database,
"wakuv2_keys",
nil,
c.envelopesMonitorConfig,
logger,
)
if err != nil {
return nil, errors.Wrap(err, "failed to create Transport")
}
}
// Initialize encryption layer.
encryptionProtocol := encryption.New(
database,
installationID,
logger,
)
sender, err := common.NewMessageSender(
identity,
database,
encryptionProtocol,
transp,
logger,
c.featureFlags,
)
if err != nil {
return nil, errors.Wrap(err, "failed to create messageSender")
}
// Initialise anon metrics client
var anonMetricsClient *anonmetrics.Client
if c.anonMetricsClientConfig != nil &&
c.anonMetricsClientConfig.ShouldSend &&
c.anonMetricsClientConfig.Active == anonmetrics.ActiveClientPhrase {
anonMetricsClient = anonmetrics.NewClient(sender)
anonMetricsClient.Config = c.anonMetricsClientConfig
anonMetricsClient.Identity = identity
anonMetricsClient.DB = appmetrics.NewDB(database)
anonMetricsClient.Logger = logger
}
// Initialise anon metrics server
var anonMetricsServer *anonmetrics.Server
if c.anonMetricsServerConfig != nil &&
c.anonMetricsServerConfig.Enabled &&
c.anonMetricsServerConfig.Active == anonmetrics.ActiveServerPhrase {
server, err := anonmetrics.NewServer(c.anonMetricsServerConfig.PostgresURI)
if err != nil {
return nil, errors.Wrap(err, "failed to create anonmetrics.Server")
}
anonMetricsServer = server
anonMetricsServer.Config = c.anonMetricsServerConfig
anonMetricsServer.Logger = logger
}
// Initialize push notification server
var pushNotificationServer *pushnotificationserver.Server
if c.pushNotificationServerConfig != nil && c.pushNotificationServerConfig.Enabled {
c.pushNotificationServerConfig.Identity = identity
pushNotificationServerPersistence := pushnotificationserver.NewSQLitePersistence(database)
pushNotificationServer = pushnotificationserver.New(c.pushNotificationServerConfig, pushNotificationServerPersistence, sender)
}
// Initialize push notification client
pushNotificationClientPersistence := pushnotificationclient.NewPersistence(database)
pushNotificationClientConfig := c.pushNotificationClientConfig
if pushNotificationClientConfig == nil {
pushNotificationClientConfig = &pushnotificationclient.Config{}
}
sqlitePersistence := newSQLitePersistence(database)
// Overriding until we handle different identities
pushNotificationClientConfig.Identity = identity
pushNotificationClientConfig.Logger = logger
pushNotificationClientConfig.InstallationID = installationID
pushNotificationClient := pushnotificationclient.New(pushNotificationClientPersistence, pushNotificationClientConfig, sender, sqlitePersistence)
ensVerifier := ens.New(node, logger, transp, database, c.verifyENSURL, c.verifyENSContractAddress)
managerOptions := []communities.ManagerOption{
communities.WithAccountManager(c.accountsManager),
}
var walletAPI *wallet.API
if c.walletService != nil {
walletAPI = wallet.NewAPI(c.walletService)
managerOptions = append(managerOptions, communities.WithCollectiblesManager(walletAPI))
} else if c.collectiblesManager != nil {
managerOptions = append(managerOptions, communities.WithCollectiblesManager(c.collectiblesManager))
}
if c.tokenManager != nil {
managerOptions = append(managerOptions, communities.WithTokenManager(c.tokenManager))
} else if c.rpcClient != nil {
tokenManager := token.NewTokenManager(c.walletDb, c.rpcClient, community.NewManager(database, c.httpServer, nil), c.rpcClient.NetworkManager, database, c.httpServer, nil, nil, nil, token.NewPersistence(c.walletDb))
managerOptions = append(managerOptions, communities.WithTokenManager(communities.NewDefaultTokenManager(tokenManager, c.rpcClient.NetworkManager)))
}
if c.walletConfig != nil {
managerOptions = append(managerOptions, communities.WithWalletConfig(c.walletConfig))
}
if c.communityTokensService != nil {
managerOptions = append(managerOptions, communities.WithCommunityTokensService(c.communityTokensService))
}
managerOptions = append(managerOptions, c.communityManagerOptions...)
communitiesKeyDistributor := &CommunitiesKeyDistributorImpl{
sender: sender,
encryptor: encryptionProtocol,
}
communitiesManager, err := communities.NewManager(
identity,
installationID,
database,
encryptionProtocol,
logger,
ensVerifier,
c.communityTokensService,
transp,
transp,
communitiesKeyDistributor,
c.httpServer,
managerOptions...,
)
if err != nil {
return nil, err
}
amc := &communities.ArchiveManagerConfig{
TorrentConfig: c.torrentConfig,
Logger: logger,
Persistence: communitiesManager.GetPersistence(),
Transport: transp,
Identity: identity,
Encryptor: encryptionProtocol,
Publisher: communitiesManager,
}
// Depending on the OS, Go will choose whether to use the "communities/manager_archive_nop.go" or the
// "communities/manager_archive.go" version of this function, based on the build constraints in those files.
// See those files for more details.
archiveManager := communities.NewArchiveManager(amc)
if err != nil {
return nil, err
}
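// The selection happens at compile time through file-level build constraints.
// As an illustration only (the real constraint lines live in the communities
// package files and may differ), a line of the form
//
//	//go:build !windows
//
// at the top of one of those files tells the Go toolchain which implementation
// of NewArchiveManager ends up in the binary.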
settings, err := accounts.NewDB(database)
if err != nil {
return nil, err
}
savedAddressesManager := wallet.NewSavedAddressesManager(c.walletDb)
selfContact, err := buildSelfContact(identity, settings, c.multiAccount, c.account)
if err != nil {
return nil, fmt.Errorf("failed to build contact of ourself: %w", err)
}
ctx, cancel := context.WithCancel(context.Background())
var telemetryClient *telemetry.Client
if c.telemetryServerURL != "" {
options := []telemetry.TelemetryClientOption{
telemetry.WithPeerID(peerId.String()),
}
telemetryClient = telemetry.NewClient(logger, c.telemetryServerURL, c.account.KeyUID, nodeName, version, options...)
if c.wakuService != nil {
c.wakuService.SetStatusTelemetryClient(telemetryClient)
}
telemetryClient.Start(ctx)
}
messenger = &Messenger{
config: &c,
node: node,
identity: identity,
persistence: sqlitePersistence,
transport: transp,
encryptor: encryptionProtocol,
sender: sender,
anonMetricsClient: anonMetricsClient,
anonMetricsServer: anonMetricsServer,
telemetryClient: telemetryClient,
communityTokensService: c.communityTokensService,
pushNotificationClient: pushNotificationClient,
pushNotificationServer: pushNotificationServer,
communitiesManager: communitiesManager,
communitiesKeyDistributor: communitiesKeyDistributor,
archiveManager: archiveManager,
accountsManager: c.accountsManager,
ensVerifier: ensVerifier,
featureFlags: c.featureFlags,
systemMessagesTranslations: c.systemMessagesTranslations,
allChats: new(chatMap),
selfContact: selfContact,
allContacts: &contactMap{
logger: logger,
me: selfContact,
},
allInstallations: new(installationMap),
installationID: installationID,
modifiedInstallations: new(stringBoolMap),
verifyTransactionClient: c.verifyTransactionClient,
database: database,
multiAccounts: c.multiAccount,
settings: settings,
peersyncing: peersyncing.New(peersyncing.Config{Database: database, Timesource: transp}),
peersyncingOffers: make(map[string]uint64),
peersyncingRequests: make(map[string]uint64),
peerStore: peerStore,
mvdsStatusChangeEvent: make(chan datasyncnode.PeerStatusChangeEvent, 5),
verificationDatabase: verification.NewPersistence(database),
mailserversDatabase: c.mailserversDatabase,
communityStorenodes: storenodes.NewCommunityStorenodes(storenodes.NewDB(database), logger),
account: c.account,
quit: make(chan struct{}),
ctx: ctx,
cancel: cancel,
importingCommunities: make(map[string]bool),
importingChannels: make(map[string]bool),
importRateLimiter: rate.NewLimiter(rate.Every(importSlowRate), 1),
importDelayer: struct {
wait chan struct{}
once sync.Once
}{wait: make(chan struct{})},
browserDatabase: c.browserDatabase,
httpServer: c.httpServer,
shutdownTasks: []func() error{
ensVerifier.Stop,
pushNotificationClient.Stop,
communitiesManager.Stop,
archiveManager.Stop,
encryptionProtocol.Stop,
func() error {
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
err := transp.ResetFilters(ctx)
if err != nil {
logger.Warn("could not reset filters", zap.Error(err))
}
// We don't want to throw an error in this case; this is a soft
// fail
return nil
},
transp.Stop,
func() error { sender.Stop(); return nil },
// Currently this often fails; it seems safe to ignore the errors
// https://github.com/uber-go/zap/issues/328
func() error { _ = logger.Sync; return nil },
database.Close,
},
logger: logger,
savedAddressesManager: savedAddressesManager,
retrievedMessagesIteratorFactory: NewDefaultMessagesIterator,
}
if c.rpcClient != nil {
contractMaker, err := contracts.NewContractMaker(c.rpcClient)
if err != nil {
return nil, err
}
messenger.contractMaker = contractMaker
}
messenger.mentionsManager = NewMentionManager(messenger)
messenger.storeNodeRequestsManager = NewStoreNodeRequestManager(messenger)
if c.walletService != nil {
messenger.walletAPI = walletAPI
}
if c.outputMessagesCSV {
messenger.outputCSV = c.outputMessagesCSV
csvFile, err := os.Create("messages-" + fmt.Sprint(time.Now().Unix()) + ".csv")
if err != nil {
return nil, err
}
_, err = csvFile.Write([]byte("timestamp\tmessageID\tfrom\ttopic\tchatID\tmessageType\tmessage\n"))
if err != nil {
return nil, err
}
messenger.csvFile = csvFile
messenger.shutdownTasks = append(messenger.shutdownTasks, csvFile.Close)
}
if anonMetricsClient != nil {
messenger.shutdownTasks = append(messenger.shutdownTasks, anonMetricsClient.Stop)
}
if anonMetricsServer != nil {
messenger.shutdownTasks = append(messenger.shutdownTasks, anonMetricsServer.Stop)
}
if c.envelopesMonitorConfig != nil {
interceptor := EnvelopeEventsInterceptor{c.envelopesMonitorConfig.EnvelopeEventsHandler, messenger}
err := messenger.transport.SetEnvelopeEventsHandler(interceptor)
if err != nil {
logger.Info("Unable to set envelopes event handler", zap.Error(err))
}
}
return messenger, nil
}
func (m *Messenger) SetP2PServer(server *p2p.Server) {
m.server = server
}
func (m *Messenger) EnableBackedupMessagesProcessing() {
m.processBackedupMessages = true
}
func (m *Messenger) processSentMessage(id string) error {
if m.connectionState.Offline {
return errors.New("Can't mark message as sent while offline")
}
rawMessage, err := m.persistence.RawMessageByID(id)
// If we have no raw message, we create a temporary one, so that
// the sent status is preserved
if err == sql.ErrNoRows || rawMessage == nil {
rawMessage = &common.RawMessage{
ID: id,
MessageType: protobuf.ApplicationMetadataMessage_CHAT_MESSAGE,
}
} else if err != nil {
return errors.Wrapf(err, "Can't get raw message with id %v", id)
}
rawMessage.Sent = true
err = m.persistence.SaveRawMessage(rawMessage)
if err != nil {
return errors.Wrapf(err, "Can't save raw message marked as sent")
}
err = m.UpdateMessageOutgoingStatus(id, common.OutgoingStatusSent)
if err != nil {
return err
}
return nil
}
func (m *Messenger) ToForeground() {
if m.httpServer != nil {
m.httpServer.ToForeground()
}
m.asyncRequestAllHistoricMessages()
}
func (m *Messenger) ToBackground() {
if m.httpServer != nil {
m.httpServer.ToBackground()
}
}
func (m *Messenger) Start() (*MessengerResponse, error) {
if m.started {
return nil, errors.New("messenger already started")
}
m.started = true
err := m.InitFilters()
if err != nil {
return nil, err
}
now := time.Now().UnixMilli()
if err := m.settings.CheckAndDeleteExpiredKeypairsAndAccounts(uint64(now)); err != nil {
return nil, err
}
m.logger.Info("starting messenger", zap.String("identity", types.EncodeHex(crypto.FromECDSAPub(&m.identity.PublicKey))))
// Start push notification server
if m.pushNotificationServer != nil {
if err := m.pushNotificationServer.Start(); err != nil {
return nil, err
}
}
// Start push notification client
if m.pushNotificationClient != nil {
m.handlePushNotificationClientRegistrations(m.pushNotificationClient.SubscribeToRegistrations())
if err := m.pushNotificationClient.Start(); err != nil {
return nil, err
}
}
// Start anonymous metrics client
if m.anonMetricsClient != nil {
if err := m.anonMetricsClient.Start(); err != nil {
return nil, err
}
}
ensSubscription := m.ensVerifier.Subscribe()
// Subscribe
if err := m.ensVerifier.Start(); err != nil {
return nil, err
}
if err := m.communitiesManager.Start(); err != nil {
return nil, err
}
// set the shared secret handler
m.sender.SetHandleSharedSecrets(m.handleSharedSecrets)
if err := m.sender.StartDatasync(m.mvdsStatusChangeEvent, m.sendDataSync); err != nil {
return nil, err
}
subscriptions, err := m.encryptor.Start(m.identity)
if err != nil {
return nil, err
}
// handle stored shared secrets
err = m.handleSharedSecrets(subscriptions.SharedSecrets)
if err != nil {
return nil, err
}
m.handleEncryptionLayerSubscriptions(subscriptions)
m.handleCommunitiesSubscription(m.communitiesManager.Subscribe())
m.handleCommunitiesHistoryArchivesSubscription(m.communitiesManager.Subscribe())
m.updateCommunitiesActiveMembersPeriodically()
m.schedulePublishGrantsForControlledCommunities()
m.handleENSVerificationSubscription(ensSubscription)
m.watchConnectionChange()
m.watchChatsToUnmute()
m.watchCommunitiesToUnmute()
m.watchExpiredMessages()
m.watchIdentityImageChanges()
m.watchWalletBalances()
m.watchPendingCommunityRequestToJoin()
m.broadcastLatestUserStatus()
m.timeoutAutomaticStatusUpdates()
if !m.config.featureFlags.DisableCheckingForBackup {
m.startBackupLoop()
}
if !m.config.featureFlags.DisableAutoMessageLoop {
err = m.startAutoMessageLoop()
if err != nil {
return nil, err
}
}
m.startPeerSyncingLoop()
m.startSyncSettingsLoop()
m.startSettingsChangesLoop()
m.startCommunityRekeyLoop()
if m.config.codeControlFlags.CuratedCommunitiesUpdateLoopEnabled {
m.startCuratedCommunitiesUpdateLoop()
}
m.startMessageSegmentsCleanupLoop()
m.startHashRatchetEncryptedMessagesCleanupLoop()
m.startRequestMissingCommunityChannelsHRKeysLoop()
if err := m.cleanTopics(); err != nil {
return nil, err
}
response := &MessengerResponse{}
storenodes, err := m.AllMailservers()
if err != nil {
return nil, err
}
err = m.setupStorenodes(storenodes)
if err != nil {
return nil, err
}
response.Mailservers = storenodes
m.transport.SetStorenodeConfigProvider(m)
if err := m.communityStorenodes.ReloadFromDB(); err != nil {
return nil, err
}
go m.checkForMissingMessagesLoop()
go m.checkForStorenodeCycleSignals()
controlledCommunities, err := m.communitiesManager.Controlled()
if err != nil {
return nil, err
}
if m.archiveManager.IsReady() {
go func() {
defer gocommon.LogOnPanic()
<-m.transport.OnStorenodeAvailableOneShot()
m.InitHistoryArchiveTasks(controlledCommunities)
}()
}
for _, c := range controlledCommunities {
if c.Joined() && c.HasTokenPermissions() {
m.communitiesManager.StartMembersReevaluationLoop(c.ID(), false)
}
}
joinedCommunities, err := m.communitiesManager.Joined()
if err != nil {
return nil, err
}
for _, joinedCommunity := range joinedCommunities {
// resume importing message history archives in case
// imports have been interrupted previously
err := m.resumeHistoryArchivesImport(joinedCommunity.ID())
if err != nil {
return nil, err
}
}
m.enableHistoryArchivesImportAfterDelay()
if m.httpServer != nil {
err = m.httpServer.Start()
if err != nil {
return nil, err
}
}
err = m.GarbageCollectRemovedBookmarks()
if err != nil {
return nil, err
}
err = m.garbageCollectRemovedSavedAddresses()
if err != nil {
return nil, err
}
displayName, err := m.settings.DisplayName()
if err != nil {
return nil, err
}
if err := utils.ValidateDisplayName(&displayName); err != nil {
// Somehow a wrong display name was saved. We need to update it so that others accept our messages
pubKey, err := m.settings.GetPublicKey()
if err != nil {
return nil, err
}
replacementDisplayName := pubKey[:12]
m.logger.Warn("unaccepted display name was saved to the setting, reverting to pubkey substring", zap.String("displayName", displayName), zap.String("replacement", replacementDisplayName))
if err := m.SetDisplayName(replacementDisplayName); err != nil {
// We do not return the error as we do not want to block the login for it
m.logger.Warn("error setting display name", zap.Error(err))
}
}
return response, nil
}
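// SetMediaServer stores the HTTP media server on the messenger and propagates
// it to the communities manager.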
func (m *Messenger) SetMediaServer(server *server.MediaServer) {
m.httpServer = server
m.communitiesManager.SetMediaServer(server)
}
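// IdentityPublicKey returns the messenger's own identity public key.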
func (m *Messenger) IdentityPublicKey() *ecdsa.PublicKey {
return &m.identity.PublicKey
}
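// IdentityPublicKeyCompressed returns the compressed (33-byte) form of the identity public key.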
func (m *Messenger) IdentityPublicKeyCompressed() []byte {
return crypto.CompressPubkey(m.IdentityPublicKey())
}
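// IdentityPublicKeyString returns the identity public key hex-encoded in its uncompressed form.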
func (m *Messenger) IdentityPublicKeyString() string {
return types.EncodeHex(crypto.FromECDSAPub(m.IdentityPublicKey()))
}
// cleanTopics removes any topic that is ephemeral or does not have the Listen flag set
func (m *Messenger) cleanTopics() error {
if m.mailserversDatabase == nil {
return nil
}
var filters []*transport.Filter
for _, f := range m.transport.Filters() {
if f.Listen && !f.Ephemeral {
filters = append(filters, f)
}
}
m.logger.Debug("keeping topics", zap.Any("filters", filters))
return m.mailserversDatabase.SetTopics(filters)
}
// handleConnectionChange is called each time we go from offline to online or vice versa
func (m *Messenger) handleConnectionChange(online bool) {
// Update pushNotificationClient
if m.pushNotificationClient != nil {
if online {
m.pushNotificationClient.Online()
} else {
m.pushNotificationClient.Offline()
}
}
// Update torrent manager
if m.archiveManager != nil {
m.archiveManager.SetOnline(online)
}
// Publish contact code
if online && m.shouldPublishContactCode {
if err := m.publishContactCode(); err != nil {
m.logger.Error("could not publish on contact code", zap.Error(err))
}
m.shouldPublishContactCode = false
}
// Start fetching messages from store nodes
if online {
m.asyncRequestAllHistoricMessages()
}
// Update ENS verifier
m.ensVerifier.SetOnline(online)
}
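// Online reports whether the node currently has at least one peer: the
// transport peer count is used for Waku v2, the node peer count otherwise.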
func (m *Messenger) Online() bool {
switch m.transport.WakuVersion() {
case 2:
return m.transport.PeerCount() > 0
default:
return m.node.PeersCount() > 0
}
}
func (m *Messenger) buildContactCodeAdvertisement() (*protobuf.ContactCodeAdvertisement, error) {
if m.pushNotificationClient == nil || !m.pushNotificationClient.Enabled() {
return nil, nil
}
m.logger.Debug("adding push notification info to contact code bundle")
info, err := m.pushNotificationClient.MyPushNotificationQueryInfo()
if err != nil {
return nil, err
}
if len(info) == 0 {
return nil, nil
}
return &protobuf.ContactCodeAdvertisement{
PushNotificationInfo: info,
}, nil
}
// publishContactCode sends a public message wrapped in the encryption
// layer, which will propagate our bundle
func (m *Messenger) publishContactCode() error {
var payload []byte
m.logger.Debug("sending contact code")
contactCodeAdvertisement, err := m.buildContactCodeAdvertisement()
if err != nil {
m.logger.Error("could not build contact code advertisement", zap.Error(err))
}
if contactCodeAdvertisement == nil {
contactCodeAdvertisement = &protobuf.ContactCodeAdvertisement{}
}
err = m.attachChatIdentity(contactCodeAdvertisement)
if err != nil {
return err
}
if contactCodeAdvertisement.ChatIdentity != nil {
m.logger.Debug("attached chat identity", zap.Int("images len", len(contactCodeAdvertisement.ChatIdentity.Images)))
} else {
m.logger.Debug("no attached chat identity")
}
payload, err = proto.Marshal(contactCodeAdvertisement)
if err != nil {
return err
}
contactCodeTopic := transport.ContactCodeTopic(&m.identity.PublicKey)
rawMessage := common.RawMessage{
LocalChatID: contactCodeTopic,
MessageType: protobuf.ApplicationMetadataMessage_CONTACT_CODE_ADVERTISEMENT,
Payload: payload,
Priority: &common.LowPriority,
}
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
_, err = m.sender.SendPublic(ctx, contactCodeTopic, rawMessage)
if err != nil {
m.logger.Warn("failed to send a contact code", zap.Error(err))
}
joinedCommunities, err := m.communitiesManager.Joined()
if err != nil {
return err
}
for _, community := range joinedCommunities {
rawMessage.LocalChatID = community.MemberUpdateChannelID()
rawMessage.PubsubTopic = community.PubsubTopic()
_, err = m.sender.SendPublic(ctx, rawMessage.LocalChatID, rawMessage)
if err != nil {
return err
}
}
m.logger.Debug("contact code sent")
return err
}
// attachChatIdentity attaches a protobuf.ChatIdentity to the given protobuf.ContactCodeAdvertisement,
// if the `shouldPublish` conditions are met
func (m *Messenger) attachChatIdentity(cca *protobuf.ContactCodeAdvertisement) error {
contactCodeTopic := transport.ContactCodeTopic(&m.identity.PublicKey)
shouldPublish, err := m.shouldPublishChatIdentity(contactCodeTopic)
if err != nil {
return err
}
if !shouldPublish {
return nil
}
cca.ChatIdentity, err = m.createChatIdentity(privateChat)
if err != nil {
return err
}
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
if err != nil {
return err
}
displayName, err := m.settings.DisplayName()
if err != nil {
return err
}
bio, err := m.settings.Bio()
if err != nil {
return err
}
profileShowcase, err := m.GetProfileShowcaseForSelfIdentity()
if err != nil {
return err
}
identityHash, err := m.getIdentityHash(displayName, bio, img, profileShowcase, multiaccountscommon.IDToColorFallbackToBlue(cca.ChatIdentity.CustomizationColor))
if err != nil {
return err
}
err = m.persistence.SaveWhenChatIdentityLastPublished(contactCodeTopic, identityHash)
if err != nil {
return err
}
return nil
}
// handleStandaloneChatIdentity sends a standalone ChatIdentity message to a public or one-to-one chat if the publish criteria are met
func (m *Messenger) handleStandaloneChatIdentity(chat *Chat) error {
if chat.ChatType != ChatTypePublic && chat.ChatType != ChatTypeOneToOne {
return nil
}
shouldPublishChatIdentity, err := m.shouldPublishChatIdentity(chat.ID)
if err != nil {
return err
}
if !shouldPublishChatIdentity {
return nil
}
chatContext := GetChatContextFromChatType(chat.ChatType)
ci, err := m.createChatIdentity(chatContext)
if err != nil {
return err
}
payload, err := proto.Marshal(ci)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
MessageType: protobuf.ApplicationMetadataMessage_CHAT_IDENTITY,
Payload: payload,
Priority: &common.LowPriority,
}
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
if chat.ChatType == ChatTypePublic {
_, err = m.sender.SendPublic(ctx, chat.ID, rawMessage)
if err != nil {
return err
}
} else {
pk, err := chat.PublicKey()
if err != nil {
return err
}
_, err = m.sender.SendPrivate(ctx, pk, &rawMessage)
if err != nil {
return err
}
}
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
if err != nil {
return err
}
displayName, err := m.settings.DisplayName()
if err != nil {
return err
}
bio, err := m.settings.Bio()
if err != nil {
return err
}
profileShowcase, err := m.GetProfileShowcaseForSelfIdentity()
if err != nil {
return err
}
identityHash, err := m.getIdentityHash(displayName, bio, img, profileShowcase, multiaccountscommon.IDToColorFallbackToBlue(ci.CustomizationColor))
if err != nil {
return err
}
err = m.persistence.SaveWhenChatIdentityLastPublished(chat.ID, identityHash)
if err != nil {
return err
}
return nil
}
func (m *Messenger) getIdentityHash(displayName, bio string, img *images.IdentityImage, profileShowcase *protobuf.ProfileShowcase, customizationColor multiaccountscommon.CustomizationColor) ([]byte, error) {
profileShowcaseData, err := proto.Marshal(profileShowcase)
if err != nil {
return []byte{}, err
}
if img == nil {
return crypto.Keccak256([]byte(displayName), []byte(bio), profileShowcaseData, []byte(customizationColor)), nil
}
return crypto.Keccak256(img.Payload, []byte(displayName), []byte(bio), profileShowcaseData, []byte(customizationColor)), nil
}
// shouldPublishChatIdentity returns true if the identity data has changed or the ChatIdentity was last attached more than 24 hours ago
func (m *Messenger) shouldPublishChatIdentity(chatID string) (bool, error) {
if m.account == nil {
return false, nil
}
// Check we have at least one image or a display name
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
if err != nil {
return false, err
}
displayName, err := m.settings.DisplayName()
if err != nil {
return false, err
}
if img == nil && displayName == "" {
return false, nil
}
lp, hash, err := m.persistence.GetWhenChatIdentityLastPublished(chatID)
if err != nil {
return false, err
}
bio, err := m.settings.Bio()
if err != nil {
return false, err
}
profileShowcase, err := m.GetProfileShowcaseForSelfIdentity()
if err != nil {
return false, err
}
identityHash, err := m.getIdentityHash(displayName, bio, img, profileShowcase, m.account.GetCustomizationColor())
if err != nil {
return false, err
}
if !bytes.Equal(hash, identityHash) {
return true, nil
}
// Note: if Alice does not add Bob as a contact she will not update her contact code with images
return lp == 0 || time.Now().Unix()-lp > 24*60*60, nil
}
// createChatIdentity creates a context-based protobuf.ChatIdentity.
// context 'public-chat' will attach only the thumbnail IdentityImage
// context 'private-chat' will attach all IdentityImages
func (m *Messenger) createChatIdentity(context ChatContext) (*protobuf.ChatIdentity, error) {
m.logger.Info("called createChatIdentity",
zap.String("account keyUID", m.account.KeyUID),
zap.String("context", string(context)),
)
displayName, err := m.settings.DisplayName()
if err != nil {
return nil, err
}
bio, err := m.settings.Bio()
if err != nil {
return nil, err
}
profileShowcase, err := m.GetProfileShowcaseForSelfIdentity()
if err != nil {
return nil, err
}
ci := &protobuf.ChatIdentity{
Clock: m.transport.GetCurrentTime(),
EnsName: "", // TODO add ENS name handling to dedicate PR
DisplayName: displayName,
Description: bio,
ProfileShowcase: profileShowcase,
CustomizationColor: m.account.GetCustomizationColorID(),
}
err = m.attachIdentityImagesToChatIdentity(context, ci)
if err != nil {
return nil, err
}
return ci, nil
}
// adaptIdentityImageToProtobuf adapts an images.IdentityImage to a protobuf.IdentityImage
func (m *Messenger) adaptIdentityImageToProtobuf(img *images.IdentityImage) *protobuf.IdentityImage {
return &protobuf.IdentityImage{
Payload: img.Payload,
SourceType: protobuf.IdentityImage_RAW_PAYLOAD, // TODO add ENS avatar handling to dedicated PR
ImageFormat: images.GetProtobufImageFormat(img.Payload),
}
}
func (m *Messenger) attachIdentityImagesToChatIdentity(context ChatContext, ci *protobuf.ChatIdentity) error {
s, err := m.getSettings()
if err != nil {
return err
}
if s.ProfilePicturesShowTo == settings.ProfilePicturesShowToNone {
m.logger.Info(fmt.Sprintf("settings.ProfilePicturesShowTo is set to '%d', skipping attaching IdentityImages", s.ProfilePicturesShowTo))
return nil
}
ciis := make(map[string]*protobuf.IdentityImage)
switch context {
case publicChat:
m.logger.Info(fmt.Sprintf("handling %s ChatIdentity", context))
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
if err != nil {
return err
}
if img == nil {
return nil
}
ciis[images.SmallDimName] = m.adaptIdentityImageToProtobuf(img)
ci.Images = ciis
case privateChat:
m.logger.Info(fmt.Sprintf("handling %s ChatIdentity", context))
imgs, err := m.multiAccounts.GetIdentityImages(m.account.KeyUID)
if err != nil {
return err
}
for _, img := range imgs {
ciis[img.Name] = m.adaptIdentityImageToProtobuf(img)
}
ci.Images = ciis
default:
return fmt.Errorf("unknown ChatIdentity context '%s'", context)
}
if s.ProfilePicturesShowTo == settings.ProfilePicturesShowToContactsOnly {
err := EncryptIdentityImagesWithContactPubKeys(ci.Images, m)
if err != nil {
return err
}
}
return nil
}
// handleSharedSecrets processes the negotiated secrets received from the encryption layer
func (m *Messenger) handleSharedSecrets(secrets []*sharedsecret.Secret) error {
for _, secret := range secrets {
fSecret := types.NegotiatedSecret{
PublicKey: secret.Identity,
Key: secret.Key,
}
_, err := m.transport.ProcessNegotiatedSecret(fSecret)
if err != nil {
return err
}
}
return nil
}
// handleInstallations adds the installations to the installations map
func (m *Messenger) handleInstallations(installations []*multidevice.Installation) {
for _, installation := range installations {
if installation.Identity == contactIDFromPublicKey(&m.identity.PublicKey) {
if _, ok := m.allInstallations.Load(installation.ID); !ok {
m.allInstallations.Store(installation.ID, installation)
m.modifiedInstallations.Store(installation.ID, true)
}
}
}
}
// handleEncryptionLayerSubscriptions handles events from the encryption layer
func (m *Messenger) handleEncryptionLayerSubscriptions(subscriptions *encryption.Subscriptions) {
go func() {
defer gocommon.LogOnPanic()
for {
select {
case <-subscriptions.SendContactCode:
if err := m.publishContactCode(); err != nil {
m.logger.Error("failed to publish contact code", zap.Error(err))
}
// we also piggy-back to clean up cached messages
if err := m.transport.CleanMessagesProcessed(m.getTimesource().GetCurrentTime() - messageCacheIntervalMs); err != nil {
m.logger.Error("failed to clean processed messages", zap.Error(err))
}
case keys := <-subscriptions.NewHashRatchetKeys:
if m.communitiesManager == nil {
continue
}
if err := m.communitiesManager.NewHashRatchetKeys(keys); err != nil {
m.logger.Error("failed to invalidate cache for decrypted communities", zap.Error(err))
}
case <-subscriptions.Quit:
m.logger.Debug("quitting encryption subscription loop")
return
}
}
}()
}
func (m *Messenger) handleENSVerified(records []*ens.VerificationRecord) {
var contacts []*Contact
for _, record := range records {
m.logger.Info("handling record", zap.Any("record", record))
contact, ok := m.allContacts.Load(record.PublicKey)
if !ok {
m.logger.Info("contact not found")
continue
}
contact.ENSVerified = record.Verified
contact.EnsName = record.Name
contacts = append(contacts, contact)
}
m.logger.Info("handled records", zap.Any("contacts", contacts))
if len(contacts) != 0 {
if err := m.persistence.SaveContacts(contacts); err != nil {
m.logger.Error("failed to save contacts", zap.Error(err))
return
}
}
m.PublishMessengerResponse(&MessengerResponse{Contacts: contacts})
}
func (m *Messenger) handleENSVerificationSubscription(c chan []*ens.VerificationRecord) {
go func() {
defer gocommon.LogOnPanic()
for {
select {
case records, more := <-c:
if !more {
m.logger.Info("No more records, quitting")
return
}
if len(records) != 0 {
m.logger.Info("handling records", zap.Any("records", records))
m.handleENSVerified(records)
}
case <-m.quit:
return
}
}
}()
}
// watchConnectionChange checks the connection status and calls handleConnectionChange when it changes
func (m *Messenger) watchConnectionChange() {
state := m.Online()
// lastCheck, sleepDetentionInSecs and keepAlivePeriod help us recognize when the computer was offline because of sleep, a closed lid, etc.
lastCheck := time.Now().Unix()
sleepDetentionInSecs := int64(20)
keepAlivePeriod := 15 * time.Second // must be lower than sleepDetentionInSecs
processNewState := func(newState bool) {
now := time.Now().Unix()
force := now-lastCheck > sleepDetentionInSecs
lastCheck = now
if !force && state == newState {
return
}
state = newState
m.logger.Debug("connection changed", zap.Bool("online", state), zap.Bool("force", force))
m.handleConnectionChange(state)
}
pollConnectionStatus := func() {
defer gocommon.LogOnPanic()
func() {
for {
select {
case <-time.After(200 * time.Millisecond):
processNewState(m.Online())
case <-m.quit:
return
}
}
}()
}
subscribedConnectionStatus := func(subscription *types.ConnStatusSubscription) {
defer gocommon.LogOnPanic()
defer subscription.Unsubscribe()
ticker := time.NewTicker(keepAlivePeriod)
defer ticker.Stop()
for {
select {
case status := <-subscription.C:
processNewState(status.IsOnline)
case <-ticker.C:
processNewState(m.Online())
case <-m.quit:
return
}
}
}
m.logger.Debug("watching connection changes")
m.handleConnectionChange(state)
waku, err := m.node.GetWakuV2(nil)
if err != nil {
// No waku v2, we can't watch connection changes
// Instead we will poll the connection status.
m.logger.Warn("using WakuV1, can't watch connection changes, this might be have side-effects")
go pollConnectionStatus()
return
}
// Wakuv2 is not going to return an error
// from SubscribeToConnStatusChanges
subscription, _ := waku.SubscribeToConnStatusChanges()
go subscribedConnectionStatus(subscription)
}
// watchChatsToUnmute checks every minute to identify and unmute chats that should no longer be muted.
func (m *Messenger) watchChatsToUnmute() {
m.logger.Debug("Checking for chats to unmute every minute")
go func() {
defer gocommon.LogOnPanic()
for {
// Execute the check immediately upon starting
response := &MessengerResponse{}
currTime := time.Now()
m.allChats.Range(func(chatID string, c *Chat) bool {
chatMuteTill := c.MuteTill
if currTime.After(chatMuteTill) && !chatMuteTill.Equal(time.Time{}) && c.Muted {
err := m.persistence.UnmuteChat(c.ID)
if err != nil {
m.logger.Warn("watchChatsToUnmute error", zap.Any("Couldn't unmute chat", err))
return false
}
c.Muted = false
c.MuteTill = time.Time{}
response.AddChat(c)
}
return true
})
if !response.IsEmpty() {
signal.SendNewMessages(response)
}
// Calculate the time until the next whole minute
now := time.Now()
waitDuration := time.Until(now.Truncate(time.Minute).Add(time.Minute))
// Wait until the next minute
select {
case <-time.After(waitDuration):
// Continue to next iteration
case <-m.quit:
return
}
}
}()
}
// watchCommunitiesToUnmute checks every minute to identify and unmute communities that should no longer be muted.
func (m *Messenger) watchCommunitiesToUnmute() {
m.logger.Debug("Checking for communities to unmute every minute")
go func() {
defer gocommon.LogOnPanic()
for {
// Execute the check immediately upon starting
response, err := m.CheckCommunitiesToUnmute()
if err != nil {
m.logger.Warn("watchCommunitiesToUnmute error", zap.Any("Couldn't unmute communities", err))
} else if !response.IsEmpty() {
signal.SendNewMessages(response)
}
// Calculate the time until the next whole minute
now := time.Now()
waitDuration := time.Until(now.Truncate(time.Minute).Add(time.Minute))
// Wait until the next minute
select {
case <-time.After(waitDuration):
// Continue to next iteration
case <-m.quit:
return
}
}
}()
}
// watchIdentityImageChanges checks for identity image changes and publishes to the contact code topic when they happen
func (m *Messenger) watchIdentityImageChanges() {
m.logger.Debug("watching identity image changes")
if m.multiAccounts == nil {
return
}
channel := m.multiAccounts.SubscribeToIdentityImageChanges()
go func() {
defer gocommon.LogOnPanic()
for {
select {
case change := <-channel:
identityImages, err := m.multiAccounts.GetIdentityImages(m.account.KeyUID)
if err != nil {
m.logger.Error("failed to get profile pictures to save self contact", zap.Error(err))
break
}
identityImagesMap := make(map[string]images.IdentityImage)
for _, img := range identityImages {
identityImagesMap[img.Name] = *img
}
m.selfContact.Images = identityImagesMap
m.publishSelfContactSubscriptions(&SelfContactChangeEvent{ImagesChanged: true})
if change.PublishExpected {
err = m.syncProfilePictures(m.dispatchMessage, identityImages)
if err != nil {
m.logger.Error("failed to sync profile pictures to paired devices", zap.Error(err))
}
err = m.PublishIdentityImage()
if err != nil {
m.logger.Error("failed to publish identity image", zap.Error(err))
}
}
case <-m.quit:
return
}
}
}()
}
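// watchPendingCommunityRequestToJoin checks every 10 minutes for pending
// community join requests that should be cleaned up and deletes them.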
func (m *Messenger) watchPendingCommunityRequestToJoin() {
m.logger.Debug("watching community request to join")
go func() {
defer gocommon.LogOnPanic()
for {
select {
case <-time.After(time.Minute * 10):
_, err := m.CheckAndDeletePendingRequestToJoinCommunity(context.Background(), false)
if err != nil {
m.logger.Error("failed to check and delete pending request to join community", zap.Error(err))
}
case <-m.quit:
return
}
}
}()
}
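// PublishIdentityImage resets the last-published time for the ChatIdentity and
// republishes the contact code so contacts receive the updated identity images.
// If the node is offline, publishing is deferred until the next reconnection.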
func (m *Messenger) PublishIdentityImage() error {
// Reset last published time for ChatIdentity so new contact can receive data
err := m.resetLastPublishedTimeForChatIdentity()
if err != nil {
m.logger.Error("failed to reset publish time", zap.Error(err))
return err
}
// If not online, we schedule it
if !m.Online() {
m.shouldPublishContactCode = true
return nil
}
return m.publishContactCode()
}
// handlePushNotificationClientRegistrations handles registration events
func (m *Messenger) handlePushNotificationClientRegistrations(c chan struct{}) {
go func() {
defer gocommon.LogOnPanic()
for {
_, more := <-c
if !more {
return
}
if err := m.publishContactCode(); err != nil {
m.logger.Error("failed to publish contact code", zap.Error(err))
}
}
}()
}
// InitFilters analyzes chats and contacts in order to set up filters
// which are responsible for retrieving messages.
func (m *Messenger) InitFilters() error {
// Seed the random number generator used for color generation
rand.Seed(time.Now().Unix())
logger := m.logger.With(zap.String("site", "Init"))
// Community requests will arrive in this pubsub topic
err := m.SubscribeToPubsubTopic(shard.DefaultNonProtectedPubsubTopic(), nil)
if err != nil {
return err
}
var (
filtersToInit []transport.FiltersToInitialize
publicKeys []*ecdsa.PublicKey
)
joinedCommunities, err := m.communitiesManager.Joined()
if err != nil {
return err
}
for _, org := range joinedCommunities {
// the org advertises on the public topic derived from the pk
filtersToInit = append(filtersToInit, m.DefaultFilters(org)...)
// This is for status-go versions that didn't have `CommunitySettings`
// We need to ensure communities that existed before community settings
// were introduced will have community settings as well
exists, err := m.communitiesManager.CommunitySettingsExist(org.ID())
if err != nil {
logger.Warn("failed to check if community settings exist", zap.Error(err))
continue
}
if !exists {
communitySettings := communities.CommunitySettings{
CommunityID: org.IDString(),
HistoryArchiveSupportEnabled: true,
}
err = m.communitiesManager.SaveCommunitySettings(communitySettings)
if err != nil {
logger.Warn("failed to save community settings", zap.Error(err))
}
continue
}
// In case we do have settings, but the history archive support is disabled
// for this community, we enable it, as this should be the default for all
// non-admin communities
communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(org.ID())
if err != nil {
logger.Warn("failed to fetch community settings", zap.Error(err))
continue
}
if !org.IsControlNode() && !communitySettings.HistoryArchiveSupportEnabled {
communitySettings.HistoryArchiveSupportEnabled = true
err = m.communitiesManager.UpdateCommunitySettings(*communitySettings)
if err != nil {
logger.Warn("failed to update community settings", zap.Error(err))
}
}
}
spectatedCommunities, err := m.communitiesManager.Spectated()
if err != nil {
return err
}
for _, org := range spectatedCommunities {
filtersToInit = append(filtersToInit, m.DefaultFilters(org)...)
}
// Get chat IDs and public keys from the existing chats.
// TODO: Get only active chats by the query.
chats, err := m.persistence.Chats()
if err != nil {
return err
}
communityInfo := make(map[string]*communities.Community)
var validChats []*Chat
for _, chat := range chats {
if err := chat.Validate(); err != nil {
logger.Warn("failed to validate chat", zap.Error(err))
continue
}
validChats = append(validChats, chat)
}
m.initChatsFirstMessageTimestamp(communityInfo, validChats)
for _, chat := range validChats {
if !chat.Active || chat.Timeline() {
m.allChats.Store(chat.ID, chat)
continue
}
switch chat.ChatType {
case ChatTypePublic, ChatTypeProfile:
filtersToInit = append(filtersToInit, transport.FiltersToInitialize{ChatID: chat.ID})
case ChatTypeCommunityChat:
community, ok := communityInfo[chat.CommunityID]
if !ok {
community, err = m.communitiesManager.GetByIDString(chat.CommunityID)
if err != nil {
return err
}
communityInfo[chat.CommunityID] = community
}
if chat.UnviewedMessagesCount > 0 || chat.UnviewedMentionsCount > 0 {
// Make sure the unread count is 0 for the channels the user cannot view
// It's possible that the user received messages in a channel before permissions were added
canView := community.CanView(&m.identity.PublicKey, chat.CommunityChatID())
if !canView {
chat.UnviewedMessagesCount = 0
chat.UnviewedMentionsCount = 0
}
}
filtersToInit = append(filtersToInit, transport.FiltersToInitialize{ChatID: chat.ID, PubsubTopic: community.PubsubTopic()})
case ChatTypeOneToOne:
pk, err := chat.PublicKey()
if err != nil {
return err
}
publicKeys = append(publicKeys, pk)
case ChatTypePrivateGroupChat:
for _, member := range chat.Members {
publicKey, err := member.PublicKey()
if err != nil {
return errors.Wrapf(err, "invalid public key for member %s in chat %s", member.ID, chat.Name)
}
publicKeys = append(publicKeys, publicKey)
}
default:
return errors.New("invalid chat type")
}
m.allChats.Store(chat.ID, chat)
}
// Timeline and profile chats are deprecated.
// This code can be removed after some reasonable time.
// upsert timeline chat
if !deprecation.ChatProfileDeprecated {
err = m.ensureTimelineChat()
if err != nil {
return err
}
}
// upsert profile chat
if !deprecation.ChatTimelineDeprecated {
err = m.ensureMyOwnProfileChat()
if err != nil {
return err
}
}
// Get chat IDs and public keys from the contacts.
contacts, err := m.persistence.Contacts()
if err != nil {
return err
}
for idx, contact := range contacts {
if err = m.updateContactImagesURL(contact); err != nil {
return err
}
m.allContacts.Store(contact.ID, contacts[idx])
// We only need filters for contacts added by us and not blocked.
if !contact.added() || contact.Blocked {
continue
}
publicKey, err := contact.PublicKey()
if err != nil {
logger.Error("failed to get contact's public key", zap.Error(err))
continue
}
publicKeys = append(publicKeys, publicKey)
}
_, err = m.transport.InitFilters(filtersToInit, publicKeys)
if err != nil {
return err
}
// Init filters for the communities we control
var communityFiltersToInitialize []transport.CommunityFilterToInitialize
controlledCommunities, err := m.communitiesManager.Controlled()
if err != nil {
return err
}
for _, c := range controlledCommunities {
communityFiltersToInitialize = append(communityFiltersToInitialize, transport.CommunityFilterToInitialize{
Shard: c.Shard(),
PrivKey: c.PrivateKey(),
})
}
_, err = m.InitCommunityFilters(communityFiltersToInitialize)
if err != nil {
return err
}
return nil
}
// Shutdown takes care of ensuring a clean shutdown of Messenger
func (m *Messenger) Shutdown() (err error) {
if m == nil {
return nil
}
select {
case _, ok := <-m.quit:
if !ok {
return errors.New("messenger already shutdown")
}
default:
}
close(m.quit)
m.cancel()
m.shutdownWaitGroup.Wait()
for i, task := range m.shutdownTasks {
m.logger.Debug("running shutdown task", zap.Int("n", i))
if tErr := task(); tErr != nil {
m.logger.Info("shutdown task failed", zap.Error(tErr))
if err == nil {
// First error appeared.
err = tErr
} else {
// We return all errors. They will be concatenated in the order of occurrence,
// however, they will also be returned as a single error.
err = errors.Wrap(err, tErr.Error())
}
}
}
return
}
// NOT IMPLEMENTED
func (m *Messenger) SelectMailserver(id string) error {
return ErrNotImplemented
}
// NOT IMPLEMENTED
func (m *Messenger) AddMailserver(enode string) error {
return ErrNotImplemented
}
// NOT IMPLEMENTED
func (m *Messenger) RemoveMailserver(id string) error {
return ErrNotImplemented
}
// NOT IMPLEMENTED
func (m *Messenger) Mailservers() ([]string, error) {
return nil, ErrNotImplemented
}
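// initChatsFirstMessageTimestamp backfills FirstMessageTimestamp for community
// chats that do not have one yet, based on the oldest persisted message per
// chat, and publishes the communities that changed.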
func (m *Messenger) initChatsFirstMessageTimestamp(communityCache map[string]*communities.Community, chats []*Chat) {
communityChats, communityChatIDs := m.filterCommunityChats(chats)
if len(communityChatIDs) == 0 {
return
}
oldestMessageTimestamps, err := m.persistence.OldestMessageWhisperTimestampByChatIDs(communityChatIDs)
if err != nil {
m.logger.Warn("failed to get oldest message timestamps", zap.Error(err))
return
}
changedCommunities := m.processCommunityChats(communityChats, communityCache, oldestMessageTimestamps)
m.saveAndPublishCommunities(changedCommunities)
}
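// filterCommunityChats returns the community chats whose first message
// timestamp is still undefined, together with their chat IDs.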
func (m *Messenger) filterCommunityChats(chats []*Chat) ([]*Chat, []string) {
var communityChats []*Chat
var communityChatIDs []string
for _, chat := range chats {
if chat.CommunityChat() && chat.FirstMessageTimestamp == FirstMessageTimestampUndefined {
communityChats = append(communityChats, chat)
communityChatIDs = append(communityChatIDs, chat.ID)
}
}
return communityChats, communityChatIDs
}
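// processCommunityChats updates the first message timestamp of each given
// community chat, resolving communities through the provided cache, and
// returns the communities that were modified.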
func (m *Messenger) processCommunityChats(communityChats []*Chat, communityCache map[string]*communities.Community, oldestMessageTimestamps map[string]uint64) []*communities.Community {
var changedCommunities []*communities.Community
for _, chat := range communityChats {
community := m.getCommunity(chat.CommunityID, communityCache)
if community == nil {
continue
}
oldestMessageTimestamp, ok := oldestMessageTimestamps[chat.ID]
timestamp := uint32(FirstMessageTimestampNoMessage)
if ok {
if oldestMessageTimestamp == FirstMessageTimestampUndefined {
continue
}
timestamp = whisperToUnixTimestamp(oldestMessageTimestamp)
}
changes, err := m.updateChatFirstMessageTimestampForCommunity(chat, timestamp, community)
if err != nil {
m.logger.Warn("failed to init first message timestamp", zap.Error(err), zap.String("chatID", chat.ID))
continue
}
if changes != nil {
changedCommunities = append(changedCommunities, community)
}
}
return changedCommunities
}
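// getCommunity resolves a community by its ID string, consulting the provided
// cache first to avoid repeated lookups.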
func (m *Messenger) getCommunity(communityID string, communityCache map[string]*communities.Community) *communities.Community {
community, ok := communityCache[communityID]
if ok {
return community
}
community, err := m.communitiesManager.GetByIDString(communityID)
if err != nil {
m.logger.Warn("failed to get community", zap.Error(err), zap.String("communityID", communityID))
return nil
}
communityCache[communityID] = community
return community
}
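// saveAndPublishCommunities persists and publishes the given communities,
// logging failures instead of returning them.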
func (m *Messenger) saveAndPublishCommunities(communities []*communities.Community) {
for _, community := range communities {
err := m.communitiesManager.SaveAndPublish(community)
if err != nil {
m.logger.Warn("failed to save and publish community", zap.Error(err), zap.String("communityID", community.IDString()))
}
}
}
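// addMessagesAndChat adds the chat and messages to the response, persists the
// messages and saves the chat.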
func (m *Messenger) addMessagesAndChat(chat *Chat, messages []*common.Message, response *MessengerResponse) (*MessengerResponse, error) {
response.AddChat(chat)
response.AddMessages(messages)
err := m.persistence.SaveMessages(response.Messages())
if err != nil {
return nil, err
}
return response, m.saveChat(chat)
}
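// reregisterForPushNotifications re-registers with the push notification
// servers using the current options; it is a no-op when the push notification
// client is not set.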
func (m *Messenger) reregisterForPushNotifications() error {
m.logger.Info("contact state changed, re-registering for push notification")
if m.pushNotificationClient == nil {
return nil
}
return m.pushNotificationClient.Reregister(m.pushNotificationOptions())
}
// ReSendChatMessage pulls a message from the database and sends it again
func (m *Messenger) ReSendChatMessage(ctx context.Context, messageID string) error {
return m.reSendRawMessage(ctx, messageID)
}
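// SetLocalPairing marks this messenger as taking part in a local pairing
// session; while set, hasPairedDevices always reports true.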
func (m *Messenger) SetLocalPairing(localPairing bool) {
m.localPairing = localPairing
}
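// hasPairedDevices reports whether more than one enabled installation is known
// for this identity (or a local pairing session is in progress).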
func (m *Messenger) hasPairedDevices() bool {
logger := m.logger.Named("hasPairedDevices")
if m.localPairing {
return true
}
var count int
m.allInstallations.Range(func(installationID string, installation *multidevice.Installation) (shouldContinue bool) {
if installation.Enabled {
count++
}
return true
})
logger.Debug("installations info",
zap.Int("Number of installations", m.allInstallations.Len()),
zap.Int("Number of enabled installations", count))
return count > 1
}
func (m *Messenger) HasPairedDevices() bool {
return m.hasPairedDevices()
}
// sendToPairedDevices will check if we have any paired devices and send to them if necessary
func (m *Messenger) sendToPairedDevices(ctx context.Context, spec common.RawMessage) error {
hasPairedDevices := m.hasPairedDevices()
// We send a message to any paired device
if hasPairedDevices {
_, err := m.sender.SendPrivate(ctx, &m.identity.PublicKey, &spec)
if err != nil {
return err
}
}
return nil
}
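// dispatchPairInstallationMessage sends a pair-installation message to our own
// public key, records the returned message ID on the spec and persists the raw message.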
func (m *Messenger) dispatchPairInstallationMessage(ctx context.Context, spec common.RawMessage) (common.RawMessage, error) {
var err error
var id []byte
id, err = m.sender.SendPairInstallation(ctx, &m.identity.PublicKey, spec)
if err != nil {
return spec, err
}
spec.ID = types.EncodeHex(id)
spec.SendCount++
err = m.persistence.SaveRawMessage(&spec)
if err != nil {
return spec, err
}
return spec, nil
}
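// dispatchMessage routes a raw message to the appropriate send primitive based
// on the chat type (one-to-one, public, profile, community or private group
// chat), then records the message ID, send count and last-sent time and
// persists the raw message.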
func (m *Messenger) dispatchMessage(ctx context.Context, rawMessage common.RawMessage) (common.RawMessage, error) {
var err error
var id []byte
logger := m.logger.With(zap.String("site", "dispatchMessage"), zap.String("chatID", rawMessage.LocalChatID))
chat, ok := m.allChats.Load(rawMessage.LocalChatID)
if !ok {
return rawMessage, errors.New("no chat found")
}
switch chat.ChatType {
case ChatTypeOneToOne:
publicKey, err := chat.PublicKey()
if err != nil {
return rawMessage, err
}
// SendPrivate will alter the message identity and possibly the datasyncid, so we save an unchanged
// message for sending to paired devices later
specCopyForPairedDevices := rawMessage
if !common.IsPubKeyEqual(publicKey, &m.identity.PublicKey) || rawMessage.SkipEncryptionLayer {
id, err = m.sender.SendPrivate(ctx, publicKey, &rawMessage)
if err != nil {
return rawMessage, err
}
}
err = m.sendToPairedDevices(ctx, specCopyForPairedDevices)
if err != nil {
return rawMessage, err
}
case ChatTypePublic, ChatTypeProfile:
logger.Debug("sending public message", zap.String("chatName", chat.Name))
id, err = m.sender.SendPublic(ctx, chat.ID, rawMessage)
if err != nil {
return rawMessage, err
}
case ChatTypeCommunityChat:
community, err := m.communitiesManager.GetByIDString(chat.CommunityID)
if err != nil {
return rawMessage, err
}
rawMessage.PubsubTopic = community.PubsubTopic()
canPost, err := m.communitiesManager.CanPost(&m.identity.PublicKey, chat.CommunityID, chat.CommunityChatID(), rawMessage.MessageType)
if err != nil {
return rawMessage, err
}
if !canPost {
m.logger.Error("can't post on chat",
zap.String("chatID", chat.ID),
zap.String("chatName", chat.Name),
zap.Any("messageType", rawMessage.MessageType),
)
return rawMessage, fmt.Errorf("can't post message type '%d' on chat '%s'", rawMessage.MessageType, chat.ID)
}
logger.Debug("sending community chat message", zap.String("chatName", chat.Name))
isCommunityEncrypted, err := m.communitiesManager.IsEncrypted(chat.CommunityID)
if err != nil {
return rawMessage, err
}
isChannelEncrypted, err := m.communitiesManager.IsChannelEncrypted(chat.CommunityID, chat.ID)
if err != nil {
return rawMessage, err
}
isEncrypted := isCommunityEncrypted || isChannelEncrypted
if !isEncrypted {
id, err = m.sender.SendPublic(ctx, chat.ID, rawMessage)
if err != nil {
return rawMessage, err
}
} else {
rawMessage.CommunityID, err = types.DecodeHex(chat.CommunityID)
if err != nil {
return rawMessage, err
}
if isChannelEncrypted {
rawMessage.HashRatchetGroupID = []byte(chat.ID)
} else {
rawMessage.HashRatchetGroupID = rawMessage.CommunityID
}
id, err = m.sender.SendCommunityMessage(ctx, &rawMessage)
if err != nil {
return rawMessage, err
}
}
case ChatTypePrivateGroupChat:
logger.Debug("sending group message", zap.String("chatName", chat.Name))
if rawMessage.Recipients == nil {
rawMessage.Recipients, err = chat.MembersAsPublicKeys()
if err != nil {
return rawMessage, err
}
}
hasPairedDevices := m.hasPairedDevices()
if !hasPairedDevices {
// Filter out my key from the recipients
n := 0
for _, recipient := range rawMessage.Recipients {
if !common.IsPubKeyEqual(recipient, &m.identity.PublicKey) {
rawMessage.Recipients[n] = recipient
n++
}
}
rawMessage.Recipients = rawMessage.Recipients[:n]
}
// We won't really send the message out if there are no recipients
if len(rawMessage.Recipients) == 0 {
rawMessage.Sent = true
}
// We skip wrapping in some cases (emoji reactions for example)
if !rawMessage.SkipGroupMessageWrap {
rawMessage.MessageType = protobuf.ApplicationMetadataMessage_MEMBERSHIP_UPDATE_MESSAGE
}
id, err = m.sender.SendGroup(ctx, rawMessage.Recipients, rawMessage)
if err != nil {
return rawMessage, err
}
default:
return rawMessage, errors.New("chat type not supported")
}
rawMessage.ID = types.EncodeHex(id)
rawMessage.SendCount++
rawMessage.LastSent = m.getTimesource().GetCurrentTime()
err = m.persistence.SaveRawMessage(&rawMessage)
if err != nil {
return rawMessage, err
}
if m.dispatchMessageTestCallback != nil {
m.dispatchMessageTestCallback(rawMessage)
}
return rawMessage, nil
}
// SendChatMessage takes a minimal message and sends it based on the corresponding chat
func (m *Messenger) SendChatMessage(ctx context.Context, message *common.Message) (*MessengerResponse, error) {
return m.sendChatMessage(ctx, message)
}
// SendChatMessages takes an array of messages and sends them based on the corresponding chats
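// Caller sketch (hypothetical messages msg1 and msg2 built by the caller):
// sending several image messages in a single call groups them into one album,
// since the generated album ID and image count are set on each image message.
//
//	resp, err := messenger.SendChatMessages(ctx, []*common.Message{msg1, msg2})
//	if err != nil {
//		// handle the error
//	}
//	_ = resp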
func (m *Messenger) SendChatMessages(ctx context.Context, messages []*common.Message) (*MessengerResponse, error) {
var response MessengerResponse
generatedAlbumID, err := uuid.NewRandom()
if err != nil {
return nil, err
}
imagesCount := uint32(0)
for _, message := range messages {
if message.ContentType == protobuf.ChatMessage_IMAGE {
imagesCount++
}
}
for _, message := range messages {
if message.ContentType == protobuf.ChatMessage_IMAGE && len(messages) > 1 {
err = message.SetAlbumIDAndImagesCount(generatedAlbumID.String(), imagesCount)
if err != nil {
return nil, err
}
}
messageResponse, err := m.SendChatMessage(ctx, message)
if err != nil {
return nil, err
}
err = response.Merge(messageResponse)
if err != nil {
return nil, err
}
}
return &response, nil
}
// sendChatMessage takes a minimal message and sends it based on the corresponding chat
func (m *Messenger) sendChatMessage(ctx context.Context, message *common.Message) (*MessengerResponse, error) {
displayName, err := m.settings.DisplayName()
if err != nil {
return nil, err
}
message.DisplayName = displayName
replacedText, err := m.mentionsManager.ReplaceWithPublicKey(message.ChatId, message.Text)
if err == nil {
message.Text = replacedText
} else {
m.logger.Error("failed to replace text with public key", zap.String("chatID", message.ChatId), zap.String("text", message.Text))
}
if len(message.ImagePath) != 0 {
err := message.LoadImage()
if err != nil {
return nil, err
}
} else if len(message.CommunityID) != 0 {
community, err := m.communitiesManager.GetByIDString(message.CommunityID)
if err != nil {
return nil, err
}
wrappedCommunity, err := community.ToProtocolMessageBytes()
if err != nil {
return nil, err
}
message.Payload = &protobuf.ChatMessage_Community{Community: wrappedCommunity}
message.Shard = community.Shard().Protobuffer()
message.ContentType = protobuf.ChatMessage_COMMUNITY
} else if len(message.AudioPath) != 0 {
err := message.LoadAudio()
if err != nil {
return nil, err
}
}
// We consider link previews non-critical data, so we do not want to block
// messages from being sent.
unfurledLinks, err := message.ConvertLinkPreviewsToProto()
if err != nil {
m.logger.Error("failed to convert link previews", zap.Error(err))
} else {
message.UnfurledLinks = unfurledLinks
}
unfurledStatusLinks, err := message.ConvertStatusLinkPreviewsToProto()
if err != nil {
m.logger.Error("failed to convert status link previews", zap.Error(err))
} else {
message.UnfurledStatusLinks = unfurledStatusLinks
}
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(message.ChatId)
if !ok {
return nil, ErrChatNotFoundError
}
err = m.handleStandaloneChatIdentity(chat)
if err != nil {
return nil, err
}
err = extendMessageFromChat(message, chat, &m.identity.PublicKey, m.getTimesource())
if err != nil {
return nil, err
}
err = m.addContactRequestPropagatedState(message)
if err != nil {
return nil, err
}
encodedMessage, err := m.encodeChatEntity(chat, message)
if err != nil {
return nil, err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
SendPushNotification: m.featureFlags.PushNotifications,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_CHAT_MESSAGE,
ResendType: chat.DefaultResendType(),
}
// We want to save the raw message before dispatching it, to avoid race conditions
// since it might get dispatched and confirmed before it's saved.
	// This is not the best solution; it would probably be better to split
	// the sent status into a different table and join it when querying messages,
	// but that's a much larger change and would require an expensive migration of clients.
rawMessage.BeforeDispatch = func(rawMessage *common.RawMessage) error {
if rawMessage.Sent {
message.OutgoingStatus = common.OutgoingStatusSent
}
message.ID = rawMessage.ID
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return err
}
err = chat.UpdateFromMessage(message, m.getTimesource())
if err != nil {
return err
}
err := m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return err
}
var syncMessageType peersyncing.SyncMessageType
if chat.OneToOne() {
syncMessageType = peersyncing.SyncMessageOneToOneType
} else if chat.CommunityChat() {
syncMessageType = peersyncing.SyncMessageCommunityType
} else if chat.PrivateGroupChat() {
syncMessageType = peersyncing.SyncMessagePrivateGroup
}
wrappedMessage, err := v1protocol.WrapMessageV1(rawMessage.Payload, rawMessage.MessageType, rawMessage.Sender)
if err != nil {
return errors.Wrap(err, "failed to wrap message")
}
syncMessage := peersyncing.SyncMessage{
Type: syncMessageType,
ID: types.Hex2Bytes(rawMessage.ID),
ChatID: []byte(chat.ID),
Payload: wrappedMessage,
Timestamp: m.transport.GetCurrentTime() / 1000,
}
// If the chat type is not supported, skip saving it
if syncMessageType == 0 {
return nil
}
// ensure that the message is saved only once
rawMessage.BeforeDispatch = nil
return m.peersyncing.Add(syncMessage)
}
rawMessage, err = m.dispatchMessage(ctx, rawMessage)
if err != nil {
return nil, err
}
msg, err := m.pullMessagesAndResponsesFromDB([]*common.Message{message})
if err != nil {
return nil, err
}
if err := m.updateChatFirstMessageTimestamp(chat, whisperToUnixTimestamp(message.WhisperTimestamp), &response); err != nil {
return nil, err
}
response.SetMessages(msg)
response.AddChat(chat)
m.logger.Debug("inside sendChatMessage",
zap.String("id", message.ID),
zap.String("from", message.From),
zap.String("displayName", message.DisplayName),
zap.String("ChatId", message.ChatId),
zap.String("Clock", strconv.FormatUint(message.Clock, 10)),
zap.String("Timestamp", strconv.FormatUint(message.Timestamp, 10)),
)
err = m.prepareMessages(response.messages)
if err != nil {
return nil, err
}
return &response, m.saveChat(chat)
}
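// whisperToUnixTimestamp converts a whisper timestamp expressed in milliseconds
// to a Unix timestamp in seconds.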
func whisperToUnixTimestamp(whisperTimestamp uint64) uint32 {
return uint32(whisperTimestamp / 1000)
}
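// updateChatFirstMessageTimestamp records the timestamp of the first message in a community chat
// and, when this device is the community's control node, propagates the change to the community
// and to the response.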
func (m *Messenger) updateChatFirstMessageTimestamp(chat *Chat, timestamp uint32, response *MessengerResponse) error {
// Currently supported only for communities
if !chat.CommunityChat() {
return nil
}
community, err := m.communitiesManager.GetByIDString(chat.CommunityID)
if err != nil {
return err
}
if community.IsControlNode() && chat.UpdateFirstMessageTimestamp(timestamp) {
community, changes, err := m.communitiesManager.EditChatFirstMessageTimestamp(community.ID(), chat.ID, chat.FirstMessageTimestamp)
if err != nil {
return err
}
response.AddCommunity(community)
response.CommunityChanges = append(response.CommunityChanges, changes)
}
return nil
}
func (m *Messenger) updateChatFirstMessageTimestampForCommunity(chat *Chat, timestamp uint32, community *communities.Community) (*communities.CommunityChanges, error) {
if community.IsControlNode() && chat.UpdateFirstMessageTimestamp(timestamp) {
return m.communitiesManager.UpdateChatFirstMessageTimestamp(community, chat.ID, chat.FirstMessageTimestamp)
}
return nil, nil
}
func (m *Messenger) ShareImageMessage(request *requests.ShareImageMessage) (*MessengerResponse, error) {
if err := request.Validate(); err != nil {
return nil, err
}
response := &MessengerResponse{}
msg, err := m.persistence.MessageByID(request.MessageID)
if err != nil {
return nil, err
}
var messages []*common.Message
for _, pk := range request.Users {
message := common.NewMessage()
message.ChatId = pk.String()
message.Payload = msg.Payload
message.Text = "This message has been shared with you"
message.ContentType = protobuf.ChatMessage_IMAGE
messages = append(messages, message)
r, err := m.CreateOneToOneChat(&requests.CreateOneToOneChat{ID: pk})
if err != nil {
return nil, err
}
if err := response.Merge(r); err != nil {
return nil, err
}
}
sendMessagesResponse, err := m.SendChatMessages(context.Background(), messages)
if err != nil {
return nil, err
}
if err := response.Merge(sendMessagesResponse); err != nil {
return nil, err
}
return response, nil
}
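// InstallationID returns the installation ID of the current device.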
func (m *Messenger) InstallationID() string {
return m.installationID
}
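// KeyUID returns the key UID of the account this messenger is running with.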
func (m *Messenger) KeyUID() string {
return m.account.KeyUID
}
// syncChat syncs a chat with paired devices
func (m *Messenger) syncChat(ctx context.Context, chatToSync *Chat, rawMessageHandler RawMessageHandler) error {
var err error
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncChat{
Clock: clock,
Id: chatToSync.ID,
Name: chatToSync.Name,
ChatType: uint32(chatToSync.ChatType),
Active: chatToSync.Active,
}
chatMuteTill, _ := time.Parse(time.RFC3339, chatToSync.MuteTill.Format(time.RFC3339))
if chatToSync.Muted && chatMuteTill.Equal(time.Time{}) {
// Only set Muted if it is "permanently" muted
syncMessage.Muted = true
}
if chatToSync.OneToOne() {
syncMessage.Name = "" // The Name is useless in 1-1 chats
}
if chatToSync.PrivateGroupChat() {
syncMessage.MembershipUpdateEvents = make([]*protobuf.MembershipUpdateEvents, len(chatToSync.MembershipUpdates))
for i, membershipUpdate := range chatToSync.MembershipUpdates {
syncMessage.MembershipUpdateEvents[i] = &protobuf.MembershipUpdateEvents{
Clock: membershipUpdate.ClockValue,
Type: uint32(membershipUpdate.Type),
Members: membershipUpdate.Members,
Name: membershipUpdate.Name,
Signature: membershipUpdate.Signature,
ChatId: membershipUpdate.ChatID,
From: membershipUpdate.From,
RawPayload: membershipUpdate.RawPayload,
Color: membershipUpdate.Color,
Image: membershipUpdate.Image,
}
}
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CHAT,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
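// syncClearHistory informs paired devices that the history of the given public chat was cleared.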
func (m *Messenger) syncClearHistory(ctx context.Context, publicChat *Chat, rawMessageHandler RawMessageHandler) error {
var err error
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncClearHistory{
ChatId: publicChat.ID,
ClearedAt: publicChat.DeletedAtClockValue,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CLEAR_HISTORY,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
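// syncChatRemoving informs paired devices that the chat with the given id was removed.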
func (m *Messenger) syncChatRemoving(ctx context.Context, id string, rawMessageHandler RawMessageHandler) error {
var err error
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncChatRemoved{
Clock: clock,
Id: id,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CHAT_REMOVED,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
// syncContact syncs a contact with paired devices
func (m *Messenger) syncContact(ctx context.Context, contact *Contact, rawMessageHandler RawMessageHandler) error {
var err error
if contact.IsSyncing {
return nil
}
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := m.buildSyncContactMessage(contact)
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_INSTALLATION_CONTACT_V2,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
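// propagateSyncInstallationCommunityWithHRKeys attaches the community's hash ratchet encryption keys
// (v1 and v2, including per-channel keys) to the sync message so paired devices can decrypt community content.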
func (m *Messenger) propagateSyncInstallationCommunityWithHRKeys(msg *protobuf.SyncInstallationCommunity, c *communities.Community) error {
communityKeys, err := m.encryptor.GetAllHRKeysMarshaledV1(c.ID())
if err != nil {
return err
}
msg.EncryptionKeysV1 = communityKeys
communityAndChannelKeys := [][]byte{}
communityKeys, err = m.encryptor.GetAllHRKeysMarshaledV2(c.ID())
if err != nil {
return err
}
if len(communityKeys) > 0 {
communityAndChannelKeys = append(communityAndChannelKeys, communityKeys)
}
for channelID := range c.Chats() {
channelKeys, err := m.encryptor.GetAllHRKeysMarshaledV2([]byte(c.IDString() + channelID))
if err != nil {
return err
}
if len(channelKeys) > 0 {
communityAndChannelKeys = append(communityAndChannelKeys, channelKeys)
}
}
msg.EncryptionKeysV2 = communityAndChannelKeys
return nil
}
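// buildSyncInstallationCommunity builds the SyncInstallationCommunity protobuf for a community,
// including its settings, the synced control node and the hash ratchet encryption keys.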
func (m *Messenger) buildSyncInstallationCommunity(community *communities.Community, clock uint64) (*protobuf.SyncInstallationCommunity, error) {
communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(community.ID())
if err != nil {
return nil, err
}
syncControlNode, err := m.communitiesManager.GetSyncControlNode(community.ID())
if err != nil {
return nil, err
}
syncMessage, err := community.ToSyncInstallationCommunityProtobuf(clock, communitySettings, syncControlNode)
if err != nil {
return nil, err
}
err = m.propagateSyncInstallationCommunityWithHRKeys(syncMessage, community)
if err != nil {
return nil, err
}
return syncMessage, nil
}
func (m *Messenger) syncCommunity(ctx context.Context, community *communities.Community, rawMessageHandler RawMessageHandler) error {
logger := m.logger.Named("syncCommunity")
if !m.hasPairedDevices() {
logger.Debug("device has no paired devices")
return nil
}
logger.Debug("device has paired device(s)")
clock, chat := m.getLastClockWithRelatedChat()
syncMessage, err := m.buildSyncInstallationCommunity(community, clock)
if err != nil {
return err
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_INSTALLATION_COMMUNITY,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
logger.Debug("message dispatched")
chat.LastClockValue = clock
return m.saveChat(chat)
}
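// SyncBookmark syncs a browser bookmark with paired devices.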
func (m *Messenger) SyncBookmark(ctx context.Context, bookmark *browsers.Bookmark, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncBookmark{
Clock: clock,
Url: bookmark.URL,
Name: bookmark.Name,
ImageUrl: bookmark.ImageURL,
Removed: bookmark.Removed,
DeletedAt: bookmark.DeletedAt,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_BOOKMARK,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
func (m *Messenger) SyncEnsNamesWithDispatchMessage(ctx context.Context, usernameDetail *ensservice.UsernameDetail) error {
return m.syncEnsUsernameDetail(ctx, usernameDetail, m.dispatchMessage)
}
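// syncEnsUsernameDetails syncs all stored ENS username details with paired devices.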
func (m *Messenger) syncEnsUsernameDetails(ctx context.Context, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
ensNameDetails, err := m.getEnsUsernameDetails()
if err != nil {
return err
}
for _, d := range ensNameDetails {
if err = m.syncEnsUsernameDetail(ctx, d, rawMessageHandler); err != nil {
return err
}
}
return nil
}
func (m *Messenger) saveEnsUsernameDetailProto(syncMessage *protobuf.SyncEnsUsernameDetail) (*ensservice.UsernameDetail, error) {
ud := &ensservice.UsernameDetail{
Username: syncMessage.Username,
Clock: syncMessage.Clock,
ChainID: syncMessage.ChainId,
Removed: syncMessage.Removed,
}
db := ensservice.NewEnsDatabase(m.database)
err := db.SaveOrUpdateEnsUsername(ud)
if err != nil {
return nil, err
}
return ud, nil
}
func (m *Messenger) HandleSyncEnsUsernameDetail(state *ReceivedMessageState, syncMessage *protobuf.SyncEnsUsernameDetail, statusMessage *v1protocol.StatusMessage) error {
ud, err := m.saveEnsUsernameDetailProto(syncMessage)
if err != nil {
return err
}
state.Response.AddEnsUsernameDetail(ud)
return nil
}
func (m *Messenger) syncEnsUsernameDetail(ctx context.Context, usernameDetail *ensservice.UsernameDetail, rawMessageHandler RawMessageHandler) error {
syncMessage := &protobuf.SyncEnsUsernameDetail{
Clock: usernameDetail.Clock,
Username: usernameDetail.Username,
ChainId: usernameDetail.ChainID,
Removed: usernameDetail.Removed,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
_, chat := m.getLastClockWithRelatedChat()
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_ENS_USERNAME_DETAIL,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
return err
}
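// syncAccountCustomizationColor syncs the account's customization color with paired devices.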
func (m *Messenger) syncAccountCustomizationColor(ctx context.Context, acc *multiaccounts.Account) error {
if !m.hasPairedDevices() {
return nil
}
_, chat := m.getLastClockWithRelatedChat()
message := &protobuf.SyncAccountCustomizationColor{
KeyUid: acc.KeyUID,
CustomizationColor: string(acc.CustomizationColor),
UpdatedAt: acc.CustomizationColorClock,
}
encodedMessage, err := proto.Marshal(message)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_ACCOUNT_CUSTOMIZATION_COLOR,
ResendType: common.ResendTypeDataSync,
}
_, err = m.dispatchMessage(ctx, rawMessage)
return err
}
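// SyncTrustedUser syncs the trust status of the given public key with paired devices.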
func (m *Messenger) SyncTrustedUser(ctx context.Context, publicKey string, ts verification.TrustStatus, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncTrustedUser{
Clock: clock,
Id: publicKey,
Status: protobuf.SyncTrustedUser_TrustStatus(ts),
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_TRUSTED_USER,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
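// SyncVerificationRequest syncs an identity verification request with paired devices.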
func (m *Messenger) SyncVerificationRequest(ctx context.Context, vr *verification.Request, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncVerificationRequest{
Id: vr.ID,
Clock: clock,
From: vr.From,
To: vr.To,
Challenge: vr.Challenge,
Response: vr.Response,
RequestedAt: vr.RequestedAt,
RepliedAt: vr.RepliedAt,
VerificationStatus: protobuf.SyncVerificationRequest_VerificationStatus(vr.RequestStatus),
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_VERIFICATION_REQUEST,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
// RetrieveAll retrieves messages from all filters, processes them and returns a
// MessengerResponse to the client
func (m *Messenger) RetrieveAll() (*MessengerResponse, error) {
chatWithMessages, err := m.transport.RetrieveRawAll()
if err != nil {
return nil, err
}
return m.handleRetrievedMessages(chatWithMessages, true, false)
}
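// StartRetrieveMessagesLoop starts a goroutine that processes all incoming messages on every tick
// until the cancel channel is closed.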
func (m *Messenger) StartRetrieveMessagesLoop(tick time.Duration, cancel <-chan struct{}) {
m.shutdownWaitGroup.Add(1)
go func() {
defer gocommon.LogOnPanic()
defer m.shutdownWaitGroup.Done()
ticker := time.NewTicker(tick)
defer ticker.Stop()
for {
select {
case <-ticker.C:
m.ProcessAllMessages()
case <-cancel:
return
}
}
}()
}
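// ProcessAllMessages retrieves all pending messages and publishes the resulting response to the client.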
func (m *Messenger) ProcessAllMessages() {
response, err := m.RetrieveAll()
if err != nil {
m.logger.Error("failed to retrieve raw messages", zap.Error(err))
return
}
m.PublishMessengerResponse(response)
}
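// PublishMessengerResponse signals a non-empty response to the client and pushes its
// notifications as local notifications.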
func (m *Messenger) PublishMessengerResponse(response *MessengerResponse) {
if response.IsEmpty() {
return
}
notifications := response.Notifications()
	// Clear notifications, as they are not used for now
response.ClearNotifications()
signal.SendNewMessages(response)
localnotifications.PushMessages(notifications)
}
func (m *Messenger) GetStats() types.StatsSummary {
return m.transport.GetStats()
}
func (m *Messenger) GetTransport() *transport.Transport {
return m.transport
}
type CurrentMessageState struct {
// Message is the protobuf message received
Message *protobuf.ChatMessage
// MessageID is the ID of the message
MessageID string
// WhisperTimestamp is the whisper timestamp of the message
WhisperTimestamp uint64
// Contact is the contact associated with the author of the message
Contact *Contact
// PublicKey is the public key of the author of the message
PublicKey *ecdsa.PublicKey
StatusMessage *v1protocol.StatusMessage
}
type ReceivedMessageState struct {
	// State of the message being processed
CurrentMessageState *CurrentMessageState
// AllChats in memory
AllChats *chatMap
// All contacts in memory
AllContacts *contactMap
// List of contacts modified
ModifiedContacts *stringBoolMap
// All installations in memory
AllInstallations *installationMap
ModifiedInstallations *stringBoolMap
	// List of installations targeted at this device that were modified
TargetedInstallations *stringBoolMap
// Map of existing messages
ExistingMessagesMap map[string]bool
// EmojiReactions is a list of emoji reactions for the current batch
// indexed by from-message-id-emoji-type
EmojiReactions map[string]*EmojiReaction
// GroupChatInvitations is a list of invitation requests or rejections
GroupChatInvitations map[string]*GroupChatInvitation
// Response to the client
Response *MessengerResponse
ResolvePrimaryName func(string) (string, error)
// Timesource is a time source for clock values/timestamps.
Timesource common.TimeSource
AllBookmarks map[string]*browsers.Bookmark
AllVerificationRequests []*verification.Request
AllTrustStatus map[string]verification.TrustStatus
}
// addNewMessageNotification takes a common.Message and generates a new NotificationBody and appends it to the
// []Response.Notifications if the message is m.New
func (r *ReceivedMessageState) addNewMessageNotification(publicKey ecdsa.PublicKey, m *common.Message, responseTo *common.Message, profilePicturesVisibility int) error {
if !m.New {
return nil
}
pubKey, err := m.GetSenderPubKey()
if err != nil {
return err
}
contactID := contactIDFromPublicKey(pubKey)
chat, ok := r.AllChats.Load(m.LocalChatID)
if !ok {
return fmt.Errorf("chat ID '%s' not present", m.LocalChatID)
}
contact, ok := r.AllContacts.Load(contactID)
if !ok {
return fmt.Errorf("contact ID '%s' not present", contactID)
}
if !chat.Muted {
if showMessageNotification(publicKey, m, chat, responseTo) {
notification, err := NewMessageNotification(m.ID, m, chat, contact, r.ResolvePrimaryName, profilePicturesVisibility)
if err != nil {
return err
}
r.Response.AddNotification(notification)
}
}
return nil
}
// updateExistingActivityCenterNotification updates the AC notification if it exists and hasn't been read yet
func (r *ReceivedMessageState) updateExistingActivityCenterNotification(publicKey ecdsa.PublicKey, m *Messenger, message *common.Message, responseTo *common.Message) error {
notification, err := m.persistence.GetActivityCenterNotificationByID(types.FromHex(message.ID))
if err != nil {
return err
}
if notification == nil || notification.Read {
return nil
}
notification.Message = message
notification.ReplyMessage = responseTo
notification.UpdatedAt = m.GetCurrentTimeInMillis()
err = m.addActivityCenterNotification(r.Response, notification, nil)
if err != nil {
return err
}
return nil
}
// isCommunityJoinedBeforeClock returns whether the community was joined before the given clock value
func (m *Messenger) isCommunityJoinedBeforeClock(publicKey ecdsa.PublicKey, communityID string, clock uint64) (bool, error) {
community, err := m.communitiesManager.GetByIDString(communityID)
if err != nil {
return false, err
}
if !community.Joined() || clock < uint64(community.JoinedAt()) {
joinedClock, err := m.communitiesManager.GetCommunityRequestToJoinClock(&publicKey, communityID)
if err != nil {
return false, err
}
// no request to join, or request to join is after the message
if joinedClock == 0 || clock < joinedClock {
return false, nil
}
return true, nil
}
return true, nil
}
// addNewActivityCenterNotification takes a common.Message and generates a new ActivityCenterNotification and appends it to the
// []Response.ActivityCenterNotifications if the message is m.New
func (r *ReceivedMessageState) addNewActivityCenterNotification(publicKey ecdsa.PublicKey, m *Messenger, message *common.Message, responseTo *common.Message) error {
if !message.New {
return nil
}
chat, ok := r.AllChats.Load(message.LocalChatID)
if !ok {
return fmt.Errorf("chat ID '%s' not present", message.LocalChatID)
}
isNotification, notificationType := showMentionOrReplyActivityCenterNotification(publicKey, message, chat, responseTo)
if !isNotification {
return nil
}
if chat.CommunityChat() {
// Ignore mentions & replies in community before joining
ok, err := m.isCommunityJoinedBeforeClock(publicKey, chat.CommunityID, message.Clock)
if err != nil || !ok {
return nil
}
}
// Use albumId as notificationId to prevent multiple notifications
	// for the same message with multiple images
var notificationID string
image := message.GetImage()
var albumMessages = []*common.Message{}
if image != nil && image.GetAlbumId() != "" {
notificationID = image.GetAlbumId()
album, err := m.persistence.albumMessages(message.LocalChatID, image.AlbumId)
if err != nil {
return err
}
if m.httpServer != nil {
err = m.prepareMessagesList(album)
if err != nil {
return err
}
}
albumMessages = album
} else {
notificationID = message.ID
}
notification := &ActivityCenterNotification{
ID: types.FromHex(notificationID),
Name: chat.Name,
Message: message,
ReplyMessage: responseTo,
Type: notificationType,
Timestamp: message.WhisperTimestamp,
ChatID: chat.ID,
CommunityID: chat.CommunityID,
Author: message.From,
UpdatedAt: m.GetCurrentTimeInMillis(),
AlbumMessages: albumMessages,
Read: message.Seen,
}
return m.addActivityCenterNotification(r.Response, notification, nil)
}
func (m *Messenger) buildMessageState() *ReceivedMessageState {
return &ReceivedMessageState{
AllChats: m.allChats,
AllContacts: m.allContacts,
ModifiedContacts: new(stringBoolMap),
AllInstallations: m.allInstallations,
ModifiedInstallations: m.modifiedInstallations,
TargetedInstallations: new(stringBoolMap),
ExistingMessagesMap: make(map[string]bool),
EmojiReactions: make(map[string]*EmojiReaction),
GroupChatInvitations: make(map[string]*GroupChatInvitation),
Response: &MessengerResponse{},
Timesource: m.getTimesource(),
ResolvePrimaryName: m.ResolvePrimaryName,
AllBookmarks: make(map[string]*browsers.Bookmark),
AllTrustStatus: make(map[string]verification.TrustStatus),
}
}
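// outputToCSV appends a tab-separated record of the received message to the CSV file
// when CSV output is enabled.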
func (m *Messenger) outputToCSV(timestamp uint32, messageID types.HexBytes, from string, topic types.TopicType, chatID string, msgType protobuf.ApplicationMetadataMessage_Type, parsedMessage interface{}) {
if !m.outputCSV {
return
}
msgJSON, err := json.Marshal(parsedMessage)
if err != nil {
m.logger.Error("could not marshall message", zap.Error(err))
return
}
line := fmt.Sprintf("%d\t%s\t%s\t%s\t%s\t%s\t%s\n", timestamp, messageID.String(), from, topic.String(), chatID, msgType, msgJSON)
_, err = m.csvFile.Write([]byte(line))
if err != nil {
m.logger.Error("could not write to csv", zap.Error(err))
return
}
}
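// shouldSkipDuplicate reports whether an already-processed message of the given type can be skipped.
// Some types, such as community descriptions, are allowed to be re-processed.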
func (m *Messenger) shouldSkipDuplicate(messageType protobuf.ApplicationMetadataMessage_Type) bool {
// Permit re-processing of ApplicationMetadataMessage_COMMUNITY_DESCRIPTION messages,
// as they may be queued pending receipt of decryption keys.
allowedDuplicateTypes := map[protobuf.ApplicationMetadataMessage_Type]struct{}{
protobuf.ApplicationMetadataMessage_COMMUNITY_DESCRIPTION: struct{}{},
}
if _, isAllowedDuplicate := allowedDuplicateTypes[messageType]; isAllowedDuplicate {
return false
}
return true
}
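// handleImportedMessages decodes and handles messages imported from an external source (currently Discord)
// and persists the resulting messages, authors and attachments.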
func (m *Messenger) handleImportedMessages(messagesToHandle map[transport.Filter][]*types.Message) error {
messageState := m.buildMessageState()
logger := m.logger.With(zap.String("site", "handleImportedMessages"))
for filter, messages := range messagesToHandle {
for _, shhMessage := range messages {
handleMessageResponse, err := m.sender.HandleMessages(shhMessage)
if err != nil {
logger.Info("failed to decode messages", zap.Error(err))
continue
}
statusMessages := handleMessageResponse.StatusMessages
for _, msg := range statusMessages {
logger := logger.With(zap.String("message-id", msg.TransportLayer.Message.ThirdPartyID))
logger.Debug("processing message")
publicKey := msg.SigPubKey()
senderID := contactIDFromPublicKey(publicKey)
if len(msg.EncryptionLayer.HashRatchetInfo) != 0 {
err := m.communitiesManager.NewHashRatchetKeys(msg.EncryptionLayer.HashRatchetInfo)
if err != nil {
m.logger.Warn("failed to invalidate communities description cache", zap.Error(err))
}
}
// Don't process duplicates
messageID := msg.TransportLayer.Message.ThirdPartyID
exists, err := m.messageExists(messageID, messageState.ExistingMessagesMap)
if err != nil {
logger.Warn("failed to check message exists", zap.Error(err))
}
if exists && m.shouldSkipDuplicate(msg.ApplicationLayer.Type) {
logger.Debug("skipping duplicate", zap.String("messageID", messageID))
continue
}
var contact *Contact
if c, ok := messageState.AllContacts.Load(senderID); ok {
contact = c
} else {
c, err := buildContact(senderID, publicKey)
if err != nil {
logger.Info("failed to build contact", zap.Error(err))
continue
}
contact = c
messageState.AllContacts.Store(senderID, contact)
}
messageState.CurrentMessageState = &CurrentMessageState{
MessageID: messageID,
WhisperTimestamp: uint64(msg.TransportLayer.Message.Timestamp) * 1000,
Contact: contact,
PublicKey: publicKey,
StatusMessage: msg,
}
if msg.ApplicationLayer.Payload != nil {
logger.Debug("Handling parsed message")
switch msg.ApplicationLayer.Type {
case protobuf.ApplicationMetadataMessage_CHAT_MESSAGE:
err = m.handleChatMessageProtobuf(messageState, msg.ApplicationLayer.Payload, msg, filter, true)
if err != nil {
logger.Warn("failed to handle ChatMessage", zap.Error(err))
continue
}
case protobuf.ApplicationMetadataMessage_PIN_MESSAGE:
err = m.handlePinMessageProtobuf(messageState, msg.ApplicationLayer.Payload, msg, filter, true)
if err != nil {
logger.Warn("failed to handle PinMessage", zap.Error(err))
}
}
}
}
}
}
importMessageAuthors := messageState.Response.DiscordMessageAuthors()
if len(importMessageAuthors) > 0 {
err := m.persistence.SaveDiscordMessageAuthors(importMessageAuthors)
if err != nil {
return err
}
}
importMessagesToSave := messageState.Response.DiscordMessages()
if len(importMessagesToSave) > 0 {
m.logger.Debug("saving discord messages", zap.Int("count", len(importMessagesToSave)))
m.handleImportMessagesMutex.Lock()
err := m.persistence.SaveDiscordMessages(importMessagesToSave)
if err != nil {
m.logger.Debug("failed to save discord messages", zap.Error(err))
m.handleImportMessagesMutex.Unlock()
return err
}
m.handleImportMessagesMutex.Unlock()
}
messageAttachmentsToSave := messageState.Response.DiscordMessageAttachments()
if len(messageAttachmentsToSave) > 0 {
m.logger.Debug("saving discord message attachments", zap.Int("count", len(messageAttachmentsToSave)))
m.handleImportMessagesMutex.Lock()
err := m.persistence.SaveDiscordMessageAttachments(messageAttachmentsToSave)
if err != nil {
m.logger.Debug("failed to save discord message attachments", zap.Error(err))
m.handleImportMessagesMutex.Unlock()
return err
}
m.handleImportMessagesMutex.Unlock()
}
messagesToSave := messageState.Response.Messages()
if len(messagesToSave) > 0 {
m.logger.Debug("saving %d app messages", zap.Int("count", len(messagesToSave)))
m.handleMessagesMutex.Lock()
err := m.SaveMessages(messagesToSave)
if err != nil {
m.handleMessagesMutex.Unlock()
return err
}
m.handleMessagesMutex.Unlock()
}
// Save chats if they were modified
if len(messageState.Response.chats) > 0 {
err := m.saveChats(messageState.Response.Chats())
if err != nil {
return err
}
}
return nil
}
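// handleRetrievedMessages decodes, deduplicates and dispatches the messages retrieved from the transport,
// then saves the resulting state and returns the accumulated MessengerResponse.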
func (m *Messenger) handleRetrievedMessages(chatWithMessages map[transport.Filter][]*types.Message, storeWakuMessages bool, fromArchive bool) (*MessengerResponse, error) {
m.handleMessagesMutex.Lock()
defer m.handleMessagesMutex.Unlock()
messageState := m.buildMessageState()
logger := m.logger.With(zap.String("site", "RetrieveAll"))
controlledCommunitiesChatIDs, err := m.communitiesManager.GetOwnedCommunitiesChatIDs()
if err != nil {
logger.Info("failed to retrieve admin communities", zap.Error(err))
}
iterator := m.retrievedMessagesIteratorFactory(chatWithMessages)
for iterator.HasNext() {
filter, messages := iterator.Next()
var processedMessages []string
for _, shhMessage := range messages {
logger := logger.With(zap.String("hash", types.EncodeHex(shhMessage.Hash)))
			// Indicates that all messages in the batch have been processed correctly
allMessagesProcessed := true
if controlledCommunitiesChatIDs[filter.ChatID] && storeWakuMessages {
logger.Debug("storing waku message")
err := m.communitiesManager.StoreWakuMessage(shhMessage)
if err != nil {
logger.Warn("failed to store waku message", zap.Error(err))
}
}
handleMessagesResponse, err := m.sender.HandleMessages(shhMessage)
if err != nil {
if m.telemetryClient != nil {
go m.telemetryClient.UpdateEnvelopeProcessingError(shhMessage, err)
}
logger.Info("failed to decode messages", zap.Error(err))
continue
}
if handleMessagesResponse == nil {
continue
}
statusMessages := handleMessagesResponse.StatusMessages
if m.telemetryClient != nil {
m.telemetryClient.PushReceivedMessages(m.ctx, telemetry.ReceivedMessages{
Filter: filter,
SSHMessage: shhMessage,
Messages: statusMessages,
})
}
err = m.handleDatasyncMetadata(handleMessagesResponse)
if err != nil {
m.logger.Warn("failed to handle datasync metadata", zap.Error(err))
}
logger.Debug("processing messages further", zap.Int("count", len(statusMessages)))
for _, msg := range statusMessages {
logger := logger.With(zap.String("message-id", msg.ApplicationLayer.ID.String()))
publicKey := msg.SigPubKey()
m.handleInstallations(msg.EncryptionLayer.Installations)
err := m.handleSharedSecrets(msg.EncryptionLayer.SharedSecrets)
if err != nil {
// log and continue, non-critical error
logger.Warn("failed to handle shared secrets")
}
senderID := contactIDFromPublicKey(publicKey)
ownID := contactIDFromPublicKey(m.IdentityPublicKey())
logger.Info("processing message", zap.Any("type", msg.ApplicationLayer.Type), zap.String("senderID", senderID))
if senderID == ownID {
// Skip own messages of certain types
if msg.ApplicationLayer.Type == protobuf.ApplicationMetadataMessage_CONTACT_CODE_ADVERTISEMENT {
continue
}
}
contact, contactFound := messageState.AllContacts.Load(senderID)
// Check for messages from blocked users
if contactFound && contact.Blocked {
continue
}
// Don't process duplicates
messageID := types.EncodeHex(msg.ApplicationLayer.ID)
exists, err := m.messageExists(messageID, messageState.ExistingMessagesMap)
if err != nil {
logger.Warn("failed to check message exists", zap.Error(err))
}
if exists && m.shouldSkipDuplicate(msg.ApplicationLayer.Type) {
logger.Debug("skipping duplicate", zap.String("messageID", messageID))
continue
}
if !contactFound {
c, err := buildContact(senderID, publicKey)
if err != nil {
logger.Info("failed to build contact", zap.Error(err))
allMessagesProcessed = false
continue
}
contact = c
if msg.ApplicationLayer.Type != protobuf.ApplicationMetadataMessage_PUSH_NOTIFICATION_QUERY {
messageState.AllContacts.Store(senderID, contact)
}
}
messageState.CurrentMessageState = &CurrentMessageState{
MessageID: messageID,
WhisperTimestamp: uint64(msg.TransportLayer.Message.Timestamp) * 1000,
Contact: contact,
PublicKey: publicKey,
StatusMessage: msg,
}
if msg.ApplicationLayer.Payload != nil {
err := m.dispatchToHandler(messageState, msg.ApplicationLayer.Payload, msg, filter, fromArchive)
if err != nil {
allMessagesProcessed = false
logger.Warn("failed to process protobuf", zap.String("type", msg.ApplicationLayer.Type.String()), zap.Error(err))
if m.unhandledMessagesTracker != nil {
m.unhandledMessagesTracker(msg, err)
}
continue
}
logger.Debug("Handled parsed message")
} else {
logger.Debug("parsed message is nil")
}
}
m.processCommunityChanges(messageState)
			// NOTE: for now we confirm messages as processed regardless of whether we
			// actually processed them. This is because we need to differentiate
			// between messages that we want to retry processing and messages that
			// are never going to be processed
m.transport.MarkP2PMessageAsProcessed(gethcommon.BytesToHash(shhMessage.Hash))
if allMessagesProcessed {
processedMessages = append(processedMessages, types.EncodeHex(shhMessage.Hash))
}
}
if len(processedMessages) != 0 {
if err := m.transport.ConfirmMessagesProcessed(processedMessages, m.getTimesource().GetCurrentTime()); err != nil {
logger.Warn("failed to confirm processed messages", zap.Error(err))
}
}
}
return m.saveDataAndPrepareResponse(messageState)
}
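// deleteNotification marks the activity center notification identified by
// installationID as deleted, removes it from persistence and adds it to the
// response so the client can drop it from the UI.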
func (m *Messenger) deleteNotification(response *MessengerResponse, installationID string) error {
notification, err := m.persistence.GetActivityCenterNotificationByID(types.FromHex(installationID))
if err != nil {
return err
}
if notification == nil {
return nil
}
updatedAt := m.GetCurrentTimeInMillis()
notification.UpdatedAt = updatedAt
notification.Deleted = true
// We don't sync the deleted notification here, since the same user on other
// devices should receive the same message (CommunityCancelRequestToJoin) anyway.
err = m.persistence.DeleteActivityCenterNotificationByID(types.FromHex(installationID), updatedAt)
if err != nil {
m.logger.Error("failed to delete notification from Activity Center", zap.Error(err))
return err
}
// Add the deleted notification to the response so the client removes the activity center notification from the UI
response.AddActivityCenterNotification(notification)
return nil
}
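// saveDataAndPrepareResponse persists everything accumulated in the received
// message state (contacts, chats, messages, installations, bookmarks, ...)
// and assembles the MessengerResponse returned to the client, hydrating
// notifications and pinned messages along the way.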
func (m *Messenger) saveDataAndPrepareResponse(messageState *ReceivedMessageState) (*MessengerResponse, error) {
var err error
var contactsToSave []*Contact
messageState.ModifiedContacts.Range(func(id string, value bool) (shouldContinue bool) {
contact, ok := messageState.AllContacts.Load(id)
if ok {
contactsToSave = append(contactsToSave, contact)
messageState.Response.AddContact(contact)
}
return true
})
// Hydrate chat alias and identicon
for id := range messageState.Response.chats {
chat, _ := messageState.AllChats.Load(id)
if chat == nil {
continue
}
if chat.OneToOne() {
contact, ok := m.allContacts.Load(chat.ID)
if ok {
chat.Alias = contact.Alias
chat.Identicon = contact.Identicon
}
}
messageState.Response.AddChat(chat)
}
messageState.ModifiedInstallations.Range(func(id string, value bool) (shouldContinue bool) {
installation, _ := messageState.AllInstallations.Load(id)
messageState.Response.AddInstallation(installation)
if installation.InstallationMetadata != nil {
err = m.setInstallationMetadata(id, installation.InstallationMetadata)
if err != nil {
return false
}
}
targeted, _ := messageState.TargetedInstallations.Load(id)
if targeted {
if installation.Enabled {
// Delete AC notif since the installation is now enabled
err = m.deleteNotification(messageState.Response, id)
if err != nil {
m.logger.Error("error deleting notification", zap.Error(err))
return false
}
} else if id != m.installationID {
// Add activity center notification when we receive a new installation
notification := &ActivityCenterNotification{
ID: types.FromHex(id),
Type: ActivityCenterNotificationTypeNewInstallationReceived,
InstallationID: id,
Timestamp: m.getTimesource().GetCurrentTime(),
Read: false,
Deleted: false,
UpdatedAt: m.GetCurrentTimeInMillis(),
}
err = m.addActivityCenterNotification(messageState.Response, notification, nil)
if err != nil {
return false
}
}
}
return true
})
if err != nil {
return nil, err
}
if len(messageState.Response.chats) > 0 {
err = m.saveChats(messageState.Response.Chats())
if err != nil {
return nil, err
}
}
messagesToSave := messageState.Response.Messages()
if len(messagesToSave) > 0 {
err = m.SaveMessages(messagesToSave)
if err != nil {
return nil, err
}
}
for _, emojiReaction := range messageState.EmojiReactions {
messageState.Response.AddEmojiReaction(emojiReaction)
}
for _, groupChatInvitation := range messageState.GroupChatInvitations {
messageState.Response.Invitations = append(messageState.Response.Invitations, groupChatInvitation)
}
if len(contactsToSave) > 0 {
err = m.persistence.SaveContacts(contactsToSave)
if err != nil {
return nil, err
}
}
newMessagesIds := map[string]struct{}{}
for _, message := range messagesToSave {
if message.New {
newMessagesIds[message.ID] = struct{}{}
}
}
messagesWithResponses, err := m.pullMessagesAndResponsesFromDB(messagesToSave)
if err != nil {
return nil, err
}
messagesByID := map[string]*common.Message{}
for _, message := range messagesWithResponses {
messagesByID[message.ID] = message
}
messageState.Response.SetMessages(messagesWithResponses)
notificationsEnabled, err := m.settings.GetNotificationsEnabled()
if err != nil {
return nil, err
}
profilePicturesVisibility, err := m.settings.GetProfilePicturesVisibility()
if err != nil {
return nil, err
}
err = m.prepareMessages(messageState.Response.messages)
if err != nil {
return nil, err
}
for _, message := range messageState.Response.messages {
if _, ok := newMessagesIds[message.ID]; ok {
message.New = true
if notificationsEnabled {
// Create notification body to be eventually passed to `localnotifications.SendMessageNotifications()`
if err = messageState.addNewMessageNotification(m.identity.PublicKey, message, messagesByID[message.ResponseTo], profilePicturesVisibility); err != nil {
return nil, err
}
}
// Create activity center notification body to be eventually passed to `activitycenter.SendActivityCenterNotifications()`
if err = messageState.addNewActivityCenterNotification(m.identity.PublicKey, m, message, messagesByID[message.ResponseTo]); err != nil {
return nil, err
}
}
}
// Reset installations
m.modifiedInstallations = new(stringBoolMap)
if len(messageState.AllBookmarks) > 0 {
bookmarks, err := m.storeSyncBookmarks(messageState.AllBookmarks)
if err != nil {
return nil, err
}
messageState.Response.AddBookmarks(bookmarks)
}
if len(messageState.AllVerificationRequests) > 0 {
for _, vr := range messageState.AllVerificationRequests {
messageState.Response.AddVerificationRequest(vr)
}
}
if len(messageState.AllTrustStatus) > 0 {
messageState.Response.AddTrustStatuses(messageState.AllTrustStatus)
}
// Hydrate pinned messages
for _, pinnedMessage := range messageState.Response.PinMessages() {
if pinnedMessage.Pinned {
pinnedMessage.Message = &common.PinnedMessage{
Message: messageState.Response.GetMessage(pinnedMessage.MessageId),
PinnedBy: pinnedMessage.From,
PinnedAt: pinnedMessage.Clock,
}
}
}
return messageState.Response, nil
}
func (m *Messenger) storeSyncBookmarks(bookmarkMap map[string]*browsers.Bookmark) ([]*browsers.Bookmark, error) {
var bookmarks []*browsers.Bookmark
for _, bookmark := range bookmarkMap {
bookmarks = append(bookmarks, bookmark)
}
return m.browserDatabase.StoreSyncBookmarks(bookmarks)
}
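// MessageByID returns the message with the given id from the database,
// preparing its media URLs through the local HTTP server when one is
// available.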
func (m *Messenger) MessageByID(id string) (*common.Message, error) {
msg, err := m.persistence.MessageByID(id)
if err != nil {
return nil, err
}
if m.httpServer != nil {
err = m.prepareMessage(msg, m.httpServer)
if err != nil {
return nil, err
}
}
return msg, nil
}
func (m *Messenger) MessagesExist(ids []string) (map[string]bool, error) {
return m.persistence.MessagesExist(ids)
}
func (m *Messenger) FirstUnseenMessageID(chatID string) (string, error) {
return m.persistence.FirstUnseenMessageID(chatID)
}
func (m *Messenger) latestIncomingMessageClock(chatID string) (uint64, error) {
return m.persistence.latestIncomingMessageClock(chatID)
}
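// MessageByChatID returns a page of messages for the given chat, paginated
// by cursor. For timeline chats it aggregates messages from the user's own
// timeline and the timelines of added contacts.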
func (m *Messenger) MessageByChatID(chatID, cursor string, limit int) ([]*common.Message, string, error) {
chat, err := m.persistence.Chat(chatID)
if err != nil {
return nil, "", err
}
if chat == nil {
return nil, "", ErrChatNotFound
}
var msgs []*common.Message
var nextCursor string
if chat.Timeline() {
var chatIDs = []string{"@" + contactIDFromPublicKey(&m.identity.PublicKey)}
m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
if contact.added() {
chatIDs = append(chatIDs, "@"+contact.ID)
}
return true
})
msgs, nextCursor, err = m.persistence.MessageByChatIDs(chatIDs, cursor, limit)
if err != nil {
return nil, "", err
}
} else {
msgs, nextCursor, err = m.persistence.MessageByChatID(chatID, cursor, limit)
if err != nil {
return nil, "", err
}
}
if m.httpServer != nil {
err = m.prepareMessagesList(msgs)
if err != nil {
return nil, "", err
}
}
return msgs, nextCursor, nil
}
func (m *Messenger) prepareMessages(messages map[string]*common.Message) error {
if m.httpServer == nil {
return nil
}
for idx := range messages {
err := m.prepareMessage(messages[idx], m.httpServer)
if err != nil {
return err
}
}
return nil
}
func (m *Messenger) prepareMessagesList(messages []*common.Message) error {
if m.httpServer == nil {
return nil
}
for idx := range messages {
err := m.prepareMessage(messages[idx], m.httpServer)
if err != nil {
return err
}
}
return nil
}
func extractQuotedImages(messages []*common.Message, s *server.MediaServer) []string {
var quotedImages []string
for _, message := range messages {
if message.ChatMessage != nil && message.ChatMessage.ContentType == protobuf.ChatMessage_IMAGE {
quotedImages = append(quotedImages, s.MakeImageURL(message.ID))
}
}
return quotedImages
}
func (m *Messenger) prepareTokenData(tokenData *ActivityTokenData, s *server.MediaServer) error {
if tokenData.TokenType == int(protobuf.CommunityTokenType_ERC721) {
tokenData.ImageURL = s.MakeWalletCollectibleImagesURL(tokenData.CollectibleID)
} else if tokenData.TokenType == int(protobuf.CommunityTokenType_ERC20) {
tokenData.ImageURL = s.MakeCommunityTokenImagesURL(tokenData.CommunityID, tokenData.ChainID, tokenData.Symbol)
}
return nil
}
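// prepareMessage fills in local media-server URLs (images, audio, stickers,
// Discord avatars and attachments, link previews) for the message and its
// quoted message so the client can render them directly.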
func (m *Messenger) prepareMessage(msg *common.Message, s *server.MediaServer) error {
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_IMAGE) {
msg.QuotedMessage.ImageLocalURL = s.MakeImageURL(msg.QuotedMessage.ID)
quotedMessage, err := m.MessageByID(msg.QuotedMessage.ID)
if err != nil {
return err
}
if quotedMessage == nil {
return errors.New("message not found")
}
if quotedMessage.ChatMessage != nil {
image := quotedMessage.ChatMessage.GetImage()
if image != nil && image.GetAlbumId() != "" {
albumID := image.GetAlbumId()
albumMessages, err := m.persistence.albumMessages(quotedMessage.LocalChatID, albumID)
if err != nil {
return err
}
quotedImages := extractQuotedImages(albumMessages, s)
quotedImagesJSON, err := json.Marshal(quotedImages)
if err != nil {
return err
}
msg.QuotedMessage.AlbumImages = quotedImagesJSON
}
}
}
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_AUDIO) {
msg.QuotedMessage.AudioLocalURL = s.MakeAudioURL(msg.QuotedMessage.ID)
}
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_STICKER) {
msg.QuotedMessage.HasSticker = true
}
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_DISCORD_MESSAGE) {
dm := msg.QuotedMessage.DiscordMessage
exists, err := m.persistence.HasDiscordMessageAuthorImagePayload(dm.Author.Id)
if err != nil {
return err
}
if exists {
msg.QuotedMessage.DiscordMessage.Author.LocalUrl = s.MakeDiscordAuthorAvatarURL(dm.Author.Id)
}
}
if msg.ContentType == protobuf.ChatMessage_IMAGE {
msg.ImageLocalURL = s.MakeImageURL(msg.ID)
}
if msg.ContentType == protobuf.ChatMessage_DISCORD_MESSAGE {
dm := msg.GetDiscordMessage()
exists, err := m.persistence.HasDiscordMessageAuthorImagePayload(dm.Author.Id)
if err != nil {
return err
}
if exists {
dm.Author.LocalUrl = s.MakeDiscordAuthorAvatarURL(dm.Author.Id)
}
for idx, attachment := range dm.Attachments {
if strings.Contains(attachment.ContentType, "image") {
hasPayload, err := m.persistence.HasDiscordMessageAttachmentPayload(attachment.Id, dm.Id)
if err != nil {
m.logger.Error("failed to check if message attachment exist", zap.Error(err))
continue
}
if hasPayload {
localURL := s.MakeDiscordAttachmentURL(dm.Id, attachment.Id)
dm.Attachments[idx].LocalUrl = localURL
}
}
}
msg.Payload = &protobuf.ChatMessage_DiscordMessage{
DiscordMessage: dm,
}
}
if msg.ContentType == protobuf.ChatMessage_AUDIO {
msg.AudioLocalURL = s.MakeAudioURL(msg.ID)
}
if msg.ContentType == protobuf.ChatMessage_STICKER {
msg.StickerLocalURL = s.MakeStickerURL(msg.GetSticker().Hash)
}
msg.LinkPreviews = msg.ConvertFromProtoToLinkPreviews(s.MakeLinkPreviewThumbnailURL, s.MakeLinkPreviewFaviconURL)
msg.StatusLinkPreviews = msg.ConvertFromProtoToStatusLinkPreviews(s.MakeStatusLinkPreviewThumbnailURL)
return nil
}
func (m *Messenger) AllMessageByChatIDWhichMatchTerm(chatID string, searchTerm string, caseSensitive bool) ([]*common.Message, error) {
_, err := m.persistence.Chat(chatID)
if err != nil {
return nil, err
}
messages, err := m.persistence.AllMessageByChatIDWhichMatchTerm(chatID, searchTerm, caseSensitive)
if err != nil {
return nil, err
}
return m.filterOutHiddenChatMessages(messages)
}
func (m *Messenger) AllMessagesFromChatsAndCommunitiesWhichMatchTerm(communityIds []string, chatIds []string, searchTerm string, caseSensitive bool) ([]*common.Message, error) {
messages, err := m.persistence.AllMessagesFromChatsAndCommunitiesWhichMatchTerm(communityIds, chatIds, searchTerm, caseSensitive)
if err != nil {
return nil, err
}
return m.filterOutHiddenChatMessages(messages)
}
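// filterOutHiddenChatMessages drops messages that belong to community
// channels the current identity cannot view, caching community lookups and
// per-chat visibility to avoid repeated checks.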
func (m *Messenger) filterOutHiddenChatMessages(messages []*common.Message) ([]*common.Message, error) {
communitiesCache := make(map[string]*communities.Community)
chatVisibilityCache := make(map[string]bool)
var filteredMessages []*common.Message
for _, message := range messages {
chatVisible, ok := chatVisibilityCache[message.ChatId]
if ok && chatVisible {
filteredMessages = append(filteredMessages, message)
continue
}
chat, ok := m.allChats.Load(message.ChatId)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.CommunityID == "" {
filteredMessages = append(filteredMessages, message)
continue
}
community, ok := communitiesCache[chat.CommunityID]
if !ok {
communityID, err := hexutil.Decode(chat.CommunityID)
if err != nil {
return nil, err
}
comm, err := m.communitiesManager.GetByID(communityID)
if err != nil {
if err == communities.ErrOrgNotFound {
continue
}
return nil, err
}
communitiesCache[chat.CommunityID] = comm
community = comm
}
canView := community.CanView(&m.identity.PublicKey, chat.CommunityChannelID())
chatVisibilityCache[chat.ID] = canView
if canView {
filteredMessages = append(filteredMessages, message)
}
}
return filteredMessages, nil
}
func (m *Messenger) SaveMessages(messages []*common.Message) error {
return m.persistence.SaveMessages(messages)
}
func (m *Messenger) DeleteMessage(id string) error {
return m.persistence.DeleteMessage(id)
}
func (m *Messenger) DeleteMessagesByChatID(id string) error {
return m.persistence.DeleteMessagesByChatID(id)
}
func (m *Messenger) markMessageAsUnreadImpl(chatID string, messageID string) (uint64, uint64, error) {
count, countWithMentions, err := m.persistence.MarkMessageAsUnread(chatID, messageID)
if err != nil {
return 0, 0, err
}
chat, err := m.persistence.Chat(chatID)
if err != nil {
return 0, 0, err
}
m.allChats.Store(chatID, chat)
return count, countWithMentions, nil
}
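// MarkMessageAsUnread marks the given message as unread in the chat and marks
// the activity center notifications of all newer messages in that chat as
// unread as well, returning the updated counts and notifications.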
func (m *Messenger) MarkMessageAsUnread(chatID string, messageID string) (*MessengerResponse, error) {
count, countWithMentions, err := m.markMessageAsUnreadImpl(chatID, messageID)
if err != nil {
return nil, err
}
response := &MessengerResponse{}
response.AddSeenAndUnseenMessages(&SeenUnseenMessages{
ChatID: chatID,
Count: count,
CountWithMentions: countWithMentions,
Seen: false,
})
ids, err := m.persistence.GetMessageIdsWithGreaterTimestamp(chatID, messageID)
if err != nil {
return nil, err
}
hexBytesIds := []types.HexBytes{}
for _, id := range ids {
hexBytesIds = append(hexBytesIds, types.FromHex(id))
}
updatedAt := m.GetCurrentTimeInMillis()
notifications, err := m.persistence.MarkActivityCenterNotificationsUnread(hexBytesIds, updatedAt)
if err != nil {
return nil, err
}
response.AddActivityCenterNotifications(notifications)
return response, nil
}
// MarkMessagesSeen marks messages with `ids` as seen in the chat `chatID`.
// It returns the number of affected messages or an error. If there is an
// error, the number of affected messages is always zero.
func (m *Messenger) markMessagesSeenImpl(chatID string, ids []string) (uint64, uint64, *Chat, error) {
count, countWithMentions, err := m.persistence.MarkMessagesSeen(chatID, ids)
if err != nil {
return 0, 0, nil, err
}
chat, err := m.persistence.Chat(chatID)
if err != nil {
return 0, 0, nil, err
}
m.allChats.Store(chatID, chat)
return count, countWithMentions, chat, nil
}
// Deprecated: Use MarkMessagesRead instead
func (m *Messenger) MarkMessagesSeen(chatID string, ids []string) (uint64, uint64, []*ActivityCenterNotification, error) {
count, countWithMentions, _, err := m.markMessagesSeenImpl(chatID, ids)
if err != nil {
return 0, 0, nil, err
}
hexBytesIds := []types.HexBytes{}
for _, id := range ids {
hexBytesIds = append(hexBytesIds, types.FromHex(id))
}
// Mark notifications as read in the database
updatedAt := m.GetCurrentTimeInMillis()
err = m.persistence.MarkActivityCenterNotificationsRead(hexBytesIds, updatedAt)
if err != nil {
return 0, 0, nil, err
}
notifications, err := m.persistence.GetActivityCenterNotificationsByID(hexBytesIds)
if err != nil {
return 0, 0, nil, err
}
return count, countWithMentions, notifications, nil
}
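// MarkMessagesRead marks the given messages as seen and their activity center
// notifications as read, returning the affected message counts and the
// updated notifications in a MessengerResponse.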
func (m *Messenger) MarkMessagesRead(chatID string, ids []string) (*MessengerResponse, error) {
count, countWithMentions, _, err := m.markMessagesSeenImpl(chatID, ids)
if err != nil {
return nil, err
}
response := &MessengerResponse{}
response.AddSeenAndUnseenMessages(&SeenUnseenMessages{
ChatID: chatID,
Count: count,
CountWithMentions: countWithMentions,
Seen: true,
})
hexBytesIds := []types.HexBytes{}
for _, id := range ids {
hexBytesIds = append(hexBytesIds, types.FromHex(id))
}
// Mark notifications as read in the database
updatedAt := m.GetCurrentTimeInMillis()
err = m.persistence.MarkActivityCenterNotificationsRead(hexBytesIds, updatedAt)
if err != nil {
return nil, err
}
notifications, err := m.persistence.GetActivityCenterNotificationsByID(hexBytesIds)
if err != nil {
return nil, err
}
response.AddActivityCenterNotifications(notifications)
return response, nil
}
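// syncChatMessagesRead propagates the "messages read up to clock" state for a
// chat to the user's paired devices; it is a no-op when no other device is
// paired.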
func (m *Messenger) syncChatMessagesRead(ctx context.Context, chatID string, clock uint64, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
_, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncChatMessagesRead{
Clock: clock,
Id: chatID,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CHAT_MESSAGES_READ,
ResendType: common.ResendTypeDataSync,
}
_, err = rawMessageHandler(ctx, rawMessage)
return err
}
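// markAllRead marks every message in the chat as read up to the given clock,
// optionally syncing the read state to paired devices, and resets the chat's
// unviewed counters.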
func (m *Messenger) markAllRead(chatID string, clock uint64, shouldBeSynced bool) error {
chat, ok := m.allChats.Load(chatID)
if !ok {
return ErrChatNotFoundError
}
_, _, err := m.persistence.MarkAllRead(chatID, clock)
if err != nil {
return err
}
if shouldBeSynced {
err := m.syncChatMessagesRead(context.Background(), chatID, clock, m.dispatchMessage)
if err != nil {
return err
}
}
chat.ReadMessagesAtClockValue = clock
chat.Highlight = false
chat.UnviewedMessagesCount = 0
chat.UnviewedMentionsCount = 0
if chat.LastMessage != nil {
chat.LastMessage.Seen = true
}
// TODO(samyoul) remove storing of an updated reference pointer?
m.allChats.Store(chat.ID, chat)
return m.persistence.SaveChats([]*Chat{chat})
}
func (m *Messenger) MarkAllRead(ctx context.Context, chatID string) (*MessengerResponse, error) {
response := &MessengerResponse{}
notifications, err := m.DismissAllActivityCenterNotificationsFromChatID(ctx, chatID, m.GetCurrentTimeInMillis())
if err != nil {
return nil, err
}
response.AddActivityCenterNotifications(notifications)
clock, _ := m.latestIncomingMessageClock(chatID)
if clock == 0 {
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
clock, _ = chat.NextClockAndTimestamp(m.getTimesource())
}
err = m.markAllRead(chatID, clock, true)
if err != nil {
return nil, err
}
return response, nil
}
func (m *Messenger) MarkAllReadInCommunity(ctx context.Context, communityID string) (*MessengerResponse, error) {
response := &MessengerResponse{}
notifications, err := m.DismissAllActivityCenterNotificationsFromCommunity(ctx, communityID, m.GetCurrentTimeInMillis())
if err != nil {
return nil, err
}
response.AddActivityCenterNotifications(notifications)
chatIDs, err := m.persistence.AllChatIDsByCommunity(nil, communityID)
if err != nil {
return nil, err
}
err = m.persistence.MarkAllReadMultiple(chatIDs)
if err != nil {
return nil, err
}
for _, chatID := range chatIDs {
chat, ok := m.allChats.Load(chatID)
if ok {
chat.UnviewedMessagesCount = 0
chat.UnviewedMentionsCount = 0
m.allChats.Store(chat.ID, chat)
response.AddChat(chat)
} else {
err = fmt.Errorf("chat with chatID %s not found", chatID)
}
}
return response, err
}
// MuteChat signals to the messenger that we don't want to be notified
// on new messages from this chat
func (m *Messenger) MuteChat(request *requests.MuteChat) (time.Time, error) {
chat, ok := m.allChats.Load(request.ChatID)
if !ok {
// Only a one-to-one chat can be muted when it's not in the database
publicKey, err := common.HexToPubkey(request.ChatID)
if err != nil {
return time.Time{}, err
}
// Create a one to one chat and set active to false
chat = CreateOneToOneChat(request.ChatID, publicKey, m.getTimesource())
chat.Active = false
err = m.initChatSyncFields(chat)
if err != nil {
return time.Time{}, err
}
err = m.saveChat(chat)
if err != nil {
return time.Time{}, err
}
}
var contact *Contact
if chat.OneToOne() {
contact, _ = m.allContacts.Load(request.ChatID)
}
var MuteTill time.Time
switch request.MutedType {
case MuteTill1Min:
MuteTill = time.Now().Add(MuteFor1MinDuration)
case MuteFor15Min:
MuteTill = time.Now().Add(MuteFor15MinsDuration)
case MuteFor1Hr:
MuteTill = time.Now().Add(MuteFor1HrsDuration)
case MuteFor8Hr:
MuteTill = time.Now().Add(MuteFor8HrsDuration)
case MuteFor24Hr:
MuteTill = time.Now().Add(MuteFor24HrsDuration)
case MuteFor1Week:
MuteTill = time.Now().Add(MuteFor1WeekDuration)
default:
MuteTill = time.Time{}
}
err := m.saveChat(chat)
if err != nil {
return time.Time{}, err
}
muteTillTimeRemoveMs, err := time.Parse(time.RFC3339, MuteTill.Format(time.RFC3339))
if err != nil {
return time.Time{}, err
}
return m.muteChat(chat, contact, muteTillTimeRemoveMs)
}
func (m *Messenger) MuteChatV2(muteParams *requests.MuteChat) (time.Time, error) {
return m.MuteChat(muteParams)
}
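// muteChat persists the mute (with its expiry time) for the chat, updates the
// in-memory chat, syncs the contact for one-to-one chats and re-registers the
// push notification filters.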
func (m *Messenger) muteChat(chat *Chat, contact *Contact, mutedTill time.Time) (time.Time, error) {
err := m.persistence.MuteChat(chat.ID, mutedTill)
if err != nil {
return time.Time{}, err
}
chat.Muted = true
chat.MuteTill = mutedTill
// TODO(samyoul) remove storing of an updated reference pointer?
m.allChats.Store(chat.ID, chat)
if contact != nil {
err := m.syncContact(context.Background(), contact, m.dispatchMessage)
if err != nil {
return time.Time{}, err
}
}
if !chat.MuteTill.IsZero() {
err := m.reregisterForPushNotifications()
if err != nil {
return time.Time{}, err
}
return mutedTill, nil
}
return time.Time{}, m.reregisterForPushNotifications()
}
// UnmuteChat signals to the messenger that we want to be notified
// on new messages from this chat
func (m *Messenger) UnmuteChat(chatID string) error {
chat, ok := m.allChats.Load(chatID)
if !ok {
return ErrChatNotFoundError
}
var contact *Contact
if chat.OneToOne() {
contact, _ = m.allContacts.Load(chatID)
}
return m.unmuteChat(chat, contact)
}
func (m *Messenger) unmuteChat(chat *Chat, contact *Contact) error {
err := m.persistence.UnmuteChat(chat.ID)
if err != nil {
return err
}
chat.Muted = false
chat.MuteTill = time.Time{}
// TODO(samyoul) remove storing of an updated reference pointer?
m.allChats.Store(chat.ID, chat)
if chat.CommunityChat() {
community, err := m.communitiesManager.GetByIDString(chat.CommunityID)
if err != nil {
return err
}
err = m.communitiesManager.SetMuted(community.ID(), false)
if err != nil {
return err
}
}
if contact != nil {
err := m.syncContact(context.Background(), contact, m.dispatchMessage)
if err != nil {
return err
}
}
return m.reregisterForPushNotifications()
}
func (m *Messenger) UpdateMessageOutgoingStatus(id, newOutgoingStatus string) error {
return m.persistence.UpdateMessageOutgoingStatus(id, newOutgoingStatus)
}
// Identicon returns an identicon based on the input string
func Identicon(id string) (string, error) {
return identicon.GenerateBase64(id)
}
// GenerateAlias name returns the generated name given a public key hex encoded prefixed with 0x
func GenerateAlias(id string) (string, error) {
return alias.GenerateFromPublicKeyString(id)
}
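// RequestTransaction sends a transaction-request command message in a
// one-to-one chat, asking the other side to send `value` of `contract` to
// `address`.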
func (m *Messenger) RequestTransaction(ctx context.Context, chatID, value, contract, address string) (*MessengerResponse, error) {
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
message := common.NewMessage()
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.Seen = true
message.Text = "Request transaction"
request := &protobuf.RequestTransaction{
Clock: message.Clock,
Address: address,
Value: value,
Contract: contract,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_REQUEST_TRANSACTION,
ResendType: resendType,
})
message.CommandParameters = &common.CommandParameters{
ID: rawMessage.ID,
Value: value,
Address: address,
Contract: contract,
CommandState: common.CommandStateRequestTransaction,
}
if err != nil {
return nil, err
}
messageID := rawMessage.ID
message.ID = messageID
message.CommandParameters.ID = messageID
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
func (m *Messenger) RequestAddressForTransaction(ctx context.Context, chatID, from, value, contract string) (*MessengerResponse, error) {
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
message := common.NewMessage()
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.Seen = true
message.Text = "Request address for transaction"
request := &protobuf.RequestAddressForTransaction{
Clock: message.Clock,
Value: value,
Contract: contract,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_REQUEST_ADDRESS_FOR_TRANSACTION,
ResendType: resendType,
})
message.CommandParameters = &common.CommandParameters{
ID: rawMessage.ID,
From: from,
Value: value,
Contract: contract,
CommandState: common.CommandStateRequestAddressForTransaction,
}
if err != nil {
return nil, err
}
messageID := rawMessage.ID
message.ID = messageID
message.CommandParameters.ID = messageID
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
func (m *Messenger) AcceptRequestAddressForTransaction(ctx context.Context, messageID, address string) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Text = "Request address for transaction accepted"
message.Seen = true
message.OutgoingStatus = common.OutgoingStatusSending
// Hide previous message
previousMessage, err := m.persistence.MessageByCommandID(chatID, messageID)
if err != nil {
return nil, err
}
if previousMessage == nil {
return nil, errors.New("No previous message found")
}
err = m.persistence.HideMessage(previousMessage.ID)
if err != nil {
return nil, err
}
message.Replace = previousMessage.ID
request := &protobuf.AcceptRequestAddressForTransaction{
Clock: message.Clock,
Id: messageID,
Address: address,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_ACCEPT_REQUEST_ADDRESS_FOR_TRANSACTION,
ResendType: resendType,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.Address = address
message.CommandParameters.CommandState = common.CommandStateRequestAddressForTransactionAccepted
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
func (m *Messenger) DeclineRequestTransaction(ctx context.Context, messageID string) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Text = "Transaction request declined"
message.Seen = true
message.OutgoingStatus = common.OutgoingStatusSending
message.Replace = messageID
err = m.persistence.HideMessage(messageID)
if err != nil {
return nil, err
}
request := &protobuf.DeclineRequestTransaction{
Clock: message.Clock,
Id: messageID,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_DECLINE_REQUEST_TRANSACTION,
ResendType: resendType,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.CommandState = common.CommandStateRequestTransactionDeclined
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
func (m *Messenger) DeclineRequestAddressForTransaction(ctx context.Context, messageID string) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Text = "Request address for transaction declined"
message.Seen = true
message.OutgoingStatus = common.OutgoingStatusSending
message.Replace = messageID
err = m.persistence.HideMessage(messageID)
if err != nil {
return nil, err
}
request := &protobuf.DeclineRequestAddressForTransaction{
Clock: message.Clock,
Id: messageID,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_DECLINE_REQUEST_ADDRESS_FOR_TRANSACTION,
ResendType: resendType,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.CommandState = common.CommandStateRequestAddressForTransactionDeclined
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
func (m *Messenger) AcceptRequestTransaction(ctx context.Context, transactionHash, messageID string, signature []byte) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Seen = true
message.Text = transactionSentTxt
message.OutgoingStatus = common.OutgoingStatusSending
// Hide previous message
previousMessage, err := m.persistence.MessageByCommandID(chatID, messageID)
if err != nil && err != common.ErrRecordNotFound {
return nil, err
}
if previousMessage != nil {
err = m.persistence.HideMessage(previousMessage.ID)
if err != nil {
return nil, err
}
message.Replace = previousMessage.ID
}
err = m.persistence.HideMessage(messageID)
if err != nil {
return nil, err
}
request := &protobuf.SendTransaction{
Clock: message.Clock,
Id: messageID,
TransactionHash: transactionHash,
Signature: signature,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SEND_TRANSACTION,
ResendType: resendType,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.TransactionHash = transactionHash
message.CommandParameters.Signature = signature
message.CommandParameters.CommandState = common.CommandStateTransactionSent
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
func (m *Messenger) SendTransaction(ctx context.Context, chatID, value, contract, transactionHash string, signature []byte) (*MessengerResponse, error) {
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFoundError
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
message := common.NewMessage()
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.LocalChatID = chatID
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Seen = true
message.Timestamp = timestamp
message.Text = transactionSentTxt
request := &protobuf.SendTransaction{
Clock: message.Clock,
TransactionHash: transactionHash,
Signature: signature,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
resendType := common.ResendTypeRawMessage
if chat.ChatType == ChatTypeOneToOne {
resendType = common.ResendTypeDataSync
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SEND_TRANSACTION,
ResendType: resendType,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters = &common.CommandParameters{
TransactionHash: transactionHash,
Value: value,
Contract: contract,
Signature: signature,
CommandState: common.CommandStateTransactionSent,
}
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
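// ValidateTransactions checks the pending transactions recorded for the given
// addresses and, for each confirmed one, produces a "Transaction received"
// message, updates the corresponding chat and emits a local notification when
// notifications are enabled.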
func (m *Messenger) ValidateTransactions(ctx context.Context, addresses []types.Address) (*MessengerResponse, error) {
if m.verifyTransactionClient == nil {
return nil, nil
}
logger := m.logger.With(zap.String("site", "ValidateTransactions"))
logger.Debug("Validating transactions")
txs, err := m.persistence.TransactionsToValidate()
if err != nil {
logger.Error("Error pulling", zap.Error(err))
return nil, err
}
logger.Debug("Txs", zap.Int("count", len(txs)), zap.Any("txs", txs))
var response MessengerResponse
validator := NewTransactionValidator(addresses, m.persistence, m.verifyTransactionClient, m.logger)
responses, err := validator.ValidateTransactions(ctx)
if err != nil {
logger.Error("Error validating", zap.Error(err))
return nil, err
}
for _, validationResult := range responses {
var message *common.Message
chatID := contactIDFromPublicKey(validationResult.Transaction.From)
chat, ok := m.allChats.Load(chatID)
if !ok {
chat = OneToOneFromPublicKey(validationResult.Transaction.From, m.transport)
}
if validationResult.Message != nil {
message = validationResult.Message
} else {
message = common.NewMessage()
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.LocalChatID = chatID
message.OutgoingStatus = ""
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.Timestamp = timestamp
message.WhisperTimestamp = timestamp
message.Text = "Transaction received"
message.Seen = false
message.ID = validationResult.Transaction.MessageID
if message.CommandParameters == nil {
message.CommandParameters = &common.CommandParameters{}
} else {
message.CommandParameters = validationResult.Message.CommandParameters
}
message.CommandParameters.Value = validationResult.Value
message.CommandParameters.Contract = validationResult.Contract
message.CommandParameters.Address = validationResult.Address
message.CommandParameters.CommandState = common.CommandStateTransactionSent
message.CommandParameters.TransactionHash = validationResult.Transaction.TransactionHash
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
if len(message.CommandParameters.ID) != 0 {
// Hide previous message
previousMessage, err := m.persistence.MessageByCommandID(chatID, message.CommandParameters.ID)
if err != nil && err != common.ErrRecordNotFound {
return nil, err
}
if previousMessage != nil {
err = m.persistence.HideMessage(previousMessage.ID)
if err != nil {
return nil, err
}
message.Replace = previousMessage.ID
}
}
response.AddMessage(message)
m.allChats.Store(chat.ID, chat)
response.AddChat(chat)
contact, err := m.getOrBuildContactFromMessage(message)
if err != nil {
return nil, err
}
notificationsEnabled, err := m.settings.GetNotificationsEnabled()
if err != nil {
return nil, err
}
profilePicturesVisibility, err := m.settings.GetProfilePicturesVisibility()
if err != nil {
return nil, err
}
if notificationsEnabled {
notification, err := NewMessageNotification(message.ID, message, chat, contact, m.ResolvePrimaryName, profilePicturesVisibility)
if err != nil {
return nil, err
}
response.AddNotification(notification)
}
}
if len(response.messages) > 0 {
err = m.SaveMessages(response.Messages())
if err != nil {
return nil, err
}
}
return &response, nil
}
// pullMessagesAndResponsesFromDB pulls all the given messages, together with
// the messages they reply to, from the database
func (m *Messenger) pullMessagesAndResponsesFromDB(messages []*common.Message) ([]*common.Message, error) {
var messageIDs []string
for _, message := range messages {
messageIDs = append(messageIDs, message.ID)
if len(message.ResponseTo) != 0 {
messageIDs = append(messageIDs, message.ResponseTo)
}
}
// We pull from the database all the messages & replies involved,
// so we let the db build the correct messages
return m.persistence.MessagesByIDs(messageIDs)
}
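// SignMessage hashes the given text with the standard Ethereum text prefix
// and signs it with the messenger's identity key.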
func (m *Messenger) SignMessage(message string) ([]byte, error) {
hash := crypto.TextHash([]byte(message))
return crypto.Sign(hash, m.identity)
}
func (m *Messenger) CreateCommunityTokenDeploymentSignature(ctx context.Context, chainID uint64, addressFrom string, communityID string) ([]byte, error) {
return m.communitiesManager.CreateCommunityTokenDeploymentSignature(ctx, chainID, addressFrom, communityID)
}
func (m *Messenger) getTimesource() common.TimeSource {
return m.transport
}
func (m *Messenger) GetCurrentTimeInMillis() uint64 {
return m.getTimesource().GetCurrentTime()
}
// AddPushNotificationsServer adds a push notification server
func (m *Messenger) AddPushNotificationsServer(ctx context.Context, publicKey *ecdsa.PublicKey, serverType pushnotificationclient.ServerType) error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
return m.pushNotificationClient.AddPushNotificationsServer(publicKey, serverType)
}
// RemovePushNotificationServer removes a push notification server
func (m *Messenger) RemovePushNotificationServer(ctx context.Context, publicKey *ecdsa.PublicKey) error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
return m.pushNotificationClient.RemovePushNotificationServer(publicKey)
}
// UnregisterFromPushNotifications unregisters from any server
func (m *Messenger) UnregisterFromPushNotifications(ctx context.Context) error {
return m.pushNotificationClient.Unregister()
}
// DisableSendingPushNotifications signals the client not to send any push notification
func (m *Messenger) DisableSendingPushNotifications() error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
m.pushNotificationClient.DisableSending()
return nil
}
// EnableSendingPushNotifications signals the client to send push notifications
func (m *Messenger) EnableSendingPushNotifications() error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
m.pushNotificationClient.EnableSending()
return nil
}
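// pushNotificationOptions builds the registration options sent to push
// notification servers from the current contacts (added vs blocked) and chats
// (muted, public and community chats).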
func (m *Messenger) pushNotificationOptions() *pushnotificationclient.RegistrationOptions {
var contactIDs []*ecdsa.PublicKey
var mutedChatIDs []string
var publicChatIDs []string
var blockedChatIDs []string
m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
if contact.added() && !contact.Blocked {
pk, err := contact.PublicKey()
if err != nil {
m.logger.Warn("could not parse contact public key")
return true
}
contactIDs = append(contactIDs, pk)
} else if contact.Blocked {
blockedChatIDs = append(blockedChatIDs, contact.ID)
}
return true
})
m.allChats.Range(func(chatID string, chat *Chat) (shouldContinue bool) {
if chat.Muted {
mutedChatIDs = append(mutedChatIDs, chat.ID)
return true
}
if chat.Active && (chat.Public() || chat.CommunityChat()) {
publicChatIDs = append(publicChatIDs, chat.ID)
}
return true
})
return &pushnotificationclient.RegistrationOptions{
ContactIDs: contactIDs,
MutedChatIDs: mutedChatIDs,
PublicChatIDs: publicChatIDs,
BlockedChatIDs: blockedChatIDs,
}
}
// RegisterForPushNotifications registers deviceToken with any enabled push notification server
func (m *Messenger) RegisterForPushNotifications(ctx context.Context, deviceToken, apnTopic string, tokenType protobuf.PushNotificationRegistration_TokenType) error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
m.mutex.Lock()
defer m.mutex.Unlock()
err := m.pushNotificationClient.Register(deviceToken, apnTopic, tokenType, m.pushNotificationOptions())
if err != nil {
m.logger.Error("failed to register for push notifications", zap.Error(err))
return err
}
return nil
}
// RegisteredForPushNotifications returns whether we successfully registered with all the servers
func (m *Messenger) RegisteredForPushNotifications() (bool, error) {
if m.pushNotificationClient == nil {
return false, errors.New("no push notification client")
}
return m.pushNotificationClient.Registered()
}
// EnablePushNotificationsFromContactsOnly is used to indicate that we want to receive push notifications only from contacts
func (m *Messenger) EnablePushNotificationsFromContactsOnly() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.EnablePushNotificationsFromContactsOnly(m.pushNotificationOptions())
}
// DisablePushNotificationsFromContactsOnly is used to indicate that we want to receive push notifications from anyone
func (m *Messenger) DisablePushNotificationsFromContactsOnly() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.DisablePushNotificationsFromContactsOnly(m.pushNotificationOptions())
}
// EnablePushNotificationsBlockMentions is used to indicate that we don't want to receive push notifications for mentions
func (m *Messenger) EnablePushNotificationsBlockMentions() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.EnablePushNotificationsBlockMentions(m.pushNotificationOptions())
}
// DisablePushNotificationsBlockMentions is used to indicate that we want to receive push notifications for mentions
func (m *Messenger) DisablePushNotificationsBlockMentions() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.DisablePushNotificationsBlockMentions(m.pushNotificationOptions())
}
// GetPushNotificationsServers returns the servers used for push notifications
func (m *Messenger) GetPushNotificationsServers() ([]*pushnotificationclient.PushNotificationServer, error) {
if m.pushNotificationClient == nil {
return nil, errors.New("no push notification client")
}
return m.pushNotificationClient.GetServers()
}
// StartPushNotificationsServer initializes and starts a push notification server, using the current messenger identity key
func (m *Messenger) StartPushNotificationsServer() error {
if m.pushNotificationServer == nil {
pushNotificationServerPersistence := pushnotificationserver.NewSQLitePersistence(m.database)
config := &pushnotificationserver.Config{
Enabled: true,
Logger: m.logger,
Identity: m.identity,
}
m.pushNotificationServer = pushnotificationserver.New(config, pushNotificationServerPersistence, m.sender)
}
return m.pushNotificationServer.Start()
}
// StopPushNotificationsServer stops the push notification server if running
func (m *Messenger) StopPushNotificationsServer() error {
m.pushNotificationServer = nil
return nil
}
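// generateAliasAndIdenticon returns the generated alias and base64-encoded identicon for the given public key string.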
func generateAliasAndIdenticon(pk string) (string, string, error) {
identicon, err := identicon.GenerateBase64(pk)
if err != nil {
return "", "", err
}
name, err := alias.GenerateFromPublicKeyString(pk)
if err != nil {
return "", "", err
}
return name, identicon, nil
}
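// encodeChatEntity sets the message type according to the chat type and returns the
// protobuf-encoded payload, wrapping group messages in a membership update when requested.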
func (m *Messenger) encodeChatEntity(chat *Chat, message common.ChatEntity) ([]byte, error) {
var encodedMessage []byte
var err error
l := m.logger.With(zap.String("site", "Send"), zap.String("chatID", chat.ID))
switch chat.ChatType {
case ChatTypeOneToOne:
l.Debug("sending private message")
message.SetMessageType(protobuf.MessageType_ONE_TO_ONE)
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
case ChatTypePublic, ChatTypeProfile:
l.Debug("sending public message", zap.String("chatName", chat.Name))
message.SetMessageType(protobuf.MessageType_PUBLIC_GROUP)
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
case ChatTypeCommunityChat:
l.Debug("sending community chat message", zap.String("chatName", chat.Name))
message.SetMessageType(protobuf.MessageType_COMMUNITY_CHAT)
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
case ChatTypePrivateGroupChat:
message.SetMessageType(protobuf.MessageType_PRIVATE_GROUP)
l.Debug("sending group message", zap.String("chatName", chat.Name))
if !message.WrapGroupMessage() {
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
} else {
group, err := newProtocolGroupFromChat(chat)
if err != nil {
return nil, err
}
// NOTE(cammellos): Disabling for now since the optimization is not
// applicable anymore after we changed group rules to allow
// anyone to change group details
encodedMessage, err = m.sender.EncodeMembershipUpdate(group, message)
if err != nil {
return nil, err
}
}
default:
return nil, errors.New("chat type not supported")
}
return encodedMessage, nil
}
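// getOrBuildContactFromMessage returns the known contact for the message sender,
// or builds and caches a new one from the sender's public key.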
func (m *Messenger) getOrBuildContactFromMessage(msg *common.Message) (*Contact, error) {
if c, ok := m.allContacts.Load(msg.From); ok {
return c, nil
}
senderPubKey, err := msg.GetSenderPubKey()
if err != nil {
return nil, err
}
senderID := contactIDFromPublicKey(senderPubKey)
c, err := buildContact(senderID, senderPubKey)
if err != nil {
return nil, err
}
// TODO(samyoul) remove storing of an updated reference pointer?
m.allContacts.Store(msg.From, c)
return c, nil
}
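// BloomFilter returns the bloom filter currently used by the transport.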
func (m *Messenger) BloomFilter() []byte {
return m.transport.BloomFilter()
}
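// getSettings reads the current settings from the accounts database.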
func (m *Messenger) getSettings() (settings.Settings, error) {
sDB, err := accounts.NewDB(m.database)
if err != nil {
return settings.Settings{}, err
}
return sDB.GetSettings()
}
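// getEnsUsernameDetails returns the ENS username details stored in the database.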
func (m *Messenger) getEnsUsernameDetails() (result []*ensservice.UsernameDetail, err error) {
db := ensservice.NewEnsDatabase(m.database)
return db.GetEnsUsernames(nil)
}
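// ToVerificationRequest converts a SyncVerificationRequest protobuf into a verification.Request.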
func ToVerificationRequest(message *protobuf.SyncVerificationRequest) *verification.Request {
return &verification.Request{
From: message.From,
To: message.To,
Challenge: message.Challenge,
Response: message.Response,
RequestedAt: message.RequestedAt,
RepliedAt: message.RepliedAt,
RequestStatus: verification.RequestStatus(message.VerificationStatus),
}
}
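// HandleSyncVerificationRequest stores a synced verification request and, for requests we sent,
// updates the verification status of the corresponding contact.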
func (m *Messenger) HandleSyncVerificationRequest(state *ReceivedMessageState, message *protobuf.SyncVerificationRequest, statusMessage *v1protocol.StatusMessage) error {
verificationRequest := ToVerificationRequest(message)
err := m.verificationDatabase.SaveVerificationRequest(verificationRequest)
if err != nil {
return err
}
myPubKey := hexutil.Encode(crypto.FromECDSAPub(&m.identity.PublicKey))
state.AllVerificationRequests = append(state.AllVerificationRequests, verificationRequest)
if message.From == myPubKey { // Verification requests we sent
contact, ok := m.allContacts.Load(message.To)
if !ok {
m.logger.Info("contact not found")
return nil
}
contact.VerificationStatus = VerificationStatus(message.VerificationStatus)
if err := m.persistence.SaveContact(contact, nil); err != nil {
return err
}
m.allContacts.Store(contact.ID, contact)
state.ModifiedContacts.Store(contact.ID, true)
// TODO: create activity center notif
}
// else { // Verification requests we received
// // TODO: activity center notif
//}
return nil
}
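// ImageServerURL returns the base URL of the local image server.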
func (m *Messenger) ImageServerURL() string {
return m.httpServer.MakeImageServerURL()
}
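// myHexIdentity returns our own public key encoded as a hex string.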
func (m *Messenger) myHexIdentity() string {
return common.PubkeyToHex(&m.identity.PublicKey)
}
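// GetMentionsManager returns the messenger's mention manager.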
func (m *Messenger) GetMentionsManager() *MentionManager {
return m.mentionsManager
}
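// getOtherMessagesInAlbum returns all messages belonging to the same image album as the given
// message, or just the message itself when it is not part of an album.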
func (m *Messenger) getOtherMessagesInAlbum(message *common.Message, chatID string) ([]*common.Message, error) {
var connectedMessages []*common.Message
// In the case of image messages, we need to delete all the images in the album
if message.ContentType == protobuf.ChatMessage_IMAGE {
image := message.GetImage()
if image != nil && image.AlbumId != "" {
messagesInTheAlbum, err := m.persistence.albumMessages(chatID, image.GetAlbumId())
if err != nil {
return nil, err
}
connectedMessages = append(connectedMessages, messagesInTheAlbum...)
return connectedMessages, nil
}
}
return append(connectedMessages, message), nil
}
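// withChatClock invokes the callback with the latest clock value and its related chat ID,
// then persists the updated clock on that chat.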
func (m *Messenger) withChatClock(callback func(string, uint64) error) error {
clock, chat := m.getLastClockWithRelatedChat()
err := callback(chat.ID, clock)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
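// syncDeleteForMeMessage dispatches all stored delete-for-me messages as sync messages
// through the provided raw message handler.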
func (m *Messenger) syncDeleteForMeMessage(ctx context.Context, rawMessageDispatcher RawMessageHandler) error {
deleteForMes, err := m.persistence.GetDeleteForMeMessages()
if err != nil {
return err
}
return m.withChatClock(func(chatID string, _ uint64) error {
for _, deleteForMe := range deleteForMes {
encodedMessage, err2 := proto.Marshal(deleteForMe)
if err2 != nil {
return err2
}
rawMessage := common.RawMessage{
LocalChatID: chatID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_DELETE_FOR_ME_MESSAGE,
ResendType: common.ResendTypeDataSync,
}
_, err2 = rawMessageDispatcher(ctx, rawMessage)
if err2 != nil {
return err2
}
}
return nil
})
}
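// GetDeleteForMeMessages returns all stored delete-for-me sync messages.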
func (m *Messenger) GetDeleteForMeMessages() ([]*protobuf.SyncDeleteForMeMessage, error) {
return m.persistence.GetDeleteForMeMessages()
}
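// startCleanupLoop runs cleanupFunc after an initial delay and then once per hour until the messenger quits.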
func (m *Messenger) startCleanupLoop(name string, cleanupFunc func() error) {
logger := m.logger.Named(name)
go func() {
defer gocommon.LogOnPanic()
// Delay the first run by a few minutes to minimize the messenger's startup time
var interval time.Duration = 5 * time.Minute
for {
select {
case <-time.After(interval):
// Set the regular interval after the first execution
interval = 1 * time.Hour
err := cleanupFunc()
if err != nil {
logger.Error("failed to cleanup", zap.Error(err))
}
case <-m.quit:
return
}
}
}()
}
func (m *Messenger) startMessageSegmentsCleanupLoop() {
m.startCleanupLoop("messageSegmentsCleanupLoop", m.sender.CleanupSegments)
}
func (m *Messenger) startHashRatchetEncryptedMessagesCleanupLoop() {
m.startCleanupLoop("hashRatchetEncryptedMessagesCleanupLoop", m.sender.CleanupHashRatchetEncryptedMessages)
}
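// FindStatusMessageIDForBridgeMessageID returns the Status message ID associated with the given bridge message ID.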
func (m *Messenger) FindStatusMessageIDForBridgeMessageID(bridgeMessageID string) (string, error) {
return m.persistence.FindStatusMessageIDForBridgeMessageID(bridgeMessageID)
}