status-go/protocol/messenger.go

package protocol

import (
	"bytes"
	"context"
	"crypto/ecdsa"
	"database/sql"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"math"
	"math/rand"
	"os"
	"reflect"
	"strings"
	"sync"
	"time"

	"github.com/davecgh/go-spew/spew"
	"github.com/golang/protobuf/proto"
	"github.com/google/uuid"
	"github.com/pkg/errors"
	"go.uber.org/zap"

	gethcommon "github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/ethereum/go-ethereum/event"
	"github.com/ethereum/go-ethereum/p2p"

	"github.com/status-im/status-go/account"
	"github.com/status-im/status-go/appdatabase"
	"github.com/status-im/status-go/appmetrics"
	"github.com/status-im/status-go/connection"
	"github.com/status-im/status-go/contracts"
	"github.com/status-im/status-go/eth-node/crypto"
	"github.com/status-im/status-go/eth-node/types"
	"github.com/status-im/status-go/images"
	"github.com/status-im/status-go/multiaccounts"
	"github.com/status-im/status-go/multiaccounts/accounts"
	"github.com/status-im/status-go/multiaccounts/settings"
	"github.com/status-im/status-go/protocol/anonmetrics"
	"github.com/status-im/status-go/protocol/common"
	"github.com/status-im/status-go/protocol/communities"
	"github.com/status-im/status-go/protocol/encryption"
	"github.com/status-im/status-go/protocol/encryption/multidevice"
	"github.com/status-im/status-go/protocol/encryption/sharedsecret"
	"github.com/status-im/status-go/protocol/ens"
	"github.com/status-im/status-go/protocol/identity"
	"github.com/status-im/status-go/protocol/identity/alias"
	"github.com/status-im/status-go/protocol/identity/identicon"
	"github.com/status-im/status-go/protocol/protobuf"
	"github.com/status-im/status-go/protocol/pushnotificationclient"
	"github.com/status-im/status-go/protocol/pushnotificationserver"
	"github.com/status-im/status-go/protocol/requests"
	"github.com/status-im/status-go/protocol/sqlite"
	"github.com/status-im/status-go/protocol/transport"
	"github.com/status-im/status-go/protocol/verification"
	"github.com/status-im/status-go/server"
	"github.com/status-im/status-go/services/browsers"
	"github.com/status-im/status-go/services/ext/mailservers"
	mailserversDB "github.com/status-im/status-go/services/mailservers"
	"github.com/status-im/status-go/services/wallet"
	"github.com/status-im/status-go/services/wallet/token"
	"github.com/status-im/status-go/telemetry"
)

const maxChunkSizeMessages = 1000
const maxChunkSizeBytes = 1500000

// todo: kozieiev: get rid of wakutransp word
type chatContext string

const (
	PubKeyStringLength = 132

	transactionSentTxt = "Transaction sent"

	publicChat  chatContext = "public-chat"
	privateChat chatContext = "private-chat"
)

const messageResendMinDelay = 30
const messageResendMaxCount = 3
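
// Note: messageResendMinDelay is expressed in seconds (shouldResendMessage
// multiplies it by time.Second.Milliseconds()), and messageResendMaxCount
// caps how many send attempts are retried for a single raw message.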

var communityAdvertiseIntervalSecond int64 = 60 * 60

// messageCacheIntervalMs is how long we should keep processed messages in the cache, in ms
var messageCacheIntervalMs uint64 = 1000 * 60 * 60 * 48

// Messenger is an entity managing chats and messages.
// It acts as a bridge between the application and encryption
// layers.
// It needs to expose an interface to manage installations
// because installations are managed by the user.
// Similarly, it needs to expose an interface to manage
// mailservers because they can also be managed by the user.
type Messenger struct {
	node                   types.Node
	server                 *p2p.Server
	peerStore              *mailservers.PeerStore
	config                 *config
	identity               *ecdsa.PrivateKey
	persistence            *sqlitePersistence
	transport              *transport.Transport
	encryptor              *encryption.Protocol
	sender                 *common.MessageSender
	ensVerifier            *ens.Verifier
	anonMetricsClient      *anonmetrics.Client
	anonMetricsServer      *anonmetrics.Server
	pushNotificationClient *pushnotificationclient.Client
	pushNotificationServer *pushnotificationserver.Server
	communitiesManager     *communities.Manager
	accountsManager        *account.GethManager

	mentionsManager            *MentionManager
	logger                     *zap.Logger
	outputCSV                  bool
	csvFile                    *os.File
	verifyTransactionClient    EthClient
	featureFlags               common.FeatureFlags
	shutdownTasks              []func() error
	shouldPublishContactCode   bool
	systemMessagesTranslations *systemMessageTranslationsMap
	allChats                   *chatMap
	allContacts                *contactMap
	allInstallations           *installationMap
	modifiedInstallations      *stringBoolMap
	installationID             string
	mailserverCycle            mailserverCycle
	database                   *sql.DB
	multiAccounts              *multiaccounts.Database
	mailservers                *mailserversDB.Database
	settings                   *accounts.Database
	account                    *multiaccounts.Account
	mailserversDatabase        *mailserversDB.Database
	browserDatabase            *browsers.Database
	httpServer                 *server.MediaServer
	quit                       chan struct{}
	ctx                        context.Context
	cancel                     context.CancelFunc

	importingCommunities                 map[string]bool
	requestedCommunitiesLock             sync.RWMutex
	requestedCommunities                 map[string]*transport.Filter
	requestedContactsLock                sync.RWMutex
	requestedContacts                    map[string]*transport.Filter
	connectionState                      connection.State
	telemetryClient                      *telemetry.Client
	contractMaker                        *contracts.ContractMaker
	downloadHistoryArchiveTasksWaitGroup sync.WaitGroup
	verificationDatabase                 *verification.Persistence
	savedAddressesManager                *wallet.SavedAddressesManager

	// TODO(samyoul) Determine if/how the remaining usage of this mutex can be removed
	mutex                     sync.Mutex
	mailPeersMutex            sync.Mutex
	handleMessagesMutex       sync.Mutex
	handleImportMessagesMutex sync.Mutex

	// flag to disable checking #hasPairedDevices
	localPairing bool
}

type connStatus int

const (
	disconnected connStatus = iota + 1
	connecting
	connected
)

type peerStatus struct {
	status                connStatus
	canConnectAfter       time.Time
	lastConnectionAttempt time.Time
	mailserver            mailserversDB.Mailserver
}

type mailserverCycle struct {
	sync.RWMutex
	activeMailserver          *mailserversDB.Mailserver
	peers                     map[string]peerStatus
	events                    chan *p2p.PeerEvent
	subscription              event.Subscription
	availabilitySubscriptions []chan struct{}
}

type dbConfig struct {
	dbPath          string
	dbKey           string
	dbKDFIterations int
}

// EnvelopeEventsInterceptor intercepts envelope events coming from the transport,
// letting the Messenger process them before forwarding to the wrapped handler.
type EnvelopeEventsInterceptor struct {
	EnvelopeEventsHandler transport.EnvelopeEventsHandler
	Messenger             *Messenger
}

// EnvelopeSent is triggered when an envelope has been delivered to at least one peer.
func (interceptor EnvelopeEventsInterceptor) EnvelopeSent(identifiers [][]byte) {
	if interceptor.Messenger != nil {
		var ids []string
		for _, identifierBytes := range identifiers {
			ids = append(ids, types.EncodeHex(identifierBytes))
		}

		err := interceptor.Messenger.processSentMessages(ids)
		if err != nil {
			interceptor.Messenger.logger.Info("Messenger failed to process sent messages", zap.Error(err))
		} else {
			interceptor.EnvelopeEventsHandler.EnvelopeSent(identifiers)
		}
	} else {
		// NOTE(rasom): if interceptor.Messenger is not nil and an error occurred
		// while processing the sent messages, we don't want to emit the
		// envelope.sent signal to the client; hence the `else` clause above is
		// necessary.
		interceptor.EnvelopeEventsHandler.EnvelopeSent(identifiers)
	}
}

// EnvelopeExpired is triggered when an envelope expires without having been delivered to any peer.
func (interceptor EnvelopeEventsInterceptor) EnvelopeExpired(identifiers [][]byte, err error) {
	// We don't track expired events in Messenger, so just redirect to the handler.
	interceptor.EnvelopeEventsHandler.EnvelopeExpired(identifiers, err)
}

// MailServerRequestCompleted is triggered when the mailserver notifies that a request has been completed.
func (interceptor EnvelopeEventsInterceptor) MailServerRequestCompleted(requestID types.Hash, lastEnvelopeHash types.Hash, cursor []byte, err error) {
	// We don't track mailserver requests in Messenger, so just redirect to the handler.
	interceptor.EnvelopeEventsHandler.MailServerRequestCompleted(requestID, lastEnvelopeHash, cursor, err)
}

// MailServerRequestExpired is triggered when a mailserver request expires.
func (interceptor EnvelopeEventsInterceptor) MailServerRequestExpired(hash types.Hash) {
	// We don't track mailserver requests in Messenger, so just redirect to the handler.
	interceptor.EnvelopeEventsHandler.MailServerRequestExpired(hash)
}

func NewMessenger(
	nodeName string,
	identity *ecdsa.PrivateKey,
	node types.Node,
	installationID string,
	peerStore *mailservers.PeerStore,
	accountsManager *account.GethManager,
	opts ...Option,
) (*Messenger, error) {
	var messenger *Messenger

	c := config{}
	for _, opt := range opts {
		if err := opt(&c); err != nil {
			return nil, err
		}
	}

	logger := c.logger
	if c.logger == nil {
		var err error
		if logger, err = zap.NewDevelopment(); err != nil {
			return nil, errors.Wrap(err, "failed to create a logger")
		}
	}

	if c.systemMessagesTranslations == nil {
		c.systemMessagesTranslations = defaultSystemMessagesTranslations
	}

	// Configure the database.
	database := c.db
	if c.db == nil && c.dbConfig == (dbConfig{}) {
		return nil, errors.New("database instance or database path needs to be provided")
	}
	if c.db == nil {
		logger.Info("opening a database", zap.String("dbPath", c.dbConfig.dbPath), zap.Int("KDFIterations", c.dbConfig.dbKDFIterations))
		var err error
		database, err = appdatabase.InitializeDB(c.dbConfig.dbPath, c.dbConfig.dbKey, c.dbConfig.dbKDFIterations)
		if err != nil {
			return nil, errors.Wrap(err, "failed to initialize database from the db config")
		}
	}

	// Apply any post database creation changes to the database
	c.db = database
	for _, opt := range c.afterDbCreatedHooks {
		if err := opt(&c); err != nil {
			return nil, err
		}
	}

	// Apply migrations for all components.
	err := sqlite.Migrate(database)
	if err != nil {
		return nil, errors.Wrap(err, "failed to apply migrations")
	}

	// Initialize transport layer.
	var transp *transport.Transport

	if waku, err := node.GetWaku(nil); err == nil && waku != nil {
		transp, err = transport.NewTransport(
			waku,
			identity,
			database,
			"waku_keys",
			nil,
			c.envelopesMonitorConfig,
			logger,
		)
		if err != nil {
			return nil, errors.Wrap(err, "failed to create Transport")
		}
	} else {
		logger.Info("failed to find Waku service; trying WakuV2", zap.Error(err))
		wakuV2, err := node.GetWakuV2(nil)
		if err != nil || wakuV2 == nil {
			return nil, errors.Wrap(err, "failed to find Whisper and Waku V1/V2 services")
		}
		transp, err = transport.NewTransport(
			wakuV2,
			identity,
			database,
			"wakuv2_keys",
			nil,
			c.envelopesMonitorConfig,
			logger,
		)
		if err != nil {
			return nil, errors.Wrap(err, "failed to create Transport")
		}
	}

	// Initialize encryption layer.
	encryptionProtocol := encryption.New(
		database,
		installationID,
		logger,
	)

	sender, err := common.NewMessageSender(
		identity,
		database,
		encryptionProtocol,
		transp,
		logger,
		c.featureFlags,
	)
	if err != nil {
		return nil, errors.Wrap(err, "failed to create messageSender")
	}

	// Initialise anon metrics client
	var anonMetricsClient *anonmetrics.Client
	if c.anonMetricsClientConfig != nil &&
		c.anonMetricsClientConfig.ShouldSend &&
		c.anonMetricsClientConfig.Active == anonmetrics.ActiveClientPhrase {
		anonMetricsClient = anonmetrics.NewClient(sender)
		anonMetricsClient.Config = c.anonMetricsClientConfig
		anonMetricsClient.Identity = identity
		anonMetricsClient.DB = appmetrics.NewDB(database)
		anonMetricsClient.Logger = logger
	}

	// Initialise anon metrics server
	var anonMetricsServer *anonmetrics.Server
	if c.anonMetricsServerConfig != nil &&
		c.anonMetricsServerConfig.Enabled &&
		c.anonMetricsServerConfig.Active == anonmetrics.ActiveServerPhrase {
		server, err := anonmetrics.NewServer(c.anonMetricsServerConfig.PostgresURI)
		if err != nil {
			return nil, errors.Wrap(err, "failed to create anonmetrics.Server")
		}
		anonMetricsServer = server
		anonMetricsServer.Config = c.anonMetricsServerConfig
		anonMetricsServer.Logger = logger
	}

	var telemetryClient *telemetry.Client
	if c.telemetryServerURL != "" {
		telemetryClient = telemetry.NewClient(logger, c.telemetryServerURL, c.account.KeyUID, nodeName)
	}

	// Initialize push notification server
	var pushNotificationServer *pushnotificationserver.Server
	if c.pushNotificationServerConfig != nil && c.pushNotificationServerConfig.Enabled {
		c.pushNotificationServerConfig.Identity = identity
		pushNotificationServerPersistence := pushnotificationserver.NewSQLitePersistence(database)
		pushNotificationServer = pushnotificationserver.New(c.pushNotificationServerConfig, pushNotificationServerPersistence, sender)
	}

	// Initialize push notification client
	pushNotificationClientPersistence := pushnotificationclient.NewPersistence(database)
	pushNotificationClientConfig := c.pushNotificationClientConfig
	if pushNotificationClientConfig == nil {
		pushNotificationClientConfig = &pushnotificationclient.Config{}
	}

	sqlitePersistence := newSQLitePersistence(database)

	// Overriding until we handle different identities
	pushNotificationClientConfig.Identity = identity
	pushNotificationClientConfig.Logger = logger
	pushNotificationClientConfig.InstallationID = installationID

	pushNotificationClient := pushnotificationclient.New(pushNotificationClientPersistence, pushNotificationClientConfig, sender, sqlitePersistence)

	ensVerifier := ens.New(node, logger, transp, database, c.verifyENSURL, c.verifyENSContractAddress)

	managerOptions := []communities.ManagerOption{
		communities.WithAccountManager(accountsManager),
	}

	if c.rpcClient != nil {
		tokenManager := token.NewTokenManager(database, c.rpcClient, c.rpcClient.NetworkManager)
		managerOptions = append(managerOptions, communities.WithTokenManager(tokenManager))
	}

	if c.walletConfig != nil {
		managerOptions = append(managerOptions, communities.WithWalletConfig(c.walletConfig))
	}

	communitiesManager, err := communities.NewManager(identity, database, encryptionProtocol, logger, ensVerifier, transp, c.torrentConfig, managerOptions...)
	if err != nil {
		return nil, err
	}

	settings, err := accounts.NewDB(database)
	if err != nil {
		return nil, err
	}

	mailservers := mailserversDB.NewDB(database)

	savedAddressesManager := wallet.NewSavedAddressesManager(c.db)

	ctx, cancel := context.WithCancel(context.Background())

	messenger = &Messenger{
		config:                     &c,
		node:                       node,
		identity:                   identity,
		persistence:                sqlitePersistence,
		transport:                  transp,
		encryptor:                  encryptionProtocol,
		sender:                     sender,
		anonMetricsClient:          anonMetricsClient,
		anonMetricsServer:          anonMetricsServer,
		telemetryClient:            telemetryClient,
		pushNotificationClient:     pushNotificationClient,
		pushNotificationServer:     pushNotificationServer,
		communitiesManager:         communitiesManager,
		accountsManager:            accountsManager,
		ensVerifier:                ensVerifier,
		featureFlags:               c.featureFlags,
		systemMessagesTranslations: c.systemMessagesTranslations,
		allChats:                   new(chatMap),
		allContacts:                new(contactMap),
		allInstallations:           new(installationMap),
		installationID:             installationID,
		modifiedInstallations:      new(stringBoolMap),
		verifyTransactionClient:    c.verifyTransactionClient,
		database:                   database,
		multiAccounts:              c.multiAccount,
		settings:                   settings,
		peerStore:                  peerStore,
		verificationDatabase:       verification.NewPersistence(database),
		mailservers:                mailservers,
		mailserverCycle: mailserverCycle{
			peers:                     make(map[string]peerStatus),
			availabilitySubscriptions: make([]chan struct{}, 0),
		},
		mailserversDatabase:      c.mailserversDatabase,
		account:                  c.account,
		quit:                     make(chan struct{}),
		ctx:                      ctx,
		cancel:                   cancel,
		requestedCommunitiesLock: sync.RWMutex{},
		requestedCommunities:     make(map[string]*transport.Filter),
		requestedContactsLock:    sync.RWMutex{},
		requestedContacts:        make(map[string]*transport.Filter),
		importingCommunities:     make(map[string]bool),
		browserDatabase:          c.browserDatabase,
		httpServer:               c.httpServer,
		contractMaker: &contracts.ContractMaker{
			RPCClient: c.rpcClient,
		},
		shutdownTasks: []func() error{
			ensVerifier.Stop,
			pushNotificationClient.Stop,
			communitiesManager.Stop,
			encryptionProtocol.Stop,
			transp.ResetFilters,
			transp.Stop,
			func() error { sender.Stop(); return nil },
			// Currently this often fails, seems like it's safe to ignore them
			// https://github.com/uber-go/zap/issues/328
			func() error { _ = logger.Sync; return nil },
			database.Close,
		},
		logger:                logger,
		savedAddressesManager: savedAddressesManager,
	}

	messenger.mentionsManager = NewMentionManager(messenger)

	if c.outputMessagesCSV {
		messenger.outputCSV = c.outputMessagesCSV
		csvFile, err := os.Create("messages-" + fmt.Sprint(time.Now().Unix()) + ".csv")
		if err != nil {
			return nil, err
		}

		_, err = csvFile.Write([]byte("timestamp\tmessageID\tfrom\ttopic\tchatID\tmessageType\tmessage\n"))
		if err != nil {
			return nil, err
		}

		messenger.csvFile = csvFile
		messenger.shutdownTasks = append(messenger.shutdownTasks, csvFile.Close)
	}

	if anonMetricsClient != nil {
		messenger.shutdownTasks = append(messenger.shutdownTasks, anonMetricsClient.Stop)
	}
	if anonMetricsServer != nil {
		messenger.shutdownTasks = append(messenger.shutdownTasks, anonMetricsServer.Stop)
	}

	if c.envelopesMonitorConfig != nil {
		interceptor := EnvelopeEventsInterceptor{c.envelopesMonitorConfig.EnvelopeEventsHandler, messenger}
		err := messenger.transport.SetEnvelopeEventsHandler(interceptor)
		if err != nil {
			logger.Info("Unable to set envelopes event handler", zap.Error(err))
		}
	}

	return messenger, nil
}
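
// Illustrative usage sketch (not part of this file): constructing and starting
// a Messenger. The concrete functional options are defined elsewhere in this
// package; WithDatabase below is a hypothetical stand-in for whichever Option
// supplies the *sql.DB (or dbConfig) that NewMessenger requires.
//
//	messenger, err := NewMessenger(
//		"my-node",        // nodeName
//		identity,         // *ecdsa.PrivateKey of the account
//		node,             // types.Node providing Waku/WakuV2
//		installationID,   // ID of this device/installation
//		peerStore,        // *mailservers.PeerStore
//		accountsManager,  // *account.GethManager
//		WithDatabase(db), // hypothetical option providing an open *sql.DB
//	)
//	if err != nil {
//		return err
//	}
//	if _, err := messenger.Start(); err != nil {
//		return err
//	}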

func (m *Messenger) SetP2PServer(server *p2p.Server) {
	m.server = server
}

func (m *Messenger) processSentMessages(ids []string) error {
	if m.connectionState.Offline {
		return errors.New("Can't mark message as sent while offline")
	}

	for _, id := range ids {
		rawMessage, err := m.persistence.RawMessageByID(id)
		if err != nil {
			return errors.Wrapf(err, "Can't get raw message with id %v", id)
		}

		rawMessage.Sent = true

		err = m.persistence.SaveRawMessage(rawMessage)
		if err != nil {
			return errors.Wrapf(err, "Can't save raw message marked as sent")
		}

		err = m.UpdateMessageOutgoingStatus(id, common.OutgoingStatusSent)
		if err != nil {
			return err
		}
	}

	return nil
}
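
// shouldResendMessage reports whether a not-yet-sent raw message is due for
// another send attempt, using an exponential backoff:
//
//	backoff = 2^(SendCount-1) * messageResendMinDelay seconds
//
// With messageResendMinDelay = 30 and messageResendMaxCount = 3, that means
// roughly 30s after the first attempt, 60s after the second and 120s after the
// third; once SendCount exceeds messageResendMaxCount the message is no longer
// re-sent. Only emoji reactions and chat messages are eligible.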
func shouldResendMessage(message *common.RawMessage, t common.TimeSource) (bool, error) {
	if !(message.MessageType == protobuf.ApplicationMetadataMessage_EMOJI_REACTION ||
		message.MessageType == protobuf.ApplicationMetadataMessage_CHAT_MESSAGE) {
		return false, errors.Errorf("Should resend only specific types of messages, can't resend %v", message.MessageType)
	}

	if message.Sent {
		return false, errors.New("Should resend only non-sent messages")
	}

	if message.SendCount > messageResendMaxCount {
		return false, nil
	}

	// The exponential backoff depends on how many attempts to send the message have already been made.
	backoff := uint64(math.Pow(2, float64(message.SendCount-1))) * messageResendMinDelay * uint64(time.Second.Milliseconds())
	backoffElapsed := t.GetCurrentTime() > (message.LastSent + backoff)
	return backoffElapsed, nil
}

func (m *Messenger) resendExpiredMessages() error {
	if m.connectionState.Offline {
		return errors.New("offline")
	}

	ids, err := m.persistence.ExpiredMessagesIDs(messageResendMaxCount)
	if err != nil {
		return errors.Wrapf(err, "Can't get expired reactions from db")
	}

	for _, id := range ids {
		rawMessage, err := m.persistence.RawMessageByID(id)
		if err != nil {
			return errors.Wrapf(err, "Can't get raw message with id %v", id)
		}

		chat, ok := m.allChats.Load(rawMessage.LocalChatID)
		if !ok {
			return ErrChatNotFound
		}

		if !(chat.Public() || chat.CommunityChat()) {
			return errors.New("Only public chats and community chats messages are resent")
		}

		ok, err = shouldResendMessage(rawMessage, m.getTimesource())
		if err != nil {
			return err
		}

		if ok {
			err = m.persistence.SaveRawMessage(rawMessage)
			if err != nil {
				return errors.Wrapf(err, "Can't save raw message marked as non-expired")
			}

			err = m.reSendRawMessage(context.Background(), rawMessage.ID)
			if err != nil {
				return errors.Wrapf(err, "Can't resend expired message with id %v", rawMessage.ID)
			}
		}
	}
	return nil
}

func (m *Messenger) ToForeground() {
	if m.httpServer != nil {
		m.httpServer.ToForeground()
	}
}

func (m *Messenger) ToBackground() {
	if m.httpServer != nil {
		m.httpServer.ToBackground()
	}
}
func (m *Messenger) Start() (*MessengerResponse, error) {
2020-07-14 14:07:19 +00:00
m.logger.Info("starting messenger", zap.String("identity", types.EncodeHex(crypto.FromECDSAPub(&m.identity.PublicKey))))
// Start push notification server
if m.pushNotificationServer != nil {
if err := m.pushNotificationServer.Start(); err != nil {
return nil, err
}
}
2020-07-22 07:41:40 +00:00
// Start push notification client
if m.pushNotificationClient != nil {
2020-08-18 15:07:48 +00:00
m.handlePushNotificationClientRegistrations(m.pushNotificationClient.SubscribeToRegistrations())
if err := m.pushNotificationClient.Start(); err != nil {
return nil, err
}
}
Anon Metrics Broadcast (#2198) * Protobufs and adapters * Added basic anon metric service and config init * Added fibonacci interval incrementer * Added basic Client.Start func and integrated interval incrementer * Added new processed field to app metrics table * Added id column to app metrics table * Added migration clean up * Added appmetrics GetUnprocessed and SetToProcessedByIDs and tests There was a wierd bug where metrics in the db that did not explicitly insert a value would be NULL, so could not be found by . In addition I've added a new primary id field to the app_metrics table so that updates could be done against very specific metric rows. * Updated adaptors and db to handle proto_id I need a way to distinguish individual metric items from each other so that I can ignore the ones that have been seen before. * Moved incrementer into dedicated file * Resolve incrementer test fail * Finalised the main loop functionality * Implemented delete loop framework * Updated adaptors file name * Added delete loop delay and quit, and tweak on RawMessage gen * Completed delete loop logic * Added DBLock to prevent deletion during mainLoop * Added postgres DB connection, integrated into anonmetrics.Server * Removed proto_id from SQL migration and model * Integrated postgres with Server and updated adaptors * Function name update * Added sample config files for client and server * Fixes and testing for low level e2e * make generate * Fix lint * Fix for receiving an anonMetricBatch not in server mode * Postgres test fixes * Tidy up, make vendor and make generate * delinting * Fixing database tests * Attempted fix of does: cannot open `does' (No such file or directory) not: cannot open `not' (No such file or directory) exist: cannot open `exist' (No such file or directory) error on sql resource loas * Moved all anon metric postgres migration logic and sources into a the protocol/anonmetrics package or sub packages. I don't know if this will fix the does: cannot open `does' (No such file or directory) not: cannot open `not' (No such file or directory) exist: cannot open `exist' (No such file or directory) error that happens in Jenkins but this could work * Lint for the lint god * Why doesn't the linter list all its problems at once? * test tweaks * Fix for wakuV2 change * DB reset change * Fix for postgres db migrations fails * More robust implementation of postgres test setup and teardown * Added block for anon metrics functionality * Version Bump to 0.84.0 * Added test to check anon metrics broadcast is deactivated * Protobufs and adapters * Added basic anon metric service and config init * Added new processed field to app metrics table * Added id column to app metrics table * Added migration clean up * Added appmetrics GetUnprocessed and SetToProcessedByIDs and tests There was a wierd bug where metrics in the db that did not explicitly insert a value would be NULL, so could not be found by . In addition I've added a new primary id field to the app_metrics table so that updates could be done against very specific metric rows. * Updated adaptors and db to handle proto_id I need a way to distinguish individual metric items from each other so that I can ignore the ones that have been seen before. 
* Added postgres DB connection, integrated into anonmetrics.Server * Removed proto_id from SQL migration and model * Integrated postgres with Server and updated adaptors * Added sample config files for client and server * Fix lint * Fix for receiving an anonMetricBatch not in server mode * Postgres test fixes * Tidy up, make vendor and make generate * Moved all anon metric postgres migration logic and sources into a the protocol/anonmetrics package or sub packages. I don't know if this will fix the does: cannot open `does' (No such file or directory) not: cannot open `not' (No such file or directory) exist: cannot open `exist' (No such file or directory) error that happens in Jenkins but this could work
2021-09-01 12:02:18 +00:00
// Start anonymous metrics client
if m.anonMetricsClient != nil {
if err := m.anonMetricsClient.Start(); err != nil {
return nil, err
}
}
2021-01-11 10:32:51 +00:00
ensSubscription := m.ensVerifier.Subscribe()
// Subscrbe
if err := m.ensVerifier.Start(); err != nil {
return nil, err
}
if err := m.communitiesManager.Start(); err != nil {
return nil, err
}
2020-07-31 12:22:05 +00:00
// set shared secret handles
m.sender.SetHandleSharedSecrets(m.handleSharedSecrets)
2020-07-31 12:22:05 +00:00
2020-07-31 09:08:09 +00:00
subscriptions, err := m.encryptor.Start(m.identity)
if err != nil {
return nil, err
2020-07-31 09:08:09 +00:00
}
2020-07-31 12:22:05 +00:00
// handle stored shared secrets
err = m.handleSharedSecrets(subscriptions.SharedSecrets)
if err != nil {
return nil, err
2020-07-31 12:22:05 +00:00
}
2020-07-31 09:08:09 +00:00
m.handleEncryptionLayerSubscriptions(subscriptions)
m.handleCommunitiesSubscription(m.communitiesManager.Subscribe())
m.handleCommunitiesHistoryArchivesSubscription(m.communitiesManager.Subscribe())
m.updateCommunitiesActiveMembersPeriodically()
m.handleConnectionChange(m.online())
2021-01-11 10:32:51 +00:00
m.handleENSVerificationSubscription(ensSubscription)
m.watchConnectionChange()
m.watchExpiredMessages()
m.watchIdentityImageChanges()
m.broadcastLatestUserStatus()
m.timeoutAutomaticStatusUpdates()
m.startBackupLoop()
err = m.startAutoMessageLoop()
if err != nil {
return nil, err
}
m.startSyncSettingsLoop()
if err := m.cleanTopics(); err != nil {
return nil, err
}
response := &MessengerResponse{}
mailservers, err := m.allMailservers()
if err != nil {
return nil, err
}
response.Mailservers = mailservers
err = m.StartMailserverCycle()
if err != nil {
return nil, err
}
if m.torrentClientReady() {
adminCommunities, err := m.communitiesManager.Created()
if err == nil && len(adminCommunities) > 0 {
available := m.SubscribeMailserverAvailable()
go func() {
<-available
m.InitHistoryArchiveTasks(adminCommunities)
}()
for _, c := range adminCommunities {
if c.Joined() && c.HasTokenPermissions() {
go m.communitiesManager.CheckMemberPermissionsPeriodically(c.ID())
}
}
}
}
joinedCommunities, err := m.communitiesManager.Joined()
if err != nil {
return nil, err
}
for _, joinedCommunity := range joinedCommunities {
// resume importing message history archives in case
// imports have been interrupted previously
err := m.resumeHistoryArchivesImport(joinedCommunity.ID())
if err != nil {
return nil, err
}
}
if m.httpServer != nil {
err = m.httpServer.Start()
if err != nil {
return nil, err
}
}
err = m.GarbageCollectRemovedBookmarks()
if err != nil {
return nil, err
}
err = m.garbageCollectRemovedSavedAddresses()
if err != nil {
return nil, err
}
return response, nil
}
func (m *Messenger) IdentityPublicKey() *ecdsa.PublicKey {
return &m.identity.PublicKey
}
func (m *Messenger) IdentityPublicKeyCompressed() []byte {
return crypto.CompressPubkey(m.IdentityPublicKey())
}
// cleanTopics removes any topic that does not have the Listen flag set
func (m *Messenger) cleanTopics() error {
if m.mailserversDatabase == nil {
return nil
}
var filters []*transport.Filter
for _, f := range m.transport.Filters() {
if f.Listen && !f.Ephemeral {
filters = append(filters, f)
}
}
m.logger.Debug("keeping topics", zap.Any("filters", filters))
return m.mailserversDatabase.SetTopics(filters)
2020-07-31 09:08:09 +00:00
}
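// keepTopicFilter is a hedged one-liner restating the predicate applied by
// cleanTopics above: only filters that are listened to and not ephemeral are
// kept in the mailservers database. It is illustrative and not called here.
func keepTopicFilter(f *transport.Filter) bool {
	return f.Listen && !f.Ephemeral
}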
// handleConnectionChange is called each time we go from offline to online or vice versa
func (m *Messenger) handleConnectionChange(online bool) {
if online {
if m.pushNotificationClient != nil {
m.pushNotificationClient.Online()
}
if m.shouldPublishContactCode {
2021-01-25 13:49:13 +00:00
if err := m.publishContactCode(); err != nil {
m.logger.Error("could not publish on contact code", zap.Error(err))
return
}
m.shouldPublishContactCode = false
}
go func() {
_, err := m.RequestAllHistoricMessagesWithRetries()
if err != nil {
m.logger.Warn("failed to fetch historic messages", zap.Error(err))
}
}()
} else {
if m.pushNotificationClient != nil {
m.pushNotificationClient.Offline()
}
2022-01-12 16:02:01 +00:00
}
2022-01-12 16:02:01 +00:00
2021-01-11 10:32:51 +00:00
m.ensVerifier.SetOnline(online)
}
func (m *Messenger) online() bool {
switch m.transport.WakuVersion() {
case 2:
return m.transport.PeerCount() > 0
default:
return m.node.PeersCount() > 0
}
}
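// onlineByPeerCount is a hedged, illustrative sketch of the rule used in
// (*Messenger).online above: with Waku v2 the transport's peer count decides
// connectivity, otherwise the underlying node's peer count does. It is not
// called by the messenger; the parameter names are assumptions for the example.
func onlineByPeerCount(wakuVersion uint, transportPeerCount, nodePeerCount int) bool {
	if wakuVersion == 2 {
		return transportPeerCount > 0
	}
	return nodePeerCount > 0
}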
2020-08-18 15:07:48 +00:00
func (m *Messenger) buildContactCodeAdvertisement() (*protobuf.ContactCodeAdvertisement, error) {
if m.pushNotificationClient == nil || !m.pushNotificationClient.Enabled() {
return nil, nil
}
m.logger.Debug("adding push notification info to contact code bundle")
info, err := m.pushNotificationClient.MyPushNotificationQueryInfo()
if err != nil {
return nil, err
}
if len(info) == 0 {
return nil, nil
}
return &protobuf.ContactCodeAdvertisement{
PushNotificationInfo: info,
}, nil
}
2021-01-25 13:49:13 +00:00
// publishContactCode sends a public message wrapped in the encryption
// layer, which will propagate our bundle
2021-01-25 13:49:13 +00:00
func (m *Messenger) publishContactCode() error {
var payload []byte
2020-08-18 15:07:48 +00:00
m.logger.Debug("sending contact code")
contactCodeAdvertisement, err := m.buildContactCodeAdvertisement()
if err != nil {
m.logger.Error("could not build contact code advertisement", zap.Error(err))
}
if contactCodeAdvertisement == nil {
contactCodeAdvertisement = &protobuf.ContactCodeAdvertisement{}
}
2021-02-17 23:14:48 +00:00
2021-01-25 13:49:13 +00:00
err = m.attachChatIdentity(contactCodeAdvertisement)
if err != nil {
return err
}
2021-02-17 23:14:48 +00:00
if contactCodeAdvertisement.ChatIdentity != nil {
m.logger.Debug("attached chat identity", zap.Int("images len", len(contactCodeAdvertisement.ChatIdentity.Images)))
} else {
m.logger.Debug("no attached chat identity")
}
payload, err = proto.Marshal(contactCodeAdvertisement)
if err != nil {
return err
}
2020-07-31 12:22:05 +00:00
contactCodeTopic := transport.ContactCodeTopic(&m.identity.PublicKey)
rawMessage := common.RawMessage{
LocalChatID: contactCodeTopic,
2020-08-18 15:07:48 +00:00
MessageType: protobuf.ApplicationMetadataMessage_CONTACT_CODE_ADVERTISEMENT,
Payload: payload,
2020-07-31 12:22:05 +00:00
}
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
2022-02-17 15:13:10 +00:00
_, err = m.sender.SendPublic(ctx, contactCodeTopic, rawMessage)
2020-07-31 12:22:05 +00:00
if err != nil {
m.logger.Warn("failed to send a contact code", zap.Error(err))
}
2022-02-17 15:13:10 +00:00
joinedCommunities, err := m.communitiesManager.Joined()
if err != nil {
return err
}
for _, community := range joinedCommunities {
rawMessage.LocalChatID = community.MemberUpdateChannelID()
_, err = m.sender.SendPublic(ctx, rawMessage.LocalChatID, rawMessage)
if err != nil {
return err
}
}
2021-02-17 23:14:48 +00:00
m.logger.Debug("contact code sent")
2020-07-31 12:22:05 +00:00
return err
}
// attachChatIdentity attaches a protobuf.ChatIdentity to the given protobuf.ContactCodeAdvertisement,
// if the `shouldPublish` conditions are met
2021-01-25 13:49:13 +00:00
func (m *Messenger) attachChatIdentity(cca *protobuf.ContactCodeAdvertisement) error {
2020-11-09 15:16:36 +00:00
contactCodeTopic := transport.ContactCodeTopic(&m.identity.PublicKey)
shouldPublish, err := m.shouldPublishChatIdentity(contactCodeTopic)
if err != nil {
return err
}
2021-01-25 13:49:13 +00:00
if !shouldPublish {
return nil
}
2021-01-25 13:49:13 +00:00
cca.ChatIdentity, err = m.createChatIdentity(privateChat)
if err != nil {
return err
}
2020-12-10 10:12:51 +00:00
2023-02-02 17:59:48 +00:00
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
2021-01-25 13:49:13 +00:00
if err != nil {
return err
}
2022-02-17 15:13:10 +00:00
displayName, err := m.settings.DisplayName()
if err != nil {
return err
2021-01-25 13:49:13 +00:00
}
2022-08-05 11:22:35 +00:00
bio, err := m.settings.Bio()
if err != nil {
return err
}
2022-08-02 12:56:26 +00:00
socialLinks, err := m.settings.GetSocialLinks()
if err != nil {
return err
}
2022-08-05 11:22:35 +00:00
identityHash, err := m.getIdentityHash(displayName, bio, img, &socialLinks)
2022-08-02 12:56:26 +00:00
if err != nil {
return err
}
err = m.persistence.SaveWhenChatIdentityLastPublished(contactCodeTopic, identityHash)
2021-01-25 13:49:13 +00:00
if err != nil {
return err
}
return nil
}
2021-10-20 09:14:48 +00:00
// handleStandaloneChatIdentity sends a standalone ChatIdentity message to a public or one-to-one chat if the publish criteria are met
2020-11-09 15:16:36 +00:00
func (m *Messenger) handleStandaloneChatIdentity(chat *Chat) error {
2021-10-20 09:45:54 +00:00
if chat.ChatType != ChatTypePublic && chat.ChatType != ChatTypeOneToOne {
2020-11-09 15:16:36 +00:00
return nil
}
shouldPublishChatIdentity, err := m.shouldPublishChatIdentity(chat.ID)
if err != nil {
return err
}
if !shouldPublishChatIdentity {
return nil
}
ci, err := m.createChatIdentity(publicChat)
2020-11-09 15:16:36 +00:00
if err != nil {
return err
}
2020-11-09 15:16:36 +00:00
payload, err := proto.Marshal(ci)
if err != nil {
return err
}
2020-11-09 15:16:36 +00:00
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
MessageType: protobuf.ApplicationMetadataMessage_CHAT_IDENTITY,
Payload: payload,
}
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
2021-10-20 09:14:48 +00:00
if chat.ChatType == ChatTypePublic {
_, err = m.sender.SendPublic(ctx, chat.ID, rawMessage)
if err != nil {
return err
}
} else {
pk, err := chat.PublicKey()
if err != nil {
return err
}
_, err = m.sender.SendPrivate(ctx, pk, &rawMessage)
if err != nil {
return err
}
}
2023-02-02 17:59:48 +00:00
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
2020-12-10 10:12:51 +00:00
if err != nil {
return err
}
2022-02-17 15:13:10 +00:00
displayName, err := m.settings.DisplayName()
if err != nil {
return err
2020-12-10 10:12:51 +00:00
}
2022-08-05 11:22:35 +00:00
bio, err := m.settings.Bio()
if err != nil {
return err
}
2022-08-02 12:56:26 +00:00
socialLinks, err := m.settings.GetSocialLinks()
if err != nil {
return err
}
2022-08-05 11:22:35 +00:00
identityHash, err := m.getIdentityHash(displayName, bio, img, &socialLinks)
2022-08-02 12:56:26 +00:00
if err != nil {
return err
}
err = m.persistence.SaveWhenChatIdentityLastPublished(chat.ID, identityHash)
2020-11-09 15:16:36 +00:00
if err != nil {
return err
}
return nil
}
2023-02-02 17:59:48 +00:00
func (m *Messenger) getIdentityHash(displayName, bio string, img *images.IdentityImage, socialLinks *identity.SocialLinks) ([]byte, error) {
2022-08-02 12:56:26 +00:00
socialLinksData, err := socialLinks.Serialize()
if err != nil {
return []byte{}, err
}
2022-02-17 15:13:10 +00:00
if img == nil {
2022-08-05 11:22:35 +00:00
return crypto.Keccak256([]byte(displayName), []byte(bio), socialLinksData), nil
2022-02-17 15:13:10 +00:00
}
2022-08-05 11:22:35 +00:00
return crypto.Keccak256(img.Payload, []byte(displayName), []byte(bio), socialLinksData), nil
2022-02-17 15:13:10 +00:00
}
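// identityHashSketch is a hedged illustration (not used elsewhere) of how
// getIdentityHash combines its inputs: every field is folded into a single
// Keccak256 digest, so changing any of them (name, bio, social links, or the
// image payload) produces a different hash and therefore triggers a republish.
func identityHashSketch(displayName, bio string, imgPayload, socialLinksData []byte) []byte {
	if len(imgPayload) == 0 {
		return crypto.Keccak256([]byte(displayName), []byte(bio), socialLinksData)
	}
	return crypto.Keccak256(imgPayload, []byte(displayName), []byte(bio), socialLinksData)
}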
2020-11-09 15:16:36 +00:00
// shouldPublishChatIdentity returns true if the identity data (display name, bio, social links, images) has changed since the last publish, or if the ChatIdentity was last attached more than 24 hours ago
2020-11-24 23:16:19 +00:00
func (m *Messenger) shouldPublishChatIdentity(chatID string) (bool, error) {
2021-01-25 13:49:13 +00:00
if m.account == nil {
return false, nil
}
2020-12-10 08:42:36 +00:00
2022-02-17 15:13:10 +00:00
// Check we have at least one image or a display name
2023-02-02 17:59:48 +00:00
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
2020-12-10 08:42:36 +00:00
if err != nil {
return false, err
}
2022-02-17 15:13:10 +00:00
displayName, err := m.settings.DisplayName()
if err != nil {
return false, err
}
if img == nil && displayName == "" {
2020-12-10 08:42:36 +00:00
return false, nil
}
2020-12-10 10:12:51 +00:00
lp, hash, err := m.persistence.GetWhenChatIdentityLastPublished(chatID)
2020-11-09 15:16:36 +00:00
if err != nil {
return false, err
}
2022-08-05 11:22:35 +00:00
bio, err := m.settings.Bio()
if err != nil {
return false, err
}
2022-08-02 12:56:26 +00:00
socialLinks, err := m.settings.GetSocialLinks()
if err != nil {
return false, err
}
2022-08-05 11:22:35 +00:00
identityHash, err := m.getIdentityHash(displayName, bio, img, &socialLinks)
2022-08-02 12:56:26 +00:00
if err != nil {
return false, err
}
if !bytes.Equal(hash, identityHash) {
2020-12-10 10:12:51 +00:00
return true, nil
}
2021-02-17 23:14:48 +00:00
// Note: If Alice does not add bob as a contact she will not update her contact code with images
2020-12-16 18:28:34 +00:00
return lp == 0 || time.Now().Unix()-lp > 24*60*60, nil
2020-11-09 15:16:36 +00:00
}
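// chatIdentityRepublishDue isolates, as a hedged stand-alone sketch, the
// time-based half of shouldPublishChatIdentity: publish when we have never
// published before (lastPublished == 0) or when the last publish happened more
// than 24 hours ago. The hash comparison above handles content changes.
func chatIdentityRepublishDue(lastPublished int64, now time.Time) bool {
	const day = 24 * 60 * 60 // seconds
	return lastPublished == 0 || now.Unix()-lastPublished > day
}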
// createChatIdentity creates a context-based protobuf.ChatIdentity.
// context 'public-chat' will attach only the 'thumbnail' IdentityImage
// context 'private-chat' will attach all IdentityImages
func (m *Messenger) createChatIdentity(context chatContext) (*protobuf.ChatIdentity, error) {
2020-12-09 14:03:43 +00:00
m.logger.Info(fmt.Sprintf("account keyUID '%s'", m.account.KeyUID))
m.logger.Info(fmt.Sprintf("context '%s'", context))
2022-02-17 15:13:10 +00:00
displayName, err := m.settings.DisplayName()
if err != nil {
return nil, err
}
2022-08-05 11:22:35 +00:00
bio, err := m.settings.Bio()
if err != nil {
return nil, err
}
2022-08-02 12:56:26 +00:00
socialLinks, err := m.settings.GetSocialLinks()
if err != nil {
return nil, err
}
2020-11-09 15:16:36 +00:00
ci := &protobuf.ChatIdentity{
2022-02-17 15:13:10 +00:00
Clock: m.transport.GetCurrentTime(),
EnsName: "", // TODO add ENS name handling to dedicated PR
DisplayName: displayName,
2022-08-05 11:22:35 +00:00
Description: bio,
SocialLinks: socialLinks.ToProtobuf(),
}
2022-02-17 15:13:10 +00:00
err = m.attachIdentityImagesToChatIdentity(context, ci)
if err != nil {
return nil, err
}
return ci, nil
}
// adaptIdentityImageToProtobuf adapts an images.IdentityImage to a protobuf.IdentityImage
2023-02-02 17:59:48 +00:00
func (m *Messenger) adaptIdentityImageToProtobuf(img *images.IdentityImage) *protobuf.IdentityImage {
return &protobuf.IdentityImage{
Payload: img.Payload,
SourceType: protobuf.IdentityImage_RAW_PAYLOAD, // TODO add ENS avatar handling to dedicated PR
2023-02-02 17:59:48 +00:00
ImageType: images.GetProtobufImageType(img.Payload),
}
}
func (m *Messenger) attachIdentityImagesToChatIdentity(context chatContext, ci *protobuf.ChatIdentity) error {
s, err := m.getSettings()
if err != nil {
return err
}
if s.ProfilePicturesShowTo == settings.ProfilePicturesShowToNone {
m.logger.Info(fmt.Sprintf("settings.ProfilePicturesShowTo is set to '%d', skipping attaching IdentityImages", s.ProfilePicturesShowTo))
2021-02-09 16:09:50 +00:00
return nil
}
2020-11-09 15:16:36 +00:00
ciis := make(map[string]*protobuf.IdentityImage)
switch context {
case publicChat:
m.logger.Info(fmt.Sprintf("handling %s ChatIdentity", context))
2023-02-02 17:59:48 +00:00
img, err := m.multiAccounts.GetIdentityImage(m.account.KeyUID, images.SmallDimName)
2020-11-09 15:16:36 +00:00
if err != nil {
return err
2020-11-09 15:16:36 +00:00
}
2022-02-17 15:13:10 +00:00
if img == nil {
return nil
}
m.logger.Debug(fmt.Sprintf("%s images.IdentityImage '%s'", context, spew.Sdump(img)))
2023-02-02 17:59:48 +00:00
ciis[images.SmallDimName] = m.adaptIdentityImageToProtobuf(img)
m.logger.Debug(fmt.Sprintf("%s protobuf.IdentityImage '%s'", context, spew.Sdump(ciis)))
2020-11-09 15:16:36 +00:00
ci.Images = ciis
case privateChat:
m.logger.Info(fmt.Sprintf("handling %s ChatIdentity", context))
2020-12-09 14:03:43 +00:00
imgs, err := m.multiAccounts.GetIdentityImages(m.account.KeyUID)
2020-11-09 15:16:36 +00:00
if err != nil {
return err
2020-11-09 15:16:36 +00:00
}
m.logger.Debug(fmt.Sprintf("%s images.IdentityImage '%s'", context, spew.Sdump(imgs)))
2020-11-09 15:16:36 +00:00
for _, img := range imgs {
ciis[img.Name] = m.adaptIdentityImageToProtobuf(img)
}
m.logger.Debug(fmt.Sprintf("%s protobuf.IdentityImage '%s'", context, spew.Sdump(ciis)))
2020-11-09 15:16:36 +00:00
ci.Images = ciis
default:
return fmt.Errorf("unknown ChatIdentity context '%s'", context)
2020-11-09 15:16:36 +00:00
}
if s.ProfilePicturesShowTo == settings.ProfilePicturesShowToContactsOnly {
err := EncryptIdentityImagesWithContactPubKeys(ci.Images, m)
2021-02-09 16:09:50 +00:00
if err != nil {
return err
}
}
return nil
}
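// identityImageNamesForContext is a hedged sketch of the selection rule used
// in attachIdentityImagesToChatIdentity: a public-chat ChatIdentity carries
// only the small/thumbnail image, while a private-chat ChatIdentity carries
// every stored identity image. The allNames parameter is an assumption made
// for the example; the real code loads the images from multiAccounts.
func identityImageNamesForContext(context chatContext, allNames []string) []string {
	if context == publicChat {
		return []string{images.SmallDimName}
	}
	return allNames
}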
2020-07-31 12:22:05 +00:00
// handleSharedSecrets processes the negotiated secrets received from the encryption layer
func (m *Messenger) handleSharedSecrets(secrets []*sharedsecret.Secret) error {
2020-07-31 09:46:38 +00:00
for _, secret := range secrets {
fSecret := types.NegotiatedSecret{
PublicKey: secret.Identity,
Key: secret.Key,
}
2021-05-14 10:55:42 +00:00
_, err := m.transport.ProcessNegotiatedSecret(fSecret)
2020-07-31 09:46:38 +00:00
if err != nil {
2020-07-31 12:22:05 +00:00
return err
2020-07-31 09:46:38 +00:00
}
}
2020-07-31 12:22:05 +00:00
return nil
2020-07-31 09:46:38 +00:00
}
2020-07-31 12:22:05 +00:00
// handleInstallations adds the installations to the installations map
func (m *Messenger) handleInstallations(installations []*multidevice.Installation) {
2020-07-31 09:08:09 +00:00
for _, installation := range installations {
if installation.Identity == contactIDFromPublicKey(&m.identity.PublicKey) {
2021-03-29 15:41:30 +00:00
if _, ok := m.allInstallations.Load(installation.ID); !ok {
m.allInstallations.Store(installation.ID, installation)
m.modifiedInstallations.Store(installation.ID, true)
2020-07-31 09:08:09 +00:00
}
}
}
}
2020-07-31 09:46:38 +00:00
// handleEncryptionLayerSubscriptions handles events from the encryption layer
2020-07-31 09:08:09 +00:00
func (m *Messenger) handleEncryptionLayerSubscriptions(subscriptions *encryption.Subscriptions) {
go func() {
for {
select {
2020-07-31 12:22:05 +00:00
case <-subscriptions.SendContactCode:
2021-01-25 13:49:13 +00:00
if err := m.publishContactCode(); err != nil {
2020-07-31 12:22:05 +00:00
m.logger.Error("failed to publish contact code", zap.Error(err))
2020-07-31 09:46:38 +00:00
}
// we also piggy-back to clean up cached messages
if err := m.transport.CleanMessagesProcessed(m.getTimesource().GetCurrentTime() - messageCacheIntervalMs); err != nil {
m.logger.Error("failed to clean processed messages", zap.Error(err))
}
2020-07-31 09:46:38 +00:00
2020-07-31 09:08:09 +00:00
case <-subscriptions.Quit:
m.logger.Debug("quitting encryption subscription loop")
return
}
}
}()
}
2021-01-11 10:32:51 +00:00
func (m *Messenger) handleENSVerified(records []*ens.VerificationRecord) {
var contacts []*Contact
for _, record := range records {
m.logger.Info("handling record", zap.Any("record", record))
2021-03-29 15:41:30 +00:00
contact, ok := m.allContacts.Load(record.PublicKey)
2021-01-11 10:32:51 +00:00
if !ok {
m.logger.Info("contact not found")
continue
}
2021-01-11 10:32:51 +00:00
contact.ENSVerified = record.Verified
2022-02-17 15:13:10 +00:00
contact.EnsName = record.Name
2021-01-11 10:32:51 +00:00
contacts = append(contacts, contact)
}
2021-01-11 10:32:51 +00:00
m.logger.Info("handled records", zap.Any("contacts", contacts))
if len(contacts) != 0 {
if err := m.persistence.SaveContacts(contacts); err != nil {
m.logger.Error("failed to save contacts", zap.Error(err))
return
}
}
2021-01-11 10:32:51 +00:00
m.logger.Info("calling on contacts")
if m.config.onContactENSVerified != nil {
m.logger.Info("called on contacts")
response := &MessengerResponse{Contacts: contacts}
m.config.onContactENSVerified(response)
}
2021-01-11 10:32:51 +00:00
}
2021-01-11 10:32:51 +00:00
func (m *Messenger) handleENSVerificationSubscription(c chan []*ens.VerificationRecord) {
go func() {
for {
select {
2021-01-11 10:32:51 +00:00
case records, more := <-c:
if !more {
2021-01-11 10:32:51 +00:00
m.logger.Info("No more records, quitting")
return
}
2021-01-11 10:32:51 +00:00
if len(records) != 0 {
m.logger.Info("handling records", zap.Any("records", records))
m.handleENSVerified(records)
}
case <-m.quit:
return
}
}
}()
}
// watchConnectionChange polls the connection status and calls handleConnectionChange when it changes
func (m *Messenger) watchConnectionChange() {
m.logger.Debug("watching connection changes")
state := m.online()
go func() {
for {
select {
case <-time.After(200 * time.Millisecond):
newState := m.online()
if state != newState {
state = newState
m.logger.Debug("connection changed", zap.Bool("online", state))
m.handleConnectionChange(state)
}
case <-m.quit:
return
}
}
2020-12-15 14:43:41 +00:00
}()
}
// watchExpiredMessages regularly checks for expired emoji reactions and resends them
func (m *Messenger) watchExpiredMessages() {
m.logger.Debug("watching expired messages")
2020-12-15 14:43:41 +00:00
go func() {
for {
select {
case <-time.After(time.Second):
if m.online() {
err := m.resendExpiredMessages()
2020-12-15 14:43:41 +00:00
if err != nil {
m.logger.Debug("Error when resending expired emoji reactions", zap.Error(err))
}
}
case <-m.quit:
return
}
}
}()
}
// watchIdentityImageChanges watches for identity image changes and publishes the contact code when they happen
func (m *Messenger) watchIdentityImageChanges() {
m.logger.Debug("watching identity image changes")
if m.multiAccounts == nil {
return
}
channel := m.multiAccounts.SubscribeToIdentityImageChanges()
go func() {
for {
select {
case <-channel:
err := m.syncProfilePictures(m.dispatchMessage)
2022-03-24 09:35:56 +00:00
if err != nil {
m.logger.Error("failed to sync profile pictures to paired devices", zap.Error(err))
}
err = m.PublishIdentityImage()
if err != nil {
m.logger.Error("failed to publish identity image", zap.Error(err))
}
case <-m.quit:
return
}
}
}()
}
func (m *Messenger) PublishIdentityImage() error {
// Reset the last published time for ChatIdentity so new contacts can receive data
err := m.resetLastPublishedTimeForChatIdentity()
if err != nil {
m.logger.Error("failed to reset publish time", zap.Error(err))
return err
}
// If not online, we schedule it
if !m.online() {
m.shouldPublishContactCode = true
return nil
}
return m.publishContactCode()
}
2020-08-18 15:07:48 +00:00
// handlePushNotificationClientRegistrations handles registration events
func (m *Messenger) handlePushNotificationClientRegistrations(c chan struct{}) {
go func() {
for {
_, more := <-c
if !more {
return
}
2021-01-25 13:49:13 +00:00
if err := m.publishContactCode(); err != nil {
2020-08-18 15:07:48 +00:00
m.logger.Error("failed to publish contact code", zap.Error(err))
}
}
}()
}
2019-08-29 06:33:46 +00:00
// Init analyzes chats and contacts in order to set up filters
// which are responsible for retrieving messages.
func (m *Messenger) Init() error {
// Seed the random number generator for color generation
rand.Seed(time.Now().Unix())
2019-08-29 06:33:46 +00:00
logger := m.logger.With(zap.String("site", "Init"))
var (
publicChatIDs []string
publicKeys []*ecdsa.PublicKey
)
2021-01-11 10:32:51 +00:00
joinedCommunities, err := m.communitiesManager.Joined()
if err != nil {
return err
}
2021-01-11 10:32:51 +00:00
for _, org := range joinedCommunities {
// the org advertises on the public topic derived from the pk
publicChatIDs = append(publicChatIDs, org.DefaultFilters()...)
// This is for status-go versions that didn't have `CommunitySettings`
// We need to ensure communities that existed before community settings
// were introduced will have community settings as well
exists, err := m.communitiesManager.CommunitySettingsExist(org.ID())
if err != nil {
logger.Warn("failed to check if community settings exist", zap.Error(err))
continue
}
if !exists {
communitySettings := communities.CommunitySettings{
CommunityID: org.IDString(),
HistoryArchiveSupportEnabled: true,
}
err = m.communitiesManager.SaveCommunitySettings(communitySettings)
if err != nil {
logger.Warn("failed to save community settings", zap.Error(err))
}
continue
}
// In case we do have settings, but the history archive support is disabled
// for this community, we enable it, as this should be the default for all
// non-admin communities
communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(org.ID())
if err != nil {
logger.Warn("failed to fetch community settings", zap.Error(err))
continue
}
if !org.IsAdmin() && !communitySettings.HistoryArchiveSupportEnabled {
communitySettings.HistoryArchiveSupportEnabled = true
err = m.communitiesManager.UpdateCommunitySettings(*communitySettings)
if err != nil {
logger.Warn("failed to update community settings", zap.Error(err))
}
}
}
spectatedCommunities, err := m.communitiesManager.Spectated()
if err != nil {
return err
}
for _, org := range spectatedCommunities {
publicChatIDs = append(publicChatIDs, org.DefaultFilters()...)
}
2021-01-11 10:32:51 +00:00
// Init filters for the communities we are an admin of
var adminCommunitiesPks []*ecdsa.PrivateKey
adminCommunities, err := m.communitiesManager.Created()
if err != nil {
return err
}
for _, c := range adminCommunities {
adminCommunitiesPks = append(adminCommunitiesPks, c.PrivateKey())
}
_, err = m.transport.InitCommunityFilters(adminCommunitiesPks)
if err != nil {
return err
}
2019-08-29 06:33:46 +00:00
// Get chat IDs and public keys from the existing chats.
// TODO: Get only active chats by the query.
chats, err := m.persistence.Chats()
2019-08-29 06:33:46 +00:00
if err != nil {
return err
}
for _, chat := range chats {
if err := chat.Validate(); err != nil {
logger.Warn("failed to validate chat", zap.Error(err))
continue
}
if err = m.initChatFirstMessageTimestamp(chat); err != nil {
logger.Warn("failed to init first message timestamp", zap.Error(err))
continue
}
2021-03-29 15:41:30 +00:00
m.allChats.Store(chat.ID, chat)
2021-08-25 08:59:53 +00:00
2020-11-06 10:57:05 +00:00
if !chat.Active || chat.Timeline() {
2019-08-29 06:33:46 +00:00
continue
}
2020-11-06 10:57:05 +00:00
2019-08-29 06:33:46 +00:00
switch chat.ChatType {
2020-10-20 15:10:28 +00:00
case ChatTypePublic, ChatTypeProfile:
2019-08-29 06:33:46 +00:00
publicChatIDs = append(publicChatIDs, chat.ID)
case ChatTypeCommunityChat:
2022-05-27 09:14:40 +00:00
// TODO not public chat now
publicChatIDs = append(publicChatIDs, chat.ID)
2019-08-29 06:33:46 +00:00
case ChatTypeOneToOne:
pk, err := chat.PublicKey()
if err != nil {
return err
}
publicKeys = append(publicKeys, pk)
2019-08-29 06:33:46 +00:00
case ChatTypePrivateGroupChat:
for _, member := range chat.Members {
publicKey, err := member.PublicKey()
if err != nil {
return errors.Wrapf(err, "invalid public key for member %s in chat %s", member.ID, chat.Name)
}
publicKeys = append(publicKeys, publicKey)
}
default:
return errors.New("invalid chat type")
}
}
2021-03-25 15:15:22 +00:00
// upsert timeline chat
err = m.ensureTimelineChat()
if err != nil {
return err
}
// upsert profile chat
2021-03-25 15:15:22 +00:00
err = m.ensureMyOwnProfileChat()
if err != nil {
return err
}
2019-08-29 06:33:46 +00:00
// Get chat IDs and public keys from the contacts.
contacts, err := m.persistence.Contacts()
2019-08-29 06:33:46 +00:00
if err != nil {
return err
}
for idx, contact := range contacts {
m.allContacts.Store(contact.ID, contacts[idx])
2019-08-29 06:33:46 +00:00
// We only need filters for contacts added by us and not blocked.
2023-01-20 14:28:30 +00:00
if !contact.added() || contact.Blocked {
2019-08-29 06:33:46 +00:00
continue
}
publicKey, err := contact.PublicKey()
if err != nil {
logger.Error("failed to get contact's public key", zap.Error(err))
continue
}
publicKeys = append(publicKeys, publicKey)
}
installations, err := m.encryptor.GetOurInstallations(&m.identity.PublicKey)
if err != nil {
return err
}
for _, installation := range installations {
2021-03-29 15:41:30 +00:00
m.allInstallations.Store(installation.ID, installation)
}
err = m.setInstallationHostname()
if err != nil {
return err
}
2019-09-02 09:29:06 +00:00
_, err = m.transport.InitFilters(publicChatIDs, publicKeys)
2019-08-29 06:33:46 +00:00
return err
}
2019-07-17 22:25:42 +00:00
// Shutdown takes care of ensuring a clean shutdown of Messenger
func (m *Messenger) Shutdown() (err error) {
close(m.quit)
m.cancel()
m.downloadHistoryArchiveTasksWaitGroup.Wait()
for i, task := range m.shutdownTasks {
m.logger.Debug("running shutdown task", zap.Int("n", i))
2019-07-17 22:25:42 +00:00
if tErr := task(); tErr != nil {
m.logger.Info("shutdown task failed", zap.Error(tErr))
2019-07-17 22:25:42 +00:00
if err == nil {
// First error appeared.
err = tErr
} else {
// We return all errors. They will be concatenated in the order of occurrence,
// however, they will also be returned as a single error.
err = errors.Wrap(err, tErr.Error())
}
}
}
return
}
func (m *Messenger) EnableInstallation(id string) error {
2021-03-29 15:41:30 +00:00
installation, ok := m.allInstallations.Load(id)
if !ok {
return errors.New("no installation found")
}
err := m.encryptor.EnableInstallation(&m.identity.PublicKey, id)
if err != nil {
return err
}
installation.Enabled = true
2021-03-29 15:41:30 +00:00
// TODO(samyoul) remove storing of an updated reference pointer?
m.allInstallations.Store(id, installation)
return nil
2019-07-17 22:25:42 +00:00
}
func (m *Messenger) DisableInstallation(id string) error {
2021-03-29 15:41:30 +00:00
installation, ok := m.allInstallations.Load(id)
if !ok {
return errors.New("no installation found")
}
err := m.encryptor.DisableInstallation(&m.identity.PublicKey, id)
if err != nil {
return err
}
installation.Enabled = false
2021-03-29 15:41:30 +00:00
// TODO(samyoul) remove storing of an updated reference pointer?
m.allInstallations.Store(id, installation)
return nil
2019-07-17 22:25:42 +00:00
}
func (m *Messenger) Installations() []*multidevice.Installation {
2021-03-29 15:41:30 +00:00
installations := make([]*multidevice.Installation, m.allInstallations.Len())
var i = 0
2021-03-29 15:41:30 +00:00
m.allInstallations.Range(func(installationID string, installation *multidevice.Installation) (shouldContinue bool) {
installations[i] = installation
i++
2021-03-29 15:41:30 +00:00
return true
})
return installations
2019-07-17 22:25:42 +00:00
}
func (m *Messenger) setInstallationMetadata(id string, data *multidevice.InstallationMetadata) error {
2021-03-29 15:41:30 +00:00
installation, ok := m.allInstallations.Load(id)
if !ok {
return errors.New("no installation found")
}
installation.InstallationMetadata = data
return m.encryptor.SetInstallationMetadata(m.IdentityPublicKey(), id, data)
2019-07-17 22:25:42 +00:00
}
func (m *Messenger) SetInstallationMetadata(id string, data *multidevice.InstallationMetadata) error {
return m.setInstallationMetadata(id, data)
}
func (m *Messenger) SetInstallationName(id string, name string) error {
installation, ok := m.allInstallations.Load(id)
if !ok {
return errors.New("no installation found")
}
installation.InstallationMetadata.Name = name
return m.encryptor.SetInstallationName(m.IdentityPublicKey(), id, name)
}
2019-07-17 22:25:42 +00:00
// NOT IMPLEMENTED
func (m *Messenger) SelectMailserver(id string) error {
return ErrNotImplemented
}
// NOT IMPLEMENTED
func (m *Messenger) AddMailserver(enode string) error {
return ErrNotImplemented
}
// NOT IMPLEMENTED
func (m *Messenger) RemoveMailserver(id string) error {
return ErrNotImplemented
}
// NOT IMPLEMENTED
func (m *Messenger) Mailservers() ([]string, error) {
return nil, ErrNotImplemented
}
func (m *Messenger) initChatFirstMessageTimestamp(chat *Chat) error {
if !chat.CommunityChat() || chat.FirstMessageTimestamp != FirstMessageTimestampUndefined {
return nil
}
oldestMessageTimestamp, hasAnyMessage, err := m.persistence.OldestMessageWhisperTimestampByChatID(chat.ID)
if err != nil {
return err
}
if hasAnyMessage {
if oldestMessageTimestamp == FirstMessageTimestampUndefined {
return nil
}
return m.updateChatFirstMessageTimestamp(chat, whisperToUnixTimestamp(oldestMessageTimestamp), &MessengerResponse{})
}
return m.updateChatFirstMessageTimestamp(chat, FirstMessageTimestampNoMessage, &MessengerResponse{})
}
2021-06-25 08:30:18 +00:00
func (m *Messenger) addMessagesAndChat(chat *Chat, messages []*common.Message, response *MessengerResponse) (*MessengerResponse, error) {
response.AddChat(chat)
response.AddMessages(messages)
err := m.persistence.SaveMessages(response.Messages())
2020-01-17 12:39:09 +00:00
if err != nil {
return nil, err
}
2021-06-25 08:30:18 +00:00
return response, m.saveChat(chat)
2019-10-14 14:10:48 +00:00
}
2020-08-18 15:07:48 +00:00
func (m *Messenger) reregisterForPushNotifications() error {
m.logger.Info("contact state changed, re-registering for push notification")
if m.pushNotificationClient == nil {
return nil
}
return m.pushNotificationClient.Reregister(m.pushNotificationOptions())
2020-08-18 15:07:48 +00:00
}
2020-12-15 14:43:41 +00:00
// reSendRawMessage pulls a message from the database and sends it again
func (m *Messenger) reSendRawMessage(ctx context.Context, messageID string) error {
message, err := m.persistence.RawMessageByID(messageID)
if err != nil {
return err
2019-07-17 22:25:42 +00:00
}
2021-03-29 15:41:30 +00:00
chat, ok := m.allChats.Load(message.LocalChatID)
if !ok {
return errors.New("chat not found")
}
2020-07-28 13:22:22 +00:00
_, err = m.dispatchMessage(ctx, common.RawMessage{
2020-07-15 06:31:39 +00:00
LocalChatID: chat.ID,
Payload: message.Payload,
MessageType: message.MessageType,
Recipients: message.Recipients,
ResendAutomatically: message.ResendAutomatically,
2020-12-15 14:43:41 +00:00
SendCount: message.SendCount,
})
return err
}
2020-12-15 14:43:41 +00:00
// ReSendChatMessage pulls a message from the database and sends it again
func (m *Messenger) ReSendChatMessage(ctx context.Context, messageID string) error {
return m.reSendRawMessage(ctx, messageID)
}
func (m *Messenger) SetLocalPairing(localPairing bool) {
m.localPairing = localPairing
}
func (m *Messenger) hasPairedDevices() bool {
logger := m.logger.Named("hasPairedDevices")
if m.localPairing {
return true
}
var count int
2021-03-29 15:41:30 +00:00
m.allInstallations.Range(func(installationID string, installation *multidevice.Installation) (shouldContinue bool) {
if installation.Enabled {
count++
}
2021-03-29 15:41:30 +00:00
return true
})
logger.Debug("installations info",
zap.Int("Number of installations", m.allInstallations.Len()),
zap.Int("Number of enabled installations", count))
return count > 1
}
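// pairedFromEnabledCount mirrors, as a hedged illustration, the counting rule
// in hasPairedDevices: a device counts as paired when more than one enabled
// installation is known, i.e. our own installation plus at least one other.
// It is not called by the messenger.
func pairedFromEnabledCount(enabledFlags []bool) bool {
	count := 0
	for _, enabled := range enabledFlags {
		if enabled {
			count++
		}
	}
	return count > 1
}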
func (m *Messenger) HasPairedDevices() bool {
return m.hasPairedDevices()
}
// sendToPairedDevices will check if we have any paired devices and send to them if necessary
2020-07-28 13:22:22 +00:00
func (m *Messenger) sendToPairedDevices(ctx context.Context, spec common.RawMessage) error {
hasPairedDevices := m.hasPairedDevices()
// We send a message to any paired device
if hasPairedDevices {
_, err := m.sender.SendPrivate(ctx, &m.identity.PublicKey, &spec)
if err != nil {
return err
}
}
return nil
}
2019-07-17 22:25:42 +00:00
func (m *Messenger) dispatchPairInstallationMessage(ctx context.Context, spec common.RawMessage) (common.RawMessage, error) {
var err error
var id []byte
id, err = m.sender.SendPairInstallation(ctx, &m.identity.PublicKey, spec)
if err != nil {
return spec, err
}
spec.ID = types.EncodeHex(id)
spec.SendCount++
2020-07-28 13:22:22 +00:00
err = m.persistence.SaveRawMessage(&spec)
if err != nil {
return spec, err
}
return spec, nil
}
2022-05-27 09:14:40 +00:00
func (m *Messenger) dispatchMessage(ctx context.Context, rawMessage common.RawMessage) (common.RawMessage, error) {
var err error
var id []byte
2022-05-27 09:14:40 +00:00
logger := m.logger.With(zap.String("site", "dispatchMessage"), zap.String("chatID", rawMessage.LocalChatID))
chat, ok := m.allChats.Load(rawMessage.LocalChatID)
if !ok {
2022-05-27 09:14:40 +00:00
return rawMessage, errors.New("no chat found")
2019-07-17 22:25:42 +00:00
}
2019-10-14 14:10:48 +00:00
switch chat.ChatType {
case ChatTypeOneToOne:
publicKey, err := chat.PublicKey()
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
}
2021-02-23 15:47:45 +00:00
// SendPrivate will alter message identity and possibly datasyncid, so we save an unchanged
// message for sending to paired devices later
2022-05-27 09:14:40 +00:00
specCopyForPairedDevices := rawMessage
if !common.IsPubKeyEqual(publicKey, &m.identity.PublicKey) || rawMessage.SkipEncryption {
id, err = m.sender.SendPrivate(ctx, publicKey, &rawMessage)
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
}
2019-10-14 14:10:48 +00:00
}
2021-02-23 15:47:45 +00:00
err = m.sendToPairedDevices(ctx, specCopyForPairedDevices)
2019-10-14 14:10:48 +00:00
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
2019-10-14 14:10:48 +00:00
}
2020-10-20 15:10:28 +00:00
case ChatTypePublic, ChatTypeProfile:
logger.Debug("sending public message", zap.String("chatName", chat.Name))
2022-05-27 09:14:40 +00:00
id, err = m.sender.SendPublic(ctx, chat.ID, rawMessage)
2019-10-14 14:10:48 +00:00
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
2019-10-14 14:10:48 +00:00
}
case ChatTypeCommunityChat:
// TODO: add grant
canPost, err := m.communitiesManager.CanPost(&m.identity.PublicKey, chat.CommunityID, chat.CommunityChatID(), nil)
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
}
2020-12-22 16:20:12 +00:00
// We allow emoji reactions by anyone
2022-05-27 09:14:40 +00:00
if rawMessage.MessageType != protobuf.ApplicationMetadataMessage_EMOJI_REACTION && !canPost {
m.logger.Error("can't post on chat", zap.String("chat-id", chat.ID), zap.String("chat-name", chat.Name))
2022-05-27 09:14:40 +00:00
return rawMessage, errors.New("can't post on chat")
}
logger.Debug("sending community chat message", zap.String("chatName", chat.Name))
2022-05-27 09:14:40 +00:00
isEncrypted, err := m.communitiesManager.IsEncrypted(chat.CommunityID)
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
}
if !isEncrypted {
id, err = m.sender.SendPublic(ctx, chat.ID, rawMessage)
} else {
rawMessage.CommunityID, err = types.DecodeHex(chat.CommunityID)
if err == nil {
id, err = m.sender.SendCommunityMessage(ctx, rawMessage)
2022-05-27 09:14:40 +00:00
}
}
if err != nil {
return rawMessage, err
}
case ChatTypePrivateGroupChat:
logger.Debug("sending group message", zap.String("chatName", chat.Name))
2022-05-27 09:14:40 +00:00
if rawMessage.Recipients == nil {
rawMessage.Recipients, err = chat.MembersAsPublicKeys()
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
2019-10-14 14:10:48 +00:00
}
}
hasPairedDevices := m.hasPairedDevices()
if !hasPairedDevices {
// Filter out my key from the recipients
n := 0
2022-05-27 09:14:40 +00:00
for _, recipient := range rawMessage.Recipients {
if !common.IsPubKeyEqual(recipient, &m.identity.PublicKey) {
2022-05-27 09:14:40 +00:00
rawMessage.Recipients[n] = recipient
n++
}
}
2022-05-27 09:14:40 +00:00
rawMessage.Recipients = rawMessage.Recipients[:n]
2019-10-14 14:10:48 +00:00
}
// We won't really send the message out if there are no recipients
2022-05-27 09:14:40 +00:00
if len(rawMessage.Recipients) == 0 {
rawMessage.Sent = true
}
// We skip wrapping in some cases (emoji reactions for example)
2022-05-27 09:14:40 +00:00
if !rawMessage.SkipGroupMessageWrap {
rawMessage.MessageType = protobuf.ApplicationMetadataMessage_MEMBERSHIP_UPDATE_MESSAGE
}
2022-05-27 09:14:40 +00:00
id, err = m.sender.SendGroup(ctx, rawMessage.Recipients, rawMessage)
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
2019-07-17 22:25:42 +00:00
}
2019-10-14 14:10:48 +00:00
default:
2022-05-27 09:14:40 +00:00
return rawMessage, errors.New("chat type not supported")
2019-10-14 14:10:48 +00:00
}
2022-05-27 09:14:40 +00:00
rawMessage.ID = types.EncodeHex(id)
rawMessage.SendCount++
rawMessage.LastSent = m.getTimesource().GetCurrentTime()
err = m.persistence.SaveRawMessage(&rawMessage)
if err != nil {
2022-05-27 09:14:40 +00:00
return rawMessage, err
}
2022-05-27 09:14:40 +00:00
return rawMessage, nil
2019-07-17 22:25:42 +00:00
}
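// filterOutOwnKey is a hedged, stand-alone version of the recipient filtering
// performed in dispatchMessage for group chats when no paired devices exist:
// the sender's own public key is dropped from the recipient slice in place,
// preserving the order of the remaining keys. Illustrative only.
func filterOutOwnKey(recipients []*ecdsa.PublicKey, own *ecdsa.PublicKey) []*ecdsa.PublicKey {
	n := 0
	for _, recipient := range recipients {
		if !common.IsPubKeyEqual(recipient, own) {
			recipients[n] = recipient
			n++
		}
	}
	return recipients[:n]
}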
// SendChatMessage takes a minimal message and sends it based on the corresponding chat
2020-09-01 13:27:01 +00:00
func (m *Messenger) SendChatMessage(ctx context.Context, message *common.Message) (*MessengerResponse, error) {
return m.sendChatMessage(ctx, message)
}
// SendChatMessages takes an array of messages and sends them based on the corresponding chats
func (m *Messenger) SendChatMessages(ctx context.Context, messages []*common.Message) (*MessengerResponse, error) {
var response MessengerResponse
generatedAlbumID, err := uuid.NewRandom()
if err != nil {
return nil, err
}
imagesCount := uint32(0)
for _, message := range messages {
if message.ContentType == protobuf.ChatMessage_IMAGE {
imagesCount++
}
}
for _, message := range messages {
if message.ContentType == protobuf.ChatMessage_IMAGE && len(messages) > 1 {
err = message.SetAlbumIDAndImagesCount(generatedAlbumID.String(), imagesCount)
if err != nil {
return nil, err
}
}
2021-03-29 15:41:30 +00:00
messageResponse, err := m.SendChatMessage(ctx, message)
if err != nil {
return nil, err
}
err = response.Merge(messageResponse)
if err != nil {
return nil, err
}
}
return &response, nil
}
// sendChatMessage takes a minimal message and sends it based on the corresponding chat
func (m *Messenger) sendChatMessage(ctx context.Context, message *common.Message) (*MessengerResponse, error) {
displayName, err := m.settings.DisplayName()
if err != nil {
return nil, err
}
message.DisplayName = displayName
if len(message.ImagePath) != 0 {
2022-05-27 11:21:02 +00:00
2023-02-02 17:59:48 +00:00
err := message.LoadImage()
if err != nil {
return nil, err
2022-05-27 11:21:02 +00:00
}
} else if len(message.CommunityID) != 0 {
community, err := m.communitiesManager.GetByIDString(message.CommunityID)
if err != nil {
return nil, err
}
if community == nil {
return nil, errors.New("community not found")
}
wrappedCommunity, err := community.ToBytes()
if err != nil {
return nil, err
}
message.Payload = &protobuf.ChatMessage_Community{Community: wrappedCommunity}
message.ContentType = protobuf.ChatMessage_COMMUNITY
} else if len(message.AudioPath) != 0 {
2023-02-02 17:59:48 +00:00
err := message.LoadAudio()
if err != nil {
return nil, err
}
}
var response MessengerResponse
// A valid added chat is required.
2021-03-29 15:41:30 +00:00
chat, ok := m.allChats.Load(message.ChatId)
if !ok {
return nil, errors.New("Chat not found")
2019-07-17 22:25:42 +00:00
}
2022-02-17 15:13:10 +00:00
err = m.handleStandaloneChatIdentity(chat)
2020-11-09 15:16:36 +00:00
if err != nil {
return nil, err
}
err = extendMessageFromChat(message, chat, &m.identity.PublicKey, m.getTimesource())
if err != nil {
return nil, err
}
err = m.addContactRequestPropagatedState(message)
if err != nil {
return nil, err
}
encodedMessage, err := m.encodeChatEntity(chat, message)
if err != nil {
return nil, err
}
rawMessage := common.RawMessage{
2020-07-22 07:41:40 +00:00
LocalChatID: chat.ID,
SendPushNotification: m.featureFlags.PushNotifications,
2020-07-22 07:41:40 +00:00
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_CHAT_MESSAGE,
ResendAutomatically: true,
}
2022-05-27 09:14:40 +00:00
rawMessage, err = m.dispatchMessage(ctx, rawMessage)
if err != nil {
return nil, err
2019-07-17 22:25:42 +00:00
}
if rawMessage.Sent {
message.OutgoingStatus = common.OutgoingStatusSent
}
message.ID = rawMessage.ID
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
2019-07-17 22:25:42 +00:00
}
err = chat.UpdateFromMessage(message, m.getTimesource())
if err != nil {
return nil, err
}
2019-07-17 22:25:42 +00:00
2020-09-01 13:27:01 +00:00
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
2021-06-03 13:11:55 +00:00
msg, err := m.pullMessagesAndResponsesFromDB([]*common.Message{message})
if err != nil {
return nil, err
}
if err := m.updateChatFirstMessageTimestamp(chat, whisperToUnixTimestamp(message.WhisperTimestamp), &response); err != nil {
return nil, err
}
response.SetMessages(msg)
2021-01-11 10:32:51 +00:00
response.AddChat(chat)
2021-10-29 14:29:28 +00:00
m.logger.Debug("sent message", zap.String("id", message.ID))
m.prepareMessages(response.messages)
return &response, m.saveChat(chat)
2019-07-17 22:25:42 +00:00
}
func whisperToUnixTimestamp(whisperTimestamp uint64) uint32 {
return uint32(whisperTimestamp / 1000)
}
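// whisperToUnixTimestampExample is a hedged usage illustration: Whisper
// timestamps are expressed in milliseconds, so a value such as
// 1_650_000_000_123 is truncated to whole seconds, yielding 1_650_000_000.
func whisperToUnixTimestampExample() uint32 {
	return whisperToUnixTimestamp(1_650_000_000_123)
}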
func (m *Messenger) updateChatFirstMessageTimestamp(chat *Chat, timestamp uint32, response *MessengerResponse) error {
// Currently supported only for communities
if !chat.CommunityChat() {
return nil
}
community, err := m.communitiesManager.GetByIDString(chat.CommunityID)
if err != nil {
return err
}
if community.IsAdmin() && chat.UpdateFirstMessageTimestamp(timestamp) {
community, changes, err := m.communitiesManager.EditChatFirstMessageTimestamp(community.ID(), chat.ID, chat.FirstMessageTimestamp)
if err != nil {
return err
}
response.AddCommunity(community)
response.CommunityChanges = append(response.CommunityChanges, changes)
}
return nil
}
func (m *Messenger) ShareImageMessage(request *requests.ShareImageMessage) (*MessengerResponse, error) {
if err := request.Validate(); err != nil {
return nil, err
}
response := &MessengerResponse{}
msg, err := m.persistence.MessageByID(request.MessageID)
if err != nil {
return nil, err
}
var messages []*common.Message
for _, pk := range request.Users {
message := &common.Message{}
message.ChatId = pk.String()
message.Payload = msg.Payload
message.Text = "This message has been shared with you"
message.ContentType = protobuf.ChatMessage_IMAGE
messages = append(messages, message)
r, err := m.CreateOneToOneChat(&requests.CreateOneToOneChat{ID: pk})
if err != nil {
return nil, err
}
if err := response.Merge(r); err != nil {
return nil, err
}
}
sendMessagesResponse, err := m.SendChatMessages(context.Background(), messages)
if err != nil {
return nil, err
}
if err := response.Merge(sendMessagesResponse); err != nil {
return nil, err
}
return response, nil
}
func (m *Messenger) syncProfilePictures(rawMessageHandler RawMessageHandler) error {
2022-03-24 09:35:56 +00:00
if !m.hasPairedDevices() {
return nil
}
keyUID := m.account.KeyUID
images, err := m.multiAccounts.GetIdentityImages(keyUID)
if err != nil {
return err
}
2022-05-18 10:42:51 +00:00
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
2022-03-24 09:35:56 +00:00
pictures := make([]*protobuf.SyncProfilePicture, len(images))
clock, chat := m.getLastClockWithRelatedChat()
for i, image := range images {
p := &protobuf.SyncProfilePicture{}
p.Name = image.Name
p.Payload = image.Payload
p.Width = uint32(image.Width)
p.Height = uint32(image.Height)
p.FileSize = uint32(image.FileSize)
p.ResizeTarget = uint32(image.ResizeTarget)
if image.Clock == 0 {
p.Clock = clock
} else {
p.Clock = image.Clock
}
pictures[i] = p
}
message := &protobuf.SyncProfilePictures{}
message.KeyUid = keyUID
message.Pictures = pictures
encodedMessage, err := proto.Marshal(message)
if err != nil {
return err
}
rawMessage := common.RawMessage{
2022-03-24 09:35:56 +00:00
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_PROFILE_PICTURE,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
2022-03-24 09:35:56 +00:00
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
// SyncDevices sends all public chats and contacts to paired devices
// TODO remove use of photoPath in contacts
func (m *Messenger) SyncDevices(ctx context.Context, ensName, photoPath string, rawMessageHandler RawMessageHandler) (err error) {
if rawMessageHandler == nil {
rawMessageHandler = m.dispatchMessage
}
myID := contactIDFromPublicKey(&m.identity.PublicKey)
2022-02-17 15:13:10 +00:00
displayName, err := m.settings.DisplayName()
if err != nil {
return err
}
if _, err = m.sendContactUpdate(ctx, myID, displayName, ensName, photoPath, rawMessageHandler); err != nil {
return err
}
2021-03-29 15:41:30 +00:00
m.allChats.Range(func(chatID string, chat *Chat) (shouldContinue bool) {
2021-10-05 17:26:02 +00:00
isPublicChat := !chat.Timeline() && !chat.ProfileUpdates() && chat.Public()
if isPublicChat && chat.Active {
err = m.syncPublicChat(ctx, chat, rawMessageHandler)
2021-03-29 15:41:30 +00:00
if err != nil {
return false
}
}
2021-03-29 15:41:30 +00:00
if (isPublicChat || chat.OneToOne() || chat.PrivateGroupChat()) && !chat.Active && chat.DeletedAtClockValue > 0 {
pending, err := m.persistence.HasPendingNotificationsForChat(chat.ID)
2021-10-05 17:26:02 +00:00
if err != nil {
return false
}
if !pending {
err = m.syncChatRemoving(ctx, chatID, rawMessageHandler)
if err != nil {
return false
}
}
2021-10-05 17:26:02 +00:00
}
2021-10-12 10:33:32 +00:00
if (isPublicChat || chat.OneToOne() || chat.PrivateGroupChat() || chat.CommunityChat()) && chat.Active {
err := m.syncChatMessagesRead(ctx, chatID, chat.ReadMessagesAtClockValue, rawMessageHandler)
2021-10-12 10:33:32 +00:00
if err != nil {
return false
}
}
2022-02-10 10:00:59 +00:00
if isPublicChat && chat.Active && chat.DeletedAtClockValue > 0 {
err = m.syncClearHistory(ctx, chat, rawMessageHandler)
2022-02-10 10:00:59 +00:00
if err != nil {
return false
}
}
2021-03-29 15:41:30 +00:00
return true
})
if err != nil {
return err
}
2021-03-29 15:41:30 +00:00
m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
if contact.ID != myID &&
2023-01-20 14:28:30 +00:00
(contact.LocalNickname != "" || contact.added() || contact.Blocked) {
if err = m.syncContact(ctx, contact, rawMessageHandler); err != nil {
2021-03-29 15:41:30 +00:00
return false
}
}
2021-03-29 15:41:30 +00:00
return true
})
cs, err := m.communitiesManager.JoinedAndPendingCommunitiesWithRequests()
if err != nil {
return err
}
for _, c := range cs {
if err = m.syncCommunity(ctx, c, rawMessageHandler); err != nil {
return err
}
}
2022-01-17 03:42:11 +00:00
bookmarks, err := m.browserDatabase.GetBookmarks()
if err != nil {
return err
}
for _, b := range bookmarks {
if err = m.SyncBookmark(ctx, b, rawMessageHandler); err != nil {
2022-01-17 03:42:11 +00:00
return err
}
}
trustedUsers, err := m.verificationDatabase.GetAllTrustStatus()
if err != nil {
return err
}
for id, ts := range trustedUsers {
if err = m.SyncTrustedUser(ctx, id, ts, rawMessageHandler); err != nil {
return err
}
}
verificationRequests, err := m.verificationDatabase.GetVerificationRequests()
if err != nil {
return err
}
for i := range verificationRequests {
if err = m.SyncVerificationRequest(ctx, &verificationRequests[i], rawMessageHandler); err != nil {
return err
}
}
err = m.syncSettings(rawMessageHandler)
if err != nil {
return err
}
err = m.syncProfilePictures(rawMessageHandler)
2022-03-24 09:35:56 +00:00
if err != nil {
return err
}
2022-05-18 10:42:51 +00:00
accounts, err := m.settings.GetAccounts()
if err != nil {
return err
}
ids, err := m.persistence.LatestContactRequestIDs()
if err != nil {
return err
}
for id, state := range ids {
if state == common.ContactRequestStateAccepted || state == common.ContactRequestStateDismissed {
accepted := state == common.ContactRequestStateAccepted
err := m.syncContactRequestDecision(ctx, id, accepted, rawMessageHandler)
if err != nil {
return err
}
}
}
err = m.syncWallets(accounts, rawMessageHandler)
if err != nil {
return err
}
savedAddresses, err := m.savedAddressesManager.GetRawSavedAddresses()
if err != nil {
return err
}
for i := range savedAddresses {
sa := savedAddresses[i]
err = m.syncSavedAddress(ctx, sa, rawMessageHandler)
if err != nil {
return err
}
}
err = m.syncAllKeycards(ctx, rawMessageHandler)
if err != nil {
return err
}
return nil
2022-05-18 10:42:51 +00:00
}
2022-07-06 16:12:49 +00:00
func (m *Messenger) SaveAccounts(accs []*accounts.Account) error {
err := m.settings.SaveAccounts(accs)
if err != nil {
return err
}
return m.syncWallets(accs, m.dispatchMessage)
2022-07-06 16:12:49 +00:00
}
2022-05-18 10:42:51 +00:00
2022-07-06 16:12:49 +00:00
func (m *Messenger) DeleteAccount(address types.Address) error {
2022-07-11 17:41:43 +00:00
acc, err := m.settings.GetAccountByAddress(address)
2022-07-06 16:12:49 +00:00
if err != nil {
return err
}
2022-05-18 10:42:51 +00:00
2022-07-11 17:41:43 +00:00
err = m.settings.DeleteAccount(address)
2022-07-06 16:12:49 +00:00
if err != nil {
return err
}
acc.Removed = true
accs := []*accounts.Account{acc}
return m.syncWallets(accs, m.dispatchMessage)
2022-05-18 10:42:51 +00:00
}
// syncWallets syncs all wallets with paired devices
func (m *Messenger) syncWallets(accs []*accounts.Account, rawMessageHandler RawMessageHandler) error {
2022-05-18 10:42:51 +00:00
if !m.hasPairedDevices() {
return nil
}
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
clock, chat := m.getLastClockWithRelatedChat()
accountMessages := make([]*protobuf.SyncWalletAccount, 0)
for _, acc := range accs {
2022-07-06 16:12:49 +00:00
if acc.Type != accounts.AccountTypeWatch &&
acc.Type != accounts.AccountTypeGenerated {
2022-05-18 10:42:51 +00:00
continue
}
var accountClock uint64
if acc.Clock == 0 {
accountClock = clock
} else {
accountClock = acc.Clock
}
syncMessage := &protobuf.SyncWalletAccount{
Clock: accountClock,
Address: acc.Address.Bytes(),
Wallet: acc.Wallet,
Chat: acc.Chat,
Type: acc.Type.String(),
2022-05-18 10:42:51 +00:00
Storage: acc.Storage,
Path: acc.Path,
PublicKey: acc.PublicKey,
Name: acc.Name,
Color: acc.Color,
Hidden: acc.Hidden,
Removed: acc.Removed,
}
accountMessages = append(accountMessages, syncMessage)
}
message := &protobuf.SyncWalletAccounts{
Accounts: accountMessages,
}
encodedMessage, err := proto.Marshal(message)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_WALLET_ACCOUNT,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
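// syncContactRequestDecision syncs an accept/decline decision for a contact
// request with paired devices.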
func (m *Messenger) syncContactRequestDecision(ctx context.Context, requestID string, accepted bool, rawMessageHandler RawMessageHandler) error {
m.logger.Info("syncContactRequestDecision", zap.String("requestID", requestID))
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
var status protobuf.SyncContactRequestDecision_DecisionStatus
if accepted {
status = protobuf.SyncContactRequestDecision_ACCEPTED
} else {
status = protobuf.SyncContactRequestDecision_DECLINED
}
message := &protobuf.SyncContactRequestDecision{
RequestId: requestID,
Clock: clock,
DecisionStatus: status,
}
encodedMessage, err := proto.Marshal(message)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CONTACT_REQUEST_DECISION,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
return nil
}
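// getLastClockWithRelatedChat returns the next clock value together with the
// self chat used to carry sync messages, creating that chat (inactive) if needed.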
func (m *Messenger) getLastClockWithRelatedChat() (uint64, *Chat) {
chatID := contactIDFromPublicKey(&m.identity.PublicKey)
chat, ok := m.allChats.Load(chatID)
if !ok {
chat = OneToOneFromPublicKey(&m.identity.PublicKey, m.getTimesource())
// We don't want to show the chat to the user
chat.Active = false
}
m.allChats.Store(chat.ID, chat)
clock, _ := chat.NextClockAndTimestamp(m.getTimesource())
return clock, chat
}
// SendPairInstallation sends a pair installation message
func (m *Messenger) SendPairInstallation(ctx context.Context, rawMessageHandler RawMessageHandler) (*MessengerResponse, error) {
var err error
var response MessengerResponse
installation, ok := m.allInstallations.Load(m.installationID)
if !ok {
return nil, errors.New("no installation found")
}
if installation.InstallationMetadata == nil {
return nil, errors.New("no installation metadata")
}
clock, chat := m.getLastClockWithRelatedChat()
pairMessage := &protobuf.PairInstallation{
Clock: clock,
Name: installation.InstallationMetadata.Name,
InstallationId: installation.ID,
DeviceType: installation.InstallationMetadata.DeviceType,
Version: installation.Version}
encodedMessage, err := proto.Marshal(pairMessage)
if err != nil {
return nil, err
}
if rawMessageHandler == nil {
rawMessageHandler = m.dispatchPairInstallationMessage
}
_, err = rawMessageHandler(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_PAIR_INSTALLATION,
ResendAutomatically: true,
})
if err != nil {
return nil, err
}
response.AddChat(chat)
chat.LastClockValue = clock
err = m.saveChat(chat)
if err != nil {
return nil, err
}
return &response, nil
}
// syncPublicChat syncs a public chat with paired devices
func (m *Messenger) syncPublicChat(ctx context.Context, publicChat *Chat, rawMessageHandler RawMessageHandler) error {
var err error
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncInstallationPublicChat{
Clock: clock,
Id: publicChat.ID,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_INSTALLATION_PUBLIC_CHAT,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
func (m *Messenger) syncClearHistory(ctx context.Context, publicChat *Chat, rawMessageHandler RawMessageHandler) error {
var err error
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncClearHistory{
ChatId: publicChat.ID,
ClearedAt: publicChat.DeletedAtClockValue,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CLEAR_HISTORY,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
func (m *Messenger) syncChatRemoving(ctx context.Context, id string, rawMessageHandler RawMessageHandler) error {
var err error
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncChatRemoved{
Clock: clock,
Id: id,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CHAT_REMOVED,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
// syncContact syncs a contact with paired devices
func (m *Messenger) syncContact(ctx context.Context, contact *Contact, rawMessageHandler RawMessageHandler) error {
var err error
if contact.IsSyncing {
return nil
}
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := m.buildSyncContactMessage(contact)
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_INSTALLATION_CONTACT,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
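// syncCommunity syncs a community, its settings and its encryption keys with
// paired devices.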
func (m *Messenger) syncCommunity(ctx context.Context, community *communities.Community, rawMessageHandler RawMessageHandler) error {
logger := m.logger.Named("syncCommunity")
if !m.hasPairedDevices() {
logger.Debug("device has no paired devices")
return nil
}
logger.Debug("device has paired device(s)")
clock, chat := m.getLastClockWithRelatedChat()
communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(community.ID())
if err != nil {
return err
}
syncMessage, err := community.ToSyncCommunityProtobuf(clock, communitySettings)
if err != nil {
return err
}
encodedKeys, err := m.encryptor.GetAllHREncodedKeys(community.ID())
if err != nil {
return err
}
syncMessage.EncryptionKeys = encodedKeys
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_INSTALLATION_COMMUNITY,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
logger.Debug("message dispatched")
chat.LastClockValue = clock
return m.saveChat(chat)
}
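// SyncBookmark syncs a browser bookmark with paired devices.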
func (m *Messenger) SyncBookmark(ctx context.Context, bookmark *browsers.Bookmark, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncBookmark{
Clock: clock,
Url: bookmark.URL,
Name: bookmark.Name,
ImageUrl: bookmark.ImageURL,
Removed: bookmark.Removed,
DeletedAt: bookmark.DeletedAt,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_BOOKMARK,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
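// SyncTrustedUser syncs the trust status of a public key with paired devices.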
func (m *Messenger) SyncTrustedUser(ctx context.Context, publicKey string, ts verification.TrustStatus, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncTrustedUser{
Clock: clock,
Id: publicKey,
Status: protobuf.SyncTrustedUser_TrustStatus(ts),
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_TRUSTED_USER,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
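// SyncVerificationRequest syncs a contact verification request with paired devices.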
func (m *Messenger) SyncVerificationRequest(ctx context.Context, vr *verification.Request, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
clock, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncVerificationRequest{
Id: vr.ID,
Clock: clock,
From: vr.From,
To: vr.To,
Challenge: vr.Challenge,
Response: vr.Response,
RequestedAt: vr.RequestedAt,
RepliedAt: vr.RepliedAt,
VerificationStatus: protobuf.SyncVerificationRequest_VerificationStatus(vr.RequestStatus),
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_VERIFICATION_REQUEST,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
if err != nil {
return err
}
chat.LastClockValue = clock
return m.saveChat(chat)
}
// RetrieveAll retrieves messages from all filters, processes them and returns a
// MessengerResponse to the client
func (m *Messenger) RetrieveAll() (*MessengerResponse, error) {
chatWithMessages, err := m.transport.RetrieveRawAll()
if err != nil {
return nil, err
}
return m.handleRetrievedMessages(chatWithMessages, true)
}
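// GetStats returns summary statistics from the underlying transport.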
func (m *Messenger) GetStats() types.StatsSummary {
return m.transport.GetStats()
}
type CurrentMessageState struct {
// Message is the protobuf message received
Message protobuf.ChatMessage
// MessageID is the ID of the message
MessageID string
// WhisperTimestamp is the whisper timestamp of the message
WhisperTimestamp uint64
// Contact is the contact associated with the author of the message
Contact *Contact
// PublicKey is the public key of the author of the message
PublicKey *ecdsa.PublicKey
}
type ReceivedMessageState struct {
// State on the message being processed
CurrentMessageState *CurrentMessageState
// AllChats in memory
AllChats *chatMap
// All contacts in memory
AllContacts *contactMap
// List of contacts modified
ModifiedContacts *stringBoolMap
// All installations in memory
AllInstallations *installationMap
// List of communities modified
ModifiedInstallations *stringBoolMap
// Map of existing messages
ExistingMessagesMap map[string]bool
// EmojiReactions is a list of emoji reactions for the current batch
// indexed by from-message-id-emoji-type
EmojiReactions map[string]*EmojiReaction
// GroupChatInvitations is a list of invitation requests or rejections
GroupChatInvitations map[string]*GroupChatInvitation
// Response to the client
Response *MessengerResponse
// Timesource is a time source for clock values/timestamps.
Timesource common.TimeSource
AllBookmarks map[string]*browsers.Bookmark
AllVerificationRequests []*verification.Request
AllTrustStatus map[string]verification.TrustStatus
}
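// markDeliveredMessages marks the messages acknowledged by the datasync layer
// as delivered and notifies the client through the signals handler.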
func (m *Messenger) markDeliveredMessages(acks [][]byte) {
for _, ack := range acks {
// get the message ID from the database by datasync ID, with at-least-once semantics
messageIDBytes, err := m.persistence.MarkAsConfirmed(ack, true)
if err != nil {
m.logger.Info("got datasync acknowledge for message we don't have in db", zap.String("ack", hex.EncodeToString(ack)))
continue
}
messageID := messageIDBytes.String()
//mark messages as delivered
err = m.UpdateMessageOutgoingStatus(messageID, common.OutgoingStatusDelivered)
if err != nil {
m.logger.Debug("Can't set message status as delivered", zap.Error(err))
}
// send signal to client that the message status was updated
if m.config.messengerSignalsHandler != nil {
message, err := m.persistence.MessageByID(messageID)
if err != nil {
m.logger.Debug("Can't get message from database", zap.Error(err))
continue
}
m.config.messengerSignalsHandler.MessageDelivered(message.LocalChatID, messageID)
}
}
}
// addNewMessageNotification takes a common.Message and, if the message is new (m.New),
// generates a NotificationBody and appends it to Response.Notifications
func (r *ReceivedMessageState) addNewMessageNotification(publicKey ecdsa.PublicKey, m *common.Message, responseTo *common.Message, profilePicturesVisibility int) error {
if !m.New {
return nil
}
pubKey, err := m.GetSenderPubKey()
if err != nil {
return err
}
contactID := contactIDFromPublicKey(pubKey)
chat, ok := r.AllChats.Load(m.LocalChatID)
if !ok {
return fmt.Errorf("chat ID '%s' not present", m.LocalChatID)
}
contact, ok := r.AllContacts.Load(contactID)
if !ok {
return fmt.Errorf("contact ID '%s' not present", contactID)
}
if !chat.Muted {
if showMessageNotification(publicKey, m, chat, responseTo) {
notification, err := NewMessageNotification(m.ID, m, chat, contact, r.AllContacts, profilePicturesVisibility)
if err != nil {
return err
}
r.Response.AddNotification(notification)
}
}
return nil
}
// updateExistingActivityCenterNotification updates AC notification if it exists and hasn't been read yet
func (r *ReceivedMessageState) updateExistingActivityCenterNotification(publicKey ecdsa.PublicKey, m *Messenger, message *common.Message, responseTo *common.Message) error {
notification, err := m.persistence.GetActivityCenterNotificationByID(types.FromHex(message.ID))
if err != nil {
return err
}
if notification == nil || notification.Read {
return nil
}
notification.Message = message
notification.ReplyMessage = responseTo
err = m.addActivityCenterNotification(r.Response, notification)
if err != nil {
return err
}
return nil
}
// addNewActivityCenterNotification takes a common.Message and, if the message is new (m.New),
// generates an ActivityCenterNotification and appends it to Response.ActivityCenterNotifications
func (r *ReceivedMessageState) addNewActivityCenterNotification(publicKey ecdsa.PublicKey, m *Messenger, message *common.Message, responseTo *common.Message) error {
if !message.New {
return nil
}
chat, ok := r.AllChats.Load(message.LocalChatID)
if !ok {
return fmt.Errorf("chat ID '%s' not present", message.LocalChatID)
}
isNotification, notificationType := showMentionOrReplyActivityCenterNotification(publicKey, message, chat, responseTo)
if isNotification {
notification := &ActivityCenterNotification{
ID: types.FromHex(message.ID),
Name: chat.Name,
Message: message,
ReplyMessage: responseTo,
Type: notificationType,
Timestamp: message.WhisperTimestamp,
ChatID: chat.ID,
CommunityID: chat.CommunityID,
Author: message.From,
}
err := m.addActivityCenterNotification(r.Response, notification)
if err != nil {
return err
}
}
return nil
}
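// buildMessageState creates a fresh ReceivedMessageState used while processing
// a batch of retrieved messages.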
func (m *Messenger) buildMessageState() *ReceivedMessageState {
return &ReceivedMessageState{
AllChats: m.allChats,
AllContacts: m.allContacts,
ModifiedContacts: new(stringBoolMap),
AllInstallations: m.allInstallations,
ModifiedInstallations: m.modifiedInstallations,
ExistingMessagesMap: make(map[string]bool),
EmojiReactions: make(map[string]*EmojiReaction),
GroupChatInvitations: make(map[string]*GroupChatInvitation),
Response: &MessengerResponse{},
Timesource: m.getTimesource(),
AllBookmarks: make(map[string]*browsers.Bookmark),
AllTrustStatus: make(map[string]verification.TrustStatus),
}
}
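// outputToCSV appends the parsed message as a tab-separated line to the CSV
// file when CSV output is enabled.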
func (m *Messenger) outputToCSV(timestamp uint32, messageID types.HexBytes, from string, topic types.TopicType, chatID string, msgType protobuf.ApplicationMetadataMessage_Type, parsedMessage interface{}) {
if !m.outputCSV {
return
}
msgJSON, err := json.Marshal(parsedMessage)
if err != nil {
m.logger.Error("could not marshal message", zap.Error(err))
return
}
line := fmt.Sprintf("%d\t%s\t%s\t%s\t%s\t%s\t%s\n", timestamp, messageID.String(), from, topic.String(), chatID, msgType, msgJSON)
_, err = m.csvFile.Write([]byte(line))
if err != nil {
m.logger.Error("could not write to csv", zap.Error(err))
return
}
}
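// handleImportedMessages decodes and processes messages imported from a third
// party (e.g. a Discord export), saving them to the database in chunks to keep
// it responsive.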
func (m *Messenger) handleImportedMessages(messagesToHandle map[transport.Filter][]*types.Message) error {
messageState := m.buildMessageState()
logger := m.logger.With(zap.String("site", "handleImportedMessages"))
for filter, messages := range messagesToHandle {
for _, shhMessage := range messages {
statusMessages, _, err := m.sender.HandleMessages(shhMessage, true)
if err != nil {
logger.Info("failed to decode messages", zap.Error(err))
continue
}
for _, msg := range statusMessages {
logger := logger.With(zap.String("message-id", msg.TransportMessage.ThirdPartyID))
logger.Debug("processing message")
publicKey := msg.SigPubKey()
senderID := contactIDFromPublicKey(publicKey)
// Don't process duplicates
messageID := msg.TransportMessage.ThirdPartyID
exists, err := m.messageExists(messageID, messageState.ExistingMessagesMap)
if err != nil {
logger.Warn("failed to check message exists", zap.Error(err))
}
if exists {
logger.Debug("messageExists", zap.String("messageID", messageID))
continue
}
var contact *Contact
if c, ok := messageState.AllContacts.Load(senderID); ok {
contact = c
} else {
c, err := buildContact(senderID, publicKey)
if err != nil {
logger.Info("failed to build contact", zap.Error(err))
continue
}
contact = c
messageState.AllContacts.Store(senderID, contact)
}
messageState.CurrentMessageState = &CurrentMessageState{
MessageID: messageID,
WhisperTimestamp: uint64(msg.TransportMessage.Timestamp) * 1000,
Contact: contact,
PublicKey: publicKey,
}
if msg.ParsedMessage != nil {
logger.Debug("Handling parsed message")
switch msg.ParsedMessage.Interface().(type) {
case protobuf.ChatMessage:
logger.Debug("Handling ChatMessage")
messageState.CurrentMessageState.Message = msg.ParsedMessage.Interface().(protobuf.ChatMessage)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, messageState.CurrentMessageState.Message)
err = m.HandleChatMessage(messageState)
if err != nil {
logger.Warn("failed to handle ChatMessage", zap.Error(err))
continue
}
}
}
}
}
}
importMessageAuthors := messageState.Response.DiscordMessageAuthors()
if len(importMessageAuthors) > 0 {
err := m.persistence.SaveDiscordMessageAuthors(importMessageAuthors)
if err != nil {
return err
}
}
importMessagesToSave := messageState.Response.DiscordMessages()
importMessagesCount := len(importMessagesToSave)
if importMessagesCount > 0 {
if importMessagesCount <= maxChunkSizeMessages {
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d discord messages", importMessagesCount))
m.handleImportMessagesMutex.Lock()
err := m.persistence.SaveDiscordMessages(importMessagesToSave)
if err != nil {
m.communitiesManager.LogStdout("failed to save discord messages", zap.Error(err))
m.handleImportMessagesMutex.Unlock()
return err
}
m.handleImportMessagesMutex.Unlock()
} else {
// We need to process the messages in chunks otherwise we'll
// block the database for too long
chunks := chunkSlice(importMessagesToSave, maxChunkSizeMessages)
chunksCount := len(chunks)
for i, msgs := range chunks {
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d discord messages", i+1, chunksCount, len(msgs)))
// We can't defer Unlock here because we want to
// unlock after every iteration to leave room for
// other processes to access the database
m.handleImportMessagesMutex.Lock()
err := m.persistence.SaveDiscordMessages(msgs)
if err != nil {
m.communitiesManager.LogStdout(fmt.Sprintf("failed to save discord message chunk %d of %d", i+1, chunksCount), zap.Error(err))
m.handleImportMessagesMutex.Unlock()
return err
}
m.handleImportMessagesMutex.Unlock()
// We slow down the saving of message chunks to keep the database responsive
if i < chunksCount-1 {
time.Sleep(2 * time.Second)
}
}
}
}
messageAttachmentsToSave := messageState.Response.DiscordMessageAttachments()
if len(messageAttachmentsToSave) > 0 {
chunks := chunkAttachmentsByByteSize(messageAttachmentsToSave, maxChunkSizeBytes)
chunksCount := len(chunks)
for i, attachments := range chunks {
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d discord message attachments", i+1, chunksCount, len(attachments)))
m.handleImportMessagesMutex.Lock()
err := m.persistence.SaveDiscordMessageAttachments(attachments)
if err != nil {
m.communitiesManager.LogStdout(fmt.Sprintf("failed to save discord message attachments chunk %d of %d", i+1, chunksCount), zap.Error(err))
m.handleImportMessagesMutex.Unlock()
return err
}
// We slow down the saving of message chunks to keep the database responsive
m.handleImportMessagesMutex.Unlock()
if i < chunksCount-1 {
time.Sleep(2 * time.Second)
}
}
}
messagesToSave := messageState.Response.Messages()
messagesCount := len(messagesToSave)
if messagesCount > 0 {
if messagesCount <= maxChunkSizeMessages {
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d app messages", messagesCount))
m.handleMessagesMutex.Lock()
err := m.SaveMessages(messagesToSave)
if err != nil {
m.handleMessagesMutex.Unlock()
return err
}
m.handleMessagesMutex.Unlock()
} else {
chunks := chunkSlice(messagesToSave, maxChunkSizeMessages)
chunksCount := len(chunks)
for i, msgs := range chunks {
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d app messages", i+1, chunksCount, len(msgs)))
m.handleMessagesMutex.Lock()
err := m.SaveMessages(msgs)
if err != nil {
m.handleMessagesMutex.Unlock()
return err
}
m.handleMessagesMutex.Unlock()
// We slow down the saving of message chunks to keep the database responsive
if i < chunksCount-1 {
time.Sleep(2 * time.Second)
}
}
}
}
return nil
}
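// handleRetrievedMessages decodes the messages received for each transport
// filter, dispatches them to the appropriate handlers and returns the
// accumulated MessengerResponse.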
func (m *Messenger) handleRetrievedMessages(chatWithMessages map[transport.Filter][]*types.Message, storeWakuMessages bool) (*MessengerResponse, error) {
m.handleMessagesMutex.Lock()
defer m.handleMessagesMutex.Unlock()
messageState := m.buildMessageState()
logger := m.logger.With(zap.String("site", "RetrieveAll"))
adminCommunitiesChatIDs, err := m.communitiesManager.GetAdminCommunitiesChatIDs()
if err != nil {
logger.Info("failed to retrieve admin communities", zap.Error(err))
}
for filter, messages := range chatWithMessages {
var processedMessages []string
for _, shhMessage := range messages {
logger := logger.With(zap.String("hash", types.EncodeHex(shhMessage.Hash)))
// Indicates that all messages in the batch have been processed correctly
allMessagesProcessed := true
if adminCommunitiesChatIDs[filter.ChatID] && storeWakuMessages {
logger.Debug("storing waku message")
err := m.communitiesManager.StoreWakuMessage(shhMessage)
if err != nil {
logger.Warn("failed to store waku message", zap.Error(err))
}
}
statusMessages, acks, err := m.sender.HandleMessages(shhMessage, true)
if err != nil {
logger.Info("failed to decode messages", zap.Error(err))
continue
}
if m.telemetryClient != nil {
go m.telemetryClient.PushReceivedMessages(filter, shhMessage, statusMessages)
}
m.markDeliveredMessages(acks)
logger.Debug("processing messages further", zap.Int("count", len(statusMessages)))
for _, msg := range statusMessages {
logger := logger.With(zap.String("message-id", msg.ID.String()))
logger.Info("processing message")
publicKey := msg.SigPubKey()
m.handleInstallations(msg.Installations)
err := m.handleSharedSecrets(msg.SharedSecrets)
if err != nil {
// log and continue, non-critical error
logger.Warn("failed to handle shared secrets")
}
senderID := contactIDFromPublicKey(publicKey)
if _, ok := m.requestedContacts[senderID]; !ok {
// Check for messages from blocked users
if contact, ok := messageState.AllContacts.Load(senderID); ok && contact.Blocked {
continue
}
}
// Don't process duplicates
messageID := types.EncodeHex(msg.ID)
exists, err := m.messageExists(messageID, messageState.ExistingMessagesMap)
if err != nil {
logger.Warn("failed to check message exists", zap.Error(err))
}
if exists {
logger.Debug("messageExists", zap.String("messageID", messageID))
continue
}
var contact *Contact
if c, ok := messageState.AllContacts.Load(senderID); ok {
contact = c
} else {
c, err := buildContact(senderID, publicKey)
if err != nil {
logger.Info("failed to build contact", zap.Error(err))
allMessagesProcessed = false
continue
}
contact = c
messageState.AllContacts.Store(senderID, contact)
m.forgetContactInfoRequest(senderID)
}
messageState.CurrentMessageState = &CurrentMessageState{
MessageID: messageID,
WhisperTimestamp: uint64(msg.TransportMessage.Timestamp) * 1000,
Contact: contact,
PublicKey: publicKey,
}
if msg.ParsedMessage != nil {
logger.Debug("Handling parsed message")
switch msg.ParsedMessage.Interface().(type) {
case protobuf.MembershipUpdateMessage:
logger.Debug("Handling MembershipUpdateMessage")
rawMembershipUpdate := msg.ParsedMessage.Interface().(protobuf.MembershipUpdateMessage)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, rawMembershipUpdate)
chat, _ := messageState.AllChats.Load(rawMembershipUpdate.ChatId)
err = m.HandleMembershipUpdate(messageState, chat, rawMembershipUpdate, m.systemMessagesTranslations)
if err != nil {
logger.Warn("failed to handle MembershipUpdate", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.ChatMessage:
logger.Debug("Handling ChatMessage")
messageState.CurrentMessageState.Message = msg.ParsedMessage.Interface().(protobuf.ChatMessage)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, messageState.CurrentMessageState.Message)
err = m.HandleChatMessage(messageState)
if err != nil {
logger.Warn("failed to handle ChatMessage", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.EditMessage:
logger.Debug("Handling EditMessage")
editProto := msg.ParsedMessage.Interface().(protobuf.EditMessage)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, editProto)
editMessage := EditMessage{
EditMessage: editProto,
From: contact.ID,
ID: messageID,
SigPubKey: publicKey,
}
err = m.HandleEditMessage(messageState, editMessage)
if err != nil {
logger.Warn("failed to handle EditMessage", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.DeleteMessage:
logger.Debug("Handling DeleteMessage")
deleteProto := msg.ParsedMessage.Interface().(protobuf.DeleteMessage)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, deleteProto)
deleteMessage := DeleteMessage{
DeleteMessage: deleteProto,
From: contact.ID,
ID: messageID,
SigPubKey: publicKey,
}
err = m.HandleDeleteMessage(messageState, deleteMessage)
if err != nil {
logger.Warn("failed to handle DeleteMessage", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.DeleteForMeMessage:
logger.Debug("Handling DeleteForMeMessage")
deleteForMeProto := msg.ParsedMessage.Interface().(protobuf.DeleteForMeMessage)
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, deleteForMeProto)
deleteForMeMessage := DeleteForMeMessage{
DeleteForMeMessage: deleteForMeProto,
From: contact.ID,
ID: messageID,
SigPubKey: publicKey,
}
err = m.HandleDeleteForMeMessage(messageState, deleteForMeMessage)
if err != nil {
logger.Warn("failed to handle DeleteForMeMessage", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.PinMessage:
pinMessage := msg.ParsedMessage.Interface().(protobuf.PinMessage)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, pinMessage)
err = m.HandlePinMessage(messageState, pinMessage)
if err != nil {
logger.Warn("failed to handle PinMessage", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.PairInstallation:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.PairInstallation)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling PairInstallation", zap.Any("message", p))
err = m.HandlePairInstallation(messageState, p)
if err != nil {
logger.Warn("failed to handle PairInstallation", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.StatusUpdate:
p := msg.ParsedMessage.Interface().(protobuf.StatusUpdate)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling StatusUpdate", zap.Any("message", p))
err = m.HandleStatusUpdate(messageState, p)
if err != nil {
logger.Warn("failed to handle StatusMessage", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncInstallationContact:
logger.Warn("SyncInstallationContact is not supported")
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, msg.ParsedMessage.Interface().(protobuf.SyncInstallationContact))
continue
case protobuf.SyncInstallationContactV2:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncInstallationContactV2)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncInstallationContactV2", zap.Any("message", p))
err = m.HandleSyncInstallationContact(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncInstallationContact", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncProfilePictures:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncProfilePictures)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncProfilePicture", zap.Any("message", p))
err = m.HandleSyncProfilePictures(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncProfilePicture", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncBookmark:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncBookmark)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncBookmark", zap.Any("message", p))
err = m.handleSyncBookmark(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncBookmark", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncClearHistory:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncClearHistory)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncClearHistory", zap.Any("message", p))
err = m.handleSyncClearHistory(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncClearHistory", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncCommunitySettings:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncCommunitySettings)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncCommunitySettings", zap.Any("message", p))
err = m.handleSyncCommunitySettings(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncCommunitySettings", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncTrustedUser:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncTrustedUser)
logger.Debug("Handling SyncTrustedUser", zap.Any("message", p))
err = m.handleSyncTrustedUser(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncTrustedUser", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncVerificationRequest:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncVerificationRequest)
logger.Debug("Handling SyncVerificationRequest", zap.Any("message", p))
err = m.handleSyncVerificationRequest(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncVerificationRequest", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.Backup:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.Backup)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling Backup", zap.Any("message", p))
errors := m.HandleBackup(messageState, p)
if len(errors) > 0 {
for _, err := range errors {
logger.Warn("failed to handle Backup", zap.Error(err))
}
allMessagesProcessed = false
continue
}
case protobuf.SyncInstallationPublicChat:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncInstallationPublicChat)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncInstallationPublicChat", zap.Any("message", p))
addedChat := m.HandleSyncInstallationPublicChat(messageState, p)
// We join and re-register as we want to receive mentions from the newly joined public chat
if addedChat != nil {
_, err = m.createPublicChat(addedChat.ID, messageState.Response)
if err != nil {
allMessagesProcessed = false
logger.Error("error joining chat", zap.Error(err))
continue
}
}
case protobuf.SyncChatRemoved:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncChatRemoved)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncChatRemoved", zap.Any("message", p))
err := m.HandleSyncChatRemoved(messageState, p)
if err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle sync removing chat", zap.Error(err))
continue
}
case protobuf.SyncChatMessagesRead:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncChatMessagesRead)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncChatMessagesRead", zap.Any("message", p))
err := m.HandleSyncChatMessagesRead(messageState, p)
if err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle sync chat message read", zap.Error(err))
continue
}
case protobuf.SyncCommunity:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
community := msg.ParsedMessage.Interface().(protobuf.SyncCommunity)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, community)
logger.Debug("Handling SyncCommunity", zap.Any("message", community))
err = m.handleSyncCommunity(messageState, community)
if err != nil {
logger.Warn("failed to handle SyncCommunity", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncActivityCenterRead:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
a := msg.ParsedMessage.Interface().(protobuf.SyncActivityCenterRead)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, a)
logger.Debug("Handling SyncActivityCenterRead", zap.Any("message", a))
err = m.handleActivityCenterRead(messageState, a)
if err != nil {
logger.Warn("failed to handle SyncActivityCenterRead", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncActivityCenterAccepted:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
a := msg.ParsedMessage.Interface().(protobuf.SyncActivityCenterAccepted)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, a)
logger.Debug("Handling SyncActivityCenterAccepted", zap.Any("message", a))
err = m.handleActivityCenterAccepted(messageState, a)
if err != nil {
logger.Warn("failed to handle SyncActivityCenterAccepted", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncActivityCenterDismissed:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
a := msg.ParsedMessage.Interface().(protobuf.SyncActivityCenterDismissed)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, a)
logger.Debug("Handling SyncActivityCenterDismissed", zap.Any("message", a))
err = m.handleActivityCenterDismissed(messageState, a)
if err != nil {
logger.Warn("failed to handle SyncActivityCenterDismissed", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncSetting:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
ss := msg.ParsedMessage.Interface().(protobuf.SyncSetting)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, ss)
logger.Debug("Handling SyncSetting", zap.Any("message", ss))
err := m.handleSyncSetting(messageState, &ss)
if err != nil {
logger.Warn("failed to handle SyncSetting", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.RequestAddressForTransaction:
command := msg.ParsedMessage.Interface().(protobuf.RequestAddressForTransaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, command)
logger.Debug("Handling RequestAddressForTransaction", zap.Any("message", command))
err = m.HandleRequestAddressForTransaction(messageState, command)
if err != nil {
logger.Warn("failed to handle RequestAddressForTransaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SendTransaction:
command := msg.ParsedMessage.Interface().(protobuf.SendTransaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, command)
logger.Debug("Handling SendTransaction", zap.Any("message", command))
err = m.HandleSendTransaction(messageState, command)
if err != nil {
logger.Warn("failed to handle SendTransaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.AcceptRequestAddressForTransaction:
command := msg.ParsedMessage.Interface().(protobuf.AcceptRequestAddressForTransaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, command)
logger.Debug("Handling AcceptRequestAddressForTransaction")
err = m.HandleAcceptRequestAddressForTransaction(messageState, command)
if err != nil {
logger.Warn("failed to handle AcceptRequestAddressForTransaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.DeclineRequestAddressForTransaction:
command := msg.ParsedMessage.Interface().(protobuf.DeclineRequestAddressForTransaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, command)
logger.Debug("Handling DeclineRequestAddressForTransaction")
err = m.HandleDeclineRequestAddressForTransaction(messageState, command)
if err != nil {
logger.Warn("failed to handle DeclineRequestAddressForTransaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.DeclineRequestTransaction:
command := msg.ParsedMessage.Interface().(protobuf.DeclineRequestTransaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, command)
logger.Debug("Handling DeclineRequestTransaction")
err = m.HandleDeclineRequestTransaction(messageState, command)
if err != nil {
logger.Warn("failed to handle DeclineRequestTransaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.RequestTransaction:
command := msg.ParsedMessage.Interface().(protobuf.RequestTransaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, command)
logger.Debug("Handling RequestTransaction")
err = m.HandleRequestTransaction(messageState, command)
if err != nil {
logger.Warn("failed to handle RequestTransaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.ContactUpdate:
logger.Info("<<< ContactUpdate", zap.String("publicKey", senderID))
if common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("coming from us, ignoring")
continue
}
contactUpdate := msg.ParsedMessage.Interface().(protobuf.ContactUpdate)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, contactUpdate)
err = m.HandleContactUpdate(messageState, contactUpdate)
if err != nil {
logger.Warn("failed to handle ContactUpdate", zap.Error(err))
allMessagesProcessed = false
continue
}
m.forgetContactInfoRequest(senderID)
case protobuf.AcceptContactRequest:
logger.Debug("Handling AcceptContactRequest")
message := msg.ParsedMessage.Interface().(protobuf.AcceptContactRequest)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
err = m.HandleAcceptContactRequest(messageState, message, senderID)
if err != nil {
logger.Warn("failed to handle AcceptContactRequest", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.RetractContactRequest:
logger.Debug("Handling RetractContactRequest")
message := msg.ParsedMessage.Interface().(protobuf.RetractContactRequest)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
err = m.HandleRetractContactRequest(messageState, message)
if err != nil {
logger.Warn("failed to handle RetractContactRequest", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.PushNotificationQuery:
logger.Debug("Received PushNotificationQuery")
if m.pushNotificationServer == nil {
continue
}
message := msg.ParsedMessage.Interface().(protobuf.PushNotificationQuery)
logger.Debug("Handling PushNotificationQuery")
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
if err := m.pushNotificationServer.HandlePushNotificationQuery(publicKey, msg.ID, message); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle PushNotificationQuery", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
case protobuf.PushNotificationRegistrationResponse:
logger.Debug("Received PushNotificationRegistrationResponse")
if m.pushNotificationClient == nil {
continue
}
logger.Debug("Handling PushNotificationRegistrationResponse")
message := msg.ParsedMessage.Interface().(protobuf.PushNotificationRegistrationResponse)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
if err := m.pushNotificationClient.HandlePushNotificationRegistrationResponse(publicKey, message); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle PushNotificationRegistrationResponse", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
case protobuf.ContactCodeAdvertisement:
logger.Debug("Received ContactCodeAdvertisement")
cca := msg.ParsedMessage.Interface().(protobuf.ContactCodeAdvertisement)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, cca)
logger.Debug("protobuf.ContactCodeAdvertisement received", zap.Any("cca", cca))
if cca.ChatIdentity != nil {
logger.Debug("Received ContactCodeAdvertisement ChatIdentity")
err = m.HandleChatIdentity(messageState, *cca.ChatIdentity)
if err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle ContactCodeAdvertisement ChatIdentity", zap.Error(err))
// No continue as Chat Identity may fail but the rest of the cca may process fine.
}
}
if m.pushNotificationClient == nil {
continue
}
logger.Debug("Handling ContactCodeAdvertisement")
if err := m.pushNotificationClient.HandleContactCodeAdvertisement(publicKey, cca); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle ContactCodeAdvertisement", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
case protobuf.PushNotificationResponse:
logger.Debug("Received PushNotificationResponse")
if m.pushNotificationClient == nil {
continue
}
logger.Debug("Handling PushNotificationResponse")
message := msg.ParsedMessage.Interface().(protobuf.PushNotificationResponse)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
if err := m.pushNotificationClient.HandlePushNotificationResponse(publicKey, message); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle PushNotificationResponse", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
case protobuf.PushNotificationQueryResponse:
logger.Debug("Received PushNotificationQueryResponse")
if m.pushNotificationClient == nil {
continue
}
logger.Debug("Handling PushNotificationQueryResponse")
message := msg.ParsedMessage.Interface().(protobuf.PushNotificationQueryResponse)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
if err := m.pushNotificationClient.HandlePushNotificationQueryResponse(publicKey, message); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle PushNotificationQueryResponse", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
case protobuf.PushNotificationRequest:
logger.Debug("Received PushNotificationRequest")
if m.pushNotificationServer == nil {
continue
}
logger.Debug("Handling PushNotificationRequest")
message := msg.ParsedMessage.Interface().(protobuf.PushNotificationRequest)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
if err := m.pushNotificationServer.HandlePushNotificationRequest(publicKey, msg.ID, message); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle PushNotificationRequest", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
case protobuf.EmojiReaction:
logger.Debug("Handling EmojiReaction")
message := msg.ParsedMessage.Interface().(protobuf.EmojiReaction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
err = m.HandleEmojiReaction(messageState, message)
if err != nil {
logger.Warn("failed to handle EmojiReaction", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.GroupChatInvitation:
logger.Debug("Handling GroupChatInvitation")
message := msg.ParsedMessage.Interface().(protobuf.GroupChatInvitation)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
err = m.HandleGroupChatInvitation(messageState, message)
if err != nil {
logger.Warn("failed to handle GroupChatInvitation", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.ChatIdentity:
message := msg.ParsedMessage.Interface().(protobuf.ChatIdentity)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
err = m.HandleChatIdentity(messageState, message)
if err != nil {
logger.Warn("failed to handle ChatIdentity", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.CommunityDescription:
logger.Debug("Handling CommunityDescription")
message := msg.ParsedMessage.Interface().(protobuf.CommunityDescription)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
err = m.handleCommunityDescription(messageState, publicKey, message, msg.DecryptedPayload)
if err != nil {
logger.Warn("failed to handle CommunityDescription", zap.Error(err))
allMessagesProcessed = false
continue
}
// If the community was among the requested ones, send its info and remove the filter
for communityID := range m.requestedCommunities {
if _, ok := messageState.Response.communities[communityID]; ok {
m.passStoredCommunityInfoToSignalHandler(communityID)
}
}
case protobuf.RequestContactVerification:
logger.Debug("Handling RequestContactVerification")
err = m.HandleRequestContactVerification(messageState, msg.ParsedMessage.Interface().(protobuf.RequestContactVerification))
if err != nil {
logger.Warn("failed to handle RequestContactVerification", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.AcceptContactVerification:
logger.Debug("Handling AcceptContactVerification")
err = m.HandleAcceptContactVerification(messageState, msg.ParsedMessage.Interface().(protobuf.AcceptContactVerification))
if err != nil {
logger.Warn("failed to handle AcceptContactVerification", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.DeclineContactVerification:
logger.Debug("Handling DeclineContactVerification")
err = m.HandleDeclineContactVerification(messageState, msg.ParsedMessage.Interface().(protobuf.DeclineContactVerification))
if err != nil {
logger.Warn("failed to handle DeclineContactVerification", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.CancelContactVerification:
logger.Debug("Handling CancelContactVerification")
err = m.HandleCancelContactVerification(messageState, msg.ParsedMessage.Interface().(protobuf.CancelContactVerification))
if err != nil {
logger.Warn("failed to handle CancelContactVerification", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.CommunityInvitation:
logger.Debug("Handling CommunityInvitation")
invitation := msg.ParsedMessage.Interface().(protobuf.CommunityInvitation)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, invitation)
err = m.HandleCommunityInvitation(messageState, publicKey, invitation, invitation.CommunityDescription)
if err != nil {
logger.Warn("failed to handle CommunityInvitation", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.CommunityRequestToJoin:
logger.Debug("Handling CommunityRequestToJoin")
request := msg.ParsedMessage.Interface().(protobuf.CommunityRequestToJoin)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, request)
err = m.HandleCommunityRequestToJoin(messageState, publicKey, request)
if err != nil {
logger.Warn("failed to handle CommunityRequestToJoin", zap.Error(err))
continue
}
case protobuf.CommunityCancelRequestToJoin:
logger.Debug("Handling CommunityCancelRequestToJoin")
request := msg.ParsedMessage.Interface().(protobuf.CommunityCancelRequestToJoin)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, request)
err = m.HandleCommunityCancelRequestToJoin(messageState, publicKey, request)
if err != nil {
logger.Warn("failed to handle CommunityCancelRequestToJoin", zap.Error(err))
continue
}
case protobuf.CommunityRequestToJoinResponse:
logger.Debug("Handling CommunityRequestToJoinResponse")
requestToJoinResponse := msg.ParsedMessage.Interface().(protobuf.CommunityRequestToJoinResponse)
err = m.HandleCommunityRequestToJoinResponse(messageState, publicKey, requestToJoinResponse)
if err != nil {
logger.Warn("failed to handle CommunityRequestToJoinResponse", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.CommunityRequestToLeave:
logger.Debug("Handling CommunityRequestToLeave")
request := msg.ParsedMessage.Interface().(protobuf.CommunityRequestToLeave)
err = m.HandleCommunityRequestToLeave(messageState, publicKey, request)
if err != nil {
logger.Warn("failed to handle CommunityRequestToLeave", zap.Error(err))
continue
}
case protobuf.CommunityMessageArchiveMagnetlink:
logger.Debug("Handling CommunityMessageArchiveMagnetlink")
magnetlinkMessage := msg.ParsedMessage.Interface().(protobuf.CommunityMessageArchiveMagnetlink)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, magnetlinkMessage)
err = m.HandleHistoryArchiveMagnetlinkMessage(messageState, publicKey, magnetlinkMessage.MagnetUri, magnetlinkMessage.Clock)
if err != nil {
logger.Warn("failed to handle CommunityMessageArchiveMagnetlink", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.AnonymousMetricBatch:
logger.Debug("Handling AnonymousMetricBatch")
if m.anonMetricsServer == nil {
logger.Warn("unable to handle AnonymousMetricBatch, anonMetricsServer is nil")
continue
}
message := msg.ParsedMessage.Interface().(protobuf.AnonymousMetricBatch)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, message)
ams, err := m.anonMetricsServer.StoreMetrics(message)
if err != nil {
logger.Warn("failed to store AnonymousMetricBatch", zap.Error(err))
continue
}
messageState.Response.AnonymousMetrics = append(messageState.Response.AnonymousMetrics, ams...)
case protobuf.SyncWalletAccounts:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncWalletAccounts)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
logger.Debug("Handling SyncWalletAccount", zap.Any("message", p))
err = m.HandleSyncWalletAccount(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncWalletAccount", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncContactRequestDecision:
logger.Info("SyncContactRequestDecision")
p := msg.ParsedMessage.Interface().(protobuf.SyncContactRequestDecision)
err := m.HandleSyncContactRequestDecision(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncContactRequestDecision", zap.Error(err))
continue
}
case protobuf.SyncSavedAddress:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncSavedAddress)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
err = m.handleSyncSavedAddress(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncSavedAddress", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncAllKeycards:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncAllKeycards)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
err = m.handleSyncKeycards(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncAllKeycards", zap.Error(err))
allMessagesProcessed = false
continue
}
case protobuf.SyncKeycardAction:
if !common.IsPubKeyEqual(messageState.CurrentMessageState.PublicKey, &m.identity.PublicKey) {
logger.Warn("not coming from us, ignoring")
continue
}
p := msg.ParsedMessage.Interface().(protobuf.SyncKeycardAction)
m.outputToCSV(msg.TransportMessage.Timestamp, msg.ID, senderID, filter.Topic, filter.ChatID, msg.Type, p)
err = m.handleSyncKeycardActivity(messageState, p)
if err != nil {
logger.Warn("failed to handle SyncKeycardAction", zap.Error(err))
allMessagesProcessed = false
continue
}
default:
// Check if is an encrypted PushNotificationRegistration
if msg.Type == protobuf.ApplicationMetadataMessage_PUSH_NOTIFICATION_REGISTRATION {
logger.Debug("Received PushNotificationRegistration")
if m.pushNotificationServer == nil {
continue
}
logger.Debug("Handling PushNotificationRegistration")
if err := m.pushNotificationServer.HandlePushNotificationRegistration(publicKey, msg.ParsedMessage.Interface().([]byte)); err != nil {
allMessagesProcessed = false
logger.Warn("failed to handle PushNotificationRegistration", zap.Error(err))
}
// We continue in any case, no changes to messenger
continue
}
logger.Debug("message not handled", zap.Any("messageType", reflect.TypeOf(msg.ParsedMessage.Interface())))
}
} else {
logger.Debug("parsed message is nil")
}
}
// Process any community changes
for _, changes := range messageState.Response.CommunityChanges {
if changes.ShouldMemberJoin {
response, err := m.joinCommunity(context.TODO(), changes.Community.ID())
if err != nil {
logger.Error("cannot join community", zap.Error(err))
continue
}
if err := messageState.Response.Merge(response); err != nil {
logger.Error("cannot merge join community response", zap.Error(err))
continue
}
} else if changes.ShouldMemberLeave {
response, err := m.leaveCommunity(changes.Community.ID())
if err != nil {
logger.Error("cannot join community", zap.Error(err))
continue
}
if err := messageState.Response.Merge(response); err != nil {
logger.Error("cannot merge join community response", zap.Error(err))
continue
}
}
}
// Clean up as not used by clients currently
messageState.Response.CommunityChanges = nil
// NOTE: for now we confirm messages as processed regardless of whether we
// actually processed them; this is because we need to differentiate
// between messages that we want to retry processing and messages that
// are never going to be processed
m.transport.MarkP2PMessageAsProcessed(gethcommon.BytesToHash(shhMessage.Hash))
if allMessagesProcessed {
processedMessages = append(processedMessages, types.EncodeHex(shhMessage.Hash))
}
}
if len(processedMessages) != 0 {
if err := m.transport.ConfirmMessagesProcessed(processedMessages, m.getTimesource().GetCurrentTime()); err != nil {
logger.Warn("failed to confirm processed messages", zap.Error(err))
}
}
}
return m.saveDataAndPrepareResponse(messageState)
}
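// saveDataAndPrepareResponse persists the contacts, chats, messages and other data
// accumulated in messageState and builds the MessengerResponse returned to the client.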
func (m *Messenger) saveDataAndPrepareResponse(messageState *ReceivedMessageState) (*MessengerResponse, error) {
var err error
var contactsToSave []*Contact
messageState.ModifiedContacts.Range(func(id string, value bool) (shouldContinue bool) {
contact, ok := messageState.AllContacts.Load(id)
if ok {
contactsToSave = append(contactsToSave, contact)
messageState.Response.AddContact(contact)
}
return true
})
// Hydrate chat alias and identicon
for id := range messageState.Response.chats {
chat, _ := messageState.AllChats.Load(id)
if chat == nil {
continue
}
if chat.OneToOne() {
contact, ok := m.allContacts.Load(chat.ID)
if ok {
chat.Alias = contact.Alias
chat.Identicon = contact.Identicon
}
}
messageState.Response.AddChat(chat)
}
messageState.ModifiedInstallations.Range(func(id string, value bool) (shouldContinue bool) {
installation, _ := messageState.AllInstallations.Load(id)
messageState.Response.Installations = append(messageState.Response.Installations, installation)
if installation.InstallationMetadata != nil {
err = m.setInstallationMetadata(id, installation.InstallationMetadata)
if err != nil {
return false
}
}
return true
})
if err != nil {
return nil, err
}
if len(messageState.Response.chats) > 0 {
err = m.saveChats(messageState.Response.Chats())
if err != nil {
return nil, err
}
}
if len(messageState.Response.pinMessages) > 0 {
err = m.SavePinMessages(messageState.Response.PinMessages())
if err != nil {
return nil, err
}
}
messagesToSave := messageState.Response.Messages()
messagesCount := len(messagesToSave)
if messagesCount > 0 {
if messagesCount <= maxChunkSizeMessages {
err = m.SaveMessages(messagesToSave)
if err != nil {
return nil, err
}
} else {
messageChunks := chunkSlice(messagesToSave, maxChunkSizeMessages)
chunksCount := len(messageChunks)
for i, msgs := range messageChunks {
err := m.SaveMessages(msgs)
if err != nil {
return nil, err
}
// We slow down the saving of message chunks to keep the database responsive
// this is important when messages from history archives are handled,
// which could result in handling several thousand messages per archive
if i < chunksCount-1 {
time.Sleep(2 * time.Second)
}
}
}
}
for _, emojiReaction := range messageState.EmojiReactions {
messageState.Response.AddEmojiReaction(emojiReaction)
}
for _, groupChatInvitation := range messageState.GroupChatInvitations {
messageState.Response.Invitations = append(messageState.Response.Invitations, groupChatInvitation)
}
if len(contactsToSave) > 0 {
err = m.persistence.SaveContacts(contactsToSave)
if err != nil {
return nil, err
}
}
newMessagesIds := map[string]struct{}{}
for _, message := range messagesToSave {
if message.New {
newMessagesIds[message.ID] = struct{}{}
}
}
messagesWithResponses, err := m.pullMessagesAndResponsesFromDB(messagesToSave)
if err != nil {
return nil, err
}
messagesByID := map[string]*common.Message{}
for _, message := range messagesWithResponses {
messagesByID[message.ID] = message
}
messageState.Response.SetMessages(messagesWithResponses)
notificationsEnabled, err := m.settings.GetNotificationsEnabled()
if err != nil {
return nil, err
}
profilePicturesVisibility, err := m.settings.GetProfilePicturesVisibility()
if err != nil {
return nil, err
}
m.prepareMessages(messageState.Response.messages)
for _, message := range messageState.Response.messages {
if _, ok := newMessagesIds[message.ID]; ok {
message.New = true
if notificationsEnabled {
// Create notification body to be eventually passed to `localnotifications.SendMessageNotifications()`
if err = messageState.addNewMessageNotification(m.identity.PublicKey, message, messagesByID[message.ResponseTo], profilePicturesVisibility); err != nil {
return nil, err
}
}
// Create activity center notification body to be eventually passed to `activitycenter.SendActivityCenterNotifications()`
if err = messageState.addNewActivityCenterNotification(m.identity.PublicKey, m, message, messagesByID[message.ResponseTo]); err != nil {
return nil, err
}
}
}
// Reset installations
m.modifiedInstallations = new(stringBoolMap)
if len(messageState.AllBookmarks) > 0 {
bookmarks, err := m.storeSyncBookmarks(messageState.AllBookmarks)
if err != nil {
return nil, err
}
messageState.Response.AddBookmarks(bookmarks)
}
if len(messageState.AllVerificationRequests) > 0 {
for _, vr := range messageState.AllVerificationRequests {
messageState.Response.AddVerificationRequest(vr)
}
}
if len(messageState.AllTrustStatus) > 0 {
messageState.Response.AddTrustStatuses(messageState.AllTrustStatus)
}
return messageState.Response, nil
}
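// storeSyncBookmarks persists bookmarks received via sync in the browser database and returns them.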
func (m *Messenger) storeSyncBookmarks(bookmarkMap map[string]*browsers.Bookmark) ([]*browsers.Bookmark, error) {
var bookmarks []*browsers.Bookmark
for _, bookmark := range bookmarkMap {
bookmarks = append(bookmarks, bookmark)
}
return m.browserDatabase.StoreSyncBookmarks(bookmarks)
}
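// MessageByID returns the message with the given ID from the database.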
func (m *Messenger) MessageByID(id string) (*common.Message, error) {
return m.persistence.MessageByID(id)
}
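// MessagesExist reports, for each of the given message IDs, whether the message is already stored in the database.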
func (m *Messenger) MessagesExist(ids []string) (map[string]bool, error) {
return m.persistence.MessagesExist(ids)
}
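// FirstUnseenMessageID returns the ID of the first unseen message in the given chat.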
func (m *Messenger) FirstUnseenMessageID(chatID string) (string, error) {
return m.persistence.FirstUnseenMessageID(chatID)
}
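// latestIncomingMessageClock returns the clock value of the most recent incoming message in the given chat.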
func (m *Messenger) latestIncomingMessageClock(chatID string) (uint64, error) {
return m.persistence.latestIncomingMessageClock(chatID)
}
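// MessageByChatID returns a page of messages for the given chat, together with
// a cursor for fetching the next page. For timeline chats, messages from the
// user's own timeline and from added contacts are aggregated.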
func (m *Messenger) MessageByChatID(chatID, cursor string, limit int) ([]*common.Message, string, error) {
chat, err := m.persistence.Chat(chatID)
if err != nil {
return nil, "", err
}
if chat == nil {
return nil, "", ErrChatNotFound
}
var msgs []*common.Message
var nextCursor string
if chat.Timeline() {
var chatIDs = []string{"@" + contactIDFromPublicKey(&m.identity.PublicKey)}
m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
if contact.added() {
chatIDs = append(chatIDs, "@"+contact.ID)
}
return true
})
msgs, nextCursor, err = m.persistence.MessageByChatIDs(chatIDs, cursor, limit)
if err != nil {
return nil, "", err
}
} else {
msgs, nextCursor, err = m.persistence.MessageByChatID(chatID, cursor, limit)
if err != nil {
return nil, "", err
}
}
if m.httpServer != nil {
for idx := range msgs {
m.prepareMessage(msgs[idx], m.httpServer)
}
}
return msgs, nextCursor, nil
}
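// prepareMessages attaches media-server URLs to each message in the map when the HTTP media server is available.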
func (m *Messenger) prepareMessages(messages map[string]*common.Message) {
if m.httpServer != nil {
for idx := range messages {
m.prepareMessage(messages[idx], m.httpServer)
}
}
}
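// prepareMessage populates local media URLs (images, audio, stickers and Discord
// attachments) served by the media server, for both the message and its quoted message.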
func (m *Messenger) prepareMessage(msg *common.Message, s *server.MediaServer) {
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_IMAGE) {
msg.QuotedMessage.ImageLocalURL = s.MakeImageURL(msg.QuotedMessage.ID)
}
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_AUDIO) {
msg.QuotedMessage.AudioLocalURL = s.MakeAudioURL(msg.QuotedMessage.ID)
}
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_STICKER) {
msg.QuotedMessage.HasSticker = true
}
if msg.QuotedMessage != nil && msg.QuotedMessage.ContentType == int64(protobuf.ChatMessage_DISCORD_MESSAGE) {
dm := msg.QuotedMessage.DiscordMessage
exists, err := m.persistence.HasDiscordMessageAuthorImagePayload(dm.Author.Id)
if err != nil {
return
}
if exists {
msg.QuotedMessage.DiscordMessage.Author.LocalUrl = s.MakeDiscordAuthorAvatarURL(dm.Author.Id)
}
}
if msg.ContentType == protobuf.ChatMessage_IMAGE {
msg.ImageLocalURL = s.MakeImageURL(msg.ID)
}
if msg.ContentType == protobuf.ChatMessage_DISCORD_MESSAGE {
dm := msg.GetDiscordMessage()
exists, err := m.persistence.HasDiscordMessageAuthorImagePayload(dm.Author.Id)
if err != nil {
return
}
if exists {
dm.Author.LocalUrl = s.MakeDiscordAuthorAvatarURL(dm.Author.Id)
}
for idx, attachment := range dm.Attachments {
if strings.Contains(attachment.ContentType, "image") {
hasPayload, err := m.persistence.HasDiscordMessageAttachmentPayload(attachment.Id, dm.Id)
if err != nil {
m.logger.Error("failed to check if message attachment exist", zap.Error(err))
continue
}
if hasPayload {
localURL := s.MakeDiscordAttachmentURL(dm.Id, attachment.Id)
dm.Attachments[idx].LocalUrl = localURL
}
}
}
msg.Payload = &protobuf.ChatMessage_DiscordMessage{
DiscordMessage: dm,
}
}
if msg.ContentType == protobuf.ChatMessage_AUDIO {
msg.AudioLocalURL = s.MakeAudioURL(msg.ID)
}
if msg.ContentType == protobuf.ChatMessage_STICKER {
msg.StickerLocalURL = s.MakeStickerURL(msg.GetSticker().Hash)
}
}
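// AllMessageByChatIDWhichMatchTerm returns all messages in the given chat whose text matches searchTerm.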
func (m *Messenger) AllMessageByChatIDWhichMatchTerm(chatID string, searchTerm string, caseSensitive bool) ([]*common.Message, error) {
_, err := m.persistence.Chat(chatID)
if err != nil {
return nil, err
}
return m.persistence.AllMessageByChatIDWhichMatchTerm(chatID, searchTerm, caseSensitive)
}
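// AllMessagesFromChatsAndCommunitiesWhichMatchTerm returns all messages from the given chats and communities whose text matches searchTerm.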
func (m *Messenger) AllMessagesFromChatsAndCommunitiesWhichMatchTerm(communityIds []string, chatIds []string, searchTerm string, caseSensitive bool) ([]*common.Message, error) {
return m.persistence.AllMessagesFromChatsAndCommunitiesWhichMatchTerm(communityIds, chatIds, searchTerm, caseSensitive)
}
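// SaveMessages persists the given messages in the database.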
func (m *Messenger) SaveMessages(messages []*common.Message) error {
return m.persistence.SaveMessages(messages)
}
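// DeleteMessage removes the message with the given ID from the database.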
func (m *Messenger) DeleteMessage(id string) error {
return m.persistence.DeleteMessage(id)
}
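// DeleteMessagesByChatID removes all messages belonging to the given chat from the database.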
func (m *Messenger) DeleteMessagesByChatID(id string) error {
return m.persistence.DeleteMessagesByChatID(id)
}
// MarkMessagesSeen marks messages with `ids` as seen in the chat `chatID`.
// It returns the number of messages marked as seen and the number of those
// containing mentions, or an error. If there is an error, both counts are zero.
func (m *Messenger) MarkMessagesSeen(chatID string, ids []string) (uint64, uint64, error) {
count, countWithMentions, err := m.persistence.MarkMessagesSeen(chatID, ids)
if err != nil {
return 0, 0, err
}
chat, err := m.persistence.Chat(chatID)
if err != nil {
return 0, 0, err
}
m.allChats.Store(chatID, chat)
return count, countWithMentions, nil
}
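// syncChatMessagesRead notifies paired devices that messages in the given chat have been read up to the given clock value.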
func (m *Messenger) syncChatMessagesRead(ctx context.Context, chatID string, clock uint64, rawMessageHandler RawMessageHandler) error {
if !m.hasPairedDevices() {
return nil
}
_, chat := m.getLastClockWithRelatedChat()
syncMessage := &protobuf.SyncChatMessagesRead{
Clock: clock,
Id: chatID,
}
encodedMessage, err := proto.Marshal(syncMessage)
if err != nil {
return err
}
rawMessage := common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SYNC_CHAT_MESSAGES_READ,
ResendAutomatically: true,
}
_, err = rawMessageHandler(ctx, rawMessage)
return err
}
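// markAllRead marks all messages in the chat as read up to the given clock,
// updates the unviewed counters and optionally syncs the read state to paired devices.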
func (m *Messenger) markAllRead(chatID string, clock uint64, shouldBeSynced bool) error {
chat, ok := m.allChats.Load(chatID)
if !ok {
return errors.New("chat not found")
}
seen, mentioned, err := m.persistence.MarkAllRead(chatID, clock)
if err != nil {
return err
}
if shouldBeSynced {
err := m.syncChatMessagesRead(context.Background(), chatID, clock, m.dispatchMessage)
if err != nil {
return err
}
}
chat.ReadMessagesAtClockValue = clock
chat.Highlight = false
if chat.UnviewedMessagesCount >= uint(seen) {
chat.UnviewedMessagesCount -= uint(seen)
} else {
chat.UnviewedMessagesCount = 0
}
if chat.UnviewedMentionsCount >= uint(mentioned) {
chat.UnviewedMentionsCount -= uint(mentioned)
} else {
chat.UnviewedMentionsCount = 0
}
// TODO(samyoul) remove storing of an updated reference pointer?
m.allChats.Store(chat.ID, chat)
return m.persistence.SaveChats([]*Chat{chat})
}
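// MarkAllRead dismisses the chat's activity center notifications and marks all of its messages as read.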
func (m *Messenger) MarkAllRead(chatID string) error {
err := m.persistence.DismissAllActivityCenterNotificationsFromChatID(chatID)
if err != nil {
return err
}
clock, _ := m.latestIncomingMessageClock(chatID)
if clock == 0 {
chat, ok := m.allChats.Load(chatID)
if !ok {
return errors.New("chat not found")
}
clock, _ = chat.NextClockAndTimestamp(m.getTimesource())
}
return m.markAllRead(chatID, clock, true)
}
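// MarkAllReadInCommunity marks all messages in every chat of the given community as read and returns the affected chat IDs.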
func (m *Messenger) MarkAllReadInCommunity(communityID string) ([]string, error) {
err := m.persistence.DismissAllActivityCenterNotificationsFromCommunity(communityID)
if err != nil {
return nil, err
}
chatIDs, err := m.persistence.AllChatIDsByCommunity(communityID)
if err != nil {
return nil, err
}
err = m.persistence.MarkAllReadMultiple(chatIDs)
if err != nil {
return nil, err
}
for _, chatID := range chatIDs {
chat, ok := m.allChats.Load(chatID)
if ok {
chat.UnviewedMessagesCount = 0
chat.UnviewedMentionsCount = 0
m.allChats.Store(chat.ID, chat)
} else {
err = fmt.Errorf("chat with chatID %s not found", chatID)
}
}
return chatIDs, err
}
// MuteChat signals to the messenger that we don't want to be notified
// on new messages from this chat
func (m *Messenger) MuteChat(chatID string) error {
chat, ok := m.allChats.Load(chatID)
if !ok {
// Only a one-to-one chat can be muted when it's not in the database
publicKey, err := common.HexToPubkey(chatID)
if err != nil {
return err
}
// Create a one to one chat and set active to false
chat = CreateOneToOneChat(chatID, publicKey, m.getTimesource())
chat.Active = false
err = m.initChatSyncFields(chat)
if err != nil {
return err
}
err = m.saveChat(chat)
if err != nil {
return err
}
}
var contact *Contact
if chat.OneToOne() {
contact, _ = m.allContacts.Load(chatID)
}
return m.muteChat(chat, contact)
}
func (m *Messenger) muteChat(chat *Chat, contact *Contact) error {
err := m.persistence.MuteChat(chat.ID)
if err != nil {
return err
}
chat.Muted = true
// TODO(samyoul) remove storing of an updated reference pointer?
m.allChats.Store(chat.ID, chat)
if contact != nil {
err := m.syncContact(context.Background(), contact, m.dispatchMessage)
if err != nil {
return err
}
}
return m.reregisterForPushNotifications()
}
// UnmuteChat signals to the messenger that we want to be notified
// on new messages from this chat
func (m *Messenger) UnmuteChat(chatID string) error {
chat, ok := m.allChats.Load(chatID)
if !ok {
return errors.New("chat not found")
}
var contact *Contact
if chat.OneToOne() {
contact, _ = m.allContacts.Load(chatID)
}
return m.unmuteChat(chat, contact)
}
func (m *Messenger) unmuteChat(chat *Chat, contact *Contact) error {
err := m.persistence.UnmuteChat(chat.ID)
if err != nil {
return err
}
chat.Muted = false
// TODO(samyoul) remove storing of an updated reference pointer?
m.allChats.Store(chat.ID, chat)
if contact != nil {
err := m.syncContact(context.Background(), contact, m.dispatchMessage)
if err != nil {
return err
}
}
return m.reregisterForPushNotifications()
}
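// UpdateMessageOutgoingStatus updates the outgoing status of the message with the given ID.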
func (m *Messenger) UpdateMessageOutgoingStatus(id, newOutgoingStatus string) error {
return m.persistence.UpdateMessageOutgoingStatus(id, newOutgoingStatus)
}
// Identicon returns an identicon based on the input string
func Identicon(id string) (string, error) {
return identicon.GenerateBase64(id)
}
// GenerateAlias returns the generated name for a hex-encoded public key prefixed with 0x
func GenerateAlias(id string) (string, error) {
return alias.GenerateFromPublicKeyString(id)
}
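// RequestTransaction sends a transaction request command message in a one-to-one chat.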
func (m *Messenger) RequestTransaction(ctx context.Context, chatID, value, contract, address string) (*MessengerResponse, error) {
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
message := &common.Message{}
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.Seen = true
message.Text = "Request transaction"
request := &protobuf.RequestTransaction{
Clock: message.Clock,
Address: address,
Value: value,
Contract: contract,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_REQUEST_TRANSACTION,
ResendAutomatically: true,
})
message.CommandParameters = &common.CommandParameters{
ID: rawMessage.ID,
Value: value,
Address: address,
Contract: contract,
CommandState: common.CommandStateRequestTransaction,
}
if err != nil {
return nil, err
}
messageID := rawMessage.ID
message.ID = messageID
message.CommandParameters.ID = messageID
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
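// RequestAddressForTransaction sends a command message asking the recipient for an address to send a transaction to.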
func (m *Messenger) RequestAddressForTransaction(ctx context.Context, chatID, from, value, contract string) (*MessengerResponse, error) {
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
message := &common.Message{}
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.Seen = true
message.Text = "Request address for transaction"
request := &protobuf.RequestAddressForTransaction{
Clock: message.Clock,
Value: value,
Contract: contract,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_REQUEST_ADDRESS_FOR_TRANSACTION,
ResendAutomatically: true,
})
message.CommandParameters = &common.CommandParameters{
ID: rawMessage.ID,
From: from,
Value: value,
Contract: contract,
CommandState: common.CommandStateRequestAddressForTransaction,
}
if err != nil {
return nil, err
}
messageID := rawMessage.ID
message.ID = messageID
message.CommandParameters.ID = messageID
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
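// AcceptRequestAddressForTransaction accepts a pending address request by replying with the given address and hiding the original command message.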
func (m *Messenger) AcceptRequestAddressForTransaction(ctx context.Context, messageID, address string) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Text = "Request address for transaction accepted"
message.Seen = true
message.OutgoingStatus = common.OutgoingStatusSending
// Hide previous message
previousMessage, err := m.persistence.MessageByCommandID(chatID, messageID)
if err != nil {
return nil, err
}
if previousMessage == nil {
return nil, errors.New("No previous message found")
}
err = m.persistence.HideMessage(previousMessage.ID)
if err != nil {
return nil, err
}
message.Replace = previousMessage.ID
request := &protobuf.AcceptRequestAddressForTransaction{
Clock: message.Clock,
Id: messageID,
Address: address,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_ACCEPT_REQUEST_ADDRESS_FOR_TRANSACTION,
ResendAutomatically: true,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.Address = address
message.CommandParameters.CommandState = common.CommandStateRequestAddressForTransactionAccepted
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
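// DeclineRequestTransaction declines a pending transaction request and replaces the original command message.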
func (m *Messenger) DeclineRequestTransaction(ctx context.Context, messageID string) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Text = "Transaction request declined"
message.Seen = true
message.OutgoingStatus = common.OutgoingStatusSending
message.Replace = messageID
err = m.persistence.HideMessage(messageID)
if err != nil {
return nil, err
}
request := &protobuf.DeclineRequestTransaction{
Clock: message.Clock,
Id: messageID,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_DECLINE_REQUEST_TRANSACTION,
ResendAutomatically: true,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.CommandState = common.CommandStateRequestTransactionDeclined
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
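// DeclineRequestAddressForTransaction declines a pending address request and replaces the original command message.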
func (m *Messenger) DeclineRequestAddressForTransaction(ctx context.Context, messageID string) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Text = "Request address for transaction declined"
message.Seen = true
message.OutgoingStatus = common.OutgoingStatusSending
message.Replace = messageID
err = m.persistence.HideMessage(messageID)
if err != nil {
return nil, err
}
request := &protobuf.DeclineRequestAddressForTransaction{
Clock: message.Clock,
Id: messageID,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_DECLINE_REQUEST_ADDRESS_FOR_TRANSACTION,
ResendAutomatically: true,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.CommandState = common.CommandStateRequestAddressForTransactionDeclined
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
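// AcceptRequestTransaction accepts a pending transaction request by sending the transaction hash and signature to the requester.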
func (m *Messenger) AcceptRequestTransaction(ctx context.Context, transactionHash, messageID string, signature []byte) (*MessengerResponse, error) {
var response MessengerResponse
message, err := m.MessageByID(messageID)
if err != nil {
return nil, err
}
if message == nil {
return nil, errors.New("message not found")
}
chatID := message.LocalChatID
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Timestamp = timestamp
message.Seen = true
message.Text = transactionSentTxt
message.OutgoingStatus = common.OutgoingStatusSending
// Hide previous message
previousMessage, err := m.persistence.MessageByCommandID(chatID, messageID)
if err != nil && err != common.ErrRecordNotFound {
return nil, err
}
if previousMessage != nil {
err = m.persistence.HideMessage(previousMessage.ID)
if err != nil {
return nil, err
}
message.Replace = previousMessage.ID
}
err = m.persistence.HideMessage(messageID)
if err != nil {
return nil, err
}
request := &protobuf.SendTransaction{
Clock: message.Clock,
Id: messageID,
TransactionHash: transactionHash,
Signature: signature,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SEND_TRANSACTION,
ResendAutomatically: true,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters.TransactionHash = transactionHash
message.CommandParameters.Signature = signature
message.CommandParameters.CommandState = common.CommandStateTransactionSent
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
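// SendTransaction sends a transaction command message with the given hash and signature in a one-to-one chat.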
func (m *Messenger) SendTransaction(ctx context.Context, chatID, value, contract, transactionHash string, signature []byte) (*MessengerResponse, error) {
var response MessengerResponse
// A valid added chat is required.
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, errors.New("Chat not found")
}
if chat.ChatType != ChatTypeOneToOne {
return nil, errors.New("Need to be a one-to-one chat")
}
message := &common.Message{}
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.LocalChatID = chatID
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.WhisperTimestamp = timestamp
message.Seen = true
message.Timestamp = timestamp
message.Text = transactionSentTxt
request := &protobuf.SendTransaction{
Clock: message.Clock,
TransactionHash: transactionHash,
Signature: signature,
ChatId: chatID,
}
encodedMessage, err := proto.Marshal(request)
if err != nil {
return nil, err
}
rawMessage, err := m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chat.ID,
Payload: encodedMessage,
MessageType: protobuf.ApplicationMetadataMessage_SEND_TRANSACTION,
ResendAutomatically: true,
})
if err != nil {
return nil, err
}
message.ID = rawMessage.ID
message.CommandParameters = &common.CommandParameters{
TransactionHash: transactionHash,
Value: value,
Contract: contract,
Signature: signature,
CommandState: common.CommandStateTransactionSent,
}
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
err = m.persistence.SaveMessages([]*common.Message{message})
if err != nil {
return nil, err
}
return m.addMessagesAndChat(chat, []*common.Message{message}, &response)
}
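// ValidateTransactions validates pending transactions against the given addresses
// and produces the corresponding command messages and notifications.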
func (m *Messenger) ValidateTransactions(ctx context.Context, addresses []types.Address) (*MessengerResponse, error) {
if m.verifyTransactionClient == nil {
return nil, nil
}
logger := m.logger.With(zap.String("site", "ValidateTransactions"))
logger.Debug("Validating transactions")
txs, err := m.persistence.TransactionsToValidate()
if err != nil {
logger.Error("Error pulling", zap.Error(err))
return nil, err
}
logger.Debug("Txs", zap.Int("count", len(txs)), zap.Any("txs", txs))
var response MessengerResponse
validator := NewTransactionValidator(addresses, m.persistence, m.verifyTransactionClient, m.logger)
responses, err := validator.ValidateTransactions(ctx)
if err != nil {
logger.Error("Error validating", zap.Error(err))
return nil, err
}
for _, validationResult := range responses {
var message *common.Message
chatID := contactIDFromPublicKey(validationResult.Transaction.From)
chat, ok := m.allChats.Load(chatID)
if !ok {
chat = OneToOneFromPublicKey(validationResult.Transaction.From, m.transport)
}
if validationResult.Message != nil {
message = validationResult.Message
} else {
message = &common.Message{}
err := extendMessageFromChat(message, chat, &m.identity.PublicKey, m.transport)
if err != nil {
return nil, err
}
}
message.MessageType = protobuf.MessageType_ONE_TO_ONE
message.ContentType = protobuf.ChatMessage_TRANSACTION_COMMAND
message.LocalChatID = chatID
message.OutgoingStatus = ""
clock, timestamp := chat.NextClockAndTimestamp(m.transport)
message.Clock = clock
message.Timestamp = timestamp
message.WhisperTimestamp = timestamp
message.Text = "Transaction received"
message.Seen = false
message.ID = validationResult.Transaction.MessageID
if message.CommandParameters == nil {
message.CommandParameters = &common.CommandParameters{}
} else {
message.CommandParameters = validationResult.Message.CommandParameters
}
message.CommandParameters.Value = validationResult.Value
message.CommandParameters.Contract = validationResult.Contract
message.CommandParameters.Address = validationResult.Address
message.CommandParameters.CommandState = common.CommandStateTransactionSent
message.CommandParameters.TransactionHash = validationResult.Transaction.TransactionHash
err = message.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
if err != nil {
return nil, err
}
err = chat.UpdateFromMessage(message, m.transport)
if err != nil {
return nil, err
}
if len(message.CommandParameters.ID) != 0 {
// Hide previous message
previousMessage, err := m.persistence.MessageByCommandID(chatID, message.CommandParameters.ID)
if err != nil && err != common.ErrRecordNotFound {
return nil, err
}
if previousMessage != nil {
err = m.persistence.HideMessage(previousMessage.ID)
if err != nil {
return nil, err
}
message.Replace = previousMessage.ID
}
}
response.AddMessage(message)
m.allChats.Store(chat.ID, chat)
response.AddChat(chat)
contact, err := m.getOrBuildContactFromMessage(message)
if err != nil {
return nil, err
}
notificationsEnabled, err := m.settings.GetNotificationsEnabled()
if err != nil {
return nil, err
}
profilePicturesVisibility, err := m.settings.GetProfilePicturesVisibility()
if err != nil {
return nil, err
}
if notificationsEnabled {
notification, err := NewMessageNotification(message.ID, message, chat, contact, m.allContacts, profilePicturesVisibility)
if err != nil {
return nil, err
}
response.AddNotification(notification)
}
}
if len(response.messages) > 0 {
err = m.SaveMessages(response.Messages())
if err != nil {
return nil, err
}
}
return &response, nil
}
// pullMessagesAndResponsesFromDB pulls all the messages and the ones that have
// been replied to from the database
func (m *Messenger) pullMessagesAndResponsesFromDB(messages []*common.Message) ([]*common.Message, error) {
var messageIDs []string
for _, message := range messages {
messageIDs = append(messageIDs, message.ID)
if len(message.ResponseTo) != 0 {
messageIDs = append(messageIDs, message.ResponseTo)
}
}
// We pull from the database all the messages & replies involved,
// so we let the db build the correct messages
return m.persistence.MessagesByIDs(messageIDs)
}
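// SignMessage signs the text hash of the given message with the messenger's identity key.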
func (m *Messenger) SignMessage(message string) ([]byte, error) {
hash := crypto.TextHash([]byte(message))
return crypto.Sign(hash, m.identity)
}
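// getTimesource returns the transport, which serves as the messenger's time source.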
func (m *Messenger) getTimesource() common.TimeSource {
return m.transport
}
// AddPushNotificationsServer adds a push notification server
func (m *Messenger) AddPushNotificationsServer(ctx context.Context, publicKey *ecdsa.PublicKey, serverType pushnotificationclient.ServerType) error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
return m.pushNotificationClient.AddPushNotificationsServer(publicKey, serverType)
}
// RemovePushNotificationServer removes a push notification server
func (m *Messenger) RemovePushNotificationServer(ctx context.Context, publicKey *ecdsa.PublicKey) error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
return m.pushNotificationClient.RemovePushNotificationServer(publicKey)
}
// UnregisterFromPushNotifications unregisters from any server
func (m *Messenger) UnregisterFromPushNotifications(ctx context.Context) error {
	if m.pushNotificationClient == nil {
		return errors.New("push notification client not enabled")
	}
	return m.pushNotificationClient.Unregister()
}
// DisableSendingPushNotifications signals the client not to send any push notification
func (m *Messenger) DisableSendingPushNotifications() error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
m.pushNotificationClient.DisableSending()
return nil
}
// EnableSendingPushNotifications signals the client to send push notifications
func (m *Messenger) EnableSendingPushNotifications() error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
m.pushNotificationClient.EnableSending()
return nil
}
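// pushNotificationOptions assembles the registration options sent to push
// notification servers: the public keys of added, non-blocked contacts, the
// IDs of muted chats and blocked contacts, and the IDs of active public and
// community chats.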
func (m *Messenger) pushNotificationOptions() *pushnotificationclient.RegistrationOptions {
var contactIDs []*ecdsa.PublicKey
var mutedChatIDs []string
var publicChatIDs []string
m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
if contact.added() && !contact.Blocked {
pk, err := contact.PublicKey()
if err != nil {
m.logger.Warn("could not parse contact public key")
return true
}
contactIDs = append(contactIDs, pk)
} else if contact.Blocked {
mutedChatIDs = append(mutedChatIDs, contact.ID)
}
return true
})
m.allChats.Range(func(chatID string, chat *Chat) (shouldContinue bool) {
if chat.Muted {
mutedChatIDs = append(mutedChatIDs, chat.ID)
}
if chat.Active && (chat.Public() || chat.CommunityChat()) {
publicChatIDs = append(publicChatIDs, chat.ID)
}
return true
})
return &pushnotificationclient.RegistrationOptions{
ContactIDs: contactIDs,
MutedChatIDs: mutedChatIDs,
PublicChatIDs: publicChatIDs,
}
}
// RegisterForPushNotifications registers the deviceToken with any enabled push notification server
func (m *Messenger) RegisterForPushNotifications(ctx context.Context, deviceToken, apnTopic string, tokenType protobuf.PushNotificationRegistration_TokenType) error {
if m.pushNotificationClient == nil {
return errors.New("push notification client not enabled")
}
m.mutex.Lock()
defer m.mutex.Unlock()
err := m.pushNotificationClient.Register(deviceToken, apnTopic, tokenType, m.pushNotificationOptions())
if err != nil {
m.logger.Error("failed to register for push notifications", zap.Error(err))
return err
}
return nil
}
// RegisteredForPushNotifications returns whether we successfully registered with all the servers
func (m *Messenger) RegisteredForPushNotifications() (bool, error) {
if m.pushNotificationClient == nil {
return false, errors.New("no push notification client")
}
return m.pushNotificationClient.Registered()
}
// EnablePushNotificationsFromContactsOnly is used to indicate that we want to receive push notifications only from contacts
func (m *Messenger) EnablePushNotificationsFromContactsOnly() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.EnablePushNotificationsFromContactsOnly(m.pushNotificationOptions())
}
// DisablePushNotificationsFromContactsOnly is used to indicate that we want to receive push notifications from anyone
func (m *Messenger) DisablePushNotificationsFromContactsOnly() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.DisablePushNotificationsFromContactsOnly(m.pushNotificationOptions())
}
// EnablePushNotificationsBlockMentions is used to indicate that we don't want to receive push notifications for mentions
func (m *Messenger) EnablePushNotificationsBlockMentions() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.EnablePushNotificationsBlockMentions(m.pushNotificationOptions())
}
// DisablePushNotificationsBlockMentions is used to indicate that we want to receive push notifications for mentions
func (m *Messenger) DisablePushNotificationsBlockMentions() error {
if m.pushNotificationClient == nil {
return errors.New("no push notification client")
}
m.mutex.Lock()
defer m.mutex.Unlock()
return m.pushNotificationClient.DisablePushNotificationsBlockMentions(m.pushNotificationOptions())
}
// GetPushNotificationsServers returns the servers used for push notifications
func (m *Messenger) GetPushNotificationsServers() ([]*pushnotificationclient.PushNotificationServer, error) {
if m.pushNotificationClient == nil {
return nil, errors.New("no push notification client")
}
return m.pushNotificationClient.GetServers()
}
// StartPushNotificationsServer initializes and starts a push notification server, using the current messenger identity key
func (m *Messenger) StartPushNotificationsServer() error {
if m.pushNotificationServer == nil {
pushNotificationServerPersistence := pushnotificationserver.NewSQLitePersistence(m.database)
config := &pushnotificationserver.Config{
Enabled: true,
Logger: m.logger,
Identity: m.identity,
}
m.pushNotificationServer = pushnotificationserver.New(config, pushNotificationServerPersistence, m.sender)
}
return m.pushNotificationServer.Start()
}
// StopPushNotificationsServer stops the push notification server if running
func (m *Messenger) StopPushNotificationsServer() error {
m.pushNotificationServer = nil
return nil
}
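// generateAliasAndIdenticon derives a human-readable alias and a
// base64-encoded identicon from a public key given in hex form.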
func generateAliasAndIdenticon(pk string) (string, string, error) {
identicon, err := identicon.GenerateBase64(pk)
if err != nil {
return "", "", err
}
name, err := alias.GenerateFromPublicKeyString(pk)
if err != nil {
return "", "", err
}
return name, identicon, nil
}
func (m *Messenger) SendEmojiReaction(ctx context.Context, chatID, messageID string, emojiID protobuf.EmojiReaction_Type) (*MessengerResponse, error) {
var response MessengerResponse
chat, ok := m.allChats.Load(chatID)
if !ok {
return nil, ErrChatNotFound
}
clock, _ := chat.NextClockAndTimestamp(m.getTimesource())
emojiR := &EmojiReaction{
EmojiReaction: protobuf.EmojiReaction{
Clock: clock,
MessageId: messageID,
ChatId: chatID,
Type: emojiID,
},
LocalChatID: chatID,
From: types.EncodeHex(crypto.FromECDSAPub(&m.identity.PublicKey)),
}
encodedMessage, err := m.encodeChatEntity(chat, emojiR)
if err != nil {
return nil, err
}
_, err = m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: chatID,
Payload: encodedMessage,
SkipGroupMessageWrap: true,
MessageType: protobuf.ApplicationMetadataMessage_EMOJI_REACTION,
// Don't resend using datasync, that would create quite a lot
// of traffic if clicking too eagerly
ResendAutomatically: false,
})
if err != nil {
return nil, err
}
response.AddEmojiReaction(emojiR)
response.AddChat(chat)
err = m.persistence.SaveEmojiReaction(emojiR)
if err != nil {
return nil, errors.Wrap(err, "Can't save emoji reaction in db")
}
return &response, nil
}
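// EmojiReactionsByChatID returns a page of emoji reactions for the given
// chat. For the timeline chat, reactions are aggregated across the
// per-contact timeline chats of ourselves and all added contacts.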
func (m *Messenger) EmojiReactionsByChatID(chatID string, cursor string, limit int) ([]*EmojiReaction, error) {
chat, err := m.persistence.Chat(chatID)
if err != nil {
return nil, err
}
if chat.Timeline() {
var chatIDs = []string{"@" + contactIDFromPublicKey(&m.identity.PublicKey)}
m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
if contact.added() {
chatIDs = append(chatIDs, "@"+contact.ID)
}
return true
})
return m.persistence.EmojiReactionsByChatIDs(chatIDs, cursor, limit)
}
return m.persistence.EmojiReactionsByChatID(chatID, cursor, limit)
}
func (m *Messenger) EmojiReactionsByChatIDMessageID(chatID string, messageID string) ([]*EmojiReaction, error) {
_, err := m.persistence.Chat(chatID)
if err != nil {
return nil, err
}
return m.persistence.EmojiReactionsByChatIDMessageID(chatID, messageID)
}
func (m *Messenger) SendEmojiReactionRetraction(ctx context.Context, emojiReactionID string) (*MessengerResponse, error) {
emojiR, err := m.persistence.EmojiReactionByID(emojiReactionID)
if err != nil {
return nil, err
}
// Check that the sender is the key owner
pk := types.EncodeHex(crypto.FromECDSAPub(&m.identity.PublicKey))
if emojiR.From != pk {
return nil, errors.Errorf("identity mismatch, "+
"emoji reactions can only be retracted by the reaction sender, "+
"emoji reaction sent by '%s', current identity '%s'",
emojiR.From, pk,
)
}
// Get chat and clock
chat, ok := m.allChats.Load(emojiR.GetChatId())
if !ok {
return nil, ErrChatNotFound
}
clock, _ := chat.NextClockAndTimestamp(m.getTimesource())
// Update the relevant fields
emojiR.Clock = clock
emojiR.Retracted = true
encodedMessage, err := m.encodeChatEntity(chat, emojiR)
if err != nil {
return nil, err
}
// Send the marshalled EmojiReactionRetraction protobuf
_, err = m.dispatchMessage(ctx, common.RawMessage{
LocalChatID: emojiR.GetChatId(),
Payload: encodedMessage,
SkipGroupMessageWrap: true,
MessageType: protobuf.ApplicationMetadataMessage_EMOJI_REACTION,
// Don't resend using datasync, that would create quite a lot
// of traffic if clicking too eagerly
ResendAutomatically: false,
})
if err != nil {
return nil, err
}
// Update MessengerResponse
response := MessengerResponse{}
emojiR.Retracted = true
response.AddEmojiReaction(emojiR)
response.AddChat(chat)
// Persist retraction state for emoji reaction
err = m.persistence.SaveEmojiReaction(emojiR)
if err != nil {
return nil, err
}
return &response, nil
}
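// encodeChatEntity marshals a chat entity (message, emoji reaction, ...) into
// protobuf bytes, selecting the protobuf message type from the chat type.
// Group chat entities that request it are additionally wrapped in a
// membership update.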
func (m *Messenger) encodeChatEntity(chat *Chat, message common.ChatEntity) ([]byte, error) {
var encodedMessage []byte
var err error
l := m.logger.With(zap.String("site", "Send"), zap.String("chatID", chat.ID))
switch chat.ChatType {
case ChatTypeOneToOne:
l.Debug("sending private message")
message.SetMessageType(protobuf.MessageType_ONE_TO_ONE)
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
case ChatTypePublic, ChatTypeProfile:
l.Debug("sending public message", zap.String("chatName", chat.Name))
message.SetMessageType(protobuf.MessageType_PUBLIC_GROUP)
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
case ChatTypeCommunityChat:
l.Debug("sending community chat message", zap.String("chatName", chat.Name))
// TODO: add grant
message.SetMessageType(protobuf.MessageType_COMMUNITY_CHAT)
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
case ChatTypePrivateGroupChat:
message.SetMessageType(protobuf.MessageType_PRIVATE_GROUP)
l.Debug("sending group message", zap.String("chatName", chat.Name))
if !message.WrapGroupMessage() {
encodedMessage, err = proto.Marshal(message.GetProtobuf())
if err != nil {
return nil, err
}
} else {
group, err := newProtocolGroupFromChat(chat)
if err != nil {
return nil, err
}
// NOTE(cammellos): Disabling for now since the optimization is not
// applicable anymore after we changed group rules to allow
// anyone to change group details
encodedMessage, err = m.sender.EncodeMembershipUpdate(group, message)
if err != nil {
return nil, err
}
}
default:
return nil, errors.New("chat type not supported")
}
return encodedMessage, nil
}
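// getOrBuildContactFromMessage returns the cached contact for the message
// sender, building and caching a new one from the sender's public key if it
// is not known yet.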
func (m *Messenger) getOrBuildContactFromMessage(msg *common.Message) (*Contact, error) {
if c, ok := m.allContacts.Load(msg.From); ok {
return c, nil
}
senderPubKey, err := msg.GetSenderPubKey()
if err != nil {
return nil, err
}
senderID := contactIDFromPublicKey(senderPubKey)
c, err := buildContact(senderID, senderPubKey)
if err != nil {
return nil, err
}
// TODO(samyoul) remove storing of an updated reference pointer?
m.allContacts.Store(msg.From, c)
return c, nil
}
func (m *Messenger) BloomFilter() []byte {
return m.transport.BloomFilter()
}
func (m *Messenger) getSettings() (settings.Settings, error) {
sDB, err := accounts.NewDB(m.database)
if err != nil {
return settings.Settings{}, err
}
return sDB.GetSettings()
}
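// handleSyncBookmark stores the bookmark carried by a SyncBookmark message in
// the received-message state, keyed by URL.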
func (m *Messenger) handleSyncBookmark(state *ReceivedMessageState, message protobuf.SyncBookmark) error {
bookmark := &browsers.Bookmark{
URL: message.Url,
Name: message.Name,
ImageURL: message.ImageUrl,
Removed: message.Removed,
Clock: message.Clock,
}
state.AllBookmarks[message.Url] = bookmark
return nil
}
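// handleSyncClearHistory applies a clear-history sync message: if the synced
// cleared-at clock is newer than the chat's deletion clock, the chat history
// is cleared, the processed-message cache is invalidated for public chats,
// and the cleared history is reported in the response.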
func (m *Messenger) handleSyncClearHistory(state *ReceivedMessageState, message protobuf.SyncClearHistory) error {
chatID := message.ChatId
existingChat, ok := state.AllChats.Load(chatID)
if !ok {
return ErrChatNotFound
}
if existingChat.DeletedAtClockValue >= message.ClearedAt {
return nil
}
err := m.persistence.ClearHistoryFromSyncMessage(existingChat, message.ClearedAt)
if err != nil {
return err
}
if existingChat.Public() {
err = m.transport.ClearProcessedMessageIDsCache()
if err != nil {
return err
}
}
state.AllChats.Store(chatID, existingChat)
state.Response.AddChat(existingChat)
state.Response.AddClearedHistory(&ClearedHistory{
ClearedAt: message.ClearedAt,
ChatID: chatID,
})
return nil
}
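// handleSyncTrustedUser upserts the synced trust status for a contact and, if
// it changed, updates the cached contact and marks it as modified in the
// received-message state.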
func (m *Messenger) handleSyncTrustedUser(state *ReceivedMessageState, message protobuf.SyncTrustedUser) error {
updated, err := m.verificationDatabase.UpsertTrustStatus(message.Id, verification.TrustStatus(message.Status), message.Clock)
if err != nil {
return err
}
if updated {
state.AllTrustStatus[message.Id] = verification.TrustStatus(message.Status)
contact, ok := m.allContacts.Load(message.Id)
if !ok {
m.logger.Info("contact not found")
return nil
}
contact.TrustStatus = verification.TrustStatus(message.Status)
m.allContacts.Store(contact.ID, contact)
state.ModifiedContacts.Store(contact.ID, true)
}
return nil
}
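// ToVerificationRequest converts a SyncVerificationRequest protobuf into the
// internal verification.Request model.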
func ToVerificationRequest(message protobuf.SyncVerificationRequest) *verification.Request {
return &verification.Request{
From: message.From,
To: message.To,
Challenge: message.Challenge,
Response: message.Response,
RequestedAt: message.RequestedAt,
RepliedAt: message.RepliedAt,
RequestStatus: verification.RequestStatus(message.VerificationStatus),
}
}
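// handleSyncVerificationRequest persists a synced verification request and,
// for requests we sent ourselves, updates the verification status of the
// target contact.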
func (m *Messenger) handleSyncVerificationRequest(state *ReceivedMessageState, message protobuf.SyncVerificationRequest) error {
verificationRequest := ToVerificationRequest(message)
err := m.verificationDatabase.SaveVerificationRequest(verificationRequest)
if err != nil {
return err
}
myPubKey := hexutil.Encode(crypto.FromECDSAPub(&m.identity.PublicKey))
state.AllVerificationRequests = append(state.AllVerificationRequests, verificationRequest)
if message.From == myPubKey { // Verification requests we sent
contact, ok := m.allContacts.Load(message.To)
if !ok {
m.logger.Info("contact not found")
return nil
}
contact.VerificationStatus = VerificationStatus(message.VerificationStatus)
if err := m.persistence.SaveContact(contact, nil); err != nil {
return err
}
m.allContacts.Store(contact.ID, contact)
state.ModifiedContacts.Store(contact.ID, true)
// TODO: create activity center notif
}
// else { // Verification requests we received
// // TODO: activity center notif
//}
return nil
}
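// chunkSlice splits a slice into consecutive chunks of at most chunkSize
// elements; the last chunk may be shorter. For example (illustrative only),
// chunkSlice([]int{1, 2, 3, 4, 5}, 2) returns [[1 2] [3 4] [5]].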
func chunkSlice[T comparable](slice []T, chunkSize int) [][]T {
var chunks [][]T
for i := 0; i < len(slice); i += chunkSize {
end := i + chunkSize
// necessary check to avoid slicing beyond
// slice capacity
if end > len(slice) {
end = len(slice)
}
chunks = append(chunks, slice[i:end])
}
return chunks
}
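// chunkAttachmentsByByteSize greedily groups Discord message attachments into
// chunks whose combined file size stays within maxFileSizeBytes. An
// attachment larger than the limit is still kept rather than dropped, ending
// up in a chunk of its own if it starts one. For example (illustrative only),
// three attachments of sizes 3, 4 and 5 bytes with a limit of 7 produce the
// chunks {3, 4} and {5}.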
func chunkAttachmentsByByteSize(slice []*protobuf.DiscordMessageAttachment, maxFileSizeBytes uint64) [][]*protobuf.DiscordMessageAttachment {
var chunks [][]*protobuf.DiscordMessageAttachment
currentChunkSize := uint64(0)
currentChunk := make([]*protobuf.DiscordMessageAttachment, 0)
for i, attachment := range slice {
payloadBytes := attachment.GetFileSizeBytes()
if currentChunkSize+payloadBytes > maxFileSizeBytes && len(currentChunk) > 0 {
chunks = append(chunks, currentChunk)
currentChunk = make([]*protobuf.DiscordMessageAttachment, 0)
currentChunkSize = uint64(0)
}
currentChunk = append(currentChunk, attachment)
currentChunkSize = currentChunkSize + payloadBytes
if i == len(slice)-1 {
chunks = append(chunks, currentChunk)
}
}
return chunks
}
func (m *Messenger) ImageServerURL() string {
return m.httpServer.MakeImageServerURL()
}
func (m *Messenger) myHexIdentity() string {
return common.PubkeyToHex(&m.identity.PublicKey)
}
func (m *Messenger) GetMentionsManager() *MentionManager {
return m.mentionsManager
}