2021-01-11 10:32:51 +00:00
|
|
|
package protocol
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"crypto/ecdsa"
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
"encoding/json"
|
2023-05-04 22:17:54 +00:00
|
|
|
"errors"
|
2021-04-19 12:09:46 +00:00
|
|
|
"fmt"
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
"os"
|
2023-09-26 16:47:50 +00:00
|
|
|
"reflect"
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
"strings"
|
2022-09-29 11:50:23 +00:00
|
|
|
"sync"
|
2021-01-11 10:32:51 +00:00
|
|
|
"time"
|
|
|
|
|
2023-10-25 10:13:35 +00:00
|
|
|
"golang.org/x/exp/maps"
|
2023-08-14 08:59:02 +00:00
|
|
|
"golang.org/x/exp/slices"
|
2023-06-01 20:02:34 +00:00
|
|
|
"golang.org/x/time/rate"
|
|
|
|
|
2023-08-15 15:27:01 +00:00
|
|
|
"github.com/golang/protobuf/proto"
|
2023-07-05 17:35:22 +00:00
|
|
|
"github.com/google/uuid"
|
2023-08-15 15:27:01 +00:00
|
|
|
|
2023-06-06 18:33:09 +00:00
|
|
|
gethcommon "github.com/ethereum/go-ethereum/common"
|
2022-05-04 00:10:00 +00:00
|
|
|
"github.com/ethereum/go-ethereum/common/hexutil"
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
"go.uber.org/zap"
|
|
|
|
|
2022-06-02 12:17:52 +00:00
|
|
|
"github.com/ethereum/go-ethereum/accounts/abi/bind"
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
"github.com/meirf/gopart"
|
|
|
|
|
feat: add verified wallet accounts to community requests
This commit extends the `CommunityRequestToJoin` with `RevealedAddresses` which represent wallet addresses and signatures provided by the sender, to proof a community owner ownership of those wallet addresses.
**Note: This only works with keystore files maanged by status-go**
At high level, the follwing happens:
1. User instructs Status to send a request to join to a community. By adding a password hash to the instruction, Status will try to unlock the users keystore and verify each wallet account.
2. For every verified wallet account, a signature is created for the following payload, using each wallet's private key
``` keccak256(chatkey + communityID + requestToJoinID) ``` A map of walletAddress->signature is then attached to the community request to join, which will be sent to the community owner
3. The owner node receives the request, and if the community requires users to hold tokens to become a member, it will check and verify whether the given wallet addresses are indeed owned by the sender. If any signature provided by the request cannot be recovered, the request is immediately declined by the owner.
4. The verified addresses are then added to the owner node's database such that, once the request should be accepted, the addresses can be used to check on chain whether they own the necessary funds to fulfill the community's permissions
The checking of required funds is **not** part of this commit. It will be added in a follow-up commit.
2023-03-17 09:19:40 +00:00
|
|
|
"github.com/status-im/status-go/account"
|
2021-01-11 10:32:51 +00:00
|
|
|
"github.com/status-im/status-go/eth-node/crypto"
|
|
|
|
"github.com/status-im/status-go/eth-node/types"
|
2022-11-04 13:56:13 +00:00
|
|
|
"github.com/status-im/status-go/images"
|
2022-09-06 18:07:22 +00:00
|
|
|
"github.com/status-im/status-go/multiaccounts/accounts"
|
2021-01-11 10:32:51 +00:00
|
|
|
"github.com/status-im/status-go/protocol/common"
|
|
|
|
"github.com/status-im/status-go/protocol/communities"
|
2023-07-07 13:03:37 +00:00
|
|
|
"github.com/status-im/status-go/protocol/communities/token"
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
"github.com/status-im/status-go/protocol/discord"
|
2021-01-11 10:32:51 +00:00
|
|
|
"github.com/status-im/status-go/protocol/protobuf"
|
|
|
|
"github.com/status-im/status-go/protocol/requests"
|
2021-04-19 12:09:46 +00:00
|
|
|
"github.com/status-im/status-go/protocol/transport"
|
2022-09-29 11:50:23 +00:00
|
|
|
v1protocol "github.com/status-im/status-go/protocol/v1"
|
2023-01-19 13:23:48 +00:00
|
|
|
localnotifications "github.com/status-im/status-go/services/local-notifications"
|
2023-06-21 11:20:43 +00:00
|
|
|
"github.com/status-im/status-go/services/wallet/bigint"
|
2023-01-19 13:23:48 +00:00
|
|
|
"github.com/status-im/status-go/signal"
|
2021-01-11 10:32:51 +00:00
|
|
|
)
|
|
|
|
|
2022-03-21 14:18:36 +00:00
|
|
|
// 7 days interval
var messageArchiveInterval = 7 * 24 * time.Hour

// 1 day interval
var updateActiveMembersInterval = 24 * time.Hour

// discordTimestampLayout is the timestamp format found in Discord export
// (JSON) files, expressed with Go's reference time.
const discordTimestampLayout = "2006-01-02T15:04:05+00:00"

const (
	// importSlowRate / importFastRate throttle history-archive message import:
	// one event per second vs. 100 events per second.
	importSlowRate = time.Second / 1
	importFastRate = time.Second / 100
	// importMessagesChunkSize is the number of messages handled per import batch.
	importMessagesChunkSize = 10
	// importInitialDelay is the wait before import work starts — presumably to
	// let the node settle after startup; TODO(review) confirm against callers.
	importInitialDelay = time.Minute * 5
)

const (
	// Per-chunk upper bounds: maximum message count and maximum payload size
	// in bytes (1.5 MB).
	maxChunkSizeMessages = 1000
	maxChunkSizeBytes    = 1500000
)
|
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
// FetchCommunityRequest describes a request to fetch a community by key,
// optionally consulting the local database and/or waiting for a network
// response.
type FetchCommunityRequest struct {
	// CommunityKey should be either a public or a private community key
	CommunityKey string `json:"communityKey"`
	// Shard the community lives on; nil presumably selects the default
	// (unsharded) pubsub topic — TODO(review) confirm against the fetch path.
	Shard *common.Shard `json:"shard"`
	// TryDatabase, when true, looks the community up locally first —
	// assumption based on the name; verify against the consuming handler.
	TryDatabase bool `json:"tryDatabase"`
	// WaitForResponse, when true, blocks the fetch until a response arrives —
	// assumption based on the name; verify against the consuming handler.
	WaitForResponse bool `json:"waitForResponse"`
}
|
|
|
|
|
|
|
|
func (r *FetchCommunityRequest) Validate() error {
|
|
|
|
if len(r.CommunityKey) <= 2 {
|
|
|
|
return fmt.Errorf("community key is too short")
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// getCommunityID returns the community ID derived from the request's
// CommunityKey; a private key is mapped to its compressed public key
// (see GetCommunityIDFromKey), a public key is returned as-is.
func (r *FetchCommunityRequest) getCommunityID() string {
	return GetCommunityIDFromKey(r.CommunityKey)
}
|
|
|
|
|
|
|
|
func GetCommunityIDFromKey(communityKey string) string {
|
|
|
|
// Check if the key is a private key. strip the 0x at the start
|
|
|
|
if privateKey, err := crypto.HexToECDSA(communityKey[2:]); err == nil {
|
|
|
|
// It is a privateKey
|
|
|
|
return types.HexBytes(crypto.CompressPubkey(&privateKey.PublicKey)).String()
|
|
|
|
}
|
|
|
|
|
|
|
|
// Not a private key, use the public key
|
|
|
|
return communityKey
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
func (m *Messenger) publishOrg(org *communities.Community) error {
|
2023-06-22 12:16:21 +00:00
|
|
|
if org == nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
m.logger.Debug("publishing org", zap.String("org-id", org.IDString()), zap.Any("org", org))
|
|
|
|
payload, err := org.MarshaledDescription()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := common.RawMessage{
|
|
|
|
Payload: payload,
|
|
|
|
Sender: org.PrivateKey(),
|
|
|
|
// we don't want to wrap in an encryption layer message
|
2023-11-08 18:05:33 +00:00
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_DESCRIPTION,
|
|
|
|
PubsubTopic: org.PubsubTopic(), // TODO: confirm if it should be sent in community pubsub topic
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
2021-06-23 14:13:48 +00:00
|
|
|
_, err = m.sender.SendPublic(context.Background(), org.IDString(), rawMessage)
|
2021-01-11 10:32:51 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
func (m *Messenger) publishCommunityEvents(community *communities.Community, msg *communities.CommunityEventsMessage) error {
|
2023-08-08 13:16:29 +00:00
|
|
|
m.logger.Debug("publishing community events", zap.String("admin-id", common.PubkeyToHex(&m.identity.PublicKey)), zap.Any("event", msg))
|
|
|
|
|
|
|
|
payload, err := msg.Marshal()
|
2023-06-14 14:15:46 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-08-08 13:16:29 +00:00
|
|
|
rawMessage := common.RawMessage{
|
|
|
|
Payload: payload,
|
|
|
|
Sender: m.identity,
|
|
|
|
// we don't want to wrap in an encryption layer message
|
2023-11-08 18:05:33 +00:00
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_EVENTS_MESSAGE,
|
|
|
|
PubsubTopic: community.PubsubTopic(), // TODO: confirm if it should be sent in community pubsub topic
|
2023-08-08 13:16:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: resend in case of failure?
|
|
|
|
_, err = m.sender.SendPublic(context.Background(), types.EncodeHex(msg.CommunityID), rawMessage)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) publishCommunityEventsRejected(community *communities.Community, msg *communities.CommunityEventsMessage) error {
|
|
|
|
if !community.IsControlNode() {
|
|
|
|
return communities.ErrNotControlNode
|
|
|
|
}
|
|
|
|
m.logger.Debug("publishing community events rejected", zap.Any("event", msg))
|
|
|
|
|
|
|
|
communityEventsMessage := msg.ToProtobuf()
|
|
|
|
communityEventsMessageRejected := &protobuf.CommunityEventsMessageRejected{
|
2023-08-18 11:39:59 +00:00
|
|
|
Msg: communityEventsMessage,
|
2023-08-08 13:16:29 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
payload, err := proto.Marshal(communityEventsMessageRejected)
|
2023-06-14 14:15:46 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := common.RawMessage{
|
|
|
|
Payload: payload,
|
2023-08-08 13:16:29 +00:00
|
|
|
Sender: community.PrivateKey(),
|
2023-06-14 14:15:46 +00:00
|
|
|
// we don't want to wrap in an encryption layer message
|
2023-11-08 18:05:33 +00:00
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_EVENTS_MESSAGE_REJECTED,
|
|
|
|
PubsubTopic: community.PubsubTopic(), // TODO: confirm if it should be sent in community pubsub topic
|
2023-06-14 14:15:46 +00:00
|
|
|
}
|
|
|
|
|
2023-08-08 13:16:29 +00:00
|
|
|
// TODO: resend in case of failure?
|
|
|
|
_, err = m.sender.SendPublic(context.Background(), types.EncodeHex(msg.CommunityID), rawMessage)
|
2023-06-14 14:15:46 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-09-20 08:37:46 +00:00
|
|
|
func (m *Messenger) publishCommunityPrivilegedMemberSyncMessage(msg *communities.CommunityPrivilegedMemberSyncMessage) error {
|
|
|
|
|
|
|
|
m.logger.Debug("publishing privileged user sync message", zap.Any("event", msg))
|
|
|
|
|
|
|
|
payload, err := proto.Marshal(msg.CommunityPrivilegedUserSyncMessage)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := &common.RawMessage{
|
2023-11-08 18:05:33 +00:00
|
|
|
Payload: payload,
|
|
|
|
Sender: msg.CommunityPrivateKey, // if empty, sender private key will be used in SendPrivate
|
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_PRIVILEGED_USER_SYNC_MESSAGE,
|
2023-09-20 08:37:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
for _, receivers := range msg.Receivers {
|
|
|
|
_, err = m.sender.SendPrivate(context.Background(), receivers, rawMessage)
|
|
|
|
}
|
|
|
|
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2022-03-21 14:18:36 +00:00
|
|
|
func (m *Messenger) handleCommunitiesHistoryArchivesSubscription(c chan *communities.Subscription) {
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case sub, more := <-c:
|
|
|
|
if !more {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.CreatingHistoryArchivesSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.CreatingHistoryArchives(sub.CreatingHistoryArchivesSignal.CommunityID)
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.HistoryArchivesCreatedSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.HistoryArchivesCreated(
|
|
|
|
sub.HistoryArchivesCreatedSignal.CommunityID,
|
|
|
|
sub.HistoryArchivesCreatedSignal.From,
|
|
|
|
sub.HistoryArchivesCreatedSignal.To,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.NoHistoryArchivesCreatedSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.NoHistoryArchivesCreated(
|
|
|
|
sub.NoHistoryArchivesCreatedSignal.CommunityID,
|
|
|
|
sub.NoHistoryArchivesCreatedSignal.From,
|
|
|
|
sub.NoHistoryArchivesCreatedSignal.To,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.HistoryArchivesSeedingSignal != nil {
|
|
|
|
|
|
|
|
m.config.messengerSignalsHandler.HistoryArchivesSeeding(sub.HistoryArchivesSeedingSignal.CommunityID)
|
|
|
|
|
2022-05-04 11:42:10 +00:00
|
|
|
c, err := m.communitiesManager.GetByIDString(sub.HistoryArchivesSeedingSignal.CommunityID)
|
2022-03-21 14:18:36 +00:00
|
|
|
if err != nil {
|
2022-05-04 11:42:10 +00:00
|
|
|
m.logger.Debug("failed to retrieve community by id string", zap.Error(err))
|
|
|
|
}
|
|
|
|
|
2023-07-21 09:41:26 +00:00
|
|
|
if c.IsControlNode() {
|
2022-05-04 11:42:10 +00:00
|
|
|
err := m.dispatchMagnetlinkMessage(sub.HistoryArchivesSeedingSignal.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Debug("failed to dispatch magnetlink message", zap.Error(err))
|
|
|
|
}
|
2022-03-21 14:18:36 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.HistoryArchivesUnseededSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.HistoryArchivesUnseeded(sub.HistoryArchivesUnseededSignal.CommunityID)
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.HistoryArchiveDownloadedSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.HistoryArchiveDownloaded(
|
|
|
|
sub.HistoryArchiveDownloadedSignal.CommunityID,
|
|
|
|
sub.HistoryArchiveDownloadedSignal.From,
|
|
|
|
sub.HistoryArchiveDownloadedSignal.To,
|
|
|
|
)
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
|
|
|
if sub.DownloadingHistoryArchivesFinishedSignal != nil {
|
2022-12-01 14:02:17 +00:00
|
|
|
m.config.messengerSignalsHandler.DownloadingHistoryArchivesFinished(sub.DownloadingHistoryArchivesFinishedSignal.CommunityID)
|
|
|
|
}
|
|
|
|
|
|
|
|
if sub.DownloadingHistoryArchivesStartedSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.DownloadingHistoryArchivesStarted(sub.DownloadingHistoryArchivesStartedSignal.CommunityID)
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
2022-12-02 12:45:41 +00:00
|
|
|
|
|
|
|
if sub.ImportingHistoryArchiveMessagesSignal != nil {
|
|
|
|
m.config.messengerSignalsHandler.ImportingHistoryArchiveMessages(sub.ImportingHistoryArchiveMessagesSignal.CommunityID)
|
|
|
|
}
|
2023-07-05 17:35:22 +00:00
|
|
|
|
2022-03-21 14:18:36 +00:00
|
|
|
case <-m.quit:
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// handleCommunitiesSubscription handles events from communities.
// It spawns a goroutine that (a) reacts to subscription events on c —
// publishing community descriptions/events, forwarding membership-request
// decisions, and handling validated token communities — and (b) on a
// 5-minute ticker re-advertises all controlled communities, rate-limited
// by communityAdvertiseIntervalSecond. The goroutine exits when c is
// closed or the messenger quits.
func (m *Messenger) handleCommunitiesSubscription(c chan *communities.Subscription) {
	// Unix timestamp of the last periodic advertisement.
	var lastPublished int64
	// We check every 5 minutes if we need to publish
	ticker := time.NewTicker(5 * time.Minute)

	// Snapshot of each controlled community as of its last publish, keyed by
	// community ID string. Used below to detect clock changes and to compute
	// encryption-key actions. Only accessed from the goroutine started below
	// (plus this one-time initialization), so no locking is needed.
	recentlyPublishedOrgs := func() map[string]*communities.Community {
		result := make(map[string]*communities.Community)

		controlledCommunities, err := m.communitiesManager.Controlled()
		if err != nil {
			m.logger.Warn("failed to retrieve orgs", zap.Error(err))
			return result
		}

		for _, org := range controlledCommunities {
			result[org.IDString()] = org
		}

		return result
	}()

	// Publishes the community description and, if anything changed since the
	// last publish, signals the client and distributes encryption keys.
	publishOrgAndDistributeEncryptionKeys := func(community *communities.Community) {
		err := m.publishOrg(community)
		if err != nil {
			m.logger.Warn("failed to publish org", zap.Error(err))
			return
		}
		m.logger.Debug("published org")

		// nil when this community was never published during this session.
		recentlyPublishedOrg := recentlyPublishedOrgs[community.IDString()]

		// signal client with published community
		if m.config.messengerSignalsHandler != nil {
			// Only signal when the community is new or its clock advanced.
			if recentlyPublishedOrg == nil || community.Clock() > recentlyPublishedOrg.Clock() {
				response := &MessengerResponse{}
				response.AddCommunity(community)
				m.config.messengerSignalsHandler.MessengerResponse(response)
			}
		}

		// evaluate and distribute encryption keys (if any)
		encryptionKeyActions := communities.EvaluateCommunityEncryptionKeyActions(recentlyPublishedOrg, community)
		err = m.communitiesKeyDistributor.Distribute(community, encryptionKeyActions)
		if err != nil {
			m.logger.Warn("failed to distribute encryption keys", zap.Error(err))
		}

		// Deep copy so later mutations of the live community don't corrupt
		// the snapshot used for the next comparison.
		recentlyPublishedOrgs[community.IDString()] = community.CreateDeepCopy()
	}

	go func() {
		for {
			select {
			case sub, more := <-c:
				if !more {
					return
				}
				if sub.Community != nil {
					publishOrgAndDistributeEncryptionKeys(sub.Community)
				}

				if sub.CommunityEventsMessage != nil {
					err := m.publishCommunityEvents(sub.Community, sub.CommunityEventsMessage)
					if err != nil {
						m.logger.Warn("failed to publish community events", zap.Error(err))
					}
				}

				// Events carrying an invalid clock are bounced back as a
				// rejection message (control node only).
				if sub.CommunityEventsMessageInvalidClock != nil {
					err := m.publishCommunityEventsRejected(sub.CommunityEventsMessageInvalidClock.Community,
						sub.CommunityEventsMessageInvalidClock.CommunityEventsMessage)
					if err != nil {
						m.logger.Warn("failed to publish community events rejected", zap.Error(err))
					}
				}

				// Membership decisions forwarded by event senders are
				// confirmed here by replaying them through the accept/decline
				// entry points.
				if sub.AcceptedRequestsToJoin != nil {
					for _, requestID := range sub.AcceptedRequestsToJoin {
						accept := &requests.AcceptRequestToJoinCommunity{
							ID: requestID,
						}
						_, err := m.AcceptRequestToJoinCommunity(accept)
						if err != nil {
							m.logger.Warn("failed to accept request to join ", zap.Error(err))
						}
					}
				}

				if sub.RejectedRequestsToJoin != nil {
					for _, requestID := range sub.RejectedRequestsToJoin {
						reject := &requests.DeclineRequestToJoinCommunity{
							ID: requestID,
						}
						_, err := m.DeclineRequestToJoinCommunity(reject)
						if err != nil {
							m.logger.Warn("failed to decline request to join ", zap.Error(err))
						}
					}
				}

				if sub.CommunityPrivilegedMemberSyncMessage != nil {
					if err := m.publishCommunityPrivilegedMemberSyncMessage(sub.CommunityPrivilegedMemberSyncMessage); err != nil {
						m.logger.Warn("failed to publish community private members sync message", zap.Error(err))
					}
				}
				if sub.TokenCommunityValidated != nil {
					// Run the validated community response through the normal
					// message-handling pipeline and surface the result.
					state := m.buildMessageState()
					communityResponse := sub.TokenCommunityValidated

					err := m.handleCommunityResponse(state, communityResponse)
					if err != nil {
						m.logger.Error("failed to handle community response", zap.Error(err))
					}

					m.processCommunityChanges(state)

					response, err := m.saveDataAndPrepareResponse(state)
					if err != nil {
						m.logger.Error("failed to save data and prepare response")
					}

					// control node changed and we were kicked out. It now awaits our addresses
					if communityResponse.Changes.ControlNodeChanged != nil && communityResponse.Changes.MemberKicked {
						requestToJoin, err := m.sendSharedAddressToControlNode(communityResponse.Community.ControlNode(), communityResponse.Community)
						if err != nil {
							m.logger.Error("share address to control node failed", zap.String("id", types.EncodeHex(communityResponse.Community.ID())), zap.Error(err))
						} else {
							state.Response.RequestsToJoinCommunity = append(state.Response.RequestsToJoinCommunity, requestToJoin)
						}
					}

					if m.config.messengerSignalsHandler != nil {
						m.config.messengerSignalsHandler.MessengerResponse(response)
					}
				}

			case <-ticker.C:
				// If we are not online, we don't even try
				if !m.online() {
					continue
				}

				// If not enough time has passed since last advertisement, we skip this
				if time.Now().Unix()-lastPublished < communityAdvertiseIntervalSecond {
					continue
				}

				controlledCommunities, err := m.communitiesManager.Controlled()
				if err != nil {
					m.logger.Warn("failed to retrieve orgs", zap.Error(err))
				}

				// Communities currently being imported are skipped to avoid
				// advertising a half-imported state.
				for idx := range controlledCommunities {
					org := controlledCommunities[idx]
					_, beingImported := m.importingCommunities[org.IDString()]
					if !beingImported {
						publishOrgAndDistributeEncryptionKeys(org)
					}
				}

				// set lastPublished
				lastPublished = time.Now().Unix()

			case <-m.quit:
				return
			}
		}
	}()
}
|
|
|
|
|
2023-03-28 14:40:00 +00:00
|
|
|
func (m *Messenger) updateCommunitiesActiveMembersPeriodically() {
|
|
|
|
communitiesLastUpdated := make(map[string]int64)
|
|
|
|
|
|
|
|
// We check every 5 minutes if we need to update
|
|
|
|
ticker := time.NewTicker(5 * time.Minute)
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case <-ticker.C:
|
2023-10-19 22:06:09 +00:00
|
|
|
controlledCommunities, err := m.communitiesManager.Controlled()
|
2023-03-28 14:40:00 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to update community active members count", zap.Error(err))
|
|
|
|
}
|
|
|
|
|
2023-07-06 17:44:31 +00:00
|
|
|
for _, community := range controlledCommunities {
|
2023-03-28 14:40:00 +00:00
|
|
|
lastUpdated, ok := communitiesLastUpdated[community.IDString()]
|
|
|
|
if !ok {
|
|
|
|
lastUpdated = 0
|
|
|
|
}
|
|
|
|
|
|
|
|
// If not enough time has passed since last update, we skip this
|
|
|
|
if time.Now().Unix()-lastUpdated < int64(updateActiveMembersInterval.Seconds()) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := m.updateCommunityActiveMembers(community.IDString()); err == nil {
|
|
|
|
communitiesLastUpdated[community.IDString()] = time.Now().Unix()
|
|
|
|
|
|
|
|
// Perf: ensure `updateCommunityActiveMembers` is not called few times in a row
|
|
|
|
// Next communities will be handled in subsequent ticks
|
|
|
|
break
|
|
|
|
} else {
|
|
|
|
m.logger.Error("failed to update community active members count", zap.Error(err))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
case <-m.quit:
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2023-07-19 12:14:42 +00:00
|
|
|
func (m *Messenger) CheckCommunitiesToUnmute() (*MessengerResponse, error) {
|
2023-06-17 08:19:05 +00:00
|
|
|
m.logger.Debug("watching communities to unmute")
|
2023-07-19 12:14:42 +00:00
|
|
|
response := &MessengerResponse{}
|
2023-06-17 08:19:05 +00:00
|
|
|
communities, err := m.communitiesManager.All()
|
|
|
|
if err != nil {
|
2023-07-19 12:14:42 +00:00
|
|
|
return nil, fmt.Errorf("couldn't get all communities: %v", err)
|
2023-06-17 08:19:05 +00:00
|
|
|
}
|
|
|
|
for _, community := range communities {
|
2023-07-19 12:14:42 +00:00
|
|
|
communityMuteTill, err := time.Parse(time.RFC3339, community.MuteTill().Format(time.RFC3339))
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
currTime, err := time.Parse(time.RFC3339, time.Now().Format(time.RFC3339))
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-06-17 08:19:05 +00:00
|
|
|
if currTime.After(communityMuteTill) && !communityMuteTill.Equal(time.Time{}) && community.Muted() {
|
2023-07-19 12:14:42 +00:00
|
|
|
err := m.communitiesManager.SetMuted(community.ID(), false)
|
2023-06-17 08:19:05 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Info("CheckCommunitiesToUnmute err", zap.Any("Couldn't unmute community", err))
|
|
|
|
break
|
|
|
|
}
|
2023-07-19 12:14:42 +00:00
|
|
|
|
|
|
|
err = m.MuteCommunityTill(community.ID(), time.Time{})
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Info("MuteCommunityTill err", zap.Any("Could not set mute community till", err))
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
unmutedCommunity, err := m.communitiesManager.GetByID(community.ID())
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunity(unmutedCommunity)
|
|
|
|
|
2023-06-17 08:19:05 +00:00
|
|
|
}
|
2023-07-19 12:14:42 +00:00
|
|
|
|
2023-06-17 08:19:05 +00:00
|
|
|
}
|
2023-07-19 12:14:42 +00:00
|
|
|
|
|
|
|
return response, nil
|
2023-06-17 08:19:05 +00:00
|
|
|
}
|
|
|
|
|
2023-03-28 14:40:00 +00:00
|
|
|
func (m *Messenger) updateCommunityActiveMembers(communityID string) error {
|
|
|
|
lastWeek := time.Now().AddDate(0, 0, -7).Unix()
|
|
|
|
count, err := m.persistence.CountActiveChattersInCommunity(communityID, lastWeek)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err = m.communitiesManager.SetCommunityActiveMembersCount(communityID, uint64(count)); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
m.logger.Debug("community active members updated", zap.String("communityID", communityID), zap.Uint("count", count))
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// Communities returns every community known to the communities manager,
// regardless of membership state.
func (m *Messenger) Communities() ([]*communities.Community, error) {
	return m.communitiesManager.All()
}
|
|
|
|
|
2023-10-26 15:09:43 +00:00
|
|
|
// ControlledCommunities returns the communities this node controls, as
// reported by the communities manager.
func (m *Messenger) ControlledCommunities() ([]*communities.Community, error) {
	return m.communitiesManager.Controlled()
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// JoinedCommunities returns the communities this device has joined.
func (m *Messenger) JoinedCommunities() ([]*communities.Community, error) {
	return m.communitiesManager.Joined()
}
|
|
|
|
|
2022-09-20 19:57:39 +00:00
|
|
|
// SpectatedCommunities returns the communities this device is observing
// without having joined them.
func (m *Messenger) SpectatedCommunities() ([]*communities.Community, error) {
	return m.communitiesManager.Spectated()
}
|
|
|
|
|
2023-11-08 13:06:40 +00:00
|
|
|
const (
|
|
|
|
fetchError int = 0
|
|
|
|
fetchSuccess int = 1
|
|
|
|
fetchHasUnknowns int = 2
|
|
|
|
)
|
2023-08-07 12:54:00 +00:00
|
|
|
|
2023-11-08 13:06:40 +00:00
|
|
|
func calcTimeTillNextUpdate(fetchResultsHistory []int) time.Duration {
|
2023-09-22 15:14:06 +00:00
|
|
|
// TODO lower this back again once the real curated community contract is up
|
|
|
|
// The current contract contains communities that are no longer accessible on waku
|
2023-11-08 13:06:40 +00:00
|
|
|
const shortTimeout = 30 * time.Second
|
|
|
|
const averageTimeout = 60 * time.Second
|
|
|
|
const longTimeout = 300 * time.Second
|
|
|
|
|
|
|
|
twoConsecutiveErrors := (len(fetchResultsHistory) == 2 &&
|
|
|
|
fetchResultsHistory[0] == fetchError &&
|
|
|
|
fetchResultsHistory[1] == fetchError)
|
|
|
|
|
|
|
|
twoConsecutiveHasUnknowns := (len(fetchResultsHistory) == 2 &&
|
|
|
|
fetchResultsHistory[0] == fetchHasUnknowns &&
|
|
|
|
fetchResultsHistory[1] == fetchHasUnknowns)
|
|
|
|
|
|
|
|
var timeTillNextUpdate time.Duration
|
|
|
|
|
|
|
|
if twoConsecutiveErrors || twoConsecutiveHasUnknowns {
|
|
|
|
timeTillNextUpdate = longTimeout
|
|
|
|
} else {
|
|
|
|
switch fetchResultsHistory[len(fetchResultsHistory)-1] {
|
|
|
|
case fetchError:
|
|
|
|
timeTillNextUpdate = shortTimeout
|
|
|
|
case fetchSuccess:
|
|
|
|
timeTillNextUpdate = longTimeout
|
|
|
|
case fetchHasUnknowns:
|
|
|
|
timeTillNextUpdate = averageTimeout
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return timeTillNextUpdate
|
|
|
|
}
|
|
|
|
|
|
|
|
// Regularly gets list of curated communities and signals them to client
|
|
|
|
func (m *Messenger) startCuratedCommunitiesUpdateLoop() {
|
|
|
|
logger := m.logger.Named("startCuratedCommunitiesUpdateLoop")
|
2023-08-07 12:54:00 +00:00
|
|
|
|
2023-09-26 16:47:50 +00:00
|
|
|
type curatedCommunities struct {
|
|
|
|
ContractCommunities []string
|
|
|
|
ContractFeaturedCommunities []string
|
|
|
|
UnknownCommunities []string
|
|
|
|
}
|
|
|
|
|
2023-08-07 12:54:00 +00:00
|
|
|
go func() {
|
|
|
|
|
2023-11-08 13:06:40 +00:00
|
|
|
var fetchResultsHistory = make([]int, 0)
|
|
|
|
var mu = sync.RWMutex{}
|
|
|
|
var c = curatedCommunities{}
|
|
|
|
|
|
|
|
for {
|
2023-08-07 12:54:00 +00:00
|
|
|
response, err := m.CuratedCommunities()
|
2023-09-26 16:47:50 +00:00
|
|
|
|
2023-08-07 12:54:00 +00:00
|
|
|
if err != nil {
|
2023-11-08 13:06:40 +00:00
|
|
|
fetchResultsHistory = append(fetchResultsHistory, fetchError)
|
2023-08-07 12:54:00 +00:00
|
|
|
} else {
|
2023-09-26 16:47:50 +00:00
|
|
|
mu.Lock()
|
|
|
|
// Check if it's the same values we had
|
|
|
|
if !reflect.DeepEqual(c.ContractCommunities, response.ContractCommunities) ||
|
|
|
|
!reflect.DeepEqual(c.ContractFeaturedCommunities, response.ContractFeaturedCommunities) ||
|
|
|
|
!reflect.DeepEqual(c.UnknownCommunities, response.UnknownCommunities) {
|
|
|
|
// One of the communities is different, send the updated response
|
|
|
|
m.config.messengerSignalsHandler.SendCuratedCommunitiesUpdate(response)
|
|
|
|
|
|
|
|
// Update the values
|
|
|
|
c.ContractCommunities = response.ContractCommunities
|
|
|
|
c.ContractFeaturedCommunities = response.ContractFeaturedCommunities
|
|
|
|
c.UnknownCommunities = response.UnknownCommunities
|
|
|
|
}
|
|
|
|
mu.Unlock()
|
2023-08-07 12:54:00 +00:00
|
|
|
|
|
|
|
if len(response.UnknownCommunities) == 0 {
|
2023-11-08 13:06:40 +00:00
|
|
|
fetchResultsHistory = append(fetchResultsHistory, fetchSuccess)
|
|
|
|
|
2023-08-07 12:54:00 +00:00
|
|
|
} else {
|
2023-11-08 13:06:40 +00:00
|
|
|
fetchResultsHistory = append(fetchResultsHistory, fetchHasUnknowns)
|
2023-08-07 12:54:00 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-11-08 13:06:40 +00:00
|
|
|
//keep only 2 last fetch results
|
|
|
|
if len(fetchResultsHistory) > 2 {
|
|
|
|
fetchResultsHistory = fetchResultsHistory[1:]
|
|
|
|
}
|
|
|
|
|
|
|
|
timeTillNextUpdate := calcTimeTillNextUpdate(fetchResultsHistory)
|
2023-08-07 12:54:00 +00:00
|
|
|
logger.Debug("Next curated communities update will happen in", zap.Duration("timeTillNextUpdate", timeTillNextUpdate))
|
|
|
|
|
|
|
|
select {
|
|
|
|
case <-time.After(timeTillNextUpdate):
|
|
|
|
case <-m.quit:
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2022-06-02 12:17:52 +00:00
|
|
|
// CuratedCommunities reads the curated-community registry from the on-chain
// directory contract (Optimism mainnet, or Optimism Goerli when test
// networks are enabled), resolves locally stored descriptions for the listed
// communities, appends the featured-community IDs, and asynchronously asks
// the mailserver for any community we have no stored description for.
func (m *Messenger) CuratedCommunities() (*communities.KnownCommunitiesResponse, error) {
	if m.contractMaker == nil {
		m.logger.Warn("contract maker not initialized")
		return nil, errors.New("contract maker not initialized")
	}

	testNetworksEnabled, err := m.settings.GetTestNetworksEnabled()
	if err != nil {
		return nil, err
	}

	chainID := uint64(10) // Optimism Mainnet
	if testNetworksEnabled {
		chainID = 420 // Optimism Goerli
	}

	directory, err := m.contractMaker.NewDirectory(chainID)
	if err != nil {
		return nil, err
	}

	// Read latest (non-pending) contract state.
	callOpts := &bind.CallOpts{Context: context.Background(), Pending: false}

	curatedCommunities, err := directory.GetCommunities(callOpts)
	if err != nil {
		return nil, err
	}
	var communityIDs []types.HexBytes
	for _, c := range curatedCommunities {
		communityIDs = append(communityIDs, c)
	}

	// Splits the IDs into known communities (with stored descriptions) and
	// unknown ones.
	response, err := m.communitiesManager.GetStoredDescriptionForCommunities(communityIDs)
	if err != nil {
		return nil, err
	}

	featuredCommunities, err := directory.GetFeaturedCommunities(callOpts)
	if err != nil {
		return nil, err
	}

	for _, c := range featuredCommunities {
		response.ContractFeaturedCommunities = append(response.ContractFeaturedCommunities, types.HexBytes(c).String())
	}

	// TODO: use mechanism to obtain shard from community ID (https://github.com/status-im/status-desktop/issues/12585)
	var unknownCommunities []communities.CommunityShard
	for _, u := range response.UnknownCommunities {
		unknownCommunities = append(unknownCommunities, communities.CommunityShard{
			CommunityID: u,
		})
	}

	// Fire-and-forget: fetch descriptions for unknown communities in the
	// background; results arrive through the normal message pipeline.
	go m.requestCommunitiesFromMailserver(unknownCommunities)

	return response, nil
}
|
|
|
|
|
2022-09-20 19:57:39 +00:00
|
|
|
// initCommunityChats creates the chat objects for a community's default
// channels, initializes the transport filters needed to receive their
// messages (plus the community filter itself when we are the control node),
// schedules history sync for the new filters, and persists the chats.
// When no sync will run, the chats' synced range is stamped to "now minus
// the default sync period" so history requests start from a sane point.
func (m *Messenger) initCommunityChats(community *communities.Community) ([]*Chat, error) {
	logger := m.logger.Named("initCommunityChats")

	publicFiltersToInit := community.DefaultFilters()

	chats := CreateCommunityChats(community, m.getTimesource())

	// Each chat needs its own public filter on the community's pubsub topic.
	for _, chat := range chats {
		publicFiltersToInit = append(publicFiltersToInit, transport.FiltersToInitialize{ChatID: chat.ID, PubsubTopic: community.PubsubTopic()})

	}

	// Load transport filters
	filters, err := m.transport.InitPublicFilters(publicFiltersToInit)
	if err != nil {
		logger.Debug("m.transport.InitPublicFilters error", zap.Error(err))
		return nil, err
	}

	if community.IsControlNode() {
		// Init the community filter so we can receive messages on the community
		communityFilters, err := m.transport.InitCommunityFilters([]transport.CommunityFilterToInitialize{{
			Shard:   community.Shard().TransportShard(),
			PrivKey: community.PrivateKey(),
		}})

		if err != nil {
			return nil, err
		}
		filters = append(filters, communityFilters...)
	}

	willSync, err := m.scheduleSyncFilters(filters)
	if err != nil {
		logger.Debug("m.scheduleSyncFilters error", zap.Error(err))
		return nil, err
	}

	if !willSync {
		defaultSyncPeriod, err := m.settings.GetDefaultSyncPeriod()
		if err != nil {
			logger.Debug("m.settings.GetDefaultSyncPeriod error", zap.Error(err))
			return nil, err
		}

		// Timesource reports milliseconds; chats store seconds.
		timestamp := uint32(m.getTimesource().GetCurrentTime()/1000) - defaultSyncPeriod
		for idx := range chats {
			chats[idx].SyncedTo = timestamp
			chats[idx].SyncedFrom = timestamp
		}
	}

	if err = m.saveChats(chats); err != nil {
		logger.Debug("m.saveChats error", zap.Error(err))
		return nil, err
	}

	return chats, nil
}
|
|
|
|
|
|
|
|
func (m *Messenger) initCommunitySettings(communityID types.HexBytes) (*communities.CommunitySettings, error) {
|
|
|
|
communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(communityID)
|
2021-09-01 11:57:31 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2022-09-20 19:57:39 +00:00
|
|
|
if communitySettings != nil {
|
|
|
|
return communitySettings, nil
|
|
|
|
}
|
2021-09-01 11:57:31 +00:00
|
|
|
|
2022-09-20 19:57:39 +00:00
|
|
|
communitySettings = &communities.CommunitySettings{
|
|
|
|
CommunityID: communityID.String(),
|
|
|
|
HistoryArchiveSupportEnabled: true,
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := m.communitiesManager.SaveCommunitySettings(*communitySettings); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return communitySettings, nil
|
|
|
|
}
|
|
|
|
|
2023-05-29 17:57:05 +00:00
|
|
|
func (m *Messenger) JoinCommunity(ctx context.Context, communityID types.HexBytes, forceJoin bool) (*MessengerResponse, error) {
|
|
|
|
mr, err := m.joinCommunity(ctx, communityID, forceJoin)
|
2021-07-22 17:41:49 +00:00
|
|
|
if err != nil {
|
2022-09-20 19:57:39 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if com, ok := mr.communities[communityID.String()]; ok {
|
2023-01-06 12:21:14 +00:00
|
|
|
err = m.syncCommunity(context.Background(), com, m.dispatchMessage)
|
2022-09-20 19:57:39 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return mr, nil
|
|
|
|
}
|
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
func (m *Messenger) subscribeToCommunityShard(communityID []byte, shard *common.Shard) error {
|
|
|
|
if m.transport.WakuVersion() != 2 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-05-22 21:38:02 +00:00
|
|
|
// TODO: this should probably be moved completely to transport once pubsub topic logic is implemented
|
2023-10-12 19:21:49 +00:00
|
|
|
pubsubTopic := transport.GetPubsubTopic(shard.TransportShard())
|
|
|
|
|
|
|
|
privK, err := m.transport.RetrievePubsubTopicKey(pubsubTopic)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-05-22 21:38:02 +00:00
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
var pubK *ecdsa.PublicKey
|
|
|
|
if privK != nil {
|
|
|
|
pubK = &privK.PublicKey
|
|
|
|
}
|
|
|
|
|
|
|
|
return m.transport.SubscribeToPubsubTopic(pubsubTopic, pubK)
|
2023-05-22 21:38:02 +00:00
|
|
|
}
|
|
|
|
|
2023-05-29 17:57:05 +00:00
|
|
|
// joinCommunity performs the actual join: it marks the community joined in
// the manager, initializes chats, settings and the shard subscription
// (skipped when the community was already being spectated, since those exist),
// re-registers for push notifications, and announces our user status and
// identity image to the community. Returns a response carrying the community,
// its settings and any newly created chats.
func (m *Messenger) joinCommunity(ctx context.Context, communityID types.HexBytes, forceJoin bool) (*MessengerResponse, error) {
	logger := m.logger.Named("joinCommunity")

	response := &MessengerResponse{}

	community, err := m.communitiesManager.JoinCommunity(communityID, forceJoin)
	if err != nil {
		logger.Debug("m.communitiesManager.JoinCommunity error", zap.Error(err))
		return nil, err
	}

	// chats and settings are already initialized for spectated communities
	if !community.Spectated() {
		chats, err := m.initCommunityChats(community)
		if err != nil {
			return nil, err
		}
		response.AddChats(chats)

		if _, err = m.initCommunitySettings(communityID); err != nil {
			return nil, err
		}

		if err = m.subscribeToCommunityShard(community.ID(), community.Shard()); err != nil {
			return nil, err
		}
	}

	communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(communityID)
	if err != nil {
		return nil, err
	}

	response.AddCommunity(community)
	response.AddCommunitySettings(communitySettings)

	// Community channels may have push notifications enabled; refresh our registration.
	if err = m.reregisterForPushNotifications(); err != nil {
		return nil, err
	}

	// Let other members see our current status.
	if err = m.sendCurrentUserStatusToCommunity(ctx, community); err != nil {
		logger.Debug("m.sendCurrentUserStatusToCommunity error", zap.Error(err))
		return nil, err
	}

	if err = m.PublishIdentityImage(); err != nil {
		return nil, err
	}

	return response, nil
}
|
|
|
|
|
|
|
|
func (m *Messenger) SpectateCommunity(communityID types.HexBytes) (*MessengerResponse, error) {
|
|
|
|
logger := m.logger.Named("SpectateCommunity")
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.SpectateCommunity(communityID)
|
|
|
|
if err != nil {
|
|
|
|
logger.Debug("SpectateCommunity error", zap.Error(err))
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
chats, err := m.initCommunityChats(community)
|
2022-07-08 10:25:46 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2022-09-20 19:57:39 +00:00
|
|
|
response.AddChats(chats)
|
|
|
|
|
|
|
|
settings, err := m.initCommunitySettings(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunitySettings(settings)
|
|
|
|
|
|
|
|
response.AddCommunity(community)
|
2022-07-08 10:25:46 +00:00
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
if err = m.subscribeToCommunityShard(community.ID(), community.Shard()); err != nil {
|
2023-05-22 21:38:02 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2021-07-22 17:41:49 +00:00
|
|
|
return response, nil
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
2023-07-19 12:14:42 +00:00
|
|
|
func (m *Messenger) MuteDuration(mutedType requests.MutingVariation) (time.Time, error) {
|
2023-06-17 08:19:05 +00:00
|
|
|
var MuteTill time.Time
|
|
|
|
|
2023-07-19 12:14:42 +00:00
|
|
|
switch mutedType {
|
2023-06-17 08:19:05 +00:00
|
|
|
case MuteTill1Min:
|
|
|
|
MuteTill = time.Now().Add(MuteFor1MinDuration)
|
|
|
|
case MuteFor15Min:
|
|
|
|
MuteTill = time.Now().Add(MuteFor15MinsDuration)
|
|
|
|
case MuteFor1Hr:
|
|
|
|
MuteTill = time.Now().Add(MuteFor1HrsDuration)
|
|
|
|
case MuteFor8Hr:
|
|
|
|
MuteTill = time.Now().Add(MuteFor8HrsDuration)
|
|
|
|
case MuteFor1Week:
|
|
|
|
MuteTill = time.Now().Add(MuteFor1WeekDuration)
|
|
|
|
default:
|
|
|
|
MuteTill = time.Time{}
|
|
|
|
}
|
|
|
|
|
2023-07-19 12:14:42 +00:00
|
|
|
muteTillTimeRemoveMs, err := time.Parse(time.RFC3339, MuteTill.Format(time.RFC3339))
|
2023-06-17 08:19:05 +00:00
|
|
|
if err != nil {
|
2023-07-19 12:14:42 +00:00
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return muteTillTimeRemoveMs, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) SetMuted(request *requests.MuteCommunity) error {
|
|
|
|
if err := request.Validate(); err != nil {
|
2023-06-17 08:19:05 +00:00
|
|
|
return err
|
|
|
|
}
|
2023-07-19 12:14:42 +00:00
|
|
|
|
|
|
|
if request.MutedType == Unmuted {
|
|
|
|
return m.communitiesManager.SetMuted(request.CommunityID, false)
|
|
|
|
}
|
|
|
|
|
|
|
|
return m.communitiesManager.SetMuted(request.CommunityID, true)
|
|
|
|
}
|
|
|
|
|
|
|
|
// MuteCommunityTill stores the time until which the community stays muted.
// A zero time is used to clear the deadline (see CheckCommunitiesToUnmute).
func (m *Messenger) MuteCommunityTill(communityID []byte, muteTill time.Time) error {
	return m.communitiesManager.MuteCommunityTill(communityID, muteTill)
}
|
|
|
|
|
|
|
|
// MuteAllCommunityChats mutes the community and all of its chats with the
// requested muting variation, returning the resulting mute deadline.
func (m *Messenger) MuteAllCommunityChats(request *requests.MuteCommunity) (time.Time, error) {
	return m.UpdateMuteCommunityStatus(request.CommunityID.String(), true, request.MutedType)
}
|
|
|
|
|
|
|
|
// UnMuteAllCommunityChats unmutes the community and all of its chats.
func (m *Messenger) UnMuteAllCommunityChats(communityID string) (time.Time, error) {
	return m.UpdateMuteCommunityStatus(communityID, false, Unmuted)
}
|
|
|
|
|
|
|
|
func (m *Messenger) UpdateMuteCommunityStatus(communityID string, muted bool, mutedType requests.MutingVariation) (time.Time, error) {
|
|
|
|
community, err := m.communitiesManager.GetByIDString(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
|
|
|
|
request := &requests.MuteCommunity{
|
|
|
|
CommunityID: community.ID(),
|
|
|
|
MutedType: mutedType,
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.SetMuted(request)
|
|
|
|
if err != nil {
|
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
|
|
|
|
muteTill, err := m.MuteDuration(mutedType)
|
|
|
|
if err != nil {
|
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.MuteCommunityTill(community.ID(), muteTill)
|
|
|
|
|
|
|
|
for _, chatID := range community.CommunityChatsIDs() {
|
|
|
|
if muted {
|
|
|
|
_, err := m.MuteChat(&requests.MuteChat{ChatID: communityID + chatID, MutedType: mutedType})
|
|
|
|
if err != nil {
|
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
|
|
|
|
} else {
|
|
|
|
err = m.UnmuteChat(communityID + chatID)
|
|
|
|
if err != nil {
|
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return time.Time{}, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return muteTill, err
|
2021-06-30 13:29:43 +00:00
|
|
|
}
|
|
|
|
|
2023-06-02 11:25:32 +00:00
|
|
|
func (m *Messenger) SetMutePropertyOnChatsByCategory(request *requests.MuteCategory, muted bool) error {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
community, err := m.communitiesManager.GetByIDString(request.CommunityID)
|
2022-07-06 16:16:19 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-06-02 11:25:32 +00:00
|
|
|
for _, chatID := range community.ChatsByCategoryID(request.CategoryID) {
|
2022-07-06 16:16:19 +00:00
|
|
|
if muted {
|
2023-06-02 11:25:32 +00:00
|
|
|
_, err = m.MuteChat(&requests.MuteChat{ChatID: request.CommunityID + chatID, MutedType: request.MutedType})
|
2022-07-06 16:16:19 +00:00
|
|
|
} else {
|
2023-06-02 11:25:32 +00:00
|
|
|
err = m.UnmuteChat(request.CommunityID + chatID)
|
2022-07-06 16:16:19 +00:00
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
// Generates a single hash for each address that needs to be revealed to a community.
// Each hash needs to be signed.
// NOTE(review): iteration below is over the active wallet accounts (filtered
// by addressesToReveal), so the returned order follows the wallet-account
// order, not the order of addressesToReveal — confirm against callers.
func (m *Messenger) generateCommunityRequestsForSigning(memberPubKey string, communityID types.HexBytes, addressesToReveal []string, isEdit bool) ([]account.SignParams, error) {
	walletAccounts, err := m.settings.GetActiveAccounts()
	if err != nil {
		return nil, err
	}

	// Case-insensitive address membership test via canonical address form.
	containsAddress := func(addresses []string, targetAddress string) bool {
		for _, address := range addresses {
			if types.HexToAddress(address) == types.HexToAddress(targetAddress) {
				return true
			}
		}
		return false
	}

	msgsToSign := make([]account.SignParams, 0)
	for _, walletAccount := range walletAccounts {
		// Profile (chat) and watch-only accounts are never revealed.
		if walletAccount.Chat || walletAccount.Type == accounts.AccountTypeWatch {
			continue
		}

		// An empty addressesToReveal means "all eligible accounts".
		if len(addressesToReveal) > 0 && !containsAddress(addressesToReveal, walletAccount.Address.Hex()) {
			continue
		}

		// Edits sign without a request ID; join requests bind the signature
		// to a specific request.
		requestID := []byte{}
		if !isEdit {
			requestID = communities.CalculateRequestID(memberPubKey, communityID)
		}
		msgsToSign = append(msgsToSign, account.SignParams{
			Data:    types.EncodeHex(crypto.Keccak256(m.IdentityPublicKeyCompressed(), communityID, requestID)),
			Address: walletAccount.Address.Hex(),
		})
	}

	return msgsToSign, nil
}
|
|
|
|
|
|
|
|
// GenerateJoiningCommunityRequestsForSigning builds the signable payloads for
// a request to join the given community. communityID must be non-empty.
func (m *Messenger) GenerateJoiningCommunityRequestsForSigning(memberPubKey string, communityID types.HexBytes, addressesToReveal []string) ([]account.SignParams, error) {
	if len(communityID) == 0 {
		return nil, errors.New("communityID has to be provided")
	}
	return m.generateCommunityRequestsForSigning(memberPubKey, communityID, addressesToReveal, false)
}
|
|
|
|
|
|
|
|
// GenerateEditCommunityRequestsForSigning builds the signable payloads for
// editing the set of addresses revealed to a community (isEdit = true, so no
// request ID is included in the signed data).
func (m *Messenger) GenerateEditCommunityRequestsForSigning(memberPubKey string, communityID types.HexBytes, addressesToReveal []string) ([]account.SignParams, error) {
	return m.generateCommunityRequestsForSigning(memberPubKey, communityID, addressesToReveal, true)
}
|
|
|
|
|
|
|
|
// Signs the provided messages with the provided accounts and password.
|
|
|
|
// Provided accounts must not belong to a keypair that is migrated to a keycard.
|
|
|
|
// Otherwise, the signing will fail, cause such accounts should be signed with a keycard.
|
|
|
|
func (m *Messenger) SignData(signParams []account.SignParams) ([]string, error) {
|
|
|
|
signatures := make([]string, len(signParams))
|
|
|
|
for i, param := range signParams {
|
|
|
|
if err := param.Validate(true); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
account, err := m.settings.GetAccountByAddress(types.HexToAddress(param.Address))
|
2023-07-10 14:11:37 +00:00
|
|
|
if err != nil {
|
2023-10-20 06:21:41 +00:00
|
|
|
return nil, err
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
if account.Chat || account.Type == accounts.AccountTypeWatch {
|
|
|
|
return nil, errors.New("cannot join a community using profile chat or watch-only account")
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
2023-10-20 06:21:41 +00:00
|
|
|
|
|
|
|
keypair, err := m.settings.GetKeypairByKeyUID(account.KeyUID)
|
2023-07-10 14:11:37 +00:00
|
|
|
if err != nil {
|
2023-10-20 06:21:41 +00:00
|
|
|
return nil, err
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
if keypair.MigratedToKeycard() {
|
|
|
|
return nil, errors.New("signing a joining community request for accounts migrated to keycard must be done with a keycard")
|
2023-07-14 17:33:47 +00:00
|
|
|
}
|
2023-10-20 06:21:41 +00:00
|
|
|
|
|
|
|
verifiedAccount, err := m.accountsManager.GetVerifiedWalletAccount(m.settings, param.Address, param.Password)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
signature, err := m.accountsManager.Sign(param, verifiedAccount)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
2023-10-20 06:21:41 +00:00
|
|
|
|
|
|
|
signatures[i] = types.EncodeHex(signature)
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
2023-10-20 06:21:41 +00:00
|
|
|
|
|
|
|
return signatures, nil
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
func (m *Messenger) RequestToJoinCommunity(request *requests.RequestToJoinCommunity) (*MessengerResponse, error) {
|
2023-10-20 06:21:41 +00:00
|
|
|
// TODO: Because of changes that need to be done in tests, calling this function and providing `request` without `AddressesToReveal`
|
|
|
|
// is not an error, but it should be.
|
2021-08-06 15:40:23 +00:00
|
|
|
logger := m.logger.Named("RequestToJoinCommunity")
|
2023-10-20 06:21:41 +00:00
|
|
|
if err := request.Validate(len(request.AddressesToReveal) > 0); err != nil {
|
2021-08-06 15:40:23 +00:00
|
|
|
logger.Debug("request failed to validate", zap.Error(err), zap.Any("request", request))
|
2021-01-11 10:32:51 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
requestToJoin := m.communitiesManager.CreateRequestToJoin(request)
|
|
|
|
|
|
|
|
if len(request.AddressesToReveal) > 0 {
|
|
|
|
revealedAddresses := make([]gethcommon.Address, 0)
|
|
|
|
for _, addr := range request.AddressesToReveal {
|
|
|
|
revealedAddresses = append(revealedAddresses, gethcommon.HexToAddress(addr))
|
|
|
|
}
|
|
|
|
|
|
|
|
permissions, err := m.communitiesManager.CheckPermissionToJoin(request.CommunityID, revealedAddresses)
|
2023-04-25 12:00:17 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-10-20 06:21:41 +00:00
|
|
|
if !permissions.Satisfied {
|
|
|
|
return nil, errors.New("permission to join not satisfied")
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, accountAndChainIDs := range permissions.ValidCombinations {
|
|
|
|
for i := range requestToJoin.RevealedAccounts {
|
|
|
|
if gethcommon.HexToAddress(requestToJoin.RevealedAccounts[i].Address) == accountAndChainIDs.Address {
|
|
|
|
requestToJoin.RevealedAccounts[i].ChainIds = accountAndChainIDs.ChainIDs
|
|
|
|
}
|
2023-04-25 12:00:17 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
community, err := m.communitiesManager.CheckCommunityForJoining(request.CommunityID)
|
2022-06-22 18:02:44 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
displayName, err := m.settings.DisplayName()
|
2021-08-06 15:40:23 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-01-11 10:32:51 +00:00
|
|
|
|
|
|
|
requestToJoinProto := &protobuf.CommunityRequestToJoin{
|
2023-06-06 18:33:09 +00:00
|
|
|
Clock: requestToJoin.Clock,
|
|
|
|
EnsName: requestToJoin.ENSName,
|
|
|
|
DisplayName: displayName,
|
2023-10-20 06:21:41 +00:00
|
|
|
CommunityId: request.CommunityID,
|
|
|
|
RevealedAccounts: requestToJoin.RevealedAccounts,
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
2023-07-14 17:06:37 +00:00
|
|
|
community, _, err = m.communitiesManager.SaveRequestToJoinAndCommunity(requestToJoin, community)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
err = m.syncCommunity(context.Background(), community, m.dispatchMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
payload, err := proto.Marshal(requestToJoinProto)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := common.RawMessage{
|
2023-11-08 18:05:33 +00:00
|
|
|
Payload: payload,
|
|
|
|
CommunityID: community.ID(),
|
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_REQUEST_TO_JOIN,
|
|
|
|
PubsubTopic: common.DefaultNonProtectedPubsubTopic(community.Shard()),
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
2023-06-14 14:15:46 +00:00
|
|
|
|
2022-05-27 09:14:40 +00:00
|
|
|
_, err = m.sender.SendCommunityMessage(context.Background(), rawMessage)
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-17 19:45:44 +00:00
|
|
|
// send request to join to privileged members
|
2023-10-25 13:16:49 +00:00
|
|
|
if !community.AutoAccept() {
|
2023-10-17 19:45:44 +00:00
|
|
|
privilegedMembers := community.GetFilteredPrivilegedMembers(map[string]struct{}{})
|
|
|
|
|
|
|
|
for _, member := range privilegedMembers[protobuf.CommunityMember_ROLE_OWNER] {
|
|
|
|
_, err := m.sender.SendPrivate(context.Background(), member, &rawMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for _, member := range privilegedMembers[protobuf.CommunityMember_ROLE_TOKEN_MASTER] {
|
|
|
|
_, err := m.sender.SendPrivate(context.Background(), member, &rawMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// don't send revealed addresses to admins
|
refactor: EventSenders forward RequestToJoin decision to control node
This is a bigger change in how community membership requests are handled
among admins, token masters, owners, and control nodes.
Prior to this commit, all privileged users, also known as
`EventSenders`, were able to accept and reject community membership
requests and those changes would be applied by all users.
This commit changes this behaviour such that:
1. EventSenders can make a decision (accept, reject), but merely forward
their decision to the control node, which ultimately has to confirm
it
2. EventSenders are no longer removing or adding members to and from
communities
3. When an eventsender signaled a decision, the membership request will
enter a pending state (acceptedPending or rejectedPending)
4. Once a decision was made by one eventsender, no other eventsender can
override that decision
This implementation is covered with a bunch of tests:
- Ensure that decision made by event sender is shared with other event
senders
- `testAcceptMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- `testRejectMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- Ensure memebrship request stays pending, until control node has
confirmed decision by event senders
- `testAcceptMemberRequestToJoinNotConfirmedByControlNode()`
- `testRejectMemberRequestToJoinNotConfirmedByControlNode()`
- Ensure that decision made by event sender cannot be overriden by other
event senders
- `testEventSenderCannotOverrideRequestToJoinState()`
These test cases live in three test suites for different event sender
types respectively
- `OwnerWithoutCommunityKeyCommunityEventsSuite`
- `TokenMasterCommunityEventsSuite`
- `AdminCommunityEventsSuite`
In addition to the changes mentioned above, there's also a smaller
changes that ensures membership requests to *not* attached revealed wallet
addresses when the requests are sent to event senders (in addition to
control nodes).
Requests send to a control node will still include revealed addresses as
the control node needs them to verify token permissions.
This commit does not yet handle the case of event senders attempting to
kick and ban members.
Similar to accepting and rejecting membership requests, kicking and
banning need a new pending state. However, we don't track such state in
local databases yet so those two cases will be handled in future commit
to not have this commit grow larger.
2023-08-02 12:04:47 +00:00
|
|
|
requestToJoinProto.RevealedAccounts = make([]*protobuf.RevealedAccount, 0)
|
|
|
|
payload, err = proto.Marshal(requestToJoinProto)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
rawMessage.Payload = payload
|
|
|
|
|
2023-10-17 19:45:44 +00:00
|
|
|
for _, member := range privilegedMembers[protobuf.CommunityMember_ROLE_ADMIN] {
|
|
|
|
_, err := m.sender.SendPrivate(context.Background(), member, &rawMessage)
|
2023-07-28 18:18:27 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-06-14 14:15:46 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
response := &MessengerResponse{RequestsToJoinCommunity: []*communities.RequestToJoin{requestToJoin}}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
|
2021-03-31 16:23:45 +00:00
|
|
|
// We send a push notification in the background
|
|
|
|
go func() {
|
|
|
|
if m.pushNotificationClient != nil {
|
|
|
|
pks, err := community.CanManageUsersPublicKeys()
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to get pks", zap.Error(err))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
for _, publicKey := range pks {
|
|
|
|
pkString := common.PubkeyToHex(publicKey)
|
|
|
|
_, err = m.pushNotificationClient.SendNotification(publicKey, nil, requestToJoin.ID, pkString, protobuf.PushNotification_REQUEST_TO_JOIN_COMMUNITY)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("error sending notification", zap.Error(err))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2022-10-25 22:06:20 +00:00
|
|
|
// Activity center notification
|
|
|
|
notification := &ActivityCenterNotification{
|
|
|
|
ID: types.FromHex(requestToJoin.ID.String()),
|
|
|
|
Type: ActivityCenterNotificationTypeCommunityRequest,
|
|
|
|
Timestamp: m.getTimesource().GetCurrentTime(),
|
|
|
|
CommunityID: community.IDString(),
|
|
|
|
MembershipStatus: ActivityCenterMembershipStatusPending,
|
2023-01-28 09:52:53 +00:00
|
|
|
Read: true,
|
2023-04-21 09:18:47 +00:00
|
|
|
Deleted: false,
|
2023-10-22 09:41:20 +00:00
|
|
|
UpdatedAt: m.GetCurrentTimeInMillis(),
|
2022-10-25 22:06:20 +00:00
|
|
|
}
|
|
|
|
|
2023-10-22 09:41:20 +00:00
|
|
|
err = m.addActivityCenterNotification(response, notification, nil)
|
2023-01-28 09:52:53 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to save notification", zap.Error(err))
|
|
|
|
return nil, err
|
2022-10-25 22:06:20 +00:00
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-07-10 14:11:37 +00:00
|
|
|
func (m *Messenger) EditSharedAddressesForCommunity(request *requests.EditSharedAddresses) (*MessengerResponse, error) {
|
|
|
|
logger := m.logger.Named("EditSharedAddressesForCommunity")
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
logger.Debug("request failed to validate", zap.Error(err), zap.Any("request", request))
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.GetByID(request.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
if !community.HasMember(m.IdentityPublicKey()) {
|
|
|
|
return nil, errors.New("not part of the community")
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
2023-10-20 06:21:41 +00:00
|
|
|
|
|
|
|
revealedAddresses := make([]gethcommon.Address, 0)
|
|
|
|
for _, addr := range request.AddressesToReveal {
|
|
|
|
revealedAddresses = append(revealedAddresses, gethcommon.HexToAddress(addr))
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
checkPermissionResponse, err := m.communitiesManager.CheckPermissionToJoin(community.ID(), revealedAddresses)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
member := community.GetMember(m.IdentityPublicKey())
|
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
requestToEditRevealedAccountsProto := &protobuf.CommunityEditSharedAddresses{
|
2023-07-10 14:11:37 +00:00
|
|
|
Clock: member.LastUpdateClock + 1,
|
|
|
|
CommunityId: community.ID(),
|
|
|
|
RevealedAccounts: make([]*protobuf.RevealedAccount, 0),
|
|
|
|
}
|
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
for i := range request.AddressesToReveal {
|
|
|
|
revealedAcc := &protobuf.RevealedAccount{
|
|
|
|
Address: request.AddressesToReveal[i],
|
|
|
|
IsAirdropAddress: types.HexToAddress(request.AddressesToReveal[i]) == types.HexToAddress(request.AirdropAddress),
|
|
|
|
Signature: request.Signatures[i],
|
|
|
|
}
|
2023-07-10 14:11:37 +00:00
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
for _, accountAndChainIDs := range checkPermissionResponse.ValidCombinations {
|
|
|
|
if accountAndChainIDs.Address == gethcommon.HexToAddress(request.AddressesToReveal[i]) {
|
|
|
|
revealedAcc.ChainIds = accountAndChainIDs.ChainIDs
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
2023-07-10 14:11:37 +00:00
|
|
|
|
2023-10-20 06:21:41 +00:00
|
|
|
requestToEditRevealedAccountsProto.RevealedAccounts = append(requestToEditRevealedAccountsProto.RevealedAccounts, revealedAcc)
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
2023-08-29 18:56:30 +00:00
|
|
|
requestID := communities.CalculateRequestID(common.PubkeyToHex(&m.identity.PublicKey), request.CommunityID)
|
|
|
|
err = m.communitiesManager.RemoveRequestToJoinRevealedAddresses(requestID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
err = m.communitiesManager.SaveRequestToJoinRevealedAddresses(requestID, requestToEditRevealedAccountsProto.RevealedAccounts)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-07-10 14:11:37 +00:00
|
|
|
payload, err := proto.Marshal(requestToEditRevealedAccountsProto)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := common.RawMessage{
|
2023-11-08 18:05:33 +00:00
|
|
|
Payload: payload,
|
|
|
|
CommunityID: community.ID(),
|
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_EDIT_SHARED_ADDRESSES,
|
|
|
|
PubsubTopic: community.PubsubTopic(), // TODO: confirm if it should be sent in community pubsub topic
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
_, err = m.sender.SendCommunityMessage(context.Background(), rawMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-09-20 08:37:46 +00:00
|
|
|
// send edit message also to TokenMasters and Owners
|
|
|
|
skipMembers := make(map[string]struct{})
|
|
|
|
skipMembers[common.PubkeyToHex(&m.identity.PublicKey)] = struct{}{}
|
|
|
|
|
|
|
|
privilegedMembers := community.GetFilteredPrivilegedMembers(skipMembers)
|
|
|
|
for role, members := range privilegedMembers {
|
|
|
|
if len(members) == 0 || (role != protobuf.CommunityMember_ROLE_TOKEN_MASTER && role != protobuf.CommunityMember_ROLE_OWNER) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
for _, member := range members {
|
|
|
|
_, err := m.sender.SendPrivate(context.Background(), member, &rawMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-07-10 14:11:37 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-08-09 15:08:20 +00:00
|
|
|
func (m *Messenger) GetRevealedAccounts(communityID types.HexBytes, memberPk string) ([]*protobuf.RevealedAccount, error) {
|
|
|
|
return m.communitiesManager.GetRevealedAddresses(communityID, memberPk)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) GetRevealedAccountsForAllMembers(communityID types.HexBytes) (map[string][]*protobuf.RevealedAccount, error) {
|
|
|
|
community, err := m.communitiesManager.GetByID(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
membersRevealedAccounts := map[string][]*protobuf.RevealedAccount{}
|
|
|
|
for _, memberPubKey := range community.GetMemberPubkeys() {
|
|
|
|
memberPubKeyStr := common.PubkeyToHex(memberPubKey)
|
|
|
|
accounts, err := m.communitiesManager.GetRevealedAddresses(communityID, memberPubKeyStr)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
membersRevealedAccounts[memberPubKeyStr] = accounts
|
|
|
|
}
|
|
|
|
return membersRevealedAccounts, nil
|
|
|
|
}
|
|
|
|
|
2021-05-23 13:34:17 +00:00
|
|
|
func (m *Messenger) CreateCommunityCategory(request *requests.CreateCommunityCategory) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var response MessengerResponse
|
2023-07-18 15:06:12 +00:00
|
|
|
_, changes, err := m.communitiesManager.CreateCategory(request, true)
|
2021-05-23 13:34:17 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-07-18 15:06:12 +00:00
|
|
|
response.AddCommunity(changes.Community)
|
2021-05-23 13:34:17 +00:00
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return &response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) EditCommunityCategory(request *requests.EditCommunityCategory) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var response MessengerResponse
|
|
|
|
community, changes, err := m.communitiesManager.EditCategory(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return &response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) ReorderCommunityCategories(request *requests.ReorderCommunityCategories) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var response MessengerResponse
|
|
|
|
community, changes, err := m.communitiesManager.ReorderCategories(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return &response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) ReorderCommunityChat(request *requests.ReorderCommunityChat) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var response MessengerResponse
|
|
|
|
community, changes, err := m.communitiesManager.ReorderChat(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return &response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) DeleteCommunityCategory(request *requests.DeleteCommunityCategory) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var response MessengerResponse
|
|
|
|
community, changes, err := m.communitiesManager.DeleteCategory(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return &response, nil
|
|
|
|
}
|
|
|
|
|
2023-10-22 09:41:20 +00:00
|
|
|
func (m *Messenger) CancelRequestToJoinCommunity(ctx context.Context, request *requests.CancelRequestToJoinCommunity) (*MessengerResponse, error) {
|
2022-10-28 08:41:20 +00:00
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
requestToJoin, community, err := m.communitiesManager.CancelRequestToJoin(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
displayName, err := m.settings.DisplayName()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
cancelRequestToJoinProto := &protobuf.CommunityCancelRequestToJoin{
|
|
|
|
Clock: community.Clock(),
|
|
|
|
EnsName: requestToJoin.ENSName,
|
|
|
|
DisplayName: displayName,
|
|
|
|
CommunityId: community.ID(),
|
|
|
|
}
|
|
|
|
|
|
|
|
payload, err := proto.Marshal(cancelRequestToJoinProto)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := common.RawMessage{
|
2023-11-08 18:05:33 +00:00
|
|
|
Payload: payload,
|
|
|
|
CommunityID: community.ID(),
|
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_CANCEL_REQUEST_TO_JOIN,
|
|
|
|
PubsubTopic: common.DefaultNonProtectedPubsubTopic(community.Shard()),
|
2022-10-28 08:41:20 +00:00
|
|
|
}
|
|
|
|
_, err = m.sender.SendCommunityMessage(context.Background(), rawMessage)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-25 13:16:49 +00:00
|
|
|
if !community.AutoAccept() {
|
2023-08-18 19:52:13 +00:00
|
|
|
// send cancelation to community admins also
|
|
|
|
rawMessage.Payload = payload
|
|
|
|
|
|
|
|
privilegedMembers := community.GetPrivilegedMembers()
|
|
|
|
for _, privilegedMember := range privilegedMembers {
|
|
|
|
_, err := m.sender.SendPrivate(context.Background(), privilegedMember, &rawMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-28 08:41:20 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
2023-02-07 18:22:49 +00:00
|
|
|
response.RequestsToJoinCommunity = append(response.RequestsToJoinCommunity, requestToJoin)
|
|
|
|
|
2023-04-21 09:18:47 +00:00
|
|
|
// delete activity center notification
|
|
|
|
notification, err := m.persistence.GetActivityCenterNotificationByID(requestToJoin.ID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if notification != nil {
|
2023-11-02 10:38:45 +00:00
|
|
|
notification.IncrementUpdatedAt(m.getTimesource())
|
2023-06-10 02:00:17 +00:00
|
|
|
err = m.persistence.DeleteActivityCenterNotificationByID(types.FromHex(requestToJoin.ID.String()), notification.UpdatedAt)
|
2023-04-21 09:18:47 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to delete notification from Activity Center", zap.Error(err))
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// set notification as deleted, so that the client will remove the activity center notification from UI
|
|
|
|
notification.Deleted = true
|
2023-11-02 10:38:45 +00:00
|
|
|
err = m.syncActivityCenterDeletedByIDs(ctx, []types.HexBytes{notification.ID}, notification.UpdatedAt)
|
2023-06-10 02:00:17 +00:00
|
|
|
if err != nil {
|
2023-10-22 09:41:20 +00:00
|
|
|
m.logger.Error("CancelRequestToJoinCommunity, failed to sync activity center notification as deleted", zap.Error(err))
|
2023-06-10 02:00:17 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
2023-04-21 09:18:47 +00:00
|
|
|
response.AddActivityCenterNotification(notification)
|
|
|
|
}
|
|
|
|
|
2022-10-28 08:41:20 +00:00
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
func (m *Messenger) acceptRequestToJoinCommunity(requestToJoin *communities.RequestToJoin) (*MessengerResponse, error) {
|
2023-08-15 15:27:01 +00:00
|
|
|
community, err := m.communitiesManager.AcceptRequestToJoin(requestToJoin)
|
2023-03-21 15:06:20 +00:00
|
|
|
if err != nil {
|
2022-07-01 13:54:02 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
2021-01-11 10:32:51 +00:00
|
|
|
|
2023-08-18 19:52:13 +00:00
|
|
|
if community.IsControlNode() {
|
|
|
|
// If we are the control node, we send the response to the user
|
|
|
|
pk, err := common.HexToPubkey(requestToJoin.PublicKey)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2022-07-01 13:54:02 +00:00
|
|
|
|
2023-08-18 19:52:13 +00:00
|
|
|
grant, err := community.BuildGrant(pk, "")
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2022-07-01 13:54:02 +00:00
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
var key *ecdsa.PrivateKey
|
|
|
|
if m.transport.WakuVersion() == 2 {
|
|
|
|
key, err = m.transport.RetrievePubsubTopicKey(community.PubsubTopic())
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-08-15 15:27:01 +00:00
|
|
|
requestToJoinResponseProto := &protobuf.CommunityRequestToJoinResponse{
|
2023-10-12 19:21:49 +00:00
|
|
|
Clock: community.Clock(),
|
|
|
|
Accepted: true,
|
|
|
|
CommunityId: community.ID(),
|
|
|
|
Community: community.Description(),
|
|
|
|
Grant: grant,
|
|
|
|
ProtectedTopicPrivateKey: crypto.FromECDSA(key),
|
|
|
|
Shard: community.Shard().Protobuffer(),
|
2023-08-15 15:27:01 +00:00
|
|
|
}
|
2022-07-01 13:54:02 +00:00
|
|
|
|
2023-08-15 15:27:01 +00:00
|
|
|
if m.torrentClientReady() && m.communitiesManager.TorrentFileExists(community.IDString()) {
|
|
|
|
magnetlink, err := m.communitiesManager.GetHistoryArchiveMagnetlink(community.ID())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Warn("couldn't get magnet link for community", zap.Error(err))
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
requestToJoinResponseProto.MagnetUri = magnetlink
|
|
|
|
}
|
|
|
|
|
|
|
|
payload, err := proto.Marshal(requestToJoinResponseProto)
|
2022-12-09 14:26:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-08-15 15:27:01 +00:00
|
|
|
rawMessage := &common.RawMessage{
|
2023-11-08 18:05:33 +00:00
|
|
|
Payload: payload,
|
|
|
|
Sender: community.PrivateKey(),
|
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_REQUEST_TO_JOIN_RESPONSE,
|
|
|
|
PubsubTopic: common.DefaultNonProtectedPubsubTopic(community.Shard()),
|
2023-08-15 15:27:01 +00:00
|
|
|
}
|
2022-07-01 13:54:02 +00:00
|
|
|
|
2023-08-15 15:27:01 +00:00
|
|
|
_, err = m.sender.SendPrivate(context.Background(), pk, rawMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
2023-08-18 19:52:13 +00:00
|
|
|
response.AddRequestToJoinCommunity(requestToJoin)
|
2022-10-25 22:06:20 +00:00
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
// Update existing notification
|
|
|
|
notification, err := m.persistence.GetActivityCenterNotificationByID(requestToJoin.ID)
|
2022-10-25 22:06:20 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if notification != nil {
|
|
|
|
notification.MembershipStatus = ActivityCenterMembershipStatusAccepted
|
refactor: EventSenders forward RequestToJoin decision to control node
This is a bigger change in how community membership requests are handled
among admins, token masters, owners, and control nodes.
Prior to this commit, all privileged users, also known as
`EventSenders`, were able to accept and reject community membership
requests and those changes would be applied by all users.
This commit changes this behaviour such that:
1. EventSenders can make a decision (accept, reject), but merely forward
their decision to the control node, which ultimately has to confirm
it
2. EventSenders are no longer removing or adding members to and from
communities
3. When an eventsender signaled a decision, the membership request will
enter a pending state (acceptedPending or rejectedPending)
4. Once a decision was made by one eventsender, no other eventsender can
override that decision
This implementation is covered with a bunch of tests:
- Ensure that decision made by event sender is shared with other event
senders
- `testAcceptMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- `testRejectMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- Ensure memebrship request stays pending, until control node has
confirmed decision by event senders
- `testAcceptMemberRequestToJoinNotConfirmedByControlNode()`
- `testRejectMemberRequestToJoinNotConfirmedByControlNode()`
- Ensure that decision made by event sender cannot be overriden by other
event senders
- `testEventSenderCannotOverrideRequestToJoinState()`
These test cases live in three test suites for different event sender
types respectively
- `OwnerWithoutCommunityKeyCommunityEventsSuite`
- `TokenMasterCommunityEventsSuite`
- `AdminCommunityEventsSuite`
In addition to the changes mentioned above, there's also a smaller
changes that ensures membership requests to *not* attached revealed wallet
addresses when the requests are sent to event senders (in addition to
control nodes).
Requests send to a control node will still include revealed addresses as
the control node needs them to verify token permissions.
This commit does not yet handle the case of event senders attempting to
kick and ban members.
Similar to accepting and rejecting membership requests, kicking and
banning need a new pending state. However, we don't track such state in
local databases yet so those two cases will be handled in future commit
to not have this commit grow larger.
2023-08-02 12:04:47 +00:00
|
|
|
if community.HasPermissionToSendCommunityEvents() {
|
|
|
|
notification.MembershipStatus = ActivityCenterMembershipStatusAcceptedPending
|
|
|
|
}
|
2023-01-28 09:52:53 +00:00
|
|
|
notification.Read = true
|
|
|
|
notification.Accepted = true
|
2023-11-02 10:38:45 +00:00
|
|
|
notification.IncrementUpdatedAt(m.getTimesource())
|
2023-01-28 09:52:53 +00:00
|
|
|
|
2023-10-26 04:17:18 +00:00
|
|
|
err = m.addActivityCenterNotification(response, notification, m.syncActivityCenterCommunityRequestDecisionAdapter)
|
2023-01-28 09:52:53 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to save notification", zap.Error(err))
|
|
|
|
return nil, err
|
2022-10-25 22:06:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
func (m *Messenger) AcceptRequestToJoinCommunity(request *requests.AcceptRequestToJoinCommunity) (*MessengerResponse, error) {
|
2021-01-11 10:32:51 +00:00
|
|
|
if err := request.Validate(); err != nil {
|
2022-10-25 22:06:20 +00:00
|
|
|
return nil, err
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
requestToJoin, err := m.communitiesManager.GetRequestToJoin(request.ID)
|
2022-10-25 22:06:20 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
return m.acceptRequestToJoinCommunity(requestToJoin)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) declineRequestToJoinCommunity(requestToJoin *communities.RequestToJoin) (*MessengerResponse, error) {
|
|
|
|
community, err := m.communitiesManager.DeclineRequestToJoin(requestToJoin)
|
2023-08-15 15:27:01 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if community.IsControlNode() {
|
|
|
|
// Notify privileged members that request to join was rejected
|
|
|
|
// Send request to join without revealed addresses
|
2023-10-12 21:42:03 +00:00
|
|
|
requestToJoin.RevealedAccounts = make([]*protobuf.RevealedAccount, 0)
|
2023-08-15 15:27:01 +00:00
|
|
|
declinedRequestsToJoin := make(map[string]*protobuf.CommunityRequestToJoin)
|
2023-10-12 21:42:03 +00:00
|
|
|
declinedRequestsToJoin[requestToJoin.PublicKey] = requestToJoin.ToCommunityRequestToJoinProtobuf()
|
2023-08-15 15:27:01 +00:00
|
|
|
|
|
|
|
syncMsg := &protobuf.CommunityPrivilegedUserSyncMessage{
|
|
|
|
Type: protobuf.CommunityPrivilegedUserSyncMessage_CONTROL_NODE_REJECT_REQUEST_TO_JOIN,
|
|
|
|
CommunityId: community.ID(),
|
|
|
|
RequestToJoin: declinedRequestsToJoin,
|
|
|
|
}
|
|
|
|
|
|
|
|
payloadSyncMsg, err := proto.Marshal(syncMsg)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawSyncMessage := &common.RawMessage{
|
2023-11-08 18:05:33 +00:00
|
|
|
Payload: payloadSyncMsg,
|
|
|
|
Sender: community.PrivateKey(),
|
|
|
|
SkipEncryptionLayer: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_PRIVILEGED_USER_SYNC_MESSAGE,
|
2023-08-15 15:27:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
privilegedMembers := community.GetPrivilegedMembers()
|
|
|
|
for _, privilegedMember := range privilegedMembers {
|
|
|
|
if privilegedMember.Equal(&m.identity.PublicKey) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
_, err := m.sender.SendPrivate(context.Background(), privilegedMember, rawSyncMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
response.AddRequestToJoinCommunity(requestToJoin)
|
|
|
|
|
|
|
|
// Update existing notification
|
|
|
|
notification, err := m.persistence.GetActivityCenterNotificationByID(requestToJoin.ID)
|
2022-10-25 22:06:20 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if notification != nil {
|
|
|
|
notification.MembershipStatus = ActivityCenterMembershipStatusDeclined
|
refactor: EventSenders forward RequestToJoin decision to control node
This is a bigger change in how community membership requests are handled
among admins, token masters, owners, and control nodes.
Prior to this commit, all privileged users, also known as
`EventSenders`, were able to accept and reject community membership
requests and those changes would be applied by all users.
This commit changes this behaviour such that:
1. EventSenders can make a decision (accept, reject), but merely forward
their decision to the control node, which ultimately has to confirm
it
2. EventSenders are no longer removing or adding members to and from
communities
3. When an eventsender signaled a decision, the membership request will
enter a pending state (acceptedPending or rejectedPending)
4. Once a decision was made by one eventsender, no other eventsender can
override that decision
This implementation is covered with a bunch of tests:
- Ensure that decision made by event sender is shared with other event
senders
- `testAcceptMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- `testRejectMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- Ensure membership request stays pending, until control node has
confirmed decision by event senders
- `testAcceptMemberRequestToJoinNotConfirmedByControlNode()`
- `testRejectMemberRequestToJoinNotConfirmedByControlNode()`
- Ensure that decision made by event sender cannot be overriden by other
event senders
- `testEventSenderCannotOverrideRequestToJoinState()`
These test cases live in three test suites for different event sender
types respectively
- `OwnerWithoutCommunityKeyCommunityEventsSuite`
- `TokenMasterCommunityEventsSuite`
- `AdminCommunityEventsSuite`
In addition to the changes mentioned above, there's also a smaller
change that ensures membership requests do *not* attach revealed wallet
addresses when the requests are sent to event senders (in addition to
control nodes).
Requests send to a control node will still include revealed addresses as
the control node needs them to verify token permissions.
This commit does not yet handle the case of event senders attempting to
kick and ban members.
Similar to accepting and rejecting membership requests, kicking and
banning need a new pending state. However, we don't track such state in
local databases yet so those two cases will be handled in future commit
to not have this commit grow larger.
2023-08-02 12:04:47 +00:00
|
|
|
if community.HasPermissionToSendCommunityEvents() {
|
|
|
|
notification.MembershipStatus = ActivityCenterMembershipStatusDeclinedPending
|
|
|
|
}
|
2023-01-28 09:52:53 +00:00
|
|
|
notification.Read = true
|
|
|
|
notification.Dismissed = true
|
2023-11-02 10:38:45 +00:00
|
|
|
notification.IncrementUpdatedAt(m.getTimesource())
|
2023-01-28 09:52:53 +00:00
|
|
|
|
2023-10-26 04:17:18 +00:00
|
|
|
err = m.addActivityCenterNotification(response, notification, m.syncActivityCenterCommunityRequestDecisionAdapter)
|
2023-01-28 09:52:53 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to save notification", zap.Error(err))
|
|
|
|
return nil, err
|
2022-10-25 22:06:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return response, nil
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
2023-10-12 21:42:03 +00:00
|
|
|
func (m *Messenger) DeclineRequestToJoinCommunity(request *requests.DeclineRequestToJoinCommunity) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
requestToJoin, err := m.communitiesManager.GetRequestToJoin(request.ID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return m.declineRequestToJoinCommunity(requestToJoin)
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// LeaveCommunity removes the user from the given community: it dismisses all
// activity center notifications tied to the community, runs the local leave
// logic (leaveCommunity), deletes the community settings, stops history
// archive tasks and syncs the change to paired devices. If this device is not
// the community's control node, a CommunityRequestToLeave message is
// additionally broadcast so the control node can remove the member.
func (m *Messenger) LeaveCommunity(communityID types.HexBytes) (*MessengerResponse, error) {
	// Clear any notifications (membership requests etc.) tied to this community.
	_, err := m.persistence.DismissAllActivityCenterNotificationsFromCommunity(communityID.String(), m.GetCurrentTimeInMillis())
	if err != nil {
		return nil, err
	}

	mr, err := m.leaveCommunity(communityID)
	if err != nil {
		return nil, err
	}

	community, ok := mr.communities[communityID.String()]
	if !ok {
		return nil, communities.ErrOrgNotFound
	}

	err = m.communitiesManager.DeleteCommunitySettings(communityID)
	if err != nil {
		return nil, err
	}

	m.communitiesManager.StopHistoryArchiveTasksInterval(communityID)

	// Propagate the membership change to our paired devices.
	err = m.syncCommunity(context.Background(), community, m.dispatchMessage)
	if err != nil {
		return nil, err
	}

	// A control node owns the community; only regular members need to tell
	// the control node that they are leaving.
	if !community.IsControlNode() {
		requestToLeaveProto := &protobuf.CommunityRequestToLeave{
			Clock:       uint64(time.Now().Unix()),
			CommunityId: communityID,
		}

		payload, err := proto.Marshal(requestToLeaveProto)
		if err != nil {
			return nil, err
		}

		// Re-fetch the community to read its current pubsub topic for the
		// outgoing message (the `community` from mr was loaded earlier).
		community, err := m.communitiesManager.GetByID(communityID)
		if err != nil {
			return nil, err
		}

		rawMessage := common.RawMessage{
			Payload:             payload,
			CommunityID:         communityID,
			SkipEncryptionLayer: true,
			MessageType:         protobuf.ApplicationMetadataMessage_COMMUNITY_REQUEST_TO_LEAVE,
			PubsubTopic:         community.PubsubTopic(), // TODO: confirm if it should be sent in the community pubsub topic
		}

		_, err = m.sender.SendCommunityMessage(context.Background(), rawMessage)
		if err != nil {
			return nil, err
		}
	}

	return mr, nil
}
|
|
|
|
|
2023-10-27 19:20:08 +00:00
|
|
|
func (m *Messenger) leaveCommunity(communityID types.HexBytes) (*MessengerResponse, error) {
|
2021-01-11 10:32:51 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.LeaveCommunity(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Make chat inactive
|
|
|
|
for chatID := range community.Chats() {
|
|
|
|
communityChatID := communityID.String() + chatID
|
2022-12-07 19:34:48 +00:00
|
|
|
response.AddRemovedChat(communityChatID)
|
|
|
|
|
|
|
|
_, err = m.deactivateChat(communityChatID, 0, false, false)
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-03-25 15:15:22 +00:00
|
|
|
_, err = m.transport.RemoveFilterByChatID(communityChatID)
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-27 19:20:08 +00:00
|
|
|
_, err = m.transport.RemoveFilterByChatID(communityID.String())
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) kickedOutOfCommunity(communityID types.HexBytes) (*MessengerResponse, error) {
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.KickedOutOfCommunity(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-10-22 09:41:20 +00:00
|
|
|
// CheckAndDeletePendingRequestToJoinCommunity scans all pending requests to
// join and deletes those whose timeout clock has elapsed. For each expired
// request, the matching activity center notification is deleted (admin side)
// or reset to idle (requester side). When sendResponse is true the collected
// changes are returned; otherwise they are pushed to clients via a
// NewMessages signal (only when something actually expired) and nil is
// returned.
func (m *Messenger) CheckAndDeletePendingRequestToJoinCommunity(ctx context.Context, sendResponse bool) (*MessengerResponse, error) {
	sendSignal := false

	pendingRequestsToJoin, err := m.communitiesManager.PendingRequestsToJoin()
	if err != nil {
		m.logger.Error("failed to fetch pending request to join", zap.Error(err))
		return nil, err
	}

	if len(pendingRequestsToJoin) == 0 {
		return nil, nil
	}

	response := &MessengerResponse{}
	timeNow := uint64(time.Now().Unix())

	for _, requestToJoin := range pendingRequestsToJoin {
		// A request expires once its clock plus the configured timeout has
		// passed.
		requestTimeOutClock, err := communities.AddTimeoutToRequestToJoinClock(requestToJoin.Clock)
		if err != nil {
			return nil, err
		}

		if timeNow >= requestTimeOutClock {
			err := m.communitiesManager.DeletePendingRequestToJoin(requestToJoin)
			if err != nil {
				m.logger.Error("failed to delete pending request to join", zap.String("req-id", requestToJoin.ID.String()), zap.Error(err))
				return nil, err
			}

			requestToJoin.Deleted = true
			response.AddRequestToJoinCommunity(requestToJoin)

			notification, err := m.persistence.GetActivityCenterNotificationByID(requestToJoin.ID)
			if err != nil {
				m.logger.Error("failed to fetch pending request to join", zap.Error(err))
				return nil, err
			}

			if notification != nil {
				// Delete activity centre notification for community admin
				if notification.Type == ActivityCenterNotificationTypeCommunityMembershipRequest {
					response2, err := m.MarkActivityCenterNotificationsDeleted(ctx, []types.HexBytes{notification.ID}, m.GetCurrentTimeInMillis(), true)
					if err != nil {
						m.logger.Error("[CheckAndDeletePendingRequestToJoinCommunity] failed to mark notification as deleted", zap.Error(err))
						return nil, err
					}
					response.AddActivityCenterNotifications(response2.ActivityCenterNotifications())
					response.SetActivityCenterState(response2.ActivityCenterState())
				}
				// Update activity centre notification for requester
				if notification.Type == ActivityCenterNotificationTypeCommunityRequest {
					// Reset to idle so the requester can retry later.
					notification.MembershipStatus = ActivityCenterMembershipStatusIdle
					notification.Read = false
					notification.Deleted = false
					notification.IncrementUpdatedAt(m.getTimesource())
					err = m.addActivityCenterNotification(response, notification, m.syncActivityCenterUnreadByIDs)
					if err != nil {
						m.logger.Error("failed to update notification in activity center", zap.Error(err))
						return nil, err
					}
				}
			}

			sendSignal = true
		}
	}

	// Push changes to clients when the caller does not consume the response.
	if sendSignal && !sendResponse {
		signal.SendNewMessages(response)
	}

	if sendResponse {
		return response, nil
	}

	return nil, nil
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
func (m *Messenger) CreateCommunityChat(communityID types.HexBytes, c *protobuf.CommunityChat) (*MessengerResponse, error) {
|
|
|
|
var response MessengerResponse
|
2022-09-02 08:36:07 +00:00
|
|
|
|
|
|
|
c.Identity.FirstMessageTimestamp = FirstMessageTimestampNoMessage
|
2023-07-18 15:06:12 +00:00
|
|
|
changes, err := m.communitiesManager.CreateChat(communityID, c, true, "")
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2023-07-18 15:06:12 +00:00
|
|
|
response.AddCommunity(changes.Community)
|
2021-01-11 10:32:51 +00:00
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
var chats []*Chat
|
2023-05-22 21:38:02 +00:00
|
|
|
var publicFiltersToInit []transport.FiltersToInitialize
|
2021-01-11 10:32:51 +00:00
|
|
|
for chatID, chat := range changes.ChatsAdded {
|
2023-07-18 15:06:12 +00:00
|
|
|
c := CreateCommunityChat(changes.Community.IDString(), chatID, chat, m.getTimesource())
|
2021-01-11 10:32:51 +00:00
|
|
|
chats = append(chats, c)
|
2023-05-22 21:38:02 +00:00
|
|
|
publicFiltersToInit = append(publicFiltersToInit, transport.FiltersToInitialize{ChatID: c.ID, PubsubTopic: changes.Community.PubsubTopic()})
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
response.AddChat(c)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load filters
|
2023-05-22 21:38:02 +00:00
|
|
|
filters, err := m.transport.InitPublicFilters(publicFiltersToInit)
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-05-14 10:55:42 +00:00
|
|
|
_, err = m.scheduleSyncFilters(filters)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-01-11 10:32:51 +00:00
|
|
|
|
2021-09-01 09:03:45 +00:00
|
|
|
err = m.saveChats(chats)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.reregisterForPushNotifications()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return &response, nil
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
2021-06-01 12:13:17 +00:00
|
|
|
func (m *Messenger) EditCommunityChat(communityID types.HexBytes, chatID string, c *protobuf.CommunityChat) (*MessengerResponse, error) {
|
|
|
|
var response MessengerResponse
|
|
|
|
community, changes, err := m.communitiesManager.EditChat(communityID, chatID, c)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
var chats []*Chat
|
2023-05-22 21:38:02 +00:00
|
|
|
var publicFiltersToInit []transport.FiltersToInitialize
|
2021-06-03 10:49:04 +00:00
|
|
|
for chatID, change := range changes.ChatsModified {
|
|
|
|
c := CreateCommunityChat(community.IDString(), chatID, change.ChatModified, m.getTimesource())
|
2021-06-01 12:13:17 +00:00
|
|
|
chats = append(chats, c)
|
2023-05-22 21:38:02 +00:00
|
|
|
publicFiltersToInit = append(publicFiltersToInit, transport.FiltersToInitialize{ChatID: c.ID, PubsubTopic: community.PubsubTopic()})
|
2021-06-01 12:13:17 +00:00
|
|
|
response.AddChat(c)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load filters
|
2023-05-22 21:38:02 +00:00
|
|
|
filters, err := m.transport.InitPublicFilters(publicFiltersToInit)
|
2021-06-01 12:13:17 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
_, err = m.scheduleSyncFilters(filters)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return &response, m.saveChats(chats)
|
|
|
|
}
|
|
|
|
|
2021-07-30 17:05:44 +00:00
|
|
|
func (m *Messenger) DeleteCommunityChat(communityID types.HexBytes, chatID string) (*MessengerResponse, error) {
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
|
|
|
community, _, err := m.communitiesManager.DeleteChat(communityID, chatID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
err = m.deleteChat(chatID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response.AddRemovedChat(chatID)
|
|
|
|
|
|
|
|
_, err = m.transport.RemoveFilterByChatID(chatID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2022-07-19 09:31:52 +00:00
|
|
|
// CreateCommunity creates a brand-new community owned by this device: it
// persists the community and its settings, subscribes to the community shard,
// initializes the community and default public transport filters, optionally
// creates a default "general" channel, syncs the community to paired devices
// and, when enabled, starts history archive tasks.
func (m *Messenger) CreateCommunity(request *requests.CreateCommunity, createDefaultChannel bool) (*MessengerResponse, error) {
	if err := request.Validate(); err != nil {
		return nil, err
	}

	response := &MessengerResponse{}

	community, err := m.communitiesManager.CreateCommunity(request, true)
	if err != nil {
		return nil, err
	}

	communitySettings := communities.CommunitySettings{
		CommunityID:                  community.IDString(),
		HistoryArchiveSupportEnabled: request.HistoryArchiveSupportEnabled,
	}
	err = m.communitiesManager.SaveCommunitySettings(communitySettings)
	if err != nil {
		return nil, err
	}

	if err = m.subscribeToCommunityShard(community.ID(), community.Shard()); err != nil {
		return nil, err
	}

	// Init the community filter so we can receive messages on the community
	_, err = m.transport.InitCommunityFilters([]transport.CommunityFilterToInitialize{{
		Shard:   community.Shard().TransportShard(),
		PrivKey: community.PrivateKey(),
	}})
	if err != nil {
		return nil, err
	}

	// Init the default community filters
	_, err = m.transport.InitPublicFilters(community.DefaultFilters())
	if err != nil {
		return nil, err
	}

	if createDefaultChannel {
		chatResponse, err := m.CreateCommunityChat(community.ID(), &protobuf.CommunityChat{
			Identity: &protobuf.ChatIdentity{
				DisplayName:           "general",
				Description:           "General channel for the community",
				Color:                 community.Description().Identity.Color,
				FirstMessageTimestamp: FirstMessageTimestampNoMessage,
			},
			Permissions: &protobuf.CommunityPermissions{
				Access: protobuf.CommunityPermissions_AUTO_ACCEPT,
			},
		})
		if err != nil {
			return nil, err
		}

		// updating community so it contains the general chat
		community = chatResponse.Communities()[0]
		response.AddChat(chatResponse.Chats()[0])
	}

	response.AddCommunity(community)
	response.AddCommunitySettings(&communitySettings)
	// Share the freshly created community with our paired devices.
	err = m.syncCommunity(context.Background(), community, m.dispatchMessage)
	if err != nil {
		return nil, err
	}

	// Archive tasks run only when the torrent client is configured, enabled
	// and the community opted into history archive support.
	if m.config.torrentConfig != nil && m.config.torrentConfig.Enabled && communitySettings.HistoryArchiveSupportEnabled {
		go m.communitiesManager.StartHistoryArchiveTasksInterval(community, messageArchiveInterval)
	}

	return response, nil
}
|
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
func (m *Messenger) SetCommunityShard(request *requests.SetCommunityShard) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.SetShard(request.CommunityID, request.Shard)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var topicPrivKey *ecdsa.PrivateKey
|
|
|
|
if request.PrivateKey != nil {
|
|
|
|
topicPrivKey, err = crypto.ToECDSA(*request.PrivateKey)
|
|
|
|
} else {
|
|
|
|
topicPrivKey, err = crypto.GenerateKey()
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-30 18:34:21 +00:00
|
|
|
err = m.communitiesManager.UpdatePubsubTopicPrivateKey(community, topicPrivKey)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.UpdateCommunityFilters(community)
|
2023-10-12 19:21:49 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.SendCommunityShardKey(community, community.GetMemberPubkeys())
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
2023-10-30 18:34:21 +00:00
|
|
|
response.AddCommunity(community)
|
2023-10-12 19:21:49 +00:00
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-10-30 18:34:21 +00:00
|
|
|
func (m *Messenger) UpdateCommunityFilters(community *communities.Community) error {
|
2023-10-12 19:21:49 +00:00
|
|
|
publicFiltersToInit := make([]transport.FiltersToInitialize, 0, len(community.DefaultFilters())+len(community.Chats()))
|
|
|
|
|
2023-11-01 12:01:51 +00:00
|
|
|
publicFiltersToInit = append(publicFiltersToInit, community.DefaultFilters()...)
|
2023-10-12 19:21:49 +00:00
|
|
|
|
|
|
|
for chatID := range community.Chats() {
|
|
|
|
communityChatID := community.IDString() + chatID
|
|
|
|
_, err := m.transport.RemoveFilterByChatID(communityChatID)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
publicFiltersToInit = append(publicFiltersToInit, transport.FiltersToInitialize{ChatID: communityChatID, PubsubTopic: community.PubsubTopic()})
|
|
|
|
}
|
|
|
|
|
|
|
|
_, err := m.transport.InitPublicFilters(publicFiltersToInit)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Init the community filter so we can receive messages on the community
|
|
|
|
_, err = m.transport.InitCommunityFilters([]transport.CommunityFilterToInitialize{{
|
|
|
|
Shard: community.Shard().TransportShard(),
|
|
|
|
PrivKey: community.PrivateKey(),
|
|
|
|
}})
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Init the default community filters
|
|
|
|
_, err = m.transport.InitPublicFilters(publicFiltersToInit)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err = m.subscribeToCommunityShard(community.ID(), community.Shard()); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-03-02 16:27:48 +00:00
|
|
|
func (m *Messenger) CreateCommunityTokenPermission(request *requests.CreateCommunityTokenPermission) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, changes, err := m.communitiesManager.CreateCommunityTokenPermission(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-07-26 12:16:50 +00:00
|
|
|
if community.IsControlNode() {
|
2023-07-18 15:06:12 +00:00
|
|
|
// check existing member permission once, then check periodically
|
|
|
|
go func() {
|
2023-09-20 08:37:46 +00:00
|
|
|
if err := m.communitiesManager.ReevaluateCommunityMembersPermissions(community); err != nil {
|
2023-07-18 15:06:12 +00:00
|
|
|
m.logger.Debug("failed to check member permissions", zap.Error(err))
|
|
|
|
}
|
|
|
|
|
2023-06-23 10:49:26 +00:00
|
|
|
m.communitiesManager.ReevaluateMembersPeriodically(community.ID())
|
2023-07-18 15:06:12 +00:00
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2023-06-29 06:58:47 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
2023-03-02 16:27:48 +00:00
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) EditCommunityTokenPermission(request *requests.EditCommunityTokenPermission) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, changes, err := m.communitiesManager.EditCommunityTokenPermission(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-07-18 15:06:12 +00:00
|
|
|
// check if members still fulfill the token criteria of all
|
|
|
|
// BECOME_MEMBER permissions and kick them if necessary
|
|
|
|
//
|
|
|
|
// We do this in a separate routine to not block this function
|
2023-07-26 12:16:50 +00:00
|
|
|
if community.IsControlNode() {
|
2023-07-18 15:06:12 +00:00
|
|
|
go func() {
|
2023-09-20 08:37:46 +00:00
|
|
|
if err := m.communitiesManager.ReevaluateCommunityMembersPermissions(community); err != nil {
|
2023-07-18 15:06:12 +00:00
|
|
|
m.logger.Debug("failed to check member permissions", zap.Error(err))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2023-06-29 06:58:47 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
2023-03-02 16:27:48 +00:00
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) DeleteCommunityTokenPermission(request *requests.DeleteCommunityTokenPermission) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, changes, err := m.communitiesManager.DeleteCommunityTokenPermission(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-07-18 15:06:12 +00:00
|
|
|
// check if members still fulfill the token criteria
|
|
|
|
// We do this in a separate routine to not block this function
|
2023-07-26 12:16:50 +00:00
|
|
|
if community.IsControlNode() {
|
2023-07-18 15:06:12 +00:00
|
|
|
go func() {
|
2023-09-20 08:37:46 +00:00
|
|
|
if err = m.communitiesManager.ReevaluateCommunityMembersPermissions(community); err != nil {
|
2023-07-18 15:06:12 +00:00
|
|
|
m.logger.Debug("failed to check member permissions", zap.Error(err))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2023-06-29 06:58:47 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
2023-03-02 16:27:48 +00:00
|
|
|
response.CommunityChanges = []*communities.CommunityChanges{changes}
|
2023-06-29 06:58:47 +00:00
|
|
|
|
2023-03-02 16:27:48 +00:00
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-08-18 15:29:44 +00:00
|
|
|
func (m *Messenger) ReevaluateCommunityMembersPermissions(request *requests.ReevaluateCommunityMembersPermissions) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-09-20 08:37:46 +00:00
|
|
|
community, err := m.communitiesManager.GetByID(request.CommunityID)
|
2023-08-18 15:29:44 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-09-20 08:37:46 +00:00
|
|
|
if err = m.communitiesManager.ReevaluateCommunityMembersPermissions(community); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-08-18 15:29:44 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2021-05-18 19:32:15 +00:00
|
|
|
func (m *Messenger) EditCommunity(request *requests.EditCommunity) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.EditCommunity(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2022-03-08 15:25:00 +00:00
|
|
|
communitySettings := communities.CommunitySettings{
|
|
|
|
CommunityID: community.IDString(),
|
|
|
|
HistoryArchiveSupportEnabled: request.HistoryArchiveSupportEnabled,
|
|
|
|
}
|
|
|
|
err = m.communitiesManager.UpdateCommunitySettings(communitySettings)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2022-03-21 14:18:36 +00:00
|
|
|
id := community.ID()
|
|
|
|
|
2022-12-09 14:26:12 +00:00
|
|
|
if m.torrentClientReady() {
|
2022-03-21 14:18:36 +00:00
|
|
|
if !communitySettings.HistoryArchiveSupportEnabled {
|
|
|
|
m.communitiesManager.StopHistoryArchiveTasksInterval(id)
|
|
|
|
} else if !m.communitiesManager.IsSeedingHistoryArchiveTorrent(id) {
|
|
|
|
var communities []*communities.Community
|
|
|
|
communities = append(communities, community)
|
|
|
|
go m.InitHistoryArchiveTasks(communities)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-18 19:32:15 +00:00
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
2022-03-08 15:25:00 +00:00
|
|
|
response.AddCommunitySettings(&communitySettings)
|
2022-06-01 07:55:48 +00:00
|
|
|
err = m.SyncCommunitySettings(context.Background(), &communitySettings)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-05-18 19:32:15 +00:00
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-07-21 09:41:26 +00:00
|
|
|
func (m *Messenger) RemovePrivateKey(id types.HexBytes) (*MessengerResponse, error) {
|
|
|
|
community, err := m.communitiesManager.RemovePrivateKey(id)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// ExportCommunity returns the community's private key so the community can be
// backed up or imported on another device. Delegates to the communities
// manager.
func (m *Messenger) ExportCommunity(id types.HexBytes) (*ecdsa.PrivateKey, error) {
	return m.communitiesManager.ExportCommunity(id)
}
|
|
|
|
|
2021-07-22 17:41:49 +00:00
|
|
|
func (m *Messenger) ImportCommunity(ctx context.Context, key *ecdsa.PrivateKey) (*MessengerResponse, error) {
|
2023-09-21 11:16:05 +00:00
|
|
|
clock, _ := m.getLastClockWithRelatedChat()
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.ImportCommunity(key, clock)
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load filters
|
2021-07-22 17:41:49 +00:00
|
|
|
_, err = m.transport.InitPublicFilters(community.DefaultFilters())
|
2021-01-11 10:32:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2022-05-27 09:14:40 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
_, err = m.FetchCommunity(&FetchCommunityRequest{
|
|
|
|
CommunityKey: community.IDString(),
|
|
|
|
Shard: community.Shard(),
|
|
|
|
TryDatabase: false,
|
|
|
|
WaitForResponse: true,
|
|
|
|
})
|
2022-09-21 10:50:56 +00:00
|
|
|
if err != nil {
|
|
|
|
// TODO In the future we should add a mechanism to re-apply next steps (adding owner, joining)
|
|
|
|
// if there is no connection with mailserver. Otherwise changes will be overwritten.
|
|
|
|
// Do not return error to make tests pass.
|
|
|
|
m.logger.Error("Can't request community info from mailserver")
|
|
|
|
}
|
2021-03-25 15:15:22 +00:00
|
|
|
|
|
|
|
// We add ourselves
|
2022-09-21 10:50:56 +00:00
|
|
|
community, err = m.communitiesManager.AddMemberOwnerToCommunity(community.ID(), &m.identity.PublicKey)
|
2021-03-25 15:15:22 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-04-19 12:09:46 +00:00
|
|
|
|
2023-05-29 17:57:05 +00:00
|
|
|
response, err := m.JoinCommunity(ctx, community.ID(), true)
|
2022-03-08 15:25:00 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-09-21 11:16:05 +00:00
|
|
|
// Notify other clients we are the control node now
|
|
|
|
err = m.syncCommunity(context.Background(), community, m.dispatchMessage)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2022-12-09 14:26:12 +00:00
|
|
|
if m.torrentClientReady() {
|
2022-03-21 14:18:36 +00:00
|
|
|
var communities []*communities.Community
|
|
|
|
communities = append(communities, community)
|
|
|
|
go m.InitHistoryArchiveTasks(communities)
|
|
|
|
}
|
2022-03-08 15:25:00 +00:00
|
|
|
return response, nil
|
2021-01-11 10:32:51 +00:00
|
|
|
}
|
|
|
|
|
2022-02-09 21:58:33 +00:00
|
|
|
// GetCommunityByID returns the community with the given ID from the local
// store. Delegates to the communities manager.
func (m *Messenger) GetCommunityByID(communityID types.HexBytes) (*communities.Community, error) {
	return m.communitiesManager.GetByID(communityID)
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
func (m *Messenger) ShareCommunity(request *requests.ShareCommunity) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
2021-06-29 13:56:06 +00:00
|
|
|
community, err := m.communitiesManager.GetByID(request.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
var messages []*common.Message
|
|
|
|
for _, pk := range request.Users {
|
2023-08-18 11:39:59 +00:00
|
|
|
message := common.NewMessage()
|
2021-01-11 10:32:51 +00:00
|
|
|
message.ChatId = pk.String()
|
|
|
|
message.CommunityID = request.CommunityID.String()
|
2021-06-29 13:56:06 +00:00
|
|
|
message.Text = fmt.Sprintf("Community %s has been shared with you", community.Name())
|
2022-08-08 10:49:39 +00:00
|
|
|
if request.InviteMessage != "" {
|
|
|
|
message.Text = request.InviteMessage
|
|
|
|
}
|
2021-01-11 10:32:51 +00:00
|
|
|
messages = append(messages, message)
|
|
|
|
r, err := m.CreateOneToOneChat(&requests.CreateOneToOneChat{ID: pk})
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := response.Merge(r); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sendMessagesResponse, err := m.SendChatMessages(context.Background(), messages)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := response.Merge(sendMessagesResponse); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2022-10-28 08:41:20 +00:00
|
|
|
// MyCanceledRequestsToJoin returns the current identity's canceled requests
// to join communities.
func (m *Messenger) MyCanceledRequestsToJoin() ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.CanceledRequestsToJoinForUser(&m.identity.PublicKey)
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// MyPendingRequestsToJoin returns the current identity's pending requests to
// join communities.
func (m *Messenger) MyPendingRequestsToJoin() ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.PendingRequestsToJoinForUser(&m.identity.PublicKey)
}
|
|
|
|
|
2023-10-31 14:20:40 +00:00
|
|
|
// MyAwaitingAddressesRequestsToJoin returns the current identity's requests
// to join that are awaiting revealed addresses.
func (m *Messenger) MyAwaitingAddressesRequestsToJoin() ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.AwaitingAddressesRequestsToJoinForUser(&m.identity.PublicKey)
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
// PendingRequestsToJoinForCommunity returns the pending requests to join the
// given community.
func (m *Messenger) PendingRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.PendingRequestsToJoinForCommunity(id)
}
|
|
|
|
|
2023-08-18 19:52:13 +00:00
|
|
|
func (m *Messenger) AllPendingRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
|
2023-10-31 14:20:40 +00:00
|
|
|
// TODO: optimize and extract via one query
|
2023-08-18 19:52:13 +00:00
|
|
|
pendingRequests, err := m.communitiesManager.PendingRequestsToJoinForCommunity(id)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
acceptedPendingRequests, err := m.communitiesManager.AcceptedPendingRequestsToJoinForCommunity(id)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
declinedPendingRequests, err := m.communitiesManager.DeclinedPendingRequestsToJoinForCommunity(id)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-10-31 14:20:40 +00:00
|
|
|
ownershipChangedRequests, err := m.communitiesManager.RequestsToJoinForCommunityAwaitingAddresses(id)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2023-08-18 19:52:13 +00:00
|
|
|
pendingRequests = append(pendingRequests, acceptedPendingRequests...)
|
|
|
|
pendingRequests = append(pendingRequests, declinedPendingRequests...)
|
2023-10-31 14:20:40 +00:00
|
|
|
pendingRequests = append(pendingRequests, ownershipChangedRequests...)
|
2023-08-18 19:52:13 +00:00
|
|
|
|
|
|
|
return pendingRequests, nil
|
|
|
|
}
|
|
|
|
|
2022-08-04 07:44:35 +00:00
|
|
|
// DeclinedRequestsToJoinForCommunity returns the declined requests to join
// the given community.
func (m *Messenger) DeclinedRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.DeclinedRequestsToJoinForCommunity(id)
}
|
|
|
|
|
2022-10-28 08:41:20 +00:00
|
|
|
// CanceledRequestsToJoinForCommunity returns the canceled requests to join
// the given community.
func (m *Messenger) CanceledRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.CanceledRequestsToJoinForCommunity(id)
}
|
|
|
|
|
2023-06-14 14:15:46 +00:00
|
|
|
// AcceptedRequestsToJoinForCommunity returns the accepted requests to join
// the given community.
func (m *Messenger) AcceptedRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.AcceptedRequestsToJoinForCommunity(id)
}
|
|
|
|
|
refactor: EventSenders forward RequestToJoin decision to control node
This is a bigger change in how community membership requests are handled
among admins, token masters, owners, and control nodes.
Prior to this commit, all privileged users, also known as
`EventSenders`, were able to accept and reject community membership
requests and those changes would be applied by all users.
This commit changes this behaviour such that:
1. EventSenders can make a decision (accept, reject), but merely forward
their decision to the control node, which ultimately has to confirm
it
2. EventSenders are no longer removing or adding members to and from
communities
3. When an eventsender signaled a decision, the membership request will
enter a pending state (acceptedPending or rejectedPending)
4. Once a decision was made by one eventsender, no other eventsender can
override that decision
This implementation is covered with a bunch of tests:
- Ensure that decision made by event sender is shared with other event
senders
- `testAcceptMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- `testRejectMemberRequestToJoinResponseSharedWithOtherEventSenders()`
- Ensure memebrship request stays pending, until control node has
confirmed decision by event senders
- `testAcceptMemberRequestToJoinNotConfirmedByControlNode()`
- `testRejectMemberRequestToJoinNotConfirmedByControlNode()`
- Ensure that decision made by event sender cannot be overriden by other
event senders
- `testEventSenderCannotOverrideRequestToJoinState()`
These test cases live in three test suites for different event sender
types respectively
- `OwnerWithoutCommunityKeyCommunityEventsSuite`
- `TokenMasterCommunityEventsSuite`
- `AdminCommunityEventsSuite`
In addition to the changes mentioned above, there's also a smaller
changes that ensures membership requests to *not* attached revealed wallet
addresses when the requests are sent to event senders (in addition to
control nodes).
Requests send to a control node will still include revealed addresses as
the control node needs them to verify token permissions.
This commit does not yet handle the case of event senders attempting to
kick and ban members.
Similar to accepting and rejecting membership requests, kicking and
banning need a new pending state. However, we don't track such state in
local databases yet so those two cases will be handled in future commit
to not have this commit grow larger.
2023-08-02 12:04:47 +00:00
|
|
|
// AcceptedPendingRequestsToJoinForCommunity returns requests to join the
// given community that an event sender accepted but a control node has not
// yet confirmed.
func (m *Messenger) AcceptedPendingRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.AcceptedPendingRequestsToJoinForCommunity(id)
}
|
|
|
|
|
|
|
|
// DeclinedPendingRequestsToJoinForCommunity returns requests to join the
// given community that an event sender declined but a control node has not
// yet confirmed.
func (m *Messenger) DeclinedPendingRequestsToJoinForCommunity(id types.HexBytes) ([]*communities.RequestToJoin, error) {
	return m.communitiesManager.DeclinedPendingRequestsToJoinForCommunity(id)
}
|
|
|
|
|
2021-01-11 10:32:51 +00:00
|
|
|
func (m *Messenger) RemoveUserFromCommunity(id types.HexBytes, pkString string) (*MessengerResponse, error) {
|
|
|
|
publicKey, err := common.HexToPubkey(pkString)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.RemoveUserFromCommunity(id, publicKey)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
2021-03-19 09:15:45 +00:00
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
func (m *Messenger) SendCommunityShardKey(community *communities.Community, pubkeys []*ecdsa.PublicKey) error {
|
|
|
|
if m.transport.WakuVersion() != 2 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if !community.IsControlNode() {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-10-30 18:34:21 +00:00
|
|
|
key := community.PubsubTopicPrivateKey()
|
2023-10-12 19:21:49 +00:00
|
|
|
if key == nil {
|
|
|
|
return nil // No community shard key available
|
|
|
|
}
|
|
|
|
|
|
|
|
communityShardKey := &protobuf.CommunityShardKey{
|
|
|
|
Clock: m.getTimesource().GetCurrentTime(),
|
|
|
|
CommunityId: community.ID(),
|
|
|
|
PrivateKey: crypto.FromECDSA(key),
|
|
|
|
Shard: community.Shard().Protobuffer(),
|
|
|
|
}
|
|
|
|
|
|
|
|
encodedMessage, err := proto.Marshal(communityShardKey)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
rawMessage := common.RawMessage{
|
|
|
|
Recipients: pubkeys,
|
|
|
|
ResendAutomatically: true,
|
|
|
|
MessageType: protobuf.ApplicationMetadataMessage_COMMUNITY_SHARD_KEY,
|
|
|
|
Payload: encodedMessage,
|
|
|
|
}
|
|
|
|
|
|
|
|
_, err = m.sender.SendPubsubTopicKey(context.Background(), &rawMessage)
|
|
|
|
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2022-06-23 07:12:15 +00:00
|
|
|
func (m *Messenger) UnbanUserFromCommunity(request *requests.UnbanUserFromCommunity) (*MessengerResponse, error) {
|
|
|
|
community, err := m.communitiesManager.UnbanUserFromCommunity(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2023-10-22 09:41:20 +00:00
|
|
|
func (m *Messenger) BanUserFromCommunity(ctx context.Context, request *requests.BanUserFromCommunity) (*MessengerResponse, error) {
|
2021-03-19 09:15:45 +00:00
|
|
|
community, err := m.communitiesManager.BanUserFromCommunity(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
2023-10-22 09:41:20 +00:00
|
|
|
response, err = m.DeclineAllPendingGroupInvitesFromUser(ctx, response, request.User.String())
|
2021-11-25 15:21:42 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2021-03-19 09:15:45 +00:00
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
2021-04-19 12:09:46 +00:00
|
|
|
|
2022-12-02 11:34:02 +00:00
|
|
|
func (m *Messenger) AddRoleToMember(request *requests.AddRoleToMember) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
community, err := m.communitiesManager.AddRoleToMember(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) RemoveRoleFromMember(request *requests.RemoveRoleFromMember) (*MessengerResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
community, err := m.communitiesManager.RemoveRoleFromMember(request)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
response.AddCommunity(community)
|
|
|
|
return response, nil
|
|
|
|
}
|
|
|
|
|
2022-05-04 00:10:00 +00:00
|
|
|
func (m *Messenger) findCommunityInfoFromDB(communityID string) (*communities.Community, error) {
|
|
|
|
id, err := hexutil.Decode(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var community *communities.Community
|
|
|
|
community, err = m.GetCommunityByID(id)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return community, nil
|
|
|
|
}
|
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
// FetchCommunity installs filter for community and requests its details
|
|
|
|
// from mailserver.
|
|
|
|
//
|
|
|
|
// If `request.TryDatabase` is true, it first looks for community in database,
|
|
|
|
// and requests from mailserver only if it wasn't found locally.
|
|
|
|
// If `request.WaitForResponse` is true, it waits until it has the community before returning it.
|
|
|
|
// If `request.WaitForResponse` is false, it installs filter for community and requests its details
|
|
|
|
// from mailserver. When response received it will be passed through signals handler.
|
|
|
|
func (m *Messenger) FetchCommunity(request *FetchCommunityRequest) (*communities.Community, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, fmt.Errorf("invalid request: %w", err)
|
2023-07-07 18:26:38 +00:00
|
|
|
}
|
2023-11-03 10:30:24 +00:00
|
|
|
communityID := request.getCommunityID()
|
2023-07-07 18:26:38 +00:00
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
if request.TryDatabase {
|
2022-09-23 10:15:06 +00:00
|
|
|
community, err := m.findCommunityInfoFromDB(communityID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if community != nil {
|
2023-11-03 10:30:24 +00:00
|
|
|
if !request.WaitForResponse {
|
|
|
|
m.config.messengerSignalsHandler.CommunityInfoFound(community)
|
|
|
|
}
|
2022-09-23 10:15:06 +00:00
|
|
|
return community, nil
|
|
|
|
}
|
2022-05-04 00:10:00 +00:00
|
|
|
}
|
2022-09-21 10:50:56 +00:00
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
return m.requestCommunityInfoFromMailserver(communityID, request.Shard, request.WaitForResponse)
|
2021-08-05 13:27:47 +00:00
|
|
|
}
|
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
// requestCommunityInfoFromMailserver installs filter for community and requests its details
|
2021-08-05 13:27:47 +00:00
|
|
|
// from mailserver. When response received it will be passed through signals handler
|
2023-10-12 19:21:49 +00:00
|
|
|
func (m *Messenger) requestCommunityInfoFromMailserver(communityID string, shard *common.Shard, waitForResponse bool) (*communities.Community, error) {
|
2023-10-25 10:13:35 +00:00
|
|
|
|
|
|
|
m.logger.Info("requesting community info", zap.String("communityID", communityID), zap.Any("shard", shard))
|
|
|
|
|
2022-06-02 12:17:52 +00:00
|
|
|
m.requestedCommunitiesLock.Lock()
|
|
|
|
defer m.requestedCommunitiesLock.Unlock()
|
|
|
|
|
2021-04-19 12:09:46 +00:00
|
|
|
if _, ok := m.requestedCommunities[communityID]; ok {
|
2021-08-05 13:27:47 +00:00
|
|
|
return nil, nil
|
2021-04-19 12:09:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
//If filter wasn't installed we create it and remember for deinstalling after
|
|
|
|
//response received
|
|
|
|
filter := m.transport.FilterByChatID(communityID)
|
|
|
|
if filter == nil {
|
2023-05-22 21:38:02 +00:00
|
|
|
filters, err := m.transport.InitPublicFilters([]transport.FiltersToInitialize{{
|
|
|
|
ChatID: communityID,
|
2023-10-12 19:21:49 +00:00
|
|
|
PubsubTopic: transport.GetPubsubTopic(shard.TransportShard()),
|
2023-05-22 21:38:02 +00:00
|
|
|
}})
|
2021-04-19 12:09:46 +00:00
|
|
|
if err != nil {
|
2021-08-05 13:27:47 +00:00
|
|
|
return nil, fmt.Errorf("Can't install filter for community: %v", err)
|
2021-04-19 12:09:46 +00:00
|
|
|
}
|
|
|
|
if len(filters) != 1 {
|
2021-08-05 13:27:47 +00:00
|
|
|
return nil, fmt.Errorf("Unexpected amount of filters created")
|
2021-04-19 12:09:46 +00:00
|
|
|
}
|
|
|
|
filter = filters[0]
|
|
|
|
m.requestedCommunities[communityID] = filter
|
|
|
|
} else {
|
|
|
|
//we don't remember filter id associated with community because it was already installed
|
|
|
|
m.requestedCommunities[communityID] = nil
|
|
|
|
}
|
|
|
|
|
2023-10-06 16:30:22 +00:00
|
|
|
defer m.forgetCommunityRequest(communityID)
|
|
|
|
|
2022-03-23 18:57:57 +00:00
|
|
|
to := uint32(m.transport.GetCurrentTime() / 1000)
|
|
|
|
from := to - oneMonthInSeconds
|
2021-04-19 12:09:46 +00:00
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
_, err := m.performMailserverRequest(func() (*MessengerResponse, error) {
|
2023-10-25 10:13:35 +00:00
|
|
|
batch := MailserverBatch{
|
|
|
|
From: from,
|
|
|
|
To: to,
|
|
|
|
Topics: []types.TopicType{filter.ContentTopic},
|
|
|
|
PubsubTopic: filter.PubsubTopic,
|
|
|
|
}
|
|
|
|
m.logger.Info("requesting historic", zap.Any("batch", batch))
|
2022-03-23 18:57:57 +00:00
|
|
|
err := m.processMailserverBatch(batch)
|
2022-01-31 10:33:56 +00:00
|
|
|
return nil, err
|
|
|
|
})
|
2023-10-25 10:13:35 +00:00
|
|
|
|
2021-08-05 13:27:47 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2021-04-19 12:09:46 +00:00
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
m.logger.Info("mailserver request performed",
|
|
|
|
zap.String("communityID", communityID),
|
|
|
|
zap.Bool("waitForResponse", waitForResponse),
|
|
|
|
)
|
|
|
|
|
2022-02-14 21:49:14 +00:00
|
|
|
if !waitForResponse {
|
|
|
|
return nil, nil
|
|
|
|
}
|
|
|
|
|
2023-10-06 16:30:22 +00:00
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
|
2021-09-17 09:02:04 +00:00
|
|
|
defer cancel()
|
2021-04-19 12:09:46 +00:00
|
|
|
|
2023-10-06 16:30:22 +00:00
|
|
|
for {
|
2021-09-17 09:02:04 +00:00
|
|
|
select {
|
|
|
|
case <-time.After(200 * time.Millisecond):
|
|
|
|
//send signal to client that message status updated
|
2023-10-06 16:30:22 +00:00
|
|
|
community, err := m.communitiesManager.GetByIDString(communityID)
|
2021-09-17 09:02:04 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2021-08-05 13:27:47 +00:00
|
|
|
}
|
2021-09-17 09:02:04 +00:00
|
|
|
if community != nil && community.Name() != "" && community.DescriptionText() != "" {
|
2023-11-03 10:30:24 +00:00
|
|
|
m.logger.Debug("community info found",
|
|
|
|
zap.String("communityID", communityID),
|
|
|
|
zap.String("displayName", community.Name()))
|
2023-10-06 16:30:22 +00:00
|
|
|
return community, nil
|
2021-09-17 09:02:04 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
case <-ctx.Done():
|
2023-10-25 10:13:35 +00:00
|
|
|
m.logger.Error("failed to request community info", zap.String("communityID", communityID), zap.Error(ctx.Err()))
|
2023-10-06 16:30:22 +00:00
|
|
|
return nil, fmt.Errorf("failed to request community info for id '%s' from mailserver: %w", communityID, ctx.Err())
|
2021-09-17 09:02:04 +00:00
|
|
|
}
|
2021-08-05 13:27:47 +00:00
|
|
|
}
|
2021-04-19 12:09:46 +00:00
|
|
|
}
|
|
|
|
|
2023-11-03 10:30:24 +00:00
|
|
|
// requestCommunitiesFromMailserver installs filter for community and requests its details
|
2022-06-02 12:17:52 +00:00
|
|
|
// from mailserver. When response received it will be passed through signals handler
|
2023-10-12 19:21:49 +00:00
|
|
|
func (m *Messenger) requestCommunitiesFromMailserver(communities []communities.CommunityShard) {
|
2022-06-02 12:17:52 +00:00
|
|
|
m.requestedCommunitiesLock.Lock()
|
|
|
|
defer m.requestedCommunitiesLock.Unlock()
|
|
|
|
|
2023-10-25 10:13:35 +00:00
|
|
|
// we group topics by PubsubTopic
|
|
|
|
groupedTopics := map[string]map[types.TopicType]struct{}{}
|
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
for _, c := range communities {
|
|
|
|
if _, ok := m.requestedCommunities[c.CommunityID]; ok {
|
2023-05-22 21:38:02 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2022-06-02 12:17:52 +00:00
|
|
|
//If filter wasn't installed we create it and remember for deinstalling after
|
|
|
|
//response received
|
2023-10-12 19:21:49 +00:00
|
|
|
filter := m.transport.FilterByChatID(c.CommunityID)
|
2022-06-02 12:17:52 +00:00
|
|
|
if filter == nil {
|
2023-05-22 21:38:02 +00:00
|
|
|
filters, err := m.transport.InitPublicFilters([]transport.FiltersToInitialize{{
|
2023-10-12 19:21:49 +00:00
|
|
|
ChatID: c.CommunityID,
|
|
|
|
PubsubTopic: transport.GetPubsubTopic(c.Shard.TransportShard()),
|
2023-05-22 21:38:02 +00:00
|
|
|
}})
|
2022-06-02 12:17:52 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("Can't install filter for community", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if len(filters) != 1 {
|
|
|
|
m.logger.Error("Unexpected amount of filters created")
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
filter = filters[0]
|
2023-10-12 19:21:49 +00:00
|
|
|
m.requestedCommunities[c.CommunityID] = filter
|
2022-06-02 12:17:52 +00:00
|
|
|
} else {
|
|
|
|
//we don't remember filter id associated with community because it was already installed
|
2023-10-12 19:21:49 +00:00
|
|
|
m.requestedCommunities[c.CommunityID] = nil
|
2022-06-02 12:17:52 +00:00
|
|
|
}
|
2023-10-25 10:13:35 +00:00
|
|
|
|
|
|
|
if _, ok := groupedTopics[filter.PubsubTopic]; !ok {
|
|
|
|
groupedTopics[filter.PubsubTopic] = map[types.TopicType]struct{}{}
|
|
|
|
}
|
|
|
|
|
|
|
|
groupedTopics[filter.PubsubTopic][filter.ContentTopic] = struct{}{}
|
2022-06-02 12:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-10-25 10:13:35 +00:00
|
|
|
defer func() {
|
|
|
|
for _, c := range communities {
|
|
|
|
m.forgetCommunityRequest(c.CommunityID)
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2022-06-02 12:17:52 +00:00
|
|
|
to := uint32(m.transport.GetCurrentTime() / 1000)
|
|
|
|
from := to - oneMonthInSeconds
|
|
|
|
|
2023-10-25 10:13:35 +00:00
|
|
|
wg := sync.WaitGroup{}
|
2022-06-02 12:17:52 +00:00
|
|
|
|
2023-10-25 10:13:35 +00:00
|
|
|
for pubsubTopic, contentTopics := range groupedTopics {
|
|
|
|
wg.Add(1)
|
|
|
|
go func(pubsubTopic string, contentTopics map[types.TopicType]struct{}) {
|
|
|
|
batch := MailserverBatch{
|
|
|
|
From: from,
|
|
|
|
To: to,
|
|
|
|
Topics: maps.Keys(contentTopics),
|
|
|
|
PubsubTopic: pubsubTopic,
|
|
|
|
}
|
|
|
|
_, err := m.performMailserverRequest(func() (*MessengerResponse, error) {
|
|
|
|
m.logger.Info("requesting historic", zap.Any("batch", batch))
|
|
|
|
err := m.processMailserverBatch(batch)
|
|
|
|
return nil, err
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("error performing mailserver request", zap.Any("batch", batch), zap.Error(err))
|
|
|
|
}
|
|
|
|
wg.Done()
|
|
|
|
}(pubsubTopic, contentTopics)
|
2022-06-02 12:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-10-25 10:13:35 +00:00
|
|
|
wg.Wait()
|
|
|
|
|
2022-06-02 12:17:52 +00:00
|
|
|
ctx := context.Background()
|
|
|
|
ctx, cancel := context.WithTimeout(ctx, 15*time.Second)
|
|
|
|
defer cancel()
|
|
|
|
|
|
|
|
fetching := true
|
|
|
|
for fetching {
|
|
|
|
select {
|
|
|
|
case <-time.After(200 * time.Millisecond):
|
|
|
|
allLoaded := true
|
2023-10-12 19:21:49 +00:00
|
|
|
for _, c := range communities {
|
|
|
|
community, err := m.communitiesManager.GetByIDString(c.CommunityID)
|
2022-06-02 12:17:52 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("Error loading community", zap.Error(err))
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
if community == nil || community.Name() == "" || community.DescriptionText() == "" {
|
|
|
|
allLoaded = false
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if allLoaded {
|
|
|
|
fetching = false
|
|
|
|
}
|
|
|
|
|
|
|
|
case <-ctx.Done():
|
|
|
|
fetching = false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2021-04-19 12:09:46 +00:00
|
|
|
// forgetCommunityRequest removes community from requested ones and removes filter
|
|
|
|
func (m *Messenger) forgetCommunityRequest(communityID string) {
|
2023-10-25 10:13:35 +00:00
|
|
|
m.logger.Info("forgetting community request", zap.String("communityID", communityID))
|
|
|
|
|
2021-04-19 12:09:46 +00:00
|
|
|
filter, ok := m.requestedCommunities[communityID]
|
|
|
|
if !ok {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if filter != nil {
|
|
|
|
err := m.transport.RemoveFilters([]*transport.Filter{filter})
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Warn("cant remove filter", zap.Error(err))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
delete(m.requestedCommunities, communityID)
|
|
|
|
}
|
|
|
|
|
|
|
|
// passStoredCommunityInfoToSignalHandler calls signal handler with community info
|
|
|
|
func (m *Messenger) passStoredCommunityInfoToSignalHandler(communityID string) {
|
|
|
|
if m.config.messengerSignalsHandler == nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
//send signal to client that message status updated
|
|
|
|
community, err := m.communitiesManager.GetByIDString(communityID)
|
|
|
|
if community == nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-08-02 23:08:01 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Warn("cant get community and pass it to signal handler", zap.Error(err))
|
2021-04-19 12:09:46 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-08-02 23:08:01 +00:00
|
|
|
//if there is no info helpful for client, we don't post it
|
|
|
|
if community.Name() == "" && community.DescriptionText() == "" && community.MembersCount() == 0 {
|
2021-04-19 12:09:46 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
m.config.messengerSignalsHandler.CommunityInfoFound(community)
|
|
|
|
m.forgetCommunityRequest(communityID)
|
|
|
|
}
|
2021-06-01 09:29:37 +00:00
|
|
|
|
|
|
|
// handleCommunityDescription handles an community description
|
2023-08-18 11:39:59 +00:00
|
|
|
func (m *Messenger) handleCommunityDescription(state *ReceivedMessageState, signer *ecdsa.PublicKey, description *protobuf.CommunityDescription, rawPayload []byte) error {
|
2023-10-30 08:22:19 +00:00
|
|
|
communityResponse, err := m.communitiesManager.HandleCommunityDescriptionMessage(signer, description, rawPayload, nil)
|
2021-06-01 09:29:37 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-07-05 17:35:22 +00:00
|
|
|
// If response is nil, but not error, it will be processed async
|
|
|
|
if communityResponse == nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-06-14 14:15:46 +00:00
|
|
|
return m.handleCommunityResponse(state, communityResponse)
|
|
|
|
}
|
|
|
|
|
|
|
|
// handleCommunityResponse merges a community response into the received
// message state: it records the community, its changes and requests to join,
// synchronizes the local chat set with the community's chats (adding new
// ones, updating mutated ones, deleting removed ones), refreshes transport
// filters and updates activity-center notifications for the requests to join.
func (m *Messenger) handleCommunityResponse(state *ReceivedMessageState, communityResponse *communities.CommunityResponse) error {
	community := communityResponse.Community

	state.Response.AddCommunity(community)
	state.Response.CommunityChanges = append(state.Response.CommunityChanges, communityResponse.Changes)
	state.Response.AddRequestsToJoinCommunity(communityResponse.RequestsToJoin)

	// If we haven't joined/spectated the org, nothing to do
	if !community.Joined() && !community.Spectated() {
		return nil
	}

	// Delete local chats for channels removed from the community; the chat ID
	// is the community ID concatenated with the channel ID.
	removedChatIDs := make([]string, 0)
	for id := range communityResponse.Changes.ChatsRemoved {
		chatID := community.IDString() + id
		_, ok := state.AllChats.Load(chatID)
		if ok {
			removedChatIDs = append(removedChatIDs, chatID)
			state.AllChats.Delete(chatID)
			err := m.DeleteChat(chatID)
			if err != nil {
				// Best effort: a failed delete is logged, not fatal.
				m.logger.Error("couldn't delete chat", zap.Error(err))
			}
		}
	}

	// Update relevant chats names and add new ones
	// Currently removal is not supported
	chats := CreateCommunityChats(community, state.Timesource)
	var publicFiltersToInit []transport.FiltersToInitialize
	for i, chat := range chats {

		oldChat, ok := state.AllChats.Load(chat.ID)
		if !ok {
			// Beware, don't use the reference in the range (i.e chat) as it's a shallow copy
			state.AllChats.Store(chat.ID, chats[i])

			state.Response.AddChat(chat)
			// New chat: schedule a transport filter for its topic.
			publicFiltersToInit = append(publicFiltersToInit, transport.FiltersToInitialize{
				ChatID:      chat.ID,
				PubsubTopic: community.PubsubTopic(),
			})
			// Update name, currently is the only field is mutable
		} else if oldChat.Name != chat.Name ||
			oldChat.Description != chat.Description ||
			oldChat.Emoji != chat.Emoji ||
			oldChat.Color != chat.Color ||
			oldChat.UpdateFirstMessageTimestamp(chat.FirstMessageTimestamp) {
			oldChat.Name = chat.Name
			oldChat.Description = chat.Description
			oldChat.Emoji = chat.Emoji
			oldChat.Color = chat.Color
			// TODO(samyoul) remove storing of an updated reference pointer?
			state.AllChats.Store(chat.ID, oldChat)
			state.Response.AddChat(chat)
		}
	}

	// Remove transport filters for the chats deleted above.
	for _, chatID := range removedChatIDs {
		_, err := m.transport.RemoveFilterByChatID(chatID)
		if err != nil {
			m.logger.Error("couldn't remove filter", zap.Error(err))
		}
	}

	// Load transport filters
	filters, err := m.transport.InitPublicFilters(publicFiltersToInit)
	if err != nil {
		return err
	}
	_, err = m.scheduleSyncFilters(filters)
	if err != nil {
		return err
	}

	// Map each request-to-join state onto its activity-center membership
	// status and mark the matching notification as read/accepted.
	for _, requestToJoin := range communityResponse.RequestsToJoin {
		// Activity Center notification
		notification, err := m.persistence.GetActivityCenterNotificationByID(requestToJoin.ID)
		if err != nil {
			return err
		}

		if notification != nil {
			notification.MembershipStatus = ActivityCenterMembershipStatusAccepted
			switch requestToJoin.State {
			case communities.RequestToJoinStateDeclined:
				notification.MembershipStatus = ActivityCenterMembershipStatusDeclined
			case communities.RequestToJoinStateAccepted:
				notification.MembershipStatus = ActivityCenterMembershipStatusAccepted
			case communities.RequestToJoinStateAcceptedPending:
				notification.MembershipStatus = ActivityCenterMembershipStatusAcceptedPending
			case communities.RequestToJoinStateDeclinedPending:
				notification.MembershipStatus = ActivityCenterMembershipStatusDeclinedPending
			case communities.RequestToJoinStateAwaitingAddresses:
				notification.MembershipStatus = ActivityCenterMembershipOwnershipChanged
			default:
				notification.MembershipStatus = ActivityCenterMembershipStatusPending

			}

			notification.Read = true
			notification.Accepted = true
			notification.IncrementUpdatedAt(m.getTimesource())

			err = m.addActivityCenterNotification(state.Response, notification, nil)
			if err != nil {
				m.logger.Error("failed to save notification", zap.Error(err))
				return err
			}
		}
	}

	return nil
}
|
2021-08-06 15:40:23 +00:00
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
func (m *Messenger) HandleCommunityEventsMessage(state *ReceivedMessageState, message *protobuf.CommunityEventsMessage, statusMessage *v1protocol.StatusMessage) error {
|
|
|
|
signer := state.CurrentMessageState.PublicKey
|
|
|
|
communityResponse, err := m.communitiesManager.HandleCommunityEventsMessage(signer, message)
|
2023-06-14 14:15:46 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return m.handleCommunityResponse(state, communityResponse)
|
|
|
|
}
|
|
|
|
|
2023-08-08 13:16:29 +00:00
|
|
|
// Re-sends rejected events, if any.
|
2023-08-18 11:39:59 +00:00
|
|
|
func (m *Messenger) HandleCommunityEventsMessageRejected(state *ReceivedMessageState, message *protobuf.CommunityEventsMessageRejected, statusMessage *v1protocol.StatusMessage) error {
|
|
|
|
signer := state.CurrentMessageState.PublicKey
|
|
|
|
reapplyEventsMessage, err := m.communitiesManager.HandleCommunityEventsMessageRejected(signer, message)
|
2023-08-08 13:16:29 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if reapplyEventsMessage == nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-10-12 19:21:49 +00:00
|
|
|
community, err := m.communitiesManager.GetByID(reapplyEventsMessage.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.publishCommunityEvents(community, reapplyEventsMessage)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// HandleCommunityShardKey handles the private keys for the community shards
|
|
|
|
func (m *Messenger) HandleCommunityShardKey(state *ReceivedMessageState, message *protobuf.CommunityShardKey, statusMessage *v1protocol.StatusMessage) error {
|
|
|
|
// TODO: @cammellos: This is risky, it does not seem to support out of order messages
|
|
|
|
// (say that the community changes shards twice, last one wins, but we don't check clock
|
|
|
|
// etc)
|
|
|
|
|
|
|
|
// TODO: @cammellos: getbyid returns nil if the community is not in the db, so we need to handle it
|
|
|
|
community, err := m.communitiesManager.GetByID(message.CommunityId)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// If we haven't joined the community, nothing to do
|
|
|
|
if !community.Joined() {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
signer := state.CurrentMessageState.PublicKey
|
|
|
|
if signer == nil {
|
|
|
|
return errors.New("signer can't be nil")
|
|
|
|
}
|
|
|
|
|
2023-10-30 18:34:21 +00:00
|
|
|
err = m.handleCommunityShardAndFiltersFromProto(community, common.ShardFromProtobuff(message.Shard), message.PrivateKey)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
2023-10-12 19:21:49 +00:00
|
|
|
}
|
|
|
|
|
2023-10-30 18:34:21 +00:00
|
|
|
state.Response.AddCommunity(community)
|
|
|
|
|
|
|
|
return nil
|
2023-10-12 19:21:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) handleCommunityShardAndFiltersFromProto(community *communities.Community, shard *common.Shard, privateKeyBytes []byte) error {
|
|
|
|
err := m.communitiesManager.UpdateShard(community, shard)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
var privKey *ecdsa.PrivateKey = nil
|
|
|
|
if privateKeyBytes != nil {
|
|
|
|
privKey, err = crypto.ToECDSA(privateKeyBytes)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-30 18:34:21 +00:00
|
|
|
err = m.communitiesManager.UpdatePubsubTopicPrivateKey(community, privKey)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.UpdateCommunityFilters(community)
|
2023-08-08 13:16:29 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
func (m *Messenger) handleCommunityPrivilegedUserSyncMessage(state *ReceivedMessageState, signer *ecdsa.PublicKey, message *protobuf.CommunityPrivilegedUserSyncMessage) error {
|
2023-08-18 19:52:13 +00:00
|
|
|
if signer == nil {
|
|
|
|
return errors.New("signer can't be nil")
|
|
|
|
}
|
|
|
|
|
|
|
|
community, err := m.communitiesManager.GetByID(message.CommunityId)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-09-20 08:37:46 +00:00
|
|
|
// Currently this type of msg coming from the control node.
|
|
|
|
// If it will change in the future, check that events types starting from
|
|
|
|
// CONTROL_NODE were sent by a control node
|
|
|
|
isControlNodeMsg := common.IsPubKeyEqual(community.PublicKey(), signer)
|
|
|
|
if !isControlNodeMsg {
|
|
|
|
return errors.New("accepted/requested to join sync messages can be send only by the control node")
|
2023-08-18 19:52:13 +00:00
|
|
|
}
|
|
|
|
|
2023-09-20 08:37:46 +00:00
|
|
|
if community == nil {
|
|
|
|
return errors.New("community not found")
|
2023-08-18 19:52:13 +00:00
|
|
|
}
|
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
err = m.communitiesManager.ValidateCommunityPrivilegedUserSyncMessage(message)
|
2023-08-18 19:52:13 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
switch message.Type {
|
|
|
|
case protobuf.CommunityPrivilegedUserSyncMessage_CONTROL_NODE_ACCEPT_REQUEST_TO_JOIN:
|
|
|
|
fallthrough
|
|
|
|
case protobuf.CommunityPrivilegedUserSyncMessage_CONTROL_NODE_REJECT_REQUEST_TO_JOIN:
|
2023-08-18 11:39:59 +00:00
|
|
|
requestsToJoin, err := m.communitiesManager.HandleRequestToJoinPrivilegedUserSyncMessage(message, community.ID())
|
2023-08-18 19:52:13 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
state.Response.AddRequestsToJoinCommunity(requestsToJoin)
|
2023-09-20 08:37:46 +00:00
|
|
|
|
|
|
|
case protobuf.CommunityPrivilegedUserSyncMessage_CONTROL_NODE_ALL_SYNC_REQUESTS_TO_JOIN:
|
|
|
|
requestsToJoin, err := m.communitiesManager.HandleSyncAllRequestToJoinForNewPrivilegedMember(message, community.ID())
|
|
|
|
if err != nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
state.Response.AddRequestsToJoinCommunity(requestsToJoin)
|
2023-08-18 19:52:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2023-08-15 15:27:01 +00:00
|
|
|
}
|
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
func (m *Messenger) HandleCommunityPrivilegedUserSyncMessage(state *ReceivedMessageState, message *protobuf.CommunityPrivilegedUserSyncMessage, statusMessage *v1protocol.StatusMessage) error {
|
|
|
|
signer := state.CurrentMessageState.PublicKey
|
|
|
|
return m.handleCommunityPrivilegedUserSyncMessage(state, signer, message)
|
|
|
|
}
|
|
|
|
|
2023-10-31 14:20:40 +00:00
|
|
|
// sendSharedAddressToControlNode re-sends this device's request to join —
// including previously revealed addresses — as a private message to `receiver`
// (the community's new owner/control node). The request is bumped to the
// current time and moved to the awaiting-addresses state before being saved
// and sent. Returns the updated request to join and any send error.
func (m *Messenger) sendSharedAddressToControlNode(receiver *ecdsa.PublicKey, community *communities.Community) (*communities.RequestToJoin, error) {
	if receiver == nil {
		return nil, errors.New("receiver can't be nil")
	}

	if community == nil {
		return nil, communities.ErrOrgNotFound
	}

	m.logger.Info("share address to the new owner ", zap.String("community id", community.IDString()))

	pk := common.PubkeyToHex(&m.identity.PublicKey)

	// Load our own request to join, with the wallet addresses we revealed.
	requestToJoin, err := m.communitiesManager.GetCommunityRequestToJoinWithRevealedAddresses(pk, community.ID())
	if err != nil {
		return nil, err
	}

	// Refresh the clock and flag the request as awaiting addresses so the
	// control node processes it as a fresh share.
	requestToJoin.Clock = uint64(time.Now().Unix())
	requestToJoin.State = communities.RequestToJoinStateAwaitingAddresses
	payload, err := proto.Marshal(requestToJoin.ToCommunityRequestToJoinProtobuf())
	if err != nil {
		return nil, err
	}

	rawMessage := common.RawMessage{
		Payload:             payload,
		CommunityID:         community.ID(),
		SkipEncryptionLayer: true,
		MessageType:         protobuf.ApplicationMetadataMessage_COMMUNITY_REQUEST_TO_JOIN,
		PubsubTopic:         community.PubsubTopic(), // TODO: confirm if it should be sent in community pubsub topic
	}

	// Persist the updated request before sending it out.
	if err = m.communitiesManager.SaveRequestToJoin(requestToJoin); err != nil {
		return nil, err
	}

	_, err = m.sender.SendPrivate(context.Background(), receiver, &rawMessage)
	return requestToJoin, err
}
|
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
// HandleSyncInstallationCommunity is the protocol entry point for a community
// synced from a paired installation; it delegates to
// handleSyncInstallationCommunity with no status message.
func (m *Messenger) HandleSyncInstallationCommunity(messageState *ReceivedMessageState, syncCommunity *protobuf.SyncInstallationCommunity, statusMessage *v1protocol.StatusMessage) error {
	return m.handleSyncInstallationCommunity(messageState, syncCommunity, nil)
}
|
|
|
|
|
|
|
|
// handleSyncInstallationCommunity applies a community synced from a paired
// device: it imports encryption keys, saves requests to join, handles the
// community description, settings and control node, joins or leaves the
// community to mirror the other device, and finally advances the sync clock.
// Order matters throughout; see the inline notes.
func (m *Messenger) handleSyncInstallationCommunity(messageState *ReceivedMessageState, syncCommunity *protobuf.SyncInstallationCommunity, statusMessage *v1protocol.StatusMessage) error {
	logger := m.logger.Named("handleSyncCommunity")

	// Should handle community
	shouldHandle, err := m.communitiesManager.ShouldHandleSyncCommunity(syncCommunity)
	if err != nil {
		logger.Debug("m.communitiesManager.ShouldHandleSyncCommunity error", zap.Error(err))
		return err
	}
	logger.Debug("ShouldHandleSyncCommunity result", zap.Bool("shouldHandle", shouldHandle))
	if !shouldHandle {
		return nil
	}

	// Handle community keys
	if len(syncCommunity.EncryptionKeys) != 0 {
		// We pass nil,nil as private key/public key as they won't be encrypted
		_, err := m.encryptor.HandleHashRatchetKeys(syncCommunity.Id, syncCommunity.EncryptionKeys, nil, nil)
		if err != nil {
			return err
		}
	}

	// Handle any community requests to join.
	// MUST BE HANDLED BEFORE DESCRIPTION!
	pending := false
	for _, rtj := range syncCommunity.RequestsToJoin {
		req := new(communities.RequestToJoin)
		req.InitFromSyncProtobuf(rtj)

		// A pending request means we must not auto-join/leave below.
		if req.State == communities.RequestToJoinStatePending {
			pending = true
		}

		// ErrOldRequestToJoin is benign: we already have a newer request.
		err = m.communitiesManager.SaveRequestToJoin(req)
		if err != nil && err != communities.ErrOldRequestToJoin {
			logger.Debug("m.communitiesManager.SaveRequestToJoin error", zap.Error(err))
			return err
		}
	}
	logger.Debug("community requests to join pending state", zap.Bool("pending", pending))

	// Don't use the public key of the private key, uncompress the community id
	orgPubKey, err := crypto.DecompressPubkey(syncCommunity.Id)
	if err != nil {
		logger.Debug("crypto.DecompressPubkey error", zap.Error(err))
		return err
	}
	logger.Debug("crypto.DecompressPubkey result", zap.Any("orgPubKey", orgPubKey))

	// The synced description is a wrapped ApplicationMetadataMessage whose
	// payload is the CommunityDescription; unwrap both layers.
	var amm protobuf.ApplicationMetadataMessage
	err = proto.Unmarshal(syncCommunity.Description, &amm)
	if err != nil {
		logger.Debug("proto.Unmarshal protobuf.ApplicationMetadataMessage error", zap.Error(err))
		return err
	}

	var cd protobuf.CommunityDescription
	err = proto.Unmarshal(amm.Payload, &cd)
	if err != nil {
		logger.Debug("proto.Unmarshal protobuf.CommunityDescription error", zap.Error(err))
		return err
	}

	err = m.handleCommunityDescription(messageState, orgPubKey, &cd, syncCommunity.Description)
	if err != nil {
		logger.Debug("m.handleCommunityDescription error", zap.Error(err))
		return err
	}

	if syncCommunity.Settings != nil {
		err = m.HandleSyncCommunitySettings(messageState, syncCommunity.Settings, nil)
		if err != nil {
			logger.Debug("m.handleSyncCommunitySettings error", zap.Error(err))
			return err
		}
	}

	if syncCommunity.ControlNode != nil {
		err = m.communitiesManager.SetSyncControlNode(syncCommunity.Id, syncCommunity.ControlNode)
		if err != nil {
			logger.Debug("m.SetSyncControlNode", zap.Error(err))
			return err
		}
	}

	savedCommunity, err := m.communitiesManager.GetByID(syncCommunity.Id)
	if err != nil {
		return err
	}

	if err := m.handleCommunityTokensMetadataByPrivilegedMembers(savedCommunity); err != nil {
		logger.Debug("m.handleCommunityTokensMetadataByPrivilegedMembers", zap.Error(err))
		return err
	}

	// if we are not waiting for approval, join or leave the community
	if !pending {
		var mr *MessengerResponse
		if syncCommunity.Joined {
			// ErrOrgAlreadyJoined is benign: we are already in the community.
			mr, err = m.joinCommunity(context.Background(), syncCommunity.Id, false)
			if err != nil && err != communities.ErrOrgAlreadyJoined {
				logger.Debug("m.joinCommunity error", zap.Error(err))
				return err
			}
		} else {
			mr, err = m.leaveCommunity(syncCommunity.Id)
			if err != nil {
				logger.Debug("m.leaveCommunity error", zap.Error(err))
				return err
			}
		}
		if mr != nil {
			err = messageState.Response.Merge(mr)
			if err != nil {
				logger.Debug("messageState.Response.Merge error", zap.Error(err))
				return err
			}
		}
	}

	// update the clock value
	err = m.communitiesManager.SetSyncClock(syncCommunity.Id, syncCommunity.Clock)
	if err != nil {
		logger.Debug("m.communitiesManager.SetSyncClock", zap.Error(err))
		return err
	}

	return nil
}
|
2022-03-08 15:25:00 +00:00
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
func (m *Messenger) HandleSyncCommunitySettings(messageState *ReceivedMessageState, syncCommunitySettings *protobuf.SyncCommunitySettings, statusMessage *v1protocol.StatusMessage) error {
|
|
|
|
shouldHandle, err := m.communitiesManager.ShouldHandleSyncCommunitySettings(syncCommunitySettings)
|
2022-06-01 07:55:48 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Debug("m.communitiesManager.ShouldHandleSyncCommunitySettings error", zap.Error(err))
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
m.logger.Debug("ShouldHandleSyncCommunity result", zap.Bool("shouldHandle", shouldHandle))
|
|
|
|
if !shouldHandle {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-08-18 11:39:59 +00:00
|
|
|
communitySettings, err := m.communitiesManager.HandleSyncCommunitySettings(syncCommunitySettings)
|
2022-06-01 07:55:48 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
messageState.Response.AddCommunitySettings(communitySettings)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-08-22 17:48:42 +00:00
|
|
|
// handleCommunityTokensMetadataByPrivilegedMembers delegates to the
// communities manager to process the community's token metadata.
func (m *Messenger) handleCommunityTokensMetadataByPrivilegedMembers(community *communities.Community) error {
	return m.communitiesManager.HandleCommunityTokensMetadataByPrivilegedMembers(community)
}
|
|
|
|
|
2022-03-21 14:18:36 +00:00
|
|
|
// InitHistoryArchiveTasks bootstraps history-archive creation and seeding for
// every joined community that has archive support enabled: it seeds any
// existing torrent, backfills possibly missed waku messages, and — depending
// on how old the last archive is — either schedules or immediately runs
// archive creation, then starts the periodic archive interval. Failures for
// one community are logged and do not stop processing of the others.
func (m *Messenger) InitHistoryArchiveTasks(communities []*communities.Community) {
	m.communitiesManager.LogStdout("initializing history archive tasks")

	for _, c := range communities {

		if c.Joined() {
			settings, err := m.communitiesManager.GetCommunitySettingsByID(c.ID())
			if err != nil {
				m.communitiesManager.LogStdout("failed to get community settings", zap.Error(err))
				continue
			}
			if !settings.HistoryArchiveSupportEnabled {
				m.communitiesManager.LogStdout("history archive support disabled for community", zap.String("id", c.IDString()))
				continue
			}

			// Check if there's already a torrent file for this community and seed it
			if m.communitiesManager.TorrentFileExists(c.IDString()) {
				err = m.communitiesManager.SeedHistoryArchiveTorrent(c.ID())
				if err != nil {
					// Seeding failure is non-fatal; we still proceed below.
					m.communitiesManager.LogStdout("failed to seed history archive", zap.Error(err))
				}
			}

			filters, err := m.communitiesManager.GetCommunityChatsFilters(c.ID())
			if err != nil {
				m.communitiesManager.LogStdout("failed to get community chats filters for community", zap.Error(err))
				continue
			}

			// No chats/filters yet: nothing to backfill, just start the
			// periodic archive loop.
			if len(filters) == 0 {
				m.communitiesManager.LogStdout("no filters or chats for this community starting interval", zap.String("id", c.IDString()))
				go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
				continue
			}

			topics := []types.TopicType{}

			for _, filter := range filters {
				topics = append(topics, filter.ContentTopic)
			}

			// First we need to know the timestamp of the latest waku message
			// we've received for this community, so we can request messages we've
			// possibly missed since then
			latestWakuMessageTimestamp, err := m.communitiesManager.GetLatestWakuMessageTimestamp(topics)
			if err != nil {
				m.communitiesManager.LogStdout("failed to get Latest waku message timestamp", zap.Error(err))
				continue
			}

			if latestWakuMessageTimestamp == 0 {
				// This means we don't have any waku messages for this community
				// yet, either because no messages were sent in the community so far,
				// or because messages haven't reached this node
				//
				// In this case we default to requesting messages from the store nodes
				// for the past 30 days
				latestWakuMessageTimestamp = uint64(time.Now().AddDate(0, 0, -30).Unix())
			}

			// Request possibly missed waku messages for community
			_, err = m.syncFiltersFrom(filters, uint32(latestWakuMessageTimestamp))
			if err != nil {
				m.communitiesManager.LogStdout("failed to request missing messages", zap.Error(err))
				continue
			}

			// We figure out the end date of the last created archive and schedule
			// the interval for creating future archives
			// If the last end date is at least `interval` ago, we create an archive immediately first
			lastArchiveEndDateTimestamp, err := m.communitiesManager.GetHistoryArchivePartitionStartTimestamp(c.ID())
			if err != nil {
				m.communitiesManager.LogStdout("failed to get archive partition start timestamp", zap.Error(err))
				continue
			}

			to := time.Now()
			lastArchiveEndDate := time.Unix(int64(lastArchiveEndDateTimestamp), 0)
			durationSinceLastArchive := to.Sub(lastArchiveEndDate)

			if lastArchiveEndDateTimestamp == 0 {
				// No prior messages to be archived, so we just kick off the archive creation loop
				// for future archives
				go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
			} else if durationSinceLastArchive < messageArchiveInterval {
				// Last archive is less than `interval` old, wait until `interval` is complete,
				// then create archive and kick off archive creation loop for future archives
				// Seed current archive in the meantime
				err := m.communitiesManager.SeedHistoryArchiveTorrent(c.ID())
				if err != nil {
					m.communitiesManager.LogStdout("failed to seed history archive", zap.Error(err))
				}
				timeToNextInterval := messageArchiveInterval - durationSinceLastArchive

				m.communitiesManager.LogStdout("starting history archive tasks interval in", zap.Any("timeLeft", timeToNextInterval))
				time.AfterFunc(timeToNextInterval, func() {
					err := m.communitiesManager.CreateAndSeedHistoryArchive(c.ID(), topics, lastArchiveEndDate, to.Add(timeToNextInterval), messageArchiveInterval, c.Encrypted())
					if err != nil {
						m.communitiesManager.LogStdout("failed to get create and seed history archive", zap.Error(err))
					}
					go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
				})
			} else {
				// Looks like the last archive was generated more than `interval`
				// ago, so lets create a new archive now and then schedule the archive
				// creation loop
				err := m.communitiesManager.CreateAndSeedHistoryArchive(c.ID(), topics, lastArchiveEndDate, to, messageArchiveInterval, c.Encrypted())
				if err != nil {
					m.communitiesManager.LogStdout("failed to get create and seed history archive", zap.Error(err))
				}

				go m.communitiesManager.StartHistoryArchiveTasksInterval(c, messageArchiveInterval)
			}
		}
	}
}
|
|
|
|
|
2023-06-08 10:22:26 +00:00
|
|
|
func (m *Messenger) enableHistoryArchivesImportAfterDelay() {
|
|
|
|
go func() {
|
|
|
|
time.Sleep(importInitialDelay)
|
|
|
|
m.importDelayer.once.Do(func() {
|
|
|
|
close(m.importDelayer.wait)
|
|
|
|
})
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2023-09-15 07:42:28 +00:00
|
|
|
func (m *Messenger) checkIfIMemberOfCommunity(communityID types.HexBytes) error {
|
|
|
|
community, err := m.communitiesManager.GetByID(communityID)
|
|
|
|
if err != nil {
|
|
|
|
m.communitiesManager.LogStdout("couldn't get community to import archives", zap.Error(err))
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if !community.HasMember(&m.identity.PublicKey) {
|
|
|
|
m.communitiesManager.LogStdout("can't import archives when user not a member of community")
|
|
|
|
return ErrUserNotMember
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-01-19 13:23:48 +00:00
|
|
|
// resumeHistoryArchivesImport restarts importing of downloaded-but-unimported
// history archives for a community. It is a no-op when there is nothing to
// import, when we are not a member, or when a download task is already
// running. Otherwise it registers a new download task and imports archives in
// a background goroutine, signalling completion to the client.
func (m *Messenger) resumeHistoryArchivesImport(communityID types.HexBytes) error {
	archiveIDsToImport, err := m.communitiesManager.GetMessageArchiveIDsToImport(communityID)
	if err != nil {
		return err
	}

	if len(archiveIDsToImport) == 0 {
		return nil
	}

	err = m.checkIfIMemberOfCommunity(communityID)
	if err != nil {
		return err
	}

	currentTask := m.communitiesManager.GetHistoryArchiveDownloadTask(communityID.String())
	// no need to resume imports if there's already a task ongoing
	if currentTask != nil {
		return nil
	}

	// Create new task
	task := &communities.HistoryArchiveDownloadTask{
		CancelChan: make(chan struct{}),
		Waiter:     *new(sync.WaitGroup),
		Cancelled:  false,
	}

	m.communitiesManager.AddHistoryArchiveDownloadTask(communityID.String(), task)

	// this wait groups tracks the ongoing task for a particular community
	task.Waiter.Add(1)

	go func() {
		defer task.Waiter.Done()
		// Import errors are logged, not returned: the finished signal is
		// emitted either way.
		err := m.importHistoryArchives(communityID, task.CancelChan)
		if err != nil {
			m.communitiesManager.LogStdout("failed to import history archives", zap.Error(err))
		}
		m.config.messengerSignalsHandler.DownloadingHistoryArchivesFinished(types.EncodeHex(communityID))
	}()
	return nil
}
|
|
|
|
|
2023-06-01 20:02:34 +00:00
|
|
|
// SpeedupArchivesImport switches the archive-import rate limiter to the fast
// rate (importFastRate).
func (m *Messenger) SpeedupArchivesImport() {
	m.importRateLimiter.SetLimit(rate.Every(importFastRate))
}
|
|
|
|
|
|
|
|
// SlowdownArchivesImport switches the archive-import rate limiter to the slow
// rate (importSlowRate).
func (m *Messenger) SlowdownArchivesImport() {
	m.importRateLimiter.SetLimit(rate.Every(importSlowRate))
}
|
|
|
|
|
2023-01-19 13:23:48 +00:00
|
|
|
// importHistoryArchives drains the queue of downloaded history archives for a
// community, handling their messages in rate-limited chunks. The `cancel`
// channel aborts the import (mapped onto a context so both the initial-delay
// wait and the rate limiter observe it). The loop exits cleanly when
// cancelled, when membership is lost, or when no archives remain.
func (m *Messenger) importHistoryArchives(communityID types.HexBytes, cancel chan struct{}) error {
	importTicker := time.NewTicker(100 * time.Millisecond)
	defer importTicker.Stop()

	// Bridge the cancel channel into a context for the rate limiter and the
	// select statements below.
	ctx, cancelFunc := context.WithCancel(context.Background())
	go func() {
		<-cancel
		cancelFunc()
	}()

	// don't proceed until initial import delay has passed
	select {
	case <-m.importDelayer.wait:
	case <-ctx.Done():
		return nil
	}

importMessageArchivesLoop:
	for {
		select {
		case <-ctx.Done():
			m.communitiesManager.LogStdout("interrupted importing history archive messages")
			return nil
		case <-importTicker.C:
			// Stop importing if we are no longer a member of the community.
			err := m.checkIfIMemberOfCommunity(communityID)
			if err != nil {
				break importMessageArchivesLoop
			}
			archiveIDsToImport, err := m.communitiesManager.GetMessageArchiveIDsToImport(communityID)
			if err != nil {
				m.communitiesManager.LogStdout("couldn't get message archive IDs to import", zap.Error(err))
				return err
			}

			if len(archiveIDsToImport) == 0 {
				m.communitiesManager.LogStdout("no message archives to import")
				break importMessageArchivesLoop
			}

			m.communitiesManager.LogStdout(fmt.Sprintf("importing message archive, %d left", len(archiveIDsToImport)))

			// only process one archive at a time, so in case of cancel we don't
			// wait for all archives to be processed first
			downloadedArchiveID := archiveIDsToImport[0]

			archiveMessages, err := m.communitiesManager.ExtractMessagesFromHistoryArchive(communityID, downloadedArchiveID)
			if err != nil {
				m.communitiesManager.LogStdout("failed to extract history archive messages", zap.Error(err))
				continue
			}

			m.config.messengerSignalsHandler.ImportingHistoryArchiveMessages(types.EncodeHex(communityID))

			for _, messagesChunk := range chunkSlice(archiveMessages, importMessagesChunkSize) {
				// Throttle chunk handling; the wait aborts on cancellation.
				if err := m.importRateLimiter.Wait(ctx); err != nil {
					if !errors.Is(err, context.Canceled) {
						m.communitiesManager.LogStdout("rate limiter error when handling archive messages", zap.Error(err))
					}
					continue importMessageArchivesLoop
				}

				response, err := m.handleArchiveMessages(messagesChunk)
				if err != nil {
					m.communitiesManager.LogStdout("failed to handle archive messages", zap.Error(err))
					continue importMessageArchivesLoop
				}

				if !response.IsEmpty() {
					// Emit handled messages and push notifications to clients.
					notifications := response.Notifications()
					response.ClearNotifications()
					signal.SendNewMessages(response)
					localnotifications.PushMessages(notifications)
				}
			}

			// Mark the archive as imported so it is not picked up again.
			err = m.communitiesManager.SetMessageArchiveIDImported(communityID, downloadedArchiveID, true)
			if err != nil {
				m.communitiesManager.LogStdout("failed to mark history message archive as imported", zap.Error(err))
				continue
			}
		}
	}
	return nil
}
|
|
|
|
|
2022-03-21 14:18:36 +00:00
|
|
|
// dispatchMagnetlinkMessage publishes the community's current history-archive
// magnetlink to the community's magnetlink channel, signed with the community
// key, and then records the message clock both in the community description
// and in the magnetlink clock store.
func (m *Messenger) dispatchMagnetlinkMessage(communityID string) error {
	community, err := m.communitiesManager.GetByIDString(communityID)
	if err != nil {
		return err
	}

	magnetlink, err := m.communitiesManager.GetHistoryArchiveMagnetlink(community.ID())
	if err != nil {
		return err
	}

	magnetLinkMessage := &protobuf.CommunityMessageArchiveMagnetlink{
		Clock:     m.getTimesource().GetCurrentTime(),
		MagnetUri: magnetlink,
	}

	encodedMessage, err := proto.Marshal(magnetLinkMessage)
	if err != nil {
		return err
	}

	chatID := community.MagnetlinkMessageChannelID()
	rawMessage := common.RawMessage{
		LocalChatID:          chatID,
		Sender:               community.PrivateKey(), // signed by the community itself
		Payload:              encodedMessage,
		MessageType:          protobuf.ApplicationMetadataMessage_COMMUNITY_MESSAGE_ARCHIVE_MAGNETLINK,
		SkipGroupMessageWrap: true,
		PubsubTopic:          community.PubsubTopic(),
	}

	_, err = m.sender.SendPublic(context.Background(), chatID, rawMessage)
	if err != nil {
		return err
	}

	// Persist the clock in both places it is tracked.
	err = m.communitiesManager.UpdateCommunityDescriptionMagnetlinkMessageClock(community.ID(), magnetLinkMessage.Clock)
	if err != nil {
		return err
	}
	return m.communitiesManager.UpdateMagnetlinkMessageClock(community.ID(), magnetLinkMessage.Clock)
}
|
|
|
|
|
|
|
|
// EnableCommunityHistoryArchiveProtocol turns on the torrent-based history
// archive protocol: it persists the enabled flag in the node config, starts
// the torrent client, kicks off archive tasks for communities we control, and
// signals clients that the protocol is enabled. It is a no-op if the protocol
// is already enabled.
func (m *Messenger) EnableCommunityHistoryArchiveProtocol() error {
	nodeConfig, err := m.settings.GetNodeConfig()
	if err != nil {
		return err
	}

	if nodeConfig.TorrentConfig.Enabled {
		return nil
	}

	// Persist the flag before touching runtime state.
	nodeConfig.TorrentConfig.Enabled = true
	err = m.settings.SaveSetting("node-config", nodeConfig)
	if err != nil {
		return err
	}

	m.config.torrentConfig = &nodeConfig.TorrentConfig
	m.communitiesManager.SetTorrentConfig(&nodeConfig.TorrentConfig)
	err = m.communitiesManager.StartTorrentClient()
	if err != nil {
		return err
	}

	controlledCommunities, err := m.communitiesManager.Controlled()
	if err != nil {
		return err
	}

	if len(controlledCommunities) > 0 {
		// Archive initialization can be slow; run it in the background.
		go m.InitHistoryArchiveTasks(controlledCommunities)
	}
	m.config.messengerSignalsHandler.HistoryArchivesProtocolEnabled()
	return nil
}
|
|
|
|
|
|
|
|
func (m *Messenger) DisableCommunityHistoryArchiveProtocol() error {
|
|
|
|
|
|
|
|
nodeConfig, err := m.settings.GetNodeConfig()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if !nodeConfig.TorrentConfig.Enabled {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
m.communitiesManager.StopTorrentClient()
|
|
|
|
|
|
|
|
nodeConfig.TorrentConfig.Enabled = false
|
|
|
|
err = m.settings.SaveSetting("node-config", nodeConfig)
|
|
|
|
m.config.torrentConfig = &nodeConfig.TorrentConfig
|
|
|
|
m.communitiesManager.SetTorrentConfig(&nodeConfig.TorrentConfig)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
m.config.messengerSignalsHandler.HistoryArchivesProtocolDisabled()
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2022-03-08 15:25:00 +00:00
|
|
|
func (m *Messenger) GetCommunitiesSettings() ([]communities.CommunitySettings, error) {
|
|
|
|
settings, err := m.communitiesManager.GetCommunitiesSettings()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return settings, nil
|
|
|
|
}
|
2022-06-01 07:55:48 +00:00
|
|
|
|
|
|
|
// SyncCommunitySettings broadcasts the given community settings to paired
// devices as a SYNC_COMMUNITY_SETTINGS message, then advances and saves the
// chat clock. It is a no-op when no devices are paired.
func (m *Messenger) SyncCommunitySettings(ctx context.Context, settings *communities.CommunitySettings) error {

	if !m.hasPairedDevices() {
		return nil
	}

	clock, chat := m.getLastClockWithRelatedChat()

	syncMessage := &protobuf.SyncCommunitySettings{
		Clock:                        clock,
		CommunityId:                  settings.CommunityID,
		HistoryArchiveSupportEnabled: settings.HistoryArchiveSupportEnabled,
	}
	encodedMessage, err := proto.Marshal(syncMessage)
	if err != nil {
		return err
	}

	_, err = m.dispatchMessage(ctx, common.RawMessage{
		LocalChatID:         chat.ID,
		Payload:             encodedMessage,
		MessageType:         protobuf.ApplicationMetadataMessage_SYNC_COMMUNITY_SETTINGS,
		ResendAutomatically: true,
	})
	if err != nil {
		return err
	}

	// Record the clock we used so subsequent messages are ordered after it.
	chat.LastClockValue = clock
	return m.saveChat(chat)
}
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToImport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows making the API available via
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
|
|
|
|
func (m *Messenger) ExtractDiscordDataFromImportFiles(filesToImport []string) (*discord.ExtractedData, map[string]*discord.ImportError) {
|
|
|
|
|
|
|
|
extractedData := &discord.ExtractedData{
|
|
|
|
Categories: map[string]*discord.Category{},
|
|
|
|
ExportedData: make([]*discord.ExportedData, 0),
|
|
|
|
OldestMessageTimestamp: 0,
|
2022-09-29 11:50:23 +00:00
|
|
|
MessageCount: 0,
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
errors := map[string]*discord.ImportError{}
|
|
|
|
|
|
|
|
for _, fileToImport := range filesToImport {
|
|
|
|
filePath := strings.Replace(fileToImport, "file://", "", -1)
|
2023-01-24 09:36:07 +00:00
|
|
|
|
|
|
|
fileInfo, err := os.Stat(filePath)
|
|
|
|
if err != nil {
|
|
|
|
errors[fileToImport] = discord.Error(err.Error())
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
fileSize := fileInfo.Size()
|
|
|
|
if fileSize > discord.MaxImportFileSizeBytes {
|
|
|
|
errors[fileToImport] = discord.Error(discord.ErrImportFileTooBig.Error())
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
bytes, err := os.ReadFile(filePath)
|
|
|
|
if err != nil {
|
2022-09-29 11:50:23 +00:00
|
|
|
errors[fileToImport] = discord.Error(err.Error())
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
var discordExportedData discord.ExportedData
|
|
|
|
|
|
|
|
err = json.Unmarshal(bytes, &discordExportedData)
|
|
|
|
if err != nil {
|
2022-09-29 11:50:23 +00:00
|
|
|
errors[fileToImport] = discord.Error(err.Error())
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(discordExportedData.Messages) == 0 {
|
2022-09-29 11:50:23 +00:00
|
|
|
errors[fileToImport] = discord.Error(discord.ErrNoMessageData.Error())
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
discordExportedData.Channel.FilePath = filePath
|
|
|
|
categoryID := discordExportedData.Channel.CategoryID
|
|
|
|
|
|
|
|
discordCategory := discord.Category{
|
|
|
|
ID: categoryID,
|
|
|
|
Name: discordExportedData.Channel.CategoryName,
|
|
|
|
}
|
|
|
|
|
|
|
|
_, ok := extractedData.Categories[categoryID]
|
|
|
|
if !ok {
|
|
|
|
extractedData.Categories[categoryID] = &discordCategory
|
|
|
|
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
extractedData.MessageCount = extractedData.MessageCount + discordExportedData.MessageCount
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
extractedData.ExportedData = append(extractedData.ExportedData, &discordExportedData)
|
|
|
|
|
|
|
|
if len(discordExportedData.Messages) > 0 {
|
2022-09-29 11:50:23 +00:00
|
|
|
msgTime, err := time.Parse(discordTimestampLayout, discordExportedData.Messages[0].Timestamp)
|
feat: introduce messenger APIs to extract discord channels
As part of the new Discord <-> Status Community Import functionality,
we're adding an API that extracts all discord categories and channels
from a previously exported discord export file.
These APIs can be used in clients to show the user what categories and
channels will be imported later on.
There are two APIs:
1. `Messenger.ExtractDiscordCategoriesAndChannels(filesToimport
[]string) (*MessengerResponse, map[string]*discord.ImportError)`
This takes a list of exported discord export (JSON) files (typically one per
channel), reads them, and extracts the categories and channels into
dedicated data structures (`[]DiscordChannel` and `[]DiscordCategory`)
It also returns the oldest message timestamp found in all extracted
channels.
The API is synchronous and returns the extracted data as
a `*MessengerResponse`. This allows to make the API available
status-go's RPC interface.
The error case is a `map[string]*discord.ImportError` where each key
is a file path of a JSON file that we tried to extract data from, and
the value a `discord.ImportError` which holds an error message and an
error code, allowing for distinguishing between "critical" errors and
"non-critical" errors.
2. `Messenger.RequestExtractDiscordCategoriesAndChannels(filesToImport
[]string)`
This is the asynchronous counterpart to
`ExtractDiscordCategoriesAndChannels`. The reason this API has been
added is because discord servers can have a lot of message and
channel data, which causes `ExtractDiscordCategoriesAndChannels` to
block the thread for too long, making apps potentially feel like they
are stuck.
This API runs inside a go routine, eventually calls
`ExtractDiscordCategoriesAndChannels`, and then emits a newly
introduced `DiscordCategoriesAndChannelsExtractedSignal` that clients
can react to.
Failure of extraction has to be determined by the
`discord.ImportErrors` emitted by the signal.
**A note about exported discord history files**
We expect users to export their discord histories via the
[DiscordChatExporter](https://github.com/Tyrrrz/DiscordChatExporter/wiki/GUI%2C-CLI-and-Formats-explained#exportguild)
tool. The tool allows to export the data in different formats, such as
JSON, HTML and CSV.
We expect users to have their data exported as JSON.
Closes: https://github.com/status-im/status-desktop/issues/6690
2022-07-13 09:33:53 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to parse discord message timestamp", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if extractedData.OldestMessageTimestamp == 0 || int(msgTime.Unix()) <= extractedData.OldestMessageTimestamp {
|
|
|
|
// Exported discord channel data already comes with `messages` being
|
|
|
|
// sorted, starting with the oldest, so we can safely rely on the first
|
|
|
|
// message
|
|
|
|
extractedData.OldestMessageTimestamp = int(msgTime.Unix())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return extractedData, errors
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) ExtractDiscordChannelsAndCategories(filesToImport []string) (*MessengerResponse, map[string]*discord.ImportError) {
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
|
|
|
extractedData, errs := m.ExtractDiscordDataFromImportFiles(filesToImport)
|
|
|
|
|
|
|
|
for _, category := range extractedData.Categories {
|
|
|
|
response.AddDiscordCategory(category)
|
|
|
|
}
|
|
|
|
for _, export := range extractedData.ExportedData {
|
|
|
|
response.AddDiscordChannel(&export.Channel)
|
|
|
|
}
|
|
|
|
if extractedData.OldestMessageTimestamp != 0 {
|
|
|
|
response.DiscordOldestMessageTimestamp = extractedData.OldestMessageTimestamp
|
|
|
|
}
|
|
|
|
|
|
|
|
return response, errs
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) RequestExtractDiscordChannelsAndCategories(filesToImport []string) {
|
|
|
|
go func() {
|
|
|
|
response, errors := m.ExtractDiscordChannelsAndCategories(filesToImport)
|
|
|
|
m.config.messengerSignalsHandler.DiscordCategoriesAndChannelsExtracted(
|
|
|
|
response.DiscordCategories,
|
|
|
|
response.DiscordChannels,
|
|
|
|
int64(response.DiscordOldestMessageTimestamp),
|
|
|
|
errors)
|
|
|
|
}()
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-10-25 16:32:21 +00:00
|
|
|
func (m *Messenger) saveDiscordAuthorIfNotExists(discordAuthor *protobuf.DiscordMessageAuthor) *discord.ImportError {
|
|
|
|
exists, err := m.persistence.HasDiscordMessageAuthor(discordAuthor.GetId())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to check if message author exists in database", zap.Error(err))
|
|
|
|
return discord.Error(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
if !exists {
|
|
|
|
err := m.persistence.SaveDiscordMessageAuthor(discordAuthor)
|
|
|
|
if err != nil {
|
|
|
|
return discord.Error(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) convertDiscordMessageTimeStamp(discordMessage *protobuf.DiscordMessage, timestamp time.Time) *discord.ImportError {
|
|
|
|
discordMessage.Timestamp = fmt.Sprintf("%d", timestamp.Unix())
|
|
|
|
|
|
|
|
if discordMessage.TimestampEdited != "" {
|
|
|
|
timestampEdited, err := time.Parse(discordTimestampLayout, discordMessage.TimestampEdited)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to parse discord message timestamp", zap.Error(err))
|
|
|
|
return discord.Warning(err.Error())
|
|
|
|
}
|
|
|
|
// Convert timestamp to unix timestamp
|
|
|
|
discordMessage.TimestampEdited = fmt.Sprintf("%d", timestampEdited.Unix())
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) createPinMessageFromDiscordMessage(message *common.Message, pinnedMessageID string, channelID string, community *communities.Community) (*common.PinMessage, *discord.ImportError) {
|
|
|
|
pinMessage := protobuf.PinMessage{
|
|
|
|
Clock: message.WhisperTimestamp,
|
|
|
|
MessageId: pinnedMessageID,
|
|
|
|
ChatId: message.LocalChatID,
|
|
|
|
MessageType: protobuf.MessageType_COMMUNITY_CHAT,
|
|
|
|
Pinned: true,
|
|
|
|
}
|
|
|
|
|
|
|
|
encodedPayload, err := proto.Marshal(&pinMessage)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to parse marshal pin message", zap.Error(err))
|
|
|
|
return nil, discord.Warning(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
wrappedPayload, err := v1protocol.WrapMessageV1(encodedPayload, protobuf.ApplicationMetadataMessage_PIN_MESSAGE, community.PrivateKey())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to wrap pin message", zap.Error(err))
|
|
|
|
return nil, discord.Warning(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
pinMessageToSave := &common.PinMessage{
|
|
|
|
ID: types.EncodeHex(v1protocol.MessageID(&community.PrivateKey().PublicKey, wrappedPayload)),
|
|
|
|
PinMessage: &pinMessage,
|
|
|
|
LocalChatID: channelID,
|
|
|
|
From: message.From,
|
|
|
|
SigPubKey: message.SigPubKey,
|
|
|
|
WhisperTimestamp: message.WhisperTimestamp,
|
|
|
|
}
|
|
|
|
|
|
|
|
return pinMessageToSave, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) generateSystemPinnedMessage(pinMessage *common.PinMessage, channel *Chat, clockAndTimestamp uint64, pinnedMessageID string) (*common.Message, *discord.ImportError) {
|
|
|
|
id, err := generatePinMessageNotificationID(&m.identity.PublicKey, pinMessage, channel)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Warn("failed to generate pin message notification ID",
|
|
|
|
zap.String("PinMessageId", pinMessage.ID))
|
|
|
|
return nil, discord.Warning(err.Error())
|
|
|
|
}
|
|
|
|
systemMessage := &common.Message{
|
|
|
|
ChatMessage: &protobuf.ChatMessage{
|
|
|
|
Clock: pinMessage.Clock,
|
|
|
|
Timestamp: clockAndTimestamp,
|
|
|
|
ChatId: channel.ID,
|
|
|
|
MessageType: pinMessage.MessageType,
|
|
|
|
ResponseTo: pinnedMessageID,
|
|
|
|
ContentType: protobuf.ChatMessage_SYSTEM_MESSAGE_PINNED_MESSAGE,
|
|
|
|
},
|
|
|
|
WhisperTimestamp: clockAndTimestamp,
|
|
|
|
ID: id,
|
|
|
|
LocalChatID: channel.ID,
|
|
|
|
From: pinMessage.From,
|
|
|
|
Seen: true,
|
|
|
|
}
|
|
|
|
|
|
|
|
return systemMessage, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// processDiscordMessages walks every message of an exported discord channel
// and converts it into the data needed to import it into the given community
// chat. It returns, in order: the chat messages to save (keyed by message ID,
// including generated system "pinned" messages), the pin messages to save,
// the author profiles whose avatar payloads still need fetching (keyed by
// author ID), and the message attachments to download.
//
// Per-message failures are reported as ImportMessagesTask errors on
// importProgress (pushed through progressUpdates) and the message is skipped;
// processing always continues with the next message.
//
// NOTE(review): each skip/progress path sends on progressUpdates; assumes a
// consumer is draining that channel, otherwise this blocks — confirm with
// caller.
func (m *Messenger) processDiscordMessages(discordChannel *discord.ExportedData,
	channel *Chat,
	importProgress *discord.ImportProgress,
	progressUpdates chan *discord.ImportProgress,
	fromDate int64,
	community *communities.Community) (
	map[string]*common.Message,
	[]*common.PinMessage,
	map[string]*protobuf.DiscordMessageAuthor,
	[]*protobuf.DiscordMessageAttachment) {

	messagesToSave := make(map[string]*common.Message, 0)
	pinMessagesToSave := make([]*common.PinMessage, 0)
	authorProfilesToSave := make(map[string]*protobuf.DiscordMessageAuthor, 0)
	messageAttachmentsToDownload := make([]*protobuf.DiscordMessageAttachment, 0)

	for _, discordMessage := range discordChannel.Messages {

		// Unparseable timestamps are a non-fatal, per-message warning.
		timestamp, err := time.Parse(discordTimestampLayout, discordMessage.Timestamp)
		if err != nil {
			m.logger.Error("failed to parse discord message timestamp", zap.Error(err))
			importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
			progressUpdates <- importProgress
			continue
		}

		// Skip messages older than the requested import window.
		if timestamp.Unix() < fromDate {
			progressUpdates <- importProgress
			continue
		}

		importErr := m.saveDiscordAuthorIfNotExists(discordMessage.Author)
		if importErr != nil {
			importProgress.AddTaskError(discord.ImportMessagesTask, importErr)
			progressUpdates <- importProgress
			continue
		}

		hasPayload, err := m.persistence.HasDiscordMessageAuthorImagePayload(discordMessage.Author.GetId())
		if err != nil {
			m.logger.Error("failed to check if message avatar payload exists in database", zap.Error(err))
			importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
			progressUpdates <- importProgress
			continue
		}

		// Queue the author profile so its avatar image gets downloaded later.
		if !hasPayload {
			authorProfilesToSave[discordMessage.Author.Id] = discordMessage.Author
		}

		// Convert timestamp to unix timestamp
		importErr = m.convertDiscordMessageTimeStamp(discordMessage, timestamp)
		if importErr != nil {
			importProgress.AddTaskError(discord.ImportMessagesTask, importErr)
			progressUpdates <- importProgress
			continue
		}

		// Link every attachment back to its message, then queue for download.
		for i := range discordMessage.Attachments {
			discordMessage.Attachments[i].MessageId = discordMessage.Id
		}
		messageAttachmentsToDownload = append(messageAttachmentsToDownload, discordMessage.Attachments...)

		// Status clocks/timestamps are milliseconds; the community key signs
		// imported messages.
		clockAndTimestamp := uint64(timestamp.Unix()) * 1000
		communityPubKey := community.PrivateKey().PublicKey

		chatMessage := protobuf.ChatMessage{
			Timestamp:   clockAndTimestamp,
			MessageType: protobuf.MessageType_COMMUNITY_CHAT,
			ContentType: protobuf.ChatMessage_DISCORD_MESSAGE,
			Clock:       clockAndTimestamp,
			ChatId:      channel.ID,
			Payload: &protobuf.ChatMessage_DiscordMessage{
				DiscordMessage: discordMessage,
			},
		}

		// Handle message replies
		// Only link the reply if the referenced message was already imported
		// in this pass (messages arrive oldest-first).
		if discordMessage.Type == string(discord.MessageTypeReply) && discordMessage.Reference != nil {
			repliedMessageID := community.IDString() + discordMessage.Reference.MessageId
			if _, exists := messagesToSave[repliedMessageID]; exists {
				chatMessage.ResponseTo = repliedMessageID
			}
		}

		messageToSave := &common.Message{
			// Prefix with the community ID to keep IDs unique across imports.
			ID:               community.IDString() + discordMessage.Id,
			WhisperTimestamp: clockAndTimestamp,
			From:             types.EncodeHex(crypto.FromECDSAPub(&communityPubKey)),
			Seen:             true,
			LocalChatID:      channel.ID,
			SigPubKey:        &communityPubKey,
			CommunityID:      community.IDString(),
			ChatMessage:      &chatMessage,
		}

		err = messageToSave.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
		if err != nil {
			m.logger.Error("failed to prepare message content", zap.Error(err))
			importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
			progressUpdates <- importProgress
			continue
		}

		// Handle pin messages
		// A "channel pinned" event is not stored as a regular message; instead
		// it produces a PinMessage plus a system chat message, but only when
		// the pinned target was itself imported.
		if discordMessage.Type == string(discord.MessageTypeChannelPinned) && discordMessage.Reference != nil {

			pinnedMessageID := community.IDString() + discordMessage.Reference.MessageId
			_, exists := messagesToSave[pinnedMessageID]
			if exists {
				pinMessageToSave, importErr := m.createPinMessageFromDiscordMessage(messageToSave, pinnedMessageID, channel.ID, community)
				if importErr != nil {
					importProgress.AddTaskError(discord.ImportMessagesTask, importErr)
					progressUpdates <- importProgress
					continue
				}

				pinMessagesToSave = append(pinMessagesToSave, pinMessageToSave)

				// Generate SystemMessagePinnedMessage
				systemMessage, importErr := m.generateSystemPinnedMessage(pinMessageToSave, channel, clockAndTimestamp, pinnedMessageID)
				if importErr != nil {
					importProgress.AddTaskError(discord.ImportMessagesTask, importErr)
					progressUpdates <- importProgress
					continue
				}

				messagesToSave[systemMessage.ID] = systemMessage
			}
		} else {
			messagesToSave[messageToSave.ID] = messageToSave
		}
	}

	return messagesToSave, pinMessagesToSave, authorProfilesToSave, messageAttachmentsToDownload
}
|
|
|
|
|
|
|
|
func (m *Messenger) RequestImportDiscordChannel(request *requests.ImportDiscordChannel) {
|
|
|
|
go func() {
|
|
|
|
totalImportChunkCount := len(request.FilesToImport)
|
|
|
|
|
|
|
|
progressUpdates := make(chan *discord.ImportProgress)
|
|
|
|
|
|
|
|
done := make(chan struct{})
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel := make(chan []string)
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
var newChat *Chat
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
m.startPublishImportChannelProgressInterval(progressUpdates, cancel, done)
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
importProgress := &discord.ImportProgress{}
|
|
|
|
importProgress.Init(totalImportChunkCount, []discord.ImportTask{
|
|
|
|
discord.ChannelsCreationTask,
|
|
|
|
discord.ImportMessagesTask,
|
|
|
|
discord.DownloadAssetsTask,
|
2023-11-07 10:44:10 +00:00
|
|
|
discord.InitCommunityTask,
|
2023-10-25 16:32:21 +00:00
|
|
|
})
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
importProgress.ChannelID = request.DiscordChannelID
|
|
|
|
importProgress.ChannelName = request.Name
|
2023-10-25 16:32:21 +00:00
|
|
|
// initial progress immediately
|
|
|
|
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
errmsg := fmt.Sprintf("Request validation failed: '%s'", err.Error())
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error(errmsg))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Here's 3 steps: Find the corrent channel in files, get the community and create the channel
|
2023-11-07 10:44:10 +00:00
|
|
|
progressValue := float32(0.3)
|
2023-10-25 16:32:21 +00:00
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
m.publishChannelImportProgress(importProgress)
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
community, err := m.GetCommunityByID(request.CommunityID)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
errmsg := fmt.Sprintf("Couldn't get the community '%s': '%s'", request.CommunityID, err.Error())
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error(errmsg))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if community == nil {
|
|
|
|
errmsg := fmt.Sprintf("Couldn't get the community by id: '%s'", request.CommunityID)
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error(errmsg))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
importProgress.UpdateTaskProgress(discord.ChannelsCreationTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
for i, importFile := range request.FilesToImport {
|
|
|
|
m.importingChannels[request.DiscordChannelID] = false
|
|
|
|
|
|
|
|
exportData, errs := m.ExtractDiscordDataFromImportFiles([]string{importFile})
|
|
|
|
if len(errs) > 0 {
|
|
|
|
for _, err := range errs {
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, err)
|
|
|
|
}
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var channel *discord.ExportedData
|
|
|
|
|
|
|
|
for _, ch := range exportData.ExportedData {
|
|
|
|
if ch.Channel.ID == request.DiscordChannelID {
|
|
|
|
channel = ch
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if channel == nil {
|
|
|
|
if i < len(request.FilesToImport)-1 {
|
|
|
|
// skip this file
|
|
|
|
continue
|
|
|
|
} else if i == len(request.FilesToImport)-1 {
|
|
|
|
errmsg := fmt.Sprintf("Couldn't find the target channel id in files: '%s'", request.DiscordChannelID)
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error(errmsg))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
2023-11-07 10:44:10 +00:00
|
|
|
progressValue := float32(0.6)
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
importProgress.UpdateTaskProgress(discord.ChannelsCreationTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-10-25 16:32:21 +00:00
|
|
|
if len(channel.Channel.ID) == 0 {
|
|
|
|
// skip this file and try to find in the next file
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
exists := false
|
|
|
|
|
|
|
|
for _, chatID := range community.ChatIDs() {
|
|
|
|
if strings.HasSuffix(chatID, request.DiscordChannelID) {
|
|
|
|
exists = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if !exists {
|
|
|
|
|
|
|
|
communityChat := &protobuf.CommunityChat{
|
|
|
|
Permissions: &protobuf.CommunityPermissions{
|
2023-10-25 17:33:49 +00:00
|
|
|
Access: protobuf.CommunityPermissions_AUTO_ACCEPT,
|
2023-10-25 16:32:21 +00:00
|
|
|
},
|
|
|
|
Identity: &protobuf.ChatIdentity{
|
|
|
|
DisplayName: request.Name,
|
|
|
|
Emoji: request.Emoji,
|
|
|
|
Description: request.Description,
|
|
|
|
Color: request.Color,
|
|
|
|
},
|
|
|
|
CategoryId: "",
|
|
|
|
}
|
|
|
|
|
|
|
|
changes, err := m.communitiesManager.CreateChat(request.CommunityID, communityChat, false, channel.Channel.ID)
|
|
|
|
if err != nil {
|
|
|
|
errmsg := err.Error()
|
|
|
|
if errors.Is(err, communities.ErrInvalidCommunityDescriptionDuplicatedName) {
|
|
|
|
errmsg = fmt.Sprintf("Couldn't create channel '%s': %s", communityChat.Identity.DisplayName, err.Error())
|
2023-11-07 10:44:10 +00:00
|
|
|
fmt.Println(errmsg)
|
2023-10-25 16:32:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error(errmsg))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
community = changes.Community
|
|
|
|
for chatID, chat := range changes.ChatsAdded {
|
|
|
|
newChat = CreateCommunityChat(request.CommunityID.String(), chatID, chat, m.getTimesource())
|
|
|
|
}
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
progressValue = float32(1.0)
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
importProgress.UpdateTaskProgress(discord.ChannelsCreationTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
} else {
|
|
|
|
// When channel with current discord id already exist we should skip import
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error("Channel already imported to this community"))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- []string{string(request.CommunityID), "", request.DiscordChannelID}
|
|
|
|
return
|
2023-10-25 16:32:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
messagesToSave, pinMessagesToSave, authorProfilesToSave, messageAttachmentsToDownload :=
|
|
|
|
m.processDiscordMessages(channel, newChat, importProgress, progressUpdates, request.From, community)
|
|
|
|
|
|
|
|
var discordMessages []*protobuf.DiscordMessage
|
|
|
|
for _, msg := range messagesToSave {
|
|
|
|
if msg.ChatMessage.ContentType == protobuf.ChatMessage_DISCORD_MESSAGE {
|
|
|
|
discordMessages = append(discordMessages, msg.GetDiscordMessage())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// We save these messages in chunks, so we don't block the database
|
|
|
|
// for a longer period of time
|
|
|
|
discordMessageChunks := chunkSlice(discordMessages, maxChunkSizeMessages)
|
|
|
|
chunksCount := len(discordMessageChunks)
|
|
|
|
|
|
|
|
for ii, msgs := range discordMessageChunks {
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d discord messages", ii+1, chunksCount, len(msgs)))
|
|
|
|
err := m.persistence.SaveDiscordMessages(msgs)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImportChannel(request.CommunityID.String(), newChat.ID)
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// We're multiplying `chunksCount` by `0.25` so we leave 25% for additional save operations
|
|
|
|
// 0.5 are the previous 50% of progress
|
|
|
|
currentCount := ii + 1
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 0.5+(float32(currentCount)/float32(chunksCount))*0.25)
|
|
|
|
importProgress.UpdateTaskProgress(discord.ImportMessagesTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
// We slow down the saving of message chunks to keep the database responsive
|
|
|
|
if currentCount < chunksCount {
|
|
|
|
time.Sleep(2 * time.Second)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get slice of all values in `messagesToSave` map
|
|
|
|
var messages = make([]*common.Message, 0, len(messagesToSave))
|
|
|
|
for _, msg := range messagesToSave {
|
|
|
|
messages = append(messages, msg)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Same as above, we save these messages in chunks so we don't block
|
|
|
|
// the database for a longer period of time
|
|
|
|
messageChunks := chunkSlice(messages, maxChunkSizeMessages)
|
|
|
|
chunksCount = len(messageChunks)
|
|
|
|
|
|
|
|
for ii, msgs := range messageChunks {
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d app messages", ii+1, chunksCount, len(msgs)))
|
|
|
|
err := m.persistence.SaveMessages(msgs)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImportChannel(request.CommunityID.String(), request.DiscordChannelID)
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// 0.75 are the previous 75% of progress, hence we multiply our chunk progress
|
|
|
|
// by 0.25
|
|
|
|
currentCount := ii + 1
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 0.75+(float32(currentCount)/float32(chunksCount))*0.25)
|
|
|
|
// progressValue := 0.75 + ((float32(currentCount) / float32(chunksCount)) * 0.25)
|
|
|
|
importProgress.UpdateTaskProgress(discord.ImportMessagesTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
// We slow down the saving of message chunks to keep the database responsive
|
|
|
|
if currentCount < chunksCount {
|
|
|
|
time.Sleep(2 * time.Second)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pinMessageChunks := chunkSlice(pinMessagesToSave, maxChunkSizeMessages)
|
|
|
|
for _, pinMsgs := range pinMessageChunks {
|
|
|
|
err := m.persistence.SavePinMessages(pinMsgs)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImportChannel(request.CommunityID.String(), request.DiscordChannelID)
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
totalAssetsCount := len(messageAttachmentsToDownload) + len(authorProfilesToSave)
|
|
|
|
var assetCounter discord.AssetCounter
|
|
|
|
|
|
|
|
var wg sync.WaitGroup
|
|
|
|
|
|
|
|
for id, author := range authorProfilesToSave {
|
|
|
|
wg.Add(1)
|
|
|
|
go func(id string, author *protobuf.DiscordMessageAuthor) {
|
|
|
|
defer wg.Done()
|
|
|
|
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("downloading asset %d/%d", assetCounter.Value()+1, totalAssetsCount))
|
|
|
|
imagePayload, err := discord.DownloadAvatarAsset(author.AvatarUrl)
|
|
|
|
if err != nil {
|
|
|
|
errmsg := fmt.Sprintf("Couldn't download profile avatar '%s': %s", author.AvatarUrl, err.Error())
|
|
|
|
importProgress.AddTaskError(
|
|
|
|
discord.DownloadAssetsTask,
|
|
|
|
discord.Warning(errmsg),
|
|
|
|
)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.persistence.UpdateDiscordMessageAuthorImage(author.Id, imagePayload)
|
|
|
|
if err != nil {
|
|
|
|
importProgress.AddTaskError(discord.DownloadAssetsTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
author.AvatarImagePayload = imagePayload
|
|
|
|
authorProfilesToSave[id] = author
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assetCounter.Increase()
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, (float32(assetCounter.Value())/float32(totalAssetsCount))*0.5)
|
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
}(id, author)
|
|
|
|
}
|
|
|
|
wg.Wait()
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
for idxRange := range gopart.Partition(len(messageAttachmentsToDownload), 100) {
|
|
|
|
attachments := messageAttachmentsToDownload[idxRange.Low:idxRange.High]
|
|
|
|
wg.Add(1)
|
|
|
|
go func(attachments []*protobuf.DiscordMessageAttachment) {
|
|
|
|
defer wg.Done()
|
|
|
|
for ii, attachment := range attachments {
|
|
|
|
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("downloading asset %d/%d", assetCounter.Value()+1, totalAssetsCount))
|
|
|
|
|
|
|
|
assetPayload, contentType, err := discord.DownloadAsset(attachment.Url)
|
|
|
|
if err != nil {
|
|
|
|
errmsg := fmt.Sprintf("Couldn't download message attachment '%s': %s", attachment.Url, err.Error())
|
|
|
|
importProgress.AddTaskError(
|
|
|
|
discord.DownloadAssetsTask,
|
|
|
|
discord.Warning(errmsg),
|
|
|
|
)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
attachment.Payload = assetPayload
|
|
|
|
attachment.ContentType = contentType
|
|
|
|
messageAttachmentsToDownload[ii] = attachment
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assetCounter.Increase()
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, (float32(assetCounter.Value())/float32(totalAssetsCount))*0.5)
|
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
}
|
|
|
|
}(attachments)
|
|
|
|
}
|
|
|
|
wg.Wait()
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
attachmentChunks := chunkAttachmentsByByteSize(messageAttachmentsToDownload, maxChunkSizeBytes)
|
|
|
|
chunksCount = len(attachmentChunks)
|
|
|
|
|
|
|
|
for ii, attachments := range attachmentChunks {
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d discord message attachments", ii+1, chunksCount, len(attachments)))
|
|
|
|
err := m.persistence.SaveDiscordMessageAttachments(attachments)
|
|
|
|
if err != nil {
|
2023-11-07 10:44:10 +00:00
|
|
|
importProgress.AddTaskError(discord.DownloadAssetsTask, discord.Warning(err.Error()))
|
2023-10-25 16:32:21 +00:00
|
|
|
importProgress.Stop()
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
continue
|
2023-10-25 16:32:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// 0.5 are the previous 50% of progress, hence we multiply our chunk progress
|
|
|
|
// by 0.5
|
|
|
|
currentCount := ii + 1
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 0.5+(float32(currentCount)/float32(chunksCount))*0.5)
|
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
// We slow down the saving of attachment chunks to keep the database responsive
|
|
|
|
if currentCount < chunksCount {
|
|
|
|
time.Sleep(2 * time.Second)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(attachmentChunks) == 0 {
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 1.0)
|
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
}
|
|
|
|
|
|
|
|
_, err := m.transport.JoinPublic(newChat.ID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to load filter for chat", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
wakuChatMessages, err := m.chatMessagesToWakuMessages(messages, community)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to convert chat messages into waku messages", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
wakuPinMessages, err := m.pinMessagesToWakuMessages(pinMessagesToSave, community)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to convert pin messages into waku messages", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
wakuMessages := append(wakuChatMessages, wakuPinMessages...)
|
|
|
|
|
|
|
|
topics, err := m.communitiesManager.GetCommunityChatsTopics(request.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to get community chat topics", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
startDate := time.Unix(int64(exportData.OldestMessageTimestamp), 0)
|
|
|
|
endDate := time.Now()
|
|
|
|
|
|
|
|
_, err = m.communitiesManager.CreateHistoryArchiveTorrentFromMessages(
|
|
|
|
request.CommunityID,
|
|
|
|
wakuMessages,
|
|
|
|
topics,
|
|
|
|
startDate,
|
|
|
|
endDate,
|
|
|
|
messageArchiveInterval,
|
|
|
|
community.Encrypted(),
|
|
|
|
)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to create history archive torrent", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
communitySettings, err := m.communitiesManager.GetCommunitySettingsByID(request.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("Failed to get community settings", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if m.torrentClientReady() && communitySettings.HistoryArchiveSupportEnabled {
|
|
|
|
|
|
|
|
err = m.communitiesManager.SeedHistoryArchiveTorrent(request.CommunityID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to seed history archive", zap.Error(err))
|
|
|
|
}
|
|
|
|
go m.communitiesManager.StartHistoryArchiveTasksInterval(community, messageArchiveInterval)
|
|
|
|
}
|
|
|
|
}
|
2023-11-07 10:44:10 +00:00
|
|
|
|
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, float32(0.0))
|
|
|
|
|
2023-10-25 16:32:21 +00:00
|
|
|
if m.DiscordImportChannelMarkedAsCancelled(request.DiscordChannelID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), newChat.ID, request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Chats need to be saved after the community has been published,
|
|
|
|
// hence we make this part of the `InitCommunityTask`
|
|
|
|
err = m.saveChat(newChat)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImportChannel(request.CommunityID.String(), request.DiscordChannelID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.Stop()
|
|
|
|
progressUpdates <- importProgress
|
2023-11-07 10:44:10 +00:00
|
|
|
cancel <- []string{string(request.CommunityID), request.DiscordChannelID}
|
2023-10-25 16:32:21 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, float32(1.0))
|
|
|
|
|
|
|
|
m.config.messengerSignalsHandler.DiscordChannelImportFinished(string(request.CommunityID), newChat.ID)
|
2023-10-25 16:32:21 +00:00
|
|
|
close(done)
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
func (m *Messenger) RequestImportDiscordCommunity(request *requests.ImportDiscordCommunity) {
|
|
|
|
go func() {
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
totalImportChunkCount := len(request.FilesToImport)
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates := make(chan *discord.ImportProgress)
|
|
|
|
done := make(chan struct{})
|
|
|
|
cancel := make(chan string)
|
|
|
|
m.startPublishImportProgressInterval(progressUpdates, cancel, done)
|
|
|
|
|
|
|
|
importProgress := &discord.ImportProgress{}
|
2023-01-26 12:52:43 +00:00
|
|
|
importProgress.Init(totalImportChunkCount, []discord.ImportTask{
|
2022-09-29 11:50:23 +00:00
|
|
|
discord.CommunityCreationTask,
|
|
|
|
discord.ChannelsCreationTask,
|
|
|
|
discord.ImportMessagesTask,
|
|
|
|
discord.DownloadAssetsTask,
|
|
|
|
discord.InitCommunityTask,
|
|
|
|
})
|
|
|
|
importProgress.CommunityName = request.Name
|
|
|
|
|
|
|
|
// initial progress immediately
|
|
|
|
m.publishImportProgress(importProgress)
|
|
|
|
|
|
|
|
createCommunityRequest := request.ToCreateCommunityRequest()
|
|
|
|
|
|
|
|
// We're calling `CreateCommunity` on `communitiesManager` directly, instead of
|
|
|
|
// using the `Messenger` API, so we get more control over when we set up filters,
|
|
|
|
// the community is published and data is being synced (we don't want the community
|
|
|
|
// to show up in clients while the import is in progress)
|
|
|
|
discordCommunity, err := m.communitiesManager.CreateCommunity(createCommunityRequest, false)
|
|
|
|
if err != nil {
|
|
|
|
importProgress.AddTaskError(discord.CommunityCreationTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.CommunityCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
communitySettings := communities.CommunitySettings{
|
|
|
|
CommunityID: discordCommunity.IDString(),
|
|
|
|
HistoryArchiveSupportEnabled: true,
|
|
|
|
}
|
|
|
|
err = m.communitiesManager.SaveCommunitySettings(communitySettings)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(discordCommunity.IDString())
|
|
|
|
importProgress.AddTaskError(discord.CommunityCreationTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.CommunityCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
communityID := discordCommunity.IDString()
|
|
|
|
|
|
|
|
// marking import as not cancelled
|
|
|
|
m.importingCommunities[communityID] = false
|
|
|
|
importProgress.CommunityID = communityID
|
2022-11-04 13:56:13 +00:00
|
|
|
importProgress.CommunityImages = make(map[string]images.IdentityImage)
|
|
|
|
|
|
|
|
imgs := discordCommunity.Images()
|
|
|
|
for t, i := range imgs {
|
|
|
|
importProgress.CommunityImages[t] = images.IdentityImage{Name: t, Payload: i.Payload}
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
importProgress.UpdateTaskProgress(discord.CommunityCreationTask, 1)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.CommunityCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
var chatsToSave []*Chat
|
2023-09-15 14:19:10 +00:00
|
|
|
createdChats := make(map[string]*Chat, 0)
|
2023-01-26 12:52:43 +00:00
|
|
|
processedChannelIds := make(map[string]string, 0)
|
2022-09-29 11:50:23 +00:00
|
|
|
processedCategoriesIds := make(map[string]string, 0)
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
for i, importFile := range request.FilesToImport {
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
exportData, errs := m.ExtractDiscordDataFromImportFiles([]string{importFile})
|
|
|
|
if len(errs) > 0 {
|
|
|
|
for _, err := range errs {
|
|
|
|
importProgress.AddTaskError(discord.CommunityCreationTask, err)
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
2023-01-26 12:52:43 +00:00
|
|
|
totalChannelsCount := len(exportData.ExportedData)
|
|
|
|
totalMessageCount := exportData.MessageCount
|
|
|
|
|
|
|
|
if totalChannelsCount == 0 || totalMessageCount == 0 {
|
|
|
|
importError := discord.Error(fmt.Errorf("No channel to import messages from in file: %s", importFile).Error())
|
|
|
|
if totalMessageCount == 0 {
|
|
|
|
importError.Message = fmt.Errorf("No messages to import in file: %s", importFile).Error()
|
|
|
|
}
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, importError)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
importProgress.CurrentChunk = i + 1
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// We actually only ever receive a single category
|
|
|
|
// from `exportData` but since it's a map, we still have to
|
|
|
|
// iterate over it to access its values
|
|
|
|
for _, category := range exportData.Categories {
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
categories := discordCommunity.Categories()
|
|
|
|
exists := false
|
|
|
|
for catID := range categories {
|
|
|
|
if strings.HasSuffix(catID, category.ID) {
|
|
|
|
exists = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if !exists {
|
|
|
|
createCommunityCategoryRequest := &requests.CreateCommunityCategory{
|
|
|
|
CommunityID: discordCommunity.ID(),
|
|
|
|
CategoryName: category.Name,
|
|
|
|
ThirdPartyID: category.ID,
|
|
|
|
ChatIDs: make([]string, 0),
|
|
|
|
}
|
|
|
|
// We call `CreateCategory` on `communitiesManager` directly so we can control
|
|
|
|
// whether or not the community update should be published (it should not until the
|
|
|
|
// import has finished)
|
|
|
|
communityWithCategories, changes, err := m.communitiesManager.CreateCategory(createCommunityCategoryRequest, false)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.CommunityCreationTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.CommunityCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
discordCommunity = communityWithCategories
|
|
|
|
// This looks like we keep overriding the same field but there's
|
|
|
|
// only one `CategoriesAdded` change at this point.
|
|
|
|
for _, addedCategory := range changes.CategoriesAdded {
|
|
|
|
processedCategoriesIds[category.ID] = addedCategory.CategoryId
|
|
|
|
}
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, (float32(1) / 2))
|
|
|
|
importProgress.UpdateTaskProgress(discord.ChannelsCreationTask, progressValue)
|
|
|
|
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.CommunityCreationTask)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
2023-01-26 12:52:43 +00:00
|
|
|
cancel <- communityID
|
2022-09-29 11:50:23 +00:00
|
|
|
return
|
|
|
|
}
|
2023-01-26 12:52:43 +00:00
|
|
|
|
|
|
|
messagesToSave := make(map[string]*common.Message, 0)
|
|
|
|
pinMessagesToSave := make([]*common.PinMessage, 0)
|
|
|
|
authorProfilesToSave := make(map[string]*protobuf.DiscordMessageAuthor, 0)
|
|
|
|
messageAttachmentsToDownload := make([]*protobuf.DiscordMessageAttachment, 0)
|
|
|
|
|
|
|
|
// Save to access the first item here as we process
|
|
|
|
// exported data by files which only holds a single channel
|
|
|
|
channel := exportData.ExportedData[0]
|
|
|
|
chatIDs := discordCommunity.ChatIDs()
|
|
|
|
|
|
|
|
exists := false
|
|
|
|
for _, chatID := range chatIDs {
|
|
|
|
if strings.HasSuffix(chatID, channel.Channel.ID) {
|
|
|
|
exists = true
|
|
|
|
break
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if !exists {
|
|
|
|
communityChat := &protobuf.CommunityChat{
|
|
|
|
Permissions: &protobuf.CommunityPermissions{
|
2023-10-25 13:03:26 +00:00
|
|
|
Access: protobuf.CommunityPermissions_AUTO_ACCEPT,
|
2023-01-26 12:52:43 +00:00
|
|
|
},
|
|
|
|
Identity: &protobuf.ChatIdentity{
|
|
|
|
DisplayName: channel.Channel.Name,
|
|
|
|
Emoji: "",
|
|
|
|
Description: channel.Channel.Description,
|
|
|
|
Color: discordCommunity.Color(),
|
|
|
|
},
|
|
|
|
CategoryId: processedCategoriesIds[channel.Channel.CategoryID],
|
|
|
|
}
|
|
|
|
|
|
|
|
// We call `CreateChat` on `communitiesManager` directly to get more control
|
|
|
|
// over whether we want to publish the updated community description.
|
2023-07-18 15:06:12 +00:00
|
|
|
changes, err := m.communitiesManager.CreateChat(discordCommunity.ID(), communityChat, false, channel.Channel.ID)
|
2023-01-26 12:52:43 +00:00
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
errmsg := err.Error()
|
2023-05-24 14:25:10 +00:00
|
|
|
if errors.Is(err, communities.ErrInvalidCommunityDescriptionDuplicatedName) {
|
2023-01-26 12:52:43 +00:00
|
|
|
errmsg = fmt.Sprintf("Couldn't create channel '%s': %s", communityChat.Identity.DisplayName, err.Error())
|
|
|
|
}
|
|
|
|
importProgress.AddTaskError(discord.ChannelsCreationTask, discord.Error(errmsg))
|
|
|
|
importProgress.StopTask(discord.ChannelsCreationTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
2023-07-18 15:06:12 +00:00
|
|
|
discordCommunity = changes.Community
|
2023-01-26 12:52:43 +00:00
|
|
|
|
|
|
|
// This looks like we keep overriding the chat id value
|
|
|
|
// as we iterate over `ChatsAdded`, however at this point we
|
|
|
|
// know there was only a single such change (and it's a map)
|
|
|
|
for chatID, chat := range changes.ChatsAdded {
|
|
|
|
c := CreateCommunityChat(communityID, chatID, chat, m.getTimesource())
|
2023-09-15 14:19:10 +00:00
|
|
|
createdChats[c.ID] = c
|
2023-01-26 12:52:43 +00:00
|
|
|
chatsToSave = append(chatsToSave, c)
|
|
|
|
processedChannelIds[channel.Channel.ID] = c.ID
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
progressValue = calculateProgress(i+1, totalImportChunkCount, 1)
|
2022-09-29 11:50:23 +00:00
|
|
|
importProgress.UpdateTaskProgress(discord.ChannelsCreationTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
for ii, discordMessage := range channel.Messages {
|
2022-09-29 11:50:23 +00:00
|
|
|
|
|
|
|
timestamp, err := time.Parse(discordTimestampLayout, discordMessage.Timestamp)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to parse discord message timestamp", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
|
2023-01-26 12:52:43 +00:00
|
|
|
progressUpdates <- importProgress
|
2022-09-29 11:50:23 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if timestamp.Unix() < request.From {
|
2023-01-26 12:52:43 +00:00
|
|
|
progressUpdates <- importProgress
|
2022-09-29 11:50:23 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
exists, err := m.persistence.HasDiscordMessageAuthor(discordMessage.Author.GetId())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to check if message author exists in database", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
2023-01-26 12:52:43 +00:00
|
|
|
progressUpdates <- importProgress
|
2022-09-29 11:50:23 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if !exists {
|
|
|
|
err := m.persistence.SaveDiscordMessageAuthor(discordMessage.Author)
|
|
|
|
if err != nil {
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
2023-01-26 12:52:43 +00:00
|
|
|
progressUpdates <- importProgress
|
2022-09-29 11:50:23 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
hasPayload, err := m.persistence.HasDiscordMessageAuthorImagePayload(discordMessage.Author.GetId())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to check if message avatar payload exists in database", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
2023-01-26 12:52:43 +00:00
|
|
|
progressUpdates <- importProgress
|
2022-09-29 11:50:23 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if !hasPayload {
|
|
|
|
authorProfilesToSave[discordMessage.Author.Id] = discordMessage.Author
|
|
|
|
}
|
|
|
|
|
|
|
|
// Convert timestamp to unix timestamp
|
|
|
|
discordMessage.Timestamp = fmt.Sprintf("%d", timestamp.Unix())
|
|
|
|
|
|
|
|
if discordMessage.TimestampEdited != "" {
|
|
|
|
timestampEdited, err := time.Parse(discordTimestampLayout, discordMessage.TimestampEdited)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to parse discord message timestamp", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
// Convert timestamp to unix timestamp
|
|
|
|
discordMessage.TimestampEdited = fmt.Sprintf("%d", timestampEdited.Unix())
|
|
|
|
}
|
|
|
|
|
|
|
|
for i := range discordMessage.Attachments {
|
|
|
|
discordMessage.Attachments[i].MessageId = discordMessage.Id
|
|
|
|
}
|
|
|
|
messageAttachmentsToDownload = append(messageAttachmentsToDownload, discordMessage.Attachments...)
|
|
|
|
|
|
|
|
clockAndTimestamp := uint64(timestamp.Unix()) * 1000
|
|
|
|
communityPubKey := discordCommunity.PrivateKey().PublicKey
|
|
|
|
|
|
|
|
chatMessage := protobuf.ChatMessage{
|
|
|
|
Timestamp: clockAndTimestamp,
|
|
|
|
MessageType: protobuf.MessageType_COMMUNITY_CHAT,
|
|
|
|
ContentType: protobuf.ChatMessage_DISCORD_MESSAGE,
|
|
|
|
Clock: clockAndTimestamp,
|
|
|
|
ChatId: processedChannelIds[channel.Channel.ID],
|
|
|
|
Payload: &protobuf.ChatMessage_DiscordMessage{
|
|
|
|
DiscordMessage: discordMessage,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
// Handle message replies
|
|
|
|
if discordMessage.Type == string(discord.MessageTypeReply) && discordMessage.Reference != nil {
|
2023-09-15 14:19:10 +00:00
|
|
|
repliedMessageID := communityID + discordMessage.Reference.MessageId
|
|
|
|
if _, exists := messagesToSave[repliedMessageID]; exists {
|
|
|
|
chatMessage.ResponseTo = repliedMessageID
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
messageToSave := &common.Message{
|
|
|
|
ID: communityID + discordMessage.Id,
|
|
|
|
WhisperTimestamp: clockAndTimestamp,
|
|
|
|
From: types.EncodeHex(crypto.FromECDSAPub(&communityPubKey)),
|
|
|
|
Seen: true,
|
|
|
|
LocalChatID: processedChannelIds[channel.Channel.ID],
|
|
|
|
SigPubKey: &communityPubKey,
|
|
|
|
CommunityID: communityID,
|
2023-08-18 11:39:59 +00:00
|
|
|
ChatMessage: &chatMessage,
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2022-10-28 13:35:15 +00:00
|
|
|
err = messageToSave.PrepareContent(common.PubkeyToHex(&m.identity.PublicKey))
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to prepare message content", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
2023-01-26 12:52:43 +00:00
|
|
|
progressUpdates <- importProgress
|
2022-10-28 13:35:15 +00:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
// Handle pin messages
|
|
|
|
if discordMessage.Type == string(discord.MessageTypeChannelPinned) && discordMessage.Reference != nil {
|
|
|
|
|
2023-09-15 14:19:10 +00:00
|
|
|
pinnedMessageID := communityID + discordMessage.Reference.MessageId
|
|
|
|
_, exists := messagesToSave[pinnedMessageID]
|
2022-09-29 11:50:23 +00:00
|
|
|
if exists {
|
|
|
|
pinMessage := protobuf.PinMessage{
|
|
|
|
Clock: messageToSave.WhisperTimestamp,
|
2023-09-15 14:19:10 +00:00
|
|
|
MessageId: pinnedMessageID,
|
2022-09-29 11:50:23 +00:00
|
|
|
ChatId: messageToSave.LocalChatID,
|
|
|
|
MessageType: protobuf.MessageType_COMMUNITY_CHAT,
|
|
|
|
Pinned: true,
|
|
|
|
}
|
|
|
|
|
|
|
|
encodedPayload, err := proto.Marshal(&pinMessage)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to parse marshal pin message", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
wrappedPayload, err := v1protocol.WrapMessageV1(encodedPayload, protobuf.ApplicationMetadataMessage_PIN_MESSAGE, discordCommunity.PrivateKey())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to wrap pin message", zap.Error(err))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
pinMessageToSave := common.PinMessage{
|
2023-09-15 14:19:10 +00:00
|
|
|
ID: types.EncodeHex(v1protocol.MessageID(&communityPubKey, wrappedPayload)),
|
2023-08-18 11:39:59 +00:00
|
|
|
PinMessage: &pinMessage,
|
2022-09-29 11:50:23 +00:00
|
|
|
LocalChatID: processedChannelIds[channel.Channel.ID],
|
|
|
|
From: messageToSave.From,
|
|
|
|
SigPubKey: messageToSave.SigPubKey,
|
|
|
|
WhisperTimestamp: messageToSave.WhisperTimestamp,
|
|
|
|
}
|
|
|
|
|
|
|
|
pinMessagesToSave = append(pinMessagesToSave, &pinMessageToSave)
|
2023-09-15 14:19:10 +00:00
|
|
|
|
|
|
|
// Generate SystemMessagePinnedMessage
|
|
|
|
|
|
|
|
chat, ok := createdChats[pinMessageToSave.LocalChatID]
|
|
|
|
if !ok {
|
|
|
|
err := errors.New("failed to get chat for pin message")
|
|
|
|
m.logger.Warn(err.Error(),
|
|
|
|
zap.String("PinMessageId", pinMessageToSave.ID),
|
|
|
|
zap.String("ChatID", pinMessageToSave.LocalChatID))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
id, err := generatePinMessageNotificationID(&m.identity.PublicKey, &pinMessageToSave, chat)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Warn("failed to generate pin message notification ID",
|
|
|
|
zap.String("PinMessageId", pinMessageToSave.ID))
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
systemMessage := &common.Message{
|
|
|
|
ChatMessage: &protobuf.ChatMessage{
|
|
|
|
Clock: pinMessageToSave.Clock,
|
|
|
|
Timestamp: clockAndTimestamp,
|
|
|
|
ChatId: chat.ID,
|
|
|
|
MessageType: pinMessageToSave.MessageType,
|
|
|
|
ResponseTo: pinMessage.MessageId,
|
|
|
|
ContentType: protobuf.ChatMessage_SYSTEM_MESSAGE_PINNED_MESSAGE,
|
|
|
|
},
|
|
|
|
WhisperTimestamp: clockAndTimestamp,
|
|
|
|
ID: id,
|
|
|
|
LocalChatID: chat.ID,
|
|
|
|
From: messageToSave.From,
|
|
|
|
Seen: true,
|
|
|
|
}
|
|
|
|
|
|
|
|
messagesToSave[systemMessage.ID] = systemMessage
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
} else {
|
2023-09-15 14:19:10 +00:00
|
|
|
messagesToSave[messageToSave.ID] = messageToSave
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, float32(ii+1)/float32(len(channel.Messages))*0.5)
|
|
|
|
importProgress.UpdateTaskProgress(discord.ImportMessagesTask, progressValue)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
var discordMessages []*protobuf.DiscordMessage
|
|
|
|
for _, msg := range messagesToSave {
|
2023-09-15 14:19:10 +00:00
|
|
|
if msg.ChatMessage.ContentType == protobuf.ChatMessage_DISCORD_MESSAGE {
|
|
|
|
discordMessages = append(discordMessages, msg.GetDiscordMessage())
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-09-15 14:19:10 +00:00
|
|
|
// We save these messages in chunks, so we don't block the database
|
2023-01-26 12:52:43 +00:00
|
|
|
// for a longer period of time
|
|
|
|
discordMessageChunks := chunkSlice(discordMessages, maxChunkSizeMessages)
|
|
|
|
chunksCount := len(discordMessageChunks)
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
for ii, msgs := range discordMessageChunks {
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d discord messages", ii+1, chunksCount, len(msgs)))
|
|
|
|
err = m.persistence.SaveDiscordMessages(msgs)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// We're multiplying `chunksCount` by `0.25` so we leave 25% for additional save operations
|
|
|
|
// 0.5 are the previous 50% of progress
|
|
|
|
currentCount := ii + 1
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 0.5+(float32(currentCount)/float32(chunksCount))*0.25)
|
|
|
|
importProgress.UpdateTaskProgress(discord.ImportMessagesTask, progressValue)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// We slow down the saving of message chunks to keep the database responsive
|
|
|
|
if currentCount < chunksCount {
|
|
|
|
time.Sleep(2 * time.Second)
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-09-15 14:19:10 +00:00
|
|
|
// Get slice of all values in `messagesToSave` map
|
|
|
|
|
|
|
|
var messages = make([]*common.Message, 0, len(messagesToSave))
|
2023-01-26 12:52:43 +00:00
|
|
|
for _, msg := range messagesToSave {
|
|
|
|
messages = append(messages, msg)
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// Same as above, we save these messages in chunks so we don't block
|
|
|
|
// the database for a longer period of time
|
|
|
|
messageChunks := chunkSlice(messages, maxChunkSizeMessages)
|
|
|
|
chunksCount = len(messageChunks)
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
for ii, msgs := range messageChunks {
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d app messages", ii+1, chunksCount, len(msgs)))
|
|
|
|
err = m.persistence.SaveMessages(msgs)
|
2022-09-29 11:50:23 +00:00
|
|
|
if err != nil {
|
2023-01-26 12:52:43 +00:00
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
2023-01-26 12:52:43 +00:00
|
|
|
cancel <- communityID
|
2022-09-29 11:50:23 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// 0.75 are the previous 75% of progress, hence we multiply our chunk progress
|
|
|
|
// by 0.25
|
|
|
|
currentCount := ii + 1
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 0.75+(float32(currentCount)/float32(chunksCount))*0.25)
|
|
|
|
// progressValue := 0.75 + ((float32(currentCount) / float32(chunksCount)) * 0.25)
|
|
|
|
importProgress.UpdateTaskProgress(discord.ImportMessagesTask, progressValue)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// We slow down the saving of message chunks to keep the database responsive
|
|
|
|
if currentCount < chunksCount {
|
|
|
|
time.Sleep(2 * time.Second)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pinMessageChunks := chunkSlice(pinMessagesToSave, maxChunkSizeMessages)
|
|
|
|
for _, pinMsgs := range pinMessageChunks {
|
|
|
|
err = m.persistence.SavePinMessages(pinMsgs)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.ImportMessagesTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.ImportMessagesTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
totalAssetsCount := len(messageAttachmentsToDownload) + len(authorProfilesToSave)
|
|
|
|
var assetCounter discord.AssetCounter
|
|
|
|
|
|
|
|
var wg sync.WaitGroup
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
for id, author := range authorProfilesToSave {
|
|
|
|
wg.Add(1)
|
|
|
|
go func(id string, author *protobuf.DiscordMessageAuthor) {
|
|
|
|
defer wg.Done()
|
|
|
|
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("downloading asset %d/%d", assetCounter.Value()+1, totalAssetsCount))
|
|
|
|
imagePayload, err := discord.DownloadAvatarAsset(author.AvatarUrl)
|
2022-09-29 11:50:23 +00:00
|
|
|
if err != nil {
|
2023-01-26 12:52:43 +00:00
|
|
|
errmsg := fmt.Sprintf("Couldn't download profile avatar '%s': %s", author.AvatarUrl, err.Error())
|
2022-09-29 11:50:23 +00:00
|
|
|
importProgress.AddTaskError(
|
|
|
|
discord.DownloadAssetsTask,
|
|
|
|
discord.Warning(errmsg),
|
|
|
|
)
|
|
|
|
progressUpdates <- importProgress
|
2023-01-26 12:52:43 +00:00
|
|
|
return
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
err = m.persistence.UpdateDiscordMessageAuthorImage(author.Id, imagePayload)
|
|
|
|
if err != nil {
|
|
|
|
importProgress.AddTaskError(discord.DownloadAssetsTask, discord.Warning(err.Error()))
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
author.AvatarImagePayload = imagePayload
|
|
|
|
authorProfilesToSave[id] = author
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if m.DiscordImportMarkedAsCancelled(discordCommunity.IDString()) {
|
2022-09-29 11:50:23 +00:00
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
2023-01-26 12:52:43 +00:00
|
|
|
cancel <- discordCommunity.IDString()
|
2022-09-29 11:50:23 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
assetCounter.Increase()
|
2023-09-15 14:19:10 +00:00
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, (float32(assetCounter.Value())/float32(totalAssetsCount))*0.5)
|
2023-01-26 12:52:43 +00:00
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
}(id, author)
|
|
|
|
}
|
|
|
|
wg.Wait()
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
2022-09-29 11:50:23 +00:00
|
|
|
progressUpdates <- importProgress
|
2023-01-26 12:52:43 +00:00
|
|
|
cancel <- communityID
|
2022-09-29 11:50:23 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
for idxRange := range gopart.Partition(len(messageAttachmentsToDownload), 100) {
|
|
|
|
attachments := messageAttachmentsToDownload[idxRange.Low:idxRange.High]
|
|
|
|
wg.Add(1)
|
|
|
|
go func(attachments []*protobuf.DiscordMessageAttachment) {
|
|
|
|
defer wg.Done()
|
|
|
|
for ii, attachment := range attachments {
|
|
|
|
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("downloading asset %d/%d", assetCounter.Value()+1, totalAssetsCount))
|
|
|
|
|
|
|
|
assetPayload, contentType, err := discord.DownloadAsset(attachment.Url)
|
|
|
|
if err != nil {
|
|
|
|
errmsg := fmt.Sprintf("Couldn't download message attachment '%s': %s", attachment.Url, err.Error())
|
|
|
|
importProgress.AddTaskError(
|
|
|
|
discord.DownloadAssetsTask,
|
|
|
|
discord.Warning(errmsg),
|
|
|
|
)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
attachment.Payload = assetPayload
|
|
|
|
attachment.ContentType = contentType
|
|
|
|
messageAttachmentsToDownload[ii] = attachment
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assetCounter.Increase()
|
2023-09-15 14:19:10 +00:00
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, (float32(assetCounter.Value())/float32(totalAssetsCount))*0.5)
|
2023-01-26 12:52:43 +00:00
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
}
|
|
|
|
}(attachments)
|
|
|
|
}
|
|
|
|
wg.Wait()
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
attachmentChunks := chunkAttachmentsByByteSize(messageAttachmentsToDownload, maxChunkSizeBytes)
|
|
|
|
chunksCount = len(attachmentChunks)
|
|
|
|
|
|
|
|
for ii, attachments := range attachmentChunks {
|
|
|
|
m.communitiesManager.LogStdout(fmt.Sprintf("saving %d/%d chunk with %d discord message attachments", ii+1, chunksCount, len(attachments)))
|
|
|
|
err = m.persistence.SaveDiscordMessageAttachments(attachments)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.DownloadAssetsTask, discord.Error(err.Error()))
|
|
|
|
importProgress.Stop()
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.DownloadAssetsTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// 0.5 are the previous 50% of progress, hence we multiply our chunk progress
|
|
|
|
// by 0.5
|
|
|
|
currentCount := ii + 1
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 0.5+(float32(currentCount)/float32(chunksCount))*0.5)
|
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
// We slow down the saving of attachment chunks to keep the database responsive
|
|
|
|
if currentCount < chunksCount {
|
|
|
|
time.Sleep(2 * time.Second)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-09-15 14:19:10 +00:00
|
|
|
if len(attachmentChunks) == 0 {
|
|
|
|
progressValue := calculateProgress(i+1, totalImportChunkCount, 1.0)
|
|
|
|
importProgress.UpdateTaskProgress(discord.DownloadAssetsTask, progressValue)
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
_, err := m.transport.JoinPublic(processedChannelIds[channel.Channel.ID])
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to load filter for chat", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
wakuChatMessages, err := m.chatMessagesToWakuMessages(messages, discordCommunity)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to convert chat messages into waku messages", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
wakuPinMessages, err := m.pinMessagesToWakuMessages(pinMessagesToSave, discordCommunity)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to convert pin messages into waku messages", zap.Error(err))
|
|
|
|
continue
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
wakuMessages := append(wakuChatMessages, wakuPinMessages...)
|
|
|
|
|
|
|
|
topics, err := m.communitiesManager.GetCommunityChatsTopics(discordCommunity.ID())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to get community chat topics", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
startDate := time.Unix(int64(exportData.OldestMessageTimestamp), 0)
|
|
|
|
endDate := time.Now()
|
|
|
|
|
|
|
|
_, err = m.communitiesManager.CreateHistoryArchiveTorrentFromMessages(
|
|
|
|
discordCommunity.ID(),
|
|
|
|
wakuMessages,
|
|
|
|
topics,
|
|
|
|
startDate,
|
|
|
|
endDate,
|
|
|
|
messageArchiveInterval,
|
|
|
|
discordCommunity.Encrypted(),
|
|
|
|
)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to create history archive torrent", zap.Error(err))
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.torrentClientReady() && communitySettings.HistoryArchiveSupportEnabled {
|
|
|
|
|
|
|
|
err = m.communitiesManager.SeedHistoryArchiveTorrent(discordCommunity.ID())
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to seed history archive", zap.Error(err))
|
|
|
|
}
|
|
|
|
go m.communitiesManager.StartHistoryArchiveTasksInterval(discordCommunity, messageArchiveInterval)
|
|
|
|
}
|
|
|
|
}
|
2022-09-29 11:50:23 +00:00
|
|
|
|
|
|
|
err = m.publishOrg(discordCommunity)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.Stop()
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Chats need to be saved after the community has been published,
|
|
|
|
// hence we make this part of the `InitCommunityTask`
|
|
|
|
err = m.saveChats(chatsToSave)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.Stop()
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
2023-01-26 12:52:43 +00:00
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, 0.15)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Init the community filter so we can receive messages on the community
|
2023-05-22 21:38:02 +00:00
|
|
|
_, err = m.transport.InitCommunityFilters([]transport.CommunityFilterToInitialize{{
|
2023-10-12 19:21:49 +00:00
|
|
|
Shard: discordCommunity.Shard().TransportShard(),
|
|
|
|
PrivKey: discordCommunity.PrivateKey(),
|
2023-05-22 21:38:02 +00:00
|
|
|
}})
|
2022-09-29 11:50:23 +00:00
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, 0.25)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
_, err = m.transport.InitPublicFilters(discordCommunity.DefaultFilters())
|
2022-09-29 11:50:23 +00:00
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, 0.5)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
filters := m.transport.Filters()
|
2022-09-29 11:50:23 +00:00
|
|
|
_, err = m.scheduleSyncFilters(filters)
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, 0.75)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.reregisterForPushNotifications()
|
|
|
|
if err != nil {
|
|
|
|
m.cleanUpImport(communityID)
|
|
|
|
importProgress.AddTaskError(discord.InitCommunityTask, discord.Error(err.Error()))
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
return
|
|
|
|
}
|
|
|
|
importProgress.UpdateTaskProgress(discord.InitCommunityTask, 1)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
|
|
|
|
if m.DiscordImportMarkedAsCancelled(communityID) {
|
|
|
|
importProgress.StopTask(discord.InitCommunityTask)
|
|
|
|
progressUpdates <- importProgress
|
|
|
|
cancel <- communityID
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
m.config.messengerSignalsHandler.DiscordCommunityImportFinished(communityID)
|
|
|
|
close(done)
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
2023-01-26 12:52:43 +00:00
|
|
|
// calculateProgress maps the completion ratio of the current import chunk
// onto the overall progress across all chunks. i is the 1-based index of the
// chunk being processed, t is the total chunk count, and currentProgress is
// the completion ratio (0..1) within the current chunk.
func calculateProgress(i int, t int, currentProgress float32) float32 {
	// Share of overall progress contributed by the current chunk.
	chunkShare := float32(1) / float32(t) * currentProgress
	if i <= 1 {
		return chunkShare
	}
	// Progress of all fully completed chunks plus the current chunk's share.
	return float32(i-1)/float32(t) + chunkShare
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
// MarkDiscordCommunityImportAsCancelled flags the discord import of the given
// community as cancelled. The import routine polls this flag between steps
// (via DiscordImportMarkedAsCancelled) and aborts when it is set.
func (m *Messenger) MarkDiscordCommunityImportAsCancelled(communityID string) {
	m.importingCommunities[communityID] = true
}
|
|
|
|
|
2023-10-25 16:32:21 +00:00
|
|
|
// MarkDiscordChannelImportAsCancelled flags the discord import of a single
// channel as cancelled. Checked via DiscordImportChannelMarkedAsCancelled by
// the channel import routine.
func (m *Messenger) MarkDiscordChannelImportAsCancelled(channelID string) {
	m.importingChannels[channelID] = true
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
func (m *Messenger) DiscordImportMarkedAsCancelled(communityID string) bool {
|
|
|
|
cancelled, exists := m.importingCommunities[communityID]
|
|
|
|
return exists && cancelled
|
|
|
|
}
|
|
|
|
|
2023-10-25 16:32:21 +00:00
|
|
|
func (m *Messenger) DiscordImportChannelMarkedAsCancelled(channelID string) bool {
|
|
|
|
cancelled, exists := m.importingChannels[channelID]
|
|
|
|
return exists && cancelled
|
|
|
|
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
func (m *Messenger) cleanUpImports() {
|
|
|
|
for id := range m.importingCommunities {
|
|
|
|
m.cleanUpImport(id)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *Messenger) cleanUpImport(communityID string) {
|
|
|
|
community, err := m.communitiesManager.GetByIDString(communityID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("clean up failed, couldn't delete community", zap.Error(err))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
deleteErr := m.communitiesManager.DeleteCommunity(community.ID())
|
|
|
|
if deleteErr != nil {
|
|
|
|
m.logger.Error("clean up failed, couldn't delete community", zap.Error(deleteErr))
|
|
|
|
}
|
|
|
|
deleteErr = m.persistence.DeleteMessagesByCommunityID(community.IDString())
|
|
|
|
if deleteErr != nil {
|
|
|
|
m.logger.Error("clean up failed, couldn't delete community messages", zap.Error(deleteErr))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-25 16:32:21 +00:00
|
|
|
func (m *Messenger) cleanUpImportChannel(communityID string, channelID string) {
|
2023-11-07 10:44:10 +00:00
|
|
|
_, err := m.DeleteCommunityChat(types.HexBytes(communityID), channelID)
|
2023-10-25 16:32:21 +00:00
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("clean up failed, couldn't delete community chat", zap.Error(err))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
err = m.persistence.DeleteMessagesByChatID(channelID)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("clean up failed, couldn't delete community chat messages", zap.Error(err))
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
// publishImportProgress emits a community-import progress signal to
// clients via the messenger signals handler.
func (m *Messenger) publishImportProgress(progress *discord.ImportProgress) {
	m.config.messengerSignalsHandler.DiscordCommunityImportProgress(progress)
}
|
|
|
|
|
2023-11-07 10:44:10 +00:00
|
|
|
// publishChannelImportProgress emits a channel-import progress signal to
// clients via the messenger signals handler.
func (m *Messenger) publishChannelImportProgress(progress *discord.ImportProgress) {
	m.config.messengerSignalsHandler.DiscordChannelImportProgress(progress)
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
// startPublishImportProgressInterval starts a goroutine that publishes
// the most recent Discord community import progress (received on c)
// every two seconds, until the import finishes (done), is cancelled
// (cancel carries the community ID to clean up), or the messenger shuts
// down (m.quit).
func (m *Messenger) startPublishImportProgressInterval(c chan *discord.ImportProgress, cancel chan string, done chan struct{}) {

	var currentProgress *discord.ImportProgress

	go func() {
		ticker := time.NewTicker(2 * time.Second)
		defer ticker.Stop()

		for {
			select {
			case <-ticker.C:
				// Periodically publish the latest known progress; stop
				// once the import has been marked as stopped.
				if currentProgress != nil {
					m.publishImportProgress(currentProgress)
					if currentProgress.Stopped {
						return
					}
				}
			case progressUpdate := <-c:
				// Remember the newest progress; it is published on the
				// next tick rather than immediately.
				currentProgress = progressUpdate
			case <-done:
				// Flush the final progress before exiting.
				if currentProgress != nil {
					m.publishImportProgress(currentProgress)
				}
				return
			case communityID := <-cancel:
				if currentProgress != nil {
					m.publishImportProgress(currentProgress)
				}
				// Remove partially imported data and notify clients of
				// the cancellation.
				m.cleanUpImport(communityID)
				m.config.messengerSignalsHandler.DiscordCommunityImportCancelled(communityID)
				return
			case <-m.quit:
				m.cleanUpImports()
				return
			}
		}
	}()
}
|
|
|
|
|
|
|
|
func (m *Messenger) startPublishImportChannelProgressInterval(c chan *discord.ImportProgress, cancel chan []string, done chan struct{}) {
|
|
|
|
|
|
|
|
var currentProgress *discord.ImportProgress
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
ticker := time.NewTicker(2 * time.Second)
|
|
|
|
defer ticker.Stop()
|
|
|
|
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case <-ticker.C:
|
|
|
|
if currentProgress != nil {
|
|
|
|
m.publishChannelImportProgress(currentProgress)
|
|
|
|
if currentProgress.Stopped {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
case progressUpdate := <-c:
|
|
|
|
currentProgress = progressUpdate
|
|
|
|
case <-done:
|
|
|
|
if currentProgress != nil {
|
|
|
|
m.publishChannelImportProgress(currentProgress)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
case ids := <-cancel:
|
|
|
|
if currentProgress != nil {
|
|
|
|
m.publishImportProgress(currentProgress)
|
|
|
|
}
|
|
|
|
if len(ids) > 0 {
|
|
|
|
communityID := ids[0]
|
|
|
|
channelID := ids[1]
|
|
|
|
discordChannelID := ids[2]
|
|
|
|
m.cleanUpImportChannel(communityID, channelID)
|
|
|
|
m.config.messengerSignalsHandler.DiscordChannelImportCancelled(discordChannelID)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
case <-m.quit:
|
|
|
|
m.cleanUpImports()
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
2022-09-29 11:50:23 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// pinMessagesToWakuMessages wraps each imported pin message into a waku
// message signed with the community's private key and addressed to the
// chat's content topic, so imported Discord pins can be published on
// behalf of the community.
func (m *Messenger) pinMessagesToWakuMessages(pinMessages []*common.PinMessage, c *communities.Community) ([]*types.Message, error) {
	wakuMessages := make([]*types.Message, 0)
	for _, msg := range pinMessages {

		filter := m.transport.FilterByChatID(msg.LocalChatID)
		encodedPayload, err := proto.Marshal(msg.GetProtobuf())
		if err != nil {
			return nil, err
		}
		// Sign the payload with the community key so members accept it.
		wrappedPayload, err := v1protocol.WrapMessageV1(encodedPayload, protobuf.ApplicationMetadataMessage_PIN_MESSAGE, c.PrivateKey())
		if err != nil {
			return nil, err
		}

		// Hash over community ID + payload; note this differs from
		// chatMessagesToWakuMessages, which hashes msg.ID only.
		hash := crypto.Keccak256Hash(append([]byte(c.IDString()), wrappedPayload...))
		wakuMessage := &types.Message{
			Sig:          crypto.FromECDSAPub(&c.PrivateKey().PublicKey),
			Timestamp:    uint32(msg.WhisperTimestamp / 1000), // ms -> s
			Topic:        filter.ContentTopic,
			Payload:      wrappedPayload,
			Padding:      []byte{1},
			Hash:         hash[:],
			ThirdPartyID: msg.ID, // CommunityID + DiscordMessageID
		}
		wakuMessages = append(wakuMessages, wakuMessage)
	}

	return wakuMessages, nil
}
|
|
|
|
|
2022-12-09 14:26:12 +00:00
|
|
|
func (m *Messenger) torrentClientReady() bool {
|
|
|
|
// Simply checking for `torrentConfig.Enabled` isn't enough
|
|
|
|
// as there's a possiblity that the torrent client couldn't
|
|
|
|
// be instantiated (for example in case of port conflicts)
|
|
|
|
return m.config.torrentConfig != nil &&
|
|
|
|
m.config.torrentConfig.Enabled &&
|
|
|
|
m.communitiesManager.TorrentClientStarted()
|
|
|
|
}
|
|
|
|
|
2022-09-29 11:50:23 +00:00
|
|
|
// chatMessagesToWakuMessages wraps each imported chat message into a
// waku message signed with the community's private key and addressed to
// the chat's content topic, so imported Discord messages can be
// published on behalf of the community.
func (m *Messenger) chatMessagesToWakuMessages(chatMessages []*common.Message, c *communities.Community) ([]*types.Message, error) {
	wakuMessages := make([]*types.Message, 0)
	for _, msg := range chatMessages {

		filter := m.transport.FilterByChatID(msg.LocalChatID)
		encodedPayload, err := proto.Marshal(msg.GetProtobuf())
		if err != nil {
			return nil, err
		}

		// Sign the payload with the community key so members accept it.
		wrappedPayload, err := v1protocol.WrapMessageV1(encodedPayload, protobuf.ApplicationMetadataMessage_CHAT_MESSAGE, c.PrivateKey())
		if err != nil {
			return nil, err
		}

		// Hash over msg.ID only; note this differs from
		// pinMessagesToWakuMessages, which hashes community ID + payload.
		hash := crypto.Keccak256Hash([]byte(msg.ID))
		wakuMessage := &types.Message{
			Sig:          crypto.FromECDSAPub(&c.PrivateKey().PublicKey),
			Timestamp:    uint32(msg.WhisperTimestamp / 1000), // ms -> s
			Topic:        filter.ContentTopic,
			Payload:      wrappedPayload,
			Padding:      []byte{1},
			Hash:         hash[:],
			ThirdPartyID: msg.ID, // CommunityID + DiscordMessageID
		}
		wakuMessages = append(wakuMessages, wakuMessage)
	}

	return wakuMessages, nil
}
|
2023-01-27 13:27:24 +00:00
|
|
|
|
2023-09-21 12:40:58 +00:00
|
|
|
// GetCommunityToken returns the community token at the given chain and
// contract address for the community, from the local database.
func (m *Messenger) GetCommunityToken(communityID string, chainID int, address string) (*token.CommunityToken, error) {
	return m.communitiesManager.GetCommunityToken(communityID, chainID, address)
}
|
|
|
|
|
2023-07-07 13:03:37 +00:00
|
|
|
// GetCommunityTokens returns all community tokens stored locally for
// the given community.
func (m *Messenger) GetCommunityTokens(communityID string) ([]*token.CommunityToken, error) {
	return m.communitiesManager.GetCommunityTokens(communityID)
}
|
|
|
|
|
2023-07-07 13:03:37 +00:00
|
|
|
// GetAllCommunityTokens returns every community token stored locally,
// across all communities.
func (m *Messenger) GetAllCommunityTokens() ([]*token.CommunityToken, error) {
	return m.communitiesManager.GetAllCommunityTokens()
}
|
|
|
|
|
2023-07-07 13:03:37 +00:00
|
|
|
// SaveCommunityToken stores the token (with an optional cropped image)
// in the local database only; it does not touch the community
// description. Call AddCommunityToken afterwards — once the deploy tx
// is confirmed — to publish the token to members.
func (m *Messenger) SaveCommunityToken(token *token.CommunityToken, croppedImage *images.CroppedImage) (*token.CommunityToken, error) {
	return m.communitiesManager.SaveCommunityToken(token, croppedImage)
}
|
|
|
|
|
|
|
|
// AddCommunityToken adds a previously saved community token (see
// SaveCommunityToken) to the community description and syncs the
// community so the token is published to members. It fails if the
// token was not saved first.
func (m *Messenger) AddCommunityToken(communityID string, chainID int, address string) error {
	// The token must already exist in the local database.
	communityToken, err := m.communitiesManager.GetCommunityToken(communityID, chainID, address)
	if err != nil {
		return err
	}

	clock, _ := m.getLastClockWithRelatedChat()
	community, err := m.communitiesManager.AddCommunityToken(communityToken, clock)
	if err != nil {
		return err
	}

	// Propagate the updated community to paired devices.
	err = m.syncCommunity(context.Background(), community, m.dispatchMessage)
	if err != nil {
		return err
	}

	return nil
}
|
|
|
|
|
2023-07-07 13:03:37 +00:00
|
|
|
// UpdateCommunityTokenState updates the deploy state of the community
// token identified by chain and contract address in the local database.
func (m *Messenger) UpdateCommunityTokenState(chainID int, contractAddress string, deployState token.DeployState) error {
	return m.communitiesManager.UpdateCommunityTokenState(chainID, contractAddress, deployState)
}
|
|
|
|
|
2023-07-18 08:33:45 +00:00
|
|
|
// UpdateCommunityTokenAddress replaces the contract address of a stored
// community token on the given chain.
func (m *Messenger) UpdateCommunityTokenAddress(chainID int, oldContractAddress string, newContractAddress string) error {
	return m.communitiesManager.UpdateCommunityTokenAddress(chainID, oldContractAddress, newContractAddress)
}
|
|
|
|
|
2023-06-21 11:20:43 +00:00
|
|
|
// UpdateCommunityTokenSupply updates the stored supply of the community
// token identified by chain and contract address.
func (m *Messenger) UpdateCommunityTokenSupply(chainID int, contractAddress string, supply *bigint.BigInt) error {
	return m.communitiesManager.UpdateCommunityTokenSupply(chainID, contractAddress, supply)
}
|
2023-05-04 22:17:54 +00:00
|
|
|
|
2023-07-24 13:04:11 +00:00
|
|
|
// RemoveCommunityToken deletes the community token identified by chain
// and contract address from the local database.
func (m *Messenger) RemoveCommunityToken(chainID int, contractAddress string) error {
	return m.communitiesManager.RemoveCommunityToken(chainID, contractAddress)
}
|
|
|
|
|
2023-04-25 12:00:17 +00:00
|
|
|
func (m *Messenger) CheckPermissionsToJoinCommunity(request *requests.CheckPermissionToJoinCommunity) (*communities.CheckPermissionToJoinResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
var addresses []gethcommon.Address
|
|
|
|
|
2023-08-15 17:26:23 +00:00
|
|
|
if len(request.Addresses) == 0 {
|
|
|
|
accounts, err := m.settings.GetActiveAccounts()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, a := range accounts {
|
|
|
|
addresses = append(addresses, gethcommon.HexToAddress(a.Address.Hex()))
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
for _, v := range request.Addresses {
|
|
|
|
addresses = append(addresses, gethcommon.HexToAddress(v))
|
|
|
|
}
|
2023-04-25 12:00:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return m.communitiesManager.CheckPermissionToJoin(request.CommunityID, addresses)
|
2023-05-04 22:17:54 +00:00
|
|
|
}
|
2023-06-01 20:02:34 +00:00
|
|
|
|
2023-06-12 15:17:37 +00:00
|
|
|
func (m *Messenger) CheckCommunityChannelPermissions(request *requests.CheckCommunityChannelPermissions) (*communities.CheckChannelPermissionsResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var addresses []gethcommon.Address
|
|
|
|
|
2023-08-18 19:50:23 +00:00
|
|
|
if len(request.Addresses) == 0 {
|
|
|
|
accounts, err := m.settings.GetActiveAccounts()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, a := range accounts {
|
|
|
|
addresses = append(addresses, gethcommon.HexToAddress(a.Address.Hex()))
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
for _, v := range request.Addresses {
|
|
|
|
addresses = append(addresses, gethcommon.HexToAddress(v))
|
|
|
|
}
|
2023-06-12 15:17:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return m.communitiesManager.CheckChannelPermissions(request.CommunityID, request.ChatID, addresses)
|
|
|
|
}
|
|
|
|
|
2023-06-13 12:50:15 +00:00
|
|
|
func (m *Messenger) CheckAllCommunityChannelsPermissions(request *requests.CheckAllCommunityChannelsPermissions) (*communities.CheckAllChannelsPermissionsResponse, error) {
|
|
|
|
if err := request.Validate(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var addresses []gethcommon.Address
|
|
|
|
|
2023-08-18 19:50:23 +00:00
|
|
|
if len(request.Addresses) == 0 {
|
|
|
|
accounts, err := m.settings.GetActiveAccounts()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, a := range accounts {
|
|
|
|
addresses = append(addresses, gethcommon.HexToAddress(a.Address.Hex()))
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
for _, v := range request.Addresses {
|
|
|
|
addresses = append(addresses, gethcommon.HexToAddress(v))
|
|
|
|
}
|
2023-06-13 12:50:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return m.communitiesManager.CheckAllChannelsPermissions(request.CommunityID, addresses)
|
|
|
|
}
|
|
|
|
|
2023-06-22 06:54:58 +00:00
|
|
|
// GetCommunityCheckChannelPermissionResponses returns the cached
// channel-permission check results for the given community.
func (m *Messenger) GetCommunityCheckChannelPermissionResponses(communityID types.HexBytes) (*communities.CheckAllChannelsPermissionsResponse, error) {
	return m.communitiesManager.GetCheckChannelPermissionResponses(communityID)
}
|
|
|
|
|
2023-06-01 20:02:34 +00:00
|
|
|
// chunkSlice splits slice into consecutive chunks of at most chunkSize
// elements; the last chunk may be shorter. A nil or empty slice yields
// nil. A non-positive chunkSize also yields nil — previously a
// chunkSize of 0 looped forever appending empty chunks.
//
// The constraint is relaxed from comparable to any: chunking never
// compares elements, so every element type is supported (backward
// compatible — all comparable types satisfy any).
func chunkSlice[T any](slice []T, chunkSize int) [][]T {
	if chunkSize <= 0 {
		return nil
	}

	var chunks [][]T
	for i := 0; i < len(slice); i += chunkSize {
		end := i + chunkSize

		// necessary check to avoid slicing beyond
		// slice capacity
		if end > len(slice) {
			end = len(slice)
		}

		chunks = append(chunks, slice[i:end])
	}

	return chunks
}
|
|
|
|
|
|
|
|
func chunkAttachmentsByByteSize(slice []*protobuf.DiscordMessageAttachment, maxFileSizeBytes uint64) [][]*protobuf.DiscordMessageAttachment {
|
|
|
|
var chunks [][]*protobuf.DiscordMessageAttachment
|
|
|
|
|
|
|
|
currentChunkSize := uint64(0)
|
|
|
|
currentChunk := make([]*protobuf.DiscordMessageAttachment, 0)
|
|
|
|
|
|
|
|
for i, attachment := range slice {
|
|
|
|
payloadBytes := attachment.GetFileSizeBytes()
|
|
|
|
if currentChunkSize+payloadBytes > maxFileSizeBytes && len(currentChunk) > 0 {
|
|
|
|
chunks = append(chunks, currentChunk)
|
|
|
|
currentChunk = make([]*protobuf.DiscordMessageAttachment, 0)
|
|
|
|
currentChunkSize = uint64(0)
|
|
|
|
}
|
|
|
|
currentChunk = append(currentChunk, attachment)
|
|
|
|
currentChunkSize = currentChunkSize + payloadBytes
|
|
|
|
if i == len(slice)-1 {
|
|
|
|
chunks = append(chunks, currentChunk)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return chunks
|
|
|
|
}
|
2023-05-11 10:27:05 +00:00
|
|
|
|
2023-05-24 11:01:28 +00:00
|
|
|
// startCommunityRekeyLoop creates a 5-minute ticker and starts a routine that attempts to rekey every community every tick
func (m *Messenger) startCommunityRekeyLoop() {
	logger := m.logger.Named("CommunityRekeyLoop")
	// The tick period is 1/10 of the configured rekey interval, floored
	// at a nanosecond for very small intervals (presumably so tests with
	// tiny intervals fire immediately — confirm), defaulting to 5
	// minutes when no interval is configured.
	var d time.Duration
	if m.communitiesManager.RekeyInterval != 0 {
		if m.communitiesManager.RekeyInterval < 10 {
			d = time.Nanosecond
		} else {
			d = m.communitiesManager.RekeyInterval / 10
		}
	} else {
		d = 5 * time.Minute
	}

	ticker := time.NewTicker(d)
	go func() {
		for {
			select {
			case <-ticker.C:
				m.rekeyCommunities(logger)
			case <-m.quit:
				ticker.Stop()
				logger.Debug("CommunityRekeyLoop stopped")
				return
			}
		}
	}()
}
|
|
|
|
|
2023-10-26 15:09:43 +00:00
|
|
|
// rekeyCommunities loops over controlled communities and rekeys if rekey interval elapsed
func (m *Messenger) rekeyCommunities(logger *zap.Logger) {
	// TODO in future have a community level rki rather than a global rki
	// Default to 48h when no interval is configured.
	var rekeyInterval time.Duration
	if m.communitiesManager.RekeyInterval == 0 {
		rekeyInterval = 48 * time.Hour
	} else {
		rekeyInterval = m.communitiesManager.RekeyInterval
	}

	// shouldRekey reports whether the current hash ratchet key for the
	// given group is older than the rekey interval. Lookup failures are
	// logged and treated as "do not rekey".
	shouldRekey := func(hashRatchetGroupID []byte) bool {
		key, err := m.sender.GetCurrentKeyForGroup(hashRatchetGroupID)
		if err != nil {
			logger.Error("failed to get current hash ratchet key", zap.Error(err))
			return false
		}

		keyDistributedAt := time.UnixMilli(int64(key.Timestamp))
		return time.Now().After(keyDistributedAt.Add(rekeyInterval))
	}

	controlledCommunities, err := m.ControlledCommunities()
	if err != nil {
		logger.Error("error getting communities", zap.Error(err))
		return
	}

	for _, c := range controlledCommunities {
		// Collect rekey actions for the community itself and each of
		// its encrypted channels, then distribute them in one pass.
		keyActions := &communities.EncryptionKeyActions{
			CommunityKeyAction: communities.EncryptionKeyAction{},
			ChannelKeysActions: map[string]communities.EncryptionKeyAction{},
		}

		if c.Encrypted() && shouldRekey(c.ID()) {
			keyActions.CommunityKeyAction = communities.EncryptionKeyAction{
				ActionType: communities.EncryptionKeyRekey,
				Members:    c.Members(),
			}
		}

		// Channel ratchet groups are keyed by community ID string +
		// channel ID.
		for channelID, channel := range c.Chats() {
			if c.ChannelEncrypted(channelID) && shouldRekey([]byte(c.IDString()+channelID)) {
				keyActions.ChannelKeysActions[channelID] = communities.EncryptionKeyAction{
					ActionType: communities.EncryptionKeyRekey,
					Members:    channel.Members,
				}
			}
		}

		err = m.communitiesKeyDistributor.Distribute(c, keyActions)
		if err != nil {
			logger.Error("failed to rekey community", zap.Error(err), zap.String("community ID", c.IDString()))
			continue
		}
	}
}
|
2023-08-14 08:59:02 +00:00
|
|
|
|
|
|
|
// GetCommunityMembersForWalletAddresses maps each wallet address
// revealed for the given chain to the contact of the community member
// who revealed it. Members without a matching contact entry are logged
// and skipped.
func (m *Messenger) GetCommunityMembersForWalletAddresses(communityID types.HexBytes, chainID uint64) (map[string]*Contact, error) {
	community, err := m.communitiesManager.GetByID(communityID)
	if err != nil {
		return nil, err
	}

	membersForAddresses := map[string]*Contact{}

	for _, memberPubKey := range community.GetMemberPubkeys() {
		memberPubKeyStr := common.PubkeyToHex(memberPubKey)
		revealedAccounts, err := m.communitiesManager.GetRevealedAddresses(communityID, memberPubKeyStr)
		if err != nil {
			return nil, err
		}
		for _, revealedAccount := range revealedAccounts {
			// Only count addresses revealed for the requested chain.
			if !slices.Contains(revealedAccount.ChainIds, chainID) {
				continue
			}

			contact, ok := m.allContacts.Load(memberPubKeyStr)
			if ok {
				membersForAddresses[revealedAccount.Address] = contact
			} else {
				m.logger.Error("community member is not a contact", zap.String("contact ID", memberPubKeyStr))
			}
		}
	}

	return membersForAddresses, nil
}
|
2023-07-05 17:35:22 +00:00
|
|
|
|
|
|
|
// processCommunityChanges applies membership changes collected while
// handling received messages: joining communities the member was
// accepted into, and leaving communities the member was kicked from
// (raising an activity-center notification for the latter). Errors on
// one change are logged and do not block processing of the others. The
// accumulated CommunityChanges are cleared afterwards.
func (m *Messenger) processCommunityChanges(messageState *ReceivedMessageState) {
	// Process any community changes
	for _, changes := range messageState.Response.CommunityChanges {
		if changes.ShouldMemberJoin {
			response, err := m.joinCommunity(context.TODO(), changes.Community.ID(), false)
			if err != nil {
				m.logger.Error("cannot join community", zap.Error(err))
				continue
			}

			if err := messageState.Response.Merge(response); err != nil {
				m.logger.Error("cannot merge join community response", zap.Error(err))
				continue
			}

		} else if changes.MemberKicked {
			response, err := m.kickedOutOfCommunity(changes.Community.ID())
			if err != nil {
				m.logger.Error("cannot leave community", zap.Error(err))
				continue
			}

			if err := messageState.Response.Merge(response); err != nil {
				// NOTE(review): message says "join" but this is the
				// kicked branch — looks copy-pasted; confirm before
				// changing the string.
				m.logger.Error("cannot merge join community response", zap.Error(err))
				continue
			}

			// Activity Center notification
			now := m.GetCurrentTimeInMillis()
			notification := &ActivityCenterNotification{
				ID:          types.FromHex(uuid.New().String()),
				Type:        ActivityCenterNotificationTypeCommunityKicked,
				Timestamp:   now,
				CommunityID: changes.Community.IDString(),
				Read:        false,
				UpdatedAt:   now,
			}

			err = m.addActivityCenterNotification(response, notification, nil)
			if err != nil {
				m.logger.Error("failed to save notification", zap.Error(err))
				continue
			}

			// Merge again so the notification added to response above
			// reaches the caller's aggregated response.
			if err := messageState.Response.Merge(response); err != nil {
				m.logger.Error("cannot merge notification response", zap.Error(err))
				continue
			}
		}
	}
	// Clean up as not used by clients currently
	messageState.Response.CommunityChanges = nil
}
|
|
|
|
|
2023-10-31 14:20:40 +00:00
|
|
|
// PromoteSelfToControlNode fetches the latest community description
// (waiting for a network response, falling back to the database),
// promotes this device to the community's control node, generates
// auto-approval join requests for members removed by the ownership
// change, and syncs the updated community to paired devices.
func (m *Messenger) PromoteSelfToControlNode(communityID types.HexBytes) (*MessengerResponse, error) {
	clock, _ := m.getLastClockWithRelatedChat()

	community, err := m.FetchCommunity(&FetchCommunityRequest{
		CommunityKey:    types.EncodeHex(communityID),
		Shard:           nil,
		TryDatabase:     true,
		WaitForResponse: true,
	})

	if err != nil {
		return nil, err
	}

	changes, err := m.communitiesManager.PromoteSelfToControlNode(community, clock)
	if err != nil {
		return nil, err
	}

	// Members removed during the ownership change get requests-to-join
	// that will be auto-approved under the new ownership.
	if len(changes.MembersRemoved) > 0 {
		err = m.communitiesManager.GenerateRequestsToJoinForAutoApprovalOnNewOwnership(changes.Community.ID(), changes.MembersRemoved)
		if err != nil {
			return nil, err
		}
	}

	err = m.syncCommunity(context.Background(), changes.Community, m.dispatchMessage)
	if err != nil {
		return nil, err
	}

	var response MessengerResponse
	response.AddCommunity(changes.Community)
	response.CommunityChanges = []*communities.CommunityChanges{changes}

	return &response, nil
}
|
2023-10-13 08:08:40 +00:00
|
|
|
|
|
|
|
func (m *Messenger) CreateResponseWithACNotification(communityID string, acType ActivityCenterType, isRead bool) (*MessengerResponse, error) {
|
|
|
|
// Activity center notification
|
|
|
|
notification := &ActivityCenterNotification{
|
|
|
|
ID: types.FromHex(uuid.New().String()),
|
|
|
|
Type: acType,
|
|
|
|
Timestamp: m.getTimesource().GetCurrentTime(),
|
|
|
|
CommunityID: communityID,
|
|
|
|
Read: isRead,
|
|
|
|
Deleted: false,
|
|
|
|
UpdatedAt: m.GetCurrentTimeInMillis(),
|
|
|
|
}
|
|
|
|
|
|
|
|
response := &MessengerResponse{}
|
|
|
|
|
|
|
|
err := m.addActivityCenterNotification(response, notification, nil)
|
|
|
|
if err != nil {
|
|
|
|
m.logger.Error("failed to save notification", zap.Error(err))
|
|
|
|
return response, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return response, nil
|
|
|
|
}
|