fix: avoid duplicating UnfurlURLs requests (#12687)

Igor Sirotin, 2023-11-17 15:42:24 +00:00 (committed by GitHub)
commit 03d4fbcc48, parent ba30afd202
2 changed files with 13 additions and 2 deletions


@@ -219,6 +219,7 @@ proc setText*(self: Controller, text: string, unfurlNewUrls: bool) =
   if self.getLinkPreviewEnabled() and len(newUrls) > 0:
     self.messageService.asyncUnfurlUrls(newUrls)
+    self.linkPreviewCache.markAsRequested(newUrls)
 
 proc linkPreviewsFromCache*(self: Controller, urls: seq[string]): Table[string, LinkPreview] =
   return self.linkPreviewCache.linkPreviews(urls)
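
The ordering above is what prevents duplicate requests: only URLs not yet cached or requested are unfurled, and they are marked as requested immediately after the call, so a repeated setText with the same text fires no second request. Below is a minimal sketch of that ordering, using hypothetical stand-in names (setTextStub, asyncUnfurlUrlsStub, unfurlCalls) rather than the real controller and service:

import sets

var unfurlCalls = 0                     # counts how often the (stubbed) service is hit
var requested = initHashSet[string]()   # plays the role of the cache's requested-URL set

proc asyncUnfurlUrlsStub(urls: seq[string]) =
  if urls.len > 0:
    inc unfurlCalls

proc setTextStub(urls: seq[string]) =
  # keep only URLs that were never requested before
  var newUrls: seq[string]
  for url in urls:
    if url notin requested:
      newUrls.add(url)
  if newUrls.len > 0:
    asyncUnfurlUrlsStub(newUrls)
    for url in newUrls:
      requested.incl(url)               # mark right after firing the request

when isMainModule:
  setTextStub(@["https://example.org"])
  setTextStub(@["https://example.org"]) # same text again: no second request
  doAssert unfurlCalls == 1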


@@ -1,13 +1,15 @@
-import tables
+import tables, sets
 import ../../../../../../app_service/service/message/dto/link_preview
 
 type
   LinkPreviewCache* = ref object
     cache: Table[string, LinkPreview]
+    requests: HashSet[string]
 
 proc newLinkPreiewCache*(): LinkPreviewCache =
   result = LinkPreviewCache()
   result.cache = initTable[string, LinkPreview]()
+  result.requests = initHashSet[string]()
 
 # Returns a table of link previews for given `urls`.
 # If url is not found in cache, it's skipped
@@ -31,14 +33,22 @@ proc add*(self: LinkPreviewCache, linkPreviews: Table[string, LinkPreview]): seq
   for key, value in pairs(linkPreviews):
     result.add(key)
     self.cache[key] = value
+    self.requests.excl(key)
 
+# Marks the URL as requested.
+# This should be used to avoid duplicating unfurl requests.
+proc markAsRequested*(self: LinkPreviewCache, urls: seq[string]) =
+  for url in urls:
+    self.requests.incl(url)
+
 # Goes through given `urls` and returns a list
 # of urls not found in cache.
 proc unknownUrls*(self: LinkPreviewCache, urls: seq[string]): seq[string] =
   for url in urls:
-    if not self.cache.hasKey(url):
+    if not self.cache.hasKey(url) and not self.requests.contains(url):
       result.add(url)
 
 # Clears link preview cache
 proc clear*(self: LinkPreviewCache) =
   self.cache.clear()
+  self.requests.clear()
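
Taken together, the cache now distinguishes three states per URL: unknown, requested (in flight), and cached. A minimal, self-contained sketch of that lifecycle follows; the proc names mirror the module above, but this is an illustration written for this note, not the module itself:

import tables, sets

type
  LinkPreview = object
    url: string
  LinkPreviewCache = ref object
    cache: Table[string, LinkPreview]
    requests: HashSet[string]

proc newLinkPreviewCache(): LinkPreviewCache =
  LinkPreviewCache(cache: initTable[string, LinkPreview](),
                   requests: initHashSet[string]())

proc unknownUrls(self: LinkPreviewCache, urls: seq[string]): seq[string] =
  for url in urls:
    if not self.cache.hasKey(url) and not self.requests.contains(url):
      result.add(url)

proc markAsRequested(self: LinkPreviewCache, urls: seq[string]) =
  for url in urls:
    self.requests.incl(url)

proc add(self: LinkPreviewCache, linkPreviews: Table[string, LinkPreview]) =
  for key, value in pairs(linkPreviews):
    self.cache[key] = value
    self.requests.excl(key)    # the URL is no longer "in flight" once cached

when isMainModule:
  let c = newLinkPreviewCache()
  let urls = @["https://a.example", "https://b.example"]
  doAssert c.unknownUrls(urls) == urls           # nothing cached or requested yet
  c.markAsRequested(urls)
  doAssert c.unknownUrls(urls).len == 0          # in flight: not requested again
  c.add({"https://a.example": LinkPreview(url: "https://a.example")}.toTable)
  doAssert c.unknownUrls(urls).len == 0          # a: cached, b: still in flight

Dropping the key from requests in add means a URL only blocks duplicate unfurl calls while its request is actually in flight; once the preview lands in the cache, the hasKey check takes over.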