refactor(databases): Creation of the databases folder to keep the logic for sqlite and postgres (#1811)

* Refactoring in sqlite and postgres. Creation of the databases folder.
This commit is contained in:
Ivan Folgueira Bande 2023-06-22 11:27:40 +02:00 committed by GitHub
parent a4da87bb8c
commit a44d4bfbcd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 281 additions and 244 deletions

View File

@ -19,7 +19,7 @@ import
metrics/chronos_httpserver
import
../../waku/common/utils/nat,
../../waku/common/sqlite,
../../waku/common/databases/db_sqlite,
../../waku/v2/waku_core,
../../waku/v2/waku_node,
../../waku/v2/node/waku_metrics,

View File

@ -1,16 +0,0 @@
{.used.}
import
std/[strutils, os],
stew/results,
testutils/unittests,
chronos
import
../../waku/common/postgres/asyncpool,
../../waku/common/postgres/pg_asyncpool_opts
suite "Async pool":
asyncTest "Create connection pool":
## TODO: extend unit tests
var pgOpts = PgAsyncPoolOptions.init()

View File

@ -5,8 +5,7 @@ import
stew/results,
testutils/unittests
import
../../waku/common/sqlite/database,
../../waku/common/sqlite/migrations {.all.}
../../waku/common/databases/db_sqlite {.all.}
proc newTestDatabase(): SqliteDatabase =

View File

@ -17,7 +17,7 @@ import
libp2p/protocols/pubsub/rpc/message,
libp2p/peerid
import
../../waku/common/sqlite,
../../waku/common/databases/db_sqlite,
../../waku/v2/node/peer_manager/peer_manager,
../../waku/v2/node/peer_manager/peer_store/waku_peer_storage,
../../waku/v2/waku_node,

View File

@ -4,7 +4,7 @@ import
testutils/unittests,
libp2p/crypto/crypto
import
../../waku/common/sqlite,
../../waku/common/databases/db_sqlite,
../../waku/v2/node/peer_manager/peer_manager,
../../waku/v2/node/peer_manager/peer_store/waku_peer_storage,
./testlib/wakucore

View File

@ -5,7 +5,7 @@ import
testutils/unittests,
chronos
import
../../../waku/common/sqlite,
../../../waku/common/databases/db_sqlite,
../../../waku/v2/waku_archive,
../../../waku/v2/waku_archive/driver/sqlite_driver,
../../../waku/v2/waku_core,

View File

@ -6,7 +6,7 @@ import
chronos,
chronicles
import
../../../waku/common/sqlite,
../../../waku/common/databases/db_sqlite,
../../../waku/v2/waku_archive,
../../../waku/v2/waku_archive/driver/sqlite_driver,
../../../waku/v2/waku_core,

View File

@ -6,7 +6,7 @@ import
testutils/unittests,
chronos
import
../../../waku/common/sqlite,
../../../waku/common/databases/db_sqlite,
../../../waku/v2/waku_core,
../../../waku/v2/waku_archive,
../../../waku/v2/waku_archive/driver/sqlite_driver,

View File

@ -7,7 +7,7 @@ import
chronos,
libp2p/crypto/crypto
import
../../../waku/common/sqlite,
../../../waku/common/databases/db_sqlite,
../../../waku/v2/waku_core,
../../../waku/v2/waku_archive/driver/sqlite_driver,
../../../waku/v2/waku_archive,

View File

@ -7,7 +7,7 @@ import
chronicles,
libp2p/crypto/crypto
import
../../waku/common/sqlite,
../../waku/common/databases/db_sqlite,
../../waku/v2/node/message_store/sqlite_store,
../../waku/v2/node/peer_manager,
../../waku/v2/waku_core,

View File

@ -13,7 +13,7 @@ import
libp2p/protocols/pubsub/pubsub,
libp2p/protocols/pubsub/gossipsub
import
../../../waku/common/sqlite,
../../../waku/common/databases/db_sqlite,
../../../waku/v2/waku_core,
../../../waku/v2/node/peer_manager,
../../../waku/v2/waku_archive,

View File

@ -0,0 +1,6 @@
import
stew/results
# Shared result alias for all database backends (sqlite and postgres):
# failures are carried as plain human-readable strings.
type
DatabaseResult*[T] = Result[T, string]

View File

@ -0,0 +1,8 @@
import
./common,
./db_postgres/pgasyncpool
export
common,
pgasyncpool

View File

@ -6,13 +6,13 @@ else:
{.push raises: [].}
import
std/sequtils,
std/[sequtils,nre, strformat],
stew/results,
chronicles,
chronos
import
../../driver,
./connection
./dbconn,
../common
logScope:
topics = "postgres asyncpool"
@ -39,8 +39,25 @@ type
conns: seq[PgDbConn]
proc new*(T: type PgAsyncPool,
connString: string,
maxConnections: int): T =
dbUrl: string,
maxConnections: int):
DatabaseResult[T] =
var connString: string
try:
let regex = re("""^postgres:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)$""")
let matches = find(dbUrl,regex).get.captures
let user = matches[0]
let password = matches[1]
let host = matches[2]
let port = matches[3]
let dbName = matches[4]
connString =
fmt"user={user} host={host} port={port} dbname={dbName} password={password}"
except KeyError,InvalidUnicodeError, RegexInternalError, ValueError,
StudyError, SyntaxError:
return err("could not parse postgres string: " & getCurrentExceptionMsg())
let pool = PgAsyncPool(
connString: connString,
@ -49,7 +66,7 @@ proc new*(T: type PgAsyncPool,
conns: newSeq[PgDbConn](0)
)
return pool
return ok(pool)
func isLive(pool: PgAsyncPool): bool =
pool.state == PgAsyncPoolState.Live
@ -70,17 +87,16 @@ proc close*(pool: PgAsyncPool):
# wait for the connections to be released and close them, without
# blocking the async runtime
if pool.conns.anyIt(it.busy):
while pool.conns.anyIt(it.busy):
await sleepAsync(0.milliseconds)
while pool.conns.anyIt(it.busy):
await sleepAsync(0.milliseconds)
for i in 0..<pool.conns.len:
if pool.conns[i].busy:
continue
for i in 0..<pool.conns.len:
if pool.conns[i].busy:
continue
pool.conns[i].dbConn.close()
pool.conns[i].busy = false
pool.conns[i].open = false
pool.conns[i].dbConn.close()
pool.conns[i].busy = false
pool.conns[i].open = false
for i in 0..<pool.conns.len:
if pool.conns[i].open:
@ -92,7 +108,7 @@ proc close*(pool: PgAsyncPool):
return ok()
proc getConnIndex(pool: PgAsyncPool):
Future[Result[int, string]] {.async.} =
Future[DatabaseResult[int]] {.async.} =
## Waits for a free connection or create if max connections limits have not been reached.
## Returns the index of the free connection
@ -101,7 +117,7 @@ proc getConnIndex(pool: PgAsyncPool):
# establish new connections if we are under the limit
if pool.isBusy() and pool.conns.len < pool.maxConnections:
let connRes = connection.open(pool.connString)
let connRes = dbconn.open(pool.connString)
if connRes.isOk():
let conn = connRes.get()
pool.conns.add(PgDbConn(dbConn: conn, busy: true, open: true))
@ -129,7 +145,7 @@ proc releaseConn(pool: PgAsyncPool, conn: DbConn) =
proc query*(pool: PgAsyncPool,
query: string,
args: seq[string] = newSeq[string](0)):
Future[Result[seq[Row], string]] {.async.} =
Future[DatabaseResult[seq[Row]]] {.async.} =
## Runs the SQL query getting results.
## Retrieves info from the database.
@ -149,7 +165,7 @@ proc query*(pool: PgAsyncPool,
proc exec*(pool: PgAsyncPool,
query: string,
args: seq[string] = newSeq[string](0)):
Future[ArchiveDriverResult[void]] {.async.} =
Future[DatabaseResult[void]] {.async.} =
## Runs the SQL query without results.
## Alters the database state.
@ -169,14 +185,14 @@ proc exec*(pool: PgAsyncPool,
proc runStmt*(pool: PgAsyncPool,
baseStmt: string,
args: seq[string]):
Future[ArchiveDriverResult[void]] {.async.} =
Future[DatabaseResult[void]] {.async.} =
# Runs a stored statement, for performance purposes.
# In the current implementation, this is aimed
# to run the 'insertRow' stored statement used to add a new Waku message.
let connIndexRes = await pool.getConnIndex()
if connIndexRes.isErr():
return ArchiveDriverResult[void].err(connIndexRes.error())
return err(connIndexRes.error())
let conn = pool.conns[connIndexRes.value].dbConn
defer: pool.releaseConn(conn)

View File

@ -5,11 +5,12 @@
# Most of it is a direct copy, the only unique functions being `get` and `put`.
import
std/os,
std/[os, strutils, sequtils, algorithm],
stew/results,
chronicles,
sqlite3_abi
import
./common
logScope:
topics = "sqlite"
@ -24,7 +25,6 @@ type
AutoDisposed[T: ptr|ref] = object
val: T
template dispose(db: Sqlite) =
discard sqlite3_close(db)
@ -51,20 +51,19 @@ template checkErr*(op, cleanup: untyped) =
template checkErr*(op) =
checkErr(op): discard
type
DatabaseResult*[T] = Result[T, string]
SqliteDatabase* = ref object of RootObj
env*: Sqlite
type DataProc* = proc(s: RawStmtPtr) {.closure.} # the nim-eth definition is different; one more indirection
const NoopRowHandler* = proc(s: RawStmtPtr) {.closure.} = discard
proc new*(T: type SqliteDatabase,
path: string,
readOnly=false):
DatabaseResult[T] =
proc new*(T: type SqliteDatabase, path: string, readOnly=false): DatabaseResult[T] =
var env: AutoDisposed[ptr sqlite3]
defer: disposeIfUnreleased(env)
@ -111,14 +110,12 @@ proc new*(T: type SqliteDatabase, path: string, readOnly=false): DatabaseResult[
discard sqlite3_finalize(journalModePragma)
return err("Invalid pragma result: " & $x)
let journalModePragma = prepare("PRAGMA journal_mode = WAL;"): discard
checkWalPragmaResult(journalModePragma)
checkExec(journalModePragma)
ok(SqliteDatabase(env: env.release))
template prepare*(env: Sqlite, q: string, cleanup: untyped): ptr sqlite3_stmt =
var s: ptr sqlite3_stmt
checkErr sqlite3_prepare_v2(env, q, q.len.cint, addr s, nil):
@ -219,8 +216,8 @@ proc exec*[Params, Res](s: SqliteStmt[Params, Res],
discard sqlite3_reset(s) # same return information as step
discard sqlite3_clear_bindings(s) # no errors possible
proc query*(db: SqliteDatabase, query: string, onData: DataProc): DatabaseResult[bool] =
proc query*(db: SqliteDatabase, query: string, onData: DataProc):
DatabaseResult[bool] =
var s = prepare(db.env, query): discard
try:
@ -257,7 +254,6 @@ proc close*(db: SqliteDatabase) =
db[] = SqliteDatabase()[]
## Maintenance procedures
# TODO: Cache this value in the SqliteDatabase object.
@ -273,8 +269,7 @@ proc getPageSize*(db: SqliteDatabase): DatabaseResult[int64] =
if res.isErr():
return err("failed to get page_size")
ok(size)
return ok(size)
proc getFreelistCount*(db: SqliteDatabase): DatabaseResult[int64] =
## Return the number of unused pages in the database file.
@ -286,8 +281,7 @@ proc getFreelistCount*(db: SqliteDatabase): DatabaseResult[int64] =
if res.isErr():
return err("failed to get freelist_count")
ok(count)
return ok(count)
proc getPageCount*(db: SqliteDatabase): DatabaseResult[int64] =
## Return the total number of pages in the database file.
@ -299,8 +293,16 @@ proc getPageCount*(db: SqliteDatabase): DatabaseResult[int64] =
if res.isErr():
return err("failed to get page_count")
ok(count)
return ok(count)
proc gatherSqlitePageStats*(db: SqliteDatabase):
DatabaseResult[(int64, int64, int64)] =
let
pageSize = ?db.getPageSize()
pageCount = ?db.getPageCount()
freelistCount = ?db.getFreelistCount()
return ok((pageSize, pageCount, freelistCount))
proc vacuum*(db: SqliteDatabase): DatabaseResult[void] =
## The VACUUM command rebuilds the database file, repacking it into a minimal amount of disk space.
@ -308,8 +310,7 @@ proc vacuum*(db: SqliteDatabase): DatabaseResult[void] =
if res.isErr():
return err("vacuum failed")
ok()
return ok()
## Database scheme versioning
@ -331,7 +332,8 @@ proc getUserVersion*(database: SqliteDatabase): DatabaseResult[int64] =
ok(version)
proc setUserVersion*(database: SqliteDatabase, version: int64): DatabaseResult[void] =
proc setUserVersion*(database: SqliteDatabase, version: int64):
DatabaseResult[void] =
## Set the value of the user-version integer.
##
## The user-version is an integer that is available to applications to use however they want.
@ -345,3 +347,141 @@ proc setUserVersion*(database: SqliteDatabase, version: int64): DatabaseResult[v
return err("failed to set user_version")
ok()
## Migration scripts
proc getMigrationScriptVersion(path: string): DatabaseResult[int64] =
  ## Extracts the schema version encoded as the numeric filename prefix,
  ## e.g. "00001_addMessageTable.up.sql" -> 1.
  let filename = extractFilename(path)
  let versionToken = filename.split("_", 1)[0]
  try:
    let version = parseInt(versionToken)
    return ok(version)
  except ValueError:
    # Prefix was not a number: the script does not follow the naming scheme.
    return err("failed to parse file version: " & filename)
proc isSqlScript(path: string): bool =
  ## True when `path` names a SQL script (".sql" extension, case-insensitive).
  # toLowerAscii replaces the deprecated strutils.toLower; both only
  # lowercase ASCII, so behavior is unchanged.
  path.toLowerAscii().endsWith(".sql")
proc listSqlScripts(path: string): DatabaseResult[seq[string]] =
  ## Recursively collects every SQL script found under `path`.
  ## Non-SQL files are skipped (logged at debug level).
  var found = newSeq[string]()
  try:
    for candidate in walkDirRec(path):
      if candidate.isSqlScript():
        found.add(candidate)
      else:
        debug "invalid migration script", file=candidate
  except OSError:
    return err("failed to list migration scripts: " & getCurrentExceptionMsg())

  ok(found)
proc filterMigrationScripts(paths: seq[string],
                            lowVersion, highVersion: int64,
                            direction: string = "up"):
                            seq[string] =
  ## Keeps only the scripts whose version lies in the half-open range
  ## (lowVersion, highVersion] and whose name carries the requested
  ## migration direction suffix (e.g. ".up.sql").
  let select = proc(script: string): bool =
    if not isSqlScript(script):
      return false
    # An empty direction disables the suffix check.
    if direction != "" and not script.toLower().endsWith("." & direction & ".sql"):
      return false
    let versionRes = getMigrationScriptVersion(script)
    if versionRes.isErr():
      return false
    let version = versionRes.value
    return lowVersion < version and version <= highVersion

  paths.filter(select)
proc sortMigrationScripts(paths: seq[string]): seq[string] =
  ## Returns the script paths ordered lexicographically (ascending),
  ## which matches the zero-padded version-prefix naming scheme.
  result = paths
  result.sort(system.cmp[string])
proc loadMigrationScripts(paths: seq[string]): DatabaseResult[seq[string]] =
  ## Reads the full content of every script file, in the given order.
  ## Fails on the first file that cannot be read.
  var contents = newSeq[string]()
  for scriptPath in paths:
    try:
      contents.add(readFile(scriptPath))
    except OSError, IOError:
      return err("failed to load script '" & scriptPath & "': " & getCurrentExceptionMsg())

  ok(contents)
proc breakIntoStatements(script: string): seq[string] =
  ## Splits a SQL script on ';' into individual statements, each normalized
  ## to "<trimmed text>;". Whitespace-only chunks are dropped.
  var statements = newSeq[string]()
  for chunk in script.split(';'):
    # isEmptyOrWhitespace already treats whitespace-only text as empty,
    # so the original preliminary strip() before the check was redundant.
    if chunk.isEmptyOrWhitespace():
      continue

    statements.add(chunk.strip() & ";")

  statements
proc migrate*(db: SqliteDatabase,
              targetVersion: int64,
              migrationsScriptsDir: string):
              DatabaseResult[void] =
  ## Compares the `user_version` of the sqlite database with the provided
  ## `targetVersion`, then runs migration scripts if the `user_version` is
  ## outdated. The `migrationsScriptsDir` path points to the directory holding
  ## the migration scripts. Once the db is updated, it sets the `user_version`
  ## to the `targetVersion`.
  ##
  ## NOTE: Down migration is not currently supported
  let userVersion = ?db.getUserVersion()

  if userVersion == targetVersion:
    debug "database schema is up to date", userVersion=userVersion, targetVersion=targetVersion
    return ok()

  info "database schema is outdated", userVersion=userVersion, targetVersion=targetVersion

  # Load migration scripts
  var migrationScriptsPaths = ?listSqlScripts(migrationsScriptsDir)
  migrationScriptsPaths = filterMigrationScripts(migrationScriptsPaths, lowVersion=userVersion, highVersion=targetVersion, direction="up")
  migrationScriptsPaths = sortMigrationScripts(migrationScriptsPaths)

  if migrationScriptsPaths.len <= 0:
    debug "no scripts to be run"
    return ok()

  let scripts = ?loadMigrationScripts(migrationScriptsPaths)

  # Run the migration scripts
  for script in scripts:
    for statement in script.breakIntoStatements():
      debug "executing migration statement", statement=statement

      let execRes = db.query(statement, NoopRowHandler)
      if execRes.isErr():
        error "failed to execute migration statement", statement=statement, error=execRes.error
        return err("failed to execute migration statement")

      # Log-message typo fixed ("succesfully" -> "successfully").
      debug "migration statement executed successfully", statement=statement

  # Update user_version
  ?db.setUserVersion(targetVersion)

  debug "database user_version updated", userVersion=targetVersion
  ok()
proc performSqliteVacuum*(db: SqliteDatabase): DatabaseResult[void] =
## SQLite database vacuuming
## Runs the SQL `VACUUM` command (via db.vacuum()) to rebuild and repack the
## database file; returns an error string if the vacuum statement fails.
# NOTE(review): the visible hunk ends without a trailing `ok()`; presumably the
# full file returns ok() after the final debug line — confirm in the repo.
# TODO: Run vacuuming conditionally based on database page stats
# if (pageCount > 0 and freelistCount > 0):
debug "starting sqlite database vacuuming"
let resVacuum = db.vacuum()
if resVacuum.isErr():
return err("failed to execute vacuum: " & resVacuum.error)
debug "finished sqlite database vacuuming"

View File

@ -0,0 +1,33 @@
import
std/strutils,
regex,
stew/results
proc validateDbUrl*(dbUrl: string): Result[string, string] =
  ## Validates (and returns, trimmed) a database URL.
  ## dbUrl mimics the SQLAlchemy Database URL schema:
  ## https://docs.sqlalchemy.org/en/14/core/engines.html#database-urls
  ## "" and "none" are accepted as explicit "no database" values.
  let urlPattern = re"^[\w\+]+:\/\/[\w\/\\\.\:\@]+$"
  let trimmedUrl = dbUrl.strip()
  if trimmedUrl == "" or trimmedUrl == "none" or trimmedUrl.match(urlPattern):
    return ok(trimmedUrl)
  else:
    return err("invalid 'db url' option format: " & trimmedUrl)
proc getDbEngine*(dbUrl: string): Result[string, string] =
  ## Returns the engine (scheme) part of an "<engine>://<path>" database URL.
  let pieces = dbUrl.split("://", 1)
  if pieces.len != 2:
    return err("Incorrect dbUrl : " & dbUrl)

  ok(pieces[0])
proc getDbPath*(dbUrl: string): Result[string, string] =
  ## Returns the path part of an "<engine>://<path>" database URL.
  let pieces = dbUrl.split("://", 1)
  if pieces.len != 2:
    return err("Incorrect dbUrl : " & dbUrl)

  ok(pieces[1])

View File

@ -1,7 +0,0 @@
{.push raises: [].}
import
./sqlite/database
export
database

View File

@ -1,138 +0,0 @@
{.push raises: [].}
import
std/[strutils, sequtils, os, algorithm],
stew/results,
chronicles
import
../sqlite
logScope:
topics = "sqlite"
## Migration scripts
proc getMigrationScriptVersion(path: string): DatabaseResult[int64] =
let name = extractFilename(path)
let parts = name.split("_", 1)
try:
let version = parseInt(parts[0])
return ok(version)
except ValueError:
return err("failed to parse file version: " & name)
proc isSqlScript(path: string): bool =
path.toLower().endsWith(".sql")
proc listSqlScripts(path: string): DatabaseResult[seq[string]] =
var scripts = newSeq[string]()
try:
for scriptPath in walkDirRec(path):
if isSqlScript(scriptPath):
scripts.add(scriptPath)
else:
debug "invalid migration script", file=scriptPath
except OSError:
return err("failed to list migration scripts: " & getCurrentExceptionMsg())
ok(scripts)
proc filterMigrationScripts(paths: seq[string], lowVersion, highVersion: int64, direction: string = "up"): seq[string] =
## Returns migration scripts whose version fall between lowVersion and highVersion (inclusive)
let filterPredicate = proc(script: string): bool =
if not isSqlScript(script):
return false
if direction != "" and not script.toLower().endsWith("." & direction & ".sql"):
return false
let scriptVersionRes = getMigrationScriptVersion(script)
if scriptVersionRes.isErr():
return false
let scriptVersion = scriptVersionRes.value
return lowVersion < scriptVersion and scriptVersion <= highVersion
paths.filter(filterPredicate)
proc sortMigrationScripts(paths: seq[string]): seq[string] =
## Sort migration scripts paths alphabetically
paths.sorted(system.cmp[string])
proc loadMigrationScripts(paths: seq[string]): DatabaseResult[seq[string]] =
var loadedScripts = newSeq[string]()
for script in paths:
try:
loadedScripts.add(readFile(script))
except OSError, IOError:
return err("failed to load script '" & script & "': " & getCurrentExceptionMsg())
ok(loadedScripts)
proc breakIntoStatements(script: string): seq[string] =
var statements = newSeq[string]()
for chunk in script.split(';'):
if chunk.strip().isEmptyOrWhitespace():
continue
let statement = chunk.strip() & ";"
statements.add(statement)
statements
proc migrate*(db: SqliteDatabase, targetVersion: int64, migrationsScriptsDir: string): DatabaseResult[void] =
## Compares the `user_version` of the sqlite database with the provided `targetVersion`, then
## it runs migration scripts if the `user_version` is outdated. The `migrationScriptsDir` path
## points to the directory holding the migrations scripts once the db is updated, it sets the
## `user_version` to the `tragetVersion`.
##
## NOTE: Down migration it is not currently supported
let userVersion = ?db.getUserVersion()
if userVersion == targetVersion:
debug "database schema is up to date", userVersion=userVersion, targetVersion=targetVersion
return ok()
info "database schema is outdated", userVersion=userVersion, targetVersion=targetVersion
# Load migration scripts
var migrationScriptsPaths = ?listSqlScripts(migrationsScriptsDir)
migrationScriptsPaths = filterMigrationScripts(migrationScriptsPaths, lowVersion=userVersion, highVersion=targetVersion, direction="up")
migrationScriptsPaths = sortMigrationScripts(migrationScriptsPaths)
if migrationScriptsPaths.len <= 0:
debug "no scripts to be run"
return ok()
let scripts = ?loadMigrationScripts(migrationScriptsPaths)
# Run the migration scripts
for script in scripts:
for statement in script.breakIntoStatements():
debug "executing migration statement", statement=statement
let execRes = db.query(statement, NoopRowHandler)
if execRes.isErr():
error "failed to execute migration statement", statement=statement, error=execRes.error
return err("failed to execute migration statement")
debug "migration statement executed succesfully", statement=statement
# Update user_version
?db.setUserVersion(targetVersion)
debug "database user_version updated", userVersion=targetVersion
ok()

View File

@ -9,8 +9,8 @@ import
stew/results,
chronicles
import
../../../../common/sqlite,
../../../../common/sqlite/migrations
../../../../common/databases/db_sqlite,
../../../../common/databases/common
logScope:
@ -23,7 +23,7 @@ template projectRoot: string = currentSourcePath.rsplit(DirSep, 1)[0] / ".." / "
const PeerStoreMigrationPath: string = projectRoot / "migrations" / "peer_store"
proc migrate*(db: SqliteDatabase, targetVersion = SchemaVersion): DatabaseResult[void] =
proc migrate*(db: SqliteDatabase, targetVersion = SchemaVersion): DatabaseResult[void] =
## Compares the `user_version` of the sqlite database with the provided `targetVersion`, then
## it runs migration scripts if the `user_version` is outdated. The `migrationScriptsDir` path
## points to the directory holding the migrations scripts once the db is updated, it sets the

View File

@ -10,12 +10,13 @@ import
sqlite3_abi,
libp2p/protobuf/minprotobuf
import
../../../../common/sqlite,
../../../../common/databases/db_sqlite,
../../../../common/databases/common,
../../../waku_core,
../waku_peer_store,
./peer_storage
export sqlite
export db_sqlite
type
WakuPeerStorage* = ref object of PeerStorage

View File

@ -12,7 +12,7 @@ import
../../../waku_core,
../../common,
../../driver,
asyncpool
../../../../common/databases/db_postgres as waku_postgres
export postgres_driver
@ -46,24 +46,11 @@ proc new*(T: type PostgresDriver,
maxConnections: int = DefaultMaxConnections):
ArchiveDriverResult[T] =
var connPool: PgAsyncPool
let connPoolRes = PgAsyncPool.new(dbUrl, maxConnections)
if connPoolRes.isErr():
return err("error creating PgAsyncPool: " & connPoolRes.error)
try:
let regex = re("""^postgres:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)$""")
let matches = find(dbUrl,regex).get.captures
let user = matches[0]
let password = matches[1]
let host = matches[2]
let port = matches[3]
let dbName = matches[4]
let connectionString = fmt"user={user} host={host} port={port} dbname={dbName} password={password}"
connPool = PgAsyncPool.new(connectionString, maxConnections)
except KeyError,InvalidUnicodeError, RegexInternalError, ValueError, StudyError, SyntaxError:
return err("could not parse postgres string")
return ok(PostgresDriver(connPool: connPool))
return ok(PostgresDriver(connPool: connPoolRes.get()))
proc createMessageTable(s: PostgresDriver):
Future[ArchiveDriverResult[void]] {.async.} =

View File

@ -5,8 +5,8 @@ import
stew/results,
chronicles
import
../../../../common/sqlite,
../../../../common/sqlite/migrations
../../../../common/databases/db_sqlite,
../../../../common/databases/common
logScope:
@ -30,7 +30,7 @@ proc migrate*(db: SqliteDatabase, targetVersion = SchemaVersion): DatabaseResult
## NOTE: Down migration it is not currently supported
debug "starting message store's sqlite database migration"
let migrationRes = migrations.migrate(db, targetVersion, migrationsScriptsDir=MessageStoreMigrationPath)
let migrationRes = migrate(db, targetVersion, migrationsScriptsDir=MessageStoreMigrationPath)
if migrationRes.isErr():
return err("failed to execute migration scripts: " & migrationRes.error)

View File

@ -9,7 +9,8 @@ import
stew/[results, byteutils],
sqlite3_abi
import
../../../../common/sqlite,
../../../../common/databases/db_sqlite,
../../../../common/databases/common,
../../../waku_core,
./cursor
@ -201,7 +202,10 @@ proc selectAllMessagesQuery(table: string): SqlQueryStr =
" FROM " & table &
" ORDER BY storedAt ASC"
proc selectAllMessages*(db: SqliteDatabase): DatabaseResult[seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp)]] =
proc selectAllMessages*(db: SqliteDatabase): DatabaseResult[seq[(PubsubTopic,
WakuMessage,
seq[byte],
Timestamp)]] =
## Retrieve all messages from the store.
var rows: seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp)]
proc queryRowCallback(s: ptr sqlite3_stmt) =
@ -355,7 +359,11 @@ proc selectMessagesByHistoryQueryWithLimit*(db: SqliteDatabase,
startTime: Option[Timestamp],
endTime: Option[Timestamp],
limit: uint,
ascending: bool): DatabaseResult[seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp)]] =
ascending: bool):
DatabaseResult[seq[(PubsubTopic,
WakuMessage,
seq[byte],
Timestamp)]] =
var messages: seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp)] = @[]

View File

@ -11,7 +11,7 @@ import
chronicles,
chronos
import
../../../../common/sqlite,
../../../../common/databases/db_sqlite,
../../../waku_core,
../../common,
../../driver,