when (NimMajor, NimMinor) < (1, 4):
  {.push raises: [Defect].}
else:
  {.push raises: [].}
import
  stew/results,
  chronicles,
  chronos
import
  ../driver,
  ../../common/databases/dburl,
  ../../common/databases/db_sqlite,
  ../../common/error_handling,
  ./sqlite_driver,
  ./sqlite_driver/migrations as archive_driver_sqlite_migrations,
  ./postgres_driver/migrations as archive_postgres_driver_migrations,
  ./queue_driver
export
  sqlite_driver,
  queue_driver
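# The Postgres backend below is only compiled in when building with -d:postgres.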
when defined(postgres):
  import ./postgres_driver ## This import adds a dependency on the external libpq library
  export postgres_driver
proc new*(T: type ArchiveDriver,
          url: string,
          vacuum: bool,
          migrate: bool,
          maxNumConn: int,
          onFatalErrorAction: OnFatalErrorHandler):
          Future[Result[T, string]] {.async.} =
  ## url - connection string that selects and configures the database
  ## vacuum - if true, a cleanup operation is applied to the database on startup
  ## migrate - if true, the database schema is updated to the latest version
  ## maxNumConn - maximum number of simultaneous connections (only used by the Postgres backend)
  ## onFatalErrorAction - invoked on fatal errors, e.g. when the connection to the database is lost
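  ##
  ## Usage sketch (illustrative only: the URL is a placeholder and the handler
  ## assumes `OnFatalErrorHandler` receives the error message as a string):
  ##
  ## .. code-block:: nim
  ##   let driverRes = waitFor ArchiveDriver.new(
  ##     url = "sqlite://waku_archive.db",
  ##     vacuum = false,
  ##     migrate = true,
  ##     maxNumConn = 1, # only relevant for the Postgres backend
  ##     onFatalErrorAction = proc(errMsg: string) = quit("fatal db error: " & errMsg))
  ##   if driverRes.isErr():
  ##     quit("could not set up the archive driver: " & driverRes.error)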
  let dbUrlValidationRes = dburl.validateDbUrl(url)
  if dbUrlValidationRes.isErr():
    return err("DbUrl failure in ArchiveDriver.new: " &
               dbUrlValidationRes.error)
  let engineRes = dburl.getDbEngine(url)
  if engineRes.isErr():
    return err("error getting db engine in ArchiveDriver.new: " &
               engineRes.error)
  let engine = engineRes.get()

  case engine
  of "sqlite":
    let pathRes = dburl.getDbPath(url)
    if pathRes.isErr():
      return err("error getting db path in ArchiveDriver.new: " & pathRes.error)
    let dbRes = SqliteDatabase.new(pathRes.get())
    if dbRes.isErr():
      return err("error creating sqlite database in ArchiveDriver.new: " & dbRes.error)
    let db = dbRes.get()

    # SQLite vacuum
    let sqliteStatsRes = db.gatherSqlitePageStats()
    if sqliteStatsRes.isErr():
      return err("error while gathering sqlite stats: " & $sqliteStatsRes.error)

    let (pageSize, pageCount, freelistCount) = sqliteStatsRes.get()
    debug "sqlite database page stats", pageSize = pageSize,
                                        pages = pageCount,
                                        freePages = freelistCount
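    # Only vacuum when pages are allocated and some sit on the freelist,
    # i.e. when there is actually space to reclaim.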
    if vacuum and (pageCount > 0 and freelistCount > 0):
      let vacuumRes = db.performSqliteVacuum()
      if vacuumRes.isErr():
        return err("error in vacuum sqlite: " & $vacuumRes.error)
    # Database migration
    if migrate:
      let migrateRes = archive_driver_sqlite_migrations.migrate(db)
      if migrateRes.isErr():
        return err("error in migrate sqlite: " & $migrateRes.error)
    debug "setting up sqlite waku archive driver"
    let res = SqliteDriver.new(db)
    if res.isErr():
      return err("failed to init sqlite archive driver: " & res.error)

    return ok(res.get())
of "postgres":
|
2023-09-13 10:45:55 +00:00
|
|
|
when defined(postgres):
|
2023-11-24 15:21:22 +00:00
|
|
|
let res = PostgresDriver.new(dbUrl = url,
|
|
|
|
maxConnections = maxNumConn,
|
2024-02-15 11:25:08 +00:00
|
|
|
onFatalErrorAction = onFatalErrorAction)
      if res.isErr():
        return err("failed to init postgres archive driver: " & res.error)

      let driver = res.get()

      # Database migration
      if migrate:
        let migrateRes = await archive_postgres_driver_migrations.migrate(driver)
        if migrateRes.isErr():
          return err("ArchiveDriver build failed in migration: " & $migrateRes.error)

      return ok(driver)
    else:
      return err("Postgres has been configured but not compiled in. Check compiler definitions.")
  else:
    debug "setting up in-memory waku archive driver"
    let driver = QueueDriver.new() # Defaults to a capacity of 25,000 messages
    return ok(driver)