chore: Fresh scraping on each sync (#4)

This commit is contained in:
Filip Pajic 2024-05-09 12:40:06 +02:00 committed by GitHub
parent c51271253a
commit 6a9b7654cc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 24 additions and 1 deletion

View File

@ -36,7 +36,7 @@ export async function writeLargeFile(path, data) {
export function removeDirectory(path) {
return new Promise((resolve, reject) => {
fs.rmdir(path, {recursive: true}, err => {
fs.rm(path, {recursive: true}, err => {
if (err) {
reject(err)
}
@ -46,6 +46,20 @@ export function removeDirectory(path) {
})
}
/**
 * Tests whether the given path exists and is readable.
 * Never rejects: filesystem errors are mapped to a `false` result so the
 * caller can `await` it directly in a condition.
 *
 * @param {string} path - Filesystem path to check.
 * @returns {Promise<boolean>} `true` if the path exists and is readable, `false` otherwise.
 */
export function directoryExists(path) {
    return new Promise(resolve => {
        // F_OK: the path exists; R_OK: the process may read it.
        const mode = fs.constants.F_OK | fs.constants.R_OK
        fs.access(path, mode, err => resolve(!err))
    })
}
export async function createDirectory(path) {
try {
/*

View File

@ -1,4 +1,6 @@
import { fetchDirectoryContents } from './fetch-content.mjs'
import path from 'path'
import { directoryExists, removeDirectory } from './file.mjs'
const directoriesToSync = ['codex', 'nomos', 'status', 'vac', 'waku']
@ -10,6 +12,13 @@ async function main() {
const baseSavePath = `./${dirName}/`
const prefixToRemove = dirName + '/'
const directoryPath = path.join(process.cwd(), dirName);
const shouldRemoveOldContent = await directoryExists(directoryPath);
if (shouldRemoveOldContent) {
await removeDirectory(directoryPath)
console.log(`Removed old ${dirName}`)
}
await fetchDirectoryContents(baseUrl, baseSavePath, prefixToRemove)
console.log(`Synced ${dirName}`)