diff --git a/README.md b/README.md
index 6f4993e..e318887 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,7 @@ yarn install
 ## Running Locally
 
 ```shell
-yarn start
+yarn start # Run 'node fetch-content.js' in the root directory to fetch remote files
 ```
 
 Check for spelling errors before deploying:
@@ -42,10 +42,10 @@ yarn check:spell
 Create a production build locally to check for errors:
 
 ```shell
-yarn build
+yarn build # Run 'node fetch-content.js' and then 'docusaurus build'
+# The 'fetch-content.js' script fetches documents from the nwaku and research repositories.
 
 # test the build
-
 yarn serve
 ```
 
diff --git a/docs/research/research-and-studies/incentivization.md b/docs/research/research-and-studies/incentivization.md
index fb0eeed..2749bdc 100644
--- a/docs/research/research-and-studies/incentivization.md
+++ b/docs/research/research-and-studies/incentivization.md
@@ -1,8 +1,3 @@
----
-title: Incentivization in Decentralized Networks
-description: The goal of this document is to outline and contextualize our approach to TWN i13n.
----
-
 Waku is a family of decentralized communication protocols.
 The Waku Network (TWN) consists of independent nodes running Waku protocols.
 TWN needs incentivization (shortened to i13n) to ensure proper node behavior.
@@ -13,6 +8,7 @@ we focus on Waku Store - a client-server protocol for querying historical messag
 We introduce a minimal viable addition to Store to enable i13n,
 and list research directions for future work.
 
+# Incentivization in decentralized networks
 ## Incentivization tools
 
 We can think of incentivization tools as a two-by-two matrix:
diff --git a/fetch-content.js b/fetch-content.js
new file mode 100644
index 0000000..f403285
--- /dev/null
+++ b/fetch-content.js
@@ -0,0 +1,77 @@
+const https = require('https');
+const fs = require('fs');
+const path = require('path');
+
+async function fetchFromGitHub(url, callback) {
+  https.get(url, { headers: { 'User-Agent': 'Node.js' } }, (res) => {
+    let data = '';
+
+    res.on('data', (chunk) => {
+      data += chunk;
+    });
+
+    res.on('end', () => {
+      callback(null, JSON.parse(data));
+    });
+  }).on('error', (err) => {
+    callback(err, null);
+  });
+}
+
+async function fetchDirectoryContents(dirUrl, basePath, prefixToRemove) {
+  fetchFromGitHub(dirUrl, async (err, files) => {
+    if (err) {
+      console.error('Error fetching files:', err.message);
+      return;
+    }
+
+    for (const file of files) {
+      const relativePath = file.path.replace(new RegExp(`^${prefixToRemove}`), '');
+      const filePath = path.join(basePath, relativePath);
+
+      if (file.type === 'file') {
+        await downloadAndSaveFile(file.download_url, filePath);
+      } else if (file.type === 'dir') {
+        await fetchDirectoryContents(file.url, basePath, prefixToRemove); // Recursive call for subdirectories
+      }
+    }
+  });
+}
+
+async function downloadAndSaveFile(url, filePath) {
+  const fullFilePath = path.join(__dirname, filePath);
+
+  https.get(url, (res) => {
+    const directory = path.dirname(fullFilePath);
+
+    // Ensure the directory exists
+    fs.mkdirSync(directory, { recursive: true });
+
+    const fileStream = fs.createWriteStream(fullFilePath);
+    res.pipe(fileStream);
+
+    fileStream.on('finish', () => {
+      fileStream.close();
+      console.log('Downloaded and saved:', filePath);
+    });
+  }).on('error', (err) => {
+    console.error('Error downloading file:', err.message);
+  });
+}
+
+const repositories = [
+  {
+    baseUrl: 'https://api.github.com/repos/waku-org/nwaku/contents/docs/benchmarks',
+    baseSavePath: '/docs/research/benchmarks/',
+    prefixToRemove: 'docs/benchmarks/'
+  },
+  {
+    baseUrl: 'https://api.github.com/repos/waku-org/research/contents/docs',
+    baseSavePath: '/docs/research/research-and-studies/',
+    prefixToRemove: 'docs/'
+  }
+];
+
+repositories.forEach(repo => {
+  fetchDirectoryContents(repo.baseUrl, repo.baseSavePath, repo.prefixToRemove);
+});
\ No newline at end of file
diff --git a/package.json b/package.json
index b59c7e3..099c942 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,7 @@
   "scripts": {
     "docusaurus": "docusaurus",
     "start": "docusaurus start",
-    "build": "docusaurus build",
+    "build": "node fetch-content.js && docusaurus build",
     "swizzle": "docusaurus swizzle",
     "deploy": "docusaurus deploy",
     "clear": "docusaurus clear",