Cache docs in memory, speed up page loads during development
Summary: This only affects the website when it is hosted locally using express. The current version of the website is annoyingly sluggish, as the whole docs structure is parsed on each request. In this PR, we store the result of extracting the Markdown sources in memory, significantly speeding up page loads. We also delay the extraction of docs until a request is made that would require them (e.g. anything that hits `/react-native/docs/*`). There is still an 8-second delay when the docs are first visited, as expected. This can be improved in a later PR. Any changes to the docs structure may require a server restart to take effect. This is rare enough that I don't think it is a blocker. This PR significantly speeds up first page load times on the homepage and any non-docs page, and speeds up subsequent page loads on Docs. This will make for a better web development experience. Extracting the docs on each request takes around 8 seconds; storing the results in memory allows us to virtually eliminate this delay on subsequent requests. Closes https://github.com/facebook/react-native/pull/12203 Differential Revision: D4516697 Pulled By: hramos fbshipit-source-id: 05276e9827c82e38ccf064209b3fd38005f8e247
This commit is contained in:
parent
5d4d62f474
commit
6a8200df95
|
@@ -20,6 +20,7 @@
|
|||
"glob": "6.0.4",
|
||||
"jsdoc-api": "^1.1.0",
|
||||
"jstransform": "11.0.3",
|
||||
"memory-cache": "^0.1.6",
|
||||
"mkdirp": "^0.5.1",
|
||||
"optimist": "0.6.0",
|
||||
"react": "~0.13.0",
|
||||
|
|
|
@@ -16,6 +16,7 @@ var optimist = require('optimist');
|
|||
var path = require('path');
|
||||
var removeMd = require('remove-markdown');
|
||||
var extractDocs = require('./extractDocs');
|
||||
var cache = require('memory-cache');
|
||||
var argv = optimist.argv;
|
||||
|
||||
function splitHeader(content) {
|
||||
|
@@ -90,7 +91,7 @@ function buildFile(layout, metadata, rawContent) {
|
|||
].filter(e => e).join('\n');
|
||||
}
|
||||
|
||||
function execute() {
|
||||
function execute(options) {
|
||||
var DOCS_MD_DIR = '../docs/';
|
||||
var BLOG_MD_DIR = '../blog/';
|
||||
|
||||
|
@@ -135,23 +136,33 @@ function execute() {
|
|||
);
|
||||
}
|
||||
|
||||
extractDocs().forEach(function(content) {
|
||||
handleMarkdown(content, null);
|
||||
});
|
||||
|
||||
var files = glob.sync(DOCS_MD_DIR + '**/*.*');
|
||||
files.forEach(function(file) {
|
||||
var extension = path.extname(file);
|
||||
if (extension === '.md' || extension === '.markdown') {
|
||||
var content = fs.readFileSync(file, {encoding: 'utf8'});
|
||||
handleMarkdown(content, path.basename(file));
|
||||
if (options.extractDocs) {
|
||||
// Rendering docs can take up to 8 seconds. We wait until /docs/ are
|
||||
// requested before doing so, then we store the results in memory to
|
||||
// speed up subsequent requests.
|
||||
var extractedDocs = cache.get('extractedDocs');
|
||||
if (!extractedDocs) {
|
||||
extractedDocs = extractDocs();
|
||||
cache.put('extractedDocs', extractedDocs);
|
||||
}
|
||||
extractedDocs.forEach(function(content) {
|
||||
handleMarkdown(content, null);
|
||||
});
|
||||
|
||||
if (extension === '.json') {
|
||||
var content = fs.readFileSync(file, {encoding: 'utf8'});
|
||||
metadatas[path.basename(file, '.json')] = JSON.parse(content);
|
||||
}
|
||||
});
|
||||
var files = glob.sync(DOCS_MD_DIR + '**/*.*');
|
||||
files.forEach(function(file) {
|
||||
var extension = path.extname(file);
|
||||
if (extension === '.md' || extension === '.markdown') {
|
||||
var content = fs.readFileSync(file, {encoding: 'utf8'});
|
||||
handleMarkdown(content, path.basename(file));
|
||||
}
|
||||
|
||||
if (extension === '.json') {
|
||||
var content = fs.readFileSync(file, {encoding: 'utf8'});
|
||||
metadatas[path.basename(file, '.json')] = JSON.parse(content);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// we need to pass globals for the components to be configurable
|
||||
// metadata is generated in this process which has access to process.env
|
||||
|
|
|
@@ -505,12 +505,15 @@ const styleDocs = docsList.stylesForEmbed.reduce(function(docs, filepath) {
|
|||
|
||||
function extractDocs() {
|
||||
componentCount = 0;
|
||||
var components = docsList.components.map(renderComponent);
|
||||
var apis = docsList.apis.map((filepath) => {
|
||||
return renderAPI(filepath, 'api');
|
||||
});
|
||||
var styles = docsList.stylesWithPermalink.map(renderStyle);
|
||||
return [].concat(
|
||||
docsList.components.map(renderComponent),
|
||||
docsList.apis.map((filepath) => {
|
||||
return renderAPI(filepath, 'api');
|
||||
}),
|
||||
docsList.stylesWithPermalink.map(renderStyle)
|
||||
components,
|
||||
apis,
|
||||
styles
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@@ -46,7 +46,8 @@ var app = connect()
|
|||
// convert all the md files on every request. This is not optimal
|
||||
// but fast enough that we don't really need to care right now.
|
||||
if (!server.noconvert && req.url.match(/\.html|\/$/)) {
|
||||
convert();
|
||||
var extractDocs = req.url.match(/\/docs/); // Lazily extract docs.
|
||||
convert({extractDocs});
|
||||
}
|
||||
next();
|
||||
})
|
||||
|
|
Loading…
Reference in New Issue