
Removing unneeded props (issue #12).

Moved code into separate modules.
Chung Leong
2019-01-23 13:59:56 +01:00
parent a0f0c9191d
commit 36a9d0c619
11 changed files with 219 additions and 137 deletions
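Note on the new modules: the requires for ./page-renderer and ./json-retriever added below point at the files that now hold the code moved out of index.js; their sources are not part of this view. The sketch that follows is only an assumption about their shape, inferred from the call sites (JSONRetriever.fetch(path) followed by res.send(json.text), and PageRenderer.generate(path, target) resolving to { html, sourceURLs }) and from the removed handleJSONRequest/handlePageRequest bodies. Per the commit title, json-retriever presumably also strips unneeded props from the WordPress response, which is not reproduced here.

// json-retriever.js -- assumed shape, reconstructed from the old handleJSONRequest
const CrossFetch = require('cross-fetch');

const WORDPRESS_HOST = process.env.WORDPRESS_HOST;

async function fetch(path) {
    // proxy the /wp-json/... path to the WordPress container and return the raw body
    let url = `http://${WORDPRESS_HOST}${path}`;
    let res = await CrossFetch(url);
    let text = await res.text();
    return { text };
}

module.exports = { fetch };

// page-renderer.js -- assumed interface only: generate(path, target) renders the
// front-end for the given URL with target 'seo' or 'hydrate' and resolves to
// { html, sourceURLs }, where sourceURLs lists the JSON URLs fetched while rendering
// (the old handlePageRequest did this inline with FrontEnd.render and
// ReactDOMServer.renderToString)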


@@ -6,25 +6,22 @@ const Compression = require('compression');
const SpiderDetector = require('spider-detector')
const DNSCache = require('dnscache');
const CrossFetch = require('cross-fetch');
const ReactDOMServer = require('react-dom/server');
const FrontEnd = require('./client/front-end');
const PageRenderer = require('./page-renderer');
const JSONRetriever = require('./json-retriever');
const NginxCache = require('./nginx-cache');
// enable DNS caching
let dnsCache = DNSCache({ enable: true, ttl: 300, cachesize: 100 });
let dnsCache = Bluebird.promisifyAll(DNSCache({
enable: true,
ttl: 300,
cachesize: 100
}));
const perPage = 10;
const serverPort = 80;
const wordpressHost = process.env.WORDPRESS_HOST;
const nginxHost = process.env.NGINX_HOST;
let wordpressIP;
dnsCache.lookup(wordpressHost, (err, result) => {
if (!err) {
wordpressIP = `::ffff:${result}`;
}
});
const SERVER_PORT = 80;
const WORDPRESS_HOST = process.env.WORDPRESS_HOST;
// start up Express
let app = Express();
app.set('json spaces', 2);
app.use(Compression())
@@ -35,23 +32,12 @@ app.get('/json/*', handleJSONRequest);
app.get(`/*`, handlePageRequest);
app.purge(`/*`, handlePurgeRequest);
app.use(handleError);
app.listen(serverPort);
app.listen(SERVER_PORT);
let pageDependencies = {};
// purge cache when starting up
NginxCache.purge(/.*/);
async function handleJSONRequest(req, res, next) {
try {
let path = `/wp-json/${req.url.substr(6)}`;
let url = `http://${wordpressHost}${path}`;
let sres = await CrossFetch(url);
let text = await sres.text();
res.send(text);
} catch (err) {
next(err);
}
}
function handleTimestampRequest(req, res, next) {
async function handleTimestampRequest(req, res, next) {
try {
let now = new Date;
let ts = now.toISOString();
@@ -61,44 +47,85 @@ function handleTimestampRequest(req, res, next) {
}
}
async function handlePageRequest(req, res, next) {
async function handleJSONRequest(req, res, next) {
try {
let host = `http://${nginxHost}`;
let path = req.url;
let noScript = (req.query.js === '0')
let target = (req.isSpider() || noScript) ? 'seo' : 'hydrate';
let sourceURLs = [];
// create a fetch() that remembers the URLs used
let fetch = (url, options) => {
console.log(`Fetching: ${url}`);
if (url.startsWith(host)) {
var relURL = url.substr(host.length);
sourceURLs.push(relURL);
}
return CrossFetch(url, options);
};
let options = { host, path, target, fetch };
let rootNode = await FrontEnd.render(options);
let appHTML = ReactDOMServer.renderToString(rootNode);
let indexHTMLPath = `${__dirname}/client/index.html`;
let html = await replaceHTMLComment(indexHTMLPath, 'REACT', appHTML);
if (target === 'hydrate') {
// add <noscript> tag to redirect to SEO version
let meta = `<meta http-equiv=refresh content="0; url=?js=0">`;
html += `<noscript>${meta}</noscript>`;
} else if (target === 'seo') {
res.set({ 'X-Accel-Expires': 0 });
}
res.type('html').send(html);
// save the URLs that the page depends on
pageDependencies[path] = sourceURLs.map(addTrailingSlash);
let path = `/wp-json/${req.url.substr(6)}`;
let json = await JSONRetriever.fetch(path);
res.send(json.text);
} catch (err) {
next(err);
}
}
let pageDependencies = {};
async function handlePageRequest(req, res, next) {
try {
let path = req.url;
let noJS = (req.query.js === '0');
let target = (req.isSpider() || noJS) ? 'seo' : 'hydrate';
let page = await PageRenderer.generate(path, target);
if (target === 'seo') {
// not caching content generated for SEO
res.set({ 'X-Accel-Expires': 0 });
} else {
// remember the URLs used by the page
pageDependencies[path] = page.sourceURLs.map(addTrailingSlash);
}
res.type('html').send(page.html);
} catch (err) {
next(err);
}
}
async function handlePurgeRequest(req, res) {
// verify that the request is coming from WordPress
let remoteIP = req.connection.remoteAddress;
res.end();
let wordpressIP = await dnsCache.lookupAsync(WORDPRESS_HOST);
if (remoteIP !== `::ffff:${wordpressIP}`) {
return;
}
let url = req.url;
let method = req.headers['x-purge-method'];
if (method === 'regex' && url === '.*') {
pageDependencies = {};
await NginxCache.purge(/.*/);
} else if (method === 'default') {
// look for URLs that look like /wp-json/wp/v2/pages/4/
let m = /^\/wp\-json\/(\w+\/\w+\/\w+)\/(\d+)\/$/.exec(url);
console.log(url, m)
if (!m) {
return;
}
// purge matching JSON files
let folderPath = m[1];
let pattern = new RegExp(`^/json/${folderPath}.*`);
let purgedURLs = await NginxCache.purge(pattern);
if (purgedURLs.length === 0) {
return;
}
purgedURLs = purgedURLs.map(addTrailingSlash);
// purge the timestamp so CSR code knows something has changed
await NginxCache.purge('/.mtime');
// look for pages that made use of the purged JSONs
for (let [ path, sourceURLs ] of Object.entries(pageDependencies)) {
let affected = sourceURLs.some((sourceURL) => {
return purgedURLs.indexOf(sourceURL) !== -1;
});
if (affected) {
// purge the cached page
await NginxCache.purge(path);
delete pageDependencies[path];
}
}
}
}
function handleError(err, req, res, next) {
if (!res.headersSent) {
res.type('text').status(400).send(err.message);
@@ -106,55 +133,6 @@ function handleError(err, req, res, next) {
console.error(err);
}
function handlePurgeRequest(req, res) {
let remoteIP = req.connection.remoteAddress;
if (remoteIP === wordpressIP) {
let url = req.url;
let method = req.headers['x-purge-method'];
purgeCachedFile(url, method);
}
res.end();
}
async function purgeCachedFile(url, method) {
let pattern, isJSON;
if (method === 'default' && url.startsWith('/wp-json/')) {
let path = url.substr(9);
let m = /^(\w+\/\w+\/(\w+)\/)(\d+)\/$/.exec(path);
if (m) {
let folderPath = m[1];
let folderType = m[2];
pattern = new RegExp(`^/json/${folderPath}.*`);
}
} else if (method === 'regex' && url === '.*') {
pattern = /.*/;
}
if (!pattern) {
return;
}
let purged = await NginxCache.purge(pattern);
for (let [ pageURL, sourceURLs ] of Object.entries(pageDependencies)) {
let affected = false;
for (let jsonURL of purged) {
jsonURL = addTrailingSlash(jsonURL);
if (sourceURLs.indexOf(jsonURL)) {
affected = true;
break;
}
}
if (affected) {
delete pageDependencies[pageURL];
await NginxCache.purge(pageURL);
}
}
await NginxCache.purge('/.mtime');
}
async function replaceHTMLComment(path, comment, newElement) {
let text = await FS.readFileAsync(path, 'utf-8');
return text.replace(`<!--${comment}-->`, newElement).replace(`<!--${comment}-->`, newElement);
}
/**
* Add trailing slash to URL
*
@@ -174,18 +152,18 @@ function addTrailingSlash(url) {
return url;
}
[ './index', './nginx-cache', './client/front-end' ].forEach((path) => {
let fullPath = require.resolve(path);
FS.watchFile(fullPath, (curr, prev) => {
if (curr.mtime !== prev.mtime) {
console.log('Restarting');
process.exit(0);
}
});
// restart process when a source file changes
Object.keys(require.cache).forEach((path) => {
if (!/node_modules/.test(path)) {
FS.watchFile(path, (curr, prev) => {
if (curr.mtime !== prev.mtime) {
console.log('Restarting');
process.exit(0);
}
});
}
});
NginxCache.purge(/.*/);
process.on('unhandledRejection', (err) => {
console.error(err);
});
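
A hypothetical walk-through of the purge cascade in the new handlePurgeRequest (the endpoint and post id below are illustrative values, not taken from the commit):

// WordPress (e.g. a cache-purging plugin) sends: PURGE /wp-json/wp/v2/posts/4/
// with the header x-purge-method: default
let m = /^\/wp\-json\/(\w+\/\w+\/\w+)\/(\d+)\/$/.exec('/wp-json/wp/v2/posts/4/');
console.log(m[1]);  // 'wp/v2/posts'
// so NginxCache.purge(new RegExp('^/json/wp/v2/posts.*')) drops the cached JSON,
// '/.mtime' is purged so client-side code notices the change on its next timestamp check,
// and any page whose recorded sourceURLs include one of the purged JSON URLs is
// purged from the nginx cache and dropped from pageDependencies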