diff --git a/README.md b/README.md
index 63d7cc5..7113891 100644
--- a/README.md
+++ b/README.md
@@ -147,10 +147,10 @@ The handler detects whether the remote agent is a search-engine spider and handl
 ```javascript
 async function handlePageRequest(req, res, next) {
     try {
-        let path = req.url;
-        let noJS = (req.query.js === '0');
-        let target = (req.isSpider() || noJS) ? 'seo' : 'hydrate';
-        let page = await PageRenderer.generate(path, target);
+        const path = req.url;
+        const noJS = (req.query.js === '0');
+        const target = (req.isSpider() || noJS) ? 'seo' : 'hydrate';
+        const page = await PageRenderer.generate(path, target);
         if (target === 'seo') {
             // not caching content generated for SEO
             res.set({ 'X-Accel-Expires': 0 });
@@ -173,11 +173,11 @@ async function handlePageRequest(req, res, next) {
 async function generate(path, target) {
     console.log(`Regenerating page: ${path}`);
     // retrieve cached JSON through Nginx
-    let host = NGINX_HOST;
+    const host = NGINX_HOST;
     // create a fetch() that remembers the URLs used
-    let sourceURLs = [];
-    let agent = new HTTP.Agent({ keepAlive: true });
-    let fetch = (url, options) => {
+    const sourceURLs = [];
+    const agent = new HTTP.Agent({ keepAlive: true });
+    const fetch = (url, options) => {
         if (url.startsWith(host)) {
             sourceURLs.push(url.substr(host.length));
             options = addHostHeader(options);
@@ -185,21 +185,19 @@ async function generate(path, target) {
         }
         return CrossFetch(url, options);
     };
-    let options = { host, path, target, fetch };
-    let appHTML = await FrontEnd.render(options);
-    let htmlTemplate = await FS.readFileAsync(HTML_TEMPLATE, 'utf-8');
-    let html = htmlTemplate.replace(``, appHTML);
+    const options = { host, path, target, fetch };
+    const frontEndHTML = await FrontEnd.render(options);
+    const htmlTemplate = await FS.readFileAsync(HTML_TEMPLATE, 'utf-8');
+    let html = htmlTemplate.replace(``, frontEndHTML);
     if (target === 'hydrate') {
         // add