Mirror of https://github.com/Pomax/BezierInfo-2.git
Commit: safety

lib/site/handlers.js (new file, 3234 lines)
File diff suppressed because it is too large.

package.json
@@ -3,11 +3,11 @@
   "version": "2.0.0",
   "description": "pomax.github.io/bezierinfo",
   "scripts": {
-    "localize": "node make-locales",
-    "prebuild": "node buildmark",
+    "localize": "node tools/make-locales",
+    "prebuild": "node tools/buildmark -- set",
     "build": "run-s localize less build:** default",
-    "default": "node copy-default-locale",
-    "postbuild": "node buildmark",
+    "default": "node tools/copy-default-locale",
+    "postbuild": "node tools/buildmark -- resolve",
     "build:en-GB": "webpack -p",
     "build:zh-CN": "cross-env LOCALE=zh-CN webpack -p",
     "build:ja-JP": "cross-env LOCALE=ja-JP webpack -p",

tools/aggregate-js-handlers.js (new file, 33 lines)
@@ -0,0 +1,33 @@
/**********************************************************************
 *
 * This script is a JS handling aggregator that grabs all handler.js
 * files defined for any section, and turns it into a giant master
 * handler file for later use, keyed on section dir names.
 *
 **********************************************************************/

var fs = require("fs-extra");
var glob = require('glob');
var path = require("path");
var jsxshim = require("./lib/jsx-shim");

const BASEDIR = path.join(__dirname, "..");

var index = require(path.join(BASEDIR, "components/sections"));
var handlers = [];
Object.keys(index).forEach( section => {
  var handlerFile = path.join(BASEDIR, `components/sections/${section}/handler.js`);
  if (fs.existsSync(handlerFile)) {
    let content = fs.readFileSync(handlerFile).toString();
    content = content.replace("module.exports = ","return ");
    content = `(function() { ${content} }())`;
    let def = `  ${section}: {
    handler: ${content}`;
    if (content.indexOf('keyHandlingOptions') > -1) { def += `,\n    withKeys: true`; }
    def += `\n  }`;
    handlers.push(def);
  }
});

var masterFile = `module.exports = {\n${ handlers.join(',\n') }\n};\n`;
fs.writeFileSync(path.join(BASEDIR, "lib/site/handlers.js"), masterFile);
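
For reference, the lib/site/handlers.js file this aggregator writes ends up shaped roughly like the sketch below. The section names and handler bodies are hypothetical; only the wrapping (one IIFE per handler, plus a withKeys flag whenever keyHandlingOptions shows up in the handler source) follows from the script above.

// hypothetical output sketch, not the actual generated file
module.exports = {
  whatis: {
    handler: (function() { return {
      setup: function(api) { /* ... */ },
      draw: function(api, curve) { /* ... */ }
    }; }())
  },
  moulding: {
    handler: (function() { return {
      keyHandlingOptions: { /* ... */ },
      draw: function(api, curve) { /* ... */ }
    }; }()),
    withKeys: true
  }
};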

@@ -1,6 +1,8 @@
 var fs = require('fs');
 var markfile = '.buildmark';
-if (!fs.existsSync(markfile)) {
+var resolve = process.argv.indexOf('resolve') > -1;
+
+if (!resolve) {
   fs.writeFileSync(markfile, Date.now());
 } else {
   let mark = parseInt(fs.readFileSync(markfile).toString());
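
The hunk above is cut off by the diff viewer right after the mark is read back. As a rough sketch only (an assumption, not the actual file contents), the resolve branch presumably reports the elapsed build time and clears the mark:

  // hypothetical continuation of the `else` branch shown above
  let runtime = Date.now() - mark;
  console.log(`Build finished in ${Math.round(runtime / 1000)}s.`);
  fs.unlinkSync(markfile);
}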

@@ -1,24 +1,3 @@
-var ReactDOMServer = require('react-dom/server');
-var marked = require("marked");
-var fs = require("fs-extra");
-
-// bundle all content in a specific locale for use by the app
-const defaultLocale = "en-GB";
-var locale = defaultLocale;
-var lpos = process.argv.indexOf('--locale');
-if (lpos !== -1) { locale = process.argv[lpos+1]; }
-
-// shim nodejs so that it knows what to do with jsx files: return empty objects.
-var Module = require('module');
-var originalRequire = Module.prototype.require;
-Module.prototype.require = function() {
-  try {
-    return originalRequire.apply(this, arguments);
-  } catch (e) {
-    return {};
-  }
-};
-
 /**
  * fix the stupid nonsense inability for markdown parsers to see link
  * syntax with `)` in the links themselves.
@@ -284,7 +263,7 @@ function performChunking(data, chunks, chunker, moreChunkers) {
  * Split data up into "markdown" and "not markdown" parts.
  * We'll only run markdown conversion on the markdown parts.
  */
-function chunk(data) {
+module.exports = function chunk(data) {
   var chunks = [];
   var chunkers = [
     chunkDivs,
@@ -296,164 +275,4 @@ function chunk(data) {
   ];
   performChunking(data, chunks, chunkLatex,chunkers);
   return chunks;
 }
-
-/**
- * turn locale markdown into locale javascript data
- */
-function processLocation(loc, fragmentid, number) {
-  var processed = { data: '', title: `Unknown title (${fragmentid})` };
-  try {
-    data = fs.readFileSync(loc).toString();
-    data = chunk(data).map(block => {
-      // preserver is simple
-      if (!block.convert) return block.data;
-
-      // markdown conversion is a little more work
-      let d = marked(block.data.trim());
-
-      // serious can we fucking not, please.
-      d = d.replace('<p></div></p>', '</div>')
-           .replace(/&amp;/g, '&')
-           .replace(/&#39;/g, "'")
-           .replace(/&quot;/g, '"')
-
-      // ``` conversion does odd things with <code> tags inside <pre> tags.
-      d = d.replace(/<pre>(\r?\n)*<code>/g,'<pre>')
-           .replace(/<\/code>(\r?\n)*<\/pre>/g,'</pre>');
-
-      // And then title extraction/rewriting
-      d = d.replace(/<h1[^>]+>([^<]+)<\/h1>/,function(_,t) {
-        processed.title = t;
-        return `<SectionHeader name="${fragmentid}" title="` + t + `"${ number ? ' number="'+number+'"': ''}/>`;
-      });
-
-      return d;
-    }).join('');
-    processed.data = data;
-  } catch (e) {
-    // console.warn(e);
-  }
-
-  return processed;
-}
-
-
-/**
- * Form the content.js file content as a single string for file-writing.
- */
-function formContentBundle(locale, content) {
-  var bcode = JSON.stringify(content, false, 2);
-  bcode = bcode.replace(/"<section>/g, "function(handler) { return <section>")
-               .replace(/this\.(\w)/g, "handler.$1")
-               .replace(/<\/section>"(,?)/g, "</section>; }$1\n")
-               .replace(/\\"/g,'"')
-               .replace(/\\n/g,'\n')
-               .replace(/></g,'>\n<')
-               .replace(/\\\\/g, '\\');
-
-  var bundle = [
-    `var React = require('react');`,
-    `var Graphic = require("../../components/Graphic.jsx");`,
-    `var SectionHeader = require("../../components/SectionHeader.jsx");`,
-    `var BSplineGraphic = require("../../components/BSplineGraphic.jsx");`,
-    `var KnotController = require("../../components/KnotController.jsx");`,
-    `var WeightController = require("../../components/WeightController.jsx");`,
-    ``,
-    `SectionHeader.locale="${locale}";`,
-    ``,
-    `module.exports = ${bcode};`,
-    ``
-  ].join('\n');
-
-  return bundle;
-}
-
-/**
- * Process the locale switcher component.
- */
-function processLocaleSwitcher(locale, content) {
-  // We also need to localize the "LocaleSwitcher"
-  var localeCode = locale;
-  var loc = `./components/localized/LocaleSwitcher/content.${localeCode}.md`;
-  if (!fs.existsSync(loc)) {
-    localeCode = defaultLocale;
-    loc = `./components/localized/LocaleSwitcher/content.${localeCode}.md`;
-  }
-  var key = "locale-switcher";
-  var processed = processLocation(loc, key);
-  content[key] = {
-    locale: localeCode,
-    title: key,
-    getContent: "<section>" + processed.data + "</section>"
-  };
-}
-
-/**
- * Write a content.js bundle to the filesystem
- */
-function writeContentBundle(locale, content) {
-  var bundle = formContentBundle(locale, content);
-
-  // write the content.js file for bundling purposes
-  var dir = `./locales/${locale}`;
-  fs.ensureDirSync(dir);
-  fs.writeFileSync(`${dir}/content.js`, bundle);
-
-  // Write the actual locale directory and generate a locale-specific index.html
-  var html = fs.readFileSync('./index.template.html').toString();
-  var preface = content.preface.getContent.replace(/<SectionHeader name="preface" title="([^"]+)"\/>/, "<h2>$1</h2>");
-  html = html.replace("{{ PREFACE }}", preface);
-  html = html.replace("{{ locale }}", locale);
-  fs.ensureDirSync(locale);
-  fs.writeFileSync(`./${locale}/index.html`, html);
-}
-
-/**
- * Process a single locale, with `en-GB` fallback for missing files.
- */
-function processLocale(locale) {
-  // Get the section map. This will try to load .jsx code, which will fail,
-  // but the above shim makes a failure simply return an empty object instead.
-  // This is good: we only care about the keys, not the content.
-  var index = require("./components/sections");
-  var sections = Object.keys(index);
-  var content = { locale };
-
-  var processSection = (key, number) => {
-    // Grab locale file, or defaultLocale file if the chosen locale has
-    // has no translated content (yet)...
-    var localeCode = locale;
-    var loc = `./components/sections/${key}/content.${localeCode}.md`;
-    if (!fs.existsSync(loc)) {
-      localeCode = defaultLocale;
-      loc = `./components/sections/${key}/content.${localeCode}.md`;
-    }
-
-    // Read in the content.{lang}.md file
-    var processed = processLocation(loc, key, number);
-
-    content[key] = {
-      locale: localeCode,
-      title: processed.title,
-      getContent: "<section>" + processed.data + "</section>"
-    };
-  };
-
-  sections.forEach(processSection);
-  processLocaleSwitcher(locale, content);
-  writeContentBundle(locale, content);
-}
-
-// find all locales used and generate their respective content dirs
-var glob = require('glob');
-glob("components/sections/**/content*md", (err, files) => {
-  var locales = [];
-  files.forEach(file => {
-    let locale = file.match(/content\.([^.]+)\.md/)[1];
-    if (locales.indexOf(locale) === -1) {
-      locales.push(locale);
-    }
-  });
-  locales.forEach(processLocale);
-});

@@ -1,13 +1,15 @@
 var fs = require("fs-extra");
+var path = require("path");
+const BASEDIR = path.join(__dirname, "..");

 // copy the en-GB content as default content
-fs.copySync("en-GB/index.html", "index.html");
-fs.copySync("en-GB/article.js", "article.js");
+fs.copySync(path.join(BASEDIR, "en-GB/index.html"), path.join(BASEDIR, "index.html"));
+fs.copySync(path.join(BASEDIR, "en-GB/article.js"), path.join(BASEDIR, "article.js"));

 // make sure to remove the `<base>` tag from the index, and replace the JSX "className" with "class"
-var html = fs.readFileSync("index.html").toString();
+var html = fs.readFileSync(path.join(BASEDIR, "index.html")).toString();
 html = html.replace(' <base href="..">\n', '');
 html = html.replace('className=', 'class=');
 html = html.replace('<script src="en-GB/article.js', '<script src="article.js');
 html = "<!-- AUTOGENERATED CONTENT, PLEASE EDIT 'index.template.html' INSTEAD! -->\n" + html;
-fs.writeFileSync("index.html", html);
+fs.writeFileSync(path.join(BASEDIR, "index.html"), html);

tools/form-index-jsx.js (new file, 42 lines)
@@ -0,0 +1,42 @@
/**********************************************************************
 *
 * This script is responsible for building the index.js file for each
 * section based on whether or not it has a handler, and whether or not
 * that handler requires any keyhandling for its interaction.
 *
 **********************************************************************/

var fs = require("fs-extra");
var glob = require('glob');
var path = require("path");
var jsxshim = require("./lib/jsx-shim");

const BASEDIR = path.join(__dirname, "..");

var index = require(path.join(BASEDIR, "components/sections"));
var handlers = [];
Object.keys(index).forEach( section => {
  var handlerFile = path.join(BASEDIR, `components/sections/${section}/handler.js`);
  var hasHandler = false;
  var withKeys = false;
  if (fs.existsSync(handlerFile)) {
    hasHandler = true;
    let content = fs.readFileSync(handlerFile).toString();
    withKeys = (content.indexOf('keyHandlingOptions') > -1);
  }

  var indexCode = [
    hasHandler ? `var handler = require("./handler.js");` : '',
    `var generateBase = require("../../generate-base");`,
    withKeys ? `var keyHandling = require("../../decorators/keyhandling-decorator.jsx");` : '',
    hasHandler ?
      withKeys ?
        `module.exports = keyHandling(generateBase("${section}", handler));`
      :
        `module.exports = generateBase("${section}", handler);`
    :
      `module.exports = generateBase("${section}");`
  ].filter(l => !!l).join('\n');

  console.log('[',section,"]\n", indexCode,'\n');
});
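
To illustrate what this prints: for a hypothetical section "moulding" whose handler.js uses keyHandlingOptions, the generated index code follows directly from the template strings above:

var handler = require("./handler.js");
var generateBase = require("../../generate-base");
var keyHandling = require("../../decorators/keyhandling-decorator.jsx");
module.exports = keyHandling(generateBase("moulding", handler));

A section without a handler collapses to just the generate-base require and module.exports = generateBase("sectionname");.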

tools/lib/chunk.js (new file, 278 lines)
@@ -0,0 +1,278 @@
/**
 * fix the stupid nonsense inability for markdown parsers to see link
 * syntax with `)` in the links themselves.
 */
function fixMarkDownLinks(data, chunks, chunkMore) {
  var next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false,
      fixes = [];

  data.replace(/\[[^\]]+\]\(/g, function(_match, pos, _fullstring) {
    // this is the start of a link. Find the offset at which the next `)`
    // is actually the link closer.
    var offset = 0;
    var start = pos + _match.length;
    var complex = false;
    for (let d=0, i=start; i<data.length; i++) {
      if (data[i] === '(') { d++; complex = true; }
      else if (data[i] === ')') { d--; }
      if (d<0) { offset = i - start; break; }
    }
    var end = start + offset;
    // we now know the *actual* link length. Safify it.
    if (complex) { fixes.push({ start, end, data: data.substring(start,end) }); }
    // and return the matched text because we don't want to replace right now.
    return _match
  });

  // let's safify this data, if there was a complex pattern that needs fixin'
  if (fixes.length>0) {
    fixes.forEach(fix => {
      let s = fix.start,
          e = fix.end,
          newdata = fix.data.replace(/\(/g, '%28').replace(/\)/g, '%29');
      // I can't believe I still have to do this in 2017...
      data = data.substring(0,s) + newdata + data.substring(e);
    });
  }

  // alright, let "the rest" deal with this data now.
  performChunking(data, chunks, next, otherChunkers);
}

/**
 * ...
 */
function chunkBSplineGraphicsJSX(data, chunks, chunkMore) {
  var p = 0,
      next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false,
      bgfxTag = '<BSplineGraphic',
      bgfxEnd = '/>',
      bgfxEnd2 = '</BSplineGraphic>';

  while (p !== -1) {
    // Let's check a BSplineGraphic tag
    let bgfx = data.indexOf(bgfxTag, p);
    if (bgfx === -1) {
      // No <BSplineGraphic/> block found: we're done here. Parse the remaining
      // data for whatever else might be in there.
      performChunking(data.substring(p), chunks, next, otherChunkers);
      break;
    }

    // First parse the non-<BSplineGraphic/> data for whatever else might be in there.
    performChunking(data.substring(p, bgfx), chunks, next, otherChunkers);

    let tail = data.substring(bgfx),
        noContent = !!tail.match(/^<BSplineGraphic[^>]+\/>/),
        eol;

    // Then capture the <BSplineGraphic>...</BSplineGraphic> or <BSplineGraphic .../> block and mark it as "don't convert".
    if (noContent) {
      eol = data.indexOf(bgfxEnd, bgfx) + bgfxEnd.length;
    } else {
      eol = data.indexOf(bgfxEnd2, bgfx) + bgfxEnd2.length;
    }

    chunks.push({ convert: false, type: "bgfx", s:bgfx, e:eol, data: data.substring(bgfx, eol) });
    p = eol;
  }
}


/**
 * ...
 */
function chunkGraphicJSX(data, chunks, chunkMore) {
  var p = 0,
      next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false,
      gfxTag = '<Graphic',
      gfxEnd = '/>',
      gfxEnd2 = '</Graphic>';

  while (p !== -1) {
    // Let's check a Graphic tag
    let gfx = data.indexOf(gfxTag, p);
    if (gfx === -1) {
      // No <Graphic/> block found: we're done here. Parse the remaining
      // data for whatever else might be in there.
      performChunking(data.substring(p), chunks, next, otherChunkers);
      break;
    }

    // First parse the non-<Graphic/> data for whatever else might be in there.
    performChunking(data.substring(p, gfx), chunks, next, otherChunkers);

    let tail = data.substring(gfx),
        noContent = !!tail.match(/^<Graphic[^>]+\/>/),
        eol;

    // Then capture the <Graphic>...</Graphic> or <Graphic .../> block and mark it as "don't convert".
    if (noContent) {
      eol = data.indexOf(gfxEnd, gfx) + gfxEnd.length;
    } else {
      eol = data.indexOf(gfxEnd2, gfx) + gfxEnd2.length;
    }

    chunks.push({ convert: false, type: "gfx", s:gfx, e:eol, data: data.substring(gfx, eol) });
    p = eol;
  }
}

/**
 * ...
 */
function chunkDivEnds(data, chunks, chunkMore) {
  var next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false;

  var splt = data.split('</div>');
  var dlen = splt.length;
  splt.forEach( function(segment, pos) {
    performChunking(segment, chunks, next, otherChunkers);
    if (pos < dlen-1) {
      chunks.push({ convert: false, type: '</div>', s:-1, e:-1, data: '</div>' });
    }
  });
}


/**
 * ...
 */
function chunkTable(data, chunks, chunkMore) {
  var p = 0,
      next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false,
      tableMatch = '\n<table',
      tableClosingTag = '</table>\n';

  while (p !== -1) {
    // Let's check for a <table> tag
    let table = data.indexOf(tableMatch, p);
    if (table === -1) {
      // No tables found: we're done here. Parse the remaining
      // data for whatever else might be in there.
      performChunking(data.substring(p), chunks, next, otherChunkers);
      break;
    }

    // First parse the non-table data for whatever else might be in there.
    performChunking(data.substring(p, table), chunks, next, otherChunkers);

    // then mark the table code as no-convert
    let eod = data.indexOf(tableClosingTag, table) + tableClosingTag.length;
    chunks.push({ convert: false, type: "table", s:table, e:eod, data: data.substring(table, eod) });
    p = eod;
  }
}

/**
 * ...
 */
function chunkDivs(data, chunks, chunkMore) {
  var p = 0,
      next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false,
      divMatch = '\n<div className="',
      divEnd = '">\n',
      divClosingTag = '</div>\n';

  while (p !== -1) {
    // Let's check for a <div className="..."> tag
    let div = data.indexOf(divMatch, p);
    if (div === -1) {
      // No div tags found: we're done here. Parse the remaining
      // data for whatever else might be in there.
      performChunking(data.substring(p), chunks, next, otherChunkers);
      break;
    }

    // First parse the non-div data for whatever else might be in there.
    performChunking(data.substring(p, div), chunks, next, otherChunkers);

    // Now, if we have a div, there's a few options:
    //
    // - "figure" contains HTML content, not to be converted
    // - "note" contains markdown content, to be converted
    // - "howtocode" contains markdown content, to be converted
    let className = data.substring(div).match(/className="([^"]+)"/);
    if (className !== null) { className = className[1]; }

    let eod, type="div";
    if (className === "figure" || className === "two-column") {
      eod = data.indexOf(divClosingTag, div) + divClosingTag.length;
      type += "." + className;
    } else {
      eod = data.indexOf(divEnd, div) + divEnd.length;
    }
    chunks.push({ convert: false, type: type, s:div, e:eod, data: data.substring(div, eod) });
    p = eod;
  }
}

/**
 * Split data up into "latex" and "not latex".
 * Anything that is not latex might still be "not markdown"
 * though, so we hand that data off to additional chunkers
 */
function chunkLatex(data, chunks, chunkMore) {
  var p = 0,
      next = chunkMore ? chunkMore[0] : false,
      otherChunkers = chunkMore ? chunkMore.slice(1) : false,
      latexEnd = '\\]';

  while (p !== -1) {
    // Let's check a LaTeX block
    let latex = data.indexOf('\\[', p);
    if (latex === -1) {
      // No LaTeX block found: we're done here. Parse the remaining
      // data for whatever else might be in there.
      performChunking(data.substring(p), chunks, next, otherChunkers);
      break;
    }

    // First parse the non-LaTeX data for whatever else might be in there.
    performChunking(data.substring(p, latex), chunks, next, otherChunkers);

    // Then capture the LaTeX block and mark it as "don't convert"
    let eol = data.indexOf(latexEnd, latex) + latexEnd.length;
    chunks.push({ convert: false, type: "latex", s:latex, e:eol, data: data.substring(latex, eol) });
    p = eol;
  }
}

// in-place chunking
function performChunking(data, chunks, chunker, moreChunkers) {
  // If there's no further chunking function to run, just
  // record this data as a chunk of convertible data.
  if (!chunker) {
    if (data.trim()!=='') {
      chunks.push({ convert: true, data: data });
    }
    return "early";
  }

  // otherwise, perform more chunking.
  chunker(data, chunks, moreChunkers);
}

/**
 * Split data up into "markdown" and "not markdown" parts.
 * We'll only run markdown conversion on the markdown parts.
 */
module.exports = function chunk(data) {
  var chunks = [];
  var chunkers = [
    chunkDivs,
    chunkDivEnds,
    chunkTable,
    chunkGraphicJSX,
    chunkBSplineGraphicsJSX,
    fixMarkDownLinks
  ];
  performChunking(data, chunks, chunkLatex,chunkers);
  return chunks;
};
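
A minimal usage sketch of this module (the input markdown and the require path are hypothetical): LaTeX blocks and <Graphic/> tags come back as convert:false chunks, everything else as convert:true markdown.

// hypothetical usage, run from the repo root
var chunk = require("./tools/lib/chunk");

var md = [
  "# A heading",
  "",
  "Some markdown with maths \\[ B(t) = (1-t)^3 \\] in it,",
  "followed by a graphic:",
  "",
  '<Graphic title="A demo graphic" setup={this.setup} draw={this.draw}/>',
  ""
].join("\n");

chunk(md).forEach(c => console.log(c.convert, c.type || "markdown"));
// expected: convert:true chunks for the prose, plus a "latex" chunk for the
// \[...\] block and a "gfx" chunk for the <Graphic/> tag, both convert:false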

tools/lib/jsx-shim.js (new file, 11 lines)
@@ -0,0 +1,11 @@
// shim nodejs so that it "knows" what to do with jsx files:
// namely just return empty objects.
var Module = require('module');
var originalRequire = Module.prototype.require;
Module.prototype.require = function() {
  try {
    return originalRequire.apply(this, arguments);
  } catch (e) {
    return {};
  }
};
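
A quick usage sketch (paths hypothetical): with the shim loaded, requiring a .jsx file no longer throws; it simply yields an empty object, which is all the build tools need since they only look at object keys.

require("./tools/lib/jsx-shim");
var section = require("./components/sections/introduction/index.jsx");
console.log(section); // {}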

tools/make-locales.js (new file, 200 lines)
@@ -0,0 +1,200 @@
/**********************************************************************
 *
 * This script is a locale aggregator and JSX generator, yielding
 * locale-specific node modules that contain the section content
 * keyed on section dir names.
 *
 * 1. find out which sections exist
 * 2. find out how many different locales exist
 * 3. for each locale:
 *
 *    1. for each section:
 *
 *       1. grab the associated locale
 *       2. chunk the data for "should be preserved" vs.
 *          "should be processed as markdown".
 *       3. join the chunks back up after converting the
 *          still acknowledged as markdown bits.
 *       4. aggregate with a function wrapper to allow for
 *          JS bindings to a handler object.
 *
 *    2. dump the aggregated locale data as a content.js file
 *    3. generate a locale-specific index.html
 *
 *
 **********************************************************************/

var fs = require("fs-extra");
var glob = require('glob');
var path = require("path");
var marked = require("marked");
var chunk = require("./lib/chunk");
var jsxshim = require("./lib/jsx-shim");

// make sure we know what our base location is
const BASEDIR = path.join(__dirname,"..");

// bundle all content in a specific locale for use by the app
const defaultLocale = "en-GB";
var locale = defaultLocale;
var lpos = process.argv.indexOf('--locale');
if (lpos !== -1) { locale = process.argv[lpos+1]; }

/**
 * turn locale markdown into locale javascript data
 */
function processLocation(loc, fragmentid, number) {
  var processed = { data: '', title: `Unknown title (${fragmentid})` };
  try {
    data = fs.readFileSync(loc).toString();
    data = chunk(data).map(block => {
      // preserver is simple
      if (!block.convert) return block.data;

      // markdown conversion is a little more work
      let d = marked(block.data.trim());

      // serious can we fucking not, please.
      d = d.replace('<p></div></p>', '</div>')
           .replace(/&amp;/g, '&')
           .replace(/&#39;/g, "'")
           .replace(/&quot;/g, '"')

      // ``` conversion does odd things with <code> tags inside <pre> tags.
      d = d.replace(/<pre>(\r?\n)*<code>/g,'<pre>')
           .replace(/<\/code>(\r?\n)*<\/pre>/g,'</pre>');

      // And then title extraction/rewriting
      d = d.replace(/<h1[^>]+>([^<]+)<\/h1>/,function(_,t) {
        processed.title = t;
        return `<SectionHeader name="${fragmentid}" title="` + t + `"${ number ? ' number="'+number+'"': ''}/>`;
      });

      return d;
    }).join('');
    processed.data = data;
  } catch (e) {
    // console.warn(e);
  }

  return processed;
}


/**
 * Form the content.js file content as a single string for file-writing.
 */
function formContentBundle(locale, content) {
  var bcode = JSON.stringify(content, false, 2);
  bcode = bcode.replace(/"<section>/g, "function(handler) { return <section>")
               .replace(/this\.(\w)/g, "handler.$1")
               .replace(/<\/section>"(,?)/g, "</section>; }$1\n")
               .replace(/\\"/g,'"')
               .replace(/\\n/g,'\n')
               .replace(/></g,'>\n<')
               .replace(/\\\\/g, '\\');

  var bundle = [
    `var React = require('react');`,
    `var Graphic = require("../../components/Graphic.jsx");`,
    `var SectionHeader = require("../../components/SectionHeader.jsx");`,
    `var BSplineGraphic = require("../../components/BSplineGraphic.jsx");`,
    `var KnotController = require("../../components/KnotController.jsx");`,
    `var WeightController = require("../../components/WeightController.jsx");`,
    ``,
    `SectionHeader.locale="${locale}";`,
    ``,
    `module.exports = ${bcode};`,
    ``
  ].join('\n');

  return bundle;
}

/**
 * Process the locale switcher component.
 */
function processLocaleSwitcher(locale, content) {
  // We also need to localize the "LocaleSwitcher"
  var localeCode = locale;
  var loc = `./components/localized/LocaleSwitcher/content.${localeCode}.md`;
  if (!fs.existsSync(loc)) {
    localeCode = defaultLocale;
    loc = `./components/localized/LocaleSwitcher/content.${localeCode}.md`;
  }
  var key = "locale-switcher";
  var processed = processLocation(loc, key);
  content[key] = {
    locale: localeCode,
    title: key,
    getContent: "<section>" + processed.data + "</section>"
  };
}

/**
 * Write a content.js bundle to the filesystem
 */
function writeContentBundle(locale, content) {
  var bundle = formContentBundle(locale, content);

  // write the content.js file for bundling purposes
  var dir = `./locales/${locale}`;
  fs.ensureDirSync(dir);
  fs.writeFileSync(`${dir}/content.js`, bundle);

  // Write the actual locale directory and generate a locale-specific index.html
  var html = fs.readFileSync('./index.template.html').toString();
  var preface = content.preface.getContent.replace(/<SectionHeader name="preface" title="([^"]+)"\/>/, "<h2>$1</h2>");
  html = html.replace("{{ PREFACE }}", preface);
  html = html.replace("{{ locale }}", locale);
  fs.ensureDirSync(locale);
  fs.writeFileSync(`./${locale}/index.html`, html);
}

/**
 * Process a single locale, with `en-GB` fallback for missing files.
 */
function processLocale(locale) {
  // Get the section map. This will try to load .jsx code, which will fail,
  // but the above shim makes a failure simply return an empty object instead.
  // This is good: we only care about the keys, not the content.
  var index = require(path.join(BASEDIR, "components/sections"));
  var sections = Object.keys(index);
  var content = { locale };

  var processSection = (key, number) => {
    // Grab locale file, or defaultLocale file if the chosen locale has
    // has no translated content (yet)...
    var localeCode = locale;
    var loc = path.join(BASEDIR, `./components/sections/${key}/content.${localeCode}.md`);
    if (!fs.existsSync(loc)) {
      localeCode = defaultLocale;
      loc = path.join(BASEDIR, `./components/sections/${key}/content.${localeCode}.md`);
    }

    // Read in the content.{lang}.md file
    var processed = processLocation(loc, key, number);

    content[key] = {
      locale: localeCode,
      title: processed.title,
      getContent: "<section>" + processed.data + "</section>"
    };
  };

  sections.forEach(processSection);
  processLocaleSwitcher(locale, content);
  writeContentBundle(locale, content);
}

// find all locales used and generate their respective content dirs
glob(path.join(BASEDIR,"components/sections/**/content*md"), (err, files) => {
  var locales = [];
  files.forEach(file => {
    let locale = file.match(/content\.([^.]+)\.md/)[1];
    if (locales.indexOf(locale) === -1) {
      locales.push(locale);
    }
  });
  locales.forEach(processLocale);
});
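
After formContentBundle's string surgery, each locale ends up with a locales/<locale>/content.js module shaped roughly like the sketch below; the section key, title and markup are hypothetical placeholders, only the overall shape follows from the script above.

var React = require('react');
var Graphic = require("../../components/Graphic.jsx");
var SectionHeader = require("../../components/SectionHeader.jsx");
// ...remaining component requires...

SectionHeader.locale="en-GB";

module.exports = {
  "locale": "en-GB",
  "whatis": {
    "locale": "en-GB",
    "title": "Some section title",
    "getContent": function(handler) { return <section>
      <SectionHeader name="whatis" title="Some section title"/>
      <p>Converted markdown, with handler.onClick style bindings wherever the
      source markdown used this.onClick.</p>
    </section>; }
  }
};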