diff --git a/dev/build.js b/dev/build.js
index 25befb35..b80abf62 100644
--- a/dev/build.js
+++ b/dev/build.js
@@ -4,25 +4,34 @@
 import { parse, join } from 'path'
 import * as esbuild from 'esbuild'
 import { rootDir, DEV, time } from './utils.js'
+import { generateContentJSON } from './exo-parser.js'
 
-const getHash = async head => {
+const getHash = async (head) => {
   if (!head.startsWith('ref:')) return { hash: head.trim(), branch: 'detached' }
   const parts = head.split(' ')[1].trim().split('/')
   const branch = parts[parts.length - 1]
   const hash = await readFile(join(rootDir, '.git', ...parts), 'utf8')
   return { hash: hash.trim(), branch }
 }
- 
+
 try {
   const head = await readFile(join(rootDir, '.git/HEAD'), 'utf8')
   const { hash, branch } = await getHash(head)
   process.env.HASH = `${branch}@${hash.trim()}`
 } catch (err) {
-  console.warn('Unable to load git commit version, fallback to time based hash', err)
+  console.warn(
+    'Unable to load git commit version, fallback to time based hash',
+    err,
+  )
   const now = Math.floor((Date.now() - 16e11) / 1000)
   process.env.HASH = `unk@${now.toString(36)}`
 }
+export const exoJsDir = () => readdir(join(rootDir, 'js-introduction'))
+
+export const bundleJSONDir = (dirName) =>
+  mkdir(join(rootDir, dirName), { recursive: true })
+
 
 const templateDir = join(rootDir, 'template')
 const readEntry = async ({ name, ext, base }) => [
   name,
@@ -51,6 +60,7 @@ const config = {
 const serve = () => esbuild.serve({ servedir }, config)
 
 const generate = async (file = 'index') => {
+  await generateContentJSON('js-introduction', 'public')
   const content = await readdir(templateDir)
   const entries = await Promise.all(content.map(parse).map(readEntry))
   const templates = Object.fromEntries(entries)
@@ -61,7 +71,6 @@ const generate = async (file = 'index') => {
   const readTemplate = (key) =>
     cache[key] ??
     (cache[key] = templates[key]?.replace(//gm, replace) || ``)
-
   return readTemplate(file)
 }
 
diff --git a/dev/exo-parser.js b/dev/exo-parser.js
new file mode 100644
index 00000000..5d126146
--- /dev/null
+++ b/dev/exo-parser.js
@@ -0,0 +1,81 @@
+import { readdir, writeFile, readFile, mkdir } from 'fs/promises'
+import { join, parse as pathParse } from 'path'
+
+import { fromMarkdown } from 'mdast-util-from-markdown'
+
+import { rootDir } from './utils.js'
+
+// returns a flattened array of the node and all of its descendants
+const children = (n) =>
+  n.children ? [n, ...n.children.flatMap(children)] : [n]
+
+const getTrimValue = (n) => n.value?.trim()
+const textContent = (n) =>
+  children(n).map(getTrimValue).filter(Boolean).join(' ') || ''
+
+const isH1 = (node) => node.type === 'heading' && node.depth === 1
+const isH2 = (node) => node.type === 'heading' && node.depth === 2
+const isH3 = (node) => node.type === 'heading' && node.depth === 3
+const isP = (node) => node.type === 'paragraph' || node.type === 'text'
+const isCODE = (node) => node.type === 'code' || node.type === 'inlineCode'
+const isLI = (node) => node.type === 'list'
+
+const parseContent = (nodeList) => {
+  const content = { description: '' }
+  let mode
+  let test
+  for (const node of nodeList) {
+    if (!content.title && isH1(node)) {
+      content.title = textContent(node)
+    } else if (isH2(node)) {
+      mode = textContent(node).toLowerCase()
+      content[mode] = []
+    } else if (mode === 'notions') {
+      if (isLI(node)) {
+        content.notions = children(node).map(getTrimValue).filter(Boolean)
+      }
+    } else if (mode === 'instructions') {
+      if (isP(node) || isLI(node)) {
+        content.instructions = children(node)
+          .map(getTrimValue)
+          .filter(Boolean)
+          .join(' ')
+      }
+    } else if (mode === 'tests') {
+      if (isH3(node)) {
+        test = { name: textContent(node) }
+        content.tests.push(test)
+      } else if (test && isCODE(node)) {
+        test.code = textContent(node)
+        test.lang = node.lang
+      } else {
+        console.warn('ignored node', node)
+      }
+    } else if (mode) {
+      // any other mode is stored in raw tree
+      content[mode].push(node)
+    } else {
+      // before any mode is set, we are writing the description
+      content.description += `${textContent(node).trim()}\n`
+    }
+  }
+  return content
+}
+
+const readdirParse = async (path) =>
+  (await readdir(join(path))).map((file) => pathParse(join(path, file)))
+
+export const generateContentJSON = async (input, output) => {
+  const dirList = await readdirParse(join(rootDir, input))
+  const contentProcessing = dirList.map(async ({ dir, base, name }) => {
+    const content = await readFile(join(dir, base))
+    const root = fromMarkdown(content)
+    const parsed = parseContent(root.children)
+    const outDir = join(rootDir, output, dir.slice(rootDir.length))
+    await mkdir(outDir, { recursive: true })
+    await writeFile(join(outDir, `${name}.json`), JSON.stringify(parsed))
+    return [name, parsed]
+  })
+
+  return Object.fromEntries(await Promise.all(contentProcessing))
+}
diff --git a/dev/server.js b/dev/server.js
index 0f1d5bb5..df9a4bbd 100644
--- a/dev/server.js
+++ b/dev/server.js
@@ -53,4 +53,6 @@ createServer(async (req, res) => {
 
   // Apply headers from the worker
   sendResponse({ body, options, res })
-}).listen(PORT, () => console.log(`Dev server ready on ${process.env.DOMAIN}`))
+}).listen(PORT, () => {
+  console.log(`Dev server ready on ${process.env.DOMAIN}`)
+})
diff --git a/package.json b/package.json
index 703b4077..15b3c6c8 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,7 @@
   "dependencies": {
     "esbuild": "^0.11.2",
     "fast-toml": "^0.5.4",
+    "mdast-util-from-markdown": "^1.0.4",
     "preact": "^10.5.13"
   }
 }