Major update - simpler file structure, better layouts, moar content
This commit is contained in:
370
lib/build.js
Normal file
370
lib/build.js
Normal file
@@ -0,0 +1,370 @@
|
||||
const { exists } = require("fs-extra/lib/fs"); // NOTE(review): unused — also a deep import into fs-extra internals; candidate for removal

/**
 * Builds the entire static site.
 *
 * Steps, in order:
 *   1. empty the output folder
 *   2. copy static assets
 *   3. parse + render every page under srcPath/pages and srcPath/sitePosts
 *   4. generate paged journal listings per tag and per year
 *   5. render "support" pages (hand-built pages that skip the layout pass)
 *
 * @param {object}   config                           full app config (see lib/defaults.json5)
 * @param {object}   config.build                     build options
 * @param {string}   config.build.srcPath             root of the source content
 * @param {string}   config.build.outputPath          destination folder (emptied first!)
 * @param {number}   [config.build.journalsPerPage=5] entries per listing page
 * @param {boolean}  [config.isRebuild]               true when triggered by the file watcher
 * @param {Function} [config.logFunction]             logger; defaults to a no-op
 * @param {object}   config.site                      site-wide data passed to templates
 */
module.exports = async (config) => {
    const { promises: fs } = require("fs"),
        fse = require("fs-extra"),
        path = require("path"),
        ejs = require("ejs"),
        frontMatter = require("front-matter"),
        glob = require("glob"),
        hljs = require("highlight.js"),
        // markdown renderer; fenced code is highlighted with highlight.js
        // when the language is known, otherwise left to default escaping
        md = require("markdown-it")({
            highlight: (str, lang) => {
                if (lang && hljs.getLanguage(lang)) {
                    try {
                        return hljs.highlight(str, { language: lang }).value;
                    } catch (__) {}
                }

                return ""; // use external default escaping
            },
            html: true,
            linkify: true,
            typographer: true,
            xhtmlOut: true,
        }),
        emoji = require("markdown-it-emoji"),
        // { readJsonIfExists } = require("./utils"),
        { build, isRebuild, logFunction: log = () => {} } = config || {},
        { outputPath, journalsPerPage = 5, srcPath } = build,
        { site } = config,
        // Copies every entry of the given assets directory into outputPath.
        // An entry named "_root" is copied onto the output root itself;
        // everything else becomes a same-named subfolder.
        // NOTE(review): the async forEach callback is fire-and-forget and the
        // fse.copy() promises are never awaited, so copies may still be in
        // flight when the build continues — confirm this is acceptable.
        copyAssets = async (directory) => {
            const assets = await fs.readdir(directory);

            assets.forEach(async (asset) => {
                // we no longer merge scripts and styles, thanks to http/2's parallel file handling
                if (asset === "_root") {
                    fse.copy(path.join(srcPath, "assets", asset), outputPath);
                } else {
                    fse.copy(
                        path.join(srcPath, "assets", asset),
                        path.join(outputPath, asset)
                    );
                }
            });
        },
        // Human-readable reading-time estimate at 275 words per minute.
        getReadTime = (text) => {
            const WPM = 275,
                fixedString = text.replace(/[^\w\s]+/g, ""),
                count = fixedString.split(/\s+/).length;

            if (count < WPM) return "less than 1 minute";
            else return `${Math.ceil(count / WPM)} minutes`;
        },
        // Case-insensitive, locale-aware alphabetical comparator for tags.
        tagSorter = (a, b) => a.toLowerCase().localeCompare(b.toLowerCase()),
        // Reads one source file, splits front matter from body, collects tags
        // (front-matter `tags` plus inline #hashtags in journal bodies, which
        // get rewritten into tag-page links), and returns a render-ready
        // object shaped { ...config, page: {...}, site: {...} }.
        // `siteData`/`isSupport` are only passed for support pages, where the
        // already-rendered site pages are exposed to the template.
        parseFile = (file, pagePath, siteData, isSupport) => {
            const { dir, ext, name } = path.parse(file) || {},
                // a dot remaining in `name` means a compound extension
                // (e.g. feed.xml.ejs) — keep the inner extension as-is
                hasExt = name.indexOf(".") > -1,
                destPath = path.join(outputPath, dir),
                filePath = path.join(pagePath, file),
                // read page file
                data = fse.readFileSync(filePath, "utf-8"),
                // render page
                { attributes, body } = frontMatter(data),
                { content_type: contentType, tags: originalTags = [] } =
                    attributes,
                // TODO: Look for tags in posts as well, link to them, and add them to tag pages
                // front-matter tags may be a delimited string or an array;
                // normalize to an array either way
                tags =
                    typeof originalTags === "string"
                        ? originalTags.split(/\W+/)
                        : [].concat(originalTags),
                // inline #hashtags only count for journal entries
                innerTags = (
                    contentType === "journal"
                        ? body.match(/\b#(\w+)/g) || []
                        : []
                ).map((val) => val.replace("#", "")),
                allTags = [...tags, ...innerTags].sort(tagSorter),
                // journal bodies get each "#tag" swapped for a link to that
                // tag's listing page.
                // NOTE(review): String.replace with a string pattern replaces
                // only the FIRST occurrence of each tag — repeated hashtags in
                // one entry keep their plain-text form; confirm intended.
                updatedBody =
                    contentType === "journal"
                        ? allTags.reduce(
                              (acc, tag) =>
                                  acc.replace(
                                      `#${tag}`,
                                      `
                <a href="/journal/tags/${tag}/index.html">
                    #<span class="p-category category">${tag}</span>
                </a>`
                                  ),
                              body
                          )
                        : body;

            return {
                ...config,
                page: {
                    name,
                    ...attributes,
                    body: updatedBody,
                    destPath,
                    filePath,
                    path: path.join(dir, hasExt ? name : `${name}.html`),
                    // NOTE(review): recomputes what `allTags` already holds
                    tags: [...tags, ...innerTags].sort(tagSorter),
                    ext,
                },
                site: {
                    ...site,
                    // support pages see the full page list up front; normal
                    // pages receive site data later, at render time
                    pages: isSupport ? siteData : [],
                },
            };
        },
        // Turns a parsed page's body into HTML content: .md goes through
        // markdown-it (journal entries also get a read-time estimate),
        // .ejs is rendered with page + site data, anything else (e.g. .html)
        // passes through untouched.
        parseContent = (page, siteData) => {
            const {
                    body,
                    content_type: contentType,
                    filePath,
                    // tags,
                } = page || {},
                { ext } = path.parse(filePath) || {},
                { pages, tags } = siteData || {};

            let content = body,
                readTime;

            if (ext === ".md") {
                if (contentType === "journal" && typeof body === "string") {
                    readTime = getReadTime(body);
                }
                content = md.render(body);
            } else if (ext === ".ejs") {
                content = ejs.render(
                    body,
                    { page, site: { ...site, pages, tags } },
                    { filename: filePath }
                );
            }

            return { ...page, content, readTime };
        },
        // Wraps rendered content in its EJS layout
        // (srcPath/layouts/<layout>.ejs, defaulting to "default") and writes
        // the finished file under outputPath. Support pages skip the layout
        // and are written as-is. Errors are logged and swallowed so one bad
        // page doesn't kill the whole build.
        renderFile = async (page, isSupport) => {
            const {
                content,
                destPath,
                layout,
                path: pagePath,
                pages,
                siteTags,
                tags,
            } = page || {};
            try {
                const layoutFileName = `${srcPath}/layouts/${
                        layout || "default"
                    }.ejs`,
                    layoutData = await fs.readFile(layoutFileName, "utf-8"),
                    completePage = isSupport
                        ? content
                        : ejs.render(layoutData, {
                              content,
                              page,
                              site: {
                                  ...site,
                                  pages,
                                  // journal pages link against the full site
                                  // tag list; other pages use their own tags
                                  tags:
                                      page.content_type === "journal"
                                          ? siteTags
                                          : tags,
                              },
                              // NOTE(review): `filename` sits in the data
                              // object rather than ejs's options argument —
                              // verify layout includes resolve as expected
                              filename: layoutFileName,
                          });

                if (!completePage) {
                    console.log("failed!", pagePath, content);
                    return;
                }

                // create destination directory
                fse.mkdirsSync(destPath);

                // save the html file
                fse.writeFileSync(
                    path.join(outputPath, pagePath),
                    completePage
                );
            } catch (e) {
                console.log("failed!", pagePath);
                console.log("paths", destPath, outputPath);
                console.error(e);
                return;
            }
        };

    md.use(emoji);

    log(`${isRebuild ? "Reb" : "B"}uilding...`);

    // clear destination folder
    fse.emptyDirSync(outputPath);

    // copy assets folder
    await copyAssets(path.join(srcPath, "assets"));

    // parse every content file under pages/ and sitePosts/
    const files = ["pages", "sitePosts"].reduce((acc, pageDir) => {
            return [
                ...acc,
                ...glob
                    .sync("**/*.@(md|ejs|html)", {
                        cwd: path.join(srcPath, pageDir),
                    })
                    .map((file) =>
                        parseFile(file, path.join(srcPath, pageDir))
                    ),
            ];
        }, []),
        // newest first; dated pages sort before undated ones
        sortByPubDate = (a, b) => {
            if (a.date_pub && b.date_pub) {
                let a_dt = new Date(a.date_pub).getTime(),
                    b_dt = new Date(b.date_pub).getTime();
                if (a_dt < b_dt) {
                    return 1;
                }
                if (b_dt < a_dt) {
                    return -1;
                }
                return 0;
            }
            if (a.date_pub) return -1;
            if (b.date_pub) return 1;
            return 0;
        },
        pages = files.map(({ page }) => ({ ...page })).sort(sortByPubDate),
        // tag -> number of pages carrying it
        tagCloud = pages.reduce((acc, curr) => {
            const { tags } = curr;
            tags.forEach((tag) => {
                if (acc[tag]) acc[tag]++;
                else acc[tag] = 1;
            });
            return acc;
        }, {}),
        tags = Object.keys(tagCloud).sort(tagSorter),
        // publication year -> number of journal entries in it
        yearCloud = pages
            .filter(({ content_type = "" }) => content_type === "journal")
            .reduce((acc, curr) => {
                const { date_pub } = curr;
                if (date_pub) {
                    const year = new Date(date_pub).getFullYear();
                    if (acc[year]) acc[year]++;
                    else acc[year] = 1;
                }
                return acc;
            }, {}),
        years = Object.keys(yearCloud).sort().reverse(),
        pagesWithContent = pages.map((page) =>
            parseContent(page, { pages, tags })
        );

    // add data for the whole site to each page as it's rendered
    // NOTE(review): renderFile is async but these calls are not awaited —
    // this function may resolve before all files are written; confirm.
    pagesWithContent.forEach((page) => {
        renderFile({ ...page, pages: pagesWithContent, siteTags: tags });
    });

    /* Journal Stuff - Tags & Years */

    // make page(s) for each tag
    tags.forEach((tag) => {
        // check counts
        let postCount = tagCloud[tag],
            pageCount = Math.ceil(postCount / journalsPerPage);
        for (let i = 1; i <= pageCount; i++) {
            const firstEntryIndex = journalsPerPage * (i - 1),
                lastEntryIndex = journalsPerPage * i;

            renderFile({
                content: tag,
                destPath: path.join(outputPath, "journal", "tags", tag),
                entriesToList: pagesWithContent
                    .filter(
                        (p) =>
                            p && Array.isArray(p.tags) && p.tags.includes(tag)
                    )
                    .slice(firstEntryIndex, lastEntryIndex),
                layout: "tag",
                path: `journal/tags/${tag}/${
                    i === 1 ? "index.html" : `page${i}.html`
                }`,
                site: { ...site, pages: pagesWithContent, tags },
                pageCount,
                pageNum: i,
                pages: pagesWithContent,
                tag,
                tags,
                title: `Journal Entries Tagged with #${tag}`,
            });
        }
    });

    // make page(s) for each year
    years.forEach((year) => {
        // check counts
        let postCount = yearCloud[year],
            pageCount = Math.ceil(postCount / journalsPerPage);
        for (let i = 1; i <= pageCount; i++) {
            const firstEntryIndex = journalsPerPage * (i - 1),
                lastEntryIndex = journalsPerPage * i;

            // TODO: rethink the data passed in here - you're paging solution works (kinda), take it over the finish line!
            renderFile({
                content: year,
                destPath: path.join(outputPath, "journal", year),
                entriesToList: pagesWithContent
                    .filter(({ content_type = "", date_pub = "" }) => {
                        if (!date_pub || content_type !== "journal")
                            return false;

                        // keep entries published within this calendar year
                        // NOTE(review): bounds are hard-coded to UTC-0500 —
                        // confirm that's the intended timezone
                        const p_dt = new Date(date_pub).getTime(),
                            y1_dt = new Date(
                                `${year}-01-01T00:00:00-0500`
                            ).getTime(),
                            y2_dt = new Date(
                                `${year}-12-31T23:59:59-0500`
                            ).getTime();
                        return p_dt >= y1_dt && p_dt <= y2_dt;
                    })
                    .slice(firstEntryIndex, lastEntryIndex),
                layout: "journal-year",
                path: `journal/${year}/${
                    i === 1 ? "index.html" : `page${i}.html`
                }`,
                site: { ...site, pages: pagesWithContent, tags },
                pageCount,
                pageNum: i,
                pages: pagesWithContent,
                tags,
                title: `Journal Entries from ${year}`,
                year,
            });
        }
    });

    /* Support pages - anything too weird / specific for markdown rendering */

    // collect support pages
    const support = ["support"].reduce((acc, pageDir) => {
        return [
            ...acc,
            ...glob
                .sync("**/*.@(md|ejs|html)", {
                    cwd: path.join(srcPath, pageDir),
                })
                .map((file) =>
                    parseFile(
                        file,
                        path.join(srcPath, pageDir),
                        pagesWithContent,
                        true
                    )
                ),
        ];
    }, []);

    // write each one out
    support.forEach((fileData) => {
        const { page } = fileData;
        // .ejs support pages still need their body rendered before writing
        if (page?.ext === ".ejs") {
            const pageAndContent = parseContent(page, {
                pages: pagesWithContent,
                tags,
            });
            return renderFile({ ...fileData, ...pageAndContent, tags }, true);
        }
        return renderFile(fileData, true);
    });
};
|
19
lib/defaults.json5
Normal file
19
lib/defaults.json5
Normal file
@@ -0,0 +1,19 @@
|
||||
{
    /** TACO Express Default Options **/

    /*
        The function used to log output (console.log, morgan, etc).
        Should take one (or more) strings as arguments.
    */
    logFunction: null,

    // Build options (srcPath, outputPath, journalsPerPage, ...) —
    // presumably supplied by the caller's config; empty by default.
    build: {},

    // Site-wide data passed through to page templates.
    site: {},

    // Local dev-server options.
    serve: {
        // Port for the HTTP server.
        port: 5000,
    },
}
|
43
lib/loadConfig.js
Normal file
43
lib/loadConfig.js
Normal file
@@ -0,0 +1,43 @@
|
||||
module.exports = (opts, envKey) => {
|
||||
const
|
||||
fs = require('fs'),
|
||||
path = require('path'),
|
||||
|
||||
json5 = require('json5'),
|
||||
|
||||
{ convertCamelToUpperSnakeCase, readJsonIfExists } = require('./utils'),
|
||||
|
||||
{ cwd, env } = process,
|
||||
|
||||
def = readJsonIfExists(path.resolve(__dirname, 'defaults.json5')),
|
||||
|
||||
// gets value from ENV || options || defaults (in that order)
|
||||
getVal = (envName) => {
|
||||
const snakeEnvName = `${envKey}_${convertCamelToUpperSnakeCase(envName)}`;
|
||||
if (env[snakeEnvName]) return env[snakeEnvName];
|
||||
if (opts[envName]) return opts[envName];
|
||||
return def[envName];
|
||||
},
|
||||
|
||||
// gets array from ENV || options || defaults (in that order)
|
||||
getArray = (envName, optName = '') => {
|
||||
if (optName === '') {
|
||||
optName = envName;
|
||||
envName = convertCamelToUpperSnakeCase(envName);
|
||||
}
|
||||
envName = `${envKey}_${envName}`;
|
||||
if (env[envName]) return env[envName].split(path.delimiter);
|
||||
if (Array.isArray(opts[optName]) && opts[optName].length) return opts[optName];
|
||||
return def[optName];
|
||||
};
|
||||
|
||||
|
||||
return {
|
||||
|
||||
...Object.keys(def).reduce((acc, curr) => {
|
||||
if (Array.isArray(def[curr])) acc[curr] = getArray(curr);
|
||||
else acc[curr] = getVal(curr);
|
||||
return acc;
|
||||
}, {}),
|
||||
};
|
||||
};
|
26
lib/serve.js
Normal file
26
lib/serve.js
Normal file
@@ -0,0 +1,26 @@
|
||||
module.exports = async (config) => {
|
||||
let isReady = false;
|
||||
const
|
||||
http = require('http'),
|
||||
|
||||
address = require('network-address'),
|
||||
handler = require('serve-handler'),
|
||||
|
||||
build = require('./build'),
|
||||
|
||||
{ build: buildOpts, logFunction: log = () => {}, serve: serveOpts } = config || {},
|
||||
{ outputPath, srcPath } = buildOpts || {},
|
||||
{ port = 5000 } = serveOpts || {},
|
||||
|
||||
server = http.createServer((request, response) => {
|
||||
// You pass two more arguments for config and middleware
|
||||
// More details here: https://github.com/vercel/serve-handler#options
|
||||
return handler(request, response, { public: outputPath });
|
||||
});
|
||||
|
||||
await build(config);
|
||||
|
||||
server.listen(port, async () => {
|
||||
log(`Running at http://${address()}:${port} / http://localhost:${port}`);
|
||||
});
|
||||
};
|
57
lib/utils.js
Normal file
57
lib/utils.js
Normal file
@@ -0,0 +1,57 @@
|
||||
module.exports = (() => {
|
||||
const
|
||||
chalk = require('chalk'),
|
||||
|
||||
getTime = () => {
|
||||
const
|
||||
now = new Date(),
|
||||
tzo = -now.getTimezoneOffset(),
|
||||
dif = tzo >= 0 ? '+' : '-',
|
||||
|
||||
pad = (num) => {
|
||||
const norm = Math.floor(Math.abs(num));
|
||||
return `${norm < 10 ? '0' : ''}${norm}`;
|
||||
};
|
||||
return [
|
||||
now.getFullYear(),
|
||||
'-',
|
||||
pad(now.getMonth() + 1),
|
||||
'-',
|
||||
pad(now.getDate()),
|
||||
'T',
|
||||
pad(now.getHours()),
|
||||
':',
|
||||
pad(now.getMinutes()),
|
||||
':',
|
||||
pad(now.getSeconds()),
|
||||
dif,
|
||||
pad(tzo / 60),
|
||||
':',
|
||||
pad(tzo % 60)
|
||||
].join('');
|
||||
};
|
||||
|
||||
return {
|
||||
convertCamelToUpperSnakeCase:
|
||||
str => str.replace(/[A-Z]/g, letter => `_${letter}`).toUpperCase(),
|
||||
|
||||
getTime,
|
||||
|
||||
log: (msg) => console.log(`${chalk.grey(`${getTime()}:`)} ${msg}`),
|
||||
|
||||
readJsonIfExists: (filePath) => {
|
||||
const
|
||||
fs = require('fs'),
|
||||
|
||||
json5 = require('json5');
|
||||
|
||||
try {
|
||||
return json5.parse(fs.readFileSync(filePath, {encoding: 'utf8'}));
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') return {};
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
};
|
||||
})();
|
58
lib/watch.js
Normal file
58
lib/watch.js
Normal file
@@ -0,0 +1,58 @@
|
||||
module.exports = async (config) => {
|
||||
let isReady = false;
|
||||
const
|
||||
http = require('http'),
|
||||
|
||||
chokidar = require('chokidar'),
|
||||
address = require('network-address'),
|
||||
handler = require('serve-handler'),
|
||||
|
||||
build = require('./build'),
|
||||
rebuild = (cfg) => {
|
||||
isReady = false;
|
||||
build({ ...cfg, isRebuild: true });
|
||||
isReady = true;
|
||||
},
|
||||
|
||||
{ build: buildOpts, logFunction: log = () => {}, serve: serveOpts } = config || {},
|
||||
{ outputPath, srcPath } = buildOpts || {},
|
||||
{ port = 5000 } = serveOpts || {},
|
||||
|
||||
watcher = chokidar.watch([srcPath, '*.json'], {
|
||||
ignored: /(^|[\/\\])\../, // ignore dotfiles
|
||||
persistent: true
|
||||
})
|
||||
.on('add', (path) => {
|
||||
if (isReady) {
|
||||
log(`File ${path} has been added`)
|
||||
rebuild(config);
|
||||
}
|
||||
})
|
||||
.on('change', (path) => {
|
||||
if (isReady) {
|
||||
log(`File ${path} has been changed`)
|
||||
rebuild(config);
|
||||
}
|
||||
})
|
||||
.on('ready', () => {
|
||||
isReady = true;
|
||||
})
|
||||
.on('unlink', (path) => {
|
||||
if (isReady) {
|
||||
log(`File ${path} has been removed`)
|
||||
rebuild(config);
|
||||
}
|
||||
}),
|
||||
|
||||
server = http.createServer((request, response) => {
|
||||
// You pass two more arguments for config and middleware
|
||||
// More details here: https://github.com/vercel/serve-handler#options
|
||||
return handler(request, response, { public: outputPath });
|
||||
});
|
||||
|
||||
await build(config);
|
||||
|
||||
server.listen(port, () => {
|
||||
log(`Running at http://${address()}:${port} / http://localhost:${port}`);
|
||||
});
|
||||
};
|
Reference in New Issue
Block a user