migrate to web-weevr
This commit is contained in:
parent
afcf37a284
commit
292f6ee3d0
377 lib/build.js
@@ -1,377 +0,0 @@
const { exists } = require("fs-extra/lib/fs");

module.exports = async (config) => {
    const { promises: fs } = require("fs"),
        fse = require("fs-extra"),
        path = require("path"),
        ejs = require("ejs"),
        frontMatter = require("front-matter"),
        glob = require("glob"),
        hljs = require("highlight.js"),
        md = require("markdown-it")({
            highlight: (str, lang) => {
                if (lang && hljs.getLanguage(lang)) {
                    try {
                        return hljs.highlight(str, { language: lang }).value;
                    } catch (__) {}
                }

                return ""; // use external default escaping
            },
            html: true,
            linkify: true,
            typographer: true,
            xhtmlOut: true,
        })
            .use(require("markdown-it-footnote"))
            .use(require("markdown-it-emoji")),
        // { readJsonIfExists } = require("./utils"),
        { build, isRebuild, logFunction: log = () => {} } = config || {},
        { outputPath, journalsPerPage = 5, srcPath } = build,
        { site } = config,
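        // copy everything under src/assets into the output folder; the contents
        // of the special "_root" folder land in the output root, everything
        // else keeps its own folder name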
        copyAssets = async (directory) => {
            const assets = await fs.readdir(directory);

            assets.forEach(async (asset) => {
                // we no longer merge scripts and styles, thanks to http/2's parallel file handling
                if (asset === "_root") {
                    fse.copy(path.join(srcPath, "assets", asset), outputPath);
                } else {
                    fse.copy(
                        path.join(srcPath, "assets", asset),
                        path.join(outputPath, asset)
                    );
                }
            });
        },
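        // estimate reading time from the word count, assuming 275 words per minute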
        getReadTime = (text) => {
            const WPM = 275,
                fixedString = text.replace(/[^\w\s]+/g, ""),
                count = fixedString.split(/\s+/).length;

            if (count < WPM) return "less than 1 minute";
            else return `${Math.ceil(count / WPM)} minutes`;
        },
        tagSorter = (a, b) => a.toLowerCase().localeCompare(b.toLowerCase()),
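        // read a source file, split its front matter from its body, and (for
        // journal entries) turn inline #hashtags into category links; returns
        // the page data merged with the site-wide config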
        parseFile = (file, pagePath, siteData, isSupport) => {
            const { dir, ext, name } = path.parse(file) || {},
                hasExt = name.indexOf(".") > -1,
                destPath = path.join(outputPath, dir),
                filePath = path.join(pagePath, file),
                // read page file
                data = fse.readFileSync(filePath, "utf-8"),
                info = fse.statSync(filePath, "utf-8"),
                // render page
                { attributes, body } = frontMatter(data),
                { content_type: contentType, tags: originalTags = [] } =
                    attributes,
                // TODO: Look for tags in posts as well, link to them, and add them to tag pages
                tags =
                    typeof originalTags === "string"
                        ? originalTags.split(/\W+/)
                        : [].concat(originalTags),
                innerTags = (
                    contentType === "journal"
                        ? body.match(/\b#(\w+)/g) || []
                        : []
                ).map((val) => val.replace("#", "")),
                allTags = [...tags, ...innerTags].sort(tagSorter),
                updatedBody =
                    contentType === "journal"
                        ? allTags.reduce(
                              (acc, tag) =>
                                  acc.replace(
                                      `#${tag}`,
                                      `
                <a href="/journal/tags/${tag}/index.html">
                    #<span class="p-category category">${tag}</span>
                </a>`
                                  ),
                              body
                          )
                        : body;

            return {
                ...config,
                page: {
                    name,
                    ...attributes,
                    date_upd:
                        attributes?.date_pub !== info.mtime
                            ? info.mtime
                            : attributes.date_upd ?? "",
                    body: updatedBody,
                    destPath,
                    filePath,
                    path: path.join(dir, hasExt ? name : `${name}.html`),
                    tags: [...tags, ...innerTags].sort(tagSorter),
                    ext,
                },
                site: {
                    ...site,
                    pages: isSupport ? siteData : [],
                },
            };
        },
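        // turn a parsed page's body into HTML: Markdown files go through
        // markdown-it (journal entries also get a read time), EJS files are
        // rendered with the page and site data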
        parseContent = (page, siteData) => {
            const {
                    body,
                    content_type: contentType,
                    filePath,
                    // tags,
                } = page || {},
                { ext } = path.parse(filePath) || {},
                { pages, tags } = siteData || {};

            let content = body,
                readTime;

            if (ext === ".md") {
                if (contentType === "journal" && typeof body === "string") {
                    readTime = getReadTime(body);
                }
                content = md.render(body);
            } else if (ext === ".ejs") {
                content = ejs.render(
                    body,
                    { page, site: { ...site, pages, tags } },
                    { filename: filePath }
                );
            }

            return { ...page, content, readTime };
        },
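        // wrap rendered content in its EJS layout (support pages are written
        // as-is) and save the finished HTML under the output path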
        renderFile = async (page, isSupport) => {
            const {
                content,
                destPath,
                layout,
                path: pagePath,
                pages,
                siteTags,
                tags,
            } = page || {};
            try {
                const layoutFileName = `${srcPath}/layouts/${
                        layout || "default"
                    }.ejs`,
                    layoutData = await fs.readFile(layoutFileName, "utf-8"),
                    completePage = isSupport
                        ? content
                        : ejs.render(layoutData, {
                              content,
                              page,
                              site: {
                                  ...site,
                                  pages,
                                  tags:
                                      page.content_type === "journal"
                                          ? siteTags
                                          : tags,
                              },
                              filename: layoutFileName,
                          });

                if (!completePage) {
                    console.log("failed!", pagePath, content);
                    return;
                }

                // create destination directory
                fse.mkdirsSync(destPath);

                // save the html file
                fse.writeFileSync(
                    path.join(outputPath, pagePath),
                    completePage
                );
            } catch (e) {
                console.log("failed!", pagePath);
                console.log("paths", destPath, outputPath);
                console.error(e);
                return;
            }
        };
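
    // ---- helpers end here; the actual build sequence starts below ----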
    log(`${isRebuild ? "Reb" : "B"}uilding...`);

    // clear destination folder
    fse.emptyDirSync(outputPath);

    // copy assets folder
    await copyAssets(path.join(srcPath, "assets"));
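
    // gather every .md / .ejs / .html source under pages/ and sitePosts/, then
    // derive the published page list, tag cloud, and year cloud from them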
    const files = ["pages", "sitePosts"].reduce((acc, pageDir) => {
            return [
                ...acc,
                ...glob
                    .sync("**/*.@(md|ejs|html)", {
                        cwd: path.join(srcPath, pageDir),
                    })
                    .map((file) =>
                        parseFile(file, path.join(srcPath, pageDir))
                    ),
            ];
        }, []),
        sortByPubDate = (a, b) => {
            if (a.date_pub && b.date_pub) {
                let a_dt = new Date(a.date_pub).getTime(),
                    b_dt = new Date(b.date_pub).getTime();
                if (a_dt < b_dt) {
                    return 1;
                }
                if (b_dt < a_dt) {
                    return -1;
                }
                return 0;
            }
            if (a.date_pub) return -1;
            if (b.date_pub) return 1;
            return 0;
        },
        pages = files
            .map(({ page }) => ({ ...page }))
            .filter(({ is_draft, status }) => !is_draft && status !== "draft")
            .sort(sortByPubDate),
        tagCloud = pages.reduce((acc, curr) => {
            const { tags } = curr;
            tags.forEach((tag) => {
                if (acc[tag]) acc[tag]++;
                else acc[tag] = 1;
            });
            return acc;
        }, {}),
        tags = Object.keys(tagCloud).sort(tagSorter),
        yearCloud = pages
            .filter(({ content_type = "" }) => content_type === "journal")
            .reduce((acc, curr) => {
                const { date_pub } = curr;
                if (date_pub) {
                    const year = new Date(date_pub).getFullYear();
                    if (acc[year]) acc[year]++;
                    else acc[year] = 1;
                }
                return acc;
            }, {}),
        years = Object.keys(yearCloud).sort().reverse(),
        pagesWithContent = pages.map((page) =>
            parseContent(page, { pages, tags })
        );

    // add data for the whole site to each page as it's rendered
    pagesWithContent.forEach((page) => {
        renderFile({ ...page, pages: pagesWithContent, siteTags: tags });
    });

    /* Journal Stuff - Tags & Years */
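
    // each tag (and, below, each year) gets Math.ceil(postCount / journalsPerPage)
    // listing pages: page 1 is index.html, later ones are page2.html, page3.html, ...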

    // make page(s) for each tag
    tags.forEach((tag) => {
        // check counts
        let postCount = tagCloud[tag],
            pageCount = Math.ceil(postCount / journalsPerPage);
        for (let i = 1; i <= pageCount; i++) {
            const firstEntryIndex = journalsPerPage * (i - 1),
                lastEntryIndex = journalsPerPage * i;

            renderFile({
                content: tag,
                destPath: path.join(outputPath, "journal", "tags", tag),
                entriesToList: pagesWithContent
                    .filter(
                        (p) =>
                            p && Array.isArray(p.tags) && p.tags.includes(tag)
                    )
                    .slice(firstEntryIndex, lastEntryIndex),
                layout: "tag",
                path: `journal/tags/${tag}/${
                    i === 1 ? "index.html" : `page${i}.html`
                }`,
                site: { ...site, pages: pagesWithContent, tags },
                pageCount,
                pageNum: i,
                pages: pagesWithContent,
                tag,
                tags,
                title: `Journal Entries Tagged with #${tag}`,
            });
        }
    });

    // make page(s) for each year
    years.forEach((year) => {
        // check counts
        let postCount = yearCloud[year],
            pageCount = Math.ceil(postCount / journalsPerPage);
        for (let i = 1; i <= pageCount; i++) {
            const firstEntryIndex = journalsPerPage * (i - 1),
                lastEntryIndex = journalsPerPage * i;

            // TODO: rethink the data passed in here - your paging solution works (kinda), take it over the finish line!
            renderFile({
                content: year,
                destPath: path.join(outputPath, "journal", year),
                entriesToList: pagesWithContent
                    .filter(({ content_type = "", date_pub = "" }) => {
                        if (!date_pub || content_type !== "journal")
                            return false;

                        const p_dt = new Date(date_pub).getTime(),
                            y1_dt = new Date(
                                `${year}-01-01T00:00:00-0500`
                            ).getTime(),
                            y2_dt = new Date(
                                `${year}-12-31T23:59:59-0500`
                            ).getTime();
                        return p_dt >= y1_dt && p_dt <= y2_dt;
                    })
                    .slice(firstEntryIndex, lastEntryIndex),
                layout: "journal-year",
                path: `journal/${year}/${
                    i === 1 ? "index.html" : `page${i}.html`
                }`,
                site: { ...site, pages: pagesWithContent, tags },
                pageCount,
                pageNum: i,
                pages: pagesWithContent,
                tags,
                title: `Journal Entries from ${year}`,
                year,
            });
        }
    });

    /* Support pages - anything too weird / specific for markdown rendering */

    // collect support pages
    const support = ["support"].reduce((acc, pageDir) => {
        return [
            ...acc,
            ...glob
                .sync("**/*.@(md|ejs|html)", {
                    cwd: path.join(srcPath, pageDir),
                })
                .map((file) =>
                    parseFile(
                        file,
                        path.join(srcPath, pageDir),
                        pagesWithContent,
                        true
                    )
                ),
        ];
    }, []);

    // write each one out
    support.forEach((fileData) => {
        const { page } = fileData;
        if (page?.ext === ".ejs") {
            const pageAndContent = parseContent(page, {
                pages: pagesWithContent,
                tags,
            });
            return renderFile({ ...fileData, ...pageAndContent, tags }, true);
        }
        return renderFile(fileData, true);
    });
};
1411 package-lock.json (generated)
File diff suppressed because it is too large
15 package.json
@@ -1,18 +1,13 @@
 {
     "name": "iew-site-builder",
-    "version": "0.9.8",
+    "version": "0.10.0",
     "description": "",
     "main": "index.js",
-    "scripts": {
-        "build": "node app.js build",
-        "build:prod": "NODE_ENV=production node app.js build",
-        "lint:ejs": "npx ejs-lint src/layout",
-        "serve": "node app.js serve",
-        "watch": "node app.js watch"
-    },
+    "scripts": {},
     "keywords": [],
     "author": "Eric Woodward (https://www.itsericwoodward.com)",
     "license": "MIT",
     "dependencies": {},
-    "devDependencies": {}
+    "devDependencies": {
+        "web-weevr": "git+ssh://git@git.itsericwoodward.com:eric/web-weevr.git"
+    }
 }