Mirror of https://github.com/benjaminion/upgrading-ethereum-book.git
The Great Astro Migration
integrations/my_add_tooltips.js (new file)
import { visit } from 'unist-util-visit';
import fs from 'fs';

// Add a tooltip to constant values in the text according to the mapping in the
// supplied file.

let constantsMap = {};

function addTooltips() {

  return function(tree) {
    try {
      visit(tree, 'inlineCode', (node, index, parent) => {
        // HTML in headings causes problems for the page index, so skip these
        if (parent.type !== 'heading') {
          const text = node.value;
          const value = constantsMap[text];
          if (value) {
            node.type = 'html';
            node.value = `<code title="${text} = ${value}">${text}</code>`;
            node.children = undefined;
          }
        }
      })
    } catch (err) {
      console.error(err);
    }
  }
}

export default function(options) {

  // Read the constants file and store it for later
  const constantsFile = options?.constantsFile || '';
  try {
    constantsMap = JSON.parse(fs.readFileSync(constantsFile, 'utf8'));
  } catch (err) {
    console.error(err);
  }

  return {
    name: 'myAddTooltips',
    hooks: {
      'astro:config:setup': ({ updateConfig }) => {
        updateConfig({
          markdown: {
            remarkPlugins: [
              addTooltips,
            ],
          },
        });
      },
    },
  };
}
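For reference, addTooltips expects constantsFile to point at a flat JSON object mapping constant names to their values. A minimal sketch of the expected shape (the names and values here are illustrative, not taken from the book's actual constants file):

// Illustrative only: the shape of the JSON file that addTooltips reads.
const exampleConstants = {
  "SLOTS_PER_EPOCH": "32",
  "MAX_EFFECTIVE_BALANCE": "32000000000",
};
// With this mapping, an inline code span `SLOTS_PER_EPOCH` in the Markdown is
// rewritten to <code title="SLOTS_PER_EPOCH = 32">SLOTS_PER_EPOCH</code>.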
integrations/my_autolink_headings.js (new file)
import { CONTINUE, SKIP, visit } from 'unist-util-visit';
import { fromHtmlIsomorphic } from 'hast-util-from-html-isomorphic';
import { toString } from 'hast-util-to-string';

// Add IDs and SVG permalinks to headings h3 to h6
// (rehype-autolink-headings is good, but can't be configured to ignore h1 and h2)

const anchor = fromHtmlIsomorphic('<a class="anchor" ariaHidden="true"><svg aria-hidden="true" focusable="false" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>', {fragment: true}).children[0];

// The headings to process
const headings = ['h3', 'h4', 'h5', 'h6'];

// Should match the method in bin/build/checks/links.pl
function slugIt(heading) {
  return (
    toString(heading)
      .trim()
      .toLowerCase()
      .replace(/\s+/g, '-')
      .replace(/[^a-z0-9_-]/g, '')
  );
}

function autolinkHeadings() {
  return function(tree) {
    try {
      visit(tree, 'element', node => {
        if (headings.indexOf(node.tagName) === -1) {
          return CONTINUE;
        }
        const newAnchor = structuredClone(anchor);
        if (node.properties.id) {
          newAnchor.properties = { ...newAnchor.properties, href: '#' + node.properties.id };
        } else {
          const id = slugIt(node);
          newAnchor.properties = { ...newAnchor.properties, href: '#' + id };
          node.properties.id = id;
        }
        node.children = [ newAnchor ].concat(node.children);
        return SKIP;
      })
    } catch (err) {
      console.error(err);
    }
  }
}

export default function() {
  return {
    name: 'myAutolinkHeadings',
    hooks: {
      'astro:config:setup': ({ updateConfig }) => {
        updateConfig({
          markdown: {
            rehypePlugins: [
              autolinkHeadings,
            ],
          },
        });
      },
    },
  };
}
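To illustrate the slugging scheme, here is the same transformation applied to a plain string (a standalone sketch, not part of the integration; the heading text is an example):

// Sketch: the transformation slugIt applies, shown on an example string.
const example = 'Fork choice: LMD GHOST'
  .trim()
  .toLowerCase()
  .replace(/\s+/g, '-')
  .replace(/[^a-z0-9_-]/g, '');
// example === 'fork-choice-lmd-ghost', so the heading gets id="fork-choice-lmd-ghost"
// and the injected anchor links to #fork-choice-lmd-ghost.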
integrations/my_build_checks.js (new file)
import runChecks from '../bin/build/prebuild.js';

function buildChecks(logger) {
  logger.info('Running pre-build checks:');
  runChecks(logger, false);
}

export default function() {
  let doChecks;
  return {
    name: 'myBuildChecks',
    hooks: {
      'astro:config:setup': ({ command }) => {
        doChecks = (command === 'build');
      },
      'astro:config:done': ({ logger }) => {
        if (doChecks) {
          buildChecks(logger);
        }
      },
    },
  };
}
integrations/my_cleanup_html.js (new file)
import { visit, SKIP } from 'unist-util-visit';

// Clean up any weird HTML artefacts, especially those that fail validation

function cleanupHtml() {
  return function(tree) {
    try {

      // Remove `is:raw=""` that's on `code` elements, probably from Prism.
      visit(tree, 'element', node => {
        if (node.tagName == 'code'
            && node.properties['is:raw'] !== undefined) {
          delete(node.properties['is:raw']);
        }
      });

      // Remove any comments
      visit(tree, 'comment', (node, index, parent) => {
        parent.children.splice(index, 1);
        return SKIP;
      });

    } catch (err) {
      console.error(err);
    }
  }
}

export default function() {
  return {
    name: 'myCleanupHtml',
    hooks: {
      'astro:config:setup': ({ updateConfig }) => {
        updateConfig({
          markdown: {
            rehypePlugins: [
              cleanupHtml,
            ],
          },
        });
      },
    },
  };
}
integrations/my_fixup_links.js (new file)
import { visit } from 'unist-util-visit';

// Prepend `base` to URLs in the markdown file.
// It seems that [Astro does not do this](https://github.com/withastro/astro/issues/3626)

function fixupLinks(basePath) {
  return function(tree) {
    try {
      visit(tree, 'element', node => {
        if (node.tagName == 'a'
            && node.properties.href) {

          // Add basePath prefix to local URLs that lack it
          // [Astro does not do this](https://github.com/withastro/astro/issues/3626)
          if(node.properties.href.startsWith('/')
             && !node.properties.href.startsWith(basePath + '/')) {
            node.properties.href = basePath + node.properties.href;
          }

          // Add rel="external noopener" and target="_blank" attributes to off-site links
          if(!node.properties.href.startsWith('/')
             && !node.properties.href.startsWith('#')) {
            node.properties.rel = ['external', 'noopener'];
            node.properties.target = '_blank';
          }

        }
      })
    } catch (err) {
      console.error(err);
    }
  }
}

export default function() {
  return {
    name: 'myFixupLinks',
    hooks: {
      'astro:config:setup': ({ config, updateConfig }) => {
        updateConfig({
          markdown: {
            rehypePlugins: [
              [fixupLinks, config.base],
            ],
          },
        });
      },
    },
  };
}
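The effect of fixupLinks, sketched for a hypothetical base of '/eth2book' (the URLs below are illustrative):

// Sketch only; '/eth2book' stands in for config.base.
//   <a href="/part2/consensus/">    becomes  <a href="/eth2book/part2/consensus/">
//   <a href="#fork-choice">         is left unchanged
//   <a href="https://example.com">  gains rel="external noopener" target="_blank"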
integrations/my_htaccess.js (new file)
import fs from 'fs';

// Write a .htaccess file to set the correct 404 page

function writeHtaccess(base, dir, logger) {
  const file = dir.pathname + '.htaccess';
  const contents = `ErrorDocument 404 ${base}/404.html\n`;
  fs.writeFileSync(file, contents);
  logger.info(`Wrote .htaccess file to ${file}`);
}

export default function(base) {
  return {
    name: 'myHtaccess',
    hooks: {
      'astro:build:done': ({ dir, logger }) => {
        writeHtaccess(base, dir, logger);
      },
    },
  };
}
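Unlike the other integrations here, myHtaccess takes the site's base path as a plain string rather than an options object. A minimal usage sketch, with a hypothetical base path:

import myHtaccess from './integrations/my_htaccess.js';

// Sketch only: '/eth2book' is a hypothetical base path. On astro:build:done
// this writes <outDir>/.htaccess containing the single line:
//   ErrorDocument 404 /eth2book/404.html
const htaccess = myHtaccess('/eth2book');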
integrations/my_search_index.js (new file)
import * as cheerio from 'cheerio';
import { unified } from 'unified';
import { rehype } from 'rehype';
import parse from 'rehype-parse';
import { toHtml } from 'hast-util-to-html';
import fs from 'fs';

// File scoped to accumulate the index across calls to mySearchIndex
const searchIndex = [];

function isExcludedFrontmatter (frontmatter, exclude) {
  for (let i = 0; i < exclude.frontmatter.length; i++) {
    const test = exclude.frontmatter[i];
    const [key, ...rest] = Object.keys(test);
    if (Object.prototype.hasOwnProperty.call(frontmatter, key)
        && frontmatter[key] == test[key]) {
      return true;
    }
  }
  return false;
}

// Concatenate all text in child nodes while respecting exclusions
function getText ($, node, exclude) {
  return [...$(node).contents().not(exclude.ignore)]
    .map(e => (e.type === 'text') ? e.data : getText($, e, exclude))
    .join('');
}

// Recurse until we find an element we want to treat as a chunk, then get all its text content.
function getChunks ($, node, chunkTypes, exclude, counts) {

  if (counts === undefined) {
    counts = Array(chunkTypes.length).fill(0);
  }

  for (let idx = 0; idx < chunkTypes.length; idx++) {

    const type = chunkTypes[idx];

    if ($(node).is(type.query)) {

      const text = getText($, node, exclude);
      if (text !== '') {

        const tagName = $(node).prop('tagName').toLowerCase();
        let id = $(node).attr('id');
        if (id === undefined) {
          id = tagName + '_' + counts[idx];
          $(node).attr('id', id);
          ++counts[idx];
        }

        return [{
          type: tagName,
          label: type.label,
          id: id,
          text: text,
          weight: type.weight === undefined ? 1 : type.weight,
        }];
      }
    }
  }

  return [...$(node).children().not(exclude.ignore)]
    .map(e => getChunks($, e, chunkTypes, exclude, counts))
    .flat();
}

function includePage(frontmatter, exclude) {
  return (frontmatter !== undefined
          && isExcludedFrontmatter(frontmatter, exclude) === false
          && exclude.pages?.indexOf(frontmatter.path) === -1);
}

function buildSearchIndex(options) {

  const { chunkTypes, exclude } = { ...options };

  return function (tree, file) {

    const frontmatter = file.data.astro.frontmatter;

    if (includePage(frontmatter, exclude)) {
      // console.log('Processing ' + frontmatter.path);

      // We convert between HAST and Cheerio by going via a HTML string.
      // TODO: avoid cheerio and just use unist-visit and related tools.
      const $ = cheerio.load(toHtml(tree, {allowDangerousHtml: true}), null, false);
      const chunks = getChunks($, $.root(), chunkTypes, exclude);

      const pageIndexData = {
        frontmatter: {
          path: frontmatter.path,
          titles: frontmatter.titles,
        },
        chunks: chunks,
      };

      searchIndex.push(pageIndexData);

      return unified().use(parse, {fragment: true}).parse($.html());

    } else {
      // console.log('Ignoring ' + frontmatter.path);
    }
  }
}

function writeSearchIndex(dir, file, logger) {

  const fileName = dir.pathname + file;

  if (searchIndex.length) {
    logger.info('Indexed ' + searchIndex.length + ' pages');
  } else {
    logger.warn('No pages were indexed');
  }

  fs.writeFileSync(fileName, JSON.stringify(searchIndex));
  logger.info('Wrote search index to ' + fileName);
}

export default function(options) {

  if (options.enabled === false) {
    return {name: 'my-search-index'};
  }

  return {
    name: 'mySearchIndex',
    hooks: {
      // We build the search index with rehype
      'astro:config:setup': ({ updateConfig }) => {
        updateConfig({
          markdown: {
            rehypePlugins: [
              [buildSearchIndex, options],
            ],
          },
        });
      },
      // We write the search index to a file once the build is complete
      'astro:build:done': ({ dir, logger }) => {
        writeSearchIndex(dir, options.indexFile, logger);
      },
    },
  };
}
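The options object consumed by mySearchIndex can be read off the code above. A sketch of its shape, with illustrative values only (the selectors, labels, and file name are assumptions, not the book's actual configuration):

// Sketch of the options shape; every concrete value below is illustrative.
const searchIndexOptions = {
  enabled: true,               // pass false to disable the integration entirely
  indexFile: 'search.json',    // written into the build output directory
  chunkTypes: [
    // Elements to treat as chunks; matched with cheerio's $(node).is(query).
    // weight defaults to 1 when omitted.
    { query: 'h3, h4, h5, h6', label: 'Heading', weight: 2 },
    { query: 'p', label: 'Text' },
  ],
  exclude: {
    frontmatter: [ { hide: true } ],  // skip pages whose frontmatter matches any entry
    pages: [ '/some/page/' ],         // skip pages by their frontmatter.path
    ignore: '.dont-index',            // cheerio selector for content to leave out of the text
  },
};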
integrations/my_svg_inline.js (new file)
import { visit } from 'unist-util-visit';
import { optimize } from 'svgo';
import { getHashDigest } from 'loader-utils';
import path from 'path';
import fs from 'fs';

// Inline SVG files into the Markdown AST

// SVGO doesn't really support adding elements, and the API changes.
// The below is based on code from the "reusePaths" plugin.
const addTitle = {
  name: 'addTitle',
  type: 'visitor',
  active: true,
  fn: (ast, params) => {
    return {
      element: {
        exit: (node, parentNode) => {
          if (node.name === 'svg' && parentNode.type === 'root') {
            const hasTitle = node.children.some(
              (child) => child.type === 'element' && child.name === 'title'
            )
            if (!hasTitle) {
              const titleElement = {
                type: 'element',
                name: 'title',
                attributes: {},
                children: [],
              }
              Object.defineProperty(titleElement, 'parentNode', {
                writable: true,
                value: node,
              });
              const titleContents = {
                type: 'text',
                value: params.titleText,
              }
              Object.defineProperty(titleContents, 'parentNode', {
                writable: true,
                value: titleElement,
              });
              titleElement.children.push(titleContents)
              node.children.unshift(titleElement);
            }
          }
        },
      },
    }
  },
}

// See https://www.npmjs.com/package/svgo
const plugins = [
  'preset-default',
  'prefixIds',
  'removeDimensions',
  'removeXMLNS',
  {
    name: 'addAttributesToSVGElement',
    params: {attribute: {'role': 'img'}},
  },
]

const addTitleSettings = {
  name: addTitle.name,
  type: addTitle.type,
  active: addTitle.active,
  fn: addTitle.fn,
  params: undefined,
}

const addAttributes = {
  name: 'addAttributesToSVGElement',
  params: undefined,
}

function inlineSvg(options) {

  const filePath = options.filePath || '';
  const cachePathTmp = options.cachePath;
  const cachePath = cachePathTmp.endsWith('/') ? cachePathTmp : cachePathTmp + '/';
  const { logger, doCache } = options;

  return function (tree) {
    try {
      visit(tree, 'paragraph', async node => {
        if (node.children[0].type == 'image') {

          const image = node.children[0];

          if (image.url.endsWith('.svg')) {

            const originalSvg = fs.readFileSync(filePath + image.url, 'utf8');
            const basename = path.basename(image.url, '.svg');

            // We need to distinguish multiple SVGs on the same page by using "prefixIds"
            const digest = getHashDigest(basename, 'md5', 'base52', 4);

            // Configure the SVGO addAttributes plugin to add an ID to SVG element
            addAttributes['params'] = {attribute: {id: basename + "-svg"}};

            // Configure our custom plugin that adds a title element
            addTitleSettings['params'] = {titleText: image.alt};

            // If the cachePath option is provided, we load the optimised SVG from there
            // when it exists and is newer than the original SVG. If a cached version
            // is not available or is older than the original SVG, we rewrite it.

            const origMtime = fs.statSync(filePath + image.url).mtime;
            const cacheFile = doCache ? cachePath + basename + '.svg' : null;
            const goodCache = doCache
                  && fs.existsSync(cacheFile)
                  && (fs.statSync(cacheFile).mtime > origMtime);

            let svg;
            if (goodCache) {
              svg = fs.readFileSync(cacheFile, 'utf8');
              logger.debug(`Using cached ${basename}.svg`);
            } else {
              svg = optimize(
                originalSvg,
                {
                  path: digest,
                  plugins: plugins.concat([addTitleSettings, addAttributes])
                }
              ).data;
              logger.debug(`Optimising ${basename}.svg`);
              if (doCache) {
                fs.writeFileSync(cacheFile, svg);
                logger.debug(`Caching ${basename}.svg`);
              } else {
                logger.debug(`Not caching ${basename}.svg`);
              }
            }

            // Modify the current node in-place
            node.type = 'html';
            node.value = svg;
            node.children = [];
          }
        }
      })
    } catch (err) {
      console.error(err);
    }
  }
}

export default function(options) {
  return {
    name: 'mySvgInline',
    hooks: {
      'astro:config:setup': ({ updateConfig, logger }) => {
        let doCache = false;
        if (options.cachePath) {
          try {
            if (fs.statSync(options.cachePath).isDirectory()) {
              doCache = true;
            } else {
              logger.warn(`Not caching SVGs: ${options.cachePath} is not a directory`);
            }
          } catch(e) {
            logger.warn(`Not caching SVGs: ${options.cachePath} does not exist`);
          }
        } else {
          logger.info('Not caching SVGs: no cachePath provided');
        }
        updateConfig({
          markdown: {
            remarkPlugins: [
              [inlineSvg, { ...options, logger: logger, doCache: doCache }],
            ],
          },
        });
      },
    },
  };
}
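Taken together, the integrations would be wired into astro.config.mjs along these lines. This is a hedged sketch rather than the repository's actual config: the base path and all option values are illustrative.

// astro.config.mjs — illustrative sketch; paths and option values are not the real settings.
import { defineConfig } from 'astro/config';
import myAddTooltips from './integrations/my_add_tooltips.js';
import myAutolinkHeadings from './integrations/my_autolink_headings.js';
import myBuildChecks from './integrations/my_build_checks.js';
import myCleanupHtml from './integrations/my_cleanup_html.js';
import myFixupLinks from './integrations/my_fixup_links.js';
import myHtaccess from './integrations/my_htaccess.js';
import mySearchIndex from './integrations/my_search_index.js';
import mySvgInline from './integrations/my_svg_inline.js';

const base = '/eth2book';  // hypothetical base path

export default defineConfig({
  base: base,
  integrations: [
    myBuildChecks(),
    myAddTooltips({ constantsFile: 'src/constants.json' }),      // illustrative path
    mySvgInline({ filePath: 'src/', cachePath: 'cache/svg/' }),  // illustrative paths
    myAutolinkHeadings(),
    myFixupLinks(),
    myCleanupHtml(),
    mySearchIndex({
      enabled: true,
      indexFile: 'search.json',
      chunkTypes: [ { query: 'p', label: 'Text' } ],
      exclude: { frontmatter: [], pages: [], ignore: '.dont-index' },
    }),
    myHtaccess(base),
  ],
});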