feat: new build pipeline
makamekm committed Oct 23, 2024
1 parent dbc8e5d commit 38e9e3b
Showing 12 changed files with 374 additions and 391 deletions.
484 changes: 173 additions & 311 deletions CHANGELOG.md

Large diffs are not rendered by default.

37 changes: 37 additions & 0 deletions src/transform/fsContext.ts
@@ -0,0 +1,37 @@
import {readFileSync, writeFileSync} from 'fs';
import {readFile, writeFile} from 'fs/promises';

import {FsContext} from './typings';
import {isFileExists, isFileExistsAsync} from './utilsFS';

export class DefaultFsContext implements FsContext {
exist(path: string): boolean {
return isFileExists(path);
}

read(path: string): string {
return readFileSync(path, 'utf8');
}

write(path: string, content: string): void {
writeFileSync(path, content, {
encoding: 'utf8',
});
}

async existAsync(path: string): Promise<boolean> {
return await isFileExistsAsync(path);
}

async readAsync(path: string): Promise<string> {
return readFile(path, 'utf8');
}

async writeAsync(path: string, content: string): Promise<void> {
writeFile(path, content, {
encoding: 'utf8',
});
}
}

export const defaultFsContext = new DefaultFsContext();
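
The FsContext seam introduced above makes the filesystem pluggable: the plugins changed below fall back to defaultFsContext whenever no fs is supplied in their options. As a rough illustration, an in-memory context for tests or virtual builds could look like the sketch that follows. MemoryFsContext is a hypothetical name, and the sketch assumes the FsContext interface in ./typings declares exactly the six methods DefaultFsContext implements; the interface itself is not part of this diff.

import {FsContext} from './typings';

// Sketch of an in-memory FsContext for tests or virtual builds.
// Assumes FsContext declares the same six methods as DefaultFsContext above.
export class MemoryFsContext implements FsContext {
    private files = new Map<string, string>();

    exist(path: string): boolean {
        return this.files.has(path);
    }

    read(path: string): string {
        const content = this.files.get(path);
        if (content === undefined) {
            throw new Error(`File not found: ${path}`);
        }
        return content;
    }

    write(path: string, content: string): void {
        this.files.set(path, content);
    }

    async existAsync(path: string): Promise<boolean> {
        return this.exist(path);
    }

    async readAsync(path: string): Promise<string> {
        return this.read(path);
    }

    async writeAsync(path: string, content: string): Promise<void> {
        this.write(path, content);
    }
}

A context like this can then be passed as opts.fs to the images and includes plugins below, which default to defaultFsContext when the option is omitted.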
19 changes: 10 additions & 9 deletions src/transform/plugins/images/collect.ts
@@ -12,21 +12,21 @@ type Options = MarkdownItPluginOpts & {
singlePage: boolean;
};

const collect = (input: string, options: Options) => {
const collect = async (input: string, options: Options) => {
const md = new MarkdownIt().use(imsize);

const {root, path, destPath = '', copyFile, singlePage} = options;
const {root, path, destPath = '', copyFile, singlePage, deps} = options;
const tokens = md.parse(input, {});
let result = input;

tokens.forEach((token) => {
for (const token of tokens) {
if (token.type !== 'inline') {
return;
}

const children = token.children || [];

children.forEach((childToken) => {
for (const childToken of children) {
if (childToken.type !== 'image') {
return;
}
@@ -40,15 +40,16 @@ const collect = (input: string, options: Options) => {
const targetPath = resolveRelativePath(path, src);
const targetDestPath = resolveRelativePath(destPath, src);

if (singlePage && !path.includes('_includes/')) {
const newSrc = relative(root, resolveRelativePath(path, src));
deps?.markDep?.(path, targetPath, 'image');

if (singlePage && !path.includes('_includes/')) {
const newSrc = relative(root, targetPath);
result = result.replace(src, newSrc);
}

copyFile(targetPath, targetDestPath);
});
});
await copyFile(targetPath, targetDestPath);
}
}

if (singlePage) {
return result;
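The deps?.markDep?.(from, to, kind) calls threaded through this and the other changed plugins let the new build pipeline record which source file pulls in which image, include, or link target. The shape of the deps option is not part of this diff, so the collector below is only a compatible sketch: DepGraph and DepKind are hypothetical names, and the kinds are the ones the plugins pass here ('image', 'include', 'link').

// Hypothetical dependency collector matching the deps?.markDep?.(from, to, kind)
// calls introduced in this commit; the real deps type is not shown in this diff.
type DepKind = 'image' | 'include' | 'link';

class DepGraph {
    private edges = new Map<string, Set<string>>();

    // Record that `from` pulls in `to`; the kind is folded into the edge label.
    markDep = (from: string, to: string, kind: DepKind): void => {
        const targets = this.edges.get(from) ?? new Set<string>();
        targets.add(`${kind}:${to}`);
        this.edges.set(from, targets);
    };

    // Everything a given source file was seen to depend on.
    dependenciesOf(from: string): string[] {
        return [...(this.edges.get(from) ?? [])];
    }
}

// Usage sketch: pass {deps: graph} alongside the other collect/plugin options.
const graph = new DepGraph();
graph.markDep('docs/index.md', 'docs/_assets/logo.svg', 'image');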
26 changes: 17 additions & 9 deletions src/transform/plugins/images/index.ts
@@ -2,22 +2,23 @@ import {join, sep} from 'path';
import {bold} from 'chalk';
import {optimize} from 'svgo';
import Token from 'markdown-it/lib/token';
import {readFileSync} from 'fs';

import {isFileExists, resolveRelativePath} from '../../utilsFS';
import {resolveRelativePath} from '../../utilsFS';
import {isExternalHref, isLocalUrl} from '../../utils';
import {MarkdownItPluginCb, MarkdownItPluginOpts} from '../typings';
import {StateCore} from '../../typings';
import {FsContext, StateCore} from '../../typings';
import {defaultFsContext} from '../../fsContext';

interface ImageOpts extends MarkdownItPluginOpts {
assetsPublicPath: string;
inlineSvg?: boolean;
}

function replaceImageSrc(
fs: FsContext,
token: Token,
state: StateCore,
{assetsPublicPath = sep, root = '', path: optsPath, log}: ImageOpts,
{assetsPublicPath = sep, root = '', path: optsPath, log, deps}: ImageOpts,
) {
const src = token.attrGet('src') || '';
const currentPath = state.env.path || optsPath;
@@ -28,7 +29,9 @@

const path = resolveRelativePath(currentPath, src);

if (isFileExists(path)) {
deps?.markDep?.(currentPath, path, 'image');

if (fs.exist(path)) {
state.md.assets?.push(path);
} else {
log.error(`Asset not found: ${bold(src)} in ${bold(currentPath)}`);
@@ -51,15 +54,18 @@ function prefix() {
}

function convertSvg(
fs: FsContext,
token: Token,
state: StateCore,
{path: optsPath, log, notFoundCb, root}: SVGOpts,
{path: optsPath, log, notFoundCb, root, deps}: SVGOpts,
) {
const currentPath = state.env.path || optsPath;
const path = resolveRelativePath(currentPath, token.attrGet('src') || '');

try {
const raw = readFileSync(path).toString();
deps?.markDep?.(currentPath, path, 'image');

const raw = fs.read(path).toString();
const result = optimize(raw, {
plugins: [
{
@@ -90,6 +96,8 @@ function convertSvg(
type Opts = SVGOpts & ImageOpts;

const index: MarkdownItPluginCb<Opts> = (md, opts) => {
const fs = opts.fs ?? defaultFsContext;

md.assets = [];

const plugin = (state: StateCore) => {
@@ -117,9 +125,9 @@ const index: MarkdownItPluginCb<Opts> = (md, opts) => {
const shouldInlineSvg = opts.inlineSvg !== false && !isExternalHref(imgSrc);

if (imgSrc.endsWith('.svg') && shouldInlineSvg) {
childrenTokens[j] = convertSvg(childrenTokens[j], state, opts);
childrenTokens[j] = convertSvg(fs, childrenTokens[j], state, opts);
} else {
replaceImageSrc(childrenTokens[j], state, opts);
replaceImageSrc(fs, childrenTokens[j], state, opts);
}

childrenTokens[j].attrSet('yfm_patched', '1');
51 changes: 31 additions & 20 deletions src/transform/plugins/includes/collect.ts
@@ -1,19 +1,24 @@
import {relative} from 'path';
import {bold} from 'chalk';
import {readFileSync} from 'fs';

import {getRelativePath, isFileExists, resolveRelativePath} from '../../utilsFS';
import {getRelativePath, resolveRelativePath} from '../../utilsFS';
import {defaultFsContext} from '../../fsContext';

import {IncludeCollectOpts} from './types';

const includesPaths: string[] = [];

function processRecursive(
async function processRecursive(
includePath: string,
targetDestPath: string,
options: IncludeCollectOpts,
) {
const {path, log, copyFile, includedParentPath: includedParentPathNullable, included} = options;
const {
path,
log,
copyFile,
includedParentPath: includedParentPathNullable,
included,
fs,
} = options;
const includedParentPath = includedParentPathNullable || path;

const includeOptions = {
@@ -23,19 +28,19 @@
};

try {
const contentProcessed = copyFile(includePath, targetDestPath, includeOptions);
const contentProcessed = await copyFile(includePath, targetDestPath, includeOptions);

// To reduce file reading we can include the file content into the generated content
if (included) {
const content = contentProcessed ?? readFileSync(targetDestPath, 'utf8');
const content = contentProcessed ?? (await fs?.readAsync(targetDestPath));

if (content) {
const includedRelativePath = getRelativePath(includedParentPath, includePath);

// The appendix is the map that protects from multiple include files
if (!options.appendix?.has(includedRelativePath)) {
// Recursive function to include the depth structure
const includeContent = collectRecursive(content, {
const includeContent = await collectRecursive(content, {
...options,
path: includePath,
includedParentPath,
@@ -54,8 +59,8 @@
}
}

function collectRecursive(result: string, options: IncludeCollectOpts) {
const {root, path, destPath = '', log, singlePage} = options;
async function collectRecursive(result: string, options: IncludeCollectOpts) {
const {root, path, destPath = '', log, singlePage, fs = defaultFsContext, deps} = options;

const INCLUDE_REGEXP = /{%\s*include\s*(notitle)?\s*\[(.+?)]\((.+?)\)\s*%}/g;

@@ -67,19 +72,24 @@

let includePath = resolveRelativePath(path, relativePath);
const hashIndex = relativePath.lastIndexOf('#');
if (hashIndex > -1 && !isFileExists(includePath)) {

deps?.markDep?.(path, includePath, 'include');

if (hashIndex > -1 && !(await fs.existAsync(includePath))) {
includePath = includePath.slice(0, includePath.lastIndexOf('#'));
relativePath = relativePath.slice(0, hashIndex);
}

const targetDestPath = resolveRelativePath(destPath, relativePath);

if (includesPaths.includes(includePath)) {
log.error(`Circular includes: ${bold(includesPaths.concat(path).join(' ▶ '))}`);
if (options.includesPaths?.includes(includePath)) {
log.error(
`Circular includes: ${bold(options.includesPaths?.concat(path).join(' ▶ '))}`,
);
break;
}

if (singlePage && !includesPaths.length) {
if (singlePage && !options.includesPaths?.length) {
const newRelativePath = relative(root, includePath);
const newInclude = matchedInclude.replace(relativePath, newRelativePath);

@@ -89,22 +99,23 @@ function collectRecursive(result: string, options: IncludeCollectOpts) {
INCLUDE_REGEXP.lastIndex = INCLUDE_REGEXP.lastIndex - delta;
}

includesPaths.push(includePath);
options.includesPaths?.push(includePath);

processRecursive(includePath, targetDestPath, options);
await processRecursive(includePath, targetDestPath, options);

includesPaths.pop();
options.includesPaths?.pop();
}

return result;
}

function collect(input: string, options: IncludeCollectOpts) {
async function collect(input: string, options: IncludeCollectOpts) {
const shouldWriteAppendix = !options.appendix;

options.includesPaths = options.includesPaths ?? [];
options.appendix = options.appendix ?? new Map();

input = collectRecursive(input, options);
input = await collectRecursive(input, options);

if (shouldWriteAppendix) {
// Appendix should be appended to the end of the file (it supports depth structure, so the included files will have included as well)
14 changes: 9 additions & 5 deletions src/transform/plugins/includes/index.ts
@@ -1,15 +1,15 @@
import {bold} from 'chalk';
import Token from 'markdown-it/lib/token';

import {StateCore} from '../../typings';
import {
GetFileTokensOpts,
getFileTokens,
getFullIncludePath,
isFileExists,
resolveRelativePath,
} from '../../utilsFS';
import {findBlockTokens} from '../../utils';
import {StateCore} from '../../typings';
import {defaultFsContext} from '../../fsContext';
import {MarkdownItPluginCb, MarkdownItPluginOpts} from '../typings';

import {MarkdownItIncluded} from './types';
@@ -29,7 +29,7 @@ type Options = MarkdownItPluginOpts &
};

function unfoldIncludes(md: MarkdownItIncluded, state: StateCore, path: string, options: Options) {
const {root, notFoundCb, log, noReplaceInclude = false} = options;
const {root, notFoundCb, log, noReplaceInclude = false, fs = defaultFsContext, deps} = options;
const {tokens} = state;
let i = 0;

@@ -57,7 +57,11 @@ function unfoldIncludes(md: MarkdownItIncluded, state: StateCore, path: string,
let pathname = fullIncludePath;
let hash = '';
const hashIndex = fullIncludePath.lastIndexOf('#');
if (hashIndex > -1 && !isFileExists(pathname)) {
const existed = fs.exist(pathname);

deps?.markDep?.(path, pathname, 'include');

if (hashIndex > -1 && !existed) {
pathname = fullIncludePath.slice(0, hashIndex);
hash = fullIncludePath.slice(hashIndex + 1);
}
@@ -68,7 +72,7 @@
continue;
}

const fileTokens = getFileTokens(pathname, state, {
const fileTokens = getFileTokens(fs, pathname, state, {
...options,
content: included, // The content forces the function to use it instead of reading from the disk
});
7 changes: 6 additions & 1 deletion src/transform/plugins/includes/types.ts
@@ -9,10 +9,15 @@ export interface MarkdownItIncluded extends MarkdownIt {

export type IncludeCollectOpts = MarkdownItPluginOpts & {
destPath: string;
copyFile(path: string, dest: string, opts: IncludeCollectOpts): string | null | undefined;
copyFile(
path: string,
dest: string,
opts: IncludeCollectOpts,
): Promise<string | null | undefined>;
singlePage: Boolean;
included: Boolean;
includedParentPath?: string;
additionalIncludedList?: string[];
includesPaths?: string[];
appendix?: Map<string, string>;
};
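
With copyFile now returning a promise, collect implementations await it, and the resolved value (the processed content, when available) lets processRecursive above skip re-reading the destination file. A minimal sketch of a copyFile compatible with the new signature is shown below; copyIncludeFile is a hypothetical name, and the real pipeline's copy logic is not part of this diff.

import {mkdir, readFile, writeFile} from 'fs/promises';
import {dirname} from 'path';

import {IncludeCollectOpts} from './types';

// Sketch only: copy an included file to its destination and return the content
// so the collector can inline it without a second read.
async function copyIncludeFile(
    path: string,
    dest: string,
    _opts: IncludeCollectOpts,
): Promise<string | null | undefined> {
    const content = await readFile(path, 'utf8');

    await mkdir(dirname(dest), {recursive: true});
    await writeFile(dest, content, 'utf8');

    return content;
}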
9 changes: 5 additions & 4 deletions src/transform/plugins/links/collect.ts
@@ -4,6 +4,7 @@ import url from 'url';

import {PAGE_LINK_REGEXP, getHrefTokenAttr, isLocalUrl} from '../../utils';
import {getSinglePageAnchorId, resolveRelativePath} from '../../utilsFS';
import {MarkdownItPluginOpts} from '../typings';

import index from './index';

@@ -12,17 +13,15 @@ const replaceLinkHref = (input: string, href: string, newHref: string) => {
return input.replace(`](${href})`, `](${newHref})`);
};

type Options = {
root: string;
path: string;
type Options = MarkdownItPluginOpts & {
singlePage: boolean;
};

/* Replace the links to the markdown and yaml files if the singlePage option is passed in the options
* Example: replace [Text](../../path/to/file.md#anchor) with [Text](#_path_to_file_anchor)
* */
const collect = (input: string, options: Options) => {
const {root, path: startPath, singlePage} = options;
const {root, path: startPath, singlePage, deps} = options;

if (!singlePage) {
return;
@@ -66,6 +65,8 @@ const collect = (input: string, options: Options) => {
if (pathname) {
const isPageFile = PAGE_LINK_REGEXP.test(pathname);
if (isPageFile) {
deps?.markDep?.(startPath, pathname, 'link');

const newHref = getSinglePageAnchorId({
root,
currentPath: startPath,
(The remaining file diffs were not loaded on this page.)
