Yanick Champoux 2023-01-10 12:34:27 -05:00
parent 04f22fb927
commit 39f4227989
69 changed files with 3433 additions and 0 deletions

website/.gitignore vendored Normal file
@@ -0,0 +1,10 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

website/.npmrc Normal file
@@ -0,0 +1 @@
engine-strict=true

website/README.md Normal file
@@ -0,0 +1,38 @@
# create-svelte
Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte).
## Creating a project
If you're seeing this, you've probably already done this step. Congrats!
```bash
# create a new project in the current directory
npm create svelte@latest
# create a new project in my-app
npm create svelte@latest my-app
```
## Developing
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
```bash
npm run dev
# or start the server and open the app in a new browser tab
npm run dev -- --open
```
## Building
To create a production version of your app:
```bash
npm run build
```
You can preview the production build with `npm run preview`.
> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.
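Since this project's `package.json` (below) already lists `@sveltejs/adapter-static`, the adapter setup presumably looks something like the following sketch — the exact options are assumptions, not part of this commit:

```js
// svelte.config.js — hypothetical sketch using the @sveltejs/adapter-static
// dependency declared in website/package.json.
import adapter from '@sveltejs/adapter-static';

/** @type {import('@sveltejs/kit').Config} */
const config = {
  kit: {
    adapter: adapter(),
  },
};

export default config;
```

With `adapter-static`, SvelteKit 1.x typically also wants `export const prerender = true;` in the root layout, which is not shown in this diff.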

website/package.json Normal file
@@ -0,0 +1,35 @@
{
"name": "mydocs",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "vite dev",
"build": "vite build",
"preview": "vite preview"
},
"devDependencies": {
"@sveltejs/adapter-auto": "^1.0.0",
"@sveltejs/kit": "^1.0.0",
"svelte": "^3.54.0",
"vite": "^4.0.0"
},
"type": "module",
"dependencies": {
"@iconify-json/ri": "^1.1.4",
"@rollup/pluginutils": "^5.0.2",
"@sveltejs/adapter-static": "^1.0.1",
"@svelteness/kit-docs": "link:/home/yanick/work/javascript/kit-docs/packages/kit-docs",
"globby": "^13.1.3",
"gray-matter": "^4.0.3",
"kit-docs-workspace": "github:svelteness/kit-docs",
"kleur": "^4.1.5",
"lru-cache": "^7.14.1",
"markdown-it": "^13.0.1",
"markdown-it-anchor": "^8.6.6",
"markdown-it-container": "^3.0.0",
"markdown-it-emoji": "^2.0.2",
"shiki": "^0.12.1",
"toml": "^3.0.0",
"unplugin-icons": "^0.15.1"
}
}

website/src/app.html Normal file
@@ -0,0 +1,24 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
<meta name="viewport" content="width=device-width" />
<script>
const key = 'svelteness::color-scheme';
const scheme = localStorage[key];
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
if (scheme === 'dark' || (scheme !== 'light' && prefersDark)) {
document.documentElement.classList.add('dark');
} else {
document.documentElement.classList.remove('dark');
}
</script>
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover">
<div style="display: contents">%sveltekit.body%</div>
</body>
</html>

@@ -0,0 +1,24 @@
<div>
<h4>💡 Info</h4>
<slot/>
</div>
<style>
div {
background: lightgreen;
margin: 0px 2em;
padding: 0.25em 1em;
font-weight: normal;
border-radius: 1em;
color: black;
}
h4 {
margin: 0px;
}
</style>

@@ -0,0 +1,401 @@
import { type FilterPattern, createFilter } from '@rollup/pluginutils';
import { json, RequestHandler } from '@sveltejs/kit';
import { readFileSync } from 'fs';
import { globbySync } from 'globby';
import kleur from 'kleur';
import path from 'path';
import {
type MarkdownParser,
type ParsedMarkdownResult,
createMarkdownParser,
getFrontmatter,
parseMarkdown,
} from '../markdown-plugin/parser';
import { readDirDeepSync, sortOrderedFiles } from '../utils/fs';
import { kebabToTitleCase } from '../utils/string';
import { isString } from '../utils/unit';
const CWD = process.cwd();
const ROUTES_DIR = path.resolve(CWD, 'src/routes');
let parser: MarkdownParser;
const restParamsRE = /\[\.\.\.(.*?)\]/g;
const restPropsRE = /\[\.\.\.(.*?)\]/;
const deepMatchRE = /\[\.\.\..*?_deep\]/;
const layoutNameRE = /@.+/g;
const defaultIncludeRE = /\.md($|\?)/;
export type NoValue = null | undefined | void;
export type FalsyValue = false | NoValue;
export type HandleMetaRequestOptions = {
extensions?: string[];
filter?: (file: string) => boolean;
resolve?: FileResolver | null | (FileResolver | FalsyValue)[];
transform?: MetaTransform | null | (MetaTransform | FalsyValue)[];
};
export type FileResolver = (
slug: string,
helpers: { resolve: typeof resolveSlug },
) => ResolvedFile | FalsyValue | Promise<ResolvedFile | FalsyValue>;
export type ResolvedFile =
| string
| { file: string; transform: MetaTransform | (MetaTransform | FalsyValue)[] };
export type MetaTransform = (
data: { slug: string; filePath: string; parser: MarkdownParser } & ParsedMarkdownResult,
) => void | Promise<void>;
/**
* Careful: this function will throw if it can't match the `slug` param to a file.
*/
export async function handleMetaRequest(slugParam: string, options: HandleMetaRequestOptions = {}) {
const { filter, extensions, resolve, transform } = options;
const slug = paramToSlug(slugParam);
const resolverArgs: Parameters<FileResolver> = [slug, { resolve: resolveSlug }];
let resolution: ResolvedFile | FalsyValue = null;
if (Array.isArray(resolve)) {
for (const resolver of resolve) {
if (resolver) resolution = await resolver?.(...resolverArgs);
if (resolution) break;
}
} else {
resolution = await resolve?.(...resolverArgs);
}
if (!resolution) {
resolution = resolveSlug(slug, { extensions });
}
const resolvedFile = isString(resolution) ? resolution : resolution?.file;
const resolvedTransform = isString(resolution) ? null : resolution?.transform;
if (!resolvedFile) {
throw Error('Could not find file.');
}
if (filter && !filter(`/${cleanFilePath(resolvedFile)}`)) {
return null;
}
const filePath = path.isAbsolute(resolvedFile) ? resolvedFile : path.resolve(CWD, resolvedFile);
const content = readFileSync(filePath).toString();
if (!parser) {
parser = await createMarkdownParser();
}
let result = parseMarkdown(parser, content, filePath);
result = JSON.parse(JSON.stringify(result));
const transformerArgs: Parameters<MetaTransform> = [{ slug, filePath, parser, ...result }];
const runTransform = async (transform?: HandleMetaRequestOptions['transform']) => {
if (Array.isArray(transform)) {
for (const transformer of transform) {
if (transformer) await transformer?.(...transformerArgs);
}
} else {
await transform?.(...transformerArgs);
}
};
await runTransform(transform);
await runTransform(resolvedTransform);
return result;
}
export type CreateMetaRequestHandlerOptions = {
include?: FilterPattern;
exclude?: FilterPattern;
debug?: boolean;
} & HandleMetaRequestOptions;
export function createMetaRequestHandler(
options: CreateMetaRequestHandlerOptions = {},
): RequestHandler {
const { include, exclude, debug, ...handlerOptions } = options;
const filter = createFilter(
include ?? handlerOptions.extensions?.map((ext) => new RegExp(`${ext}$`)) ?? defaultIncludeRE,
exclude,
);
return async ({ params }) => {
try {
const res = await handleMetaRequest(params.slug as string, { filter, ...handlerOptions });
if (!res) return new Response(null);
return json(res.meta);
} catch (e) {
if (debug) {
console.log(kleur.bold(kleur.red(`\n[kit-docs]: failed to handle meta request.`)));
console.log(`\n\n${e}\n`);
}
}
return new Response(null);
};
}
const headingRE = /#\s(.*?)($|\n|\r)/;
export type HandleSidebarRequestOptions = {
extensions?: string[];
filter?: (file: string) => boolean;
resolveTitle?: SidebarMetaResolver;
resolveCategory?: SidebarMetaResolver;
resolveSlug?: SidebarMetaResolver;
formatCategoryName?: (name: string, helpers: { format: (name: string) => string }) => string;
};
export type SidebarMetaResolver = (data: {
filePath: string;
relativeFilePath: string;
cleanFilePath: string;
dirname: string;
cleanDirname: string;
frontmatter: Record<string, any>;
fileContent: string;
resolve: () => string;
slugify: typeof slugifyFilePath;
}) => string | void | null | undefined | Promise<string | void | null | undefined>;
/**
* Careful: this function will throw if it can't match the `dir` param to a directory.
*/
export async function handleSidebarRequest(
dirParam: string,
options: HandleSidebarRequestOptions = {},
) {
const { extensions, filter, formatCategoryName, resolveTitle, resolveCategory, resolveSlug } =
options;
const exts = extensions ?? ['.md'];
const globExt =
exts.length > 1 ? `.{${exts.map((ext) => ext.replace(/^\./, '')).join(',')}}` : exts[0];
const directory = paramToDir(dirParam);
const dirPath = path.resolve(ROUTES_DIR, directory);
const filePaths = sortOrderedFiles(readDirDeepSync(dirPath));
const links: Record<string, { title: string; slug: string; match?: 'deep' }[]> = {};
// Root at top.
links['.'] = [];
let hasRoot = false;
for (const filePath of filePaths) {
const filename = path.basename(filePath);
const relativeFilePath = path.relative(ROUTES_DIR, filePath);
const dirs = path.dirname(relativeFilePath).split('/');
const cleanPath = cleanFilePath(filePath);
const cleanDirs = path.dirname(cleanPath).split('/').slice(0, -1);
const cleanDirsReversed = cleanDirs.slice().reverse();
const isIndexFile = /\/\+page\./.test(cleanPath);
const isShallowRoot = cleanDirs.length === 0;
const isRoot = isShallowRoot || deepMatchRE.test(dirs[1]);
let isDeepMatch = false;
let isValidDeepMatch = false;
if (deepMatchRE.test(relativeFilePath)) {
const deepMatchDir = dirs.findIndex((dir) => deepMatchRE.test(dir));
isDeepMatch = deepMatchDir >= 0;
const glob = (depth: number) =>
`src/routes/*${cleanDirs.slice(0, depth).join('/*')}/*+page*${globExt}`;
let file = isDeepMatch ? globbySync(glob(deepMatchDir + 1))?.[0] : null;
if (isDeepMatch && !file) {
file = isDeepMatch ? globbySync(glob(deepMatchDir + 2))?.[0] : null;
}
isValidDeepMatch = isDeepMatch ? file === `src/routes/${relativeFilePath}` : false;
}
if (
filename.startsWith('_') ||
filename.startsWith('.') ||
(isShallowRoot && isIndexFile) ||
(isDeepMatch && !isValidDeepMatch) ||
!(filter?.(`/${cleanPath}`) ?? true)
) {
continue;
}
const fileContent = readFileSync(filePath).toString();
const frontmatter = getFrontmatter(fileContent);
const resolverData = {
filePath,
relativeFilePath,
cleanFilePath: cleanPath,
frontmatter,
fileContent,
dirname: path.dirname(filePath),
cleanDirname: path.dirname(cleanPath),
slugify: slugifyFilePath,
};
const categoryFormatter = formatCategoryName ?? kebabToTitleCase;
const formatCategory = (dirname: string) =>
categoryFormatter(dirname, { format: (name) => kebabToTitleCase(name) });
const resolveDefaultTitle = () =>
frontmatter.sidebar_title ??
frontmatter.title ??
(isDeepMatch ? formatCategory(cleanDirsReversed[0]) : null) ??
fileContent.match(headingRE)?.[1] ??
kebabToTitleCase(path.basename(cleanPath, path.extname(cleanPath)));
const resolveDefaultCategory = () =>
isRoot ? '.' : cleanDirsReversed[isIndexFile && isDeepMatch ? 1 : 0];
const resolveDefaultSlug = () => slugifyFilePath(filePath);
const category = formatCategory(
(await resolveCategory?.({ ...resolverData, resolve: resolveDefaultCategory })) ??
resolveDefaultCategory(),
);
const title =
(await resolveTitle?.({ ...resolverData, resolve: resolveDefaultTitle })) ??
resolveDefaultTitle();
const slug =
(await resolveSlug?.({ ...resolverData, resolve: resolveDefaultSlug })) ??
resolveDefaultSlug();
const match = isDeepMatch ? 'deep' : undefined;
(links[category] ??= []).push({ title, slug, match });
if (!hasRoot) hasRoot = category === '.';
}
if (!hasRoot) {
delete links['.'];
}
return { links };
}
export type CreateSidebarRequestHandlerOptions = {
include?: FilterPattern;
exclude?: FilterPattern;
debug?: boolean;
} & HandleSidebarRequestOptions;
export function createSidebarRequestHandler(
options: CreateSidebarRequestHandlerOptions = {},
): RequestHandler {
const { include, debug, exclude, ...handlerOptions } = options;
const filter = createFilter(
include ?? handlerOptions.extensions?.map((ext) => new RegExp(`${ext}$`)) ?? defaultIncludeRE,
exclude,
);
return async ({ params }) => {
try {
const { links } = await handleSidebarRequest(params.dir as string, {
filter,
...handlerOptions,
});
return json({ links });
} catch (e) {
if (debug) {
console.log(kleur.bold(kleur.red(`\n[kit-docs]: failed to handle sidebar request.`)));
console.log(`\n\n${e}\n`);
}
}
return new Response(null);
};
}
export type ResolveSlugOptions = {
extensions?: string[];
};
/**
* Attempts to resolve the given slug to a file in the `routes` directory. This function returns
* a relative file path.
*/
export function resolveSlug(slug: string, options: ResolveSlugOptions = {}): string | null {
const { extensions } = options;
const exts = extensions ?? ['.md'];
const globExt =
exts.length > 1 ? `.{${exts.map((ext) => ext.replace(/^\./, '')).join(',')}}` : exts[0];
const fileGlobBase = `src/routes/${slug
.split('/')
.slice(0, -1)
.map((s) => `*${s}`)
.join('/')}`;
const glob = `${fileGlobBase}/*${path.basename(slug)}/*${globExt}`;
let file = globbySync(glob)?.[0];
if (!file) {
const glob = `${fileGlobBase}/*${path.basename(slug)}/*index*${globExt}`;
file = globbySync(glob)?.[0];
}
if (!file) {
return null;
}
const matchedSlug = file
.replace(restParamsRE, '')
.replace(layoutNameRE, '')
.replace(path.extname(file), '')
.replace(/\/index$/, slug === 'index' ? '/index' : '');
if (matchedSlug !== `src/routes/${slug}/+page` || !exts.some((ext) => file.endsWith(ext))) {
return null;
}
return file;
}
/**
* Takes an absolute or relative file path, maps it to a path relative to `src/routes`, and
* strips out rest params and layout ids `{[...1]}index{@layout-id}.md`.
*
* @example `src/routes/docs/[...1getting-started]/[...1]intro.md` = `docs/getting-started/intro.md`
*/
export function cleanFilePath(filePath: string) {
const relativePath = path.relative(ROUTES_DIR, filePath);
return relativePath.replace(restParamsRE, '').replace(layoutNameRE, path.extname(filePath));
}
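/**
* Converts a route param into a slug: underscores become slashes and the `.html`
* extension is dropped, e.g. `docs_getting-started_intro.html` = `docs/getting-started/intro`.
*/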
export function paramToSlug(param: string) {
return param.replace(/_/g, '/').replace(/\.html/, '');
}
export function paramToDir(param: string) {
return paramToSlug(param);
}
/**
* Maps a path that points to a file in the `routes` directory to a slug. The file path
* can be absolute or relative to the `routes` directory.
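* @example `src/routes/docs/[...1]intro/+page.md` = `/docs/intro`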
*/
export function slugifyFilePath(filePath: string) {
const cleanPath = cleanFilePath(filePath);
return `/${cleanPath
.replace(path.extname(cleanPath), '')
.replace(/\/?index$/, '')
.replace(/\/\+page$/, '')}`;
}

@@ -0,0 +1,51 @@
import path from 'path';
import {
type Highlighter,
type HighlighterOptions,
type Lang,
getHighlighter,
renderToHtml,
} from 'shiki';
import { type Plugin } from 'vite';
const PLUGIN_NAME = '@svelteness/highlight' as const;
export type HighlightPluginOptions = HighlighterOptions;
export const kitDocsHighlightPlugin = (options: HighlightPluginOptions = {}): Plugin => {
let highlighter: Highlighter;
const highlightQueryRE = /\?highlight/;
return {
name: PLUGIN_NAME,
enforce: 'pre' as const,
async configResolved() {
highlighter = await getHighlighter({
theme: 'material-palenight',
langs: [],
...options,
});
},
transform(code, id) {
if (!highlightQueryRE.test(id)) {
return null;
}
const lang = (new URLSearchParams(id).get('lang') ??
path.extname(id.replace(highlightQueryRE, '')).slice(1)) as Lang;
const tokens = highlighter.codeToThemedTokens(code, lang);
const html = renderToHtml(tokens)
.replace(/\sclass="shiki" style=".*?"/, '')
.trim();
return `
export const tokens = ${JSON.stringify(tokens)}
export const code = ${JSON.stringify(code)}
export const hlCode = ${JSON.stringify(html)}
`;
},
};
};

@@ -0,0 +1,7 @@
export * from './handlers';
export * from './highlight-plugin';
export * from './kit-docs-plugin';
export { kitDocsPlugin as default } from './kit-docs-plugin';
export * from './markdown-plugin';
export * from './markdown-plugin/parser';
export { kebabToTitleCase } from './utils/string';

@@ -0,0 +1,64 @@
import { resolve } from 'path';
import { type HighlighterOptions } from 'shiki';
import { type Plugin } from 'vite';
import { kitDocsHighlightPlugin } from './highlight-plugin';
import { type MarkdownPluginOptions, kitDocsMarkdownPlugin } from './markdown-plugin';
const __cwd = process.cwd();
export type KitDocsPluginOptions = {
highlight?: false;
shiki?: HighlighterOptions;
markdown?: MarkdownPluginOptions;
};
export const kitDocsPlugin = (options: KitDocsPluginOptions = {}): Plugin[] =>
[
corePlugin(),
options.highlight !== false && kitDocsHighlightPlugin(options.shiki),
kitDocsMarkdownPlugin({ ...options.markdown, shiki: options.shiki }),
].filter(Boolean) as Plugin[];
function corePlugin(): Plugin {
return {
name: '@svelteness/kit-docs',
enforce: 'pre',
config(config) {
const userAlias = config.resolve?.alias;
const aliasKeys: string[] = !Array.isArray(userAlias)
? Object.keys(userAlias ?? {})
: userAlias.map((alias) => alias.find) ?? [];
const hasAlias = (alias: string) => aliasKeys.includes(alias);
const alias = {
$fonts: resolve(__cwd, 'src/fonts'),
$img: resolve(__cwd, 'src/img'),
$kitDocs: resolve(__cwd, 'src/kit-docs'),
};
for (const find of Object.keys(alias)) {
if (hasAlias(find)) {
delete alias[find];
}
}
return {
optimizeDeps: {
include: ['shiki'],
exclude: ['@svelteness/kit-docs'],
},
resolve: {
alias,
},
build: {
rollupOptions: {
external: ['@svelteness/kit-docs/node'],
},
},
};
},
};
}
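
The consuming Vite config is not part of this diff; given the exports above and the dependencies in `website/package.json`, registration presumably looks roughly like this (the `/node` entry point and option values are assumptions):

```ts
// vite.config.ts — hypothetical consumer of kitDocsPlugin.
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';
import { kitDocsPlugin } from '@svelteness/kit-docs/node';

export default defineConfig({
  plugins: [
    // The sub-plugins use `enforce: 'pre'`, so .md files are transformed
    // before the Svelte compiler sees them.
    kitDocsPlugin({
      shiki: { theme: 'material-palenight' },
    }),
    sveltekit(),
  ],
});
```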

@@ -0,0 +1,192 @@
import { type FilterPattern, createFilter, normalizePath } from '@rollup/pluginutils';
import { globbySync } from 'globby';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
import { type Plugin } from 'vite';
import { isLocalEnv } from '../utils/env';
import { getFileNameFromPath } from '../utils/path';
import {
type MarkdownComponents,
type MarkdownParser,
type MarkdownParserOptions,
type ParseMarkdownOptions,
AddTopLevelHtmlTags,
clearMarkdownCaches,
createMarkdownParser,
MarkdownComponentContainer,
parseMarkdownToSvelte,
} from './parser';
const PLUGIN_NAME = '@svelteness/markdown' as const;
const __cwd = process.cwd();
// @ts-ignore
const __dirname = fileURLToPath(import.meta.url);
export type MarkdownPluginOptions = MarkdownParserOptions & {
/**
* The markdown files to be parsed and rendered as Svelte components.
*
* @defaultValue /\+page\.md($|\?)/
*/
include?: FilterPattern;
/**
* The markdown files to _not_ be parsed.
*
* @defaultValue `null`
*/
exclude?: FilterPattern;
/**
* A glob pointing to Svelte component files that will be imported into every single
* markdown file.
*
* @defaultValue 'src/kit-docs/**\/[^_]*.svelte'
*/
globalComponents?: string;
/**
* Add custom top-level tags (e.g., `<svelte:head>`, `<script>` or `<style>`) to a markdown
* Svelte component.
*
* @defaultValue `null`
*/
topLevelHtmlTags?: AddTopLevelHtmlTags;
};
const DEFAULT_INCLUDE_RE = /\+page\.md($|\?)/;
const DEFAULT_EXCLUDE_RE = null;
const DEFAULT_GLOBAL_COMPONENTS = 'src/kit-docs/**/[^_]*.svelte';
export function kitDocsMarkdownPlugin(options: MarkdownPluginOptions = {}): Plugin {
let mode: string;
let baseUrl: string;
let parser: MarkdownParser;
let isBuild: boolean;
let define: Record<string, unknown> | undefined;
const {
include = DEFAULT_INCLUDE_RE,
exclude = DEFAULT_EXCLUDE_RE,
globalComponents = DEFAULT_GLOBAL_COMPONENTS,
topLevelHtmlTags,
...parserOptions
} = options;
const filter = createFilter(
options.include ?? DEFAULT_INCLUDE_RE,
options.exclude ?? DEFAULT_EXCLUDE_RE,
);
/** Page system file paths. */
const files = new Set<string>();
const globalComponentFiles = globbySync(globalComponents).map(normalizePath);
const parseOptions = (): ParseMarkdownOptions => ({
mode,
baseUrl,
escapeConstants: isBuild,
define,
globalComponentFiles,
topLevelHtmlTags,
});
const components: MarkdownComponents = parserOptions?.components ?? [];
function addGlobalComponents(files: string[]) {
for (const file of files) {
const name = getFileNameFromPath(file);
const has = globalComponentFiles.some((file) => getFileNameFromPath(file) === name);
if (!has) globalComponentFiles.push(file);
}
}
function addMarkdownComponents(files: string[]) {
for (const file of files) {
const componentName = getFileNameFromPath(file);
const has = components.some(({ name, type }) => type === 'custom' && name === componentName);
if (!has) {
components.push({
name: componentName,
type: 'custom',
container: getMarkdownContainer(file, componentName),
});
}
}
}
const absGlobalComponentPaths = globalComponentFiles
.map((path) => resolve(__cwd, path))
.map(normalizePath);
addMarkdownComponents(absGlobalComponentPaths);
try {
const root = isLocalEnv()
? resolve(__cwd, 'src/lib/kit-docs')
: resolve(__dirname, '../../client/kit-docs');
const paths = globbySync('**/*.svelte', { cwd: root }).map(normalizePath);
const absPaths = paths.map((path) => resolve(root, path)).map(normalizePath);
addMarkdownComponents(absPaths);
addGlobalComponents(absPaths);
} catch (e) {
// no-op
}
return {
name: PLUGIN_NAME,
enforce: 'pre' as const,
async configResolved(config) {
baseUrl = config.base;
mode = config.mode;
isBuild = config.command === 'build';
define = config.define;
parser = await createMarkdownParser({
...parserOptions,
components,
});
},
configureServer(server) {
function restart() {
clearMarkdownCaches();
files.clear();
server.restart();
}
server.watcher
.add(globalComponents)
.on('add', () => restart())
.on('unlink', () => restart());
},
transform(code, id) {
if (filter(id)) {
const filePath = normalizePath(id);
const { component } = parseMarkdownToSvelte(parser, code, filePath, parseOptions());
files.add(filePath);
return component;
}
return null;
},
async handleHotUpdate(ctx) {
const { file, read } = ctx;
// Hot reload `.md` files as `.svelte` files.
if (files.has(file)) {
const content = await read();
const { component } = parseMarkdownToSvelte(parser, content, file, parseOptions());
ctx.read = () => component;
}
},
};
}
function getMarkdownContainer(path: string, name: string): MarkdownComponentContainer | undefined {
if (!path.includes('kit-docs')) return;
if (name === 'Step') return { marker: '!' };
return undefined;
}

@@ -0,0 +1,119 @@
import MarkdownIt from 'markdown-it';
import { type HighlighterOptions } from 'shiki';
import {
anchorPlugin,
codePlugin,
containersPlugin,
createShikiPlugin,
customComponentPlugin,
emojiPlugin,
extractHeadersPlugin,
extractTitlePlugin,
hoistTagsPlugin,
importCodePlugin,
linksPlugin,
tocPlugin,
} from './plugins';
import type {
InlineElementRule,
MarkdownBlockComponent,
MarkdownComponents,
MarkdownCustomComponent,
MarkdownInlineComponent,
MarkdownParser,
} from './types';
export type MarkdownParserOptions = {
components?: MarkdownComponents;
shiki?: HighlighterOptions;
configureParser?(parser: MarkdownParser): void | Promise<void>;
};
export async function createMarkdownParser(
options: MarkdownParserOptions = {},
): Promise<MarkdownParser> {
const { configureParser, shiki = {}, components = [] } = options;
const inlineComponents = components.filter(
({ type }) => type === 'inline',
) as MarkdownInlineComponent[];
const blockComponents = components.filter(
({ type }) => type === 'block',
) as MarkdownBlockComponent[];
const customComponents = components.filter(
({ type }) => type === 'custom',
) as MarkdownCustomComponent[];
const parser = MarkdownIt({ html: true });
parser.use(emojiPlugin);
parser.use(anchorPlugin);
parser.use(tocPlugin);
parser.use(extractHeadersPlugin);
parser.use(extractTitlePlugin);
parser.use(customComponentPlugin);
parser.use(linksPlugin);
parser.use(codePlugin);
parser.use(containersPlugin, customComponents);
parser.use(importCodePlugin);
parser.use(await createShikiPlugin(shiki));
parser.use(hoistTagsPlugin);
responsiveTablePlugin(parser);
const inlineRuleMap: Partial<Record<InlineElementRule, string>> = {
strikethrough: 's',
emphasized: 'em',
};
for (const { name, rule } of inlineComponents) {
if (rule === 'image') {
parser.renderer.rules.image = function (tokens, idx, _, __, self) {
const token = tokens[idx];
return `<${name} ${self.renderAttrs(token)} />`;
};
continue;
}
const mappedRule = inlineRuleMap[rule] ?? rule;
parser.renderer.rules[`${mappedRule}_open`] = () => {
return `<${name}>`;
};
parser.renderer.rules[`${mappedRule}_close`] = () => {
return `</${name}>`;
};
}
for (const { name, rule } of blockComponents) {
parser.renderer.rules[`${rule}_open`] = (tokens, idx) => {
const token = tokens[idx];
const props: string[] = [];
if (/h(\d)/.test(token.tag)) {
props.push(`level=${token.tag.slice(1)}`);
}
return `<${name} ${props.join(' ')}>`;
};
parser.renderer.rules[`${rule}_close`] = () => {
return `</${name}>`;
};
}
await configureParser?.(parser);
return parser;
}
function responsiveTablePlugin(parser: MarkdownParser) {
parser.renderer.rules.table_open = function () {
return `<TableWrapper><table>`;
};
parser.renderer.rules.table_close = function () {
return '</table></TableWrapper>';
};
}
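
As a rough illustration of the `components` option (the component names and the relative import are made up, not taken from this commit):

```ts
// A minimal sketch: mapping built-in markdown-it rules onto Svelte components.
import { createMarkdownParser } from './createMarkdownParser';

const parser = await createMarkdownParser({
  components: [
    // `**bold**` renders as <Strong>bold</Strong> via the 'strong' inline rule.
    { name: 'Strong', type: 'inline', rule: 'strong' },
    // `## Title` renders as <Heading level=2>Title</Heading> via the 'heading' block rule.
    { name: 'Heading', type: 'block', rule: 'heading' },
  ],
});

const html = parser.render('## Hello **world**');
```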

@@ -0,0 +1,4 @@
export * from './createMarkdownParser';
export * from './parseMarkdown';
export * from './plugins';
export * from './types';

@@ -0,0 +1,205 @@
import fs from 'fs';
import matter from 'gray-matter';
import LRUCache from 'lru-cache';
import toml from 'toml';
import { isLocalEnv } from '../../utils/env';
import { getFileNameFromPath } from '../../utils/path';
import { hashString } from '../../utils/string';
import type {
MarkdownMeta,
MarkdownParser,
MarkdownParserEnv,
ParsedMarkdownResult,
ParseMarkdownOptions,
} from './types';
import { commentOutTemplateTags, uncommentTemplateTags } from './utils/htmlEscape';
import { preventViteReplace } from './utils/preventViteReplace';
const kitDocsImportPath = isLocalEnv() ? '$lib' : '@svelteness/kit-docs';
export type ParseMarkdownToSvelteResult = {
component: string;
meta: MarkdownMeta;
};
const svelteCache = new LRUCache<string, ParseMarkdownToSvelteResult>({ max: 1024 });
export function parseMarkdownToSvelte(
parser: MarkdownParser,
source: string,
filePath: string,
options: ParseMarkdownOptions = {},
): ParseMarkdownToSvelteResult {
const isProd = options.mode === 'production';
const cacheKey = !isProd ? hashString(filePath + source) : '';
if (!isProd && svelteCache.has(cacheKey)) return svelteCache.get(cacheKey)!;
const {
html,
meta,
env: parserEnv,
} = parseMarkdown(parser, commentOutTemplateTags(source), filePath, {
...options,
});
const { hoistedTags = [] } = parserEnv as MarkdownParserEnv;
const fileName = getFileNameFromPath(filePath);
if (kitDocsImportPath.length) {
hoistedTags.push(
['<script>', `import { frontmatter } from "${kitDocsImportPath}";`, '</script>'].join('\n'),
);
}
hoistedTags.push(...(options.topLevelHtmlTags?.({ fileName, filePath, meta }) ?? []));
if (options.globalComponentFiles) {
addGlobalImports(hoistedTags, options.globalComponentFiles);
}
const component =
dedupeHoistedTags(hoistedTags).join('\n') + `\n\n${uncommentTemplateTags(html)}`;
const result: ParseMarkdownToSvelteResult = {
component,
meta,
};
svelteCache.set(cacheKey, result);
return result;
}
function addGlobalImports(tags: string[], files: string[]) {
const globalImports = files
.map((filePath) => {
const componentName = getFileNameFromPath(filePath);
return `import ${componentName} from '/${filePath.replace(/^\//, '')}';`;
})
.join('\n');
tags.push(['<script>', globalImports, '</script>'].join('\n'));
}
const frontmatterCache = new LRUCache({ max: 1024 });
export function getFrontmatter(source: string): Record<string, any> {
const cacheKey = hashString(source);
if (frontmatterCache.has(cacheKey)) return frontmatterCache.get(cacheKey)!;
const { data: frontmatter } = matter(source, {
excerpt_separator: '<!-- more -->',
engines: {
toml: toml.parse.bind(toml),
},
});
frontmatterCache.set(cacheKey, frontmatter ?? {});
return frontmatter ?? {};
}
const mdCache = new LRUCache<string, ParsedMarkdownResult>({ max: 1024 });
export function parseMarkdown(
parser: MarkdownParser,
source: string,
filePath: string,
options: ParseMarkdownOptions = {},
): ParsedMarkdownResult {
const isProd = options.mode === 'production';
const cacheKey = !isProd ? hashString(filePath + source) : '';
if (!isProd && mdCache.has(cacheKey)) return mdCache.get(cacheKey)!;
const {
data: frontmatter,
content,
excerpt,
} = matter(source, {
excerpt_separator: '<!-- more -->',
engines: {
toml: toml.parse.bind(toml),
},
});
const parserEnv: MarkdownParserEnv = {
filePath,
frontmatter,
};
let html = parser.render(content, parserEnv);
const excerptHtml = parser.render(excerpt ?? '');
if (options.escapeConstants) {
html = preventViteReplace(html, options.define);
}
const { headers = [], importedFiles = [], links = [], title = '' } = parserEnv;
const _title = frontmatter.title ?? title;
const description = frontmatter.description;
const result: ParsedMarkdownResult = {
content,
html,
links,
importedFiles,
env: parserEnv,
meta: {
excerpt: excerptHtml,
headers,
title: _title,
description,
frontmatter,
lastUpdated: Math.round(fs.statSync(filePath).mtimeMs),
},
};
mdCache.set(cacheKey, result);
return result;
}
const OPENING_SCRIPT_TAG_RE = /<\s*script[^>]*>/;
const OPENING_SCRIPT_MODULE_TAG_RE = /<\s*script[^>]*\scontext="module"\s*[^>]*>/;
const CLOSING_SCRIPT_TAG_RE = /<\/script>/;
const OPENING_STYLE_TAG_RE = /<\s*style[^>]*>/;
const CLOSING_STYLE_TAG_RE = /<\/style>/;
const OPENING_SVELTE_HEAD_TAG_RE = /<\s*svelte:head[^>]*>/;
const CLOSING_SVELTE_HEAD_TAG_RE = /<\/svelte:head>/;
function dedupeHoistedTags(tags: string[] = []): string[] {
const dedupe = new Map();
const merge = (key: string, tag: string, openingTagRe: RegExp, closingTagRE: RegExp) => {
if (!dedupe.has(key)) {
dedupe.set(key, tag);
return;
}
const block = dedupe.get(key)!;
dedupe.set(key, block.replace(closingTagRE, tag.replace(openingTagRe, '')));
};
tags.forEach((tag) => {
if (OPENING_SCRIPT_MODULE_TAG_RE.test(tag)) {
merge('module', tag, OPENING_SCRIPT_MODULE_TAG_RE, CLOSING_SCRIPT_TAG_RE);
} else if (OPENING_SCRIPT_TAG_RE.test(tag)) {
merge('script', tag, OPENING_SCRIPT_TAG_RE, CLOSING_SCRIPT_TAG_RE);
} else if (OPENING_STYLE_TAG_RE.test(tag)) {
merge('style', tag, OPENING_STYLE_TAG_RE, CLOSING_STYLE_TAG_RE);
} else if (OPENING_SVELTE_HEAD_TAG_RE.test(tag)) {
merge('svelte:head', tag, OPENING_SVELTE_HEAD_TAG_RE, CLOSING_SVELTE_HEAD_TAG_RE);
} else {
// Treat unknowns as unique and leave them as-is.
dedupe.set(Symbol(), tag);
}
});
return Array.from(dedupe.values());
}
export function clearMarkdownCaches() {
frontmatterCache.clear();
mdCache.clear();
svelteCache.clear();
}

@@ -0,0 +1,18 @@
import type { PluginSimple } from 'markdown-it';
import rawAnchorPlugin from 'markdown-it-anchor';
import { slugify } from '../utils/slugify';
export const anchorPlugin: PluginSimple = (parser) => {
return rawAnchorPlugin(parser, {
level: [2, 3, 4, 5, 6],
slugify,
permalink: rawAnchorPlugin.permalink.ariaHidden({
class: 'header-anchor',
symbol: '#',
space: true,
placement: 'before',
// renderAttrs: () => ({ 'sveltekit:noscroll': '' })
}),
});
};

@@ -0,0 +1,77 @@
import type { PluginSimple } from 'markdown-it';
import { uncommentTemplateTags } from '../../utils/htmlEscape';
import { resolveHighlightLines } from './resolveHighlightLines';
import { resolveLanguage } from './resolveLanguage';
/**
* Plugin to enable styled code fences with line numbers, syntax highlighting, etc.
*/
export const codePlugin: PluginSimple = (parser) => {
parser.renderer.rules.code_inline = (tokens, idx) => {
const token = tokens[idx];
const code = token.content;
const props = [`code={${JSON.stringify(code)}}`].join(' ');
return `<CodeInline ${props} />`;
};
// Override default fence renderer.
parser.renderer.rules.fence = (tokens, idx, options) => {
const token = tokens[idx];
// Get token info.
const info = token.info ? parser.utils.unescapeAll(token.info).trim() : '';
// Resolve language from token info.
const language = resolveLanguage(info);
// Get un-escaped code content.
const content = uncommentTemplateTags(token.content);
// Try to get highlighted code.
const html =
options.highlight?.(content, language.name, '') || parser.utils.escapeHtml(content);
const code = html.replace(/\sclass="shiki" style=".*?"/, '').trim();
const rawCode = token.content
.replace(/<script/g, '<script&#8203')
.replace(/<style/g, '<style&#8203');
const linesCount = (html.match(/"line"/g) || []).length;
// Resolve highlight line ranges from token info.
const highlightLinesRanges = resolveHighlightLines(info);
const highlight = `[${highlightLinesRanges
?.map((range) => `[${range[0]}, ${range[1]}]`)
.join(',')}]`;
const title = info.match(/\|?title="?(.*?)"?(\||{|$)/)?.[1];
const useLineNumbers = /\|?lineNumbers/.test(info);
const showCopyCode = /\|?copy/.test(info);
const copyHighlightOnly = /\|?copyHighlight/.test(info);
const copySteps = /\|?copySteps/.test(info);
const slot =
info.match(/\|?slot="?(.*?)"?(\||{|$)/)?.[1] ?? (/\|?slot/.test(info) && language.ext);
const props = [
title && `title="${title}"`,
`lang="${language.name}"`,
`ext="${language.ext}"`,
`linesCount={${linesCount}}`,
useLineNumbers && 'showLineNumbers',
(highlightLinesRanges?.length ?? 0) > 0 && `highlightLines={${highlight}}`,
showCopyCode && `rawCode={${JSON.stringify(rawCode)}}`,
showCopyCode && 'showCopyCode',
copyHighlightOnly && `copyHighlightOnly`,
copySteps && 'copySteps',
`code={${JSON.stringify(code)}}`,
slot && `slot="${slot}"`,
]
.filter(Boolean)
.join(' ');
return `<CodeFence ${props} />`;
};
};
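
For orientation, the info-string options the fence renderer looks for can be combined; an illustrative (and non-exhaustive) example:

```ts
// Illustrative only — a fence opened with this info string...
const info = 'js|title=app.js|lineNumbers|copy{2-3}';
// ...renders roughly as:
//   <CodeFence title="app.js" lang="javascript" ext="js" showLineNumbers
//     showCopyCode highlightLines={[[2, 3]]} linesCount={...} code={...} rawCode={...} />
```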

@@ -0,0 +1,4 @@
export * from './codePlugin';
export * from './languages';
export * from './resolveHighlightLines';
export * from './resolveLanguage';

@@ -0,0 +1,101 @@
/**
* Language type for syntax highlight.
*/
export type HighlightLanguage = {
/**
* Name of the language.
*
* The name to be used for the class name, e.g. `class="language-typescript"`.
*/
name: string;
/**
* Extension of the language.
*
* The file extension, which will be used for the class name, e.g. `class="ext-ts"`
*/
ext: string;
/**
* Aliases that point to this language. They must not conflict with other languages.
*/
aliases: string[];
};
export const languageBash: HighlightLanguage = {
name: 'bash',
ext: 'sh',
aliases: ['bash', 'sh', 'shell', 'zsh'],
};
export const languageCsharp: HighlightLanguage = {
name: 'csharp',
ext: 'cs',
aliases: ['cs', 'csharp'],
};
export const languageDocker: HighlightLanguage = {
name: 'docker',
ext: 'docker',
aliases: ['docker', 'dockerfile'],
};
export const languageFsharp: HighlightLanguage = {
name: 'fsharp',
ext: 'fs',
aliases: ['fs', 'fsharp'],
};
export const languageJavascript: HighlightLanguage = {
name: 'javascript',
ext: 'js',
aliases: ['javascript', 'js'],
};
export const languageKotlin: HighlightLanguage = {
name: 'kotlin',
ext: 'kt',
aliases: ['kotlin', 'kt'],
};
export const languageMarkdown: HighlightLanguage = {
name: 'markdown',
ext: 'md',
aliases: ['markdown', 'md'],
};
export const languagePython: HighlightLanguage = {
name: 'python',
ext: 'py',
aliases: ['py', 'python'],
};
export const languageRuby: HighlightLanguage = {
name: 'ruby',
ext: 'rb',
aliases: ['rb', 'ruby'],
};
export const languageRust: HighlightLanguage = {
name: 'rust',
ext: 'rs',
aliases: ['rs', 'rust'],
};
export const languageStylus: HighlightLanguage = {
name: 'stylus',
ext: 'styl',
aliases: ['styl', 'stylus'],
};
export const languageTypescript: HighlightLanguage = {
name: 'typescript',
ext: 'ts',
aliases: ['ts', 'typescript'],
};
export const languageYaml: HighlightLanguage = {
name: 'yaml',
ext: 'yml',
aliases: ['yaml', 'yml'],
};

@@ -0,0 +1,23 @@
export type HighlightLinesRange = [number, number];
/**
* Resolve highlight-lines ranges from token info.
*/
export const resolveHighlightLines = (info: string): HighlightLinesRange[] | null => {
// Try to match highlight-lines mark.
const match = info.match(/{([\d,-]+)}/);
// No highlight-lines mark, return `null`.
if (match === null) {
return null;
}
// Resolve line ranges from the highlight-lines mark.
return match[1].split(',').map((item) => {
const range = item.split('-');
if (range.length === 1) {
range.push(range[0]);
}
return range.map((str) => Number.parseInt(str, 10)) as HighlightLinesRange;
});
};
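
A few concrete inputs and outputs for the parser above (the import path is taken from the plugins barrel file):

```ts
import { resolveHighlightLines } from './resolveHighlightLines';

resolveHighlightLines('js{1,4-6}'); // -> [[1, 1], [4, 6]]
resolveHighlightLines('js{7}');     // -> [[7, 7]]
resolveHighlightLines('js');        // -> null
```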

@@ -0,0 +1,45 @@
import type { HighlightLanguage } from './languages';
import * as languages from './languages';
type LanguagesMap = Record<string, HighlightLanguage>;
/**
* A key-value map to get language info from alias.
*
* - key: alias
* - value: language
*/
let languagesMap: LanguagesMap;
/**
* Lazily generate the languages map.
*/
const getLanguagesMap = (): LanguagesMap => {
if (!languagesMap) {
languagesMap = Object.values(languages).reduce((result, item) => {
item.aliases.forEach((alias) => {
result[alias] = item;
});
return result;
}, {});
}
return languagesMap;
};
/**
* Resolve language for highlight from token info.
*/
export const resolveLanguage = (info: string): HighlightLanguage => {
// Get user-defined language alias.
const alias = info.match(/^([a-zA-Z]+)/)?.[1] || 'text';
// If the alias does not have a match in the map, fall back to the alias itself.
return (
getLanguagesMap()[alias] ?? {
name: alias,
ext: alias,
aliases: [alias],
}
);
};

@@ -0,0 +1,63 @@
import type { PluginWithOptions } from 'markdown-it';
import type Token from 'markdown-it/lib/token';
import container from 'markdown-it-container';
import { titleToSnakeCase } from '../../../utils/string';
import { isString } from '../../../utils/unit';
import type { MarkdownCustomComponent, MarkdownParser } from '../types';
const propsRE = /(?:\s|\|)(.*?)=(.*?)(?=(\||$))/g;
const bodyRE = /\((.*?)\)(?:=)(.*)/;
const tagRE = /tag=(.*?)(?:&|\))/;
const slotRE = /slot=(.*?)(?:&|\))/;
function renderDefault(parser: MarkdownParser, componentName: string) {
return function (tokens: Token[], idx: number) {
const token = tokens[idx];
const props: string[] = [];
const body: string[] = [];
const matchedProps = token.info.trim().matchAll(propsRE);
for (const [propMatch, prop, value] of matchedProps) {
if (bodyRE.test(propMatch)) {
const [_, __, content] = propMatch.match(bodyRE) ?? [];
const tag = propMatch.match(tagRE)?.[1] ?? 'p';
const slot = propMatch.match(slotRE)?.[1];
if (isString(tag) && isString(content)) {
body.push(
[
`<${tag}${isString(slot) ? ` slot="${slot}"` : ''}>`,
parser
.render(content)
.replace(/^<p>/, '')
.replace(/<\/p>\n?$/, ''),
`</${tag}>`,
].join('\n'),
);
}
} else if (isString(prop) && isString(value)) {
props.push(`${prop}=${value}`);
}
}
if (token.nesting === 1) {
return `<${componentName} ${props.join(' ')}>\n ${body.join('\n ')}\n`;
} else {
return `</${componentName}>\n`;
}
};
}
export const containersPlugin: PluginWithOptions<MarkdownCustomComponent[]> = (
parser: MarkdownParser,
components = [],
) => {
for (const { name: componentName, container: options } of components) {
const name: string = options?.name ?? titleToSnakeCase(componentName);
const marker: string = options?.marker ?? ':';
const render = options?.renderer?.(componentName) ?? renderDefault(parser, componentName);
parser.use(container, name, { marker, render });
}
};
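
To make the prop parsing above concrete, a hedged sketch — the `Info` component, the import path, and the assumption that `titleToSnakeCase('Info')` yields `info` are all illustrative:

```ts
// Hypothetical: register an `Info` component as a markdown container.
import { createMarkdownParser } from '../createMarkdownParser';

const parser = await createMarkdownParser({
  // Container name defaults to titleToSnakeCase('Info'), marker to ':'.
  components: [{ name: 'Info', type: 'custom' }],
});

// In markdown:
//   :::info variant=note
//   Remember to hydrate.
//   :::
// renders roughly as `<Info variant=note> <p>Remember to hydrate.</p> </Info>`.
const html = parser.render(':::info variant=note\nRemember to hydrate.\n:::\n');
```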

@@ -0,0 +1,38 @@
import type { PluginSimple } from 'markdown-it';
import { htmlBlockRule, HTMLBlockSequence } from './htmlBlockRule';
import { htmlInlineRule } from './htmlInlineRule';
/**
* Svelte reserved tags.
*
* @see https://svelte.dev/docs#svelte_self
*/
export const svelteReservedTags = [
'svelte:self',
'svelte:component',
'svelte:window',
'svelte:body',
'svelte:head',
'svelte:options',
'svelte:fragment',
'slot',
];
const svelteHtmlBlockSequence: HTMLBlockSequence[] = [
// Treat Svelte reserved tags as block tags.
[new RegExp('^</?(' + svelteReservedTags.join('|') + ')(?=(\\s|/?>|$))', 'i'), /^$/, true],
];
/**
* Replaces the default `html_block` rule to allow custom components in markdown.
*/
export const customComponentPlugin: PluginSimple = (parser) => {
// Override default html block ruler.
parser.block.ruler.at('html_block', htmlBlockRule(svelteHtmlBlockSequence), {
alt: ['paragraph', 'reference', 'blockquote'],
});
// Override default html inline ruler.
parser.inline.ruler.at('html_inline', htmlInlineRule);
};

@@ -0,0 +1,163 @@
import type { RuleBlock } from 'markdown-it/lib/parser_block';
import { HTML_OPEN_CLOSE_TAG_RE } from './htmlRe';
import { inlineTags } from './inlineTags';
// Forked and modified from 'markdown-it/lib/rules_block/html_block.js'
const blockNames = [
'address',
'article',
'aside',
'base',
'basefont',
'blockquote',
'body',
'caption',
'center',
'col',
'colgroup',
'dd',
'details',
'dialog',
'dir',
'div',
'dl',
'dt',
'fieldset',
'figcaption',
'figure',
'footer',
'form',
'frame',
'frameset',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
'head',
'header',
'hr',
'html',
'iframe',
'legend',
'li',
'link',
'main',
'menu',
'menuitem',
'nav',
'noframes',
'ol',
'optgroup',
'option',
'p',
'param',
'section',
'source',
'summary',
'table',
'tbody',
'td',
'tfoot',
'th',
'thead',
'title',
'tr',
'track',
'ul',
];
/**
* An array of opening and corresponding closing sequences for html tags. The last array value
* defines whether it can terminate a paragraph or not.
*/
export type HTMLBlockSequence = [RegExp, RegExp, boolean];
const HTML_SEQUENCES: HTMLBlockSequence[] = [
[/^<(script|pre|style)(?=(\s|>|$))/i, /<\/(script|pre|style)>/i, true],
[/^<!--/, /-->/, true],
[/^<\?/, /\?>/, true],
[/^<![A-Z]/, />/, true],
[/^<!\[CDATA\[/, /\]\]>/, true],
// MODIFIED HERE: Treat unknown tags as block tags (custom components), excluding known inline tags
[new RegExp('^</?(?!(' + inlineTags.join('|') + ')(?![\\w-]))\\w[\\w-]*[\\s/>]'), /^$/, true],
// eslint-disable-next-line import/namespace
[new RegExp('^</?(' + blockNames.join('|') + ')(?=(\\s|/?>|$))', 'i'), /^$/, true],
[new RegExp(HTML_OPEN_CLOSE_TAG_RE.source + '\\s*$'), /^$/, false],
];
export const htmlBlockRule = (customSequences: HTMLBlockSequence[] = []): RuleBlock => {
const sequences: HTMLBlockSequence[] = [...HTML_SEQUENCES, ...customSequences];
return (state, startLine, endLine, silent): boolean => {
let i: number;
let nextLine: number;
let lineText: string;
let pos = state.bMarks[startLine] + state.tShift[startLine];
let max = state.eMarks[startLine];
// if it's indented more than 3 spaces, it should be a code block
if (state.sCount[startLine] - state.blkIndent >= 4) {
return false;
}
if (!state.md.options.html) {
return false;
}
if (state.src.charCodeAt(pos) !== 0x3c /* < */) {
return false;
}
lineText = state.src.slice(pos, max);
for (i = 0; i < sequences.length; i++) {
if (sequences[i][0].test(lineText)) {
break;
}
}
if (i === sequences.length) {
return false;
}
if (silent) {
// true if this sequence can be a terminator, false otherwise
return sequences[i][2];
}
nextLine = startLine + 1;
// If we are here - we detected HTML block.
// Let's roll down till block end.
if (!sequences[i][1].test(lineText)) {
for (; nextLine < endLine; nextLine++) {
if (state.sCount[nextLine] < state.blkIndent) {
break;
}
pos = state.bMarks[nextLine] + state.tShift[nextLine];
max = state.eMarks[nextLine];
lineText = state.src.slice(pos, max);
if (sequences[i][1].test(lineText)) {
if (lineText.length !== 0) {
nextLine++;
}
break;
}
}
}
state.line = nextLine;
const token = state.push('html_block', '', 0);
token.map = [startLine, nextLine];
token.content = state.getLines(startLine, nextLine, state.blkIndent, true);
return true;
};
};

@@ -0,0 +1,44 @@
import type { RuleInline } from 'markdown-it/lib/parser_inline';
import { HTML_TAG_RE } from './htmlRe';
// Forked and modified from 'markdown-it/lib/rules_inline/html_inline.js'
const isLetter = (ch: number): boolean => {
const lc = ch | 0x20; // to lower case
return lc >= 0x61 /* a */ && lc <= 0x7a; /* z */
};
export const htmlInlineRule: RuleInline = (state, silent) => {
const pos = state.pos;
if (!state.md.options.html) {
return false;
}
// Check start.
const max = state.posMax;
if (state.src.charCodeAt(pos) !== 0x3c /* < */ || pos + 2 >= max) {
return false;
}
// Quick fail on second char.
const ch = state.src.charCodeAt(pos + 1);
if (ch !== 0x21 /* ! */ && ch !== 0x3f /* ? */ && ch !== 0x2f /* / */ && !isLetter(ch)) {
return false;
}
// MODIFIED HERE: Tweak the original HTML_TAG_RE.
const match = state.src.slice(pos).match(HTML_TAG_RE);
if (!match) {
return false;
}
if (!silent) {
const token = state.push('html_inline', '', 0);
token.content = state.src.slice(pos, pos + match[0].length);
}
state.pos += match[0].length;
return true;
};

@@ -0,0 +1,40 @@
// Forked and modified from 'markdown-it/lib/common/html_re.js'
// Regexps to match html elements.
// MODIFIED HERE: Support `@` as the first char of attr name
const attr_name = '[a-zA-Z_:@][a-zA-Z0-9:._-]*';
const unquoted = '[^"\'=<>`\\x00-\\x20]+';
const single_quoted = "'[^']*'";
const double_quoted = '"[^"]*"';
const attr_value = '(?:' + unquoted + '|' + single_quoted + '|' + double_quoted + ')';
const attribute = '(?:\\s+' + attr_name + '(?:\\s*=\\s*' + attr_value + ')?)';
const open_tag = '<[A-Za-z][A-Za-z0-9\\-]*' + attribute + '*\\s*\\/?>';
const close_tag = '<\\/[A-Za-z][A-Za-z0-9\\-]*\\s*>';
const comment = '<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->';
const processing = '<[?][\\s\\S]*?[?]>';
const declaration = '<![A-Z]+\\s+[^>]*>';
const cdata = '<!\\[CDATA\\[[\\s\\S]*?\\]\\]>';
export const HTML_TAG_RE = new RegExp(
'^(?:' +
open_tag +
'|' +
close_tag +
'|' +
comment +
'|' +
processing +
'|' +
declaration +
'|' +
cdata +
')',
);
export const HTML_OPEN_CLOSE_TAG_RE = new RegExp('^(?:' + open_tag + '|' + close_tag + ')');

@@ -0,0 +1,5 @@
export * from './customComponentPlugin';
export * from './htmlBlockRule';
export * from './htmlInlineRule';
export * from './htmlRe';
export * from './inlineTags';

@@ -0,0 +1,73 @@
/**
* According to the markdown spec, all non-block html tags are treated as "inline"
* tags (wrapped with <p></p>), including those "unknown" tags.
*
* Therefore, `markdown-it` processes "inline" tags and "unknown" tags in the same
* way, and does not care if a tag is "inline" or "unknown".
*
* As we want to take those "unknown" tags as custom components, we should treat them as
* "block" tags. So we have to distinguish between "inline" and "unknown" tags ourselves.
*
* The inline tags list comes from MDN.
*
* @see https://spec.commonmark.org/0.29/#raw-html
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Inline_elements
*/
export const inlineTags = [
'a',
'abbr',
'acronym',
'audio',
'b',
'bdi',
'bdo',
'big',
'br',
'button',
'canvas',
'cite',
'code',
'data',
'datalist',
'del',
'dfn',
'em',
'embed',
'i',
/* iframe is treated as HTML blocks in markdown spec */
// 'iframe',
'img',
'input',
'ins',
'kbd',
'label',
'map',
'mark',
'meter',
'noscript',
'object',
'output',
'picture',
'progress',
'q',
'ruby',
's',
'samp',
'script',
'select',
'slot',
'small',
'span',
'strong',
'sub',
'sup',
'svg',
'template',
'textarea',
'time',
'u',
'tt',
'var',
'video',
'wbr',
];

@@ -0,0 +1,6 @@
import type { PluginSimple } from 'markdown-it';
import rawEmojiPlugin from 'markdown-it-emoji';
export const emojiPlugin: PluginSimple = (parser) => {
return rawEmojiPlugin(parser);
};

@@ -0,0 +1,31 @@
import type { PluginSimple } from 'markdown-it';
import type { MarkdownHeader, MarkdownParserEnv } from '../types';
import { resolveHeadersFromTokens } from '../utils/resolveHeadersFromToken';
/**
* Extracts markdown headers into `env`. Used for generating the sidebar nav and table of contents.
*/
export const extractHeadersPlugin: PluginSimple = (parser) => {
const level = [2, 3];
let headers: MarkdownHeader[];
// Push the rule to the end of the chain, and resolve headers from the parsed tokens.
parser.core.ruler.push('resolveExtractHeaders', (state) => {
headers = resolveHeadersFromTokens(state.tokens, {
level,
allowHtml: false,
escapeText: false,
});
return true;
});
// Extract headers to `env`.
const render = parser.render.bind(parser);
parser.render = (src, env: MarkdownParserEnv = {}) => {
const result = render(src, env);
env.headers = headers;
return result;
};
};

@@ -0,0 +1,33 @@
import type { PluginSimple } from 'markdown-it';
import type { MarkdownParserEnv } from '../types';
import { resolveTitleFromToken } from '../utils/resolveTitleFromToken';
/**
* Extracts the markdown title into the parser `env`.
*/
export const extractTitlePlugin: PluginSimple = (parser) => {
let title: string;
// Push the rule to the end of the chain, and resolve title from the parsed tokens.
parser.core.ruler.push('resolveExtractTitle', (state) => {
const tokenIdx = state.tokens.findIndex((token) => token.tag === 'h1');
if (tokenIdx > -1) {
title = resolveTitleFromToken(state.tokens[tokenIdx + 1], {
escapeText: false,
allowHtml: false,
});
} else {
title = '';
}
return true;
});
// Extract title to env.
const render = parser.render.bind(parser);
parser.render = (src, env: MarkdownParserEnv = {}) => {
const result = render(src, env);
env.title = (env.frontmatter?.title as string) ?? title;
return result;
};
};

@@ -0,0 +1,26 @@
import type { PluginSimple } from 'markdown-it';
import type { MarkdownParserEnv } from '../types';
/**
* Avoids rendering Svelte script/style blocks inline. Extracts them into `env` so they can be hoisted to the root of the generated component.
*/
export const hoistTagsPlugin: PluginSimple = (parser) => {
const tags = ['script', 'style', 'svelte:head'];
const hoistTagsRegexp = new RegExp(`^<(${tags.join('|')})(?=(\\s|>|$))`, 'i');
const rawRule = parser.renderer.rules.html_block!;
parser.renderer.rules.html_block = (tokens, idx, options, env: MarkdownParserEnv, self) => {
const content = tokens[idx].content;
const hoistedTags = env.hoistedTags || (env.hoistedTags = []);
// Push hoisted tags to `env` and do not render them.
if (hoistTagsRegexp.test(content.trim())) {
hoistedTags.push(content);
return '';
}
return rawRule(tokens, idx, options, env, self);
};
};
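
In practice this is what lets a `+page.md` carry its own imports; an illustration (the imported component is made up):

```ts
// Illustrative markdown source for a +page.md: the <script> block is not rendered
// as HTML — it is pushed into `env.hoistedTags` and emitted once at the top of the
// generated Svelte component by parseMarkdownToSvelte.
const source = [
  '<script>',
  "import Chart from '$lib/Chart.svelte'; // hypothetical component",
  '</script>',
  '',
  '# Usage',
  '',
  '<Chart />',
].join('\n');
```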

@@ -0,0 +1,5 @@
export type ImportCodeTokenMeta = {
importPath: string;
lineStart: number;
lineEnd?: number;
};

@@ -0,0 +1,65 @@
import type { RuleBlock } from 'markdown-it/lib/parser_block';
import path from 'path';
import type { ImportCodeTokenMeta } from './ImportCodeTokenMeta';
// Min length of the import code syntax, i.e. '@[code]()'
const MIN_LENGTH = 9;
// Char codes of '@[code'
const START_CODES = [64, 91, 99, 111, 100, 101];
// Regexp to match the import syntax.
const SYNTAX_RE = /^@\[code(?:{(?:(\d+)?-(\d+)?)})?(?: ([^\]]+))?\]\(([^)]*)\)/;
export const createImportCodeBlockRule =
(): RuleBlock =>
(state, startLine, endLine, silent): boolean => {
// If it's indented more than 3 spaces, it should be a code block.
/* istanbul ignore if */
if (state.sCount[startLine] - state.blkIndent >= 4) {
return false;
}
const pos = state.bMarks[startLine] + state.tShift[startLine];
const max = state.eMarks[startLine];
// Return false if the length is shorter than min length.
if (pos + MIN_LENGTH > max) return false;
// Check if it matches the start sequence.
for (let i = 0; i < START_CODES.length; i += 1) {
if (state.src.charCodeAt(pos + i) !== START_CODES[i]) {
return false;
}
}
// Check if it matches the syntax.
const match = state.src.slice(pos, max).match(SYNTAX_RE);
if (!match) return false;
// Return true as we have matched the syntax.
if (silent) return true;
const [, lineStart, lineEnd, info, importPath] = match;
const meta: ImportCodeTokenMeta = {
importPath,
lineStart: lineStart ? Number.parseInt(lineStart, 10) : 0,
lineEnd: lineEnd ? Number.parseInt(lineEnd, 10) : undefined,
};
// Create an `import_code` token.
const token = state.push('import_code', 'code', 0);
// Use user-specified info, or fall back to the file ext.
token.info = info ?? path.extname(meta.importPath).slice(1);
token.markup = '```';
token.map = [startLine, startLine + 1];
// Store token meta to be used in renderer rule.
token.meta = meta;
state.line = startLine + 1;
return true;
};
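
For reference, the shapes accepted by `SYNTAX_RE` above (all paths are invented):

```ts
const examples = [
  "@[code](../components/Button.svelte)", // whole file, info derived from '.svelte'
  "@[code{3-10}](../lib/utils.ts)",       // only lines 3-10
  "@[code{5-} ts](../lib/utils.ts)",      // from line 5 onward, info forced to 'ts'
];
```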

@@ -0,0 +1,30 @@
import type { PluginSimple } from 'markdown-it';
import type { MarkdownParserEnv } from '../../types';
import { createImportCodeBlockRule } from './createImportCodeBlockRule';
import { resolveImportCode } from './resolveImportCode';
export const importCodePlugin: PluginSimple = (parser) => {
// Add `import_code` block rule.
parser.block.ruler.before('fence', 'import_code', createImportCodeBlockRule(), {
alt: ['paragraph', 'reference', 'blockquote', 'list'],
});
// Add `import_code` renderer rule.
parser.renderer.rules.import_code = (tokens, idx, options, env: MarkdownParserEnv, slf) => {
const token = tokens[idx];
// Use imported code as token content.
const { importFilePath, importCode } = resolveImportCode(token.meta, env);
token.content = importCode;
// Extract imported files to env.
if (importFilePath) {
const importedFiles = env.importedFiles || (env.importedFiles = []);
importedFiles.push(importFilePath);
}
// Render the `import_code` token as a fence token.
return parser.renderer.rules.fence!(tokens, idx, options, env, slf);
};
};

@@ -0,0 +1,4 @@
export * from './createImportCodeBlockRule';
export * from './importCodePlugin';
export * from './ImportCodeTokenMeta';
export * from './resolveImportCode';

@@ -0,0 +1,47 @@
import fs from 'fs';
import path from 'path';
import type { MarkdownParserEnv } from '../../types';
import type { ImportCodeTokenMeta } from './ImportCodeTokenMeta';
export const resolveImportCode = (
{ importPath, lineStart, lineEnd }: ImportCodeTokenMeta,
{ filePath }: MarkdownParserEnv,
): {
importFilePath: string | null;
importCode: string;
} => {
let importFilePath = importPath;
if (!path.isAbsolute(importPath)) {
// If the importPath is a relative path, resolve it relative to the markdown filePath.
if (!filePath) {
return {
importFilePath: null,
importCode: 'Error when resolving path',
};
}
importFilePath = path.resolve(filePath, '..', importPath);
}
// Check file existence.
if (!fs.existsSync(importFilePath)) {
return {
importFilePath,
importCode: 'File not found',
};
}
// Read file content.
const fileContent = fs.readFileSync(importFilePath).toString();
// Resolve partial import.
return {
importFilePath,
importCode: fileContent
.split('\n')
.slice(lineStart ? lineStart - 1 : lineStart, lineEnd)
.join('\n')
.replace(/\n?$/, '\n'),
};
};

@@ -0,0 +1,12 @@
export * from './anchorPlugin';
export * from './codePlugin';
export * from './containersPlugin';
export * from './customComponentPlugin';
export * from './emojiPlugin';
export * from './extractHeadersPlugin';
export * from './extractTitlePlugin';
export * from './hoistTagsPlugin';
export * from './importCodePlugin';
export * from './linksPlugin';
export * from './shikiPlugin';
export * from './tocPlugin';

@@ -0,0 +1,72 @@
import { lstatSync } from 'fs';
import { type PluginSimple } from 'markdown-it';
import { dirname, relative, resolve } from 'path';
import { isLinkExternal } from '../utils/isLink';
const ROUTES_DIR = resolve(process.cwd(), 'src/routes');
const restParamsRe = /\[\.\.\..*?\]/g;
/**
* Resolves link URLs.
*/
export const linksPlugin: PluginSimple = (parser) => {
// Attrs that are going to be added to external links.
const externalAttrs = {
target: '_blank',
rel: 'noopener noreferrer',
};
parser.renderer.rules.link_open = (tokens, idx, _, env) => {
const token = tokens[idx];
const hrefIndex = token.attrIndex('href');
const props: string[] = [];
if (hrefIndex >= 0) {
const hrefAttr = token.attrs?.[hrefIndex];
const hrefLink = hrefAttr![1];
const internalLinkMatch = hrefLink.match(/^((?:.*)(?:\/|\.md|\.html))(#.*)?$/);
if (isLinkExternal(hrefLink, '/')) {
Object.entries(externalAttrs ?? {}).forEach(([key, val]) => {
token.attrSet(key, val);
});
} else if (internalLinkMatch) {
const rawPath = decodeURI(internalLinkMatch?.[1]);
const rawHash = internalLinkMatch?.[2] ?? '';
const { filePath } = env;
const absolutePath = rawPath?.startsWith('/')
? '.' + rawPath
: resolve(lstatSync(filePath).isDirectory() ? filePath : dirname(filePath), rawPath);
const slug = relative(ROUTES_DIR, absolutePath)
.replace(restParamsRe, '')
.replace(/\/\+page/, '')
.replace(/\.(md|html)/, '');
// Set new path.
hrefAttr![1] = '/' + slug + rawHash;
const links = env.links || (env.links = []);
links.push(hrefAttr![1]);
}
if (token.attrs) {
for (const [name, value] of token.attrs) {
props.push(`${name}="${value}"`);
}
}
}
return `<Link ${props.filter(Boolean).join(' ')}>${token.content}`;
};
parser.renderer.rules.link_close = () => {
return '</Link>';
};
};

@@ -0,0 +1,17 @@
import { type PluginSimple } from 'markdown-it';
import { type HighlighterOptions, getHighlighter, renderToHtml } from 'shiki';
export const createShikiPlugin = async (options?: HighlighterOptions) => {
const highlighter = await getHighlighter({
theme: 'material-palenight',
langs: ["bash", "javascript", "typescript", "svelte", "markdown", "html", "diff", "css"],
...options,
});
return ((parser) => {
parser.options.highlight = (code, lang) => {
const tokens = highlighter.codeToThemedTokens(code, lang);
return renderToHtml(tokens);
};
}) as PluginSimple;
};

View File

@ -0,0 +1,41 @@
import type { RuleBlock } from 'markdown-it/lib/parser_block';
/**
* Forked and modified from `markdown-it-toc-done-right`.
*
* - Remove the `inlineOptions` support.
* - Use `markdown-it` default renderer to render token whenever possible.
*
* @see https://github.com/nagaozen/markdown-it-toc-done-right
*/
export const createTocBlockRule = ({ pattern }: { pattern: RegExp }): RuleBlock => {
return (state, startLine, endLine, silent): boolean => {
// If it's indented more than 3 spaces, it should be a code block.
if (state.sCount[startLine] - state.blkIndent >= 4) {
return false;
}
const pos = state.bMarks[startLine] + state.tShift[startLine];
const max = state.eMarks[startLine];
/**
* Use whitespace as a line tokenizer and extract the first token to test against the
* placeholder anchored pattern, rejecting if `false`.
*/
const lineFirstToken = state.src.slice(pos, max).split(' ')[0];
if (!pattern.test(lineFirstToken)) return false;
if (silent) return true;
state.line = startLine + 1;
const tokenOpen = state.push('toc_open', 'TableOfContents', 1);
tokenOpen.markup = '';
tokenOpen.map = [startLine, state.line];
const tokenClose = state.push('toc_close', 'TableOfContents', -1);
tokenClose.markup = '';
return true;
};
};

View File

@ -0,0 +1,2 @@
export * from './createTocBlockRule';
export * from './tocPlugin';

View File

@ -0,0 +1,51 @@
import type { PluginSimple } from 'markdown-it';
import type { MarkdownHeader } from '../../types';
import { resolveHeadersFromTokens } from '../../utils/resolveHeadersFromToken';
import { createTocBlockRule } from './createTocBlockRule';
/**
* Generate table of contents.
*
* Forked and modified from `markdown-it-toc-done-right`:
*
* - Allows `html_inline` tags in headings to support custom components.
* - Allows custom tags for links.
* - Code refactor and optimizations.
*
* @see https://github.com/nagaozen/markdown-it-toc-done-right
*/
export const tocPlugin: PluginSimple = (parser) => {
const pattern = /^\[\[toc\]\]$/i;
const level = [2, 3];
let headers: MarkdownHeader[];
// Push the rule to the end of the chain, and resolve headers from the parsed tokens.
parser.core.ruler.push('resolveTocHeaders', (state) => {
headers = resolveHeadersFromTokens(state.tokens, {
level,
allowHtml: true,
escapeText: true,
});
return true;
});
// Add toc syntax as a block rule.
parser.block.ruler.before('heading', 'toc', createTocBlockRule({ pattern }), {
alt: ['paragraph', 'reference', 'blockquote'],
});
// Custom toc_body render rule.
parser.renderer.rules.toc_open = () => {
if (!headers) {
return '';
}
return `<TableOfContents headers={\`${JSON.stringify(headers, [
'title',
'slug',
'children',
])}\`}>`;
};
};

View File

@ -0,0 +1,120 @@
import type MarkdownIt from 'markdown-it';
import type Token from 'markdown-it/lib/token';
export type MarkdownParser = MarkdownIt;
export type ParseMarkdownOptions = {
mode?: string;
baseUrl?: string;
escapeConstants?: boolean;
define?: Record<string, unknown>;
globalComponentFiles?: string[];
topLevelHtmlTags?: AddTopLevelHtmlTags;
};
export type AddTopLevelHtmlTags = (data: {
fileName: string;
filePath: string;
meta: MarkdownMeta;
}) => string[] | undefined | null;
export type InlineElementRule = 'emphasized' | 'image' | 'strikethrough' | 'strong';
export type BlockElementRule =
| 'blockquote'
| 'heading'
| 'list_item'
| 'ordered_list'
| 'paragraph'
| 'table'
| 'bullet_list';
export type MarkdownInlineComponent = {
name: string;
type: 'inline';
rule: InlineElementRule;
};
export type MarkdownBlockComponent = {
name: string;
type: 'block';
rule: BlockElementRule;
};
export type MarkdownCustomComponent = {
name: string;
type: 'custom';
container?: MarkdownComponentContainer;
};
export type MarkdownComponentContainer = {
name?: string;
marker?: string;
renderer?(componentName: string): (tokens: Token[], idx: number) => string;
};
export type MarkdownComponents = (
| MarkdownInlineComponent
| MarkdownBlockComponent
| MarkdownCustomComponent
)[];
export type MarkdownMeta = {
title: string;
description: string;
excerpt: string;
headers: MarkdownHeader[];
frontmatter: MarkdownFrontmatter;
lastUpdated: number;
};
export type MarkdownFrontmatter = Record<string, unknown>;
export type MarkdownHeader = {
level: number;
title: string;
slug: string;
children?: MarkdownHeader[];
};
export type MarkdownLinks = string[];
export type ParsedMarkdownResult = {
content: string;
meta: MarkdownMeta;
html: string;
links: MarkdownLinks;
importedFiles: string[];
env: MarkdownParserEnv;
};
/**
* Metadata provided to markdown parser.
*/
export type MarkdownParserEnvInput = {
/** Absolute system file path of the markdown file. */
filePath?: string | null;
/** Frontmatter of the markdown file. */
frontmatter?: MarkdownFrontmatter;
};
/**
* Resources extracted from markdown parser.
*/
export type MarkdownParserEnvOutput = {
/** Headers that are extracted by `extractHeadersPlugin`. */
headers?: MarkdownHeader[];
/** Imported files that are extracted by `importCodePlugin`. */
importedFiles?: string[];
/** Hoisted `<script>` and `<style>` tags. */
hoistedTags?: string[];
/** Links that are extracted by `linksPlugin`. */
links?: MarkdownLinks;
/** Title that is extracted by `extractTitlePlugin`. */
title?: string;
};
/**
* The `env` object to be passed to `markdown-it` render function.
*/
export type MarkdownParserEnv = MarkdownParserEnvInput & MarkdownParserEnvOutput;

View File

@ -0,0 +1,43 @@
const htmlEscapeMap = {
'&': '&amp;',
'<': '&lt;',
'>': '&gt;',
"'": '&#39;',
'"': '&quot;',
};
const htmlEscapeRegexp = /[&<>'"]/g;
export const htmlEscape = (str: string): string =>
str.replace(htmlEscapeRegexp, (char) => htmlEscapeMap[char]);
const htmlUnescapeMap = {
'&amp;': '&',
'&#38;': '&',
'&lt;': '<',
'&#60;': '<',
'&gt;': '>',
'&#62;': '>',
'&apos;': "'",
'&#39;': "'",
'&quot;': '"',
'&#34;': '"',
};
const htmlUnescapeRegexp = /&(amp|#38|lt|#60|gt|#62|apos|#39|quot|#34);/g;
export const htmlUnescape = (str: string): string =>
str.replace(htmlUnescapeRegexp, (char) => htmlUnescapeMap[char]);
const TEMPLATE_TAG_RE =
/(\{#(if|each|await|key).*\})|(\{:(else|then|catch).*\})|(\{\/(if|each|key|await)\})|(\{@(html|debug).*\})/gim;
export function commentOutTemplateTags(source: string) {
return source.replace(TEMPLATE_TAG_RE, (match) => {
return `<!--&%& ${match} &%&-->`;
});
}
const TEMPLATE_TAG_COMMENT_RE = /(<!--&%&\s)|(\s&%&-->)/gim;
export function uncommentTemplateTags(source: string) {
return source.replace(TEMPLATE_TAG_COMMENT_RE, '');
}

View File

@ -0,0 +1,25 @@
/**
* Determine if a link is a http link or not.
*
* - http://github.com
* - https://github.com
* - //github.com
*/
export const isLinkHttp = (link: string): boolean => /^(https?:)?\/\//.test(link);
/**
* Determine if a link is external or not.
*/
export const isLinkExternal = (link: string, base = '/'): boolean => {
// http link
if (isLinkHttp(link)) {
return true;
}
// absolute link that does not start with `base`
if (link.startsWith('/') && !link.startsWith(base)) {
return true;
}
return false;
};

View File

@ -0,0 +1,25 @@
/**
* Global constants and env variables will be statically replaced by Vite in build mode. This
* util helps avoid that by inserting escape sequences.
*
* @see https://vitejs.dev/guide/env-and-mode.html#production-replacement
*/
export function preventViteReplace(source: string, define?: Record<string, unknown>): string {
source = source
.replace(/\bimport\.meta/g, 'import.<wbr/>meta')
.replace(/\bprocess\.env/g, 'process.<wbr/>env');
// Also avoid replacing defines.
if (define) {
const regex = new RegExp(
`\\b(${Object.keys(define)
.map((key) => key.replace(/[-[\]/{}()*+?.\\^$|]/g, '\\$&'))
.join('|')})`,
'g',
);
source = source.replace(regex, (_) => `${_[0]}<wbr/>${_.slice(1)}`);
}
return source;
}

View File

@ -0,0 +1,87 @@
import type Token from 'markdown-it/lib/token';
import type { MarkdownHeader } from '../types';
import { resolveTitleFromToken } from './resolveTitleFromToken';
import { slugify } from './slugify';
/**
* Resolve headers from `markdown-it` tokens.
*/
export const resolveHeadersFromTokens = (
tokens: Token[],
{
level,
allowHtml,
escapeText,
}: {
level: number[];
allowHtml: boolean;
escapeText: boolean;
},
): MarkdownHeader[] => {
const headers: MarkdownHeader[] = [];
// A temp headers stack for generating the headers tree.
const stack: MarkdownHeader[] = [];
// Push a header to the headers tree.
const push = (header: MarkdownHeader): void => {
while (stack.length !== 0 && header.level <= stack[0].level) {
stack.shift();
}
if (stack.length === 0) {
headers.push(header);
stack.push(header);
} else {
(stack[0].children ??= []).push(header);
stack.unshift(header);
}
};
tokens.forEach((_, idx) => {
const token = tokens[idx];
// If the token type does not match, skip.
if (token?.type !== 'heading_open') {
return;
}
// Get the level from the tag, `h1 -> 1`.
const headerLevel = Number.parseInt(token.tag.slice(1), 10);
// If the level should not be extracted, skip.
if (!level.includes(headerLevel)) {
return;
}
// The next token of 'heading_open' contains the heading content.
const nextToken = tokens[idx + 1];
// If the next token does not exist, skip.
if (!nextToken) {
return;
}
const title = resolveTitleFromToken(nextToken, {
allowHtml,
escapeText,
});
/**
* The id of the heading anchor is the slugified result from `markdown-it-anchor`. If the id
* does not exist, we'll slugify the title ourselves.
*/
const slug = token.attrGet('id') ?? slugify(title);
// Push the header to tree.
push({
level: headerLevel,
title,
slug,
children: [],
});
});
return headers;
};

View File

@ -0,0 +1,52 @@
import type Token from 'markdown-it/lib/token';
import { htmlEscape } from './htmlEscape';
/**
* Resolve the header title from a `markdown-it` token. Typically uses the token that follows
* the `heading_open` token.
*/
export const resolveTitleFromToken = (
token: Token,
{
allowHtml,
escapeText,
}: {
allowHtml: boolean;
escapeText: boolean;
},
): string => {
// Children of the token contain the parsed result of the heading title.
const children = token.children ?? [];
// Type of tokens to be included in the heading title.
const titleTokenTypes = ['text', 'emoji', 'code_inline'];
// Include 'html_inline' or not.
if (allowHtml) {
titleTokenTypes.push('html_inline');
}
// Filter the token type to be included in the title.
const titleTokens = children.filter(
(item) =>
titleTokenTypes.includes(item.type) &&
// Filter out the permalink symbol generated by `markdown-it-anchor`.
!item.meta?.isPermalinkSymbol,
);
// Get title from tokens.
return titleTokens
.reduce((result, item) => {
if (escapeText) {
// Escape the content of 'code_inline' and 'text'.
if (item.type === 'code_inline' || item.type === 'text') {
return `${result}${htmlEscape(item.content)}`;
}
}
// Keep the content of 'emoji' and 'html_inline'.
return `${result}${item.content}`;
}, '')
.trim();
};

View File

@ -0,0 +1,22 @@
// eslint-disable-next-line no-control-regex
const rControl = /[\u0000-\u001f]/g;
const rSpecial = /[\s~`!@#$%^&*()\-_+=[\]{}|\\;:"'“”‘’<>,.?/]+/g;
const rCombining = /[\u0300-\u036F]/g;
export const slugify = (str: string): string =>
str
.normalize('NFKD')
// Remove accents
.replace(rCombining, '')
// Remove control characters
.replace(rControl, '')
// Replace special characters
.replace(rSpecial, '-')
// Remove continuous separators
.replace(/-{2,}/g, '-')
// Remove prefixing and trailing separators
.replace(/^-+|-+$/g, '')
// Ensure it doesn't start with a number (#121)
.replace(/^(\d)/, '_$1')
// Lowercase
.toLowerCase();

View File

@ -0,0 +1,10 @@
{
"extends": "../../tsconfig-build.json",
"compilerOptions": {
"baseUrl": ".",
"outDir": "../../node",
"declarationDir": "../../node-types",
"lib": ["es2019", "dom"]
},
"include": ["**/*.ts"]
}

View File

@ -0,0 +1,25 @@
import { readFileSync } from 'fs';
import { resolve } from 'path';
import { isUndefined } from './unit';
let isLocal;
export function isLocalEnv() {
if (!isUndefined(isLocal)) return isLocal;
try {
const pkgPath = resolve(process.cwd(), 'package.json');
if (pkgPath.endsWith('kit-docs/package.json')) {
const pkg = readFileSync(pkgPath).toString();
if (/"name": "@svelteness\/kit-docs"/.test(pkg)) {
isLocal = true;
return true;
}
}
} catch (e) {
// no-op
}
isLocal = false;
return false;
}

View File

@ -0,0 +1,107 @@
import { normalizePath } from '@rollup/pluginutils';
import { createHash } from 'crypto';
import { createReadStream, readdirSync, statSync } from 'fs';
import LRUCache from 'lru-cache';
import { resolve } from 'path';
export function checksumFile(path: string): Promise<string> {
return new Promise((resolve, reject) => {
const hash = createHash('sha256');
const stream = createReadStream(path);
stream.on('error', (err) => reject(err));
stream.on('data', (chunk) => hash.update(chunk));
stream.on('end', () => resolve(hash.digest('hex')));
});
}
const ignoreFileRE = /^(\.|_)/;
export function readDirDeepSync(dir: string, options: { maxDepth?: number; _depth?: number } = {}) {
const depth = options._depth ?? 0;
if (depth === options.maxDepth) return [];
const files: string[] = [];
for (const file of readdirSync(dir)) {
const filePath = resolve(dir, file);
const stat = statSync(filePath);
if (stat.isDirectory()) {
files.push(
...readDirDeepSync(filePath, {
...options,
_depth: depth + 1,
}),
);
} else if (!ignoreFileRE.test(file)) {
files.push(normalizePath(filePath));
}
}
return files;
}
const fileOrderRE = /\[\.\.\.(\d*?)(_|=|\])/;
const sortFilesCache = new LRUCache<string, number>({ max: 1024 });
export function sortOrderedFiles(files: string[]) {
return files.sort((fileA, fileB) => {
const cacheKey = fileA + fileB;
const cache = (result: number) => sortFilesCache.set(cacheKey, result);
if (sortFilesCache.has(cacheKey)) {
return sortFilesCache.get(cacheKey)!;
}
const tokensA = fileA.split('/').slice(1);
const tokensB = fileB.split('/').slice(1);
const len = Math.max(tokensA.length, tokensB.length);
for (let i = 0; i < len; i++) {
if (!(i in tokensA)) {
cache(-1);
return -1;
}
if (!(i in tokensB)) {
cache(1);
return 1;
}
const tokenA = tokensA[i].toLowerCase();
const tokenB = tokensB[i].toLowerCase();
const tokenAOrderNo = tokensA[i].match(fileOrderRE)?.[1];
const tokenBOrderNo = tokensB[i].match(fileOrderRE)?.[1];
if (tokenAOrderNo && tokenBOrderNo) {
const result = parseInt(tokenAOrderNo) - parseInt(tokenBOrderNo);
if (result !== 0) {
cache(result);
return result;
}
}
if (tokenA === tokenB) {
continue;
}
const isTokenADir = tokenA[tokenA.length - 1] === '/';
const isTokenBDir = tokenB[tokenB.length - 1] === '/';
let result;
if (isTokenADir === isTokenBDir) {
result = tokenA < tokenB ? -1 : 1;
} else {
result = isTokenADir ? 1 : -1;
}
cache(result);
return result;
}
cache(0);
return 0;
});
}

View File

@ -0,0 +1,5 @@
import { basename, extname } from 'path';
export function getFileNameFromPath(filePath: string) {
return basename(filePath, extname(filePath));
}

View File

@ -0,0 +1,17 @@
import { createHash } from 'crypto';
export function uppercaseFirstLetter(str: string) {
return str.charAt(0).toUpperCase() + str.slice(1);
}
export function titleToSnakeCase(str: string) {
return str.replace(/([A-Z]|[1-9])/g, (x) => '_' + x[0].toLowerCase()).slice(1);
}
export function kebabToTitleCase(str: string) {
return uppercaseFirstLetter(str.replace(/-./g, (x) => ' ' + x[1].toUpperCase()));
}
export function hashString(str: string) {
return createHash('sha256').update(str).digest('hex');
}

View File

@ -0,0 +1,7 @@
export function isString(value: any): value is string {
return typeof value === 'string';
}
export function isUndefined(value: unknown): value is undefined {
return typeof value === 'undefined';
}

View File

@ -0,0 +1,12 @@
import { createKitDocsLoader } from '$lib/loaders';
export const prerender = true;
/** @type {import('./$types').LayoutLoad} */
export const load = createKitDocsLoader({
sidebar: {
'/': null,
'/docs': '/docs',
},
});

View File

@ -0,0 +1,38 @@
<script>
import '@svelteness/kit-docs/client/polyfills/index.js';
import '@svelteness/kit-docs/client/styles/normalize.css';
import '@svelteness/kit-docs/client/styles/fonts.css';
import '@svelteness/kit-docs/client/styles/theme.css';
import '@svelteness/kit-docs/client/styles/vars.css';
import { KitDocs, KitDocsLayout } from '@svelteness/kit-docs';
// ...
/** @type {import('@svelteness/kit-docs').NavbarConfig} */
const navbar = {
  links: [
    { title: 'Get started', slug: '/latest/get-started', match: /\/latest\/get-started/ },
    { title: 'API', slug: '/latest/api', match: /\/latest\/api/ },
  ],
};
const sidebar = {
  links: {
    '': [
      { title: 'Get started', slug: '/latest/get-started' },
      { title: 'API', slug: '/latest/api' },
    ],
  },
};
export let data;
$: meta = data.meta;
</script>
<KitDocs {meta}>
<KitDocsLayout {navbar} {sidebar} >
<slot />
</KitDocsLayout>
</KitDocs>

View File

@ -0,0 +1,8 @@
import { redirect } from '@sveltejs/kit';
export const prerender = true;
/** @type {import('@sveltejs/kit').PageLoad} */
export async function load() {
throw redirect(307, `/latest/get-started`);
}

Binary file not shown.

View File

@ -0,0 +1,5 @@
import { createSidebarRequestHandler } from '../../../node/handlers';
export const prerender = false;
export const GET = createSidebarRequestHandler();

View File

@ -0,0 +1,5 @@
import { createMetaRequestHandler } from '../../../node/handlers';
export const prerender = true;
export const GET = createMetaRequestHandler();

View File

@ -0,0 +1,8 @@
import { redirect } from '@sveltejs/kit';
export const prerender = true;
/** @type {import('@sveltejs/kit').PageLoad} */
export async function load() {
throw redirect(307, `/latest/get-started`);
}

View File

@ -0,0 +1,317 @@
---
title: API
---
# API
:::info
All functions are curried, Remeda-style, so if you see `f(dataIn, ...others)`, it can be called with either `f(dataIn, ...others)` or `f(...others)(dataIn)`.
:::
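For instance, with the `u.updateIn` helper documented below, the two call styles are interchangeable (a minimal sketch; `data` is a made-up value):
```js
import u from '@yanick/updeep-remeda';

const data = { a: { b: 1 } };

u.updateIn(data, 'a.b', 2); // data-first call
u.updateIn('a.b', 2)(data); // curried call, same result
```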
## Importing
`updeep-remeda` exports a default function that is an alias to `u.update` and
has all the other functions available as props.
```js
import u from '@yanick/updeep-remeda';
const foo = u({a:1}, { a: x => x + 1 });
const bar = u.updateIn({ a: { b: 2 } }, 'a.b', 3 );
```
Or you can import the functions piecemeal:
```js
import { updateIn, omit } from '@yanick/updeep-remeda';
```
## `u(dataIn, updates)`
## `u.update(dataIn, updates)`
Update as many values as you want, as deeply as you want. The `updates` parameter can either be an object, a function, or a value. Everything returned from `u` is frozen recursively.
If `updates` is an object, then for each key/value pair it will apply the updates specified in the value to `dataIn[key]`.
If `updates` is a function, it will call the function with `dataIn` and return the result.
If `updates` is any other value, it will return that value.
Sometimes you may want to set a property to an entire object, or to a function. In that case, you'll need to use a function that returns that value, otherwise it would be interpreted as an update. Ex. `function() { return { a: 0 }; }`.
Also available as `u.update(...)`.
### Simple update
Object properties:
```js
const person = {
name: {
first: "Jane",
last: "West",
},
};
const result = u(person, { name: { first: "Susan" } });
expect(result).to.eql({ name: { first: "Susan", last: "West" } });
```
Array elements:
```js
const scoreboard = {
scores: [12, 28],
};
const result = u(scoreboard, { scores: { 1: 36 } });
expect(result).to.eql({ scores: [12, 36] });
```
### Multiple updates
```js
const person = {
name: {
first: "Mike",
last: "Smith",
},
scores: [12, 28],
};
const result = u(person, { name: { last: "Jones" }, scores: { 1: 36 } });
expect(result).to.eql({
name: { first: "Mike", last: "Jones" },
scores: [12, 36],
});
```
### Use a function
```js
const increment = (i) => i + 1;
const scoreboard = {
scores: {
team1: 0,
team2: 0,
},
};
const result = u(scoreboard, { scores: { team2: increment } });
expect(result).to.eql({ scores: { team1: 0, team2: 1 } });
```
### Array Manipulation
Non-trivial array manipulations, such as element removal/insertion/sorting, can be implemented with functions. Because there are so many possible manipulations, we don't provide any helpers and leave this up to you. Simply ensure your function is pure and does not mutate its arguments.
```js
function addTodo(todos) {
return [].concat(todos, [{ done: false }]);
}
const state = {
todos: [{ done: false }, { done: false }],
};
const result = u(state, { todos: addTodo });
expect(result).to.eql({
todos: [{ done: false }, { done: false }, { done: false }],
});
```
Remeda is one of the many libraries providing good utility functions for
such manipulations.
```js
import { reject, concat, prop } from "remeda";
let state = {
todos: [{ done: true }, { done: false }],
};
// add a new todo
state = u(state, { todos: concat({ done: false }) });
expect(state).to.eql({
todos: [{ done: true }, { done: false }, { done: false }],
});
// remove all done todos
state = u(state, { todos: reject(prop("done")) });
expect(state).to.eql({ todos: [{ done: false }, { done: false }] });
```
### Default input data
When the input data is null or undefined, updeep uses an empty plain object.
```js
const result = u(null, { foo: "bar" });
expect(result).to.eql({ foo: "bar" });
```
### Partial application
```js
const inc = (i) => i + 1;
const addOneYear = u({ age: inc });
const result = addOneYear({ name: "Shannon Barnes", age: 62 });
expect(result).to.eql({ name: "Shannon Barnes", age: 63 });
```
## `u.freeze(dataIn)`
Freeze your initial state to protect against mutations. Only performs the freezing in development, and returns the original object unchanged in production.
```js
const state = u.freeze({ someKey: "Some Value" });
state.someKey = "Mutate"; // ERROR in development
```
## `u.updateIn(dataIn, path, value)`
Update a single value with a simple string or array path. Can be used to update nested objects, arrays, or a combination. Can also be used to update every element of a nested array with `'*'`.
```js
const result = u.updateIn(
{ bunny: { color: "black" } },
"bunny.color",
"brown"
);
expect(result).to.eql({ bunny: { color: "brown" } });
```
```js
const result = u.updateIn(
"0.1.color",
"brown"
)([[{ color: "blue" }, { color: "red" }], []]);
expect(result).to.eql([[{ color: "blue" }, { color: "brown" }], []]);
```
```js
const incr = (i) => i + 1;
const result = u.updateIn("bunny.age", incr)({ bunny: { age: 2 } });
expect(result).to.eql({ bunny: { age: 3 } });
```
```js
const result = u(
{ pets: [{ bunny: { age: 2 } }] },
{ pets: u.updateIn([0, "bunny", "age"], 3) },
);
expect(result).to.eql({ pets: [{ bunny: { age: 3 } }] });
```
```js
const result = u.updateIn(
"todos.*.done",
true
)({
todos: [{ done: false }, { done: false }],
});
expect(result).to.eql({
todos: [{ done: true }, { done: true }],
});
```
## `u.constant(dataIn)`
Sometimes, you want to replace an object outright rather than merging it.
You'll need to use a function that returns the new object.
`u.constant` creates that function for you.
```js
const user = {
name: "Mitch",
favorites: {
band: "Nirvana",
movie: "The Matrix",
},
};
const newFavorites = {
band: "Coldplay",
};
const result = u(user, { favorites: u.constant(newFavorites) });
expect(result).to.eql({ name: "Mitch", favorites: { band: "Coldplay" } });
```
```js
const alwaysFour = u.constant(4);
expect(alwaysFour(32)).to.eql(4);
```
## `u.if(dataIn, predicate, updates)`
Apply `updates` if `predicate` is truthy, or, when `predicate` is a function, if it
evaluates to truthy when called with `dataIn`.
```js
function isEven(x) {
return x % 2 === 0;
}
function increment(x) {
return x + 1;
}
const result = u({ value: 2 }, { value: u.if(isEven, increment) });
expect(result).to.eql({ value: 3 });
```
## `u.filter(arrayIn, predicate)`
## `u.reject(arrayIn, predicate)`
## `u.pickBy(objectIn, predicate)`
## `u.omitBy(objectIn, predicate)`
## `u.pick(objectIn, keys)`
## `u.omit(objectIn, keys)`
Essentially the same as their Remeda counterparts. The difference is
that if the transformation results in no change, the original object/array is
returned.
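A minimal sketch of that behavior, using a made-up `user` object:
```js
const user = { id: 1, name: "Ada", password: "hunter2" };

// Keep only the public fields.
const publicUser = u.pick(user, ["id", "name"]);
// => { id: 1, name: 'Ada' }

// A transformation that changes nothing hands back the original reference.
const same = u.omit(user, []);
same === user; // true
```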
## `u.matches(dataIn, condition)`
Does a deep comparison of `dataIn` against `condition` and returns
`true` if the object matches.
Scalar values are checked for equality (i.e., `{foo: 12}`
verifies that the object has the prop `foo` set to `12`), and
functions are invoked with the corresponding value from the object and
are expected to return `true` on a match.
```js
u.matches(
{ name: "Bob", age: 32, address: "..." },
{
name: "Bob",
age: (age) => age > 30,
}
); // true
```

View File

@ -0,0 +1,111 @@
---
title: Get Started
---
# updeep-remeda
> Easily update nested frozen objects and arrays in a declarative and immutable
> manner.
## About
:::info
This is a fork of the main updeep package. For ease of reading &mdash; not to
mention ease of shamelessly lifting large pieces of the original
documentation &mdash; in this documentation, all mentions of `updeep` refer to this
fork.
:::
updeep makes updating deeply nested objects/arrays painless by allowing you to
declare the updates you would like to make and it will take care of the rest. It
will recursively return the same instance if no changes have been made, making
it ideal for using reference equality checks to detect changes.
Because of this, everything returned by updeep is frozen. Not only that, but
updeep assumes that every object passed in to update is immutable, so it may
freeze objects passed in as well. Note that the freezing only happens in
development.
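As a quick illustration of those two guarantees (a sketch; `state` is a made-up object):
```js
import u from "@yanick/updeep-remeda";

const state = { counter: { value: 1 } };

// No effective change: the very same (frozen) object comes back.
u(state, { counter: { value: 1 } }) === state; // true

// An actual change yields a new, recursively frozen object.
const next = u(state, { counter: { value: 2 } });
next === state; // false
next.counter.value; // 2
```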
This fork of updeep requires Remeda, but works very well alongside any other utility library ([lodash], [Ramda], etc.).
## Differences with the original Updeep
* Under the hood, the use of lodash has
been replaced by Remeda (for better type support and tree-shaking abilities).
* The codebase has been ported to TypeScript (mostly for the lulz).
* The order of parameters in the non-curried invocation of functions has been modified. In the original updeep the input object is the last parameter, whereas here it's the first.
```js
// original updeep
const dataIn = { a: 1, b: 2 };
let dataOut = u({ c: 3 }, dataIn); // simple call
dataOut = u({ c: 3 })(dataIn); // curried
// updeep-remeda
dataOut = u(dataIn, { c: 3 }); // simple call
dataOut = u({ c: 3 })(dataIn); // curried
```
* `withDefault` has been removed, as the behavior can be implemented using
Remeda's `pipe` or a simple `??` (see the sketch below).
* `u.omitted` has been renamed `u.skip`.
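A rough sketch of both points, assuming `u.skip` keeps the original `u.omitted` semantics of dropping the key it is assigned to (`data` and `inc` are made up):
```js
import u from "@yanick/updeep-remeda";

const inc = (i) => i + 1;
const data = { count: undefined, legacyField: "bye" };

// What withDefault(0, inc) did in the original can be a plain `??`:
const bumped = u(data, { count: (n) => inc(n ?? 0) });
// => bumped.count is 1

// u.omitted is now u.skip: drop the key from the result (assumed semantics).
const trimmed = u(data, { legacyField: u.skip });
// => { count: undefined }
```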
## Installation
```bash
$ npm install @yanick/updeep-remeda
# or
$ pnpm install @yanick/updeep-remeda
```
## Full example
```js
import u from "@yanick/updeep-remeda";
const person = {
name: { first: "Bill", last: "Sagat" },
children: [
{ name: "Mary-Kate", age: 7 },
{ name: "Ashley", age: 7 },
],
todo: ["Be funny", "Manage household"],
email: "bill@example.com",
version: 1,
};
const inc = (i) => i + 1;
const eq = (x) => (y) => x === y;
const newPerson = u(person, {
// Change first name
name: { first: "Bob" },
// Increment all children's ages
children: u.map({ age: inc }),
// Update email
email: "bob@example.com",
// Remove todo
todo: u.reject(eq("Be funny")),
// Increment version
version: inc,
});
// => {
// name: { first: 'Bob', last: 'Sagat' },
// children: [
// { name: 'Mary-Kate', age: 8 },
// { name: 'Ashley', age: 8 }
// ],
// todo: [
// 'Manage household'
// ],
// email: 'bob@example.com',
// version: 2
//}
```

BIN
website/static/favicon.png Normal file

Binary file not shown.


11
website/svelte.config.js Normal file
View File

@ -0,0 +1,11 @@
import adapter from '@sveltejs/adapter-static';
/** @type {import('@sveltejs/kit').Config} */
const config = {
extensions: ['.svelte', '.md'],
kit: {
adapter: adapter({ strict: false })
}
};
export default config;

View File

@ -0,0 +1,14 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"declaration": true,
"declarationMap": true,
"emitDeclarationOnly": true,
"preserveWatchOutput": true,
"noEmit": false,
"importHelpers": true,
"incremental": false,
"sourceMap": true,
"useDefineForClassFields": false
}
}

18
website/tsconfig.json Normal file
View File

@ -0,0 +1,18 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"paths": {
"$lib": ["./src/lib"],
"$lib/*": ["./src/lib/*"],
"$img/*": ["./src/img/*"]
}
}
}

30
website/vite.config.js Normal file
View File

@ -0,0 +1,30 @@
import { sveltekit } from '@sveltejs/kit/vite';
import kitDocs from '@svelteness/kit-docs/node';
import { resolve } from 'path';
import icons from 'unplugin-icons/vite';
const config = {
resolve: {
alias: {
$fonts: resolve(process.cwd(), 'src/lib/fonts'),
},
},
server: {
fs: {
strict: false,
},
},
plugins: [
icons({ compiler: 'svelte' }),
kitDocs({
markdown: {
shiki: {
theme: 'material-ocean',
},
},
}),
sveltekit()]
};
export default config;