Adding hotfixes for packages
All checks were successful
continuous-integration/drone/push Build is passing

Fabian Stamm
2020-10-14 02:56:11 +02:00
parent 46d8f8b289
commit 1b2d85eeef
95 changed files with 12467 additions and 2 deletions

14
markdown/.vscode/launch.json vendored Normal file

@@ -0,0 +1,14 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Deno",
"type": "node",
"request": "launch",
"cwd": "${workspaceFolder}",
"runtimeExecutable": "deno",
"runtimeArgs": ["run", "--inspect", "-A", "app.ts"],
"port": 9229
}
]
}

9
markdown/.vscode/settings.json vendored Normal file

@@ -0,0 +1,9 @@
{
"deno.enable": true,
"[typescript]": {
"editor.defaultFormatter": "axetroy.vscode-deno"
},
"[typescriptreact]": {
"editor.defaultFormatter": "axetroy.vscode-deno"
}
}

21
markdown/LICENSE Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Eivind Furuberg
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

62
markdown/README.md Normal file

@@ -0,0 +1,62 @@
# markdown
Deno Markdown module forked from https://github.com/ts-stack/markdown/tree/bb47aa8e625e89e6aa84f49a98536a3089dee831
### Example usage
Simple md2html.ts script:
```typescript
import { Marked } from "./mod.ts";
const decoder = new TextDecoder("utf-8");
const filename = Deno.args[0];
const markdown = decoder.decode(await Deno.readFile(filename));
const markup = Marked.parse(markdown);
console.log(markup.content);
console.log(JSON.stringify(markup.meta));
```
Now running:
```bash
deno run --allow-read md2html.ts example.md > example.html
```
Will output:
```html
<h1 id="hello-world">Hello World</h1>
<h2 id="this-an-example-for-md2html-ts-">
This an example for <code>md2html.ts</code>
</h2>
<p>A small paragraph that will become a <code>&lt;p&gt;</code> tag</p>
<hr />
<p>Code Block (md2html.ts)</p>
<pre><code class="lang-typescript">import { Marked } from &quot;./mod.ts&quot;;
const decoder = new TextDecoder(&quot;utf-8&quot;);
const filename = Deno.args[0];
const markdown = decoder.decode(await Deno.readFile(filename));
const markup = Marked.parse(markdown);
console.log(markup.content);
console.log(JSON.stringify(markup.meta));
</code></pre>
<p>
This module is forked from
<a
href="https://github.com/ts-stack/markdown/tree/bb47aa8e625e89e6aa84f49a98536a3089dee831"
>ts-stack/markdown</a
>
</p>
<p>Made for Deno <img src="https://deno.land/logo.svg" alt="deno-logo" /></p>
{"title":"Hello world!","subtitle":"Front-matter is supported!","boolean":true,"list-example":["this","is",{"a":"list"}]}
```
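### Options
The default options can be changed before parsing with `Marked.setOptions`. A minimal sketch, using option fields defined in `src/interfaces.ts` (the specific values below are only illustrative):
```typescript
import { Marked } from "./mod.ts";
// Merge custom options into the defaults before parsing.
Marked.setOptions({ breaks: true, smartypants: true });
const markup = Marked.parse("# Heading\n\nSome *emphasized* text");
console.log(markup.content); // rendered HTML
console.log(markup.meta); // front-matter object ({} when the document has none)
```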
---
### Notes
I had to make some changes to the source code to satisfy the compiler, mostly fixes for values that were uninitialized or possibly null or undefined.

BIN
markdown/example.html Normal file

Binary file not shown.

34
markdown/example.md Normal file

@@ -0,0 +1,34 @@
---
title : Hello world!
subtitle : Front-matter is supported!
boolean: true
list-example:
- this
- is
- a: list
---
# Hello World
## This is an example for `md2html.ts`
A small paragraph that will become a `<p>` tag
---
Code Block (md2html.ts)
```typescript
import { Marked } from "./mod.ts";
const decoder = new TextDecoder("utf-8");
const filename = Deno.args[0];
const markdown = decoder.decode(await Deno.readFile(filename));
const markup = Marked.parse(markdown);
console.log(markup.content);
console.log(JSON.stringify(markup.meta));
```
This module is forked from [ts-stack/markdown](https://github.com/ts-stack/markdown/tree/bb47aa8e625e89e6aa84f49a98536a3089dee831)
Made for Deno
![deno-logo](https://deno.land/logo.svg)

8
markdown/md2html.ts Normal file

@@ -0,0 +1,8 @@
import { Marked } from "./mod.ts";
const decoder = new TextDecoder("utf-8");
const filename = Deno.args[0];
const markdown = decoder.decode(await Deno.readFile(filename));
const markup = Marked.parse(markdown);
console.log(markup.content);
console.log(JSON.stringify(markup.meta));

12
markdown/meta.json Normal file

@@ -0,0 +1,12 @@
{
"name": "markdown",
"version": "0.0.1",
"description": "",
"author": "Fabian Stamm <dev@fabianstamm.de>",
"contributors": [],
"files": [
"**/*.ts",
"**/*.js",
"README.md"
]
}

8
markdown/mod.ts Normal file

@@ -0,0 +1,8 @@
export * from "./src/block-lexer.ts";
export * from "./src/helpers.ts";
export * from "./src/inline-lexer.ts";
export * from "./src/interfaces.ts";
export * from "./src/marked.ts";
export * from "./src/parser.ts";
export * from "./src/renderer.ts";
export * from "./src/extend-regexp.ts";

520
markdown/src/block-lexer.ts Normal file

@@ -0,0 +1,520 @@
/**
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import { ExtendRegexp } from "./extend-regexp.ts";
import {
Align,
LexerReturns,
Links,
MarkedOptions,
RulesBlockBase,
RulesBlockGfm,
RulesBlockTables,
Token,
TokenType,
Obj
} from "./interfaces.ts";
import { Marked } from "./marked.ts";
import { load } from "https://deno.land/std/encoding/_yaml/loader/loader.ts";
export class BlockLexer<T extends typeof BlockLexer> {
static simpleRules: RegExp[] = [];
protected static rulesBase: RulesBlockBase;
/**
* GFM Block Grammar.
*/
protected static rulesGfm: RulesBlockGfm;
/**
* GFM + Tables Block Grammar.
*/
protected static rulesTables: RulesBlockTables;
protected rules!: RulesBlockBase | RulesBlockGfm | RulesBlockTables;
protected options: MarkedOptions;
protected links: Links = {};
protected tokens: Token[] = [];
protected frontmatter: Obj = {};
protected hasRulesGfm!: boolean;
protected hasRulesTables!: boolean;
constructor(protected staticThis: typeof BlockLexer, options?: object) {
this.options = options || Marked.options;
this.setRules();
}
/**
* Accepts Markdown text and returns an object with tokens and links.
*
* @param src String of markdown source to be compiled.
* @param options Hash of options.
*/
static lex(
src: string,
options?: MarkedOptions,
top?: boolean,
isBlockQuote?: boolean,
): LexerReturns {
const lexer = new this(this, options);
return lexer.getTokens(src, top, isBlockQuote);
}
protected static getRulesBase(): RulesBlockBase {
if (this.rulesBase) {
return this.rulesBase;
}
const base: RulesBlockBase = {
newline: /^\n+/,
code: /^( {4}[^\n]+\n*)+/,
hr: /^( *[-*_]){3,} *(?:\n+|$)/,
heading: /^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/,
lheading: /^([^\n]+)\n *(=|-){2,} *(?:\n+|$)/,
blockquote: /^( *>[^\n]+(\n[^\n]+)*\n*)+/,
list: /^( *)(bull) [\s\S]+?(?:hr|def|\n{2,}(?! )(?!\1bull )\n*|\s*$)/,
html:
/^ *(?:comment *(?:\n|\s*$)|closed *(?:\n{2,}|\s*$)|closing *(?:\n{2,}|\s*$))/,
def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +["(]([^\n]+)[")])? *(?:\n+|$)/,
paragraph:
/^((?:[^\n]+\n?(?!hr|heading|lheading|blockquote|tag|def))+)\n*/,
text: /^[^\n]+/,
bullet: /(?:[*+-]|\d+\.)/,
item: /^( *)(bull) [^\n]*(?:\n(?!\1bull )[^\n]*)*/,
};
base.item = new ExtendRegexp(base.item, "gm").setGroup(/bull/g, base.bullet)
.getRegexp();
base.list = new ExtendRegexp(base.list)
.setGroup(/bull/g, base.bullet)
.setGroup("hr", "\\n+(?=\\1?(?:[-*_] *){3,}(?:\\n+|$))")
.setGroup("def", "\\n+(?=" + base.def.source + ")")
.getRegexp();
const tag = "(?!(?:" +
"a|em|strong|small|s|cite|q|dfn|abbr|data|time|code" +
"|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo" +
"|span|br|wbr|ins|del|img)\\b)\\w+(?!:/|[^\\w\\s@]*@)\\b";
base.html = new ExtendRegexp(base.html)
.setGroup("comment", /<!--[\s\S]*?-->/)
.setGroup("closed", /<(tag)[\s\S]+?<\/\1>/)
.setGroup("closing", /<tag(?:"[^"]*"|'[^']*'|[^'">])*?>/)
.setGroup(/tag/g, tag)
.getRegexp();
base.paragraph = new ExtendRegexp(base.paragraph)
.setGroup("hr", base.hr)
.setGroup("heading", base.heading)
.setGroup("lheading", base.lheading)
.setGroup("blockquote", base.blockquote)
.setGroup("tag", "<" + tag)
.setGroup("def", base.def)
.getRegexp();
return (this.rulesBase = base);
}
protected static getRulesGfm(): RulesBlockGfm {
if (this.rulesGfm) {
return this.rulesGfm;
}
const base = this.getRulesBase();
const gfm: RulesBlockGfm = {
...base,
...{
fences: /^ *(`{3,}|~{3,})[ \.]*(\S+)? *\n([\s\S]*?)\s*\1 *(?:\n+|$)/,
paragraph: /^/,
heading: /^ *(#{1,6}) +([^\n]+?) *#* *(?:\n+|$)/,
},
};
const group1 = gfm.fences.source.replace("\\1", "\\2");
const group2 = base.list.source.replace("\\1", "\\3");
gfm.paragraph = new ExtendRegexp(base.paragraph).setGroup(
"(?!",
`(?!${group1}|${group2}|`,
).getRegexp();
return (this.rulesGfm = gfm);
}
protected static getRulesTable(): RulesBlockTables {
if (this.rulesTables) {
return this.rulesTables;
}
return (this.rulesTables = {
...this.getRulesGfm(),
...{
nptable:
/^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*/,
table: /^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*/,
},
});
}
protected setRules() {
if (this.options.gfm) {
if (this.options.tables) {
this.rules = this.staticThis.getRulesTable();
} else {
this.rules = this.staticThis.getRulesGfm();
}
} else {
this.rules = this.staticThis.getRulesBase();
}
this.hasRulesGfm = (this.rules as RulesBlockGfm).fences !== undefined;
this.hasRulesTables = (this.rules as RulesBlockTables).table !== undefined;
}
/**
* Lexing.
*/
protected getTokens(
src: string,
top?: boolean,
isBlockQuote?: boolean,
): LexerReturns {
let nextPart = src;
let execArr: RegExpExecArray | null, fmArr: RegExpExecArray | null;
mainLoop:
while (nextPart) {
// newline
if ((execArr = this.rules.newline.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
if (execArr[0].length > 1) {
this.tokens.push({ type: TokenType.space });
}
}
// code
if ((execArr = this.rules.code.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
const code = execArr[0].replace(/^ {4}/gm, "");
this.tokens.push({
type: TokenType.code,
text: !this.options.pedantic ? code.replace(/\n+$/, "") : code,
});
continue;
}
// fences code (gfm)
if (
this.hasRulesGfm &&
(execArr = (this.rules as RulesBlockGfm).fences.exec(nextPart))
) {
nextPart = nextPart.substring(execArr[0].length);
this.tokens.push({
type: TokenType.code,
lang: execArr[2],
text: execArr[3] || "",
});
continue;
}
// heading
if ((execArr = this.rules.heading.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
this.tokens.push({
type: TokenType.heading,
depth: execArr[1].length,
text: execArr[2],
});
continue;
}
// table no leading pipe (gfm)
if (
top && this.hasRulesTables &&
(execArr = (this.rules as RulesBlockTables).nptable.exec(nextPart))
) {
nextPart = nextPart.substring(execArr[0].length);
const item: Token = {
type: TokenType.table,
header: execArr[1].replace(/^ *| *\| *$/g, "").split(/ *\| */),
align: execArr[2].replace(/^ *|\| *$/g, "").split(
/ *\| */,
) as Align[],
cells: [],
};
if (!item.align) throw ReferenceError;
for (let i = 0; i < item.align.length; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = "right";
} else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = "center";
} else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = "left";
} else {
item.align[i] = "";
}
}
const td: string[] = execArr[3].replace(/\n$/, "").split("\n");
if (!item.cells) throw ReferenceError;
for (let i = 0; i < td.length; i++) {
item.cells[i] = td[i].split(/ *\| */);
}
this.tokens.push(item);
continue;
}
// lheading
if ((execArr = this.rules.lheading.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
this.tokens.push({
type: TokenType.heading,
depth: execArr[2] === "=" ? 1 : 2,
text: execArr[1],
});
continue;
}
// hr
if ((execArr = this.rules.hr.exec(nextPart))) {
// Check whether nothing but whitespace has been tokenized so far.
if ((this.tokens.length == 0) || (this.tokens.every(object => object.type == TokenType.space))) {
// Grab the front-matter data and parse it into a JavaScript object.
if (fmArr = /^(?:\-\-\-)(.*?)(?:\-\-\-|\.\.\.)/s.exec(nextPart)) {
nextPart = nextPart.substring(fmArr[0].length);
this.frontmatter = <Obj> load(fmArr[1]);
}
continue;
} else {
nextPart = nextPart.substring(execArr[0].length);
this.tokens.push({ type: TokenType.hr });
continue;
}
}
// blockquote
if ((execArr = this.rules.blockquote.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
this.tokens.push({ type: TokenType.blockquoteStart });
const str = execArr[0].replace(/^ *> ?/gm, "");
// Pass `top` to keep the current
// "toplevel" state. This is exactly
// how markdown.pl works.
this.getTokens(str);
this.tokens.push({ type: TokenType.blockquoteEnd });
continue;
}
// list
if ((execArr = this.rules.list.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
const bull: string = execArr[2];
this.tokens.push(
{ type: TokenType.listStart, ordered: bull.length > 1 },
);
// Get each top-level item.
const str = execArr[0].match(this.rules.item) || "";
const length = str.length;
let next = false;
let space: number;
let blockBullet: string;
let loose: boolean;
for (let i = 0; i < length; i++) {
let item = str[i];
// Remove the list item's bullet so it is seen as the next token.
space = item.length;
item = item.replace(/^ *([*+-]|\d+\.) +/, "");
// Outdent whatever the list item contains. Hacky.
if (item.indexOf("\n ") !== -1) {
space -= item.length;
item = !this.options.pedantic
? item.replace(new RegExp("^ {1," + space + "}", "gm"), "")
: item.replace(/^ {1,4}/gm, "");
}
// Determine whether the next list item belongs here.
// Backpedal if it does not belong in this list.
if (this.options.smartLists && i !== length - 1) {
const bb = this.staticThis.getRulesBase().bullet.exec(str[i + 1]);
blockBullet = bb ? bb[0] : "";
if (
bull !== blockBullet &&
!(bull.length > 1 && blockBullet.length > 1)
) {
nextPart = (str.slice(i + 1) as string[]).join("\n") + nextPart;
i = length - 1;
}
}
// Determine whether item is loose or not.
// Use: /(^|\n)(?! )[^\n]+\n\n(?!\s*$)/
// for discount behavior.
loose = next || /\n\n(?!\s*$)/.test(item);
if (i !== length - 1) {
next = item.charAt(item.length - 1) === "\n";
if (!loose) {
loose = next;
}
}
this.tokens.push(
{
type: loose ? TokenType.looseItemStart : TokenType.listItemStart,
},
);
// Recurse.
this.getTokens(item, false, isBlockQuote);
this.tokens.push({ type: TokenType.listItemEnd });
}
this.tokens.push({ type: TokenType.listEnd });
continue;
}
// html
if ((execArr = this.rules.html.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
const attr = execArr[1];
const isPre = attr === "pre" || attr === "script" || attr === "style";
this.tokens.push({
type: this.options.sanitize ? TokenType.paragraph : TokenType.html,
pre: !this.options.sanitizer && isPre,
text: execArr[0],
});
continue;
}
// def
if (top && (execArr = this.rules.def.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
this.links[execArr[1].toLowerCase()] = {
href: execArr[2],
title: execArr[3],
};
continue;
}
// table (gfm)
if (
top && this.hasRulesTables &&
(execArr = (this.rules as RulesBlockTables).table.exec(nextPart))
) {
nextPart = nextPart.substring(execArr[0].length);
const item: Token = {
type: TokenType.table,
header: execArr[1].replace(/^ *| *\| *$/g, "").split(/ *\| */),
align: execArr[2].replace(/^ *|\| *$/g, "").split(
/ *\| */,
) as Align[],
cells: [],
};
if (!item.align) throw ReferenceError;
for (let i = 0; i < item.align.length; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = "right";
} else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = "center";
} else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = "left";
} else {
item.align[i] = "";
}
}
const td = execArr[3].replace(/(?: *\| *)?\n$/, "").split("\n");
if (!item.cells) throw ReferenceError;
for (let i = 0; i < td.length; i++) {
item.cells[i] = td[i].replace(/^ *\| *| *\| *$/g, "").split(/ *\| */);
}
this.tokens.push(item);
continue;
}
// simple rules
if (this.staticThis.simpleRules.length) {
const simpleRules = this.staticThis.simpleRules;
for (let i = 0; i < simpleRules.length; i++) {
if ((execArr = simpleRules[i].exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
const type = "simpleRule" + (i + 1);
this.tokens.push({ type, execArr });
continue mainLoop;
}
}
}
// top-level paragraph
if (top && (execArr = this.rules.paragraph.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
if (execArr[1].slice(-1) === "\n") {
this.tokens.push({
type: TokenType.paragraph,
text: execArr[1].slice(0, -1),
});
} else {
this.tokens.push({
type: this.tokens.length > 0 ? TokenType.paragraph : TokenType.text,
text: execArr[1],
});
}
continue;
}
// text
// Top-level should never reach here.
if ((execArr = this.rules.text.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
this.tokens.push({ type: TokenType.text, text: execArr[0] });
continue;
}
if (nextPart) {
throw new Error(
"Infinite loop on byte: " + nextPart.charCodeAt(0) +
`, near text '${nextPart.slice(0, 30)}...'`,
);
}
}
return { tokens: this.tokens, links: this.links, meta: this.frontmatter };
}
}

43
markdown/src/extend-regexp.ts Normal file

@@ -0,0 +1,43 @@
/*
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
export class ExtendRegexp {
private source: string;
private flags: string;
constructor(regex: RegExp, flags: string = "") {
this.source = regex.source;
this.flags = flags;
}
/**
* Extend regular expression.
*
* @param groupName Regular expression or string used to find the group name.
* @param groupRegexp Regular expression or string to substitute for the named group.
*/
setGroup(groupName: RegExp | string, groupRegexp: RegExp | string): this {
let newRegexp: string = typeof groupRegexp == "string"
? groupRegexp
: groupRegexp.source;
newRegexp = newRegexp.replace(/(^|[^\[])\^/g, "$1");
// Extend regexp.
this.source = this.source.replace(groupName, newRegexp);
return this;
}
/**
* Returns a result of extending a regular expression.
*/
getRegexp(): RegExp {
return new RegExp(this.source, this.flags);
}
}

64
markdown/src/helpers.ts Normal file

@@ -0,0 +1,64 @@
/**
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import type { Replacements } from "./interfaces.ts";
const escapeTest = /[&<>"']/;
const escapeReplace = /[&<>"']/g;
const replacements: Replacements = {
"&": "&amp;",
"<": "&lt;",
">": "&gt;",
'"': "&quot;",
// tslint:disable-next-line:quotemark
"'": "&#39;",
};
const escapeTestNoEncode = /[<>"']|&(?!#?\w+;)/;
const escapeReplaceNoEncode = /[<>"']|&(?!#?\w+;)/g;
export function escape(html: string, encode?: boolean) {
if (encode) {
if (escapeTest.test(html)) {
return html.replace(escapeReplace, (ch: string) => replacements[ch]);
}
} else {
if (escapeTestNoEncode.test(html)) {
return html.replace(
escapeReplaceNoEncode,
(ch: string) => replacements[ch]
);
}
}
return html;
}
export function unescape(html: string) {
// Explicitly match decimal, hex, and named HTML entities
return html.replace(/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/gi, function (
_,
n
) {
n = n.toLowerCase();
if (n === "colon") {
return ":";
}
if (n.charAt(0) === "#") {
return n.charAt(1) === "x"
? String.fromCharCode(parseInt(n.substring(2), 16))
: String.fromCharCode(+n.substring(1));
}
return "";
});
}

419
markdown/src/inline-lexer.ts Normal file

@@ -0,0 +1,419 @@
/**
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import { ExtendRegexp } from "./extend-regexp.ts";
import type {
Link,
Links,
MarkedOptions,
RulesInlineBase,
RulesInlineBreaks,
RulesInlineCallback,
RulesInlineGfm,
RulesInlinePedantic,
} from "./interfaces.ts";
import { Marked } from "./marked.ts";
import { Renderer } from "./renderer.ts";
/**
* Inline Lexer & Compiler.
*/
export class InlineLexer {
protected static rulesBase: RulesInlineBase;
/**
* Pedantic Inline Grammar.
*/
protected static rulesPedantic: RulesInlinePedantic;
/**
* GFM Inline Grammar
*/
protected static rulesGfm: RulesInlineGfm;
/**
* GFM + Line Breaks Inline Grammar.
*/
protected static rulesBreaks: RulesInlineBreaks;
protected rules!:
| RulesInlineBase
| RulesInlinePedantic
| RulesInlineGfm
| RulesInlineBreaks;
protected renderer: Renderer;
protected inLink!: boolean;
protected hasRulesGfm!: boolean;
protected ruleCallbacks!: RulesInlineCallback[];
constructor(
protected staticThis: typeof InlineLexer,
protected links: Links,
protected options: MarkedOptions = Marked.options,
renderer?: Renderer
) {
this.renderer =
renderer || this.options.renderer || new Renderer(this.options);
if (!this.links) {
throw new Error(`InlineLexer requires 'links' parameter.`);
}
this.setRules();
}
/**
* Static Lexing/Compiling Method.
*/
static output(src: string, links: Links, options: MarkedOptions): string {
const inlineLexer = new this(this, links, options);
return inlineLexer.output(src);
}
protected static getRulesBase(): RulesInlineBase {
if (this.rulesBase) {
return this.rulesBase;
}
/**
* Inline-Level Grammar.
*/
const base: RulesInlineBase = {
escape: /^\\([\\`*{}\[\]()#+\-.!_>])/,
autolink: /^<([^ <>]+(@|:\/)[^ <>]+)>/,
tag: /^<!--[\s\S]*?-->|^<\/?\w+(?:"[^"]*"|'[^']*'|[^<'">])*?>/,
link: /^!?\[(inside)\]\(href\)/,
reflink: /^!?\[(inside)\]\s*\[([^\]]*)\]/,
nolink: /^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]/,
strong: /^__([\s\S]+?)__(?!_)|^\*\*([\s\S]+?)\*\*(?!\*)/,
em: /^\b_((?:[^_]|__)+?)_\b|^\*((?:\*\*|[\s\S])+?)\*(?!\*)/,
code: /^(`+)([\s\S]*?[^`])\1(?!`)/,
br: /^ {2,}\n(?!\s*$)/,
text: /^[\s\S]+?(?=[\\<!\[_*`]| {2,}\n|$)/,
_inside: /(?:\[[^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*/,
_href: /\s*<?([\s\S]*?)>?(?:\s+['"]([\s\S]*?)['"])?\s*/,
};
base.link = new ExtendRegexp(base.link)
.setGroup("inside", base._inside)
.setGroup("href", base._href)
.getRegexp();
base.reflink = new ExtendRegexp(base.reflink)
.setGroup("inside", base._inside)
.getRegexp();
return (this.rulesBase = base);
}
protected static getRulesPedantic(): RulesInlinePedantic {
if (this.rulesPedantic) {
return this.rulesPedantic;
}
return (this.rulesPedantic = {
...this.getRulesBase(),
...{
strong: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
em: /^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/,
},
});
}
protected static getRulesGfm(): RulesInlineGfm {
if (this.rulesGfm) {
return this.rulesGfm;
}
const base = this.getRulesBase();
const escape = new ExtendRegexp(base.escape)
.setGroup("])", "~|])")
.getRegexp();
const text = new ExtendRegexp(base.text)
.setGroup("]|", "~]|")
.setGroup("|", "|https?://|")
.getRegexp();
return (this.rulesGfm = {
...base,
...{
escape,
url: /^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])/,
del: /^~~(?=\S)([\s\S]*?\S)~~/,
text,
},
});
}
protected static getRulesBreaks(): RulesInlineBreaks {
if (this.rulesBreaks) {
return this.rulesBreaks;
}
const inline = this.getRulesGfm();
const gfm = this.getRulesGfm();
return (this.rulesBreaks = {
...gfm,
...{
br: new ExtendRegexp(inline.br).setGroup("{2,}", "*").getRegexp(),
text: new ExtendRegexp(gfm.text).setGroup("{2,}", "*").getRegexp(),
},
});
}
protected setRules() {
if (this.options.gfm) {
if (this.options.breaks) {
this.rules = this.staticThis.getRulesBreaks();
} else {
this.rules = this.staticThis.getRulesGfm();
}
} else if (this.options.pedantic) {
this.rules = this.staticThis.getRulesPedantic();
} else {
this.rules = this.staticThis.getRulesBase();
}
this.hasRulesGfm = (this.rules as RulesInlineGfm).url !== undefined;
}
/**
* Lexing/Compiling.
*/
output(nextPart: string): string {
let execArr: RegExpExecArray | null;
let out = "";
while (nextPart) {
// escape
if ((execArr = this.rules.escape.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
out += execArr[1];
continue;
}
// autolink
if ((execArr = this.rules.autolink.exec(nextPart))) {
let text: string;
let href: string;
nextPart = nextPart.substring(execArr[0].length);
if (!this.options.escape) throw ReferenceError;
if (execArr[2] === "@") {
text = this.options.escape(
execArr[1].charAt(6) === ":"
? this.mangle(execArr[1].substring(7))
: this.mangle(execArr[1])
);
href = this.mangle("mailto:") + text;
} else {
text = this.options.escape(execArr[1]);
href = text;
}
out += this.renderer.link(href, "", text);
continue;
}
// url (gfm)
if (
!this.inLink &&
this.hasRulesGfm &&
(execArr = (this.rules as RulesInlineGfm).url.exec(nextPart))
) {
if (!this.options.escape) throw ReferenceError;
let text: string;
let href: string;
nextPart = nextPart.substring(execArr[0].length);
text = this.options.escape(execArr[1]);
href = text;
out += this.renderer.link(href, "", text);
continue;
}
// tag
if ((execArr = this.rules.tag.exec(nextPart))) {
if (!this.inLink && /^<a /i.test(execArr[0])) {
this.inLink = true;
} else if (this.inLink && /^<\/a>/i.test(execArr[0])) {
this.inLink = false;
}
nextPart = nextPart.substring(execArr[0].length);
if (!this.options.escape) throw ReferenceError;
out += this.options.sanitize
? this.options.sanitizer
? this.options.sanitizer(execArr[0])
: this.options.escape(execArr[0])
: execArr[0];
continue;
}
// link
if ((execArr = this.rules.link.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
this.inLink = true;
out += this.outputLink(execArr, {
href: execArr[2],
title: execArr[3],
});
this.inLink = false;
continue;
}
// reflink, nolink
if (
(execArr = this.rules.reflink.exec(nextPart)) ||
(execArr = this.rules.nolink.exec(nextPart))
) {
nextPart = nextPart.substring(execArr[0].length);
const keyLink = (execArr[2] || execArr[1]).replace(/\s+/g, " ");
const link = this.links[keyLink.toLowerCase()];
if (!link || !link.href) {
out += execArr[0].charAt(0);
nextPart = execArr[0].substring(1) + nextPart;
continue;
}
this.inLink = true;
out += this.outputLink(execArr, link);
this.inLink = false;
continue;
}
// strong
if ((execArr = this.rules.strong.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
out += this.renderer.strong(this.output(execArr[2] || execArr[1]));
continue;
}
// em
if ((execArr = this.rules.em.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
out += this.renderer.em(this.output(execArr[2] || execArr[1]));
continue;
}
// code
if ((execArr = this.rules.code.exec(nextPart))) {
if (!this.options.escape) throw ReferenceError;
nextPart = nextPart.substring(execArr[0].length);
out += this.renderer.codespan(
this.options.escape(execArr[2].trim(), true)
);
continue;
}
// br
if ((execArr = this.rules.br.exec(nextPart))) {
nextPart = nextPart.substring(execArr[0].length);
out += this.renderer.br();
continue;
}
// del (gfm)
if (
this.hasRulesGfm &&
(execArr = (this.rules as RulesInlineGfm).del.exec(nextPart))
) {
nextPart = nextPart.substring(execArr[0].length);
out += this.renderer.del(this.output(execArr[1]));
continue;
}
// text
if ((execArr = this.rules.text.exec(nextPart))) {
if (!this.options.escape) throw ReferenceError;
nextPart = nextPart.substring(execArr[0].length);
out += this.renderer.text(
this.options.escape(this.smartypants(execArr[0]))
);
continue;
}
if (nextPart) {
throw new Error("Infinite loop on byte: " + nextPart.charCodeAt(0));
}
}
return out;
}
/**
* Compile Link.
*/
protected outputLink(execArr: RegExpExecArray, link: Link) {
if (!this.options.escape) throw ReferenceError;
const href = this.options.escape(link.href);
const title = link.title ? this.options.escape(link.title) : null;
return execArr[0].charAt(0) !== "!"
? this.renderer.link(href, title || "", this.output(execArr[1]))
: this.renderer.image(href, title || "", this.options.escape(execArr[1]));
}
/**
* Smartypants Transformations.
*/
protected smartypants(text: string) {
if (!this.options.smartypants) {
return text;
}
return (
text
// em-dashes
.replace(/---/g, "\u2014")
// en-dashes
.replace(/--/g, "\u2013")
// opening singles
.replace(/(^|[-\u2014/(\[{"\s])'/g, "$1\u2018")
// closing singles & apostrophes
.replace(/'/g, "\u2019")
// opening doubles
.replace(/(^|[-\u2014/(\[{\u2018\s])"/g, "$1\u201c")
// closing doubles
.replace(/"/g, "\u201d")
// ellipses
.replace(/\.{3}/g, "\u2026")
);
}
/**
* Mangle Links.
*/
protected mangle(text: string) {
if (!this.options.mangle) {
return text;
}
let out = "";
const length = text.length;
for (let i = 0; i < length; i++) {
let str: string = "";
if (Math.random() > 0.5) {
str = "x" + text.charCodeAt(i).toString(16);
}
out += "&#" + str + ";";
}
return out;
}
}

196
markdown/src/interfaces.ts Normal file

@@ -0,0 +1,196 @@
/**
* @license
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import { escape, unescape } from "./helpers.ts";
import type { Renderer } from "./renderer.ts";
export interface Obj {
[key: string]: any;
}
export interface RulesBlockBase {
newline: RegExp;
code: RegExp;
hr: RegExp;
heading: RegExp;
lheading: RegExp;
blockquote: RegExp;
list: RegExp;
html: RegExp;
def: RegExp;
paragraph: RegExp;
text: RegExp;
bullet: RegExp;
/**
* List item (<li>).
*/
item: RegExp;
}
export interface RulesBlockGfm extends RulesBlockBase {
fences: RegExp;
}
export interface RulesBlockTables extends RulesBlockGfm {
nptable: RegExp;
table: RegExp;
}
export interface Link {
href: string;
title: string;
}
export interface Links {
[key: string]: Link;
}
export enum TokenType {
space = 1,
text,
paragraph,
heading,
listStart,
listEnd,
looseItemStart,
looseItemEnd,
listItemStart,
listItemEnd,
blockquoteStart,
blockquoteEnd,
code,
table,
html,
hr,
}
export type Align = "center" | "left" | "right" | "";
export interface Token {
type: number | string;
text?: string;
lang?: string;
depth?: number;
header?: string[];
align?: Align[];
cells?: string[][];
ordered?: boolean;
pre?: boolean;
escaped?: boolean;
execArr?: RegExpExecArray;
/**
* Used for debugging. Identifies the line number in the resulting HTML file.
*/
line?: number;
}
export interface RulesInlineBase {
escape: RegExp;
autolink: RegExp;
tag: RegExp;
link: RegExp;
reflink: RegExp;
nolink: RegExp;
strong: RegExp;
em: RegExp;
code: RegExp;
br: RegExp;
text: RegExp;
_inside: RegExp;
_href: RegExp;
}
export interface RulesInlinePedantic extends RulesInlineBase {}
/**
* GFM Inline Grammar
*/
export interface RulesInlineGfm extends RulesInlineBase {
url: RegExp;
del: RegExp;
}
export interface RulesInlineBreaks extends RulesInlineGfm {}
export class MarkedOptions {
gfm?: boolean = true;
tables?: boolean = true;
breaks?: boolean = false;
pedantic?: boolean = false;
sanitize?: boolean = false;
sanitizer?: (text: string) => string;
mangle?: boolean = false;
smartLists?: boolean = false;
silent?: boolean = false;
/**
* @param code The section of code to pass to the highlighter.
* @param lang The programming language specified in the code block.
*/
highlight?: (code: string, lang?: string) => string;
langPrefix?: string = "lang-";
smartypants?: boolean = false;
headerPrefix?: string = "";
/**
* An object containing functions to render tokens to HTML. Default: `new Renderer()`
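*
* A sketch of a custom renderer (the class name and overridden method are only illustrative):
*
* ```ts
* class MyRenderer extends Renderer {
*   codespan(text: string): string {
*     return '<code class="inline">' + text + "</code>";
*   }
* }
* Marked.setOptions({ renderer: new MyRenderer() });
* ```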
*/
renderer?: Renderer;
/**
* Self-close the tags for void elements (&lt;br/&gt;, &lt;img/&gt;, etc.)
* with a "/" as required by XHTML.
*/
xhtml?: boolean = false;
/**
* The function that will be used to escape HTML entities.
* By default, the inner helper is used.
*/
escape?: (html: string, encode?: boolean) => string = escape;
/**
* The function that will be used to unescape HTML entities.
* By default, the inner helper is used.
*/
unescape?: (html: string) => string = unescape;
/**
* If set to `true`, inline text will not be wrapped in a paragraph.
*
* ```ts
* // isNoP == false
* Marked.parse('some text'); // returns '<p>some text</p>'
*
* Marked.setOptions({isNoP: true});
*
* Marked.parse('some text'); // returns 'some text'
* ```
*/
isNoP?: boolean;
}
export interface LexerReturns {
tokens: Token[];
links: Links;
meta: Obj;
}
export interface Parsed {
content: string;
meta: Obj;
}
export interface DebugReturns extends LexerReturns {
result: string;
}
export interface Replacements {
[key: string]: string;
}
export interface RulesInlineCallback {
regexp?: RegExp;
condition(): RegExp;
tokenize(execArr: RegExpExecArray): void;
}
export type SimpleRenderer = (execArr?: RegExpExecArray) => string;

154
markdown/src/marked.ts Normal file

@@ -0,0 +1,154 @@
/**
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import { BlockLexer } from "./block-lexer.ts";
import {
DebugReturns,
LexerReturns,
Links,
MarkedOptions,
SimpleRenderer,
Token,
TokenType,
Parsed
} from "./interfaces.ts";
import { Parser } from "./parser.ts";
export class Marked {
static options = new MarkedOptions();
protected static simpleRenderers: SimpleRenderer[] = [];
protected static parsed: Parsed = {
content: "",
meta: {},
};
/**
* Merges the default options with options that will be set.
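*
* For example (the option values here are only illustrative):
*
* ```ts
* Marked.setOptions({ breaks: true, smartypants: true });
* ```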
*
* @param options Hash of options.
*/
static setOptions(options: MarkedOptions) {
Object.assign(this.options, options);
return this;
}
/**
* Setting simple block rule.
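*
* A usage sketch (the rule pattern and wrapper markup are only illustrative):
*
* ```ts
* Marked.setBlockRule(/^@@@\n([\s\S]+?)\n@@@/, (execArr) => {
*   return `<div class="custom-block">${execArr![1]}</div>`;
* });
* ```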
*/
static setBlockRule(regexp: RegExp, renderer: SimpleRenderer = () => "") {
BlockLexer.simpleRules.push(regexp);
this.simpleRenderers.push(renderer);
return this;
}
/**
* Accepts Markdown text and returns an object containing HTML and metadata.
*
* @param src String of markdown source to be compiled.
* @param options Hash of options. They replace, but do not merge with, the default options.
* If you want merging, you can do this via `Marked.setOptions()`.
*/
static parse(src: string, options: MarkedOptions = this.options): Parsed {
try {
const { tokens, links, meta } = this.callBlockLexer(src, options);
this.parsed.content = this.callParser(tokens, links, options);
this.parsed.meta = meta;
return this.parsed;
} catch (e) {
this.parsed.content = this.callMe(e);
return this.parsed;
}
}
/**
* Accepts Markdown text and returns an object with the text in HTML format,
* plus the tokens and links from `BlockLexer.lex()`.
*
* @param src String of markdown source to be compiled.
* @param options Hash of options. They replace, but do not merge with, the default options.
* If you want merging, you can do this via `Marked.setOptions()`.
*/
static debug(
src: string,
options: MarkedOptions = this.options,
): DebugReturns {
const { tokens, links, meta } = this.callBlockLexer(src, options);
let origin = tokens.slice();
const parser = new Parser(options);
parser.simpleRenderers = this.simpleRenderers;
const result = parser.debug(links, tokens);
/**
* Translates each token type into a readable form
* and moves the `line` field to the first position in the token object.
*/
origin = origin.map((token) => {
token.type = (TokenType as any)[token.type] || token.type;
const line = token.line;
delete token.line;
if (line) {
return { ...{ line }, ...token };
} else {
return token;
}
});
return { tokens: origin, links, meta, result};
}
protected static callBlockLexer(
src: string = "",
options?: MarkedOptions,
): LexerReturns {
if (typeof src != "string") {
throw new Error(
`Expected that the 'src' parameter would have a 'string' type, got '${typeof src}'`,
);
}
// Preprocessing.
src = src
.replace(/\r\n|\r/g, "\n")
.replace(/\t/g, " ")
.replace(/\u00a0/g, " ")
.replace(/\u2424/g, "\n")
.replace(/^ +$/gm, "");
return BlockLexer.lex(src, options, true);
}
protected static callParser(
tokens: Token[],
links: Links,
options?: MarkedOptions,
): string {
if (this.simpleRenderers.length) {
const parser = new Parser(options);
parser.simpleRenderers = this.simpleRenderers;
return parser.parse(links, tokens);
} else {
return Parser.parse(tokens, links, options);
}
}
protected static callMe(err: Error) {
err.message +=
"\nPlease report this to https://github.com/ts-stack/markdown";
if (this.options.silent && this.options.escape) {
return "<p>An error occured:</p><pre>" +
this.options.escape(err.message + "", true) + "</pre>";
}
throw err;
}
}

247
markdown/src/parser.ts Normal file

@@ -0,0 +1,247 @@
/**
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import { InlineLexer } from "./inline-lexer.ts";
import {
Links,
MarkedOptions,
SimpleRenderer,
Token,
TokenType,
} from "./interfaces.ts";
import { Marked } from "./marked.ts";
import { Renderer } from "./renderer.ts";
/**
* Parsing & Compiling.
*/
export class Parser {
simpleRenderers: SimpleRenderer[] = [];
protected tokens: Token[];
protected token: Token | undefined;
protected inlineLexer!: InlineLexer;
protected options: MarkedOptions;
protected renderer: Renderer;
protected line: number = 0;
constructor(options?: MarkedOptions) {
this.tokens = [];
this.token = undefined;
this.options = options || Marked.options;
this.renderer = this.options.renderer || new Renderer(this.options);
}
static parse(tokens: Token[], links: Links, options?: MarkedOptions): string {
const parser = new this(options);
return parser.parse(links, tokens);
}
parse(links: Links, tokens: Token[]) {
this.inlineLexer = new InlineLexer(
InlineLexer,
links,
this.options,
this.renderer,
);
this.tokens = tokens.reverse();
let out = "";
while (this.next()) {
out += this.tok();
}
return out;
}
debug(links: Links, tokens: Token[]) {
this.inlineLexer = new InlineLexer(
InlineLexer,
links,
this.options,
this.renderer,
);
this.tokens = tokens.reverse();
let out = "";
while (this.next()) {
const outToken: string = this.tok() || "";
if (!this.token) throw ReferenceError;
this.token.line = this.line += outToken.split("\n").length - 1;
out += outToken;
}
return out;
}
protected next() {
return (this.token = this.tokens.pop());
}
protected getNextElement() {
return this.tokens[this.tokens.length - 1];
}
protected parseText() {
if (!this.token) throw ReferenceError;
let body = this.token.text;
let nextElement: Token;
while (
(nextElement = this.getNextElement()) &&
nextElement.type == TokenType.text
) {
body += "\n" + this.next()?.text;
}
return this.inlineLexer.output(body || "");
}
protected tok() {
if (!this.token) throw ReferenceError;
switch (this.token.type) {
case TokenType.space: {
return "";
}
case TokenType.paragraph: {
return this.renderer.paragraph(
this.inlineLexer.output(this.token.text || ""),
);
}
case TokenType.text: {
if (this.options.isNoP) {
return this.parseText();
} else {
return this.renderer.paragraph(this.parseText());
}
}
case TokenType.heading: {
return this.renderer.heading(
this.inlineLexer.output(this.token.text || ""),
this.token.depth || 0,
this.token.text || "",
);
}
case TokenType.listStart: {
let body = "";
const ordered = this.token.ordered;
while (this.next()?.type != TokenType.listEnd) {
body += this.tok();
}
return this.renderer.list(body, ordered);
}
case TokenType.listItemStart: {
let body = "";
while (this.next()?.type != TokenType.listItemEnd) {
body += this.token.type == (TokenType.text as any)
? this.parseText()
: this.tok();
}
return this.renderer.listitem(body);
}
case TokenType.looseItemStart: {
let body = "";
while (this.next()?.type != TokenType.listItemEnd) {
body += this.tok();
}
return this.renderer.listitem(body);
}
case TokenType.code: {
return this.renderer.code(
this.token.text || "",
this.token.lang,
this.token.escaped,
);
}
case TokenType.table: {
let header = "";
let body = "";
let cell;
if (
!this.token || !this.token.header || !this.token.align ||
!this.token.cells
) {
throw ReferenceError;
}
// header
cell = "";
for (let i = 0; i < this.token.header.length; i++) {
const flags = { header: true, align: this.token.align[i] };
const out = this.inlineLexer.output(this.token.header[i]);
cell += this.renderer.tablecell(out, flags);
}
header += this.renderer.tablerow(cell);
for (const row of this.token.cells) {
cell = "";
for (let j = 0; j < row.length; j++) {
cell += this.renderer.tablecell(this.inlineLexer.output(row[j]), {
header: false,
align: this.token.align[j],
});
}
body += this.renderer.tablerow(cell);
}
return this.renderer.table(header, body);
}
case TokenType.blockquoteStart: {
let body = "";
while (this.next()?.type != TokenType.blockquoteEnd) {
body += this.tok();
}
return this.renderer.blockquote(body);
}
case TokenType.hr: {
return this.renderer.hr();
}
case TokenType.html: {
const html = !this.token.pre && !this.options.pedantic
? this.inlineLexer.output(this.token.text || "")
: this.token.text;
return this.renderer.html(html || "");
}
default: {
if (this.simpleRenderers.length) {
for (let i = 0; i < this.simpleRenderers.length; i++) {
if (this.token.type == "simpleRule" + (i + 1)) {
return this.simpleRenderers[i].call(
this.renderer,
this.token.execArr,
);
}
}
}
const errMsg = `Token with "${this.token.type}" type was not found.`;
if (this.options.silent) {
console.log(errMsg);
} else {
throw new Error(errMsg);
}
}
}
}
}

179
markdown/src/renderer.ts Normal file

@@ -0,0 +1,179 @@
/**
* @license
*
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*
* Copyright (c) 2018, Костя Третяк. (MIT Licensed)
* https://github.com/ts-stack/markdown
*/
import type { Align, MarkedOptions } from "./interfaces.ts";
import { Marked } from "./marked.ts";
export class Renderer {
protected options: MarkedOptions;
constructor(options?: MarkedOptions) {
this.options = options || Marked.options;
}
code(code: string, lang?: string, escaped?: boolean): string {
if (this.options.highlight) {
const out = this.options.highlight(code, lang);
if (out != null && out !== code) {
escaped = true;
code = out;
}
}
if (!this.options.escape) throw ReferenceError;
if (!lang) {
return (
"\n<pre><code>" +
(escaped ? code : this.options.escape(code, true)) +
"\n</code></pre>\n"
);
}
return (
'\n<pre><code class="' +
this.options.langPrefix +
this.options.escape(lang, true) +
'">' +
(escaped ? code : this.options.escape(code, true)) +
"\n</code></pre>\n"
);
}
blockquote(quote: string): string {
return "<blockquote>\n" + quote + "</blockquote>\n";
}
html(html: string): string {
return html;
}
heading(text: string, level: number, raw: string): string {
const id: string =
this.options.headerPrefix + raw.toLowerCase().replace(/[^\w]+/g, "-");
return `<h${level} id="${id}">${text}</h${level}>\n`;
}
hr(): string {
return this.options.xhtml ? "<hr/>\n" : "<hr>\n";
}
list(body: string, ordered?: boolean): string {
const type = ordered ? "ol" : "ul";
return `\n<${type}>\n${body}</${type}>\n`;
}
listitem(text: string): string {
return "<li>" + text + "</li>\n";
}
paragraph(text: string): string {
return "<p>" + text + "</p>\n";
}
table(header: string, body: string): string {
return `
<table>
<thead>
${header}</thead>
<tbody>
${body}</tbody>
</table>
`;
}
tablerow(content: string): string {
return "<tr>\n" + content + "</tr>\n";
}
tablecell(
content: string,
flags: { header?: boolean; align?: Align }
): string {
const type = flags.header ? "th" : "td";
const tag = flags.align
? "<" + type + ' style="text-align:' + flags.align + '">'
: "<" + type + ">";
return tag + content + "</" + type + ">\n";
}
// *** Inline level renderer methods. ***
strong(text: string): string {
return "<strong>" + text + "</strong>";
}
em(text: string): string {
return "<em>" + text + "</em>";
}
codespan(text: string): string {
return "<code>" + text + "</code>";
}
br(): string {
return this.options.xhtml ? "<br/>" : "<br>";
}
del(text: string): string {
return "<del>" + text + "</del>";
}
link(href: string, title: string, text: string): string {
if (this.options.sanitize) {
let prot: string;
if (!this.options.unescape) throw ReferenceError;
try {
prot = decodeURIComponent(this.options.unescape(href))
.replace(/[^\w:]/g, "")
.toLowerCase();
} catch (e) {
return text;
}
if (
prot.indexOf("javascript:") === 0 ||
prot.indexOf("vbscript:") === 0 ||
prot.indexOf("data:") === 0
) {
return text;
}
}
let out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
}
out += ">" + text + "</a>";
return out;
}
image(href: string, title: string, text: string): string {
let out = '<img src="' + href + '" alt="' + text + '"';
if (title) {
out += ' title="' + title + '"';
}
out += this.options.xhtml ? "/>" : ">";
return out;
}
text(text: string): string {
return text;
}
}