fix: Convert to ESM (#2227)

BREAKING CHANGE:

- Default export removed. Use `import { marked } from 'marked'` or `const { marked } = require('marked')` instead (see the migration sketch below).
- `/lib/marked.js` removed. Load `/marked.min.js` in a `<script>` tag instead.
- When using marked in a `<script>` tag, call `marked.parse(...)` instead of `marked(...)`.
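
For reference, a minimal migration sketch assembled from the notes above (the sample markdown string is only illustrative):

```js
// ESM – the default export is gone, use the named export
import { marked } from 'marked';

// CommonJS alternative (shown commented out – a single file uses one module style):
// const { marked } = require('marked');

// The docs in this change call marked.parse(...) rather than marked(...)
const html = marked.parse('# Marked in Node.js\n\nRendered by **marked**.');
console.log(html);
```

In the browser, the bundled `marked.min.js` still exposes a global `marked` object, so existing script-tag code only needs `marked(...)` changed to `marked.parse(...)`, as the README example in this diff shows.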
pull/2270/head
Ben McCann · 2021-11-02 07:32:17 -07:00 · committed by GitHub
parent 911dc9c8b1
commit 4afb228d95
48 changed files with 5931 additions and 3631 deletions

View File

@ -1,9 +1,9 @@
all: all:
@cp lib/marked.js marked.js @cp lib/marked.cjs marked.cjs
@uglifyjs --comments '/\*[^\0]+?Copyright[^\0]+?\*/' -o marked.min.js lib/marked.js @uglifyjs --comments '/\*[^\0]+?Copyright[^\0]+?\*/' -o marked.min.js lib/marked.cjs
clean: clean:
@rm marked.js @rm marked.cjs
@rm marked.min.js @rm marked.min.js
bench: bench:

View File

@ -69,7 +69,7 @@ $ cat hello.html
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script> <script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
<script> <script>
document.getElementById('content').innerHTML = document.getElementById('content').innerHTML =
marked('# Marked in the browser\n\nRendered by **marked**.'); marked.parse('# Marked in the browser\n\nRendered by **marked**.');
</script> </script>
</body> </body>
</html> </html>

bin/marked → bin/marked.js · Executable file → Normal file (125 changed lines)
View File

@ -5,16 +5,17 @@
* Copyright (c) 2011-2013, Christopher Jeffrey (MIT License) * Copyright (c) 2011-2013, Christopher Jeffrey (MIT License)
*/ */
const fs = require('fs'), import { promises } from 'fs';
path = require('path'), import { marked } from '../lib/marked.esm.js';
marked = require('../');
const { readFile, writeFile } = promises;
/** /**
* Man Page * Man Page
*/ */
function help() { async function help() {
const spawn = require('child_process').spawn; const { spawn } = await import('child_process');
const options = { const options = {
cwd: process.cwd(), cwd: process.cwd(),
@ -23,16 +24,18 @@ function help() {
stdio: 'inherit' stdio: 'inherit'
}; };
spawn('man', [path.resolve(__dirname, '../man/marked.1')], options) const { dirname, resolve } = await import('path');
.on('error', function() { const { fileURLToPath } = await import('url');
fs.readFile(path.resolve(__dirname, '../man/marked.1.txt'), 'utf8', function(err, data) { const __dirname = dirname(fileURLToPath(import.meta.url));
if (err) throw err; spawn('man', [resolve(__dirname, '../man/marked.1')], options)
console.log(data); .on('error', async() => {
}); console.log(await readFile(resolve(__dirname, '../man/marked.1.txt'), 'utf8'));
}); });
} }
function version() { async function version() {
const { createRequire } = await import('module');
const require = createRequire(import.meta.url);
const pkg = require('../package.json'); const pkg = require('../package.json');
console.log(pkg.version); console.log(pkg.version);
} }
@ -41,15 +44,15 @@ function version() {
* Main * Main
*/ */
function main(argv, callback) { async function main(argv) {
const files = [], const files = [];
options = {}; const options = {};
let input, let input;
output, let output;
string, let string;
arg, let arg;
tokens, let tokens;
opt; let opt;
function getarg() { function getarg() {
let arg = argv.shift(); let arg = argv.shift();
@ -82,8 +85,6 @@ function main(argv, callback) {
while (argv.length) { while (argv.length) {
arg = getarg(); arg = getarg();
switch (arg) { switch (arg) {
case '--test':
return require('../test').main(process.argv.slice());
case '-o': case '-o':
case '--output': case '--output':
output = argv.shift(); output = argv.shift();
@ -102,10 +103,10 @@ function main(argv, callback) {
break; break;
case '-h': case '-h':
case '--help': case '--help':
return help(); return await help();
case '-v': case '-v':
case '--version': case '--version':
return version(); return await version();
default: default:
if (arg.indexOf('--') === 0) { if (arg.indexOf('--') === 0) {
opt = camelize(arg.replace(/^--(no-)?/, '')); opt = camelize(arg.replace(/^--(no-)?/, ''));
@ -128,62 +129,57 @@ function main(argv, callback) {
} }
} }
function getData(callback) { async function getData() {
if (!input) { if (!input) {
if (files.length <= 2) { if (files.length <= 2) {
if (string) { if (string) {
return callback(null, string); return string;
} }
return getStdin(callback); return await getStdin();
} }
input = files.pop(); input = files.pop();
} }
return fs.readFile(input, 'utf8', callback); return await readFile(input, 'utf8');
} }
return getData(function(err, data) { const data = await getData();
if (err) return callback(err);
data = tokens const html = tokens
? JSON.stringify(marked.lexer(data, options), null, 2) ? JSON.stringify(marked.lexer(data, options), null, 2)
: marked(data, options); : marked(data, options);
if (!output) { if (output) {
process.stdout.write(data + '\n'); return await writeFile(output, data);
return callback(); }
}
return fs.writeFile(output, data, callback); process.stdout.write(html + '\n');
});
} }
/** /**
* Helpers * Helpers
*/ */
function getStdin(callback) { function getStdin() {
const stdin = process.stdin; return new Promise((resolve, reject) => {
let buff = ''; const stdin = process.stdin;
let buff = '';
stdin.setEncoding('utf8'); stdin.setEncoding('utf8');
stdin.on('data', function(data) { stdin.on('data', function(data) {
buff += data; buff += data;
}); });
stdin.on('error', function(err) { stdin.on('error', function(err) {
return callback(err); reject(err);
}); });
stdin.on('end', function() { stdin.on('end', function() {
return callback(null, buff); resolve(buff);
}); });
try {
stdin.resume(); stdin.resume();
} catch (e) { });
callback(e);
}
} }
function camelize(text) { function camelize(text) {
@ -204,12 +200,9 @@ function handleError(err) {
* Expose / Entry Point * Expose / Entry Point
*/ */
if (!module.parent) { process.title = 'marked';
process.title = 'marked'; main(process.argv.slice()).then(code => {
main(process.argv.slice(), function(err, code) { process.exit(code || 0);
if (err) return handleError(err); }).catch(err => {
return process.exit(code || 0); handleError(err);
}); });
} else {
module.exports = main;
}

View File

@ -10,7 +10,7 @@
"markup", "markup",
"html" "html"
], ],
"main": "lib/marked.js", "main": "lib/marked.cjs",
"license": "MIT", "license": "MIT",
"ignore": [ "ignore": [
"**/.*", "**/.*",

View File

@ -1,8 +1,10 @@
const { mkdir, rmdir, readdir, stat, readFile, writeFile, copyFile } = require('fs').promises; import { promises } from 'fs';
const { join, dirname, parse, format } = require('path'); import { join, dirname, parse, format } from 'path';
const marked = require('./'); import { parse as marked } from './lib/marked.esm.js';
const { highlight, highlightAuto } = require('highlight.js'); import { HighlightJS } from 'highlight.js';
const titleize = require('titleize'); import titleize from 'titleize';
const { mkdir, rm, readdir, stat, readFile, writeFile, copyFile } = promises;
const { highlight, highlightAuto } = HighlightJS;
const cwd = process.cwd(); const cwd = process.cwd();
const inputDir = join(cwd, 'docs'); const inputDir = join(cwd, 'docs');
const outputDir = join(cwd, 'public'); const outputDir = join(cwd, 'public');
@ -12,7 +14,7 @@ const getTitle = str => str === 'INDEX' ? '' : titleize(str.replace(/_/g, ' '))
async function init() { async function init() {
console.log('Cleaning up output directory ' + outputDir); console.log('Cleaning up output directory ' + outputDir);
await rmdir(outputDir, { recursive: true }); await rm(outputDir, { force: true, recursive: true });
await mkdir(outputDir); await mkdir(outputDir);
await copyFile(join(cwd, 'LICENSE.md'), join(inputDir, 'LICENSE.md')); await copyFile(join(cwd, 'LICENSE.md'), join(inputDir, 'LICENSE.md'));
const tmpl = await readFile(templateFile, 'utf8'); const tmpl = await readFile(templateFile, 'utf8');

View File

@ -4,7 +4,7 @@
"repo": "markedjs/marked", "repo": "markedjs/marked",
"description": "A markdown parser built for speed", "description": "A markdown parser built for speed",
"keywords": ["markdown", "markup", "html"], "keywords": ["markdown", "markup", "html"],
"scripts": ["lib/marked.js"], "scripts": ["lib/marked.cjs"],
"main": "lib/marked.js", "main": "lib/marked.cjs",
"license": "MIT" "license": "MIT"
} }

View File

@ -56,7 +56,7 @@ $ marked -s "*hello world*"
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script> <script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
<script> <script>
document.getElementById('content').innerHTML = document.getElementById('content').innerHTML =
marked('# Marked in browser\n\nRendered by **marked**.'); marked.parse('# Marked in browser\n\nRendered by **marked**.');
</script> </script>
</body> </body>
</html> </html>
@ -65,8 +65,10 @@ $ marked -s "*hello world*"
**Node.js** **Node.js**
```js ```js
const marked = require("marked"); import { marked } from 'marked';
const html = marked('# Marked in Node.js\n\nRendered by **marked**.'); // or const { marked } = require('marked');
const html = marked.parse('# Marked in Node.js\n\nRendered by **marked**.');
``` ```
@ -79,7 +81,7 @@ We actively support the features of the following [Markdown flavors](https://git
| Flavor | Version | Status | | Flavor | Version | Status |
| :--------------------------------------------------------- | :------ | :----------------------------------------------------------------- | | :--------------------------------------------------------- | :------ | :----------------------------------------------------------------- |
| The original markdown.pl | -- | | | The original markdown.pl | -- | |
| [CommonMark](http://spec.commonmark.org/0.29/) | 0.29 | [Work in progress](https://github.com/markedjs/marked/issues/1202) | | [CommonMark](http://spec.commonmark.org/0.30/) | 0.30 | [Work in progress](https://github.com/markedjs/marked/issues/1202) |
| [GitHub Flavored Markdown](https://github.github.com/gfm/) | 0.29 | [Work in progress](https://github.com/markedjs/marked/issues/1202) | | [GitHub Flavored Markdown](https://github.github.com/gfm/) | 0.29 | [Work in progress](https://github.com/markedjs/marked/issues/1202) |
By supporting the above Markdown flavors, it's possible that Marked can help you use other flavors as well; however, these are not actively supported by the community. By supporting the above Markdown flavors, it's possible that Marked can help you use other flavors as well; however, these are not actively supported by the community.

View File

@ -1,7 +1,8 @@
## The `marked` function ## The `parse` function
```js ```js
marked(markdownString [,options] [,callback]) import { marked } from 'marked';
marked.parse(markdownString [,options] [,callback])
``` ```
|Argument |Type |Notes | |Argument |Type |Notes |
@ -14,7 +15,7 @@ marked(markdownString [,options] [,callback])
```js ```js
// Create reference instance // Create reference instance
const marked = require('marked'); import { marked } from 'marked';
// Set options // Set options
// `highlight` example uses https://highlightjs.org // `highlight` example uses https://highlightjs.org
@ -36,7 +37,7 @@ marked.setOptions({
}); });
// Compile // Compile
console.log(marked(markdownString)); console.log(marked.parse(markdownString));
``` ```
<h2 id="options">Options</h2> <h2 id="options">Options</h2>
@ -67,7 +68,7 @@ console.log(marked(markdownString));
You can parse inline markdown by running markdown through `marked.parseInline`. You can parse inline markdown by running markdown through `marked.parseInline`.
```js ```js
const blockHtml = marked('**strong** _em_'); const blockHtml = marked.parse('**strong** _em_');
console.log(blockHtml); // '<p><strong>strong</strong> <em>em</em></p>' console.log(blockHtml); // '<p><strong>strong</strong> <em>em</em></p>'
const inlineHtml = marked.parseInline('**strong** _em_'); const inlineHtml = marked.parseInline('**strong** _em_');
@ -87,7 +88,7 @@ marked.setOptions({
} }
}); });
marked(markdownString, (err, html) => { marked.parse(markdownString, (err, html) => {
console.log(html); console.log(html);
}); });
``` ```
@ -105,18 +106,18 @@ Marked can be run in a [worker thread](https://nodejs.org/api/worker_threads.htm
```js ```js
// markedWorker.js // markedWorker.js
const marked = require('marked'); import { marked } from 'marked';
const { parentPort } = require('worker_threads'); import { parentPort } from 'worker_threads';
parentPort.on('message', (markdownString) => { parentPort.on('message', (markdownString) => {
parentPort.postMessage(marked(markdownString)); parentPort.postMessage(marked.parse(markdownString));
}); });
``` ```
```js ```js
// index.js // index.js
const { Worker } = require('worker_threads'); import { Worker } from 'worker_threads';
const markedWorker = new Worker('./markedWorker.js'); const markedWorker = new Worker('./markedWorker.js');
const markedTimeout = setTimeout(() => { const markedTimeout = setTimeout(() => {
@ -144,7 +145,7 @@ importScripts('path/to/marked.min.js');
onmessage = (e) => { onmessage = (e) => {
const markdownString = e.data const markdownString = e.data
postMessage(marked(markdownString)); postMessage(marked.parse(markdownString));
}; };
``` ```

View File

@ -8,7 +8,7 @@ To champion the single-responsibility and open/closed principles, we have tried
```js ```js
const marked = require('marked'); import { marked } from 'marked';
marked.use({ marked.use({
pedantic: false, pedantic: false,
@ -69,7 +69,7 @@ Calling `marked.use()` to override the same function multiple times will give pr
```js ```js
// Create reference instance // Create reference instance
const marked = require('marked'); import { marked } from 'marked';
// Override function // Override function
const renderer = { const renderer = {
@ -89,7 +89,7 @@ const renderer = {
marked.use({ renderer }); marked.use({ renderer });
// Run marked // Run marked
console.log(marked('# heading+')); console.log(marked.parse('# heading+'));
``` ```
**Output:** **Output:**
@ -173,7 +173,7 @@ Calling `marked.use()` to override the same function multiple times will give pr
```js ```js
// Create reference instance // Create reference instance
const marked = require('marked'); import { marked } from 'marked';
// Override function // Override function
const tokenizer = { const tokenizer = {
@ -195,7 +195,7 @@ const tokenizer = {
marked.use({ tokenizer }); marked.use({ tokenizer });
// Run marked // Run marked
console.log(marked('$ latex code $\n\n` other code `')); console.log(marked.parse('$ latex code $\n\n` other code `'));
``` ```
**Output:** **Output:**
@ -264,7 +264,7 @@ The walkTokens function gets called with every token. Child tokens are called be
**Example:** Overriding heading tokens to start at h2. **Example:** Overriding heading tokens to start at h2.
```js ```js
const marked = require('marked'); import { marked } from 'marked';
// Override function // Override function
const walkTokens = (token) => { const walkTokens = (token) => {
@ -276,7 +276,7 @@ const walkTokens = (token) => {
marked.use({ walkTokens }); marked.use({ walkTokens });
// Run marked // Run marked
console.log(marked('# heading 2\n\n## heading 3')); console.log(marked.parse('# heading 2\n\n## heading 3'));
``` ```
**Output:** **Output:**
@ -422,7 +422,7 @@ marked.use({ extensions: [descriptionList] });
marked.use({ extensions: [description] }); marked.use({ extensions: [description] });
marked.use({ walkTokens }) marked.use({ walkTokens })
console.log(marked('A Description List:\n' console.log(marked.parse('A Description List:\n'
+ ': Topic 1 : Description 1\n' + ': Topic 1 : Description 1\n'
+ ': **Topic 2** : *Description 2*')); + ': **Topic 2** : *Description 2*'));
``` ```
@ -497,7 +497,7 @@ The Lexer builds an array of tokens, which will be passed to the Parser.
The Parser processes each token in the token array: The Parser processes each token in the token array:
``` js ``` js
const marked = require('marked'); import { marked } from 'marked';
const md = ` const md = `
# heading # heading

View File

@ -34,7 +34,7 @@ var inputDirty = true;
var $activeOutputElem = null; var $activeOutputElem = null;
var search = searchToObject(); var search = searchToObject();
var markedVersions = { var markedVersions = {
master: 'https://cdn.jsdelivr.net/gh/markedjs/marked/lib/marked.js' master: 'https://cdn.jsdelivr.net/gh/markedjs/marked/marked.min.js'
}; };
var markedVersionCache = {}; var markedVersionCache = {};
var delayTime = 1; var delayTime = 1;
@ -109,7 +109,7 @@ function setInitialVersion() {
.then(function(json) { .then(function(json) {
for (var i = 0; i < json.versions.length; i++) { for (var i = 0; i < json.versions.length; i++) {
var ver = json.versions[i]; var ver = json.versions[i];
markedVersions[ver] = 'https://cdn.jsdelivr.net/npm/marked@' + ver + '/lib/marked.js'; markedVersions[ver] = 'https://cdn.jsdelivr.net/npm/marked@' + ver + '/marked.min.js';
var opt = document.createElement('option'); var opt = document.createElement('option');
opt.textContent = ver; opt.textContent = ver;
opt.value = ver; opt.value = ver;
@ -122,7 +122,7 @@ function setInitialVersion() {
return res.json(); return res.json();
}) })
.then(function(json) { .then(function(json) {
markedVersions.master = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + json[0].sha + '/lib/marked.js'; markedVersions.master = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + json[0].sha + '/marked.min.js';
}) })
.catch(function() { .catch(function() {
// do nothing // do nothing
@ -262,7 +262,7 @@ function addCommitVersion(value, text, commit) {
if (markedVersions[value]) { if (markedVersions[value]) {
return; return;
} }
markedVersions[value] = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + commit + '/lib/marked.js'; markedVersions[value] = 'https://cdn.jsdelivr.net/gh/markedjs/marked@' + commit + '/marked.min.js';
var opt = document.createElement('option'); var opt = document.createElement('option');
opt.textContent = text; opt.textContent = text;
opt.value = value; opt.value = value;

View File

@ -1 +0,0 @@
module.exports = require('./lib/marked');

View File

@ -6,6 +6,7 @@
"helpers": [ "helpers": [
"helpers/helpers.js" "helpers/helpers.js"
], ],
"jsLoader": "import",
"stopSpecOnExpectationFailure": false, "stopSpecOnExpectationFailure": false,
"random": true "random": true
} }

lib/marked.cjs · Normal file (2913 lines)

File diff suppressed because one or more lines are too long

View File

@ -9,11 +9,7 @@
* The code in this file is generated from files in ./src/ * The code in this file is generated from files in ./src/
*/ */
var esmEntry$1 = {exports: {}}; function getDefaults() {
var defaults$5 = {exports: {}};
function getDefaults$1() {
return { return {
baseUrl: null, baseUrl: null,
breaks: false, breaks: false,
@ -37,20 +33,15 @@ function getDefaults$1() {
}; };
} }
function changeDefaults$1(newDefaults) { let defaults = getDefaults();
defaults$5.exports.defaults = newDefaults;
}
defaults$5.exports = { function changeDefaults(newDefaults) {
defaults: getDefaults$1(), defaults = newDefaults;
getDefaults: getDefaults$1, }
changeDefaults: changeDefaults$1
};
/** /**
* Helpers * Helpers
*/ */
const escapeTest = /[&<>"']/; const escapeTest = /[&<>"']/;
const escapeReplace = /[&<>"']/g; const escapeReplace = /[&<>"']/g;
const escapeTestNoEncode = /[<>"']|&(?!#?\w+;)/; const escapeTestNoEncode = /[<>"']|&(?!#?\w+;)/;
@ -63,7 +54,7 @@ const escapeReplacements = {
"'": '&#39;' "'": '&#39;'
}; };
const getEscapeReplacement = (ch) => escapeReplacements[ch]; const getEscapeReplacement = (ch) => escapeReplacements[ch];
function escape$3(html, encode) { function escape(html, encode) {
if (encode) { if (encode) {
if (escapeTest.test(html)) { if (escapeTest.test(html)) {
return html.replace(escapeReplace, getEscapeReplacement); return html.replace(escapeReplace, getEscapeReplacement);
@ -79,7 +70,7 @@ function escape$3(html, encode) {
const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig; const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;
function unescape$1(html) { function unescape(html) {
// explicitly match decimal, hex, and named HTML entities // explicitly match decimal, hex, and named HTML entities
return html.replace(unescapeTest, (_, n) => { return html.replace(unescapeTest, (_, n) => {
n = n.toLowerCase(); n = n.toLowerCase();
@ -94,7 +85,7 @@ function unescape$1(html) {
} }
const caret = /(^|[^\[])\^/g; const caret = /(^|[^\[])\^/g;
function edit$1(regex, opt) { function edit(regex, opt) {
regex = regex.source || regex; regex = regex.source || regex;
opt = opt || ''; opt = opt || '';
const obj = { const obj = {
@ -113,11 +104,11 @@ function edit$1(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g; const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
function cleanUrl$1(sanitize, base, href) { function cleanUrl(sanitize, base, href) {
if (sanitize) { if (sanitize) {
let prot; let prot;
try { try {
prot = decodeURIComponent(unescape$1(href)) prot = decodeURIComponent(unescape(href))
.replace(nonWordAndColonTest, '') .replace(nonWordAndColonTest, '')
.toLowerCase(); .toLowerCase();
} catch (e) { } catch (e) {
@ -151,7 +142,7 @@ function resolveUrl(base, href) {
if (justDomain.test(base)) { if (justDomain.test(base)) {
baseUrls[' ' + base] = base + '/'; baseUrls[' ' + base] = base + '/';
} else { } else {
baseUrls[' ' + base] = rtrim$1(base, '/', true); baseUrls[' ' + base] = rtrim(base, '/', true);
} }
} }
base = baseUrls[' ' + base]; base = baseUrls[' ' + base];
@ -172,9 +163,9 @@ function resolveUrl(base, href) {
} }
} }
const noopTest$1 = { exec: function noopTest() {} }; const noopTest = { exec: function noopTest() {} };
function merge$2(obj) { function merge(obj) {
let i = 1, let i = 1,
target, target,
key; key;
@ -191,7 +182,7 @@ function merge$2(obj) {
return obj; return obj;
} }
function splitCells$1(tableRow, count) { function splitCells(tableRow, count) {
// ensure that every cell-delimiting pipe has a space // ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe // before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => { const row = tableRow.replace(/\|/g, (match, offset, str) => {
@ -230,7 +221,7 @@ function splitCells$1(tableRow, count) {
// Remove trailing 'c's. Equivalent to str.replace(/c*$/, ''). // Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
// /c*$/ is vulnerable to REDOS. // /c*$/ is vulnerable to REDOS.
// invert: Remove suffix of non-c chars instead. Default falsey. // invert: Remove suffix of non-c chars instead. Default falsey.
function rtrim$1(str, c, invert) { function rtrim(str, c, invert) {
const l = str.length; const l = str.length;
if (l === 0) { if (l === 0) {
return ''; return '';
@ -254,7 +245,7 @@ function rtrim$1(str, c, invert) {
return str.substr(0, l - suffLen); return str.substr(0, l - suffLen);
} }
function findClosingBracket$1(str, b) { function findClosingBracket(str, b) {
if (str.indexOf(b[1]) === -1) { if (str.indexOf(b[1]) === -1) {
return -1; return -1;
} }
@ -276,14 +267,14 @@ function findClosingBracket$1(str, b) {
return -1; return -1;
} }
function checkSanitizeDeprecation$1(opt) { function checkSanitizeDeprecation(opt) {
if (opt && opt.sanitize && !opt.silent) { if (opt && opt.sanitize && !opt.silent) {
console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options'); console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
} }
} }
// copied from https://stackoverflow.com/a/5450113/806777 // copied from https://stackoverflow.com/a/5450113/806777
function repeatString$1(pattern, count) { function repeatString(pattern, count) {
if (count < 1) { if (count < 1) {
return ''; return '';
} }
@ -298,32 +289,9 @@ function repeatString$1(pattern, count) {
return result + pattern; return result + pattern;
} }
var helpers = {
escape: escape$3,
unescape: unescape$1,
edit: edit$1,
cleanUrl: cleanUrl$1,
resolveUrl,
noopTest: noopTest$1,
merge: merge$2,
splitCells: splitCells$1,
rtrim: rtrim$1,
findClosingBracket: findClosingBracket$1,
checkSanitizeDeprecation: checkSanitizeDeprecation$1,
repeatString: repeatString$1
};
const { defaults: defaults$4 } = defaults$5.exports;
const {
rtrim,
splitCells,
escape: escape$2,
findClosingBracket
} = helpers;
function outputLink(cap, link, raw, lexer) { function outputLink(cap, link, raw, lexer) {
const href = link.href; const href = link.href;
const title = link.title ? escape$2(link.title) : null; const title = link.title ? escape(link.title) : null;
const text = cap[1].replace(/\\([\[\]])/g, '$1'); const text = cap[1].replace(/\\([\[\]])/g, '$1');
if (cap[0].charAt(0) !== '!') { if (cap[0].charAt(0) !== '!') {
@ -344,7 +312,7 @@ function outputLink(cap, link, raw, lexer) {
raw, raw,
href, href,
title, title,
text: escape$2(text) text: escape(text)
}; };
} }
} }
@ -380,9 +348,9 @@ function indentCodeCompensation(raw, text) {
/** /**
* Tokenizer * Tokenizer
*/ */
var Tokenizer_1$1 = class Tokenizer { class Tokenizer {
constructor(options) { constructor(options) {
this.options = options || defaults$4; this.options = options || defaults;
} }
space(src) { space(src) {
@ -641,7 +609,7 @@ var Tokenizer_1$1 = class Tokenizer {
}; };
if (this.options.sanitize) { if (this.options.sanitize) {
token.type = 'paragraph'; token.type = 'paragraph';
token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape$2(cap[0]); token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
token.tokens = []; token.tokens = [];
this.lexer.inline(token.text, token.tokens); this.lexer.inline(token.text, token.tokens);
} }
@ -771,7 +739,7 @@ var Tokenizer_1$1 = class Tokenizer {
return { return {
type: 'escape', type: 'escape',
raw: cap[0], raw: cap[0],
text: escape$2(cap[1]) text: escape(cap[1])
}; };
} }
} }
@ -800,7 +768,7 @@ var Tokenizer_1$1 = class Tokenizer {
text: this.options.sanitize text: this.options.sanitize
? (this.options.sanitizer ? (this.options.sanitizer
? this.options.sanitizer(cap[0]) ? this.options.sanitizer(cap[0])
: escape$2(cap[0])) : escape(cap[0]))
: cap[0] : cap[0]
}; };
} }
@ -955,7 +923,7 @@ var Tokenizer_1$1 = class Tokenizer {
if (hasNonSpaceChars && hasSpaceCharsOnBothEnds) { if (hasNonSpaceChars && hasSpaceCharsOnBothEnds) {
text = text.substring(1, text.length - 1); text = text.substring(1, text.length - 1);
} }
text = escape$2(text, true); text = escape(text, true);
return { return {
type: 'codespan', type: 'codespan',
raw: cap[0], raw: cap[0],
@ -991,10 +959,10 @@ var Tokenizer_1$1 = class Tokenizer {
if (cap) { if (cap) {
let text, href; let text, href;
if (cap[2] === '@') { if (cap[2] === '@') {
text = escape$2(this.options.mangle ? mangle(cap[1]) : cap[1]); text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
text = escape$2(cap[1]); text = escape(cap[1]);
href = text; href = text;
} }
@ -1019,7 +987,7 @@ var Tokenizer_1$1 = class Tokenizer {
if (cap = this.rules.inline.url.exec(src)) { if (cap = this.rules.inline.url.exec(src)) {
let text, href; let text, href;
if (cap[2] === '@') { if (cap[2] === '@') {
text = escape$2(this.options.mangle ? mangle(cap[0]) : cap[0]); text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
href = 'mailto:' + text; href = 'mailto:' + text;
} else { } else {
// do extended autolink path validation // do extended autolink path validation
@ -1028,7 +996,7 @@ var Tokenizer_1$1 = class Tokenizer {
prevCapZero = cap[0]; prevCapZero = cap[0];
cap[0] = this.rules.inline._backpedal.exec(cap[0])[0]; cap[0] = this.rules.inline._backpedal.exec(cap[0])[0];
} while (prevCapZero !== cap[0]); } while (prevCapZero !== cap[0]);
text = escape$2(cap[0]); text = escape(cap[0]);
if (cap[1] === 'www.') { if (cap[1] === 'www.') {
href = 'http://' + text; href = 'http://' + text;
} else { } else {
@ -1056,9 +1024,9 @@ var Tokenizer_1$1 = class Tokenizer {
if (cap) { if (cap) {
let text; let text;
if (this.lexer.state.inRawBlock) { if (this.lexer.state.inRawBlock) {
text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape$2(cap[0])) : cap[0]; text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
} else { } else {
text = escape$2(this.options.smartypants ? smartypants(cap[0]) : cap[0]); text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
} }
return { return {
type: 'text', type: 'text',
@ -1067,18 +1035,12 @@ var Tokenizer_1$1 = class Tokenizer {
}; };
} }
} }
}; }
const {
noopTest,
edit,
merge: merge$1
} = helpers;
/** /**
* Block-Level Grammar * Block-Level Grammar
*/ */
const block$1 = { const block = {
newline: /^(?: *(?:\n|$))+/, newline: /^(?: *(?:\n|$))+/,
code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/, code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/, fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
@ -1105,89 +1067,89 @@ const block$1 = {
text: /^[^\n]+/ text: /^[^\n]+/
}; };
block$1._label = /(?!\s*\])(?:\\[\[\]]|[^\[\]])+/; block._label = /(?!\s*\])(?:\\[\[\]]|[^\[\]])+/;
block$1._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/; block._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/;
block$1.def = edit(block$1.def) block.def = edit(block.def)
.replace('label', block$1._label) .replace('label', block._label)
.replace('title', block$1._title) .replace('title', block._title)
.getRegex(); .getRegex();
block$1.bullet = /(?:[*+-]|\d{1,9}[.)])/; block.bullet = /(?:[*+-]|\d{1,9}[.)])/;
block$1.listItemStart = edit(/^( *)(bull) */) block.listItemStart = edit(/^( *)(bull) */)
.replace('bull', block$1.bullet) .replace('bull', block.bullet)
.getRegex(); .getRegex();
block$1.list = edit(block$1.list) block.list = edit(block.list)
.replace(/bull/g, block$1.bullet) .replace(/bull/g, block.bullet)
.replace('hr', '\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))') .replace('hr', '\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))')
.replace('def', '\\n+(?=' + block$1.def.source + ')') .replace('def', '\\n+(?=' + block.def.source + ')')
.getRegex(); .getRegex();
block$1._tag = 'address|article|aside|base|basefont|blockquote|body|caption' block._tag = 'address|article|aside|base|basefont|blockquote|body|caption'
+ '|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption' + '|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption'
+ '|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe' + '|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe'
+ '|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option' + '|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option'
+ '|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr' + '|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr'
+ '|track|ul'; + '|track|ul';
block$1._comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/; block._comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/;
block$1.html = edit(block$1.html, 'i') block.html = edit(block.html, 'i')
.replace('comment', block$1._comment) .replace('comment', block._comment)
.replace('tag', block$1._tag) .replace('tag', block._tag)
.replace('attribute', / +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/) .replace('attribute', / +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/)
.getRegex(); .getRegex();
block$1.paragraph = edit(block$1._paragraph) block.paragraph = edit(block._paragraph)
.replace('hr', block$1.hr) .replace('hr', block.hr)
.replace('heading', ' {0,3}#{1,6} ') .replace('heading', ' {0,3}#{1,6} ')
.replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs .replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs
.replace('blockquote', ' {0,3}>') .replace('blockquote', ' {0,3}>')
.replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n') .replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
.replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt .replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)') .replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)')
.replace('tag', block$1._tag) // pars can be interrupted by type (6) html blocks .replace('tag', block._tag) // pars can be interrupted by type (6) html blocks
.getRegex(); .getRegex();
block$1.blockquote = edit(block$1.blockquote) block.blockquote = edit(block.blockquote)
.replace('paragraph', block$1.paragraph) .replace('paragraph', block.paragraph)
.getRegex(); .getRegex();
/** /**
* Normal Block Grammar * Normal Block Grammar
*/ */
block$1.normal = merge$1({}, block$1); block.normal = merge({}, block);
/** /**
* GFM Block Grammar * GFM Block Grammar
*/ */
block$1.gfm = merge$1({}, block$1.normal, { block.gfm = merge({}, block.normal, {
table: '^ *([^\\n ].*\\|.*)\\n' // Header table: '^ *([^\\n ].*\\|.*)\\n' // Header
+ ' {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?' // Align + ' {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?' // Align
+ '(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)' // Cells + '(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)' // Cells
}); });
block$1.gfm.table = edit(block$1.gfm.table) block.gfm.table = edit(block.gfm.table)
.replace('hr', block$1.hr) .replace('hr', block.hr)
.replace('heading', ' {0,3}#{1,6} ') .replace('heading', ' {0,3}#{1,6} ')
.replace('blockquote', ' {0,3}>') .replace('blockquote', ' {0,3}>')
.replace('code', ' {4}[^\\n]') .replace('code', ' {4}[^\\n]')
.replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n') .replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n')
.replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt .replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)') .replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)')
.replace('tag', block$1._tag) // tables can be interrupted by type (6) html blocks .replace('tag', block._tag) // tables can be interrupted by type (6) html blocks
.getRegex(); .getRegex();
/** /**
* Pedantic grammar (original John Gruber's loose markdown specification) * Pedantic grammar (original John Gruber's loose markdown specification)
*/ */
block$1.pedantic = merge$1({}, block$1.normal, { block.pedantic = merge({}, block.normal, {
html: edit( html: edit(
'^ *(?:comment *(?:\\n|\\s*$)' '^ *(?:comment *(?:\\n|\\s*$)'
+ '|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)' // closed tag + '|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)' // closed tag
+ '|<tag(?:"[^"]*"|\'[^\']*\'|\\s[^\'"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))') + '|<tag(?:"[^"]*"|\'[^\']*\'|\\s[^\'"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))')
.replace('comment', block$1._comment) .replace('comment', block._comment)
.replace(/tag/g, '(?!(?:' .replace(/tag/g, '(?!(?:'
+ 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub' + 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub'
+ '|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)' + '|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)'
@ -1196,10 +1158,10 @@ block$1.pedantic = merge$1({}, block$1.normal, {
def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/, def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/,
heading: /^(#{1,6})(.*)(?:\n+|$)/, heading: /^(#{1,6})(.*)(?:\n+|$)/,
fences: noopTest, // fences not supported fences: noopTest, // fences not supported
paragraph: edit(block$1.normal._paragraph) paragraph: edit(block.normal._paragraph)
.replace('hr', block$1.hr) .replace('hr', block.hr)
.replace('heading', ' *#{1,6} *[^\n]') .replace('heading', ' *#{1,6} *[^\n]')
.replace('lheading', block$1.lheading) .replace('lheading', block.lheading)
.replace('blockquote', ' {0,3}>') .replace('blockquote', ' {0,3}>')
.replace('|fences', '') .replace('|fences', '')
.replace('|list', '') .replace('|list', '')
@ -1210,7 +1172,7 @@ block$1.pedantic = merge$1({}, block$1.normal, {
/** /**
* Inline-Level Grammar * Inline-Level Grammar
*/ */
const inline$1 = { const inline = {
escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/, escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/, autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
url: noopTest, url: noopTest,
@ -1240,73 +1202,73 @@ const inline$1 = {
// list of punctuation marks from CommonMark spec // list of punctuation marks from CommonMark spec
// without * and _ to handle the different emphasis markers * and _ // without * and _ to handle the different emphasis markers * and _
inline$1._punctuation = '!"#$%&\'()+\\-.,/:;<=>?@\\[\\]`^{|}~'; inline._punctuation = '!"#$%&\'()+\\-.,/:;<=>?@\\[\\]`^{|}~';
inline$1.punctuation = edit(inline$1.punctuation).replace(/punctuation/g, inline$1._punctuation).getRegex(); inline.punctuation = edit(inline.punctuation).replace(/punctuation/g, inline._punctuation).getRegex();
// sequences em should skip over [title](link), `code`, <html> // sequences em should skip over [title](link), `code`, <html>
inline$1.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g; inline.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g;
inline$1.escapedEmSt = /\\\*|\\_/g; inline.escapedEmSt = /\\\*|\\_/g;
inline$1._comment = edit(block$1._comment).replace('(?:-->|$)', '-->').getRegex(); inline._comment = edit(block._comment).replace('(?:-->|$)', '-->').getRegex();
inline$1.emStrong.lDelim = edit(inline$1.emStrong.lDelim) inline.emStrong.lDelim = edit(inline.emStrong.lDelim)
.replace(/punct/g, inline$1._punctuation) .replace(/punct/g, inline._punctuation)
.getRegex(); .getRegex();
inline$1.emStrong.rDelimAst = edit(inline$1.emStrong.rDelimAst, 'g') inline.emStrong.rDelimAst = edit(inline.emStrong.rDelimAst, 'g')
.replace(/punct/g, inline$1._punctuation) .replace(/punct/g, inline._punctuation)
.getRegex(); .getRegex();
inline$1.emStrong.rDelimUnd = edit(inline$1.emStrong.rDelimUnd, 'g') inline.emStrong.rDelimUnd = edit(inline.emStrong.rDelimUnd, 'g')
.replace(/punct/g, inline$1._punctuation) .replace(/punct/g, inline._punctuation)
.getRegex(); .getRegex();
inline$1._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g; inline._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g;
inline$1._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/; inline._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/;
inline$1._email = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/; inline._email = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/;
inline$1.autolink = edit(inline$1.autolink) inline.autolink = edit(inline.autolink)
.replace('scheme', inline$1._scheme) .replace('scheme', inline._scheme)
.replace('email', inline$1._email) .replace('email', inline._email)
.getRegex(); .getRegex();
inline$1._attribute = /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/; inline._attribute = /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/;
inline$1.tag = edit(inline$1.tag) inline.tag = edit(inline.tag)
.replace('comment', inline$1._comment) .replace('comment', inline._comment)
.replace('attribute', inline$1._attribute) .replace('attribute', inline._attribute)
.getRegex(); .getRegex();
inline$1._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/; inline._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/;
inline$1._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/; inline._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/;
inline$1._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/; inline._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/;
inline$1.link = edit(inline$1.link) inline.link = edit(inline.link)
.replace('label', inline$1._label) .replace('label', inline._label)
.replace('href', inline$1._href) .replace('href', inline._href)
.replace('title', inline$1._title) .replace('title', inline._title)
.getRegex(); .getRegex();
inline$1.reflink = edit(inline$1.reflink) inline.reflink = edit(inline.reflink)
.replace('label', inline$1._label) .replace('label', inline._label)
.getRegex(); .getRegex();
inline$1.reflinkSearch = edit(inline$1.reflinkSearch, 'g') inline.reflinkSearch = edit(inline.reflinkSearch, 'g')
.replace('reflink', inline$1.reflink) .replace('reflink', inline.reflink)
.replace('nolink', inline$1.nolink) .replace('nolink', inline.nolink)
.getRegex(); .getRegex();
/** /**
* Normal Inline Grammar * Normal Inline Grammar
*/ */
inline$1.normal = merge$1({}, inline$1); inline.normal = merge({}, inline);
/** /**
* Pedantic Inline Grammar * Pedantic Inline Grammar
*/ */
inline$1.pedantic = merge$1({}, inline$1.normal, { inline.pedantic = merge({}, inline.normal, {
strong: { strong: {
start: /^__|\*\*/, start: /^__|\*\*/,
middle: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/, middle: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
@ -1320,10 +1282,10 @@ inline$1.pedantic = merge$1({}, inline$1.normal, {
endUnd: /_(?!_)/g endUnd: /_(?!_)/g
}, },
link: edit(/^!?\[(label)\]\((.*?)\)/) link: edit(/^!?\[(label)\]\((.*?)\)/)
.replace('label', inline$1._label) .replace('label', inline._label)
.getRegex(), .getRegex(),
reflink: edit(/^!?\[(label)\]\s*\[([^\]]*)\]/) reflink: edit(/^!?\[(label)\]\s*\[([^\]]*)\]/)
.replace('label', inline$1._label) .replace('label', inline._label)
.getRegex() .getRegex()
}); });
@ -1331,8 +1293,8 @@ inline$1.pedantic = merge$1({}, inline$1.normal, {
* GFM Inline Grammar * GFM Inline Grammar
*/ */
inline$1.gfm = merge$1({}, inline$1.normal, { inline.gfm = merge({}, inline.normal, {
escape: edit(inline$1.escape).replace('])', '~|])').getRegex(), escape: edit(inline.escape).replace('])', '~|])').getRegex(),
_extended_email: /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/, _extended_email: /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/,
url: /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/, url: /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,
_backpedal: /(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/, _backpedal: /(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/,
@ -1340,31 +1302,21 @@ inline$1.gfm = merge$1({}, inline$1.normal, {
text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/ text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/
}); });
inline$1.gfm.url = edit(inline$1.gfm.url, 'i') inline.gfm.url = edit(inline.gfm.url, 'i')
.replace('email', inline$1.gfm._extended_email) .replace('email', inline.gfm._extended_email)
.getRegex(); .getRegex();
/** /**
* GFM + Line Breaks Inline Grammar * GFM + Line Breaks Inline Grammar
*/ */
inline$1.breaks = merge$1({}, inline$1.gfm, { inline.breaks = merge({}, inline.gfm, {
br: edit(inline$1.br).replace('{2,}', '*').getRegex(), br: edit(inline.br).replace('{2,}', '*').getRegex(),
text: edit(inline$1.gfm.text) text: edit(inline.gfm.text)
.replace('\\b_', '\\b_| {2,}\\n') .replace('\\b_', '\\b_| {2,}\\n')
.replace(/\{2,\}/g, '*') .replace(/\{2,\}/g, '*')
.getRegex() .getRegex()
}); });
var rules = {
block: block$1,
inline: inline$1
};
const Tokenizer$2 = Tokenizer_1$1;
const { defaults: defaults$3 } = defaults$5.exports;
const { block, inline } = rules;
const { repeatString } = helpers;
/** /**
* smartypants text replacement * smartypants text replacement
*/ */
@ -1409,12 +1361,12 @@ function mangle(text) {
/** /**
* Block Lexer * Block Lexer
*/ */
var Lexer_1$1 = class Lexer { class Lexer {
constructor(options) { constructor(options) {
this.tokens = []; this.tokens = [];
this.tokens.links = Object.create(null); this.tokens.links = Object.create(null);
this.options = options || defaults$3; this.options = options || defaults;
this.options.tokenizer = this.options.tokenizer || new Tokenizer$2(); this.options.tokenizer = this.options.tokenizer || new Tokenizer();
this.tokenizer = this.options.tokenizer; this.tokenizer = this.options.tokenizer;
this.tokenizer.options = this.options; this.tokenizer.options = this.options;
this.tokenizer.lexer = this; this.tokenizer.lexer = this;
@ -1850,20 +1802,14 @@ var Lexer_1$1 = class Lexer {
return tokens; return tokens;
} }
}; }
const { defaults: defaults$2 } = defaults$5.exports;
const {
cleanUrl,
escape: escape$1
} = helpers;
/** /**
* Renderer * Renderer
*/ */
var Renderer_1$1 = class Renderer { class Renderer {
constructor(options) { constructor(options) {
this.options = options || defaults$2; this.options = options || defaults;
} }
code(code, infostring, escaped) { code(code, infostring, escaped) {
@ -1880,15 +1826,15 @@ var Renderer_1$1 = class Renderer {
if (!lang) { if (!lang) {
return '<pre><code>' return '<pre><code>'
+ (escaped ? code : escape$1(code, true)) + (escaped ? code : escape(code, true))
+ '</code></pre>\n'; + '</code></pre>\n';
} }
return '<pre><code class="' return '<pre><code class="'
+ this.options.langPrefix + this.options.langPrefix
+ escape$1(lang, true) + escape(lang, true)
+ '">' + '">'
+ (escaped ? code : escape$1(code, true)) + (escaped ? code : escape(code, true))
+ '</code></pre>\n'; + '</code></pre>\n';
} }
@ -1992,7 +1938,7 @@ var Renderer_1$1 = class Renderer {
if (href === null) { if (href === null) {
return text; return text;
} }
let out = '<a href="' + escape$1(href) + '"'; let out = '<a href="' + escape(href) + '"';
if (title) { if (title) {
out += ' title="' + title + '"'; out += ' title="' + title + '"';
} }
@ -2017,14 +1963,13 @@ var Renderer_1$1 = class Renderer {
text(text) { text(text) {
return text; return text;
} }
}; }
/** /**
* TextRenderer * TextRenderer
* returns only the textual part of the token * returns only the textual part of the token
*/ */
class TextRenderer {
var TextRenderer_1$1 = class TextRenderer {
// no need for block level renderers // no need for block level renderers
strong(text) { strong(text) {
return text; return text;
@ -2061,13 +2006,12 @@ var TextRenderer_1$1 = class TextRenderer {
br() { br() {
return ''; return '';
} }
}; }
/** /**
* Slugger generates header id * Slugger generates header id
*/ */
class Slugger {
var Slugger_1$1 = class Slugger {
constructor() { constructor() {
this.seen = {}; this.seen = {};
} }
@ -2112,27 +2056,19 @@ var Slugger_1$1 = class Slugger {
const slug = this.serialize(value); const slug = this.serialize(value);
return this.getNextSafeSlug(slug, options.dryrun); return this.getNextSafeSlug(slug, options.dryrun);
} }
}; }
const Renderer$2 = Renderer_1$1;
const TextRenderer$2 = TextRenderer_1$1;
const Slugger$2 = Slugger_1$1;
const { defaults: defaults$1 } = defaults$5.exports;
const {
unescape
} = helpers;
/** /**
* Parsing & Compiling * Parsing & Compiling
*/ */
var Parser_1$1 = class Parser { class Parser {
constructor(options) { constructor(options) {
this.options = options || defaults$1; this.options = options || defaults;
this.options.renderer = this.options.renderer || new Renderer$2(); this.options.renderer = this.options.renderer || new Renderer();
this.renderer = this.options.renderer; this.renderer = this.options.renderer;
this.renderer.options = this.options; this.renderer.options = this.options;
this.textRenderer = new TextRenderer$2(); this.textRenderer = new TextRenderer();
this.slugger = new Slugger$2(); this.slugger = new Slugger();
} }
/** /**
@ -2399,29 +2335,12 @@ var Parser_1$1 = class Parser {
} }
return out; return out;
} }
}; }
const Lexer$1 = Lexer_1$1;
const Parser$1 = Parser_1$1;
const Tokenizer$1 = Tokenizer_1$1;
const Renderer$1 = Renderer_1$1;
const TextRenderer$1 = TextRenderer_1$1;
const Slugger$1 = Slugger_1$1;
const {
merge,
checkSanitizeDeprecation,
escape
} = helpers;
const {
getDefaults,
changeDefaults,
defaults
} = defaults$5.exports;
/** /**
* Marked * Marked
*/ */
function marked$1(src, opt, callback) { function marked(src, opt, callback) {
// throw error in case of non string input // throw error in case of non string input
if (typeof src === 'undefined' || src === null) { if (typeof src === 'undefined' || src === null) {
throw new Error('marked(): input parameter is undefined or null'); throw new Error('marked(): input parameter is undefined or null');
@ -2436,7 +2355,7 @@ function marked$1(src, opt, callback) {
opt = null; opt = null;
} }
opt = merge({}, marked$1.defaults, opt || {}); opt = merge({}, marked.defaults, opt || {});
checkSanitizeDeprecation(opt); checkSanitizeDeprecation(opt);
if (callback) { if (callback) {
@ -2444,7 +2363,7 @@ function marked$1(src, opt, callback) {
let tokens; let tokens;
try { try {
tokens = Lexer$1.lex(src, opt); tokens = Lexer.lex(src, opt);
} catch (e) { } catch (e) {
return callback(e); return callback(e);
} }
@ -2455,9 +2374,9 @@ function marked$1(src, opt, callback) {
if (!err) { if (!err) {
try { try {
if (opt.walkTokens) { if (opt.walkTokens) {
marked$1.walkTokens(tokens, opt.walkTokens); marked.walkTokens(tokens, opt.walkTokens);
} }
out = Parser$1.parse(tokens, opt); out = Parser.parse(tokens, opt);
} catch (e) { } catch (e) {
err = e; err = e;
} }
@ -2479,7 +2398,7 @@ function marked$1(src, opt, callback) {
if (!tokens.length) return done(); if (!tokens.length) return done();
let pending = 0; let pending = 0;
marked$1.walkTokens(tokens, function(token) { marked.walkTokens(tokens, function(token) {
if (token.type === 'code') { if (token.type === 'code') {
pending++; pending++;
setTimeout(() => { setTimeout(() => {
@ -2509,11 +2428,11 @@ function marked$1(src, opt, callback) {
} }
try { try {
const tokens = Lexer$1.lex(src, opt); const tokens = Lexer.lex(src, opt);
if (opt.walkTokens) { if (opt.walkTokens) {
marked$1.walkTokens(tokens, opt.walkTokens); marked.walkTokens(tokens, opt.walkTokens);
} }
return Parser$1.parse(tokens, opt); return Parser.parse(tokens, opt);
} catch (e) { } catch (e) {
e.message += '\nPlease report this to https://github.com/markedjs/marked.'; e.message += '\nPlease report this to https://github.com/markedjs/marked.';
if (opt.silent) { if (opt.silent) {
@ -2529,24 +2448,24 @@ function marked$1(src, opt, callback) {
* Options * Options
*/ */
marked$1.options = marked.options =
marked$1.setOptions = function(opt) { marked.setOptions = function(opt) {
merge(marked$1.defaults, opt); merge(marked.defaults, opt);
changeDefaults(marked$1.defaults); changeDefaults(marked.defaults);
return marked$1; return marked;
}; };
marked$1.getDefaults = getDefaults; marked.getDefaults = getDefaults;
marked$1.defaults = defaults; marked.defaults = defaults;
/** /**
* Use Extension * Use Extension
*/ */
marked$1.use = function(...args) { marked.use = function(...args) {
const opts = merge({}, ...args); const opts = merge({}, ...args);
const extensions = marked$1.defaults.extensions || { renderers: {}, childTokens: {} }; const extensions = marked.defaults.extensions || { renderers: {}, childTokens: {} };
let hasExtensions; let hasExtensions;
args.forEach((pack) => { args.forEach((pack) => {
@ -2605,7 +2524,7 @@ marked$1.use = function(...args) {
// ==-- Parse "overwrite" extensions --== // // ==-- Parse "overwrite" extensions --== //
if (pack.renderer) { if (pack.renderer) {
const renderer = marked$1.defaults.renderer || new Renderer$1(); const renderer = marked.defaults.renderer || new Renderer();
for (const prop in pack.renderer) { for (const prop in pack.renderer) {
const prevRenderer = renderer[prop]; const prevRenderer = renderer[prop];
// Replace renderer with func to run extension, but fall back if false // Replace renderer with func to run extension, but fall back if false
@ -2620,7 +2539,7 @@ marked$1.use = function(...args) {
opts.renderer = renderer; opts.renderer = renderer;
} }
if (pack.tokenizer) { if (pack.tokenizer) {
const tokenizer = marked$1.defaults.tokenizer || new Tokenizer$1(); const tokenizer = marked.defaults.tokenizer || new Tokenizer();
for (const prop in pack.tokenizer) { for (const prop in pack.tokenizer) {
const prevTokenizer = tokenizer[prop]; const prevTokenizer = tokenizer[prop];
// Replace tokenizer with func to run extension, but fall back if false // Replace tokenizer with func to run extension, but fall back if false
@ -2637,7 +2556,7 @@ marked$1.use = function(...args) {
// ==-- Parse WalkTokens extensions --== // // ==-- Parse WalkTokens extensions --== //
if (pack.walkTokens) { if (pack.walkTokens) {
const walkTokens = marked$1.defaults.walkTokens; const walkTokens = marked.defaults.walkTokens;
opts.walkTokens = function(token) { opts.walkTokens = function(token) {
pack.walkTokens.call(this, token); pack.walkTokens.call(this, token);
if (walkTokens) { if (walkTokens) {
@ -2650,7 +2569,7 @@ marked$1.use = function(...args) {
opts.extensions = extensions; opts.extensions = extensions;
} }
marked$1.setOptions(opts); marked.setOptions(opts);
}); });
}; };
@ -2658,32 +2577,32 @@ marked$1.use = function(...args) {
* Run callback for every token * Run callback for every token
*/ */
marked$1.walkTokens = function(tokens, callback) { marked.walkTokens = function(tokens, callback) {
for (const token of tokens) { for (const token of tokens) {
callback.call(marked$1, token); callback.call(marked, token);
switch (token.type) { switch (token.type) {
case 'table': { case 'table': {
for (const cell of token.header) { for (const cell of token.header) {
marked$1.walkTokens(cell.tokens, callback); marked.walkTokens(cell.tokens, callback);
} }
for (const row of token.rows) { for (const row of token.rows) {
for (const cell of row) { for (const cell of row) {
marked$1.walkTokens(cell.tokens, callback); marked.walkTokens(cell.tokens, callback);
} }
} }
break; break;
} }
case 'list': { case 'list': {
marked$1.walkTokens(token.items, callback); marked.walkTokens(token.items, callback);
break; break;
} }
default: { default: {
if (marked$1.defaults.extensions && marked$1.defaults.extensions.childTokens && marked$1.defaults.extensions.childTokens[token.type]) { // Walk any extensions if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) { // Walk any extensions
marked$1.defaults.extensions.childTokens[token.type].forEach(function(childTokens) { marked.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
marked$1.walkTokens(token[childTokens], callback); marked.walkTokens(token[childTokens], callback);
}); });
} else if (token.tokens) { } else if (token.tokens) {
marked$1.walkTokens(token.tokens, callback); marked.walkTokens(token.tokens, callback);
} }
} }
} }
@ -2693,7 +2612,7 @@ marked$1.walkTokens = function(tokens, callback) {
/** /**
* Parse Inline * Parse Inline
*/ */
marked$1.parseInline = function(src, opt) { marked.parseInline = function(src, opt) {
// throw error in case of non string input // throw error in case of non string input
if (typeof src === 'undefined' || src === null) { if (typeof src === 'undefined' || src === null) {
throw new Error('marked.parseInline(): input parameter is undefined or null'); throw new Error('marked.parseInline(): input parameter is undefined or null');
@ -2703,15 +2622,15 @@ marked$1.parseInline = function(src, opt) {
+ Object.prototype.toString.call(src) + ', string expected'); + Object.prototype.toString.call(src) + ', string expected');
} }
opt = merge({}, marked$1.defaults, opt || {}); opt = merge({}, marked.defaults, opt || {});
checkSanitizeDeprecation(opt); checkSanitizeDeprecation(opt);
try { try {
const tokens = Lexer$1.lexInline(src, opt); const tokens = Lexer.lexInline(src, opt);
if (opt.walkTokens) { if (opt.walkTokens) {
marked$1.walkTokens(tokens, opt.walkTokens); marked.walkTokens(tokens, opt.walkTokens);
} }
return Parser$1.parseInline(tokens, opt); return Parser.parseInline(tokens, opt);
} catch (e) { } catch (e) {
e.message += '\nPlease report this to https://github.com/markedjs/marked.'; e.message += '\nPlease report this to https://github.com/markedjs/marked.';
if (opt.silent) { if (opt.silent) {
@ -2726,37 +2645,23 @@ marked$1.parseInline = function(src, opt) {
/** /**
* Expose * Expose
*/ */
marked$1.Parser = Parser$1; marked.Parser = Parser;
marked$1.parser = Parser$1.parse; marked.parser = Parser.parse;
marked$1.Renderer = Renderer$1; marked.Renderer = Renderer;
marked$1.TextRenderer = TextRenderer$1; marked.TextRenderer = TextRenderer;
marked$1.Lexer = Lexer$1; marked.Lexer = Lexer;
marked$1.lexer = Lexer$1.lex; marked.lexer = Lexer.lex;
marked$1.Tokenizer = Tokenizer$1; marked.Tokenizer = Tokenizer;
marked$1.Slugger = Slugger$1; marked.Slugger = Slugger;
marked$1.parse = marked$1; marked.parse = marked;
var marked_1 = marked$1; const options = marked.options;
const setOptions = marked.setOptions;
const use = marked.use;
const walkTokens = marked.walkTokens;
const parseInline = marked.parseInline;
const parse = marked;
const parser = Parser.parse;
const lexer = Lexer.lex;
const marked = marked_1; export { Lexer, Parser, Renderer, Slugger, TextRenderer, Tokenizer, defaults, getDefaults, lexer, marked, options, parse, parseInline, parser, setOptions, use, walkTokens };
const Lexer = Lexer_1$1;
const Parser = Parser_1$1;
const Tokenizer = Tokenizer_1$1;
const Renderer = Renderer_1$1;
const TextRenderer = TextRenderer_1$1;
const Slugger = Slugger_1$1;
esmEntry$1.exports = marked;
var parse = esmEntry$1.exports.parse = marked;
var Parser_1 = esmEntry$1.exports.Parser = Parser;
var parser = esmEntry$1.exports.parser = Parser.parse;
var Renderer_1 = esmEntry$1.exports.Renderer = Renderer;
var TextRenderer_1 = esmEntry$1.exports.TextRenderer = TextRenderer;
var Lexer_1 = esmEntry$1.exports.Lexer = Lexer;
var lexer = esmEntry$1.exports.lexer = Lexer.lex;
var Tokenizer_1 = esmEntry$1.exports.Tokenizer = Tokenizer;
var Slugger_1 = esmEntry$1.exports.Slugger = Slugger;
var esmEntry = esmEntry$1.exports;
export { Lexer_1 as Lexer, Parser_1 as Parser, Renderer_1 as Renderer, Slugger_1 as Slugger, TextRenderer_1 as TextRenderer, Tokenizer_1 as Tokenizer, esmEntry as default, lexer, parse, parser };
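Note (reviewer sketch, not part of the diff): the regenerated bundle above keeps use, walkTokens, parse and parseInline on the single marked object, so extension code written against the old build still applies. A minimal usage sketch, assuming the published 'marked' package name and illustrative markdown input:

import { marked } from 'marked';

const headings = [];
marked.use({
  walkTokens(token) {
    // walkTokens visits every token, including table cells and list items
    if (token.type === 'heading') headings.push(token.text);
  }
});

marked.parse('# One\n\n## Two');
console.log(headings);                            // ['One', 'Two']
console.log(marked.parseInline('**bold** _em_')); // '<strong>bold</strong> <em>em</em>'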

File diff suppressed because one or more lines are too long

View File

@ -6,7 +6,7 @@ marked \- a javascript markdown parser
.SH SYNOPSIS .SH SYNOPSIS
.B marked .B marked
[\-o \fI<output>\fP] [\-i \fI<input>\fP] [\-\-help] [\-o \fI<output>\fP] [\-i \fI<input>\fP] [\-s \fI<string>\fP] [\-\-help]
[\-\-tokens] [\-\-pedantic] [\-\-gfm] [\-\-tokens] [\-\-pedantic] [\-\-gfm]
[\-\-breaks] [\-\-sanitize] [\-\-breaks] [\-\-sanitize]
[\-\-smart\-lists] [\-\-lang\-prefix \fI<prefix>\fP] [\-\-smart\-lists] [\-\-lang\-prefix \fI<prefix>\fP]
@ -36,28 +36,8 @@ Specify file output. If none is specified, write to stdout.
Specify file input, otherwise use last argument as input file. Specify file input, otherwise use last argument as input file.
If no input file is specified, read from stdin. If no input file is specified, read from stdin.
.TP .TP
.BI \-\-test .BI \-s,\ \-\-string\ [\fIstring\fP]
Makes sure the test(s) pass. Specify string input instead of a file.
.RS
.PP
.B \-\-glob [\fIfile\fP]
Specify which test to use.
.PP
.B \-\-fix
Fixes tests.
.PP
.B \-\-bench
Benchmarks the test(s).
.PP
.B \-\-time
Times The test(s).
.PP
.B \-\-minified
Runs test file(s) as minified.
.PP
.B \-\-stop
Stop process if a test fails.
.RE
.TP .TP
.BI \-t,\ \-\-tokens .BI \-t,\ \-\-tokens
Output a token stream instead of html. Output a token stream instead of html.
@ -98,7 +78,8 @@ For configuring and running programmatically.
.B Example .B Example
require('marked')('*foo*', { gfm: true }); import { marked } from 'marked';
marked('*foo*', { gfm: true });
.SH BUGS .SH BUGS
Please report any bugs to https://github.com/markedjs/marked. Please report any bugs to https://github.com/markedjs/marked.

View File

@ -4,9 +4,9 @@ NAME
marked - a javascript markdown parser marked - a javascript markdown parser
SYNOPSIS SYNOPSIS
marked [-o <output>] [-i <input>] [--help] [--tokens] [--pedantic] marked [-o <output>] [-i <input>] [-s <string>] [--help] [--tokens]
[--gfm] [--breaks] [--sanitize] [--smart-lists] [--lang-prefix <pre- [--pedantic] [--gfm] [--breaks] [--sanitize] [--smart-lists]
fix>] [--no-etc...] [--silent] [filename] [--lang-prefix <prefix>] [--no-etc...] [--silent] [filename]
DESCRIPTION DESCRIPTION
@ -24,65 +24,55 @@ EXAMPLES
OPTIONS OPTIONS
-o, --output [output] -o, --output [output]
Specify file output. If none is specified, write to stdout. Specify file output. If none is specified, write to stdout.
-i, --input [input] -i, --input [input]
Specify file input, otherwise use last argument as input file. Specify file input, otherwise use last argument as input file.
If no input file is specified, read from stdin. If no input file is specified, read from stdin.
--test Makes sure the test(s) pass. -s, --string [string]
Specify string input instead of a file.
--glob [file] Specify which test to use.
--fix Fixes tests.
--bench Benchmarks the test(s).
--time Times The test(s).
--minified Runs test file(s) as minified.
--stop Stop process if a test fails.
-t, --tokens -t, --tokens
Output a token stream instead of html. Output a token stream instead of html.
--pedantic --pedantic
Conform to obscure parts of markdown.pl as much as possible. Conform to obscure parts of markdown.pl as much as possible.
Don't fix original markdown bugs. Don't fix original markdown bugs.
--gfm Enable github flavored markdown. --gfm Enable github flavored markdown.
--breaks --breaks
Enable GFM line breaks. Only works with the gfm option. Enable GFM line breaks. Only works with the gfm option.
--sanitize --sanitize
Sanitize output. Ignore any HTML input. Sanitize output. Ignore any HTML input.
--smart-lists --smart-lists
Use smarter list behavior than the original markdown. Use smarter list behavior than the original markdown.
--lang-prefix [prefix] --lang-prefix [prefix]
Set the prefix for code block classes. Set the prefix for code block classes.
--mangle --mangle
Mangle email addresses. Mangle email addresses.
--no-sanitize, -no-etc... --no-sanitize, -no-etc...
The inverse of any of the marked options above. The inverse of any of the marked options above.
--silent --silent
Silence error output. Silence error output.
-h, --help -h, --help
Display help information. Display help information.
CONFIGURATION CONFIGURATION
For configuring and running programmatically. For configuring and running programmatically.
Example Example
require('marked')('*foo*', { gfm: true }); import { marked } from 'marked';
marked('*foo*', { gfm: true });
BUGS BUGS
Please report any bugs to https://github.com/markedjs/marked. Please report any bugs to https://github.com/markedjs/marked.

marked.min.js vendored

File diff suppressed because one or more lines are too long

package-lock.json generated

File diff suppressed because it is too large

View File

@ -3,11 +3,12 @@
"description": "A markdown parser built for speed", "description": "A markdown parser built for speed",
"author": "Christopher Jeffrey", "author": "Christopher Jeffrey",
"version": "3.0.8", "version": "3.0.8",
"main": "./src/marked.js", "type": "module",
"main": "./lib/marked.esm.js",
"module": "./lib/marked.esm.js", "module": "./lib/marked.esm.js",
"browser": "./lib/marked.js", "browser": "./lib/marked.cjs",
"bin": { "bin": {
"marked": "bin/marked" "marked": "bin/marked.js"
}, },
"man": "./man/marked.1", "man": "./man/marked.1",
"files": [ "files": [
@ -17,6 +18,13 @@
"man/", "man/",
"marked.min.js" "marked.min.js"
], ],
"exports": {
".": {
"import": "./lib/marked.esm.js",
"default": "./lib/marked.cjs"
},
"./package.json": "./package.json"
},
"repository": "git://github.com/markedjs/marked.git", "repository": "git://github.com/markedjs/marked.git",
"homepage": "https://marked.js.org", "homepage": "https://marked.js.org",
"bugs": { "bugs": {
@ -68,19 +76,19 @@
"test:all": "npm test && npm run test:lint", "test:all": "npm test && npm run test:lint",
"test:unit": "npm test -- test/unit/**/*-spec.js", "test:unit": "npm test -- test/unit/**/*-spec.js",
"test:specs": "npm test -- test/specs/**/*-spec.js", "test:specs": "npm test -- test/specs/**/*-spec.js",
"test:lint": "eslint bin/marked .", "test:lint": "eslint .",
"test:redos": "node test/vuln-regex.js", "test:redos": "node test/vuln-regex.js",
"test:update": "node test/update-specs.js", "test:update": "node test/update-specs.js",
"rules": "node test/rules.js", "rules": "node test/rules.js",
"bench": "npm run rollup && node test/bench.js", "bench": "npm run rollup && node test/bench.js",
"lint": "eslint --fix bin/marked .", "lint": "eslint --fix .",
"build:reset": "git checkout upstream/master lib/marked.js lib/marked.esm.js marked.min.js", "build:reset": "git checkout upstream/master lib/marked.cjs lib/marked.esm.js marked.min.js",
"build": "npm run rollup && npm run minify", "build": "npm run rollup && npm run minify",
"build:docs": "node build-docs.js", "build:docs": "node build-docs.js",
"rollup": "npm run rollup:umd && npm run rollup:esm", "rollup": "npm run rollup:umd && npm run rollup:esm",
"rollup:umd": "rollup -c rollup.config.js", "rollup:umd": "rollup -c rollup.config.js",
"rollup:esm": "rollup -c rollup.config.esm.js", "rollup:esm": "rollup -c rollup.config.esm.js",
"minify": "uglifyjs lib/marked.js -cm --comments /Copyright/ -o marked.min.js", "minify": "uglifyjs lib/marked.cjs -cm --comments /Copyright/ -o marked.min.js",
"preversion": "npm run build && (git diff --quiet || git commit -am build)" "preversion": "npm run build && (git diff --quiet || git commit -am build)"
}, },
"engines": { "engines": {

View File

@ -2,7 +2,7 @@ const commonjs = require('@rollup/plugin-commonjs');
const license = require('rollup-plugin-license'); const license = require('rollup-plugin-license');
module.exports = { module.exports = {
input: 'src/esm-entry.js', input: 'src/marked.js',
output: { output: {
file: 'lib/marked.esm.js', file: 'lib/marked.esm.js',
format: 'esm' format: 'esm'

View File

@ -5,10 +5,9 @@ const license = require('rollup-plugin-license');
module.exports = { module.exports = {
input: 'src/marked.js', input: 'src/marked.js',
output: { output: {
file: 'lib/marked.js', file: 'lib/marked.cjs',
format: 'umd', format: 'umd',
name: 'marked', name: 'marked'
exports: 'default'
}, },
plugins: [ plugins: [
license({ license({

View File

@ -1,7 +1,7 @@
const Tokenizer = require('./Tokenizer.js'); import { Tokenizer } from './Tokenizer.js';
const { defaults } = require('./defaults.js'); import { defaults } from './defaults.js';
const { block, inline } = require('./rules.js'); import { block, inline } from './rules.js';
const { repeatString } = require('./helpers.js'); import { repeatString } from './helpers.js';
/** /**
* smartypants text replacement * smartypants text replacement
@ -47,7 +47,7 @@ function mangle(text) {
/** /**
* Block Lexer * Block Lexer
*/ */
module.exports = class Lexer { export class Lexer {
constructor(options) { constructor(options) {
this.tokens = []; this.tokens = [];
this.tokens.links = Object.create(null); this.tokens.links = Object.create(null);
@ -488,4 +488,4 @@ module.exports = class Lexer {
return tokens; return tokens;
} }
}; }

View File

@ -1,15 +1,15 @@
const Renderer = require('./Renderer.js'); import { Renderer } from './Renderer.js';
const TextRenderer = require('./TextRenderer.js'); import { TextRenderer } from './TextRenderer.js';
const Slugger = require('./Slugger.js'); import { Slugger } from './Slugger.js';
const { defaults } = require('./defaults.js'); import { defaults } from './defaults.js';
const { import {
unescape unescape
} = require('./helpers.js'); } from './helpers.js';
/** /**
* Parsing & Compiling * Parsing & Compiling
*/ */
module.exports = class Parser { export class Parser {
constructor(options) { constructor(options) {
this.options = options || defaults; this.options = options || defaults;
this.options.renderer = this.options.renderer || new Renderer(); this.options.renderer = this.options.renderer || new Renderer();
@ -283,4 +283,4 @@ module.exports = class Parser {
} }
return out; return out;
} }
}; }
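Note (not part of the diff): with Lexer and Parser now exported as classes, the two stages can be driven directly; a rough sketch assuming the relative src paths used above and illustrative input:

import { Lexer } from './Lexer.js';
import { Parser } from './Parser.js';

const tokens = Lexer.lex('# Hello *world*'); // tokenize with default options
console.log(Parser.parse(tokens));           // roughly '<h1 id="hello-world">Hello <em>world</em></h1>'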

View File

@ -1,13 +1,13 @@
const { defaults } = require('./defaults.js'); import { defaults } from './defaults.js';
const { import {
cleanUrl, cleanUrl,
escape escape
} = require('./helpers.js'); } from './helpers.js';
/** /**
* Renderer * Renderer
*/ */
module.exports = class Renderer { export class Renderer {
constructor(options) { constructor(options) {
this.options = options || defaults; this.options = options || defaults;
} }
@ -163,4 +163,4 @@ module.exports = class Renderer {
text(text) { text(text) {
return text; return text;
} }
}; }

View File

@ -1,7 +1,7 @@
/** /**
* Slugger generates header id * Slugger generates header id
*/ */
module.exports = class Slugger { export class Slugger {
constructor() { constructor() {
this.seen = {}; this.seen = {};
} }
@ -46,4 +46,4 @@ module.exports = class Slugger {
const slug = this.serialize(value); const slug = this.serialize(value);
return this.getNextSafeSlug(slug, options.dryrun); return this.getNextSafeSlug(slug, options.dryrun);
} }
}; }

View File

@ -2,7 +2,7 @@
* TextRenderer * TextRenderer
* returns only the textual part of the token * returns only the textual part of the token
*/ */
module.exports = class TextRenderer { export class TextRenderer {
// no need for block level renderers // no need for block level renderers
strong(text) { strong(text) {
return text; return text;
@ -39,4 +39,4 @@ module.exports = class TextRenderer {
br() { br() {
return ''; return '';
} }
}; }

View File

@ -1,10 +1,10 @@
const { defaults } = require('./defaults.js'); import { defaults } from './defaults.js';
const { import {
rtrim, rtrim,
splitCells, splitCells,
escape, escape,
findClosingBracket findClosingBracket
} = require('./helpers.js'); } from './helpers.js';
function outputLink(cap, link, raw, lexer) { function outputLink(cap, link, raw, lexer) {
const href = link.href; const href = link.href;
@ -65,7 +65,7 @@ function indentCodeCompensation(raw, text) {
/** /**
* Tokenizer * Tokenizer
*/ */
module.exports = class Tokenizer { export class Tokenizer {
constructor(options) { constructor(options) {
this.options = options || defaults; this.options = options || defaults;
} }
@ -752,4 +752,4 @@ module.exports = class Tokenizer {
}; };
} }
} }
}; }

View File

@ -1,4 +1,4 @@
function getDefaults() { export function getDefaults() {
return { return {
baseUrl: null, baseUrl: null,
breaks: false, breaks: false,
@ -22,12 +22,8 @@ function getDefaults() {
}; };
} }
function changeDefaults(newDefaults) { export let defaults = getDefaults();
module.exports.defaults = newDefaults;
}
module.exports = { export function changeDefaults(newDefaults) {
defaults: getDefaults(), defaults = newDefaults;
getDefaults, }
changeDefaults
};
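Note (not part of the diff): defaults is now a mutable let export, so changeDefaults reassigns the live binding that every importer sees; a minimal sketch of that behaviour:

import { defaults, getDefaults, changeDefaults } from './defaults.js';

changeDefaults({ ...getDefaults(), gfm: false });
console.log(defaults.gfm); // false — importers observe the reassigned binding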

View File

@ -1,18 +0,0 @@
const marked = require('./marked.js');
const Lexer = require('./Lexer.js');
const Parser = require('./Parser.js');
const Tokenizer = require('./Tokenizer.js');
const Renderer = require('./Renderer.js');
const TextRenderer = require('./TextRenderer.js');
const Slugger = require('./Slugger.js');
module.exports = marked;
module.exports.parse = marked;
module.exports.Parser = Parser;
module.exports.parser = Parser.parse;
module.exports.Renderer = Renderer;
module.exports.TextRenderer = TextRenderer;
module.exports.Lexer = Lexer;
module.exports.lexer = Lexer.lex;
module.exports.Tokenizer = Tokenizer;
module.exports.Slugger = Slugger;

View File

@ -13,7 +13,7 @@ const escapeReplacements = {
"'": '&#39;' "'": '&#39;'
}; };
const getEscapeReplacement = (ch) => escapeReplacements[ch]; const getEscapeReplacement = (ch) => escapeReplacements[ch];
function escape(html, encode) { export function escape(html, encode) {
if (encode) { if (encode) {
if (escapeTest.test(html)) { if (escapeTest.test(html)) {
return html.replace(escapeReplace, getEscapeReplacement); return html.replace(escapeReplace, getEscapeReplacement);
@ -29,7 +29,7 @@ function escape(html, encode) {
const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig; const unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;
function unescape(html) { export function unescape(html) {
// explicitly match decimal, hex, and named HTML entities // explicitly match decimal, hex, and named HTML entities
return html.replace(unescapeTest, (_, n) => { return html.replace(unescapeTest, (_, n) => {
n = n.toLowerCase(); n = n.toLowerCase();
@ -44,7 +44,7 @@ function unescape(html) {
} }
const caret = /(^|[^\[])\^/g; const caret = /(^|[^\[])\^/g;
function edit(regex, opt) { export function edit(regex, opt) {
regex = regex.source || regex; regex = regex.source || regex;
opt = opt || ''; opt = opt || '';
const obj = { const obj = {
@ -63,7 +63,7 @@ function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g; const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
function cleanUrl(sanitize, base, href) { export function cleanUrl(sanitize, base, href) {
if (sanitize) { if (sanitize) {
let prot; let prot;
try { try {
@ -93,7 +93,7 @@ const justDomain = /^[^:]+:\/*[^/]*$/;
const protocol = /^([^:]+:)[\s\S]*$/; const protocol = /^([^:]+:)[\s\S]*$/;
const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/; const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/;
function resolveUrl(base, href) { export function resolveUrl(base, href) {
if (!baseUrls[' ' + base]) { if (!baseUrls[' ' + base]) {
// we can ignore everything in base after the last slash of its path component, // we can ignore everything in base after the last slash of its path component,
// but we might need to add _that_ // but we might need to add _that_
@ -122,9 +122,9 @@ function resolveUrl(base, href) {
} }
} }
const noopTest = { exec: function noopTest() {} }; export const noopTest = { exec: function noopTest() {} };
function merge(obj) { export function merge(obj) {
let i = 1, let i = 1,
target, target,
key; key;
@ -141,7 +141,7 @@ function merge(obj) {
return obj; return obj;
} }
function splitCells(tableRow, count) { export function splitCells(tableRow, count) {
// ensure that every cell-delimiting pipe has a space // ensure that every cell-delimiting pipe has a space
// before it to distinguish it from an escaped pipe // before it to distinguish it from an escaped pipe
const row = tableRow.replace(/\|/g, (match, offset, str) => { const row = tableRow.replace(/\|/g, (match, offset, str) => {
@ -180,7 +180,7 @@ function splitCells(tableRow, count) {
// Remove trailing 'c's. Equivalent to str.replace(/c*$/, ''). // Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
// /c*$/ is vulnerable to REDOS. // /c*$/ is vulnerable to REDOS.
// invert: Remove suffix of non-c chars instead. Default falsey. // invert: Remove suffix of non-c chars instead. Default falsey.
function rtrim(str, c, invert) { export function rtrim(str, c, invert) {
const l = str.length; const l = str.length;
if (l === 0) { if (l === 0) {
return ''; return '';
@ -204,7 +204,7 @@ function rtrim(str, c, invert) {
return str.substr(0, l - suffLen); return str.substr(0, l - suffLen);
} }
function findClosingBracket(str, b) { export function findClosingBracket(str, b) {
if (str.indexOf(b[1]) === -1) { if (str.indexOf(b[1]) === -1) {
return -1; return -1;
} }
@ -226,14 +226,14 @@ function findClosingBracket(str, b) {
return -1; return -1;
} }
function checkSanitizeDeprecation(opt) { export function checkSanitizeDeprecation(opt) {
if (opt && opt.sanitize && !opt.silent) { if (opt && opt.sanitize && !opt.silent) {
console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options'); console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
} }
} }
// copied from https://stackoverflow.com/a/5450113/806777 // copied from https://stackoverflow.com/a/5450113/806777
function repeatString(pattern, count) { export function repeatString(pattern, count) {
if (count < 1) { if (count < 1) {
return ''; return '';
} }
@ -247,18 +247,3 @@ function repeatString(pattern, count) {
} }
return result + pattern; return result + pattern;
} }
module.exports = {
escape,
unescape,
edit,
cleanUrl,
resolveUrl,
noopTest,
merge,
splitCells,
rtrim,
findClosingBracket,
checkSanitizeDeprecation,
repeatString
};

View File

@ -1,24 +1,24 @@
const Lexer = require('./Lexer.js'); import { Lexer } from './Lexer.js';
const Parser = require('./Parser.js'); import { Parser } from './Parser.js';
const Tokenizer = require('./Tokenizer.js'); import { Tokenizer } from './Tokenizer.js';
const Renderer = require('./Renderer.js'); import { Renderer } from './Renderer.js';
const TextRenderer = require('./TextRenderer.js'); import { TextRenderer } from './TextRenderer.js';
const Slugger = require('./Slugger.js'); import { Slugger } from './Slugger.js';
const { import {
merge, merge,
checkSanitizeDeprecation, checkSanitizeDeprecation,
escape escape
} = require('./helpers.js'); } from './helpers.js';
const { import {
getDefaults, getDefaults,
changeDefaults, changeDefaults,
defaults defaults
} = require('./defaults.js'); } from './defaults.js';
/** /**
* Marked * Marked
*/ */
function marked(src, opt, callback) { export function marked(src, opt, callback) {
// throw error in case of non string input // throw error in case of non string input
if (typeof src === 'undefined' || src === null) { if (typeof src === 'undefined' || src === null) {
throw new Error('marked(): input parameter is undefined or null'); throw new Error('marked(): input parameter is undefined or null');
@ -333,4 +333,18 @@ marked.Tokenizer = Tokenizer;
marked.Slugger = Slugger; marked.Slugger = Slugger;
marked.parse = marked; marked.parse = marked;
module.exports = marked; export const options = marked.options;
export const setOptions = marked.setOptions;
export const use = marked.use;
export const walkTokens = marked.walkTokens;
export const parseInline = marked.parseInline;
export const parse = marked;
export const parser = Parser.parse;
export const lexer = Lexer.lex;
export { defaults, getDefaults } from './defaults.js';
export { Lexer } from './Lexer.js';
export { Parser } from './Parser.js';
export { Tokenizer } from './Tokenizer.js';
export { Renderer } from './Renderer.js';
export { TextRenderer } from './TextRenderer.js';
export { Slugger } from './Slugger.js';
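Note (not part of the diff): the named re-exports above mean the helpers can be imported as free functions instead of being read off the marked object; a short sketch with illustrative input:

import { marked, setOptions, lexer, parser } from './marked.js';

setOptions({ gfm: true });
const tokens = lexer('- item');   // same function as Lexer.lex
console.log(parser(tokens));      // same function as Parser.parse
console.log(marked('- item'));    // one-shot equivalent of the two calls above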

View File

@ -1,13 +1,13 @@
const { import {
noopTest, noopTest,
edit, edit,
merge merge
} = require('./helpers.js'); } from './helpers.js';
/** /**
* Block-Level Grammar * Block-Level Grammar
*/ */
const block = { export const block = {
newline: /^(?: *(?:\n|$))+/, newline: /^(?: *(?:\n|$))+/,
code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/, code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/, fences: /^ {0,3}(`{3,}(?=[^`\n]*\n)|~{3,})([^\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
@ -139,7 +139,7 @@ block.pedantic = merge({}, block.normal, {
/** /**
* Inline-Level Grammar * Inline-Level Grammar
*/ */
const inline = { export const inline = {
escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/, escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/, autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
url: noopTest, url: noopTest,
@ -283,8 +283,3 @@ inline.breaks = merge({}, inline.gfm, {
.replace(/\{2,\}/g, '*') .replace(/\{2,\}/g, '*')
.getRegex() .getRegex()
}); });
module.exports = {
block,
inline
};

test/bench.js vendored
View File

@ -1,15 +1,19 @@
const path = require('path'); import { dirname, resolve } from 'path';
const htmlDiffer = require('./helpers/html-differ.js'); import { fileURLToPath } from 'url';
const { loadFiles } = require('./helpers/load.js'); import { isEqual } from './helpers/html-differ.js';
import { loadFiles } from './helpers/load.js';
let marked = require('../lib/marked.js'); import { marked as esmMarked } from '../lib/marked.esm.js';
const es6marked = require('../src/marked.js');
const __dirname = dirname(fileURLToPath(import.meta.url));
let marked;
/** /**
* Load specs * Load specs
*/ */
function load() { export function load() {
const dir = path.resolve(__dirname, './specs/commonmark'); const dir = resolve(__dirname, './specs/commonmark');
const sections = loadFiles(dir); const sections = loadFiles(dir);
let specs = []; let specs = [];
@ -23,7 +27,7 @@ function load() {
/** /**
* Run all benchmarks * Run all benchmarks
*/ */
async function runBench(options) { export async function runBench(options) {
options = options || {}; options = options || {};
const specs = load(); const specs = load();
@ -38,9 +42,9 @@ async function runBench(options) {
if (options.marked) { if (options.marked) {
marked.setOptions(options.marked); marked.setOptions(options.marked);
} }
await bench('es5 marked', specs, marked); await bench('cjs marked', specs, marked.parse);
es6marked.setOptions({ esmMarked.setOptions({
gfm: false, gfm: false,
breaks: false, breaks: false,
pedantic: false, pedantic: false,
@ -48,9 +52,9 @@ async function runBench(options) {
smartLists: false smartLists: false
}); });
if (options.marked) { if (options.marked) {
es6marked.setOptions(options.marked); esmMarked.setOptions(options.marked);
} }
await bench('es6 marked', specs, es6marked); await bench('esm marked', specs, esmMarked.parse);
// GFM // GFM
marked.setOptions({ marked.setOptions({
@ -63,9 +67,9 @@ async function runBench(options) {
if (options.marked) { if (options.marked) {
marked.setOptions(options.marked); marked.setOptions(options.marked);
} }
await bench('es5 marked (gfm)', specs, marked); await bench('cjs marked (gfm)', specs, marked.parse);
es6marked.setOptions({ esmMarked.setOptions({
gfm: true, gfm: true,
breaks: false, breaks: false,
pedantic: false, pedantic: false,
@ -73,9 +77,9 @@ async function runBench(options) {
smartLists: false smartLists: false
}); });
if (options.marked) { if (options.marked) {
es6marked.setOptions(options.marked); esmMarked.setOptions(options.marked);
} }
await bench('es6 marked (gfm)', specs, es6marked); await bench('esm marked (gfm)', specs, esmMarked.parse);
// Pedantic // Pedantic
marked.setOptions({ marked.setOptions({
@ -88,9 +92,9 @@ async function runBench(options) {
if (options.marked) { if (options.marked) {
marked.setOptions(options.marked); marked.setOptions(options.marked);
} }
await bench('es5 marked (pedantic)', specs, marked); await bench('cjs marked (pedantic)', specs, marked.parse);
es6marked.setOptions({ esmMarked.setOptions({
gfm: false, gfm: false,
breaks: false, breaks: false,
pedantic: true, pedantic: true,
@ -98,35 +102,35 @@ async function runBench(options) {
smartLists: false smartLists: false
}); });
if (options.marked) { if (options.marked) {
es6marked.setOptions(options.marked); esmMarked.setOptions(options.marked);
} }
await bench('es6 marked (pedantic)', specs, es6marked); await bench('esm marked (pedantic)', specs, esmMarked.parse);
try { try {
await bench('commonmark', specs, (() => { await bench('commonmark', specs, (await (async() => {
const commonmark = require('commonmark'); const { Parser, HtmlRenderer } = await import('commonmark');
const parser = new commonmark.Parser(); const parser = new Parser();
const writer = new commonmark.HtmlRenderer(); const writer = new HtmlRenderer();
return function(text) { return function(text) {
return writer.render(parser.parse(text)); return writer.render(parser.parse(text));
}; };
})()); })()));
} catch (e) { } catch (e) {
console.error('Could not bench commonmark. (Error: %s)', e.message); console.error('Could not bench commonmark. (Error: %s)', e.message);
} }
try { try {
await bench('markdown-it', specs, (() => { await bench('markdown-it', specs, (await (async() => {
const MarkdownIt = require('markdown-it'); const MarkdownIt = (await import('markdown-it')).default;
const md = new MarkdownIt(); const md = new MarkdownIt();
return md.render.bind(md); return md.render.bind(md);
})()); })()));
} catch (e) { } catch (e) {
console.error('Could not bench markdown-it. (Error: %s)', e.message); console.error('Could not bench markdown-it. (Error: %s)', e.message);
} }
} }
async function bench(name, specs, engine) { export async function bench(name, specs, engine) {
const before = process.hrtime(); const before = process.hrtime();
for (let i = 0; i < 1e3; i++) { for (let i = 0; i < 1e3; i++) {
for (const spec of specs) { for (const spec of specs) {
@ -138,7 +142,7 @@ async function bench(name, specs, engine) {
let correct = 0; let correct = 0;
for (const spec of specs) { for (const spec of specs) {
if (await htmlDiffer.isEqual(spec.html, await engine(spec.markdown))) { if (await isEqual(spec.html, await engine(spec.markdown))) {
correct++; correct++;
} }
} }
@ -150,7 +154,7 @@ async function bench(name, specs, engine) {
/** /**
* A simple one-time benchmark * A simple one-time benchmark
*/ */
async function time(options) { export async function time(options) {
options = options || {}; options = options || {};
const specs = load(); const specs = load();
if (options.marked) { if (options.marked) {
@ -252,11 +256,13 @@ function camelize(text) {
/** /**
* Main * Main
*/ */
async function main(argv) { export default async function main(argv) {
marked = (await import('../lib/marked.cjs')).marked;
const opt = parseArg(argv); const opt = parseArg(argv);
if (opt.minified) { if (opt.minified) {
marked = require('../marked.min.js'); marked = (await import('../marked.min.js')).marked;
} }
if (opt.time) { if (opt.time) {
@ -275,14 +281,5 @@ function prettyElapsedTime(hrtimeElapsed) {
return seconds * 1e3 + frac; return seconds * 1e3 + frac;
} }
if (!module.parent) { process.title = 'marked bench';
process.title = 'marked bench'; main(process.argv.slice());
main(process.argv.slice());
} else {
module.exports = main;
module.exports.main = main;
module.exports.time = time;
module.exports.runBench = runBench;
module.exports.load = load;
module.exports.bench = bench;
}

View File

@ -1,9 +1,9 @@
const marked = require('../../src/marked.js'); import { marked, setOptions, getDefaults } from '../../src/marked.js';
const htmlDiffer = require('./html-differ.js'); import { isEqual, firstDiff } from './html-differ.js';
const assert = require('assert'); import { strictEqual } from 'assert';
beforeEach(() => { beforeEach(() => {
marked.setOptions(marked.getDefaults()); setOptions(getDefaults());
jasmine.addAsyncMatchers({ jasmine.addAsyncMatchers({
toRender: () => { toRender: () => {
@ -11,12 +11,12 @@ beforeEach(() => {
compare: async(spec, expected) => { compare: async(spec, expected) => {
const result = {}; const result = {};
const actual = marked(spec.markdown, spec.options); const actual = marked(spec.markdown, spec.options);
result.pass = await htmlDiffer.isEqual(expected, actual); result.pass = await isEqual(expected, actual);
if (result.pass) { if (result.pass) {
result.message = `${spec.markdown}\n------\n\nExpected: Should Fail`; result.message = `${spec.markdown}\n------\n\nExpected: Should Fail`;
} else { } else {
const diff = await htmlDiffer.firstDiff(actual, expected); const diff = await firstDiff(actual, expected);
result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`; result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`;
} }
return result; return result;
@ -27,12 +27,12 @@ beforeEach(() => {
return { return {
compare: async(actual, expected) => { compare: async(actual, expected) => {
const result = {}; const result = {};
result.pass = await htmlDiffer.isEqual(expected, actual); result.pass = await isEqual(expected, actual);
if (result.pass) { if (result.pass) {
result.message = `Expected '${actual}' not to equal '${expected}'`; result.message = `Expected '${actual}' not to equal '${expected}'`;
} else { } else {
const diff = await htmlDiffer.firstDiff(actual, expected); const diff = await firstDiff(actual, expected);
result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`; result.message = `Expected: ${diff.expected}\n Actual: ${diff.actual}`;
} }
return result; return result;
@ -44,7 +44,7 @@ beforeEach(() => {
const result = {}; const result = {};
const actual = marked(spec.markdown, spec.options); const actual = marked(spec.markdown, spec.options);
result.pass = assert.strictEqual(expected, actual) === undefined; result.pass = strictEqual(expected, actual) === undefined;
return result; return result;
} }

View File

@ -1,40 +1,38 @@
const HtmlDiffer = require('@markedjs/html-differ').HtmlDiffer; import { HtmlDiffer } from '@markedjs/html-differ';
const htmlDiffer = new HtmlDiffer({ const htmlDiffer = new HtmlDiffer({
ignoreSelfClosingSlash: true, ignoreSelfClosingSlash: true,
ignoreComments: false ignoreComments: false
}); });
module.exports = { export const isEqual = htmlDiffer.isEqual.bind(htmlDiffer);
isEqual: htmlDiffer.isEqual.bind(htmlDiffer), export async function firstDiff(actual, expected, padding) {
firstDiff: async(actual, expected, padding) => { padding = padding || 30;
padding = padding || 30; const diffHtml = await htmlDiffer.diffHtml(actual, expected);
const diffHtml = await htmlDiffer.diffHtml(actual, expected); const result = diffHtml.reduce((obj, diff) => {
const result = diffHtml.reduce((obj, diff) => { if (diff.added) {
if (diff.added) { if (obj.firstIndex === null) {
if (obj.firstIndex === null) { obj.firstIndex = obj.expected.length;
obj.firstIndex = obj.expected.length;
}
obj.expected += diff.value;
} else if (diff.removed) {
if (obj.firstIndex === null) {
obj.firstIndex = obj.actual.length;
}
obj.actual += diff.value;
} else {
obj.actual += diff.value;
obj.expected += diff.value;
} }
obj.expected += diff.value;
} else if (diff.removed) {
if (obj.firstIndex === null) {
obj.firstIndex = obj.actual.length;
}
obj.actual += diff.value;
} else {
obj.actual += diff.value;
obj.expected += diff.value;
}
return obj; return obj;
}, { }, {
firstIndex: null, firstIndex: null,
actual: '', actual: '',
expected: '' expected: ''
}); });
return { return {
actual: result.actual.substring(result.firstIndex - padding, result.firstIndex + padding), actual: result.actual.substring(result.firstIndex - padding, result.firstIndex + padding),
expected: result.expected.substring(result.firstIndex - padding, result.firstIndex + padding) expected: result.expected.substring(result.firstIndex - padding, result.firstIndex + padding)
}; };
} }
};

View File

@ -1,8 +1,9 @@
'use strict'; import fs from 'fs';
import path from 'path';
import fm from 'front-matter';
import { createRequire } from 'module';
const fs = require('fs'); const require = createRequire(import.meta.url);
const path = require('path');
const fm = require('front-matter');
function node4Polyfills() { function node4Polyfills() {
// https://github.com/uxitten/polyfill/blob/master/string.polyfill.js // https://github.com/uxitten/polyfill/blob/master/string.polyfill.js
@ -45,7 +46,7 @@ function node4Polyfills() {
} }
node4Polyfills(); node4Polyfills();
function outputCompletionTable(title, specs) { export function outputCompletionTable(title, specs) {
let longestName = 0; let longestName = 0;
let maxSpecs = 0; let maxSpecs = 0;
@ -67,7 +68,7 @@ function outputCompletionTable(title, specs) {
console.log(); console.log();
} }
function loadFiles(dir) { export function loadFiles(dir) {
const files = fs.readdirSync(dir); const files = fs.readdirSync(dir);
return files.reduce((obj, file) => { return files.reduce((obj, file) => {
@ -93,9 +94,14 @@ function loadFiles(dir) {
}]; }];
break; break;
} }
case '.js': case '.cjs':
case '.json': { case '.json': {
specs = require(absFile); try {
specs = require(absFile);
} catch (err) {
console.log(`Error loading ${absFile}`);
throw err;
}
if (!Array.isArray(specs)) { if (!Array.isArray(specs)) {
specs = [specs]; specs = [specs];
} }
@ -125,8 +131,3 @@ function loadFiles(dir) {
return obj; return obj;
}, {}); }, {});
} }
module.exports = {
outputCompletionTable,
loadFiles
};

test/rules.js vendored
View File

@ -1,4 +1,4 @@
const rules = require('../src/rules.js'); import rules from '../src/rules.js';
const COLOR = { const COLOR = {
reset: '\x1b[0m', reset: '\x1b[0m',

View File

@ -1,12 +1,15 @@
const path = require('path'); import { dirname, resolve } from 'path';
const load = require('../helpers/load.js'); import { fileURLToPath } from 'url';
import { loadFiles, outputCompletionTable } from '../helpers/load.js';
const __dirname = dirname(fileURLToPath(import.meta.url));
function runSpecs(title, dir, showCompletionTable, options) { function runSpecs(title, dir, showCompletionTable, options) {
options = options || {}; options = options || {};
const specs = load.loadFiles(path.resolve(__dirname, dir)); const specs = loadFiles(resolve(__dirname, dir));
if (showCompletionTable) { if (showCompletionTable) {
load.outputCompletionTable(title, specs); outputCompletionTable(title, specs);
} }
describe(title, () => { describe(title, () => {

View File

@ -1,4 +1,4 @@
const Lexer = require('../../src/Lexer.js'); import { Lexer } from '../../src/Lexer.js';
function expectTokens({ md, options, tokens = [], links = {} }) { function expectTokens({ md, options, tokens = [], links = {} }) {
const lexer = new Lexer(options); const lexer = new Lexer(options);

View File

@ -1,4 +1,4 @@
const Parser = require('../../src/Parser.js'); import { Parser } from '../../src/Parser.js';
async function expectHtml({ tokens, options, html, inline }) { async function expectHtml({ tokens, options, html, inline }) {
const parser = new Parser(options); const parser = new Parser(options);

View File

@ -1,15 +1,15 @@
const marked = require('../../src/marked.js'); import { marked, Renderer, Slugger, lexer, parseInline, use, getDefaults, walkTokens as _walkTokens } from '../../src/marked.js';
describe('Test heading ID functionality', () => { describe('Test heading ID functionality', () => {
it('should add id attribute by default', () => { it('should add id attribute by default', () => {
const renderer = new marked.Renderer(); const renderer = new Renderer();
const slugger = new marked.Slugger(); const slugger = new Slugger();
const header = renderer.heading('test', 1, 'test', slugger); const header = renderer.heading('test', 1, 'test', slugger);
expect(header).toBe('<h1 id="test">test</h1>\n'); expect(header).toBe('<h1 id="test">test</h1>\n');
}); });
it('should NOT add id attribute when options set false', () => { it('should NOT add id attribute when options set false', () => {
const renderer = new marked.Renderer({ headerIds: false }); const renderer = new Renderer({ headerIds: false });
const header = renderer.heading('test', 1, 'test'); const header = renderer.heading('test', 1, 'test');
expect(header).toBe('<h1>test</h1>\n'); expect(header).toBe('<h1>test</h1>\n');
}); });
@ -17,26 +17,26 @@ describe('Test heading ID functionality', () => {
describe('Test slugger functionality', () => { describe('Test slugger functionality', () => {
it('should use lowercase slug', () => { it('should use lowercase slug', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('Test')).toBe('test'); expect(slugger.slug('Test')).toBe('test');
}); });
it('should be unique to avoid collisions 1280', () => { it('should be unique to avoid collisions 1280', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('test')).toBe('test'); expect(slugger.slug('test')).toBe('test');
expect(slugger.slug('test')).toBe('test-1'); expect(slugger.slug('test')).toBe('test-1');
expect(slugger.slug('test')).toBe('test-2'); expect(slugger.slug('test')).toBe('test-2');
}); });
it('should be unique when slug ends with number', () => { it('should be unique when slug ends with number', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('test 1')).toBe('test-1'); expect(slugger.slug('test 1')).toBe('test-1');
expect(slugger.slug('test')).toBe('test'); expect(slugger.slug('test')).toBe('test');
expect(slugger.slug('test')).toBe('test-2'); expect(slugger.slug('test')).toBe('test-2');
}); });
it('should be unique when slug ends with hyphen number', () => { it('should be unique when slug ends with hyphen number', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('foo')).toBe('foo'); expect(slugger.slug('foo')).toBe('foo');
expect(slugger.slug('foo')).toBe('foo-1'); expect(slugger.slug('foo')).toBe('foo-1');
expect(slugger.slug('foo 1')).toBe('foo-1-1'); expect(slugger.slug('foo 1')).toBe('foo-1-1');
@ -45,39 +45,39 @@ describe('Test slugger functionality', () => {
}); });
it('should allow non-latin chars', () => { it('should allow non-latin chars', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('привет')).toBe('привет'); expect(slugger.slug('привет')).toBe('привет');
}); });
it('should remove ampersands 857', () => { it('should remove ampersands 857', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('This & That Section')).toBe('this--that-section'); expect(slugger.slug('This & That Section')).toBe('this--that-section');
}); });
it('should remove periods', () => { it('should remove periods', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('file.txt')).toBe('filetxt'); expect(slugger.slug('file.txt')).toBe('filetxt');
}); });
it('should remove html tags', () => { it('should remove html tags', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('<em>html</em>')).toBe('html'); expect(slugger.slug('<em>html</em>')).toBe('html');
}); });
it('should not increment seen when using dryrun option', () => { it('should not increment seen when using dryrun option', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section'); expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section');
expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section'); expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
}); });
it('should still return the next unique id when using dryrun', () => { it('should still return the next unique id when using dryrun', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section'); expect(slugger.slug('<h1>This Section</h1>')).toBe('this-section');
expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section-1'); expect(slugger.slug('<h1>This Section</h1>', { dryrun: true })).toBe('this-section-1');
}); });
it('should be repeatable in a sequence', () => { it('should be repeatable in a sequence', () => {
const slugger = new marked.Slugger(); const slugger = new Slugger();
expect(slugger.slug('foo')).toBe('foo'); expect(slugger.slug('foo')).toBe('foo');
expect(slugger.slug('foo')).toBe('foo-1'); expect(slugger.slug('foo')).toBe('foo-1');
expect(slugger.slug('foo')).toBe('foo-2'); expect(slugger.slug('foo')).toBe('foo-2');
@ -92,7 +92,7 @@ describe('Test paragraph token type', () => {
it('should use the "paragraph" type on top level', () => { it('should use the "paragraph" type on top level', () => {
const md = 'A Paragraph.\n\n> A blockquote\n\n- list item\n'; const md = 'A Paragraph.\n\n> A blockquote\n\n- list item\n';
const tokens = marked.lexer(md); const tokens = lexer(md);
expect(tokens[0].type).toBe('paragraph'); expect(tokens[0].type).toBe('paragraph');
expect(tokens[2].tokens[0].type).toBe('paragraph'); expect(tokens[2].tokens[0].type).toBe('paragraph');
@ -101,17 +101,17 @@ describe('Test paragraph token type', () => {
}); });
describe('changeDefaults', () => { describe('changeDefaults', () => {
it('should change global defaults', () => { it('should change global defaults', async() => {
const { defaults, changeDefaults } = require('../../src/defaults'); const { defaults, changeDefaults } = await import('../../src/defaults.js');
expect(defaults.test).toBeUndefined(); expect(defaults.test).toBeUndefined();
changeDefaults({ test: true }); changeDefaults({ test: true });
expect(require('../../src/defaults').defaults.test).toBe(true); expect((await import('../../src/defaults.js')).defaults.test).toBe(true);
}); });
}); });
describe('inlineLexer', () => { describe('inlineLexer', () => {
it('should send html to renderer.html', () => { it('should send html to renderer.html', () => {
const renderer = new marked.Renderer(); const renderer = new Renderer();
spyOn(renderer, 'html').and.callThrough(); spyOn(renderer, 'html').and.callThrough();
const md = 'HTML Image: <img alt="MY IMAGE" src="example.png" />'; const md = 'HTML Image: <img alt="MY IMAGE" src="example.png" />';
marked(md, { renderer }); marked(md, { renderer });
@ -123,14 +123,14 @@ describe('inlineLexer', () => {
describe('parseInline', () => { describe('parseInline', () => {
it('should parse inline tokens', () => { it('should parse inline tokens', () => {
const md = '**strong** _em_'; const md = '**strong** _em_';
const html = marked.parseInline(md); const html = parseInline(md);
expect(html).toBe('<strong>strong</strong> <em>em</em>'); expect(html).toBe('<strong>strong</strong> <em>em</em>');
}); });
it('should not parse block tokens', () => { it('should not parse block tokens', () => {
const md = '# header\n\n_em_'; const md = '# header\n\n_em_';
const html = marked.parseInline(md); const html = parseInline(md);
expect(html).toBe('# header\n\n<em>em</em>'); expect(html).toBe('# header\n\n<em>em</em>');
}); });
@ -156,7 +156,7 @@ describe('use extension', () => {
return `<u>${token.text}</u>\n`; return `<u>${token.text}</u>\n`;
} }
}; };
marked.use({ extensions: [underline] }); use({ extensions: [underline] });
let html = marked('Not Underlined\n:Underlined\nNot Underlined'); let html = marked('Not Underlined\n:Underlined\nNot Underlined');
expect(html).toBe('<p>Not Underlined\n:Underlined\nNot Underlined</p>\n'); expect(html).toBe('<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');
@ -186,7 +186,7 @@ describe('use extension', () => {
} }
}] }]
}; };
marked.use(underline); use(underline);
const html = marked('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D'); const html = marked('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
expect(html).toBe('<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n'); expect(html).toBe('<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
}); });
@ -211,7 +211,7 @@ describe('use extension', () => {
return `<u>${token.text}</u>`; return `<u>${token.text}</u>`;
} }
}; };
marked.use({ extensions: [underline] }); use({ extensions: [underline] });
const html = marked('Not Underlined =Underlined= Not Underlined'); const html = marked('Not Underlined =Underlined= Not Underlined');
expect(html).toBe('<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n'); expect(html).toBe('<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
}); });
@ -268,7 +268,7 @@ describe('use extension', () => {
return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`; return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
} }
}; };
marked.use({ extensions: [descriptionlist, description] }); use({ extensions: [descriptionlist, description] });
const html = marked('A Description List with One Description:\n' const html = marked('A Description List with One Description:\n'
+ ': Topic 1 : Description 1\n' + ': Topic 1 : Description 1\n'
+ ': **Topic 2** : *Description 2*'); + ': **Topic 2** : *Description 2*');
@ -299,7 +299,7 @@ describe('use extension', () => {
return `<u>${token.text}</u>\n`; return `<u>${token.text}</u>\n`;
} }
}; };
marked.use({ sanitize: true, silent: true, extensions: [extension] }); use({ sanitize: true, silent: true, extensions: [extension] });
const html = marked(':test:\ntest\n<div></div>'); const html = marked(':test:\ntest\n<div></div>');
expect(html).toBe('<u>test</u>\n<p>test</p>\n<p>&lt;div&gt;&lt;/div&gt;</p>\n'); expect(html).toBe('<u>test</u>\n<p>test</p>\n<p>&lt;div&gt;&lt;/div&gt;</p>\n');
}); });
@ -336,7 +336,7 @@ describe('use extension', () => {
return false; return false;
} }
}; };
marked.use({ extensions: [fallbackRenderer, extension] }); use({ extensions: [fallbackRenderer, extension] });
const html = marked(':Test:\n\n:test:\n\n:none:'); const html = marked(':Test:\n\n:test:\n\n:none:');
expect(html).toBe('fallbacktest'); expect(html).toBe('fallbacktest');
}); });
@ -379,7 +379,7 @@ describe('use extension', () => {
return false; return false;
} }
}; };
marked.use({ extensions: [extension, extension2] }); use({ extensions: [extension, extension2] });
const html = marked(':Test:\n\n:test:'); const html = marked(':Test:\n\n:test:');
expect(html).toBe('TESTtest'); expect(html).toBe('TESTtest');
}); });
@ -415,7 +415,7 @@ describe('use extension', () => {
} }
}] }]
}; };
marked.use(extension); use(extension);
const html = marked('# extension1\n:extension2:'); const html = marked('# extension1\n:extension2:');
expect(html).toBe('<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n'); expect(html).toBe('<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
}); });
@ -454,7 +454,7 @@ describe('use extension', () => {
} }
} }
}; };
marked.use(walkableDescription); use(walkableDescription);
const html = marked(': Topic 1 : Description 1\n' const html = marked(': Topic 1 : Description 1\n'
+ ': **Topic 2** : *Description 2*'); + ': **Topic 2** : *Description 2*');
expect(html).toBe('<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>' expect(html).toBe('<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>'
@ -588,14 +588,14 @@ used extension2 walked</p>
} }
it('should merge extensions when calling marked.use multiple times', () => { it('should merge extensions when calling marked.use multiple times', () => {
marked.use(createExtension('extension1')); use(createExtension('extension1'));
marked.use(createExtension('extension2')); use(createExtension('extension2'));
runTest(); runTest();
}); });
it('should merge extensions when calling marked.use with multiple extensions', () => { it('should merge extensions when calling marked.use with multiple extensions', () => {
marked.use( use(
createExtension('extension1'), createExtension('extension1'),
createExtension('extension2') createExtension('extension2')
); );
@ -604,7 +604,7 @@ used extension2 walked</p>
}); });
it('should fall back to any extensions with the same name if the first returns false', () => { it('should fall back to any extensions with the same name if the first returns false', () => {
marked.use( use(
createExtension('extension1'), createExtension('extension1'),
createExtension('extension2'), createExtension('extension2'),
createFalseExtension('extension1'), createFalseExtension('extension1'),
@ -663,7 +663,7 @@ used extension2 walked</p>
}, },
headerIds: false headerIds: false
}; };
marked.use(styleTags); use(styleTags);
const html = marked('This is a *paragraph* with blue text. {blue}\n' const html = marked('This is a *paragraph* with blue text. {blue}\n'
+ '# This is a *header* with red text {red}'); + '# This is a *header* with red text {red}');
expect(html).toBe('<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n' expect(html).toBe('<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n'
@ -679,7 +679,7 @@ used extension2 walked</p>
} }
}; };
spyOn(extension.renderer, 'paragraph').and.callThrough(); spyOn(extension.renderer, 'paragraph').and.callThrough();
marked.use(extension); use(extension);
const html = marked('text'); const html = marked('text');
expect(extension.renderer.paragraph).toHaveBeenCalledWith('text'); expect(extension.renderer.paragraph).toHaveBeenCalledWith('text');
expect(html).toBe('extension'); expect(html).toBe('extension');
@ -701,7 +701,7 @@ used extension2 walked</p>
} }
}; };
spyOn(extension.tokenizer, 'paragraph').and.callThrough(); spyOn(extension.tokenizer, 'paragraph').and.callThrough();
marked.use(extension); use(extension);
const html = marked('text'); const html = marked('text');
expect(extension.tokenizer.paragraph).toHaveBeenCalledWith('text'); expect(extension.tokenizer.paragraph).toHaveBeenCalledWith('text');
expect(html).toBe('<p>extension</p>\n'); expect(html).toBe('<p>extension</p>\n');
@ -714,7 +714,7 @@ used extension2 walked</p>
walked++; walked++;
} }
}; };
marked.use(extension); use(extension);
marked('text'); marked('text');
expect(walked).toBe(2); expect(walked).toBe(2);
}); });
@ -726,7 +726,7 @@ used extension2 walked</p>
walked++; walked++;
} }
}; };
marked.use(extension); use(extension);
marked('text', () => { marked('text', () => {
expect(walked).toBe(2); expect(walked).toBe(2);
done(); done();
@ -737,7 +737,7 @@ used extension2 walked</p>
const extension = { const extension = {
headerIds: false headerIds: false
}; };
marked.use(extension); use(extension);
const html = marked('# heading'); const html = marked('# heading');
expect(html).toBe('<h1>heading</h1>\n'); expect(html).toBe('<h1>heading</h1>\n');
}); });
@ -758,8 +758,8 @@ used extension2 walked</p>
token.walkedOnce = true; token.walkedOnce = true;
} }
}; };
marked.use(extension1); use(extension1);
marked.use(extension2); use(extension2);
marked('text'); marked('text');
expect(walkedOnce).toBe(2); expect(walkedOnce).toBe(2);
expect(walkedTwice).toBe(2); expect(walkedTwice).toBe(2);
@ -783,8 +783,8 @@ used extension2 walked</p>
} }
} }
}; };
marked.use(extension1); use(extension1);
marked.use(extension2); use(extension2);
const html = marked(` const html = marked(`
paragraph paragraph
@ -816,8 +816,8 @@ paragraph
} }
} }
}; };
marked.use(extension1); use(extension1);
marked.use(extension2); use(extension2);
const html = marked(` const html = marked(`
paragraph paragraph
@ -832,7 +832,7 @@ original
const extension = { const extension = {
renderer: { renderer: {
heading: () => { heading: () => {
return this.options ? 'arrow options\n' : 'arrow no options\n'; return this && this.options ? 'arrow options\n' : 'arrow no options\n';
}, },
html: function() { html: function() {
return this.options ? 'function options\n' : 'function no options\n'; return this.options ? 'function options\n' : 'function no options\n';
@ -842,7 +842,7 @@ original
} }
} }
}; };
marked.use(extension); use(extension);
const html = marked(` const html = marked(`
# heading # heading
@ -987,9 +987,9 @@ code
br br
br br
`; `;
const tokens = marked.lexer(markdown, { ...marked.getDefaults(), breaks: true }); const tokens = lexer(markdown, { ...getDefaults(), breaks: true });
const tokensSeen = []; const tokensSeen = [];
marked.walkTokens(tokens, (token) => { _walkTokens(tokens, (token) => {
tokensSeen.push([token.type, (token.raw || '').replace(/\n/g, '')]); tokensSeen.push([token.type, (token.raw || '').replace(/\n/g, '')]);
}); });

test/update-specs.js vendored
View File

@ -1,13 +1,13 @@
const fetch = require('node-fetch'); import fetch from 'node-fetch';
const cheerio = require('cheerio'); import { load } from 'cheerio';
const marked = require('../'); import marked from '../';
const htmlDiffer = require('./helpers/html-differ.js'); import { isEqual } from './helpers/html-differ.js';
const fs = require('fs'); import { readdirSync, unlinkSync, writeFileSync } from 'fs';
const path = require('path'); import { join, resolve } from 'path';
function removeFiles(dir) { function removeFiles(dir) {
fs.readdirSync(dir).forEach(file => { readdirSync(dir).forEach(file => {
fs.unlinkSync(path.join(dir, file)); unlinkSync(join(dir, file));
}); });
} }
@ -20,11 +20,11 @@ async function updateCommonmark(dir, options) {
const specs = await res2.json(); const specs = await res2.json();
specs.forEach(spec => { specs.forEach(spec => {
const html = marked(spec.markdown, options); const html = marked(spec.markdown, options);
if (!htmlDiffer.isEqual(html, spec.html)) { if (!isEqual(html, spec.html)) {
spec.shouldFail = true; spec.shouldFail = true;
} }
}); });
fs.writeFileSync(path.resolve(dir, `./commonmark.${version}.json`), JSON.stringify(specs, null, 2) + '\n'); writeFileSync(resolve(dir, `./commonmark.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
console.log(`Saved CommonMark v${version} specs`); console.log(`Saved CommonMark v${version} specs`);
} catch (ex) { } catch (ex) {
console.log(ex); console.log(ex);
@ -35,7 +35,7 @@ async function updateGfm(dir) {
try { try {
const res = await fetch('https://github.github.com/gfm/'); const res = await fetch('https://github.github.com/gfm/');
const html = await res.text(); const html = await res.text();
const $ = cheerio.load(html); const $ = load(html);
const version = $('.version').text().match(/\d+\.\d+/)[0]; const version = $('.version').text().match(/\d+\.\d+/)[0];
if (!version) { if (!version) {
throw new Error('No version found'); throw new Error('No version found');
@ -58,19 +58,19 @@ async function updateGfm(dir) {
specs.forEach(spec => { specs.forEach(spec => {
const html = marked(spec.markdown, { gfm: true, pedantic: false }); const html = marked(spec.markdown, { gfm: true, pedantic: false });
if (!htmlDiffer.isEqual(html, spec.html)) { if (!isEqual(html, spec.html)) {
spec.shouldFail = true; spec.shouldFail = true;
} }
}); });
fs.writeFileSync(path.resolve(dir, `./gfm.${version}.json`), JSON.stringify(specs, null, 2) + '\n'); writeFileSync(resolve(dir, `./gfm.${version}.json`), JSON.stringify(specs, null, 2) + '\n');
console.log(`Saved GFM v${version} specs.`); console.log(`Saved GFM v${version} specs.`);
} catch (ex) { } catch (ex) {
console.log(ex); console.log(ex);
} }
} }
const commonmarkDir = path.resolve(__dirname, './specs/commonmark'); const commonmarkDir = resolve(__dirname, './specs/commonmark');
const gfmDir = path.resolve(__dirname, './specs/gfm'); const gfmDir = resolve(__dirname, './specs/gfm');
removeFiles(commonmarkDir); removeFiles(commonmarkDir);
removeFiles(gfmDir); removeFiles(gfmDir);
updateCommonmark(commonmarkDir, { gfm: false, pedantic: false, headerIds: false }); updateCommonmark(commonmarkDir, { gfm: false, pedantic: false, headerIds: false });

test/vuln-regex.js vendored
View File

@ -1,5 +1,5 @@
const regexp = require('../src/rules.js'); import regexp from '../src/rules.js';
const vulnRegexDetector = require('vuln-regex-detector'); import { test, responses } from 'vuln-regex-detector';
const promises = []; const promises = [];
function findRegexps(name, obj) { function findRegexps(name, obj) {
@ -18,12 +18,12 @@ function findRegexps(name, obj) {
async function testRegexp(name, source) { async function testRegexp(name, source) {
try { try {
const result = await vulnRegexDetector.test(source); const result = await test(source);
if (result === vulnRegexDetector.responses.safe) { if (result === responses.safe) {
console.log(`${name} is safe`); console.log(`${name} is safe`);
return true; return true;
} else if (result === vulnRegexDetector.responses.vulnerable) { } else if (result === responses.vulnerable) {
console.error(`${name} is vulnerable`); console.error(`${name} is vulnerable`);
} else { } else {
console.error(`${name} might be vulnerable: ` + result.toString()); console.error(`${name} might be vulnerable: ` + result.toString());