diff --git a/docs/USING_PRO.md b/docs/USING_PRO.md
index 0119911ff4..012f7634e8 100644
--- a/docs/USING_PRO.md
+++ b/docs/USING_PRO.md
@@ -1,24 +1,71 @@
 ## Extending Marked
 
-To champion the single-responsibility and open/closed principles, we have tried to make it relatively painless to extend marked. If you are looking to add custom functionality, this is the place to start.
+To champion the single-responsibility and open/closed principles, we have tried to make it relatively painless to extend Marked. If you are looking to add custom functionality, this is the place to start.
+## The renderer : `renderer`
+
+### Block-level renderer methods
+
+- **code**(*string* code, *string* infostring, *boolean* escaped)
+- **blockquote**(*string* quote)
+- **html**(*string* html)
+- **heading**(*string* text, *number* level, *string* raw, *Slugger* slugger)
+- **hr**()
+- **list**(*string* body, *boolean* ordered, *number* start)
+- **listitem**(*string* text, *boolean* task, *boolean* checked)
+- **checkbox**(*boolean* checked)
+- **paragraph**(*string* text)
+- **table**(*string* header, *string* body)
+- **tablerow**(*string* content)
+- **tablecell**(*string* content, *object* flags)
+
+### Inline-level renderer methods
+
+- **strong**(*string* text)
+- **em**(*string* text)
+- **codespan**(*string* code)
+- **br**()
+- **del**(*string* text)
+- **link**(*string* href, *string* title, *string* text)
+- **image**(*string* href, *string* title, *string* text)
+- **text**(*string* text)
`slugger` has the `slug` method to create a unique id from value:
@@ -103,20 +161,13 @@ slugger.slug('foo') // foo-4
}
```
-### Inline level renderer methods
+***
-- strong(*string* text)
-- em(*string* text)
-- codespan(*string* code)
-- br()
-- del(*string* text)
-- link(*string* href, *string* title, *string* text)
-- image(*string* href, *string* title, *string* text)
-- text(*string* text)
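As a point of reference while reading the lists above, here is a minimal, hedged sketch (editorial, not part of the diff) of overriding one of these renderer methods through `marked.use`. The `rel="noopener"` detail is an arbitrary illustration; returning `false` defers to the built-in renderer, which is the fallback behavior this PR preserves:

```js
const marked = require('marked');

marked.use({
  renderer: {
    // Inline-level override: decorate absolute links, defer everything else.
    link(href, title, text) {
      if (/^https?:\/\//.test(href)) {
        const titleAttr = title ? ` title="${title}"` : '';
        return `<a href="${href}"${titleAttr} rel="noopener">${text}</a>`;
      }
      return false; // fall back to the default link renderer
    }
  }
});

console.log(marked('[example](https://example.com/)'));
// <p><a href="https://example.com/" rel="noopener">example</a></p>
```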
+## The tokenizer : `tokenizer`
+
+### Block level tokenizer methods
+
+- **space**(*string* src)
+- **code**(*string* src)
+- **fences**(*string* src)
+- **heading**(*string* src)
+- **nptable**(*string* src)
+- **hr**(*string* src)
+- **blockquote**(*string* src)
+- **list**(*string* src)
+- **html**(*string* src)
+- **def**(*string* src)
+- **table**(*string* src)
+- **lheading**(*string* src)
+- **paragraph**(*string* src)
+- **text**(*string* src)
### Inline level tokenizer methods
-- escape(*string* src)
-- tag(*string* src, *bool* inLink, *bool* inRawBlock)
-- link(*string* src)
-- reflink(*string* src, *object* links)
-- emStrong(*string* src, *string* maskedSrc, *string* prevChar)
-- codespan(*string* src)
-- br(*string* src)
-- del(*string* src)
-- autolink(*string* src, *function* mangle)
-- url(*string* src, *function* mangle)
-- inlineText(*string* src, *bool* inRawBlock, *function* smartypants)
+- **escape**(*string* src)
+- **tag**(*string* src, *bool* inLink, *bool* inRawBlock)
+- **link**(*string* src)
+- **reflink**(*string* src, *object* links)
+- **emStrong**(*string* src, *string* maskedSrc, *string* prevChar)
+- **codespan**(*string* src)
+- **br**(*string* src)
+- **del**(*string* src)
+- **autolink**(*string* src, *function* mangle)
+- **url**(*string* src, *function* mangle)
+- **inlineText**(*string* src, *bool* inRawBlock, *function* smartypants)
`mangle` is a method that changes text to HTML character references:
@@ -202,10 +253,14 @@ smartypants('"this ... string"')
// "“this … string”"
```
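The tokenizer methods above can be overridden the same way as renderer methods. A hedged sketch (editorial, not part of the diff), borrowing the familiar `$...$` code-span idea; anything the override does not claim falls back to the default tokenizer via `return false`:

```js
const marked = require('marked');

marked.use({
  tokenizer: {
    // Recognize $...$ as a code span in addition to backticks.
    codespan(src) {
      const match = src.match(/^\$+([^$\n]+?)\$+/);
      if (match) {
        return {
          type: 'codespan',
          raw: match[0],
          text: match[1].trim()
        };
      }
      return false; // defer to the default codespan tokenizer
    }
  }
});

console.log(marked('$ latex code $'));
// <p><code>latex code</code></p>
```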
-## walkTokens
+***
+
+## Custom extensions : `extensions`
+
+You can pass an `extensions` array to `marked.use()`. Each extension object can contain the following properties:
+
+- **name**
+
+  The name of the custom token to be used in the renderer. Must match the `type` of the tokens returned by the tokenizer.
+
+- **level**
+
+  Whether this is a `block`- or `inline`-level tokenizer.
+
+- **start**(*string* src)
+
+  A function that returns the index of the next potential start of the custom token. The index can be the result of a `src.match().index`, or even a simple `src.indexOf()`. Marked will use this function to ensure that it does not skip over any text that should be part of the custom token.
+
+- **tokenizer**(*string* src, *array* tokens)
+
+  A function that reads a string of Markdown text and, if it matches, returns a token. The `tokens` parameter contains the array of tokens that have been generated by the lexer up to that point, and can be used to access the previous token, for instance.
+
+  The return value should be an object with the following parameters:
+
+  - **type**
+
+    A string that matches the `name` parameter of the extension.
+
+  - **raw**
+
+    A string containing all of the text that this token consumes from the source.
+
+  - **tokens** [optional]
+
+    An array of child tokens that will be traversed by the `walkTokens` function by default.
+
+  The tokenizer is called with the lexer as its `this` object; `this.blockTokens(string text)` and `this.inlineTokens(string text)` can be used to generate child tokens for the `tokens` parameter.
+
+- **renderer**(*object* token)
+
+  A function that reads a token and returns the generated HTML output string. The renderer is called with the parser as its `this` object; `this.parse(array tokens)` and `this.parseInline(array tokens)` can be used to turn child tokens into HTML.
+
+- **childTokens** [optional]
+
+  An array of the custom token parameters that should be traversed by the `walkTokens` functions. For instance, if you want to use a second custom parameter to contain child tokens in addition to `tokens`, it could be listed here. If `childTokens` is provided, the `tokens` array will not be walked by default unless it is also included in the `childTokens` array.
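To make the property list above concrete, here is a sketch of the two extension objects the usage example below refers to. It is reconstructed from the `descriptionList` test added later in this diff, so treat the exact regular expressions as illustrative:

```js
const descriptionList = {
  name: 'descriptionList',
  level: 'block',                                     // block-level tokenizer
  start(src) { return src.match(/:[^:\n]/)?.index; }, // start-of-token hint for the lexer
  tokenizer(src, tokens) {
    const rule = /^(?::[^:\n]+:[^:\n]*(?:\n|$))+/;    // one or more ':dt:dd' lines
    const match = rule.exec(src);
    if (match) {
      return {
        type: 'descriptionList',
        raw: match[0],                                // text consumed from the source
        text: match[0].trim(),
        tokens: this.inlineTokens(match[0].trim())    // child tokens for the parser
      };
    }
  },
  renderer(token) {
    return `<dl>${this.parseInline(token.tokens)}\n</dl>`;
  }
};

const description = {
  name: 'description',
  level: 'inline',                                    // inline-level tokenizer
  start(src) { return src.match(/:/)?.index; },
  tokenizer(src) {
    const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
    const match = rule.exec(src);
    if (match) {
      return {
        type: 'description',
        raw: match[0],
        dt: this.inlineTokens(match[1].trim()),       // custom child-token parameters
        dd: this.inlineTokens(match[2].trim())
      };
    }
  },
  renderer(token) {
    return `\n<dt>${this.parseInline(token.dt)}</dt><dd>${this.parseInline(token.dd)}</dd>`;
  }
};
```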
+**Usage:**
+
+```js
+marked.use({ extensions: [descriptionList, description] });
+
+console.log(marked('A Description List:\n'
+  + ':   Topic 1   :  Description 1\n'
+  + ': **Topic 2** : *Description 2*'));
+```
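Assuming the extension objects sketched above, the output would look roughly like this (hedged; the exact whitespace depends on the renderer):

```html
<p>A Description List:</p>
<dl>
<dt>Topic 1</dt><dd>Description 1</dd>
<dt><strong>Topic 2</strong></dt><dd><em>Description 2</em></dd>
</dl>
```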
diff --git a/lib/marked.js b/lib/marked.js
--- a/lib/marked.js
+++ b/lib/marked.js
-        + (escaped ? code : escape$1(code, true))
+        + (escaped ? code : escape$2(code, true))
         + '</code></pre>\n';
     }
 
-    return '<pre><code class="' + this.options.langPrefix + escape$1(lang, true) + '">'
+    return '<pre><code class="' + this.options.langPrefix + escape$2(lang, true) + '">'
-        + (escaped ? code : escape$1(code, true))
+        + (escaped ? code : escape$2(code, true))
         + '</code></pre>\n';
   }
@@ -1974,11 +2032,11 @@ var Renderer_1 = class Renderer {
}
link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl$1(this.options.sanitize, this.options.baseUrl, href);
if (href === null) {
return text;
}
-    let out = '<a href="' + href + '"';
 
       return '<p>An error occurred:</p><pre>'
-        + escape(e.message + '', true)
+        + escape$3(e.message + '', true)
         + '</pre>';
     }
     throw e;
@@ -2494,59 +2562,113 @@ function marked(src, opt, callback) {
 
 marked.options =
 marked.setOptions = function(opt) {
-  merge(marked.defaults, opt);
+  merge$2(marked.defaults, opt);
   changeDefaults(marked.defaults);
   return marked;
 };
 
 marked.getDefaults = getDefaults;
 
-marked.defaults = defaults;
+marked.defaults = defaults$5;
 
 /**
  * Use Extension
  */
 
 marked.use = function(extension) {
-  const opts = merge({}, extension);
-  if (extension.renderer) {
-    const renderer = marked.defaults.renderer || new Renderer();
-    for (const prop in extension.renderer) {
-      const prevRenderer = renderer[prop];
-      renderer[prop] = (...args) => {
-        let ret = extension.renderer[prop].apply(renderer, args);
-        if (ret === false) {
-          ret = prevRenderer.apply(renderer, args);
-        }
-        return ret;
-      };
-    }
-    opts.renderer = renderer;
+  if (!Array.isArray(extension)) { // Wrap in array if not already to unify processing
+    extension = [extension];
   }
-  if (extension.tokenizer) {
-    const tokenizer = marked.defaults.tokenizer || new Tokenizer();
-    for (const prop in extension.tokenizer) {
-      const prevTokenizer = tokenizer[prop];
-      tokenizer[prop] = (...args) => {
-        let ret = extension.tokenizer[prop].apply(tokenizer, args);
-        if (ret === false) {
-          ret = prevTokenizer.apply(tokenizer, args);
-        }
-        return ret;
-      };
-    }
-    opts.tokenizer = tokenizer;
-  }
-  if (extension.walkTokens) {
-    const walkTokens = marked.defaults.walkTokens;
-    opts.walkTokens = (token) => {
-      extension.walkTokens(token);
-      if (walkTokens) {
-        walkTokens(token);
-      }
-    };
-  }
-  marked.setOptions(opts);
+
+  const opts = merge$2({}, ...extension);
+  const extensions = marked.defaults.extensions || { renderers: {}, walkableTokens: {} };
+  let hasExtensions;
+
+  extension.forEach((pack) => {
+    // ==-- Parse "addon" extensions --== //
+    if (pack.extensions) {
+      hasExtensions = true;
+      pack.extensions.forEach((ext) => {
+        if (ext.renderer && ext.name) { // Renderers must have 'name' property
+          extensions.renderers[ext.name] = ext.renderer;
+        }
+        if (ext.walkableTokens && ext.name) { // walkableTokens must have 'name'
+          extensions.walkableTokens[ext.name] = ext.walkableTokens;
+        }
+        if (ext.tokenizer && ext.level) { // Tokenizers must have 'level' property
+          if (extensions[ext.level]) {
+            extensions[ext.level].push(ext.tokenizer);
+          } else {
+            extensions[ext.level] = [ext.tokenizer];
+          }
+          if (ext.start) { // Function to check for start of token
+            if (ext.level === 'block') {
+              if (extensions.startBlock) {
+                extensions.startBlock.push(ext.start);
+              } else {
+                extensions.startBlock = [ext.start];
+              }
+            } else if (ext.level === 'inline') {
+              if (extensions.startInline) {
+                extensions.startInline.push(ext.start);
+              } else {
+                extensions.startInline = [ext.start];
+              }
+            }
+          }
+        }
+      });
+    }
+
+    // ==-- Parse "overwrite" extensions --== //
+    if (pack.renderer) {
+      const renderer = marked.defaults.renderer || new Renderer_1();
+      for (const prop in pack.renderer) {
+        const prevRenderer = renderer[prop];
+        // Replace renderer with func to run extension, but fall back if fail
+        renderer[prop] = (...args) => {
+          let ret = pack.renderer[prop].apply(renderer, args);
+          if (ret === false) {
+            ret = prevRenderer.apply(renderer, args);
+          }
+          return ret;
+        };
+      }
+      opts.renderer = renderer;
+    }
+    if (pack.tokenizer) {
+      const tokenizer = marked.defaults.tokenizer || new Tokenizer_1();
+      for (const prop in pack.tokenizer) {
+        const prevTokenizer = tokenizer[prop];
+        // Replace tokenizer with func to run extension, but fall back if fail
+        tokenizer[prop] = (...args) => {
+          let ret = pack.tokenizer[prop].apply(tokenizer, args);
+          if (ret === false) {
+            ret = prevTokenizer.apply(tokenizer, args);
+          }
+          return ret;
+        };
+      }
+      opts.tokenizer = tokenizer;
+    }
+
+    // ==-- Parse WalkTokens extensions --== //
+    if (pack.walkTokens) {
+      const walkTokens = marked.defaults.walkTokens;
+      opts.walkTokens = (token) => {
+        pack.walkTokens(token);
+        if (walkTokens) {
+          walkTokens(token);
+        }
+      };
+    }
+
+    if (hasExtensions) {
+      opts.extensions = extensions;
+    }
+
+    marked.setOptions(opts);
+  });
 };
 
 /**
@@ -2573,7 +2695,12 @@ marked.walkTokens = function(tokens, callback) {
       break;
     }
     default: {
-      if (token.tokens) {
+      if (marked.defaults?.extensions?.walkableTokens?.[token.type]) { // Walk any extensions
+        marked.defaults?.extensions.walkableTokens[token.type].forEach(function(walkableTokens) {
+          marked.walkTokens(token[walkableTokens], callback);
+        });
+      }
+      if (token.tokens && !marked.defaults?.extensions?.walkableTokens[token.type]?.tokens) {
         marked.walkTokens(token.tokens, callback);
       }
     }
@@ -2594,8 +2721,8 @@ marked.parseInline = function(src, opt) {
       + Object.prototype.toString.call(src) + ', string expected');
   }
 
-  opt = merge({}, marked.defaults, opt || {});
-  checkSanitizeDeprecation(opt);
+  opt = merge$2({}, marked.defaults, opt || {});
+  checkSanitizeDeprecation$1(opt);
 
   try {
     const tokens = Lexer.lexInline(src, opt);
@@ -2607,7 +2734,7 @@ marked.parseInline = function(src, opt) {
     e.message += '\nPlease report this to https://github.com/markedjs/marked.';
     if (opt.silent) {
       return '<p>An error occurred:</p><pre>'
-        + escape(e.message + '', true)
+        + escape$3(e.message + '', true)
         + '</pre>';
     }
     throw e;
diff --git a/src/Lexer.js b/src/Lexer.js
index 6c02ed65e5..03a06960f6 100644
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -123,9 +123,21 @@ module.exports = class Lexer {
     if (this.options.pedantic) {
       src = src.replace(/^ +$/gm, '');
     }
-    let token, i, l, lastToken;
+    let token, i, l, lastToken, cutSrc, lastParagraphClipped;
 
     while (src) {
+      if (this.options?.extensions?.block
+        && this.options.extensions.block.some((extTokenizer) => {
+          if (token = extTokenizer.call(this, src, tokens)) {
+            src = src.substring(token.raw.length);
+            tokens.push(token);
+            return true;
+          }
+          return false;
+        })) {
+        continue;
+      }
+
       // newline
       if (token = this.tokenizer.space(src)) {
         src = src.substring(token.raw.length);
@@ -230,9 +242,30 @@ module.exports = class Lexer {
       }
 
       // top-level paragraph
-      if (top && (token = this.tokenizer.paragraph(src))) {
+      // prevent paragraph consuming extensions by clipping 'src' to extension start
+      cutSrc = src;
+      if (this.options.extensions?.startBlock) {
+        let startIndex = Infinity;
+        const tempSrc = src.slice(1);
+        let tempStart;
+        this.options.extensions.startBlock.forEach(function(getStartIndex) {
+          tempStart = getStartIndex.call(this, tempSrc);
+          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+        });
+        if (startIndex < Infinity && startIndex >= 0) {
+          cutSrc = src.substring(0, startIndex + 1);
+        }
+      }
+      if (top && (token = this.tokenizer.paragraph(cutSrc))) {
+        lastToken = tokens[tokens.length - 1];
+        if (lastParagraphClipped && lastToken.type === 'paragraph') {
+          lastToken.raw += '\n' + token.raw;
+          lastToken.text += '\n' + token.text;
+        } else {
+          tokens.push(token);
+        }
+        lastParagraphClipped = (cutSrc.length !== src.length);
         src = src.substring(token.raw.length);
-        tokens.push(token);
         continue;
       }
 
@@ -332,7 +365,7 @@ module.exports = class Lexer {
    * Lexing/Compiling
    */
   inlineTokens(src, tokens = [], inLink = false, inRawBlock = false) {
-    let token, lastToken;
+    let token, lastToken, cutSrc;
 
     // String with links masked to avoid interference with em and strong
     let maskedSrc = src;
@@ -366,6 +399,19 @@ module.exports = class Lexer {
       }
       keepPrevChar = false;
 
+      // extensions
+      if (this.options?.extensions?.inline
+        && this.options.extensions.inline.some((extTokenizer) => {
+          if (token = extTokenizer.call(this, src, tokens)) {
+            src = src.substring(token.raw.length);
+            tokens.push(token);
+            return true;
+          }
+          return false;
+        })) {
+        continue;
+      }
+
       // escape
       if (token = this.tokenizer.escape(src)) {
         src = src.substring(token.raw.length);
@@ -378,7 +424,7 @@ module.exports = class Lexer {
         src = src.substring(token.raw.length);
         inLink = token.inLink;
         inRawBlock = token.inRawBlock;
-        const lastToken = tokens[tokens.length - 1];
+        lastToken = tokens[tokens.length - 1];
         if (lastToken && token.type === 'text' && lastToken.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
@@ -401,7 +447,7 @@ module.exports = class Lexer {
       // reflink, nolink
       if (token = this.tokenizer.reflink(src, this.tokens.links)) {
         src = src.substring(token.raw.length);
-        const lastToken = tokens[tokens.length - 1];
+        lastToken = tokens[tokens.length - 1];
         if (token.type === 'link') {
           token.tokens = this.inlineTokens(token.text, [], true, inRawBlock);
           tokens.push(token);
@@ -459,7 +505,21 @@ module.exports = class Lexer {
       }
 
       // text
-      if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+      // prevent inlineText consuming extensions by clipping 'src' to extension start
+      cutSrc = src;
+      if (this.options.extensions?.startInline) {
+        let startIndex = Infinity;
+        const tempSrc = src.slice(1);
+        let tempStart;
+        this.options.extensions.startInline.forEach(function(getStartIndex) {
+          tempStart = getStartIndex.call(this, tempSrc);
+          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+        });
+        if (startIndex < Infinity && startIndex >= 0) {
+          cutSrc = src.substring(0, startIndex + 1);
+        }
+      }
+      if (token = this.tokenizer.inlineText(cutSrc, inRawBlock, smartypants)) {
         src = src.substring(token.raw.length);
         if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
           prevChar = token.raw.slice(-1);
diff --git a/src/Parser.js b/src/Parser.js
index 81fcb7da1e..f766269e0b 100644
--- a/src/Parser.js
+++ b/src/Parser.js
@@ -57,11 +57,22 @@ module.exports = class Parser {
       item,
       checked,
       task,
-      checkbox;
+      checkbox,
+      ret;
     const l = tokens.length;
 
     for (i = 0; i < l; i++) {
       token = tokens[i];
+
+      // Run any renderer extensions
+      if (this.options.extensions?.renderers?.[token.type]) {
+        ret = this.options.extensions.renderers[token.type].call(this, token);
+        if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
+          out += ret || '';
+          continue;
+        }
+      }
+
       switch (token.type) {
         case 'space': {
           continue;
@@ -179,6 +190,7 @@ module.exports = class Parser {
           out += top ? this.renderer.paragraph(body) : body;
           continue;
         }
+
         default: {
           const errMsg = 'Token with "' + token.type + '" type was not found.';
           if (this.options.silent) {
@@ -201,11 +213,22 @@ module.exports = class Parser {
     renderer = renderer || this.renderer;
     let out = '',
       i,
-      token;
+      token,
+      ret;
     const l = tokens.length;
 
     for (i = 0; i < l; i++) {
       token = tokens[i];
+
+      // Run any renderer extensions
+      if (this.options.extensions?.renderers?.[token.type]) {
+        ret = this.options.extensions.renderers[token.type].call(this, token);
+        if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
+          out += ret || '';
+          continue;
+        }
+      }
+
       switch (token.type) {
         case 'escape': {
           out += renderer.text(token.text);
diff --git a/src/defaults.js b/src/defaults.js
index fe376563da..a4b451fe2f 100644
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -2,6 +2,7 @@ function getDefaults() {
   return {
     baseUrl: null,
     breaks: false,
+    extensions: null,
     gfm: true,
     headerIds: true,
     headerPrefix: '',
diff --git a/src/marked.js b/src/marked.js
index 0ba08156af..540eb20852 100644
--- a/src/marked.js
+++ b/src/marked.js
@@ -141,46 +141,114 @@ marked.defaults = defaults;
  * Use Extension
  */
 
-marked.use = function(extension) {
-  const opts = merge({}, extension);
-  if (extension.renderer) {
-    const renderer = marked.defaults.renderer || new Renderer();
-    for (const prop in extension.renderer) {
-      const prevRenderer = renderer[prop];
-      renderer[prop] = (...args) => {
-        let ret = extension.renderer[prop].apply(renderer, args);
-        if (ret === false) {
-          ret = prevRenderer.apply(renderer, args);
+marked.use = function(...args) {
+  const opts = merge({}, ...args);
+  const extensions = marked.defaults.extensions || { renderers: {}, childTokens: {} };
+  let hasExtensions;
+
+  args.forEach((pack) => {
+    // ==-- Parse "addon" extensions --== //
+    if (pack.extensions) {
+      hasExtensions = true;
+      pack.extensions.forEach((ext) => {
+        if (!ext.name) {
+          throw new Error('extension name required');
-        return ret;
-      };
+        }
+        if (ext.renderer) { // Renderer extensions
+          const prevRenderer = extensions.renderers?.[ext.name];
+          if (prevRenderer) {
+            // Replace extension with func to run new extension but fall back if false
+            extensions.renderers[ext.name] = function(...args) {
+              let ret = ext.renderer.apply(this, args);
+              if (ret === false) {
+                ret = prevRenderer.apply(this, args);
+              }
+              return ret;
+            };
+          } else {
+            extensions.renderers[ext.name] = ext.renderer;
+          }
+        }
+        if (ext.tokenizer) { // Tokenizer Extensions
+          if (!ext.level || (ext.level !== 'block' && ext.level !== 'inline')) {
+            throw new Error("extension level must be 'block' or 'inline'");
+          }
+          if (extensions[ext.level]) {
+            extensions[ext.level].unshift(ext.tokenizer);
+          } else {
+            extensions[ext.level] = [ext.tokenizer];
+          }
+          if (ext.start) { // Function to check for start of token
+            if (ext.level === 'block') {
+              if (extensions.startBlock) {
+                extensions.startBlock.push(ext.start);
+              } else {
+                extensions.startBlock = [ext.start];
+              }
+            } else if (ext.level === 'inline') {
+              if (extensions.startInline) {
+                extensions.startInline.push(ext.start);
+              } else {
+                extensions.startInline = [ext.start];
+              }
+            }
+          }
+        }
+        if (ext.childTokens) { // Child tokens to be visited by walkTokens
+          extensions.childTokens[ext.name] = ext.childTokens;
+        }
+      });
     }
-    opts.renderer = renderer;
-  }
-  if (extension.tokenizer) {
-    const tokenizer = marked.defaults.tokenizer || new Tokenizer();
-    for (const prop in extension.tokenizer) {
-      const prevTokenizer = tokenizer[prop];
-      tokenizer[prop] = (...args) => {
-        let ret = extension.tokenizer[prop].apply(tokenizer, args);
-        if (ret === false) {
-          ret = prevTokenizer.apply(tokenizer, args);
+
+    // ==-- Parse "overwrite" extensions --== //
+    if (pack.renderer) {
+      const renderer = marked.defaults.renderer || new Renderer();
+      for (const prop in pack.renderer) {
+        const prevRenderer = renderer[prop];
+        // Replace renderer with func to run extension, but fall back if false
+        renderer[prop] = (...args) => {
+          let ret = pack.renderer[prop].apply(renderer, args);
+          if (ret === false) {
+            ret = prevRenderer.apply(renderer, args);
+          }
+          return ret;
+        };
+      }
+      opts.renderer = renderer;
+    }
+    if (pack.tokenizer) {
+      const tokenizer = marked.defaults.tokenizer || new Tokenizer();
+      for (const prop in pack.tokenizer) {
+        const prevTokenizer = tokenizer[prop];
+        // Replace tokenizer with func to run extension, but fall back if false
+        tokenizer[prop] = (...args) => {
+          let ret = pack.tokenizer[prop].apply(tokenizer, args);
+          if (ret === false) {
+            ret = prevTokenizer.apply(tokenizer, args);
+          }
+          return ret;
+        };
+      }
+      opts.tokenizer = tokenizer;
+    }
+
+    // ==-- Parse WalkTokens extensions --== //
+    if (pack.walkTokens) {
+      const walkTokens = marked.defaults.walkTokens;
+      opts.walkTokens = (token) => {
+        pack.walkTokens.call(this, token);
+        if (walkTokens) {
+          walkTokens(token);
         }
-        return ret;
-      };
-    }
-    opts.tokenizer = tokenizer;
-  }
-  if (extension.walkTokens) {
-    const walkTokens = marked.defaults.walkTokens;
-    opts.walkTokens = (token) => {
-      extension.walkTokens(token);
-      if (walkTokens) {
-        walkTokens(token);
-      }
-    };
-  }
-  marked.setOptions(opts);
+      };
+    }
+
+    if (hasExtensions) {
+      opts.extensions = extensions;
+    }
+
+    marked.setOptions(opts);
+  });
 };
 
 /**
@@ -207,7 +275,12 @@ marked.walkTokens = function(tokens, callback) {
       break;
     }
     default: {
-      if (token.tokens) {
+      if (marked.defaults?.extensions?.childTokens?.[token.type]) { // Walk any extensions
+        marked.defaults?.extensions.childTokens[token.type].forEach(function(childTokens) {
+          marked.walkTokens(token[childTokens], callback);
+        });
+      }
+      if (token.tokens && !marked.defaults?.extensions?.childTokens[token.type]) {
         marked.walkTokens(token.tokens, callback);
       }
     }
diff --git a/test/unit/marked-spec.js b/test/unit/marked-spec.js
index 86813cc29e..0845909222 100644
--- a/test/unit/marked-spec.js
+++ b/test/unit/marked-spec.js
@@ -137,6 +137,515 @@ describe('parseInline', () => {
 });
 
 describe('use extension', () => {
+  it('should use custom block tokenizer + renderer extensions', () => {
+    const underline = {
+      name: 'underline',
+      level: 'block',
+      tokenizer(src) {
+        const rule = /^:([^\n]*)(?:\n|$)/;
+        const match = rule.exec(src);
+        if (match) {
+          return {
+            type: 'underline',
+            raw: match[0], // This is the text that you want your token to consume from the source
+            text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
+          };
+        }
+      },
+      renderer(token) {
+        return `<u>${token.text}</u>\n`;
+      }
+    };
+    marked.use({ extensions: [underline] });
+    let html = marked('Not Underlined\n:Underlined\nNot Underlined');
+    expect(html).toBe('<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');
+
+    html = marked('Not Underlined\n\n:Underlined\n\nNot Underlined');
+    expect(html).toBe('<p>Not Underlined</p>\n<u>Underlined</u>\n<p>Not Underlined</p>\n');
+  });
+
+  it('should interrupt paragraphs if using "start" property', () => {
+    const underline = {
+      extensions: [{
+        name: 'underline',
+        level: 'block',
+        start(src) { return src.match(/:/)?.index; },
+        tokenizer(src) {
+          const rule = /^:([^\n]*):(?:\n|$)/;
+          const match = rule.exec(src);
+          if (match) {
+            return {
+              type: 'underline',
+              raw: match[0], // This is the text that you want your token to consume from the source
+              text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
+            };
+          }
+        },
+        renderer(token) {
+          return `<u>${token.text}</u>\n`;
+        }
+      }]
+    };
+    marked.use(underline);
+    const html = marked('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
+    expect(html).toBe('<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
+  });
+
+  it('should use custom inline tokenizer + renderer extensions', () => {
+    const underline = {
+      name: 'underline',
+      level: 'inline',
+      start(src) { return src.match(/=/)?.index; },
+      tokenizer(src) {
+        const rule = /^=([^=]+)=/;
+        const match = rule.exec(src);
+        if (match) {
+          return {
+            type: 'underline',
+            raw: match[0], // This is the text that you want your token to consume from the source
+            text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
+          };
+        }
+      },
+      renderer(token) {
+        return `<u>${token.text}</u>`;
+      }
+    };
+    marked.use({ extensions: [underline] });
+    const html = marked('Not Underlined =Underlined= Not Underlined');
+    expect(html).toBe('<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
+  });
+
+  it('should handle interacting block and inline extensions', () => {
+    const descriptionlist = {
+      name: 'descriptionList',
+      level: 'block',
+      start(src) { return src.match(/:[^:\n]/)?.index; },
+      tokenizer(src, tokens) {
+        const rule = /^(?::[^:\n]+:[^:\n]*(?:\n|$))+/;
+        const match = rule.exec(src);
+        if (match) {
+          return {
+            type: 'descriptionList',
+            raw: match[0], // This is the text that you want your token to consume from the source
+            text: match[0].trim(), // You can add additional properties to your tokens to pass along to the renderer
+            tokens: this.inlineTokens(match[0].trim())
+          };
+        }
+      },
+      renderer(token) {
+        return `<dl>${this.parseInline(token.tokens)}\n</dl>`;
+      }
+    };
+
+    const description = {
+      name: 'description',
+      level: 'inline',
+      start(src) { return src.match(/:/)?.index; },
+      tokenizer(src) {
+        const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
+        const match = rule.exec(src);
+        if (match) {
+          return {
+            type: 'description',
+            raw: match[0],
+            dt: this.inlineTokens(match[1].trim()),
+            dd: this.inlineTokens(match[2].trim())
+          };
+        }
+      },
+      renderer(token) {
+        return `\n<dt>${this.parseInline(token.dt)}</dt><dd>${this.parseInline(token.dd)}</dd>`;
+      }
+    };
+    marked.use({ extensions: [descriptionlist, description] });
+    const html = marked('A Description List with One Description:\n'
+                      + ':   Topic 1   :  Description 1\n'
+                      + 'test <div></div>\n');
+    expect(html).toBe('<p>A Description List with One Description:</p>\n'
+                    + '<dl>'
+                    + '\n<dt>Topic 1</dt><dd>Description 1</dd>'
+                    + '\n</dl>'
+                    + '<p>test <div></div></p>\n');
+  });
+
+  it('should handle renderers that return false', () => {
+    const extension = {
+      name: 'test',
+      level: 'block',
+      tokenizer(src) {
+        const rule = /^:([^\n]*):(?:\n|$)/;
+        const match = rule.exec(src);
+        if (match) {
+          return {
+            type: 'test',
+            raw: match[0], // This is the text that you want your token to consume from the source
+            text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
+          };
+        }
+      },
+      renderer(token) {
+        if (token.text === 'test') {
+          return 'test';
+        }
+        return false;
+      }
+    };
+    const fallbackRenderer = {
+      name: 'test',
+      level: 'block',
+      renderer(token) {
+        if (token.text === 'Test') {
+          return 'fallback';
+        }
+        return false;
+      }
+    };
+    marked.use({ extensions: [fallbackRenderer, extension] });
+    const html = marked(':Test:\n\n:test:\n\n:none:');
+    expect(html).toBe('fallbacktest');
+  });
+
+  it('should fall back when tokenizers return false', () => {
+    const extension = {
+      name: 'test',
+      level: 'block',
+      tokenizer(src) {
+        const rule = /^:([^\n]*):(?:\n|$)/;
+        const match = rule.exec(src);
+        if (match) {
+          return {
+            type: 'test',
+            raw: match[0], // This is the text that you want your token to consume from the source
+            text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
+          };
+        }
+        return false;
+      },
+      renderer(token) {
+        return token.text;
+      }
+    };
+    const extension2 = {
+      name: 'test',
+      level: 'block',
+      tokenizer(src) {
+        const rule = /^:([^\n]*):(?:\n|$)/;
+        const match = rule.exec(src);
+        if (match) {
+          if (match[1].match(/^[A-Z]/)) {
+            return {
+              type: 'test',
+              raw: match[0],
+              text: match[1].trim().toUpperCase()
+            };
+          }
+        }
+        return false;
+      }
+    };
+    marked.use({ extensions: [extension, extension2] });
+    const html = marked(':Test:\n\n:test:');
+    expect(html).toBe('TESTtest');
+  });
+
+  it('should override original tokenizer/renderer with same name, but fall back if returns false', () => {
+    const extension = {
+      extensions: [{
+        name: 'heading',
+        level: 'block',
+        tokenizer(src) {
+          return false; // fall back to default `heading` tokenizer
+        },
+        renderer(token) {
+          return '<h' + token.depth + '>' + token.text + ' RENDERER EXTENSION</h' + token.depth + '>\n';
+        }
+      },
+      {
+        name: 'code',
+        level: 'block',
+        tokenizer(src) {
+          const rule = /^:([^\n]*):(?:\n|$)/;
+          const match = rule.exec(src);
+          if (match) {
+            return {
+              type: 'code',
+              raw: match[0],
+              text: match[1].trim() + ' TOKENIZER EXTENSION'
+            };
+          }
+        },
+        renderer(token) {
+          return false; // fall back to default `code` renderer
+        }
+      }]
+    };
+    marked.use(extension);
+    const html = marked('# extension1\n:extension2:');
+    expect(html).toBe('<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
+  });
+
+ it('should walk only specified child tokens', () => {
+ const walkableDescription = {
+ extensions: [{
+ name: 'walkableDescription',
+ level: 'inline',
+ start(src) { return src.match(/:/)?.index; },
+ tokenizer(src, tokens) {
+ const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
+ const match = rule.exec(src);
+ if (match) {
+ return {
+ type: 'walkableDescription',
+ raw: match[0], // This is the text that you want your token to consume from the source
+ dt: this.inlineTokens(match[1].trim()), // You can add additional properties to your tokens to pass along to the renderer
+ dd: this.inlineTokens(match[2].trim()),
+ tokens: this.inlineTokens('unwalked')
+ };
+ }
+ },
+ renderer(token) {
+        renderer(token) {
+          return `\n<dt>${this.parseInline(token.dt)}</dt><dd>${this.parseInline(token.dd)}</dd>`;
+        },
+        childTokens: ['dd', 'dt']
+      }],
+      walkTokens(token) {
+        if (token.type === 'text') {
+          token.text += ' walked';
+        }
+      }
+    };
+    marked.use(walkableDescription);
+    const html = marked(':  used extension1  : used extension2');
+    expect(html).toBe('<p>\n<dt>used extension1 walked</dt>'
+                    + '<dd>used extension2 walked</dd></p>\n');
+  });
+
+    const html = marked('This is a paragraph with blue text.\n'
+                      + '
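The diff is truncated at this point, but the tests above already pin down the public surface this PR adds. As a closing illustration (hedged and editorial; the names are invented for the sketch, and the marked v2 API from this PR is assumed), this is how a consumer would wire a custom extension and `walkTokens` together with a single `marked.use` call:

```js
const marked = require('marked');

// A block-level extension, mirroring the `underline` extension in the tests.
const underline = {
  name: 'underline',
  level: 'block',
  start(src) { return src.match(/:/)?.index; },
  tokenizer(src) {
    const match = /^:([^\n]*):(?:\n|$)/.exec(src);
    if (match) {
      return { type: 'underline', raw: match[0], text: match[1].trim() };
    }
  },
  renderer(token) {
    return `<u>${token.text}</u>\n`;
  }
};

marked.use({
  extensions: [underline],
  // walkTokens sees the custom token before it is rendered.
  walkTokens(token) {
    if (token.type === 'underline') {
      token.text += '!';
    }
  }
});

console.log(marked(':Underlined:'));
// <u>Underlined!</u>
```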