// 2023-10-03 11:14:36 +08:00
/**
 * marked v4.3.0 - a markdown parser
 * Copyright (c) 2011-2023, Christopher Jeffrey. (MIT Licensed)
 * https://github.com/markedjs/marked
 */
/**
 * DO NOT EDIT THIS FILE
 * The code in this file is generated from files in ./src/
 */
'use strict' ;
/**
 * Babel class helper: install each property descriptor in `props` on `target`.
 * Descriptors default to non-enumerable, are forced configurable, and data
 * descriptors are made writable.
 * @param {object} target
 * @param {Array<object>} props - descriptors carrying a `key` field
 */
function _defineProperties(target, props) {
  for (var idx = 0; idx < props.length; idx++) {
    var desc = props[idx];
    desc.enumerable = desc.enumerable || false;
    desc.configurable = true;
    if ("value" in desc) desc.writable = true;
    Object.defineProperty(target, _toPropertyKey(desc.key), desc);
  }
}
/**
 * Babel class helper: attach prototype and static descriptors to
 * `Constructor`, then lock its `prototype` binding (writable: false).
 * @returns {Function} the same Constructor, for chaining
 */
function _createClass(Constructor, protoProps, staticProps) {
  if (protoProps) {
    _defineProperties(Constructor.prototype, protoProps);
  }
  if (staticProps) {
    _defineProperties(Constructor, staticProps);
  }
  Object.defineProperty(Constructor, "prototype", { writable: false });
  return Constructor;
}
/**
 * Babel helper: shallow-merge own enumerable properties of every source
 * argument into the first argument (later sources win). Picks native
 * Object.assign when available and installs the chosen implementation
 * on first call.
 * @returns {object} the mutated target
 */
function _extends() {
  _extends = Object.assign ? Object.assign.bind() : function (target) {
    for (var s = 1; s < arguments.length; s++) {
      var src = arguments[s];
      for (var prop in src) {
        if (Object.prototype.hasOwnProperty.call(src, prop)) {
          target[prop] = src[prop];
        }
      }
    }
    return target;
  };
  return _extends.apply(this, arguments);
}
/**
 * Babel helper: best-effort conversion of a non-array value to an array.
 * Handles strings, Map/Set, Arguments, and typed arrays; returns undefined
 * for anything falsy or unrecognized.
 */
function _unsupportedIterableToArray(o, minLen) {
  if (!o) return;
  if (typeof o === "string") return _arrayLikeToArray(o, minLen);
  var tag = Object.prototype.toString.call(o).slice(8, -1);
  if (tag === "Object" && o.constructor) {
    tag = o.constructor.name;
  }
  if (tag === "Map" || tag === "Set") {
    return Array.from(o);
  }
  if (tag === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(tag)) {
    return _arrayLikeToArray(o, minLen);
  }
}
/**
 * Babel helper: copy the first `len` elements of an array-like into a new
 * array. `len` defaults to (and is capped at) `arr.length`.
 */
function _arrayLikeToArray(arr, len) {
  if (len == null || len > arr.length) {
    len = arr.length;
  }
  var out = new Array(len);
  for (var i = 0; i < len; i++) {
    out[i] = arr[i];
  }
  return out;
}
/**
 * Babel helper backing transpiled for...of loops: returns a zero-argument
 * "step" function that yields { value, done } records for `o`.
 * @throws {TypeError} when `o` is not iterable and not array-like
 */
function _createForOfIteratorHelperLoose(o, allowArrayLike) {
  var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"];
  if (it) {
    it = it.call(o);
    return it.next.bind(it);
  }
  // Fall back to index-based stepping for arrays / array-likes.
  if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") {
    if (it) o = it;
    var idx = 0;
    return function () {
      if (idx >= o.length) {
        return { done: true };
      }
      return { done: false, value: o[idx++] };
    };
  }
  throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
/**
 * Babel helper implementing the ToPrimitive abstract operation:
 * honors Symbol.toPrimitive, otherwise coerces via String/Number.
 * @throws {TypeError} when a Symbol.toPrimitive method returns an object
 */
function _toPrimitive(input, hint) {
  if (typeof input !== "object" || input === null) {
    return input;
  }
  var exotic = input[Symbol.toPrimitive];
  if (exotic !== undefined) {
    var result = exotic.call(input, hint || "default");
    if (typeof result !== "object") {
      return result;
    }
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  var coerce = hint === "string" ? String : Number;
  return coerce(input);
}
/**
 * Babel helper: coerce `arg` to a valid property key (symbol or string).
 */
function _toPropertyKey(arg) {
  var key = _toPrimitive(arg, "string");
  if (typeof key === "symbol") {
    return key;
  }
  return String(key);
}
/**
 * Build a fresh object holding marked's default option values.
 * @returns {object} a new defaults object (each call returns a new instance)
 */
function getDefaults() {
  var defaults = {
    async: false,
    baseUrl: null,
    breaks: false,
    extensions: null,
    gfm: true,
    headerIds: true,
    headerPrefix: '',
    highlight: null,
    hooks: null,
    langPrefix: 'language-',
    mangle: true,
    pedantic: false,
    renderer: null,
    sanitize: false,
    sanitizer: null,
    silent: false,
    smartypants: false,
    tokenizer: null,
    walkTokens: null,
    xhtml: false
  };
  return defaults;
}
exports . defaults = getDefaults ( ) ;
function changeDefaults ( newDefaults ) {
exports . defaults = newDefaults ;
}
/**
 * Helpers
 */
var escapeTest = /[&<>"']/;
var escapeReplace = new RegExp(escapeTest.source, 'g');
var escapeTestNoEncode = /[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/;
var escapeReplaceNoEncode = new RegExp(escapeTestNoEncode.source, 'g');
// FIX: the replacement values must be the HTML entities; they had been
// corrupted into the literal characters themselves (e.g. '&': '&'), which
// made escape() a no-op and reopened the XSS vector it exists to close.
var escapeReplacements = {
  '&': '&amp;',
  '<': '&lt;',
  '>': '&gt;',
  '"': '&quot;',
  "'": '&#39;'
};
var getEscapeReplacement = function getEscapeReplacement(ch) {
  return escapeReplacements[ch];
};

/**
 * Escape HTML-special characters in `html`.
 * @param {string} html
 * @param {boolean} encode - when true, escape every special char including
 *   '&' that already begins an entity; when falsy, existing entities are left intact.
 * @returns {string}
 */
function escape(html, encode) {
  if (encode) {
    if (escapeTest.test(html)) {
      return html.replace(escapeReplace, getEscapeReplacement);
    }
  } else {
    if (escapeTestNoEncode.test(html)) {
      return html.replace(escapeReplaceNoEncode, getEscapeReplacement);
    }
  }
  return html;
}
var unescapeTest = /&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig;

/**
 * Decode decimal and hex numeric character references. Of the named
 * entities, only &colon; is decoded; every other named entity is stripped.
 * @param {string} html
 * @returns {string}
 */
function unescape(html) {
  // explicitly match decimal, hex, and named HTML entities
  return html.replace(unescapeTest, function (_, entity) {
    var name = entity.toLowerCase();
    if (name === 'colon') {
      return ':';
    }
    if (name.charAt(0) !== '#') {
      return '';
    }
    var isHex = name.charAt(1) === 'x';
    var code = isHex ? parseInt(name.substring(2), 16) : +name.substring(1);
    return String.fromCharCode(code);
  });
}
var caret = /(^|[^\[])\^/g;

/**
 * Tiny builder for composing regexes out of named placeholders.
 * @param {string | RegExp} regex - template pattern
 * @param {string} opt - flags for the final RegExp
 * @returns {{replace: Function, getRegex: Function}} chainable builder
 */
function edit(regex, opt) {
  var pattern = typeof regex === 'string' ? regex : regex.source;
  var flags = opt || '';
  var builder = {
    // Substitute `name` in the pattern with `val` (string or RegExp),
    // stripping non-escaped leading ^ anchors from `val` first.
    replace: function replace(name, val) {
      var source = val.source || val;
      source = source.replace(caret, '$1');
      pattern = pattern.replace(name, source);
      return builder;
    },
    getRegex: function getRegex() {
      return new RegExp(pattern, flags);
    }
  };
  return builder;
}
var nonWordAndColonTest = /[^\w:]/g;
var originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;

/**
 * Sanitize/normalize a link destination.
 * @param {boolean} sanitize - when true, reject javascript:/vbscript:/data: URLs
 * @param {string} base - optional base URL to resolve relative hrefs against
 * @param {string} href
 * @returns {string|null} cleaned href, or null when rejected or unencodable
 */
function cleanUrl(sanitize, base, href) {
  if (sanitize) {
    var prot;
    try {
      // Collapse the href to its protocol-ish prefix for the scheme check.
      prot = decodeURIComponent(unescape(href)).replace(nonWordAndColonTest, '').toLowerCase();
    } catch (e) {
      return null;
    }
    var isDangerous = prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0 || prot.indexOf('data:') === 0;
    if (isDangerous) {
      return null;
    }
  }
  if (base && !originIndependentUrl.test(href)) {
    href = resolveUrl(base, href);
  }
  try {
    // encodeURI double-encodes existing % signs; undo that.
    href = encodeURI(href).replace(/%25/g, '%');
  } catch (e) {
    return null;
  }
  return href;
}
var baseUrls = {};
var justDomain = /^[^:]+:\/*[^/]*$/;
var protocol = /^([^:]+:)[\s\S]*$/;
var domain = /^([^:]+:\/*[^/]*)[\s\S]*$/;

/**
 * Resolve `href` against `base` (RFC 3986-ish, purely string based).
 * Normalized bases are memoized in `baseUrls`.
 * @param {string} base
 * @param {string} href
 * @returns {string}
 */
function resolveUrl(base, href) {
  var cacheKey = ' ' + base;
  if (!baseUrls[cacheKey]) {
    // we can ignore everything in base after the last slash of its path component,
    // but we might need to add _that_
    // https://tools.ietf.org/html/rfc3986#section-3
    baseUrls[cacheKey] = justDomain.test(base) ? base + '/' : rtrim(base, '/', true);
  }
  base = baseUrls[cacheKey];
  var relativeBase = base.indexOf(':') === -1;
  if (href.substring(0, 2) === '//') {
    // protocol-relative href: keep only base's scheme
    return relativeBase ? href : base.replace(protocol, '$1') + href;
  }
  if (href.charAt(0) === '/') {
    // root-relative href: keep only base's origin
    return relativeBase ? href : base.replace(domain, '$1') + href;
  }
  return base + href;
}
// A stand-in "regex" whose exec() always reports no match (undefined);
// used to disable rules.
var noopTest = {
  exec: function () {}
};
/**
 * Split one table row into its cells, honoring escaped pipes, and pad or
 * truncate to `count` cells when `count` is given.
 * @param {string} tableRow
 * @param {number} [count] - expected number of columns
 * @returns {string[]}
 */
function splitCells(tableRow, count) {
  // ensure that every cell-delimiting pipe has a space
  // before it to distinguish it from an escaped pipe
  var spaced = tableRow.replace(/\|/g, function (match, offset, str) {
    var escaped = false;
    var pos = offset;
    while (--pos >= 0 && str[pos] === '\\') {
      escaped = !escaped;
    }
    // odd number of preceding backslashes means the pipe is escaped
    return escaped ? '|' : ' |';
  });
  var cells = spaced.split(/ \|/);
  // First/last cell in a row cannot be empty if it has no leading/trailing pipe
  if (!cells[0].trim()) {
    cells.shift();
  }
  if (cells.length > 0 && !cells[cells.length - 1].trim()) {
    cells.pop();
  }
  if (cells.length > count) {
    cells.splice(count);
  } else {
    while (cells.length < count) {
      cells.push('');
    }
  }
  for (var i = 0; i < cells.length; i++) {
    // leading or trailing whitespace is ignored per the gfm spec
    cells[i] = cells[i].trim().replace(/\\\|/g, '|');
  }
  return cells;
}
/**
 * Remove trailing 'c's. Equivalent to str.replace(/c*$/, '').
 * /c*$/ is vulnerable to REDOS.
 *
 * @param {string} str
 * @param {string} c
 * @param {boolean} invert Remove suffix of non-c chars instead. Default falsey.
 */
function rtrim(str, c, invert) {
  var len = str.length;
  if (len === 0) {
    return '';
  }
  // Count how many trailing chars satisfy the (possibly inverted) condition.
  var cut = 0;
  while (cut < len) {
    var ch = str.charAt(len - cut - 1);
    var matches = invert ? ch !== c : ch === c;
    if (!matches) {
      break;
    }
    cut++;
  }
  return str.slice(0, len - cut);
}
/**
 * Index in `str` of the bracket that closes an (implicitly already-open)
 * `b[0]`/`b[1]` pair, honoring backslash escapes.
 * @param {string} str
 * @param {string} b - two-char string: opening then closing bracket
 * @returns {number} index of the closing bracket, or -1 when unbalanced
 */
function findClosingBracket(str, b) {
  if (str.indexOf(b[1]) === -1) {
    return -1;
  }
  var depth = 0;
  for (var pos = 0; pos < str.length; pos++) {
    var ch = str[pos];
    if (ch === '\\') {
      pos++; // skip the escaped character
    } else if (ch === b[0]) {
      depth++;
    } else if (ch === b[1]) {
      depth--;
      if (depth < 0) {
        return pos;
      }
    }
  }
  return -1;
}
/**
 * Emit a console warning when the deprecated sanitize/sanitizer options
 * are enabled (suppressed by opt.silent).
 * @param {object} opt
 */
function checkSanitizeDeprecation(opt) {
  if (!opt || !opt.sanitize || opt.silent) {
    return;
  }
  console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
}
// copied from https://stackoverflow.com/a/5450113/806777
/**
 * Repeat `pattern` `count` times via binary doubling (O(log n) concats).
 * @param {string} pattern
 * @param {number} count
 * @returns {string} '' when count < 1
 */
function repeatString(pattern, count) {
  if (count < 1) {
    return '';
  }
  var acc = '';
  var remaining = count;
  var chunk = pattern;
  while (remaining > 1) {
    if (remaining & 1) {
      acc += chunk;
    }
    remaining >>= 1;
    chunk += chunk;
  }
  return acc + chunk;
}
/**
 * Build a `link` or `image` token from a link-rule capture.
 * @param {Array} cap - regex capture: cap[0] full match, cap[1] link text
 * @param {object} link - { href, title }
 * @param {string} raw
 * @param {object} lexer - used for state.inLink and inlineTokens()
 */
function outputLink(cap, link, raw, lexer) {
  var href = link.href;
  var title = link.title ? escape(link.title) : null;
  var text = cap[1].replace(/\\([\[\]])/g, '$1'); // un-escape \[ and \]
  var isImage = cap[0].charAt(0) === '!';
  if (isImage) {
    return {
      type: 'image',
      raw: raw,
      href: href,
      title: title,
      text: escape(text)
    };
  }
  // Inline children of a link must not themselves contain links.
  lexer.state.inLink = true;
  var token = {
    type: 'link',
    raw: raw,
    href: href,
    title: title,
    text: text,
    tokens: lexer.inlineTokens(text)
  };
  lexer.state.inLink = false;
  return token;
}
/**
 * For fenced code blocks whose opening fence is indented, strip that same
 * indentation from every sufficiently-indented line of the code text.
 * @param {string} raw - the raw fenced block (used to measure the fence indent)
 * @param {string} text - the code content
 * @returns {string}
 */
function indentCodeCompensation(raw, text) {
  var fenceMatch = raw.match(/^(\s+)(?:```)/);
  if (fenceMatch === null) {
    return text;
  }
  var fenceIndent = fenceMatch[1];
  var lines = text.split('\n').map(function (line) {
    var lineIndentMatch = line.match(/^\s+/);
    if (lineIndentMatch === null) {
      return line;
    }
    // Only dedent lines indented at least as far as the fence itself.
    var lineIndent = lineIndentMatch[0];
    return lineIndent.length >= fenceIndent.length ? line.slice(fenceIndent.length) : line;
  });
  return lines.join('\n');
}
/**
 * Tokenizer
 */
// Turns regex captures into token objects; one method per block/inline rule.
// `this.rules` and `this.lexer` are attached externally (by the Lexer,
// outside this block) before any tokenizer method runs.
var Tokenizer = /*#__PURE__*/ function () {
  function Tokenizer(options) {
    // Fall back to the module-wide defaults when no options are given.
    this.options = options || exports.defaults;
  }
  var _proto = Tokenizer.prototype;
// space: consume a run of blank lines into a single 'space' token.
_proto.space = function space(src) {
  var match = this.rules.block.newline.exec(src);
  if (!match || match[0].length === 0) {
    return;
  }
  return {
    type: 'space',
    raw: match[0]
  };
};
// code: an indented (4-space) code block; strips the leading indent.
_proto.code = function code(src) {
  var match = this.rules.block.code.exec(src);
  if (!match) {
    return;
  }
  var text = match[0].replace(/^ {1,4}/gm, '');
  return {
    type: 'code',
    raw: match[0],
    codeBlockStyle: 'indented',
    // pedantic mode keeps trailing newlines
    text: this.options.pedantic ? text : rtrim(text, '\n')
  };
};
// fences: a ``` / ~~~ fenced code block with an optional info string.
_proto.fences = function fences(src) {
  var match = this.rules.block.fences.exec(src);
  if (!match) {
    return;
  }
  var raw = match[0];
  var text = indentCodeCompensation(raw, match[3] || '');
  var lang = match[2] ? match[2].trim().replace(this.rules.inline._escapes, '$1') : match[2];
  return {
    type: 'code',
    raw: raw,
    lang: lang,
    text: text
  };
};
// heading: an ATX (#) heading; strips closing #s per CommonMark rules.
_proto.heading = function heading(src) {
  var match = this.rules.block.heading.exec(src);
  if (!match) {
    return;
  }
  var text = match[2].trim();
  // remove trailing #s
  if (/#$/.test(text)) {
    var trimmed = rtrim(text, '#');
    if (this.options.pedantic) {
      text = trimmed.trim();
    } else if (!trimmed || / $/.test(trimmed)) {
      // CommonMark requires space before trailing #s
      text = trimmed.trim();
    }
  }
  return {
    type: 'heading',
    raw: match[0],
    depth: match[1].length,
    text: text,
    tokens: this.lexer.inline(text)
  };
};
// hr: a thematic break (---, ***, ___).
_proto.hr = function hr(src) {
  var match = this.rules.block.hr.exec(src);
  if (!match) {
    return;
  }
  return {
    type: 'hr',
    raw: match[0]
  };
};
// blockquote: strips the leading '>' markers and re-lexes the body as
// top-level block content.
_proto.blockquote = function blockquote(src) {
  var match = this.rules.block.blockquote.exec(src);
  if (!match) {
    return;
  }
  var text = match[0].replace(/^ *>[ \t]?/gm, '');
  // Lex the quoted body with top-level semantics, then restore state.
  var previousTop = this.lexer.state.top;
  this.lexer.state.top = true;
  var tokens = this.lexer.blockTokens(text);
  this.lexer.state.top = previousTop;
  return {
    type: 'blockquote',
    raw: match[0],
    tokens: tokens,
    text: text
  };
};
_proto . list = function list ( src ) {
var cap = this . rules . block . list . exec ( src ) ;
if ( cap ) {
var raw , istask , ischecked , indent , i , blankLine , endsWithBlankLine , line , nextLine , rawLine , itemContents , endEarly ;
var bull = cap [ 1 ] . trim ( ) ;
var isordered = bull . length > 1 ;
var list = {
type : 'list' ,
raw : '' ,
ordered : isordered ,
start : isordered ? + bull . slice ( 0 , - 1 ) : '' ,
loose : false ,
items : [ ]
} ;
bull = isordered ? "\\d{1,9}\\" + bull . slice ( - 1 ) : "\\" + bull ;
if ( this . options . pedantic ) {
bull = isordered ? bull : '[*+-]' ;
}
// Get next list item
var itemRegex = new RegExp ( "^( {0,3}" + bull + ")((?:[\t ][^\\n]*)?(?:\\n|$))" ) ;
// Check if current bullet point can start a new List Item
while ( src ) {
endEarly = false ;
if ( ! ( cap = itemRegex . exec ( src ) ) ) {
break ;
}
if ( this . rules . block . hr . test ( src ) ) {
// End list if bullet was actually HR (possibly move into itemRegex?)
break ;
}
raw = cap [ 0 ] ;
src = src . substring ( raw . length ) ;
line = cap [ 2 ] . split ( '\n' , 1 ) [ 0 ] . replace ( /^\t+/ , function ( t ) {
return ' ' . repeat ( 3 * t . length ) ;
} ) ;
nextLine = src . split ( '\n' , 1 ) [ 0 ] ;
if ( this . options . pedantic ) {
indent = 2 ;
itemContents = line . trimLeft ( ) ;
} else {
indent = cap [ 2 ] . search ( /[^ ]/ ) ; // Find first non-space char
indent = indent > 4 ? 1 : indent ; // Treat indented code blocks (> 4 spaces) as having only 1 indent
itemContents = line . slice ( indent ) ;
indent += cap [ 1 ] . length ;
}
blankLine = false ;
if ( ! line && /^ *$/ . test ( nextLine ) ) {
// Items begin with at most one blank line
raw += nextLine + '\n' ;
src = src . substring ( nextLine . length + 1 ) ;
endEarly = true ;
}
if ( ! endEarly ) {
var nextBulletRegex = new RegExp ( "^ {0," + Math . min ( 3 , indent - 1 ) + "}(?:[*+-]|\\d{1,9}[.)])((?:[ \t][^\\n]*)?(?:\\n|$))" ) ;
var hrRegex = new RegExp ( "^ {0," + Math . min ( 3 , indent - 1 ) + "}((?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$)" ) ;
var fencesBeginRegex = new RegExp ( "^ {0," + Math . min ( 3 , indent - 1 ) + "}(?:```|~~~)" ) ;
var headingBeginRegex = new RegExp ( "^ {0," + Math . min ( 3 , indent - 1 ) + "}#" ) ;
// Check if following lines should be included in List Item
while ( src ) {
rawLine = src . split ( '\n' , 1 ) [ 0 ] ;
nextLine = rawLine ;
// Re-align to follow commonmark nesting rules
if ( this . options . pedantic ) {
nextLine = nextLine . replace ( /^ {1,4}(?=( {4})*[^ ])/g , ' ' ) ;
}
// End list item if found code fences
if ( fencesBeginRegex . test ( nextLine ) ) {
break ;
}
// End list item if found start of new heading
if ( headingBeginRegex . test ( nextLine ) ) {
break ;
}
// End list item if found start of new bullet
if ( nextBulletRegex . test ( nextLine ) ) {
break ;
}
// Horizontal rule found
if ( hrRegex . test ( src ) ) {
break ;
}
if ( nextLine . search ( /[^ ]/ ) >= indent || ! nextLine . trim ( ) ) {
// Dedent if possible
itemContents += '\n' + nextLine . slice ( indent ) ;
} else {
// not enough indentation
if ( blankLine ) {
break ;
}
// paragraph continuation unless last line was a different block level element
if ( line . search ( /[^ ]/ ) >= 4 ) {
// indented code block
break ;
}
if ( fencesBeginRegex . test ( line ) ) {
break ;
}
if ( headingBeginRegex . test ( line ) ) {
break ;
}
if ( hrRegex . test ( line ) ) {
break ;
}
itemContents += '\n' + nextLine ;
}
if ( ! blankLine && ! nextLine . trim ( ) ) {
// Check if current line is blank
blankLine = true ;
}
raw += rawLine + '\n' ;
src = src . substring ( rawLine . length + 1 ) ;
line = nextLine . slice ( indent ) ;
}
}
if ( ! list . loose ) {
// If the previous item ended with a blank line, the list is loose
if ( endsWithBlankLine ) {
list . loose = true ;
} else if ( /\n *\n *$/ . test ( raw ) ) {
endsWithBlankLine = true ;
}
}
// Check for task list items
if ( this . options . gfm ) {
istask = /^\[[ xX]\] / . exec ( itemContents ) ;
if ( istask ) {
ischecked = istask [ 0 ] !== '[ ] ' ;
itemContents = itemContents . replace ( /^\[[ xX]\] +/ , '' ) ;
}
}
list . items . push ( {
type : 'list_item' ,
raw : raw ,
task : ! ! istask ,
checked : ischecked ,
loose : false ,
text : itemContents
} ) ;
list . raw += raw ;
}
// Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic
list . items [ list . items . length - 1 ] . raw = raw . trimRight ( ) ;
list . items [ list . items . length - 1 ] . text = itemContents . trimRight ( ) ;
list . raw = list . raw . trimRight ( ) ;
var l = list . items . length ;
// Item child tokens handled here at end because we needed to have the final item to trim it first
for ( i = 0 ; i < l ; i ++ ) {
this . lexer . state . top = false ;
list . items [ i ] . tokens = this . lexer . blockTokens ( list . items [ i ] . text , [ ] ) ;
if ( ! list . loose ) {
// Check if list should be loose
var spacers = list . items [ i ] . tokens . filter ( function ( t ) {
return t . type === 'space' ;
} ) ;
var hasMultipleLineBreaks = spacers . length > 0 && spacers . some ( function ( t ) {
return /\n.*\n/ . test ( t . raw ) ;
} ) ;
list . loose = hasMultipleLineBreaks ;
}
}
// Set all items to loose if list is loose
if ( list . loose ) {
for ( i = 0 ; i < l ; i ++ ) {
list . items [ i ] . loose = true ;
}
}
return list ;
}
} ;
// html: a raw block-level HTML chunk; downgraded to a paragraph when
// options.sanitize is set.
_proto.html = function html(src) {
  var match = this.rules.block.html.exec(src);
  if (!match) {
    return;
  }
  var token = {
    type: 'html',
    raw: match[0],
    // pre-formatted content keeps its whitespace unless a custom sanitizer runs
    pre: !this.options.sanitizer && (match[1] === 'pre' || match[1] === 'script' || match[1] === 'style'),
    text: match[0]
  };
  if (this.options.sanitize) {
    var text = this.options.sanitizer ? this.options.sanitizer(match[0]) : escape(match[0]);
    token.type = 'paragraph';
    token.text = text;
    token.tokens = this.lexer.inline(text);
  }
  return token;
};
// def: a link reference definition: [tag]: href "title".
_proto.def = function def(src) {
  var match = this.rules.block.def.exec(src);
  if (!match) {
    return;
  }
  // tags are case-insensitive with collapsed internal whitespace
  var tag = match[1].toLowerCase().replace(/\s+/g, ' ');
  var href = match[2] ? match[2].replace(/^<(.*)>$/, '$1').replace(this.rules.inline._escapes, '$1') : '';
  var title = match[3] ? match[3].substring(1, match[3].length - 1).replace(this.rules.inline._escapes, '$1') : match[3];
  return {
    type: 'def',
    tag: tag,
    raw: match[0],
    href: href,
    title: title
  };
};
// table: a GFM pipe table. Yields nothing when the header row and the
// delimiter row disagree on column count (text then falls through to
// other rules).
_proto.table = function table(src) {
  var cap = this.rules.block.table.exec(src);
  if (!cap) {
    return;
  }
  var item = {
    type: 'table',
    header: splitCells(cap[1]).map(function (c) {
      return { text: c };
    }),
    align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
    rows: cap[3] && cap[3].trim() ? cap[3].replace(/\n[ \t]*$/, '').split('\n') : []
  };
  if (item.header.length !== item.align.length) {
    return;
  }
  item.raw = cap[0];
  // Normalize each delimiter spec to 'left' / 'right' / 'center' / null.
  for (var a = 0; a < item.align.length; a++) {
    var spec = item.align[a];
    if (/^ *-+: *$/.test(spec)) {
      item.align[a] = 'right';
    } else if (/^ *:-+: *$/.test(spec)) {
      item.align[a] = 'center';
    } else if (/^ *:-+ *$/.test(spec)) {
      item.align[a] = 'left';
    } else {
      item.align[a] = null;
    }
  }
  // Split each body row into cells, padded/truncated to the header width.
  for (var r = 0; r < item.rows.length; r++) {
    item.rows[r] = splitCells(item.rows[r], item.header.length).map(function (c) {
      return { text: c };
    });
  }
  // parse child tokens inside headers and cells
  for (var h = 0; h < item.header.length; h++) {
    item.header[h].tokens = this.lexer.inline(item.header[h].text);
  }
  for (var j = 0; j < item.rows.length; j++) {
    var row = item.rows[j];
    for (var k = 0; k < row.length; k++) {
      row[k].tokens = this.lexer.inline(row[k].text);
    }
  }
  return item;
};
// lheading: a setext heading (text underlined with = or -).
_proto.lheading = function lheading(src) {
  var match = this.rules.block.lheading.exec(src);
  if (!match) {
    return;
  }
  return {
    type: 'heading',
    raw: match[0],
    // '=' underline is an h1, '-' underline an h2
    depth: match[2].charAt(0) === '=' ? 1 : 2,
    text: match[1],
    tokens: this.lexer.inline(match[1])
  };
};
// paragraph: a run of plain text lines; a single trailing newline is
// excluded from the token text.
_proto.paragraph = function paragraph(src) {
  var match = this.rules.block.paragraph.exec(src);
  if (!match) {
    return;
  }
  var text = match[1];
  if (text.charAt(text.length - 1) === '\n') {
    text = text.slice(0, -1);
  }
  return {
    type: 'paragraph',
    raw: match[0],
    text: text,
    tokens: this.lexer.inline(text)
  };
};
// text: fallback block-level text token.
_proto.text = function text(src) {
  var match = this.rules.block.text.exec(src);
  if (!match) {
    return;
  }
  return {
    type: 'text',
    raw: match[0],
    text: match[0],
    tokens: this.lexer.inline(match[0])
  };
};
// escape: a backslash escape of a punctuation character; the escaped
// character itself is HTML-escaped for output.
_proto.escape = function escape$1(src) {
  var match = this.rules.inline.escape.exec(src);
  if (!match) {
    return;
  }
  return {
    type: 'escape',
    raw: match[0],
    text: escape(match[1])
  };
};
// tag: inline raw HTML; toggles the lexer's inLink / inRawBlock state as
// <a> / <pre|code|kbd|script> tags open and close.
_proto.tag = function tag(src) {
  var match = this.rules.inline.tag.exec(src);
  if (!match) {
    return;
  }
  var state = this.lexer.state;
  if (!state.inLink && /^<a /i.test(match[0])) {
    state.inLink = true;
  } else if (state.inLink && /^<\/a>/i.test(match[0])) {
    state.inLink = false;
  }
  if (!state.inRawBlock && /^<(pre|code|kbd|script)(\s|>)/i.test(match[0])) {
    state.inRawBlock = true;
  } else if (state.inRawBlock && /^<\/(pre|code|kbd|script)(\s|>)/i.test(match[0])) {
    state.inRawBlock = false;
  }
  var text = match[0];
  if (this.options.sanitize) {
    text = this.options.sanitizer ? this.options.sanitizer(match[0]) : escape(match[0]);
  }
  return {
    type: this.options.sanitize ? 'text' : 'html',
    raw: match[0],
    inLink: state.inLink,
    inRawBlock: state.inRawBlock,
    text: text
  };
};
_proto . link = function link ( src ) {
var cap = this . rules . inline . link . exec ( src ) ;
if ( cap ) {
var trimmedUrl = cap [ 2 ] . trim ( ) ;
if ( ! this . options . pedantic && /^</ . test ( trimmedUrl ) ) {
// commonmark requires matching angle brackets
if ( ! />$/ . test ( trimmedUrl ) ) {
return ;
}
// ending angle bracket cannot be escaped
var rtrimSlash = rtrim ( trimmedUrl . slice ( 0 , - 1 ) , '\\' ) ;
if ( ( trimmedUrl . length - rtrimSlash . length ) % 2 === 0 ) {
return ;
}
} else {
// find closing parenthesis
var lastParenIndex = findClosingBracket ( cap [ 2 ] , '()' ) ;
if ( lastParenIndex > - 1 ) {
var start = cap [ 0 ] . indexOf ( '!' ) === 0 ? 5 : 4 ;
var linkLen = start + cap [ 1 ] . length + lastParenIndex ;
cap [ 2 ] = cap [ 2 ] . substring ( 0 , lastParenIndex ) ;
cap [ 0 ] = cap [ 0 ] . substring ( 0 , linkLen ) . trim ( ) ;
cap [ 3 ] = '' ;
}
}
var href = cap [ 2 ] ;
var title = '' ;
if ( this . options . pedantic ) {
// split pedantic href and title
var link = /^([^'"]*[^\s])\s+(['"])(.*)\2/ . exec ( href ) ;
if ( link ) {
href = link [ 1 ] ;
title = link [ 3 ] ;
}
} else {
title = cap [ 3 ] ? cap [ 3 ] . slice ( 1 , - 1 ) : '' ;
}
href = href . trim ( ) ;
if ( /^</ . test ( href ) ) {
if ( this . options . pedantic && ! />$/ . test ( trimmedUrl ) ) {
// pedantic allows starting angle bracket without ending angle bracket
href = href . slice ( 1 ) ;
} else {
href = href . slice ( 1 , - 1 ) ;
}
}
return outputLink ( cap , {
href : href ? href . replace ( this . rules . inline . _escapes , '$1' ) : href ,
title : title ? title . replace ( this . rules . inline . _escapes , '$1' ) : title
} , cap [ 0 ] , this . lexer ) ;
}
} ;
// reflink: a reference-style ([text][ref]) or shortcut ([ref]) link; when
// the reference is undefined, emits the leading char as a text token so
// the lexer can move past it.
_proto.reflink = function reflink(src, links) {
  var cap = this.rules.inline.reflink.exec(src) || this.rules.inline.nolink.exec(src);
  if (!cap) {
    return;
  }
  var linkKey = (cap[2] || cap[1]).replace(/\s+/g, ' ');
  var link = links[linkKey.toLowerCase()];
  if (!link) {
    var text = cap[0].charAt(0);
    return {
      type: 'text',
      raw: text,
      text: text
    };
  }
  return outputLink(cap, link, cap[0], this.lexer);
};
// emStrong: emphasis (*em*, _em_) and strong (**strong**, __strong__),
// following CommonMark delimiter-run rules.
// @param {string} src - source starting at the left delimiter
// @param {string} maskedSrc - src with other inline constructs masked out
// @param {string} prevChar - the character immediately before src ('' at start)
_proto.emStrong = function emStrong(src, maskedSrc, prevChar) {
  if (prevChar === void 0) {
    prevChar = '';
  }
  var match = this.rules.inline.emStrong.lDelim.exec(src);
  if (!match) return;
  // _ can't be between two alphanumerics. \p{L}\p{N} includes non-english alphabet/numbers as well
  // FIX: the transpiled source had expanded \p{L}\p{N} into a huge explicit
  // character class which arrived corrupted/truncated (and had lost the
  // `return;`); reconstructed here with equivalent Unicode property escapes.
  if (match[3] && prevChar.match(/[\p{L}\p{N}]/u)) return;
  var nextChar = match[1] || match[2] || '';
  if (!nextChar || nextChar && (prevChar === '' || this.rules.inline.punctuation.exec(prevChar))) {
    var lLength = match[0].length - 1;
    var rDelim,
      rLength,
      delimTotal = lLength,
      midDelimTotal = 0;
    var endReg = match[0][0] === '*' ? this.rules.inline.emStrong.rDelimAst : this.rules.inline.emStrong.rDelimUnd;
    // rDelim* are /g regexes: reset stateful lastIndex before scanning.
    endReg.lastIndex = 0;
    // Clip maskedSrc to same section of string as src (move to lexer?)
    maskedSrc = maskedSrc.slice(-1 * src.length + lLength);
    while ((match = endReg.exec(maskedSrc)) != null) {
      rDelim = match[1] || match[2] || match[3] || match[4] || match[5] || match[6];
      if (!rDelim) continue; // skip single * in __abc*abc__
      rLength = rDelim.length;
      if (match[3] || match[4]) {
        // found another Left Delim
        delimTotal += rLength;
        continue;
      } else if (match[5] || match[6]) {
        // either Left or Right Delim
        if (lLength % 3 && !((lLength + rLength) % 3)) {
          midDelimTotal += rLength;
          continue; // CommonMark Emphasis Rules 9-10
        }
      }
      delimTotal -= rLength;
      if (delimTotal > 0) continue; // Haven't found enough closing delimiters
      // Remove extra characters. *a*** -> *a*
      rLength = Math.min(rLength, rLength + delimTotal + midDelimTotal);
      var raw = src.slice(0, lLength + match.index + (match[0].length - rDelim.length) + rLength);
      // Create `em` if smallest delimiter has odd char count. *a***
      if (Math.min(lLength, rLength) % 2) {
        var _text = raw.slice(1, -1);
        return {
          type: 'em',
          raw: raw,
          text: _text,
          tokens: this.lexer.inlineTokens(_text)
        };
      }
      // Create 'strong' if smallest delimiter has even char count. **a***
      var text = raw.slice(2, -2);
      return {
        type: 'strong',
        raw: raw,
        text: text,
        tokens: this.lexer.inlineTokens(text)
      };
    }
  }
};
_proto.codespan = function codespan(src) {
  // Tokenize an inline code span (`code`).
  var match = this.rules.inline.code.exec(src);
  if (!match) {
    return;
  }
  // Newlines inside a code span render as spaces.
  var content = match[2].replace(/\n/g, ' ');
  // Strip one space from each end when the span is space-padded on both
  // sides and contains at least one non-space character.
  var padded = /^ /.test(content) && / $/.test(content);
  if (padded && /[^ ]/.test(content)) {
    content = content.substring(1, content.length - 1);
  }
  return {
    type: 'codespan',
    raw: match[0],
    text: escape(content, true)
  };
};
_proto.br = function br(src) {
  // Tokenize a hard line break (two trailing spaces or a backslash).
  var match = this.rules.inline.br.exec(src);
  if (!match) {
    return;
  }
  return {
    type: 'br',
    raw: match[0]
  };
};
_proto.del = function del(src) {
  // Tokenize GFM strikethrough (~~text~~); `del` is a no-op regex unless
  // the GFM rule set is active.
  var match = this.rules.inline.del.exec(src);
  if (!match) {
    return;
  }
  return {
    type: 'del',
    raw: match[0],
    text: match[2],
    tokens: this.lexer.inlineTokens(match[2])
  };
};
_proto.autolink = function autolink(src, mangle) {
  // Tokenize <scheme:...> and <email> autolinks.
  var cap = this.rules.inline.autolink.exec(src);
  if (!cap) {
    return;
  }
  var isEmail = cap[2] === '@';
  // Email addresses are optionally obfuscated via mangle().
  var text = isEmail
    ? escape(this.options.mangle ? mangle(cap[1]) : cap[1])
    : escape(cap[1]);
  var href = isEmail ? 'mailto:' + text : text;
  return {
    type: 'link',
    raw: cap[0],
    text: text,
    href: href,
    tokens: [{
      type: 'text',
      raw: text,
      text: text
    }]
  };
};
_proto.url = function url(src, mangle) {
  // Tokenize bare GFM autolinks (http://, www., or a bare email address).
  var cap = this.rules.inline.url.exec(src);
  if (!cap) {
    return;
  }
  var text, href;
  if (cap[2] === '@') {
    // Bare email address.
    text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
    href = 'mailto:' + text;
  } else {
    // Extended autolink path validation: repeatedly backpedal over trailing
    // punctuation / unbalanced parens until the match stops shrinking.
    var previous;
    do {
      previous = cap[0];
      cap[0] = this.rules.inline._backpedal.exec(cap[0])[0];
    } while (previous !== cap[0]);
    text = escape(cap[0]);
    href = cap[1] === 'www.' ? 'http://' + cap[0] : cap[0];
  }
  return {
    type: 'link',
    raw: cap[0],
    text: text,
    href: href,
    tokens: [{
      type: 'text',
      raw: text,
      text: text
    }]
  };
};
_proto.inlineText = function inlineText(src, smartypants) {
  // Tokenize plain inline text up to the next potential inline construct.
  var cap = this.rules.inline.text.exec(src);
  if (!cap) {
    return;
  }
  var text;
  if (this.lexer.state.inRawBlock) {
    // Inside a raw HTML block: pass through untouched unless sanitizing.
    if (this.options.sanitize) {
      text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
    } else {
      text = cap[0];
    }
  } else {
    // Optionally apply smart punctuation before escaping.
    text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
  }
  return {
    type: 'text',
    raw: cap[0],
    text: text
  };
};
return Tokenizer ;
} ( ) ;
/ * *
* Block - Level Grammar
* /
var block = {
  // Several patterns below contain placeholder words (bull, comment, tag,
  // label, title, hr, heading, ...) that are substituted via edit() further
  // down before use. `noopTest` entries never match and act as "disabled".
  newline: /^(?: *(?:\n|$))+/,
  code: /^( {4}[^\n]+(?:\n(?: *(?:\n|$))*)?)+/,
  fences: /^ {0,3}(`{3,}(?=[^`\n]*(?:\n|$))|~{3,})([^\n]*)(?:\n|$)(?:|([\s\S]*?)(?:\n|$))(?: {0,3}\1[~`]* *(?=\n|$)|$)/,
  hr: /^ {0,3}((?:-[\t ]*){3,}|(?:_[ \t]*){3,}|(?:\*[ \t]*){3,})(?:\n+|$)/,
  heading: /^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/,
  blockquote: /^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/,
  list: /^( {0,3}bull)([ \t][^\n]+?)?(?:\n|$)/,
  // HTML blocks: the numbered alternatives mirror the HTML-block cases of
  // the CommonMark spec.
  html: '^ {0,3}(?:' // optional indentation
  + '<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:</\\1>[^\\n]*\\n+|$)' // (1)
  + '|comment[^\\n]*(\\n+|$)' // (2)
  + '|<\\?[\\s\\S]*?(?:\\?>\\n*|$)' // (3)
  + '|<![A-Z][\\s\\S]*?(?:>\\n*|$)' // (4)
  + '|<!\\[CDATA\\[[\\s\\S]*?(?:\\]\\]>\\n*|$)' // (5)
  + '|</?(tag)(?: +|\\n|/?>)[\\s\\S]*?(?:(?:\\n *)+\\n|$)' // (6)
  + '|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)' // (7) open tag
  + '|</(?!script|pre|style|textarea)[a-z][\\w-]*\\s*>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n *)+\\n|$)' // (7) closing tag
  + ')',
  def: /^ {0,3}\[(label)\]: *(?:\n *)?([^<\s][^\s]*|<.*?>)(?:(?: +(?:\n *)?| *\n *)(title))? *(?:\n+|$)/,
  table: noopTest,
  lheading: /^((?:.|\n(?!\n))+?)\n {0,3}(=+|-+) *(?:\n+|$)/,
  // regex template, placeholders will be replaced according to different paragraph
  // interruption rules of commonmark and the original markdown spec:
  _paragraph: /^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html|table| +\n)[^\n]+)*)/,
  text: /^[^\n]+/
};
// Underscore-prefixed entries are building blocks, not matched directly.
block._label = /(?!\s*\])(?:\\.|[^\[\]\\])+/;
block._title = /(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/;
block.def = edit(block.def).replace('label', block._label).replace('title', block._title).getRegex();
block.bullet = /(?:[*+-]|\d{1,9}[.)])/;
block.listItemStart = edit(/^( *)(bull) */).replace('bull', block.bullet).getRegex();
block.list = edit(block.list).replace(/bull/g, block.bullet).replace('hr', '\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))').replace('def', '\\n+(?=' + block.def.source + ')').getRegex();
block._tag = 'address|article|aside|base|basefont|blockquote|body|caption' + '|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption' + '|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe' + '|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option' + '|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr' + '|track|ul';
block._comment = /<!--(?!-?>)[\s\S]*?(?:-->|$)/;
block.html = edit(block.html, 'i').replace('comment', block._comment).replace('tag', block._tag).replace('attribute', / +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/).getRegex();
block.paragraph = edit(block._paragraph).replace('hr', block.hr).replace('heading', ' {0,3}#{1,6} ').replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs
.replace('|table', '').replace('blockquote', ' {0,3}>').replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n').replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)').replace('tag', block._tag) // pars can be interrupted by type (6) html blocks
.getRegex();
block.blockquote = edit(block.blockquote).replace('paragraph', block.paragraph).getRegex();
/**
 * Normal Block Grammar
 */
block.normal = _extends({}, block);
/**
 * GFM Block Grammar (adds tables)
 */
block.gfm = _extends({}, block.normal, {
  table: '^ *([^\\n ].*\\|.*)\\n' // Header
  + ' {0,3}(?:\\| *)?(:?-+:? *(?:\\| *:?-+:? *)*)(?:\\| *)?' // Align
  + '(?:\\n((?:(?! *\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)' // Cells
});
block.gfm.table = edit(block.gfm.table).replace('hr', block.hr).replace('heading', ' {0,3}#{1,6} ').replace('blockquote', ' {0,3}>').replace('code', ' {4}[^\\n]').replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n').replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)').replace('tag', block._tag) // tables can be interrupted by type (6) html blocks
.getRegex();
block.gfm.paragraph = edit(block._paragraph).replace('hr', block.hr).replace('heading', ' {0,3}#{1,6} ').replace('|lheading', '') // setex headings don't interrupt commonmark paragraphs
.replace('table', block.gfm.table) // interrupt paragraphs with table
.replace('blockquote', ' {0,3}>').replace('fences', ' {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n').replace('list', ' {0,3}(?:[*+-]|1[.)]) ') // only lists starting from 1 can interrupt
.replace('html', '</?(?:tag)(?: +|\\n|/?>)|<(?:script|pre|style|textarea|!--)').replace('tag', block._tag) // pars can be interrupted by type (6) html blocks
.getRegex();
/**
 * Pedantic grammar (original John Gruber's loose markdown specification)
 */
block.pedantic = _extends({}, block.normal, {
  html: edit('^ *(?:comment *(?:\\n|\\s*$)' + '|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)' // closed tag
  + '|<tag(?:"[^"]*"|\'[^\']*\'|\\s[^\'"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))').replace('comment', block._comment).replace(/tag/g, '(?!(?:' + 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub' + '|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)' + '\\b)\\w+(?!:|[^\\w\\s@]*@)\\b').getRegex(),
  def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? *(?:\n+|$)/,
  heading: /^(#{1,6})(.*)(?:\n+|$)/,
  fences: noopTest,
  // fences not supported
  lheading: /^(.+?)\n {0,3}(=+|-+) *(?:\n+|$)/,
  paragraph: edit(block.normal._paragraph).replace('hr', block.hr).replace('heading', ' *#{1,6} *[^\n]').replace('lheading', block.lheading).replace('blockquote', ' {0,3}>').replace('|fences', '').replace('|list', '').replace('|html', '').getRegex()
});
/ * *
* Inline - Level Grammar
* /
var inline = {
  // As with `block`, placeholder words (scheme, email, comment, attribute,
  // label, href, title, ref, punct, ...) are substituted via edit() below.
  escape: /^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,
  autolink: /^<(scheme:[^\s\x00-\x1f<>]*|email)>/,
  url: noopTest,
  tag: '^comment' + '|^</[a-zA-Z][\\w:-]*\\s*>' // self-closing tag
  + '|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>' // open tag
  + '|^<\\?[\\s\\S]*?\\?>' // processing instruction, e.g. <?php ?>
  + '|^<![a-zA-Z]+\\s[\\s\\S]*?>' // declaration, e.g. <!DOCTYPE html>
  + '|^<!\\[CDATA\\[[\\s\\S]*?\\]\\]>',
  // CDATA section
  link: /^!?\[(label)\]\(\s*(href)(?:\s+(title))?\s*\)/,
  reflink: /^!?\[(label)\]\[(ref)\]/,
  nolink: /^!?\[(ref)\](?:\[\])?/,
  reflinkSearch: 'reflink|nolink(?!\\()',
  emStrong: {
    lDelim: /^(?:\*+(?:([punct_])|[^\s*]))|^_+(?:([punct*])|([^\s_]))/,
    // (1) and (2) can only be a Right Delimiter. (3) and (4) can only be Left. (5) and (6) can be either Left or Right.
    // () Skip orphan inside strong () Consume to delim (1) #*** (2) a***#, a*** (3) #***a, ***a (4) ***# (5) #***# (6) a***a
    rDelimAst: /^(?:[^_*\\]|\\.)*?\_\_(?:[^_*\\]|\\.)*?\*(?:[^_*\\]|\\.)*?(?=\_\_)|(?:[^*\\]|\\.)+(?=[^*])|[punct_](\*+)(?=[\s]|$)|(?:[^punct*_\s\\]|\\.)(\*+)(?=[punct_\s]|$)|[punct_\s](\*+)(?=[^punct*_\s])|[\s](\*+)(?=[punct_])|[punct_](\*+)(?=[punct_])|(?:[^punct*_\s\\]|\\.)(\*+)(?=[^punct*_\s])/,
    rDelimUnd: /^(?:[^_*\\]|\\.)*?\*\*(?:[^_*\\]|\\.)*?\_(?:[^_*\\]|\\.)*?(?=\*\*)|(?:[^_\\]|\\.)+(?=[^_])|[punct*](\_+)(?=[\s]|$)|(?:[^punct*_\s\\]|\\.)(\_+)(?=[punct*\s]|$)|[punct*\s](\_+)(?=[^punct*_\s])|[\s](\_+)(?=[punct*])|[punct*](\_+)(?=[punct*])/ // ^- Not allowed for _
  },
  code: /^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,
  br: /^( {2,}|\\)\n(?!\s*$)/,
  del: noopTest,
  text: /^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\<!\[`*_]|\b_|$)|[^ ](?= {2,}\n)))/,
  punctuation: /^([\spunctuation])/
};
// list of punctuation marks from CommonMark spec
// without * and _ to handle the different emphasis markers * and _
inline._punctuation = '!"#$%&\'()+\\-.,/:;<=>?@\\[\\]`^{|}~';
inline.punctuation = edit(inline.punctuation).replace(/punctuation/g, inline._punctuation).getRegex();
// sequences em should skip over [title](link), `code`, <html>
inline.blockSkip = /\[[^\]]*?\]\([^\)]*?\)|`[^`]*?`|<[^>]*?>/g;
// lookbehind is not available on Safari as of version 16
// inline.escapedEmSt = /(?<=(?:^|[^\\)(?:\\[^])*)\\[*_]/g;
inline.escapedEmSt = /(?:^|[^\\])(?:\\\\)*\\[*_]/g;
inline._comment = edit(block._comment).replace('(?:-->|$)', '-->').getRegex();
inline.emStrong.lDelim = edit(inline.emStrong.lDelim).replace(/punct/g, inline._punctuation).getRegex();
inline.emStrong.rDelimAst = edit(inline.emStrong.rDelimAst, 'g').replace(/punct/g, inline._punctuation).getRegex();
inline.emStrong.rDelimUnd = edit(inline.emStrong.rDelimUnd, 'g').replace(/punct/g, inline._punctuation).getRegex();
inline._escapes = /\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g;
inline._scheme = /[a-zA-Z][a-zA-Z0-9+.-]{1,31}/;
inline._email = /[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/;
inline.autolink = edit(inline.autolink).replace('scheme', inline._scheme).replace('email', inline._email).getRegex();
inline._attribute = /\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/;
inline.tag = edit(inline.tag).replace('comment', inline._comment).replace('attribute', inline._attribute).getRegex();
inline._label = /(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/;
inline._href = /<(?:\\.|[^\n<>\\])+>|[^\s\x00-\x1f]*/;
inline._title = /"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/;
inline.link = edit(inline.link).replace('label', inline._label).replace('href', inline._href).replace('title', inline._title).getRegex();
inline.reflink = edit(inline.reflink).replace('label', inline._label).replace('ref', block._label).getRegex();
inline.nolink = edit(inline.nolink).replace('ref', block._label).getRegex();
inline.reflinkSearch = edit(inline.reflinkSearch, 'g').replace('reflink', inline.reflink).replace('nolink', inline.nolink).getRegex();
/**
 * Normal Inline Grammar
 */
inline.normal = _extends({}, inline);
/**
 * Pedantic Inline Grammar (legacy emphasis handling)
 */
inline.pedantic = _extends({}, inline.normal, {
  strong: {
    start: /^__|\*\*/,
    middle: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
    endAst: /\*\*(?!\*)/g,
    endUnd: /__(?!_)/g
  },
  em: {
    start: /^_|\*/,
    middle: /^()\*(?=\S)([\s\S]*?\S)\*(?!\*)|^_(?=\S)([\s\S]*?\S)_(?!_)/,
    endAst: /\*(?!\*)/g,
    endUnd: /_(?!_)/g
  },
  link: edit(/^!?\[(label)\]\((.*?)\)/).replace('label', inline._label).getRegex(),
  reflink: edit(/^!?\[(label)\]\s*\[([^\]]*)\]/).replace('label', inline._label).getRegex()
});
/**
 * GFM Inline Grammar (autolinks, strikethrough)
 */
inline.gfm = _extends({}, inline.normal, {
  escape: edit(inline.escape).replace('])', '~|])').getRegex(),
  _extended_email: /[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/,
  url: /^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,
  _backpedal: /(?:[^?!.,:;*_'"~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_'"~)]+(?!$))+/,
  del: /^(~~?)(?=[^\s~])([\s\S]*?[^\s~])\1(?=[^~]|$)/,
  text: /^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\<!\[`*~_]|\b_|https?:\/\/|ftp:\/\/|www\.|$)|[^ ](?= {2,}\n)|[^a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-](?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)))/
});
inline.gfm.url = edit(inline.gfm.url, 'i').replace('email', inline.gfm._extended_email).getRegex();
/**
 * GFM + Line Breaks Inline Grammar (single trailing space breaks lines)
 */
inline.breaks = _extends({}, inline.gfm, {
  br: edit(inline.br).replace('{2,}', '*').getRegex(),
  text: edit(inline.gfm.text).replace('\\b_', '\\b_| {2,}\\n').replace(/\{2,\}/g, '*').getRegex()
});
/ * *
* smartypants text replacement
* @ param { string } text
* /
/**
 * smartypants text replacement: converts plain ASCII punctuation into
 * typographic equivalents. The rules are applied strictly in order.
 * @param {string} text
 */
function smartypants(text) {
  var rules = [
    [/---/g, "\u2014"], // em-dashes
    [/--/g, "\u2013"], // en-dashes
    [/(^|[-\u2014/(\[{"\s])'/g, "$1\u2018"], // opening singles
    [/'/g, "\u2019"], // closing singles & apostrophes
    [/(^|[-\u2014/(\[{\u2018\s])"/g, "$1\u201C"], // opening doubles
    [/"/g, "\u201D"], // closing doubles
    [/\.{3}/g, "\u2026"] // ellipses
  ];
  var result = text;
  for (var i = 0; i < rules.length; i++) {
    result = result.replace(rules[i][0], rules[i][1]);
  }
  return result;
}
/ * *
* mangle email addresses
* @ param { string } text
* /
/**
 * mangle email addresses: encode every character as an HTML numeric entity,
 * randomly choosing hex or decimal form for each one (light obfuscation
 * against address harvesters).
 * @param {string} text
 */
function mangle(text) {
  var encoded = '';
  for (var index = 0; index < text.length; index++) {
    var code = text.charCodeAt(index);
    // Roughly half the characters become hex entities ("&#x6d;"),
    // the rest decimal ("&#109;").
    var body = Math.random() > 0.5 ? 'x' + code.toString(16) : code;
    encoded += '&#' + body + ';';
  }
  return encoded;
}
/ * *
* Block Lexer
* /
var Lexer = /*#__PURE__*/function () {
  /**
   * Turns a markdown string into a token tree. Block-level tokenization runs
   * first; inline tokenization of each block's text is deferred through
   * `inlineQueue` and drained at the end of lex(), so reference links defined
   * anywhere in the document are known before inline lexing starts.
   */
  function Lexer(options) {
    // Token list; the `links` map collects reference-link definitions
    // keyed by label (prototype-less object to avoid key collisions).
    this.tokens = [];
    this.tokens.links = Object.create(null);
    this.options = options || exports.defaults;
    this.options.tokenizer = this.options.tokenizer || new Tokenizer();
    this.tokenizer = this.options.tokenizer;
    this.tokenizer.options = this.options;
    this.tokenizer.lexer = this;
    // Deferred inline work: entries of { src, tokens } pushed by inline().
    this.inlineQueue = [];
    this.state = {
      inLink: false,
      inRawBlock: false,
      top: true
    };
    // Select the rule set: pedantic wins over gfm; breaks only applies
    // on top of gfm.
    var rules = {
      block: block.normal,
      inline: inline.normal
    };
    if (this.options.pedantic) {
      rules.block = block.pedantic;
      rules.inline = inline.pedantic;
    } else if (this.options.gfm) {
      rules.block = block.gfm;
      if (this.options.breaks) {
        rules.inline = inline.breaks;
      } else {
        rules.inline = inline.gfm;
      }
    }
    this.tokenizer.rules = rules;
  }
  /**
   * Expose Rules
   */
  /**
   * Static Lex Method
   */
  Lexer.lex = function lex(src, options) {
    var lexer = new Lexer(options);
    return lexer.lex(src);
  }
  /**
   * Static Lex Inline Method
   */;
  Lexer.lexInline = function lexInline(src, options) {
    var lexer = new Lexer(options);
    return lexer.inlineTokens(src);
  }
  /**
   * Preprocessing
   */;
  var _proto = Lexer.prototype;
  _proto.lex = function lex(src) {
    // Normalize all line endings to \n.
    src = src.replace(/\r\n|\r/g, '\n');
    this.blockTokens(src, this.tokens);
    var next;
    // Drain the deferred inline queue (entries may be appended while
    // draining, e.g. by nested block tokens).
    while (next = this.inlineQueue.shift()) {
      this.inlineTokens(next.src, next.tokens);
    }
    return this.tokens;
  }
  /**
   * Lexing
   */;
  _proto.blockTokens = function blockTokens(src, tokens) {
    var _this = this;
    if (tokens === void 0) {
      tokens = [];
    }
    // Tab handling: pedantic flattens every tab to 4 spaces; otherwise only
    // leading tabs are expanded so indentation-based rules see spaces.
    if (this.options.pedantic) {
      src = src.replace(/\t/g, '    ').replace(/^ +$/gm, '');
    } else {
      src = src.replace(/^( *)(\t+)/gm, function (_, leading, tabs) {
        return leading + '    '.repeat(tabs.length);
      });
    }
    var token, lastToken, cutSrc, lastParagraphClipped;
    // Consume src one token at a time; every branch that matches must
    // shorten src, otherwise the infinite-loop guard at the bottom fires.
    while (src) {
      // custom block-level extensions get first chance at the input
      if (this.options.extensions && this.options.extensions.block && this.options.extensions.block.some(function (extTokenizer) {
        if (token = extTokenizer.call({
          lexer: _this
        }, src, tokens)) {
          src = src.substring(token.raw.length);
          tokens.push(token);
          return true;
        }
        return false;
      })) {
        continue;
      }
      // newline
      if (token = this.tokenizer.space(src)) {
        src = src.substring(token.raw.length);
        if (token.raw.length === 1 && tokens.length > 0) {
          // if there's a single \n as a spacer, it's terminating the last line,
          // so move it there so that we don't get unnecessary paragraph tags
          tokens[tokens.length - 1].raw += '\n';
        } else {
          tokens.push(token);
        }
        continue;
      }
      // code
      if (token = this.tokenizer.code(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        // An indented code block cannot interrupt a paragraph.
        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.text;
          // keep the queued inline source in sync with the merged text
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else {
          tokens.push(token);
        }
        continue;
      }
      // fences
      if (token = this.tokenizer.fences(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // heading
      if (token = this.tokenizer.heading(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // hr
      if (token = this.tokenizer.hr(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // blockquote
      if (token = this.tokenizer.blockquote(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // list
      if (token = this.tokenizer.list(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // html
      if (token = this.tokenizer.html(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // def
      if (token = this.tokenizer.def(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        // A link definition cannot interrupt a paragraph either.
        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.raw;
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else if (!this.tokens.links[token.tag]) {
          // first definition of a label wins
          this.tokens.links[token.tag] = {
            href: token.href,
            title: token.title
          };
        }
        continue;
      }
      // table (gfm)
      if (token = this.tokenizer.table(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // lheading
      if (token = this.tokenizer.lheading(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // top-level paragraph
      // prevent paragraph consuming extensions by clipping 'src' to extension start
      cutSrc = src;
      if (this.options.extensions && this.options.extensions.startBlock) {
        (function () {
          var startIndex = Infinity;
          var tempSrc = src.slice(1);
          var tempStart = void 0;
          _this.options.extensions.startBlock.forEach(function (getStartIndex) {
            // NOTE(review): `this` is undefined here under 'use strict'
            // (plain callback, no thisArg), so extensions receive
            // { lexer: undefined }; `_this` looks intended — confirm
            // against upstream before changing.
            tempStart = getStartIndex.call({
              lexer: this
            }, tempSrc);
            if (typeof tempStart === 'number' && tempStart >= 0) {
              startIndex = Math.min(startIndex, tempStart);
            }
          });
          if (startIndex < Infinity && startIndex >= 0) {
            cutSrc = src.substring(0, startIndex + 1);
          }
        })();
      }
      if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
        lastToken = tokens[tokens.length - 1];
        // A paragraph that was clipped by an extension merges with the
        // paragraph that follows it.
        if (lastParagraphClipped && lastToken.type === 'paragraph') {
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.text;
          this.inlineQueue.pop();
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else {
          tokens.push(token);
        }
        lastParagraphClipped = cutSrc.length !== src.length;
        src = src.substring(token.raw.length);
        continue;
      }
      // text
      if (token = this.tokenizer.text(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        if (lastToken && lastToken.type === 'text') {
          // merge consecutive text tokens
          lastToken.raw += '\n' + token.raw;
          lastToken.text += '\n' + token.text;
          this.inlineQueue.pop();
          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
        } else {
          tokens.push(token);
        }
        continue;
      }
      // Nothing matched but src is non-empty: bail out instead of spinning.
      if (src) {
        var errMsg = 'Infinite loop on byte: ' + src.charCodeAt(0);
        if (this.options.silent) {
          console.error(errMsg);
          break;
        } else {
          throw new Error(errMsg);
        }
      }
    }
    this.state.top = true;
    return tokens;
  };
  _proto.inline = function inline(src, tokens) {
    if (tokens === void 0) {
      tokens = [];
    }
    // Defer inline lexing: record the source plus the (shared) target array;
    // lex() drains the queue after all block tokens exist.
    this.inlineQueue.push({
      src: src,
      tokens: tokens
    });
    return tokens;
  }
  /**
   * Lexing/Compiling
   */;
  _proto.inlineTokens = function inlineTokens(src, tokens) {
    var _this2 = this;
    if (tokens === void 0) {
      tokens = [];
    }
    var token, lastToken, cutSrc;
    // String with links masked to avoid interference with em and strong
    var maskedSrc = src;
    var match;
    var keepPrevChar, prevChar;
    // Mask out reflinks: replace each known [ref] with same-length filler
    // so character indices in maskedSrc stay aligned with src.
    if (this.tokens.links) {
      var links = Object.keys(this.tokens.links);
      if (links.length > 0) {
        while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
          if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
            maskedSrc = maskedSrc.slice(0, match.index) + '[' + repeatString('a', match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
          }
        }
      }
    }
    // Mask out other blocks
    while ((match = this.tokenizer.rules.inline.blockSkip.exec(maskedSrc)) != null) {
      maskedSrc = maskedSrc.slice(0, match.index) + '[' + repeatString('a', match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.blockSkip.lastIndex);
    }
    // Mask out escaped em & strong delimiters (the stateful /g regex's
    // lastIndex is stepped back one so adjacent escapes are not skipped).
    while ((match = this.tokenizer.rules.inline.escapedEmSt.exec(maskedSrc)) != null) {
      maskedSrc = maskedSrc.slice(0, match.index + match[0].length - 2) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.escapedEmSt.lastIndex);
      this.tokenizer.rules.inline.escapedEmSt.lastIndex--;
    }
    while (src) {
      if (!keepPrevChar) {
        prevChar = '';
      }
      keepPrevChar = false;
      // extensions
      if (this.options.extensions && this.options.extensions.inline && this.options.extensions.inline.some(function (extTokenizer) {
        if (token = extTokenizer.call({
          lexer: _this2
        }, src, tokens)) {
          src = src.substring(token.raw.length);
          tokens.push(token);
          return true;
        }
        return false;
      })) {
        continue;
      }
      // escape
      if (token = this.tokenizer.escape(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // tag
      if (token = this.tokenizer.tag(src)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        // merge adjacent text tokens
        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
          lastToken.raw += token.raw;
          lastToken.text += token.text;
        } else {
          tokens.push(token);
        }
        continue;
      }
      // link
      if (token = this.tokenizer.link(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // reflink, nolink
      if (token = this.tokenizer.reflink(src, this.tokens.links)) {
        src = src.substring(token.raw.length);
        lastToken = tokens[tokens.length - 1];
        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
          lastToken.raw += token.raw;
          lastToken.text += token.text;
        } else {
          tokens.push(token);
        }
        continue;
      }
      // em & strong (uses maskedSrc so masked links/escapes are invisible)
      if (token = this.tokenizer.emStrong(src, maskedSrc, prevChar)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // code
      if (token = this.tokenizer.codespan(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // br
      if (token = this.tokenizer.br(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // del (gfm)
      if (token = this.tokenizer.del(src)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // autolink
      if (token = this.tokenizer.autolink(src, mangle)) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // url (gfm) — suppressed inside links to avoid nested links
      if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
        src = src.substring(token.raw.length);
        tokens.push(token);
        continue;
      }
      // text
      // prevent inlineText consuming extensions by clipping 'src' to extension start
      cutSrc = src;
      if (this.options.extensions && this.options.extensions.startInline) {
        (function () {
          var startIndex = Infinity;
          var tempSrc = src.slice(1);
          var tempStart = void 0;
          _this2.options.extensions.startInline.forEach(function (getStartIndex) {
            // NOTE(review): as in blockTokens above, `this` is undefined
            // here under 'use strict'; `_this2` looks intended — confirm
            // against upstream before changing.
            tempStart = getStartIndex.call({
              lexer: this
            }, tempSrc);
            if (typeof tempStart === 'number' && tempStart >= 0) {
              startIndex = Math.min(startIndex, tempStart);
            }
          });
          if (startIndex < Infinity && startIndex >= 0) {
            cutSrc = src.substring(0, startIndex + 1);
          }
        })();
      }
      if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
        src = src.substring(token.raw.length);
        if (token.raw.slice(-1) !== '_') {
          // Track prevChar before string of ____ started
          prevChar = token.raw.slice(-1);
        }
        keepPrevChar = true;
        lastToken = tokens[tokens.length - 1];
        if (lastToken && lastToken.type === 'text') {
          lastToken.raw += token.raw;
          lastToken.text += token.text;
        } else {
          tokens.push(token);
        }
        continue;
      }
      // Nothing matched but src is non-empty: bail out instead of spinning.
      if (src) {
        var errMsg = 'Infinite loop on byte: ' + src.charCodeAt(0);
        if (this.options.silent) {
          console.error(errMsg);
          break;
        } else {
          throw new Error(errMsg);
        }
      }
    }
    return tokens;
  };
  // static getter exposing the raw grammar tables
  _createClass(Lexer, null, [{
    key: "rules",
    get: function get() {
      return {
        block: block,
        inline: inline
      };
    }
  }]);
  return Lexer;
}();
/ * *
* Renderer
* /
var Renderer = /*#__PURE__*/ function ( ) {
function Renderer(options) {
  // Fall back to the global defaults when no options object is supplied.
  this.options = options ? options : exports.defaults;
}
var _proto = Renderer . prototype ;
_proto.code = function code(_code, infostring, escaped) {
  // Render a code block; the first word of `infostring` is the language.
  var lang = (infostring || '').match(/\S*/)[0];
  if (this.options.highlight) {
    var highlighted = this.options.highlight(_code, lang);
    if (highlighted != null && highlighted !== _code) {
      // The highlighter produced markup: trust it and skip escaping below.
      escaped = true;
      _code = highlighted;
    }
  }
  // Normalize to exactly one trailing newline.
  _code = _code.replace(/\n$/, '') + '\n';
  var body = escaped ? _code : escape(_code, true);
  if (lang) {
    return '<pre><code class="' + this.options.langPrefix + escape(lang) + '">' + body + '</code></pre>\n';
  }
  return '<pre><code>' + body + '</code></pre>\n';
};
/ * *
* @ param { string } quote
* / ;
_proto.blockquote = function blockquote(quote) {
  // Wrap already-rendered inner content in a blockquote element.
  return '<blockquote>\n' + quote + '</blockquote>\n';
};
_proto.html = function html(_html) {
  // Raw HTML passes through untouched.
  return _html;
};
/ * *
* @ param { string } text
* @ param { string } level
* @ param { string } raw
* @ param { any } slugger
* / ;
_proto.heading = function heading(text, level, raw, slugger) {
  // Render <hN>; when headerIds is enabled, derive an id from the raw text.
  if (!this.options.headerIds) {
    // ignore IDs
    return '<h' + level + '>' + text + '</h' + level + '>\n';
  }
  var id = this.options.headerPrefix + slugger.slug(raw);
  return '<h' + level + ' id="' + id + '">' + text + '</h' + level + '>\n';
};
_proto . hr = function hr ( ) {
return this . options . xhtml ? '<hr/>\n' : '<hr>\n' ;
} ;
_proto . list = function list ( body , ordered , start ) {
var type = ordered ? 'ol' : 'ul' ,
startatt = ordered && start !== 1 ? ' start="' + start + '"' : '' ;
return '<' + type + startatt + '>\n' + body + '</' + type + '>\n' ;
}
/ * *
* @ param { string } text
* / ;
_proto . listitem = function listitem ( text ) {
return "<li>" + text + "</li>\n" ;
} ;
_proto . checkbox = function checkbox ( checked ) {
return '<input ' + ( checked ? 'checked="" ' : '' ) + 'disabled="" type="checkbox"' + ( this . options . xhtml ? ' /' : '' ) + '> ' ;
}
/ * *
* @ param { string } text
* / ;
_proto . paragraph = function paragraph ( text ) {
return "<p>" + text + "</p>\n" ;
}
/ * *
* @ param { string } header
* @ param { string } body
* / ;
_proto . table = function table ( header , body ) {
if ( body ) body = "<tbody>" + body + "</tbody>" ;
return '<table>\n' + '<thead>\n' + header + '</thead>\n' + body + '</table>\n' ;
}
/ * *
* @ param { string } content
* / ;
_proto . tablerow = function tablerow ( content ) {
return "<tr>\n" + content + "</tr>\n" ;
} ;
_proto . tablecell = function tablecell ( content , flags ) {
var type = flags . header ? 'th' : 'td' ;
var tag = flags . align ? "<" + type + " align=\"" + flags . align + "\">" : "<" + type + ">" ;
return tag + content + ( "</" + type + ">\n" ) ;
}
/ * *
* span level renderer
* @ param { string } text
* / ;
_proto . strong = function strong ( text ) {
return "<strong>" + text + "</strong>" ;
}
/ * *
* @ param { string } text
* / ;
_proto . em = function em ( text ) {
return "<em>" + text + "</em>" ;
}
/ * *
* @ param { string } text
* / ;
_proto . codespan = function codespan ( text ) {
return "<code>" + text + "</code>" ;
} ;
_proto . br = function br ( ) {
return this . options . xhtml ? '<br/>' : '<br>' ;
}
/ * *
* @ param { string } text
* / ;
_proto . del = function del ( text ) {
return "<del>" + text + "</del>" ;
}
/ * *
* @ param { string } href
* @ param { string } title
* @ param { string } text
* / ;
_proto . link = function link ( href , title , text ) {
href = cleanUrl ( this . options . sanitize , this . options . baseUrl , href ) ;
if ( href === null ) {
return text ;
}
var out = '<a href="' + href + '"' ;
if ( title ) {
out += ' title="' + title + '"' ;
}
out += '>' + text + '</a>' ;
return out ;
}
/ * *
* @ param { string } href
* @ param { string } title
* @ param { string } text
* / ;
_proto . image = function image ( href , title , text ) {
href = cleanUrl ( this . options . sanitize , this . options . baseUrl , href ) ;
if ( href === null ) {
return text ;
}
var out = "<img src=\"" + href + "\" alt=\"" + text + "\"" ;
if ( title ) {
out += " title=\"" + title + "\"" ;
}
out += this . options . xhtml ? '/>' : '>' ;
return out ;
} ;
_proto . text = function text ( _text ) {
return _text ;
} ;
return Renderer ;
} ( ) ;
/**
 * TextRenderer
 * returns only the textual part of the token — used when a plain-text
 * rendering (e.g. for heading slugs) is needed instead of HTML.
 */
var TextRenderer = /*#__PURE__*/function () {
  function TextRenderer() {}

  // no need for block level renderers

  TextRenderer.prototype.strong = function strong(text) {
    return text;
  };
  TextRenderer.prototype.em = function em(text) {
    return text;
  };
  TextRenderer.prototype.codespan = function codespan(text) {
    return text;
  };
  TextRenderer.prototype.del = function del(text) {
    return text;
  };
  TextRenderer.prototype.html = function html(text) {
    return text;
  };
  TextRenderer.prototype.text = function text(textContent) {
    return textContent;
  };
  // Links and images collapse to their text/alt content.
  TextRenderer.prototype.link = function link(href, title, text) {
    return '' + text;
  };
  TextRenderer.prototype.image = function image(href, title, text) {
    return '' + text;
  };
  TextRenderer.prototype.br = function br() {
    return '';
  };
  return TextRenderer;
}();
/**
 * Slugger generates header id
 * Tracks previously issued slugs so repeated headings get unique suffixes.
 */
var Slugger = /*#__PURE__*/function () {
  function Slugger() {
    // slug -> number of times its base form has been handed out so far.
    this.seen = {};
  }

  /**
   * Lowercase, strip HTML tags and punctuation, hyphenate whitespace.
   * @param {string} value
   */
  Slugger.prototype.serialize = function serialize(value) {
    var cleaned = value.toLowerCase().trim()
      // remove html tags
      .replace(/<[!\/a-z].*?>/ig, '')
      // remove unwanted chars
      .replace(/[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&()*+,./:;<=>?@[\]^`{|}~]/g, '');
    return cleaned.replace(/\s/g, '-');
  };

  /**
   * Finds the next safe (unique) slug to use
   * @param {string} originalSlug
   * @param {boolean} isDryRun
   */
  Slugger.prototype.getNextSafeSlug = function getNextSafeSlug(originalSlug, isDryRun) {
    var slug = originalSlug;
    var count = 0;
    if (this.seen.hasOwnProperty(slug)) {
      // Keep appending "-N" until we land on an unseen slug.
      count = this.seen[originalSlug];
      while (this.seen.hasOwnProperty(slug)) {
        count++;
        slug = originalSlug + '-' + count;
      }
    }
    if (!isDryRun) {
      this.seen[originalSlug] = count;
      this.seen[slug] = 0;
    }
    return slug;
  };

  /**
   * Convert string to unique id
   * @param {object} [options]
   * @param {boolean} [options.dryrun] Generates the next unique slug without
   * updating the internal accumulator.
   */
  Slugger.prototype.slug = function slug(value, options) {
    if (options === void 0) {
      options = {};
    }
    return this.getNextSafeSlug(this.serialize(value), options.dryrun);
  };

  return Slugger;
}();
/**
 * Parsing & Compiling
 * Walks the token tree produced by the Lexer and emits HTML through a
 * Renderer. Block tokens go through parse(); inline tokens through
 * parseInline().
 */
var Parser = /*#__PURE__*/function () {
  function Parser(options) {
    this.options = options || exports.defaults;
    // Ensure a renderer exists and keep its options in sync with this parser.
    this.options.renderer = this.options.renderer || new Renderer();
    this.renderer = this.options.renderer;
    this.renderer.options = this.options;
    // textRenderer strips markup; used to build the plain-text heading `raw`
    // that the slugger turns into an id.
    this.textRenderer = new TextRenderer();
    this.slugger = new Slugger();
  }
  /**
   * Static Parse Method
   * Convenience wrapper: builds a Parser and parses block-level tokens.
   */
  Parser.parse = function parse(tokens, options) {
    var parser = new Parser(options);
    return parser.parse(tokens);
  };
  /**
   * Static Parse Inline Method
   * Convenience wrapper: builds a Parser and parses inline tokens.
   */
  Parser.parseInline = function parseInline(tokens, options) {
    var parser = new Parser(options);
    return parser.parseInline(tokens);
  };
  /**
   * Parse Loop
   * Renders a list of block-level tokens to an HTML string.
   * @param {Array} tokens block tokens from the Lexer.
   * @param {boolean} [top=true] true at the top level; controls whether
   *   merged 'text' runs are wrapped in a paragraph.
   * @returns {string|undefined} the HTML, or undefined when options.silent
   *   is set and an unknown token type is encountered.
   */
  var _proto = Parser.prototype;
  _proto.parse = function parse(tokens, top) {
    if (top === void 0) {
      top = true;
    }
    var out = '',
      i,
      j,
      k,
      l2,
      l3,
      row,
      cell,
      header,
      body,
      token,
      ordered,
      start,
      loose,
      itemBody,
      item,
      checked,
      task,
      checkbox,
      ret;
    var l = tokens.length;
    for (i = 0; i < l; i++) {
      token = tokens[i];
      // Run any renderer extensions
      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
        ret = this.options.extensions.renderers[token.type].call({
          parser: this
        }, token);
        // Use the extension's output unless it returned false for a built-in
        // token type — false means "fall through to the default renderer".
        if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(token.type)) {
          out += ret || '';
          continue;
        }
      }
      switch (token.type) {
        case 'space':
          {
            continue;
          }
        case 'hr':
          {
            out += this.renderer.hr();
            continue;
          }
        case 'heading':
          {
            // The heading text is rendered twice: once as HTML, once through
            // the text renderer (then unescaped) to make the slug source.
            out += this.renderer.heading(this.parseInline(token.tokens), token.depth, unescape(this.parseInline(token.tokens, this.textRenderer)), this.slugger);
            continue;
          }
        case 'code':
          {
            out += this.renderer.code(token.text, token.lang, token.escaped);
            continue;
          }
        case 'table':
          {
            header = '';
            // header
            cell = '';
            l2 = token.header.length;
            for (j = 0; j < l2; j++) {
              cell += this.renderer.tablecell(this.parseInline(token.header[j].tokens), {
                header: true,
                align: token.align[j]
              });
            }
            header += this.renderer.tablerow(cell);
            body = '';
            l2 = token.rows.length;
            for (j = 0; j < l2; j++) {
              row = token.rows[j];
              cell = '';
              l3 = row.length;
              for (k = 0; k < l3; k++) {
                cell += this.renderer.tablecell(this.parseInline(row[k].tokens), {
                  header: false,
                  align: token.align[k]
                });
              }
              body += this.renderer.tablerow(cell);
            }
            out += this.renderer.table(header, body);
            continue;
          }
        case 'blockquote':
          {
            // Blockquote bodies are block-level: recurse with top defaulting
            // to true so loose text inside still becomes paragraphs.
            body = this.parse(token.tokens);
            out += this.renderer.blockquote(body);
            continue;
          }
        case 'list':
          {
            ordered = token.ordered;
            start = token.start;
            loose = token.loose;
            l2 = token.items.length;
            body = '';
            for (j = 0; j < l2; j++) {
              item = token.items[j];
              checked = item.checked;
              task = item.task;
              itemBody = '';
              if (item.task) {
                checkbox = this.renderer.checkbox(checked);
                if (loose) {
                  // In a loose list the checkbox is injected into the first
                  // paragraph token (and its first inline text token) so it
                  // renders inside the <p>.
                  if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
                    item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
                    if (item.tokens[0].tokens && item.tokens[0].tokens.length > 0 && item.tokens[0].tokens[0].type === 'text') {
                      item.tokens[0].tokens[0].text = checkbox + ' ' + item.tokens[0].tokens[0].text;
                    }
                  } else {
                    item.tokens.unshift({
                      type: 'text',
                      text: checkbox
                    });
                  }
                } else {
                  itemBody += checkbox;
                }
              }
              // `loose` doubles as `top`: loose items wrap text in <p>.
              itemBody += this.parse(item.tokens, loose);
              body += this.renderer.listitem(itemBody, task, checked);
            }
            out += this.renderer.list(body, ordered, start);
            continue;
          }
        case 'html':
          {
            // TODO parse inline content if parameter markdown=1
            out += this.renderer.html(token.text);
            continue;
          }
        case 'paragraph':
          {
            out += this.renderer.paragraph(this.parseInline(token.tokens));
            continue;
          }
        case 'text':
          {
            body = token.tokens ? this.parseInline(token.tokens) : token.text;
            // Merge consecutive 'text' tokens into one run (advances i).
            while (i + 1 < l && tokens[i + 1].type === 'text') {
              token = tokens[++i];
              body += '\n' + (token.tokens ? this.parseInline(token.tokens) : token.text);
            }
            out += top ? this.renderer.paragraph(body) : body;
            continue;
          }
        default:
          {
            var errMsg = 'Token with "' + token.type + '" type was not found.';
            if (this.options.silent) {
              // Silent mode: log and abort, returning undefined.
              console.error(errMsg);
              return;
            } else {
              throw new Error(errMsg);
            }
          }
      }
    }
    return out;
  };
  /**
   * Parse Inline Tokens
   * @param {Array} tokens inline tokens from the Lexer.
   * @param {object} [renderer] alternate renderer (e.g. textRenderer);
   *   defaults to this parser's HTML renderer.
   * @returns {string|undefined} HTML (or plain text), or undefined when
   *   silent and an unknown token type is met.
   */
  _proto.parseInline = function parseInline(tokens, renderer) {
    renderer = renderer || this.renderer;
    var out = '',
      i,
      token,
      ret;
    var l = tokens.length;
    for (i = 0; i < l; i++) {
      token = tokens[i];
      // Run any renderer extensions
      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
        ret = this.options.extensions.renderers[token.type].call({
          parser: this
        }, token);
        // Same fall-through contract as in parse(): false on a built-in type
        // defers to the default renderer below.
        if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(token.type)) {
          out += ret || '';
          continue;
        }
      }
      switch (token.type) {
        case 'escape':
          {
            out += renderer.text(token.text);
            break;
          }
        case 'html':
          {
            out += renderer.html(token.text);
            break;
          }
        case 'link':
          {
            out += renderer.link(token.href, token.title, this.parseInline(token.tokens, renderer));
            break;
          }
        case 'image':
          {
            out += renderer.image(token.href, token.title, token.text);
            break;
          }
        case 'strong':
          {
            out += renderer.strong(this.parseInline(token.tokens, renderer));
            break;
          }
        case 'em':
          {
            out += renderer.em(this.parseInline(token.tokens, renderer));
            break;
          }
        case 'codespan':
          {
            out += renderer.codespan(token.text);
            break;
          }
        case 'br':
          {
            out += renderer.br();
            break;
          }
        case 'del':
          {
            out += renderer.del(this.parseInline(token.tokens, renderer));
            break;
          }
        case 'text':
          {
            out += renderer.text(token.text);
            break;
          }
        default:
          {
            var errMsg = 'Token with "' + token.type + '" type was not found.';
            if (this.options.silent) {
              console.error(errMsg);
              return;
            } else {
              throw new Error(errMsg);
            }
          }
      }
    }
    return out;
  };
  return Parser;
}();
/**
 * Hooks
 * Default no-op hook implementations; extensions override these via
 * marked.use({ hooks: ... }).
 */
var Hooks = /*#__PURE__*/function () {
  function Hooks(options) {
    this.options = options || exports.defaults;
  }

  /**
   * Process markdown before marked
   */
  Hooks.prototype.preprocess = function preprocess(markdown) {
    return markdown;
  };

  /**
   * Process HTML after marked is finished
   */
  Hooks.prototype.postprocess = function postprocess(html) {
    return html;
  };

  return Hooks;
}();
// Hooks whose overrides chain input->output (see marked.use) rather than
// using the return-false fallback protocol.
Hooks.passThroughHooks = new Set(['preprocess', 'postprocess']);
/**
 * Build an error handler matching the caller's delivery mode.
 * @param {boolean} silent render the error as HTML instead of failing
 * @param {boolean} async deliver via Promise
 * @param {Function} [callback] node-style callback (err, html)
 * @returns {Function} handler taking the thrown Error
 */
function onError(silent, async, callback) {
  return function (err) {
    err.message += '\nPlease report this to https://github.com/markedjs/marked.';
    if (!silent) {
      // Loud mode: propagate the error through the requested channel.
      if (async) {
        return Promise.reject(err);
      }
      if (callback) {
        callback(err);
        return;
      }
      throw err;
    }
    // Silent mode: turn the error into displayable HTML.
    var msg = '<p>An error occurred:</p><pre>' + escape(err.message + '', true) + '</pre>';
    if (async) {
      return Promise.resolve(msg);
    }
    if (callback) {
      callback(null, msg);
      return;
    }
    return msg;
  };
}
function parseMarkdown ( lexer , parser ) {
return function ( src , opt , callback ) {
if ( typeof opt === 'function' ) {
callback = opt ;
opt = null ;
}
var origOpt = _extends ( { } , opt ) ;
opt = _extends ( { } , marked . defaults , origOpt ) ;
var throwError = onError ( opt . silent , opt . async , callback ) ;
// throw error in case of non string input
if ( typeof src === 'undefined' || src === null ) {
return throwError ( new Error ( 'marked(): input parameter is undefined or null' ) ) ;
}
if ( typeof src !== 'string' ) {
return throwError ( new Error ( 'marked(): input parameter is of type ' + Object . prototype . toString . call ( src ) + ', string expected' ) ) ;
}
checkSanitizeDeprecation ( opt ) ;
if ( opt . hooks ) {
opt . hooks . options = opt ;
}
if ( callback ) {
var highlight = opt . highlight ;
var tokens ;
try {
if ( opt . hooks ) {
src = opt . hooks . preprocess ( src ) ;
}
tokens = lexer ( src , opt ) ;
} catch ( e ) {
return throwError ( e ) ;
}
var done = function done ( err ) {
var out ;
if ( ! err ) {
try {
if ( opt . walkTokens ) {
marked . walkTokens ( tokens , opt . walkTokens ) ;
}
out = parser ( tokens , opt ) ;
if ( opt . hooks ) {
out = opt . hooks . postprocess ( out ) ;
}
} catch ( e ) {
err = e ;
}
}
opt . highlight = highlight ;
return err ? throwError ( err ) : callback ( null , out ) ;
} ;
if ( ! highlight || highlight . length < 3 ) {
return done ( ) ;
}
delete opt . highlight ;
if ( ! tokens . length ) return done ( ) ;
var pending = 0 ;
marked . walkTokens ( tokens , function ( token ) {
if ( token . type === 'code' ) {
pending ++ ;
setTimeout ( function ( ) {
highlight ( token . text , token . lang , function ( err , code ) {
if ( err ) {
return done ( err ) ;
}
if ( code != null && code !== token . text ) {
token . text = code ;
token . escaped = true ;
}
pending -- ;
if ( pending === 0 ) {
done ( ) ;
}
} ) ;
} , 0 ) ;
}
} ) ;
if ( pending === 0 ) {
done ( ) ;
}
return ;
}
if ( opt . async ) {
return Promise . resolve ( opt . hooks ? opt . hooks . preprocess ( src ) : src ) . then ( function ( src ) {
return lexer ( src , opt ) ;
} ) . then ( function ( tokens ) {
return opt . walkTokens ? Promise . all ( marked . walkTokens ( tokens , opt . walkTokens ) ) . then ( function ( ) {
return tokens ;
} ) : tokens ;
} ) . then ( function ( tokens ) {
return parser ( tokens , opt ) ;
} ) . then ( function ( html ) {
return opt . hooks ? opt . hooks . postprocess ( html ) : html ;
} ) [ "catch" ] ( throwError ) ;
}
try {
if ( opt . hooks ) {
src = opt . hooks . preprocess ( src ) ;
}
var _tokens = lexer ( src , opt ) ;
if ( opt . walkTokens ) {
marked . walkTokens ( _tokens , opt . walkTokens ) ;
}
var html = parser ( _tokens , opt ) ;
if ( opt . hooks ) {
html = opt . hooks . postprocess ( html ) ;
}
return html ;
} catch ( e ) {
return throwError ( e ) ;
}
} ;
}
/**
 * Marked
 * Entry point: lex `src` into block tokens and compile them to HTML.
 * @param {string} src markdown source
 * @param {object|Function} [opt] options, or the callback when omitted
 * @param {Function} [callback] node-style callback (err, html)
 */
function marked(src, opt, callback) {
  var render = parseMarkdown(Lexer.lex, Parser.parse);
  return render(src, opt, callback);
}
/**
 * Options
 * Merges `opt` into the global defaults; returns `marked` for chaining.
 */
marked.options = marked.setOptions = function (opt) {
  marked.defaults = _extends({}, marked.defaults, opt);
  // Keep the shared defaults module in sync with the merged options.
  changeDefaults(marked.defaults);
  return marked;
};
marked.getDefaults = getDefaults;
marked.defaults = exports.defaults;
/**
 * Use Extension
 * Accepts any number of extension packs and merges each into the global
 * defaults: custom token renderers/tokenizers ("addon" extensions),
 * overrides of the built-in renderer/tokenizer/hooks, and walkTokens
 * callbacks. Overrides chain: returning false falls back to the previous
 * implementation.
 */
marked.use = function () {
  var extensions = marked.defaults.extensions || {
    renderers: {},
    childTokens: {}
  };
  // Collect variadic arguments (transpiled rest parameter).
  for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
    args[_key] = arguments[_key];
  }
  args.forEach(function (pack) {
    // copy options to new object
    var opts = _extends({}, pack);
    // set async to true if it was set to true before
    opts.async = marked.defaults.async || opts.async || false;
    // ==-- Parse "addon" extensions --== //
    if (pack.extensions) {
      pack.extensions.forEach(function (ext) {
        if (!ext.name) {
          throw new Error('extension name required');
        }
        if (ext.renderer) {
          // Renderer extensions
          var prevRenderer = extensions.renderers[ext.name];
          if (prevRenderer) {
            // Replace extension with func to run new extension but fall back if false
            extensions.renderers[ext.name] = function () {
              for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
                args[_key2] = arguments[_key2];
              }
              var ret = ext.renderer.apply(this, args);
              if (ret === false) {
                ret = prevRenderer.apply(this, args);
              }
              return ret;
            };
          } else {
            extensions.renderers[ext.name] = ext.renderer;
          }
        }
        if (ext.tokenizer) {
          // Tokenizer Extensions
          if (!ext.level || ext.level !== 'block' && ext.level !== 'inline') {
            throw new Error("extension level must be 'block' or 'inline'");
          }
          // Newest tokenizer runs first (unshift).
          if (extensions[ext.level]) {
            extensions[ext.level].unshift(ext.tokenizer);
          } else {
            extensions[ext.level] = [ext.tokenizer];
          }
          if (ext.start) {
            // Function to check for start of token
            if (ext.level === 'block') {
              if (extensions.startBlock) {
                extensions.startBlock.push(ext.start);
              } else {
                extensions.startBlock = [ext.start];
              }
            } else if (ext.level === 'inline') {
              if (extensions.startInline) {
                extensions.startInline.push(ext.start);
              } else {
                extensions.startInline = [ext.start];
              }
            }
          }
        }
        if (ext.childTokens) {
          // Child tokens to be visited by walkTokens
          extensions.childTokens[ext.name] = ext.childTokens;
        }
      });
      opts.extensions = extensions;
    }
    // ==-- Parse "overwrite" extensions --== //
    if (pack.renderer) {
      (function () {
        var renderer = marked.defaults.renderer || new Renderer();
        // _loop captures `prop` per iteration (transpiled block scope).
        var _loop = function _loop(prop) {
          var prevRenderer = renderer[prop];
          // Replace renderer with func to run extension, but fall back if false
          renderer[prop] = function () {
            for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
              args[_key3] = arguments[_key3];
            }
            var ret = pack.renderer[prop].apply(renderer, args);
            if (ret === false) {
              ret = prevRenderer.apply(renderer, args);
            }
            return ret;
          };
        };
        for (var prop in pack.renderer) {
          _loop(prop);
        }
        opts.renderer = renderer;
      })();
    }
    if (pack.tokenizer) {
      (function () {
        var tokenizer = marked.defaults.tokenizer || new Tokenizer();
        var _loop2 = function _loop2(prop) {
          var prevTokenizer = tokenizer[prop];
          // Replace tokenizer with func to run extension, but fall back if false
          tokenizer[prop] = function () {
            for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
              args[_key4] = arguments[_key4];
            }
            var ret = pack.tokenizer[prop].apply(tokenizer, args);
            if (ret === false) {
              ret = prevTokenizer.apply(tokenizer, args);
            }
            return ret;
          };
        };
        for (var prop in pack.tokenizer) {
          _loop2(prop);
        }
        opts.tokenizer = tokenizer;
      })();
    }
    // ==-- Parse Hooks extensions --== //
    if (pack.hooks) {
      (function () {
        var hooks = marked.defaults.hooks || new Hooks();
        var _loop3 = function _loop3(prop) {
          var prevHook = hooks[prop];
          if (Hooks.passThroughHooks.has(prop)) {
            // Pass-through hooks chain output of the new hook into the
            // previous one (optionally via Promise when async).
            hooks[prop] = function (arg) {
              if (marked.defaults.async) {
                return Promise.resolve(pack.hooks[prop].call(hooks, arg)).then(function (ret) {
                  return prevHook.call(hooks, ret);
                });
              }
              var ret = pack.hooks[prop].call(hooks, arg);
              return prevHook.call(hooks, ret);
            };
          } else {
            // Other hooks use the return-false fallback protocol.
            hooks[prop] = function () {
              for (var _len5 = arguments.length, args = new Array(_len5), _key5 = 0; _key5 < _len5; _key5++) {
                args[_key5] = arguments[_key5];
              }
              var ret = pack.hooks[prop].apply(hooks, args);
              if (ret === false) {
                ret = prevHook.apply(hooks, args);
              }
              return ret;
            };
          }
        };
        for (var prop in pack.hooks) {
          _loop3(prop);
        }
        opts.hooks = hooks;
      })();
    }
    // ==-- Parse WalkTokens extensions --== //
    if (pack.walkTokens) {
      var _walkTokens = marked.defaults.walkTokens;
      // New walkTokens runs first; previous results are appended.
      opts.walkTokens = function (token) {
        var values = [];
        values.push(pack.walkTokens.call(this, token));
        if (_walkTokens) {
          values = values.concat(_walkTokens.call(this, token));
        }
        return values;
      };
    }
    marked.setOptions(opts);
  });
};
/**
 * Run callback for every token
 * Depth-first walk over the token tree; collects every callback return
 * value (flattened via concat) into one array, which the async path uses
 * with Promise.all.
 */
marked.walkTokens = function (tokens, callback) {
  var values = [];
  // _loop4 reads `_step` from the enclosing for-loop scope (transpiled
  // for...of body); do not move or inline it.
  var _loop4 = function _loop4() {
    var token = _step.value;
    values = values.concat(callback.call(marked, token));
    switch (token.type) {
      case 'table':
        {
          // Header and body cells each carry their own inline token lists.
          for (var _iterator2 = _createForOfIteratorHelperLoose(token.header), _step2; !(_step2 = _iterator2()).done;) {
            var cell = _step2.value;
            values = values.concat(marked.walkTokens(cell.tokens, callback));
          }
          for (var _iterator3 = _createForOfIteratorHelperLoose(token.rows), _step3; !(_step3 = _iterator3()).done;) {
            var row = _step3.value;
            for (var _iterator4 = _createForOfIteratorHelperLoose(row), _step4; !(_step4 = _iterator4()).done;) {
              var _cell = _step4.value;
              values = values.concat(marked.walkTokens(_cell.tokens, callback));
            }
          }
          break;
        }
      case 'list':
        {
          values = values.concat(marked.walkTokens(token.items, callback));
          break;
        }
      default:
        {
          if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) {
            // Walk any extensions
            // NOTE: extension child-token values are walked but their results
            // are discarded here (outer `values` is shadowed by reassignment
            // inside forEach only) — matches upstream behavior.
            marked.defaults.extensions.childTokens[token.type].forEach(function (childTokens) {
              values = values.concat(marked.walkTokens(token[childTokens], callback));
            });
          } else if (token.tokens) {
            values = values.concat(marked.walkTokens(token.tokens, callback));
          }
        }
    }
  };
  for (var _iterator = _createForOfIteratorHelperLoose(tokens), _step; !(_step = _iterator()).done;) {
    _loop4();
  }
  return values;
};
/**
 * Parse Inline
 * Renders inline markdown only (no block elements such as <p>).
 * @param {string} src
 */
marked.parseInline = parseMarkdown(Lexer.lexInline, Parser.parseInline);

/**
 * Expose
 * Attach classes and convenience aliases to the marked function, then
 * export everything for the CommonJS consumer.
 */
marked.Parser = Parser;
marked.parser = Parser.parse;
marked.Renderer = Renderer;
marked.TextRenderer = TextRenderer;
marked.Lexer = Lexer;
marked.lexer = Lexer.lex;
marked.Tokenizer = Tokenizer;
marked.Slugger = Slugger;
marked.Hooks = Hooks;
// marked.parse is the same function as marked itself.
marked.parse = marked;
var options = marked.options;
var setOptions = marked.setOptions;
var use = marked.use;
var walkTokens = marked.walkTokens;
var parseInline = marked.parseInline;
var parse = marked;
var parser = Parser.parse;
var lexer = Lexer.lex;
exports.Hooks = Hooks;
exports.Lexer = Lexer;
exports.Parser = Parser;
exports.Renderer = Renderer;
exports.Slugger = Slugger;
exports.TextRenderer = TextRenderer;
exports.Tokenizer = Tokenizer;
exports.getDefaults = getDefaults;
exports.lexer = lexer;
exports.marked = marked;
exports.options = options;
exports.parse = parse;
exports.parseInline = parseInline;
exports.parser = parser;
exports.setOptions = setOptions;
exports.use = use;
exports.walkTokens = walkTokens;