This commit is contained in:
186
node_modules/micromark/dist/tokenize/attention.js
generated
vendored
Normal file
186
node_modules/micromark/dist/tokenize/attention.js
generated
vendored
Normal file
@@ -0,0 +1,186 @@
|
||||
'use strict'
|
||||
|
||||
var chunkedPush = require('../util/chunked-push.js')
|
||||
var chunkedSplice = require('../util/chunked-splice.js')
|
||||
var classifyCharacter = require('../util/classify-character.js')
|
||||
var movePoint = require('../util/move-point.js')
|
||||
var resolveAll = require('../util/resolve-all.js')
|
||||
var shallow = require('../util/shallow.js')
|
||||
|
||||
var attention = {
|
||||
name: 'attention',
|
||||
tokenize: tokenizeAttention,
|
||||
resolveAll: resolveAllAttention
|
||||
}
|
||||
|
||||
function resolveAllAttention(events, context) {
|
||||
var index = -1
|
||||
var open
|
||||
var group
|
||||
var text
|
||||
var openingSequence
|
||||
var closingSequence
|
||||
var use
|
||||
var nextEvents
|
||||
var offset // Walk through all events.
|
||||
//
|
||||
// Note: performance of this is fine on an mb of normal markdown, but it’s
|
||||
// a bottleneck for malicious stuff.
|
||||
|
||||
while (++index < events.length) {
|
||||
// Find a token that can close.
|
||||
if (
|
||||
events[index][0] === 'enter' &&
|
||||
events[index][1].type === 'attentionSequence' &&
|
||||
events[index][1]._close
|
||||
) {
|
||||
open = index // Now walk back to find an opener.
|
||||
|
||||
while (open--) {
|
||||
// Find a token that can open the closer.
|
||||
if (
|
||||
events[open][0] === 'exit' &&
|
||||
events[open][1].type === 'attentionSequence' &&
|
||||
events[open][1]._open && // If the markers are the same:
|
||||
context.sliceSerialize(events[open][1]).charCodeAt(0) ===
|
||||
context.sliceSerialize(events[index][1]).charCodeAt(0)
|
||||
) {
|
||||
// If the opening can close or the closing can open,
|
||||
// and the close size *is not* a multiple of three,
|
||||
// but the sum of the opening and closing size *is* multiple of three,
|
||||
// then don’t match.
|
||||
if (
|
||||
(events[open][1]._close || events[index][1]._open) &&
|
||||
(events[index][1].end.offset - events[index][1].start.offset) % 3 &&
|
||||
!(
|
||||
(events[open][1].end.offset -
|
||||
events[open][1].start.offset +
|
||||
events[index][1].end.offset -
|
||||
events[index][1].start.offset) %
|
||||
3
|
||||
)
|
||||
) {
|
||||
continue
|
||||
} // Number of markers to use from the sequence.
|
||||
|
||||
use =
|
||||
events[open][1].end.offset - events[open][1].start.offset > 1 &&
|
||||
events[index][1].end.offset - events[index][1].start.offset > 1
|
||||
? 2
|
||||
: 1
|
||||
openingSequence = {
|
||||
type: use > 1 ? 'strongSequence' : 'emphasisSequence',
|
||||
start: movePoint(shallow(events[open][1].end), -use),
|
||||
end: shallow(events[open][1].end)
|
||||
}
|
||||
closingSequence = {
|
||||
type: use > 1 ? 'strongSequence' : 'emphasisSequence',
|
||||
start: shallow(events[index][1].start),
|
||||
end: movePoint(shallow(events[index][1].start), use)
|
||||
}
|
||||
text = {
|
||||
type: use > 1 ? 'strongText' : 'emphasisText',
|
||||
start: shallow(events[open][1].end),
|
||||
end: shallow(events[index][1].start)
|
||||
}
|
||||
group = {
|
||||
type: use > 1 ? 'strong' : 'emphasis',
|
||||
start: shallow(openingSequence.start),
|
||||
end: shallow(closingSequence.end)
|
||||
}
|
||||
events[open][1].end = shallow(openingSequence.start)
|
||||
events[index][1].start = shallow(closingSequence.end)
|
||||
nextEvents = [] // If there are more markers in the opening, add them before.
|
||||
|
||||
if (events[open][1].end.offset - events[open][1].start.offset) {
|
||||
nextEvents = chunkedPush(nextEvents, [
|
||||
['enter', events[open][1], context],
|
||||
['exit', events[open][1], context]
|
||||
])
|
||||
} // Opening.
|
||||
|
||||
nextEvents = chunkedPush(nextEvents, [
|
||||
['enter', group, context],
|
||||
['enter', openingSequence, context],
|
||||
['exit', openingSequence, context],
|
||||
['enter', text, context]
|
||||
]) // Between.
|
||||
|
||||
nextEvents = chunkedPush(
|
||||
nextEvents,
|
||||
resolveAll(
|
||||
context.parser.constructs.insideSpan.null,
|
||||
events.slice(open + 1, index),
|
||||
context
|
||||
)
|
||||
) // Closing.
|
||||
|
||||
nextEvents = chunkedPush(nextEvents, [
|
||||
['exit', text, context],
|
||||
['enter', closingSequence, context],
|
||||
['exit', closingSequence, context],
|
||||
['exit', group, context]
|
||||
]) // If there are more markers in the closing, add them after.
|
||||
|
||||
if (events[index][1].end.offset - events[index][1].start.offset) {
|
||||
offset = 2
|
||||
nextEvents = chunkedPush(nextEvents, [
|
||||
['enter', events[index][1], context],
|
||||
['exit', events[index][1], context]
|
||||
])
|
||||
} else {
|
||||
offset = 0
|
||||
}
|
||||
|
||||
chunkedSplice(events, open - 1, index - open + 3, nextEvents)
|
||||
index = open + nextEvents.length - offset - 2
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
} // Remove remaining sequences.
|
||||
|
||||
index = -1
|
||||
|
||||
while (++index < events.length) {
|
||||
if (events[index][1].type === 'attentionSequence') {
|
||||
events[index][1].type = 'data'
|
||||
}
|
||||
}
|
||||
|
||||
return events
|
||||
}
|
||||
|
||||
function tokenizeAttention(effects, ok) {
|
||||
var before = classifyCharacter(this.previous)
|
||||
var marker
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('attentionSequence')
|
||||
marker = code
|
||||
return sequence(code)
|
||||
}
|
||||
|
||||
function sequence(code) {
|
||||
var token
|
||||
var after
|
||||
var open
|
||||
var close
|
||||
|
||||
if (code === marker) {
|
||||
effects.consume(code)
|
||||
return sequence
|
||||
}
|
||||
|
||||
token = effects.exit('attentionSequence')
|
||||
after = classifyCharacter(code)
|
||||
open = !after || (after === 2 && before)
|
||||
close = !before || (before === 2 && after)
|
||||
token._open = marker === 42 ? open : open && (before || !close)
|
||||
token._close = marker === 42 ? close : close && (after || !open)
|
||||
return ok(code)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = attention
|
||||
125
node_modules/micromark/dist/tokenize/autolink.js
generated
vendored
Normal file
125
node_modules/micromark/dist/tokenize/autolink.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
'use strict'
|
||||
|
||||
var asciiAlpha = require('../character/ascii-alpha.js')
|
||||
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
|
||||
var asciiAtext = require('../character/ascii-atext.js')
|
||||
var asciiControl = require('../character/ascii-control.js')
|
||||
|
||||
var autolink = {
|
||||
name: 'autolink',
|
||||
tokenize: tokenizeAutolink
|
||||
}
|
||||
|
||||
function tokenizeAutolink(effects, ok, nok) {
|
||||
var size = 1
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('autolink')
|
||||
effects.enter('autolinkMarker')
|
||||
effects.consume(code)
|
||||
effects.exit('autolinkMarker')
|
||||
effects.enter('autolinkProtocol')
|
||||
return open
|
||||
}
|
||||
|
||||
function open(code) {
|
||||
if (asciiAlpha(code)) {
|
||||
effects.consume(code)
|
||||
return schemeOrEmailAtext
|
||||
}
|
||||
|
||||
return asciiAtext(code) ? emailAtext(code) : nok(code)
|
||||
}
|
||||
|
||||
function schemeOrEmailAtext(code) {
|
||||
return code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)
|
||||
? schemeInsideOrEmailAtext(code)
|
||||
: emailAtext(code)
|
||||
}
|
||||
|
||||
function schemeInsideOrEmailAtext(code) {
|
||||
if (code === 58) {
|
||||
effects.consume(code)
|
||||
return urlInside
|
||||
}
|
||||
|
||||
if (
|
||||
(code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) &&
|
||||
size++ < 32
|
||||
) {
|
||||
effects.consume(code)
|
||||
return schemeInsideOrEmailAtext
|
||||
}
|
||||
|
||||
return emailAtext(code)
|
||||
}
|
||||
|
||||
function urlInside(code) {
|
||||
if (code === 62) {
|
||||
effects.exit('autolinkProtocol')
|
||||
return end(code)
|
||||
}
|
||||
|
||||
if (code === 32 || code === 60 || asciiControl(code)) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
effects.consume(code)
|
||||
return urlInside
|
||||
}
|
||||
|
||||
function emailAtext(code) {
|
||||
if (code === 64) {
|
||||
effects.consume(code)
|
||||
size = 0
|
||||
return emailAtSignOrDot
|
||||
}
|
||||
|
||||
if (asciiAtext(code)) {
|
||||
effects.consume(code)
|
||||
return emailAtext
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function emailAtSignOrDot(code) {
|
||||
return asciiAlphanumeric(code) ? emailLabel(code) : nok(code)
|
||||
}
|
||||
|
||||
function emailLabel(code) {
|
||||
if (code === 46) {
|
||||
effects.consume(code)
|
||||
size = 0
|
||||
return emailAtSignOrDot
|
||||
}
|
||||
|
||||
if (code === 62) {
|
||||
// Exit, then change the type.
|
||||
effects.exit('autolinkProtocol').type = 'autolinkEmail'
|
||||
return end(code)
|
||||
}
|
||||
|
||||
return emailValue(code)
|
||||
}
|
||||
|
||||
function emailValue(code) {
|
||||
if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {
|
||||
effects.consume(code)
|
||||
return code === 45 ? emailValue : emailLabel
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function end(code) {
|
||||
effects.enter('autolinkMarker')
|
||||
effects.consume(code)
|
||||
effects.exit('autolinkMarker')
|
||||
effects.exit('autolink')
|
||||
return ok
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = autolink
|
||||
67
node_modules/micromark/dist/tokenize/block-quote.js
generated
vendored
Normal file
67
node_modules/micromark/dist/tokenize/block-quote.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
'use strict'
|
||||
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
var blockQuote = {
|
||||
name: 'blockQuote',
|
||||
tokenize: tokenizeBlockQuoteStart,
|
||||
continuation: {
|
||||
tokenize: tokenizeBlockQuoteContinuation
|
||||
},
|
||||
exit: exit
|
||||
}
|
||||
|
||||
function tokenizeBlockQuoteStart(effects, ok, nok) {
|
||||
var self = this
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
if (code === 62) {
|
||||
if (!self.containerState.open) {
|
||||
effects.enter('blockQuote', {
|
||||
_container: true
|
||||
})
|
||||
self.containerState.open = true
|
||||
}
|
||||
|
||||
effects.enter('blockQuotePrefix')
|
||||
effects.enter('blockQuoteMarker')
|
||||
effects.consume(code)
|
||||
effects.exit('blockQuoteMarker')
|
||||
return after
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function after(code) {
|
||||
if (markdownSpace(code)) {
|
||||
effects.enter('blockQuotePrefixWhitespace')
|
||||
effects.consume(code)
|
||||
effects.exit('blockQuotePrefixWhitespace')
|
||||
effects.exit('blockQuotePrefix')
|
||||
return ok
|
||||
}
|
||||
|
||||
effects.exit('blockQuotePrefix')
|
||||
return ok(code)
|
||||
}
|
||||
}
|
||||
|
||||
function tokenizeBlockQuoteContinuation(effects, ok, nok) {
|
||||
return factorySpace(
|
||||
effects,
|
||||
effects.attempt(blockQuote, ok, nok),
|
||||
'linePrefix',
|
||||
this.parser.constructs.disable.null.indexOf('codeIndented') > -1
|
||||
? undefined
|
||||
: 4
|
||||
)
|
||||
}
|
||||
|
||||
function exit(effects) {
|
||||
effects.exit('blockQuote')
|
||||
}
|
||||
|
||||
module.exports = blockQuote
|
||||
34
node_modules/micromark/dist/tokenize/character-escape.js
generated
vendored
Normal file
34
node_modules/micromark/dist/tokenize/character-escape.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
'use strict'
|
||||
|
||||
var asciiPunctuation = require('../character/ascii-punctuation.js')
|
||||
|
||||
var characterEscape = {
|
||||
name: 'characterEscape',
|
||||
tokenize: tokenizeCharacterEscape
|
||||
}
|
||||
|
||||
function tokenizeCharacterEscape(effects, ok, nok) {
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('characterEscape')
|
||||
effects.enter('escapeMarker')
|
||||
effects.consume(code)
|
||||
effects.exit('escapeMarker')
|
||||
return open
|
||||
}
|
||||
|
||||
function open(code) {
|
||||
if (asciiPunctuation(code)) {
|
||||
effects.enter('characterEscapeValue')
|
||||
effects.consume(code)
|
||||
effects.exit('characterEscapeValue')
|
||||
effects.exit('characterEscape')
|
||||
return ok
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = characterEscape
|
||||
94
node_modules/micromark/dist/tokenize/character-reference.js
generated
vendored
Normal file
94
node_modules/micromark/dist/tokenize/character-reference.js
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
'use strict'
|
||||
|
||||
var decodeEntity = require('parse-entities/decode-entity.js')
|
||||
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
|
||||
var asciiDigit = require('../character/ascii-digit.js')
|
||||
var asciiHexDigit = require('../character/ascii-hex-digit.js')
|
||||
|
||||
function _interopDefaultLegacy(e) {
|
||||
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
|
||||
}
|
||||
|
||||
var decodeEntity__default = /*#__PURE__*/ _interopDefaultLegacy(decodeEntity)
|
||||
|
||||
var characterReference = {
|
||||
name: 'characterReference',
|
||||
tokenize: tokenizeCharacterReference
|
||||
}
|
||||
|
||||
function tokenizeCharacterReference(effects, ok, nok) {
|
||||
var self = this
|
||||
var size = 0
|
||||
var max
|
||||
var test
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('characterReference')
|
||||
effects.enter('characterReferenceMarker')
|
||||
effects.consume(code)
|
||||
effects.exit('characterReferenceMarker')
|
||||
return open
|
||||
}
|
||||
|
||||
function open(code) {
|
||||
if (code === 35) {
|
||||
effects.enter('characterReferenceMarkerNumeric')
|
||||
effects.consume(code)
|
||||
effects.exit('characterReferenceMarkerNumeric')
|
||||
return numeric
|
||||
}
|
||||
|
||||
effects.enter('characterReferenceValue')
|
||||
max = 31
|
||||
test = asciiAlphanumeric
|
||||
return value(code)
|
||||
}
|
||||
|
||||
function numeric(code) {
|
||||
if (code === 88 || code === 120) {
|
||||
effects.enter('characterReferenceMarkerHexadecimal')
|
||||
effects.consume(code)
|
||||
effects.exit('characterReferenceMarkerHexadecimal')
|
||||
effects.enter('characterReferenceValue')
|
||||
max = 6
|
||||
test = asciiHexDigit
|
||||
return value
|
||||
}
|
||||
|
||||
effects.enter('characterReferenceValue')
|
||||
max = 7
|
||||
test = asciiDigit
|
||||
return value(code)
|
||||
}
|
||||
|
||||
function value(code) {
|
||||
var token
|
||||
|
||||
if (code === 59 && size) {
|
||||
token = effects.exit('characterReferenceValue')
|
||||
|
||||
if (
|
||||
test === asciiAlphanumeric &&
|
||||
!decodeEntity__default['default'](self.sliceSerialize(token))
|
||||
) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
effects.enter('characterReferenceMarker')
|
||||
effects.consume(code)
|
||||
effects.exit('characterReferenceMarker')
|
||||
effects.exit('characterReference')
|
||||
return ok
|
||||
}
|
||||
|
||||
if (test(code) && size++ < max) {
|
||||
effects.consume(code)
|
||||
return value
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = characterReference
|
||||
176
node_modules/micromark/dist/tokenize/code-fenced.js
generated
vendored
Normal file
176
node_modules/micromark/dist/tokenize/code-fenced.js
generated
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var prefixSize = require('../util/prefix-size.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
var codeFenced = {
|
||||
name: 'codeFenced',
|
||||
tokenize: tokenizeCodeFenced,
|
||||
concrete: true
|
||||
}
|
||||
|
||||
function tokenizeCodeFenced(effects, ok, nok) {
|
||||
var self = this
|
||||
var closingFenceConstruct = {
|
||||
tokenize: tokenizeClosingFence,
|
||||
partial: true
|
||||
}
|
||||
var initialPrefix = prefixSize(this.events, 'linePrefix')
|
||||
var sizeOpen = 0
|
||||
var marker
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('codeFenced')
|
||||
effects.enter('codeFencedFence')
|
||||
effects.enter('codeFencedFenceSequence')
|
||||
marker = code
|
||||
return sequenceOpen(code)
|
||||
}
|
||||
|
||||
function sequenceOpen(code) {
|
||||
if (code === marker) {
|
||||
effects.consume(code)
|
||||
sizeOpen++
|
||||
return sequenceOpen
|
||||
}
|
||||
|
||||
effects.exit('codeFencedFenceSequence')
|
||||
return sizeOpen < 3
|
||||
? nok(code)
|
||||
: factorySpace(effects, infoOpen, 'whitespace')(code)
|
||||
}
|
||||
|
||||
function infoOpen(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
return openAfter(code)
|
||||
}
|
||||
|
||||
effects.enter('codeFencedFenceInfo')
|
||||
effects.enter('chunkString', {
|
||||
contentType: 'string'
|
||||
})
|
||||
return info(code)
|
||||
}
|
||||
|
||||
function info(code) {
|
||||
if (code === null || markdownLineEndingOrSpace(code)) {
|
||||
effects.exit('chunkString')
|
||||
effects.exit('codeFencedFenceInfo')
|
||||
return factorySpace(effects, infoAfter, 'whitespace')(code)
|
||||
}
|
||||
|
||||
if (code === 96 && code === marker) return nok(code)
|
||||
effects.consume(code)
|
||||
return info
|
||||
}
|
||||
|
||||
function infoAfter(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
return openAfter(code)
|
||||
}
|
||||
|
||||
effects.enter('codeFencedFenceMeta')
|
||||
effects.enter('chunkString', {
|
||||
contentType: 'string'
|
||||
})
|
||||
return meta(code)
|
||||
}
|
||||
|
||||
function meta(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
effects.exit('chunkString')
|
||||
effects.exit('codeFencedFenceMeta')
|
||||
return openAfter(code)
|
||||
}
|
||||
|
||||
if (code === 96 && code === marker) return nok(code)
|
||||
effects.consume(code)
|
||||
return meta
|
||||
}
|
||||
|
||||
function openAfter(code) {
|
||||
effects.exit('codeFencedFence')
|
||||
return self.interrupt ? ok(code) : content(code)
|
||||
}
|
||||
|
||||
function content(code) {
|
||||
if (code === null) {
|
||||
return after(code)
|
||||
}
|
||||
|
||||
if (markdownLineEnding(code)) {
|
||||
effects.enter('lineEnding')
|
||||
effects.consume(code)
|
||||
effects.exit('lineEnding')
|
||||
return effects.attempt(
|
||||
closingFenceConstruct,
|
||||
after,
|
||||
initialPrefix
|
||||
? factorySpace(effects, content, 'linePrefix', initialPrefix + 1)
|
||||
: content
|
||||
)
|
||||
}
|
||||
|
||||
effects.enter('codeFlowValue')
|
||||
return contentContinue(code)
|
||||
}
|
||||
|
||||
function contentContinue(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
effects.exit('codeFlowValue')
|
||||
return content(code)
|
||||
}
|
||||
|
||||
effects.consume(code)
|
||||
return contentContinue
|
||||
}
|
||||
|
||||
function after(code) {
|
||||
effects.exit('codeFenced')
|
||||
return ok(code)
|
||||
}
|
||||
|
||||
function tokenizeClosingFence(effects, ok, nok) {
|
||||
var size = 0
|
||||
return factorySpace(
|
||||
effects,
|
||||
closingSequenceStart,
|
||||
'linePrefix',
|
||||
this.parser.constructs.disable.null.indexOf('codeIndented') > -1
|
||||
? undefined
|
||||
: 4
|
||||
)
|
||||
|
||||
function closingSequenceStart(code) {
|
||||
effects.enter('codeFencedFence')
|
||||
effects.enter('codeFencedFenceSequence')
|
||||
return closingSequence(code)
|
||||
}
|
||||
|
||||
function closingSequence(code) {
|
||||
if (code === marker) {
|
||||
effects.consume(code)
|
||||
size++
|
||||
return closingSequence
|
||||
}
|
||||
|
||||
if (size < sizeOpen) return nok(code)
|
||||
effects.exit('codeFencedFenceSequence')
|
||||
return factorySpace(effects, closingSequenceEnd, 'whitespace')(code)
|
||||
}
|
||||
|
||||
function closingSequenceEnd(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
effects.exit('codeFencedFence')
|
||||
return ok(code)
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = codeFenced
|
||||
72
node_modules/micromark/dist/tokenize/code-indented.js
generated
vendored
Normal file
72
node_modules/micromark/dist/tokenize/code-indented.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var chunkedSplice = require('../util/chunked-splice.js')
|
||||
var prefixSize = require('../util/prefix-size.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
var codeIndented = {
|
||||
name: 'codeIndented',
|
||||
tokenize: tokenizeCodeIndented,
|
||||
resolve: resolveCodeIndented
|
||||
}
|
||||
var indentedContentConstruct = {
|
||||
tokenize: tokenizeIndentedContent,
|
||||
partial: true
|
||||
}
|
||||
|
||||
function resolveCodeIndented(events, context) {
|
||||
var code = {
|
||||
type: 'codeIndented',
|
||||
start: events[0][1].start,
|
||||
end: events[events.length - 1][1].end
|
||||
}
|
||||
chunkedSplice(events, 0, 0, [['enter', code, context]])
|
||||
chunkedSplice(events, events.length, 0, [['exit', code, context]])
|
||||
return events
|
||||
}
|
||||
|
||||
function tokenizeCodeIndented(effects, ok, nok) {
|
||||
return effects.attempt(indentedContentConstruct, afterPrefix, nok)
|
||||
|
||||
function afterPrefix(code) {
|
||||
if (code === null) {
|
||||
return ok(code)
|
||||
}
|
||||
|
||||
if (markdownLineEnding(code)) {
|
||||
return effects.attempt(indentedContentConstruct, afterPrefix, ok)(code)
|
||||
}
|
||||
|
||||
effects.enter('codeFlowValue')
|
||||
return content(code)
|
||||
}
|
||||
|
||||
function content(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
effects.exit('codeFlowValue')
|
||||
return afterPrefix(code)
|
||||
}
|
||||
|
||||
effects.consume(code)
|
||||
return content
|
||||
}
|
||||
}
|
||||
|
||||
function tokenizeIndentedContent(effects, ok, nok) {
|
||||
var self = this
|
||||
return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)
|
||||
|
||||
function afterPrefix(code) {
|
||||
if (markdownLineEnding(code)) {
|
||||
effects.enter('lineEnding')
|
||||
effects.consume(code)
|
||||
effects.exit('lineEnding')
|
||||
return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)
|
||||
}
|
||||
|
||||
return prefixSize(self.events, 'linePrefix') < 4 ? nok(code) : ok(code)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = codeIndented
|
||||
162
node_modules/micromark/dist/tokenize/code-text.js
generated
vendored
Normal file
162
node_modules/micromark/dist/tokenize/code-text.js
generated
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
|
||||
var codeText = {
|
||||
name: 'codeText',
|
||||
tokenize: tokenizeCodeText,
|
||||
resolve: resolveCodeText,
|
||||
previous: previous
|
||||
}
|
||||
|
||||
function resolveCodeText(events) {
|
||||
var tailExitIndex = events.length - 4
|
||||
var headEnterIndex = 3
|
||||
var index
|
||||
var enter // If we start and end with an EOL or a space.
|
||||
|
||||
if (
|
||||
(events[headEnterIndex][1].type === 'lineEnding' ||
|
||||
events[headEnterIndex][1].type === 'space') &&
|
||||
(events[tailExitIndex][1].type === 'lineEnding' ||
|
||||
events[tailExitIndex][1].type === 'space')
|
||||
) {
|
||||
index = headEnterIndex // And we have data.
|
||||
|
||||
while (++index < tailExitIndex) {
|
||||
if (events[index][1].type === 'codeTextData') {
|
||||
// Then we have padding.
|
||||
events[tailExitIndex][1].type = events[headEnterIndex][1].type =
|
||||
'codeTextPadding'
|
||||
headEnterIndex += 2
|
||||
tailExitIndex -= 2
|
||||
break
|
||||
}
|
||||
}
|
||||
} // Merge adjacent spaces and data.
|
||||
|
||||
index = headEnterIndex - 1
|
||||
tailExitIndex++
|
||||
|
||||
while (++index <= tailExitIndex) {
|
||||
if (enter === undefined) {
|
||||
if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') {
|
||||
enter = index
|
||||
}
|
||||
} else if (
|
||||
index === tailExitIndex ||
|
||||
events[index][1].type === 'lineEnding'
|
||||
) {
|
||||
events[enter][1].type = 'codeTextData'
|
||||
|
||||
if (index !== enter + 2) {
|
||||
events[enter][1].end = events[index - 1][1].end
|
||||
events.splice(enter + 2, index - enter - 2)
|
||||
tailExitIndex -= index - enter - 2
|
||||
index = enter + 2
|
||||
}
|
||||
|
||||
enter = undefined
|
||||
}
|
||||
}
|
||||
|
||||
return events
|
||||
}
|
||||
|
||||
function previous(code) {
|
||||
// If there is a previous code, there will always be a tail.
|
||||
return (
|
||||
code !== 96 ||
|
||||
this.events[this.events.length - 1][1].type === 'characterEscape'
|
||||
)
|
||||
}
|
||||
|
||||
function tokenizeCodeText(effects, ok, nok) {
|
||||
var sizeOpen = 0
|
||||
var size
|
||||
var token
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('codeText')
|
||||
effects.enter('codeTextSequence')
|
||||
return openingSequence(code)
|
||||
}
|
||||
|
||||
function openingSequence(code) {
|
||||
if (code === 96) {
|
||||
effects.consume(code)
|
||||
sizeOpen++
|
||||
return openingSequence
|
||||
}
|
||||
|
||||
effects.exit('codeTextSequence')
|
||||
return gap(code)
|
||||
}
|
||||
|
||||
function gap(code) {
|
||||
// EOF.
|
||||
if (code === null) {
|
||||
return nok(code)
|
||||
} // Closing fence?
|
||||
// Could also be data.
|
||||
|
||||
if (code === 96) {
|
||||
token = effects.enter('codeTextSequence')
|
||||
size = 0
|
||||
return closingSequence(code)
|
||||
} // Tabs don’t work, and virtual spaces don’t make sense.
|
||||
|
||||
if (code === 32) {
|
||||
effects.enter('space')
|
||||
effects.consume(code)
|
||||
effects.exit('space')
|
||||
return gap
|
||||
}
|
||||
|
||||
if (markdownLineEnding(code)) {
|
||||
effects.enter('lineEnding')
|
||||
effects.consume(code)
|
||||
effects.exit('lineEnding')
|
||||
return gap
|
||||
} // Data.
|
||||
|
||||
effects.enter('codeTextData')
|
||||
return data(code)
|
||||
} // In code.
|
||||
|
||||
function data(code) {
|
||||
if (
|
||||
code === null ||
|
||||
code === 32 ||
|
||||
code === 96 ||
|
||||
markdownLineEnding(code)
|
||||
) {
|
||||
effects.exit('codeTextData')
|
||||
return gap(code)
|
||||
}
|
||||
|
||||
effects.consume(code)
|
||||
return data
|
||||
} // Closing fence.
|
||||
|
||||
function closingSequence(code) {
|
||||
// More.
|
||||
if (code === 96) {
|
||||
effects.consume(code)
|
||||
size++
|
||||
return closingSequence
|
||||
} // Done!
|
||||
|
||||
if (size === sizeOpen) {
|
||||
effects.exit('codeTextSequence')
|
||||
effects.exit('codeText')
|
||||
return ok(code)
|
||||
} // More or less accents: mark as data.
|
||||
|
||||
token.type = 'codeTextData'
|
||||
return data(code)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = codeText
|
||||
99
node_modules/micromark/dist/tokenize/content.js
generated
vendored
Normal file
99
node_modules/micromark/dist/tokenize/content.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var prefixSize = require('../util/prefix-size.js')
|
||||
var subtokenize = require('../util/subtokenize.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// No name because it must not be turned off.
|
||||
var content = {
|
||||
tokenize: tokenizeContent,
|
||||
resolve: resolveContent,
|
||||
interruptible: true,
|
||||
lazy: true
|
||||
}
|
||||
var continuationConstruct = {
|
||||
tokenize: tokenizeContinuation,
|
||||
partial: true
|
||||
} // Content is transparent: it’s parsed right now. That way, definitions are also
|
||||
// parsed right now: before text in paragraphs (specifically, media) are parsed.
|
||||
|
||||
function resolveContent(events) {
|
||||
subtokenize(events)
|
||||
return events
|
||||
}
|
||||
|
||||
function tokenizeContent(effects, ok) {
|
||||
var previous
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('content')
|
||||
previous = effects.enter('chunkContent', {
|
||||
contentType: 'content'
|
||||
})
|
||||
return data(code)
|
||||
}
|
||||
|
||||
function data(code) {
|
||||
if (code === null) {
|
||||
return contentEnd(code)
|
||||
}
|
||||
|
||||
if (markdownLineEnding(code)) {
|
||||
return effects.check(
|
||||
continuationConstruct,
|
||||
contentContinue,
|
||||
contentEnd
|
||||
)(code)
|
||||
} // Data.
|
||||
|
||||
effects.consume(code)
|
||||
return data
|
||||
}
|
||||
|
||||
function contentEnd(code) {
|
||||
effects.exit('chunkContent')
|
||||
effects.exit('content')
|
||||
return ok(code)
|
||||
}
|
||||
|
||||
function contentContinue(code) {
|
||||
effects.consume(code)
|
||||
effects.exit('chunkContent')
|
||||
previous = previous.next = effects.enter('chunkContent', {
|
||||
contentType: 'content',
|
||||
previous: previous
|
||||
})
|
||||
return data
|
||||
}
|
||||
}
|
||||
|
||||
function tokenizeContinuation(effects, ok, nok) {
|
||||
var self = this
|
||||
return startLookahead
|
||||
|
||||
function startLookahead(code) {
|
||||
effects.enter('lineEnding')
|
||||
effects.consume(code)
|
||||
effects.exit('lineEnding')
|
||||
return factorySpace(effects, prefixed, 'linePrefix')
|
||||
}
|
||||
|
||||
function prefixed(code) {
|
||||
if (code === null || markdownLineEnding(code)) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
if (
|
||||
self.parser.constructs.disable.null.indexOf('codeIndented') > -1 ||
|
||||
prefixSize(self.events, 'linePrefix') < 4
|
||||
) {
|
||||
return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
|
||||
}
|
||||
|
||||
return ok(code)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = content
|
||||
115
node_modules/micromark/dist/tokenize/definition.js
generated
vendored
Normal file
115
node_modules/micromark/dist/tokenize/definition.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var normalizeIdentifier = require('../util/normalize-identifier.js')
|
||||
var factoryDestination = require('./factory-destination.js')
|
||||
var factoryLabel = require('./factory-label.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
var factoryWhitespace = require('./factory-whitespace.js')
|
||||
var factoryTitle = require('./factory-title.js')
|
||||
|
||||
var definition = {
|
||||
name: 'definition',
|
||||
tokenize: tokenizeDefinition
|
||||
}
|
||||
var titleConstruct = {
|
||||
tokenize: tokenizeTitle,
|
||||
partial: true
|
||||
}
|
||||
|
||||
// Tokenize a definition: `[label]: destination` with an optional title.
// The label is handled by `factoryLabel`, the destination by
// `factoryDestination`, and the title by the partial `titleConstruct`.
function tokenizeDefinition(effects, ok, nok) {
  var self = this
  var identifier
  return start

  function start(code) {
    effects.enter('definition')
    // Parse the `[label]` part; continue at `labelAfter` on success.
    return factoryLabel.call(
      self,
      effects,
      labelAfter,
      nok,
      'definitionLabel',
      'definitionLabelMarker',
      'definitionLabelString'
    )(code)
  }

  function labelAfter(code) {
    // Normalize the label text (last exited token), minus its brackets.
    identifier = normalizeIdentifier(
      self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
    )

    // 58 is `:`.
    if (code === 58) {
      effects.enter('definitionMarker')
      effects.consume(code)
      effects.exit('definitionMarker') // Note: blank lines can’t exist in content.

      // Optional whitespace, then the destination; afterwards attempt the
      // optional title — either way, finish at `after` via `factorySpace`.
      return factoryWhitespace(
        effects,
        factoryDestination(
          effects,
          effects.attempt(
            titleConstruct,
            factorySpace(effects, after, 'whitespace'),
            factorySpace(effects, after, 'whitespace')
          ),
          nok,
          'definitionDestination',
          'definitionDestinationLiteral',
          'definitionDestinationLiteralMarker',
          'definitionDestinationRaw',
          'definitionDestinationString'
        )
      )
    }

    return nok(code)
  }

  function after(code) {
    // A definition must end at a line ending or EOF.
    if (code === null || markdownLineEnding(code)) {
      effects.exit('definition')

      // Remember the identifier so references to it can resolve.
      if (self.parser.defined.indexOf(identifier) < 0) {
        self.parser.defined.push(identifier)
      }

      return ok(code)
    }

    return nok(code)
  }
}
|
||||
|
||||
// Tokenize the optional title of a definition: whitespace, then a quoted or
// parenthesized title, then optional trailing whitespace up to the line end.
function tokenizeTitle(effects, ok, nok) {
  return atStart

  function atStart(code) {
    // The title must be separated from the destination by whitespace.
    if (markdownLineEndingOrSpace(code)) {
      return factoryWhitespace(effects, atMarker)(code)
    }

    return nok(code)
  }

  function atMarker(code) {
    // 34 is `"`, 39 is `'`, 40 is `(`.
    if (code !== 34 && code !== 39 && code !== 40) {
      return nok(code)
    }

    return factoryTitle(
      effects,
      factorySpace(effects, atEnd, 'whitespace'),
      nok,
      'definitionTitle',
      'definitionTitleMarker',
      'definitionTitleString'
    )(code)
  }

  function atEnd(code) {
    // Only a line ending or EOF may follow the title.
    if (code === null || markdownLineEnding(code)) {
      return ok(code)
    }

    return nok(code)
  }
}

module.exports = definition
|
||||
131
node_modules/micromark/dist/tokenize/factory-destination.js
generated
vendored
Normal file
131
node_modules/micromark/dist/tokenize/factory-destination.js
generated
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
'use strict'
|
||||
|
||||
var asciiControl = require('../character/ascii-control.js')
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
|
||||
// eslint-disable-next-line max-params
// Parse a destination: either enclosed (`<…>`) or raw (balanced parens up to
// `max` levels of nesting). Emits `type`, with `literalType`/`rawType`
// subtokens and a `chunkString` for the content.
function destinationFactory(
  effects,
  ok,
  nok,
  type,
  literalType,
  literalMarkerType,
  rawType,
  stringType,
  max
) {
  // `max` bounds raw-paren nesting; unlimited when not given.
  var limit = max || Infinity
  var balance = 0
  return start

  function start(code) {
    // 60 is `<`: an enclosed destination.
    if (code === 60) {
      effects.enter(type)
      effects.enter(literalType)
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      return destinationEnclosedBefore
    }

    // 41 is `)`: a raw destination cannot start with a closing paren.
    if (asciiControl(code) || code === 41) {
      return nok(code)
    }

    effects.enter(type)
    effects.enter(rawType)
    effects.enter(stringType)
    effects.enter('chunkString', {
      contentType: 'string'
    })
    return destinationRaw(code)
  }

  function destinationEnclosedBefore(code) {
    // 62 is `>`: an empty enclosed destination.
    if (code === 62) {
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      effects.exit(literalType)
      effects.exit(type)
      return ok
    }

    effects.enter(stringType)
    effects.enter('chunkString', {
      contentType: 'string'
    })
    return destinationEnclosed(code)
  }

  function destinationEnclosed(code) {
    // 62 is `>`: close the string and re-enter the marker state.
    if (code === 62) {
      effects.exit('chunkString')
      effects.exit(stringType)
      return destinationEnclosedBefore(code)
    }

    // EOF, `<` (60), and line endings are not allowed inside `<…>`.
    if (code === null || code === 60 || markdownLineEnding(code)) {
      return nok(code)
    }

    effects.consume(code)
    // 92 is `\`: next character may be escaped.
    return code === 92 ? destinationEnclosedEscape : destinationEnclosed
  }

  function destinationEnclosedEscape(code) {
    // Only `<` (60), `>` (62), and `\` (92) can be escaped here.
    if (code === 60 || code === 62 || code === 92) {
      effects.consume(code)
      return destinationEnclosed
    }

    return destinationEnclosed(code)
  }

  function destinationRaw(code) {
    // 40 is `(`: nested parens must stay within `limit`.
    if (code === 40) {
      if (++balance > limit) return nok(code)
      effects.consume(code)
      return destinationRaw
    }

    // 41 is `)`: a closer below depth zero ends the destination.
    if (code === 41) {
      if (!balance--) {
        effects.exit('chunkString')
        effects.exit(stringType)
        effects.exit(rawType)
        effects.exit(type)
        return ok(code)
      }

      effects.consume(code)
      return destinationRaw
    }

    // Whitespace/EOF ends a raw destination — but only at balance zero.
    if (code === null || markdownLineEndingOrSpace(code)) {
      if (balance) return nok(code)
      effects.exit('chunkString')
      effects.exit(stringType)
      effects.exit(rawType)
      effects.exit(type)
      return ok(code)
    }

    if (asciiControl(code)) return nok(code)
    effects.consume(code)
    // 92 is `\`: next character may be escaped.
    return code === 92 ? destinationRawEscape : destinationRaw
  }

  function destinationRawEscape(code) {
    // Only `(` (40), `)` (41), and `\` (92) can be escaped here.
    if (code === 40 || code === 41 || code === 92) {
      effects.consume(code)
      return destinationRaw
    }

    return destinationRaw(code)
  }
}

module.exports = destinationFactory
|
||||
88
node_modules/micromark/dist/tokenize/factory-label.js
generated
vendored
Normal file
88
node_modules/micromark/dist/tokenize/factory-label.js
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
|
||||
// eslint-disable-next-line max-params
// Parse a `[label]`: opening bracket, up to 999 characters (which must
// include at least one non-whitespace character), closing bracket.
function labelFactory(effects, ok, nok, type, markerType, stringType) {
  var self = this
  // Number of characters seen inside the brackets.
  var size = 0
  // Whether a non-whitespace character has been seen (a label needs one).
  var data
  return start

  function start(code) {
    // The construct is only tried on `[`, so consume it unconditionally.
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    effects.enter(stringType)
    return atBreak
  }

  function atBreak(code) {
    // 91 is `[` (not allowed nested), 93 is `]` (empty label is invalid),
    // 94 is `^` (footnote-like extension hook).
    if (
      code === null ||
      code === 91 ||
      (code === 93 && !data) ||
      /* c8 ignore next */
      (code === 94 &&
        /* c8 ignore next */
        !size &&
        /* c8 ignore next */
        '_hiddenFootnoteSupport' in self.parser.constructs) ||
      size > 999
    ) {
      return nok(code)
    }

    // 93 is `]`: close the label.
    if (code === 93) {
      effects.exit(stringType)
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }

    if (markdownLineEnding(code)) {
      effects.enter('lineEnding')
      effects.consume(code)
      effects.exit('lineEnding')
      return atBreak
    }

    effects.enter('chunkString', {
      contentType: 'string'
    })
    return label(code)
  }

  function label(code) {
    // 91 `[`, 93 `]`, line endings, and the size cap all end the chunk.
    if (
      code === null ||
      code === 91 ||
      code === 93 ||
      markdownLineEnding(code) ||
      size++ > 999
    ) {
      effects.exit('chunkString')
      return atBreak(code)
    }

    effects.consume(code)
    // Mark the label as non-empty once any non-space character is seen.
    data = data || !markdownSpace(code)
    // 92 is `\`: next character may be escaped.
    return code === 92 ? labelEscape : label
  }

  function labelEscape(code) {
    // Only `[` (91), `\` (92), and `]` (93) can be escaped here.
    if (code === 91 || code === 92 || code === 93) {
      effects.consume(code)
      size++
      return label
    }

    return label(code)
  }
}

module.exports = labelFactory
|
||||
30
node_modules/micromark/dist/tokenize/factory-space.js
generated
vendored
Normal file
30
node_modules/micromark/dist/tokenize/factory-space.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
'use strict'
|
||||
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
|
||||
// Parse a bounded run of markdown spaces/tabs into a token of `type`.
// `max` bounds the run; it is unbounded when `max` is not given.
function spaceFactory(effects, ok, type, max) {
  // The counter below is post-incremented before comparison, hence `max - 1`.
  var most = max ? max - 1 : Infinity
  var seen = 0
  return enterIfSpace

  function enterIfSpace(code) {
    if (!markdownSpace(code)) {
      return ok(code)
    }

    effects.enter(type)
    return inside(code)
  }

  function inside(code) {
    if (markdownSpace(code) && seen++ < most) {
      effects.consume(code)
      return inside
    }

    effects.exit(type)
    return ok(code)
  }
}

module.exports = spaceFactory
|
||||
75
node_modules/micromark/dist/tokenize/factory-title.js
generated
vendored
Normal file
75
node_modules/micromark/dist/tokenize/factory-title.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Parse a title: `"…"`, `'…'`, or `(…)`. The closing marker is derived from
// the opening one; titles may span lines but cannot contain blank lines.
function titleFactory(effects, ok, nok, type, markerType, stringType) {
  var marker
  return start

  function start(code) {
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    // 40 is `(`, which closes with `)` (41); quotes close with themselves.
    marker = code === 40 ? 41 : code
    return atFirstTitleBreak
  }

  function atFirstTitleBreak(code) {
    // The closing marker: also reached immediately for an empty title.
    if (code === marker) {
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }

    effects.enter(stringType)
    return atTitleBreak(code)
  }

  function atTitleBreak(code) {
    // Close the string, then let `atFirstTitleBreak` handle the marker.
    if (code === marker) {
      effects.exit(stringType)
      return atFirstTitleBreak(marker)
    }

    if (code === null) {
      return nok(code)
    } // Note: blank lines can’t exist in content.

    if (markdownLineEnding(code)) {
      effects.enter('lineEnding')
      effects.consume(code)
      effects.exit('lineEnding')
      // Skip indentation on the continuation line.
      return factorySpace(effects, atTitleBreak, 'linePrefix')
    }

    effects.enter('chunkString', {
      contentType: 'string'
    })
    return title(code)
  }

  function title(code) {
    if (code === marker || code === null || markdownLineEnding(code)) {
      effects.exit('chunkString')
      return atTitleBreak(code)
    }

    effects.consume(code)
    // 92 is `\`: next character may be escaped.
    return code === 92 ? titleEscape : title
  }

  function titleEscape(code) {
    // Only the closing marker and `\` (92) can be escaped here.
    if (code === marker || code === 92) {
      effects.consume(code)
      return title
    }

    return title(code)
  }
}

module.exports = titleFactory
|
||||
32
node_modules/micromark/dist/tokenize/factory-whitespace.js
generated
vendored
Normal file
32
node_modules/micromark/dist/tokenize/factory-whitespace.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Parse whitespace that may include line endings. Spaces after a line ending
// are a `linePrefix`; spaces before any line ending are a `lineSuffix`.
function whitespaceFactory(effects, ok) {
  var crossedEol
  return scan

  function scan(code) {
    if (markdownLineEnding(code)) {
      effects.enter('lineEnding')
      effects.consume(code)
      effects.exit('lineEnding')
      crossedEol = true
      return scan
    }

    if (markdownSpace(code)) {
      var kind = crossedEol ? 'linePrefix' : 'lineSuffix'
      return factorySpace(effects, scan, kind)(code)
    }

    return ok(code)
  }
}

module.exports = whitespaceFactory
|
||||
31
node_modules/micromark/dist/tokenize/hard-break-escape.js
generated
vendored
Normal file
31
node_modules/micromark/dist/tokenize/hard-break-escape.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
|
||||
// Construct for a hard break written as a backslash before a line ending.
var hardBreakEscape = {
  name: 'hardBreakEscape',
  tokenize: tokenizeHardBreakEscape
}

function tokenizeHardBreakEscape(effects, ok, nok) {
  return atBackslash

  function atBackslash(code) {
    // The construct is only tried on `\`, so consume it unconditionally.
    effects.enter('hardBreakEscape')
    effects.enter('escapeMarker')
    effects.consume(code)
    return afterBackslash
  }

  function afterBackslash(code) {
    // Only a line ending turns the escape into a hard break.
    if (!markdownLineEnding(code)) {
      return nok(code)
    }

    effects.exit('escapeMarker')
    effects.exit('hardBreakEscape')
    return ok(code)
  }
}

module.exports = hardBreakEscape
|
||||
129
node_modules/micromark/dist/tokenize/heading-atx.js
generated
vendored
Normal file
129
node_modules/micromark/dist/tokenize/heading-atx.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var chunkedSplice = require('../util/chunked-splice.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Construct for an ATX heading (`# Heading`).
var headingAtx = {
  name: 'headingAtx',
  tokenize: tokenizeHeadingAtx,
  resolve: resolveHeadingAtx
}
|
||||
|
||||
// Rewrite the heading’s events: strip opening/closing sequences and their
// surrounding whitespace, wrapping what remains in `atxHeadingText` plus a
// `chunkText` for later text-level tokenization.
function resolveHeadingAtx(events, context) {
  var contentEnd = events.length - 2
  var contentStart = 3
  var content
  var text // Prefix whitespace, part of the opening.

  if (events[contentStart][1].type === 'whitespace') {
    contentStart += 2
  } // Suffix whitespace, part of the closing.

  if (
    contentEnd - 2 > contentStart &&
    events[contentEnd][1].type === 'whitespace'
  ) {
    contentEnd -= 2
  }

  // A trailing `#` sequence (with whitespace before it, or adjacent to the
  // opening) belongs to the closing, not the content.
  if (
    events[contentEnd][1].type === 'atxHeadingSequence' &&
    (contentStart === contentEnd - 1 ||
      (contentEnd - 4 > contentStart &&
        events[contentEnd - 2][1].type === 'whitespace'))
  ) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4
  }

  if (contentEnd > contentStart) {
    content = {
      type: 'atxHeadingText',
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    }
    text = {
      type: 'chunkText',
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: 'text'
    }
    // Replace the content events with enter/exit pairs for both wrappers.
    chunkedSplice(events, contentStart, contentEnd - contentStart + 1, [
      ['enter', content, context],
      ['enter', text, context],
      ['exit', text, context],
      ['exit', content, context]
    ])
  }

  return events
}
|
||||
|
||||
// Tokenize an ATX heading: 1–6 `#` characters, then optional content and an
// optional closing `#` sequence, all on one line.
function tokenizeHeadingAtx(effects, ok, nok) {
  var self = this
  // Number of `#` characters in the opening sequence.
  var size = 0
  return start

  function start(code) {
    effects.enter('atxHeading')
    effects.enter('atxHeadingSequence')
    return fenceOpenInside(code)
  }

  function fenceOpenInside(code) {
    // 35 is `#`; at most six are allowed in the opening sequence.
    if (code === 35 && size++ < 6) {
      effects.consume(code)
      return fenceOpenInside
    }

    // The sequence must be followed by whitespace, a line ending, or EOF.
    if (code === null || markdownLineEndingOrSpace(code)) {
      effects.exit('atxHeadingSequence')
      // When interrupting, only the opening sequence matters.
      return self.interrupt ? ok(code) : headingBreak(code)
    }

    return nok(code)
  }

  function headingBreak(code) {
    // 35 is `#`: another sequence (possibly the closing one).
    if (code === 35) {
      effects.enter('atxHeadingSequence')
      return sequence(code)
    }

    if (code === null || markdownLineEnding(code)) {
      effects.exit('atxHeading')
      return ok(code)
    }

    if (markdownSpace(code)) {
      return factorySpace(effects, headingBreak, 'whitespace')(code)
    }

    effects.enter('atxHeadingText')
    return data(code)
  }

  function sequence(code) {
    if (code === 35) {
      effects.consume(code)
      return sequence
    }

    effects.exit('atxHeadingSequence')
    return headingBreak(code)
  }

  function data(code) {
    // Text runs until EOF, `#` (35), a space, or a line ending.
    if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {
      effects.exit('atxHeadingText')
      return headingBreak(code)
    }

    effects.consume(code)
    return data
  }
}

module.exports = headingAtx
|
||||
486
node_modules/micromark/dist/tokenize/html-flow.js
generated
vendored
Normal file
486
node_modules/micromark/dist/tokenize/html-flow.js
generated
vendored
Normal file
@@ -0,0 +1,486 @@
|
||||
'use strict'
|
||||
|
||||
var asciiAlpha = require('../character/ascii-alpha.js')
|
||||
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var fromCharCode = require('../constant/from-char-code.js')
|
||||
var htmlBlockNames = require('../constant/html-block-names.js')
|
||||
var htmlRawNames = require('../constant/html-raw-names.js')
|
||||
var partialBlankLine = require('./partial-blank-line.js')
|
||||
|
||||
// Construct for HTML (flow). `concrete` because HTML blocks keep their
// content as-is.
var htmlFlow = {
  name: 'htmlFlow',
  tokenize: tokenizeHtmlFlow,
  resolveTo: resolveToHtmlFlow,
  concrete: true
}
// Partial used to check whether the next line is blank (ends kinds 6/7).
var nextBlankConstruct = {
  tokenize: tokenizeNextBlank,
  partial: true
}
|
||||
|
||||
// Fold a preceding `linePrefix` into the HTML token so the block starts at
// the line’s indentation.
function resolveToHtmlFlow(events) {
  var index = events.length

  // Find the most recent `htmlFlow` enter event.
  while (index--) {
    if (events[index][0] === 'enter' && events[index][1].type === 'htmlFlow') {
      break
    }
  }

  if (index > 1 && events[index - 2][1].type === 'linePrefix') {
    // Add the prefix start to the HTML token.
    events[index][1].start = events[index - 2][1].start // Add the prefix start to the HTML line token.

    events[index + 1][1].start = events[index - 2][1].start // Remove the line prefix.

    events.splice(index - 2, 2)
  }

  return events
}
|
||||
|
||||
// Tokenize HTML (flow). `kind` tracks the seven CommonMark HTML block kinds:
// 1 raw text tags (from `htmlRawNames`), 2 comment (`<!--`), 3 instruction
// (`<?`), 4 declaration (`<!x`), 5 CDATA (`<![CDATA[`), 6 known block names
// (from `htmlBlockNames`), 7 any other complete tag.
function tokenizeHtmlFlow(effects, ok, nok) {
  var self = this
  // Block kind (1–7), see above.
  var kind
  // Whether the tag being parsed is an opening tag.
  var startTag
  // Collected tag name (or the expected `CDATA[` sequence).
  var buffer
  // Position within `buffer` while matching `CDATA[`.
  var index
  // Quote character (34 `"` or 39 `'`) of an attribute value.
  var marker
  return start

  function start(code) {
    // The construct is only tried on `<`, so consume it unconditionally.
    effects.enter('htmlFlow')
    effects.enter('htmlFlowData')
    effects.consume(code)
    return open
  }

  function open(code) {
    // 33 is `!`: declaration, comment, or CDATA.
    if (code === 33) {
      effects.consume(code)
      return declarationStart
    }

    // 47 is `/`: a closing tag.
    if (code === 47) {
      effects.consume(code)
      return tagCloseStart
    }

    // 63 is `?`: a processing instruction.
    if (code === 63) {
      effects.consume(code)
      kind = 3 // While we’re in an instruction instead of a declaration, we’re on a `?`
      // right now, so we do need to search for `>`, similar to declarations.

      return self.interrupt ? ok : continuationDeclarationInside
    }

    if (asciiAlpha(code)) {
      effects.consume(code)
      buffer = fromCharCode(code)
      startTag = true
      return tagName
    }

    return nok(code)
  }

  function declarationStart(code) {
    // 45 is `-`: a comment (`<!--`).
    if (code === 45) {
      effects.consume(code)
      kind = 2
      return commentOpenInside
    }

    // 91 is `[`: CDATA (`<![CDATA[`).
    if (code === 91) {
      effects.consume(code)
      kind = 5
      buffer = 'CDATA['
      index = 0
      return cdataOpenInside
    }

    if (asciiAlpha(code)) {
      effects.consume(code)
      kind = 4
      return self.interrupt ? ok : continuationDeclarationInside
    }

    return nok(code)
  }

  function commentOpenInside(code) {
    // The second `-` of `<!--`.
    if (code === 45) {
      effects.consume(code)
      return self.interrupt ? ok : continuationDeclarationInside
    }

    return nok(code)
  }

  function cdataOpenInside(code) {
    // Match `CDATA[` character by character.
    if (code === buffer.charCodeAt(index++)) {
      effects.consume(code)
      return index === buffer.length
        ? self.interrupt
          ? ok
          : continuation
        : cdataOpenInside
    }

    return nok(code)
  }

  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      buffer = fromCharCode(code)
      return tagName
    }

    return nok(code)
  }

  function tagName(code) {
    // End of the name: EOF, `/` (47), `>` (62), or whitespace.
    if (
      code === null ||
      code === 47 ||
      code === 62 ||
      markdownLineEndingOrSpace(code)
    ) {
      // An opening raw-text tag (not self-closing): kind 1.
      if (
        code !== 47 &&
        startTag &&
        htmlRawNames.indexOf(buffer.toLowerCase()) > -1
      ) {
        kind = 1
        return self.interrupt ? ok(code) : continuation(code)
      }

      // A known block-level name: kind 6.
      if (htmlBlockNames.indexOf(buffer.toLowerCase()) > -1) {
        kind = 6

        if (code === 47) {
          effects.consume(code)
          return basicSelfClosing
        }

        return self.interrupt ? ok(code) : continuation(code)
      }

      kind = 7 // Do not support complete HTML when interrupting.

      return self.interrupt
        ? nok(code)
        : startTag
        ? completeAttributeNameBefore(code)
        : completeClosingTagAfter(code)
    }

    // 45 is `-`: names may contain dashes and alphanumerics.
    if (code === 45 || asciiAlphanumeric(code)) {
      effects.consume(code)
      buffer += fromCharCode(code)
      return tagName
    }

    return nok(code)
  }

  function basicSelfClosing(code) {
    // 62 is `>`.
    if (code === 62) {
      effects.consume(code)
      return self.interrupt ? ok : continuation
    }

    return nok(code)
  }

  function completeClosingTagAfter(code) {
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeClosingTagAfter
    }

    return completeEnd(code)
  }

  function completeAttributeNameBefore(code) {
    // 47 is `/`: a self-closing complete tag.
    if (code === 47) {
      effects.consume(code)
      return completeEnd
    }

    // Attribute names may start with `:` (58), `_` (95), or a letter.
    if (code === 58 || code === 95 || asciiAlpha(code)) {
      effects.consume(code)
      return completeAttributeName
    }

    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeNameBefore
    }

    return completeEnd(code)
  }

  function completeAttributeName(code) {
    // Names continue with `-` (45), `.` (46), `:` (58), `_` (95), alnum.
    if (
      code === 45 ||
      code === 46 ||
      code === 58 ||
      code === 95 ||
      asciiAlphanumeric(code)
    ) {
      effects.consume(code)
      return completeAttributeName
    }

    return completeAttributeNameAfter(code)
  }

  function completeAttributeNameAfter(code) {
    // 61 is `=`: an attribute value follows.
    if (code === 61) {
      effects.consume(code)
      return completeAttributeValueBefore
    }

    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeNameAfter
    }

    return completeAttributeNameBefore(code)
  }

  function completeAttributeValueBefore(code) {
    // Disallowed before a value: EOF, `<` (60), `=` (61), `>` (62), `` ` `` (96).
    if (
      code === null ||
      code === 60 ||
      code === 61 ||
      code === 62 ||
      code === 96
    ) {
      return nok(code)
    }

    // 34 is `"`, 39 is `'`: a quoted value.
    if (code === 34 || code === 39) {
      effects.consume(code)
      marker = code
      return completeAttributeValueQuoted
    }

    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeValueBefore
    }

    marker = undefined
    return completeAttributeValueUnquoted(code)
  }

  function completeAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code)
      return completeAttributeValueQuotedAfter
    }

    // Quoted values cannot span lines in flow HTML.
    if (code === null || markdownLineEnding(code)) {
      return nok(code)
    }

    effects.consume(code)
    return completeAttributeValueQuoted
  }

  function completeAttributeValueUnquoted(code) {
    // Unquoted values end at EOF, quotes, `<=>`, backtick, or whitespace.
    if (
      code === null ||
      code === 34 ||
      code === 39 ||
      code === 60 ||
      code === 61 ||
      code === 62 ||
      code === 96 ||
      markdownLineEndingOrSpace(code)
    ) {
      return completeAttributeNameAfter(code)
    }

    effects.consume(code)
    return completeAttributeValueUnquoted
  }

  function completeAttributeValueQuotedAfter(code) {
    // After a quoted value: `/` (47), `>` (62), or whitespace.
    if (code === 47 || code === 62 || markdownSpace(code)) {
      return completeAttributeNameBefore(code)
    }

    return nok(code)
  }

  function completeEnd(code) {
    // 62 is `>`.
    if (code === 62) {
      effects.consume(code)
      return completeAfter
    }

    return nok(code)
  }

  function completeAfter(code) {
    // Only trailing spaces may follow a complete tag on its line.
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAfter
    }

    return code === null || markdownLineEnding(code)
      ? continuation(code)
      : nok(code)
  }

  function continuation(code) {
    // Each kind watches for the start of its own closing sequence:
    // `-` (45) for comments, `<` (60) for raw tags, `>` (62) for
    // declarations, `?` (63) for instructions, `]` (93) for CDATA.
    if (code === 45 && kind === 2) {
      effects.consume(code)
      return continuationCommentInside
    }

    if (code === 60 && kind === 1) {
      effects.consume(code)
      return continuationRawTagOpen
    }

    if (code === 62 && kind === 4) {
      effects.consume(code)
      return continuationClose
    }

    if (code === 63 && kind === 3) {
      effects.consume(code)
      return continuationDeclarationInside
    }

    if (code === 93 && kind === 5) {
      effects.consume(code)
      return continuationCharacterDataInside
    }

    // Kinds 6 and 7 end before a blank line.
    if (markdownLineEnding(code) && (kind === 6 || kind === 7)) {
      return effects.check(
        nextBlankConstruct,
        continuationClose,
        continuationAtLineEnding
      )(code)
    }

    if (code === null || markdownLineEnding(code)) {
      return continuationAtLineEnding(code)
    }

    effects.consume(code)
    return continuation
  }

  function continuationAtLineEnding(code) {
    effects.exit('htmlFlowData')
    return htmlContinueStart(code)
  }

  function htmlContinueStart(code) {
    if (code === null) {
      return done(code)
    }

    if (markdownLineEnding(code)) {
      effects.enter('lineEnding')
      effects.consume(code)
      effects.exit('lineEnding')
      return htmlContinueStart
    }

    effects.enter('htmlFlowData')
    return continuation(code)
  }

  function continuationCommentInside(code) {
    // The second `-` of a possible `-->`.
    if (code === 45) {
      effects.consume(code)
      return continuationDeclarationInside
    }

    return continuation(code)
  }

  function continuationRawTagOpen(code) {
    // 47 is `/`: a possible closing raw-text tag.
    if (code === 47) {
      effects.consume(code)
      buffer = ''
      return continuationRawEndTag
    }

    return continuation(code)
  }

  function continuationRawEndTag(code) {
    // 62 is `>`: close only if the collected name is a known raw name.
    if (code === 62 && htmlRawNames.indexOf(buffer.toLowerCase()) > -1) {
      effects.consume(code)
      return continuationClose
    }

    // Raw names are short; stop collecting past 8 characters.
    if (asciiAlpha(code) && buffer.length < 8) {
      effects.consume(code)
      buffer += fromCharCode(code)
      return continuationRawEndTag
    }

    return continuation(code)
  }

  function continuationCharacterDataInside(code) {
    // The second `]` of a possible `]]>`.
    if (code === 93) {
      effects.consume(code)
      return continuationDeclarationInside
    }

    return continuation(code)
  }

  function continuationDeclarationInside(code) {
    // 62 is `>`: the closing sequence is complete.
    if (code === 62) {
      effects.consume(code)
      return continuationClose
    }

    return continuation(code)
  }

  function continuationClose(code) {
    // Consume the rest of the line, then finish.
    if (code === null || markdownLineEnding(code)) {
      effects.exit('htmlFlowData')
      return done(code)
    }

    effects.consume(code)
    return continuationClose
  }

  function done(code) {
    effects.exit('htmlFlow')
    return ok(code)
  }
}
|
||||
|
||||
// Partial tokenizer: succeeds when the line after the current line ending is
// blank (which terminates HTML blocks of kind 6 and 7).
function tokenizeNextBlank(effects, ok, nok) {
  return atLineEnding

  function atLineEnding(code) {
    // Close the data, take the line ending, then test for a blank line.
    effects.exit('htmlFlowData')
    effects.enter('lineEndingBlank')
    effects.consume(code)
    effects.exit('lineEndingBlank')
    return effects.attempt(partialBlankLine, ok, nok)
  }
}

module.exports = htmlFlow
|
||||
435
node_modules/micromark/dist/tokenize/html-text.js
generated
vendored
Normal file
435
node_modules/micromark/dist/tokenize/html-text.js
generated
vendored
Normal file
@@ -0,0 +1,435 @@
|
||||
'use strict'
|
||||
|
||||
var asciiAlpha = require('../character/ascii-alpha.js')
|
||||
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Construct for HTML (text): inline tags, comments, instructions, etc.
var htmlText = {
  name: 'htmlText',
  tokenize: tokenizeHtmlText
}
|
||||
|
||||
function tokenizeHtmlText(effects, ok, nok) {
|
||||
var self = this
|
||||
var marker
|
||||
var buffer
|
||||
var index
|
||||
var returnState
|
||||
return start
|
||||
|
||||
function start(code) {
|
||||
effects.enter('htmlText')
|
||||
effects.enter('htmlTextData')
|
||||
effects.consume(code)
|
||||
return open
|
||||
}
|
||||
|
||||
function open(code) {
|
||||
if (code === 33) {
|
||||
effects.consume(code)
|
||||
return declarationOpen
|
||||
}
|
||||
|
||||
if (code === 47) {
|
||||
effects.consume(code)
|
||||
return tagCloseStart
|
||||
}
|
||||
|
||||
if (code === 63) {
|
||||
effects.consume(code)
|
||||
return instruction
|
||||
}
|
||||
|
||||
if (asciiAlpha(code)) {
|
||||
effects.consume(code)
|
||||
return tagOpen
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function declarationOpen(code) {
|
||||
if (code === 45) {
|
||||
effects.consume(code)
|
||||
return commentOpen
|
||||
}
|
||||
|
||||
if (code === 91) {
|
||||
effects.consume(code)
|
||||
buffer = 'CDATA['
|
||||
index = 0
|
||||
return cdataOpen
|
||||
}
|
||||
|
||||
if (asciiAlpha(code)) {
|
||||
effects.consume(code)
|
||||
return declaration
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function commentOpen(code) {
|
||||
if (code === 45) {
|
||||
effects.consume(code)
|
||||
return commentStart
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function commentStart(code) {
|
||||
if (code === null || code === 62) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
if (code === 45) {
|
||||
effects.consume(code)
|
||||
return commentStartDash
|
||||
}
|
||||
|
||||
return comment(code)
|
||||
}
|
||||
|
||||
function commentStartDash(code) {
|
||||
if (code === null || code === 62) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
return comment(code)
|
||||
}
|
||||
|
||||
function comment(code) {
|
||||
if (code === null) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
if (code === 45) {
|
||||
effects.consume(code)
|
||||
return commentClose
|
||||
}
|
||||
|
||||
if (markdownLineEnding(code)) {
|
||||
returnState = comment
|
||||
return atLineEnding(code)
|
||||
}
|
||||
|
||||
effects.consume(code)
|
||||
return comment
|
||||
}
|
||||
|
||||
function commentClose(code) {
|
||||
if (code === 45) {
|
||||
effects.consume(code)
|
||||
return end
|
||||
}
|
||||
|
||||
return comment(code)
|
||||
}
|
||||
|
||||
function cdataOpen(code) {
|
||||
if (code === buffer.charCodeAt(index++)) {
|
||||
effects.consume(code)
|
||||
return index === buffer.length ? cdata : cdataOpen
|
||||
}
|
||||
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
function cdata(code) {
|
||||
if (code === null) {
|
||||
return nok(code)
|
||||
}
|
||||
|
||||
if (code === 93) {
|
||||
effects.consume(code)
|
||||
return cdataClose
|
||||
}
|
||||
|
||||
if (markdownLineEnding(code)) {
|
||||
returnState = cdata
|
||||
return atLineEnding(code)
|
||||
}
|
||||
|
||||
effects.consume(code)
|
||||
return cdata
|
||||
}
|
||||
|
||||
// After one `]` in CDATA: a second `]` means the closing `>` may follow.
function cdataClose(code) {
  if (code === 93) {
    effects.consume(code)
    return cdataEnd
  }

  return cdata(code)
}
|
||||
|
||||
// After `]]` in CDATA: `>` (62) finishes the section; further `]`s stay in
// this state (so `]]]>` still closes); anything else is data again.
function cdataEnd(code) {
  if (code === 62) {
    return end(code)
  }

  if (code === 93) {
    effects.consume(code)
    return cdataEnd
  }

  return cdata(code)
}
|
||||
|
||||
// Inside a declaration (`<!a…`): `>` finishes it (EOF also routes through
// `end`, which rejects anything but `>`); line endings are deferred.
function declaration(code) {
  if (code === null || code === 62) {
    return end(code)
  }

  if (markdownLineEnding(code)) {
    returnState = declaration
    return atLineEnding(code)
  }

  effects.consume(code)
  return declaration
}
|
||||
|
||||
// Inside an instruction (`<?…?>`): `?` (63) may start the closing `?>`;
// line endings are deferred; EOF fails.
function instruction(code) {
  if (code === null) {
    return nok(code)
  }

  if (code === 63) {
    effects.consume(code)
    return instructionClose
  }

  if (markdownLineEnding(code)) {
    returnState = instruction
    return atLineEnding(code)
  }

  effects.consume(code)
  return instruction
}
|
||||
|
||||
// After `?` in an instruction: `>` closes it, anything else is data again.
function instructionClose(code) {
  return code === 62 ? end(code) : instruction(code)
}
|
||||
|
||||
// After `</`: a closing tag name must start with an ASCII letter.
function tagCloseStart(code) {
  if (asciiAlpha(code)) {
    effects.consume(code)
    return tagClose
  }

  return nok(code)
}
|
||||
|
||||
// Inside a closing tag name: dashes (45) and alphanumerics continue it.
function tagClose(code) {
  if (code === 45 || asciiAlphanumeric(code)) {
    effects.consume(code)
    return tagClose
  }

  return tagCloseBetween(code)
}
|
||||
|
||||
// After a closing tag name: only whitespace may precede the final `>`.
function tagCloseBetween(code) {
  if (markdownLineEnding(code)) {
    returnState = tagCloseBetween
    return atLineEnding(code)
  }

  if (markdownSpace(code)) {
    effects.consume(code)
    return tagCloseBetween
  }

  return end(code)
}
|
||||
|
||||
// Inside an opening tag name: dashes and alphanumerics continue it; `/` (47),
// `>` (62), or whitespace moves on to the attribute area; anything else fails.
function tagOpen(code) {
  if (code === 45 || asciiAlphanumeric(code)) {
    effects.consume(code)
    return tagOpen
  }

  if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
    return tagOpenBetween(code)
  }

  return nok(code)
}
|
||||
|
||||
// Between attributes in an opening tag: `/` expects the final `>`;
// `:` (58), `_` (95), or a letter starts an attribute name; whitespace loops.
function tagOpenBetween(code) {
  if (code === 47) {
    effects.consume(code)
    return end
  }

  if (code === 58 || code === 95 || asciiAlpha(code)) {
    effects.consume(code)
    return tagOpenAttributeName
  }

  if (markdownLineEnding(code)) {
    returnState = tagOpenBetween
    return atLineEnding(code)
  }

  if (markdownSpace(code)) {
    effects.consume(code)
    return tagOpenBetween
  }

  return end(code)
}
|
||||
|
||||
// Inside an attribute name: `-` (45), `.` (46), `:` (58), `_` (95), and
// alphanumerics continue it.
function tagOpenAttributeName(code) {
  if (
    code === 45 ||
    code === 46 ||
    code === 58 ||
    code === 95 ||
    asciiAlphanumeric(code)
  ) {
    effects.consume(code)
    return tagOpenAttributeName
  }

  return tagOpenAttributeNameAfter(code)
}
|
||||
|
||||
// After an attribute name: `=` (61) introduces a value, whitespace loops,
// anything else falls back to the between-attributes state.
function tagOpenAttributeNameAfter(code) {
  if (code === 61) {
    effects.consume(code)
    return tagOpenAttributeValueBefore
  }

  if (markdownLineEnding(code)) {
    returnState = tagOpenAttributeNameAfter
    return atLineEnding(code)
  }

  if (markdownSpace(code)) {
    effects.consume(code)
    return tagOpenAttributeNameAfter
  }

  return tagOpenBetween(code)
}
|
||||
|
||||
// Before an attribute value: `<` (60), `=` (61), `>` (62), backtick (96),
// and EOF are invalid; `"` (34) or `'` (39) starts a quoted value (the quote
// is remembered in `marker`); whitespace loops; anything else is unquoted.
function tagOpenAttributeValueBefore(code) {
  if (
    code === null ||
    code === 60 ||
    code === 61 ||
    code === 62 ||
    code === 96
  ) {
    return nok(code)
  }

  if (code === 34 || code === 39) {
    effects.consume(code)
    marker = code
    return tagOpenAttributeValueQuoted
  }

  if (markdownLineEnding(code)) {
    returnState = tagOpenAttributeValueBefore
    return atLineEnding(code)
  }

  if (markdownSpace(code)) {
    effects.consume(code)
    return tagOpenAttributeValueBefore
  }

  effects.consume(code)
  marker = undefined
  return tagOpenAttributeValueUnquoted
}
|
||||
|
||||
// Inside a quoted attribute value: the matching quote (`marker`) closes it;
// EOF fails; line endings are deferred; anything else is consumed.
function tagOpenAttributeValueQuoted(code) {
  if (code === marker) {
    effects.consume(code)
    return tagOpenAttributeValueQuotedAfter
  }

  if (code === null) {
    return nok(code)
  }

  if (markdownLineEnding(code)) {
    returnState = tagOpenAttributeValueQuoted
    return atLineEnding(code)
  }

  effects.consume(code)
  return tagOpenAttributeValueQuoted
}
|
||||
|
||||
// After a quoted attribute value: only `>`, `/`, or whitespace may follow.
function tagOpenAttributeValueQuotedAfter(code) {
  if (code === 62 || code === 47 || markdownLineEndingOrSpace(code)) {
    return tagOpenBetween(code)
  }

  return nok(code)
}
|
||||
|
||||
// Inside an unquoted attribute value: quotes, `<`, `=`, backtick, and EOF
// are invalid; `>` or whitespace ends the value.
function tagOpenAttributeValueUnquoted(code) {
  if (
    code === null ||
    code === 34 ||
    code === 39 ||
    code === 60 ||
    code === 61 ||
    code === 96
  ) {
    return nok(code)
  }

  if (code === 62 || markdownLineEndingOrSpace(code)) {
    return tagOpenBetween(code)
  }

  effects.consume(code)
  return tagOpenAttributeValueUnquoted
} // We can’t have blank lines in content, so no need to worry about empty
// tokens.
|
||||
|
||||
// At a line ending inside html text: close the data token, consume the
// break, then eat up to a 4-space indent (unlimited when `codeIndented` is
// disabled) before resuming `returnState` via `afterPrefix`.
function atLineEnding(code) {
  effects.exit('htmlTextData')
  effects.enter('lineEnding')
  effects.consume(code)
  effects.exit('lineEnding')
  return factorySpace(
    effects,
    afterPrefix,
    'linePrefix',
    self.parser.constructs.disable.null.indexOf('codeIndented') > -1
      ? undefined
      : 4
  )
}
|
||||
|
||||
// After the indent following a line ending: reopen the data token and
// continue wherever we left off (`returnState`).
function afterPrefix(code) {
  effects.enter('htmlTextData')
  return returnState(code)
}
|
||||
|
||||
// Final state: `>` (62) closes the html text token pair; anything else fails.
function end(code) {
  if (code === 62) {
    effects.consume(code)
    effects.exit('htmlTextData')
    effects.exit('htmlText')
    return ok
  }

  return nok(code)
}
|
||||
}
|
||||
|
||||
// Expose the construct.
module.exports = htmlText
|
||||
330
node_modules/micromark/dist/tokenize/label-end.js
generated
vendored
Normal file
330
node_modules/micromark/dist/tokenize/label-end.js
generated
vendored
Normal file
@@ -0,0 +1,330 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
|
||||
var chunkedPush = require('../util/chunked-push.js')
|
||||
var chunkedSplice = require('../util/chunked-splice.js')
|
||||
var normalizeIdentifier = require('../util/normalize-identifier.js')
|
||||
var resolveAll = require('../util/resolve-all.js')
|
||||
var shallow = require('../util/shallow.js')
|
||||
var factoryDestination = require('./factory-destination.js')
|
||||
var factoryLabel = require('./factory-label.js')
|
||||
var factoryTitle = require('./factory-title.js')
|
||||
var factoryWhitespace = require('./factory-whitespace.js')
|
||||
|
||||
// The label end construct, plus the three things that may follow a `]`:
// a resource (`(url "title")`), a full reference (`[id]`), or a collapsed
// reference (`[]`).
var labelEnd = {
  name: 'labelEnd',
  tokenize: tokenizeLabelEnd,
  resolveTo: resolveToLabelEnd,
  resolveAll: resolveAllLabelEnd
}
var resourceConstruct = {
  tokenize: tokenizeResource
}
var fullReferenceConstruct = {
  tokenize: tokenizeFullReference
}
var collapsedReferenceConstruct = {
  tokenize: tokenizeCollapsedReference
}
|
||||
|
||||
// Turn any label starts/ends that were never matched into plain data,
// dropping their marker events from the stream. Mutates and returns `events`.
function resolveAllLabelEnd(events) {
  var position = 0
  var token
  var isDangling

  while (position < events.length) {
    token = events[position][1]
    isDangling =
      !token._used &&
      (token.type === 'labelImage' ||
        token.type === 'labelLink' ||
        token.type === 'labelEnd')

    if (isDangling) {
      // Remove the marker events (image starts carry two extra ones).
      events.splice(position + 1, token.type === 'labelImage' ? 4 : 2)
      token.type = 'data'
      position++
    }

    position++
  }

  return events
}
|
||||
|
||||
// Rewrite the event stream so the matched label start…label end pair becomes
// a full `link`/`image` group containing `label` and `labelText` tokens; the
// text in between is handed to the inside-span resolvers.
function resolveToLabelEnd(events, context) {
  var index = events.length
  // `offset` is 2 for images (`![` has two marker events) and 0 for links.
  var offset = 0
  var group
  var label
  var text
  var token
  var open
  var close
  var media // Find an opening.

  while (index--) {
    token = events[index][1]

    if (open) {
      // If we see another link, or inactive link label, we’ve been here before.
      if (
        token.type === 'link' ||
        (token.type === 'labelLink' && token._inactive)
      ) {
        break
      } // Mark other link openings as inactive, as we can’t have links in
      // links.

      if (events[index][0] === 'enter' && token.type === 'labelLink') {
        token._inactive = true
      }
    } else if (close) {
      if (
        events[index][0] === 'enter' &&
        (token.type === 'labelImage' || token.type === 'labelLink') &&
        !token._balanced
      ) {
        open = index

        if (token.type !== 'labelLink') {
          offset = 2
          break
        }
      }
    } else if (token.type === 'labelEnd') {
      close = index
    }
  }

  // Build the wrapping tokens from the positions found above.
  group = {
    type: events[open][1].type === 'labelLink' ? 'link' : 'image',
    start: shallow(events[open][1].start),
    end: shallow(events[events.length - 1][1].end)
  }
  label = {
    type: 'label',
    start: shallow(events[open][1].start),
    end: shallow(events[close][1].end)
  }
  text = {
    type: 'labelText',
    start: shallow(events[open + offset + 2][1].end),
    end: shallow(events[close - 2][1].start)
  }
  media = [
    ['enter', group, context],
    ['enter', label, context]
  ] // Opening marker.

  media = chunkedPush(media, events.slice(open + 1, open + offset + 3)) // Text open.

  media = chunkedPush(media, [['enter', text, context]]) // Between.

  media = chunkedPush(
    media,
    resolveAll(
      context.parser.constructs.insideSpan.null,
      events.slice(open + offset + 4, close - 3),
      context
    )
  ) // Text close, marker close, label close.

  media = chunkedPush(media, [
    ['exit', text, context],
    events[close - 2],
    events[close - 1],
    ['exit', label, context]
  ]) // Reference, resource, or so.

  media = chunkedPush(media, events.slice(close + 1)) // Media close.

  media = chunkedPush(media, [['exit', group, context]])
  chunkedSplice(events, open, events.length, media)
  return events
}
|
||||
|
||||
// Tokenize a label end (`]`), then try a resource, a full reference, a
// collapsed reference, or fall back to a shortcut reference.
function tokenizeLabelEnd(effects, ok, nok) {
  var self = this
  var index = self.events.length
  var labelStart
  var defined // Find an opening.

  while (index--) {
    if (
      (self.events[index][1].type === 'labelImage' ||
        self.events[index][1].type === 'labelLink') &&
      !self.events[index][1]._balanced
    ) {
      labelStart = self.events[index][1]
      break
    }
  }

  return start

  // At the `]`: fail without an opening; otherwise check whether the label
  // text between start and here is a defined identifier, and emit the marker.
  function start(code) {
    if (!labelStart) {
      return nok(code)
    } // It’s a balanced bracket, but contains a link.

    if (labelStart._inactive) return balanced(code)
    defined =
      self.parser.defined.indexOf(
        normalizeIdentifier(
          self.sliceSerialize({
            start: labelStart.end,
            end: self.now()
          })
        )
      ) > -1
    effects.enter('labelEnd')
    effects.enter('labelMarker')
    effects.consume(code)
    effects.exit('labelMarker')
    effects.exit('labelEnd')
    return afterLabelEnd
  }

  // After the `]`: branch on what follows; `(` (40) and `[` (91) try the
  // sub-constructs, anything else is a shortcut reference.
  function afterLabelEnd(code) {
    // Resource: `[asd](fgh)`.
    if (code === 40) {
      return effects.attempt(
        resourceConstruct,
        ok,
        defined ? ok : balanced
      )(code)
    } // Collapsed (`[asd][]`) or full (`[asd][fgh]`) reference?

    if (code === 91) {
      return effects.attempt(
        fullReferenceConstruct,
        ok,
        defined
          ? effects.attempt(collapsedReferenceConstruct, ok, balanced)
          : balanced
      )(code)
    } // Shortcut reference: `[asd]`?

    return defined ? ok(code) : balanced(code)
  }

  // Mark the opening as balanced so it is not matched again, then fail.
  function balanced(code) {
    labelStart._balanced = true
    return nok(code)
  }
}
|
||||
|
||||
// Tokenize a resource: `(`, optional whitespace, optional destination,
// optional whitespace + title, optional whitespace, `)`.
function tokenizeResource(effects, ok, nok) {
  return start

  // At the `(` (40): open the resource and skip whitespace.
  function start(code) {
    effects.enter('resource')
    effects.enter('resourceMarker')
    effects.consume(code)
    effects.exit('resourceMarker')
    return factoryWhitespace(effects, open)
  }

  // After opening whitespace: `)` (41) means an empty resource; otherwise
  // parse a destination (max 3 levels of balanced parens).
  function open(code) {
    if (code === 41) {
      return end(code)
    }

    return factoryDestination(
      effects,
      destinationAfter,
      nok,
      'resourceDestination',
      'resourceDestinationLiteral',
      'resourceDestinationLiteralMarker',
      'resourceDestinationRaw',
      'resourceDestinationString',
      3
    )(code)
  }

  // After the destination: whitespace may introduce a title.
  function destinationAfter(code) {
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, between)(code)
      : end(code)
  }

  // Between destination and closing paren: `"` (34), `'` (39), or `(` (40)
  // starts a title.
  function between(code) {
    if (code === 34 || code === 39 || code === 40) {
      return factoryTitle(
        effects,
        factoryWhitespace(effects, end),
        nok,
        'resourceTitle',
        'resourceTitleMarker',
        'resourceTitleString'
      )(code)
    }

    return end(code)
  }

  // At the end: only `)` (41) closes the resource.
  function end(code) {
    if (code === 41) {
      effects.enter('resourceMarker')
      effects.consume(code)
      effects.exit('resourceMarker')
      effects.exit('resource')
      return ok
    }

    return nok(code)
  }
}
|
||||
|
||||
// Tokenize a full reference (`[id]`) and accept it only when the identifier
// is defined.
function tokenizeFullReference(effects, ok, nok) {
  var self = this
  return start

  function start(code) {
    return factoryLabel.call(
      self,
      effects,
      afterLabel,
      nok,
      'reference',
      'referenceMarker',
      'referenceString'
    )(code)
  }

  // After the label: strip the brackets, normalize, and look the identifier
  // up in the parser’s definitions.
  function afterLabel(code) {
    return self.parser.defined.indexOf(
      normalizeIdentifier(
        self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
      )
    ) < 0
      ? nok(code)
      : ok(code)
  }
}
|
||||
|
||||
// Tokenize a collapsed reference (`[]`): an opening `[` that must be
// followed immediately by a closing `]`.
function tokenizeCollapsedReference(effects, ok, nok) {
  return atOpeningBracket

  // At the `[`: open the reference and consume the marker.
  function atOpeningBracket(code) {
    effects.enter('reference')
    effects.enter('referenceMarker')
    effects.consume(code)
    effects.exit('referenceMarker')
    return atClosingBracket
  }

  // Right after `[`: anything but `]` (93) makes this not collapsed.
  function atClosingBracket(code) {
    if (code !== 93) {
      return nok(code)
    }

    effects.enter('referenceMarker')
    effects.consume(code)
    effects.exit('referenceMarker')
    effects.exit('reference')
    return ok
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = labelEnd
|
||||
46
node_modules/micromark/dist/tokenize/label-start-image.js
generated
vendored
Normal file
46
node_modules/micromark/dist/tokenize/label-start-image.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict'
|
||||
|
||||
var labelEnd = require('./label-end.js')
|
||||
|
||||
// Label (image) start construct: `![`; leftover starts are cleaned up by the
// shared label-end resolver.
var labelStartImage = {
  name: 'labelStartImage',
  tokenize: tokenizeLabelStartImage,
  resolveAll: labelEnd.resolveAll
}
|
||||
|
||||
// Tokenize an image label start (`![`).
function tokenizeLabelStartImage(effects, ok, nok) {
  var self = this
  return start

  // At the `!`: open the image label and consume the bang.
  function start(code) {
    effects.enter('labelImage')
    effects.enter('labelImageMarker')
    effects.consume(code)
    effects.exit('labelImageMarker')
    return open
  }

  // After the `!`: only `[` (91) completes the start.
  function open(code) {
    if (code === 91) {
      effects.enter('labelMarker')
      effects.consume(code)
      effects.exit('labelMarker')
      effects.exit('labelImage')
      return after
    }

    return nok(code)
  }

  // After `![`: a caret (94) is rejected only when hidden footnote support
  // is enabled in the constructs.
  function after(code) {
    /* c8 ignore next */
    return code === 94 &&
      /* c8 ignore next */
      '_hiddenFootnoteSupport' in self.parser.constructs
      ? /* c8 ignore next */
        nok(code)
      : ok(code)
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = labelStartImage
|
||||
35
node_modules/micromark/dist/tokenize/label-start-link.js
generated
vendored
Normal file
35
node_modules/micromark/dist/tokenize/label-start-link.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
|
||||
var labelEnd = require('./label-end.js')
|
||||
|
||||
// Label (link) start construct: `[`; leftover starts are cleaned up by the
// shared label-end resolver.
var labelStartLink = {
  name: 'labelStartLink',
  tokenize: tokenizeLabelStartLink,
  resolveAll: labelEnd.resolveAll
}
|
||||
|
||||
// Tokenize a link label start: a single `[`.
function tokenizeLabelStartLink(effects, ok, nok) {
  var self = this
  return atBracket

  // At the `[`: emit the label-link token around its marker.
  function atBracket(code) {
    effects.enter('labelLink')
    effects.enter('labelMarker')
    effects.consume(code)
    effects.exit('labelMarker')
    effects.exit('labelLink')
    return afterBracket
  }

  // After the `[`: a caret (94) is rejected only when hidden footnote
  // support is enabled in the constructs.
  function afterBracket(code) {
    /* c8 ignore next */
    if (code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs) {
      /* c8 ignore next */
      return nok(code)
    }

    return ok(code)
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = labelStartLink
|
||||
21
node_modules/micromark/dist/tokenize/line-ending.js
generated
vendored
Normal file
21
node_modules/micromark/dist/tokenize/line-ending.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
'use strict'
|
||||
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Line ending construct.
var lineEnding = {
  name: 'lineEnding',
  tokenize: tokenizeLineEnding
}
|
||||
|
||||
// Tokenize a line ending plus any following line prefix (indent).
function tokenizeLineEnding(effects, ok) {
  return start

  function start(code) {
    effects.enter('lineEnding')
    effects.consume(code)
    effects.exit('lineEnding')
    return factorySpace(effects, ok, 'linePrefix')
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = lineEnding
|
||||
214
node_modules/micromark/dist/tokenize/list.js
generated
vendored
Normal file
214
node_modules/micromark/dist/tokenize/list.js
generated
vendored
Normal file
@@ -0,0 +1,214 @@
|
||||
'use strict'
|
||||
|
||||
var asciiDigit = require('../character/ascii-digit.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var prefixSize = require('../util/prefix-size.js')
|
||||
var sizeChunks = require('../util/size-chunks.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
var partialBlankLine = require('./partial-blank-line.js')
|
||||
var thematicBreak = require('./thematic-break.js')
|
||||
|
||||
// List (container) construct, its continuation, and two partial helpers:
// one for whitespace after an item marker, one to check item indent.
var list = {
  name: 'list',
  tokenize: tokenizeListStart,
  continuation: {
    tokenize: tokenizeListContinuation
  },
  exit: tokenizeListEnd
}
var listItemPrefixWhitespaceConstruct = {
  tokenize: tokenizeListItemPrefixWhitespace,
  partial: true
}
var indentConstruct = {
  tokenize: tokenizeIndent,
  partial: true
}
|
||||
|
||||
// Tokenize the start of a list item: an unordered marker (`*` 42, `+` 43,
// `-` 45) or an ordered value (digits) followed by `)` (41) or `.` (46),
// then the prefix whitespace; records marker/size on `containerState`.
function tokenizeListStart(effects, ok, nok) {
  var self = this
  var initialSize = prefixSize(self.events, 'linePrefix')
  var size = 0
  return start

  function start(code) {
    // Reuse the container’s kind when continuing, otherwise infer it from
    // the current marker character.
    var kind =
      self.containerState.type ||
      (code === 42 || code === 43 || code === 45
        ? 'listUnordered'
        : 'listOrdered')

    if (
      kind === 'listUnordered'
        ? !self.containerState.marker || code === self.containerState.marker
        : asciiDigit(code)
    ) {
      if (!self.containerState.type) {
        self.containerState.type = kind
        effects.enter(kind, {
          _container: true
        })
      }

      if (kind === 'listUnordered') {
        effects.enter('listItemPrefix')
        // `*` and `-` could also be a thematic break; check that first.
        return code === 42 || code === 45
          ? effects.check(thematicBreak, nok, atMarker)(code)
          : atMarker(code)
      }

      // When interrupting, only an ordered list starting at `1` (49) counts.
      if (!self.interrupt || code === 49) {
        effects.enter('listItemPrefix')
        effects.enter('listItemValue')
        return inside(code)
      }
    }

    return nok(code)
  }

  // Inside an ordered list value: at most 9 digits, then `)`/`.`.
  function inside(code) {
    if (asciiDigit(code) && ++size < 10) {
      effects.consume(code)
      return inside
    }

    if (
      (!self.interrupt || size < 2) &&
      (self.containerState.marker
        ? code === self.containerState.marker
        : code === 41 || code === 46)
    ) {
      effects.exit('listItemValue')
      return atMarker(code)
    }

    return nok(code)
  }

  // At the item marker itself: consume it, then check for a blank line or
  // the whitespace after the marker.
  function atMarker(code) {
    effects.enter('listItemMarker')
    effects.consume(code)
    effects.exit('listItemMarker')
    self.containerState.marker = self.containerState.marker || code
    return effects.check(
      partialBlankLine, // Can’t be empty when interrupting.
      self.interrupt ? nok : onBlank,
      effects.attempt(
        listItemPrefixWhitespaceConstruct,
        endOfPrefix,
        otherPrefix
      )
    )
  }

  // Item starts with a blank line: note it and count the marker column.
  function onBlank(code) {
    self.containerState.initialBlankLine = true
    initialSize++
    return endOfPrefix(code)
  }

  // No proper prefix whitespace matched: a single space still works.
  function otherPrefix(code) {
    if (markdownSpace(code)) {
      effects.enter('listItemPrefixWhitespace')
      effects.consume(code)
      effects.exit('listItemPrefixWhitespace')
      return endOfPrefix
    }

    return nok(code)
  }

  // Done with the prefix: record its total size for continuation checks.
  function endOfPrefix(code) {
    self.containerState.size =
      initialSize + sizeChunks(self.sliceStream(effects.exit('listItemPrefix')))
    return ok(code)
  }
}
|
||||
|
||||
// Decide whether the current line continues the list item: blank lines,
// indented lines, or a new item in the same list all continue it.
function tokenizeListContinuation(effects, ok, nok) {
  var self = this
  self.containerState._closeFlow = undefined
  return effects.check(partialBlankLine, onBlank, notBlank)

  function onBlank(code) {
    self.containerState.furtherBlankLines =
      self.containerState.furtherBlankLines ||
      self.containerState.initialBlankLine // We have a blank line.
    // Still, try to consume at most the items size.

    return factorySpace(
      effects,
      ok,
      'listItemIndent',
      self.containerState.size + 1
    )(code)
  }

  function notBlank(code) {
    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
      self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined
      return notInCurrentItem(code)
    }

    self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined
    return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)
  }

  // Not a continuation of the current item: maybe a new item in this list.
  function notInCurrentItem(code) {
    // While we do continue, we signal that the flow should be closed.
    self.containerState._closeFlow = true // As we’re closing flow, we’re no longer interrupting.

    self.interrupt = undefined
    return factorySpace(
      effects,
      effects.attempt(list, ok, nok),
      'linePrefix',
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : 4
    )(code)
  }
}
|
||||
|
||||
// Partial: succeed only when the line is indented exactly to the item’s size.
function tokenizeIndent(effects, ok, nok) {
  var self = this
  return factorySpace(
    effects,
    afterPrefix,
    'listItemIndent',
    self.containerState.size + 1
  )

  function afterPrefix(code) {
    return prefixSize(self.events, 'listItemIndent') ===
      self.containerState.size
      ? ok(code)
      : nok(code)
  }
}
|
||||
|
||||
// Close the list container token (`listOrdered`/`listUnordered`).
function tokenizeListEnd(effects) {
  effects.exit(this.containerState.type)
}
|
||||
|
||||
// Partial: consume whitespace after the item marker (at most marker + 4
// columns unless indented code is disabled); fail when there is none left
// or more whitespace still follows.
function tokenizeListItemPrefixWhitespace(effects, ok, nok) {
  var self = this
  return factorySpace(
    effects,
    afterPrefix,
    'listItemPrefixWhitespace',
    self.parser.constructs.disable.null.indexOf('codeIndented') > -1
      ? undefined
      : 4 + 1
  )

  function afterPrefix(code) {
    return markdownSpace(code) ||
      !prefixSize(self.events, 'listItemPrefixWhitespace')
      ? nok(code)
      : ok(code)
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = list
|
||||
19
node_modules/micromark/dist/tokenize/partial-blank-line.js
generated
vendored
Normal file
19
node_modules/micromark/dist/tokenize/partial-blank-line.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Partial construct to check for a blank line.
var partialBlankLine = {
  tokenize: tokenizePartialBlankLine,
  partial: true
}
|
||||
|
||||
// Succeed when the rest of the line is only whitespace followed by a line
// ending or end of input.
function tokenizePartialBlankLine(effects, ok, nok) {
  return factorySpace(effects, afterWhitespace, 'linePrefix')

  function afterWhitespace(code) {
    return code === null || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = partialBlankLine
|
||||
117
node_modules/micromark/dist/tokenize/setext-underline.js
generated
vendored
Normal file
117
node_modules/micromark/dist/tokenize/setext-underline.js
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var shallow = require('../util/shallow.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Setext heading underline construct (`===`/`---` under a paragraph).
var setextUnderline = {
  name: 'setextUnderline',
  tokenize: tokenizeSetextUnderline,
  resolveTo: resolveToSetextUnderline
}
|
||||
|
||||
// Rewrite the events so the preceding paragraph (plus underline) becomes a
// setext heading, moving the surrounding content token when definitions
// are mixed in.
function resolveToSetextUnderline(events, context) {
  var index = events.length
  var content
  var text
  var definition
  var heading // Find the opening of the content.
  // It’ll always exist: we don’t tokenize if it isn’t there.

  while (index--) {
    if (events[index][0] === 'enter') {
      if (events[index][1].type === 'content') {
        content = index
        break
      }

      if (events[index][1].type === 'paragraph') {
        text = index
      }
    } // Exit
    else {
      if (events[index][1].type === 'content') {
        // Remove the content end (if needed we’ll add it later)
        events.splice(index, 1)
      }

      if (!definition && events[index][1].type === 'definition') {
        definition = index
      }
    }
  }

  heading = {
    type: 'setextHeading',
    start: shallow(events[text][1].start),
    end: shallow(events[events.length - 1][1].end)
  } // Change the paragraph to setext heading text.

  events[text][1].type = 'setextHeadingText' // If we have definitions in the content, we’ll keep on having content,
  // but we need move it.

  if (definition) {
    events.splice(text, 0, ['enter', heading, context])
    events.splice(definition + 1, 0, ['exit', events[content][1], context])
    events[content][1].end = shallow(events[definition][1].end)
  } else {
    events[content][1] = heading
  } // Add the heading exit at the end.

  events.push(['exit', heading, context])
  return events
}
|
||||
|
||||
// Tokenize a setext heading underline: a run of one marker character,
// optional trailing whitespace, then a line ending or EOF; only valid after
// a paragraph (or when interrupting) and never when lazy.
function tokenizeSetextUnderline(effects, ok, nok) {
  var self = this
  var index = self.events.length
  var marker
  var paragraph // Find an opening.

  while (index--) {
    // Skip enter/exit of line ending, line prefix, and content.
    // We can now either have a definition or a paragraph.
    if (
      self.events[index][1].type !== 'lineEnding' &&
      self.events[index][1].type !== 'linePrefix' &&
      self.events[index][1].type !== 'content'
    ) {
      paragraph = self.events[index][1].type === 'paragraph'
      break
    }
  }

  return start

  function start(code) {
    if (!self.lazy && (self.interrupt || paragraph)) {
      effects.enter('setextHeadingLine')
      effects.enter('setextHeadingLineSequence')
      marker = code
      return closingSequence(code)
    }

    return nok(code)
  }

  // Consume the run of the remembered marker, then allow a line suffix.
  function closingSequence(code) {
    if (code === marker) {
      effects.consume(code)
      return closingSequence
    }

    effects.exit('setextHeadingLineSequence')
    return factorySpace(effects, closingSequenceEnd, 'lineSuffix')(code)
  }

  // After optional trailing whitespace: only a line ending or EOF is valid.
  function closingSequenceEnd(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit('setextHeadingLine')
      return ok(code)
    }

    return nok(code)
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = setextUnderline
|
||||
53
node_modules/micromark/dist/tokenize/thematic-break.js
generated
vendored
Normal file
53
node_modules/micromark/dist/tokenize/thematic-break.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var markdownSpace = require('../character/markdown-space.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// Thematic break construct (`***`, `---`, `___`).
var thematicBreak = {
  name: 'thematicBreak',
  tokenize: tokenizeThematicBreak
}
|
||||
|
||||
// Tokenize a thematic break: at least three of the same marker character,
// optionally separated by whitespace, up to a line ending or EOF.
function tokenizeThematicBreak(effects, ok, nok) {
  var size = 0
  var marker
  return start

  function start(code) {
    effects.enter('thematicBreak')
    // Remember which character this break uses; only that one may repeat.
    marker = code
    return atBreak(code)
  }

  // Between sequences: more markers, whitespace, or — with at least three
  // markers seen — the end of the line.
  function atBreak(code) {
    if (code === marker) {
      effects.enter('thematicBreakSequence')
      return sequence(code)
    }

    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, 'whitespace')(code)
    }

    if (size < 3 || (code !== null && !markdownLineEnding(code))) {
      return nok(code)
    }

    effects.exit('thematicBreak')
    return ok(code)
  }

  // Inside a run of the marker: count each occurrence.
  function sequence(code) {
    if (code === marker) {
      effects.consume(code)
      size++
      return sequence
    }

    effects.exit('thematicBreakSequence')
    return atBreak(code)
  }
}
|
||||
|
||||
// Expose the construct.
module.exports = thematicBreak
|
||||
Reference in New Issue
Block a user