planning
All checks were successful
Publish To Prod / deploy_and_publish (push) Successful in 35s

This commit is contained in:
2024-10-14 09:15:30 +02:00
parent bcba00a730
commit 6e64e138e2
21059 changed files with 2317811 additions and 1 deletions

216
node_modules/micromark/lib/tokenize/attention.js generated vendored Normal file
View File

@@ -0,0 +1,216 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var chunkedPush = require('../util/chunked-push.js')
var chunkedSplice = require('../util/chunked-splice.js')
var classifyCharacter = require('../util/classify-character.js')
var movePoint = require('../util/move-point.js')
var resolveAll = require('../util/resolve-all.js')
var shallow = require('../util/shallow.js')
// Normalize a required module for default-export access: pass through
// objects that already carry a `default` export, wrap anything else
// (functions, primitives, null) as `{default: value}`.
function _interopDefaultLegacy(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e
  }
  return {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// The attention construct: tokenizes runs of `*`/`_`, and resolves matched
// runs into emphasis/strong once a whole span has been tokenized.
var attention = {
  name: 'attention',
  tokenize: tokenizeAttention,
  resolveAll: resolveAllAttention
}
// Take all events and resolve attention to emphasis or strong.
//
// Walks the event list, pairing attention sequences (`*`/`_` runs) that can
// close with earlier ones that can open, and rewrites each pair into
// emphasis/strong enter/exit events.
function resolveAllAttention(events, context) {
  var index = -1
  var open
  var group
  var text
  var openingSequence
  var closingSequence
  var use
  var nextEvents
  var offset
  // Walk through all events.
  //
  // Note: performance of this is fine on an mb of normal markdown, but it's
  // a bottleneck for malicious stuff.
  while (++index < events.length) {
    // Find a token that can close.
    if (
      events[index][0] === 'enter' &&
      events[index][1].type === 'attentionSequence' &&
      events[index][1]._close
    ) {
      open = index
      // Now walk back to find an opener.
      while (open--) {
        // Find a token that can open the closer.
        if (
          events[open][0] === 'exit' &&
          events[open][1].type === 'attentionSequence' &&
          events[open][1]._open &&
          // If the markers are the same:
          context.sliceSerialize(events[open][1]).charCodeAt(0) ===
            context.sliceSerialize(events[index][1]).charCodeAt(0)
        ) {
          // If the opening can close or the closing can open,
          // and the close size *is not* a multiple of three,
          // but the sum of the opening and closing size *is* multiple of three,
          // then don't match.
          if (
            (events[open][1]._close || events[index][1]._open) &&
            (events[index][1].end.offset - events[index][1].start.offset) % 3 &&
            !(
              (events[open][1].end.offset -
                events[open][1].start.offset +
                events[index][1].end.offset -
                events[index][1].start.offset) %
              3
            )
          ) {
            continue
          }
          // Number of markers to use from the sequence: two for strong, one
          // for emphasis (strong wins when both runs are longer than one).
          use =
            events[open][1].end.offset - events[open][1].start.offset > 1 &&
            events[index][1].end.offset - events[index][1].start.offset > 1
              ? 2
              : 1
          // Marker run at the end of the opener.
          openingSequence = {
            type: use > 1 ? types.strongSequence : types.emphasisSequence,
            start: movePoint(shallow(events[open][1].end), -use),
            end: shallow(events[open][1].end)
          }
          // Marker run at the start of the closer.
          closingSequence = {
            type: use > 1 ? types.strongSequence : types.emphasisSequence,
            start: shallow(events[index][1].start),
            end: movePoint(shallow(events[index][1].start), use)
          }
          // The content between the two sequences.
          text = {
            type: use > 1 ? types.strongText : types.emphasisText,
            start: shallow(events[open][1].end),
            end: shallow(events[index][1].start)
          }
          // The whole emphasis/strong group.
          group = {
            type: use > 1 ? types.strong : types.emphasis,
            start: shallow(openingSequence.start),
            end: shallow(closingSequence.end)
          }
          // Shrink the original sequences by the markers just consumed.
          events[open][1].end = shallow(openingSequence.start)
          events[index][1].start = shallow(closingSequence.end)
          nextEvents = []
          // If there are more markers in the opening, add them before.
          if (events[open][1].end.offset - events[open][1].start.offset) {
            nextEvents = chunkedPush(nextEvents, [
              ['enter', events[open][1], context],
              ['exit', events[open][1], context]
            ])
          }
          // Opening.
          nextEvents = chunkedPush(nextEvents, [
            ['enter', group, context],
            ['enter', openingSequence, context],
            ['exit', openingSequence, context],
            ['enter', text, context]
          ])
          // Between: resolve the inner events (e.g. nested attention).
          nextEvents = chunkedPush(
            nextEvents,
            resolveAll(
              context.parser.constructs.insideSpan.null,
              events.slice(open + 1, index),
              context
            )
          )
          // Closing.
          nextEvents = chunkedPush(nextEvents, [
            ['exit', text, context],
            ['enter', closingSequence, context],
            ['exit', closingSequence, context],
            ['exit', group, context]
          ])
          // If there are more markers in the closing, add them after.
          if (events[index][1].end.offset - events[index][1].start.offset) {
            offset = 2
            nextEvents = chunkedPush(nextEvents, [
              ['enter', events[index][1], context],
              ['exit', events[index][1], context]
            ])
          } else {
            offset = 0
          }
          // Splice the rewritten events over the old span, then continue
          // scanning right after them.
          chunkedSplice(events, open - 1, index - open + 3, nextEvents)
          index = open + nextEvents.length - offset - 2
          break
        }
      }
    }
  }
  // Remove remaining sequences: unmatched attention becomes plain data.
  index = -1
  while (++index < events.length) {
    if (events[index][1].type === 'attentionSequence') {
      events[index][1].type = 'data'
    }
  }
  return events
}
// Tokenize a run of `*` or `_`, and record on the resulting token whether
// the run can open and/or close attention, based on the characters before
// and after the run.
function tokenizeAttention(effects, ok) {
  var before = classifyCharacter(this.previous)
  var marker
  return start
  function start(code) {
    assert__default['default'](
      code === codes.asterisk || code === codes.underscore,
      'expected asterisk or underscore'
    )
    effects.enter('attentionSequence')
    marker = code
    return sequence(code)
  }
  // Eat the whole marker run, then classify the character after it.
  function sequence(code) {
    var token
    var after
    var open
    var close
    if (code === marker) {
      effects.consume(code)
      return sequence
    }
    token = effects.exit('attentionSequence')
    after = classifyCharacter(code)
    // NOTE(review): `classifyCharacter` presumably returns falsy for word
    // characters — these checks implement CommonMark's flanking rules;
    // verify against classify-character.js.
    open = !after || (after === constants.characterGroupPunctuation && before)
    close = !before || (before === constants.characterGroupPunctuation && after)
    // `_` is stricter than `*`: the extra `before`/`after` terms forbid
    // opening/closing inside a word.
    token._open = marker === codes.asterisk ? open : open && (before || !close)
    token._close = marker === codes.asterisk ? close : close && (after || !open)
    return ok(code)
  }
}
// Expose the construct (CommonJS build).
module.exports = attention

207
node_modules/micromark/lib/tokenize/attention.mjs generated vendored Normal file
View File

@@ -0,0 +1,207 @@
// The attention construct: tokenizes runs of `*`/`_`, and resolves matched
// runs into emphasis/strong once a whole span has been tokenized.
var attention = {
  name: 'attention',
  tokenize: tokenizeAttention,
  resolveAll: resolveAllAttention
}
export default attention
import assert from 'assert'
import codes from '../character/codes.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import chunkedPush from '../util/chunked-push.mjs'
import chunkedSplice from '../util/chunked-splice.mjs'
import classifyCharacter from '../util/classify-character.mjs'
import movePoint from '../util/move-point.mjs'
import resolveAll from '../util/resolve-all.mjs'
import shallow from '../util/shallow.mjs'
// Take all events and resolve attention to emphasis or strong.
//
// Walks the event list, pairing attention sequences (`*`/`_` runs) that can
// close with earlier ones that can open, and rewrites each pair into
// emphasis/strong enter/exit events.
function resolveAllAttention(events, context) {
  var index = -1
  var open
  var group
  var text
  var openingSequence
  var closingSequence
  var use
  var nextEvents
  var offset
  // Walk through all events.
  //
  // Note: performance of this is fine on an mb of normal markdown, but it's
  // a bottleneck for malicious stuff.
  while (++index < events.length) {
    // Find a token that can close.
    if (
      events[index][0] === 'enter' &&
      events[index][1].type === 'attentionSequence' &&
      events[index][1]._close
    ) {
      open = index
      // Now walk back to find an opener.
      while (open--) {
        // Find a token that can open the closer.
        if (
          events[open][0] === 'exit' &&
          events[open][1].type === 'attentionSequence' &&
          events[open][1]._open &&
          // If the markers are the same:
          context.sliceSerialize(events[open][1]).charCodeAt(0) ===
            context.sliceSerialize(events[index][1]).charCodeAt(0)
        ) {
          // If the opening can close or the closing can open,
          // and the close size *is not* a multiple of three,
          // but the sum of the opening and closing size *is* multiple of three,
          // then don't match.
          if (
            (events[open][1]._close || events[index][1]._open) &&
            (events[index][1].end.offset - events[index][1].start.offset) % 3 &&
            !(
              (events[open][1].end.offset -
                events[open][1].start.offset +
                events[index][1].end.offset -
                events[index][1].start.offset) %
              3
            )
          ) {
            continue
          }
          // Number of markers to use from the sequence: two for strong, one
          // for emphasis (strong wins when both runs are longer than one).
          use =
            events[open][1].end.offset - events[open][1].start.offset > 1 &&
            events[index][1].end.offset - events[index][1].start.offset > 1
              ? 2
              : 1
          // Marker run at the end of the opener.
          openingSequence = {
            type: use > 1 ? types.strongSequence : types.emphasisSequence,
            start: movePoint(shallow(events[open][1].end), -use),
            end: shallow(events[open][1].end)
          }
          // Marker run at the start of the closer.
          closingSequence = {
            type: use > 1 ? types.strongSequence : types.emphasisSequence,
            start: shallow(events[index][1].start),
            end: movePoint(shallow(events[index][1].start), use)
          }
          // The content between the two sequences.
          text = {
            type: use > 1 ? types.strongText : types.emphasisText,
            start: shallow(events[open][1].end),
            end: shallow(events[index][1].start)
          }
          // The whole emphasis/strong group.
          group = {
            type: use > 1 ? types.strong : types.emphasis,
            start: shallow(openingSequence.start),
            end: shallow(closingSequence.end)
          }
          // Shrink the original sequences by the markers just consumed.
          events[open][1].end = shallow(openingSequence.start)
          events[index][1].start = shallow(closingSequence.end)
          nextEvents = []
          // If there are more markers in the opening, add them before.
          if (events[open][1].end.offset - events[open][1].start.offset) {
            nextEvents = chunkedPush(nextEvents, [
              ['enter', events[open][1], context],
              ['exit', events[open][1], context]
            ])
          }
          // Opening.
          nextEvents = chunkedPush(nextEvents, [
            ['enter', group, context],
            ['enter', openingSequence, context],
            ['exit', openingSequence, context],
            ['enter', text, context]
          ])
          // Between: resolve the inner events (e.g. nested attention).
          nextEvents = chunkedPush(
            nextEvents,
            resolveAll(
              context.parser.constructs.insideSpan.null,
              events.slice(open + 1, index),
              context
            )
          )
          // Closing.
          nextEvents = chunkedPush(nextEvents, [
            ['exit', text, context],
            ['enter', closingSequence, context],
            ['exit', closingSequence, context],
            ['exit', group, context]
          ])
          // If there are more markers in the closing, add them after.
          if (events[index][1].end.offset - events[index][1].start.offset) {
            offset = 2
            nextEvents = chunkedPush(nextEvents, [
              ['enter', events[index][1], context],
              ['exit', events[index][1], context]
            ])
          } else {
            offset = 0
          }
          // Splice the rewritten events over the old span, then continue
          // scanning right after them.
          chunkedSplice(events, open - 1, index - open + 3, nextEvents)
          index = open + nextEvents.length - offset - 2
          break
        }
      }
    }
  }
  // Remove remaining sequences: unmatched attention becomes plain data.
  index = -1
  while (++index < events.length) {
    if (events[index][1].type === 'attentionSequence') {
      events[index][1].type = 'data'
    }
  }
  return events
}
// Tokenize a run of `*` or `_`, and record on the resulting token whether
// the run can open and/or close attention, based on the characters before
// and after the run.
function tokenizeAttention(effects, ok) {
  var before = classifyCharacter(this.previous)
  var marker
  return start
  function start(code) {
    assert(
      code === codes.asterisk || code === codes.underscore,
      'expected asterisk or underscore'
    )
    effects.enter('attentionSequence')
    marker = code
    return sequence(code)
  }
  // Eat the whole marker run, then classify the character after it.
  function sequence(code) {
    var token
    var after
    var open
    var close
    if (code === marker) {
      effects.consume(code)
      return sequence
    }
    token = effects.exit('attentionSequence')
    after = classifyCharacter(code)
    // NOTE(review): `classifyCharacter` presumably returns falsy for word
    // characters — these checks implement CommonMark's flanking rules;
    // verify against classify-character.mjs.
    open = !after || (after === constants.characterGroupPunctuation && before)
    close = !before || (before === constants.characterGroupPunctuation && after)
    // `_` is stricter than `*`: the extra `before`/`after` terms forbid
    // opening/closing inside a word.
    token._open = marker === codes.asterisk ? open : open && (before || !close)
    token._close = marker === codes.asterisk ? close : close && (after || !open)
    return ok(code)
  }
}

147
node_modules/micromark/lib/tokenize/autolink.js generated vendored Normal file
View File

@@ -0,0 +1,147 @@
'use strict'
var assert = require('assert')
var asciiAlpha = require('../character/ascii-alpha.js')
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
var asciiAtext = require('../character/ascii-atext.js')
var asciiControl = require('../character/ascii-control.js')
var codes = require('../character/codes.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
// CJS/ESM interop: a value that already looks like a module namespace (an
// object with a `default` key) is returned unchanged; everything else gets
// wrapped so `x['default']` always works.
function _interopDefaultLegacy(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e
  }
  return {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// The autolink construct: `<https://example.com>` / `<user@example.com>`.
var autolink = {
  name: 'autolink',
  tokenize: tokenizeAutolink
}
// Tokenize an autolink: `<scheme:url>` or `<local@domain>`.
function tokenizeAutolink(effects, ok, nok) {
  // Counter reused first for scheme length, later for domain-label length.
  var size = 1
  return start
  function start(code) {
    assert__default['default'](code === codes.lessThan, 'expected `<`')
    effects.enter(types.autolink)
    effects.enter(types.autolinkMarker)
    effects.consume(code)
    effects.exit(types.autolinkMarker)
    effects.enter(types.autolinkProtocol)
    return open
  }
  // First character: a scheme must start with an ASCII alpha; anything else
  // can only be the local part of an email address.
  function open(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      return schemeOrEmailAtext
    }
    return asciiAtext(code) ? emailAtext(code) : nok(code)
  }
  function schemeOrEmailAtext(code) {
    return code === codes.plusSign ||
      code === codes.dash ||
      code === codes.dot ||
      asciiAlphanumeric(code)
      ? schemeInsideOrEmailAtext(code)
      : emailAtext(code)
  }
  // Still ambiguous: a colon decides URL; scheme characters are bounded by
  // the maximum scheme size, after which only email remains possible.
  function schemeInsideOrEmailAtext(code) {
    if (code === codes.colon) {
      effects.consume(code)
      return urlInside
    }
    if (
      (code === codes.plusSign ||
        code === codes.dash ||
        code === codes.dot ||
        asciiAlphanumeric(code)) &&
      size++ < constants.autolinkSchemeSizeMax
    ) {
      effects.consume(code)
      return schemeInsideOrEmailAtext
    }
    return emailAtext(code)
  }
  // Inside the URL: anything except space, `<`, and ASCII control, until `>`.
  function urlInside(code) {
    if (code === codes.greaterThan) {
      effects.exit(types.autolinkProtocol)
      return end(code)
    }
    if (code === codes.space || code === codes.lessThan || asciiControl(code)) {
      return nok(code)
    }
    effects.consume(code)
    return urlInside
  }
  // Email local part: atext characters until `@`.
  function emailAtext(code) {
    if (code === codes.atSign) {
      effects.consume(code)
      size = 0
      return emailAtSignOrDot
    }
    if (asciiAtext(code)) {
      effects.consume(code)
      return emailAtext
    }
    return nok(code)
  }
  // A domain label must start alphanumeric.
  function emailAtSignOrDot(code) {
    return asciiAlphanumeric(code) ? emailLabel(code) : nok(code)
  }
  function emailLabel(code) {
    if (code === codes.dot) {
      effects.consume(code)
      size = 0
      return emailAtSignOrDot
    }
    if (code === codes.greaterThan) {
      // Exit, then change the type.
      effects.exit(types.autolinkProtocol).type = types.autolinkEmail
      return end(code)
    }
    return emailValue(code)
  }
  // Label characters, bounded by the max domain size; after a dash, only
  // `emailValue` continues, so a label cannot end with a dash.
  function emailValue(code) {
    if (
      (code === codes.dash || asciiAlphanumeric(code)) &&
      size++ < constants.autolinkDomainSizeMax
    ) {
      effects.consume(code)
      return code === codes.dash ? emailValue : emailLabel
    }
    return nok(code)
  }
  function end(code) {
    assert__default['default'].equal(code, codes.greaterThan, 'expected `>`')
    effects.enter(types.autolinkMarker)
    effects.consume(code)
    effects.exit(types.autolinkMarker)
    effects.exit(types.autolink)
    return ok
  }
}
// Expose the construct (CommonJS build).
module.exports = autolink

138
node_modules/micromark/lib/tokenize/autolink.mjs generated vendored Normal file
View File

@@ -0,0 +1,138 @@
// The autolink construct: `<https://example.com>` / `<user@example.com>`.
var autolink = {
  name: 'autolink',
  tokenize: tokenizeAutolink
}
export default autolink
import assert from 'assert'
import asciiAlpha from '../character/ascii-alpha.mjs'
import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs'
import asciiAtext from '../character/ascii-atext.mjs'
import asciiControl from '../character/ascii-control.mjs'
import codes from '../character/codes.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
// Tokenize an autolink: `<scheme:url>` or `<local@domain>`.
function tokenizeAutolink(effects, ok, nok) {
  // Counter reused first for scheme length, later for domain-label length.
  var size = 1
  return start
  function start(code) {
    assert(code === codes.lessThan, 'expected `<`')
    effects.enter(types.autolink)
    effects.enter(types.autolinkMarker)
    effects.consume(code)
    effects.exit(types.autolinkMarker)
    effects.enter(types.autolinkProtocol)
    return open
  }
  // First character: a scheme must start with an ASCII alpha; anything else
  // can only be the local part of an email address.
  function open(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      return schemeOrEmailAtext
    }
    return asciiAtext(code) ? emailAtext(code) : nok(code)
  }
  function schemeOrEmailAtext(code) {
    return code === codes.plusSign ||
      code === codes.dash ||
      code === codes.dot ||
      asciiAlphanumeric(code)
      ? schemeInsideOrEmailAtext(code)
      : emailAtext(code)
  }
  // Still ambiguous: a colon decides URL; scheme characters are bounded by
  // the maximum scheme size, after which only email remains possible.
  function schemeInsideOrEmailAtext(code) {
    if (code === codes.colon) {
      effects.consume(code)
      return urlInside
    }
    if (
      (code === codes.plusSign ||
        code === codes.dash ||
        code === codes.dot ||
        asciiAlphanumeric(code)) &&
      size++ < constants.autolinkSchemeSizeMax
    ) {
      effects.consume(code)
      return schemeInsideOrEmailAtext
    }
    return emailAtext(code)
  }
  // Inside the URL: anything except space, `<`, and ASCII control, until `>`.
  function urlInside(code) {
    if (code === codes.greaterThan) {
      effects.exit(types.autolinkProtocol)
      return end(code)
    }
    if (code === codes.space || code === codes.lessThan || asciiControl(code)) {
      return nok(code)
    }
    effects.consume(code)
    return urlInside
  }
  // Email local part: atext characters until `@`.
  function emailAtext(code) {
    if (code === codes.atSign) {
      effects.consume(code)
      size = 0
      return emailAtSignOrDot
    }
    if (asciiAtext(code)) {
      effects.consume(code)
      return emailAtext
    }
    return nok(code)
  }
  // A domain label must start alphanumeric.
  function emailAtSignOrDot(code) {
    return asciiAlphanumeric(code) ? emailLabel(code) : nok(code)
  }
  function emailLabel(code) {
    if (code === codes.dot) {
      effects.consume(code)
      size = 0
      return emailAtSignOrDot
    }
    if (code === codes.greaterThan) {
      // Exit, then change the type.
      effects.exit(types.autolinkProtocol).type = types.autolinkEmail
      return end(code)
    }
    return emailValue(code)
  }
  // Label characters, bounded by the max domain size; after a dash, only
  // `emailValue` continues, so a label cannot end with a dash.
  function emailValue(code) {
    if (
      (code === codes.dash || asciiAlphanumeric(code)) &&
      size++ < constants.autolinkDomainSizeMax
    ) {
      effects.consume(code)
      return code === codes.dash ? emailValue : emailLabel
    }
    return nok(code)
  }
  function end(code) {
    assert.equal(code, codes.greaterThan, 'expected `>`')
    effects.enter(types.autolinkMarker)
    effects.consume(code)
    effects.exit(types.autolinkMarker)
    effects.exit(types.autolink)
    return ok
  }
}

67
node_modules/micromark/lib/tokenize/block-quote.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
'use strict'
var codes = require('../character/codes.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
// The block quote construct (`> …`): `tokenize` handles the start,
// `continuation` each following line, `exit` closes the container.
var blockQuote = {
  name: 'blockQuote',
  tokenize: tokenizeBlockQuoteStart,
  continuation: {tokenize: tokenizeBlockQuoteContinuation},
  exit: exit
}
// Tokenize the start of a block quote: the `>` marker plus one optional
// following space/tab.
function tokenizeBlockQuoteStart(effects, ok, nok) {
  var self = this
  return marker
  // Expect the `>` marker; open the container token on its first occurrence.
  function marker(code) {
    if (code !== codes.greaterThan) {
      return nok(code)
    }
    if (!self.containerState.open) {
      effects.enter(types.blockQuote, {_container: true})
      self.containerState.open = true
    }
    effects.enter(types.blockQuotePrefix)
    effects.enter(types.blockQuoteMarker)
    effects.consume(code)
    effects.exit(types.blockQuoteMarker)
    return afterMarker
  }
  // Optionally eat a single space/tab after the marker as prefix whitespace.
  function afterMarker(code) {
    if (!markdownSpace(code)) {
      effects.exit(types.blockQuotePrefix)
      return ok(code)
    }
    effects.enter(types.blockQuotePrefixWhitespace)
    effects.consume(code)
    effects.exit(types.blockQuotePrefixWhitespace)
    effects.exit(types.blockQuotePrefix)
    return ok
  }
}
// Continue a block quote: allow up to a tab of leading indentation (unless
// indented code is disabled), then try the block quote start again.
function tokenizeBlockQuoteContinuation(effects, ok, nok) {
  var codeIndentedDisabled =
    this.parser.constructs.disable.null.indexOf('codeIndented') > -1
  return factorySpace(
    effects,
    effects.attempt(blockQuote, ok, nok),
    types.linePrefix,
    codeIndentedDisabled ? undefined : constants.tabSize
  )
}
// Close the block quote container token entered at the start.
function exit(effects) {
  effects.exit(types.blockQuote)
}
// Expose the construct (CommonJS build).
module.exports = blockQuote

64
node_modules/micromark/lib/tokenize/block-quote.mjs generated vendored Normal file
View File

@@ -0,0 +1,64 @@
// The block quote construct (`> …`): `tokenize` handles the start,
// `continuation` each following line, `exit` closes the container.
var blockQuote = {
  name: 'blockQuote',
  tokenize: tokenizeBlockQuoteStart,
  continuation: {tokenize: tokenizeBlockQuoteContinuation},
  exit: exit
}
export default blockQuote
import codes from '../character/codes.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// Tokenize the start of a block quote: a `>` marker, then optionally one
// space or tab.
function tokenizeBlockQuoteStart(effects, ok, nok) {
  var self = this
  return start
  function start(code) {
    if (code === codes.greaterThan) {
      // Open the container token only once per block quote.
      if (!self.containerState.open) {
        effects.enter(types.blockQuote, {_container: true})
        self.containerState.open = true
      }
      effects.enter(types.blockQuotePrefix)
      effects.enter(types.blockQuoteMarker)
      effects.consume(code)
      effects.exit(types.blockQuoteMarker)
      return after
    }
    return nok(code)
  }
  // After `>`: consume a single optional space/tab as prefix whitespace.
  function after(code) {
    if (markdownSpace(code)) {
      effects.enter(types.blockQuotePrefixWhitespace)
      effects.consume(code)
      effects.exit(types.blockQuotePrefixWhitespace)
      effects.exit(types.blockQuotePrefix)
      return ok
    }
    effects.exit(types.blockQuotePrefix)
    return ok(code)
  }
}
// Continue a block quote: eat optional leading indentation (capped at a tab
// unless indented code is disabled), then retry the block quote start.
function tokenizeBlockQuoteContinuation(effects, ok, nok) {
  var indentMax =
    this.parser.constructs.disable.null.indexOf('codeIndented') > -1
      ? undefined
      : constants.tabSize
  return spaceFactory(
    effects,
    effects.attempt(blockQuote, ok, nok),
    types.linePrefix,
    indentMax
  )
}
// Close the block quote container token entered at the start.
function exit(effects) {
  effects.exit(types.blockQuote)
}

View File

@@ -0,0 +1,44 @@
'use strict'
var assert = require('assert')
var asciiPunctuation = require('../character/ascii-punctuation.js')
var codes = require('../character/codes.js')
var types = require('../constant/types.js')
// CJS/ESM interop helper: objects that already expose a `default` key pass
// through untouched; any other value is wrapped as the default export.
function _interopDefaultLegacy(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e
  }
  return {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// The character escape construct: a backslash plus one ASCII punctuation
// character, e.g. `\!` or `\\`.
var characterEscape = {
  name: 'characterEscape',
  tokenize: tokenizeCharacterEscape
}
// Tokenize a character escape: a backslash followed by exactly one ASCII
// punctuation character.
function tokenizeCharacterEscape(effects, ok, nok) {
  return begin
  // At the `\`: record the escape marker.
  function begin(code) {
    assert__default['default'](code === codes.backslash, 'expected `\\`')
    effects.enter(types.characterEscape)
    effects.enter(types.escapeMarker)
    effects.consume(code)
    effects.exit(types.escapeMarker)
    return escaped
  }
  // After the `\`: only ASCII punctuation can be escaped.
  function escaped(code) {
    if (!asciiPunctuation(code)) {
      return nok(code)
    }
    effects.enter(types.characterEscapeValue)
    effects.consume(code)
    effects.exit(types.characterEscapeValue)
    effects.exit(types.characterEscape)
    return ok
  }
}
// Expose the construct (CommonJS build).
module.exports = characterEscape

View File

@@ -0,0 +1,35 @@
// The character escape construct: a backslash plus one ASCII punctuation
// character, e.g. `\!` or `\\`.
var characterEscape = {
  name: 'characterEscape',
  tokenize: tokenizeCharacterEscape
}
export default characterEscape
import assert from 'assert'
import asciiPunctuation from '../character/ascii-punctuation.mjs'
import codes from '../character/codes.mjs'
import types from '../constant/types.mjs'
// Tokenize a character escape: a backslash followed by exactly one ASCII
// punctuation character.
function tokenizeCharacterEscape(effects, ok, nok) {
  return begin
  // At the `\`: record the escape marker.
  function begin(code) {
    assert(code === codes.backslash, 'expected `\\`')
    effects.enter(types.characterEscape)
    effects.enter(types.escapeMarker)
    effects.consume(code)
    effects.exit(types.escapeMarker)
    return escaped
  }
  // After the `\`: only ASCII punctuation can be escaped.
  function escaped(code) {
    if (!asciiPunctuation(code)) {
      return nok(code)
    }
    effects.enter(types.characterEscapeValue)
    effects.consume(code)
    effects.exit(types.characterEscapeValue)
    effects.exit(types.characterEscape)
    return ok
  }
}

View File

@@ -0,0 +1,101 @@
'use strict'
var assert = require('assert')
var decodeEntity = require('parse-entities/decode-entity.js')
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
var asciiDigit = require('../character/ascii-digit.js')
var asciiHexDigit = require('../character/ascii-hex-digit.js')
var codes = require('../character/codes.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
// CJS/ESM interop: values already shaped like a module namespace (object
// with a `default` key) pass through; everything else is wrapped.
function _interopDefaultLegacy(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e
  }
  return {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var decodeEntity__default = /*#__PURE__*/ _interopDefaultLegacy(decodeEntity)
// The character reference construct: named (`&amp;`), decimal (`&#35;`),
// or hexadecimal (`&#x22;`).
var characterReference = {
  name: 'characterReference',
  tokenize: tokenizeCharacterReference
}
// Tokenize a character reference: named (`&amp;`), decimal (`&#123;`), or
// hexadecimal (`&#x1F;`). Named references must decode to a known entity.
function tokenizeCharacterReference(effects, ok, nok) {
  var self = this
  // Number of value characters consumed so far.
  var size = 0
  // Maximum number of value characters for the detected form.
  var max
  // Character test (alphanumeric / digit / hex digit) for the detected form.
  var test
  return start
  function start(code) {
    assert__default['default'](code === codes.ampersand, 'expected `&`')
    effects.enter(types.characterReference)
    effects.enter(types.characterReferenceMarker)
    effects.consume(code)
    effects.exit(types.characterReferenceMarker)
    return open
  }
  // After `&`: `#` starts a numeric reference, otherwise expect a name.
  function open(code) {
    if (code === codes.numberSign) {
      effects.enter(types.characterReferenceMarkerNumeric)
      effects.consume(code)
      effects.exit(types.characterReferenceMarkerNumeric)
      return numeric
    }
    effects.enter(types.characterReferenceValue)
    max = constants.characterReferenceNamedSizeMax
    test = asciiAlphanumeric
    return value(code)
  }
  // After `&#`: `x`/`X` switches to hexadecimal, otherwise decimal.
  function numeric(code) {
    if (code === codes.uppercaseX || code === codes.lowercaseX) {
      effects.enter(types.characterReferenceMarkerHexadecimal)
      effects.consume(code)
      effects.exit(types.characterReferenceMarkerHexadecimal)
      effects.enter(types.characterReferenceValue)
      max = constants.characterReferenceHexadecimalSizeMax
      test = asciiHexDigit
      return value
    }
    effects.enter(types.characterReferenceValue)
    max = constants.characterReferenceDecimalSizeMax
    test = asciiDigit
    return value(code)
  }
  // Value characters until `;`, bounded by `max` and checked by `test`.
  function value(code) {
    var token
    if (code === codes.semicolon && size) {
      token = effects.exit(types.characterReferenceValue)
      // Named references must actually exist: `&madeup;` does not decode,
      // so it is not a reference.
      if (
        test === asciiAlphanumeric &&
        !decodeEntity__default['default'](self.sliceSerialize(token))
      ) {
        return nok(code)
      }
      effects.enter(types.characterReferenceMarker)
      effects.consume(code)
      effects.exit(types.characterReferenceMarker)
      effects.exit(types.characterReference)
      return ok
    }
    if (test(code) && size++ < max) {
      effects.consume(code)
      return value
    }
    return nok(code)
  }
}
// Expose the construct (CommonJS build).
module.exports = characterReference

View File

@@ -0,0 +1,88 @@
// The character reference construct: named (`&amp;`), decimal (`&#35;`),
// or hexadecimal (`&#x22;`).
var characterReference = {
  name: 'characterReference',
  tokenize: tokenizeCharacterReference
}
export default characterReference
import assert from 'assert'
import decode from 'parse-entities/decode-entity.js'
import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs'
import asciiDigit from '../character/ascii-digit.mjs'
import asciiHexDigit from '../character/ascii-hex-digit.mjs'
import codes from '../character/codes.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
// Tokenize a character reference: named (`&amp;`), decimal (`&#123;`), or
// hexadecimal (`&#x1F;`). Named references must decode to a known entity.
function tokenizeCharacterReference(effects, ok, nok) {
  var self = this
  // Number of value characters consumed so far.
  var size = 0
  // Maximum number of value characters for the detected form.
  var max
  // Character test (alphanumeric / digit / hex digit) for the detected form.
  var test
  return start
  function start(code) {
    assert(code === codes.ampersand, 'expected `&`')
    effects.enter(types.characterReference)
    effects.enter(types.characterReferenceMarker)
    effects.consume(code)
    effects.exit(types.characterReferenceMarker)
    return open
  }
  // After `&`: `#` starts a numeric reference, otherwise expect a name.
  function open(code) {
    if (code === codes.numberSign) {
      effects.enter(types.characterReferenceMarkerNumeric)
      effects.consume(code)
      effects.exit(types.characterReferenceMarkerNumeric)
      return numeric
    }
    effects.enter(types.characterReferenceValue)
    max = constants.characterReferenceNamedSizeMax
    test = asciiAlphanumeric
    return value(code)
  }
  // After `&#`: `x`/`X` switches to hexadecimal, otherwise decimal.
  function numeric(code) {
    if (code === codes.uppercaseX || code === codes.lowercaseX) {
      effects.enter(types.characterReferenceMarkerHexadecimal)
      effects.consume(code)
      effects.exit(types.characterReferenceMarkerHexadecimal)
      effects.enter(types.characterReferenceValue)
      max = constants.characterReferenceHexadecimalSizeMax
      test = asciiHexDigit
      return value
    }
    effects.enter(types.characterReferenceValue)
    max = constants.characterReferenceDecimalSizeMax
    test = asciiDigit
    return value(code)
  }
  // Value characters until `;`, bounded by `max` and checked by `test`.
  function value(code) {
    var token
    if (code === codes.semicolon && size) {
      token = effects.exit(types.characterReferenceValue)
      // Named references must actually exist: `&madeup;` does not decode,
      // so it is not a reference.
      if (test === asciiAlphanumeric && !decode(self.sliceSerialize(token))) {
        return nok(code)
      }
      effects.enter(types.characterReferenceMarker)
      effects.consume(code)
      effects.exit(types.characterReferenceMarker)
      effects.exit(types.characterReference)
      return ok
    }
    if (test(code) && size++ < max) {
      effects.consume(code)
      return value
    }
    return nok(code)
  }
}

185
node_modules/micromark/lib/tokenize/code-fenced.js generated vendored Normal file
View File

@@ -0,0 +1,185 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var prefixSize = require('../util/prefix-size.js')
var factorySpace = require('./factory-space.js')
// CJS/ESM interop: module-namespace-shaped objects (with a `default` key)
// are returned as-is; any other value is wrapped as the default export.
function _interopDefaultLegacy(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e
  }
  return {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// The fenced code construct: ``` or ~~~ fences around raw content.
// `concrete: true` — NOTE(review): presumably marks the content as raw for
// the flow tokenizer; confirm against micromark's construct docs.
var codeFenced = {
  name: 'codeFenced',
  tokenize: tokenizeCodeFenced,
  concrete: true
}
// Tokenize fenced code: an opening fence (``` or ~~~, with optional
// info/meta words), raw content lines, and an optional closing fence of the
// same marker with at least as many markers as the opening fence.
function tokenizeCodeFenced(effects, ok, nok) {
  var self = this
  var closingFenceConstruct = {tokenize: tokenizeClosingFence, partial: true}
  // Indentation before the opening fence; content lines may strip up to it.
  var initialPrefix = prefixSize(this.events, types.linePrefix)
  var sizeOpen = 0
  var marker
  return start
  function start(code) {
    assert__default['default'](
      code === codes.graveAccent || code === codes.tilde,
      'expected `` ` `` or `~`'
    )
    effects.enter(types.codeFenced)
    effects.enter(types.codeFencedFence)
    effects.enter(types.codeFencedFenceSequence)
    marker = code
    return sequenceOpen(code)
  }
  // Count the opening fence markers; the minimum sequence size is required.
  function sequenceOpen(code) {
    if (code === marker) {
      effects.consume(code)
      sizeOpen++
      return sequenceOpen
    }
    effects.exit(types.codeFencedFenceSequence)
    return sizeOpen < constants.codeFencedSequenceSizeMin
      ? nok(code)
      : factorySpace(effects, infoOpen, types.whitespace)(code)
  }
  function infoOpen(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return openAfter(code)
    }
    effects.enter(types.codeFencedFenceInfo)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return info(code)
  }
  // The info word (e.g. the language); a backtick is rejected when the fence
  // itself uses backticks.
  function info(code) {
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceInfo)
      return factorySpace(effects, infoAfter, types.whitespace)(code)
    }
    if (code === codes.graveAccent && code === marker) return nok(code)
    effects.consume(code)
    return info
  }
  function infoAfter(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return openAfter(code)
    }
    effects.enter(types.codeFencedFenceMeta)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return meta(code)
  }
  // Everything after the info word until the line ending.
  function meta(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceMeta)
      return openAfter(code)
    }
    if (code === codes.graveAccent && code === marker) return nok(code)
    effects.consume(code)
    return meta
  }
  // After the opening fence; when interrupting, the fence alone is enough.
  function openAfter(code) {
    effects.exit(types.codeFencedFence)
    return self.interrupt ? ok(code) : content(code)
  }
  // A content line: after each line ending, first try the closing fence.
  function content(code) {
    if (code === codes.eof) {
      return after(code)
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return effects.attempt(
        closingFenceConstruct,
        after,
        initialPrefix
          ? factorySpace(effects, content, types.linePrefix, initialPrefix + 1)
          : content
      )
    }
    effects.enter(types.codeFlowValue)
    return contentContinue(code)
  }
  function contentContinue(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue)
      return content(code)
    }
    effects.consume(code)
    return contentContinue
  }
  function after(code) {
    effects.exit(types.codeFenced)
    return ok(code)
  }
  // Partial tokenizer for a possible closing fence: optional indentation,
  // then at least `sizeOpen` markers, then only whitespace to the line end.
  function tokenizeClosingFence(effects, ok, nok) {
    var size = 0
    return factorySpace(
      effects,
      closingSequenceStart,
      types.linePrefix,
      this.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : constants.tabSize
    )
    function closingSequenceStart(code) {
      effects.enter(types.codeFencedFence)
      effects.enter(types.codeFencedFenceSequence)
      return closingSequence(code)
    }
    function closingSequence(code) {
      if (code === marker) {
        effects.consume(code)
        size++
        return closingSequence
      }
      if (size < sizeOpen) return nok(code)
      effects.exit(types.codeFencedFenceSequence)
      return factorySpace(effects, closingSequenceEnd, types.whitespace)(code)
    }
    function closingSequenceEnd(code) {
      if (code === codes.eof || markdownLineEnding(code)) {
        effects.exit(types.codeFencedFence)
        return ok(code)
      }
      return nok(code)
    }
  }
}
// Expose the construct (CommonJS build).
module.exports = codeFenced

176
node_modules/micromark/lib/tokenize/code-fenced.mjs generated vendored Normal file
View File

@@ -0,0 +1,176 @@
// The fenced code construct: ``` or ~~~ fences around raw content.
// `concrete: true` — NOTE(review): presumably marks the content as raw for
// the flow tokenizer; confirm against micromark's construct docs.
var codeFenced = {
  name: 'codeFenced',
  tokenize: tokenizeCodeFenced,
  concrete: true
}
export default codeFenced
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import prefixSize from '../util/prefix-size.mjs'
import spaceFactory from './factory-space.mjs'
// Tokenizer for a fenced code block: an opening fence (marker run, then
// optional info/meta strings), any number of content lines, and an optional
// closing fence.
function tokenizeCodeFenced(effects, ok, nok) {
  var self = this
  var closingFenceConstruct = {tokenize: tokenizeClosingFence, partial: true}
  // Indentation before the opening fence; up to this much indentation (plus
  // one) is stripped from each content line again.
  var initialPrefix = prefixSize(this.events, types.linePrefix)
  var sizeOpen = 0
  var marker
  return start
  function start(code) {
    assert(
      code === codes.graveAccent || code === codes.tilde,
      'expected `` ` `` or `~`'
    )
    effects.enter(types.codeFenced)
    effects.enter(types.codeFencedFence)
    effects.enter(types.codeFencedFenceSequence)
    // Remember which marker is used; the rest of the fence must match it.
    marker = code
    return sequenceOpen(code)
  }
  // Count the opening marker run; fewer than
  // `constants.codeFencedSequenceSizeMin` markers is not a fence.
  function sequenceOpen(code) {
    if (code === marker) {
      effects.consume(code)
      sizeOpen++
      return sequenceOpen
    }
    effects.exit(types.codeFencedFenceSequence)
    return sizeOpen < constants.codeFencedSequenceSizeMin
      ? nok(code)
      : spaceFactory(effects, infoOpen, types.whitespace)(code)
  }
  // Optional info string (typically a language flag) after the fence.
  function infoOpen(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return openAfter(code)
    }
    effects.enter(types.codeFencedFenceInfo)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return info(code)
  }
  function info(code) {
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceInfo)
      return spaceFactory(effects, infoAfter, types.whitespace)(code)
    }
    // A backtick in the info string of a backtick fence is not allowed.
    if (code === codes.graveAccent && code === marker) return nok(code)
    effects.consume(code)
    return info
  }
  // Optional meta string after the info string, until the end of the line.
  function infoAfter(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return openAfter(code)
    }
    effects.enter(types.codeFencedFenceMeta)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return meta(code)
  }
  function meta(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceMeta)
      return openAfter(code)
    }
    // Same restriction as in `info`: no marker inside backtick fences.
    if (code === codes.graveAccent && code === marker) return nok(code)
    effects.consume(code)
    return meta
  }
  // The opening fence is done; when interrupting, the fence alone is enough.
  function openAfter(code) {
    effects.exit(types.codeFencedFence)
    return self.interrupt ? ok(code) : content(code)
  }
  // Between the fences: at each EOL, first try the closing fence; otherwise
  // strip the initial indentation (if any) and continue with content.
  function content(code) {
    if (code === codes.eof) {
      return after(code)
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return effects.attempt(
        closingFenceConstruct,
        after,
        initialPrefix
          ? spaceFactory(effects, content, types.linePrefix, initialPrefix + 1)
          : content
      )
    }
    effects.enter(types.codeFlowValue)
    return contentContinue(code)
  }
  // Consume code content until the end of the line (or file).
  function contentContinue(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue)
      return content(code)
    }
    effects.consume(code)
    return contentContinue
  }
  function after(code) {
    effects.exit(types.codeFenced)
    return ok(code)
  }
  // Partial: tokenize a potential closing fence (same marker, at least
  // `sizeOpen` long, only whitespace afterwards).
  function tokenizeClosingFence(effects, ok, nok) {
    var size = 0
    // Allow up to a tab of indentation, unless indented code is disabled.
    return spaceFactory(
      effects,
      closingSequenceStart,
      types.linePrefix,
      this.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : constants.tabSize
    )
    function closingSequenceStart(code) {
      effects.enter(types.codeFencedFence)
      effects.enter(types.codeFencedFenceSequence)
      return closingSequence(code)
    }
    function closingSequence(code) {
      if (code === marker) {
        effects.consume(code)
        size++
        return closingSequence
      }
      // The closing run must be at least as long as the opening run.
      if (size < sizeOpen) return nok(code)
      effects.exit(types.codeFencedFenceSequence)
      return spaceFactory(effects, closingSequenceEnd, types.whitespace)(code)
    }
    function closingSequenceEnd(code) {
      if (code === codes.eof || markdownLineEnding(code)) {
        effects.exit(types.codeFencedFence)
        return ok(code)
      }
      return nok(code)
    }
  }
}

91
node_modules/micromark/lib/tokenize/code-indented.js generated vendored Normal file
View File

@@ -0,0 +1,91 @@
'use strict'
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var chunkedSplice = require('../util/chunked-splice.js')
var prefixSize = require('../util/prefix-size.js')
var factorySpace = require('./factory-space.js')
// Indented code blocks (lines indented by at least a tab size).
var codeIndented = {
  name: 'codeIndented',
  tokenize: tokenizeCodeIndented,
  resolve: resolveCodeIndented
}
// Partial construct for one indented line (prefix + indentation check).
var indentedContentConstruct = {
  tokenize: tokenizeIndentedContent,
  partial: true
}
// Wrap all tokenized events in one `codeIndented` token spanning from the
// first event's start to the last event's end.
function resolveCodeIndented(events, context) {
  var first = events[0][1]
  var last = events[events.length - 1][1]
  var token = {
    type: types.codeIndented,
    start: first.start,
    end: last.end
  }
  // Single-item inserts: plain unshift/push are equivalent to the chunked
  // splice the surrounding code uses for large insertions.
  events.unshift(['enter', token, context])
  events.push(['exit', token, context])
  return events
}
// Tokenize indented code: repeatedly attempt an indented line, collecting
// code flow values in between.
function tokenizeCodeIndented(effects, ok, nok) {
  return effects.attempt(indentedContentConstruct, afterPrefix, nok)
  // After a successful indented prefix: EOF ends the block, an EOL tries
  // another indented line, anything else is code content.
  function afterPrefix(code) {
    if (code === codes.eof) {
      return ok(code)
    }
    if (markdownLineEnding(code)) {
      return effects.attempt(indentedContentConstruct, afterPrefix, ok)(code)
    }
    effects.enter(types.codeFlowValue)
    return content(code)
  }
  // Consume code content until the end of the line (or file).
  function content(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue)
      return afterPrefix(code)
    }
    effects.consume(code)
    return content
  }
}
// Partial: consume the line prefix (skipping blank lines) and check that the
// line is indented by at least `constants.tabSize`.
function tokenizeIndentedContent(effects, ok, nok) {
  var self = this
  return factorySpace(
    effects,
    afterPrefix,
    types.linePrefix,
    constants.tabSize + 1
  )
  function afterPrefix(code) {
    // Blank line: consume the EOL and try the next line's prefix.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return factorySpace(
        effects,
        afterPrefix,
        types.linePrefix,
        constants.tabSize + 1
      )
    }
    // Not enough indentation: not (or no longer) indented code.
    return prefixSize(self.events, types.linePrefix) < constants.tabSize
      ? nok(code)
      : ok(code)
  }
}
module.exports = codeIndented

88
node_modules/micromark/lib/tokenize/code-indented.mjs generated vendored Normal file
View File

@@ -0,0 +1,88 @@
// Indented code blocks (lines indented by at least a tab size).
var codeIndented = {
  name: 'codeIndented',
  tokenize: tokenizeCodeIndented,
  resolve: resolveCodeIndented
}
export default codeIndented
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import chunkedSplice from '../util/chunked-splice.mjs'
import prefixSize from '../util/prefix-size.mjs'
import spaceFactory from './factory-space.mjs'
// Partial construct for one indented line (prefix + indentation check).
var indentedContentConstruct = {
  tokenize: tokenizeIndentedContent,
  partial: true
}
// Wrap all tokenized events in one `codeIndented` token spanning from the
// first event's start to the last event's end.
function resolveCodeIndented(events, context) {
  var first = events[0][1]
  var last = events[events.length - 1][1]
  var token = {
    type: types.codeIndented,
    start: first.start,
    end: last.end
  }
  // Single-item inserts: plain unshift/push are equivalent to the chunked
  // splice the surrounding code uses for large insertions.
  events.unshift(['enter', token, context])
  events.push(['exit', token, context])
  return events
}
// Tokenize indented code: repeatedly attempt an indented line, collecting
// code flow values in between.
function tokenizeCodeIndented(effects, ok, nok) {
  return effects.attempt(indentedContentConstruct, afterPrefix, nok)
  // After a successful indented prefix: EOF ends the block, an EOL tries
  // another indented line, anything else is code content.
  function afterPrefix(code) {
    if (code === codes.eof) {
      return ok(code)
    }
    if (markdownLineEnding(code)) {
      return effects.attempt(indentedContentConstruct, afterPrefix, ok)(code)
    }
    effects.enter(types.codeFlowValue)
    return content(code)
  }
  // Consume code content until the end of the line (or file).
  function content(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue)
      return afterPrefix(code)
    }
    effects.consume(code)
    return content
  }
}
// Partial: consume the line prefix (skipping blank lines) and check that the
// line is indented by at least `constants.tabSize`.
function tokenizeIndentedContent(effects, ok, nok) {
  var self = this
  return spaceFactory(
    effects,
    afterPrefix,
    types.linePrefix,
    constants.tabSize + 1
  )
  function afterPrefix(code) {
    // Blank line: consume the EOL and try the next line's prefix.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return spaceFactory(
        effects,
        afterPrefix,
        types.linePrefix,
        constants.tabSize + 1
      )
    }
    // Not enough indentation: not (or no longer) indented code.
    return prefixSize(self.events, types.linePrefix) < constants.tabSize
      ? nok(code)
      : ok(code)
  }
}

191
node_modules/micromark/lib/tokenize/code-text.js generated vendored Normal file
View File

@@ -0,0 +1,191 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// Code spans in text (`` `code` ``).
var codeText = {
  name: 'codeText',
  tokenize: tokenizeCodeText,
  resolve: resolveCodeText,
  previous: previous
}
// Post-process a code span: strip one leading and trailing padding space (or
// EOL) when both sides have one and there is data between them, then merge
// adjacent space/data events into single `codeTextData` tokens.
function resolveCodeText(events) {
  var tailExitIndex = events.length - 4
  var headEnterIndex = 3
  var index
  var enter
  // If we start and end with an EOL or a space.
  if (
    (events[headEnterIndex][1].type === types.lineEnding ||
      events[headEnterIndex][1].type === 'space') &&
    (events[tailExitIndex][1].type === types.lineEnding ||
      events[tailExitIndex][1].type === 'space')
  ) {
    index = headEnterIndex
    // And we have data.
    while (++index < tailExitIndex) {
      if (events[index][1].type === types.codeTextData) {
        // Then we have padding.
        events[tailExitIndex][1].type = events[headEnterIndex][1].type =
          types.codeTextPadding
        headEnterIndex += 2
        tailExitIndex -= 2
        break
      }
    }
  }
  // Merge adjacent spaces and data.
  index = headEnterIndex - 1
  tailExitIndex++
  while (++index <= tailExitIndex) {
    if (enter === undefined) {
      // Start of a run: anything that is not an EOL begins a mergeable span.
      if (
        index !== tailExitIndex &&
        events[index][1].type !== types.lineEnding
      ) {
        enter = index
      }
    } else if (
      index === tailExitIndex ||
      events[index][1].type === types.lineEnding
    ) {
      // End of a run: retype the first event as data and, if the run covers
      // more than one token, extend it and splice out the rest.
      events[enter][1].type = types.codeTextData
      if (index !== enter + 2) {
        events[enter][1].end = events[index - 1][1].end
        events.splice(enter + 2, index - enter - 2)
        tailExitIndex -= index - enter - 2
        index = enter + 2
      }
      enter = undefined
    }
  }
  return events
}
// Check the character before a code span: any character is fine except an
// unescaped backtick (which would belong to another sequence).
function previous(code) {
  if (code !== codes.graveAccent) {
    return true
  }
  // If there is a previous code, there will always be a tail.
  var tail = this.events[this.events.length - 1]
  return tail[1].type === types.characterEscape
}
// Tokenizer for a code span: an opening backtick run, a gap of data, spaces,
// and line endings, and a closing run of exactly the same size.
function tokenizeCodeText(effects, ok, nok) {
  var self = this
  var sizeOpen = 0
  var size
  var token
  return start
  function start(code) {
    assert__default['default'](code === codes.graveAccent, 'expected `` ` ``')
    assert__default['default'](
      previous.call(self, self.previous),
      'expected correct previous'
    )
    effects.enter(types.codeText)
    effects.enter(types.codeTextSequence)
    return openingSequence(code)
  }
  // Count the opening backtick run.
  function openingSequence(code) {
    if (code === codes.graveAccent) {
      effects.consume(code)
      sizeOpen++
      return openingSequence
    }
    effects.exit(types.codeTextSequence)
    return gap(code)
  }
  // Between sequences: decide what the next token is.
  function gap(code) {
    // EOF.
    if (code === codes.eof) {
      return nok(code)
    }
    // Closing fence?
    // Could also be data.
    if (code === codes.graveAccent) {
      token = effects.enter(types.codeTextSequence)
      size = 0
      return closingSequence(code)
    }
    // Tabs don't work, and virtual spaces don't make sense.
    if (code === codes.space) {
      effects.enter('space')
      effects.consume(code)
      effects.exit('space')
      return gap
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return gap
    }
    // Data.
    effects.enter(types.codeTextData)
    return data(code)
  }
  // In code.
  function data(code) {
    if (
      code === codes.eof ||
      code === codes.space ||
      code === codes.graveAccent ||
      markdownLineEnding(code)
    ) {
      effects.exit(types.codeTextData)
      return gap(code)
    }
    effects.consume(code)
    return data
  }
  // Closing fence.
  function closingSequence(code) {
    // More.
    if (code === codes.graveAccent) {
      effects.consume(code)
      size++
      return closingSequence
    }
    // Done!
    if (size === sizeOpen) {
      effects.exit(types.codeTextSequence)
      effects.exit(types.codeText)
      return ok(code)
    }
    // More or less accents: mark as data.
    token.type = types.codeTextData
    return data(code)
  }
}
module.exports = codeText

179
node_modules/micromark/lib/tokenize/code-text.mjs generated vendored Normal file
View File

@@ -0,0 +1,179 @@
// Code spans in text (`` `code` ``).
var codeText = {
  name: 'codeText',
  tokenize: tokenizeCodeText,
  resolve: resolveCodeText,
  previous: previous
}
export default codeText
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import types from '../constant/types.mjs'
// Post-process a code span: strip one leading and trailing padding space (or
// EOL) when both sides have one and there is data between them, then merge
// adjacent space/data events into single `codeTextData` tokens.
function resolveCodeText(events) {
  var tailExitIndex = events.length - 4
  var headEnterIndex = 3
  var index
  var enter
  // If we start and end with an EOL or a space.
  if (
    (events[headEnterIndex][1].type === types.lineEnding ||
      events[headEnterIndex][1].type === 'space') &&
    (events[tailExitIndex][1].type === types.lineEnding ||
      events[tailExitIndex][1].type === 'space')
  ) {
    index = headEnterIndex
    // And we have data.
    while (++index < tailExitIndex) {
      if (events[index][1].type === types.codeTextData) {
        // Then we have padding.
        events[tailExitIndex][1].type = events[headEnterIndex][1].type =
          types.codeTextPadding
        headEnterIndex += 2
        tailExitIndex -= 2
        break
      }
    }
  }
  // Merge adjacent spaces and data.
  index = headEnterIndex - 1
  tailExitIndex++
  while (++index <= tailExitIndex) {
    if (enter === undefined) {
      // Start of a run: anything that is not an EOL begins a mergeable span.
      if (
        index !== tailExitIndex &&
        events[index][1].type !== types.lineEnding
      ) {
        enter = index
      }
    } else if (
      index === tailExitIndex ||
      events[index][1].type === types.lineEnding
    ) {
      // End of a run: retype the first event as data and, if the run covers
      // more than one token, extend it and splice out the rest.
      events[enter][1].type = types.codeTextData
      if (index !== enter + 2) {
        events[enter][1].end = events[index - 1][1].end
        events.splice(enter + 2, index - enter - 2)
        tailExitIndex -= index - enter - 2
        index = enter + 2
      }
      enter = undefined
    }
  }
  return events
}
// Check the character before a code span: any character is fine except an
// unescaped backtick (which would belong to another sequence).
function previous(code) {
  if (code !== codes.graveAccent) {
    return true
  }
  // If there is a previous code, there will always be a tail.
  var tail = this.events[this.events.length - 1]
  return tail[1].type === types.characterEscape
}
// Tokenizer for a code span: an opening backtick run, a gap of data, spaces,
// and line endings, and a closing run of exactly the same size.
function tokenizeCodeText(effects, ok, nok) {
  var self = this
  var sizeOpen = 0
  var size
  var token
  return start
  function start(code) {
    assert(code === codes.graveAccent, 'expected `` ` ``')
    assert(previous.call(self, self.previous), 'expected correct previous')
    effects.enter(types.codeText)
    effects.enter(types.codeTextSequence)
    return openingSequence(code)
  }
  // Count the opening backtick run.
  function openingSequence(code) {
    if (code === codes.graveAccent) {
      effects.consume(code)
      sizeOpen++
      return openingSequence
    }
    effects.exit(types.codeTextSequence)
    return gap(code)
  }
  // Between sequences: decide what the next token is.
  function gap(code) {
    // EOF.
    if (code === codes.eof) {
      return nok(code)
    }
    // Closing fence?
    // Could also be data.
    if (code === codes.graveAccent) {
      token = effects.enter(types.codeTextSequence)
      size = 0
      return closingSequence(code)
    }
    // Tabs don't work, and virtual spaces don't make sense.
    if (code === codes.space) {
      effects.enter('space')
      effects.consume(code)
      effects.exit('space')
      return gap
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return gap
    }
    // Data.
    effects.enter(types.codeTextData)
    return data(code)
  }
  // In code.
  function data(code) {
    if (
      code === codes.eof ||
      code === codes.space ||
      code === codes.graveAccent ||
      markdownLineEnding(code)
    ) {
      effects.exit(types.codeTextData)
      return gap(code)
    }
    effects.consume(code)
    return data
  }
  // Closing fence.
  function closingSequence(code) {
    // More.
    if (code === codes.graveAccent) {
      effects.consume(code)
      size++
      return closingSequence
    }
    // Done!
    if (size === sizeOpen) {
      effects.exit(types.codeTextSequence)
      effects.exit(types.codeText)
      return ok(code)
    }
    // More or less accents: mark as data.
    token.type = types.codeTextData
    return data(code)
  }
}

121
node_modules/micromark/lib/tokenize/content.js generated vendored Normal file
View File

@@ -0,0 +1,121 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var prefixSize = require('../util/prefix-size.js')
var subtokenize = require('../util/subtokenize.js')
var factorySpace = require('./factory-space.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// No name because it must not be turned off.
var content = {
  tokenize: tokenizeContent,
  resolve: resolveContent,
  interruptible: true,
  lazy: true
}
// Partial used at each EOL to look ahead: does the content continue?
var continuationConstruct = {tokenize: tokenizeContinuation, partial: true}
// Content is transparent: its parsed right now. That way, definitions are also
// parsed right now: before text in paragraphs (specifically, media) are parsed.
function resolveContent(events) {
  subtokenize(events)
  return events
}
// Tokenize content: chunks of text separated by line endings, where each EOL
// is checked with lookahead to decide whether the content continues.
function tokenizeContent(effects, ok) {
  var previous
  return start
  function start(code) {
    assert__default['default'](
      code !== codes.eof && !markdownLineEnding(code),
      'expected no eof or eol'
    )
    effects.enter(types.content)
    previous = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent
    })
    return data(code)
  }
  function data(code) {
    if (code === codes.eof) {
      return contentEnd(code)
    }
    // At an EOL: check (without consuming for real) whether the next line
    // continues this content.
    if (markdownLineEnding(code)) {
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd
      )(code)
    }
    // Data.
    effects.consume(code)
    return data
  }
  function contentEnd(code) {
    effects.exit(types.chunkContent)
    effects.exit(types.content)
    return ok(code)
  }
  // Continue: start a new chunk, linked to the previous one.
  function contentContinue(code) {
    assert__default['default'](markdownLineEnding(code), 'expected eol')
    effects.consume(code)
    effects.exit(types.chunkContent)
    previous = previous.next = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent,
      previous: previous
    })
    return data
  }
}
// Partial (lookahead): consume the EOL and line prefix, then decide whether
// the next line continues the content.
function tokenizeContinuation(effects, ok, nok) {
  var self = this
  return startLookahead
  function startLookahead(code) {
    assert__default['default'](
      markdownLineEnding(code),
      'expected a line ending'
    )
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return factorySpace(effects, prefixed, types.linePrefix)
  }
  function prefixed(code) {
    // A blank line (or EOF) ends the content.
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code)
    }
    // Unless the next line is indented like code (and indented code is
    // enabled), let other flow constructs try to interrupt: if one can,
    // the content does not continue.
    if (
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1 ||
      prefixSize(self.events, types.linePrefix) < constants.tabSize
    ) {
      return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
    }
    return ok(code)
  }
}
module.exports = content

109
node_modules/micromark/lib/tokenize/content.mjs generated vendored Normal file
View File

@@ -0,0 +1,109 @@
// No name because it must not be turned off.
var content = {
  tokenize: tokenizeContent,
  resolve: resolveContent,
  interruptible: true,
  lazy: true
}
export default content
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import prefixSize from '../util/prefix-size.mjs'
import subtokenize from '../util/subtokenize.mjs'
import spaceFactory from './factory-space.mjs'
// Partial used at each EOL to look ahead: does the content continue?
var continuationConstruct = {tokenize: tokenizeContinuation, partial: true}
// Content is transparent: its parsed right now. That way, definitions are also
// parsed right now: before text in paragraphs (specifically, media) are parsed.
function resolveContent(events) {
  subtokenize(events)
  return events
}
// Tokenize content: chunks of text separated by line endings, where each EOL
// is checked with lookahead to decide whether the content continues.
function tokenizeContent(effects, ok) {
  var previous
  return start
  function start(code) {
    assert(
      code !== codes.eof && !markdownLineEnding(code),
      'expected no eof or eol'
    )
    effects.enter(types.content)
    previous = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent
    })
    return data(code)
  }
  function data(code) {
    if (code === codes.eof) {
      return contentEnd(code)
    }
    // At an EOL: check (without consuming for real) whether the next line
    // continues this content.
    if (markdownLineEnding(code)) {
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd
      )(code)
    }
    // Data.
    effects.consume(code)
    return data
  }
  function contentEnd(code) {
    effects.exit(types.chunkContent)
    effects.exit(types.content)
    return ok(code)
  }
  // Continue: start a new chunk, linked to the previous one.
  function contentContinue(code) {
    assert(markdownLineEnding(code), 'expected eol')
    effects.consume(code)
    effects.exit(types.chunkContent)
    previous = previous.next = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent,
      previous: previous
    })
    return data
  }
}
// Partial (lookahead): consume the EOL and line prefix, then decide whether
// the next line continues the content.
function tokenizeContinuation(effects, ok, nok) {
  var self = this
  return startLookahead
  function startLookahead(code) {
    assert(markdownLineEnding(code), 'expected a line ending')
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return spaceFactory(effects, prefixed, types.linePrefix)
  }
  function prefixed(code) {
    // A blank line (or EOF) ends the content.
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code)
    }
    // Unless the next line is indented like code (and indented code is
    // enabled), let other flow constructs try to interrupt: if one can,
    // the content does not continue.
    if (
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1 ||
      prefixSize(self.events, types.linePrefix) < constants.tabSize
    ) {
      return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
    }
    return ok(code)
  }
}

129
node_modules/micromark/lib/tokenize/definition.js generated vendored Normal file
View File

@@ -0,0 +1,129 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var types = require('../constant/types.js')
var normalizeIdentifier = require('../util/normalize-identifier.js')
var factoryDestination = require('./factory-destination.js')
var factoryLabel = require('./factory-label.js')
var factorySpace = require('./factory-space.js')
var factoryWhitespace = require('./factory-whitespace.js')
var factoryTitle = require('./factory-title.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// Definitions: `[label]: destination "title"` (title optional).
var definition = {
  name: 'definition',
  tokenize: tokenizeDefinition
}
// Partial for the optional title part.
var titleConstruct = {tokenize: tokenizeTitle, partial: true}
function tokenizeDefinition(effects, ok, nok) {
  var self = this
  var identifier
  return start
  function start(code) {
    assert__default['default'](code === codes.leftSquareBracket, 'expected `[`')
    effects.enter(types.definition)
    // Parse the `[label]` part.
    return factoryLabel.call(
      self,
      effects,
      labelAfter,
      nok,
      types.definitionLabel,
      types.definitionLabelMarker,
      types.definitionLabelString
    )(code)
  }
  function labelAfter(code) {
    // Normalize the label (minus its brackets) as the identifier.
    identifier = normalizeIdentifier(
      self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
    )
    if (code === codes.colon) {
      effects.enter(types.definitionMarker)
      effects.consume(code)
      effects.exit(types.definitionMarker)
      // Note: blank lines cant exist in content.
      return factoryWhitespace(
        effects,
        factoryDestination(
          effects,
          effects.attempt(
            titleConstruct,
            factorySpace(effects, after, types.whitespace),
            factorySpace(effects, after, types.whitespace)
          ),
          nok,
          types.definitionDestination,
          types.definitionDestinationLiteral,
          types.definitionDestinationLiteralMarker,
          types.definitionDestinationRaw,
          types.definitionDestinationString
        )
      )
    }
    return nok(code)
  }
  function after(code) {
    // A definition must end at the end of the line.
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.definition)
      // Record the identifier so references can match against it.
      if (self.parser.defined.indexOf(identifier) < 0) {
        self.parser.defined.push(identifier)
      }
      return ok(code)
    }
    return nok(code)
  }
}
// Partial: whitespace, then a quoted or parenthesized title, then only
// whitespace until the end of the line.
function tokenizeTitle(effects, ok, nok) {
  return start
  function start(code) {
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, before)(code)
      : nok(code)
  }
  function before(code) {
    if (
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.leftParenthesis
    ) {
      return factoryTitle(
        effects,
        factorySpace(effects, after, types.whitespace),
        nok,
        types.definitionTitle,
        types.definitionTitleMarker,
        types.definitionTitleString
      )(code)
    }
    return nok(code)
  }
  function after(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}
module.exports = definition

120
node_modules/micromark/lib/tokenize/definition.mjs generated vendored Normal file
View File

@@ -0,0 +1,120 @@
// Definitions: `[label]: destination "title"` (title optional).
var definition = {
  name: 'definition',
  tokenize: tokenizeDefinition
}
export default definition
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import types from '../constant/types.mjs'
import normalizeIdentifier from '../util/normalize-identifier.mjs'
import destinationFactory from './factory-destination.mjs'
import labelFactory from './factory-label.mjs'
import spaceFactory from './factory-space.mjs'
import whitespaceFactory from './factory-whitespace.mjs'
import titleFactory from './factory-title.mjs'
var titleConstruct = {tokenize: tokenizeTitle, partial: true}
function tokenizeDefinition(effects, ok, nok) {
var self = this
var identifier
return start
function start(code) {
assert(code === codes.leftSquareBracket, 'expected `[`')
effects.enter(types.definition)
return labelFactory.call(
self,
effects,
labelAfter,
nok,
types.definitionLabel,
types.definitionLabelMarker,
types.definitionLabelString
)(code)
}
function labelAfter(code) {
identifier = normalizeIdentifier(
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
)
if (code === codes.colon) {
effects.enter(types.definitionMarker)
effects.consume(code)
effects.exit(types.definitionMarker)
// Note: blank lines cant exist in content.
return whitespaceFactory(
effects,
destinationFactory(
effects,
effects.attempt(
titleConstruct,
spaceFactory(effects, after, types.whitespace),
spaceFactory(effects, after, types.whitespace)
),
nok,
types.definitionDestination,
types.definitionDestinationLiteral,
types.definitionDestinationLiteralMarker,
types.definitionDestinationRaw,
types.definitionDestinationString
)
)
}
return nok(code)
}
function after(code) {
if (code === codes.eof || markdownLineEnding(code)) {
effects.exit(types.definition)
if (self.parser.defined.indexOf(identifier) < 0) {
self.parser.defined.push(identifier)
}
return ok(code)
}
return nok(code)
}
}
// Partial: whitespace, then a quoted or parenthesized title, then only
// whitespace until the end of the line.
function tokenizeTitle(effects, ok, nok) {
  return start
  function start(code) {
    return markdownLineEndingOrSpace(code)
      ? whitespaceFactory(effects, before)(code)
      : nok(code)
  }
  function before(code) {
    if (
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.leftParenthesis
    ) {
      return titleFactory(
        effects,
        spaceFactory(effects, after, types.whitespace),
        nok,
        types.definitionTitle,
        types.definitionTitleMarker,
        types.definitionTitleString
      )(code)
    }
    return nok(code)
  }
  function after(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}

View File

@@ -0,0 +1,145 @@
'use strict'
var asciiControl = require('../character/ascii-control.js')
var codes = require('../character/codes.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
// Factory to parse a destination: either an enclosed `<…>` literal (with
// backslash escapes) or a raw destination with balanced parentheses nested
// at most `max` deep.
// eslint-disable-next-line max-params
function destinationFactory(
  effects,
  ok,
  nok,
  type,
  literalType,
  literalMarkerType,
  rawType,
  stringType,
  max
) {
  var limit = max || Infinity
  var balance = 0
  return start
  function start(code) {
    // `<` opens an enclosed (literal) destination.
    if (code === codes.lessThan) {
      effects.enter(type)
      effects.enter(literalType)
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      return destinationEnclosedBefore
    }
    // Control characters and `)` cannot start a raw destination.
    if (asciiControl(code) || code === codes.rightParenthesis) {
      return nok(code)
    }
    effects.enter(type)
    effects.enter(rawType)
    effects.enter(stringType)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return destinationRaw(code)
  }
  function destinationEnclosedBefore(code) {
    // `>` right away: an empty enclosed destination.
    if (code === codes.greaterThan) {
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      effects.exit(literalType)
      effects.exit(type)
      return ok
    }
    effects.enter(stringType)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return destinationEnclosed(code)
  }
  function destinationEnclosed(code) {
    if (code === codes.greaterThan) {
      effects.exit(types.chunkString)
      effects.exit(stringType)
      return destinationEnclosedBefore(code)
    }
    // No EOF, `<`, or line endings inside an enclosed destination.
    if (
      code === codes.eof ||
      code === codes.lessThan ||
      markdownLineEnding(code)
    ) {
      return nok(code)
    }
    effects.consume(code)
    return code === codes.backslash
      ? destinationEnclosedEscape
      : destinationEnclosed
  }
  // After a backslash: `<`, `>`, and `\` are escaped.
  function destinationEnclosedEscape(code) {
    if (
      code === codes.lessThan ||
      code === codes.greaterThan ||
      code === codes.backslash
    ) {
      effects.consume(code)
      return destinationEnclosed
    }
    return destinationEnclosed(code)
  }
  function destinationRaw(code) {
    // `(` nests; too-deep nesting fails.
    if (code === codes.leftParenthesis) {
      if (++balance > limit) return nok(code)
      effects.consume(code)
      return destinationRaw
    }
    // `)` closes a nested paren, or — when not nested — the destination.
    if (code === codes.rightParenthesis) {
      if (!balance--) {
        effects.exit(types.chunkString)
        effects.exit(stringType)
        effects.exit(rawType)
        effects.exit(type)
        return ok(code)
      }
      effects.consume(code)
      return destinationRaw
    }
    // EOF or whitespace ends the destination — unless parens are unbalanced.
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      if (balance) return nok(code)
      effects.exit(types.chunkString)
      effects.exit(stringType)
      effects.exit(rawType)
      effects.exit(type)
      return ok(code)
    }
    if (asciiControl(code)) return nok(code)
    effects.consume(code)
    return code === codes.backslash ? destinationRawEscape : destinationRaw
  }
  // After a backslash: `(`, `)`, and `\` are escaped.
  function destinationRawEscape(code) {
    if (
      code === codes.leftParenthesis ||
      code === codes.rightParenthesis ||
      code === codes.backslash
    ) {
      effects.consume(code)
      return destinationRaw
    }
    return destinationRaw(code)
  }
}
module.exports = destinationFactory

View File

@@ -0,0 +1,143 @@
export default destinationFactory
import asciiControl from '../character/ascii-control.mjs'
import codes from '../character/codes.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
// Factory to parse a destination: either an enclosed `<…>` literal (with
// backslash escapes) or a raw destination with balanced parentheses nested
// at most `max` deep.
// eslint-disable-next-line max-params
function destinationFactory(
  effects,
  ok,
  nok,
  type,
  literalType,
  literalMarkerType,
  rawType,
  stringType,
  max
) {
  var limit = max || Infinity
  var balance = 0
  return start
  function start(code) {
    // `<` opens an enclosed (literal) destination.
    if (code === codes.lessThan) {
      effects.enter(type)
      effects.enter(literalType)
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      return destinationEnclosedBefore
    }
    // Control characters and `)` cannot start a raw destination.
    if (asciiControl(code) || code === codes.rightParenthesis) {
      return nok(code)
    }
    effects.enter(type)
    effects.enter(rawType)
    effects.enter(stringType)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return destinationRaw(code)
  }
  function destinationEnclosedBefore(code) {
    // `>` right away: an empty enclosed destination.
    if (code === codes.greaterThan) {
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      effects.exit(literalType)
      effects.exit(type)
      return ok
    }
    effects.enter(stringType)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return destinationEnclosed(code)
  }
  function destinationEnclosed(code) {
    if (code === codes.greaterThan) {
      effects.exit(types.chunkString)
      effects.exit(stringType)
      return destinationEnclosedBefore(code)
    }
    // No EOF, `<`, or line endings inside an enclosed destination.
    if (
      code === codes.eof ||
      code === codes.lessThan ||
      markdownLineEnding(code)
    ) {
      return nok(code)
    }
    effects.consume(code)
    return code === codes.backslash
      ? destinationEnclosedEscape
      : destinationEnclosed
  }
  // After a backslash: `<`, `>`, and `\` are escaped.
  function destinationEnclosedEscape(code) {
    if (
      code === codes.lessThan ||
      code === codes.greaterThan ||
      code === codes.backslash
    ) {
      effects.consume(code)
      return destinationEnclosed
    }
    return destinationEnclosed(code)
  }
  function destinationRaw(code) {
    // `(` nests; too-deep nesting fails.
    if (code === codes.leftParenthesis) {
      if (++balance > limit) return nok(code)
      effects.consume(code)
      return destinationRaw
    }
    // `)` closes a nested paren, or — when not nested — the destination.
    if (code === codes.rightParenthesis) {
      if (!balance--) {
        effects.exit(types.chunkString)
        effects.exit(stringType)
        effects.exit(rawType)
        effects.exit(type)
        return ok(code)
      }
      effects.consume(code)
      return destinationRaw
    }
    // EOF or whitespace ends the destination — unless parens are unbalanced.
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      if (balance) return nok(code)
      effects.exit(types.chunkString)
      effects.exit(stringType)
      effects.exit(rawType)
      effects.exit(type)
      return ok(code)
    }
    if (asciiControl(code)) return nok(code)
    effects.consume(code)
    return code === codes.backslash ? destinationRawEscape : destinationRaw
  }
  // After a backslash: `(`, `)`, and `\` are escaped.
  function destinationRawEscape(code) {
    if (
      code === codes.leftParenthesis ||
      code === codes.rightParenthesis ||
      code === codes.backslash
    ) {
      effects.consume(code)
      return destinationRaw
    }
    return destinationRaw(code)
  }
}

102
node_modules/micromark/lib/tokenize/factory-label.js generated vendored Normal file
View File

@@ -0,0 +1,102 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
// Normalize a required module so it is always read through `.default`,
// mirroring how bundlers reconcile ESM and CJS interop.
function _interopDefaultLegacy(e) {
  var looksLikeEsModule = e !== null && typeof e === 'object' && 'default' in e
  if (looksLikeEsModule) {
    return e
  }
  return {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// eslint-disable-next-line max-params
// Tokenize a link/image label: `[` + at most 999 characters + `]`.
// `type`, `markerType`, and `stringType` name the tokens to emit.
function labelFactory(effects, ok, nok, type, markerType, stringType) {
  var self = this
  // Number of characters seen inside the brackets.
  var size = 0
  // Whether a non-whitespace character was seen (empty labels are invalid).
  var data
  return start
  // Open the label at `[`.
  function start(code) {
    assert__default['default'](code === codes.leftSquareBracket, 'expected `[`')
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    effects.enter(stringType)
    return atBreak
  }
  // At a significant position: EOF, a bracket, or a line ending.
  function atBreak(code) {
    if (
      code === codes.eof ||
      code === codes.leftSquareBracket ||
      (code === codes.rightSquareBracket && !data) ||
      /* c8 ignore next */
      (code === codes.caret &&
        /* c8 ignore next */
        !size &&
        /* c8 ignore next */
        '_hiddenFootnoteSupport' in self.parser.constructs) ||
      size > constants.linkReferenceSizeMax
    ) {
      return nok(code)
    }
    // `]` with content closes the label.
    if (code === codes.rightSquareBracket) {
      effects.exit(stringType)
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }
    // Line endings are allowed inside a label.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return atBreak
    }
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return label(code)
  }
  // Inside label text.
  function label(code) {
    if (
      code === codes.eof ||
      code === codes.leftSquareBracket ||
      code === codes.rightSquareBracket ||
      markdownLineEnding(code) ||
      size++ > constants.linkReferenceSizeMax
    ) {
      effects.exit(types.chunkString)
      return atBreak(code)
    }
    effects.consume(code)
    data = data || !markdownSpace(code)
    return code === codes.backslash ? labelEscape : label
  }
  // After `\` in label text: `[`, `\`, and `]` are consumed as escapes.
  function labelEscape(code) {
    if (
      code === codes.leftSquareBracket ||
      code === codes.backslash ||
      code === codes.rightSquareBracket
    ) {
      effects.consume(code)
      size++
      return label
    }
    return label(code)
  }
}
module.exports = labelFactory

94
node_modules/micromark/lib/tokenize/factory-label.mjs generated vendored Normal file
View File

@@ -0,0 +1,94 @@
export default labelFactory
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
// eslint-disable-next-line max-params
// Tokenize a link/image label: `[` + at most 999 characters + `]`.
// `type`, `markerType`, and `stringType` name the tokens to emit.
function labelFactory(effects, ok, nok, type, markerType, stringType) {
  var self = this
  // Number of characters seen inside the brackets.
  var size = 0
  // Whether a non-whitespace character was seen (empty labels are invalid).
  var data
  return start
  // Open the label at `[`.
  function start(code) {
    assert(code === codes.leftSquareBracket, 'expected `[`')
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    effects.enter(stringType)
    return atBreak
  }
  // At a significant position: EOF, a bracket, or a line ending.
  function atBreak(code) {
    if (
      code === codes.eof ||
      code === codes.leftSquareBracket ||
      (code === codes.rightSquareBracket && !data) ||
      /* c8 ignore next */
      (code === codes.caret &&
        /* c8 ignore next */
        !size &&
        /* c8 ignore next */
        '_hiddenFootnoteSupport' in self.parser.constructs) ||
      size > constants.linkReferenceSizeMax
    ) {
      return nok(code)
    }
    // `]` with content closes the label.
    if (code === codes.rightSquareBracket) {
      effects.exit(stringType)
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }
    // Line endings are allowed inside a label.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return atBreak
    }
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return label(code)
  }
  // Inside label text.
  function label(code) {
    if (
      code === codes.eof ||
      code === codes.leftSquareBracket ||
      code === codes.rightSquareBracket ||
      markdownLineEnding(code) ||
      size++ > constants.linkReferenceSizeMax
    ) {
      effects.exit(types.chunkString)
      return atBreak(code)
    }
    effects.consume(code)
    data = data || !markdownSpace(code)
    return code === codes.backslash ? labelEscape : label
  }
  // After `\` in label text: `[`, `\`, and `]` are consumed as escapes.
  function labelEscape(code) {
    if (
      code === codes.leftSquareBracket ||
      code === codes.backslash ||
      code === codes.rightSquareBracket
    ) {
      effects.consume(code)
      size++
      return label
    }
    return label(code)
  }
}

31
node_modules/micromark/lib/tokenize/factory-space.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
'use strict'
var markdownSpace = require('../character/markdown-space.js')
// Tokenize a run of markdown spaces/tabs as one token of `type`.
// When `max` is given, at most `max - 1` characters are consumed after
// the first check; without it the run is unbounded.
function spaceFactory(effects, ok, type, max) {
  var remaining = max ? max - 1 : Infinity
  var consumed = 0
  return before
  // Open the token only when the first character is whitespace.
  function before(code) {
    if (!markdownSpace(code)) {
      return ok(code)
    }
    effects.enter(type)
    return inside(code)
  }
  // Eat whitespace while under the limit, then close the token.
  function inside(code) {
    if (markdownSpace(code) && consumed++ < remaining) {
      effects.consume(code)
      return inside
    }
    effects.exit(type)
    return ok(code)
  }
}
module.exports = spaceFactory

29
node_modules/micromark/lib/tokenize/factory-space.mjs generated vendored Normal file
View File

@@ -0,0 +1,29 @@
export default spaceFactory
import markdownSpace from '../character/markdown-space.mjs'
// Tokenize a run of markdown spaces/tabs as one token of `type`.
// When `max` is given, at most `max - 1` characters are consumed after
// the first check; without it the run is unbounded.
function spaceFactory(effects, ok, type, max) {
  var remaining = max ? max - 1 : Infinity
  var consumed = 0
  return before
  // Open the token only when the first character is whitespace.
  function before(code) {
    if (!markdownSpace(code)) {
      return ok(code)
    }
    effects.enter(type)
    return inside(code)
  }
  // Eat whitespace while under the limit, then close the token.
  function inside(code) {
    if (markdownSpace(code) && consumed++ < remaining) {
      effects.consume(code)
      return inside
    }
    effects.exit(type)
    return ok(code)
  }
}

92
node_modules/micromark/lib/tokenize/factory-title.js generated vendored Normal file
View File

@@ -0,0 +1,92 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// eslint-disable-next-line max-params
// Tokenize a title in `"…"`, `'…'`, or `(…)`.
// `type`, `markerType`, and `stringType` name the tokens to emit.
function titleFactory(effects, ok, nok, type, markerType, stringType) {
  // Closing marker: `)` when opened with `(`, otherwise the opening quote.
  var marker
  return start
  // Open the title at its marker.
  function start(code) {
    assert__default['default'](
      code === codes.quotationMark ||
        code === codes.apostrophe ||
        code === codes.leftParenthesis,
      'expected `"`, `\'`, or `(`'
    )
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    marker = code === codes.leftParenthesis ? codes.rightParenthesis : code
    return atFirstTitleBreak
  }
  // Directly after the opening marker: the title may be empty.
  function atFirstTitleBreak(code) {
    if (code === marker) {
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }
    effects.enter(stringType)
    return atTitleBreak(code)
  }
  // At a significant position inside the title string.
  function atTitleBreak(code) {
    if (code === marker) {
      effects.exit(stringType)
      return atFirstTitleBreak(marker)
    }
    if (code === codes.eof) {
      return nok(code)
    }
    // Note: blank lines can't exist in content.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return factorySpace(effects, atTitleBreak, types.linePrefix)
    }
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return title(code)
  }
  // Inside title text.
  function title(code) {
    if (code === marker || code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString)
      return atTitleBreak(code)
    }
    effects.consume(code)
    return code === codes.backslash ? titleEscape : title
  }
  // After `\`: the marker and `\` itself are consumed as escapes.
  function titleEscape(code) {
    if (code === marker || code === codes.backslash) {
      effects.consume(code)
      return title
    }
    return title(code)
  }
}
module.exports = titleFactory

84
node_modules/micromark/lib/tokenize/factory-title.mjs generated vendored Normal file
View File

@@ -0,0 +1,84 @@
export default titleFactory
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// eslint-disable-next-line max-params
// Tokenize a title in `"…"`, `'…'`, or `(…)`.
// `type`, `markerType`, and `stringType` name the tokens to emit.
function titleFactory(effects, ok, nok, type, markerType, stringType) {
  // Closing marker: `)` when opened with `(`, otherwise the opening quote.
  var marker
  return start
  // Open the title at its marker.
  function start(code) {
    assert(
      code === codes.quotationMark ||
        code === codes.apostrophe ||
        code === codes.leftParenthesis,
      'expected `"`, `\'`, or `(`'
    )
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    marker = code === codes.leftParenthesis ? codes.rightParenthesis : code
    return atFirstTitleBreak
  }
  // Directly after the opening marker: the title may be empty.
  function atFirstTitleBreak(code) {
    if (code === marker) {
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }
    effects.enter(stringType)
    return atTitleBreak(code)
  }
  // At a significant position inside the title string.
  function atTitleBreak(code) {
    if (code === marker) {
      effects.exit(stringType)
      return atFirstTitleBreak(marker)
    }
    if (code === codes.eof) {
      return nok(code)
    }
    // Note: blank lines can't exist in content.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return spaceFactory(effects, atTitleBreak, types.linePrefix)
    }
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return title(code)
  }
  // Inside title text.
  function title(code) {
    if (code === marker || code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString)
      return atTitleBreak(code)
    }
    effects.consume(code)
    return code === codes.backslash ? titleEscape : title
  }
  // After `\`: the marker and `\` itself are consumed as escapes.
  function titleEscape(code) {
    if (code === marker || code === codes.backslash) {
      effects.consume(code)
      return title
    }
    return title(code)
  }
}
}

View File

@@ -0,0 +1,34 @@
'use strict'
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownSpace = require('../character/markdown-space.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
// Tokenize arbitrary markdown whitespace: any mix of line endings and
// space runs. Spaces before the first line ending form a `lineSuffix`;
// spaces after one form a `linePrefix`.
function whitespaceFactory(effects, ok) {
  var crossedEol
  return loop
  function loop(code) {
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      crossedEol = true
      return loop
    }
    if (!markdownSpace(code)) {
      return ok(code)
    }
    var runType = crossedEol ? types.linePrefix : types.lineSuffix
    return factorySpace(effects, loop, runType)(code)
  }
}
module.exports = whitespaceFactory

View File

@@ -0,0 +1,32 @@
export default whitespaceFactory
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// Tokenize arbitrary markdown whitespace: any mix of line endings and
// space runs. Spaces before the first line ending form a `lineSuffix`;
// spaces after one form a `linePrefix`.
function whitespaceFactory(effects, ok) {
  var crossedEol
  return loop
  function loop(code) {
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      crossedEol = true
      return loop
    }
    if (!markdownSpace(code)) {
      return ok(code)
    }
    var runType = crossedEol ? types.linePrefix : types.lineSuffix
    return spaceFactory(effects, loop, runType)(code)
  }
}
}

View File

@@ -0,0 +1,41 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
// Wrap a required module so it is always read through `.default`,
// mirroring how bundlers reconcile ESM and CJS interop.
function _interopDefaultLegacy(e) {
  return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// Construct for hard line breaks written as `\` before a line ending (text).
var hardBreakEscape = {
  name: 'hardBreakEscape',
  tokenize: tokenizeHardBreakEscape
}
// Tokenize a `\` directly followed by a line ending as a hard break.
function tokenizeHardBreakEscape(effects, ok, nok) {
  return begin
  // Consume the `\` that may start a hard break.
  function begin(code) {
    assert__default['default'](code === codes.backslash, 'expected `\\`')
    effects.enter(types.hardBreakEscape)
    effects.enter(types.escapeMarker)
    effects.consume(code)
    return afterBackslash
  }
  // Only a line ending after the `\` makes this a hard break.
  function afterBackslash(code) {
    if (!markdownLineEnding(code)) {
      return nok(code)
    }
    effects.exit(types.escapeMarker)
    effects.exit(types.hardBreakEscape)
    return ok(code)
  }
}
module.exports = hardBreakEscape

View File

@@ -0,0 +1,32 @@
// Construct for hard line breaks written as `\` before a line ending (text).
var hardBreakEscape = {
  name: 'hardBreakEscape',
  tokenize: tokenizeHardBreakEscape
}
export default hardBreakEscape
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import types from '../constant/types.mjs'
// Tokenize a `\` directly followed by a line ending as a hard break.
function tokenizeHardBreakEscape(effects, ok, nok) {
  return begin
  // Consume the `\` that may start a hard break.
  function begin(code) {
    assert(code === codes.backslash, 'expected `\\`')
    effects.enter(types.hardBreakEscape)
    effects.enter(types.escapeMarker)
    effects.consume(code)
    return afterBackslash
  }
  // Only a line ending after the `\` makes this a hard break.
  function afterBackslash(code) {
    if (!markdownLineEnding(code)) {
      return nok(code)
    }
    effects.exit(types.escapeMarker)
    effects.exit(types.hardBreakEscape)
    return ok(code)
  }
}

151
node_modules/micromark/lib/tokenize/heading-atx.js generated vendored Normal file
View File

@@ -0,0 +1,151 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var chunkedSplice = require('../util/chunked-splice.js')
var factorySpace = require('./factory-space.js')
// Wrap a required module so it is always read through `.default`,
// mirroring how bundlers reconcile ESM and CJS interop.
function _interopDefaultLegacy(e) {
  return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// Construct for `#` (ATX) headings (flow).
var headingAtx = {
  name: 'headingAtx',
  tokenize: tokenizeHeadingAtx,
  resolve: resolveHeadingAtx
}
// Merge everything between the opening and closing `#` sequences of an
// ATX heading into one `atxHeadingText` token wrapping a `chunkText`.
function resolveHeadingAtx(events, context) {
  var contentEnd = events.length - 2
  var contentStart = 3
  var content
  var text
  // Prefix whitespace, part of the opening.
  if (events[contentStart][1].type === types.whitespace) {
    contentStart += 2
  }
  // Suffix whitespace, part of the closing.
  if (
    contentEnd - 2 > contentStart &&
    events[contentEnd][1].type === types.whitespace
  ) {
    contentEnd -= 2
  }
  // A closing sequence (and the whitespace before it) is not content.
  if (
    events[contentEnd][1].type === types.atxHeadingSequence &&
    (contentStart === contentEnd - 1 ||
      (contentEnd - 4 > contentStart &&
        events[contentEnd - 2][1].type === types.whitespace))
  ) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4
  }
  // Replace the remaining events with the merged content + text tokens.
  if (contentEnd > contentStart) {
    content = {
      type: types.atxHeadingText,
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    }
    text = {
      type: types.chunkText,
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: constants.contentTypeText
    }
    chunkedSplice(events, contentStart, contentEnd - contentStart + 1, [
      ['enter', content, context],
      ['enter', text, context],
      ['exit', text, context],
      ['exit', content, context]
    ])
  }
  return events
}
// Tokenize an ATX heading: 1-6 `#`s, optional text, optional closing `#`s.
function tokenizeHeadingAtx(effects, ok, nok) {
  var self = this
  // Number of `#`s in the opening sequence.
  var size = 0
  return start
  function start(code) {
    assert__default['default'](code === codes.numberSign, 'expected `#`')
    effects.enter(types.atxHeading)
    effects.enter(types.atxHeadingSequence)
    return fenceOpenInside(code)
  }
  // Inside the opening sequence: at most 6 `#`s, then whitespace or EOL.
  function fenceOpenInside(code) {
    if (
      code === codes.numberSign &&
      size++ < constants.atxHeadingOpeningFenceSizeMax
    ) {
      effects.consume(code)
      return fenceOpenInside
    }
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      effects.exit(types.atxHeadingSequence)
      return self.interrupt ? ok(code) : headingBreak(code)
    }
    return nok(code)
  }
  // Between pieces of the heading: sequences, whitespace, or text.
  function headingBreak(code) {
    if (code === codes.numberSign) {
      effects.enter(types.atxHeadingSequence)
      return sequence(code)
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.atxHeading)
      return ok(code)
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, headingBreak, types.whitespace)(code)
    }
    effects.enter(types.atxHeadingText)
    return data(code)
  }
  // A further run of `#`s (potentially the closing sequence).
  function sequence(code) {
    if (code === codes.numberSign) {
      effects.consume(code)
      return sequence
    }
    effects.exit(types.atxHeadingSequence)
    return headingBreak(code)
  }
  // Heading text up to EOF, `#`, or whitespace.
  function data(code) {
    if (
      code === codes.eof ||
      code === codes.numberSign ||
      markdownLineEndingOrSpace(code)
    ) {
      effects.exit(types.atxHeadingText)
      return headingBreak(code)
    }
    effects.consume(code)
    return data
  }
}
module.exports = headingAtx

142
node_modules/micromark/lib/tokenize/heading-atx.mjs generated vendored Normal file
View File

@@ -0,0 +1,142 @@
// Construct for `#` (ATX) headings (flow).
var headingAtx = {
  name: 'headingAtx',
  tokenize: tokenizeHeadingAtx,
  resolve: resolveHeadingAtx
}
export default headingAtx
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import chunkedSplice from '../util/chunked-splice.mjs'
import spaceFactory from './factory-space.mjs'
// Merge everything between the opening and closing `#` sequences of an
// ATX heading into one `atxHeadingText` token wrapping a `chunkText`.
function resolveHeadingAtx(events, context) {
  var contentEnd = events.length - 2
  var contentStart = 3
  var content
  var text
  // Prefix whitespace, part of the opening.
  if (events[contentStart][1].type === types.whitespace) {
    contentStart += 2
  }
  // Suffix whitespace, part of the closing.
  if (
    contentEnd - 2 > contentStart &&
    events[contentEnd][1].type === types.whitespace
  ) {
    contentEnd -= 2
  }
  // A closing sequence (and the whitespace before it) is not content.
  if (
    events[contentEnd][1].type === types.atxHeadingSequence &&
    (contentStart === contentEnd - 1 ||
      (contentEnd - 4 > contentStart &&
        events[contentEnd - 2][1].type === types.whitespace))
  ) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4
  }
  // Replace the remaining events with the merged content + text tokens.
  if (contentEnd > contentStart) {
    content = {
      type: types.atxHeadingText,
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    }
    text = {
      type: types.chunkText,
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: constants.contentTypeText
    }
    chunkedSplice(events, contentStart, contentEnd - contentStart + 1, [
      ['enter', content, context],
      ['enter', text, context],
      ['exit', text, context],
      ['exit', content, context]
    ])
  }
  return events
}
// Tokenize an ATX heading: 1-6 `#`s, optional text, optional closing `#`s.
function tokenizeHeadingAtx(effects, ok, nok) {
  var self = this
  // Number of `#`s in the opening sequence.
  var size = 0
  return start
  function start(code) {
    assert(code === codes.numberSign, 'expected `#`')
    effects.enter(types.atxHeading)
    effects.enter(types.atxHeadingSequence)
    return fenceOpenInside(code)
  }
  // Inside the opening sequence: at most 6 `#`s, then whitespace or EOL.
  function fenceOpenInside(code) {
    if (
      code === codes.numberSign &&
      size++ < constants.atxHeadingOpeningFenceSizeMax
    ) {
      effects.consume(code)
      return fenceOpenInside
    }
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      effects.exit(types.atxHeadingSequence)
      return self.interrupt ? ok(code) : headingBreak(code)
    }
    return nok(code)
  }
  // Between pieces of the heading: sequences, whitespace, or text.
  function headingBreak(code) {
    if (code === codes.numberSign) {
      effects.enter(types.atxHeadingSequence)
      return sequence(code)
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.atxHeading)
      return ok(code)
    }
    if (markdownSpace(code)) {
      return spaceFactory(effects, headingBreak, types.whitespace)(code)
    }
    effects.enter(types.atxHeadingText)
    return data(code)
  }
  // A further run of `#`s (potentially the closing sequence).
  function sequence(code) {
    if (code === codes.numberSign) {
      effects.consume(code)
      return sequence
    }
    effects.exit(types.atxHeadingSequence)
    return headingBreak(code)
  }
  // Heading text up to EOF, `#`, or whitespace.
  function data(code) {
    if (
      code === codes.eof ||
      code === codes.numberSign ||
      markdownLineEndingOrSpace(code)
    ) {
      effects.exit(types.atxHeadingText)
      return headingBreak(code)
    }
    effects.consume(code)
    return data
  }
}

513
node_modules/micromark/lib/tokenize/html-flow.js generated vendored Normal file
View File

@@ -0,0 +1,513 @@
'use strict'
var assert = require('assert')
var asciiAlpha = require('../character/ascii-alpha.js')
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var fromCharCode = require('../constant/from-char-code.js')
var htmlBlockNames = require('../constant/html-block-names.js')
var htmlRawNames = require('../constant/html-raw-names.js')
var types = require('../constant/types.js')
var partialBlankLine = require('./partial-blank-line.js')
// Wrap a required module so it is always read through `.default`,
// mirroring how bundlers reconcile ESM and CJS interop.
function _interopDefaultLegacy(e) {
  return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// Construct for HTML (flow). `concrete: true` means its content cannot
// be interrupted by other flow constructs while it continues.
var htmlFlow = {
  name: 'htmlFlow',
  tokenize: tokenizeHtmlFlow,
  resolveTo: resolveToHtmlFlow,
  concrete: true
}
// Partial construct used to look ahead for a blank line after basic or
// complete HTML, which closes those kinds.
var nextBlankConstruct = {tokenize: tokenizeNextBlank, partial: true}
// Move the start of an `htmlFlow` token back over a preceding line
// prefix (indentation), which belongs to the HTML rather than the flow.
function resolveToHtmlFlow(events) {
  var index = events.length
  // Find the most recent `htmlFlow` enter event.
  while (index--) {
    if (
      events[index][0] === 'enter' &&
      events[index][1].type === types.htmlFlow
    ) {
      break
    }
  }
  if (index > 1 && events[index - 2][1].type === types.linePrefix) {
    // Add the prefix start to the HTML token.
    events[index][1].start = events[index - 2][1].start
    // Add the prefix start to the HTML line token.
    events[index + 1][1].start = events[index - 2][1].start
    // Remove the line prefix.
    events.splice(index - 2, 2)
  }
  return events
}
// Tokenize HTML (flow): determine one of the seven CommonMark HTML block
// kinds from the opening tag, then consume lines until that kind's
// closing condition.
function tokenizeHtmlFlow(effects, ok, nok) {
  var self = this
  // Which of the CommonMark HTML block kinds (1-7) this is.
  var kind
  // Whether the block started with an opening (not closing) tag.
  var startTag
  // Collected tag name (or CDATA opening string being matched).
  var buffer
  // Position into `buffer` while matching the CDATA opening string.
  var index
  // Quote marker (`"` or `'`) of the current attribute value.
  var marker
  return start
  // At the initial `<`.
  function start(code) {
    assert__default['default'](code === codes.lessThan, 'expected `<`')
    effects.enter(types.htmlFlow)
    effects.enter(types.htmlFlowData)
    effects.consume(code)
    return open
  }
  // After `<`: `!` (declaration-like), `/` (closing tag), `?`
  // (instruction), or a tag name.
  function open(code) {
    if (code === codes.exclamationMark) {
      effects.consume(code)
      return declarationStart
    }
    if (code === codes.slash) {
      effects.consume(code)
      return tagCloseStart
    }
    if (code === codes.questionMark) {
      effects.consume(code)
      kind = constants.htmlInstruction
      // While we're in an instruction instead of a declaration, we're on a `?`
      // right now, so we do need to search for `>`, similar to declarations.
      return self.interrupt ? ok : continuationDeclarationInside
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      buffer = fromCharCode(code)
      startTag = true
      return tagName
    }
    return nok(code)
  }
  // After `<!`: comment (`--`), CDATA (`[`), or declaration (letter).
  function declarationStart(code) {
    if (code === codes.dash) {
      effects.consume(code)
      kind = constants.htmlComment
      return commentOpenInside
    }
    if (code === codes.leftSquareBracket) {
      effects.consume(code)
      kind = constants.htmlCdata
      buffer = constants.cdataOpeningString
      index = 0
      return cdataOpenInside
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      kind = constants.htmlDeclaration
      return self.interrupt ? ok : continuationDeclarationInside
    }
    return nok(code)
  }
  // After `<!-`: a second `-` completes the comment opening.
  function commentOpenInside(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return self.interrupt ? ok : continuationDeclarationInside
    }
    return nok(code)
  }
  // Match the rest of `[CDATA[` character by character.
  function cdataOpenInside(code) {
    if (code === buffer.charCodeAt(index++)) {
      effects.consume(code)
      return index === buffer.length
        ? self.interrupt
          ? ok
          : continuation
        : cdataOpenInside
    }
    return nok(code)
  }
  // After `</`: a closing tag must start with a letter.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      buffer = fromCharCode(code)
      return tagName
    }
    return nok(code)
  }
  // Inside a tag name; on its end, classify the block kind.
  function tagName(code) {
    if (
      code === codes.eof ||
      code === codes.slash ||
      code === codes.greaterThan ||
      markdownLineEndingOrSpace(code)
    ) {
      if (
        code !== codes.slash &&
        startTag &&
        htmlRawNames.indexOf(buffer.toLowerCase()) > -1
      ) {
        kind = constants.htmlRaw
        return self.interrupt ? ok(code) : continuation(code)
      }
      if (htmlBlockNames.indexOf(buffer.toLowerCase()) > -1) {
        kind = constants.htmlBasic
        if (code === codes.slash) {
          effects.consume(code)
          return basicSelfClosing
        }
        return self.interrupt ? ok(code) : continuation(code)
      }
      kind = constants.htmlComplete
      // Do not support complete HTML when interrupting.
      return self.interrupt
        ? nok(code)
        : startTag
        ? completeAttributeNameBefore(code)
        : completeClosingTagAfter(code)
    }
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code)
      buffer += fromCharCode(code)
      return tagName
    }
    return nok(code)
  }
  // After `/` on a basic tag: only `>` is valid.
  function basicSelfClosing(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      return self.interrupt ? ok : continuation
    }
    return nok(code)
  }
  // After the name of a complete closing tag: optional spaces, then `>`.
  function completeClosingTagAfter(code) {
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeClosingTagAfter
    }
    return completeEnd(code)
  }
  // In a complete opening tag, before an attribute name (or the end).
  function completeAttributeNameBefore(code) {
    if (code === codes.slash) {
      effects.consume(code)
      return completeEnd
    }
    if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) {
      effects.consume(code)
      return completeAttributeName
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeNameBefore
    }
    return completeEnd(code)
  }
  // Inside an attribute name.
  function completeAttributeName(code) {
    if (
      code === codes.dash ||
      code === codes.dot ||
      code === codes.colon ||
      code === codes.underscore ||
      asciiAlphanumeric(code)
    ) {
      effects.consume(code)
      return completeAttributeName
    }
    return completeAttributeNameAfter(code)
  }
  // After an attribute name: optional `=` introduces a value.
  function completeAttributeNameAfter(code) {
    if (code === codes.equalsTo) {
      effects.consume(code)
      return completeAttributeValueBefore
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeNameAfter
    }
    return completeAttributeNameBefore(code)
  }
  // After `=`: a quoted or unquoted attribute value must follow.
  function completeAttributeValueBefore(code) {
    if (
      code === codes.eof ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.greaterThan ||
      code === codes.graveAccent
    ) {
      return nok(code)
    }
    if (code === codes.quotationMark || code === codes.apostrophe) {
      effects.consume(code)
      marker = code
      return completeAttributeValueQuoted
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeValueBefore
    }
    marker = undefined
    return completeAttributeValueUnquoted(code)
  }
  // Inside a quoted attribute value, up to the matching quote.
  function completeAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code)
      return completeAttributeValueQuotedAfter
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code)
    }
    effects.consume(code)
    return completeAttributeValueQuoted
  }
  // Inside an unquoted attribute value.
  function completeAttributeValueUnquoted(code) {
    if (
      code === codes.eof ||
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.greaterThan ||
      code === codes.graveAccent ||
      markdownLineEndingOrSpace(code)
    ) {
      return completeAttributeNameAfter(code)
    }
    effects.consume(code)
    return completeAttributeValueUnquoted
  }
  // Right after a closing quote: only `/`, `>`, or whitespace may follow.
  function completeAttributeValueQuotedAfter(code) {
    if (
      code === codes.slash ||
      code === codes.greaterThan ||
      markdownSpace(code)
    ) {
      return completeAttributeNameBefore(code)
    }
    return nok(code)
  }
  // The `>` closing a complete tag.
  function completeEnd(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      return completeAfter
    }
    return nok(code)
  }
  // After a complete tag: only trailing spaces, then EOF/EOL.
  function completeAfter(code) {
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAfter
    }
    return code === codes.eof || markdownLineEnding(code)
      ? continuation(code)
      : nok(code)
  }
  // Inside HTML content: watch for each kind's closing condition.
  function continuation(code) {
    if (code === codes.dash && kind === constants.htmlComment) {
      effects.consume(code)
      return continuationCommentInside
    }
    if (code === codes.lessThan && kind === constants.htmlRaw) {
      effects.consume(code)
      return continuationRawTagOpen
    }
    if (code === codes.greaterThan && kind === constants.htmlDeclaration) {
      effects.consume(code)
      return continuationClose
    }
    if (code === codes.questionMark && kind === constants.htmlInstruction) {
      effects.consume(code)
      return continuationDeclarationInside
    }
    if (code === codes.rightSquareBracket && kind === constants.htmlCdata) {
      effects.consume(code)
      return continuationCharacterDataInside
    }
    // Basic and complete HTML close at a following blank line.
    if (
      markdownLineEnding(code) &&
      (kind === constants.htmlBasic || kind === constants.htmlComplete)
    ) {
      return effects.check(
        nextBlankConstruct,
        continuationClose,
        continuationAtLineEnding
      )(code)
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      return continuationAtLineEnding(code)
    }
    effects.consume(code)
    return continuation
  }
  // Close the current data token at a line ending.
  function continuationAtLineEnding(code) {
    effects.exit(types.htmlFlowData)
    return htmlContinueStart(code)
  }
  // At the start of a continued HTML line (or EOF).
  function htmlContinueStart(code) {
    if (code === codes.eof) {
      return done(code)
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return htmlContinueStart
    }
    effects.enter(types.htmlFlowData)
    return continuation(code)
  }
  // In a comment, after `-`: a second `-` may start `-->`.
  function continuationCommentInside(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return continuationDeclarationInside
    }
    return continuation(code)
  }
  // In raw HTML, after `<`: a `/` may start a closing raw tag.
  function continuationRawTagOpen(code) {
    if (code === codes.slash) {
      effects.consume(code)
      buffer = ''
      return continuationRawEndTag
    }
    return continuation(code)
  }
  // Collect a potential raw closing tag name; `>` on a known raw name closes.
  function continuationRawEndTag(code) {
    if (
      code === codes.greaterThan &&
      htmlRawNames.indexOf(buffer.toLowerCase()) > -1
    ) {
      effects.consume(code)
      return continuationClose
    }
    if (asciiAlpha(code) && buffer.length < constants.htmlRawSizeMax) {
      effects.consume(code)
      buffer += fromCharCode(code)
      return continuationRawEndTag
    }
    return continuation(code)
  }
  // In CDATA, after `]`: a second `]` may start `]]>`.
  function continuationCharacterDataInside(code) {
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return continuationDeclarationInside
    }
    return continuation(code)
  }
  // A `>` here closes comments, instructions, declarations, and CDATA.
  function continuationDeclarationInside(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      return continuationClose
    }
    return continuation(code)
  }
  // Consume the rest of the closing line, then finish at EOF/EOL.
  function continuationClose(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.htmlFlowData)
      return done(code)
    }
    effects.consume(code)
    return continuationClose
  }
  function done(code) {
    effects.exit(types.htmlFlow)
    return ok(code)
  }
}
// Partial tokenizer: succeed if the line after the current line ending
// is blank (used to close basic/complete HTML blocks).
function tokenizeNextBlank(effects, ok, nok) {
  return start
  function start(code) {
    assert__default['default'](
      markdownLineEnding(code),
      'expected a line ending'
    )
    effects.exit(types.htmlFlowData)
    effects.enter(types.lineEndingBlank)
    effects.consume(code)
    effects.exit(types.lineEndingBlank)
    return effects.attempt(partialBlankLine, ok, nok)
  }
}
module.exports = htmlFlow

498
node_modules/micromark/lib/tokenize/html-flow.mjs generated vendored Normal file
View File

@@ -0,0 +1,498 @@
// Construct for HTML (flow). `concrete: true` means its content cannot
// be interrupted by other flow constructs while it continues.
var htmlFlow = {
  name: 'htmlFlow',
  tokenize: tokenizeHtmlFlow,
  resolveTo: resolveToHtmlFlow,
  concrete: true
}
export default htmlFlow
import assert from 'assert'
import asciiAlpha from '../character/ascii-alpha.mjs'
import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import fromCharCode from '../constant/from-char-code.mjs'
import basics from '../constant/html-block-names.mjs'
import raws from '../constant/html-raw-names.mjs'
import types from '../constant/types.mjs'
import blank from './partial-blank-line.mjs'
// Partial construct used to look ahead for a blank line after basic or
// complete HTML, which closes those kinds.
var nextBlankConstruct = {tokenize: tokenizeNextBlank, partial: true}
// Move the start of an `htmlFlow` token back over a preceding line
// prefix (indentation), which belongs to the HTML rather than the flow.
function resolveToHtmlFlow(events) {
  var index = events.length
  // Find the most recent `htmlFlow` enter event.
  while (index--) {
    if (
      events[index][0] === 'enter' &&
      events[index][1].type === types.htmlFlow
    ) {
      break
    }
  }
  if (index > 1 && events[index - 2][1].type === types.linePrefix) {
    // Add the prefix start to the HTML token.
    events[index][1].start = events[index - 2][1].start
    // Add the prefix start to the HTML line token.
    events[index + 1][1].start = events[index - 2][1].start
    // Remove the line prefix.
    events.splice(index - 2, 2)
  }
  return events
}
// Tokenize HTML (flow): raw HTML starting a block with `<…`.
// Which of the seven CommonMark kinds applies is decided while parsing the
// opening and stored in `kind`; how the construct continues and closes
// depends on that kind.
function tokenizeHtmlFlow(effects, ok, nok) {
  var self = this
  // Kind of construct (comment, instruction, declaration, CDATA, raw,
  // basic, or complete — see `constants.html*`).
  var kind
  // Whether a start tag (`<x`) rather than a closing tag (`</x`) was seen.
  var startTag
  // Collected tag name, or the expected `CDATA[` opening string.
  var buffer
  // Position in `buffer` while matching the CDATA opening string.
  var index
  // Closing marker of a quoted attribute value (`"` or `'`).
  var marker
  return start
  // Start of HTML (flow): `<` expected.
  function start(code) {
    assert(code === codes.lessThan, 'expected `<`')
    effects.enter(types.htmlFlow)
    effects.enter(types.htmlFlowData)
    effects.consume(code)
    return open
  }
  // After `<`: a tag name, `!` (declaration-ish), `/` (closing), or `?`.
  function open(code) {
    if (code === codes.exclamationMark) {
      effects.consume(code)
      return declarationStart
    }
    if (code === codes.slash) {
      effects.consume(code)
      return tagCloseStart
    }
    if (code === codes.questionMark) {
      effects.consume(code)
      kind = constants.htmlInstruction
      // While we're in an instruction instead of a declaration, we're on a `?`
      // right now, so we do need to search for `>`, similar to declarations.
      return self.interrupt ? ok : continuationDeclarationInside
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      buffer = fromCharCode(code)
      startTag = true
      return tagName
    }
    return nok(code)
  }
  // After `<!`: a comment (`--`), CDATA (`[`), or a declaration (letter).
  function declarationStart(code) {
    if (code === codes.dash) {
      effects.consume(code)
      kind = constants.htmlComment
      return commentOpenInside
    }
    if (code === codes.leftSquareBracket) {
      effects.consume(code)
      kind = constants.htmlCdata
      buffer = constants.cdataOpeningString
      index = 0
      return cdataOpenInside
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      kind = constants.htmlDeclaration
      return self.interrupt ? ok : continuationDeclarationInside
    }
    return nok(code)
  }
  // After `<!-`: a second `-` completes the comment opening.
  function commentOpenInside(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return self.interrupt ? ok : continuationDeclarationInside
    }
    return nok(code)
  }
  // Matching the remainder of `<![CDATA[` character by character.
  function cdataOpenInside(code) {
    if (code === buffer.charCodeAt(index++)) {
      effects.consume(code)
      return index === buffer.length
        ? self.interrupt
          ? ok
          : continuation
        : cdataOpenInside
    }
    return nok(code)
  }
  // After `</`: the first character of a closing tag name.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      buffer = fromCharCode(code)
      return tagName
    }
    return nok(code)
  }
  // In a tag name; when it ends, classify the construct as raw, basic,
  // or complete based on the collected name.
  function tagName(code) {
    if (
      code === codes.eof ||
      code === codes.slash ||
      code === codes.greaterThan ||
      markdownLineEndingOrSpace(code)
    ) {
      if (
        code !== codes.slash &&
        startTag &&
        raws.indexOf(buffer.toLowerCase()) > -1
      ) {
        kind = constants.htmlRaw
        return self.interrupt ? ok(code) : continuation(code)
      }
      if (basics.indexOf(buffer.toLowerCase()) > -1) {
        kind = constants.htmlBasic
        if (code === codes.slash) {
          effects.consume(code)
          return basicSelfClosing
        }
        return self.interrupt ? ok(code) : continuation(code)
      }
      kind = constants.htmlComplete
      // Do not support complete HTML when interrupting.
      return self.interrupt
        ? nok(code)
        : startTag
        ? completeAttributeNameBefore(code)
        : completeClosingTagAfter(code)
    }
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code)
      buffer += fromCharCode(code)
      return tagName
    }
    return nok(code)
  }
  // After `/` in a basic self-closing tag: only `>` may follow.
  function basicSelfClosing(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      return self.interrupt ? ok : continuation
    }
    return nok(code)
  }
  // In optional whitespace after a complete closing tag name.
  function completeClosingTagAfter(code) {
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeClosingTagAfter
    }
    return completeEnd(code)
  }
  // In a complete tag, before an attribute name (or `/` or `>`).
  function completeAttributeNameBefore(code) {
    if (code === codes.slash) {
      effects.consume(code)
      return completeEnd
    }
    if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) {
      effects.consume(code)
      return completeAttributeName
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeNameBefore
    }
    return completeEnd(code)
  }
  // In an attribute name.
  function completeAttributeName(code) {
    if (
      code === codes.dash ||
      code === codes.dot ||
      code === codes.colon ||
      code === codes.underscore ||
      asciiAlphanumeric(code)
    ) {
      effects.consume(code)
      return completeAttributeName
    }
    return completeAttributeNameAfter(code)
  }
  // After an attribute name: `=` starts a value; whitespace continues.
  function completeAttributeNameAfter(code) {
    if (code === codes.equalsTo) {
      effects.consume(code)
      return completeAttributeValueBefore
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeNameAfter
    }
    return completeAttributeNameBefore(code)
  }
  // After `=`: the start of an attribute value (quoted or unquoted).
  function completeAttributeValueBefore(code) {
    if (
      code === codes.eof ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.greaterThan ||
      code === codes.graveAccent
    ) {
      return nok(code)
    }
    if (code === codes.quotationMark || code === codes.apostrophe) {
      effects.consume(code)
      marker = code
      return completeAttributeValueQuoted
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAttributeValueBefore
    }
    marker = undefined
    return completeAttributeValueUnquoted(code)
  }
  // In a quoted attribute value, until the matching `marker`.
  function completeAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code)
      return completeAttributeValueQuotedAfter
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code)
    }
    effects.consume(code)
    return completeAttributeValueQuoted
  }
  // In an unquoted attribute value.
  function completeAttributeValueUnquoted(code) {
    if (
      code === codes.eof ||
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.greaterThan ||
      code === codes.graveAccent ||
      markdownLineEndingOrSpace(code)
    ) {
      return completeAttributeNameAfter(code)
    }
    effects.consume(code)
    return completeAttributeValueUnquoted
  }
  // After a quoted value: whitespace, `/`, or `>` must follow.
  function completeAttributeValueQuotedAfter(code) {
    if (
      code === codes.slash ||
      code === codes.greaterThan ||
      markdownSpace(code)
    ) {
      return completeAttributeNameBefore(code)
    }
    return nok(code)
  }
  // At the `>` closing a complete tag.
  function completeEnd(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      return completeAfter
    }
    return nok(code)
  }
  // After a complete tag: only whitespace until the line ending.
  function completeAfter(code) {
    if (markdownSpace(code)) {
      effects.consume(code)
      return completeAfter
    }
    return code === codes.eof || markdownLineEnding(code)
      ? continuation(code)
      : nok(code)
  }
  // In HTML content; watch for this kind's closing condition.
  function continuation(code) {
    if (code === codes.dash && kind === constants.htmlComment) {
      effects.consume(code)
      return continuationCommentInside
    }
    if (code === codes.lessThan && kind === constants.htmlRaw) {
      effects.consume(code)
      return continuationRawTagOpen
    }
    if (code === codes.greaterThan && kind === constants.htmlDeclaration) {
      effects.consume(code)
      return continuationClose
    }
    if (code === codes.questionMark && kind === constants.htmlInstruction) {
      effects.consume(code)
      return continuationDeclarationInside
    }
    if (code === codes.rightSquareBracket && kind === constants.htmlCdata) {
      effects.consume(code)
      return continuationCharacterDataInside
    }
    if (
      markdownLineEnding(code) &&
      (kind === constants.htmlBasic || kind === constants.htmlComplete)
    ) {
      // Basic and complete HTML close at the first blank line.
      return effects.check(
        nextBlankConstruct,
        continuationClose,
        continuationAtLineEnding
      )(code)
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      return continuationAtLineEnding(code)
    }
    effects.consume(code)
    return continuation
  }
  // At an EOL/EOF inside content: close the data token and continue.
  function continuationAtLineEnding(code) {
    effects.exit(types.htmlFlowData)
    return htmlContinueStart(code)
  }
  // At a line start inside the construct (or at EOF).
  function htmlContinueStart(code) {
    if (code === codes.eof) {
      return done(code)
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return htmlContinueStart
    }
    effects.enter(types.htmlFlowData)
    return continuation(code)
  }
  // After `-` in a comment: a second `-` may start `-->`.
  function continuationCommentInside(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return continuationDeclarationInside
    }
    return continuation(code)
  }
  // After `<` in raw HTML: `/` may start a closing raw tag.
  function continuationRawTagOpen(code) {
    if (code === codes.slash) {
      effects.consume(code)
      buffer = ''
      return continuationRawEndTag
    }
    return continuation(code)
  }
  // In a potential raw closing tag name (e.g., `</script>`).
  function continuationRawEndTag(code) {
    if (code === codes.greaterThan && raws.indexOf(buffer.toLowerCase()) > -1) {
      effects.consume(code)
      return continuationClose
    }
    if (asciiAlpha(code) && buffer.length < constants.htmlRawSizeMax) {
      effects.consume(code)
      buffer += fromCharCode(code)
      return continuationRawEndTag
    }
    return continuation(code)
  }
  // After `]` in CDATA: a second `]` may start `]]>`.
  function continuationCharacterDataInside(code) {
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return continuationDeclarationInside
    }
    return continuation(code)
  }
  // Before the `>` that closes comments, instructions, CDATA, declarations.
  function continuationDeclarationInside(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      return continuationClose
    }
    return continuation(code)
  }
  // After the closer: the rest of the line is still part of the construct.
  function continuationClose(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.htmlFlowData)
      return done(code)
    }
    effects.consume(code)
    return continuationClose
  }
  // Done: exit the HTML (flow) token.
  function done(code) {
    effects.exit(types.htmlFlow)
    return ok(code)
  }
}
// Guard construct: at a line ending, succeed (`ok`) when the following line
// is blank, fail (`nok`) otherwise. Used to close basic/complete HTML flow.
function tokenizeNextBlank(effects, ok, nok) {
  return start
  function start(code) {
    assert(markdownLineEnding(code), 'expected a line ending')
    // Close the current data chunk; the EOL belongs to the blank-ish ending.
    effects.exit(types.htmlFlowData)
    effects.enter(types.lineEndingBlank)
    effects.consume(code)
    effects.exit(types.lineEndingBlank)
    // Attempt the blank-line partial on the next line.
    return effects.attempt(blank, ok, nok)
  }
}

458
node_modules/micromark/lib/tokenize/html-text.js generated vendored Normal file
View File

@@ -0,0 +1,458 @@
'use strict'
var assert = require('assert')
var asciiAlpha = require('../character/ascii-alpha.js')
var asciiAlphanumeric = require('../character/ascii-alphanumeric.js')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
// Normalize a CommonJS export into the shape of an ES-module namespace:
// objects that already carry a `default` key pass through untouched;
// anything else is wrapped as the default export.
function _interopDefaultLegacy(e) {
  var isNamespaceLike = e && typeof e === 'object' && 'default' in e
  return isNamespaceLike ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// The HTML (text) construct: inline tags, comments, instructions,
// declarations, and CDATA in phrasing content.
var htmlText = {
  name: 'htmlText',
  tokenize: tokenizeHtmlText
}
// Tokenize HTML (text): a single inline HTML construct starting with `<`.
// Unlike HTML (flow), line endings are allowed inside (but blank lines are
// not); `returnState` remembers where to resume after an EOL.
function tokenizeHtmlText(effects, ok, nok) {
  var self = this
  // Closing marker of a quoted attribute value (`"` or `'`).
  var marker
  // Expected `CDATA[` opening string while matching it.
  var buffer
  // Position in `buffer`.
  var index
  // State to resume in after a line ending.
  var returnState
  return start
  // Start of HTML (text): `<` expected.
  function start(code) {
    assert__default['default'](code === codes.lessThan, 'expected `<`')
    effects.enter(types.htmlText)
    effects.enter(types.htmlTextData)
    effects.consume(code)
    return open
  }
  // After `<`: a tag name, `!`, `/`, or `?`.
  function open(code) {
    if (code === codes.exclamationMark) {
      effects.consume(code)
      return declarationOpen
    }
    if (code === codes.slash) {
      effects.consume(code)
      return tagCloseStart
    }
    if (code === codes.questionMark) {
      effects.consume(code)
      return instruction
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      return tagOpen
    }
    return nok(code)
  }
  // After `<!`: a comment, CDATA, or a declaration.
  function declarationOpen(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return commentOpen
    }
    if (code === codes.leftSquareBracket) {
      effects.consume(code)
      buffer = constants.cdataOpeningString
      index = 0
      return cdataOpen
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      return declaration
    }
    return nok(code)
  }
  // After `<!-`: a second `-` completes the comment opening.
  function commentOpen(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return commentStart
    }
    return nok(code)
  }
  // After `<!--`: a comment cannot start with `>` (or `->`, see next state).
  function commentStart(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return nok(code)
    }
    if (code === codes.dash) {
      effects.consume(code)
      return commentStartDash
    }
    return comment(code)
  }
  // After `<!---`: still cannot be followed directly by `>`.
  function commentStartDash(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return nok(code)
    }
    return comment(code)
  }
  // In comment content.
  function comment(code) {
    if (code === codes.eof) {
      return nok(code)
    }
    if (code === codes.dash) {
      effects.consume(code)
      return commentClose
    }
    if (markdownLineEnding(code)) {
      returnState = comment
      return atLineEnding(code)
    }
    effects.consume(code)
    return comment
  }
  // After `-` in a comment: a second `-` may start `-->`.
  function commentClose(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return end
    }
    return comment(code)
  }
  // Matching the remainder of `<![CDATA[`.
  function cdataOpen(code) {
    if (code === buffer.charCodeAt(index++)) {
      effects.consume(code)
      return index === buffer.length ? cdata : cdataOpen
    }
    return nok(code)
  }
  // In CDATA content.
  function cdata(code) {
    if (code === codes.eof) {
      return nok(code)
    }
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return cdataClose
    }
    if (markdownLineEnding(code)) {
      returnState = cdata
      return atLineEnding(code)
    }
    effects.consume(code)
    return cdata
  }
  // After `]` in CDATA: a second `]` may start `]]>`.
  function cdataClose(code) {
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return cdataEnd
    }
    return cdata(code)
  }
  // After `]]` in CDATA: `>` closes; more `]`s keep us here.
  function cdataEnd(code) {
    if (code === codes.greaterThan) {
      return end(code)
    }
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return cdataEnd
    }
    return cdata(code)
  }
  // In a declaration, until `>`.
  function declaration(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return end(code)
    }
    if (markdownLineEnding(code)) {
      returnState = declaration
      return atLineEnding(code)
    }
    effects.consume(code)
    return declaration
  }
  // In an instruction (`<?…?>`).
  function instruction(code) {
    if (code === codes.eof) {
      return nok(code)
    }
    if (code === codes.questionMark) {
      effects.consume(code)
      return instructionClose
    }
    if (markdownLineEnding(code)) {
      returnState = instruction
      return atLineEnding(code)
    }
    effects.consume(code)
    return instruction
  }
  // After `?` in an instruction: `>` closes it.
  function instructionClose(code) {
    return code === codes.greaterThan ? end(code) : instruction(code)
  }
  // After `</`: the first character of a closing tag name.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      return tagClose
    }
    return nok(code)
  }
  // In a closing tag name.
  function tagClose(code) {
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code)
      return tagClose
    }
    return tagCloseBetween(code)
  }
  // In optional whitespace after a closing tag name.
  function tagCloseBetween(code) {
    if (markdownLineEnding(code)) {
      returnState = tagCloseBetween
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagCloseBetween
    }
    return end(code)
  }
  // In an opening tag name.
  function tagOpen(code) {
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code)
      return tagOpen
    }
    if (
      code === codes.slash ||
      code === codes.greaterThan ||
      markdownLineEndingOrSpace(code)
    ) {
      return tagOpenBetween(code)
    }
    return nok(code)
  }
  // In an opening tag, between attributes.
  function tagOpenBetween(code) {
    if (code === codes.slash) {
      effects.consume(code)
      return end
    }
    if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) {
      effects.consume(code)
      return tagOpenAttributeName
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenBetween
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenBetween
    }
    return end(code)
  }
  // In an attribute name.
  function tagOpenAttributeName(code) {
    if (
      code === codes.dash ||
      code === codes.dot ||
      code === codes.colon ||
      code === codes.underscore ||
      asciiAlphanumeric(code)
    ) {
      effects.consume(code)
      return tagOpenAttributeName
    }
    return tagOpenAttributeNameAfter(code)
  }
  // After an attribute name: `=` starts a value; whitespace continues.
  function tagOpenAttributeNameAfter(code) {
    if (code === codes.equalsTo) {
      effects.consume(code)
      return tagOpenAttributeValueBefore
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeNameAfter
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenAttributeNameAfter
    }
    return tagOpenBetween(code)
  }
  // After `=`: the start of an attribute value (quoted or unquoted).
  function tagOpenAttributeValueBefore(code) {
    if (
      code === codes.eof ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.greaterThan ||
      code === codes.graveAccent
    ) {
      return nok(code)
    }
    if (code === codes.quotationMark || code === codes.apostrophe) {
      effects.consume(code)
      marker = code
      return tagOpenAttributeValueQuoted
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueBefore
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenAttributeValueBefore
    }
    effects.consume(code)
    marker = undefined
    return tagOpenAttributeValueUnquoted
  }
  // In a quoted attribute value, until the matching `marker`.
  function tagOpenAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code)
      return tagOpenAttributeValueQuotedAfter
    }
    if (code === codes.eof) {
      return nok(code)
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueQuoted
      return atLineEnding(code)
    }
    effects.consume(code)
    return tagOpenAttributeValueQuoted
  }
  // After a quoted value: whitespace, `/`, or `>` must follow.
  function tagOpenAttributeValueQuotedAfter(code) {
    if (
      code === codes.greaterThan ||
      code === codes.slash ||
      markdownLineEndingOrSpace(code)
    ) {
      return tagOpenBetween(code)
    }
    return nok(code)
  }
  // In an unquoted attribute value.
  function tagOpenAttributeValueUnquoted(code) {
    if (
      code === codes.eof ||
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.graveAccent
    ) {
      return nok(code)
    }
    if (code === codes.greaterThan || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code)
    }
    effects.consume(code)
    return tagOpenAttributeValueUnquoted
  }
  // We can't have blank lines in content, so no need to worry about empty
  // tokens.
  // Handle a line ending inside the construct; resumes in `returnState`
  // after the (stripped) line prefix on the next line.
  function atLineEnding(code) {
    assert__default['default'](returnState, 'expected return state')
    assert__default['default'](markdownLineEnding(code), 'expected eol')
    effects.exit(types.htmlTextData)
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return factorySpace(
      effects,
      afterPrefix,
      types.linePrefix,
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : constants.tabSize
    )
  }
  // After the line prefix: reopen data and resume where we left off.
  function afterPrefix(code) {
    effects.enter(types.htmlTextData)
    return returnState(code)
  }
  // At the `>` that closes the whole construct.
  function end(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      effects.exit(types.htmlTextData)
      effects.exit(types.htmlText)
      return ok
    }
    return nok(code)
  }
}
module.exports = htmlText

449
node_modules/micromark/lib/tokenize/html-text.mjs generated vendored Normal file
View File

@@ -0,0 +1,449 @@
// The HTML (text) construct: inline tags, comments, instructions,
// declarations, and CDATA in phrasing content.
var htmlText = {
  name: 'htmlText',
  tokenize: tokenizeHtmlText
}
export default htmlText
import assert from 'assert'
import asciiAlpha from '../character/ascii-alpha.mjs'
import asciiAlphanumeric from '../character/ascii-alphanumeric.mjs'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// Tokenize HTML (text): a single inline HTML construct starting with `<`.
// Unlike HTML (flow), line endings are allowed inside (but blank lines are
// not); `returnState` remembers where to resume after an EOL.
function tokenizeHtmlText(effects, ok, nok) {
  var self = this
  // Closing marker of a quoted attribute value (`"` or `'`).
  var marker
  // Expected `CDATA[` opening string while matching it.
  var buffer
  // Position in `buffer`.
  var index
  // State to resume in after a line ending.
  var returnState
  return start
  // Start of HTML (text): `<` expected.
  function start(code) {
    assert(code === codes.lessThan, 'expected `<`')
    effects.enter(types.htmlText)
    effects.enter(types.htmlTextData)
    effects.consume(code)
    return open
  }
  // After `<`: a tag name, `!`, `/`, or `?`.
  function open(code) {
    if (code === codes.exclamationMark) {
      effects.consume(code)
      return declarationOpen
    }
    if (code === codes.slash) {
      effects.consume(code)
      return tagCloseStart
    }
    if (code === codes.questionMark) {
      effects.consume(code)
      return instruction
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      return tagOpen
    }
    return nok(code)
  }
  // After `<!`: a comment, CDATA, or a declaration.
  function declarationOpen(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return commentOpen
    }
    if (code === codes.leftSquareBracket) {
      effects.consume(code)
      buffer = constants.cdataOpeningString
      index = 0
      return cdataOpen
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      return declaration
    }
    return nok(code)
  }
  // After `<!-`: a second `-` completes the comment opening.
  function commentOpen(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return commentStart
    }
    return nok(code)
  }
  // After `<!--`: a comment cannot start with `>` (or `->`, see next state).
  function commentStart(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return nok(code)
    }
    if (code === codes.dash) {
      effects.consume(code)
      return commentStartDash
    }
    return comment(code)
  }
  // After `<!---`: still cannot be followed directly by `>`.
  function commentStartDash(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return nok(code)
    }
    return comment(code)
  }
  // In comment content.
  function comment(code) {
    if (code === codes.eof) {
      return nok(code)
    }
    if (code === codes.dash) {
      effects.consume(code)
      return commentClose
    }
    if (markdownLineEnding(code)) {
      returnState = comment
      return atLineEnding(code)
    }
    effects.consume(code)
    return comment
  }
  // After `-` in a comment: a second `-` may start `-->`.
  function commentClose(code) {
    if (code === codes.dash) {
      effects.consume(code)
      return end
    }
    return comment(code)
  }
  // Matching the remainder of `<![CDATA[`.
  function cdataOpen(code) {
    if (code === buffer.charCodeAt(index++)) {
      effects.consume(code)
      return index === buffer.length ? cdata : cdataOpen
    }
    return nok(code)
  }
  // In CDATA content.
  function cdata(code) {
    if (code === codes.eof) {
      return nok(code)
    }
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return cdataClose
    }
    if (markdownLineEnding(code)) {
      returnState = cdata
      return atLineEnding(code)
    }
    effects.consume(code)
    return cdata
  }
  // After `]` in CDATA: a second `]` may start `]]>`.
  function cdataClose(code) {
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return cdataEnd
    }
    return cdata(code)
  }
  // After `]]` in CDATA: `>` closes; more `]`s keep us here.
  function cdataEnd(code) {
    if (code === codes.greaterThan) {
      return end(code)
    }
    if (code === codes.rightSquareBracket) {
      effects.consume(code)
      return cdataEnd
    }
    return cdata(code)
  }
  // In a declaration, until `>`.
  function declaration(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return end(code)
    }
    if (markdownLineEnding(code)) {
      returnState = declaration
      return atLineEnding(code)
    }
    effects.consume(code)
    return declaration
  }
  // In an instruction (`<?…?>`).
  function instruction(code) {
    if (code === codes.eof) {
      return nok(code)
    }
    if (code === codes.questionMark) {
      effects.consume(code)
      return instructionClose
    }
    if (markdownLineEnding(code)) {
      returnState = instruction
      return atLineEnding(code)
    }
    effects.consume(code)
    return instruction
  }
  // After `?` in an instruction: `>` closes it.
  function instructionClose(code) {
    return code === codes.greaterThan ? end(code) : instruction(code)
  }
  // After `</`: the first character of a closing tag name.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      return tagClose
    }
    return nok(code)
  }
  // In a closing tag name.
  function tagClose(code) {
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code)
      return tagClose
    }
    return tagCloseBetween(code)
  }
  // In optional whitespace after a closing tag name.
  function tagCloseBetween(code) {
    if (markdownLineEnding(code)) {
      returnState = tagCloseBetween
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagCloseBetween
    }
    return end(code)
  }
  // In an opening tag name.
  function tagOpen(code) {
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code)
      return tagOpen
    }
    if (
      code === codes.slash ||
      code === codes.greaterThan ||
      markdownLineEndingOrSpace(code)
    ) {
      return tagOpenBetween(code)
    }
    return nok(code)
  }
  // In an opening tag, between attributes.
  function tagOpenBetween(code) {
    if (code === codes.slash) {
      effects.consume(code)
      return end
    }
    if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) {
      effects.consume(code)
      return tagOpenAttributeName
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenBetween
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenBetween
    }
    return end(code)
  }
  // In an attribute name.
  function tagOpenAttributeName(code) {
    if (
      code === codes.dash ||
      code === codes.dot ||
      code === codes.colon ||
      code === codes.underscore ||
      asciiAlphanumeric(code)
    ) {
      effects.consume(code)
      return tagOpenAttributeName
    }
    return tagOpenAttributeNameAfter(code)
  }
  // After an attribute name: `=` starts a value; whitespace continues.
  function tagOpenAttributeNameAfter(code) {
    if (code === codes.equalsTo) {
      effects.consume(code)
      return tagOpenAttributeValueBefore
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeNameAfter
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenAttributeNameAfter
    }
    return tagOpenBetween(code)
  }
  // After `=`: the start of an attribute value (quoted or unquoted).
  function tagOpenAttributeValueBefore(code) {
    if (
      code === codes.eof ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.greaterThan ||
      code === codes.graveAccent
    ) {
      return nok(code)
    }
    if (code === codes.quotationMark || code === codes.apostrophe) {
      effects.consume(code)
      marker = code
      return tagOpenAttributeValueQuoted
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueBefore
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenAttributeValueBefore
    }
    effects.consume(code)
    marker = undefined
    return tagOpenAttributeValueUnquoted
  }
  // In a quoted attribute value, until the matching `marker`.
  function tagOpenAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code)
      return tagOpenAttributeValueQuotedAfter
    }
    if (code === codes.eof) {
      return nok(code)
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueQuoted
      return atLineEnding(code)
    }
    effects.consume(code)
    return tagOpenAttributeValueQuoted
  }
  // After a quoted value: whitespace, `/`, or `>` must follow.
  function tagOpenAttributeValueQuotedAfter(code) {
    if (
      code === codes.greaterThan ||
      code === codes.slash ||
      markdownLineEndingOrSpace(code)
    ) {
      return tagOpenBetween(code)
    }
    return nok(code)
  }
  // In an unquoted attribute value.
  function tagOpenAttributeValueUnquoted(code) {
    if (
      code === codes.eof ||
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.lessThan ||
      code === codes.equalsTo ||
      code === codes.graveAccent
    ) {
      return nok(code)
    }
    if (code === codes.greaterThan || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code)
    }
    effects.consume(code)
    return tagOpenAttributeValueUnquoted
  }
  // We can't have blank lines in content, so no need to worry about empty
  // tokens.
  // Handle a line ending inside the construct; resumes in `returnState`
  // after the (stripped) line prefix on the next line.
  function atLineEnding(code) {
    assert(returnState, 'expected return state')
    assert(markdownLineEnding(code), 'expected eol')
    effects.exit(types.htmlTextData)
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return spaceFactory(
      effects,
      afterPrefix,
      types.linePrefix,
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : constants.tabSize
    )
  }
  // After the line prefix: reopen data and resume where we left off.
  function afterPrefix(code) {
    effects.enter(types.htmlTextData)
    return returnState(code)
  }
  // At the `>` that closes the whole construct.
  function end(code) {
    if (code === codes.greaterThan) {
      effects.consume(code)
      effects.exit(types.htmlTextData)
      effects.exit(types.htmlText)
      return ok
    }
    return nok(code)
  }
}

374
node_modules/micromark/lib/tokenize/label-end.js generated vendored Normal file
View File

@@ -0,0 +1,374 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEndingOrSpace = require('../character/markdown-line-ending-or-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var chunkedPush = require('../util/chunked-push.js')
var chunkedSplice = require('../util/chunked-splice.js')
var normalizeIdentifier = require('../util/normalize-identifier.js')
var resolveAll = require('../util/resolve-all.js')
var shallow = require('../util/shallow.js')
var factoryDestination = require('./factory-destination.js')
var factoryLabel = require('./factory-label.js')
var factoryTitle = require('./factory-title.js')
var factoryWhitespace = require('./factory-whitespace.js')
// Normalize a CommonJS export into the shape of an ES-module namespace:
// objects that already carry a `default` key pass through untouched;
// anything else is wrapped as the default export.
function _interopDefaultLegacy(e) {
  var isNamespaceLike = e && typeof e === 'object' && 'default' in e
  return isNamespaceLike ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
// The label end construct: `]`, optionally followed by a resource
// (`(url "title")`) or a reference (`[label]` / `[]`).
var labelEnd = {
  name: 'labelEnd',
  tokenize: tokenizeLabelEnd,
  resolveTo: resolveToLabelEnd,
  resolveAll: resolveAllLabelEnd
}
// Partial constructs for what can follow a label end.
var resourceConstruct = {tokenize: tokenizeResource}
var fullReferenceConstruct = {tokenize: tokenizeFullReference}
var collapsedReferenceConstruct = {tokenize: tokenizeCollapsedReference}
// Resolve all label starts/ends that were never consumed by a link or
// image: turn their tokens into plain data and drop the marker events
// that follow them.
function resolveAllLabelEnd(events) {
  var position = 0

  while (position < events.length) {
    var token = events[position][1]
    var isLabelToken =
      token.type === types.labelImage ||
      token.type === types.labelLink ||
      token.type === types.labelEnd

    if (isLabelToken && !token._used) {
      // Drop the marker events (image starts carry two extra events).
      var markerEventCount = token.type === types.labelImage ? 4 : 2
      events.splice(position + 1, markerEventCount)
      token.type = types.data
      position++
    }

    position++
  }

  return events
}
// Resolve a detected label end with its matching start into link/image,
// label, and text tokens, re-resolving the inner (span) content, and
// splicing the new event list into `events`.
function resolveToLabelEnd(events, context) {
  var index = events.length
  // Extra marker events in the opening (2 for images: the `!` marker pair).
  var offset = 0
  var group
  var label
  var text
  var token
  // Index of the opening (`labelImage`/`labelLink`) event.
  var open
  // Index of the closing (`labelEnd`) event.
  var close
  // New events to splice in.
  var media
  // Find an opening.
  while (index--) {
    token = events[index][1]
    if (open) {
      // If we see another link, or inactive link label, we've been here before.
      if (
        token.type === types.link ||
        (token.type === types.labelLink && token._inactive)
      ) {
        break
      }
      // Mark other link openings as inactive, as we can't have links in
      // links.
      if (events[index][0] === 'enter' && token.type === types.labelLink) {
        token._inactive = true
      }
    } else if (close) {
      if (
        events[index][0] === 'enter' &&
        (token.type === types.labelImage || token.type === types.labelLink) &&
        !token._balanced
      ) {
        open = index
        if (token.type !== types.labelLink) {
          offset = 2
          break
        }
      }
    } else if (token.type === types.labelEnd) {
      close = index
    }
  }
  // The whole link/image, the label (`[…]`), and the label text.
  group = {
    type: events[open][1].type === types.labelLink ? types.link : types.image,
    start: shallow(events[open][1].start),
    end: shallow(events[events.length - 1][1].end)
  }
  label = {
    type: types.label,
    start: shallow(events[open][1].start),
    end: shallow(events[close][1].end)
  }
  text = {
    type: types.labelText,
    start: shallow(events[open + offset + 2][1].end),
    end: shallow(events[close - 2][1].start)
  }
  media = [
    ['enter', group, context],
    ['enter', label, context]
  ]
  // Opening marker.
  media = chunkedPush(media, events.slice(open + 1, open + offset + 3))
  // Text open.
  media = chunkedPush(media, [['enter', text, context]])
  // Between.
  media = chunkedPush(
    media,
    resolveAll(
      context.parser.constructs.insideSpan.null,
      events.slice(open + offset + 4, close - 3),
      context
    )
  )
  // Text close, marker close, label close.
  media = chunkedPush(media, [
    ['exit', text, context],
    events[close - 2],
    events[close - 1],
    ['exit', label, context]
  ])
  // Reference, resource, or so.
  media = chunkedPush(media, events.slice(close + 1))
  // Media close.
  media = chunkedPush(media, [['exit', group, context]])
  chunkedSplice(events, open, events.length, media)
  return events
}
// Tokenize a label end (`]`) and whatever follows it: a resource, a full
// or collapsed reference, or a shortcut reference (only when the label is
// a defined identifier).
function tokenizeLabelEnd(effects, ok, nok) {
  var self = this
  var index = self.events.length
  // Matching unbalanced label start (`[` or `![`), if any.
  var labelStart
  // Whether the label text is a defined identifier.
  var defined
  // Find an opening.
  while (index--) {
    if (
      (self.events[index][1].type === types.labelImage ||
        self.events[index][1].type === types.labelLink) &&
      !self.events[index][1]._balanced
    ) {
      labelStart = self.events[index][1]
      break
    }
  }
  return start
  // At the `]`.
  function start(code) {
    assert__default['default'](
      code === codes.rightSquareBracket,
      'expected `]`'
    )
    if (!labelStart) {
      return nok(code)
    }
    // It's a balanced bracket, but contains a link.
    if (labelStart._inactive) return balanced(code)
    defined =
      self.parser.defined.indexOf(
        normalizeIdentifier(
          self.sliceSerialize({start: labelStart.end, end: self.now()})
        )
      ) > -1
    effects.enter(types.labelEnd)
    effects.enter(types.labelMarker)
    effects.consume(code)
    effects.exit(types.labelMarker)
    effects.exit(types.labelEnd)
    return afterLabelEnd
  }
  // After `]`: try a resource or reference, else shortcut.
  function afterLabelEnd(code) {
    // Resource: `[asd](fgh)`.
    if (code === codes.leftParenthesis) {
      return effects.attempt(
        resourceConstruct,
        ok,
        defined ? ok : balanced
      )(code)
    }
    // Collapsed (`[asd][]`) or full (`[asd][fgh]`) reference?
    if (code === codes.leftSquareBracket) {
      return effects.attempt(
        fullReferenceConstruct,
        ok,
        defined
          ? effects.attempt(collapsedReferenceConstruct, ok, balanced)
          : balanced
      )(code)
    }
    // Shortcut reference: `[asd]`?
    return defined ? ok(code) : balanced(code)
  }
  // Mark the opening as balanced (so it won't match again) and fail.
  function balanced(code) {
    labelStart._balanced = true
    return nok(code)
  }
}
// Tokenize a resource: `(destination)` optionally with whitespace and a
// quoted or parenthesized title, as in `[x](url "title")`.
function tokenizeResource(effects, ok, nok) {
  return start
  // At the `(`.
  function start(code) {
    assert__default['default'].equal(
      code,
      codes.leftParenthesis,
      'expected left paren'
    )
    effects.enter(types.resource)
    effects.enter(types.resourceMarker)
    effects.consume(code)
    effects.exit(types.resourceMarker)
    return factoryWhitespace(effects, open)
  }
  // After optional whitespace: `)` (empty resource) or a destination.
  function open(code) {
    if (code === codes.rightParenthesis) {
      return end(code)
    }
    return factoryDestination(
      effects,
      destinationAfter,
      nok,
      types.resourceDestination,
      types.resourceDestinationLiteral,
      types.resourceDestinationLiteralMarker,
      types.resourceDestinationRaw,
      types.resourceDestinationString,
      constants.linkResourceDestinationBalanceMax
    )(code)
  }
  // After the destination: whitespace may precede a title.
  function destinationAfter(code) {
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, between)(code)
      : end(code)
  }
  // Between destination and `)`: an optional title.
  function between(code) {
    if (
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.leftParenthesis
    ) {
      return factoryTitle(
        effects,
        factoryWhitespace(effects, end),
        nok,
        types.resourceTitle,
        types.resourceTitleMarker,
        types.resourceTitleString
      )(code)
    }
    return end(code)
  }
  // At the closing `)`.
  function end(code) {
    if (code === codes.rightParenthesis) {
      effects.enter(types.resourceMarker)
      effects.consume(code)
      effects.exit(types.resourceMarker)
      effects.exit(types.resource)
      return ok
    }
    return nok(code)
  }
}
// Tokenize a full reference (`[label]`): parse the label, then succeed
// only when the (normalized) label is a defined identifier.
function tokenizeFullReference(effects, ok, nok) {
  var self = this
  return start
  // At the `[`.
  function start(code) {
    assert__default['default'].equal(
      code,
      codes.leftSquareBracket,
      'expected left bracket'
    )
    return factoryLabel.call(
      self,
      effects,
      afterLabel,
      nok,
      types.reference,
      types.referenceMarker,
      types.referenceString
    )(code)
  }
  // After the label: check it against the defined identifiers.
  function afterLabel(code) {
    return self.parser.defined.indexOf(
      normalizeIdentifier(
        self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
      )
    ) < 0
      ? nok(code)
      : ok(code)
  }
}
// Tokenize a collapsed reference: exactly `[]` directly after a label end.
function tokenizeCollapsedReference(effects, ok, nok) {
  return start
  // At the `[`.
  function start(code) {
    assert__default['default'].equal(
      code,
      codes.leftSquareBracket,
      'expected left bracket'
    )
    effects.enter(types.reference)
    effects.enter(types.referenceMarker)
    effects.consume(code)
    effects.exit(types.referenceMarker)
    return open
  }
  // Only `]` may follow immediately.
  function open(code) {
    if (code === codes.rightSquareBracket) {
      effects.enter(types.referenceMarker)
      effects.consume(code)
      effects.exit(types.referenceMarker)
      effects.exit(types.reference)
      return ok
    }
    return nok(code)
  }
}
module.exports = labelEnd

350
node_modules/micromark/lib/tokenize/label-end.mjs generated vendored Normal file
View File

@@ -0,0 +1,350 @@
// The label end construct: `]`, optionally followed by a resource
// (`(url "title")`) or a reference (`[label]` / `[]`).
var labelEnd = {
  name: 'labelEnd',
  tokenize: tokenizeLabelEnd,
  resolveTo: resolveToLabelEnd,
  resolveAll: resolveAllLabelEnd
}
export default labelEnd
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEndingOrSpace from '../character/markdown-line-ending-or-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import chunkedPush from '../util/chunked-push.mjs'
import chunkedSplice from '../util/chunked-splice.mjs'
import normalizeIdentifier from '../util/normalize-identifier.mjs'
import resolveAll from '../util/resolve-all.mjs'
import shallow from '../util/shallow.mjs'
import destinationFactory from './factory-destination.mjs'
import labelFactory from './factory-label.mjs'
import titleFactory from './factory-title.mjs'
import whitespaceFactory from './factory-whitespace.mjs'
var resourceConstruct = {tokenize: tokenizeResource}
var fullReferenceConstruct = {tokenize: tokenizeFullReference}
var collapsedReferenceConstruct = {tokenize: tokenizeCollapsedReference}
// Sweep all events: any label start/end token that was never matched into a
// link or image (`_used` unset) becomes plain data, and its marker events
// are dropped.
function resolveAllLabelEnd(events) {
  var index = -1
  var token
  while (++index < events.length) {
    token = events[index][1]
    if (
      !token._used &&
      (token.type === types.labelImage ||
        token.type === types.labelLink ||
        token.type === types.labelEnd)
    ) {
      // Remove the marker (image starts carry two extra events for `!`).
      events.splice(index + 1, token.type === types.labelImage ? 4 : 2)
      token.type = types.data
      index++
    }
  }
  return events
}
// Rewrite the events between a label start (`[` or `![`) and the label end
// into a `link`/`image` group with a nested `label`/`labelText`, resolving
// inner spans in between.
function resolveToLabelEnd(events, context) {
  var index = events.length
  var offset = 0
  var group
  var label
  var text
  var token
  var open
  var close
  var media
  // Find an opening.
  while (index--) {
    token = events[index][1]
    if (open) {
      // If we see another link, or inactive link label, we've been here before.
      if (
        token.type === types.link ||
        (token.type === types.labelLink && token._inactive)
      ) {
        break
      }
      // Mark other link openings as inactive, as we can't have links in
      // links.
      if (events[index][0] === 'enter' && token.type === types.labelLink) {
        token._inactive = true
      }
    } else if (close) {
      if (
        events[index][0] === 'enter' &&
        (token.type === types.labelImage || token.type === types.labelLink) &&
        !token._balanced
      ) {
        open = index
        if (token.type !== types.labelLink) {
          // Image starts (`![`) span two extra events.
          offset = 2
          break
        }
      }
    } else if (token.type === types.labelEnd) {
      close = index
    }
  }
  group = {
    type: events[open][1].type === types.labelLink ? types.link : types.image,
    start: shallow(events[open][1].start),
    end: shallow(events[events.length - 1][1].end)
  }
  label = {
    type: types.label,
    start: shallow(events[open][1].start),
    end: shallow(events[close][1].end)
  }
  text = {
    type: types.labelText,
    start: shallow(events[open + offset + 2][1].end),
    end: shallow(events[close - 2][1].start)
  }
  media = [
    ['enter', group, context],
    ['enter', label, context]
  ]
  // Opening marker.
  media = chunkedPush(media, events.slice(open + 1, open + offset + 3))
  // Text open.
  media = chunkedPush(media, [['enter', text, context]])
  // Between: resolve the inner-span constructs over the label contents.
  media = chunkedPush(
    media,
    resolveAll(
      context.parser.constructs.insideSpan.null,
      events.slice(open + offset + 4, close - 3),
      context
    )
  )
  // Text close, marker close, label close.
  media = chunkedPush(media, [
    ['exit', text, context],
    events[close - 2],
    events[close - 1],
    ['exit', label, context]
  ])
  // Reference, resource, or so.
  media = chunkedPush(media, events.slice(close + 1))
  // Media close.
  media = chunkedPush(media, [['exit', group, context]])
  chunkedSplice(events, open, events.length, media)
  return events
}
// Tokenize a label end (`]`), then try a resource (`(…)`), full reference
// (`[label]`), collapsed reference (`[]`), or shortcut reference.
function tokenizeLabelEnd(effects, ok, nok) {
  var self = this
  var index = self.events.length
  var labelStart
  var defined
  // Find an opening (the most recent unbalanced `[` or `![`).
  while (index--) {
    if (
      (self.events[index][1].type === types.labelImage ||
        self.events[index][1].type === types.labelLink) &&
      !self.events[index][1]._balanced
    ) {
      labelStart = self.events[index][1]
      break
    }
  }
  return start
  function start(code) {
    assert(code === codes.rightSquareBracket, 'expected `]`')
    if (!labelStart) {
      return nok(code)
    }
    // It's a balanced bracket, but contains a link.
    if (labelStart._inactive) return balanced(code)
    // Whether `[…]` between the start and here is a defined identifier.
    defined =
      self.parser.defined.indexOf(
        normalizeIdentifier(
          self.sliceSerialize({start: labelStart.end, end: self.now()})
        )
      ) > -1
    effects.enter(types.labelEnd)
    effects.enter(types.labelMarker)
    effects.consume(code)
    effects.exit(types.labelMarker)
    effects.exit(types.labelEnd)
    return afterLabelEnd
  }
  function afterLabelEnd(code) {
    // Resource: `[asd](fgh)`.
    if (code === codes.leftParenthesis) {
      return effects.attempt(
        resourceConstruct,
        ok,
        defined ? ok : balanced
      )(code)
    }
    // Collapsed (`[asd][]`) or full (`[asd][fgh]`) reference?
    if (code === codes.leftSquareBracket) {
      return effects.attempt(
        fullReferenceConstruct,
        ok,
        defined
          ? effects.attempt(collapsedReferenceConstruct, ok, balanced)
          : balanced
      )(code)
    }
    // Shortcut reference: `[asd]`?
    return defined ? ok(code) : balanced(code)
  }
  // No media: mark the start as balanced so it is not retried, then fail.
  function balanced(code) {
    labelStart._balanced = true
    return nok(code)
  }
}
// Tokenize a resource: `(destination)` with an optional title, e.g.
// `(https://example.com "title")`.
function tokenizeResource(effects, ok, nok) {
  return start
  // Opening marker: `(`.
  function start(code) {
    assert.equal(code, codes.leftParenthesis, 'expected left paren')
    effects.enter(types.resource)
    effects.enter(types.resourceMarker)
    effects.consume(code)
    effects.exit(types.resourceMarker)
    return whitespaceFactory(effects, open)
  }
  // After optional whitespace: either `)` (empty resource) or a destination.
  function open(code) {
    if (code === codes.rightParenthesis) {
      return end(code)
    }
    return destinationFactory(
      effects,
      destinationAfter,
      nok,
      types.resourceDestination,
      types.resourceDestinationLiteral,
      types.resourceDestinationLiteralMarker,
      types.resourceDestinationRaw,
      types.resourceDestinationString,
      constants.linkResourceDestinationBalanceMax
    )(code)
  }
  // After the destination: whitespace may introduce a title.
  function destinationAfter(code) {
    return markdownLineEndingOrSpace(code)
      ? whitespaceFactory(effects, between)(code)
      : end(code)
  }
  // Between destination and `)`: an optional title in `"…"`, `'…'`, or `(…)`.
  function between(code) {
    if (
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.leftParenthesis
    ) {
      return titleFactory(
        effects,
        whitespaceFactory(effects, end),
        nok,
        types.resourceTitle,
        types.resourceTitleMarker,
        types.resourceTitleString
      )(code)
    }
    return end(code)
  }
  // Closing marker: `)`.
  function end(code) {
    if (code === codes.rightParenthesis) {
      effects.enter(types.resourceMarker)
      effects.consume(code)
      effects.exit(types.resourceMarker)
      effects.exit(types.resource)
      return ok
    }
    return nok(code)
  }
}
// Tokenize the label part of a full reference (`[text][label]`).
// Succeeds only when the label names a defined definition.
function tokenizeFullReference(effects, ok, nok) {
  var self = this
  return start
  // Start of the reference label: `[`.
  function start(code) {
    assert.equal(code, codes.leftSquareBracket, 'expected left bracket')
    return labelFactory.call(
      self,
      effects,
      afterLabel,
      nok,
      types.reference,
      types.referenceMarker,
      types.referenceString
    )(code)
  }
  // After the whole label: ok only if its normalized identifier is defined.
  function afterLabel(code) {
    // The last event is the reference token; `slice(1, -1)` drops the
    // surrounding brackets from its serialized text.
    return self.parser.defined.indexOf(
      normalizeIdentifier(
        self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
      )
    ) < 0
      ? nok(code)
      : ok(code)
  }
}
// Tokenize the trailing `[]` of a collapsed reference (`[label][]`).
function tokenizeCollapsedReference(effects, ok, nok) {
  return start
  // Opening marker: `[`.
  function start(code) {
    assert.equal(code, codes.leftSquareBracket, 'expected left bracket')
    effects.enter(types.reference)
    effects.enter(types.referenceMarker)
    effects.consume(code)
    effects.exit(types.referenceMarker)
    return open
  }
  // Directly after `[`: only `]` is valid — the label must be empty.
  function open(code) {
    if (code === codes.rightSquareBracket) {
      effects.enter(types.referenceMarker)
      effects.consume(code)
      effects.exit(types.referenceMarker)
      effects.exit(types.reference)
      return ok
    }
    return nok(code)
  }
}

View File

@@ -0,0 +1,56 @@
'use strict'
var labelEnd = require('./label-end.js')
var assert = require('assert')
var codes = require('../character/codes.js')
var types = require('../constant/types.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var labelStartImage = {
name: 'labelStartImage',
tokenize: tokenizeLabelStartImage,
resolveAll: labelEnd.resolveAll
}
// Tokenize an image label start: `![`.
function tokenizeLabelStartImage(effects, ok, nok) {
  var self = this
  return start
  // Exclamation mark: `!`.
  function start(code) {
    assert__default['default'](code === codes.exclamationMark, 'expected `!`')
    effects.enter(types.labelImage)
    effects.enter(types.labelImageMarker)
    effects.consume(code)
    effects.exit(types.labelImageMarker)
    return open
  }
  // Opening bracket: `[`.
  function open(code) {
    if (code === codes.leftSquareBracket) {
      effects.enter(types.labelMarker)
      effects.consume(code)
      effects.exit(types.labelMarker)
      effects.exit(types.labelImage)
      return after
    }
    return nok(code)
  }
  // Defer to footnotes (hidden extension hook) when `^` follows `![`.
  function after(code) {
    /* c8 ignore next */
    return code === codes.caret &&
      /* c8 ignore next */
      '_hiddenFootnoteSupport' in self.parser.constructs
      ? /* c8 ignore next */ nok(code)
      : ok(code)
  }
}
module.exports = labelStartImage

View File

@@ -0,0 +1,48 @@
import labelEnd from './label-end.mjs'
var labelStartImage = {
name: 'labelStartImage',
tokenize: tokenizeLabelStartImage,
resolveAll: labelEnd.resolveAll
}
export default labelStartImage
import assert from 'assert'
import codes from '../character/codes.mjs'
import types from '../constant/types.mjs'
// Tokenize an image label start: `![`.
function tokenizeLabelStartImage(effects, ok, nok) {
  var self = this
  return start
  // Exclamation mark: `!`.
  function start(code) {
    assert(code === codes.exclamationMark, 'expected `!`')
    effects.enter(types.labelImage)
    effects.enter(types.labelImageMarker)
    effects.consume(code)
    effects.exit(types.labelImageMarker)
    return open
  }
  // Opening bracket: `[`.
  function open(code) {
    if (code === codes.leftSquareBracket) {
      effects.enter(types.labelMarker)
      effects.consume(code)
      effects.exit(types.labelMarker)
      effects.exit(types.labelImage)
      return after
    }
    return nok(code)
  }
  // Defer to footnotes (hidden extension hook) when `^` follows `![`.
  function after(code) {
    /* c8 ignore next */
    return code === codes.caret &&
      /* c8 ignore next */
      '_hiddenFootnoteSupport' in self.parser.constructs
      ? /* c8 ignore next */ nok(code)
      : ok(code)
  }
}

View File

@@ -0,0 +1,46 @@
'use strict'
var labelEnd = require('./label-end.js')
var assert = require('assert')
var codes = require('../character/codes.js')
var types = require('../constant/types.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var labelStartLink = {
name: 'labelStartLink',
tokenize: tokenizeLabelStartLink,
resolveAll: labelEnd.resolveAll
}
// Tokenize a link label start: `[`.
function tokenizeLabelStartLink(effects, ok, nok) {
  var self = this
  return start
  // Opening bracket: `[`.
  function start(code) {
    assert__default['default'](code === codes.leftSquareBracket, 'expected `[`')
    effects.enter(types.labelLink)
    effects.enter(types.labelMarker)
    effects.consume(code)
    effects.exit(types.labelMarker)
    effects.exit(types.labelLink)
    return after
  }
  // Defer to footnotes (hidden extension hook) when `^` follows `[`.
  function after(code) {
    /* c8 ignore next */
    return code === codes.caret &&
      /* c8 ignore next */
      '_hiddenFootnoteSupport' in self.parser.constructs
      ? /* c8 ignore next */
        nok(code)
      : ok(code)
  }
}
module.exports = labelStartLink

View File

@@ -0,0 +1,38 @@
import labelEnd from './label-end.mjs'
var labelStartLink = {
name: 'labelStartLink',
tokenize: tokenizeLabelStartLink,
resolveAll: labelEnd.resolveAll
}
export default labelStartLink
import assert from 'assert'
import codes from '../character/codes.mjs'
import types from '../constant/types.mjs'
// Tokenize a link label start: `[`.
function tokenizeLabelStartLink(effects, ok, nok) {
  var self = this
  return start
  // Opening bracket: `[`.
  function start(code) {
    assert(code === codes.leftSquareBracket, 'expected `[`')
    effects.enter(types.labelLink)
    effects.enter(types.labelMarker)
    effects.consume(code)
    effects.exit(types.labelMarker)
    effects.exit(types.labelLink)
    return after
  }
  // Defer to footnotes (hidden extension hook) when `^` follows `[`.
  function after(code) {
    /* c8 ignore next */
    return code === codes.caret &&
      /* c8 ignore next */
      '_hiddenFootnoteSupport' in self.parser.constructs
      ? /* c8 ignore next */
        nok(code)
      : ok(code)
  }
}

31
node_modules/micromark/lib/tokenize/line-ending.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
'use strict'
var assert = require('assert')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var lineEnding = {
name: 'lineEnding',
tokenize: tokenizeLineEnding
}
// Tokenize a line ending, then eat the next line's leading whitespace as a
// line prefix. Note: there is no `nok`; this construct cannot fail.
function tokenizeLineEnding(effects, ok) {
  return start
  function start(code) {
    assert__default['default'](markdownLineEnding(code), 'expected eol')
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return factorySpace(effects, ok, types.linePrefix)
  }
}
module.exports = lineEnding

22
node_modules/micromark/lib/tokenize/line-ending.mjs generated vendored Normal file
View File

@@ -0,0 +1,22 @@
var lineEnding = {
name: 'lineEnding',
tokenize: tokenizeLineEnding
}
export default lineEnding
import assert from 'assert'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// Tokenize a line ending, then eat the next line's leading whitespace as a
// line prefix. Note: there is no `nok`; this construct cannot fail.
function tokenizeLineEnding(effects, ok) {
  return start
  function start(code) {
    assert(markdownLineEnding(code), 'expected eol')
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return spaceFactory(effects, ok, types.linePrefix)
  }
}

219
node_modules/micromark/lib/tokenize/list.js generated vendored Normal file
View File

@@ -0,0 +1,219 @@
'use strict'
var asciiDigit = require('../character/ascii-digit.js')
var codes = require('../character/codes.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var prefixSize = require('../util/prefix-size.js')
var sizeChunks = require('../util/size-chunks.js')
var factorySpace = require('./factory-space.js')
var partialBlankLine = require('./partial-blank-line.js')
var thematicBreak = require('./thematic-break.js')
var list = {
name: 'list',
tokenize: tokenizeListStart,
continuation: {tokenize: tokenizeListContinuation},
exit: tokenizeListEnd
}
var listItemPrefixWhitespaceConstruct = {
tokenize: tokenizeListItemPrefixWhitespace,
partial: true
}
var indentConstruct = {tokenize: tokenizeIndent, partial: true}
// Tokenize the start of a list item: an unordered marker (`*`, `+`, `-`) or
// an ordered value plus marker (`1.` / `1)`), followed by its prefix
// whitespace. Container state tracks list kind, marker, and prefix size.
function tokenizeListStart(effects, ok, nok) {
  var self = this
  var initialSize = prefixSize(self.events, types.linePrefix)
  var size = 0
  return start
  function start(code) {
    // Kind is sticky once the container exists; otherwise infer from `code`.
    var kind =
      self.containerState.type ||
      (code === codes.asterisk || code === codes.plusSign || code === codes.dash
        ? types.listUnordered
        : types.listOrdered)
    if (
      kind === types.listUnordered
        ? !self.containerState.marker || code === self.containerState.marker
        : asciiDigit(code)
    ) {
      if (!self.containerState.type) {
        self.containerState.type = kind
        effects.enter(kind, {_container: true})
      }
      if (kind === types.listUnordered) {
        effects.enter(types.listItemPrefix)
        // `*` and `-` could also be a thematic break; check that first.
        return code === codes.asterisk || code === codes.dash
          ? effects.check(thematicBreak, nok, atMarker)(code)
          : atMarker(code)
      }
      // When interrupting a paragraph, only a list starting at `1` counts.
      if (!self.interrupt || code === codes.digit1) {
        effects.enter(types.listItemPrefix)
        effects.enter(types.listItemValue)
        return inside(code)
      }
    }
    return nok(code)
  }
  // Digits of an ordered list item's value, bounded by the max size.
  function inside(code) {
    if (asciiDigit(code) && ++size < constants.listItemValueSizeMax) {
      effects.consume(code)
      return inside
    }
    if (
      (!self.interrupt || size < 2) &&
      (self.containerState.marker
        ? code === self.containerState.marker
        : code === codes.rightParenthesis || code === codes.dot)
    ) {
      effects.exit(types.listItemValue)
      return atMarker(code)
    }
    return nok(code)
  }
  // The list item marker itself, then blank-line / whitespace handling.
  function atMarker(code) {
    effects.enter(types.listItemMarker)
    effects.consume(code)
    effects.exit(types.listItemMarker)
    self.containerState.marker = self.containerState.marker || code
    return effects.check(
      partialBlankLine,
      // Can't be empty when interrupting.
      self.interrupt ? nok : onBlank,
      effects.attempt(
        listItemPrefixWhitespaceConstruct,
        endOfPrefix,
        otherPrefix
      )
    )
  }
  // Item starts with a blank line: remember it and count the implied space.
  function onBlank(code) {
    self.containerState.initialBlankLine = true
    initialSize++
    return endOfPrefix(code)
  }
  // Whitespace attempt failed (e.g. too much indent): take a single space.
  function otherPrefix(code) {
    if (markdownSpace(code)) {
      effects.enter(types.listItemPrefixWhitespace)
      effects.consume(code)
      effects.exit(types.listItemPrefixWhitespace)
      return endOfPrefix
    }
    return nok(code)
  }
  // Record the total prefix size used for continuation-line indentation.
  function endOfPrefix(code) {
    self.containerState.size =
      initialSize +
      sizeChunks(self.sliceStream(effects.exit(types.listItemPrefix)))
    return ok(code)
  }
}
// Decide whether a new line continues the current list item (blank line, or
// line indented to the item's size), continues the list with a new item, or
// neither.
function tokenizeListContinuation(effects, ok, nok) {
  var self = this
  self.containerState._closeFlow = undefined
  return effects.check(partialBlankLine, onBlank, notBlank)
  function onBlank(code) {
    self.containerState.furtherBlankLines =
      self.containerState.furtherBlankLines ||
      self.containerState.initialBlankLine
    // We have a blank line.
    // Still, try to consume at most the item's size.
    return factorySpace(
      effects,
      ok,
      types.listItemIndent,
      self.containerState.size + 1
    )(code)
  }
  function notBlank(code) {
    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
      self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined
      return notInCurrentItem(code)
    }
    self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined
    return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)
  }
  // Not part of this item: maybe a sibling item starts here.
  function notInCurrentItem(code) {
    // While we do continue, we signal that the flow should be closed.
    self.containerState._closeFlow = true
    // As we're closing flow, we're no longer interrupting.
    self.interrupt = undefined
    return factorySpace(
      effects,
      effects.attempt(list, ok, nok),
      types.linePrefix,
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : constants.tabSize
    )(code)
  }
}
// Partial: succeed only when the line is indented exactly to the current
// list item's recorded prefix size.
function tokenizeIndent(effects, ok, nok) {
  var self = this
  return factorySpace(
    effects,
    afterPrefix,
    types.listItemIndent,
    self.containerState.size + 1
  )
  function afterPrefix(code) {
    return prefixSize(self.events, types.listItemIndent) ===
      self.containerState.size
      ? ok(code)
      : nok(code)
  }
}
// Close the list container (type was stored on the container state).
function tokenizeListEnd(effects) {
  effects.exit(this.containerState.type)
}
// Partial: consume the whitespace after a list item marker. Fails when the
// next code is still a space (too much indent: that is indented code) or
// when no whitespace was consumed at all.
function tokenizeListItemPrefixWhitespace(effects, ok, nok) {
  var self = this
  return factorySpace(
    effects,
    afterPrefix,
    types.listItemPrefixWhitespace,
    self.parser.constructs.disable.null.indexOf('codeIndented') > -1
      ? undefined
      : constants.tabSize + 1
  )
  function afterPrefix(code) {
    return markdownSpace(code) ||
      !prefixSize(self.events, types.listItemPrefixWhitespace)
      ? nok(code)
      : ok(code)
  }
}
module.exports = list

216
node_modules/micromark/lib/tokenize/list.mjs generated vendored Normal file
View File

@@ -0,0 +1,216 @@
var list = {
name: 'list',
tokenize: tokenizeListStart,
continuation: {tokenize: tokenizeListContinuation},
exit: tokenizeListEnd
}
export default list
import asciiDigit from '../character/ascii-digit.mjs'
import codes from '../character/codes.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import prefixSize from '../util/prefix-size.mjs'
import sizeChunks from '../util/size-chunks.mjs'
import spaceFactory from './factory-space.mjs'
import blank from './partial-blank-line.mjs'
import thematicBreak from './thematic-break.mjs'
var listItemPrefixWhitespaceConstruct = {
tokenize: tokenizeListItemPrefixWhitespace,
partial: true
}
var indentConstruct = {tokenize: tokenizeIndent, partial: true}
// Tokenize the start of a list item: an unordered marker (`*`, `+`, `-`) or
// an ordered value plus marker (`1.` / `1)`), followed by its prefix
// whitespace. Container state tracks list kind, marker, and prefix size.
function tokenizeListStart(effects, ok, nok) {
  var self = this
  var initialSize = prefixSize(self.events, types.linePrefix)
  var size = 0
  return start
  function start(code) {
    // Kind is sticky once the container exists; otherwise infer from `code`.
    var kind =
      self.containerState.type ||
      (code === codes.asterisk || code === codes.plusSign || code === codes.dash
        ? types.listUnordered
        : types.listOrdered)
    if (
      kind === types.listUnordered
        ? !self.containerState.marker || code === self.containerState.marker
        : asciiDigit(code)
    ) {
      if (!self.containerState.type) {
        self.containerState.type = kind
        effects.enter(kind, {_container: true})
      }
      if (kind === types.listUnordered) {
        effects.enter(types.listItemPrefix)
        // `*` and `-` could also be a thematic break; check that first.
        return code === codes.asterisk || code === codes.dash
          ? effects.check(thematicBreak, nok, atMarker)(code)
          : atMarker(code)
      }
      // When interrupting a paragraph, only a list starting at `1` counts.
      if (!self.interrupt || code === codes.digit1) {
        effects.enter(types.listItemPrefix)
        effects.enter(types.listItemValue)
        return inside(code)
      }
    }
    return nok(code)
  }
  // Digits of an ordered list item's value, bounded by the max size.
  function inside(code) {
    if (asciiDigit(code) && ++size < constants.listItemValueSizeMax) {
      effects.consume(code)
      return inside
    }
    if (
      (!self.interrupt || size < 2) &&
      (self.containerState.marker
        ? code === self.containerState.marker
        : code === codes.rightParenthesis || code === codes.dot)
    ) {
      effects.exit(types.listItemValue)
      return atMarker(code)
    }
    return nok(code)
  }
  // The list item marker itself, then blank-line / whitespace handling.
  function atMarker(code) {
    effects.enter(types.listItemMarker)
    effects.consume(code)
    effects.exit(types.listItemMarker)
    self.containerState.marker = self.containerState.marker || code
    return effects.check(
      blank,
      // Can't be empty when interrupting.
      self.interrupt ? nok : onBlank,
      effects.attempt(
        listItemPrefixWhitespaceConstruct,
        endOfPrefix,
        otherPrefix
      )
    )
  }
  // Item starts with a blank line: remember it and count the implied space.
  function onBlank(code) {
    self.containerState.initialBlankLine = true
    initialSize++
    return endOfPrefix(code)
  }
  // Whitespace attempt failed (e.g. too much indent): take a single space.
  function otherPrefix(code) {
    if (markdownSpace(code)) {
      effects.enter(types.listItemPrefixWhitespace)
      effects.consume(code)
      effects.exit(types.listItemPrefixWhitespace)
      return endOfPrefix
    }
    return nok(code)
  }
  // Record the total prefix size used for continuation-line indentation.
  function endOfPrefix(code) {
    self.containerState.size =
      initialSize +
      sizeChunks(self.sliceStream(effects.exit(types.listItemPrefix)))
    return ok(code)
  }
}
// Decide whether a new line continues the current list item (blank line, or
// line indented to the item's size), continues the list with a new item, or
// neither.
function tokenizeListContinuation(effects, ok, nok) {
  var self = this
  self.containerState._closeFlow = undefined
  return effects.check(blank, onBlank, notBlank)
  function onBlank(code) {
    self.containerState.furtherBlankLines =
      self.containerState.furtherBlankLines ||
      self.containerState.initialBlankLine
    // We have a blank line.
    // Still, try to consume at most the item's size.
    return spaceFactory(
      effects,
      ok,
      types.listItemIndent,
      self.containerState.size + 1
    )(code)
  }
  function notBlank(code) {
    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
      self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined
      return notInCurrentItem(code)
    }
    self.containerState.furtherBlankLines = self.containerState.initialBlankLine = undefined
    return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)
  }
  // Not part of this item: maybe a sibling item starts here.
  function notInCurrentItem(code) {
    // While we do continue, we signal that the flow should be closed.
    self.containerState._closeFlow = true
    // As we're closing flow, we're no longer interrupting.
    self.interrupt = undefined
    return spaceFactory(
      effects,
      effects.attempt(list, ok, nok),
      types.linePrefix,
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1
        ? undefined
        : constants.tabSize
    )(code)
  }
}
// Partial: succeed only when the line is indented exactly to the current
// list item's recorded prefix size.
function tokenizeIndent(effects, ok, nok) {
  var self = this
  return spaceFactory(
    effects,
    afterPrefix,
    types.listItemIndent,
    self.containerState.size + 1
  )
  function afterPrefix(code) {
    return prefixSize(self.events, types.listItemIndent) ===
      self.containerState.size
      ? ok(code)
      : nok(code)
  }
}
// Close the list container (type was stored on the container state).
function tokenizeListEnd(effects) {
  effects.exit(this.containerState.type)
}
// Partial: consume the whitespace after a list item marker. Fails when the
// next code is still a space (too much indent: that is indented code) or
// when no whitespace was consumed at all.
function tokenizeListItemPrefixWhitespace(effects, ok, nok) {
  var self = this
  return spaceFactory(
    effects,
    afterPrefix,
    types.listItemPrefixWhitespace,
    self.parser.constructs.disable.null.indexOf('codeIndented') > -1
      ? undefined
      : constants.tabSize + 1
  )
  function afterPrefix(code) {
    return markdownSpace(code) ||
      !prefixSize(self.events, types.listItemPrefixWhitespace)
      ? nok(code)
      : ok(code)
  }
}

View File

@@ -0,0 +1,21 @@
'use strict'
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
var partialBlankLine = {
tokenize: tokenizePartialBlankLine,
partial: true
}
// Partial: succeed when the rest of the line is only whitespace followed by
// a line ending or EOF (i.e. a blank line).
function tokenizePartialBlankLine(effects, ok, nok) {
  return factorySpace(effects, afterWhitespace, types.linePrefix)
  function afterWhitespace(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}
module.exports = partialBlankLine

View File

@@ -0,0 +1,18 @@
var partialBlankLine = {
tokenize: tokenizePartialBlankLine,
partial: true
}
export default partialBlankLine
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// Partial: succeed when the rest of the line is only whitespace followed by
// a line ending or EOF (i.e. a blank line).
function tokenizePartialBlankLine(effects, ok, nok) {
  return spaceFactory(effects, afterWhitespace, types.linePrefix)
  function afterWhitespace(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}

138
node_modules/micromark/lib/tokenize/setext-underline.js generated vendored Normal file
View File

@@ -0,0 +1,138 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
var shallow = require('../util/shallow.js')
var factorySpace = require('./factory-space.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var setextUnderline = {
name: 'setextUnderline',
tokenize: tokenizeSetextUnderline,
resolveTo: resolveToSetextUnderline
}
// Turn the preceding paragraph (inside content) plus the underline into a
// setext heading, taking care to keep content around any definitions.
function resolveToSetextUnderline(events, context) {
  var index = events.length
  var content
  var text
  var definition
  var heading
  // Find the opening of the content.
  // It'll always exist: we don't tokenize if it isn't there.
  while (index--) {
    if (events[index][0] === 'enter') {
      if (events[index][1].type === types.content) {
        content = index
        break
      }
      if (events[index][1].type === types.paragraph) {
        text = index
      }
    }
    // Exit
    else {
      if (events[index][1].type === types.content) {
        // Remove the content end (if needed we'll add it later)
        events.splice(index, 1)
      }
      if (!definition && events[index][1].type === types.definition) {
        definition = index
      }
    }
  }
  heading = {
    type: types.setextHeading,
    start: shallow(events[text][1].start),
    end: shallow(events[events.length - 1][1].end)
  }
  // Change the paragraph to setext heading text.
  events[text][1].type = types.setextHeadingText
  // If we have definitions in the content, we'll keep on having content,
  // but we need move it.
  if (definition) {
    events.splice(text, 0, ['enter', heading, context])
    events.splice(definition + 1, 0, ['exit', events[content][1], context])
    events[content][1].end = shallow(events[definition][1].end)
  } else {
    events[content][1] = heading
  }
  // Add the heading exit at the end.
  events.push(['exit', heading, context])
  return events
}
// Tokenize a setext heading underline: a run of `=` or `-` (one marker,
// repeated), optionally followed by trailing whitespace, ending the line.
// Only valid when directly following a paragraph and not lazy.
function tokenizeSetextUnderline(effects, ok, nok) {
  var self = this
  var index = self.events.length
  var marker
  var paragraph
  // Find an opening.
  while (index--) {
    // Skip enter/exit of line ending, line prefix, and content.
    // We can now either have a definition or a paragraph.
    if (
      self.events[index][1].type !== types.lineEnding &&
      self.events[index][1].type !== types.linePrefix &&
      self.events[index][1].type !== types.content
    ) {
      paragraph = self.events[index][1].type === types.paragraph
      break
    }
  }
  return start
  function start(code) {
    assert__default['default'](
      code === codes.dash || code === codes.equalsTo,
      'expected `=` or `-`'
    )
    if (!self.lazy && (self.interrupt || paragraph)) {
      effects.enter(types.setextHeadingLine)
      effects.enter(types.setextHeadingLineSequence)
      marker = code
      return closingSequence(code)
    }
    return nok(code)
  }
  // Consume the run of identical markers.
  function closingSequence(code) {
    if (code === marker) {
      effects.consume(code)
      return closingSequence
    }
    effects.exit(types.setextHeadingLineSequence)
    return factorySpace(effects, closingSequenceEnd, types.lineSuffix)(code)
  }
  // After optional trailing whitespace: must be EOL or EOF.
  function closingSequenceEnd(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.setextHeadingLine)
      return ok(code)
    }
    return nok(code)
  }
}
module.exports = setextUnderline

View File

@@ -0,0 +1,129 @@
var setextUnderline = {
name: 'setextUnderline',
tokenize: tokenizeSetextUnderline,
resolveTo: resolveToSetextUnderline
}
export default setextUnderline
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import types from '../constant/types.mjs'
import shallow from '../util/shallow.mjs'
import spaceFactory from './factory-space.mjs'
// Turn the preceding paragraph (inside content) plus the underline into a
// setext heading, taking care to keep content around any definitions.
function resolveToSetextUnderline(events, context) {
  var index = events.length
  var content
  var text
  var definition
  var heading
  // Find the opening of the content.
  // It'll always exist: we don't tokenize if it isn't there.
  while (index--) {
    if (events[index][0] === 'enter') {
      if (events[index][1].type === types.content) {
        content = index
        break
      }
      if (events[index][1].type === types.paragraph) {
        text = index
      }
    }
    // Exit
    else {
      if (events[index][1].type === types.content) {
        // Remove the content end (if needed we'll add it later)
        events.splice(index, 1)
      }
      if (!definition && events[index][1].type === types.definition) {
        definition = index
      }
    }
  }
  heading = {
    type: types.setextHeading,
    start: shallow(events[text][1].start),
    end: shallow(events[events.length - 1][1].end)
  }
  // Change the paragraph to setext heading text.
  events[text][1].type = types.setextHeadingText
  // If we have definitions in the content, we'll keep on having content,
  // but we need move it.
  if (definition) {
    events.splice(text, 0, ['enter', heading, context])
    events.splice(definition + 1, 0, ['exit', events[content][1], context])
    events[content][1].end = shallow(events[definition][1].end)
  } else {
    events[content][1] = heading
  }
  // Add the heading exit at the end.
  events.push(['exit', heading, context])
  return events
}
// Tokenize a setext heading underline: a run of `=` or `-` (one marker,
// repeated), optionally followed by trailing whitespace, ending the line.
// Only valid when directly following a paragraph and not lazy.
function tokenizeSetextUnderline(effects, ok, nok) {
  var self = this
  var index = self.events.length
  var marker
  var paragraph
  // Find an opening.
  while (index--) {
    // Skip enter/exit of line ending, line prefix, and content.
    // We can now either have a definition or a paragraph.
    if (
      self.events[index][1].type !== types.lineEnding &&
      self.events[index][1].type !== types.linePrefix &&
      self.events[index][1].type !== types.content
    ) {
      paragraph = self.events[index][1].type === types.paragraph
      break
    }
  }
  return start
  function start(code) {
    assert(
      code === codes.dash || code === codes.equalsTo,
      'expected `=` or `-`'
    )
    if (!self.lazy && (self.interrupt || paragraph)) {
      effects.enter(types.setextHeadingLine)
      effects.enter(types.setextHeadingLineSequence)
      marker = code
      return closingSequence(code)
    }
    return nok(code)
  }
  // Consume the run of identical markers.
  function closingSequence(code) {
    if (code === marker) {
      effects.consume(code)
      return closingSequence
    }
    effects.exit(types.setextHeadingLineSequence)
    return spaceFactory(effects, closingSequenceEnd, types.lineSuffix)(code)
  }
  // After optional trailing whitespace: must be EOL or EOF.
  function closingSequenceEnd(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.setextHeadingLine)
      return ok(code)
    }
    return nok(code)
  }
}

74
node_modules/micromark/lib/tokenize/thematic-break.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
'use strict'
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var markdownSpace = require('../character/markdown-space.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var factorySpace = require('./factory-space.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var thematicBreak = {
name: 'thematicBreak',
tokenize: tokenizeThematicBreak
}
// Tokenize a thematic break: at least `thematicBreakMarkerCountMin` of the
// same marker (`*`, `-`, or `_`), optionally interleaved with whitespace,
// ending the line.
function tokenizeThematicBreak(effects, ok, nok) {
  var size = 0
  var marker
  return start
  function start(code) {
    assert__default['default'](
      code === codes.asterisk ||
        code === codes.dash ||
        code === codes.underscore,
      'expected `*`, `-`, or `_`'
    )
    effects.enter(types.thematicBreak)
    marker = code
    return atBreak(code)
  }
  // Between sequences: more markers, whitespace, or the end of the line.
  function atBreak(code) {
    if (code === marker) {
      effects.enter(types.thematicBreakSequence)
      return sequence(code)
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, types.whitespace)(code)
    }
    // Fail unless enough markers were seen and the line is over.
    if (
      size < constants.thematicBreakMarkerCountMin ||
      (code !== codes.eof && !markdownLineEnding(code))
    ) {
      return nok(code)
    }
    effects.exit(types.thematicBreak)
    return ok(code)
  }
  // A run of identical markers; `size` counts markers across all runs.
  function sequence(code) {
    if (code === marker) {
      effects.consume(code)
      size++
      return sequence
    }
    effects.exit(types.thematicBreakSequence)
    return atBreak(code)
  }
}
module.exports = thematicBreak

65
node_modules/micromark/lib/tokenize/thematic-break.mjs generated vendored Normal file
View File

@@ -0,0 +1,65 @@
var thematicBreak = {
name: 'thematicBreak',
tokenize: tokenizeThematicBreak
}
export default thematicBreak
import assert from 'assert'
import codes from '../character/codes.mjs'
import markdownLineEnding from '../character/markdown-line-ending.mjs'
import markdownSpace from '../character/markdown-space.mjs'
import constants from '../constant/constants.mjs'
import types from '../constant/types.mjs'
import spaceFactory from './factory-space.mjs'
// Tokenize a thematic break: at least `thematicBreakMarkerCountMin` of the
// same marker (`*`, `-`, or `_`), optionally interleaved with whitespace,
// ending the line.
function tokenizeThematicBreak(effects, ok, nok) {
  var size = 0
  var marker
  return start
  function start(code) {
    assert(
      code === codes.asterisk ||
        code === codes.dash ||
        code === codes.underscore,
      'expected `*`, `-`, or `_`'
    )
    effects.enter(types.thematicBreak)
    marker = code
    return atBreak(code)
  }
  // Between sequences: more markers, whitespace, or the end of the line.
  function atBreak(code) {
    if (code === marker) {
      effects.enter(types.thematicBreakSequence)
      return sequence(code)
    }
    if (markdownSpace(code)) {
      return spaceFactory(effects, atBreak, types.whitespace)(code)
    }
    // Fail unless enough markers were seen and the line is over.
    if (
      size < constants.thematicBreakMarkerCountMin ||
      (code !== codes.eof && !markdownLineEnding(code))
    ) {
      return nok(code)
    }
    effects.exit(types.thematicBreak)
    return ok(code)
  }
  // A run of identical markers; `size` counts markers across all runs.
  function sequence(code) {
    if (code === marker) {
      effects.consume(code)
      size++
      return sequence
    }
    effects.exit(types.thematicBreakSequence)
    return atBreak(code)
  }
}