This commit is contained in:
99
node_modules/micromark/dist/tokenize/content.js
generated
vendored
Normal file
99
node_modules/micromark/dist/tokenize/content.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
'use strict'
|
||||
|
||||
var markdownLineEnding = require('../character/markdown-line-ending.js')
|
||||
var prefixSize = require('../util/prefix-size.js')
|
||||
var subtokenize = require('../util/subtokenize.js')
|
||||
var factorySpace = require('./factory-space.js')
|
||||
|
||||
// The `content` construct: deliberately has no `name` because it must not
// be turned off (it is always available).
var content = {
  tokenize: tokenizeContent,
  resolve: resolveContent,
  // NOTE(review): presumably these flags allow other constructs to
  // interrupt content and allow lazy continuation lines — confirm against
  // micromark's construct documentation.
  interruptible: true,
  lazy: true
}
|
||||
// Helper construct used only via `effects.check` to look past a line
// ending; marked `partial` (a lookahead-only construct — see micromark's
// construct docs).
var continuationConstruct = {
  tokenize: tokenizeContinuation,
  partial: true
} // Content is transparent: it’s parsed right now. That way, definitions are also
// parsed right now: before text in paragraphs (specifically, media) are parsed.
|
||||
|
||||
// Resolver for `content`: expands the `chunkContent` tokens (each carries
// `contentType: 'content'`) in place via `subtokenize`, then hands the
// event list back.
function resolveContent(events) {
  var resolved = events
  subtokenize(resolved)
  return resolved
}
|
||||
|
||||
// Tokenizer for `content`: wraps one or more lines in a single `content`
// token, emitting one `chunkContent` token per line.  Each chunk carries
// `contentType: 'content'` so that `resolveContent`/`subtokenize` can
// expand it later.
function tokenizeContent(effects, ok) {
  // Most recently opened `chunkContent` token; chunks are linked through
  // `previous`/`next` so the subtokenizer can walk them in order.
  var previous

  return start

  // Open the outer `content` token and the first chunk, then start
  // consuming data.
  function start(code) {
    effects.enter('content')
    previous = effects.enter('chunkContent', {
      contentType: 'content'
    })
    return data(code)
  }

  // Consume arbitrary data until EOF or a line ending.
  function data(code) {
    // EOF: the content ends here.
    if (code === null) {
      return contentEnd(code)
    }

    // At a line ending: use the partial `continuationConstruct` to look
    // ahead and decide whether the next line continues this content
    // (`contentContinue`) or ends it (`contentEnd`).
    if (markdownLineEnding(code)) {
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd
      )(code)
    } // Data.

    effects.consume(code)
    return data
  }

  // Close the current chunk and the outer `content` token.
  function contentEnd(code) {
    effects.exit('chunkContent')
    effects.exit('content')
    return ok(code)
  }

  // The next line continues the content: consume the line ending, close
  // the current chunk, and open a new chunk linked to the previous one.
  function contentContinue(code) {
    effects.consume(code)
    effects.exit('chunkContent')
    previous = previous.next = effects.enter('chunkContent', {
      contentType: 'content',
      previous: previous
    })
    return data
  }
}
|
||||
|
||||
// Lookahead tokenizer run at a line ending inside content: succeeds
// (`ok`) when the next line continues the content, fails (`nok`) when it
// ends it.
function tokenizeContinuation(effects, ok, nok) {
  var self = this

  return startLookahead

  // Consume the line ending, then any leading whitespace (`linePrefix`)
  // of the next line.
  function startLookahead(code) {
    effects.enter('lineEnding')
    effects.consume(code)
    effects.exit('lineEnding')
    return factorySpace(effects, prefixed, 'linePrefix')
  }

  // Past the line prefix: decide whether this line continues the content.
  function prefixed(code) {
    // EOF or another line ending means a blank line: the content cannot
    // continue.
    if (code === null || markdownLineEnding(code)) {
      return nok(code)
    }

    // If indented code is disabled, or the prefix is less than 4
    // characters (so the line could start a new flow construct), check
    // whether some flow construct interrupts the content here.  Note the
    // deliberately swapped continuations: a successful interrupt means
    // the content does NOT continue (`nok`); a failed one means it does
    // (`ok`).
    if (
      self.parser.constructs.disable.null.indexOf('codeIndented') > -1 ||
      prefixSize(self.events, 'linePrefix') < 4
    ) {
      return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
    }

    // Prefix of 4+ (with indented code enabled): treated as a
    // continuation line of this content.
    return ok(code)
  }
}
|
||||
|
||||
// Expose the content construct (CommonJS).
module.exports = content
|
||||
Reference in New Issue
Block a user