'use strict'

var assert = require('assert')
var codes = require('../character/codes.js')
var assign = require('../constant/assign.js')
var types = require('../constant/types.js')
var chunkedSplice = require('./chunked-splice.js')
var shallow = require('./shallow.js')

function _interopDefaultLegacy(e) {
  return e && typeof e === 'object' && 'default' in e ? e : {default: e}
}

var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)

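// Expand the events of every token that carries a `contentType` by running
// its content through the matching tokenizer and splicing the result back
// into `events` (see `subcontent` below). Returns `true` when nothing was
// left to expand, i.e. when no further passes over `events` are needed.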
function subtokenize(events) {
  var jumps = {}
  var index = -1
  var event
  var lineIndex
  var otherIndex
  var otherEvent
  var parameters
  var subevents
  var more

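  // `jumps` maps the start index of each span of events spliced in by
  // `subcontent` to the index of that span's last event, so the loop below
  // can skip over events that were already subtokenized.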
  while (++index < events.length) {
    while (index in jumps) {
      index = jumps[index]
    }

    event = events[index]

    // Add a hook for the GFM tasklist extension, which needs to know if text
    // is in the first content of a list item.
    if (
      index &&
      event[1].type === types.chunkFlow &&
      events[index - 1][1].type === types.listItemPrefix
    ) {
      subevents = event[1]._tokenizer.events
      otherIndex = 0

      if (
        otherIndex < subevents.length &&
        subevents[otherIndex][1].type === types.lineEndingBlank
      ) {
        otherIndex += 2
      }

      if (
        otherIndex < subevents.length &&
        subevents[otherIndex][1].type === types.content
      ) {
        while (++otherIndex < subevents.length) {
          if (subevents[otherIndex][1].type === types.content) {
            break
          }

          if (subevents[otherIndex][1].type === types.chunkText) {
            subevents[otherIndex][1].isInFirstContentOfListItem = true
            otherIndex++
          }
        }
      }
    }

    // Enter.
    if (event[0] === 'enter') {
      if (event[1].contentType) {
        assign(jumps, subcontent(events, index))
        index = jumps[index]
        more = true
      }
    }
    // Exit.
    else if (event[1]._container || event[1]._movePreviousLineEndings) {
      otherIndex = index
      lineIndex = undefined

      while (otherIndex--) {
        otherEvent = events[otherIndex]

        if (
          otherEvent[1].type === types.lineEnding ||
          otherEvent[1].type === types.lineEndingBlank
        ) {
          if (otherEvent[0] === 'enter') {
            if (lineIndex) {
              events[lineIndex][1].type = types.lineEndingBlank
            }

            otherEvent[1].type = types.lineEnding
            lineIndex = otherIndex
          }
        } else {
          break
        }
      }

      if (lineIndex) {
        // Fix position.
        event[1].end = shallow(events[lineIndex][1].start)

        // Switch container exit w/ line endings.
        parameters = events.slice(lineIndex, index)
        parameters.unshift(event)
        chunkedSplice(events, lineIndex, index - lineIndex + 1, parameters)
      }
    }
  }

  return !more
}

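// Tokenize the linked chain of content chunks starting at the token of
// `events[eventIndex]`, then splice the resulting child events into `events`
// in place of each chunk's enter/exit pair. Returns a map from the start
// index of each spliced-in span to its end index (used as `jumps` above).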
function subcontent(events, eventIndex) {
  var token = events[eventIndex][1]
  var context = events[eventIndex][2]
  var startPosition = eventIndex - 1
  var startPositions = []
  var tokenizer =
    token._tokenizer || context.parser[token.contentType](token.start)
  var childEvents = tokenizer.events
  var jumps = []
  var gaps = {}
  var stream
  var previous
  var index
  var entered
  var end
  var adjust

  // Loop forward through the linked tokens to pass them in order to the
  // subtokenizer.
  while (token) {
    // Find the position of the event for this token.
    while (events[++startPosition][1] !== token) {
      // Empty.
    }

    startPositions.push(startPosition)

    if (!token._tokenizer) {
      stream = context.sliceStream(token)

      if (!token.next) {
        stream.push(codes.eof)
      }

      if (previous) {
        tokenizer.defineSkip(token.start)
      }

      if (token.isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = true
      }

      tokenizer.write(stream)

      if (token.isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = undefined
      }
    }

    // Unravel the next token.
    previous = token
    token = token.next
  }

  // Now, loop back through all events (and linked tokens), to figure out which
  // parts belong where.
  token = previous
  index = childEvents.length

  while (index--) {
    // Make sure we’ve at least seen something (final eol is part of the last
    // token).
    if (childEvents[index][0] === 'enter') {
      entered = true
    } else if (
      // Find a void token that includes a break.
      entered &&
      childEvents[index][1].type === childEvents[index - 1][1].type &&
      childEvents[index][1].start.line !== childEvents[index][1].end.line
    ) {
      add(childEvents.slice(index + 1, end))
      assert__default['default'](token.previous, 'expected a previous token')
      // Help GC.
      token._tokenizer = token.next = undefined
      token = token.previous
      end = index + 1
    }
  }

  assert__default['default'](!token.previous, 'expected no previous token')
  // Help GC.
  tokenizer.events = token._tokenizer = token.next = undefined

  // Do head:
  add(childEvents.slice(0, end))

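  // Translate the recorded spans into `gaps`: each splice above replaced two
  // events (a chunk's enter/exit) with `slice.length` events, so every later
  // span is shifted by the accumulated growth tracked in `adjust`.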
  index = -1
  adjust = 0

  while (++index < jumps.length) {
    gaps[adjust + jumps[index][0]] = adjust + jumps[index][1]
    adjust += jumps[index][1] - jumps[index][0] - 1
  }

  return gaps

  function add(slice) {
    var start = startPositions.pop()
    jumps.unshift([start, start + slice.length - 1])
    chunkedSplice(events, start, 2, slice)
  }
}

module.exports = subtokenize
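
// A minimal usage sketch, not part of this module: drivers are assumed to
// call `subtokenize` repeatedly until it reports that no embedded content is
// left to expand. `tokenizerEvents` below is a hypothetical name for the
// event list produced by a document tokenizer.
//
//   var events = tokenizerEvents
//   while (!subtokenize(events)) {
//     // Keep going until every `contentType` token has been expanded.
//   }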