mirror of https://github.com/DanielnetoDotCom/YouPHPTube synced 2025-10-03 09:49:28 +02:00
DanieL 2023-02-13 14:41:08 -03:00
parent 64c36d9f4e
commit 0d0338876d
1197 changed files with 121461 additions and 179724 deletions


@ -116,8 +116,8 @@ class NodeType {
/// Define a node type.
static define(spec) {
let props = spec.props && spec.props.length ? Object.create(null) : noProps;
let flags = (spec.top ? 1 /* Top */ : 0) | (spec.skipped ? 2 /* Skipped */ : 0) |
(spec.error ? 4 /* Error */ : 0) | (spec.name == null ? 8 /* Anonymous */ : 0);
let flags = (spec.top ? 1 /* NodeFlag.Top */ : 0) | (spec.skipped ? 2 /* NodeFlag.Skipped */ : 0) |
(spec.error ? 4 /* NodeFlag.Error */ : 0) | (spec.name == null ? 8 /* NodeFlag.Anonymous */ : 0);
let type = new NodeType(spec.name || "", props, spec.id, flags);
if (spec.props)
for (let src of spec.props) {
@ -135,14 +135,14 @@ class NodeType {
/// the prop isn't present on this node.
prop(prop) { return this.props[prop.id]; }
/// True when this is the top node of a grammar.
get isTop() { return (this.flags & 1 /* Top */) > 0; }
get isTop() { return (this.flags & 1 /* NodeFlag.Top */) > 0; }
/// True when this node is produced by a skip rule.
get isSkipped() { return (this.flags & 2 /* Skipped */) > 0; }
get isSkipped() { return (this.flags & 2 /* NodeFlag.Skipped */) > 0; }
/// Indicates whether this is an error node.
get isError() { return (this.flags & 4 /* Error */) > 0; }
get isError() { return (this.flags & 4 /* NodeFlag.Error */) > 0; }
/// When true, this node type doesn't correspond to a user-declared
/// named node, for example because it is used to cache repetition.
get isAnonymous() { return (this.flags & 8 /* Anonymous */) > 0; }
get isAnonymous() { return (this.flags & 8 /* NodeFlag.Anonymous */) > 0; }
/// Returns true when this node's name or one of its
/// [groups](#common.NodeProp^group) matches the given string.
is(name) {
@ -175,7 +175,7 @@ class NodeType {
}
}
/// An empty dummy node type to use when no actual type is available.
NodeType.none = new NodeType("", Object.create(null), 0, 8 /* Anonymous */);
NodeType.none = new NodeType("", Object.create(null), 0, 8 /* NodeFlag.Anonymous */);
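For reference, a minimal sketch of defining a node type directly (the id and name are invented for illustration; real parsers normally generate these):

const Expression = NodeType.define({ id: 1, name: "Expression", top: false });
console.log(Expression.is("Expression")); // true
console.log(Expression.isTop, Expression.isError); // false false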
/// A node set holds a collection of node types. It is used to
/// compactly represent trees by storing their type ids, rather than a
/// full pointer to the type object, in a numeric array. Each parser
@ -321,6 +321,10 @@ class Tree {
/// position. If 1, it'll move into nodes that start at the
/// position. With 0, it'll only enter nodes that cover the position
/// from both sides.
///
/// Note that this will not enter
/// [overlays](#common.MountedTree.overlay), and you often want
/// [`resolveInner`](#common.Tree.resolveInner) instead.
resolve(pos, side = 0) {
let node = resolveNode(CachedNode.get(this) || this.topNode, pos, side, false);
CachedNode.set(this, node);
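To illustrate the note above, a hedged usage sketch (`tree` and `pos` are assumed to be an existing Tree and a document position, not part of this diff):

let outer = tree.resolve(pos);      // stays in the host tree, skips overlays
let inner = tree.resolveInner(pos); // may descend into a mounted overlay tree
console.log(outer.name, inner.name);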
@ -380,7 +384,7 @@ class Tree {
/// which may have children grouped into subtrees with type
/// [`NodeType.none`](#common.NodeType^none).
balance(config = {}) {
return this.children.length <= 8 /* BranchFactor */ ? this :
return this.children.length <= 8 /* Balance.BranchFactor */ ? this :
balanceRange(NodeType.none, this.children, this.positions, 0, this.children.length, 0, this.length, (children, positions, length) => new Tree(this.type, children, positions, length, this.propValues), config.makeTree || ((children, positions, length) => new Tree(NodeType.none, children, positions, length)));
}
/// Build a tree from a postfix-ordered buffer of node information,
@ -459,26 +463,27 @@ class TreeBuffer {
return pick;
}
/// @internal
slice(startI, endI, from, to) {
slice(startI, endI, from) {
let b = this.buffer;
let copy = new Uint16Array(endI - startI);
let copy = new Uint16Array(endI - startI), len = 0;
for (let i = startI, j = 0; i < endI;) {
copy[j++] = b[i++];
copy[j++] = b[i++] - from;
copy[j++] = b[i++] - from;
let to = copy[j++] = b[i++] - from;
copy[j++] = b[i++] - startI;
len = Math.max(len, to);
}
return new TreeBuffer(copy, to - from, this.set);
return new TreeBuffer(copy, len, this.set);
}
}
function checkSide(side, pos, from, to) {
switch (side) {
case -2 /* Before */: return from < pos;
case -1 /* AtOrBefore */: return to >= pos && from < pos;
case 0 /* Around */: return from < pos && to > pos;
case 1 /* AtOrAfter */: return from <= pos && to > pos;
case 2 /* After */: return to > pos;
case 4 /* DontCare */: return true;
case -2 /* Side.Before */: return from < pos;
case -1 /* Side.AtOrBefore */: return to >= pos && from < pos;
case 0 /* Side.Around */: return from < pos && to > pos;
case 1 /* Side.AtOrAfter */: return from <= pos && to > pos;
case 2 /* Side.After */: return to > pos;
case 4 /* Side.DontCare */: return true;
}
}
function enterUnfinishedNodesBefore(node, pos) {
@ -568,10 +573,10 @@ class TreeNode {
return null;
}
}
get firstChild() { return this.nextChild(0, 1, 0, 4 /* DontCare */); }
get lastChild() { return this.nextChild(this._tree.children.length - 1, -1, 0, 4 /* DontCare */); }
childAfter(pos) { return this.nextChild(0, 1, pos, 2 /* After */); }
childBefore(pos) { return this.nextChild(this._tree.children.length - 1, -1, pos, -2 /* Before */); }
get firstChild() { return this.nextChild(0, 1, 0, 4 /* Side.DontCare */); }
get lastChild() { return this.nextChild(this._tree.children.length - 1, -1, 0, 4 /* Side.DontCare */); }
childAfter(pos) { return this.nextChild(0, 1, pos, 2 /* Side.After */); }
childBefore(pos) { return this.nextChild(this._tree.children.length - 1, -1, pos, -2 /* Side.Before */); }
enter(pos, side, mode = 0) {
let mounted;
if (!(mode & exports.IterMode.IgnoreOverlays) && (mounted = this._tree.prop(NodeProp.mounted)) && mounted.overlay) {
@ -594,10 +599,10 @@ class TreeNode {
return this._parent ? this._parent.nextSignificantParent() : null;
}
get nextSibling() {
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index + 1, 1, 0, 4 /* DontCare */) : null;
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index + 1, 1, 0, 4 /* Side.DontCare */) : null;
}
get prevSibling() {
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index - 1, -1, 0, 4 /* DontCare */) : null;
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index - 1, -1, 0, 4 /* Side.DontCare */) : null;
}
cursor(mode = 0) { return new TreeCursor(this, mode); }
get tree() { return this._tree; }
@ -659,24 +664,24 @@ class BufferContext {
}
}
class BufferNode {
get name() { return this.type.name; }
get from() { return this.context.start + this.context.buffer.buffer[this.index + 1]; }
get to() { return this.context.start + this.context.buffer.buffer[this.index + 2]; }
constructor(context, _parent, index) {
this.context = context;
this._parent = _parent;
this.index = index;
this.type = context.buffer.set.types[context.buffer.buffer[index]];
}
get name() { return this.type.name; }
get from() { return this.context.start + this.context.buffer.buffer[this.index + 1]; }
get to() { return this.context.start + this.context.buffer.buffer[this.index + 2]; }
child(dir, pos, side) {
let { buffer } = this.context;
let index = buffer.findChild(this.index + 4, buffer.buffer[this.index + 3], dir, pos - this.context.start, side);
return index < 0 ? null : new BufferNode(this.context, this, index);
}
get firstChild() { return this.child(1, 0, 4 /* DontCare */); }
get lastChild() { return this.child(-1, 0, 4 /* DontCare */); }
childAfter(pos) { return this.child(1, pos, 2 /* After */); }
childBefore(pos) { return this.child(-1, pos, -2 /* Before */); }
get firstChild() { return this.child(1, 0, 4 /* Side.DontCare */); }
get lastChild() { return this.child(-1, 0, 4 /* Side.DontCare */); }
childAfter(pos) { return this.child(1, pos, 2 /* Side.After */); }
childBefore(pos) { return this.child(-1, pos, -2 /* Side.Before */); }
enter(pos, side, mode = 0) {
if (mode & exports.IterMode.ExcludeBuffers)
return null;
@ -688,7 +693,7 @@ class BufferNode {
return this._parent || this.context.parent.nextSignificantParent();
}
externalSibling(dir) {
return this._parent ? null : this.context.parent.nextChild(this.context.index + dir, dir, 0, 4 /* DontCare */);
return this._parent ? null : this.context.parent.nextChild(this.context.index + dir, dir, 0, 4 /* Side.DontCare */);
}
get nextSibling() {
let { buffer } = this.context;
@ -702,7 +707,7 @@ class BufferNode {
let parentStart = this._parent ? this._parent.index + 4 : 0;
if (this.index == parentStart)
return this.externalSibling(-1);
return new BufferNode(this.context, this._parent, buffer.findChild(parentStart, this.index, -1, 0, 4 /* DontCare */));
return new BufferNode(this.context, this._parent, buffer.findChild(parentStart, this.index, -1, 0, 4 /* Side.DontCare */));
}
cursor(mode = 0) { return new TreeCursor(this, mode); }
get tree() { return null; }
@ -711,8 +716,8 @@ class BufferNode {
let { buffer } = this.context;
let startI = this.index + 4, endI = buffer.buffer[this.index + 3];
if (endI > startI) {
let from = buffer.buffer[this.index + 1], to = buffer.buffer[this.index + 2];
children.push(buffer.slice(startI, endI, from, to));
let from = buffer.buffer[this.index + 1];
children.push(buffer.slice(startI, endI, from));
positions.push(0);
}
return new Tree(this.type, children, positions, this.to - this.from);
@ -739,6 +744,8 @@ class BufferNode {
/// A tree cursor object focuses on a given node in a syntax tree, and
/// allows you to move to adjacent nodes.
class TreeCursor {
/// Shorthand for `.type.name`.
get name() { return this.type.name; }
/// @internal
constructor(node,
/// @internal
@ -762,8 +769,6 @@ class TreeCursor {
this.yieldBuf(node.index);
}
}
/// Shorthand for `.type.name`.
get name() { return this.type.name; }
yieldNode(node) {
if (!node)
return false;
@ -808,13 +813,13 @@ class TreeCursor {
}
/// Move the cursor to this node's first child. When this returns
/// false, the node has no child, and the cursor has not been moved.
firstChild() { return this.enterChild(1, 0, 4 /* DontCare */); }
firstChild() { return this.enterChild(1, 0, 4 /* Side.DontCare */); }
/// Move the cursor to this node's last child.
lastChild() { return this.enterChild(-1, 0, 4 /* DontCare */); }
lastChild() { return this.enterChild(-1, 0, 4 /* Side.DontCare */); }
/// Move the cursor to the first child that ends after `pos`.
childAfter(pos) { return this.enterChild(1, pos, 2 /* After */); }
childAfter(pos) { return this.enterChild(1, pos, 2 /* Side.After */); }
/// Move to the last child that starts before `pos`.
childBefore(pos) { return this.enterChild(-1, pos, -2 /* Before */); }
childBefore(pos) { return this.enterChild(-1, pos, -2 /* Side.Before */); }
/// Move the cursor to the child around `pos`. If side is -1 the
/// child may end at that position, when 1 it may start there. This
/// will also enter [overlaid](#common.MountedTree.overlay)
@ -840,19 +845,19 @@ class TreeCursor {
if (!this.buffer)
return !this._tree._parent ? false
: this.yield(this._tree.index < 0 ? null
: this._tree._parent.nextChild(this._tree.index + dir, dir, 0, 4 /* DontCare */, this.mode));
: this._tree._parent.nextChild(this._tree.index + dir, dir, 0, 4 /* Side.DontCare */, this.mode));
let { buffer } = this.buffer, d = this.stack.length - 1;
if (dir < 0) {
let parentStart = d < 0 ? 0 : this.stack[d] + 4;
if (this.index != parentStart)
return this.yieldBuf(buffer.findChild(parentStart, this.index, -1, 0, 4 /* DontCare */));
return this.yieldBuf(buffer.findChild(parentStart, this.index, -1, 0, 4 /* Side.DontCare */));
}
else {
let after = buffer.buffer[this.index + 3];
if (after < (d < 0 ? buffer.buffer.length : buffer.buffer[this.stack[d] + 3]))
return this.yieldBuf(after);
}
return d < 0 ? this.yield(this.buffer.parent.nextChild(this.buffer.index + dir, dir, 0, 4 /* DontCare */, this.mode)) : false;
return d < 0 ? this.yield(this.buffer.parent.nextChild(this.buffer.index + dir, dir, 0, 4 /* Side.DontCare */, this.mode)) : false;
}
/// Move to this node's next sibling, if any.
nextSibling() { return this.sibling(1); }
@ -889,7 +894,7 @@ class TreeCursor {
return true;
}
move(dir, enter) {
if (enter && this.enterChild(dir, 0, 4 /* DontCare */))
if (enter && this.enterChild(dir, 0, 4 /* Side.DontCare */))
return true;
for (;;) {
if (this.sibling(dir))
@ -899,7 +904,7 @@ class TreeCursor {
}
}
/// Move to the next node in a
/// [pre-order](https://en.wikipedia.org/wiki/Tree_traversal#Pre-order_(NLR))
/// [pre-order](https://en.wikipedia.org/wiki/Tree_traversal#Pre-order,_NLR)
/// traversal, going from a node to its first child or, if the
/// current node is empty or `enter` is false, its next sibling or
/// the next sibling of the first parent node that has one.
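The pre-order traversal described above is typically driven like this (illustrative sketch only; `tree` is an assumed parsed Tree):

let cursor = tree.cursor();
do {
  // cursor.name is the shorthand for cursor.type.name shown earlier in this diff
  console.log(cursor.name, cursor.from, cursor.to);
} while (cursor.next()); // next() returns false after the last node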
@ -1015,17 +1020,17 @@ function buildTree(data) {
let lookAheadAtStart = lookAhead;
while (size < 0) {
cursor.next();
if (size == -1 /* Reuse */) {
if (size == -1 /* SpecialRecord.Reuse */) {
let node = reused[id];
children.push(node);
positions.push(start - parentStart);
return;
}
else if (size == -3 /* ContextChange */) { // Context change
else if (size == -3 /* SpecialRecord.ContextChange */) { // Context change
contextHash = id;
return;
}
else if (size == -4 /* LookAhead */) {
else if (size == -4 /* SpecialRecord.LookAhead */) {
lookAhead = id;
return;
}
@ -1142,7 +1147,7 @@ function buildTree(data) {
fork.next();
while (fork.pos > startPos) {
if (fork.size < 0) {
if (fork.size == -3 /* ContextChange */)
if (fork.size == -3 /* SpecialRecord.ContextChange */)
localSkipped += 4;
else
break scan;
@ -1178,10 +1183,10 @@ function buildTree(data) {
buffer[--index] = start - bufferStart;
buffer[--index] = id;
}
else if (size == -3 /* ContextChange */) {
else if (size == -3 /* SpecialRecord.ContextChange */) {
contextHash = id;
}
else if (size == -4 /* LookAhead */) {
else if (size == -4 /* SpecialRecord.LookAhead */) {
lookAhead = id;
}
return index;
@ -1228,7 +1233,7 @@ mkTree) {
let total = 0;
for (let i = from; i < to; i++)
total += nodeSize(balanceType, children[i]);
let maxChild = Math.ceil((total * 1.5) / 8 /* BranchFactor */);
let maxChild = Math.ceil((total * 1.5) / 8 /* Balance.BranchFactor */);
let localChildren = [], localPositions = [];
function divide(children, positions, from, to, offset) {
for (let i = from; i < to;) {
@ -1331,16 +1336,16 @@ class TreeFragment {
this.to = to;
this.tree = tree;
this.offset = offset;
this.open = (openStart ? 1 /* Start */ : 0) | (openEnd ? 2 /* End */ : 0);
this.open = (openStart ? 1 /* Open.Start */ : 0) | (openEnd ? 2 /* Open.End */ : 0);
}
/// Whether the start of the fragment represents the start of a
/// parse, or the end of a change. (In the second case, it may not
/// be safe to reuse some nodes at the start, depending on the
/// parsing algorithm.)
get openStart() { return (this.open & 1 /* Start */) > 0; }
get openStart() { return (this.open & 1 /* Open.Start */) > 0; }
/// Whether the end of the fragment represents the end of a
/// full-document parse, or the start of a change.
get openEnd() { return (this.open & 2 /* End */) > 0; }
get openEnd() { return (this.open & 2 /* Open.End */) > 0; }
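For context, a minimal sketch of how fragments are usually produced and carried across edits (the change values are assumptions, not from this commit):

let fragments = TreeFragment.addTree(tree); // reusable pieces of a finished parse
// Map the fragments through a document change before the next incremental parse.
fragments = TreeFragment.applyChanges(fragments, [{ fromA: 10, toA: 12, fromB: 10, toB: 15 }]);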
/// Create a set of fragments from a freshly parsed tree, or update
/// an existing set of fragments by replacing the ones that overlap
/// with a tree with content from the new tree. When `partial` is
@ -1534,7 +1539,7 @@ class MixedParse {
enter = false;
}
else if (covered && (isCovered = checkCover(covered.ranges, cursor.from, cursor.to))) {
enter = isCovered != 2 /* Full */;
enter = isCovered != 2 /* Cover.Full */;
}
else if (!cursor.type.isAnonymous && cursor.from < cursor.to && (nest = this.nest(cursor, this.input))) {
if (!cursor.tree)
@ -1589,16 +1594,16 @@ function checkCover(covered, from, to) {
if (range.from >= to)
break;
if (range.to > from)
return range.from <= from && range.to >= to ? 2 /* Full */ : 1 /* Partial */;
return range.from <= from && range.to >= to ? 2 /* Cover.Full */ : 1 /* Cover.Partial */;
}
return 0 /* None */;
return 0 /* Cover.None */;
}
// Take a piece of buffer and convert it into a stand-alone
// TreeBuffer.
function sliceBuf(buf, startI, endI, nodes, positions, off) {
if (startI < endI) {
let from = buf.buffer[startI + 1], to = buf.buffer[endI - 2];
nodes.push(buf.slice(startI, endI, from, to));
let from = buf.buffer[startI + 1];
nodes.push(buf.slice(startI, endI, from));
positions.push(from - off);
}
}
@ -1803,14 +1808,14 @@ function enterFragments(mounts, ranges) {
for (let i = 0, pos = from;; i++) {
let last = i == changes.length, end = last ? to : changes[i].from;
if (end > pos)
result.push(new TreeFragment(pos, end, mount.tree, -startPos, frag.from >= pos, frag.to <= end));
result.push(new TreeFragment(pos, end, mount.tree, -startPos, frag.from >= pos || frag.openStart, frag.to <= end || frag.openEnd));
if (last)
break;
pos = changes[i].to;
}
}
else {
result.push(new TreeFragment(from, to, mount.tree, -startPos, frag.from >= startPos, frag.to <= endPos));
result.push(new TreeFragment(from, to, mount.tree, -startPos, frag.from >= startPos || frag.openStart, frag.to <= endPos || frag.openEnd));
}
}
return result;

File diff suppressed because it is too large


@ -112,8 +112,8 @@ class NodeType {
/// Define a node type.
static define(spec) {
let props = spec.props && spec.props.length ? Object.create(null) : noProps;
let flags = (spec.top ? 1 /* Top */ : 0) | (spec.skipped ? 2 /* Skipped */ : 0) |
(spec.error ? 4 /* Error */ : 0) | (spec.name == null ? 8 /* Anonymous */ : 0);
let flags = (spec.top ? 1 /* NodeFlag.Top */ : 0) | (spec.skipped ? 2 /* NodeFlag.Skipped */ : 0) |
(spec.error ? 4 /* NodeFlag.Error */ : 0) | (spec.name == null ? 8 /* NodeFlag.Anonymous */ : 0);
let type = new NodeType(spec.name || "", props, spec.id, flags);
if (spec.props)
for (let src of spec.props) {
@ -131,14 +131,14 @@ class NodeType {
/// the prop isn't present on this node.
prop(prop) { return this.props[prop.id]; }
/// True when this is the top node of a grammar.
get isTop() { return (this.flags & 1 /* Top */) > 0; }
get isTop() { return (this.flags & 1 /* NodeFlag.Top */) > 0; }
/// True when this node is produced by a skip rule.
get isSkipped() { return (this.flags & 2 /* Skipped */) > 0; }
get isSkipped() { return (this.flags & 2 /* NodeFlag.Skipped */) > 0; }
/// Indicates whether this is an error node.
get isError() { return (this.flags & 4 /* Error */) > 0; }
get isError() { return (this.flags & 4 /* NodeFlag.Error */) > 0; }
/// When true, this node type doesn't correspond to a user-declared
/// named node, for example because it is used to cache repetition.
get isAnonymous() { return (this.flags & 8 /* Anonymous */) > 0; }
get isAnonymous() { return (this.flags & 8 /* NodeFlag.Anonymous */) > 0; }
/// Returns true when this node's name or one of its
/// [groups](#common.NodeProp^group) matches the given string.
is(name) {
@ -171,7 +171,7 @@ class NodeType {
}
}
/// An empty dummy node type to use when no actual type is available.
NodeType.none = new NodeType("", Object.create(null), 0, 8 /* Anonymous */);
NodeType.none = new NodeType("", Object.create(null), 0, 8 /* NodeFlag.Anonymous */);
/// A node set holds a collection of node types. It is used to
/// compactly represent trees by storing their type ids, rather than a
/// full pointer to the type object, in a numeric array. Each parser
@ -317,6 +317,10 @@ class Tree {
/// position. If 1, it'll move into nodes that start at the
/// position. With 0, it'll only enter nodes that cover the position
/// from both sides.
///
/// Note that this will not enter
/// [overlays](#common.MountedTree.overlay), and you often want
/// [`resolveInner`](#common.Tree.resolveInner) instead.
resolve(pos, side = 0) {
let node = resolveNode(CachedNode.get(this) || this.topNode, pos, side, false);
CachedNode.set(this, node);
@ -376,7 +380,7 @@ class Tree {
/// which may have children grouped into subtrees with type
/// [`NodeType.none`](#common.NodeType^none).
balance(config = {}) {
return this.children.length <= 8 /* BranchFactor */ ? this :
return this.children.length <= 8 /* Balance.BranchFactor */ ? this :
balanceRange(NodeType.none, this.children, this.positions, 0, this.children.length, 0, this.length, (children, positions, length) => new Tree(this.type, children, positions, length, this.propValues), config.makeTree || ((children, positions, length) => new Tree(NodeType.none, children, positions, length)));
}
/// Build a tree from a postfix-ordered buffer of node information,
@ -455,26 +459,27 @@ class TreeBuffer {
return pick;
}
/// @internal
slice(startI, endI, from, to) {
slice(startI, endI, from) {
let b = this.buffer;
let copy = new Uint16Array(endI - startI);
let copy = new Uint16Array(endI - startI), len = 0;
for (let i = startI, j = 0; i < endI;) {
copy[j++] = b[i++];
copy[j++] = b[i++] - from;
copy[j++] = b[i++] - from;
let to = copy[j++] = b[i++] - from;
copy[j++] = b[i++] - startI;
len = Math.max(len, to);
}
return new TreeBuffer(copy, to - from, this.set);
return new TreeBuffer(copy, len, this.set);
}
}
function checkSide(side, pos, from, to) {
switch (side) {
case -2 /* Before */: return from < pos;
case -1 /* AtOrBefore */: return to >= pos && from < pos;
case 0 /* Around */: return from < pos && to > pos;
case 1 /* AtOrAfter */: return from <= pos && to > pos;
case 2 /* After */: return to > pos;
case 4 /* DontCare */: return true;
case -2 /* Side.Before */: return from < pos;
case -1 /* Side.AtOrBefore */: return to >= pos && from < pos;
case 0 /* Side.Around */: return from < pos && to > pos;
case 1 /* Side.AtOrAfter */: return from <= pos && to > pos;
case 2 /* Side.After */: return to > pos;
case 4 /* Side.DontCare */: return true;
}
}
function enterUnfinishedNodesBefore(node, pos) {
@ -564,10 +569,10 @@ class TreeNode {
return null;
}
}
get firstChild() { return this.nextChild(0, 1, 0, 4 /* DontCare */); }
get lastChild() { return this.nextChild(this._tree.children.length - 1, -1, 0, 4 /* DontCare */); }
childAfter(pos) { return this.nextChild(0, 1, pos, 2 /* After */); }
childBefore(pos) { return this.nextChild(this._tree.children.length - 1, -1, pos, -2 /* Before */); }
get firstChild() { return this.nextChild(0, 1, 0, 4 /* Side.DontCare */); }
get lastChild() { return this.nextChild(this._tree.children.length - 1, -1, 0, 4 /* Side.DontCare */); }
childAfter(pos) { return this.nextChild(0, 1, pos, 2 /* Side.After */); }
childBefore(pos) { return this.nextChild(this._tree.children.length - 1, -1, pos, -2 /* Side.Before */); }
enter(pos, side, mode = 0) {
let mounted;
if (!(mode & IterMode.IgnoreOverlays) && (mounted = this._tree.prop(NodeProp.mounted)) && mounted.overlay) {
@ -590,10 +595,10 @@ class TreeNode {
return this._parent ? this._parent.nextSignificantParent() : null;
}
get nextSibling() {
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index + 1, 1, 0, 4 /* DontCare */) : null;
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index + 1, 1, 0, 4 /* Side.DontCare */) : null;
}
get prevSibling() {
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index - 1, -1, 0, 4 /* DontCare */) : null;
return this._parent && this.index >= 0 ? this._parent.nextChild(this.index - 1, -1, 0, 4 /* Side.DontCare */) : null;
}
cursor(mode = 0) { return new TreeCursor(this, mode); }
get tree() { return this._tree; }
@ -655,24 +660,24 @@ class BufferContext {
}
}
class BufferNode {
get name() { return this.type.name; }
get from() { return this.context.start + this.context.buffer.buffer[this.index + 1]; }
get to() { return this.context.start + this.context.buffer.buffer[this.index + 2]; }
constructor(context, _parent, index) {
this.context = context;
this._parent = _parent;
this.index = index;
this.type = context.buffer.set.types[context.buffer.buffer[index]];
}
get name() { return this.type.name; }
get from() { return this.context.start + this.context.buffer.buffer[this.index + 1]; }
get to() { return this.context.start + this.context.buffer.buffer[this.index + 2]; }
child(dir, pos, side) {
let { buffer } = this.context;
let index = buffer.findChild(this.index + 4, buffer.buffer[this.index + 3], dir, pos - this.context.start, side);
return index < 0 ? null : new BufferNode(this.context, this, index);
}
get firstChild() { return this.child(1, 0, 4 /* DontCare */); }
get lastChild() { return this.child(-1, 0, 4 /* DontCare */); }
childAfter(pos) { return this.child(1, pos, 2 /* After */); }
childBefore(pos) { return this.child(-1, pos, -2 /* Before */); }
get firstChild() { return this.child(1, 0, 4 /* Side.DontCare */); }
get lastChild() { return this.child(-1, 0, 4 /* Side.DontCare */); }
childAfter(pos) { return this.child(1, pos, 2 /* Side.After */); }
childBefore(pos) { return this.child(-1, pos, -2 /* Side.Before */); }
enter(pos, side, mode = 0) {
if (mode & IterMode.ExcludeBuffers)
return null;
@ -684,7 +689,7 @@ class BufferNode {
return this._parent || this.context.parent.nextSignificantParent();
}
externalSibling(dir) {
return this._parent ? null : this.context.parent.nextChild(this.context.index + dir, dir, 0, 4 /* DontCare */);
return this._parent ? null : this.context.parent.nextChild(this.context.index + dir, dir, 0, 4 /* Side.DontCare */);
}
get nextSibling() {
let { buffer } = this.context;
@ -698,7 +703,7 @@ class BufferNode {
let parentStart = this._parent ? this._parent.index + 4 : 0;
if (this.index == parentStart)
return this.externalSibling(-1);
return new BufferNode(this.context, this._parent, buffer.findChild(parentStart, this.index, -1, 0, 4 /* DontCare */));
return new BufferNode(this.context, this._parent, buffer.findChild(parentStart, this.index, -1, 0, 4 /* Side.DontCare */));
}
cursor(mode = 0) { return new TreeCursor(this, mode); }
get tree() { return null; }
@ -707,8 +712,8 @@ class BufferNode {
let { buffer } = this.context;
let startI = this.index + 4, endI = buffer.buffer[this.index + 3];
if (endI > startI) {
let from = buffer.buffer[this.index + 1], to = buffer.buffer[this.index + 2];
children.push(buffer.slice(startI, endI, from, to));
let from = buffer.buffer[this.index + 1];
children.push(buffer.slice(startI, endI, from));
positions.push(0);
}
return new Tree(this.type, children, positions, this.to - this.from);
@ -735,6 +740,8 @@ class BufferNode {
/// A tree cursor object focuses on a given node in a syntax tree, and
/// allows you to move to adjacent nodes.
class TreeCursor {
/// Shorthand for `.type.name`.
get name() { return this.type.name; }
/// @internal
constructor(node,
/// @internal
@ -758,8 +765,6 @@ class TreeCursor {
this.yieldBuf(node.index);
}
}
/// Shorthand for `.type.name`.
get name() { return this.type.name; }
yieldNode(node) {
if (!node)
return false;
@ -804,13 +809,13 @@ class TreeCursor {
}
/// Move the cursor to this node's first child. When this returns
/// false, the node has no child, and the cursor has not been moved.
firstChild() { return this.enterChild(1, 0, 4 /* DontCare */); }
firstChild() { return this.enterChild(1, 0, 4 /* Side.DontCare */); }
/// Move the cursor to this node's last child.
lastChild() { return this.enterChild(-1, 0, 4 /* DontCare */); }
lastChild() { return this.enterChild(-1, 0, 4 /* Side.DontCare */); }
/// Move the cursor to the first child that ends after `pos`.
childAfter(pos) { return this.enterChild(1, pos, 2 /* After */); }
childAfter(pos) { return this.enterChild(1, pos, 2 /* Side.After */); }
/// Move to the last child that starts before `pos`.
childBefore(pos) { return this.enterChild(-1, pos, -2 /* Before */); }
childBefore(pos) { return this.enterChild(-1, pos, -2 /* Side.Before */); }
/// Move the cursor to the child around `pos`. If side is -1 the
/// child may end at that position, when 1 it may start there. This
/// will also enter [overlaid](#common.MountedTree.overlay)
@ -836,19 +841,19 @@ class TreeCursor {
if (!this.buffer)
return !this._tree._parent ? false
: this.yield(this._tree.index < 0 ? null
: this._tree._parent.nextChild(this._tree.index + dir, dir, 0, 4 /* DontCare */, this.mode));
: this._tree._parent.nextChild(this._tree.index + dir, dir, 0, 4 /* Side.DontCare */, this.mode));
let { buffer } = this.buffer, d = this.stack.length - 1;
if (dir < 0) {
let parentStart = d < 0 ? 0 : this.stack[d] + 4;
if (this.index != parentStart)
return this.yieldBuf(buffer.findChild(parentStart, this.index, -1, 0, 4 /* DontCare */));
return this.yieldBuf(buffer.findChild(parentStart, this.index, -1, 0, 4 /* Side.DontCare */));
}
else {
let after = buffer.buffer[this.index + 3];
if (after < (d < 0 ? buffer.buffer.length : buffer.buffer[this.stack[d] + 3]))
return this.yieldBuf(after);
}
return d < 0 ? this.yield(this.buffer.parent.nextChild(this.buffer.index + dir, dir, 0, 4 /* DontCare */, this.mode)) : false;
return d < 0 ? this.yield(this.buffer.parent.nextChild(this.buffer.index + dir, dir, 0, 4 /* Side.DontCare */, this.mode)) : false;
}
/// Move to this node's next sibling, if any.
nextSibling() { return this.sibling(1); }
@ -885,7 +890,7 @@ class TreeCursor {
return true;
}
move(dir, enter) {
if (enter && this.enterChild(dir, 0, 4 /* DontCare */))
if (enter && this.enterChild(dir, 0, 4 /* Side.DontCare */))
return true;
for (;;) {
if (this.sibling(dir))
@ -895,7 +900,7 @@ class TreeCursor {
}
}
/// Move to the next node in a
/// [pre-order](https://en.wikipedia.org/wiki/Tree_traversal#Pre-order_(NLR))
/// [pre-order](https://en.wikipedia.org/wiki/Tree_traversal#Pre-order,_NLR)
/// traversal, going from a node to its first child or, if the
/// current node is empty or `enter` is false, its next sibling or
/// the next sibling of the first parent node that has one.
@ -1011,17 +1016,17 @@ function buildTree(data) {
let lookAheadAtStart = lookAhead;
while (size < 0) {
cursor.next();
if (size == -1 /* Reuse */) {
if (size == -1 /* SpecialRecord.Reuse */) {
let node = reused[id];
children.push(node);
positions.push(start - parentStart);
return;
}
else if (size == -3 /* ContextChange */) { // Context change
else if (size == -3 /* SpecialRecord.ContextChange */) { // Context change
contextHash = id;
return;
}
else if (size == -4 /* LookAhead */) {
else if (size == -4 /* SpecialRecord.LookAhead */) {
lookAhead = id;
return;
}
@ -1138,7 +1143,7 @@ function buildTree(data) {
fork.next();
while (fork.pos > startPos) {
if (fork.size < 0) {
if (fork.size == -3 /* ContextChange */)
if (fork.size == -3 /* SpecialRecord.ContextChange */)
localSkipped += 4;
else
break scan;
@ -1174,10 +1179,10 @@ function buildTree(data) {
buffer[--index] = start - bufferStart;
buffer[--index] = id;
}
else if (size == -3 /* ContextChange */) {
else if (size == -3 /* SpecialRecord.ContextChange */) {
contextHash = id;
}
else if (size == -4 /* LookAhead */) {
else if (size == -4 /* SpecialRecord.LookAhead */) {
lookAhead = id;
}
return index;
@ -1224,7 +1229,7 @@ mkTree) {
let total = 0;
for (let i = from; i < to; i++)
total += nodeSize(balanceType, children[i]);
let maxChild = Math.ceil((total * 1.5) / 8 /* BranchFactor */);
let maxChild = Math.ceil((total * 1.5) / 8 /* Balance.BranchFactor */);
let localChildren = [], localPositions = [];
function divide(children, positions, from, to, offset) {
for (let i = from; i < to;) {
@ -1327,16 +1332,16 @@ class TreeFragment {
this.to = to;
this.tree = tree;
this.offset = offset;
this.open = (openStart ? 1 /* Start */ : 0) | (openEnd ? 2 /* End */ : 0);
this.open = (openStart ? 1 /* Open.Start */ : 0) | (openEnd ? 2 /* Open.End */ : 0);
}
/// Whether the start of the fragment represents the start of a
/// parse, or the end of a change. (In the second case, it may not
/// be safe to reuse some nodes at the start, depending on the
/// parsing algorithm.)
get openStart() { return (this.open & 1 /* Start */) > 0; }
get openStart() { return (this.open & 1 /* Open.Start */) > 0; }
/// Whether the end of the fragment represents the end of a
/// full-document parse, or the start of a change.
get openEnd() { return (this.open & 2 /* End */) > 0; }
get openEnd() { return (this.open & 2 /* Open.End */) > 0; }
/// Create a set of fragments from a freshly parsed tree, or update
/// an existing set of fragments by replacing the ones that overlap
/// with a tree with content from the new tree. When `partial` is
@ -1530,7 +1535,7 @@ class MixedParse {
enter = false;
}
else if (covered && (isCovered = checkCover(covered.ranges, cursor.from, cursor.to))) {
enter = isCovered != 2 /* Full */;
enter = isCovered != 2 /* Cover.Full */;
}
else if (!cursor.type.isAnonymous && cursor.from < cursor.to && (nest = this.nest(cursor, this.input))) {
if (!cursor.tree)
@ -1585,16 +1590,16 @@ function checkCover(covered, from, to) {
if (range.from >= to)
break;
if (range.to > from)
return range.from <= from && range.to >= to ? 2 /* Full */ : 1 /* Partial */;
return range.from <= from && range.to >= to ? 2 /* Cover.Full */ : 1 /* Cover.Partial */;
}
return 0 /* None */;
return 0 /* Cover.None */;
}
// Take a piece of buffer and convert it into a stand-alone
// TreeBuffer.
function sliceBuf(buf, startI, endI, nodes, positions, off) {
if (startI < endI) {
let from = buf.buffer[startI + 1], to = buf.buffer[endI - 2];
nodes.push(buf.slice(startI, endI, from, to));
let from = buf.buffer[startI + 1];
nodes.push(buf.slice(startI, endI, from));
positions.push(from - off);
}
}
@ -1799,14 +1804,14 @@ function enterFragments(mounts, ranges) {
for (let i = 0, pos = from;; i++) {
let last = i == changes.length, end = last ? to : changes[i].from;
if (end > pos)
result.push(new TreeFragment(pos, end, mount.tree, -startPos, frag.from >= pos, frag.to <= end));
result.push(new TreeFragment(pos, end, mount.tree, -startPos, frag.from >= pos || frag.openStart, frag.to <= end || frag.openEnd));
if (last)
break;
pos = changes[i].to;
}
}
else {
result.push(new TreeFragment(from, to, mount.tree, -startPos, frag.from >= startPos, frag.to <= endPos));
result.push(new TreeFragment(from, to, mount.tree, -startPos, frag.from >= startPos || frag.openStart, frag.to <= endPos || frag.openEnd));
}
}
return result;


@ -42,7 +42,7 @@ export interface Input {
readonly lineChunks: boolean;
read(from: number, to: number): string;
}
export declare type ParseWrapper = (inner: PartialParse, input: Input, fragments: readonly TreeFragment[], ranges: readonly {
export type ParseWrapper = (inner: PartialParse, input: Input, fragments: readonly TreeFragment[], ranges: readonly {
from: number;
to: number;
}[]) => PartialParse;


@ -34,7 +34,7 @@ export declare class MountedTree {
to: number;
}[] | null, parser: Parser);
}
export declare type NodePropSource = (type: NodeType) => null | [NodeProp<any>, any];
export type NodePropSource = (type: NodeType) => null | [NodeProp<any>, any];
export declare class NodeType {
readonly name: string;
readonly id: number;
@ -94,7 +94,7 @@ export declare class Tree {
}): Tree;
static build(data: BuildData): Tree;
}
declare type BuildData = {
type BuildData = {
buffer: BufferCursor | readonly number[];
nodeSet: NodeSet;
topID: number;


@ -1,6 +1,6 @@
{
"name": "@lezer/common",
"version": "1.0.0",
"version": "1.0.2",
"description": "Syntax tree data structure and parser interfaces for the lezer parser",
"main": "dist/index.cjs",
"type": "module",
@ -17,7 +17,7 @@
"rollup": "^2.52.2",
"@rollup/plugin-commonjs": "^15.1.0",
"@rollup/plugin-node-resolve": "^9.0.0",
"rollup-plugin-typescript2": "^0.30.0",
"rollup-plugin-typescript2": "^0.34.1",
"typescript": "^4.3.4",
"@types/mocha": "^5.2.6",
"ts-node": "^10.0.0",


@ -1,4 +1,4 @@
import { Tree, NodeType } from "@lezer/common";
import { Tree, NodeType, SyntaxNodeRef } from "@lezer/common";
export declare class Tag {
readonly set: Tag[];
static define(parent?: Tag): Tag;
@ -19,6 +19,11 @@ export declare function tagHighlighter(tags: readonly {
all?: string;
}): Highlighter;
export declare function highlightTree(tree: Tree, highlighter: Highlighter | readonly Highlighter[], putStyle: (from: number, to: number, classes: string) => void, from?: number, to?: number): void;
export declare function getStyleTags(node: SyntaxNodeRef): {
tags: readonly Tag[];
opaque: boolean;
inherit: boolean;
} | null;
export declare const tags: {
comment: Tag;
lineComment: Tag;


@ -88,23 +88,25 @@ class Modifier {
let set = [], tag = new Tag(set, base, mods);
for (let m of mods)
m.instances.push(tag);
let configs = permute(mods);
let configs = powerSet(mods);
for (let parent of base.set)
for (let config of configs)
set.push(Modifier.get(parent, config));
if (!parent.modified.length)
for (let config of configs)
set.push(Modifier.get(parent, config));
return tag;
}
}
function sameArray(a, b) {
return a.length == b.length && a.every((x, i) => x == b[i]);
}
function permute(array) {
let result = [array];
function powerSet(array) {
let sets = [[]];
for (let i = 0; i < array.length; i++) {
for (let a of permute(array.slice(0, i).concat(array.slice(i + 1))))
result.push(a);
for (let j = 0, e = sets.length; j < e; j++) {
sets.push(sets[j].concat(array[i]));
}
}
return result;
return sets.sort((a, b) => b.length - a.length);
}
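A small illustrative trace of the new helper (not part of the commit; strings stand in for Modifier objects):

// powerSet enumerates every subset of the modifier list, largest first, so a
// tag with modifiers m1 and m2 also registers m1-only, m2-only, and the plain
// base tag as parents.
powerSet(["m1", "m2"]);
// -> [["m1", "m2"], ["m1"], ["m2"], []]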
/// This function is used to add a set of tags to a language syntax
/// via [`NodeSet.extend`](#common.NodeSet.extend) or
@ -163,10 +165,10 @@ function styleTags(spec) {
tags = [tags];
for (let part of prop.split(" "))
if (part) {
let pieces = [], mode = 2 /* Normal */, rest = part;
let pieces = [], mode = 2 /* Mode.Normal */, rest = part;
for (let pos = 0;;) {
if (rest == "..." && pos > 0 && pos + 3 == part.length) {
mode = 1 /* Inherit */;
mode = 1 /* Mode.Inherit */;
break;
}
let m = /^"(?:[^"\\]|\\.)*?"|[^\/!]+/.exec(rest);
@ -178,7 +180,7 @@ function styleTags(spec) {
break;
let next = part[pos++];
if (pos == part.length && next == "!") {
mode = 0 /* Opaque */;
mode = 0 /* Mode.Opaque */;
break;
}
if (next != "/")
@ -202,6 +204,8 @@ class Rule {
this.context = context;
this.next = next;
}
get opaque() { return this.mode == 0 /* Mode.Opaque */; }
get inherit() { return this.mode == 1 /* Mode.Inherit */; }
sort(other) {
if (!other || other.depth < this.depth) {
this.next = other;
@ -212,6 +216,7 @@ class Rule {
}
get depth() { return this.context ? this.context.length : 0; }
}
Rule.empty = new Rule([], 2 /* Mode.Normal */, null);
/// Define a [highlighter](#highlight.Highlighter) from an array of
/// tag/class pairs. Classes associated with more specific tags will
/// take precedence.
@ -239,7 +244,7 @@ function tagHighlighter(tags, options) {
}
return cls;
},
scope: scope
scope
};
}
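A hedged usage sketch for these exports (the class names, the callback, and the parsed `tree` are assumptions for illustration):

const myHighlighter = tagHighlighter([
  { tag: tags.keyword, class: "tok-keyword" },
  { tag: tags.comment, class: "tok-comment" }
]);
// highlightTree calls the callback, in document order, for every styled range.
highlightTree(tree, myHighlighter, (from, to, classes) => {
  console.log(from, to, classes);
});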
function highlightTags(highlighters, tags) {
@ -292,25 +297,17 @@ class HighlightBuilder {
if (type.isTop)
highlighters = this.highlighters.filter(h => !h.scope || h.scope(type));
let cls = inheritedClass;
let rule = type.prop(ruleNodeProp), opaque = false;
while (rule) {
if (!rule.context || cursor.matchContext(rule.context)) {
let tagCls = highlightTags(highlighters, rule.tags);
if (tagCls) {
if (cls)
cls += " ";
cls += tagCls;
if (rule.mode == 1 /* Inherit */)
inheritedClass += (inheritedClass ? " " : "") + tagCls;
else if (rule.mode == 0 /* Opaque */)
opaque = true;
}
break;
}
rule = rule.next;
let rule = getStyleTags(cursor) || Rule.empty;
let tagCls = highlightTags(highlighters, rule.tags);
if (tagCls) {
if (cls)
cls += " ";
cls += tagCls;
if (rule.mode == 1 /* Mode.Inherit */)
inheritedClass += (inheritedClass ? " " : "") + tagCls;
}
this.startSpan(cursor.from, cls);
if (opaque)
if (rule.opaque)
return;
let mounted = cursor.tree && cursor.tree.prop(common.NodeProp.mounted);
if (mounted && mounted.overlay) {
@ -324,7 +321,7 @@ class HighlightBuilder {
if (rangeFrom < rangeTo && hasChild) {
while (cursor.from < rangeTo) {
this.highlightRange(cursor, rangeFrom, rangeTo, inheritedClass, highlighters);
this.startSpan(Math.min(to, cursor.to), cls);
this.startSpan(Math.min(rangeTo, cursor.to), cls);
if (cursor.to >= nextPos || !cursor.nextSibling())
break;
}
@ -353,6 +350,15 @@ class HighlightBuilder {
}
}
}
/// Match a syntax node's [highlight rules](#highlight.styleTags). If
/// there's a match, return its set of tags, and whether it is
/// opaque (uses a `!`) or applies to all child nodes (`/...`).
function getStyleTags(node) {
let rule = node.type.prop(ruleNodeProp);
while (rule && rule.context && !node.matchContext(rule.context))
rule = rule.next;
return rule || null;
}
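A short usage sketch for the newly exported helper (the `cursor` is an assumed TreeCursor positioned on some node):

let match = getStyleTags(cursor);
if (match)
  console.log(match.tags, match.opaque, match.inherit); // rule tags plus the new getters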
const t = Tag.define;
const comment = t(), name = t(), typeName = t(name), propertyName = t(name), literal = t(), string = t(literal), number = t(literal), content = t(), heading = t(content), keyword = t(), operator = t(), punctuation = t(), bracket = t(punctuation), meta = t();
/// The default set of highlighting [tags](#highlight.Tag).
@ -453,7 +459,7 @@ const tags = {
moduleKeyword: t(keyword),
/// An operator.
operator,
/// An [operator](#highlight.tags.operator) that defines something.
/// An [operator](#highlight.tags.operator) that dereferences something.
derefOperator: t(operator),
/// Arithmetic-related [operator](#highlight.tags.operator).
arithmeticOperator: t(operator),
@ -652,6 +658,7 @@ const classHighlighter = tagHighlighter([
exports.Tag = Tag;
exports.classHighlighter = classHighlighter;
exports.getStyleTags = getStyleTags;
exports.highlightTree = highlightTree;
exports.styleTags = styleTags;
exports.tagHighlighter = tagHighlighter;


@ -1,649 +0,0 @@
import { NodeProp } from '@lezer/common';
let nextTagID = 0;
/// Highlighting tags are markers that denote a highlighting category.
/// They are [associated](#highlight.styleTags) with parts of a syntax
/// tree by a language mode, and then mapped to an actual CSS style by
/// a [highlighter](#highlight.Highlighter).
///
/// Because syntax tree node types and highlight styles have to be
/// able to talk the same language, CodeMirror uses a mostly _closed_
/// [vocabulary](#highlight.tags) of syntax tags (as opposed to
/// traditional open string-based systems, which make it hard for
/// highlighting themes to cover all the tokens produced by the
/// various languages).
///
/// It _is_ possible to [define](#highlight.Tag^define) your own
/// highlighting tags for system-internal use (where you control both
/// the language package and the highlighter), but such tags will not
/// be picked up by regular highlighters (though you can derive them
/// from standard tags to allow highlighters to fall back to those).
class Tag {
/// @internal
constructor(
/// The set of this tag and all its parent tags, starting with
/// this one itself and sorted in order of decreasing specificity.
set,
/// The base unmodified tag that this one is based on, if it's
/// modified @internal
base,
/// The modifiers applied to this.base @internal
modified) {
this.set = set;
this.base = base;
this.modified = modified;
/// @internal
this.id = nextTagID++;
}
/// Define a new tag. If `parent` is given, the tag is treated as a
/// sub-tag of that parent, and
/// [highlighters](#highlight.tagHighlighter) that don't mention
/// this tag will try to fall back to the parent tag (or grandparent
/// tag, etc).
static define(parent) {
if (parent === null || parent === void 0 ? void 0 : parent.base)
throw new Error("Can not derive from a modified tag");
let tag = new Tag([], null, []);
tag.set.push(tag);
if (parent)
for (let t of parent.set)
tag.set.push(t);
return tag;
}
/// Define a tag _modifier_, which is a function that, given a tag,
/// will return a tag that is a subtag of the original. Applying the
/// same modifier to a tag twice will return the same value (`m1(t1)
/// == m1(t1)`) and applying multiple modifiers will, regardless of
/// order, produce the same tag (`m1(m2(t1)) == m2(m1(t1))`).
///
/// When multiple modifiers are applied to a given base tag, each
/// smaller set of modifiers is registered as a parent, so that for
/// example `m1(m2(m3(t1)))` is a subtype of `m1(m2(t1))`,
/// `m1(m3(t1))`, and so on.
static defineModifier() {
let mod = new Modifier;
return (tag) => {
if (tag.modified.indexOf(mod) > -1)
return tag;
return Modifier.get(tag.base || tag, tag.modified.concat(mod).sort((a, b) => a.id - b.id));
};
}
}
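For reference, a minimal sketch of the API this (now removed) file documents; the derived names are invented:

const customKeyword = Tag.define(tags.keyword);   // highlighters fall back to keyword styling
const deprecated = Tag.defineModifier();
const deprecatedVariable = deprecated(tags.variableName); // subtag of variableName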
let nextModifierID = 0;
class Modifier {
constructor() {
this.instances = [];
this.id = nextModifierID++;
}
static get(base, mods) {
if (!mods.length)
return base;
let exists = mods[0].instances.find(t => t.base == base && sameArray(mods, t.modified));
if (exists)
return exists;
let set = [], tag = new Tag(set, base, mods);
for (let m of mods)
m.instances.push(tag);
let configs = permute(mods);
for (let parent of base.set)
for (let config of configs)
set.push(Modifier.get(parent, config));
return tag;
}
}
function sameArray(a, b) {
return a.length == b.length && a.every((x, i) => x == b[i]);
}
function permute(array) {
let result = [array];
for (let i = 0; i < array.length; i++) {
for (let a of permute(array.slice(0, i).concat(array.slice(i + 1))))
result.push(a);
}
return result;
}
/// This function is used to add a set of tags to a language syntax
/// via [`NodeSet.extend`](#common.NodeSet.extend) or
/// [`LRParser.configure`](#lr.LRParser.configure).
///
/// The argument object maps node selectors to [highlighting
/// tags](#highlight.Tag) or arrays of tags.
///
/// Node selectors may hold one or more (space-separated) node paths.
/// Such a path can be a [node name](#common.NodeType.name), or
/// multiple node names (or `*` wildcards) separated by slash
/// characters, as in `"Block/Declaration/VariableName"`. Such a path
/// matches the final node but only if its direct parent nodes are the
/// other nodes mentioned. A `*` in such a path matches any parent,
/// but only a single level—wildcards that match multiple parents
/// aren't supported, both for efficiency reasons and because Lezer
/// trees make it rather hard to reason about what they would match.)
///
/// A path can be ended with `/...` to indicate that the tag assigned
/// to the node should also apply to all child nodes, even if they
/// match their own style (by default, only the innermost style is
/// used).
///
/// When a path ends in `!`, as in `Attribute!`, no further matching
/// happens for the node's child nodes, and the entire node gets the
/// given style.
///
/// In this notation, node names that contain `/`, `!`, `*`, or `...`
/// must be quoted as JSON strings.
///
/// For example:
///
/// ```javascript
/// parser.withProps(
/// styleTags({
/// // Style Number and BigNumber nodes
/// "Number BigNumber": tags.number,
/// // Style Escape nodes whose parent is String
/// "String/Escape": tags.escape,
/// // Style anything inside Attributes nodes
/// "Attributes!": tags.meta,
/// // Add a style to all content inside Italic nodes
/// "Italic/...": tags.emphasis,
/// // Style InvalidString nodes as both `string` and `invalid`
/// "InvalidString": [tags.string, tags.invalid],
/// // Style the node named "/" as punctuation
/// '"/"': tags.punctuation
/// })
/// )
/// ```
function styleTags(spec) {
let byName = Object.create(null);
for (let prop in spec) {
let tags = spec[prop];
if (!Array.isArray(tags))
tags = [tags];
for (let part of prop.split(" "))
if (part) {
let pieces = [], mode = 2 /* Normal */, rest = part;
for (let pos = 0;;) {
if (rest == "..." && pos > 0 && pos + 3 == part.length) {
mode = 1 /* Inherit */;
break;
}
let m = /^"(?:[^"\\]|\\.)*?"|[^\/!]+/.exec(rest);
if (!m)
throw new RangeError("Invalid path: " + part);
pieces.push(m[0] == "*" ? "" : m[0][0] == '"' ? JSON.parse(m[0]) : m[0]);
pos += m[0].length;
if (pos == part.length)
break;
let next = part[pos++];
if (pos == part.length && next == "!") {
mode = 0 /* Opaque */;
break;
}
if (next != "/")
throw new RangeError("Invalid path: " + part);
rest = part.slice(pos);
}
let last = pieces.length - 1, inner = pieces[last];
if (!inner)
throw new RangeError("Invalid path: " + part);
let rule = new Rule(tags, mode, last > 0 ? pieces.slice(0, last) : null);
byName[inner] = rule.sort(byName[inner]);
}
}
return ruleNodeProp.add(byName);
}
const ruleNodeProp = new NodeProp();
class Rule {
constructor(tags, mode, context, next) {
this.tags = tags;
this.mode = mode;
this.context = context;
this.next = next;
}
sort(other) {
if (!other || other.depth < this.depth) {
this.next = other;
return this;
}
other.next = this.sort(other.next);
return other;
}
get depth() { return this.context ? this.context.length : 0; }
}
/// Define a [highlighter](#highlight.Highlighter) from an array of
/// tag/class pairs. Classes associated with more specific tags will
/// take precedence.
function tagHighlighter(tags, options) {
let map = Object.create(null);
for (let style of tags) {
if (!Array.isArray(style.tag))
map[style.tag.id] = style.class;
else
for (let tag of style.tag)
map[tag.id] = style.class;
}
let { scope, all = null } = options || {};
return {
style: (tags) => {
let cls = all;
for (let tag of tags) {
for (let sub of tag.set) {
let tagClass = map[sub.id];
if (tagClass) {
cls = cls ? cls + " " + tagClass : tagClass;
break;
}
}
}
return cls;
},
scope: scope
};
}
function highlightTags(highlighters, tags) {
let result = null;
for (let highlighter of highlighters) {
let value = highlighter.style(tags);
if (value)
result = result ? result + " " + value : value;
}
return result;
}
/// Highlight the given [tree](#common.Tree) with the given
/// [highlighter](#highlight.Highlighter).
function highlightTree(tree, highlighter,
/// Assign styling to a region of the text. Will be called, in order
/// of position, for any ranges where more than zero classes apply.
/// `classes` is a space separated string of CSS classes.
putStyle,
/// The start of the range to highlight.
from = 0,
/// The end of the range.
to = tree.length) {
let builder = new HighlightBuilder(from, Array.isArray(highlighter) ? highlighter : [highlighter], putStyle);
builder.highlightRange(tree.cursor(), from, to, "", builder.highlighters);
builder.flush(to);
}
class HighlightBuilder {
constructor(at, highlighters, span) {
this.at = at;
this.highlighters = highlighters;
this.span = span;
this.class = "";
}
startSpan(at, cls) {
if (cls != this.class) {
this.flush(at);
if (at > this.at)
this.at = at;
this.class = cls;
}
}
flush(to) {
if (to > this.at && this.class)
this.span(this.at, to, this.class);
}
highlightRange(cursor, from, to, inheritedClass, highlighters) {
let { type, from: start, to: end } = cursor;
if (start >= to || end <= from)
return;
if (type.isTop)
highlighters = this.highlighters.filter(h => !h.scope || h.scope(type));
let cls = inheritedClass;
let rule = type.prop(ruleNodeProp), opaque = false;
while (rule) {
if (!rule.context || cursor.matchContext(rule.context)) {
let tagCls = highlightTags(highlighters, rule.tags);
if (tagCls) {
if (cls)
cls += " ";
cls += tagCls;
if (rule.mode == 1 /* Inherit */)
inheritedClass += (inheritedClass ? " " : "") + tagCls;
else if (rule.mode == 0 /* Opaque */)
opaque = true;
}
break;
}
rule = rule.next;
}
this.startSpan(cursor.from, cls);
if (opaque)
return;
let mounted = cursor.tree && cursor.tree.prop(NodeProp.mounted);
if (mounted && mounted.overlay) {
let inner = cursor.node.enter(mounted.overlay[0].from + start, 1);
let innerHighlighters = this.highlighters.filter(h => !h.scope || h.scope(mounted.tree.type));
let hasChild = cursor.firstChild();
for (let i = 0, pos = start;; i++) {
let next = i < mounted.overlay.length ? mounted.overlay[i] : null;
let nextPos = next ? next.from + start : end;
let rangeFrom = Math.max(from, pos), rangeTo = Math.min(to, nextPos);
if (rangeFrom < rangeTo && hasChild) {
while (cursor.from < rangeTo) {
this.highlightRange(cursor, rangeFrom, rangeTo, inheritedClass, highlighters);
this.startSpan(Math.min(to, cursor.to), cls);
if (cursor.to >= nextPos || !cursor.nextSibling())
break;
}
}
if (!next || nextPos > to)
break;
pos = next.to + start;
if (pos > from) {
this.highlightRange(inner.cursor(), Math.max(from, next.from + start), Math.min(to, pos), inheritedClass, innerHighlighters);
this.startSpan(pos, cls);
}
}
if (hasChild)
cursor.parent();
}
else if (cursor.firstChild()) {
do {
if (cursor.to <= from)
continue;
if (cursor.from >= to)
break;
this.highlightRange(cursor, from, to, inheritedClass, highlighters);
this.startSpan(Math.min(to, cursor.to), cls);
} while (cursor.nextSibling());
cursor.parent();
}
}
}
const t = Tag.define;
const comment = t(), name = t(), typeName = t(name), propertyName = t(name), literal = t(), string = t(literal), number = t(literal), content = t(), heading = t(content), keyword = t(), operator = t(), punctuation = t(), bracket = t(punctuation), meta = t();
/// The default set of highlighting [tags](#highlight.Tag).
///
/// This collection is heavily biased towards programming languages,
/// and necessarily incomplete. A full ontology of syntactic
/// constructs would fill a stack of books, and be impractical to
/// write themes for. So try to make do with this set. If all else
/// fails, [open an
/// issue](https://github.com/codemirror/codemirror.next) to propose a
/// new tag, or [define](#highlight.Tag^define) a local custom tag for
/// your use case.
///
/// Note that it is not obligatory to always attach the most specific
/// tag possible to an element—if your grammar can't easily
/// distinguish a certain type of element (such as a local variable),
/// it is okay to style it as its more general variant (a variable).
///
/// For tags that extend some parent tag, the documentation links to
/// the parent.
const tags = {
/// A comment.
comment,
/// A line [comment](#highlight.tags.comment).
lineComment: t(comment),
/// A block [comment](#highlight.tags.comment).
blockComment: t(comment),
/// A documentation [comment](#highlight.tags.comment).
docComment: t(comment),
/// Any kind of identifier.
name,
/// The [name](#highlight.tags.name) of a variable.
variableName: t(name),
/// A type [name](#highlight.tags.name).
typeName: typeName,
/// A tag name (subtag of [`typeName`](#highlight.tags.typeName)).
tagName: t(typeName),
/// A property or field [name](#highlight.tags.name).
propertyName: propertyName,
/// An attribute name (subtag of [`propertyName`](#highlight.tags.propertyName)).
attributeName: t(propertyName),
/// The [name](#highlight.tags.name) of a class.
className: t(name),
/// A label [name](#highlight.tags.name).
labelName: t(name),
/// A namespace [name](#highlight.tags.name).
namespace: t(name),
/// The [name](#highlight.tags.name) of a macro.
macroName: t(name),
/// A literal value.
literal,
/// A string [literal](#highlight.tags.literal).
string,
/// A documentation [string](#highlight.tags.string).
docString: t(string),
/// A character literal (subtag of [string](#highlight.tags.string)).
character: t(string),
/// An attribute value (subtag of [string](#highlight.tags.string)).
attributeValue: t(string),
/// A number [literal](#highlight.tags.literal).
number,
/// An integer [number](#highlight.tags.number) literal.
integer: t(number),
/// A floating-point [number](#highlight.tags.number) literal.
float: t(number),
/// A boolean [literal](#highlight.tags.literal).
bool: t(literal),
/// Regular expression [literal](#highlight.tags.literal).
regexp: t(literal),
/// An escape [literal](#highlight.tags.literal), for example a
/// backslash escape in a string.
escape: t(literal),
/// A color [literal](#highlight.tags.literal).
color: t(literal),
/// A URL [literal](#highlight.tags.literal).
url: t(literal),
/// A language keyword.
keyword,
/// The [keyword](#highlight.tags.keyword) for the self or this
/// object.
self: t(keyword),
/// The [keyword](#highlight.tags.keyword) for null.
null: t(keyword),
/// A [keyword](#highlight.tags.keyword) denoting some atomic value.
atom: t(keyword),
/// A [keyword](#highlight.tags.keyword) that represents a unit.
unit: t(keyword),
/// A modifier [keyword](#highlight.tags.keyword).
modifier: t(keyword),
/// A [keyword](#highlight.tags.keyword) that acts as an operator.
operatorKeyword: t(keyword),
/// A control-flow related [keyword](#highlight.tags.keyword).
controlKeyword: t(keyword),
/// A [keyword](#highlight.tags.keyword) that defines something.
definitionKeyword: t(keyword),
/// A [keyword](#highlight.tags.keyword) related to defining or
/// interfacing with modules.
moduleKeyword: t(keyword),
/// An operator.
operator,
/// An [operator](#highlight.tags.operator) that defines something.
derefOperator: t(operator),
/// Arithmetic-related [operator](#highlight.tags.operator).
arithmeticOperator: t(operator),
/// Logical [operator](#highlight.tags.operator).
logicOperator: t(operator),
/// Bit [operator](#highlight.tags.operator).
bitwiseOperator: t(operator),
/// Comparison [operator](#highlight.tags.operator).
compareOperator: t(operator),
/// [Operator](#highlight.tags.operator) that updates its operand.
updateOperator: t(operator),
/// [Operator](#highlight.tags.operator) that defines something.
definitionOperator: t(operator),
/// Type-related [operator](#highlight.tags.operator).
typeOperator: t(operator),
/// Control-flow [operator](#highlight.tags.operator).
controlOperator: t(operator),
/// Program or markup punctuation.
punctuation,
/// [Punctuation](#highlight.tags.punctuation) that separates
/// things.
separator: t(punctuation),
/// Bracket-style [punctuation](#highlight.tags.punctuation).
bracket,
/// Angle [brackets](#highlight.tags.bracket) (usually `<` and `>`
/// tokens).
angleBracket: t(bracket),
/// Square [brackets](#highlight.tags.bracket) (usually `[` and `]`
/// tokens).
squareBracket: t(bracket),
/// Parentheses (usually `(` and `)` tokens). Subtag of
/// [bracket](#highlight.tags.bracket).
paren: t(bracket),
/// Braces (usually `{` and `}` tokens). Subtag of
/// [bracket](#highlight.tags.bracket).
brace: t(bracket),
/// Content, for example plain text in XML or markup documents.
content,
/// [Content](#highlight.tags.content) that represents a heading.
heading,
/// A level 1 [heading](#highlight.tags.heading).
heading1: t(heading),
/// A level 2 [heading](#highlight.tags.heading).
heading2: t(heading),
/// A level 3 [heading](#highlight.tags.heading).
heading3: t(heading),
/// A level 4 [heading](#highlight.tags.heading).
heading4: t(heading),
/// A level 5 [heading](#highlight.tags.heading).
heading5: t(heading),
/// A level 6 [heading](#highlight.tags.heading).
heading6: t(heading),
/// A prose separator (such as a horizontal rule).
contentSeparator: t(content),
/// [Content](#highlight.tags.content) that represents a list.
list: t(content),
/// [Content](#highlight.tags.content) that represents a quote.
quote: t(content),
/// [Content](#highlight.tags.content) that is emphasized.
emphasis: t(content),
/// [Content](#highlight.tags.content) that is styled strong.
strong: t(content),
/// [Content](#highlight.tags.content) that is part of a link.
link: t(content),
/// [Content](#highlight.tags.content) that is styled as code or
/// monospace.
monospace: t(content),
/// [Content](#highlight.tags.content) that has a strike-through
/// style.
strikethrough: t(content),
/// Inserted text in a change-tracking format.
inserted: t(),
/// Deleted text.
deleted: t(),
/// Changed text.
changed: t(),
/// An invalid or unsyntactic element.
invalid: t(),
/// Metadata or meta-instruction.
meta,
/// [Metadata](#highlight.tags.meta) that applies to the entire
/// document.
documentMeta: t(meta),
/// [Metadata](#highlight.tags.meta) that annotates or adds
/// attributes to a given syntactic element.
annotation: t(meta),
/// Processing instruction or preprocessor directive. Subtag of
/// [meta](#highlight.tags.meta).
processingInstruction: t(meta),
/// [Modifier](#highlight.Tag^defineModifier) that indicates that a
/// given element is being defined. Expected to be used with the
/// various [name](#highlight.tags.name) tags.
definition: Tag.defineModifier(),
/// [Modifier](#highlight.Tag^defineModifier) that indicates that
/// something is constant. Mostly expected to be used with
/// [variable names](#highlight.tags.variableName).
constant: Tag.defineModifier(),
/// [Modifier](#highlight.Tag^defineModifier) used to indicate that
/// a [variable](#highlight.tags.variableName) or [property
/// name](#highlight.tags.propertyName) is being called or defined
/// as a function.
function: Tag.defineModifier(),
/// [Modifier](#highlight.Tag^defineModifier) that can be applied to
/// [names](#highlight.tags.name) to indicate that they belong to
/// the language's standard environment.
standard: Tag.defineModifier(),
/// [Modifier](#highlight.Tag^defineModifier) that indicates that a given
/// [name](#highlight.tags.name) is local to some scope.
local: Tag.defineModifier(),
/// A generic variant [modifier](#highlight.Tag^defineModifier) that
/// can be used to tag language-specific alternative variants of
/// some common tag. It is recommended for themes to define special
/// forms of at least the [string](#highlight.tags.string) and
/// [variable name](#highlight.tags.variableName) tags, since those
/// come up a lot.
special: Tag.defineModifier()
};
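// Illustrative sketch (not part of the original file): a grammar attaches
// these tags to its node types via styleTags, and modifiers compose with base
// tags. The node names below are hypothetical.
//
//   const exampleHighlighting = styleTags({
//     "FunctionDeclaration/VariableName": tags.function(tags.definition(tags.variableName)),
//     "String": tags.string,
//     "LineComment": tags.lineComment
//   });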
/// This is a highlighter that adds stable, predictable classes to
/// tokens, for styling with external CSS.
///
/// The following tags are mapped to their name prefixed with `"tok-"`
/// (for example `"tok-comment"`):
///
/// * [`link`](#highlight.tags.link)
/// * [`heading`](#highlight.tags.heading)
/// * [`emphasis`](#highlight.tags.emphasis)
/// * [`strong`](#highlight.tags.strong)
/// * [`keyword`](#highlight.tags.keyword)
/// * [`atom`](#highlight.tags.atom)
/// * [`bool`](#highlight.tags.bool)
/// * [`url`](#highlight.tags.url)
/// * [`labelName`](#highlight.tags.labelName)
/// * [`inserted`](#highlight.tags.inserted)
/// * [`deleted`](#highlight.tags.deleted)
/// * [`literal`](#highlight.tags.literal)
/// * [`string`](#highlight.tags.string)
/// * [`number`](#highlight.tags.number)
/// * [`variableName`](#highlight.tags.variableName)
/// * [`typeName`](#highlight.tags.typeName)
/// * [`namespace`](#highlight.tags.namespace)
/// * [`className`](#highlight.tags.className)
/// * [`macroName`](#highlight.tags.macroName)
/// * [`propertyName`](#highlight.tags.propertyName)
/// * [`operator`](#highlight.tags.operator)
/// * [`comment`](#highlight.tags.comment)
/// * [`meta`](#highlight.tags.meta)
/// * [`punctuation`](#highlight.tags.punctuation)
/// * [`invalid`](#highlight.tags.invalid)
///
/// In addition, these mappings are provided:
///
/// * [`regexp`](#highlight.tags.regexp),
/// [`escape`](#highlight.tags.escape), and
/// [`special`](#highlight.tags.special)[`(string)`](#highlight.tags.string)
/// are mapped to `"tok-string2"`
/// * [`special`](#highlight.tags.special)[`(variableName)`](#highlight.tags.variableName)
/// to `"tok-variableName2"`
/// * [`local`](#highlight.tags.local)[`(variableName)`](#highlight.tags.variableName)
/// to `"tok-variableName tok-local"`
/// * [`definition`](#highlight.tags.definition)[`(variableName)`](#highlight.tags.variableName)
/// to `"tok-variableName tok-definition"`
/// * [`definition`](#highlight.tags.definition)[`(propertyName)`](#highlight.tags.propertyName)
/// to `"tok-propertyName tok-definition"`
const classHighlighter = tagHighlighter([
{ tag: tags.link, class: "tok-link" },
{ tag: tags.heading, class: "tok-heading" },
{ tag: tags.emphasis, class: "tok-emphasis" },
{ tag: tags.strong, class: "tok-strong" },
{ tag: tags.keyword, class: "tok-keyword" },
{ tag: tags.atom, class: "tok-atom" },
{ tag: tags.bool, class: "tok-bool" },
{ tag: tags.url, class: "tok-url" },
{ tag: tags.labelName, class: "tok-labelName" },
{ tag: tags.inserted, class: "tok-inserted" },
{ tag: tags.deleted, class: "tok-deleted" },
{ tag: tags.literal, class: "tok-literal" },
{ tag: tags.string, class: "tok-string" },
{ tag: tags.number, class: "tok-number" },
{ tag: [tags.regexp, tags.escape, tags.special(tags.string)], class: "tok-string2" },
{ tag: tags.variableName, class: "tok-variableName" },
{ tag: tags.local(tags.variableName), class: "tok-variableName tok-local" },
{ tag: tags.definition(tags.variableName), class: "tok-variableName tok-definition" },
{ tag: tags.special(tags.variableName), class: "tok-variableName2" },
{ tag: tags.definition(tags.propertyName), class: "tok-propertyName tok-definition" },
{ tag: tags.typeName, class: "tok-typeName" },
{ tag: tags.namespace, class: "tok-namespace" },
{ tag: tags.className, class: "tok-className" },
{ tag: tags.macroName, class: "tok-macroName" },
{ tag: tags.propertyName, class: "tok-propertyName" },
{ tag: tags.operator, class: "tok-operator" },
{ tag: tags.comment, class: "tok-comment" },
{ tag: tags.meta, class: "tok-meta" },
{ tag: tags.invalid, class: "tok-invalid" },
{ tag: tags.punctuation, class: "tok-punctuation" }
]);
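// Usage sketch (assumes `tree` is a Lezer syntax tree; illustrative only):
//
//   highlightTree(tree, classHighlighter, (from, to, classes) => {
//     console.log(from, to, classes); // e.g. 0 8 "tok-keyword"
//   });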
export { Tag, classHighlighter, highlightTags, highlightTree, styleTags, tagHighlighter, tags };


@ -84,23 +84,25 @@ class Modifier {
let set = [], tag = new Tag(set, base, mods);
for (let m of mods)
m.instances.push(tag);
let configs = permute(mods);
let configs = powerSet(mods);
for (let parent of base.set)
for (let config of configs)
set.push(Modifier.get(parent, config));
if (!parent.modified.length)
for (let config of configs)
set.push(Modifier.get(parent, config));
return tag;
}
}
function sameArray(a, b) {
return a.length == b.length && a.every((x, i) => x == b[i]);
}
function permute(array) {
let result = [array];
function powerSet(array) {
let sets = [[]];
for (let i = 0; i < array.length; i++) {
for (let a of permute(array.slice(0, i).concat(array.slice(i + 1))))
result.push(a);
for (let j = 0, e = sets.length; j < e; j++) {
sets.push(sets[j].concat(array[i]));
}
}
return result;
return sets.sort((a, b) => b.length - a.length);
}
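// For example, powerSet([a, b]) yields [[a, b], [a], [b], []]: every subset
// of the modifier list, with the longest (most specific) combinations first.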
/// This function is used to add a set of tags to a language syntax
/// via [`NodeSet.extend`](#common.NodeSet.extend) or
@ -159,10 +161,10 @@ function styleTags(spec) {
tags = [tags];
for (let part of prop.split(" "))
if (part) {
let pieces = [], mode = 2 /* Normal */, rest = part;
let pieces = [], mode = 2 /* Mode.Normal */, rest = part;
for (let pos = 0;;) {
if (rest == "..." && pos > 0 && pos + 3 == part.length) {
mode = 1 /* Inherit */;
mode = 1 /* Mode.Inherit */;
break;
}
let m = /^"(?:[^"\\]|\\.)*?"|[^\/!]+/.exec(rest);
@ -174,7 +176,7 @@ function styleTags(spec) {
break;
let next = part[pos++];
if (pos == part.length && next == "!") {
mode = 0 /* Opaque */;
mode = 0 /* Mode.Opaque */;
break;
}
if (next != "/")
@ -198,6 +200,8 @@ class Rule {
this.context = context;
this.next = next;
}
get opaque() { return this.mode == 0 /* Mode.Opaque */; }
get inherit() { return this.mode == 1 /* Mode.Inherit */; }
sort(other) {
if (!other || other.depth < this.depth) {
this.next = other;
@ -208,6 +212,7 @@ class Rule {
}
get depth() { return this.context ? this.context.length : 0; }
}
Rule.empty = new Rule([], 2 /* Mode.Normal */, null);
/// Define a [highlighter](#highlight.Highlighter) from an array of
/// tag/class pairs. Classes associated with more specific tags will
/// take precedence.
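// For instance (sketch; class names are hypothetical):
//   tagHighlighter([{tag: tags.keyword, class: "kw"}, {tag: tags.comment, class: "cmt"}])
// produces a highlighter whose `style` method returns "kw" for keyword tokens
// and "cmt" for comments.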
@ -235,7 +240,7 @@ function tagHighlighter(tags, options) {
}
return cls;
},
scope: scope
scope
};
}
function highlightTags(highlighters, tags) {
@ -288,25 +293,17 @@ class HighlightBuilder {
if (type.isTop)
highlighters = this.highlighters.filter(h => !h.scope || h.scope(type));
let cls = inheritedClass;
let rule = type.prop(ruleNodeProp), opaque = false;
while (rule) {
if (!rule.context || cursor.matchContext(rule.context)) {
let tagCls = highlightTags(highlighters, rule.tags);
if (tagCls) {
if (cls)
cls += " ";
cls += tagCls;
if (rule.mode == 1 /* Inherit */)
inheritedClass += (inheritedClass ? " " : "") + tagCls;
else if (rule.mode == 0 /* Opaque */)
opaque = true;
}
break;
}
rule = rule.next;
let rule = getStyleTags(cursor) || Rule.empty;
let tagCls = highlightTags(highlighters, rule.tags);
if (tagCls) {
if (cls)
cls += " ";
cls += tagCls;
if (rule.mode == 1 /* Mode.Inherit */)
inheritedClass += (inheritedClass ? " " : "") + tagCls;
}
this.startSpan(cursor.from, cls);
if (opaque)
if (rule.opaque)
return;
let mounted = cursor.tree && cursor.tree.prop(NodeProp.mounted);
if (mounted && mounted.overlay) {
@ -320,7 +317,7 @@ class HighlightBuilder {
if (rangeFrom < rangeTo && hasChild) {
while (cursor.from < rangeTo) {
this.highlightRange(cursor, rangeFrom, rangeTo, inheritedClass, highlighters);
this.startSpan(Math.min(to, cursor.to), cls);
this.startSpan(Math.min(rangeTo, cursor.to), cls);
if (cursor.to >= nextPos || !cursor.nextSibling())
break;
}
@ -349,6 +346,15 @@ class HighlightBuilder {
}
}
}
/// Match a syntax node's [highlight rules](#highlight.styleTags). If
/// there's a match, return its set of tags, and whether it is
/// opaque (uses a `!`) or applies to all child nodes (`/...`).
function getStyleTags(node) {
let rule = node.type.prop(ruleNodeProp);
while (rule && rule.context && !node.matchContext(rule.context))
rule = rule.next;
return rule || null;
}
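// Usage sketch (illustrative; `tree` and `pos` are assumed to exist):
//
//   let rule = getStyleTags(tree.cursorAt(pos, 1));
//   if (rule) console.log(rule.tags, rule.opaque, rule.inherit);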
const t = Tag.define;
const comment = t(), name = t(), typeName = t(name), propertyName = t(name), literal = t(), string = t(literal), number = t(literal), content = t(), heading = t(content), keyword = t(), operator = t(), punctuation = t(), bracket = t(punctuation), meta = t();
/// The default set of highlighting [tags](#highlight.Tag).
@ -449,7 +455,7 @@ const tags = {
moduleKeyword: t(keyword),
/// An operator.
operator,
/// An [operator](#highlight.tags.operator) that defines something.
/// An [operator](#highlight.tags.operator) that dereferences something.
derefOperator: t(operator),
/// Arithmetic-related [operator](#highlight.tags.operator).
arithmeticOperator: t(operator),
@ -646,4 +652,4 @@ const classHighlighter = tagHighlighter([
{ tag: tags.punctuation, class: "tok-punctuation" }
]);
export { Tag, classHighlighter, highlightTree, styleTags, tagHighlighter, tags };
export { Tag, classHighlighter, getStyleTags, highlightTree, styleTags, tagHighlighter, tags };


@ -1,6 +1,6 @@
{
"name": "@lezer/highlight",
"version": "1.0.0",
"version": "1.1.3",
"description": "Highlighting system for Lezer parse trees",
"main": "dist/index.cjs",
"type": "module",
@ -16,7 +16,7 @@
"rollup": "^2.52.2",
"@rollup/plugin-commonjs": "^15.1.0",
"@rollup/plugin-node-resolve": "^9.0.0",
"rollup-plugin-typescript2": "^0.30.0",
"rollup-plugin-typescript2": "^0.34.1",
"typescript": "^4.3.4"
},
"dependencies": {

node_modules/@lezer/lr/LICENSE

@ -1,6 +1,6 @@
MIT License
Copyright (C) 2018 by Marijn Haverbeke <marijnh@gmail.com> and others
Copyright (C) 2018 by Marijn Haverbeke <marijn@haverbeke.berlin> and others
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

node_modules/@lezer/lr/dist/index.cjs

@ -89,7 +89,8 @@ class Stack {
// Apply a reduce action
/// @internal
reduce(action) {
let depth = action >> 19 /* ReduceDepthShift */, type = action & 65535 /* ValueMask */;
var _a;
let depth = action >> 19 /* Action.ReduceDepthShift */, type = action & 65535 /* Action.ValueMask */;
let { parser } = this.p;
let dPrec = parser.dynamicPrecedence(type);
if (dPrec)
@ -108,15 +109,29 @@ class Stack {
// consume two extra frames (the dummy parent node for the skipped
// expression and the state that we'll be staying in, which should
// be moved to `this.state`).
let base = this.stack.length - ((depth - 1) * 3) - (action & 262144 /* StayFlag */ ? 6 : 0);
let start = this.stack[base - 2];
let bufferBase = this.stack[base - 1], count = this.bufferBase + this.buffer.length - bufferBase;
let base = this.stack.length - ((depth - 1) * 3) - (action & 262144 /* Action.StayFlag */ ? 6 : 0);
let start = base ? this.stack[base - 2] : this.p.ranges[0].from, size = this.reducePos - start;
// This is a kludge to try and detect overly deep left-associative
// trees, which will not increase the parse stack depth and thus
// won't be caught by the regular stack-depth limit check.
if (size >= 2000 /* Recover.MinBigReduction */ && !((_a = this.p.parser.nodeSet.types[type]) === null || _a === void 0 ? void 0 : _a.isAnonymous)) {
if (start == this.p.lastBigReductionStart) {
this.p.bigReductionCount++;
this.p.lastBigReductionSize = size;
}
else if (this.p.lastBigReductionSize < size) {
this.p.bigReductionCount = 1;
this.p.lastBigReductionStart = start;
this.p.lastBigReductionSize = size;
}
}
let bufferBase = base ? this.stack[base - 1] : 0, count = this.bufferBase + this.buffer.length - bufferBase;
// Store normal terms or `R -> R R` repeat reductions
if (type < parser.minRepeatTerm || (action & 131072 /* RepeatFlag */)) {
let pos = parser.stateFlag(this.state, 1 /* Skipped */) ? this.pos : this.reducePos;
if (type < parser.minRepeatTerm || (action & 131072 /* Action.RepeatFlag */)) {
let pos = parser.stateFlag(this.state, 1 /* StateFlag.Skipped */) ? this.pos : this.reducePos;
this.storeNode(type, start, pos, count + 4, true);
}
if (action & 262144 /* StayFlag */) {
if (action & 262144 /* Action.StayFlag */) {
this.state = this.stack[base];
}
else {
@ -130,7 +145,7 @@ class Stack {
// Shift a value into the buffer
/// @internal
storeNode(term, start, end, size = 4, isReduce = false) {
if (term == 0 /* Err */ &&
if (term == 0 /* Term.Err */ &&
(!this.stack.length || this.stack[this.stack.length - 1] < this.buffer.length + this.bufferBase)) {
// Try to omit/merge adjacent error nodes
let cur = this, top = this.buffer.length;
@ -138,7 +153,7 @@ class Stack {
top = cur.bufferBase - cur.parent.bufferBase;
cur = cur.parent;
}
if (top > 0 && cur.buffer[top - 4] == 0 /* Err */ && cur.buffer[top - 1] > -1) {
if (top > 0 && cur.buffer[top - 4] == 0 /* Term.Err */ && cur.buffer[top - 1] > -1) {
if (start == end)
return;
if (cur.buffer[top - 2] >= start) {
@ -152,7 +167,7 @@ class Stack {
}
else { // There may be skipped nodes that have to be moved forward
let index = this.buffer.length;
if (index > 0 && this.buffer[index - 4] != 0 /* Err */)
if (index > 0 && this.buffer[index - 4] != 0 /* Term.Err */)
while (index > 0 && this.buffer[index - 2] > end) {
// Move this record forward
this.buffer[index] = this.buffer[index - 4];
@ -173,14 +188,14 @@ class Stack {
/// @internal
shift(action, next, nextEnd) {
let start = this.pos;
if (action & 131072 /* GotoFlag */) {
this.pushState(action & 65535 /* ValueMask */, this.pos);
if (action & 131072 /* Action.GotoFlag */) {
this.pushState(action & 65535 /* Action.ValueMask */, this.pos);
}
else if ((action & 262144 /* StayFlag */) == 0) { // Regular shift
else if ((action & 262144 /* Action.StayFlag */) == 0) { // Regular shift
let nextState = action, { parser } = this.p;
if (nextEnd > this.pos || next <= parser.maxNode) {
this.pos = nextEnd;
if (!parser.stateFlag(nextState, 1 /* Skipped */))
if (!parser.stateFlag(nextState, 1 /* StateFlag.Skipped */))
this.reducePos = nextEnd;
}
this.pushState(nextState, start);
@ -198,7 +213,7 @@ class Stack {
// Apply an action
/// @internal
apply(action, next, nextEnd) {
if (action & 65536 /* ReduceFlag */)
if (action & 65536 /* Action.ReduceFlag */)
this.reduce(action);
else
this.shift(action, next, nextEnd);
@ -243,9 +258,9 @@ class Stack {
let isNode = next <= this.p.parser.maxNode;
if (isNode)
this.storeNode(next, this.pos, nextEnd, 4);
this.storeNode(0 /* Err */, this.pos, nextEnd, isNode ? 8 : 4);
this.storeNode(0 /* Term.Err */, this.pos, nextEnd, isNode ? 8 : 4);
this.pos = this.reducePos = nextEnd;
this.score -= 190 /* Delete */;
this.score -= 190 /* Recover.Delete */;
}
/// Check if the given term would be able to be shifted (optionally
/// after some reductions) on this stack. This can be useful for
@ -253,11 +268,11 @@ class Stack {
/// given token when it applies.
canShift(term) {
for (let sim = new SimulatedStack(this);;) {
let action = this.p.parser.stateSlot(sim.state, 4 /* DefaultReduce */) || this.p.parser.hasAction(sim.state, term);
if ((action & 65536 /* ReduceFlag */) == 0)
return true;
let action = this.p.parser.stateSlot(sim.state, 4 /* ParseState.DefaultReduce */) || this.p.parser.hasAction(sim.state, term);
if (action == 0)
return false;
if ((action & 65536 /* Action.ReduceFlag */) == 0)
return true;
sim.reduce(action);
}
}
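// Sketch of a typical use from an external tokenizer (the term id `bangTerm`
// is hypothetical):
//
//   const exclaim = new ExternalTokenizer((input, stack) => {
//     if (input.next == 33 /* '!' */ && stack.canShift(bangTerm)) {
//       input.advance();
//       input.acceptToken(bangTerm);
//     }
//   });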
@ -265,17 +280,17 @@ class Stack {
// inserts some missing token or rule.
/// @internal
recoverByInsert(next) {
if (this.stack.length >= 300 /* MaxInsertStackDepth */)
if (this.stack.length >= 300 /* Recover.MaxInsertStackDepth */)
return [];
let nextStates = this.p.parser.nextStates(this.state);
if (nextStates.length > 4 /* MaxNext */ << 1 || this.stack.length >= 120 /* DampenInsertStackDepth */) {
if (nextStates.length > 4 /* Recover.MaxNext */ << 1 || this.stack.length >= 120 /* Recover.DampenInsertStackDepth */) {
let best = [];
for (let i = 0, s; i < nextStates.length; i += 2) {
if ((s = nextStates[i + 1]) != this.state && this.p.parser.hasAction(s, next))
best.push(nextStates[i], s);
}
if (this.stack.length < 120 /* DampenInsertStackDepth */)
for (let i = 0; best.length < 4 /* MaxNext */ << 1 && i < nextStates.length; i += 2) {
if (this.stack.length < 120 /* Recover.DampenInsertStackDepth */)
for (let i = 0; best.length < 4 /* Recover.MaxNext */ << 1 && i < nextStates.length; i += 2) {
let s = nextStates[i + 1];
if (!best.some((v, i) => (i & 1) && v == s))
best.push(nextStates[i], s);
@ -283,15 +298,15 @@ class Stack {
nextStates = best;
}
let result = [];
for (let i = 0; i < nextStates.length && result.length < 4 /* MaxNext */; i += 2) {
for (let i = 0; i < nextStates.length && result.length < 4 /* Recover.MaxNext */; i += 2) {
let s = nextStates[i + 1];
if (s == this.state)
continue;
let stack = this.split();
stack.pushState(s, this.pos);
stack.storeNode(0 /* Err */, stack.pos, stack.pos, 4, true);
stack.storeNode(0 /* Term.Err */, stack.pos, stack.pos, 4, true);
stack.shiftContext(nextStates[i], this.pos);
stack.score -= 200 /* Insert */;
stack.score -= 200 /* Recover.Insert */;
result.push(stack);
}
return result;
@ -300,17 +315,17 @@ class Stack {
// be done.
/// @internal
forceReduce() {
let reduce = this.p.parser.stateSlot(this.state, 5 /* ForcedReduce */);
if ((reduce & 65536 /* ReduceFlag */) == 0)
let reduce = this.p.parser.stateSlot(this.state, 5 /* ParseState.ForcedReduce */);
if ((reduce & 65536 /* Action.ReduceFlag */) == 0)
return false;
let { parser } = this.p;
if (!parser.validAction(this.state, reduce)) {
let depth = reduce >> 19 /* ReduceDepthShift */, term = reduce & 65535 /* ValueMask */;
let depth = reduce >> 19 /* Action.ReduceDepthShift */, term = reduce & 65535 /* Action.ValueMask */;
let target = this.stack.length - depth * 3;
if (target < 0 || parser.getGoto(this.stack[target], term, false) < 0)
return false;
this.storeNode(0 /* Err */, this.reducePos, this.reducePos, 4, true);
this.score -= 100 /* Reduce */;
this.storeNode(0 /* Term.Err */, this.reducePos, this.reducePos, 4, true);
this.score -= 100 /* Recover.Reduce */;
}
this.reducePos = this.pos;
this.reduce(reduce);
@ -318,9 +333,9 @@ class Stack {
}
/// @internal
forceAll() {
while (!this.p.parser.stateFlag(this.state, 2 /* Accepting */)) {
while (!this.p.parser.stateFlag(this.state, 2 /* StateFlag.Accepting */)) {
if (!this.forceReduce()) {
this.storeNode(0 /* Err */, this.pos, this.pos, 4, true);
this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
break;
}
}
@ -333,8 +348,8 @@ class Stack {
if (this.stack.length != 3)
return false;
let { parser } = this.p;
return parser.data[parser.stateSlot(this.state, 1 /* Actions */)] == 65535 /* End */ &&
!parser.stateSlot(this.state, 4 /* DefaultReduce */);
return parser.data[parser.stateSlot(this.state, 1 /* ParseState.Actions */)] == 65535 /* Seq.End */ &&
!parser.stateSlot(this.state, 4 /* ParseState.DefaultReduce */);
}
/// Restart the stack (put it back in its start state). Only safe
/// when this.stack.length == 3 (state is directly below the top
@ -415,6 +430,7 @@ var Recover;
Recover[Recover["MaxNext"] = 4] = "MaxNext";
Recover[Recover["MaxInsertStackDepth"] = 300] = "MaxInsertStackDepth";
Recover[Recover["DampenInsertStackDepth"] = 120] = "DampenInsertStackDepth";
Recover[Recover["MinBigReduction"] = 2000] = "MinBigReduction";
})(Recover || (Recover = {}));
// Used to cheaply run some reductions to scan ahead without mutating
// an entire stack
@ -426,7 +442,7 @@ class SimulatedStack {
this.base = this.stack.length;
}
reduce(action) {
let term = action & 65535 /* ValueMask */, depth = action >> 19 /* ReduceDepthShift */;
let term = action & 65535 /* Action.ValueMask */, depth = action >> 19 /* Action.ReduceDepthShift */;
if (depth == 0) {
if (this.stack == this.start.stack)
this.stack = this.stack.slice();
@ -477,6 +493,42 @@ class StackBufferCursor {
}
}
// See lezer-generator/src/encode.ts for comments about the encoding
// used here
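// (Roughly: each number is written most-significant digit first in base 46,
// using printable characters from code 32 up while skipping '"' and '\';
// code 126 stands for the special value 65535. The first decoded number is
// the array length, and the remaining numbers fill the array.)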
function decodeArray(input, Type = Uint16Array) {
if (typeof input != "string")
return input;
let array = null;
for (let pos = 0, out = 0; pos < input.length;) {
let value = 0;
for (;;) {
let next = input.charCodeAt(pos++), stop = false;
if (next == 126 /* Encode.BigValCode */) {
value = 65535 /* Encode.BigVal */;
break;
}
if (next >= 92 /* Encode.Gap2 */)
next--;
if (next >= 34 /* Encode.Gap1 */)
next--;
let digit = next - 32 /* Encode.Start */;
if (digit >= 46 /* Encode.Base */) {
digit -= 46 /* Encode.Base */;
stop = true;
}
value += digit;
if (stop)
break;
value *= 46 /* Encode.Base */;
}
if (array)
array[out++] = value;
else
array = new Type(value);
}
return array;
}
class CachedToken {
constructor() {
this.start = -1;
@ -540,6 +592,15 @@ class InputStream {
}
return pos;
}
/// @internal
clipPos(pos) {
if (pos >= this.range.from && pos < this.range.to)
return pos;
for (let range of this.ranges)
if (range.to > pos)
return Math.max(pos, range.from);
return this.end;
}
/// Look at a code unit near the stream position. `.peek(0)` equals
/// `.next`, `.peek(-1)` gives you the previous character, and so
/// on.
@ -692,9 +753,40 @@ class TokenGroup {
this.data = data;
this.id = id;
}
token(input, stack) { readToken(this.data, input, stack, this.id); }
token(input, stack) {
let { parser } = stack.p;
readToken(this.data, input, stack, this.id, parser.data, parser.tokenPrecTable);
}
}
TokenGroup.prototype.contextual = TokenGroup.prototype.fallback = TokenGroup.prototype.extend = false;
/// @hide
class LocalTokenGroup {
constructor(data, precTable, elseToken) {
this.precTable = precTable;
this.elseToken = elseToken;
this.data = typeof data == "string" ? decodeArray(data) : data;
}
token(input, stack) {
let start = input.pos, cur;
for (;;) {
cur = input.pos;
readToken(this.data, input, stack, 0, this.data, this.precTable);
if (input.token.value > -1)
break;
if (this.elseToken == null)
return;
if (input.next < 0)
break;
input.advance();
input.reset(cur + 1, input.token);
}
if (cur > start) {
input.reset(start, input.token);
input.acceptToken(this.elseToken, cur - start);
}
}
}
LocalTokenGroup.prototype.contextual = TokenGroup.prototype.fallback = TokenGroup.prototype.extend = false;
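// (Behavior sketch: a LocalTokenGroup scans with its own local state table;
// when nothing matches at the current position and `elseToken` is configured,
// it advances one character at a time and finally emits `elseToken` covering
// the characters it had to skip.)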
/// `@external tokens` declarations in the grammar should resolve to
/// an instance of this class.
class ExternalTokenizer {
@ -731,9 +823,9 @@ class ExternalTokenizer {
//
// This function interprets that data, running through a stream as
// long as new states with a matching group mask can be reached,
// and updating `token` when it matches a token.
function readToken(data, input, stack, group) {
let state = 0, groupMask = 1 << group, { parser } = stack.p, { dialect } = parser;
// and updating `input.token` when it matches a token.
function readToken(data, input, stack, group, precTable, precOffset) {
let state = 0, groupMask = 1 << group, { dialect } = stack.p.parser;
scan: for (;;) {
if ((groupMask & data[state]) == 0)
break;
@ -745,16 +837,23 @@ function readToken(data, input, stack, group) {
if ((data[i + 1] & groupMask) > 0) {
let term = data[i];
if (dialect.allows(term) &&
(input.token.value == -1 || input.token.value == term || parser.overrides(term, input.token.value))) {
(input.token.value == -1 || input.token.value == term ||
overrides(term, input.token.value, precTable, precOffset))) {
input.acceptToken(term);
break;
}
}
let next = input.next, low = 0, high = data[state + 2];
// Special case for EOF
if (input.next < 0 && high > low && data[accEnd + high * 3 - 3] == 65535 /* Seq.End */ && data[accEnd + high * 3 - 3] == 65535 /* Seq.End */) {
state = data[accEnd + high * 3 - 1];
continue scan;
}
// Do a binary search on the state's edges
for (let next = input.next, low = 0, high = data[state + 2]; low < high;) {
for (; low < high;) {
let mid = (low + high) >> 1;
let index = accEnd + mid + (mid << 1);
let from = data[index], to = data[index + 1];
let from = data[index], to = data[index + 1] || 0x10000;
if (next < from)
high = mid;
else if (next >= to)
@ -768,41 +867,15 @@ function readToken(data, input, stack, group) {
break;
}
}
// See lezer-generator/src/encode.ts for comments about the encoding
// used here
function decodeArray(input, Type = Uint16Array) {
if (typeof input != "string")
return input;
let array = null;
for (let pos = 0, out = 0; pos < input.length;) {
let value = 0;
for (;;) {
let next = input.charCodeAt(pos++), stop = false;
if (next == 126 /* BigValCode */) {
value = 65535 /* BigVal */;
break;
}
if (next >= 92 /* Gap2 */)
next--;
if (next >= 34 /* Gap1 */)
next--;
let digit = next - 32 /* Start */;
if (digit >= 46 /* Base */) {
digit -= 46 /* Base */;
stop = true;
}
value += digit;
if (stop)
break;
value *= 46 /* Base */;
}
if (array)
array[out++] = value;
else
array = new Type(value);
}
return array;
function findOffset(data, start, term) {
for (let i = start, next; (next = data[i]) != 65535 /* Seq.End */; i++)
if (next == term)
return i - start;
return -1;
}
function overrides(token, prev, tableData, tableOffset) {
let iPrev = findOffset(tableData, tableOffset, prev);
return iPrev < 0 || findOffset(tableData, tableOffset, token) < iPrev;
}
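// (That is: a token takes precedence over the previously matched one unless
// both are listed in the precedence table and the previous one comes first.)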
// Environment variable used to control console output
@ -819,8 +892,8 @@ function cutAt(tree, pos, side) {
if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos)))
for (;;) {
if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError)
return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Margin */))
: Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Margin */));
return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */))
: Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */));
if (side < 0 ? cursor.prevSibling() : cursor.nextSibling())
break;
if (!cursor.parent())
@ -924,7 +997,7 @@ class TokenCache {
let actionIndex = 0;
let main = null;
let { parser } = stack.p, { tokenizers } = parser;
let mask = parser.stateSlot(stack.state, 3 /* TokenizerMask */);
let mask = parser.stateSlot(stack.state, 3 /* ParseState.TokenizerMask */);
let context = stack.curContext ? stack.curContext.hash : 0;
let lookAhead = 0;
for (let i = 0; i < tokenizers.length; i++) {
@ -938,9 +1011,9 @@ class TokenCache {
token.mask = mask;
token.context = context;
}
if (token.lookAhead > token.end + 25 /* Margin */)
if (token.lookAhead > token.end + 25 /* Safety.Margin */)
lookAhead = Math.max(token.lookAhead, lookAhead);
if (token.value != 0 /* Err */) {
if (token.value != 0 /* Term.Err */) {
let startIndex = actionIndex;
if (token.extended > -1)
actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@ -971,18 +1044,19 @@ class TokenCache {
let main = new CachedToken, { pos, p } = stack;
main.start = pos;
main.end = Math.min(pos + 1, p.stream.end);
main.value = pos == p.stream.end ? p.parser.eofTerm : 0 /* Err */;
main.value = pos == p.stream.end ? p.parser.eofTerm : 0 /* Term.Err */;
return main;
}
updateCachedToken(token, tokenizer, stack) {
tokenizer.token(this.stream.reset(stack.pos, token), stack);
let start = this.stream.clipPos(stack.pos);
tokenizer.token(this.stream.reset(start, token), stack);
if (token.value > -1) {
let { parser } = stack.p;
for (let i = 0; i < parser.specialized.length; i++)
if (parser.specialized[i] == token.value) {
let result = parser.specializers[i](this.stream.read(token.start, token.end), stack);
if (result >= 0 && stack.p.parser.dialect.allows(result >> 1)) {
if ((result & 1) == 0 /* Specialize */)
if ((result & 1) == 0 /* Specialize.Specialize */)
token.value = result >> 1;
else
token.extended = result >> 1;
@ -991,8 +1065,8 @@ class TokenCache {
}
}
else {
token.value = 0 /* Err */;
token.end = Math.min(stack.p.stream.end, stack.pos + 1);
token.value = 0 /* Term.Err */;
token.end = this.stream.clipPos(start + 1);
}
}
putAction(action, token, end, index) {
@ -1008,13 +1082,13 @@ class TokenCache {
addActions(stack, token, end, index) {
let { state } = stack, { parser } = stack.p, { data } = parser;
for (let set = 0; set < 2; set++) {
for (let i = parser.stateSlot(state, set ? 2 /* Skip */ : 1 /* Actions */);; i += 3) {
if (data[i] == 65535 /* End */) {
if (data[i + 1] == 1 /* Next */) {
for (let i = parser.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */);; i += 3) {
if (data[i] == 65535 /* Seq.End */) {
if (data[i + 1] == 1 /* Seq.Next */) {
i = pair(data, i + 2);
}
else {
if (index == 0 && data[i + 1] == 2 /* Other */)
if (index == 0 && data[i + 1] == 2 /* Seq.Other */)
index = this.putAction(pair(data, i + 2), token, end, index);
break;
}
@ -1039,6 +1113,11 @@ var Rec;
// on recursive traversal.
Rec[Rec["CutDepth"] = 15000] = "CutDepth";
Rec[Rec["CutTo"] = 9000] = "CutTo";
Rec[Rec["MaxLeftAssociativeReductionCount"] = 300] = "MaxLeftAssociativeReductionCount";
// The maximum number of non-recovering stacks to explore (to avoid
// getting bogged down with exponentially multiplying stacks in
// ambiguous content)
Rec[Rec["MaxStackCount"] = 12] = "MaxStackCount";
})(Rec || (Rec = {}));
class Parse {
constructor(parser, input, fragments, ranges) {
@ -1050,6 +1129,9 @@ class Parse {
this.minStackPos = 0;
this.reused = [];
this.stoppedAt = null;
this.lastBigReductionStart = -1;
this.lastBigReductionSize = 0;
this.bigReductionCount = 0;
this.stream = new InputStream(input, ranges);
this.tokens = new TokenCache(parser, this.stream);
this.topTerm = parser.top[1];
@ -1072,6 +1154,18 @@ class Parse {
// This will hold stacks beyond `pos`.
let newStacks = this.stacks = [];
let stopped, stoppedTokens;
// If a large amount of reductions happened with the same start
// position, force the stack out of that production in order to
// avoid creating a tree too deep to recurse through.
// (This is an ugly kludge, because unfortunately there is no
// straightforward, cheap way to check for this happening, due to
// the history of reductions only being available in an
// expensive-to-access format in the stack buffers.)
if (this.bigReductionCount > 300 /* Rec.MaxLeftAssociativeReductionCount */ && stacks.length == 1) {
let [s] = stacks;
while (s.forceReduce() && s.stack.length && s.stack[s.stack.length - 2] >= this.lastBigReductionStart) { }
this.bigReductionCount = this.lastBigReductionSize = 0;
}
// Keep advancing any stacks at `pos` until they either move
// forward or can't be advanced. Gather stacks that can't be
// advanced further in `stopped`.
@ -1107,7 +1201,7 @@ class Parse {
throw new SyntaxError("No parse at " + pos);
}
if (!this.recovering)
this.recovering = 5 /* Distance */;
this.recovering = 5 /* Rec.Distance */;
}
if (this.recovering && stopped) {
let finished = this.stoppedAt != null && stopped[0].pos > this.stoppedAt ? stopped[0]
@ -1116,7 +1210,7 @@ class Parse {
return this.stackToTree(finished.forceAll());
}
if (this.recovering) {
let maxRemaining = this.recovering == 1 ? 1 : this.recovering * 3 /* MaxRemainingPerStep */;
let maxRemaining = this.recovering == 1 ? 1 : this.recovering * 3 /* Rec.MaxRemainingPerStep */;
if (newStacks.length > maxRemaining) {
newStacks.sort((a, b) => b.score - a.score);
while (newStacks.length > maxRemaining)
@ -1134,7 +1228,7 @@ class Parse {
for (let j = i + 1; j < newStacks.length; j++) {
let other = newStacks[j];
if (stack.sameState(other) ||
stack.buffer.length > 500 /* MinBufferLengthPrune */ && other.buffer.length > 500 /* MinBufferLengthPrune */) {
stack.buffer.length > 500 /* Rec.MinBufferLengthPrune */ && other.buffer.length > 500 /* Rec.MinBufferLengthPrune */) {
if (((stack.score - other.score) || (stack.buffer.length - other.buffer.length)) > 0) {
newStacks.splice(j--, 1);
}
@ -1145,6 +1239,8 @@ class Parse {
}
}
}
if (newStacks.length > 12 /* Rec.MaxStackCount */)
newStacks.splice(12 /* Rec.MaxStackCount */, newStacks.length - 12 /* Rec.MaxStackCount */);
}
this.minStackPos = newStacks[0].pos;
for (let i = 1; i < newStacks.length; i++)
@ -1185,15 +1281,15 @@ class Parse {
break;
}
}
let defaultReduce = parser.stateSlot(stack.state, 4 /* DefaultReduce */);
let defaultReduce = parser.stateSlot(stack.state, 4 /* ParseState.DefaultReduce */);
if (defaultReduce > 0) {
stack.reduce(defaultReduce);
if (verbose)
console.log(base + this.stackID(stack) + ` (via always-reduce ${parser.getName(defaultReduce & 65535 /* ValueMask */)})`);
console.log(base + this.stackID(stack) + ` (via always-reduce ${parser.getName(defaultReduce & 65535 /* Action.ValueMask */)})`);
return true;
}
if (stack.stack.length >= 15000 /* CutDepth */) {
while (stack.stack.length > 9000 /* CutTo */ && stack.forceReduce()) { }
if (stack.stack.length >= 15000 /* Rec.CutDepth */) {
while (stack.stack.length > 9000 /* Rec.CutTo */ && stack.forceReduce()) { }
}
let actions = this.tokens.getActions(stack);
for (let i = 0; i < actions.length;) {
@ -1202,8 +1298,8 @@ class Parse {
let localStack = last ? stack : stack.split();
localStack.apply(action, term, end);
if (verbose)
console.log(base + this.stackID(localStack) + ` (via ${(action & 65536 /* ReduceFlag */) == 0 ? "shift"
: `reduce of ${parser.getName(action & 65535 /* ValueMask */)}`} for ${parser.getName(term)} @ ${start}${localStack == stack ? "" : ", split"})`);
console.log(base + this.stackID(localStack) + ` (via ${(action & 65536 /* Action.ReduceFlag */) == 0 ? "shift"
: `reduce of ${parser.getName(action & 65535 /* Action.ValueMask */)}`} for ${parser.getName(term)} @ ${start}${localStack == stack ? "" : ", split"})`);
if (last)
return true;
else if (localStack.pos > start)
@ -1244,7 +1340,7 @@ class Parse {
continue;
}
let force = stack.split(), forceBase = base;
for (let j = 0; force.forceReduce() && j < 10 /* ForceReduceLimit */; j++) {
for (let j = 0; force.forceReduce() && j < 10 /* Rec.ForceReduceLimit */; j++) {
if (verbose)
console.log(forceBase + this.stackID(force) + " (via force-reduce)");
let done = this.advanceFully(force, newStacks);
@ -1261,7 +1357,7 @@ class Parse {
if (this.stream.end > stack.pos) {
if (tokenEnd == stack.pos) {
tokenEnd++;
token = 0 /* Err */;
token = 0 /* Term.Err */;
}
stack.recoverByDelete(token, tokenEnd);
if (verbose)
@ -1343,8 +1439,8 @@ class LRParser extends common.Parser {
super();
/// @internal
this.wrappers = [];
if (spec.version != 14 /* Version */)
throw new RangeError(`Parser version (${spec.version}) doesn't match runtime version (${14 /* Version */})`);
if (spec.version != 14 /* File.Version */)
throw new RangeError(`Parser version (${spec.version}) doesn't match runtime version (${14 /* File.Version */})`);
let nodeNames = spec.nodeNames.split(" ");
this.minRepeatTerm = nodeNames.length;
for (let i = 0; i < spec.repeatNodeCount; i++)
@ -1388,13 +1484,11 @@ class LRParser extends common.Parser {
this.bufferLength = common.DefaultBufferLength;
let tokenArray = decodeArray(spec.tokenData);
this.context = spec.context;
this.specialized = new Uint16Array(spec.specialized ? spec.specialized.length : 0);
this.specializers = [];
if (spec.specialized)
for (let i = 0; i < spec.specialized.length; i++) {
this.specialized[i] = spec.specialized[i].term;
this.specializers[i] = spec.specialized[i].get;
}
this.specializerSpecs = spec.specialized || [];
this.specialized = new Uint16Array(this.specializerSpecs.length);
for (let i = 0; i < this.specializerSpecs.length; i++)
this.specialized[i] = this.specializerSpecs[i].term;
this.specializers = this.specializerSpecs.map(getSpecializer);
this.states = decodeArray(spec.states, Uint32Array);
this.data = decodeArray(spec.stateData);
this.goto = decodeArray(spec.goto);
@ -1436,16 +1530,16 @@ class LRParser extends common.Parser {
hasAction(state, terminal) {
let data = this.data;
for (let set = 0; set < 2; set++) {
for (let i = this.stateSlot(state, set ? 2 /* Skip */ : 1 /* Actions */), next;; i += 3) {
if ((next = data[i]) == 65535 /* End */) {
if (data[i + 1] == 1 /* Next */)
for (let i = this.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */), next;; i += 3) {
if ((next = data[i]) == 65535 /* Seq.End */) {
if (data[i + 1] == 1 /* Seq.Next */)
next = data[i = pair(data, i + 2)];
else if (data[i + 1] == 2 /* Other */)
else if (data[i + 1] == 2 /* Seq.Other */)
return pair(data, i + 2);
else
break;
}
if (next == terminal || next == 0 /* Err */)
if (next == terminal || next == 0 /* Term.Err */)
return pair(data, i + 1);
}
}
@ -1453,19 +1547,19 @@ class LRParser extends common.Parser {
}
/// @internal
stateSlot(state, slot) {
return this.states[(state * 6 /* Size */) + slot];
return this.states[(state * 6 /* ParseState.Size */) + slot];
}
/// @internal
stateFlag(state, flag) {
return (this.stateSlot(state, 0 /* Flags */) & flag) > 0;
return (this.stateSlot(state, 0 /* ParseState.Flags */) & flag) > 0;
}
/// @internal
validAction(state, action) {
if (action == this.stateSlot(state, 4 /* DefaultReduce */))
if (action == this.stateSlot(state, 4 /* ParseState.DefaultReduce */))
return true;
for (let i = this.stateSlot(state, 1 /* Actions */);; i += 3) {
if (this.data[i] == 65535 /* End */) {
if (this.data[i + 1] == 1 /* Next */)
for (let i = this.stateSlot(state, 1 /* ParseState.Actions */);; i += 3) {
if (this.data[i] == 65535 /* Seq.End */) {
if (this.data[i + 1] == 1 /* Seq.Next */)
i = pair(this.data, i + 2);
else
return false;
@ -1478,14 +1572,14 @@ class LRParser extends common.Parser {
/// goto jumps. @internal
nextStates(state) {
let result = [];
for (let i = this.stateSlot(state, 1 /* Actions */);; i += 3) {
if (this.data[i] == 65535 /* End */) {
if (this.data[i + 1] == 1 /* Next */)
for (let i = this.stateSlot(state, 1 /* ParseState.Actions */);; i += 3) {
if (this.data[i] == 65535 /* Seq.End */) {
if (this.data[i + 1] == 1 /* Seq.Next */)
i = pair(this.data, i + 2);
else
break;
}
if ((this.data[i + 2] & (65536 /* ReduceFlag */ >> 16)) == 0) {
if ((this.data[i + 2] & (65536 /* Action.ReduceFlag */ >> 16)) == 0) {
let value = this.data[i + 1];
if (!result.some((v, i) => (i & 1) && v == value))
result.push(this.data[i], value);
@ -1493,11 +1587,6 @@ class LRParser extends common.Parser {
}
return result;
}
/// @internal
overrides(token, prev) {
let iPrev = findOffset(this.data, this.tokenPrecTable, prev);
return iPrev < 0 || findOffset(this.data, this.tokenPrecTable, token) < iPrev;
}
/// Configure the parser. Returns a new parser instance that has the
/// given settings modified. Settings not provided in `config` are
/// kept from the original parser.
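// Example sketch (option names come from ParserConfig; the values are
// hypothetical):
//
//   let tsParser = parser.configure({dialect: "ts", strict: false});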
@ -1518,6 +1607,17 @@ class LRParser extends common.Parser {
let found = config.tokenizers.find(r => r.from == t);
return found ? found.to : t;
});
if (config.specializers) {
copy.specializers = this.specializers.slice();
copy.specializerSpecs = this.specializerSpecs.map((s, i) => {
let found = config.specializers.find(r => r.from == s.external);
if (!found)
return s;
let spec = Object.assign(Object.assign({}, s), { external: found.to });
copy.specializers[i] = getSpecializer(spec);
return spec;
});
}
if (config.contextTracker)
copy.context = config.contextTracker;
if (config.dialect)
@ -1564,37 +1664,40 @@ class LRParser extends common.Parser {
let disabled = null;
for (let i = 0; i < values.length; i++)
if (!flags[i]) {
for (let j = this.dialects[values[i]], id; (id = this.data[j++]) != 65535 /* End */;)
for (let j = this.dialects[values[i]], id; (id = this.data[j++]) != 65535 /* Seq.End */;)
(disabled || (disabled = new Uint8Array(this.maxTerm + 1)))[id] = 1;
}
return new Dialect(dialect, flags, disabled);
}
/// (used by the output of the parser generator) @internal
/// Used by the output of the parser generator. Not available to
/// user code. @hide
static deserialize(spec) {
return new LRParser(spec);
}
}
function pair(data, off) { return data[off] | (data[off + 1] << 16); }
function findOffset(data, start, term) {
for (let i = start, next; (next = data[i]) != 65535 /* End */; i++)
if (next == term)
return i - start;
return -1;
}
function findFinished(stacks) {
let best = null;
for (let stack of stacks) {
let stopped = stack.p.stoppedAt;
if ((stack.pos == stack.p.stream.end || stopped != null && stack.pos > stopped) &&
stack.p.parser.stateFlag(stack.state, 2 /* Accepting */) &&
stack.p.parser.stateFlag(stack.state, 2 /* StateFlag.Accepting */) &&
(!best || best.score < stack.score))
best = stack;
}
return best;
}
function getSpecializer(spec) {
if (spec.external) {
let mask = spec.extend ? 1 /* Specialize.Extend */ : 0 /* Specialize.Specialize */;
return (value, stack) => (spec.external(value, stack) << 1) | mask;
}
return spec.get;
}
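// (An external specializer returns a plain term id; getSpecializer shifts it
// left by one bit and sets bit 0 when the spec uses `extend`, matching the
// encoding that generated `get` functions already produce.)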
exports.ContextTracker = ContextTracker;
exports.ExternalTokenizer = ExternalTokenizer;
exports.InputStream = InputStream;
exports.LRParser = LRParser;
exports.LocalTokenGroup = LocalTokenGroup;
exports.Stack = Stack;


@ -1,3 +1,3 @@
export { LRParser, ParserConfig, ContextTracker } from "./parse";
export { InputStream, ExternalTokenizer } from "./token";
export { InputStream, ExternalTokenizer, LocalTokenGroup } from "./token";
export { Stack } from "./stack";

node_modules/@lezer/lr/dist/index.es.js
(File diff suppressed because it is too large.)

node_modules/@lezer/lr/dist/index.js

@ -1,4 +1,4 @@
import { Parser, NodeSet, NodeType, DefaultBufferLength, NodeProp, Tree, IterMode } from '@lezer/common';
import { Parser, NodeProp, NodeSet, NodeType, DefaultBufferLength, Tree, IterMode } from '@lezer/common';
/// A parse stack. These are used internally by the parser to track
/// parsing progress. They also provide some properties and methods
@ -85,7 +85,8 @@ class Stack {
// Apply a reduce action
/// @internal
reduce(action) {
let depth = action >> 19 /* ReduceDepthShift */, type = action & 65535 /* ValueMask */;
var _a;
let depth = action >> 19 /* Action.ReduceDepthShift */, type = action & 65535 /* Action.ValueMask */;
let { parser } = this.p;
let dPrec = parser.dynamicPrecedence(type);
if (dPrec)
@ -104,15 +105,29 @@ class Stack {
// consume two extra frames (the dummy parent node for the skipped
// expression and the state that we'll be staying in, which should
// be moved to `this.state`).
let base = this.stack.length - ((depth - 1) * 3) - (action & 262144 /* StayFlag */ ? 6 : 0);
let start = this.stack[base - 2];
let bufferBase = this.stack[base - 1], count = this.bufferBase + this.buffer.length - bufferBase;
let base = this.stack.length - ((depth - 1) * 3) - (action & 262144 /* Action.StayFlag */ ? 6 : 0);
let start = base ? this.stack[base - 2] : this.p.ranges[0].from, size = this.reducePos - start;
// This is a kludge to try and detect overly deep left-associative
// trees, which will not increase the parse stack depth and thus
// won't be caught by the regular stack-depth limit check.
if (size >= 2000 /* Recover.MinBigReduction */ && !((_a = this.p.parser.nodeSet.types[type]) === null || _a === void 0 ? void 0 : _a.isAnonymous)) {
if (start == this.p.lastBigReductionStart) {
this.p.bigReductionCount++;
this.p.lastBigReductionSize = size;
}
else if (this.p.lastBigReductionSize < size) {
this.p.bigReductionCount = 1;
this.p.lastBigReductionStart = start;
this.p.lastBigReductionSize = size;
}
}
let bufferBase = base ? this.stack[base - 1] : 0, count = this.bufferBase + this.buffer.length - bufferBase;
// Store normal terms or `R -> R R` repeat reductions
if (type < parser.minRepeatTerm || (action & 131072 /* RepeatFlag */)) {
let pos = parser.stateFlag(this.state, 1 /* Skipped */) ? this.pos : this.reducePos;
if (type < parser.minRepeatTerm || (action & 131072 /* Action.RepeatFlag */)) {
let pos = parser.stateFlag(this.state, 1 /* StateFlag.Skipped */) ? this.pos : this.reducePos;
this.storeNode(type, start, pos, count + 4, true);
}
if (action & 262144 /* StayFlag */) {
if (action & 262144 /* Action.StayFlag */) {
this.state = this.stack[base];
}
else {
@ -126,7 +141,7 @@ class Stack {
// Shift a value into the buffer
/// @internal
storeNode(term, start, end, size = 4, isReduce = false) {
if (term == 0 /* Err */ &&
if (term == 0 /* Term.Err */ &&
(!this.stack.length || this.stack[this.stack.length - 1] < this.buffer.length + this.bufferBase)) {
// Try to omit/merge adjacent error nodes
let cur = this, top = this.buffer.length;
@ -134,7 +149,7 @@ class Stack {
top = cur.bufferBase - cur.parent.bufferBase;
cur = cur.parent;
}
if (top > 0 && cur.buffer[top - 4] == 0 /* Err */ && cur.buffer[top - 1] > -1) {
if (top > 0 && cur.buffer[top - 4] == 0 /* Term.Err */ && cur.buffer[top - 1] > -1) {
if (start == end)
return;
if (cur.buffer[top - 2] >= start) {
@ -148,7 +163,7 @@ class Stack {
}
else { // There may be skipped nodes that have to be moved forward
let index = this.buffer.length;
if (index > 0 && this.buffer[index - 4] != 0 /* Err */)
if (index > 0 && this.buffer[index - 4] != 0 /* Term.Err */)
while (index > 0 && this.buffer[index - 2] > end) {
// Move this record forward
this.buffer[index] = this.buffer[index - 4];
@ -169,14 +184,14 @@ class Stack {
/// @internal
shift(action, next, nextEnd) {
let start = this.pos;
if (action & 131072 /* GotoFlag */) {
this.pushState(action & 65535 /* ValueMask */, this.pos);
if (action & 131072 /* Action.GotoFlag */) {
this.pushState(action & 65535 /* Action.ValueMask */, this.pos);
}
else if ((action & 262144 /* StayFlag */) == 0) { // Regular shift
else if ((action & 262144 /* Action.StayFlag */) == 0) { // Regular shift
let nextState = action, { parser } = this.p;
if (nextEnd > this.pos || next <= parser.maxNode) {
this.pos = nextEnd;
if (!parser.stateFlag(nextState, 1 /* Skipped */))
if (!parser.stateFlag(nextState, 1 /* StateFlag.Skipped */))
this.reducePos = nextEnd;
}
this.pushState(nextState, start);
@ -194,7 +209,7 @@ class Stack {
// Apply an action
/// @internal
apply(action, next, nextEnd) {
if (action & 65536 /* ReduceFlag */)
if (action & 65536 /* Action.ReduceFlag */)
this.reduce(action);
else
this.shift(action, next, nextEnd);
@ -239,9 +254,9 @@ class Stack {
let isNode = next <= this.p.parser.maxNode;
if (isNode)
this.storeNode(next, this.pos, nextEnd, 4);
this.storeNode(0 /* Err */, this.pos, nextEnd, isNode ? 8 : 4);
this.storeNode(0 /* Term.Err */, this.pos, nextEnd, isNode ? 8 : 4);
this.pos = this.reducePos = nextEnd;
this.score -= 190 /* Delete */;
this.score -= 190 /* Recover.Delete */;
}
/// Check if the given term would be able to be shifted (optionally
/// after some reductions) on this stack. This can be useful for
@ -249,11 +264,11 @@ class Stack {
/// given token when it applies.
canShift(term) {
for (let sim = new SimulatedStack(this);;) {
let action = this.p.parser.stateSlot(sim.state, 4 /* DefaultReduce */) || this.p.parser.hasAction(sim.state, term);
if ((action & 65536 /* ReduceFlag */) == 0)
return true;
let action = this.p.parser.stateSlot(sim.state, 4 /* ParseState.DefaultReduce */) || this.p.parser.hasAction(sim.state, term);
if (action == 0)
return false;
if ((action & 65536 /* Action.ReduceFlag */) == 0)
return true;
sim.reduce(action);
}
}
@ -261,17 +276,17 @@ class Stack {
// inserts some missing token or rule.
/// @internal
recoverByInsert(next) {
if (this.stack.length >= 300 /* MaxInsertStackDepth */)
if (this.stack.length >= 300 /* Recover.MaxInsertStackDepth */)
return [];
let nextStates = this.p.parser.nextStates(this.state);
if (nextStates.length > 4 /* MaxNext */ << 1 || this.stack.length >= 120 /* DampenInsertStackDepth */) {
if (nextStates.length > 4 /* Recover.MaxNext */ << 1 || this.stack.length >= 120 /* Recover.DampenInsertStackDepth */) {
let best = [];
for (let i = 0, s; i < nextStates.length; i += 2) {
if ((s = nextStates[i + 1]) != this.state && this.p.parser.hasAction(s, next))
best.push(nextStates[i], s);
}
if (this.stack.length < 120 /* DampenInsertStackDepth */)
for (let i = 0; best.length < 4 /* MaxNext */ << 1 && i < nextStates.length; i += 2) {
if (this.stack.length < 120 /* Recover.DampenInsertStackDepth */)
for (let i = 0; best.length < 4 /* Recover.MaxNext */ << 1 && i < nextStates.length; i += 2) {
let s = nextStates[i + 1];
if (!best.some((v, i) => (i & 1) && v == s))
best.push(nextStates[i], s);
@ -279,15 +294,15 @@ class Stack {
nextStates = best;
}
let result = [];
for (let i = 0; i < nextStates.length && result.length < 4 /* MaxNext */; i += 2) {
for (let i = 0; i < nextStates.length && result.length < 4 /* Recover.MaxNext */; i += 2) {
let s = nextStates[i + 1];
if (s == this.state)
continue;
let stack = this.split();
stack.pushState(s, this.pos);
stack.storeNode(0 /* Err */, stack.pos, stack.pos, 4, true);
stack.storeNode(0 /* Term.Err */, stack.pos, stack.pos, 4, true);
stack.shiftContext(nextStates[i], this.pos);
stack.score -= 200 /* Insert */;
stack.score -= 200 /* Recover.Insert */;
result.push(stack);
}
return result;
@ -296,17 +311,17 @@ class Stack {
// be done.
/// @internal
forceReduce() {
let reduce = this.p.parser.stateSlot(this.state, 5 /* ForcedReduce */);
if ((reduce & 65536 /* ReduceFlag */) == 0)
let reduce = this.p.parser.stateSlot(this.state, 5 /* ParseState.ForcedReduce */);
if ((reduce & 65536 /* Action.ReduceFlag */) == 0)
return false;
let { parser } = this.p;
if (!parser.validAction(this.state, reduce)) {
let depth = reduce >> 19 /* ReduceDepthShift */, term = reduce & 65535 /* ValueMask */;
let depth = reduce >> 19 /* Action.ReduceDepthShift */, term = reduce & 65535 /* Action.ValueMask */;
let target = this.stack.length - depth * 3;
if (target < 0 || parser.getGoto(this.stack[target], term, false) < 0)
return false;
this.storeNode(0 /* Err */, this.reducePos, this.reducePos, 4, true);
this.score -= 100 /* Reduce */;
this.storeNode(0 /* Term.Err */, this.reducePos, this.reducePos, 4, true);
this.score -= 100 /* Recover.Reduce */;
}
this.reducePos = this.pos;
this.reduce(reduce);
@ -314,9 +329,9 @@ class Stack {
}
/// @internal
forceAll() {
while (!this.p.parser.stateFlag(this.state, 2 /* Accepting */)) {
while (!this.p.parser.stateFlag(this.state, 2 /* StateFlag.Accepting */)) {
if (!this.forceReduce()) {
this.storeNode(0 /* Err */, this.pos, this.pos, 4, true);
this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
break;
}
}
@ -329,8 +344,8 @@ class Stack {
if (this.stack.length != 3)
return false;
let { parser } = this.p;
return parser.data[parser.stateSlot(this.state, 1 /* Actions */)] == 65535 /* End */ &&
!parser.stateSlot(this.state, 4 /* DefaultReduce */);
return parser.data[parser.stateSlot(this.state, 1 /* ParseState.Actions */)] == 65535 /* Seq.End */ &&
!parser.stateSlot(this.state, 4 /* ParseState.DefaultReduce */);
}
/// Restart the stack (put it back in its start state). Only safe
/// when this.stack.length == 3 (state is directly below the top
@ -411,6 +426,7 @@ var Recover;
Recover[Recover["MaxNext"] = 4] = "MaxNext";
Recover[Recover["MaxInsertStackDepth"] = 300] = "MaxInsertStackDepth";
Recover[Recover["DampenInsertStackDepth"] = 120] = "DampenInsertStackDepth";
Recover[Recover["MinBigReduction"] = 2000] = "MinBigReduction";
})(Recover || (Recover = {}));
// Used to cheaply run some reductions to scan ahead without mutating
// an entire stack
@ -422,7 +438,7 @@ class SimulatedStack {
this.base = this.stack.length;
}
reduce(action) {
let term = action & 65535 /* ValueMask */, depth = action >> 19 /* ReduceDepthShift */;
let term = action & 65535 /* Action.ValueMask */, depth = action >> 19 /* Action.ReduceDepthShift */;
if (depth == 0) {
if (this.stack == this.start.stack)
this.stack = this.stack.slice();
@ -473,6 +489,42 @@ class StackBufferCursor {
}
}
// See lezer-generator/src/encode.ts for comments about the encoding
// used here
function decodeArray(input, Type = Uint16Array) {
if (typeof input != "string")
return input;
let array = null;
for (let pos = 0, out = 0; pos < input.length;) {
let value = 0;
for (;;) {
let next = input.charCodeAt(pos++), stop = false;
if (next == 126 /* Encode.BigValCode */) {
value = 65535 /* Encode.BigVal */;
break;
}
if (next >= 92 /* Encode.Gap2 */)
next--;
if (next >= 34 /* Encode.Gap1 */)
next--;
let digit = next - 32 /* Encode.Start */;
if (digit >= 46 /* Encode.Base */) {
digit -= 46 /* Encode.Base */;
stop = true;
}
value += digit;
if (stop)
break;
value *= 46 /* Encode.Base */;
}
if (array)
array[out++] = value;
else
array = new Type(value);
}
return array;
}
class CachedToken {
constructor() {
this.start = -1;
@ -536,6 +588,15 @@ class InputStream {
}
return pos;
}
/// @internal
clipPos(pos) {
if (pos >= this.range.from && pos < this.range.to)
return pos;
for (let range of this.ranges)
if (range.to > pos)
return Math.max(pos, range.from);
return this.end;
}
/// Look at a code unit near the stream position. `.peek(0)` equals
/// `.next`, `.peek(-1)` gives you the previous character, and so
/// on.
@ -688,9 +749,40 @@ class TokenGroup {
this.data = data;
this.id = id;
}
token(input, stack) { readToken(this.data, input, stack, this.id); }
token(input, stack) {
let { parser } = stack.p;
readToken(this.data, input, stack, this.id, parser.data, parser.tokenPrecTable);
}
}
TokenGroup.prototype.contextual = TokenGroup.prototype.fallback = TokenGroup.prototype.extend = false;
/// @hide
class LocalTokenGroup {
constructor(data, precTable, elseToken) {
this.precTable = precTable;
this.elseToken = elseToken;
this.data = typeof data == "string" ? decodeArray(data) : data;
}
token(input, stack) {
let start = input.pos, cur;
for (;;) {
cur = input.pos;
readToken(this.data, input, stack, 0, this.data, this.precTable);
if (input.token.value > -1)
break;
if (this.elseToken == null)
return;
if (input.next < 0)
break;
input.advance();
input.reset(cur + 1, input.token);
}
if (cur > start) {
input.reset(start, input.token);
input.acceptToken(this.elseToken, cur - start);
}
}
}
LocalTokenGroup.prototype.contextual = TokenGroup.prototype.fallback = TokenGroup.prototype.extend = false;
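// Assumed scenario to illustrate the loop above (not library code): with a
// LocalTokenGroup whose elseToken is some `Content` term, input like "abc<"
// where none of the group's tokens match at "a", "b" or "c" makes the loop
// advance one position at a time until a match (or end of input) is found
// at "<", after which a single elseToken covering "abc" is accepted with
// input.acceptToken(this.elseToken, cur - start).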
/// `@external tokens` declarations in the grammar should resolve to
/// an instance of this class.
class ExternalTokenizer {
@ -727,9 +819,9 @@ class ExternalTokenizer {
//
// This function interprets that data, running through a stream as
// long as new states with a matching group mask can be reached,
// and updating `token` when it matches a token.
function readToken(data, input, stack, group) {
let state = 0, groupMask = 1 << group, { parser } = stack.p, { dialect } = parser;
// and updating `input.token` when it matches a token.
function readToken(data, input, stack, group, precTable, precOffset) {
let state = 0, groupMask = 1 << group, { dialect } = stack.p.parser;
scan: for (;;) {
if ((groupMask & data[state]) == 0)
break;
@ -741,16 +833,23 @@ function readToken(data, input, stack, group) {
if ((data[i + 1] & groupMask) > 0) {
let term = data[i];
if (dialect.allows(term) &&
(input.token.value == -1 || input.token.value == term || parser.overrides(term, input.token.value))) {
(input.token.value == -1 || input.token.value == term ||
overrides(term, input.token.value, precTable, precOffset))) {
input.acceptToken(term);
break;
}
}
let next = input.next, low = 0, high = data[state + 2];
// Special case for EOF
if (input.next < 0 && high > low && data[accEnd + high * 3 - 3] == 65535 /* Seq.End */ && data[accEnd + high * 3 - 3] == 65535 /* Seq.End */) {
state = data[accEnd + high * 3 - 1];
continue scan;
}
// Do a binary search on the state's edges
for (let next = input.next, low = 0, high = data[state + 2]; low < high;) {
for (; low < high;) {
let mid = (low + high) >> 1;
let index = accEnd + mid + (mid << 1);
let from = data[index], to = data[index + 1];
let from = data[index], to = data[index + 1] || 0x10000;
if (next < from)
high = mid;
else if (next >= to)
@ -764,41 +863,15 @@ function readToken(data, input, stack, group) {
break;
}
}
// See lezer-generator/src/encode.ts for comments about the encoding
// used here
function decodeArray(input, Type = Uint16Array) {
if (typeof input != "string")
return input;
let array = null;
for (let pos = 0, out = 0; pos < input.length;) {
let value = 0;
for (;;) {
let next = input.charCodeAt(pos++), stop = false;
if (next == 126 /* BigValCode */) {
value = 65535 /* BigVal */;
break;
}
if (next >= 92 /* Gap2 */)
next--;
if (next >= 34 /* Gap1 */)
next--;
let digit = next - 32 /* Start */;
if (digit >= 46 /* Base */) {
digit -= 46 /* Base */;
stop = true;
}
value += digit;
if (stop)
break;
value *= 46 /* Base */;
}
if (array)
array[out++] = value;
else
array = new Type(value);
}
return array;
function findOffset(data, start, term) {
for (let i = start, next; (next = data[i]) != 65535 /* Seq.End */; i++)
if (next == term)
return i - start;
return -1;
}
function overrides(token, prev, tableData, tableOffset) {
let iPrev = findOffset(tableData, tableOffset, prev);
return iPrev < 0 || findOffset(tableData, tableOffset, token) < iPrev;
}
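// Worked example with a hypothetical precedence table (not from a real
// grammar): for tableData [5, 7, 9, 65535 /* Seq.End */] at tableOffset 0,
// overrides(5, 9, ...) is true because term 5 is listed before term 9,
// overrides(9, 5, ...) is false, and any `prev` term missing from the
// table can be overridden since findOffset returns -1 for it.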
// Environment variable used to control console output
@ -815,8 +888,8 @@ function cutAt(tree, pos, side) {
if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos)))
for (;;) {
if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError)
return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Margin */))
: Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Margin */));
return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Safety.Margin */))
: Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Safety.Margin */));
if (side < 0 ? cursor.prevSibling() : cursor.nextSibling())
break;
if (!cursor.parent())
@ -920,7 +993,7 @@ class TokenCache {
let actionIndex = 0;
let main = null;
let { parser } = stack.p, { tokenizers } = parser;
let mask = parser.stateSlot(stack.state, 3 /* TokenizerMask */);
let mask = parser.stateSlot(stack.state, 3 /* ParseState.TokenizerMask */);
let context = stack.curContext ? stack.curContext.hash : 0;
let lookAhead = 0;
for (let i = 0; i < tokenizers.length; i++) {
@ -934,9 +1007,9 @@ class TokenCache {
token.mask = mask;
token.context = context;
}
if (token.lookAhead > token.end + 25 /* Margin */)
if (token.lookAhead > token.end + 25 /* Safety.Margin */)
lookAhead = Math.max(token.lookAhead, lookAhead);
if (token.value != 0 /* Err */) {
if (token.value != 0 /* Term.Err */) {
let startIndex = actionIndex;
if (token.extended > -1)
actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
@ -967,18 +1040,19 @@ class TokenCache {
let main = new CachedToken, { pos, p } = stack;
main.start = pos;
main.end = Math.min(pos + 1, p.stream.end);
main.value = pos == p.stream.end ? p.parser.eofTerm : 0 /* Err */;
main.value = pos == p.stream.end ? p.parser.eofTerm : 0 /* Term.Err */;
return main;
}
updateCachedToken(token, tokenizer, stack) {
tokenizer.token(this.stream.reset(stack.pos, token), stack);
let start = this.stream.clipPos(stack.pos);
tokenizer.token(this.stream.reset(start, token), stack);
if (token.value > -1) {
let { parser } = stack.p;
for (let i = 0; i < parser.specialized.length; i++)
if (parser.specialized[i] == token.value) {
let result = parser.specializers[i](this.stream.read(token.start, token.end), stack);
if (result >= 0 && stack.p.parser.dialect.allows(result >> 1)) {
if ((result & 1) == 0 /* Specialize */)
if ((result & 1) == 0 /* Specialize.Specialize */)
token.value = result >> 1;
else
token.extended = result >> 1;
@ -987,8 +1061,8 @@ class TokenCache {
}
}
else {
token.value = 0 /* Err */;
token.end = Math.min(stack.p.stream.end, stack.pos + 1);
token.value = 0 /* Term.Err */;
token.end = this.stream.clipPos(start + 1);
}
}
putAction(action, token, end, index) {
@ -1004,13 +1078,13 @@ class TokenCache {
addActions(stack, token, end, index) {
let { state } = stack, { parser } = stack.p, { data } = parser;
for (let set = 0; set < 2; set++) {
for (let i = parser.stateSlot(state, set ? 2 /* Skip */ : 1 /* Actions */);; i += 3) {
if (data[i] == 65535 /* End */) {
if (data[i + 1] == 1 /* Next */) {
for (let i = parser.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */);; i += 3) {
if (data[i] == 65535 /* Seq.End */) {
if (data[i + 1] == 1 /* Seq.Next */) {
i = pair(data, i + 2);
}
else {
if (index == 0 && data[i + 1] == 2 /* Other */)
if (index == 0 && data[i + 1] == 2 /* Seq.Other */)
index = this.putAction(pair(data, i + 2), token, end, index);
break;
}
@ -1035,6 +1109,11 @@ var Rec;
// on recursive traversal.
Rec[Rec["CutDepth"] = 15000] = "CutDepth";
Rec[Rec["CutTo"] = 9000] = "CutTo";
Rec[Rec["MaxLeftAssociativeReductionCount"] = 300] = "MaxLeftAssociativeReductionCount";
// The maximum number of non-recovering stacks to explore (to avoid
// getting bogged down with exponentially multiplying stacks in
// ambiguous content)
Rec[Rec["MaxStackCount"] = 12] = "MaxStackCount";
})(Rec || (Rec = {}));
class Parse {
constructor(parser, input, fragments, ranges) {
@ -1046,6 +1125,9 @@ class Parse {
this.minStackPos = 0;
this.reused = [];
this.stoppedAt = null;
this.lastBigReductionStart = -1;
this.lastBigReductionSize = 0;
this.bigReductionCount = 0;
this.stream = new InputStream(input, ranges);
this.tokens = new TokenCache(parser, this.stream);
this.topTerm = parser.top[1];
@ -1068,6 +1150,18 @@ class Parse {
// This will hold stacks beyond `pos`.
let newStacks = this.stacks = [];
let stopped, stoppedTokens;
// If a large number of reductions happened with the same start
// position, force the stack out of that production in order to
// avoid creating a tree too deep to recurse through.
// (This is an ugly kludge, because unfortunately there is no
// straightforward, cheap way to check for this happening, due to
// the history of reductions only being available in an
// expensive-to-access format in the stack buffers.)
if (this.bigReductionCount > 300 /* Rec.MaxLeftAssociativeReductionCount */ && stacks.length == 1) {
let [s] = stacks;
while (s.forceReduce() && s.stack.length && s.stack[s.stack.length - 2] >= this.lastBigReductionStart) { }
this.bigReductionCount = this.lastBigReductionSize = 0;
}
// Keep advancing any stacks at `pos` until they either move
// forward or can't be advanced. Gather stacks that can't be
// advanced further in `stopped`.
@ -1103,7 +1197,7 @@ class Parse {
throw new SyntaxError("No parse at " + pos);
}
if (!this.recovering)
this.recovering = 5 /* Distance */;
this.recovering = 5 /* Rec.Distance */;
}
if (this.recovering && stopped) {
let finished = this.stoppedAt != null && stopped[0].pos > this.stoppedAt ? stopped[0]
@ -1112,7 +1206,7 @@ class Parse {
return this.stackToTree(finished.forceAll());
}
if (this.recovering) {
let maxRemaining = this.recovering == 1 ? 1 : this.recovering * 3 /* MaxRemainingPerStep */;
let maxRemaining = this.recovering == 1 ? 1 : this.recovering * 3 /* Rec.MaxRemainingPerStep */;
if (newStacks.length > maxRemaining) {
newStacks.sort((a, b) => b.score - a.score);
while (newStacks.length > maxRemaining)
@ -1130,7 +1224,7 @@ class Parse {
for (let j = i + 1; j < newStacks.length; j++) {
let other = newStacks[j];
if (stack.sameState(other) ||
stack.buffer.length > 500 /* MinBufferLengthPrune */ && other.buffer.length > 500 /* MinBufferLengthPrune */) {
stack.buffer.length > 500 /* Rec.MinBufferLengthPrune */ && other.buffer.length > 500 /* Rec.MinBufferLengthPrune */) {
if (((stack.score - other.score) || (stack.buffer.length - other.buffer.length)) > 0) {
newStacks.splice(j--, 1);
}
@ -1141,6 +1235,8 @@ class Parse {
}
}
}
if (newStacks.length > 12 /* Rec.MaxStackCount */)
newStacks.splice(12 /* Rec.MaxStackCount */, newStacks.length - 12 /* Rec.MaxStackCount */);
}
this.minStackPos = newStacks[0].pos;
for (let i = 1; i < newStacks.length; i++)
@ -1181,15 +1277,15 @@ class Parse {
break;
}
}
let defaultReduce = parser.stateSlot(stack.state, 4 /* DefaultReduce */);
let defaultReduce = parser.stateSlot(stack.state, 4 /* ParseState.DefaultReduce */);
if (defaultReduce > 0) {
stack.reduce(defaultReduce);
if (verbose)
console.log(base + this.stackID(stack) + ` (via always-reduce ${parser.getName(defaultReduce & 65535 /* ValueMask */)})`);
console.log(base + this.stackID(stack) + ` (via always-reduce ${parser.getName(defaultReduce & 65535 /* Action.ValueMask */)})`);
return true;
}
if (stack.stack.length >= 15000 /* CutDepth */) {
while (stack.stack.length > 9000 /* CutTo */ && stack.forceReduce()) { }
if (stack.stack.length >= 15000 /* Rec.CutDepth */) {
while (stack.stack.length > 9000 /* Rec.CutTo */ && stack.forceReduce()) { }
}
let actions = this.tokens.getActions(stack);
for (let i = 0; i < actions.length;) {
@ -1198,8 +1294,8 @@ class Parse {
let localStack = last ? stack : stack.split();
localStack.apply(action, term, end);
if (verbose)
console.log(base + this.stackID(localStack) + ` (via ${(action & 65536 /* ReduceFlag */) == 0 ? "shift"
: `reduce of ${parser.getName(action & 65535 /* ValueMask */)}`} for ${parser.getName(term)} @ ${start}${localStack == stack ? "" : ", split"})`);
console.log(base + this.stackID(localStack) + ` (via ${(action & 65536 /* Action.ReduceFlag */) == 0 ? "shift"
: `reduce of ${parser.getName(action & 65535 /* Action.ValueMask */)}`} for ${parser.getName(term)} @ ${start}${localStack == stack ? "" : ", split"})`);
if (last)
return true;
else if (localStack.pos > start)
@ -1240,7 +1336,7 @@ class Parse {
continue;
}
let force = stack.split(), forceBase = base;
for (let j = 0; force.forceReduce() && j < 10 /* ForceReduceLimit */; j++) {
for (let j = 0; force.forceReduce() && j < 10 /* Rec.ForceReduceLimit */; j++) {
if (verbose)
console.log(forceBase + this.stackID(force) + " (via force-reduce)");
let done = this.advanceFully(force, newStacks);
@ -1257,7 +1353,7 @@ class Parse {
if (this.stream.end > stack.pos) {
if (tokenEnd == stack.pos) {
tokenEnd++;
token = 0 /* Err */;
token = 0 /* Term.Err */;
}
stack.recoverByDelete(token, tokenEnd);
if (verbose)
@ -1339,8 +1435,8 @@ class LRParser extends Parser {
super();
/// @internal
this.wrappers = [];
if (spec.version != 14 /* Version */)
throw new RangeError(`Parser version (${spec.version}) doesn't match runtime version (${14 /* Version */})`);
if (spec.version != 14 /* File.Version */)
throw new RangeError(`Parser version (${spec.version}) doesn't match runtime version (${14 /* File.Version */})`);
let nodeNames = spec.nodeNames.split(" ");
this.minRepeatTerm = nodeNames.length;
for (let i = 0; i < spec.repeatNodeCount; i++)
@ -1384,13 +1480,11 @@ class LRParser extends Parser {
this.bufferLength = DefaultBufferLength;
let tokenArray = decodeArray(spec.tokenData);
this.context = spec.context;
this.specialized = new Uint16Array(spec.specialized ? spec.specialized.length : 0);
this.specializers = [];
if (spec.specialized)
for (let i = 0; i < spec.specialized.length; i++) {
this.specialized[i] = spec.specialized[i].term;
this.specializers[i] = spec.specialized[i].get;
}
this.specializerSpecs = spec.specialized || [];
this.specialized = new Uint16Array(this.specializerSpecs.length);
for (let i = 0; i < this.specializerSpecs.length; i++)
this.specialized[i] = this.specializerSpecs[i].term;
this.specializers = this.specializerSpecs.map(getSpecializer);
this.states = decodeArray(spec.states, Uint32Array);
this.data = decodeArray(spec.stateData);
this.goto = decodeArray(spec.goto);
@ -1432,16 +1526,16 @@ class LRParser extends Parser {
hasAction(state, terminal) {
let data = this.data;
for (let set = 0; set < 2; set++) {
for (let i = this.stateSlot(state, set ? 2 /* Skip */ : 1 /* Actions */), next;; i += 3) {
if ((next = data[i]) == 65535 /* End */) {
if (data[i + 1] == 1 /* Next */)
for (let i = this.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */), next;; i += 3) {
if ((next = data[i]) == 65535 /* Seq.End */) {
if (data[i + 1] == 1 /* Seq.Next */)
next = data[i = pair(data, i + 2)];
else if (data[i + 1] == 2 /* Other */)
else if (data[i + 1] == 2 /* Seq.Other */)
return pair(data, i + 2);
else
break;
}
if (next == terminal || next == 0 /* Err */)
if (next == terminal || next == 0 /* Term.Err */)
return pair(data, i + 1);
}
}
@ -1449,19 +1543,19 @@ class LRParser extends Parser {
}
/// @internal
stateSlot(state, slot) {
return this.states[(state * 6 /* Size */) + slot];
return this.states[(state * 6 /* ParseState.Size */) + slot];
}
/// @internal
stateFlag(state, flag) {
return (this.stateSlot(state, 0 /* Flags */) & flag) > 0;
return (this.stateSlot(state, 0 /* ParseState.Flags */) & flag) > 0;
}
/// @internal
validAction(state, action) {
if (action == this.stateSlot(state, 4 /* DefaultReduce */))
if (action == this.stateSlot(state, 4 /* ParseState.DefaultReduce */))
return true;
for (let i = this.stateSlot(state, 1 /* Actions */);; i += 3) {
if (this.data[i] == 65535 /* End */) {
if (this.data[i + 1] == 1 /* Next */)
for (let i = this.stateSlot(state, 1 /* ParseState.Actions */);; i += 3) {
if (this.data[i] == 65535 /* Seq.End */) {
if (this.data[i + 1] == 1 /* Seq.Next */)
i = pair(this.data, i + 2);
else
return false;
@ -1474,14 +1568,14 @@ class LRParser extends Parser {
/// goto jumps. @internal
nextStates(state) {
let result = [];
for (let i = this.stateSlot(state, 1 /* Actions */);; i += 3) {
if (this.data[i] == 65535 /* End */) {
if (this.data[i + 1] == 1 /* Next */)
for (let i = this.stateSlot(state, 1 /* ParseState.Actions */);; i += 3) {
if (this.data[i] == 65535 /* Seq.End */) {
if (this.data[i + 1] == 1 /* Seq.Next */)
i = pair(this.data, i + 2);
else
break;
}
if ((this.data[i + 2] & (65536 /* ReduceFlag */ >> 16)) == 0) {
if ((this.data[i + 2] & (65536 /* Action.ReduceFlag */ >> 16)) == 0) {
let value = this.data[i + 1];
if (!result.some((v, i) => (i & 1) && v == value))
result.push(this.data[i], value);
@ -1489,11 +1583,6 @@ class LRParser extends Parser {
}
return result;
}
/// @internal
overrides(token, prev) {
let iPrev = findOffset(this.data, this.tokenPrecTable, prev);
return iPrev < 0 || findOffset(this.data, this.tokenPrecTable, token) < iPrev;
}
/// Configure the parser. Returns a new parser instance that has the
/// given settings modified. Settings not provided in `config` are
/// kept from the original parser.
@ -1514,6 +1603,17 @@ class LRParser extends Parser {
let found = config.tokenizers.find(r => r.from == t);
return found ? found.to : t;
});
if (config.specializers) {
copy.specializers = this.specializers.slice();
copy.specializerSpecs = this.specializerSpecs.map((s, i) => {
let found = config.specializers.find(r => r.from == s.external);
if (!found)
return s;
let spec = Object.assign(Object.assign({}, s), { external: found.to });
copy.specializers[i] = getSpecializer(spec);
return spec;
});
}
if (config.contextTracker)
copy.context = config.contextTracker;
if (config.dialect)
@ -1560,33 +1660,35 @@ class LRParser extends Parser {
let disabled = null;
for (let i = 0; i < values.length; i++)
if (!flags[i]) {
for (let j = this.dialects[values[i]], id; (id = this.data[j++]) != 65535 /* End */;)
for (let j = this.dialects[values[i]], id; (id = this.data[j++]) != 65535 /* Seq.End */;)
(disabled || (disabled = new Uint8Array(this.maxTerm + 1)))[id] = 1;
}
return new Dialect(dialect, flags, disabled);
}
/// (used by the output of the parser generator) @internal
/// Used by the output of the parser generator. Not available to
/// user code. @hide
static deserialize(spec) {
return new LRParser(spec);
}
}
function pair(data, off) { return data[off] | (data[off + 1] << 16); }
function findOffset(data, start, term) {
for (let i = start, next; (next = data[i]) != 65535 /* End */; i++)
if (next == term)
return i - start;
return -1;
}
function findFinished(stacks) {
let best = null;
for (let stack of stacks) {
let stopped = stack.p.stoppedAt;
if ((stack.pos == stack.p.stream.end || stopped != null && stack.pos > stopped) &&
stack.p.parser.stateFlag(stack.state, 2 /* Accepting */) &&
stack.p.parser.stateFlag(stack.state, 2 /* StateFlag.Accepting */) &&
(!best || best.score < stack.score))
best = stack;
}
return best;
}
function getSpecializer(spec) {
if (spec.external) {
let mask = spec.extend ? 1 /* Specialize.Extend */ : 0 /* Specialize.Specialize */;
return (value, stack) => (spec.external(value, stack) << 1) | mask;
}
return spec.get;
}
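// Note on the wrapper above: the term id returned by an external specializer
// is shifted left one bit, and the low bit marks extend (1) versus
// specialize (0); updateCachedToken unpacks this with `result & 1` and
// `result >> 1`.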
export { ContextTracker, ExternalTokenizer, InputStream, LRParser, Stack };
export { ContextTracker, ExternalTokenizer, InputStream, LRParser, LocalTokenGroup, Stack };
View file
@ -45,6 +45,9 @@ export declare class Parse implements PartialParse {
tokens: TokenCache;
topTerm: number;
stoppedAt: null | number;
lastBigReductionStart: number;
lastBigReductionSize: number;
bigReductionCount: number;
constructor(parser: LRParser, input: Input, fragments: readonly TreeFragment[], ranges: readonly {
from: number;
to: number;
@ -83,6 +86,10 @@ export interface ParserConfig {
from: ExternalTokenizer;
to: ExternalTokenizer;
}[];
specializers?: {
from: (value: string, stack: Stack) => number;
to: (value: string, stack: Stack) => number;
}[];
contextTracker?: ContextTracker<any>;
strict?: boolean;
wrap?: ParseWrapper;
@ -98,5 +105,6 @@ export declare class LRParser extends Parser {
hasWrappers(): boolean;
getName(term: number): string;
get topNode(): NodeType;
static deserialize(spec: any): LRParser;
}
export {};
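// A minimal usage sketch of the `specializers` option declared above,
// mirroring the existing `tokenizers` remapping; `parser`, `keywordSpec`
// and `SomeTerm` are hypothetical values from a generated grammar:
//
//   const patched = parser.configure({
//     specializers: [{
//       from: keywordSpec,
//       to: (value, stack) => value == "foo" ? SomeTerm : -1
//     }]
//   })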
View file
@ -15,7 +15,8 @@ export declare const enum Recover {
Reduce = 100,
MaxNext = 4,
MaxInsertStackDepth = 300,
DampenInsertStackDepth = 120
DampenInsertStackDepth = 120,
MinBigReduction = 2000
}
export declare class StackBufferCursor implements BufferCursor {
stack: Stack;
View file
@ -24,6 +24,16 @@ export declare class InputStream {
}
export interface Tokenizer {
}
export declare class LocalTokenGroup implements Tokenizer {
readonly precTable: number;
readonly elseToken?: number;
contextual: boolean;
fallback: boolean;
extend: boolean;
readonly data: Readonly<Uint16Array>;
constructor(data: Readonly<Uint16Array> | string, precTable: number, elseToken?: number);
token(input: InputStream, stack: Stack): void;
}
interface ExternalOptions {
contextual?: boolean;
fallback?: boolean;
View file
@ -1,6 +1,6 @@
{
"name": "@lezer/lr",
"version": "1.0.0",
"version": "1.3.3",
"description": "Incremental parser",
"main": "dist/index.cjs",
"type": "module",
@ -10,7 +10,7 @@
},
"module": "dist/index.js",
"types": "dist/index.d.ts",
"author": "Marijn Haverbeke <marijnh@gmail.com>",
"author": "Marijn Haverbeke <marijn@haverbeke.berlin>",
"license": "MIT",
"repository": {
"type" : "git",
@ -20,7 +20,7 @@
"rollup": "^2.52.2",
"@rollup/plugin-commonjs": "^15.1.0",
"@rollup/plugin-node-resolve": "^9.0.0",
"rollup-plugin-typescript2": "^0.30.0",
"rollup-plugin-typescript2": "^0.34.1",
"typescript": "^4.3.4"
},
"dependencies": {