// compose_syntax/parser/mod.rs

1mod control_flow;
2mod expressions;
3mod funcs;
4mod patterns;
5mod statements;
6
7use crate::file::FileId;
8use crate::kind::SyntaxKind;
9use crate::node::SyntaxNode;
10use crate::scanner::Scanner;
11use crate::set::{SyntaxSet, syntax_set};
12use crate::{Lexer, Span, SyntaxError};
13use compose_utils::trace_log;
14use ecow::{EcoString, eco_format};
15use expressions::err_unclosed_delim;
16use std::collections::HashMap;
17use std::ops::{Index, IndexMut, Range};
18
/// Describes the grammatical position an expression is parsed in.
///
/// The surrounding context decides whether constructs such as closures or
/// assignments are legal, and is used to disambiguate grammar cases during
/// parsing.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ExprContext {
    /// Ordinary expression position; most expression forms are permitted.
    Expr,

    /// Atomic expression position. Rules out constructs such as closures or
    /// binary operations. Typically used for expression heads or in
    /// disambiguation scenarios.
    AtomicExpr,

    /// Top level of a statement. Permits forms like assignments or
    /// let-bindings that are invalid in pure expression positions.
    Statement,
}

impl ExprContext {
    /// Whether this context parses a full expression.
    ///
    /// True for both `Expr` and `AtomicExpr`; `Statement` is excluded because
    /// statements are expected to handle their own expression cases.
    fn is_expr(&self) -> bool {
        matches!(self, ExprContext::Expr | ExprContext::AtomicExpr)
    }

    /// Whether only atomic (non-composite) expressions are allowed here.
    ///
    /// Typically used to suppress recursive constructs like closures, binary
    /// expressions, or grouping in places like destructuring patterns.
    fn is_atomic(&self) -> bool {
        matches!(self, ExprContext::AtomicExpr)
    }

    /// The context to use when descending into a sub-expression.
    ///
    /// `Statement` downgrades to `Expr` to parse an inner expression; the
    /// other contexts are preserved unchanged.
    fn to_expr(self) -> ExprContext {
        if let ExprContext::Statement = self {
            ExprContext::Expr
        } else {
            self
        }
    }
}
76
77/// Parses one or more statements from the given input text, producing a concrete syntax tree (CST).
78///
79/// This is the main entry point for parsing Compose code. It assumes the input
80/// starts with a valid statement or leading whitespace. The parser is fault-tolerant,
81/// so even if the input contains syntax errors, it will return a complete tree
82/// including [`SyntaxError`] nodes where appropriate.
83///
84/// # Parameters
85///
86/// - `text`: The source text to parse.
87/// - `file_id`: An identifier for the file the text originated from, used for diagnostics.
88///
89/// # Returns
90///
91/// A vector of [`SyntaxNode`]s representing the parsed statements and any encountered
92/// syntax errors as [`SyntaxError`]s.
93pub fn parse(text: &str, file_id: FileId) -> Vec<SyntaxNode> {
94    parse_with_offset(text, file_id, 0)
95}
96
97/// Parses one or more statements from the input text, starting at a given offset, producing a CST.
98///
99/// This function is useful for parsing code fragments embedded in larger files,
100/// such as REPL inputs or inline statements. It assumes the text at `offset`
101/// begins with a valid statement or leading whitespace. The parser is fault-tolerant
102/// and will include [`SyntaxError`] in the result to represent syntax issues.
103///
104/// # Parameters
105///
106/// - `text`: The source text containing the statement(s).
107/// - `file_id`: An identifier for the file the text originated from, used for diagnostics.
108/// - `offset`: The byte offset into `text` at which parsing should begin.
109///
110/// # Returns
111///
112/// A vector of [`SyntaxNode`]s representing the parsed statements, including
113/// [`SyntaxError`] for any syntax issues encountered.
114pub fn parse_with_offset(text: &str, file_id: FileId, offset: usize) -> Vec<SyntaxNode> {
115    let mut p = Parser::new(text, offset, file_id);
116
117    statements::code(&mut p, syntax_set!(End));
118
119    p.finish()
120}
121
122impl Index<Marker> for Parser<'_> {
123    type Output = SyntaxNode;
124
125    fn index(&self, index: Marker) -> &Self::Output {
126        &self.nodes[index.0]
127    }
128}
129
130impl IndexMut<Marker> for Parser<'_> {
131    fn index_mut(&mut self, index: Marker) -> &mut Self::Output {
132        &mut self.nodes[index.0]
133    }
134}
135
/// A recursive-descent parser for Compose syntax.
///
/// This parser constructs a concrete syntax tree (CST) from a stream of tokens
/// produced by the lexer. The resulting CST is stored as a flat list of [`SyntaxNode`]s,
/// but supports a hierarchical structure via `InnerNode`s, which groups
/// child nodes under a parent [`SyntaxKind`].
///
/// # Design
///
/// - **Fault-tolerant:** Instead of failing on invalid input, the parser emits
///   error nodes and continues parsing, making it robust in interactive
///   environments.
/// - **Flat representation with nesting:** While all nodes are stored in a flat
///   `Vec<SyntaxNode>`, hierarchical structure is expressed through `SyntaxNode::Inner`,
///   which wraps a contiguous range of child nodes.
/// - **Lazy AST:** The CST nodes contain enough information to construct the AST
///   on demand, deferring costly semantic interpretation until needed.
/// - **Memoized backtracking:** Internal memoization supports efficient parsing with
///   lookahead and recovery.
#[derive(Debug)]
pub struct Parser<'s> {
    /// The full source text being parsed.
    text: &'s str,

    /// The lexer that tokenizes the input text on demand.
    lexer: Lexer<'s>,

    /// The current token being examined by the parser.
    pub(crate) token: Token,

    /// Tracks whether the parser is currently inside a balanced region (e.g., matching braces).
    /// Cleared when a grouping token is reported missing or unexpected.
    balanced: bool,

    /// The list of syntax nodes produced during parsing.
    ///
    /// This includes both well-formed constructs and `SyntaxNode::Error` entries
    /// for malformed or incomplete input. Nesting is expressed using
    /// `SyntaxNode::Inner`, which groups a contiguous sequence of child nodes.
    pub(crate) nodes: Vec<SyntaxNode>,

    /// The byte offset of the last consumed token in the input text.
    // NOTE(review): only initialized (to 0) in this module; presumably updated
    // by the sibling parser modules — confirm before relying on it here.
    pub(crate) last_pos: usize,

    /// Memoization table used to support efficient backtracking and recovery.
    memo: MemoArena,
}
193
/// A snapshot of parser state, used for backtracking.
///
/// Captured via `Parser::checkpoint` and re-applied via `Parser::restore`
/// (or `Parser::restore_partial`, which keeps already-produced nodes).
#[derive(Debug, Clone)]
pub struct CheckPoint {
    /// Length of `Parser::nodes` when the snapshot was taken.
    pub(crate) nodes_len: usize,
    /// Lexer byte cursor to jump back to.
    lexer_cursor: usize,
    /// The token that was current when the snapshot was taken.
    token: Token,
}
200
/// Represents the result of a fallible parsing expectation.
///
/// `ExpectResult` is returned when the parser tries to match an expected syntax element
/// but may fail. If the expectation is not met, a [SyntaxError] is inserted into the CST,
/// allowing error recovery without aborting parsing.
///
/// This allows for post-processing of errors or transforming them inline.
pub enum ExpectResult<'a> {
    /// The expected syntax was found and consumed successfully.
    Ok,
    /// An error occurred; carries the [SyntaxError] that was inserted into the CST.
    SyntaxError(&'a mut SyntaxError),
}
216
217impl<'a> ExpectResult<'a> {
218    pub(crate) fn is_ok(&self) -> bool {
219        matches!(self, ExpectResult::Ok)
220    }
221}
222
223impl ExpectResult<'_> {
224    /// Applies a function to the inner `SyntaxError` if one exists.
225    ///
226    /// This is useful for annotating or augmenting syntax errors in-place.
227    pub fn map(self, f: impl FnOnce(&mut SyntaxError)) -> Self {
228        match self {
229            Self::Ok => self,
230            Self::SyntaxError(err) => {
231                f(err);
232                Self::SyntaxError(err)
233            }
234        }
235    }
236}
237
// Error related methods
impl<'s> Parser<'s> {
    /// Asserts that the current token is of the expected kind and consumes it.
    ///
    /// Panics if the current token does not match `kind`. Use for invariant assumptions.
    #[track_caller]
    pub(crate) fn assert(&mut self, kind: SyntaxKind) {
        assert_eq!(self.current(), kind, "Expected {:?}", kind);
        self.eat();
    }

    /// Inserts a syntax error node before the current token position.
    ///
    /// The span will be 0 sized: it is anchored just after the last parsed
    /// node, or at the current token's span when nothing has been parsed yet.
    /// Returns a mutable reference to the inserted error.
    pub(crate) fn insert_error_before(
        &mut self,
        message: impl Into<EcoString>,
    ) -> &mut SyntaxError {
        // In both cases the error node itself carries no source text.
        let (span, text) = match self.last_node() {
            Some(v) => (v.span().after(), ""),
            None => (self.token.node.span(), ""),
        };
        let error = SyntaxNode::error(SyntaxError::new(message.into(), span), text);

        trace_log!("inserting error: {:?}", error);
        self.nodes.push(error);

        self.last_err().unwrap()
    }

    /// Inserts a syntax error node at the current token position.
    ///
    /// The error message is stored and will be included in the final CST as a [SyntaxNode::Error].
    /// Returns a mutable reference to the inserted error for immediate editing or tagging.
    pub(crate) fn insert_error_here(&mut self, message: impl Into<EcoString>) -> &mut SyntaxError {
        let error = SyntaxNode::error(
            SyntaxError::new(message.into(), self.token.node.span()),
            self.token.node.text(),
        );
        trace_log!("inserting error: {:?}", error);
        self.nodes.push(error);

        self.last_err().unwrap()
    }

    /// Inserts a pre-built [SyntaxError] into the CST.
    ///
    /// The error node is given the source text covered by the error's span,
    /// or the empty string if the span has no resolvable range. Returns a
    /// mutable reference to the inserted error.
    pub(crate) fn insert_error(&mut self, error: SyntaxError) -> &mut SyntaxError {
        trace_log!("inserting error: {:?}", error);
        let span = error.span;

        let error = SyntaxNode::error(
            error,
            span.range()
                .and_then(|r| self.get_text(r))
                .unwrap_or_default(),
        );
        self.nodes.push(error);

        self.last_err().unwrap()
    }

    /// Returns a mutable reference to the error at the given `Marker` if it exists.
    pub(crate) fn err_at(&mut self, at: Marker) -> Option<&mut SyntaxError> {
        self.nodes.get_mut(at.0).and_then(|n| n.error_mut())
    }

    /// Returns the most recently inserted syntax error node in the parser.
    pub fn last_err(&mut self) -> Option<&mut SyntaxError> {
        self.nodes.iter_mut().rev().find_map(|n| n.error_mut())
    }

    /// Emits a generic `expected ... got ...` syntax error for the current token.
    ///
    /// Example: `expected "Ident", got "Comma"`
    #[track_caller]
    pub(crate) fn expected(&mut self, expected: &str) {
        let kind = self.current();
        self.insert_error_here(eco_format!("expected {expected}, got {kind:?}"));
    }

    /// Emits a custom error message for an unexpected token and optionally recovers.
    ///
    /// The current token's node is converted into the error (a default
    /// `unexpected token ...` message is used when `message` is empty) and then
    /// consumed. If `recover_set` is provided, the parser additionally skips
    /// tokens until it finds a token in the recovery set or reaches EOF.
    /// Returns the inserted [SyntaxError].
    pub(crate) fn unexpected(
        &mut self,
        message: impl Into<EcoString>,
        recover_set: Option<SyntaxSet>,
    ) -> &mut SyntaxError {
        // An unexpected grouping token means we can no longer trust balancing.
        self.balanced &= !self.token.kind.is_grouping();
        let message = message.into();
        trace_log!("error_unexpected: {}", message);
        self.token.node.convert_to_error(match message.as_str() {
            "" => eco_format!("unexpected token {:?}", self.token.kind),
            _ => message,
        });

        // Remember where the error node will land once the token is eaten.
        let err_marker = self.marker();

        self.eat();

        if let Some(recover_set) = recover_set {
            self.recover_until(recover_set);
        }

        self.err_at(err_marker).expect("An error was just inserted")
    }

    /// Consumes the current token if it matches `kind`. Otherwise inserts an error.
    ///
    /// A keyword in identifier position gets a targeted error attached to the
    /// token itself instead of a generic one. Returns `true` if the expected
    /// token was present; otherwise returns `false`.
    pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
        let at = self.at(kind);
        if at {
            self.eat();
        } else if kind == SyntaxKind::Ident && self.token.kind.is_keyword() {
            self.token.node.expected(eco_format!("{kind:?}"));
        } else {
            // A missing grouping token means balancing can no longer be trusted.
            self.balanced &= !kind.is_grouping();
            self.expected(&format!("{kind:?}"));
        }
        at
    }

    /// Like `expect`, but additionally consumes the current token when it is
    /// in the recovery set.
    ///
    /// Returns `true` only if the expected token itself was consumed; when a
    /// recovery token is eaten instead, an error is still emitted and the
    /// result is `false`.
    pub(crate) fn expect_or_recover(
        &mut self,
        expected: SyntaxKind,
        recover_set: SyntaxSet,
    ) -> bool {
        if self.at(expected) {
            self.eat();
            return true;
        }

        self.expected(&format!("{expected:?}"));
        if self.can_recover_with(recover_set) {
            self.eat()
        }
        false
    }

    /// Returns true if the current token is in the recovery set or if a newline
    /// can act as a soft boundary and is considered recoverable.
    fn can_recover_with(&self, recover_set: SyntaxSet) -> bool {
        recover_set.contains(self.current())
            || (recover_set.contains(SyntaxKind::NewLine) && self.had_leading_newline())
    }

    /// Like `expect_or_recover`, but returns an [ExpectResult] for further error handling.
    ///
    /// An error node with `message` is inserted if the expected token is not
    /// found. The parser then skips forward until a token in the recovery set
    /// is reached, consuming the expected token if it turns up there.
    pub(crate) fn expect_or_recover_until(
        &mut self,
        expected: SyntaxKind,
        message: impl Into<EcoString>,
        recover_set: SyntaxSet,
    ) -> ExpectResult {
        if self.at(expected) {
            self.eat();
            return ExpectResult::Ok;
        }

        self.insert_error_here(message);

        self.recover_until(recover_set);
        if self.current() == expected {
            self.eat();
        }
        ExpectResult::SyntaxError(self.last_err().expect("An error was just inserted"))
    }

    /// Skips tokens until a token in the `recovery_set` is found or EOF is reached.
    ///
    /// Used in combination with error reporting for safe continuation.
    ///
    /// ### Behaviour
    ///
    /// Stops advancing when the current token is in the recovery set (or a
    /// leading newline acts as a soft boundary — see `can_recover_with`) or at EOF.
    pub(crate) fn recover_until(&mut self, recovery_set: SyntaxSet) {
        while !self.can_recover_with(recovery_set) && !self.end() {
            self.eat();
        }
    }

    /// Skips tokens until the given node, identified by its span, or EOF.
    pub(crate) fn recover_until_node(&mut self, node: &SyntaxNode) {
        while !self.end() {
            if self.token.node.span() == node.span() {
                break;
            }
            self.eat();
        }
    }

    /// Tries to consume a closing delimiter like `}` or `)` or reports a matching open delimiter as unclosed.
    ///
    /// This is used for detecting unbalanced groupings.
    /// If the current token is a closing delimiter, it is consumed; otherwise, an error is inserted
    /// pointing to the `open_marker` as the source of the unclosed delimiter.
    /// Returns whether the closing delimiter was actually present.
    #[track_caller]
    pub(crate) fn expect_closing_delimiter(
        &mut self,
        open_marker: Marker,
        expected_closing: SyntaxKind,
    ) -> bool {
        debug_assert!(expected_closing.is_grouping());
        if self.eat_if(expected_closing) {
            return true;
        }

        err_unclosed_delim(self, open_marker, expected_closing);

        false
    }
}
455
/// Storage for memoized parse results, keyed by the byte offset at which
/// parsing began.
#[derive(Debug, Default)]
struct MemoArena {
    /// Flat arena holding copies of nodes from memoized parses.
    nodes: Vec<SyntaxNode>,
    /// Maps a start offset to the arena range of the memoized nodes plus the
    /// parser state to restore after replaying them.
    memo_map: HashMap<MemoKey, (Range<usize>, CheckPoint)>,
}
461
impl<'s> Parser<'s> {
    /// Creates a new parser over `text`, starting at byte `offset`.
    ///
    /// The first token is lexed eagerly so `current()` is immediately valid.
    pub(crate) fn new(text: &'s str, offset: usize, file_id: FileId) -> Self {
        let mut lexer = Lexer::new(text, file_id);
        lexer.jump(offset);

        let token = Self::lex(&mut lexer);

        Self {
            text,
            lexer,
            token,
            balanced: true,
            nodes: vec![],
            last_pos: 0,
            memo: Default::default(),
        }
    }

    /// Returns a [`Scanner`] positioned at the start of the current token.
    pub fn scanner(&self) -> Scanner<'s> {
        Scanner::new(self.lexer.clone()).with_offset(self.token.start)
    }

    /// Consumes the parser and returns all produced nodes.
    pub(crate) fn finish(self) -> Vec<SyntaxNode> {
        self.nodes
    }

    /// Consumes the parser, wrapping all produced nodes in one inner node of
    /// the given `kind`.
    ///
    /// # Panics
    ///
    /// Panics if the input has not been fully consumed.
    fn finish_into(self, kind: SyntaxKind) -> SyntaxNode {
        assert!(self.end());
        SyntaxNode::inner(kind, self.finish())
    }

    /// The [`SyntaxKind`] of the current token.
    #[inline]
    pub(crate) fn current(&self) -> SyntaxKind {
        self.token.kind
    }

    /// The source text of the most recently produced node, or `""` if there
    /// is none or its span has no resolvable range.
    pub(crate) fn last_text(&self) -> &'s str {
        let Some(last) = self.nodes.last() else {
            return "";
        };
        let Some(range) = last.span().range() else {
            return "";
        };

        self.get_text(range).unwrap_or_default()
    }

    /// The most recently produced node, if any.
    pub(crate) fn last_node(&self) -> Option<&SyntaxNode> {
        self.nodes.last()
    }

    /// The slice of source text covered by `range`, if in bounds.
    pub(crate) fn get_text(&self, range: Range<usize>) -> Option<&'s str> {
        self.text.get(range)
    }

    /// The source text of the current token.
    ///
    /// Falls back to the raw `start..current_end` slice when the token's span
    /// has no resolvable range.
    pub(crate) fn current_text(&self) -> &'s str {
        match self.token.node.span().range() {
            Some(s) => self.text.get(s).expect("text should exist"),
            None => &self.text[self.token.start..self.current_end()],
        }
    }

    /// The node of the current token.
    #[inline]
    pub(crate) fn current_node(&self) -> &SyntaxNode {
        &self.token.node
    }

    /// The span of the current token.
    #[inline]
    pub(crate) fn current_span(&self) -> Span {
        self.current_node().span()
    }

    /// The byte offset just past the current token (the lexer's cursor).
    pub(crate) fn current_end(&self) -> usize {
        self.lexer.cursor()
    }

    // Peeks the token kind after current (clones the lexer; does not advance).
    fn peek(&self) -> SyntaxKind {
        let (kind, _) = self.lexer.clone().next();
        kind
    }

    /// Returns an iterator over the kinds of the tokens after the current
    /// one, without advancing the parser (it operates on a lexer clone).
    pub(crate) fn peeker(&self) -> SyntaxKindIter {
        SyntaxKindIter::new(self.lexer.clone())
    }

    /// Whether the token after the current one is of the given kind.
    pub(crate) fn peek_at(&self, kind: SyntaxKind) -> bool {
        self.peek() == kind
    }

    /// Whether the token after the current one is in the given set.
    pub(crate) fn peek_at_set(&self, set: SyntaxSet) -> bool {
        set.contains(self.peek())
    }

    /// Whether the current token is of the given kind.
    pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
        self.current() == kind
    }

    /// Whether the current token is in the given set.
    pub(crate) fn at_set(&self, set: SyntaxSet) -> bool {
        set.contains(self.current())
    }

    /// Whether the parser has reached the end of the input.
    pub(crate) fn end(&self) -> bool {
        self.at(SyntaxKind::End)
    }

    /// A marker that will point to the current token in the parser once it has been eaten.
    pub(crate) fn marker(&self) -> Marker {
        Marker(self.nodes.len())
    }

    /// Whether a newline preceded the current token.
    fn had_leading_newline(&self) -> bool {
        self.token.newline
    }

    /// Consumes the current token, pushing its node into `nodes`, and lexes
    /// the next one.
    ///
    /// Lexer error tokens are pushed straight into `nodes` and skipped over,
    /// so the parser never ends up with an error token as `current`.
    pub(crate) fn eat(&mut self) {
        self.nodes.push(std::mem::take(&mut self.token.node));

        let mut next = Self::lex(&mut self.lexer);
        while next.kind == SyntaxKind::Error {
            self.nodes.push(next.node);
            next = Self::lex(&mut self.lexer)
        }

        self.token = next;
    }

    /// Consumes the current token only if it matches `kind`.
    /// Returns whether it was consumed.
    pub(crate) fn eat_if(&mut self, kind: SyntaxKind) -> bool {
        let at = self.at(kind);
        if at {
            self.eat();
        }
        at
    }

    /// Move the parser forward without adding the node to the nodes vec
    pub(crate) fn skip(&mut self) {
        self.token = Self::lex(&mut self.lexer);
    }

    /// Move the parser forward without adding the node to the nodes vec
    /// if the current kind == `kind`
    pub(crate) fn skip_if(&mut self, kind: SyntaxKind) -> bool {
        let at = self.at(kind);
        if at {
            self.skip();
        }
        at
    }

    /// Re-labels the current token as `kind` and consumes it.
    fn convert_and_eat(&mut self, kind: SyntaxKind) {
        self.token.node.convert_to_kind(kind);
        self.eat();
    }

    /// Wraps all nodes produced since `from` in a new inner node of `kind`.
    ///
    /// `from` is clamped to the current position, so a stale marker yields an
    /// empty inner node instead of panicking.
    pub(crate) fn wrap(&mut self, from: Marker, kind: SyntaxKind) {
        let to = self.marker().0;
        let from = from.0.min(to);

        let children = self.nodes.drain(from..to).collect();
        self.nodes.insert(from, SyntaxNode::inner(kind, children))
    }

    /// Lexes the next non-comment token.
    ///
    /// Comment tokens are discarded entirely. `start` and `prev_end` are both
    /// taken from the cursor position before lexing began.
    fn lex(lexer: &mut Lexer) -> Token {
        let prev_end = lexer.cursor();
        let start = prev_end;
        let (mut kind, mut node) = lexer.next();

        while kind == SyntaxKind::Comment {
            (kind, node) = lexer.next();
        }

        Token {
            kind,
            node,
            newline: lexer.newline(),
            start,
            prev_end,
        }
    }

    /// Captures the current parser state for later backtracking.
    pub(crate) fn checkpoint(&self) -> CheckPoint {
        trace_log!("Creating checkpoint: {}", self.nodes.len());
        CheckPoint {
            nodes_len: self.nodes.len(),
            lexer_cursor: self.lexer.cursor(),
            token: self.token.clone(),
        }
    }

    /// Rolls the parser fully back to `checkpoint`, discarding any nodes
    /// produced since it was taken.
    pub(crate) fn restore(&mut self, checkpoint: CheckPoint) {
        trace_log!("Restoring checkpoint: {}", checkpoint.nodes_len);
        self.nodes.truncate(checkpoint.nodes_len);
        self.restore_partial(checkpoint);
    }

    /// Restores only the lexer position and current token, leaving
    /// already-produced nodes in place.
    pub(crate) fn restore_partial(&mut self, checkpoint: CheckPoint) {
        self.lexer.jump(checkpoint.lexer_cursor);
        self.token = checkpoint.token;
    }

    /// Replays memoized parse results for the current position, if any.
    ///
    /// Returns `None` after replaying a memoized result (the work is already
    /// done), or `Some((key, checkpoint))` that the caller should hand to
    /// [`Self::memoize_parsed_nodes`] once it has parsed the region.
    pub(crate) fn restore_memo_or_checkpoint(&mut self) -> Option<(MemoKey, CheckPoint)> {
        let key: MemoKey = self.current_start();
        match self.memo.memo_map.get(&key).cloned() {
            Some((range, checkpoint)) => {
                // restore the memo
                self.nodes.extend_from_slice(&self.memo.nodes[range]);
                self.restore_partial(checkpoint);
                None
            }
            None => Some((key, self.checkpoint())),
        }
    }

    /// Memoizes the nodes produced since `prev_len` under `key`, together
    /// with the current parser state, for later replay by
    /// [`Self::restore_memo_or_checkpoint`].
    pub(crate) fn memoize_parsed_nodes(&mut self, key: MemoKey, prev_len: usize) {
        let prev_memo_len = self.memo.nodes.len();

        self.memo.nodes.extend_from_slice(&self.nodes[prev_len..]);
        let checkpoint = self.checkpoint();
        self.memo
            .memo_map
            .insert(key, (prev_memo_len..self.memo.nodes.len(), checkpoint));
    }

    /// The start offset of the current token; used as the memoization key.
    fn current_start(&self) -> MemoKey {
        self.token.start
    }
}
689
/// Key for the memoization table: the byte offset at which parsing began.
type MemoKey = usize;

// Represents a node's position in the parser: an index into `Parser::nodes`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(crate) struct Marker(pub(crate) usize);
695
/// An iterator over the [`SyntaxKind`]s of upcoming tokens.
///
/// Yields kinds up to and including [`SyntaxKind::End`], then stops.
pub(crate) struct SyntaxKindIter<'s> {
    /// The lexer to drain; `Parser::peeker` passes a clone, so iterating
    /// does not advance the parser itself.
    lexer: Lexer<'s>,
    /// Set once `End` has been yielded, terminating the iterator.
    yielded_at_end: bool,
}
700
701impl<'a> SyntaxKindIter<'a> {
702    pub fn new(lexer: Lexer<'a>) -> Self {
703        Self {
704            lexer,
705            yielded_at_end: false,
706        }
707    }
708}
709
710impl<'s> Iterator for SyntaxKindIter<'s> {
711    type Item = SyntaxKind;
712
713    fn next(&mut self) -> Option<Self::Item> {
714        if self.yielded_at_end {
715            return None;
716        }
717        let (kind, _) = self.lexer.next();
718        if kind == SyntaxKind::End {
719            self.yielded_at_end = true;
720        }
721        Some(kind)
722    }
723}
724
/// A single lexed token plus the bookkeeping the parser needs around it.
#[derive(Debug, Clone)]
pub(crate) struct Token {
    /// The syntactic kind of this token.
    pub(crate) kind: SyntaxKind,
    /// The node carrying the token's text and span.
    pub(crate) node: SyntaxNode,
    // Whether the preceding token had a trailing newline
    pub(crate) newline: bool,

    /// The index into `text` where lexing of this token began; as produced by
    /// `Parser::lex` this always equals `prev_end`.
    pub(crate) start: usize,

    // The index into `text` of the end of the previous token
    pub(crate) prev_end: usize,
}