path: root/crates/ra_syntax/src/parsing/text_token_source.rs
blob: e793f93a4dd0a5a1ffa0d30b90d4e3cf40e36649
//! An implementation of `ra_parser::TokenSource` that feeds the parser from the
//! original text and a list of lexed tokens, skipping whitespace and comments.

use ra_parser::Token as PToken;
use ra_parser::TokenSource;

use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextUnit};

pub(crate) struct TextTokenSource<'t> {
    text: &'t str,
    /// start position of each token (except whitespace and comments)
    /// ```non-rust
    ///  struct Foo;
    /// ^------^---
    /// |      |  ^-
    /// 0      7  10
    /// ```
    /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
    start_offsets: Vec<TextUnit>,
    /// non-whitespace/comment tokens
    /// ```non-rust
    /// struct Foo {}
    /// ^^^^^^ ^^^ ^^
    /// ```
    /// tokens: `[struct, Foo, {, }]`
    tokens: Vec<Token>,

    /// Current token and position
    curr: (PToken, usize),
}

impl<'t> TokenSource for TextTokenSource<'t> {
    fn current(&self) -> PToken {
        self.curr.0
    }

    fn lookahead_nth(&self, n: usize) -> PToken {
        mk_token(self.curr.1 + n, &self.start_offsets, &self.tokens)
    }

    fn bump(&mut self) {
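        // EOF is terminal: once the source reaches it, bumping has no effect.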
        if self.curr.0.kind == EOF {
            return;
        }

        let pos = self.curr.1 + 1;
        self.curr = (mk_token(pos, &self.start_offsets, &self.tokens), pos);
    }

    fn is_keyword(&self, kw: &str) -> bool {
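        // Compare the text of the current token against `kw`; at EOF there is
        // no token to compare, so report false.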
        let pos = self.curr.1;
        if pos >= self.tokens.len() {
            return false;
        }
        let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
        self.text[range] == *kw
    }
}

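/// Builds the parser-level token at `pos`: the kind is taken from `tokens[pos]`
/// (or `EOF` past the end), and `is_jointed_to_next` is set when the token ends
/// exactly where the following token starts, i.e. with no trivia in between.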
fn mk_token(pos: usize, start_offsets: &[TextUnit], tokens: &[Token]) -> PToken {
    let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF);
    let is_jointed_to_next = if pos + 1 < start_offsets.len() {
        start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1]
    } else {
        false
    };

    PToken { kind, is_jointed_to_next }
}

impl<'t> TextTokenSource<'t> {
    /// Generate input from tokens (excluding comments and whitespace).
    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
        let mut tokens = Vec::new();
        let mut start_offsets = Vec::new();
        let mut len = 0.into();
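        // Filter out trivia (whitespace and comments), but keep accumulating
        // `len` over every raw token so the recorded start offsets stay absolute.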
        for &token in raw_tokens.iter() {
            if !token.kind.is_trivia() {
                tokens.push(token);
                start_offsets.push(len);
            }
            len += token.len;
        }

        let first = mk_token(0, &start_offsets, &tokens);
        TextTokenSource { text, start_offsets, tokens, curr: (first, 0) }
    }
}
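
// A minimal sketch of the expected behaviour, assuming the crate-internal lexer
// exposes a `tokenize(&str) -> Vec<Token>` helper (hypothetical signature here;
// adjust the import and call to the actual lexer API).
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parsing::lexer::tokenize;

    #[test]
    fn skips_trivia_and_tracks_jointedness() {
        let text = "struct Foo;";
        let tokens = tokenize(text);
        let source = TextTokenSource::new(text, &tokens);
        // The current (first non-trivia) token is the `struct` keyword.
        assert!(source.is_keyword("struct"));
        // `struct` is separated from `Foo` by whitespace, so it is not jointed...
        assert!(!source.current().is_jointed_to_next);
        // ...while `Foo` and `;` touch, so the lookahead token is jointed.
        assert!(source.lookahead_nth(1).is_jointed_to_next);
    }
}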