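//! Symbol index for the `ra_analysis` crate: a flat list of `(FileId, FileSymbol)`
//! pairs plus an `fst::Map` keyed by the lowercased symbol name, used to answer
//! fuzzy symbol queries across many files.
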
use std::{
    hash::{Hash, Hasher},
    sync::Arc,
};

use fst::{self, Streamer};
use ra_editor::{self, FileSymbol};
use ra_syntax::{
    SourceFileNode,
    SyntaxKind::{self, *},
};
use ra_db::{SyntaxDatabase, SourceRootId};
use rayon::prelude::*;

use crate::{
    Cancelable,
    FileId, Query,
};

salsa::query_group! {
    pub(crate) trait SymbolsDatabase: SyntaxDatabase {
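        /// Symbols for a single file, recomputed from its syntax tree on demand.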
        fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
            type FileSymbolsQuery;
        }
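        /// Symbols for an entire library source root. `storage input` marks this
        /// as an input query: its value is set explicitly rather than computed.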
        fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> {
            type LibrarySymbolsQuery;
            storage input;
        }
    }
}

fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
    db.check_canceled()?;
    let syntax = db.source_file(file_id);
    Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
}

#[derive(Default, Debug)]
pub(crate) struct SymbolIndex {
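    /// All indexed symbols, sorted by lowercased name.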
    symbols: Vec<(FileId, FileSymbol)>,
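    /// FST map from lowercased symbol name to its position in `symbols`.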
    map: fst::Map,
}

impl PartialEq for SymbolIndex {
    fn eq(&self, other: &SymbolIndex) -> bool {
        self.symbols == other.symbols
    }
}

impl Eq for SymbolIndex {}

impl Hash for SymbolIndex {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.symbols.hash(hasher)
    }
}

impl SymbolIndex {
    pub(crate) fn len(&self) -> usize {
        self.symbols.len()
    }

    pub(crate) fn for_files(
        files: impl ParallelIterator<Item = (FileId, SourceFileNode)>,
    ) -> SymbolIndex {
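        // Gather (lowercased name, (file_id, symbol)) pairs from all files in parallel.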
        let mut symbols = files
            .flat_map(|(file_id, file)| {
                ra_editor::file_symbols(&file)
                    .into_iter()
                    .map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol)))
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();
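        // `fst::Map` construction requires unique keys in sorted order, so sort by
        // the lowercased name and drop entries that collide on it.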
        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
        symbols.dedup_by(|s1, s2| s1.0 == s2.0);
        let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
            symbols.into_iter().unzip();
        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
        SymbolIndex { symbols, map }
    }

    pub(crate) fn for_file(file_id: FileId, file: SourceFileNode) -> SymbolIndex {
        SymbolIndex::for_files(rayon::iter::once((file_id, file)))
    }
}

impl Query {
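    /// Search the given indices for symbols whose lowercased name contains the
    /// query as a subsequence, honoring the `limit`, `only_types`, and `exact` flags.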
    pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<(FileId, FileSymbol)> {
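        // Build one subsequence automaton per index and union the result streams,
        // so matches from every index are drained together.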
        let mut op = fst::map::OpBuilder::new();
        for file_symbols in indices.iter() {
            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
            op = op.add(file_symbols.map.search(automaton))
        }
        let mut stream = op.union();
        let mut res = Vec::new();
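        // Drain the union stream, stopping once `limit` results have been collected.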
        while let Some((_, indexed_values)) = stream.next() {
            if res.len() >= self.limit {
                break;
            }
            for indexed_value in indexed_values {
                let file_symbols = &indices[indexed_value.index];
                let idx = indexed_value.value as usize;

                let (file_id, symbol) = &file_symbols.symbols[idx];
                if self.only_types && !is_type(symbol.kind) {
                    continue;
                }
                if self.exact && symbol.name != self.query {
                    continue;
                }
                res.push((*file_id, symbol.clone()));
            }
        }
        res
    }
}

fn is_type(kind: SyntaxKind) -> bool {
    match kind {
        STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true,
        _ => false,
    }
}