// crates/ra_analysis/src/roots.rs
use std::sync::Arc;

use rustc_hash::FxHashSet;
use rayon::prelude::*;
use salsa::Database;

use crate::{
    Cancelable,
    db::{self, FilesDatabase, SyntaxDatabase},
    imp::FileResolverImp,
    symbol_index::SymbolIndex,
    FileId,
};

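/// A set of source files that analysis can operate on.
///
/// A source root knows which files belong to it, owns the salsa database
/// holding their state, and can collect the symbol indices of its files.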
pub(crate) trait SourceRoot {
    fn contains(&self, file_id: FileId) -> bool;
    fn db(&self) -> &db::RootDatabase;
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()>;
}

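/// A source root whose files may change over time, backed by a mutable
/// salsa database. Symbol indices for its files are computed lazily, per
/// file, via the `file_symbols` query.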
#[derive(Default, Debug, Clone)]
pub(crate) struct WritableSourceRoot {
    db: db::RootDatabase,
}

impl WritableSourceRoot {
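    /// Applies a batch of changes to the underlying database.
    ///
    /// For each `(FileId, Option<String>)` pair, `Some(text)` sets new file
    /// contents and `None` removes the file. If a `FileResolverImp` is given,
    /// it replaces the current resolver.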
    pub fn apply_changes(
        &mut self,
        changes: &mut dyn Iterator<Item = (FileId, Option<String>)>,
        file_resolver: Option<FileResolverImp>,
    ) {
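        // Partition the incoming changes: new texts are written to the
        // database right away, removals are collected for the file-set update.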
        let mut changed = FxHashSet::default();
        let mut removed = FxHashSet::default();
        for (file_id, text) in changes {
            match text {
                None => {
                    removed.insert(file_id);
                }
                Some(text) => {
                    self.db
                        .query(db::FileTextQuery)
                        .set(file_id, Arc::new(text));
                    changed.insert(file_id);
                }
            }
        }
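        // Rebuild the file set: drop removed files, add any newly changed
        // ones, and install the new resolver if one was supplied.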
        let file_set = self.db.file_set();
        let mut files: FxHashSet<FileId> = file_set.files.clone();
        for file_id in removed {
            files.remove(&file_id);
        }
        files.extend(changed);
        let resolver = file_resolver.unwrap_or_else(|| file_set.resolver.clone());
        self.db
            .query(db::FileSetQuery)
            .set((), Arc::new(db::FileSet { files, resolver }));
    }
}

impl SourceRoot for WritableSourceRoot {
    fn contains(&self, file_id: FileId) -> bool {
        self.db.file_set().files.contains(&file_id)
    }
    fn db(&self) -> &db::RootDatabase {
        &self.db
    }
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()> {
        for &file_id in self.db.file_set().files.iter() {
            let symbols = self.db.file_symbols(file_id)?;
            acc.push(symbols)
        }
        Ok(())
    }
}

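/// A source root that is fixed at construction time (for example, library
/// dependencies). All file texts are set up front and a single symbol index
/// covering every file is built eagerly.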
#[derive(Debug, Clone)]
pub(crate) struct ReadonlySourceRoot {
    db: db::RootDatabase,
    symbol_index: Arc<SymbolIndex>,
}

impl ReadonlySourceRoot {
    pub(crate) fn new(
        files: Vec<(FileId, String)>,
        resolver: FileResolverImp,
    ) -> ReadonlySourceRoot {
        let db = db::RootDatabase::default();
        let mut file_ids = FxHashSet::default();
        for (file_id, text) in files {
            file_ids.insert(file_id);
            db.query(db::FileTextQuery).set(file_id, Arc::new(text));
        }

        db.query(db::FileSetQuery)
            .set((), Arc::new(db::FileSet { files: file_ids, resolver }));
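        // Eagerly build the symbol index for all files in parallel; `map_with`
        // gives each rayon worker its own clone of the database handle.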
        let file_set = db.file_set();
        let symbol_index = SymbolIndex::for_files(
            file_set
                .files
                .par_iter()
                .map_with(db.clone(), |db, &file_id| (file_id, db.file_syntax(file_id))),
        );

        ReadonlySourceRoot { db, symbol_index: Arc::new(symbol_index) }
    }
}

impl SourceRoot for ReadonlySourceRoot {
    fn contains(&self, file_id: FileId) -> bool {
        self.db.file_set().files.contains(&file_id)
    }
    fn db(&self) -> &db::RootDatabase {
        &self.db
    }
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) -> Cancelable<()> {
        acc.push(Arc::clone(&self.symbol_index));
        Ok(())
    }
}
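
// A hypothetical usage sketch, not part of the original module: gathering the
// symbol indices of several roots through the `SourceRoot` trait. The helper
// name `all_symbols` is illustrative only.
#[allow(dead_code)]
fn all_symbols(roots: &[Box<dyn SourceRoot>]) -> Cancelable<Vec<Arc<SymbolIndex>>> {
    let mut acc = Vec::new();
    for root in roots {
        // A writable root pushes one index per file; a readonly root pushes
        // its single precomputed index. `?` propagates cancellation.
        root.symbols(&mut acc)?;
    }
    Ok(acc)
}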