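//! `crates/ra_analysis/src/db/imp.rs`: dynamically typed implementation layer
//! of the analysis database.
//!
//! `Db` wraps the in-tree `salsa` engine: query results are stored as
//! type-erased `Arc<dyn Any>` values and mapped back to the strongly typed
//! `Query`/`QueryCtx` API defined in the parent module.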
use std::{
    sync::Arc,
    any::Any,
    hash::{Hash, Hasher},
    collections::hash_map::DefaultHasher,
    iter,
};
use rustc_hash::FxHashMap;
use salsa;
use crate::{FileId, imp::FileResolverImp};
use super::{State, Query, QueryCtx};

/// Type-erased query result as stored in the salsa database.
pub(super) type Data = Arc<dyn Any + Send + Sync + 'static>;

/// The analysis database: a table of query names (for tracing) plus the
/// underlying salsa database.
#[derive(Debug)]
pub(super) struct Db {
    names: Arc<FxHashMap<salsa::QueryTypeId, &'static str>>,
    pub(super) imp: salsa::Db<State, Data>,
}

impl Db {
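    /// Builds the database from a fully configured `QueryRegistry`, starting
    /// from the default (empty) `State`.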
    pub(super) fn new(mut reg: QueryRegistry) -> Db {
        let config = reg.config.take().unwrap();
        Db {
            names: Arc::new(reg.names),
            imp: salsa::Db::new(config, State::default())
        }
    }
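    /// Creates a new database that carries over the query configuration but
    /// uses `new_state` as ground data, invalidating `FILE_TEXT` for every
    /// changed file and `FILE_SET` when the file set or resolver changed.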
    pub(crate) fn with_changes(
        &self,
        new_state: State,
        changed_files: &[FileId],
        resolver_changed: bool,
    ) -> Db {
        let names = self.names.clone();
        let mut invalidations = salsa::Invalidations::new();
        invalidations.invalidate(
            FILE_TEXT,
            changed_files.iter().map(hash).map(salsa::InputFingerprint),
        );
        if resolver_changed {
            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
        } else {
            invalidations.invalidate(FILE_SET, iter::empty());
        }
        let imp = self.imp.with_ground_data(
            new_state,
            invalidations,
        );
        Db { names, imp }
    }
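    /// Maps the query ids recorded in `ctx`'s trace back to their registered
    /// names.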
    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
    }
}

/// Bridge between a strongly typed `Query<T, R>` and the dynamically typed
/// salsa machinery: exposes the query id, a type-erased query function, and a
/// typed accessor.
pub(crate) trait EvalQuery {
    type Params;
    type Output;
    fn query_type(&self) -> salsa::QueryTypeId;
    fn f(&self) -> salsa::QueryFn<State, Data>;
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output>;
}

impl<T, R> EvalQuery for Query<T, R>
where
    T: Hash + Send + Sync + 'static,
    R: Hash + Send + Sync + 'static,
{
    type Params = T;
    type Output = R;
    fn query_type(&self) -> salsa::QueryTypeId {
        salsa::QueryTypeId(self.0)
    }
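    /// Wraps the typed query function in a type-erased salsa `QueryFn`:
    /// downcasts the parameters, runs the query, and fingerprints the result
    /// by hashing it.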
    fn f(&self) -> salsa::QueryFn<State, Data> {
        let f = self.1;
        Box::new(move |ctx, data| {
            let ctx = QueryCtx { imp: ctx };
            let data: &T = data.downcast_ref().unwrap();
            let res = f(ctx, data);
            let h = hash(&res);
            (Arc::new(res), salsa::OutputFingerprint(h))
        })
    }
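    /// Runs this query for `params` through the salsa database and downcasts
    /// the result back to its concrete type.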
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
        let query_id = salsa::QueryId(
            self.query_type(),
            salsa::InputFingerprint(hash(&params)),
        );
        let res = ctx.imp.get(query_id, Arc::new(params));
        res.downcast().unwrap()
    }
}

/// Accumulates the salsa query configuration (ground and derived queries)
/// together with the query-id-to-name table used for tracing.
pub(super) struct QueryRegistry {
    config: Option<salsa::QueryConfig<State, Data>>,
    names: FxHashMap<salsa::QueryTypeId, &'static str>,
}

impl QueryRegistry {
    pub(super) fn new() -> QueryRegistry {
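        // Ground queries read their data directly from `State`; every other
        // query is derived from them.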
        let mut config = salsa::QueryConfig::<State, Data>::new();
        config = config.with_ground_query(
            FILE_TEXT, Box::new(|state, params| {
                let file_id: &FileId = params.downcast_ref().unwrap();
                let res = state.file_map[file_id].clone();
                let fingerprint = salsa::OutputFingerprint(hash(&res));
                (res, fingerprint)
            })
        );
        config = config.with_ground_query(
            FILE_SET, Box::new(|state, _params| {
                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
                let fingerprint = salsa::OutputFingerprint(hash(&file_ids));
                let file_resolver = state.file_resolver.clone();
                let res = (file_ids, file_resolver);
                (Arc::new(res), fingerprint)
            })
        );
        let mut names = FxHashMap::default();
        names.insert(FILE_TEXT, "FILE_TEXT");
        names.insert(FILE_SET, "FILE_SET");
        QueryRegistry { config: Some(config), names }
    }
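    /// Registers a derived query under `name`; panics if a query with the
    /// same id was already registered.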
    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
        let id = q.query_type();
        let prev = self.names.insert(id, name);
        assert!(prev.is_none(), "duplicate query: {:?}", id);
        let config = self.config.take().unwrap();
        let config = config.with_query(id, q.f());
        self.config = Some(config);
    }
}

/// Fingerprint helper: hashes a value with `DefaultHasher`.
fn hash<T: Hash>(x: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    hasher.finish()
}

const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
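/// Ground-query accessor: returns the text of `file_id`.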
pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
    let query_id = salsa::QueryId(
        FILE_TEXT,
        salsa::InputFingerprint(hash(&file_id)),
    );
    let res = ctx.imp.get(query_id, Arc::new(file_id));
    res.downcast().unwrap()
}

const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
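/// Ground-query accessor: returns all known `FileId`s together with the
/// current `FileResolverImp`.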
pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
    let query_id = salsa::QueryId(
        FILE_SET,
        salsa::InputFingerprint(hash(&())),
    );
    let res = ctx.imp.get(query_id, Arc::new(()));
    res.downcast().unwrap()
}