From 3ac605e6876056fa56098231cc2f96553faab8f0 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Thu, 20 Dec 2018 21:56:28 +0100 Subject: Add beginnings of type infrastructure --- crates/ra_analysis/src/db.rs | 1 + crates/ra_hir/src/db.rs | 6 + crates/ra_hir/src/function.rs | 6 +- crates/ra_hir/src/lib.rs | 1 + crates/ra_hir/src/mock.rs | 3 +- crates/ra_hir/src/query_definitions.rs | 8 + crates/ra_hir/src/ty.rs | 478 +++++++++++++++++++++++++++++++++ crates/ra_hir/src/ty/primitive.rs | 98 +++++++ crates/ra_hir/src/ty/tests.rs | 45 ++++ crates/ra_syntax/src/ast/generated.rs | 44 ++- crates/ra_syntax/src/grammar.ron | 14 +- 11 files changed, 689 insertions(+), 15 deletions(-) create mode 100644 crates/ra_hir/src/ty.rs create mode 100644 crates/ra_hir/src/ty/primitive.rs create mode 100644 crates/ra_hir/src/ty/tests.rs (limited to 'crates') diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs index 94729d296..f26c49887 100644 --- a/crates/ra_analysis/src/db.rs +++ b/crates/ra_analysis/src/db.rs @@ -93,6 +93,7 @@ salsa::database_storage! { fn item_map() for hir::db::ItemMapQuery; fn fn_syntax() for hir::db::FnSyntaxQuery; fn submodules() for hir::db::SubmodulesQuery; + fn infer() for hir::db::InferQuery; } } } diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index 62cf9ab17..f0bff3c02 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs @@ -14,6 +14,7 @@ use crate::{ function::FnId, module::{ModuleId, ModuleTree, ModuleSource, nameres::{ItemMap, InputModuleItems}}, + ty::InferenceResult, }; salsa::query_group! { @@ -30,6 +31,11 @@ pub trait HirDatabase: SyntaxDatabase use fn query_definitions::fn_syntax; } + fn infer(fn_id: FnId) -> Arc { + type InferQuery; + use fn query_definitions::infer; + } + fn file_items(file_id: FileId) -> Arc { type SourceFileItemsQuery; use fn query_definitions::file_items; diff --git a/crates/ra_hir/src/function.rs b/crates/ra_hir/src/function.rs index 2925beb16..360e9e9a0 100644 --- a/crates/ra_hir/src/function.rs +++ b/crates/ra_hir/src/function.rs @@ -10,7 +10,7 @@ use ra_syntax::{ ast::{self, AstNode, DocCommentsOwner, NameOwner}, }; -use crate::{ DefId, HirDatabase }; +use crate::{ DefId, HirDatabase, ty::InferenceResult }; pub use self::scope::FnScopes; @@ -35,6 +35,10 @@ impl Function { let syntax = db.fn_syntax(self.fn_id); FnSignatureInfo::new(syntax.borrowed()) } + + pub fn infer(&self, db: &impl HirDatabase) -> Arc { + db.infer(self.fn_id) + } } #[derive(Debug, Clone)] diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs index f56214b47..e84f44675 100644 --- a/crates/ra_hir/src/lib.rs +++ b/crates/ra_hir/src/lib.rs @@ -25,6 +25,7 @@ pub mod source_binder; mod krate; mod module; mod function; +mod ty; use std::ops::Index; diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index 9423e6571..a9fa540d5 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs @@ -8,7 +8,7 @@ use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset}; use crate::{db, DefId, DefLoc}; -const WORKSPACE: SourceRootId = SourceRootId(0); +pub const WORKSPACE: SourceRootId = SourceRootId(0); #[derive(Debug)] pub(crate) struct MockDatabase { @@ -182,6 +182,7 @@ salsa::database_storage! 
{ fn item_map() for db::ItemMapQuery; fn fn_syntax() for db::FnSyntaxQuery; fn submodules() for db::SubmodulesQuery; + fn infer() for db::InferQuery; } } } diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs index efaeb1525..ccbfdf028 100644 --- a/crates/ra_hir/src/query_definitions.rs +++ b/crates/ra_hir/src/query_definitions.rs @@ -19,6 +19,7 @@ use crate::{ imp::Submodule, nameres::{InputModuleItems, ItemMap, Resolver}, }, + ty::{self, InferenceResult} }; /// Resolve `FnId` to the corresponding `SyntaxNode` @@ -35,6 +36,13 @@ pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc { Arc::new(res) } +pub(super) fn infer(db: &impl HirDatabase, fn_id: FnId) -> Arc { + let syntax = db.fn_syntax(fn_id); + let scopes = db.fn_scopes(fn_id); + let res = ty::infer(db, syntax.borrowed(), scopes); + Arc::new(res) +} + pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc { let mut res = SourceFileItems::new(file_id); let source_file = db.source_file(file_id); diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs new file mode 100644 index 000000000..36dc5d137 --- /dev/null +++ b/crates/ra_hir/src/ty.rs @@ -0,0 +1,478 @@ +mod primitive; +#[cfg(test)] +mod tests; + +use rustc_hash::{FxHashMap, FxHashSet}; + +use std::sync::Arc; +use std::collections::HashMap; + +use ra_db::LocalSyntaxPtr; +use ra_syntax::{ + TextRange, TextUnit, + algo::visit::{visitor, Visitor}, + ast::{self, AstNode, DocCommentsOwner, NameOwner, LoopBodyOwner, ArgListOwner}, + SyntaxNodeRef +}; + +use crate::{ + FnScopes, + db::HirDatabase, + arena::{Arena, Id}, +}; + +// pub(crate) type TypeId = Id; + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub enum Ty { + /// The primitive boolean type. Written as `bool`. + Bool, + + /// The primitive character type; holds a Unicode scalar value + /// (a non-surrogate code point). Written as `char`. + Char, + + /// A primitive signed integer type. For example, `i32`. + Int(primitive::IntTy), + + /// A primitive unsigned integer type. For example, `u32`. + Uint(primitive::UintTy), + + /// A primitive floating-point type. For example, `f64`. + Float(primitive::FloatTy), + + /// Structures, enumerations and unions. + /// + /// Substs here, possibly against intuition, *may* contain `Param`s. + /// That is, even after substitution it is possible that there are type + /// variables. This happens when the `Adt` corresponds to an ADT + /// definition and not a concrete use of it. + // Adt(&'tcx AdtDef, &'tcx Substs<'tcx>), + + // Foreign(DefId), + + /// The pointee of a string slice. Written as `str`. + Str, + + /// An array with the given length. Written as `[T; n]`. + // Array(Ty<'tcx>, &'tcx ty::Const<'tcx>), + + /// The pointee of an array slice. Written as `[T]`. + Slice(TyRef), + + /// A raw pointer. Written as `*mut T` or `*const T` + // RawPtr(TypeAndMut<'tcx>), + + /// A reference; a pointer with an associated lifetime. Written as + /// `&'a mut T` or `&'a T`. + // Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), + + /// The anonymous type of a function declaration/definition. Each + /// function has a unique type, which is output (for a function + /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. + /// + /// For example the type of `bar` here: + /// + /// ```rust + /// fn foo() -> i32 { 1 } + /// let bar = foo; // bar: fn() -> i32 {foo} + /// ``` + // FnDef(DefId, &'tcx Substs<'tcx>), + + /// A pointer to a function. Written as `fn() -> i32`. 
+    ///
+    /// For example the type of `bar` here:
+    ///
+    /// ```rust
+    /// fn foo() -> i32 { 1 }
+    /// let bar: fn() -> i32 = foo;
+    /// ```
+    // FnPtr(PolyFnSig<'tcx>),
+
+    /// A trait, defined with `trait`.
+    // Dynamic(Binder<&'tcx List<ExistentialPredicate<'tcx>>>, ty::Region<'tcx>),
+
+    /// The anonymous type of a closure. Used to represent the type of
+    /// `|a| a`.
+    // Closure(DefId, ClosureSubsts<'tcx>),
+
+    /// The anonymous type of a generator. Used to represent the type of
+    /// `|a| yield a`.
+    // Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability),
+
+    /// A type representing the types stored inside a generator.
+    /// This should only appear in GeneratorInteriors.
+    // GeneratorWitness(Binder<&'tcx List<Ty<'tcx>>>),
+
+    /// The never type `!`
+    Never,
+
+    /// A tuple type. For example, `(i32, bool)`.
+    Tuple(Vec<Ty>),
+
+    /// The projection of an associated type. For example,
+    /// `<T as Trait<..>>::N`.
+    // Projection(ProjectionTy<'tcx>),
+
+    /// Opaque (`impl Trait`) type found in a return type.
+    /// The `DefId` comes either from
+    /// * the `impl Trait` ast::Ty node,
+    /// * or the `existential type` declaration
+    /// The substitutions are for the generics of the function in question.
+    /// After typeck, the concrete type can be found in the `types` map.
+    // Opaque(DefId, &'tcx Substs<'tcx>),
+
+    /// A type parameter; for example, `T` in `fn f<T>(x: T) {}`
+    // Param(ParamTy),
+
+    /// Bound type variable, used only when preparing a trait query.
+    // Bound(ty::DebruijnIndex, BoundTy),
+
+    /// A placeholder type - universally quantified higher-ranked type.
+    // Placeholder(ty::PlaceholderType),
+
+    /// A type variable used during type checking.
+    // Infer(InferTy),
+
+    /// A placeholder for a type which could not be computed; this is
+    /// propagated to avoid useless error messages.
+    Unknown,
+}
+
+type TyRef = Arc<Ty>;
+
+impl Ty {
+    pub fn new(node: ast::TypeRef) -> Self {
+        use ra_syntax::ast::TypeRef::*;
+        match node {
+            ParenType(_inner) => Ty::Unknown, // TODO
+            TupleType(_inner) => Ty::Unknown, // TODO
+            NeverType(..) => Ty::Never,
+            PathType(_inner) => Ty::Unknown, // TODO
+            PointerType(_inner) => Ty::Unknown, // TODO
+            ArrayType(_inner) => Ty::Unknown, // TODO
+            SliceType(_inner) => Ty::Unknown, // TODO
+            ReferenceType(_inner) => Ty::Unknown, // TODO
+            PlaceholderType(_inner) => Ty::Unknown, // TODO
+            FnPointerType(_inner) => Ty::Unknown, // TODO
+            ForType(_inner) => Ty::Unknown, // TODO
+            ImplTraitType(_inner) => Ty::Unknown, // TODO
+            DynTraitType(_inner) => Ty::Unknown, // TODO
+        }
+    }
+
+    pub fn unit() -> Self {
+        Ty::Tuple(Vec::new())
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct InferenceResult {
+    type_for: FxHashMap<LocalSyntaxPtr, Ty>,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct InferenceContext {
+    scopes: Arc<FnScopes>,
+    // TODO unification tables...
+ type_for: FxHashMap, +} + +impl InferenceContext { + fn new(scopes: Arc) -> Self { + InferenceContext { + type_for: FxHashMap::default(), + scopes + } + } + + fn write_ty(&mut self, node: SyntaxNodeRef, ty: Ty) { + self.type_for.insert(LocalSyntaxPtr::new(node), ty); + } + + fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { + unimplemented!() + } + + fn infer_expr(&mut self, expr: ast::Expr) -> Ty { + let ty = match expr { + ast::Expr::IfExpr(e) => { + if let Some(condition) = e.condition() { + if let Some(e) = condition.expr() { + // TODO if no pat, this should be bool + self.infer_expr(e); + } + // TODO write type for pat + }; + let if_ty = if let Some(block) = e.then_branch() { + self.infer_block(block) + } else { + Ty::Unknown + }; + let else_ty = if let Some(block) = e.else_branch() { + self.infer_block(block) + } else { + Ty::Unknown + }; + if self.unify(&if_ty, &else_ty) { + // TODO actually, need to take the 'more specific' type (not unknown, never, ...) + if_ty + } else { + // TODO report diagnostic + Ty::Unknown + } + } + ast::Expr::BlockExpr(e) => { + if let Some(block) = e.block() { + self.infer_block(block) + } else { + Ty::Unknown + } + } + ast::Expr::LoopExpr(e) => { + if let Some(block) = e.loop_body() { + self.infer_block(block); + }; + // TODO never, or the type of the break param + Ty::Unknown + } + ast::Expr::WhileExpr(e) => { + if let Some(condition) = e.condition() { + if let Some(e) = condition.expr() { + // TODO if no pat, this should be bool + self.infer_expr(e); + } + // TODO write type for pat + }; + if let Some(block) = e.loop_body() { + // TODO + self.infer_block(block); + }; + // TODO always unit? + Ty::Unknown + } + ast::Expr::ForExpr(e) => { + if let Some(expr) = e.iterable() { + self.infer_expr(expr); + } + if let Some(pat) = e.pat() { + // TODO write type for pat + } + if let Some(block) = e.loop_body() { + self.infer_block(block); + } + // TODO always unit? + Ty::Unknown + } + ast::Expr::LambdaExpr(e) => { + let body_ty = if let Some(body) = e.body() { + self.infer_expr(body) + } else { + Ty::Unknown + }; + Ty::Unknown + } + ast::Expr::CallExpr(e) => { + if let Some(arg_list) = e.arg_list() { + for arg in arg_list.args() { + // TODO unify / expect argument type + self.infer_expr(arg); + } + } + Ty::Unknown + } + ast::Expr::MethodCallExpr(e) => { + if let Some(arg_list) = e.arg_list() { + for arg in arg_list.args() { + // TODO unify / expect argument type + self.infer_expr(arg); + } + } + Ty::Unknown + } + ast::Expr::MatchExpr(e) => { + let ty = if let Some(match_expr) = e.expr() { + self.infer_expr(match_expr) + } else { + Ty::Unknown + }; + if let Some(match_arm_list) = e.match_arm_list() { + for arm in match_arm_list.arms() { + // TODO type the bindings in pat + // TODO type the guard + let ty = if let Some(e) = arm.expr() { + self.infer_expr(e) + } else { + Ty::Unknown + }; + } + // TODO unify all the match arm types + Ty::Unknown + } else { + Ty::Unknown + } + } + ast::Expr::TupleExpr(e) => { + Ty::Unknown + } + ast::Expr::ArrayExpr(e) => { + Ty::Unknown + } + ast::Expr::PathExpr(e) => { + if let Some(p) = e.path() { + if p.qualifier().is_none() { + if let Some(name) = p.segment().and_then(|s| s.name_ref()) { + let s = self.scopes.resolve_local_name(name); + if let Some(scope_entry) = s { + if let Some(ty) = self.type_for.get(&scope_entry.ptr()) { + ty.clone() + } else { + // TODO introduce type variable? 
+ Ty::Unknown + } + } else { + Ty::Unknown + } + } else { + Ty::Unknown + } + } else { + // TODO resolve path + Ty::Unknown + } + } else { + Ty::Unknown + } + } + ast::Expr::ContinueExpr(e) => { + Ty::Never + } + ast::Expr::BreakExpr(e) => { + Ty::Never + } + ast::Expr::ParenExpr(e) => { + if let Some(e) = e.expr() { + self.infer_expr(e) + } else { + Ty::Unknown + } + } + ast::Expr::Label(e) => { + Ty::Unknown + } + ast::Expr::ReturnExpr(e) => { + if let Some(e) = e.expr() { + // TODO unify with return type + self.infer_expr(e); + }; + Ty::Never + } + ast::Expr::MatchArmList(_) | ast::Expr::MatchArm(_) | ast::Expr::MatchGuard(_) => { + // Can this even occur outside of a match expression? + Ty::Unknown + } + ast::Expr::StructLit(e) => { + Ty::Unknown + } + ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => { + // Can this even occur outside of a struct literal? + Ty::Unknown + } + ast::Expr::IndexExpr(e) => { + Ty::Unknown + } + ast::Expr::FieldExpr(e) => { + Ty::Unknown + } + ast::Expr::TryExpr(e) => { + let inner_ty = if let Some(e) = e.expr() { + self.infer_expr(e) + } else { + Ty::Unknown + }; + Ty::Unknown + } + ast::Expr::CastExpr(e) => { + let inner_ty = if let Some(e) = e.expr() { + self.infer_expr(e) + } else { + Ty::Unknown + }; + let cast_ty = e.type_ref().map(Ty::new).unwrap_or(Ty::Unknown); + // TODO do the coercion... + cast_ty + } + ast::Expr::RefExpr(e) => { + let inner_ty = if let Some(e) = e.expr() { + self.infer_expr(e) + } else { + Ty::Unknown + }; + Ty::Unknown + } + ast::Expr::PrefixExpr(e) => { + let inner_ty = if let Some(e) = e.expr() { + self.infer_expr(e) + } else { + Ty::Unknown + }; + Ty::Unknown + } + ast::Expr::RangeExpr(e) => { + Ty::Unknown + } + ast::Expr::BinExpr(e) => { + Ty::Unknown + } + ast::Expr::Literal(e) => { + Ty::Unknown + } + }; + self.write_ty(expr.syntax(), ty.clone()); + ty + } + + fn infer_block(&mut self, node: ast::Block) -> Ty { + for stmt in node.statements() { + match stmt { + ast::Stmt::LetStmt(stmt) => { + if let Some(expr) = stmt.initializer() { + self.infer_expr(expr); + } + } + ast::Stmt::ExprStmt(expr_stmt) => { + if let Some(expr) = expr_stmt.expr() { + self.infer_expr(expr); + } + } + } + } + let ty = if let Some(expr) = node.expr() { + self.infer_expr(expr) + } else { + Ty::unit() + }; + self.write_ty(node.syntax(), ty.clone()); + ty + } +} + +pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> InferenceResult { + let mut ctx = InferenceContext::new(scopes); + + for param in node.param_list().unwrap().params() { + let pat = param.pat().unwrap(); + let type_ref = param.type_ref().unwrap(); + let ty = Ty::new(type_ref); + ctx.type_for.insert(LocalSyntaxPtr::new(pat.syntax()), ty); + } + + // TODO get Ty for node.ret_type() and pass that to infer_block as expectation + // (see Expectation in rustc_typeck) + + ctx.infer_block(node.body().unwrap()); + + // TODO 'resolve' the types: replace inference variables by their inferred results + + InferenceResult { type_for: ctx.type_for } +} diff --git a/crates/ra_hir/src/ty/primitive.rs b/crates/ra_hir/src/ty/primitive.rs new file mode 100644 index 000000000..4a5ce5a97 --- /dev/null +++ b/crates/ra_hir/src/ty/primitive.rs @@ -0,0 +1,98 @@ +use std::fmt; + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] +pub enum IntTy { + Isize, + I8, + I16, + I32, + I64, + I128, +} + +impl fmt::Debug for IntTy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl fmt::Display for IntTy { + fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.ty_to_string()) + } +} + +impl IntTy { + pub fn ty_to_string(&self) -> &'static str { + match *self { + IntTy::Isize => "isize", + IntTy::I8 => "i8", + IntTy::I16 => "i16", + IntTy::I32 => "i32", + IntTy::I64 => "i64", + IntTy::I128 => "i128", + } + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] +pub enum UintTy { + Usize, + U8, + U16, + U32, + U64, + U128, +} + +impl UintTy { + pub fn ty_to_string(&self) -> &'static str { + match *self { + UintTy::Usize => "usize", + UintTy::U8 => "u8", + UintTy::U16 => "u16", + UintTy::U32 => "u32", + UintTy::U64 => "u64", + UintTy::U128 => "u128", + } + } +} + +impl fmt::Debug for UintTy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl fmt::Display for UintTy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.ty_to_string()) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Copy, PartialOrd, Ord)] +pub enum FloatTy { + F32, + F64, +} + +impl fmt::Debug for FloatTy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl fmt::Display for FloatTy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.ty_to_string()) + } +} + +impl FloatTy { + pub fn ty_to_string(self) -> &'static str { + match self { + FloatTy::F32 => "f32", + FloatTy::F64 => "f64", + } + } +} diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs new file mode 100644 index 000000000..f2466dd51 --- /dev/null +++ b/crates/ra_hir/src/ty/tests.rs @@ -0,0 +1,45 @@ +use std::sync::Arc; + +use salsa::Database; +use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase}; +use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}}; +use relative_path::RelativePath; + +use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode}; + +use crate::{ + self as hir, + db::HirDatabase, + mock::MockDatabase, +}; + +fn infer_all_fns(fixture: &str) -> () { + let (db, source_root) = MockDatabase::with_files(fixture); + for &file_id in source_root.files.values() { + let source_file = db.source_file(file_id); + for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) { + let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap(); + let inference_result = func.infer(&db); + for (syntax_ptr, ty) in &inference_result.type_for { + let node = syntax_ptr.resolve(&source_file); + eprintln!("{} '{}': {:?}", syntax_ptr.range(), node.text(), ty); + } + } + } +} + +#[test] +fn infer_smoke_test() { + let text = " + //- /lib.rs + fn foo(x: u32, y: !) 
-> i128 { + x; + y; + return 1; + \"hello\"; + 0 + } + "; + + infer_all_fns(text); +} diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index bf056131e..91f27fb26 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs @@ -523,7 +523,15 @@ impl> CastExprNode { } -impl<'a> CastExpr<'a> {} +impl<'a> CastExpr<'a> { + pub fn expr(self) -> Option> { + super::child_opt(self) + } + + pub fn type_ref(self) -> Option> { + super::child_opt(self) + } +} // Char #[derive(Debug, Clone, Copy,)] @@ -2312,6 +2320,10 @@ impl<'a> Param<'a> { pub fn pat(self) -> Option> { super::child_opt(self) } + + pub fn type_ref(self) -> Option> { + super::child_opt(self) + } } // ParamList @@ -2394,7 +2406,11 @@ impl> ParenExprNode { } -impl<'a> ParenExpr<'a> {} +impl<'a> ParenExpr<'a> { + pub fn expr(self) -> Option> { + super::child_opt(self) + } +} // ParenType #[derive(Debug, Clone, Copy,)] @@ -2829,7 +2845,11 @@ impl> PrefixExprNode { } -impl<'a> PrefixExpr<'a> {} +impl<'a> PrefixExpr<'a> { + pub fn expr(self) -> Option> { + super::child_opt(self) + } +} // RangeExpr #[derive(Debug, Clone, Copy,)] @@ -2940,7 +2960,11 @@ impl> RefExprNode { } -impl<'a> RefExpr<'a> {} +impl<'a> RefExpr<'a> { + pub fn expr(self) -> Option> { + super::child_opt(self) + } +} // RefPat #[derive(Debug, Clone, Copy,)] @@ -3088,7 +3112,11 @@ impl> ReturnExprNode { } -impl<'a> ReturnExpr<'a> {} +impl<'a> ReturnExpr<'a> { + pub fn expr(self) -> Option> { + super::child_opt(self) + } +} // SelfParam #[derive(Debug, Clone, Copy,)] @@ -3578,7 +3606,11 @@ impl> TryExprNode { } -impl<'a> TryExpr<'a> {} +impl<'a> TryExpr<'a> { + pub fn expr(self) -> Option> { + super::child_opt(self) + } +} // TupleExpr #[derive(Debug, Clone, Copy,)] diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index eed67637e..c43db51b6 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron @@ -346,7 +346,7 @@ Grammar( "TupleExpr": (), "ArrayExpr": (), - "ParenExpr": (), + "ParenExpr": (options: ["Expr"]), "PathExpr": (options: ["Path"]), "LambdaExpr": ( options: [ @@ -377,7 +377,7 @@ Grammar( "BlockExpr": ( options: [ "Block" ] ), - "ReturnExpr": (), + "ReturnExpr": (options: ["Expr"]), "MatchExpr": ( options: [ "Expr", "MatchArmList" ], ), @@ -405,10 +405,10 @@ Grammar( ), "IndexExpr": (), "FieldExpr": (), - "TryExpr": (), - "CastExpr": (), - "RefExpr": (), - "PrefixExpr": (), + "TryExpr": (options: ["Expr"]), + "CastExpr": (options: ["Expr", "TypeRef"]), + "RefExpr": (options: ["Expr"]), + "PrefixExpr": (options: ["Expr"]), "RangeExpr": (), "BinExpr": (), "String": (), @@ -521,7 +521,7 @@ Grammar( ), "SelfParam": (), "Param": ( - options: [ "Pat" ], + options: [ "Pat", "TypeRef" ], ), "UseItem": ( options: [ "UseTree" ] -- cgit v1.2.3 From 3899898d75176ce3cd87f9e2acecd7e3a987dda5 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sat, 22 Dec 2018 22:17:55 +0100 Subject: Parse integer / float types --- crates/ra_hir/src/ty.rs | 22 ++++++++++++++++++++-- crates/ra_hir/src/ty/primitive.rs | 32 ++++++++++++++++++++++++++++++++ crates/ra_syntax/src/ast/generated.rs | 6 +++++- crates/ra_syntax/src/grammar.ron | 2 +- 4 files changed, 58 insertions(+), 4 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 36dc5d137..087385b98 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -9,7 +9,7 @@ use std::collections::HashMap; use ra_db::LocalSyntaxPtr; use ra_syntax::{ - 
TextRange, TextUnit, + TextRange, TextUnit, SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode, DocCommentsOwner, NameOwner, LoopBodyOwner, ArgListOwner}, SyntaxNodeRef @@ -148,7 +148,25 @@ impl Ty { ParenType(_inner) => Ty::Unknown, // TODO TupleType(_inner) => Ty::Unknown, // TODO NeverType(..) => Ty::Never, - PathType(_inner) => Ty::Unknown, // TODO + PathType(inner) => { + let path = if let Some(p) = inner.path() { p } else { return Ty::Unknown }; + if path.qualifier().is_none() { + let name = path.segment().and_then(|s| s.name_ref()).map(|n| n.text()).unwrap_or(SmolStr::new("")); + if let Some(int_ty) = primitive::IntTy::from_string(&name) { + Ty::Int(int_ty) + } else if let Some(uint_ty) = primitive::UintTy::from_string(&name) { + Ty::Uint(uint_ty) + } else if let Some(float_ty) = primitive::FloatTy::from_string(&name) { + Ty::Float(float_ty) + } else { + // TODO + Ty::Unknown + } + } else { + // TODO + Ty::Unknown + } + }, PointerType(_inner) => Ty::Unknown, // TODO ArrayType(_inner) => Ty::Unknown, // TODO SliceType(_inner) => Ty::Unknown, // TODO diff --git a/crates/ra_hir/src/ty/primitive.rs b/crates/ra_hir/src/ty/primitive.rs index 4a5ce5a97..ad79b17e4 100644 --- a/crates/ra_hir/src/ty/primitive.rs +++ b/crates/ra_hir/src/ty/primitive.rs @@ -33,6 +33,18 @@ impl IntTy { IntTy::I128 => "i128", } } + + pub fn from_string(s: &str) -> Option { + match s { + "isize" => Some(IntTy::Isize), + "i8" => Some(IntTy::I8), + "i16" => Some(IntTy::I16), + "i32" => Some(IntTy::I32), + "i64" => Some(IntTy::I64), + "i128" => Some(IntTy::I128), + _ => None, + } + } } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] @@ -56,6 +68,18 @@ impl UintTy { UintTy::U128 => "u128", } } + + pub fn from_string(s: &str) -> Option { + match s { + "usize" => Some(UintTy::Usize), + "u8" => Some(UintTy::U8), + "u16" => Some(UintTy::U16), + "u32" => Some(UintTy::U32), + "u64" => Some(UintTy::U64), + "u128" => Some(UintTy::U128), + _ => None, + } + } } impl fmt::Debug for UintTy { @@ -95,4 +119,12 @@ impl FloatTy { FloatTy::F64 => "f64", } } + + pub fn from_string(s: &str) -> Option { + match s { + "f32" => Some(FloatTy::F32), + "f64" => Some(FloatTy::F64), + _ => None, + } + } } diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index 91f27fb26..74bf4d3cc 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs @@ -2697,7 +2697,11 @@ impl> PathTypeNode { } -impl<'a> PathType<'a> {} +impl<'a> PathType<'a> { + pub fn path(self) -> Option> { + super::child_opt(self) + } +} // PlaceholderPat #[derive(Debug, Clone, Copy,)] diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index c43db51b6..29b84854a 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron @@ -304,7 +304,7 @@ Grammar( "ParenType": (), "TupleType": (), "NeverType": (), - "PathType": (), + "PathType": (options: ["Path"]), "PointerType": (), "ArrayType": (), "SliceType": (), -- cgit v1.2.3 From 7348f7883fa2bd571fff036c82e98c102d05c362 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 12:05:54 +0100 Subject: Add testing infrastructure for type inference - move dir_tests to test_utils for that. 
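
The `from_string` constructors and the `PathType` arm added in the "Parse integer / float types" patch above reduce to a name-to-primitive lookup for single-segment, unqualified paths. A minimal self-contained sketch of that lookup, with `SimpleTy` and `primitive_from_name` as illustrative stand-ins rather than the crate's own `Ty`/`primitive` API:

```rust
// Standalone sketch: map a single-segment type name to a primitive, falling back
// to an Unknown type the way Ty::new does for paths it cannot resolve yet.
#[derive(Debug, PartialEq)]
enum SimpleTy {
    Int(String),
    Uint(String),
    Float(String),
    Unknown,
}

fn primitive_from_name(name: &str) -> SimpleTy {
    match name {
        "isize" | "i8" | "i16" | "i32" | "i64" | "i128" => SimpleTy::Int(name.to_string()),
        "usize" | "u8" | "u16" | "u32" | "u64" | "u128" => SimpleTy::Uint(name.to_string()),
        "f32" | "f64" => SimpleTy::Float(name.to_string()),
        // Anything else would need real name resolution; fall back like Ty::Unknown.
        _ => SimpleTy::Unknown,
    }
}

fn main() {
    assert_eq!(primitive_from_name("u32"), SimpleTy::Uint("u32".to_string()));
    assert_eq!(primitive_from_name("Foo"), SimpleTy::Unknown);
}
```
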
--- crates/ra_analysis/src/imp.rs | 14 +++- crates/ra_analysis/src/lib.rs | 3 + crates/ra_hir/src/mock.rs | 9 ++ crates/ra_hir/src/ty.rs | 30 +++++++ crates/ra_hir/src/ty/tests.rs | 62 ++++++++------ crates/ra_hir/src/ty/tests/data/0001_basics.rs | 11 +++ crates/ra_hir/src/ty/tests/data/0001_basics.txt | 13 +++ crates/ra_syntax/tests/test.rs | 105 ++---------------------- crates/test_utils/src/lib.rs | 99 ++++++++++++++++++++++ 9 files changed, 222 insertions(+), 124 deletions(-) create mode 100644 crates/ra_hir/src/ty/tests/data/0001_basics.rs create mode 100644 crates/ra_hir/src/ty/tests/data/0001_basics.txt (limited to 'crates') diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs index b01382808..4e0631679 100644 --- a/crates/ra_analysis/src/imp.rs +++ b/crates/ra_analysis/src/imp.rs @@ -5,7 +5,8 @@ use std::{ use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit}; use ra_syntax::{ - ast::{self, ArgListOwner, Expr, NameOwner}, + ast::{self, ArgListOwner, Expr, NameOwner, FnDef}, + algo::find_covering_node, AstNode, SourceFileNode, SyntaxKind::*, SyntaxNodeRef, TextRange, TextUnit, @@ -510,6 +511,17 @@ impl AnalysisImpl { Ok(None) } + pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable> { + let file = self.db.source_file(file_id); + let syntax = file.syntax(); + let node = find_covering_node(syntax, range); + let parent_fn = node.ancestors().filter_map(FnDef::cast).next(); + let parent_fn = if let Some(p) = parent_fn { p } else { return Ok(None) }; + let function = ctry!(source_binder::function_from_source(&*self.db, file_id, parent_fn)?); + let infer = function.infer(&*self.db); + Ok(infer.type_of_node(node).map(|t| t.to_string())) + } + fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable> { let name = name_ref.text(); let mut query = Query::new(name.to_string()); diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs index 85df9c089..830898140 100644 --- a/crates/ra_analysis/src/lib.rs +++ b/crates/ra_analysis/src/lib.rs @@ -366,6 +366,9 @@ impl Analysis { ) -> Cancelable)>> { self.imp.resolve_callable(position) } + pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable> { + self.imp.type_of(file_id, range) + } } pub struct LibraryData { diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index a9fa540d5..3020ee793 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs @@ -24,6 +24,15 @@ impl MockDatabase { (db, source_root) } + pub(crate) fn with_single_file(text: &str) -> (MockDatabase, SourceRoot, FileId) { + let mut db = MockDatabase::default(); + let mut source_root = SourceRoot::default(); + let file_id = db.add_file(&mut source_root, "/main.rs", text); + db.query_mut(ra_db::SourceRootQuery) + .set(WORKSPACE, Arc::new(source_root.clone())); + (db, source_root, file_id) + } + pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) { let (db, _, position) = MockDatabase::from_fixture(fixture); let position = position.expect("expected a marker ( <|> )"); diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 087385b98..66b204dcd 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -6,6 +6,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use std::sync::Arc; use std::collections::HashMap; +use std::fmt; use ra_db::LocalSyntaxPtr; use ra_syntax::{ @@ -184,11 +185,40 @@ impl Ty { } } +impl fmt::Display for Ty { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Ty::Bool => 
write!(f, "bool"), + Ty::Char => write!(f, "char"), + Ty::Int(t) => write!(f, "{}", t.ty_to_string()), + Ty::Uint(t) => write!(f, "{}", t.ty_to_string()), + Ty::Float(t) => write!(f, "{}", t.ty_to_string()), + Ty::Str => write!(f, "str"), + Ty::Slice(t) => write!(f, "[{}]", t), + Ty::Never => write!(f, "!"), + Ty::Tuple(ts) => { + write!(f, "(")?; + for t in ts { + write!(f, "{},", t)?; + } + write!(f, ")") + } + Ty::Unknown => write!(f, "[unknown]") + } + } +} + #[derive(Clone, PartialEq, Eq, Debug)] pub struct InferenceResult { type_for: FxHashMap, } +impl InferenceResult { + pub fn type_of_node(&self, node: SyntaxNodeRef) -> Option { + self.type_for.get(&LocalSyntaxPtr::new(node)).cloned() + } +} + #[derive(Clone, PartialEq, Eq, Debug)] pub struct InferenceContext { scopes: Arc, diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index f2466dd51..98eedaa3f 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -1,8 +1,11 @@ +use std::fmt::Write; use std::sync::Arc; +use std::path::{Path, PathBuf}; use salsa::Database; use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase}; use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}}; +use test_utils::{project_dir, dir_tests}; use relative_path::RelativePath; use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode}; @@ -13,33 +16,46 @@ use crate::{ mock::MockDatabase, }; -fn infer_all_fns(fixture: &str) -> () { - let (db, source_root) = MockDatabase::with_files(fixture); - for &file_id in source_root.files.values() { - let source_file = db.source_file(file_id); - for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) { - let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap(); - let inference_result = func.infer(&db); - for (syntax_ptr, ty) in &inference_result.type_for { - let node = syntax_ptr.resolve(&source_file); - eprintln!("{} '{}': {:?}", syntax_ptr.range(), node.text(), ty); - } +fn infer_file(content: &str) -> String { + let (db, source_root, file_id) = MockDatabase::with_single_file(content); + let source_file = db.source_file(file_id); + let mut acc = String::new(); + for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) { + let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap(); + let inference_result = func.infer(&db); + for (syntax_ptr, ty) in &inference_result.type_for { + let node = syntax_ptr.resolve(&source_file); + write!(acc, "{} '{}': {}\n", syntax_ptr.range(), ellipsize(node.text().to_string().replace("\n", " "), 15), ty); } } + acc +} + +fn ellipsize(mut text: String, max_len: usize) -> String { + if text.len() <= max_len { + return text; + } + let ellipsis = "..."; + let e_len = ellipsis.len(); + let mut prefix_len = (max_len - e_len) / 2; + while !text.is_char_boundary(prefix_len) { + prefix_len += 1; + } + let mut suffix_len = max_len - e_len - prefix_len; + while !text.is_char_boundary(text.len() - suffix_len) { + suffix_len += 1; + } + text.replace_range(prefix_len..text.len() - suffix_len, ellipsis); + text } #[test] -fn infer_smoke_test() { - let text = " - //- /lib.rs - fn foo(x: u32, y: !) 
-> i128 { - x; - y; - return 1; - \"hello\"; - 0 - } - "; +pub fn infer_tests() { + dir_tests(&test_data_dir(), &["."], |text, _path| { + infer_file(text) + }); +} - infer_all_fns(text); +fn test_data_dir() -> PathBuf { + project_dir().join("crates/ra_hir/src/ty/tests/data") } diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.rs b/crates/ra_hir/src/ty/tests/data/0001_basics.rs new file mode 100644 index 000000000..59a60d031 --- /dev/null +++ b/crates/ra_hir/src/ty/tests/data/0001_basics.rs @@ -0,0 +1,11 @@ + +fn test(a: u32, b: isize, c: !, d: &str) { + a; + b; + c; + d; + 1usize; + 1isize; + "test"; + 1.0f32; +} diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.txt b/crates/ra_hir/src/ty/tests/data/0001_basics.txt new file mode 100644 index 000000000..0c46f243a --- /dev/null +++ b/crates/ra_hir/src/ty/tests/data/0001_basics.txt @@ -0,0 +1,13 @@ +[33; 34) 'd': [unknown] +[88; 94) '1isize': [unknown] +[48; 49) 'a': u32 +[55; 56) 'b': isize +[112; 118) '1.0f32': [unknown] +[76; 82) '1usize': [unknown] +[9; 10) 'a': u32 +[27; 28) 'c': ! +[62; 63) 'c': ! +[17; 18) 'b': isize +[100; 106) '"test"': [unknown] +[42; 121) '{ ...f32; }': () +[69; 70) 'd': [unknown] diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs index 4266864bd..9d94a1a23 100644 --- a/crates/ra_syntax/tests/test.rs +++ b/crates/ra_syntax/tests/test.rs @@ -9,6 +9,7 @@ use std::{ path::{Path, PathBuf, Component}, }; +use test_utils::{project_dir, dir_tests, read_text, collect_tests}; use ra_syntax::{ utils::{check_fuzz_invariants, dump_tree}, SourceFileNode, @@ -16,7 +17,7 @@ use ra_syntax::{ #[test] fn lexer_tests() { - dir_tests(&["lexer"], |text, _| { + dir_tests(&test_data_dir(), &["lexer"], |text, _| { let tokens = ra_syntax::tokenize(text); dump_tokens(&tokens, text) }) @@ -24,7 +25,7 @@ fn lexer_tests() { #[test] fn parser_tests() { - dir_tests(&["parser/inline/ok", "parser/ok"], |text, path| { + dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { let file = SourceFileNode::parse(text); let errors = file.errors(); assert_eq!( @@ -35,7 +36,7 @@ fn parser_tests() { ); dump_tree(file.syntax()) }); - dir_tests(&["parser/err", "parser/inline/err"], |text, path| { + dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { let file = SourceFileNode::parse(text); let errors = file.errors(); assert_ne!( @@ -50,7 +51,7 @@ fn parser_tests() { #[test] fn parser_fuzz_tests() { - for (_, text) in collect_tests(&["parser/fuzz-failures"]) { + for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) { check_fuzz_invariants(&text) } } @@ -92,102 +93,6 @@ fn self_hosting_parsing() { "self_hosting_parsing found too few files - is it running in the right directory?" ) } -/// Read file and normalize newlines. -/// -/// `rustc` seems to always normalize `\r\n` newlines to `\n`: -/// -/// ``` -/// let s = " -/// "; -/// assert_eq!(s.as_bytes(), &[10]); -/// ``` -/// -/// so this should always be correct. 
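
This patch moves the golden-file helpers (`read_text`, `dir_tests`, `collect_tests`) into `test_utils` so the inference tests can compare their dumps against checked-in `.txt` files. A minimal sketch of that golden-file pattern in isolation (std-only; the `check_golden` helper and the paths are illustrative, not the crate's API):

```rust
// Standalone sketch of the golden-file pattern: run a function over a fixture and
// diff the output against a sibling `.txt` file, creating the expectation on first run.
use std::{fs, path::Path};

fn check_golden(input: &Path, produce: impl Fn(&str) -> String) {
    let text = fs::read_to_string(input).unwrap().replace("\r\n", "\n");
    let actual = produce(&text);
    let expected_path = input.with_extension("txt");
    if !expected_path.exists() {
        // First run: record the current output as the expectation.
        fs::write(&expected_path, &actual).unwrap();
        panic!("no expected result; created {}", expected_path.display());
    }
    let expected = fs::read_to_string(&expected_path).unwrap().replace("\r\n", "\n");
    assert_eq!(expected.trim(), actual.trim(), "mismatch for {}", input.display());
}

fn main() {
    check_golden(Path::new("tests/data/0001_basics.rs"), |text| {
        // A real harness would run type inference here; echoing keeps the sketch runnable.
        text.to_string()
    });
}
```
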
-fn read_text(path: &Path) -> String { - fs::read_to_string(path) - .expect(&format!("File at {:?} should be valid", path)) - .replace("\r\n", "\n") -} - -fn dir_tests(paths: &[&str], f: F) -where - F: Fn(&str, &Path) -> String, -{ - for (path, input_code) in collect_tests(paths) { - let parse_tree = f(&input_code, &path); - let path = path.with_extension("txt"); - if !path.exists() { - println!("\nfile: {}", path.display()); - println!("No .txt file with expected result, creating...\n"); - println!("{}\n{}", input_code, parse_tree); - fs::write(&path, &parse_tree).unwrap(); - panic!("No expected result") - } - let expected = read_text(&path); - let expected = expected.as_str(); - let parse_tree = parse_tree.as_str(); - assert_equal_text(expected, parse_tree, &path); - } -} - -const REWRITE: bool = false; - -fn assert_equal_text(expected: &str, actual: &str, path: &Path) { - if expected == actual { - return; - } - let dir = project_dir(); - let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path); - if expected.trim() == actual.trim() { - println!("whitespace difference, rewriting"); - println!("file: {}\n", pretty_path.display()); - fs::write(path, actual).unwrap(); - return; - } - if REWRITE { - println!("rewriting {}", pretty_path.display()); - fs::write(path, actual).unwrap(); - return; - } - assert_eq_text!(expected, actual, "file: {}", pretty_path.display()); -} - -fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> { - paths - .iter() - .flat_map(|path| { - let path = test_data_dir().join(path); - test_from_dir(&path).into_iter() - }) - .map(|path| { - let text = read_text(&path); - (path, text) - }) - .collect() -} - -fn test_from_dir(dir: &Path) -> Vec { - let mut acc = Vec::new(); - for file in fs::read_dir(&dir).unwrap() { - let file = file.unwrap(); - let path = file.path(); - if path.extension().unwrap_or_default() == "rs" { - acc.push(path); - } - } - acc.sort(); - acc -} - -fn project_dir() -> PathBuf { - let dir = env!("CARGO_MANIFEST_DIR"); - PathBuf::from(dir) - .parent() - .unwrap() - .parent() - .unwrap() - .to_owned() -} fn test_data_dir() -> PathBuf { project_dir().join("crates/ra_syntax/tests/data") diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index beb936c61..012b1d0b4 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs @@ -1,4 +1,6 @@ use std::fmt; +use std::fs; +use std::path::{Path, PathBuf}; use itertools::Itertools; use text_unit::{TextRange, TextUnit}; @@ -262,3 +264,100 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a _ => Some((expected, actual)), } } + +pub fn dir_tests(test_data_dir: &Path, paths: &[&str], f: F) +where + F: Fn(&str, &Path) -> String, +{ + for (path, input_code) in collect_tests(test_data_dir, paths) { + let parse_tree = f(&input_code, &path); + let path = path.with_extension("txt"); + if !path.exists() { + println!("\nfile: {}", path.display()); + println!("No .txt file with expected result, creating...\n"); + println!("{}\n{}", input_code, parse_tree); + fs::write(&path, &parse_tree).unwrap(); + panic!("No expected result") + } + let expected = read_text(&path); + let expected = expected.as_str(); + let parse_tree = parse_tree.as_str(); + assert_equal_text(expected, parse_tree, &path); + } +} + +pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { + paths + .iter() + .flat_map(|path| { + let path = test_data_dir.to_owned().join(path); + test_from_dir(&path).into_iter() + }) + .map(|path| { + let text = 
read_text(&path); + (path, text) + }) + .collect() +} + +fn test_from_dir(dir: &Path) -> Vec { + let mut acc = Vec::new(); + for file in fs::read_dir(&dir).unwrap() { + let file = file.unwrap(); + let path = file.path(); + if path.extension().unwrap_or_default() == "rs" { + acc.push(path); + } + } + acc.sort(); + acc +} + +pub fn project_dir() -> PathBuf { + let dir = env!("CARGO_MANIFEST_DIR"); + PathBuf::from(dir) + .parent() + .unwrap() + .parent() + .unwrap() + .to_owned() +} + +/// Read file and normalize newlines. +/// +/// `rustc` seems to always normalize `\r\n` newlines to `\n`: +/// +/// ``` +/// let s = " +/// "; +/// assert_eq!(s.as_bytes(), &[10]); +/// ``` +/// +/// so this should always be correct. +pub fn read_text(path: &Path) -> String { + fs::read_to_string(path) + .expect(&format!("File at {:?} should be valid", path)) + .replace("\r\n", "\n") +} + +const REWRITE: bool = false; + +fn assert_equal_text(expected: &str, actual: &str, path: &Path) { + if expected == actual { + return; + } + let dir = project_dir(); + let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path); + if expected.trim() == actual.trim() { + println!("whitespace difference, rewriting"); + println!("file: {}\n", pretty_path.display()); + fs::write(path, actual).unwrap(); + return; + } + if REWRITE { + println!("rewriting {}", pretty_path.display()); + fs::write(path, actual).unwrap(); + return; + } + assert_eq_text!(expected, actual, "file: {}", pretty_path.display()); +} -- cgit v1.2.3 From 515c3bc59bfc227cbbb82f80b53c5c125be4fc30 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 12:15:46 +0100 Subject: Cleanup --- crates/ra_analysis/src/imp.rs | 10 +++- crates/ra_hir/src/ty.rs | 113 ++++++++++++++++++----------------------- crates/ra_hir/src/ty/tests.rs | 39 +++++++------- crates/ra_syntax/tests/test.rs | 56 +++++++++++--------- 4 files changed, 110 insertions(+), 108 deletions(-) (limited to 'crates') diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs index 4e0631679..f2912d235 100644 --- a/crates/ra_analysis/src/imp.rs +++ b/crates/ra_analysis/src/imp.rs @@ -516,8 +516,14 @@ impl AnalysisImpl { let syntax = file.syntax(); let node = find_covering_node(syntax, range); let parent_fn = node.ancestors().filter_map(FnDef::cast).next(); - let parent_fn = if let Some(p) = parent_fn { p } else { return Ok(None) }; - let function = ctry!(source_binder::function_from_source(&*self.db, file_id, parent_fn)?); + let parent_fn = if let Some(p) = parent_fn { + p + } else { + return Ok(None); + }; + let function = ctry!(source_binder::function_from_source( + &*self.db, file_id, parent_fn + )?); let infer = function.infer(&*self.db); Ok(infer.type_of_node(node).map(|t| t.to_string())) } diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 66b204dcd..88bce1960 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -2,24 +2,21 @@ mod primitive; #[cfg(test)] mod tests; -use rustc_hash::{FxHashMap, FxHashSet}; - use std::sync::Arc; -use std::collections::HashMap; use std::fmt; +use rustc_hash::{FxHashMap}; + use ra_db::LocalSyntaxPtr; use ra_syntax::{ - TextRange, TextUnit, SmolStr, - algo::visit::{visitor, Visitor}, - ast::{self, AstNode, DocCommentsOwner, NameOwner, LoopBodyOwner, ArgListOwner}, + SmolStr, + ast::{self, AstNode, LoopBodyOwner, ArgListOwner}, SyntaxNodeRef }; use crate::{ FnScopes, db::HirDatabase, - arena::{Arena, Id}, }; // pub(crate) type TypeId = Id; @@ -150,9 +147,17 @@ impl Ty { TupleType(_inner) => 
Ty::Unknown, // TODO NeverType(..) => Ty::Never, PathType(inner) => { - let path = if let Some(p) = inner.path() { p } else { return Ty::Unknown }; + let path = if let Some(p) = inner.path() { + p + } else { + return Ty::Unknown; + }; if path.qualifier().is_none() { - let name = path.segment().and_then(|s| s.name_ref()).map(|n| n.text()).unwrap_or(SmolStr::new("")); + let name = path + .segment() + .and_then(|s| s.name_ref()) + .map(|n| n.text()) + .unwrap_or(SmolStr::new("")); if let Some(int_ty) = primitive::IntTy::from_string(&name) { Ty::Int(int_ty) } else if let Some(uint_ty) = primitive::UintTy::from_string(&name) { @@ -167,16 +172,16 @@ impl Ty { // TODO Ty::Unknown } - }, - PointerType(_inner) => Ty::Unknown, // TODO - ArrayType(_inner) => Ty::Unknown, // TODO - SliceType(_inner) => Ty::Unknown, // TODO - ReferenceType(_inner) => Ty::Unknown, // TODO + } + PointerType(_inner) => Ty::Unknown, // TODO + ArrayType(_inner) => Ty::Unknown, // TODO + SliceType(_inner) => Ty::Unknown, // TODO + ReferenceType(_inner) => Ty::Unknown, // TODO PlaceholderType(_inner) => Ty::Unknown, // TODO - FnPointerType(_inner) => Ty::Unknown, // TODO - ForType(_inner) => Ty::Unknown, // TODO - ImplTraitType(_inner) => Ty::Unknown, // TODO - DynTraitType(_inner) => Ty::Unknown, // TODO + FnPointerType(_inner) => Ty::Unknown, // TODO + ForType(_inner) => Ty::Unknown, // TODO + ImplTraitType(_inner) => Ty::Unknown, // TODO + DynTraitType(_inner) => Ty::Unknown, // TODO } } @@ -203,7 +208,7 @@ impl fmt::Display for Ty { } write!(f, ")") } - Ty::Unknown => write!(f, "[unknown]") + Ty::Unknown => write!(f, "[unknown]"), } } } @@ -230,7 +235,7 @@ impl InferenceContext { fn new(scopes: Arc) -> Self { InferenceContext { type_for: FxHashMap::default(), - scopes + scopes, } } @@ -238,7 +243,7 @@ impl InferenceContext { self.type_for.insert(LocalSyntaxPtr::new(node), ty); } - fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { + fn unify(&mut self, _ty1: &Ty, _ty2: &Ty) -> bool { unimplemented!() } @@ -303,7 +308,7 @@ impl InferenceContext { if let Some(expr) = e.iterable() { self.infer_expr(expr); } - if let Some(pat) = e.pat() { + if let Some(_pat) = e.pat() { // TODO write type for pat } if let Some(block) = e.loop_body() { @@ -313,7 +318,7 @@ impl InferenceContext { Ty::Unknown } ast::Expr::LambdaExpr(e) => { - let body_ty = if let Some(body) = e.body() { + let _body_ty = if let Some(body) = e.body() { self.infer_expr(body) } else { Ty::Unknown @@ -339,7 +344,7 @@ impl InferenceContext { Ty::Unknown } ast::Expr::MatchExpr(e) => { - let ty = if let Some(match_expr) = e.expr() { + let _ty = if let Some(match_expr) = e.expr() { self.infer_expr(match_expr) } else { Ty::Unknown @@ -348,7 +353,7 @@ impl InferenceContext { for arm in match_arm_list.arms() { // TODO type the bindings in pat // TODO type the guard - let ty = if let Some(e) = arm.expr() { + let _ty = if let Some(e) = arm.expr() { self.infer_expr(e) } else { Ty::Unknown @@ -360,12 +365,8 @@ impl InferenceContext { Ty::Unknown } } - ast::Expr::TupleExpr(e) => { - Ty::Unknown - } - ast::Expr::ArrayExpr(e) => { - Ty::Unknown - } + ast::Expr::TupleExpr(_e) => Ty::Unknown, + ast::Expr::ArrayExpr(_e) => Ty::Unknown, ast::Expr::PathExpr(e) => { if let Some(p) = e.path() { if p.qualifier().is_none() { @@ -392,12 +393,8 @@ impl InferenceContext { Ty::Unknown } } - ast::Expr::ContinueExpr(e) => { - Ty::Never - } - ast::Expr::BreakExpr(e) => { - Ty::Never - } + ast::Expr::ContinueExpr(_e) => Ty::Never, + ast::Expr::BreakExpr(_e) => Ty::Never, ast::Expr::ParenExpr(e) => 
{ if let Some(e) = e.expr() { self.infer_expr(e) @@ -405,9 +402,7 @@ impl InferenceContext { Ty::Unknown } } - ast::Expr::Label(e) => { - Ty::Unknown - } + ast::Expr::Label(_e) => Ty::Unknown, ast::Expr::ReturnExpr(e) => { if let Some(e) = e.expr() { // TODO unify with return type @@ -419,21 +414,15 @@ impl InferenceContext { // Can this even occur outside of a match expression? Ty::Unknown } - ast::Expr::StructLit(e) => { - Ty::Unknown - } + ast::Expr::StructLit(_e) => Ty::Unknown, ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => { // Can this even occur outside of a struct literal? Ty::Unknown } - ast::Expr::IndexExpr(e) => { - Ty::Unknown - } - ast::Expr::FieldExpr(e) => { - Ty::Unknown - } + ast::Expr::IndexExpr(_e) => Ty::Unknown, + ast::Expr::FieldExpr(_e) => Ty::Unknown, ast::Expr::TryExpr(e) => { - let inner_ty = if let Some(e) = e.expr() { + let _inner_ty = if let Some(e) = e.expr() { self.infer_expr(e) } else { Ty::Unknown @@ -441,7 +430,7 @@ impl InferenceContext { Ty::Unknown } ast::Expr::CastExpr(e) => { - let inner_ty = if let Some(e) = e.expr() { + let _inner_ty = if let Some(e) = e.expr() { self.infer_expr(e) } else { Ty::Unknown @@ -451,7 +440,7 @@ impl InferenceContext { cast_ty } ast::Expr::RefExpr(e) => { - let inner_ty = if let Some(e) = e.expr() { + let _inner_ty = if let Some(e) = e.expr() { self.infer_expr(e) } else { Ty::Unknown @@ -459,22 +448,16 @@ impl InferenceContext { Ty::Unknown } ast::Expr::PrefixExpr(e) => { - let inner_ty = if let Some(e) = e.expr() { + let _inner_ty = if let Some(e) = e.expr() { self.infer_expr(e) } else { Ty::Unknown }; Ty::Unknown } - ast::Expr::RangeExpr(e) => { - Ty::Unknown - } - ast::Expr::BinExpr(e) => { - Ty::Unknown - } - ast::Expr::Literal(e) => { - Ty::Unknown - } + ast::Expr::RangeExpr(_e) => Ty::Unknown, + ast::Expr::BinExpr(_e) => Ty::Unknown, + ast::Expr::Literal(_e) => Ty::Unknown, }; self.write_ty(expr.syntax(), ty.clone()); ty @@ -505,7 +488,7 @@ impl InferenceContext { } } -pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> InferenceResult { +pub fn infer(_db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> InferenceResult { let mut ctx = InferenceContext::new(scopes); for param in node.param_list().unwrap().params() { @@ -522,5 +505,7 @@ pub fn infer(db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> // TODO 'resolve' the types: replace inference variables by their inferred results - InferenceResult { type_for: ctx.type_for } + InferenceResult { + type_for: ctx.type_for, + } } diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 98eedaa3f..0880b51bc 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -1,31 +1,38 @@ use std::fmt::Write; -use std::sync::Arc; -use std::path::{Path, PathBuf}; +use std::path::{PathBuf}; -use salsa::Database; -use ra_db::{FilesDatabase, CrateGraph, SyntaxDatabase}; -use ra_syntax::{SmolStr, algo::visit::{visitor, Visitor}, ast::{self, AstNode}}; +use ra_db::{SyntaxDatabase}; +use ra_syntax::ast::{self, AstNode}; use test_utils::{project_dir, dir_tests}; -use relative_path::RelativePath; - -use crate::{source_binder, mock::WORKSPACE, module::ModuleSourceNode}; use crate::{ - self as hir, - db::HirDatabase, + source_binder, mock::MockDatabase, }; fn infer_file(content: &str) -> String { - let (db, source_root, file_id) = MockDatabase::with_single_file(content); + let (db, _, file_id) = MockDatabase::with_single_file(content); let source_file = db.source_file(file_id); let mut acc = String::new(); 
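
Throughout these patches, inference results never live on the syntax tree itself; they are keyed by a `LocalSyntaxPtr` in the `type_for` side table that `infer_file` dumps. A rough standalone model of that bookkeeping, with a byte range standing in for the syntax pointer and all names illustrative:

```rust
// Rough model of the `type_for` side table: inferred types are recorded against a
// stable key for a syntax node rather than being stored in the AST.
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct NodeKey {
    start: u32,
    end: u32,
}

#[derive(Default)]
struct InferenceTable {
    type_for: HashMap<NodeKey, String>,
}

impl InferenceTable {
    fn write_ty(&mut self, key: NodeKey, ty: &str) {
        self.type_for.insert(key, ty.to_string());
    }
    fn type_of_node(&self, key: &NodeKey) -> Option<String> {
        self.type_for.get(key).cloned()
    }
}

fn main() {
    let mut table = InferenceTable::default();
    let param_a = NodeKey { start: 9, end: 10 }; // e.g. `a` in `fn test(a: u32, ...)`
    table.write_ty(param_a.clone(), "u32");
    // The test harness dumps exactly this kind of range -> type mapping.
    assert_eq!(table.type_of_node(&param_a).as_deref(), Some("u32"));
}
```
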
- for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) { - let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap().unwrap(); + for fn_def in source_file + .syntax() + .descendants() + .filter_map(ast::FnDef::cast) + { + let func = source_binder::function_from_source(&db, file_id, fn_def) + .unwrap() + .unwrap(); let inference_result = func.infer(&db); for (syntax_ptr, ty) in &inference_result.type_for { let node = syntax_ptr.resolve(&source_file); - write!(acc, "{} '{}': {}\n", syntax_ptr.range(), ellipsize(node.text().to_string().replace("\n", " "), 15), ty); + write!( + acc, + "{} '{}': {}\n", + syntax_ptr.range(), + ellipsize(node.text().to_string().replace("\n", " "), 15), + ty + ) + .unwrap(); } } acc @@ -51,9 +58,7 @@ fn ellipsize(mut text: String, max_len: usize) -> String { #[test] pub fn infer_tests() { - dir_tests(&test_data_dir(), &["."], |text, _path| { - infer_file(text) - }); + dir_tests(&test_data_dir(), &["."], |text, _path| infer_file(text)); } fn test_data_dir() -> PathBuf { diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs index 9d94a1a23..2235dc401 100644 --- a/crates/ra_syntax/tests/test.rs +++ b/crates/ra_syntax/tests/test.rs @@ -1,12 +1,10 @@ extern crate ra_syntax; -#[macro_use] extern crate test_utils; extern crate walkdir; use std::{ fmt::Write, - fs, - path::{Path, PathBuf, Component}, + path::{PathBuf, Component}, }; use test_utils::{project_dir, dir_tests, read_text, collect_tests}; @@ -25,28 +23,36 @@ fn lexer_tests() { #[test] fn parser_tests() { - dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { - let file = SourceFileNode::parse(text); - let errors = file.errors(); - assert_eq!( - &*errors, - &[] as &[ra_syntax::SyntaxError], - "There should be no errors in the file {:?}", - path.display() - ); - dump_tree(file.syntax()) - }); - dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { - let file = SourceFileNode::parse(text); - let errors = file.errors(); - assert_ne!( - &*errors, - &[] as &[ra_syntax::SyntaxError], - "There should be errors in the file {:?}", - path.display() - ); - dump_tree(file.syntax()) - }); + dir_tests( + &test_data_dir(), + &["parser/inline/ok", "parser/ok"], + |text, path| { + let file = SourceFileNode::parse(text); + let errors = file.errors(); + assert_eq!( + &*errors, + &[] as &[ra_syntax::SyntaxError], + "There should be no errors in the file {:?}", + path.display() + ); + dump_tree(file.syntax()) + }, + ); + dir_tests( + &test_data_dir(), + &["parser/err", "parser/inline/err"], + |text, path| { + let file = SourceFileNode::parse(text); + let errors = file.errors(); + assert_ne!( + &*errors, + &[] as &[ra_syntax::SyntaxError], + "There should be errors in the file {:?}", + path.display() + ); + dump_tree(file.syntax()) + }, + ); } #[test] -- cgit v1.2.3 From b4139d54fc68240aa8e4e221841298115d8fb00f Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 12:54:53 +0100 Subject: Get rid of the terrible nesting in PathExpr inference --- crates/ra_hir/src/ty.rs | 40 ++++++++++++++-------------------------- 1 file changed, 14 insertions(+), 26 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 88bce1960..1d5473d17 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -247,6 +247,19 @@ impl InferenceContext { unimplemented!() } + fn infer_path_expr(&mut self, expr: ast::PathExpr) -> Option { + let p = expr.path()?; + if 
p.qualifier().is_none() { + let name = p.segment().and_then(|s| s.name_ref())?; + let scope_entry = self.scopes.resolve_local_name(name)?; + let ty = self.type_for.get(&scope_entry.ptr())?; + Some(ty.clone()) + } else { + // TODO resolve path + Some(Ty::Unknown) + } + } + fn infer_expr(&mut self, expr: ast::Expr) -> Ty { let ty = match expr { ast::Expr::IfExpr(e) => { @@ -367,32 +380,7 @@ impl InferenceContext { } ast::Expr::TupleExpr(_e) => Ty::Unknown, ast::Expr::ArrayExpr(_e) => Ty::Unknown, - ast::Expr::PathExpr(e) => { - if let Some(p) = e.path() { - if p.qualifier().is_none() { - if let Some(name) = p.segment().and_then(|s| s.name_ref()) { - let s = self.scopes.resolve_local_name(name); - if let Some(scope_entry) = s { - if let Some(ty) = self.type_for.get(&scope_entry.ptr()) { - ty.clone() - } else { - // TODO introduce type variable? - Ty::Unknown - } - } else { - Ty::Unknown - } - } else { - Ty::Unknown - } - } else { - // TODO resolve path - Ty::Unknown - } - } else { - Ty::Unknown - } - } + ast::Expr::PathExpr(e) => self.infer_path_expr(e).unwrap_or(Ty::Unknown), ast::Expr::ContinueExpr(_e) => Ty::Never, ast::Expr::BreakExpr(_e) => Ty::Never, ast::Expr::ParenExpr(e) => { -- cgit v1.2.3 From 5d609370900402db84beb3d7e28487256c6042c2 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 12:59:38 +0100 Subject: Remove unwraps --- crates/ra_hir/src/ty.rs | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 1d5473d17..eb5fea153 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -479,17 +479,30 @@ impl InferenceContext { pub fn infer(_db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> InferenceResult { let mut ctx = InferenceContext::new(scopes); - for param in node.param_list().unwrap().params() { - let pat = param.pat().unwrap(); - let type_ref = param.type_ref().unwrap(); - let ty = Ty::new(type_ref); - ctx.type_for.insert(LocalSyntaxPtr::new(pat.syntax()), ty); + if let Some(param_list) = node.param_list() { + for param in param_list.params() { + let pat = if let Some(pat) = param.pat() { + pat + } else { + continue; + }; + if let Some(type_ref) = param.type_ref() { + let ty = Ty::new(type_ref); + ctx.type_for.insert(LocalSyntaxPtr::new(pat.syntax()), ty); + } else { + // TODO self param + ctx.type_for + .insert(LocalSyntaxPtr::new(pat.syntax()), Ty::Unknown); + }; + } } // TODO get Ty for node.ret_type() and pass that to infer_block as expectation // (see Expectation in rustc_typeck) - ctx.infer_block(node.body().unwrap()); + if let Some(block) = node.body() { + ctx.infer_block(block); + } // TODO 'resolve' the types: replace inference variables by their inferred results -- cgit v1.2.3 From 93ffbf80c632a7d38fc8bbdf6357bfd26a96a35a Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 13:22:29 +0100 Subject: Make let statements kind of work --- crates/ra_hir/src/ty.rs | 44 +++++++++++++++++++++++----- crates/ra_hir/src/ty/tests/data/0002_let.rs | 5 ++++ crates/ra_hir/src/ty/tests/data/0002_let.txt | 7 +++++ crates/ra_syntax/src/ast/generated.rs | 4 +++ crates/ra_syntax/src/grammar.ron | 1 + 5 files changed, 53 insertions(+), 8 deletions(-) create mode 100644 crates/ra_hir/src/ty/tests/data/0002_let.rs create mode 100644 crates/ra_hir/src/ty/tests/data/0002_let.txt (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index eb5fea153..615a1caed 100644 --- a/crates/ra_hir/src/ty.rs 
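
The hunk that follows replaces the unimplemented `unify` with a first approximation in which `Unknown` unifies with anything and otherwise only equal types unify. A toy version of that rule over a simplified type enum (illustrative only; real unification would also recurse into compound types and handle inference variables):

```rust
// Toy unification mirroring the rule in the hunk below: Unknown acts as a
// wildcard, equal types unify to themselves, everything else fails for now.
#[derive(Clone, Debug, PartialEq)]
enum ToyTy {
    Bool,
    Int,
    Tuple(Vec<ToyTy>),
    Unknown,
}

fn unify(t1: &ToyTy, t2: &ToyTy) -> Option<ToyTy> {
    match (t1, t2) {
        (ToyTy::Unknown, other) | (other, ToyTy::Unknown) => Some(other.clone()),
        // A fuller implementation would recurse into Tuple and friends here.
        _ if t1 == t2 => Some(t1.clone()),
        _ => None,
    }
}

fn main() {
    assert_eq!(unify(&ToyTy::Unknown, &ToyTy::Int), Some(ToyTy::Int));
    assert_eq!(unify(&ToyTy::Bool, &ToyTy::Bool), Some(ToyTy::Bool));
    assert_eq!(unify(&ToyTy::Bool, &ToyTy::Int), None);
    assert_eq!(
        unify(&ToyTy::Tuple(vec![]), &ToyTy::Tuple(vec![])),
        Some(ToyTy::Tuple(vec![]))
    );
}
```
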
+++ b/crates/ra_hir/src/ty.rs @@ -243,8 +243,23 @@ impl InferenceContext { self.type_for.insert(LocalSyntaxPtr::new(node), ty); } - fn unify(&mut self, _ty1: &Ty, _ty2: &Ty) -> bool { - unimplemented!() + fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> Option { + if *ty1 == Ty::Unknown { + return Some(ty2.clone()); + } + if *ty2 == Ty::Unknown { + return Some(ty1.clone()); + } + if ty1 == ty2 { + return Some(ty1.clone()); + } + // TODO implement actual unification + return None; + } + + fn unify_with_coercion(&mut self, ty1: &Ty, ty2: &Ty) -> Option { + // TODO implement coercion + self.unify(ty1, ty2) } fn infer_path_expr(&mut self, expr: ast::PathExpr) -> Option { @@ -280,9 +295,8 @@ impl InferenceContext { } else { Ty::Unknown }; - if self.unify(&if_ty, &else_ty) { - // TODO actually, need to take the 'more specific' type (not unknown, never, ...) - if_ty + if let Some(ty) = self.unify(&if_ty, &else_ty) { + ty } else { // TODO report diagnostic Ty::Unknown @@ -455,9 +469,23 @@ impl InferenceContext { for stmt in node.statements() { match stmt { ast::Stmt::LetStmt(stmt) => { - if let Some(expr) = stmt.initializer() { - self.infer_expr(expr); - } + let decl_ty = if let Some(type_ref) = stmt.type_ref() { + Ty::new(type_ref) + } else { + Ty::Unknown + }; + let ty = if let Some(expr) = stmt.initializer() { + // TODO pass expectation + let expr_ty = self.infer_expr(expr); + self.unify_with_coercion(&expr_ty, &decl_ty) + .unwrap_or(decl_ty) + } else { + decl_ty + }; + + if let Some(pat) = stmt.pat() { + self.write_ty(pat.syntax(), ty); + }; } ast::Stmt::ExprStmt(expr_stmt) => { if let Some(expr) = expr_stmt.expr() { diff --git a/crates/ra_hir/src/ty/tests/data/0002_let.rs b/crates/ra_hir/src/ty/tests/data/0002_let.rs new file mode 100644 index 000000000..5641da75b --- /dev/null +++ b/crates/ra_hir/src/ty/tests/data/0002_let.rs @@ -0,0 +1,5 @@ +fn test() { + let a = 1isize; + let b: usize = 1; + let c = b; +} diff --git a/crates/ra_hir/src/ty/tests/data/0002_let.txt b/crates/ra_hir/src/ty/tests/data/0002_let.txt new file mode 100644 index 000000000..5f515ee59 --- /dev/null +++ b/crates/ra_hir/src/ty/tests/data/0002_let.txt @@ -0,0 +1,7 @@ +[51; 52) '1': [unknown] +[10; 70) '{ ...= b; }': () +[24; 30) '1isize': [unknown] +[20; 21) 'a': [unknown] +[62; 63) 'c': usize +[66; 67) 'b': usize +[40; 41) 'b': usize diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index 74bf4d3cc..b15c4ef6f 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs @@ -1561,6 +1561,10 @@ impl<'a> LetStmt<'a> { super::child_opt(self) } + pub fn type_ref(self) -> Option> { + super::child_opt(self) + } + pub fn initializer(self) -> Option> { super::child_opt(self) } diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index 29b84854a..8dca493ee 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron @@ -499,6 +499,7 @@ Grammar( ), "LetStmt": ( options: [ ["pat", "Pat"], + ["type_ref", "TypeRef"], ["initializer", "Expr"], ]), "Condition": ( -- cgit v1.2.3 From ef67581104eb00a0c199f0b2a3b558da8a6f90a2 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 17:13:11 +0100 Subject: Resolve paths to defs (functions currently) during type inference --- crates/ra_analysis/src/db.rs | 1 + crates/ra_analysis/src/imp.rs | 2 +- crates/ra_hir/Cargo.toml | 3 + crates/ra_hir/src/db.rs | 9 +- crates/ra_hir/src/function.rs | 16 +- crates/ra_hir/src/lib.rs | 19 ++- crates/ra_hir/src/mock.rs | 1 + 
crates/ra_hir/src/module.rs | 1 + crates/ra_hir/src/module/nameres.rs | 4 +- crates/ra_hir/src/query_definitions.rs | 16 +- crates/ra_hir/src/ty.rs | 195 ++++++++++++++++--------- crates/ra_hir/src/ty/tests.rs | 7 +- crates/ra_hir/src/ty/tests/data/0003_paths.rs | 10 ++ crates/ra_hir/src/ty/tests/data/0003_paths.txt | 9 ++ crates/ra_syntax/src/ast/generated.rs | 6 +- crates/ra_syntax/src/grammar.ron | 2 +- 16 files changed, 214 insertions(+), 87 deletions(-) create mode 100644 crates/ra_hir/src/ty/tests/data/0003_paths.rs create mode 100644 crates/ra_hir/src/ty/tests/data/0003_paths.txt (limited to 'crates') diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs index f26c49887..780a84291 100644 --- a/crates/ra_analysis/src/db.rs +++ b/crates/ra_analysis/src/db.rs @@ -94,6 +94,7 @@ salsa::database_storage! { fn fn_syntax() for hir::db::FnSyntaxQuery; fn submodules() for hir::db::SubmodulesQuery; fn infer() for hir::db::InferQuery; + fn type_for_def() for hir::db::TypeForDefQuery; } } } diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs index f2912d235..40996bfd7 100644 --- a/crates/ra_analysis/src/imp.rs +++ b/crates/ra_analysis/src/imp.rs @@ -524,7 +524,7 @@ impl AnalysisImpl { let function = ctry!(source_binder::function_from_source( &*self.db, file_id, parent_fn )?); - let infer = function.infer(&*self.db); + let infer = function.infer(&*self.db)?; Ok(infer.type_of_node(node).map(|t| t.to_string())) } diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 61650cee9..594176337 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml @@ -16,3 +16,6 @@ ra_syntax = { path = "../ra_syntax" } ra_editor = { path = "../ra_editor" } ra_db = { path = "../ra_db" } test_utils = { path = "../test_utils" } + +[dev-dependencies] +flexi_logger = "0.10.0" diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index f0bff3c02..d94f75857 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs @@ -14,7 +14,7 @@ use crate::{ function::FnId, module::{ModuleId, ModuleTree, ModuleSource, nameres::{ItemMap, InputModuleItems}}, - ty::InferenceResult, + ty::{InferenceResult, Ty}, }; salsa::query_group! 
{ @@ -31,11 +31,16 @@ pub trait HirDatabase: SyntaxDatabase use fn query_definitions::fn_syntax; } - fn infer(fn_id: FnId) -> Arc { + fn infer(fn_id: FnId) -> Cancelable> { type InferQuery; use fn query_definitions::infer; } + fn type_for_def(def_id: DefId) -> Cancelable { + type TypeForDefQuery; + use fn query_definitions::type_for_def; + } + fn file_items(file_id: FileId) -> Arc { type SourceFileItemsQuery; use fn query_definitions::file_items; diff --git a/crates/ra_hir/src/function.rs b/crates/ra_hir/src/function.rs index 360e9e9a0..d36477b48 100644 --- a/crates/ra_hir/src/function.rs +++ b/crates/ra_hir/src/function.rs @@ -5,12 +5,13 @@ use std::{ sync::Arc, }; +use ra_db::Cancelable; use ra_syntax::{ TextRange, TextUnit, ast::{self, AstNode, DocCommentsOwner, NameOwner}, }; -use crate::{ DefId, HirDatabase, ty::InferenceResult }; +use crate::{ DefId, HirDatabase, ty::InferenceResult, Module }; pub use self::scope::FnScopes; @@ -18,7 +19,7 @@ pub use self::scope::FnScopes; pub struct FnId(pub(crate) DefId); pub struct Function { - fn_id: FnId, + pub(crate) fn_id: FnId, } impl Function { @@ -27,6 +28,10 @@ impl Function { Function { fn_id } } + pub fn syntax(&self, db: &impl HirDatabase) -> ast::FnDefNode { + db.fn_syntax(self.fn_id) + } + pub fn scopes(&self, db: &impl HirDatabase) -> Arc { db.fn_scopes(self.fn_id) } @@ -36,9 +41,14 @@ impl Function { FnSignatureInfo::new(syntax.borrowed()) } - pub fn infer(&self, db: &impl HirDatabase) -> Arc { + pub fn infer(&self, db: &impl HirDatabase) -> Cancelable> { db.infer(self.fn_id) } + + pub fn module(&self, db: &impl HirDatabase) -> Cancelable { + let loc = self.fn_id.0.loc(db); + Module::new(db, loc.source_root_id, loc.module_id) + } } #[derive(Debug, Clone)] diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs index e84f44675..a0d99a84d 100644 --- a/crates/ra_hir/src/lib.rs +++ b/crates/ra_hir/src/lib.rs @@ -29,7 +29,7 @@ mod ty; use std::ops::Index; -use ra_syntax::{SyntaxNodeRef, SyntaxNode}; +use ra_syntax::{SyntaxNodeRef, SyntaxNode, SyntaxKind}; use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable}; use crate::{ @@ -67,6 +67,23 @@ pub struct DefLoc { source_item_id: SourceItemId, } +impl DefKind { + pub(crate) fn for_syntax_kind(kind: SyntaxKind) -> Option { + match kind { + SyntaxKind::FN_DEF => Some(DefKind::Function), + SyntaxKind::MODULE => Some(DefKind::Module), + // These define items, but don't have their own DefKinds yet: + SyntaxKind::STRUCT_DEF => Some(DefKind::Item), + SyntaxKind::ENUM_DEF => Some(DefKind::Item), + SyntaxKind::TRAIT_DEF => Some(DefKind::Item), + SyntaxKind::TYPE_DEF => Some(DefKind::Item), + SyntaxKind::CONST_DEF => Some(DefKind::Item), + SyntaxKind::STATIC_DEF => Some(DefKind::Item), + _ => None, + } + } +} + impl DefId { pub(crate) fn loc(self, db: &impl AsRef>) -> DefLoc { db.as_ref().id2loc(self) diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index 3020ee793..b5a997170 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs @@ -192,6 +192,7 @@ salsa::database_storage! 
{ fn fn_syntax() for db::FnSyntaxQuery; fn submodules() for db::SubmodulesQuery; fn infer() for db::InferQuery; + fn type_for_def() for db::TypeForDefQuery; } } } diff --git a/crates/ra_hir/src/module.rs b/crates/ra_hir/src/module.rs index cd31e8cfe..891119953 100644 --- a/crates/ra_hir/src/module.rs +++ b/crates/ra_hir/src/module.rs @@ -2,6 +2,7 @@ pub(super) mod imp; pub(super) mod nameres; use std::sync::Arc; +use log; use ra_syntax::{ algo::generate, diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs index 39e891cda..0b152a406 100644 --- a/crates/ra_hir/src/module/nameres.rs +++ b/crates/ra_hir/src/module/nameres.rs @@ -272,13 +272,13 @@ where } } } - // Populate explicitelly declared items, except modules + // Populate explicitly declared items, except modules for item in input.items.iter() { if item.kind == MODULE { continue; } let def_loc = DefLoc { - kind: DefKind::Item, + kind: DefKind::for_syntax_kind(item.kind).unwrap_or(DefKind::Item), source_root_id: self.source_root, module_id, source_item_id: SourceItemId { diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs index ccbfdf028..b654af920 100644 --- a/crates/ra_hir/src/query_definitions.rs +++ b/crates/ra_hir/src/query_definitions.rs @@ -11,7 +11,7 @@ use ra_syntax::{ use ra_db::{SourceRootId, FileId, Cancelable,}; use crate::{ - SourceFileItems, SourceItemId, DefKind, + SourceFileItems, SourceItemId, DefKind, Function, DefId, db::HirDatabase, function::{FnScopes, FnId}, module::{ @@ -19,7 +19,7 @@ use crate::{ imp::Submodule, nameres::{InputModuleItems, ItemMap, Resolver}, }, - ty::{self, InferenceResult} + ty::{self, InferenceResult, Ty} }; /// Resolve `FnId` to the corresponding `SyntaxNode` @@ -36,11 +36,13 @@ pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc { Arc::new(res) } -pub(super) fn infer(db: &impl HirDatabase, fn_id: FnId) -> Arc { - let syntax = db.fn_syntax(fn_id); - let scopes = db.fn_scopes(fn_id); - let res = ty::infer(db, syntax.borrowed(), scopes); - Arc::new(res) +pub(super) fn infer(db: &impl HirDatabase, fn_id: FnId) -> Cancelable> { + let function = Function { fn_id }; + ty::infer(db, function).map(Arc::new) +} + +pub(super) fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable { + ty::type_for_def(db, def_id) } pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc { diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 615a1caed..13ee6cb27 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -5,21 +5,17 @@ mod tests; use std::sync::Arc; use std::fmt; +use log; use rustc_hash::{FxHashMap}; -use ra_db::LocalSyntaxPtr; +use ra_db::{LocalSyntaxPtr, Cancelable}; use ra_syntax::{ SmolStr, ast::{self, AstNode, LoopBodyOwner, ArgListOwner}, SyntaxNodeRef }; -use crate::{ - FnScopes, - db::HirDatabase, -}; - -// pub(crate) type TypeId = Id; +use crate::{Def, DefId, FnScopes, Module, Function, Path, db::HirDatabase}; #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Ty { @@ -65,18 +61,6 @@ pub enum Ty { /// `&'a mut T` or `&'a T`. // Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), - /// The anonymous type of a function declaration/definition. Each - /// function has a unique type, which is output (for a function - /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. 
- /// - /// For example the type of `bar` here: - /// - /// ```rust - /// fn foo() -> i32 { 1 } - /// let bar = foo; // bar: fn() -> i32 {foo} - /// ``` - // FnDef(DefId, &'tcx Substs<'tcx>), - /// A pointer to a function. Written as `fn() -> i32`. /// /// For example the type of `bar` here: @@ -85,7 +69,7 @@ pub enum Ty { /// fn foo() -> i32 { 1 } /// let bar: fn() -> i32 = foo; /// ``` - // FnPtr(PolyFnSig<'tcx>), + FnPtr(Arc), /// A trait, defined with `trait`. // Dynamic(Binder<&'tcx List>>, ty::Region<'tcx>), @@ -139,6 +123,12 @@ pub enum Ty { type TyRef = Arc; +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub struct FnSig { + input: Vec, + output: Ty, +} + impl Ty { pub fn new(node: ast::TypeRef) -> Self { use ra_syntax::ast::TypeRef::*; @@ -208,11 +198,55 @@ impl fmt::Display for Ty { } write!(f, ")") } + Ty::FnPtr(sig) => { + write!(f, "fn(")?; + for t in &sig.input { + write!(f, "{},", t)?; + } + write!(f, ") -> {}", sig.output) + } Ty::Unknown => write!(f, "[unknown]"), } } } +pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable { + eprintln!("type_for_fn {:?}", f.fn_id); + let syntax = f.syntax(db); + let node = syntax.borrowed(); + // TODO we ignore type parameters for now + let input = node + .param_list() + .map(|pl| { + pl.params() + .map(|p| p.type_ref().map(|t| Ty::new(t)).unwrap_or(Ty::Unknown)) + .collect() + }) + .unwrap_or_else(Vec::new); + let output = node + .ret_type() + .and_then(|rt| rt.type_ref()) + .map(|t| Ty::new(t)) + .unwrap_or(Ty::Unknown); + let sig = FnSig { input, output }; + Ok(Ty::FnPtr(Arc::new(sig))) +} + +pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable { + let def = def_id.resolve(db)?; + match def { + Def::Module(..) => { + log::debug!("trying to get type for module {:?}", def_id); + Ok(Ty::Unknown) + } + Def::Function(f) => type_for_fn(db, f), + Def::Item => { + log::debug!("trying to get type for item of unknown type {:?}", def_id); + Ok(Ty::Unknown) + } + } +} + #[derive(Clone, PartialEq, Eq, Debug)] pub struct InferenceResult { type_for: FxHashMap, @@ -224,18 +258,22 @@ impl InferenceResult { } } -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct InferenceContext { +#[derive(Clone, Debug)] +pub struct InferenceContext<'a, D: HirDatabase> { + db: &'a D, scopes: Arc, + module: Module, // TODO unification tables... 
type_for: FxHashMap, } -impl InferenceContext { - fn new(scopes: Arc) -> Self { +impl<'a, D: HirDatabase> InferenceContext<'a, D> { + fn new(db: &'a D, scopes: Arc, module: Module) -> Self { InferenceContext { type_for: FxHashMap::default(), + db, scopes, + module, } } @@ -262,36 +300,42 @@ impl InferenceContext { self.unify(ty1, ty2) } - fn infer_path_expr(&mut self, expr: ast::PathExpr) -> Option { - let p = expr.path()?; - if p.qualifier().is_none() { - let name = p.segment().and_then(|s| s.name_ref())?; - let scope_entry = self.scopes.resolve_local_name(name)?; - let ty = self.type_for.get(&scope_entry.ptr())?; - Some(ty.clone()) - } else { - // TODO resolve path - Some(Ty::Unknown) - } + fn infer_path_expr(&mut self, expr: ast::PathExpr) -> Cancelable> { + let ast_path = ctry!(expr.path()); + let path = ctry!(Path::from_ast(ast_path)); + if path.is_ident() { + // resolve locally + let name = ctry!(ast_path.segment().and_then(|s| s.name_ref())); + if let Some(scope_entry) = self.scopes.resolve_local_name(name) { + let ty = ctry!(self.type_for.get(&scope_entry.ptr())); + return Ok(Some(ty.clone())); + }; + }; + + // resolve in module + let resolved = ctry!(self.module.resolve_path(self.db, path)?); + let ty = self.db.type_for_def(resolved)?; + // TODO we will need to add type variables for type parameters etc. here + Ok(Some(ty)) } - fn infer_expr(&mut self, expr: ast::Expr) -> Ty { + fn infer_expr(&mut self, expr: ast::Expr) -> Cancelable { let ty = match expr { ast::Expr::IfExpr(e) => { if let Some(condition) = e.condition() { if let Some(e) = condition.expr() { // TODO if no pat, this should be bool - self.infer_expr(e); + self.infer_expr(e)?; } // TODO write type for pat }; let if_ty = if let Some(block) = e.then_branch() { - self.infer_block(block) + self.infer_block(block)? } else { Ty::Unknown }; let else_ty = if let Some(block) = e.else_branch() { - self.infer_block(block) + self.infer_block(block)? } else { Ty::Unknown }; @@ -304,14 +348,14 @@ impl InferenceContext { } ast::Expr::BlockExpr(e) => { if let Some(block) = e.block() { - self.infer_block(block) + self.infer_block(block)? } else { Ty::Unknown } } ast::Expr::LoopExpr(e) => { if let Some(block) = e.loop_body() { - self.infer_block(block); + self.infer_block(block)?; }; // TODO never, or the type of the break param Ty::Unknown @@ -320,59 +364,69 @@ impl InferenceContext { if let Some(condition) = e.condition() { if let Some(e) = condition.expr() { // TODO if no pat, this should be bool - self.infer_expr(e); + self.infer_expr(e)?; } // TODO write type for pat }; if let Some(block) = e.loop_body() { // TODO - self.infer_block(block); + self.infer_block(block)?; }; // TODO always unit? Ty::Unknown } ast::Expr::ForExpr(e) => { if let Some(expr) = e.iterable() { - self.infer_expr(expr); + self.infer_expr(expr)?; } if let Some(_pat) = e.pat() { // TODO write type for pat } if let Some(block) = e.loop_body() { - self.infer_block(block); + self.infer_block(block)?; } // TODO always unit? Ty::Unknown } ast::Expr::LambdaExpr(e) => { let _body_ty = if let Some(body) = e.body() { - self.infer_expr(body) + self.infer_expr(body)? } else { Ty::Unknown }; Ty::Unknown } ast::Expr::CallExpr(e) => { + let _callee_ty = if let Some(e) = e.expr() { + self.infer_expr(e)? 
+ } else { + Ty::Unknown + }; if let Some(arg_list) = e.arg_list() { for arg in arg_list.args() { // TODO unify / expect argument type - self.infer_expr(arg); + self.infer_expr(arg)?; } } Ty::Unknown } ast::Expr::MethodCallExpr(e) => { + let _receiver_ty = if let Some(e) = e.expr() { + self.infer_expr(e)? + } else { + Ty::Unknown + }; if let Some(arg_list) = e.arg_list() { for arg in arg_list.args() { // TODO unify / expect argument type - self.infer_expr(arg); + self.infer_expr(arg)?; } } Ty::Unknown } ast::Expr::MatchExpr(e) => { let _ty = if let Some(match_expr) = e.expr() { - self.infer_expr(match_expr) + self.infer_expr(match_expr)? } else { Ty::Unknown }; @@ -381,7 +435,7 @@ impl InferenceContext { // TODO type the bindings in pat // TODO type the guard let _ty = if let Some(e) = arm.expr() { - self.infer_expr(e) + self.infer_expr(e)? } else { Ty::Unknown }; @@ -394,12 +448,12 @@ impl InferenceContext { } ast::Expr::TupleExpr(_e) => Ty::Unknown, ast::Expr::ArrayExpr(_e) => Ty::Unknown, - ast::Expr::PathExpr(e) => self.infer_path_expr(e).unwrap_or(Ty::Unknown), + ast::Expr::PathExpr(e) => self.infer_path_expr(e)?.unwrap_or(Ty::Unknown), ast::Expr::ContinueExpr(_e) => Ty::Never, ast::Expr::BreakExpr(_e) => Ty::Never, ast::Expr::ParenExpr(e) => { if let Some(e) = e.expr() { - self.infer_expr(e) + self.infer_expr(e)? } else { Ty::Unknown } @@ -408,7 +462,7 @@ impl InferenceContext { ast::Expr::ReturnExpr(e) => { if let Some(e) = e.expr() { // TODO unify with return type - self.infer_expr(e); + self.infer_expr(e)?; }; Ty::Never } @@ -425,7 +479,7 @@ impl InferenceContext { ast::Expr::FieldExpr(_e) => Ty::Unknown, ast::Expr::TryExpr(e) => { let _inner_ty = if let Some(e) = e.expr() { - self.infer_expr(e) + self.infer_expr(e)? } else { Ty::Unknown }; @@ -433,7 +487,7 @@ impl InferenceContext { } ast::Expr::CastExpr(e) => { let _inner_ty = if let Some(e) = e.expr() { - self.infer_expr(e) + self.infer_expr(e)? } else { Ty::Unknown }; @@ -443,7 +497,7 @@ impl InferenceContext { } ast::Expr::RefExpr(e) => { let _inner_ty = if let Some(e) = e.expr() { - self.infer_expr(e) + self.infer_expr(e)? } else { Ty::Unknown }; @@ -451,7 +505,7 @@ impl InferenceContext { } ast::Expr::PrefixExpr(e) => { let _inner_ty = if let Some(e) = e.expr() { - self.infer_expr(e) + self.infer_expr(e)? } else { Ty::Unknown }; @@ -462,10 +516,10 @@ impl InferenceContext { ast::Expr::Literal(_e) => Ty::Unknown, }; self.write_ty(expr.syntax(), ty.clone()); - ty + Ok(ty) } - fn infer_block(&mut self, node: ast::Block) -> Ty { + fn infer_block(&mut self, node: ast::Block) -> Cancelable { for stmt in node.statements() { match stmt { ast::Stmt::LetStmt(stmt) => { @@ -476,7 +530,7 @@ impl InferenceContext { }; let ty = if let Some(expr) = stmt.initializer() { // TODO pass expectation - let expr_ty = self.infer_expr(expr); + let expr_ty = self.infer_expr(expr)?; self.unify_with_coercion(&expr_ty, &decl_ty) .unwrap_or(decl_ty) } else { @@ -489,23 +543,28 @@ impl InferenceContext { } ast::Stmt::ExprStmt(expr_stmt) => { if let Some(expr) = expr_stmt.expr() { - self.infer_expr(expr); + self.infer_expr(expr)?; } } } } let ty = if let Some(expr) = node.expr() { - self.infer_expr(expr) + self.infer_expr(expr)? 
} else { Ty::unit() }; self.write_ty(node.syntax(), ty.clone()); - ty + Ok(ty) } } -pub fn infer(_db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> InferenceResult { - let mut ctx = InferenceContext::new(scopes); +pub fn infer(db: &impl HirDatabase, function: Function) -> Cancelable { + let scopes = function.scopes(db); + let module = function.module(db)?; + let mut ctx = InferenceContext::new(db, scopes, module); + + let syntax = function.syntax(db); + let node = syntax.borrowed(); if let Some(param_list) = node.param_list() { for param in param_list.params() { @@ -529,12 +588,12 @@ pub fn infer(_db: &impl HirDatabase, node: ast::FnDef, scopes: Arc) -> // (see Expectation in rustc_typeck) if let Some(block) = node.body() { - ctx.infer_block(block); + ctx.infer_block(block)?; } // TODO 'resolve' the types: replace inference variables by their inferred results - InferenceResult { + Ok(InferenceResult { type_for: ctx.type_for, - } + }) } diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 0880b51bc..e0458327a 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -1,5 +1,8 @@ use std::fmt::Write; use std::path::{PathBuf}; +use std::sync::Once; + +use flexi_logger::Logger; use ra_db::{SyntaxDatabase}; use ra_syntax::ast::{self, AstNode}; @@ -22,7 +25,7 @@ fn infer_file(content: &str) -> String { let func = source_binder::function_from_source(&db, file_id, fn_def) .unwrap() .unwrap(); - let inference_result = func.infer(&db); + let inference_result = func.infer(&db).unwrap(); for (syntax_ptr, ty) in &inference_result.type_for { let node = syntax_ptr.resolve(&source_file); write!( @@ -58,6 +61,8 @@ fn ellipsize(mut text: String, max_len: usize) -> String { #[test] pub fn infer_tests() { + static INIT: Once = Once::new(); + INIT.call_once(|| Logger::with_env().start().unwrap()); dir_tests(&test_data_dir(), &["."], |text, _path| infer_file(text)); } diff --git a/crates/ra_hir/src/ty/tests/data/0003_paths.rs b/crates/ra_hir/src/ty/tests/data/0003_paths.rs new file mode 100644 index 000000000..e8b11198b --- /dev/null +++ b/crates/ra_hir/src/ty/tests/data/0003_paths.rs @@ -0,0 +1,10 @@ +fn a() -> u32 { 1 } + +mod b { + fn c() -> u32 { 1 } +} + +fn test() { + a(); + b::c(); +} diff --git a/crates/ra_hir/src/ty/tests/data/0003_paths.txt b/crates/ra_hir/src/ty/tests/data/0003_paths.txt new file mode 100644 index 000000000..3a53370a2 --- /dev/null +++ b/crates/ra_hir/src/ty/tests/data/0003_paths.txt @@ -0,0 +1,9 @@ +[16; 17) '1': [unknown] +[14; 19) '{ 1 }': [unknown] +[47; 52) '{ 1 }': [unknown] +[49; 50) '1': [unknown] +[81; 87) 'b::c()': [unknown] +[66; 90) '{ ...c(); }': () +[72; 73) 'a': fn() -> u32 +[72; 75) 'a()': [unknown] +[81; 85) 'b::c': fn() -> u32 diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index b15c4ef6f..c73533861 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs @@ -3083,7 +3083,11 @@ impl> RetTypeNode { } -impl<'a> RetType<'a> {} +impl<'a> RetType<'a> { + pub fn type_ref(self) -> Option> { + super::child_opt(self) + } +} // ReturnExpr #[derive(Debug, Clone, Copy,)] diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index 8dca493ee..e3b9032a0 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron @@ -254,7 +254,7 @@ Grammar( ], options: [ "ParamList", ["body", "Block"], "RetType" ], ), - "RetType": (), + "RetType": (options: ["TypeRef"]), "StructDef": ( traits: [ "NameOwner", 
-- cgit v1.2.3 From c85748f5fbe308610261da91aeb978eca751a0ff Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 17:16:47 +0100 Subject: Type the return values of call expressions --- crates/ra_hir/src/ty.rs | 14 +++++++++++--- crates/ra_hir/src/ty/tests/data/0003_paths.txt | 4 ++-- 2 files changed, 13 insertions(+), 5 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 13ee6cb27..92de9842c 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -211,7 +211,6 @@ impl fmt::Display for Ty { } pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable { - eprintln!("type_for_fn {:?}", f.fn_id); let syntax = f.syntax(db); let node = syntax.borrowed(); // TODO we ignore type parameters for now @@ -397,7 +396,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { Ty::Unknown } ast::Expr::CallExpr(e) => { - let _callee_ty = if let Some(e) = e.expr() { + let callee_ty = if let Some(e) = e.expr() { self.infer_expr(e)? } else { Ty::Unknown @@ -408,7 +407,16 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { self.infer_expr(arg)?; } } - Ty::Unknown + match callee_ty { + Ty::FnPtr(sig) => { + sig.output.clone() + } + _ => { + // not callable + // TODO report an error? + Ty::Unknown + } + } } ast::Expr::MethodCallExpr(e) => { let _receiver_ty = if let Some(e) = e.expr() { diff --git a/crates/ra_hir/src/ty/tests/data/0003_paths.txt b/crates/ra_hir/src/ty/tests/data/0003_paths.txt index 3a53370a2..acde9b7ad 100644 --- a/crates/ra_hir/src/ty/tests/data/0003_paths.txt +++ b/crates/ra_hir/src/ty/tests/data/0003_paths.txt @@ -2,8 +2,8 @@ [14; 19) '{ 1 }': [unknown] [47; 52) '{ 1 }': [unknown] [49; 50) '1': [unknown] -[81; 87) 'b::c()': [unknown] +[81; 87) 'b::c()': u32 [66; 90) '{ ...c(); }': () [72; 73) 'a': fn() -> u32 -[72; 75) 'a()': [unknown] +[72; 75) 'a()': u32 [81; 85) 'b::c': fn() -> u32 -- cgit v1.2.3 From a1d0b5bc3c09ef511db442c39ff382e8bdd23276 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Sun, 23 Dec 2018 17:33:02 +0100 Subject: Prepare Ty::new for resolution --- crates/ra_hir/src/ty.rs | 38 ++++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 92de9842c..7905d86a1 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -130,9 +130,9 @@ pub struct FnSig { } impl Ty { - pub fn new(node: ast::TypeRef) -> Self { + pub fn new(_db: &impl HirDatabase, node: ast::TypeRef) -> Cancelable { use ra_syntax::ast::TypeRef::*; - match node { + Ok(match node { ParenType(_inner) => Ty::Unknown, // TODO TupleType(_inner) => Ty::Unknown, // TODO NeverType(..) 
=> Ty::Never, @@ -140,7 +140,7 @@ impl Ty { let path = if let Some(p) = inner.path() { p } else { - return Ty::Unknown; + return Ok(Ty::Unknown); }; if path.qualifier().is_none() { let name = path @@ -172,7 +172,7 @@ impl Ty { ForType(_inner) => Ty::Unknown, // TODO ImplTraitType(_inner) => Ty::Unknown, // TODO DynTraitType(_inner) => Ty::Unknown, // TODO - } + }) } pub fn unit() -> Self { @@ -218,19 +218,28 @@ pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable { .param_list() .map(|pl| { pl.params() - .map(|p| p.type_ref().map(|t| Ty::new(t)).unwrap_or(Ty::Unknown)) + .map(|p| { + p.type_ref() + .map(|t| Ty::new(db, t)) + .unwrap_or(Ok(Ty::Unknown)) + }) .collect() }) - .unwrap_or_else(Vec::new); + .unwrap_or_else(|| Ok(Vec::new()))?; let output = node .ret_type() .and_then(|rt| rt.type_ref()) - .map(|t| Ty::new(t)) - .unwrap_or(Ty::Unknown); + .map(|t| Ty::new(db, t)) + .unwrap_or(Ok(Ty::Unknown))?; let sig = FnSig { input, output }; Ok(Ty::FnPtr(Arc::new(sig))) } +// TODO this should probably be per namespace (i.e. types vs. values), since for +// a tuple struct `struct Foo(Bar)`, Foo has function type as a value, but +// defines the struct type Foo when used in the type namespace. rustc has a +// separate DefId for the constructor, but with the current DefId approach, that +// seems complicated. pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable { let def = def_id.resolve(db)?; match def { @@ -408,9 +417,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } } match callee_ty { - Ty::FnPtr(sig) => { - sig.output.clone() - } + Ty::FnPtr(sig) => sig.output.clone(), _ => { // not callable // TODO report an error? @@ -499,7 +506,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } else { Ty::Unknown }; - let cast_ty = e.type_ref().map(Ty::new).unwrap_or(Ty::Unknown); + let cast_ty = e + .type_ref() + .map(|t| Ty::new(self.db, t)) + .unwrap_or(Ok(Ty::Unknown))?; // TODO do the coercion... cast_ty } @@ -532,7 +542,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { match stmt { ast::Stmt::LetStmt(stmt) => { let decl_ty = if let Some(type_ref) = stmt.type_ref() { - Ty::new(type_ref) + Ty::new(self.db, type_ref)? } else { Ty::Unknown }; @@ -582,7 +592,7 @@ pub fn infer(db: &impl HirDatabase, function: Function) -> Cancelable Date: Mon, 24 Dec 2018 15:17:34 +0100 Subject: Clean up Ty a bit Removing irrelevant comments copied from rustc etc. --- crates/ra_hir/src/ty.rs | 60 ++++++++++++++++++------------------------------- 1 file changed, 22 insertions(+), 38 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 7905d86a1..e1edf1bff 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -35,32 +35,22 @@ pub enum Ty { /// A primitive floating-point type. For example, `f64`. Float(primitive::FloatTy), - /// Structures, enumerations and unions. - /// - /// Substs here, possibly against intuition, *may* contain `Param`s. - /// That is, even after substitution it is possible that there are type - /// variables. This happens when the `Adt` corresponds to an ADT - /// definition and not a concrete use of it. - // Adt(&'tcx AdtDef, &'tcx Substs<'tcx>), - - // Foreign(DefId), - + // Structures, enumerations and unions. + // Adt(AdtDef, Substs), /// The pointee of a string slice. Written as `str`. Str, - /// An array with the given length. Written as `[T; n]`. - // Array(Ty<'tcx>, &'tcx ty::Const<'tcx>), - + // An array with the given length. Written as `[T; n]`. 
+ // Array(Ty, ty::Const), /// The pointee of an array slice. Written as `[T]`. Slice(TyRef), - /// A raw pointer. Written as `*mut T` or `*const T` + // A raw pointer. Written as `*mut T` or `*const T` // RawPtr(TypeAndMut<'tcx>), - /// A reference; a pointer with an associated lifetime. Written as - /// `&'a mut T` or `&'a T`. - // Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), - + // A reference; a pointer with an associated lifetime. Written as + // `&'a mut T` or `&'a T`. + // Ref(Ty<'tcx>, hir::Mutability), /// A pointer to a function. Written as `fn() -> i32`. /// /// For example the type of `bar` here: @@ -71,9 +61,8 @@ pub enum Ty { /// ``` FnPtr(Arc), - /// A trait, defined with `trait`. - // Dynamic(Binder<&'tcx List>>, ty::Region<'tcx>), - + // A trait, defined with `dyn trait`. + // Dynamic(), /// The anonymous type of a closure. Used to represent the type of /// `|a| a`. // Closure(DefId, ClosureSubsts<'tcx>), @@ -92,30 +81,25 @@ pub enum Ty { /// A tuple type. For example, `(i32, bool)`. Tuple(Vec), - /// The projection of an associated type. For example, - /// `>::N`. - // Projection(ProjectionTy<'tcx>), + // The projection of an associated type. For example, + // `>::N`. + // Projection(ProjectionTy), - /// Opaque (`impl Trait`) type found in a return type. - /// The `DefId` comes either from - /// * the `impl Trait` ast::Ty node, - /// * or the `existential type` declaration - /// The substitutions are for the generics of the function in question. - /// After typeck, the concrete type can be found in the `types` map. - // Opaque(DefId, &'tcx Substs<'tcx>), + // Opaque (`impl Trait`) type found in a return type. + // The `DefId` comes either from + // * the `impl Trait` ast::Ty node, + // * or the `existential type` declaration + // The substitutions are for the generics of the function in question. + // Opaque(DefId, Substs), - /// A type parameter; for example, `T` in `fn f(x: T) {} + // A type parameter; for example, `T` in `fn f(x: T) {} // Param(ParamTy), - /// Bound type variable, used only when preparing a trait query. - // Bound(ty::DebruijnIndex, BoundTy), - - /// A placeholder type - universally quantified higher-ranked type. + // A placeholder type - universally quantified higher-ranked type. // Placeholder(ty::PlaceholderType), - /// A type variable used during type checking. + // A type variable used during type checking. // Infer(InferTy), - /// A placeholder for a type which could not be computed; this is /// propagated to avoid useless error messages. 
Unknown, -- cgit v1.2.3 From 655f5bc26190b94e237dcc485e405de0d192e6ab Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Mon, 24 Dec 2018 15:19:49 +0100 Subject: Rename a variable for consistency --- crates/ra_hir/src/ty.rs | 18 +++++++++--------- crates/ra_hir/src/ty/tests.rs | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) (limited to 'crates') diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index e1edf1bff..c759d4c8b 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -241,12 +241,12 @@ pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable { #[derive(Clone, PartialEq, Eq, Debug)] pub struct InferenceResult { - type_for: FxHashMap, + type_of: FxHashMap, } impl InferenceResult { pub fn type_of_node(&self, node: SyntaxNodeRef) -> Option { - self.type_for.get(&LocalSyntaxPtr::new(node)).cloned() + self.type_of.get(&LocalSyntaxPtr::new(node)).cloned() } } @@ -256,13 +256,13 @@ pub struct InferenceContext<'a, D: HirDatabase> { scopes: Arc, module: Module, // TODO unification tables... - type_for: FxHashMap, + type_of: FxHashMap, } impl<'a, D: HirDatabase> InferenceContext<'a, D> { fn new(db: &'a D, scopes: Arc, module: Module) -> Self { InferenceContext { - type_for: FxHashMap::default(), + type_of: FxHashMap::default(), db, scopes, module, @@ -270,7 +270,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { } fn write_ty(&mut self, node: SyntaxNodeRef, ty: Ty) { - self.type_for.insert(LocalSyntaxPtr::new(node), ty); + self.type_of.insert(LocalSyntaxPtr::new(node), ty); } fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> Option { @@ -299,7 +299,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { // resolve locally let name = ctry!(ast_path.segment().and_then(|s| s.name_ref())); if let Some(scope_entry) = self.scopes.resolve_local_name(name) { - let ty = ctry!(self.type_for.get(&scope_entry.ptr())); + let ty = ctry!(self.type_of.get(&scope_entry.ptr())); return Ok(Some(ty.clone())); }; }; @@ -577,10 +577,10 @@ pub fn infer(db: &impl HirDatabase, function: Function) -> Cancelable Cancelable String { .unwrap() .unwrap(); let inference_result = func.infer(&db).unwrap(); - for (syntax_ptr, ty) in &inference_result.type_for { + for (syntax_ptr, ty) in &inference_result.type_of { let node = syntax_ptr.resolve(&source_file); write!( acc, -- cgit v1.2.3 From 4befde1eee5b1e2b7ddc9bf764b77f82b792c318 Mon Sep 17 00:00:00 2001 From: Florian Diebold Date: Mon, 24 Dec 2018 15:36:54 +0100 Subject: Change inference tests to have one per file --- crates/ra_hir/src/ty/tests.rs | 89 ++++++++++++++++++++++---- crates/ra_hir/src/ty/tests/data/0001_basics.rs | 11 ---- crates/ra_hir/src/ty/tests/data/0002_let.rs | 5 -- crates/ra_hir/src/ty/tests/data/0002_let.txt | 14 ++-- crates/ra_hir/src/ty/tests/data/0003_paths.rs | 10 --- crates/ra_hir/src/ty/tests/data/0003_paths.txt | 18 +++--- 6 files changed, 92 insertions(+), 55 deletions(-) delete mode 100644 crates/ra_hir/src/ty/tests/data/0001_basics.rs delete mode 100644 crates/ra_hir/src/ty/tests/data/0002_let.rs delete mode 100644 crates/ra_hir/src/ty/tests/data/0003_paths.rs (limited to 'crates') diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 021227749..b6c02cd80 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -1,19 +1,74 @@ use std::fmt::Write; -use std::path::{PathBuf}; -use std::sync::Once; - -use flexi_logger::Logger; +use std::path::{PathBuf, Path}; +use std::fs; use ra_db::{SyntaxDatabase}; use ra_syntax::ast::{self, AstNode}; -use 
test_utils::{project_dir, dir_tests}; +use test_utils::{project_dir, assert_eq_text, read_text}; use crate::{ source_binder, mock::MockDatabase, }; -fn infer_file(content: &str) -> String { +// These tests compare the inference results for all expressions in a file +// against snapshots of the current results. If you change something and these +// tests fail expectedly, you can update the comparison files by deleting them +// and running the tests again. Similarly, to add a new test, just write the +// test here in the same pattern and it will automatically write the snapshot. + +#[test] +fn infer_basics() { + check_inference( + r#" +fn test(a: u32, b: isize, c: !, d: &str) { + a; + b; + c; + d; + 1usize; + 1isize; + "test"; + 1.0f32; +}"#, + "0001_basics.txt", + ); +} + +#[test] +fn infer_let() { + check_inference( + r#" +fn test() { + let a = 1isize; + let b: usize = 1; + let c = b; +} +}"#, + "0002_let.txt", + ); +} + +#[test] +fn infer_paths() { + check_inference( + r#" +fn a() -> u32 { 1 } + +mod b { + fn c() -> u32 { 1 } +} + +fn test() { + a(); + b::c(); +} +}"#, + "0003_paths.txt", + ); +} + +fn infer(content: &str) -> String { let (db, _, file_id) = MockDatabase::with_single_file(content); let source_file = db.source_file(file_id); let mut acc = String::new(); @@ -41,6 +96,21 @@ fn infer_file(content: &str) -> String { acc } +fn check_inference(content: &str, data_file: impl AsRef) { + let data_file_path = test_data_dir().join(data_file); + let result = infer(content); + + if !data_file_path.exists() { + println!("File with expected result doesn't exist, creating...\n"); + println!("{}\n{}", content, result); + fs::write(&data_file_path, &result).unwrap(); + panic!("File {:?} with expected result was created", data_file_path); + } + + let expected = read_text(&data_file_path); + assert_eq_text!(&expected, &result); +} + fn ellipsize(mut text: String, max_len: usize) -> String { if text.len() <= max_len { return text; @@ -59,13 +129,6 @@ fn ellipsize(mut text: String, max_len: usize) -> String { text } -#[test] -pub fn infer_tests() { - static INIT: Once = Once::new(); - INIT.call_once(|| Logger::with_env().start().unwrap()); - dir_tests(&test_data_dir(), &["."], |text, _path| infer_file(text)); -} - fn test_data_dir() -> PathBuf { project_dir().join("crates/ra_hir/src/ty/tests/data") } diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.rs b/crates/ra_hir/src/ty/tests/data/0001_basics.rs deleted file mode 100644 index 59a60d031..000000000 --- a/crates/ra_hir/src/ty/tests/data/0001_basics.rs +++ /dev/null @@ -1,11 +0,0 @@ - -fn test(a: u32, b: isize, c: !, d: &str) { - a; - b; - c; - d; - 1usize; - 1isize; - "test"; - 1.0f32; -} diff --git a/crates/ra_hir/src/ty/tests/data/0002_let.rs b/crates/ra_hir/src/ty/tests/data/0002_let.rs deleted file mode 100644 index 5641da75b..000000000 --- a/crates/ra_hir/src/ty/tests/data/0002_let.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn test() { - let a = 1isize; - let b: usize = 1; - let c = b; -} diff --git a/crates/ra_hir/src/ty/tests/data/0002_let.txt b/crates/ra_hir/src/ty/tests/data/0002_let.txt index 5f515ee59..2d0d1f57b 100644 --- a/crates/ra_hir/src/ty/tests/data/0002_let.txt +++ b/crates/ra_hir/src/ty/tests/data/0002_let.txt @@ -1,7 +1,7 @@ -[51; 52) '1': [unknown] -[10; 70) '{ ...= b; }': () -[24; 30) '1isize': [unknown] -[20; 21) 'a': [unknown] -[62; 63) 'c': usize -[66; 67) 'b': usize -[40; 41) 'b': usize +[21; 22) 'a': [unknown] +[52; 53) '1': [unknown] +[11; 71) '{ ...= b; }': () +[63; 64) 'c': usize +[25; 31) '1isize': [unknown] +[41; 
42) 'b': usize +[67; 68) 'b': usize diff --git a/crates/ra_hir/src/ty/tests/data/0003_paths.rs b/crates/ra_hir/src/ty/tests/data/0003_paths.rs deleted file mode 100644 index e8b11198b..000000000 --- a/crates/ra_hir/src/ty/tests/data/0003_paths.rs +++ /dev/null @@ -1,10 +0,0 @@ -fn a() -> u32 { 1 } - -mod b { - fn c() -> u32 { 1 } -} - -fn test() { - a(); - b::c(); -} diff --git a/crates/ra_hir/src/ty/tests/data/0003_paths.txt b/crates/ra_hir/src/ty/tests/data/0003_paths.txt index acde9b7ad..dcb5456ae 100644 --- a/crates/ra_hir/src/ty/tests/data/0003_paths.txt +++ b/crates/ra_hir/src/ty/tests/data/0003_paths.txt @@ -1,9 +1,9 @@ -[16; 17) '1': [unknown] -[14; 19) '{ 1 }': [unknown] -[47; 52) '{ 1 }': [unknown] -[49; 50) '1': [unknown] -[81; 87) 'b::c()': u32 -[66; 90) '{ ...c(); }': () -[72; 73) 'a': fn() -> u32 -[72; 75) 'a()': u32 -[81; 85) 'b::c': fn() -> u32 +[15; 20) '{ 1 }': [unknown] +[17; 18) '1': [unknown] +[50; 51) '1': [unknown] +[48; 53) '{ 1 }': [unknown] +[82; 88) 'b::c()': u32 +[67; 91) '{ ...c(); }': () +[73; 74) 'a': fn() -> u32 +[73; 76) 'a()': u32 +[82; 86) 'b::c': fn() -> u32 -- cgit v1.2.3
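
The commits above introduce two inference rules that are easy to lose in the collapsed diff context: `unify` treats `Ty::Unknown` as a wildcard, so a `let` binding takes whichever of the declared type and the initializer type is actually known, and a call expression takes the return type stored in the callee's `FnPtr` signature. The sketch below is not rust-analyzer code — `Ty`, `unify`, `infer_let`, and `infer_call` are simplified stand-ins written for illustration, with no salsa database, `Cancelable`, or syntax tree — but it is runnable and mirrors the logic of the `LetStmt` and `CallExpr` arms added in these patches.

```rust
// Minimal model of the unification and call-typing rules from the patches.
// All names are illustrative; the real code lives in crates/ra_hir/src/ty.rs.

#[derive(Clone, Debug, PartialEq, Eq)]
enum Ty {
    Bool,
    Uint,                      // stands in for Ty::Uint(primitive::UintTy)
    FnPtr { output: Box<Ty> }, // stands in for Ty::FnPtr(Arc<FnSig>)
    Unknown,                   // "couldn't infer (yet)"
}

/// Returns the more specific of two types if they are compatible, `None` if
/// they conflict. `Unknown` unifies with anything, like the `unify` added in
/// the "Make let statements kind of work" commit.
fn unify(ty1: &Ty, ty2: &Ty) -> Option<Ty> {
    match (ty1, ty2) {
        (Ty::Unknown, _) => Some(ty2.clone()),
        (_, Ty::Unknown) => Some(ty1.clone()),
        _ if ty1 == ty2 => Some(ty1.clone()),
        _ => None,
    }
}

/// Models `let pat: decl_ty = init;` — the binding gets the unified type and
/// falls back to the declared type on a mismatch (where the real code would
/// eventually report a diagnostic).
fn infer_let(decl_ty: Ty, init_ty: Ty) -> Ty {
    unify(&init_ty, &decl_ty).unwrap_or(decl_ty)
}

/// Models the `CallExpr` arm: a callee with a function type yields the
/// signature's return type; anything else stays unknown.
fn infer_call(callee_ty: &Ty) -> Ty {
    match callee_ty {
        Ty::FnPtr { output } => (**output).clone(),
        _ => Ty::Unknown,
    }
}

fn main() {
    // `let b: usize = 1;` — annotation known, literal not yet typed.
    assert_eq!(infer_let(Ty::Uint, Ty::Unknown), Ty::Uint);
    // `let c = b;` — no annotation, initializer known.
    assert_eq!(infer_let(Ty::Unknown, Ty::Uint), Ty::Uint);
    // Conflicting types fall back to the declared type.
    assert_eq!(infer_let(Ty::Bool, Ty::Uint), Ty::Bool);
    // `a()` where `a: fn() -> u32`, as in the 0003_paths test data.
    let a_ty = Ty::FnPtr { output: Box::new(Ty::Uint) };
    assert_eq!(infer_call(&a_ty), Ty::Uint);
}
```

In the real crates this logic is threaded through salsa queries (`infer`, `type_for_def`) and returns `Cancelable` results, which is why the later commits change so many signatures from plain values to `Cancelable<...>`.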
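
The last commit also changes how the inference tests work: instead of a `.rs`/`.txt` pair per test driven by `dir_tests`, each test inlines its source and compares the pretty-printed inference result against a snapshot file, creating the file on the first run. A generic version of that pattern, with assumed names (`check_snapshot` is not the real helper), looks roughly like this:

```rust
use std::{fs, path::Path};

/// Compare `actual` against a snapshot file. A missing snapshot is written
/// out and the test fails once, so the next run compares against it — the
/// same flow as `check_inference` in crates/ra_hir/src/ty/tests.rs.
fn check_snapshot(actual: &str, snapshot: &Path) {
    if !snapshot.exists() {
        fs::write(snapshot, actual).unwrap();
        panic!("created missing snapshot {:?}; review it and re-run", snapshot);
    }
    let expected = fs::read_to_string(snapshot).unwrap();
    assert_eq!(expected, actual, "snapshot {:?} is out of date", snapshot);
}
```

To update a snapshot after an intentional change, delete the `.txt` file and re-run the test, as the comment added at the top of `tests.rs` describes.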