@@ -1,12 +1,19 @@
 //! FIXME: write short doc here
 
-pub(crate) mod validation;
-
 use std::sync::Arc;
 
+use hir_def::path::known;
+use hir_expand::diagnostics::DiagnosticSink;
+use ra_syntax::ast;
 use ra_syntax::AstPtr;
+use rustc_hash::FxHashSet;
 
-use crate::{db::HirDatabase, DefWithBody, HasBody, Resolver};
+use crate::{
+    db::HirDatabase,
+    diagnostics::{MissingFields, MissingOkInTailExpr},
+    ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
+    Adt, DefWithBody, Function, HasBody, Name, Path, Resolver,
+};
 
 pub use hir_def::{
    body::{
@@ -43,191 +50,121 @@ pub(crate) fn resolver_for_scope(
     r
 }
 
-#[cfg(test)]
-mod tests {
-    use hir_expand::Source;
-    use ra_db::{fixture::WithFixture, SourceDatabase};
-    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
-    use test_utils::{assert_eq_text, extract_offset};
-
-    use crate::{source_binder::SourceAnalyzer, test_db::TestDB};
-
-    fn do_check(code: &str, expected: &[&str]) {
-        let (off, code) = extract_offset(code);
-        let code = {
-            let mut buf = String::new();
-            let off = u32::from(off) as usize;
-            buf.push_str(&code[..off]);
-            buf.push_str("marker");
-            buf.push_str(&code[off..]);
-            buf
-        };
+pub(crate) struct ExprValidator<'a, 'b: 'a> {
+    func: Function,
+    infer: Arc<InferenceResult>,
+    sink: &'a mut DiagnosticSink<'b>,
+}
 
-        let (db, file_id) = TestDB::with_single_file(&code);
-
-        let file = db.parse(file_id).ok().unwrap();
-        let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
-        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
-
-        let scopes = analyzer.scopes();
-        let expr_id = analyzer
-            .body_source_map()
-            .node_expr(Source { file_id: file_id.into(), ast: &marker.into() })
-            .unwrap();
-        let scope = scopes.scope_for(expr_id);
-
-        let actual = scopes
-            .scope_chain(scope)
-            .flat_map(|scope| scopes.entries(scope))
-            .map(|it| it.name().to_string())
-            .collect::<Vec<_>>()
-            .join("\n");
-        let expected = expected.join("\n");
-        assert_eq_text!(&expected, &actual);
+impl<'a, 'b> ExprValidator<'a, 'b> {
+    pub(crate) fn new(
+        func: Function,
+        infer: Arc<InferenceResult>,
+        sink: &'a mut DiagnosticSink<'b>,
+    ) -> ExprValidator<'a, 'b> {
+        ExprValidator { func, infer, sink }
     }
 
-    #[test]
-    fn test_lambda_scope() {
-        do_check(
-            r"
-            fn quux(foo: i32) {
-                let f = |bar, baz: i32| {
-                    <|>
-                };
-            }",
-            &["bar", "baz", "foo"],
-        );
-    }
+    pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
+        let body = self.func.body(db);
 
-    #[test]
-    fn test_call_scope() {
-        do_check(
-            r"
-            fn quux() {
-                f(|x| <|> );
-            }",
-            &["x"],
-        );
-    }
+        for e in body.exprs() {
+            if let (id, Expr::RecordLit { path, fields, spread }) = e {
+                self.validate_record_literal(id, path, fields, *spread, db);
+            }
+        }
 
-    #[test]
-    fn test_method_call_scope() {
-        do_check(
-            r"
-            fn quux() {
-                z.f(|x| <|> );
-            }",
-            &["x"],
-        );
+        let body_expr = &body[body.body_expr()];
+        if let Expr::Block { statements: _, tail: Some(t) } = body_expr {
+            self.validate_results_in_tail_expr(body.body_expr(), *t, db);
+        }
     }
 
-    #[test]
-    fn test_loop_scope() {
-        do_check(
-            r"
-            fn quux() {
-                loop {
-                    let x = ();
-                    <|>
-                };
-            }",
-            &["x"],
-        );
-    }
+    fn validate_record_literal(
+        &mut self,
+        id: ExprId,
+        _path: &Option<Path>,
+        fields: &[RecordLitField],
+        spread: Option<ExprId>,
+        db: &impl HirDatabase,
+    ) {
+        if spread.is_some() {
+            return;
+        }
+
+        let struct_def = match self.infer[id].as_adt() {
+            Some((Adt::Struct(s), _)) => s,
+            _ => return,
+        };
 
-    #[test]
-    fn test_match() {
-        do_check(
-            r"
-            fn quux() {
-                match () {
-                    Some(x) => {
-                        <|>
+        let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+        let missed_fields: Vec<Name> = struct_def
+            .fields(db)
+            .iter()
+            .filter_map(|f| {
+                let name = f.name(db);
+                if lit_fields.contains(&name) {
+                    None
+                } else {
+                    Some(name)
+                }
+            })
+            .collect();
+        if missed_fields.is_empty() {
+            return;
+        }
+        let source_map = self.func.body_source_map(db);
+
+        if let Some(source_ptr) = source_map.expr_syntax(id) {
+            if let Some(expr) = source_ptr.ast.a() {
+                let root = source_ptr.file_syntax(db);
+                if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
+                    if let Some(field_list) = record_lit.record_field_list() {
+                        self.sink.push(MissingFields {
+                            file: source_ptr.file_id,
+                            field_list: AstPtr::new(&field_list),
+                            missed_fields,
+                        })
                     }
-                };
-            }",
-            &["x"],
-        );
-    }
-
-    #[test]
-    fn test_shadow_variable() {
-        do_check(
-            r"
-            fn foo(x: String) {
-                let x : &str = &x<|>;
-            }",
-            &["x"],
-        );
+                }
+            }
+        }
     }
 
-    fn do_check_local_name(code: &str, expected_offset: u32) {
-        let (off, code) = extract_offset(code);
-
-        let (db, file_id) = TestDB::with_single_file(&code);
-        let file = db.parse(file_id).ok().unwrap();
-        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
-            .expect("failed to find a name at the target offset");
-        let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
-        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
+    fn validate_results_in_tail_expr(
+        &mut self,
+        body_id: ExprId,
+        id: ExprId,
+        db: &impl HirDatabase,
+    ) {
+        // the mismatch will be on the whole block currently
+        let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
+            Some(m) => m,
+            None => return,
+        };
 
-        let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
-        let local_name =
-            local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
-        assert_eq!(local_name.range(), expected_name.syntax().text_range());
-    }
+        let std_result_path = known::std_result_result();
 
-    #[test]
-    fn test_resolve_local_name() {
-        do_check_local_name(
-            r#"
-            fn foo(x: i32, y: u32) {
-                {
-                    let z = x * 2;
-                }
-                {
-                    let t = x<|> * 3;
-                }
-            }"#,
-            21,
-        );
-    }
+        let resolver = self.func.resolver(db);
+        let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
+            Some(it) => it,
+            _ => return,
+        };
 
-    #[test]
-    fn test_resolve_local_name_declaration() {
-        do_check_local_name(
-            r#"
-            fn foo(x: String) {
-                let x : &str = &x<|>;
-            }"#,
-            21,
-        );
-    }
+        let std_result_ctor = TypeCtor::Adt(Adt::Enum(std_result_enum));
+        let params = match &mismatch.expected {
+            Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &std_result_ctor => parameters,
+            _ => return,
+        };
 
-    #[test]
-    fn test_resolve_local_name_shadow() {
-        do_check_local_name(
-            r"
-            fn foo(x: String) {
-                let x : &str = &x;
-                x<|>
-            }
-            ",
-            53,
-        );
-    }
+        if params.len() == 2 && &params[0] == &mismatch.actual {
+            let source_map = self.func.body_source_map(db);
 
-    #[test]
-    fn ref_patterns_contribute_bindings() {
-        do_check_local_name(
-            r"
-            fn foo() {
-                if let Some(&from) = bar() {
-                    from<|>;
+            if let Some(source_ptr) = source_map.expr_syntax(id) {
+                if let Some(expr) = source_ptr.ast.a() {
+                    self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
                 }
             }
-            ",
-            53,
-        );
+        }
     }
 }
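
For orientation, a minimal sketch of how the new ExprValidator is presumably driven from a diagnostics entry point: build it from the function, the body's inference result, and a DiagnosticSink, then call validate_body. The check_function_body helper and the func.infer(db) accessor are assumptions for illustration, not part of this diff.

// Sketch only: assumed wiring for ExprValidator, not part of this change.
fn check_function_body(db: &impl HirDatabase, func: Function, sink: &mut DiagnosticSink<'_>) {
    // Assumed: `Function::infer` returns the Arc<InferenceResult> for the body.
    let infer = func.infer(db);
    // Walks every body expression once; pushes MissingFields for record literals
    // with omitted fields, and MissingOkInTailExpr when the tail expression has
    // type T but the expected type is Result<T, E>.
    ExprValidator::new(func, infer, sink).validate_body(db);
}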