Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-27 05:23:24 +00:00)
4220: Introduce LowerCtx r=matklad a=edwin0cheng

This PR introduces `LowerCtx` for path lowering. After this PR, only 2 places remain that use the deprecated `Path::from_ast`; they are related to `AstTransform`, which I am not familiar with. I would like to leave those for another PR by others ;)

Related discussion: https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0/topic/Path.3A.3Afrom_src

This also fixes part of https://github.com/rust-analyzer/rust-analyzer/issues/4176#issuecomment-620672930

4240: Bump deps r=matklad a=lnicola

Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
Co-authored-by: Laurențiu Nicola <lnicola@dend.ro>
This commit is contained in: commit 3232fd5179
24 changed files with 293 additions and 127 deletions
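The `LowerCtx` introduced in this diff is a small wrapper around `Hygiene` that the lowering code threads through `TypeRef::from_ast`, `TypeBound::from_ast`, `GenericArgs::from_ast` and related functions, instead of each call site going through the hygiene-unaware `Path::from_ast`. Below is a hypothetical, self-contained sketch of that pattern only; the `Hygiene`, `Path`, integer file id and string-splitting `lower_path` are simplified stand-ins for illustration, not rust-analyzer's actual API.

// Simplified sketch of the LowerCtx pattern: a lowering context built once per
// file and threaded through every lowering helper. `Hygiene` and `Path` here are
// toy stand-ins, not rust-analyzer's real types.

#[derive(Clone, Debug)]
struct Hygiene {
    // In rust-analyzer, hygiene records (among other things) what `$crate` expands to.
    // Here it is just a tag standing in for that information. (hypothetical)
    file_id: u32,
}

#[derive(Debug)]
struct Path {
    segments: Vec<String>,
}

struct LowerCtx {
    hygiene: Hygiene,
}

impl LowerCtx {
    // The real constructor takes `&dyn DefDatabase` and a `HirFileId`; the u32 is a stand-in.
    fn new(file_id: u32) -> Self {
        LowerCtx { hygiene: Hygiene { file_id } }
    }

    // Mirrors the `with_hygiene` constructor in the diff: reuse an existing Hygiene.
    fn with_hygiene(hygiene: &Hygiene) -> Self {
        LowerCtx { hygiene: hygiene.clone() }
    }

    // Stands in for `Path::from_src(ast, &self.hygiene)`; the parsing here is a toy.
    fn lower_path(&self, raw: &str) -> Option<Path> {
        if raw.is_empty() {
            return None;
        }
        Some(Path { segments: raw.split("::").map(|s| s.to_string()).collect() })
    }
}

fn main() {
    // Callers construct the context once and reuse it for every path or type they
    // lower, rather than re-deriving (or skipping) hygiene at each call site.
    let ctx = LowerCtx::new(0);
    let path = ctx.lower_path("foo::Bar").expect("non-empty path");
    println!("lowered {:?} with hygiene {:?}", path.segments, ctx.hygiene);

    let _ctx2 = LowerCtx::with_hygiene(&ctx.hygiene);
}

Threading one context through the lowering functions means hygiene is computed once per file rather than being lost at each `from_ast` call, which is what lets `$crate` in macro-expanded paths resolve correctly (see the new `infer_macro_with_dollar_crate_is_correct_in_trait_associate_type` test in the diff below).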
Cargo.lock (generated): 68 lines changed
@@ -58,9 +58,9 @@ dependencies = [

 [[package]]
 name = "backtrace-sys"
-version = "0.1.36"
+version = "0.1.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78848718ee1255a2485d1309ad9cdecfc2e7d0362dd11c6829364c6b35ae1bc7"
+checksum = "18fbebbe1c9d1f383a9cc7e8ccdb471b91c8d024ee9c2ca5b5346121fe8b4399"
 dependencies = [
 "cc",
 "libc",
@@ -170,7 +170,7 @@ dependencies = [
 "chalk-ir",
 "chalk-macros",
 "chalk-rust-ir",
-"ena",
+"ena 0.13.1",
 "itertools",
 "petgraph",
 "rustc-hash",
@@ -199,14 +199,15 @@ dependencies = [

 [[package]]
 name = "console"
-version = "0.10.0"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6728a28023f207181b193262711102bfbaf47cc9d13bc71d0736607ef8efe88c"
+checksum = "2586208b33573b7f76ccfbe5adb076394c88deaf81b84d7213969805b0a952a7"
 dependencies = [
 "clicolors-control",
 "encode_unicode",
 "lazy_static",
 "libc",
+"terminal_size",
 "termios",
 "winapi 0.3.8",
 ]
@@ -315,6 +316,15 @@ dependencies = [
 "log",
 ]

+[[package]]
+name = "ena"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
+dependencies = [
+"log",
+]
+
 [[package]]
 name = "encode_unicode"
 version = "0.3.6"
@@ -381,9 +391,9 @@ dependencies = [

 [[package]]
 name = "fst"
-version = "0.4.1"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eaf9ea41cc964d742f7fc7861db75d2d6e83a3ce0d897d5c6f8b621f015ddc8"
+checksum = "81f9cac32c1741cdf6b66be7dcf0d9c7f25ccf12f8aa84c16cfa31f9f14513b3"

 [[package]]
 name = "fuchsia-zircon"
@@ -447,9 +457,9 @@ dependencies = [

 [[package]]
 name = "hermit-abi"
-version = "0.1.11"
+version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a0d737e0f947a1864e93d33fdef4af8445a00d1ed8dc0c8ddb73139ea6abf15"
+checksum = "61565ff7aaace3525556587bd2dc31d4a07071957be715e63ce7b1eccf51a8f4"
 dependencies = [
 "libc",
 ]
@@ -814,9 +824,9 @@ dependencies = [

 [[package]]
 name = "paste"
-version = "0.1.10"
+version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab4fb1930692d1b6a9cfabdde3d06ea0a7d186518e2f4d67660d8970e2fa647a"
+checksum = "a3c897744f63f34f7ae3a024d9162bb5001f4ad661dd24bea0dc9f075d2de1c6"
 dependencies = [
 "paste-impl",
 "proc-macro-hack",
@@ -824,9 +834,9 @@ dependencies = [

 [[package]]
 name = "paste-impl"
-version = "0.1.10"
+version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a62486e111e571b1e93b710b61e8f493c0013be39629b714cb166bdb06aa5a8a"
+checksum = "66fd6f92e3594f2dd7b3fc23e42d82e292f7bcda6d8e5dcd167072327234ab89"
 dependencies = [
 "proc-macro-hack",
 "proc-macro2",
@@ -885,9 +895,9 @@ dependencies = [

 [[package]]
 name = "quote"
-version = "1.0.3"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f"
+checksum = "4c1f4b0efa5fc5e8ceb705136bfee52cfdb6a4e3509f770b478cd6ed434232a7"
 dependencies = [
 "proc-macro2",
 ]
@@ -1023,7 +1033,7 @@ dependencies = [
 "chalk-ir",
 "chalk-rust-ir",
 "chalk-solve",
-"ena",
+"ena 0.14.0",
 "insta",
 "itertools",
 "log",
@@ -1374,9 +1384,9 @@ dependencies = [

 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "652.0.0"
+version = "656.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a6a43c4d0889218c5e2ae68ffea239f303fc05ab1078c73f74e63feb87f7889"
+checksum = "9cbba98ec46e96a4663197dfa8c0378752de2006e314e5400c0ca74929d6692f"
 dependencies = [
 "unicode-xid",
 ]
@@ -1486,18 +1496,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"

 [[package]]
 name = "serde"
-version = "1.0.104"
+version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449"
+checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
 dependencies = [
 "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.104"
+version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64"
+checksum = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -1506,9 +1516,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.51"
+version = "1.0.52"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da07b57ee2623368351e9a0488bb0b261322a15a6e0ae53e243cbdc0f4208da9"
+checksum = "a7894c8ed05b7a3a279aeb79025fdec1d3158080b75b98a08faf2806bb799edd"
 dependencies = [
 "itoa",
 "ryu",
@@ -1606,6 +1616,16 @@ dependencies = [
 "winapi 0.3.8",
 ]

+[[package]]
+name = "terminal_size"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8038f95fc7a6f351163f4b964af631bd26c9e828f7db085f2a84aca56f70d13b"
+dependencies = [
+"libc",
+"winapi 0.3.8",
+]
+
 [[package]]
 name = "termios"
 version = "0.3.2"

@@ -85,6 +85,7 @@ impl<'a> SubstituteTypeParams<'a> {
 ast::TypeRef::PathType(path_type) => path_type.path()?,
 _ => return None,
 };
+// FIXME: use `hir::Path::from_src` instead.
 let path = hir::Path::from_ast(path)?;
 let resolution = self.source_scope.resolve_hir_path(&path)?;
 match resolution {
@@ -128,6 +129,7 @@ impl<'a> QualifyPaths<'a> {
 // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
 return None;
 }
+// FIXME: use `hir::Path::from_src` instead.
 let hir_path = hir::Path::from_ast(p.clone());
 let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
 match resolution {

@@ -27,7 +27,7 @@ pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist>
 return None;
 }

-let hir_path = hir::Path::from_ast(path.clone())?;
+let hir_path = ctx.sema.lower_path(&path)?;
 let segments = collect_hir_path_segments(&hir_path)?;
 if segments.len() < 2 {
 return None;

@@ -70,6 +70,7 @@ pub use hir_def::{
 type_ref::Mutability,
 };
 pub use hir_expand::{
-name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Origin,
+hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId,
+MacroFile, Origin,
 };
 pub use hir_ty::{display::HirDisplay, CallableDef};

@@ -8,7 +8,7 @@ use hir_def::{
 resolver::{self, HasResolver, Resolver},
 AsMacroCall, TraitId,
 };
-use hir_expand::ExpansionInfo;
+use hir_expand::{hygiene::Hygiene, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -246,6 +246,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 self.analyze(path.syntax()).resolve_path(self.db, path)
 }

+pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
+let src = self.find_file(path.syntax().clone());
+Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into()))
+}
+
 pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
 self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
 }

@@ -224,7 +224,8 @@ impl SourceAnalyzer {
 }
 }
 // This must be a normal source file rather than macro file.
-let hir_path = crate::Path::from_ast(path.clone())?;
+let hir_path =
+crate::Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
 resolve_hir_path(db, &self.resolver, &hir_path)
 }


@@ -12,9 +12,15 @@ use ra_prof::profile
 use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner, VisibilityOwner};

 use crate::{
-body::CfgExpander, db::DefDatabase, src::HasChildSource, src::HasSource, trace::Trace,
-type_ref::TypeRef, visibility::RawVisibility, EnumId, HasModule, LocalEnumVariantId,
-LocalFieldId, Lookup, ModuleId, StructId, UnionId, VariantId,
+body::{CfgExpander, LowerCtx},
+db::DefDatabase,
+src::HasChildSource,
+src::HasSource,
+trace::Trace,
+type_ref::TypeRef,
+visibility::RawVisibility,
+EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId,
+VariantId,
 };

 /// Note that we use `StructData` for unions as well!
@@ -198,6 +204,8 @@ fn lower_struct(
 trace: &mut Trace<FieldData, Either<ast::TupleFieldDef, ast::RecordFieldDef>>,
 ast: &InFile<ast::StructKind>,
 ) -> StructKind {
+let ctx = LowerCtx::new(db, ast.file_id);
+
 match &ast.value {
 ast::StructKind::Tuple(fl) => {
 for (i, fd) in fl.fields().enumerate() {
@@ -210,7 +218,7 @@ fn lower_struct(
 || Either::Left(fd.clone()),
 || FieldData {
 name: Name::new_tuple_field(i),
-type_ref: TypeRef::from_ast_opt(fd.type_ref()),
+type_ref: TypeRef::from_ast_opt(&ctx, fd.type_ref()),
 visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
 },
 );
@@ -228,7 +236,7 @@ fn lower_struct(
 || Either::Right(fd.clone()),
 || FieldData {
 name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
-type_ref: TypeRef::from_ast_opt(fd.ascribed_type()),
+type_ref: TypeRef::from_ast_opt(&ctx, fd.ascribed_type()),
 visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
 },
 );

@@ -15,6 +15,8 @@ use ra_prof::profile;
 use ra_syntax::{ast, AstNode, AstPtr};
 use rustc_hash::FxHashMap;

+pub(crate) use lower::LowerCtx;
+
 use crate::{
 attr::Attrs,
 db::DefDatabase,

@@ -3,8 +3,9 @@

 use either::Either;
 use hir_expand::{
+hygiene::Hygiene,
 name::{name, AsName, Name},
-MacroDefId, MacroDefKind,
+HirFileId, MacroDefId, MacroDefKind,
 };
 use ra_arena::Arena;
 use ra_syntax::{
@@ -26,7 +27,7 @@ use crate::{
 LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
 },
 item_scope::BuiltinShadowMode,
-path::GenericArgs,
+path::{GenericArgs, Path},
 type_ref::{Mutability, TypeRef},
 AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId,
 StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
@@ -35,6 +36,23 @@ use crate::{
 use super::{ExprSource, PatSource};
 use ast::AstChildren;

+pub(crate) struct LowerCtx {
+hygiene: Hygiene,
+}
+
+impl LowerCtx {
+pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
+LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) }
+}
+pub fn with_hygiene(hygiene: &Hygiene) -> Self {
+LowerCtx { hygiene: hygiene.clone() }
+}
+
+pub fn lower_path(&self, ast: ast::Path) -> Option<Path> {
+Path::from_src(ast, &self.hygiene)
+}
+}
+
 pub(super) fn lower(
 db: &dyn DefDatabase,
 def: DefWithBodyId,
@@ -42,10 +60,13 @@ pub(super) fn lower(
 params: Option<ast::ParamList>,
 body: Option<ast::Expr>,
 ) -> (Body, BodySourceMap) {
+let ctx = LowerCtx::new(db, expander.current_file_id.clone());
+
 ExprCollector {
 db,
 def,
 expander,
+ctx,
 source_map: BodySourceMap::default(),
 body: Body {
 exprs: Arena::default(),
@@ -62,7 +83,7 @@ struct ExprCollector<'a> {
 db: &'a dyn DefDatabase,
 def: DefWithBodyId,
 expander: Expander,
+ctx: LowerCtx,
 body: Body,
 source_map: BodySourceMap,
 }
@@ -237,7 +258,8 @@ impl ExprCollector<'_> {
 Vec::new()
 };
 let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
-let generic_args = e.type_arg_list().and_then(GenericArgs::from_ast);
+let generic_args =
+e.type_arg_list().and_then(|it| GenericArgs::from_ast(&self.ctx, it));
 self.alloc_expr(
 Expr::MethodCall { receiver, method_name, args, generic_args },
 syntax_ptr,
@@ -343,7 +365,7 @@ impl ExprCollector<'_> {
 }
 ast::Expr::CastExpr(e) => {
 let expr = self.collect_expr_opt(e.expr());
-let type_ref = TypeRef::from_ast_opt(e.type_ref());
+let type_ref = TypeRef::from_ast_opt(&self.ctx, e.type_ref());
 self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
 }
 ast::Expr::RefExpr(e) => {
@@ -365,12 +387,16 @@ impl ExprCollector<'_> {
 if let Some(pl) = e.param_list() {
 for param in pl.params() {
 let pat = self.collect_pat_opt(param.pat());
-let type_ref = param.ascribed_type().map(TypeRef::from_ast);
+let type_ref =
+param.ascribed_type().map(|it| TypeRef::from_ast(&self.ctx, it));
 args.push(pat);
 arg_types.push(type_ref);
 }
 }
-let ret_type = e.ret_type().and_then(|r| r.type_ref()).map(TypeRef::from_ast);
+let ret_type = e
+.ret_type()
+.and_then(|r| r.type_ref())
+.map(|it| TypeRef::from_ast(&self.ctx, it));
 let body = self.collect_expr_opt(e.body());
 self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr)
 }
@@ -476,7 +502,7 @@ impl ExprCollector<'_> {
 .map(|s| match s {
 ast::Stmt::LetStmt(stmt) => {
 let pat = self.collect_pat_opt(stmt.pat());
-let type_ref = stmt.ascribed_type().map(TypeRef::from_ast);
+let type_ref = stmt.ascribed_type().map(|it| TypeRef::from_ast(&self.ctx, it));
 let initializer = stmt.initializer().map(|e| self.collect_expr(e));
 Statement::Let { pat, type_ref, initializer }
 }

@@ -15,6 +15,7 @@ use ra_syntax::ast::{

 use crate::{
 attr::Attrs,
+body::LowerCtx,
 db::DefDatabase,
 path::{path, AssociatedTypeBinding, GenericArgs, Path},
 src::HasSource,
@@ -40,13 +41,14 @@ impl FunctionData {
 pub(crate) fn fn_data_query(db: &impl DefDatabase, func: FunctionId) -> Arc<FunctionData> {
 let loc = func.lookup(db);
 let src = loc.source(db);
+let ctx = LowerCtx::new(db, src.file_id);
 let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
 let mut params = Vec::new();
 let mut has_self_param = false;
 if let Some(param_list) = src.value.param_list() {
 if let Some(self_param) = param_list.self_param() {
 let self_type = if let Some(type_ref) = self_param.ascribed_type() {
-TypeRef::from_ast(type_ref)
+TypeRef::from_ast(&ctx, type_ref)
 } else {
 let self_type = TypeRef::Path(name![Self].into());
 match self_param.kind() {
@@ -63,14 +65,14 @@ impl FunctionData {
 has_self_param = true;
 }
 for param in param_list.params() {
-let type_ref = TypeRef::from_ast_opt(param.ascribed_type());
+let type_ref = TypeRef::from_ast_opt(&ctx, param.ascribed_type());
 params.push(type_ref);
 }
 }
 let attrs = Attrs::new(&src.value, &Hygiene::new(db.upcast(), src.file_id));

 let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
-TypeRef::from_ast(type_ref)
+TypeRef::from_ast(&ctx, type_ref)
 } else {
 TypeRef::unit()
 };
@@ -122,7 +124,8 @@ impl TypeAliasData {
 let loc = typ.lookup(db);
 let node = loc.source(db);
 let name = node.value.name().map_or_else(Name::missing, |n| n.as_name());
-let type_ref = node.value.type_ref().map(TypeRef::from_ast);
+let lower_ctx = LowerCtx::new(db, node.file_id);
+let type_ref = node.value.type_ref().map(|it| TypeRef::from_ast(&lower_ctx, it));
 let vis_default = RawVisibility::default_for_container(loc.container);
 let visibility = RawVisibility::from_ast_with_default(
 db,
@@ -130,7 +133,7 @@ impl TypeAliasData {
 node.as_ref().map(|n| n.visibility()),
 );
 let bounds = if let Some(bound_list) = node.value.type_bound_list() {
-bound_list.bounds().map(TypeBound::from_ast).collect()
+bound_list.bounds().map(|it| TypeBound::from_ast(&lower_ctx, it)).collect()
 } else {
 Vec::new()
 };
@@ -223,9 +226,10 @@ impl ImplData {
 let _p = profile("impl_data_query");
 let impl_loc = id.lookup(db);
 let src = impl_loc.source(db);
+let lower_ctx = LowerCtx::new(db, src.file_id);

-let target_trait = src.value.target_trait().map(TypeRef::from_ast);
-let target_type = TypeRef::from_ast_opt(src.value.target_type());
+let target_trait = src.value.target_trait().map(|it| TypeRef::from_ast(&lower_ctx, it));
+let target_type = TypeRef::from_ast_opt(&lower_ctx, src.value.target_type());
 let is_negative = src.value.excl_token().is_some();
 let module_id = impl_loc.container.module(db);

@@ -279,8 +283,9 @@ impl ConstData {
 vis_default: RawVisibility,
 node: InFile<N>,
 ) -> ConstData {
+let ctx = LowerCtx::new(db, node.file_id);
 let name = node.value.name().map(|n| n.as_name());
-let type_ref = TypeRef::from_ast_opt(node.value.ascribed_type());
+let type_ref = TypeRef::from_ast_opt(&ctx, node.value.ascribed_type());
 let visibility =
 RawVisibility::from_ast_with_default(db, vis_default, node.map(|n| n.visibility()));
 ConstData { name, type_ref, visibility }

@@ -15,6 +15,7 @@ use ra_prof::profile;
 use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};

 use crate::{
+body::LowerCtx,
 child_by_source::ChildBySource,
 db::DefDatabase,
 dyn_map::DynMap,
@@ -80,11 +81,13 @@ impl GenericParams {
 fn new(db: &dyn DefDatabase, def: GenericDefId) -> (GenericParams, InFile<SourceMap>) {
 let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() };
 let mut sm = ArenaMap::default();

 // FIXME: add `: Sized` bound for everything except for `Self` in traits
 let file_id = match def {
 GenericDefId::FunctionId(it) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 // lower `impl Trait` in arguments
 let data = db.function_data(it);
 for param in &data.params {
@@ -94,21 +97,25 @@ impl GenericParams {
 }
 GenericDefId::AdtId(AdtId::StructId(it)) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::AdtId(AdtId::UnionId(it)) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::AdtId(AdtId::EnumId(it)) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::TraitId(it) => {
 let src = it.lookup(db).source(db);
+let lower_ctx = LowerCtx::new(db, src.file_id);

 // traits get the Self type as an implicit first type parameter
 let self_param_id = generics.types.alloc(TypeParamData {
@@ -120,14 +127,16 @@ impl GenericParams {
 // add super traits as bounds on Self
 // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
 let self_param = TypeRef::Path(name![Self].into());
-generics.fill_bounds(&src.value, self_param);
+generics.fill_bounds(&lower_ctx, &src.value, self_param);

-generics.fill(&mut sm, &src.value);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::TypeAliasId(it) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 // Note that we don't add `Self` here: in `impl`s, `Self` is not a
@@ -135,7 +144,9 @@ impl GenericParams {
 // type, so this is handled by the resolver.
 GenericDefId::ImplId(it) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 // We won't be using this ID anyway
@@ -145,28 +156,38 @@ impl GenericParams {
 (generics, InFile::new(file_id, sm))
 }

-fn fill(&mut self, sm: &mut SourceMap, node: &dyn TypeParamsOwner) {
+fn fill(&mut self, lower_ctx: &LowerCtx, sm: &mut SourceMap, node: &dyn TypeParamsOwner) {
 if let Some(params) = node.type_param_list() {
-self.fill_params(sm, params)
+self.fill_params(lower_ctx, sm, params)
 }
 if let Some(where_clause) = node.where_clause() {
-self.fill_where_predicates(where_clause);
+self.fill_where_predicates(lower_ctx, where_clause);
 }
 }

-fn fill_bounds(&mut self, node: &dyn ast::TypeBoundsOwner, type_ref: TypeRef) {
+fn fill_bounds(
+&mut self,
+lower_ctx: &LowerCtx,
+node: &dyn ast::TypeBoundsOwner,
+type_ref: TypeRef,
+) {
 for bound in
 node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
 {
-self.add_where_predicate_from_bound(bound, type_ref.clone());
+self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone());
 }
 }

-fn fill_params(&mut self, sm: &mut SourceMap, params: ast::TypeParamList) {
+fn fill_params(
+&mut self,
+lower_ctx: &LowerCtx,
+sm: &mut SourceMap,
+params: ast::TypeParamList,
+) {
 for type_param in params.type_params() {
 let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
 // FIXME: Use `Path::from_src`
-let default = type_param.default_type().map(TypeRef::from_ast);
+let default = type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it));
 let param = TypeParamData {
 name: Some(name.clone()),
 default,
@@ -176,29 +197,34 @@ impl GenericParams {
 sm.insert(param_id, Either::Right(type_param.clone()));

 let type_ref = TypeRef::Path(name.into());
-self.fill_bounds(&type_param, type_ref);
+self.fill_bounds(&lower_ctx, &type_param, type_ref);
 }
 }

-fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
+fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx, where_clause: ast::WhereClause) {
 for pred in where_clause.predicates() {
 let type_ref = match pred.type_ref() {
 Some(type_ref) => type_ref,
 None => continue,
 };
-let type_ref = TypeRef::from_ast(type_ref);
+let type_ref = TypeRef::from_ast(lower_ctx, type_ref);
 for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
-self.add_where_predicate_from_bound(bound, type_ref.clone());
+self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone());
 }
 }
 }

-fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
+fn add_where_predicate_from_bound(
+&mut self,
+lower_ctx: &LowerCtx,
+bound: ast::TypeBound,
+type_ref: TypeRef,
+) {
 if bound.question_token().is_some() {
 // FIXME: remove this bound
 return;
 }
-let bound = TypeBound::from_ast(bound);
+let bound = TypeBound::from_ast(lower_ctx, bound);
 self.where_predicates
 .push(WherePredicate { target: WherePredicateTarget::TypeRef(type_ref), bound });
 }

@@ -7,6 +7,7 @@ use std::{
 sync::Arc,
 };

+use crate::body::LowerCtx;
 use hir_expand::{
 hygiene::Hygiene,
 name::{AsName, Name},
@@ -244,8 +245,8 @@ impl<'a> PathSegments<'a> {
 }

 impl GenericArgs {
-pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
-lower::lower_generic_args(node)
+pub(crate) fn from_ast(lower_ctx: &LowerCtx, node: ast::TypeArgList) -> Option<GenericArgs> {
+lower::lower_generic_args(lower_ctx, node)
 }

 pub(crate) fn empty() -> GenericArgs {

@@ -13,6 +13,7 @@ use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner, TypeBoundsOwner};

 use super::AssociatedTypeBinding;
 use crate::{
+body::LowerCtx,
 path::{GenericArg, GenericArgs, ModPath, Path, PathKind},
 type_ref::{TypeBound, TypeRef},
 };
@@ -26,6 +27,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
 let mut type_anchor = None;
 let mut segments = Vec::new();
 let mut generic_args = Vec::new();
+let ctx = LowerCtx::with_hygiene(hygiene);
 loop {
 let segment = path.segment()?;

@@ -40,9 +42,10 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
 Either::Left(name) => {
 let args = segment
 .type_arg_list()
-.and_then(lower_generic_args)
+.and_then(|it| lower_generic_args(&ctx, it))
 .or_else(|| {
 lower_generic_args_from_fn_path(
+&ctx,
 segment.param_list(),
 segment.ret_type(),
 )
@@ -60,7 +63,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
 ast::PathSegmentKind::Type { type_ref, trait_ref } => {
 assert!(path.qualifier().is_none()); // this can only occur at the first segment

-let self_type = TypeRef::from_ast(type_ref?);
+let self_type = TypeRef::from_ast(&ctx, type_ref?);

 match trait_ref {
 // <T>::foo
@@ -128,10 +131,13 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
 }
 }

-pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs> {
+pub(super) fn lower_generic_args(
+lower_ctx: &LowerCtx,
+node: ast::TypeArgList,
+) -> Option<GenericArgs> {
 let mut args = Vec::new();
 for type_arg in node.type_args() {
-let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());
+let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.type_ref());
 args.push(GenericArg::Type(type_ref));
 }
 // lifetimes ignored for now
@@ -140,9 +146,9 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs>
 let assoc_type_arg: ast::AssocTypeArg = assoc_type_arg;
 if let Some(name_ref) = assoc_type_arg.name_ref() {
 let name = name_ref.as_name();
-let type_ref = assoc_type_arg.type_ref().map(TypeRef::from_ast);
+let type_ref = assoc_type_arg.type_ref().map(|it| TypeRef::from_ast(lower_ctx, it));
 let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
-l.bounds().map(TypeBound::from_ast).collect()
+l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
 } else {
 Vec::new()
 };
@@ -159,6 +165,7 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs>
 /// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
 /// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
 fn lower_generic_args_from_fn_path(
+ctx: &LowerCtx,
 params: Option<ast::ParamList>,
 ret_type: Option<ast::RetType>,
 ) -> Option<GenericArgs> {
@@ -167,14 +174,14 @@ fn lower_generic_args_from_fn_path(
 if let Some(params) = params {
 let mut param_types = Vec::new();
 for param in params.params() {
-let type_ref = TypeRef::from_ast_opt(param.ascribed_type());
+let type_ref = TypeRef::from_ast_opt(&ctx, param.ascribed_type());
 param_types.push(type_ref);
 }
 let arg = GenericArg::Type(TypeRef::Tuple(param_types));
 args.push(arg);
 }
 if let Some(ret_type) = ret_type {
-let type_ref = TypeRef::from_ast_opt(ret_type.type_ref());
+let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.type_ref());
 bindings.push(AssociatedTypeBinding {
 name: name![Output],
 type_ref: Some(type_ref),

@@ -3,7 +3,7 @@

 use ra_syntax::ast::{self, TypeAscriptionOwner, TypeBoundsOwner};

-use crate::path::Path;
+use crate::{body::LowerCtx, path::Path};

 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
 pub enum Mutability {
@@ -64,30 +64,34 @@ pub enum TypeBound {

 impl TypeRef {
 /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
-pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
+pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeRef) -> Self {
 match node {
-ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
+ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.type_ref()),
 ast::TypeRef::TupleType(inner) => {
-TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect())
+TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
 }
 ast::TypeRef::NeverType(..) => TypeRef::Never,
 ast::TypeRef::PathType(inner) => {
 // FIXME: Use `Path::from_src`
-inner.path().and_then(Path::from_ast).map(TypeRef::Path).unwrap_or(TypeRef::Error)
+inner
+.path()
+.and_then(|it| ctx.lower_path(it))
+.map(TypeRef::Path)
+.unwrap_or(TypeRef::Error)
 }
 ast::TypeRef::PointerType(inner) => {
-let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
+let inner_ty = TypeRef::from_ast_opt(&ctx, inner.type_ref());
 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
 TypeRef::RawPtr(Box::new(inner_ty), mutability)
 }
 ast::TypeRef::ArrayType(inner) => {
-TypeRef::Array(Box::new(TypeRef::from_ast_opt(inner.type_ref())))
+TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.type_ref())))
 }
 ast::TypeRef::SliceType(inner) => {
-TypeRef::Slice(Box::new(TypeRef::from_ast_opt(inner.type_ref())))
+TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.type_ref())))
 }
 ast::TypeRef::ReferenceType(inner) => {
-let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
+let inner_ty = TypeRef::from_ast_opt(&ctx, inner.type_ref());
 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
 TypeRef::Reference(Box::new(inner_ty), mutability)
 }
@@ -96,10 +100,13 @@ impl TypeRef {
 let ret_ty = inner
 .ret_type()
 .and_then(|rt| rt.type_ref())
-.map(TypeRef::from_ast)
+.map(|it| TypeRef::from_ast(ctx, it))
 .unwrap_or_else(|| TypeRef::Tuple(Vec::new()));
 let mut params = if let Some(pl) = inner.param_list() {
-pl.params().map(|p| p.ascribed_type()).map(TypeRef::from_ast_opt).collect()
+pl.params()
+.map(|p| p.ascribed_type())
+.map(|it| TypeRef::from_ast_opt(&ctx, it))
+.collect()
 } else {
 Vec::new()
 };
@@ -107,19 +114,19 @@ impl TypeRef {
 TypeRef::Fn(params)
 }
 // for types are close enough for our purposes to the inner type for now...
-ast::TypeRef::ForType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
+ast::TypeRef::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.type_ref()),
 ast::TypeRef::ImplTraitType(inner) => {
-TypeRef::ImplTrait(type_bounds_from_ast(inner.type_bound_list()))
+TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
 }
 ast::TypeRef::DynTraitType(inner) => {
-TypeRef::DynTrait(type_bounds_from_ast(inner.type_bound_list()))
+TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
 }
 }
 }

-pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
+pub(crate) fn from_ast_opt(ctx: &LowerCtx, node: Option<ast::TypeRef>) -> Self {
 if let Some(node) = node {
-TypeRef::from_ast(node)
+TypeRef::from_ast(ctx, node)
 } else {
 TypeRef::Error
 }
@@ -180,24 +187,27 @@ impl TypeRef {
 }
 }

-pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>) -> Vec<TypeBound> {
+pub(crate) fn type_bounds_from_ast(
+lower_ctx: &LowerCtx,
+type_bounds_opt: Option<ast::TypeBoundList>,
+) -> Vec<TypeBound> {
 if let Some(type_bounds) = type_bounds_opt {
-type_bounds.bounds().map(TypeBound::from_ast).collect()
+type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
 } else {
 vec![]
 }
 }

 impl TypeBound {
-pub(crate) fn from_ast(node: ast::TypeBound) -> Self {
+pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeBound) -> Self {
 match node.kind() {
 ast::TypeBoundKind::PathType(path_type) => {
 let path = match path_type.path() {
 Some(p) => p,
 None => return TypeBound::Error,
 };
-// FIXME: Use `Path::from_src`
-let path = match Path::from_ast(path) {
+let path = match ctx.lower_path(path) {
 Some(p) => p,
 None => return TypeBound::Error,
 };

@@ -12,7 +12,7 @@ use crate::{
 HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind,
 };

-#[derive(Debug)]
+#[derive(Clone, Debug)]
 pub struct Hygiene {
 // This is what `$crate` expands to
 def_crate: Option<CrateId>,

@@ -11,7 +11,7 @@ doctest = false
 itertools = "0.9.0"
 arrayvec = "0.5.1"
 smallvec = "1.2.0"
-ena = "0.13.1"
+ena = "0.14.0"
 log = "0.4.8"
 rustc-hash = "1.1.0"


@@ -338,6 +338,46 @@ pub fn baz() -> usize { 31usize }
 assert_eq!("(i32, usize)", type_at_pos(&db, pos));
 }

+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
+let (db, pos) = TestDB::with_position(
+r#"
+//- /main.rs crate:main deps:foo
+use foo::Trait;
+
+fn test() {
+let msg = foo::Message(foo::MessageRef);
+let r = msg.deref();
+r<|>;
+}
+
+//- /lib.rs crate:foo
+pub struct MessageRef;
+pub struct Message(MessageRef);
+
+pub trait Trait {
+type Target;
+fn deref(&self) -> &Self::Target;
+}
+
+#[macro_export]
+macro_rules! expand {
+() => {
+impl Trait for Message {
+type Target = $crate::MessageRef;
+fn deref(&self) -> &Self::Target {
+&self.0
+}
+}
+}
+}
+
+expand!();
+"#,
+);
+assert_eq!("&MessageRef", type_at_pos(&db, pos));
+}
+
 #[test]
 fn infer_type_value_non_legacy_macro_use_as() {
 assert_snapshot!(

@@ -21,4 +21,4 @@ test_utils = { path = "../test_utils" }
 cargo_metadata = "0.9.1"
 difference = "2.0.0"
 # used as proc macro test target
-serde_derive = "=1.0.104"
+serde_derive = "=1.0.106"

@@ -1,5 +1,10 @@
 SUBTREE $
 PUNCH # [alone] 4294967295
+SUBTREE [] 4294967295
+IDENT doc 4294967295
+SUBTREE () 4294967295
+IDENT hidden 4294967295
+PUNCH # [alone] 4294967295
 SUBTREE [] 4294967295
 IDENT allow 4294967295
 SUBTREE () 4294967295

@@ -10,7 +10,7 @@ fn test_derive_serialize_proc_macro() {
 assert_expand(
 "serde_derive",
 "Serialize",
-"1.0.104",
+"1.0.106",
 r##"struct Foo {}"##,
 include_str!("fixtures/test_serialize_proc_macro.txt"),
 );
@@ -21,7 +21,7 @@ fn test_derive_serialize_proc_macro_failed() {
 assert_expand(
 "serde_derive",
 "Serialize",
-"1.0.104",
+"1.0.106",
 r##"
 struct {}
 "##,
@@ -37,7 +37,7 @@ SUBTREE $

 #[test]
 fn test_derive_proc_macro_list() {
-let res = list("serde_derive", "1.0.104").join("\n");
+let res = list("serde_derive", "1.0.106").join("\n");

 assert_eq_text!(
 &res,

@@ -18,7 +18,7 @@ ra_db = { path = "../ra_db" }
 ra_cfg = { path = "../ra_cfg" }
 ra_proc_macro = { path = "../ra_proc_macro" }

-serde = { version = "1.0.104", features = ["derive"] }
+serde = { version = "1.0.106", features = ["derive"] }
 serde_json = "1.0.48"

 anyhow = "1.0.26"

@@ -13,7 +13,7 @@ doctest = false
 [dependencies]
 itertools = "0.9.0"
 rowan = "0.10.0"
-rustc_lexer = { version = "652.0.0", package = "rustc-ap-rustc_lexer" }
+rustc_lexer = { version = "656.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
 once_cell = "1.3.1"
@@ -27,7 +27,7 @@ ra_parser = { path = "../ra_parser" }
 # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
 # to reduce number of compilations
 smol_str = { version = "0.1.15", features = ["serde"] }
-serde = { version = "1.0.104", features = ["derive"] }
+serde = { version = "1.0.106", features = ["derive"] }

 [dev-dependencies]
 test_utils = { path = "../test_utils" }

@@ -180,7 +180,7 @@ fn rustc_token_kind_to_syntax_kind(
 return (syntax_kind, None);

 fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<&'static str>) {
-use rustc_lexer::LiteralKind as LK;
+use rustc_lexer::{LexRawStrError, LiteralKind as LK};

 #[rustfmt::skip]
 let syntax_kind = match *kind {
@@ -215,21 +215,28 @@ fn rustc_token_kind_to_syntax_kind(
 return (BYTE_STRING, Some("Missing trailing `\"` symbol to terminate the byte string literal"))
 }

-LK::RawStr { started: true, terminated: true, .. } => RAW_STRING,
-LK::RawStr { started: true, terminated: false, .. } => {
+LK::RawStr(str) => match str.validate() {
+Ok(_) => RAW_STRING,
+Err(LexRawStrError::InvalidStarter) => return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal")),
+Err(LexRawStrError::NoTerminator { expected, found, .. }) => if expected == found {
+return (RAW_STRING, Some("Missing trailing `\"` to terminate the raw string literal"))
+} else {
 return (RAW_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw string literal"))
-}
-LK::RawStr { started: false, .. } => {
-return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal"))
-}

-LK::RawByteStr { started: true, terminated: true, .. } => RAW_BYTE_STRING,
-LK::RawByteStr { started: true, terminated: false, .. } => {
+},
+Err(LexRawStrError::TooManyDelimiters { .. }) => return (RAW_STRING, Some("Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols")),
+},
+LK::RawByteStr(str) => match str.validate() {
+Ok(_) => RAW_BYTE_STRING,
+Err(LexRawStrError::InvalidStarter) => return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal")),
+Err(LexRawStrError::NoTerminator { expected, found, .. }) => if expected == found {
+return (RAW_BYTE_STRING, Some("Missing trailing `\"` to terminate the raw byte string literal"))
+} else {
 return (RAW_BYTE_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"))
-}
-LK::RawByteStr { started: false, .. } => {
-return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal"))
-}
+},
+Err(LexRawStrError::TooManyDelimiters { .. }) => return (RAW_BYTE_STRING, Some("Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols")),
+},
 };

 (syntax_kind, None)

@@ -26,7 +26,7 @@ pico-args = "0.3.1"
 rand = { version = "0.7.3", features = ["small_rng"] }
 relative-path = "1.0.0"
 rustc-hash = "1.1.0"
-serde = { version = "1.0.104", features = ["derive"] }
+serde = { version = "1.0.106", features = ["derive"] }
 serde_json = "1.0.48"
 threadpool = "1.7.1"