5951: Rename record_field_pat to record_pat_field r=jonas-schievink a=pksunkara

The token was renamed, but these functions were not updated to match.
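
For context, a minimal sketch of the renamed `Semantics` entry point. The helper function is hypothetical and the imports/crate paths are assumed from this era of the codebase; the method signature itself follows the diff below.

```rust
use hir::{Field, Semantics};
use ide_db::RootDatabase;
use syntax::ast;

// Hypothetical helper: resolve the field behind a record-pattern field,
// e.g. the `f` in `let S { f, .. } = ...;`. Only the method name changes
// in this PR.
fn field_of_record_pat(
    sema: &Semantics<RootDatabase>,
    pat_field: &ast::RecordPatField,
) -> Option<Field> {
    // previously: sema.resolve_record_field_pat(pat_field)
    sema.resolve_record_pat_field(pat_field)
}
```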

5975: Report better errors in project.json/sysroot r=jonas-schievink a=jonas-schievink

This does some light refactoring so that the `Sysroot` is loaded later, which makes sure that any errors are reported to the user. I then added a check that reports an error if libcore is missing from the loaded sysroot. Since a sysroot without libcore is essentially useless, this almost always indicates a configuration error.
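
Roughly, the new flow looks like the sketch below. The helper is hypothetical; `Sysroot::load`, `paths::AbsPath`, and the `project_model` crate path are taken from the diff. The project description now only keeps a `sysroot_src` path, and the workspace loader resolves it through the now-fallible `Sysroot::load`, so a broken sysroot surfaces as an error instead of being silently swallowed.

```rust
use anyhow::Result;
use paths::AbsPath;
use project_model::Sysroot;

// Hypothetical helper mirroring the shape of the change: the sysroot is
// loaded late, and `Sysroot::load` now returns `Result`, rejecting a
// sysroot directory that has no libcore.
fn load_optional_sysroot(sysroot_src: Option<&AbsPath>) -> Result<Option<Sysroot>> {
    match sysroot_src {
        // Propagates errors such as "could not find libcore in sysroot path `...`".
        Some(path) => Ok(Some(Sysroot::load(path)?)),
        None => Ok(None),
    }
}
```

The `reload.rs` change at the bottom of the diff routes in-memory `rust-project.json` configurations through this same fallible path via `ProjectWorkspace::load_inline`.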

Co-authored-by: Pavan Kumar Sunkara <pavan.sss1991@gmail.com>
Co-authored-by: Jonas Schievink <jonas.schievink@ferrous-systems.com>
bors[bot] 2020-09-11 13:47:33 +00:00 committed by GitHub
commit 96e988fcc3
14 changed files with 53 additions and 34 deletions

View file

@ -207,8 +207,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_record_field(field)
}
pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> {
self.imp.resolve_record_field_pat(field)
pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
self.imp.resolve_record_pat_field(field)
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
@ -433,8 +433,8 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax()).resolve_record_field(self.db, field)
}
fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> {
self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
self.analyze(field.syntax()).resolve_record_pat_field(self.db, field)
}
fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {

View file

@ -179,13 +179,13 @@ impl SourceAnalyzer {
Some((struct_field.into(), local))
}
pub(crate) fn resolve_record_field_pat(
pub(crate) fn resolve_record_pat_field(
&self,
_db: &dyn HirDatabase,
field: &ast::RecordPatField,
) -> Option<Field> {
let pat_id = self.pat_id(&field.pat()?)?;
let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?;
let struct_field = self.infer.as_ref()?.record_pat_field_resolution(pat_id)?;
Some(struct_field.into())
}

View file

@ -125,7 +125,7 @@ pub struct InferenceResult {
field_resolutions: FxHashMap<ExprId, FieldId>,
/// For each field in record literal, records the field it resolves to.
record_field_resolutions: FxHashMap<ExprId, FieldId>,
record_field_pat_resolutions: FxHashMap<PatId, FieldId>,
record_pat_field_resolutions: FxHashMap<PatId, FieldId>,
/// For each struct literal, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
@ -146,8 +146,8 @@ impl InferenceResult {
pub fn record_field_resolution(&self, expr: ExprId) -> Option<FieldId> {
self.record_field_resolutions.get(&expr).copied()
}
pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<FieldId> {
self.record_field_pat_resolutions.get(&pat).copied()
pub fn record_pat_field_resolution(&self, pat: PatId) -> Option<FieldId> {
self.record_pat_field_resolutions.get(&pat).copied()
}
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
self.variant_resolutions.get(&id.into()).copied()

View file

@ -70,7 +70,7 @@ impl<'a> InferenceContext<'a> {
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
if let Some(local_id) = matching_field {
let field_def = FieldId { parent: def.unwrap(), local_id };
self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
self.result.record_pat_field_resolutions.insert(subpat.pat, field_def);
}
let expected_ty =

View file

@ -157,9 +157,9 @@ pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option
ast::IdentPat(it) => {
let local = sema.to_def(&it)?;
if let Some(record_field_pat) = it.syntax().parent().and_then(ast::RecordPatField::cast) {
if record_field_pat.name_ref().is_none() {
if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) {
if let Some(record_pat_field) = it.syntax().parent().and_then(ast::RecordPatField::cast) {
if record_pat_field.name_ref().is_none() {
if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
let field = Definition::Field(field);
return Some(NameClass::FieldShorthand { local, field });
}
@ -275,8 +275,8 @@ pub fn classify_name_ref(
}
}
if let Some(record_field_pat) = ast::RecordPatField::cast(parent.clone()) {
if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) {
if let Some(record_pat_field) = ast::RecordPatField::cast(parent.clone()) {
if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
let field = Definition::Field(field);
return Some(NameRefClass::Definition(field));
}

View file

@ -188,7 +188,7 @@ fn tuple_pat_fields(p: &mut Parser) {
p.expect(T![')']);
}
// test record_field_pat_list
// test record_pat_field_list
// fn foo() {
// let S {} = ();
// let S { f, ref mut g } = ();
@ -208,7 +208,7 @@ fn record_pat_field_list(p: &mut Parser) {
c => {
let m = p.start();
match c {
// test record_field_pat
// test record_pat_field
// fn foo() {
// let S { 0: 1 } = ();
// let S { x: 1 } = ();

View file

@ -33,7 +33,7 @@ pub enum ProjectWorkspace {
/// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
Cargo { cargo: CargoWorkspace, sysroot: Sysroot },
/// Project workspace was manually specified using a `rust-project.json` file.
Json { project: ProjectJson },
Json { project: ProjectJson, sysroot: Option<Sysroot> },
}
impl fmt::Debug for ProjectWorkspace {
@ -44,10 +44,10 @@ impl fmt::Debug for ProjectWorkspace {
.field("n_packages", &cargo.packages().len())
.field("n_sysroot_crates", &sysroot.crates().len())
.finish(),
ProjectWorkspace::Json { project } => {
ProjectWorkspace::Json { project, sysroot } => {
let mut debug_struct = f.debug_struct("Json");
debug_struct.field("n_crates", &project.n_crates());
if let Some(sysroot) = &project.sysroot {
if let Some(sysroot) = sysroot {
debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
}
debug_struct.finish()
@ -169,7 +169,11 @@ impl ProjectWorkspace {
})?;
let project_location = project_json.parent().unwrap().to_path_buf();
let project = ProjectJson::new(&project_location, data);
ProjectWorkspace::Json { project }
let sysroot = match &project.sysroot_src {
Some(path) => Some(Sysroot::load(path)?),
None => None,
};
ProjectWorkspace::Json { project, sysroot }
}
ProjectManifest::CargoToml(cargo_toml) => {
let cargo_version = utf8_stdout({
@ -203,12 +207,21 @@ impl ProjectWorkspace {
Ok(res)
}
pub fn load_inline(project_json: ProjectJson) -> Result<ProjectWorkspace> {
let sysroot = match &project_json.sysroot_src {
Some(path) => Some(Sysroot::load(path)?),
None => None,
};
Ok(ProjectWorkspace::Json { project: project_json, sysroot })
}
/// Returns the roots for the current `ProjectWorkspace`
/// The return type contains the path and whether or not
/// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> {
match self {
ProjectWorkspace::Json { project } => project
ProjectWorkspace::Json { project, sysroot } => project
.crates()
.map(|(_, krate)| PackageRoot {
is_member: krate.is_workspace_member,
@ -217,7 +230,7 @@ impl ProjectWorkspace {
})
.collect::<FxHashSet<_>>()
.into_iter()
.chain(project.sysroot.as_ref().into_iter().flat_map(|sysroot| {
.chain(sysroot.as_ref().into_iter().flat_map(|sysroot| {
sysroot.crates().map(move |krate| PackageRoot {
is_member: false,
include: vec![sysroot[krate].root_dir().to_path_buf()],
@ -255,7 +268,7 @@ impl ProjectWorkspace {
pub fn proc_macro_dylib_paths(&self) -> Vec<AbsPathBuf> {
match self {
ProjectWorkspace::Json { project } => project
ProjectWorkspace::Json { project, sysroot: _ } => project
.crates()
.filter_map(|(_, krate)| krate.proc_macro_dylib_path.as_ref())
.cloned()
@ -285,9 +298,8 @@ impl ProjectWorkspace {
) -> CrateGraph {
let mut crate_graph = CrateGraph::default();
match self {
ProjectWorkspace::Json { project } => {
let sysroot_dps = project
.sysroot
ProjectWorkspace::Json { project, sysroot } => {
let sysroot_dps = sysroot
.as_ref()
.map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, target, load));

View file

@ -7,12 +7,12 @@ use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use serde::{de, Deserialize};
use crate::{cfg_flag::CfgFlag, Sysroot};
use crate::cfg_flag::CfgFlag;
/// Roots and crates that compose this Rust project.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProjectJson {
pub(crate) sysroot: Option<Sysroot>,
pub(crate) sysroot_src: Option<AbsPathBuf>,
crates: Vec<Crate>,
}
@ -35,7 +35,7 @@ pub struct Crate {
impl ProjectJson {
pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson {
ProjectJson {
sysroot: data.sysroot_src.map(|it| base.join(it)).map(|it| Sysroot::load(&it)),
sysroot_src: data.sysroot_src.map(|it| base.join(it)),
crates: data
.crates
.into_iter()

View file

@ -51,11 +51,11 @@ impl Sysroot {
pub fn discover(cargo_toml: &AbsPath) -> Result<Sysroot> {
let current_dir = cargo_toml.parent().unwrap();
let sysroot_src_dir = discover_sysroot_src_dir(current_dir)?;
let res = Sysroot::load(&sysroot_src_dir);
let res = Sysroot::load(&sysroot_src_dir)?;
Ok(res)
}
pub fn load(sysroot_src_dir: &AbsPath) -> Sysroot {
pub fn load(sysroot_src_dir: &AbsPath) -> Result<Sysroot> {
let mut sysroot = Sysroot { crates: Arena::default() };
for name in SYSROOT_CRATES.trim().lines() {
@ -89,7 +89,14 @@ impl Sysroot {
}
}
sysroot
if sysroot.by_name("core").is_none() {
anyhow::bail!(
"could not find libcore in sysroot path `{}`",
sysroot_src_dir.as_ref().display()
);
}
Ok(sysroot)
}
fn by_name(&self, name: &str) -> Option<SysrootCrate> {

View file

@ -109,7 +109,7 @@ impl GlobalState {
)
}
LinkedProject::InlineJsonProject(it) => {
Ok(project_model::ProjectWorkspace::Json { project: it.clone() })
project_model::ProjectWorkspace::load_inline(it.clone())
}
})
.collect::<Vec<_>>();