Make records grammar more orthogonal

We used the

  name [: expr]

grammar before; now it is

  [name :] expr

which makes things simpler: the expression is now always present, and only the name is optional.
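
As an illustration (not part of the commit; `S` and `field` are made-up names), these are the record-literal field forms involved: the first two go through the new `[name :] expr` rule, while the `..expr` spread stays a separate branch of record_field_list:

  struct S { field: u32 }

  fn main() {
      let field = 92;
      let _ = S { field: 1 };          // explicit `name : expr`
      let _ = S { field };             // shorthand: only `expr`, a one-segment path
      let _ = S { ..S { field: 1 } };  // no field at all, only the `..expr` spread
  }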
Aleksey Kladov 2020-04-11 16:42:24 +02:00
parent e7a68c8f55
commit 7a39bc3ba2
13 changed files with 142 additions and 68 deletions

@@ -139,7 +139,7 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         field: &ast::FieldExpr,
-    ) -> Option<crate::StructField> {
+    ) -> Option<StructField> {
         let expr_id = self.expr_id(db, &field.clone().into())?;
         self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
     }
@@ -148,21 +148,19 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         field: &ast::RecordField,
-    ) -> Option<(crate::StructField, Option<Local>)> {
-        let (expr_id, local) = match field.expr() {
-            Some(it) => (self.expr_id(db, &it)?, None),
-            None => {
-                let src = InFile { file_id: self.file_id, value: field };
-                let expr_id = self.body_source_map.as_ref()?.field_init_shorthand_expr(src)?;
-                let local_name = field.name_ref()?.as_name();
-                let path = ModPath::from_segments(PathKind::Plain, once(local_name));
-                let local = match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
-                    Some(ValueNs::LocalBinding(pat_id)) => {
-                        Some(Local { pat_id, parent: self.resolver.body_owner()? })
-                    }
-                    _ => None,
-                };
-                (expr_id, local)
+    ) -> Option<(StructField, Option<Local>)> {
+        let expr = field.expr()?;
+        let expr_id = self.expr_id(db, &expr)?;
+        let local = if field.name_ref().is_some() {
+            None
+        } else {
+            let local_name = field.field_name()?.as_name();
+            let path = ModPath::from_segments(PathKind::Plain, once(local_name));
+            match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+                Some(ValueNs::LocalBinding(pat_id)) => {
+                    Some(Local { pat_id, parent: self.resolver.body_owner()? })
+                }
+                _ => None,
            }
         };
         let struct_field = self.infer.as_ref()?.record_field_resolution(expr_id)?;

@@ -294,11 +294,6 @@ impl BodySourceMap {
         self.expansions.get(&src).cloned()
     }
 
-    pub fn field_init_shorthand_expr(&self, node: InFile<&ast::RecordField>) -> Option<ExprId> {
-        let src = node.map(|it| Either::Right(AstPtr::new(it)));
-        self.expr_map.get(&src).cloned()
-    }
-
     pub fn pat_syntax(&self, pat: PatId) -> Result<PatSource, SyntheticSyntax> {
         self.pat_map_back[pat].clone()
     }

@@ -27,7 +27,6 @@ use crate::{
     },
     item_scope::BuiltinShadowMode,
     path::GenericArgs,
-    path::Path,
     type_ref::{Mutability, TypeRef},
     AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId,
     StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
@@ -113,13 +112,6 @@ impl ExprCollector<'_> {
     fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
         self.make_expr(expr, Err(SyntheticSyntax))
     }
-    fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId {
-        let ptr = Either::Right(ptr);
-        let src = self.expander.to_source(ptr);
-        let id = self.make_expr(expr, Ok(src.clone()));
-        self.source_map.expr_map.insert(src, id);
-        id
-    }
     fn empty_block(&mut self) -> ExprId {
         self.alloc_expr_desugared(Expr::Block { statements: Vec::new(), tail: None })
     }
@@ -309,22 +301,13 @@ impl ExprCollector<'_> {
                             if !self.expander.is_cfg_enabled(&attrs) {
                                 return None;
                             }
+                            let name = field.field_name()?.as_name();
 
                             Some(RecordLitField {
-                                name: field
-                                    .name_ref()
-                                    .map(|nr| nr.as_name())
-                                    .unwrap_or_else(Name::missing),
-                                expr: if let Some(e) = field.expr() {
-                                    self.collect_expr(e)
-                                } else if let Some(nr) = field.name_ref() {
-                                    // field shorthand
-                                    self.alloc_expr_field_shorthand(
-                                        Expr::Path(Path::from_name_ref(&nr)),
-                                        AstPtr::new(&field),
-                                    )
-                                } else {
-                                    self.missing_expr()
+                                name,
+                                expr: match field.expr() {
+                                    Some(e) => self.collect_expr(e),
+                                    None => self.missing_expr(),
                                 },
                             })
                         })

@@ -134,11 +134,6 @@ impl Path {
         lower::lower_path(path, hygiene)
     }
 
-    /// Converts an `ast::NameRef` into a single-identifier `Path`.
-    pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> Path {
-        Path { type_anchor: None, mod_path: name_ref.as_name().into(), generic_args: vec![None] }
-    }
-
     /// Converts a known mod path to `Path`.
     pub(crate) fn from_known_path(
         path: ModPath,

@@ -3,7 +3,7 @@
 use crate::completion::{CompletionContext, Completions};
 
 pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) {
-    if !(ctx.is_trivial_path && !ctx.is_pat_binding_or_const) {
+    if !(ctx.is_trivial_path && !ctx.is_pat_binding_or_const && !ctx.record_lit_syntax.is_some()) {
         return;
     }

@@ -227,7 +227,7 @@ impl<'a> CompletionContext<'a> {
             self.name_ref_syntax =
                 find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
             let name_range = name_ref.syntax().text_range();
-            if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
+            if ast::RecordField::for_field_name(&name_ref).is_some() {
                 self.record_lit_syntax =
                     self.sema.find_node_at_offset_with_macros(&original_file, offset);
             }

@@ -216,7 +216,7 @@ pub fn classify_name_ref(
         }
     }
 
-    if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
+    if let Some(record_field) = ast::RecordField::for_field_name(name_ref) {
         tested_by!(goto_def_for_record_fields; force);
         tested_by!(goto_def_for_field_init_shorthand; force);
         if let Some((field, local)) = sema.resolve_record_field(&record_field) {

@@ -619,26 +619,39 @@ pub(crate) fn record_field_list(p: &mut Parser) {
     let m = p.start();
     p.bump(T!['{']);
     while !p.at(EOF) && !p.at(T!['}']) {
+        let m = p.start();
+        // test record_literal_field_with_attr
+        // fn main() {
+        //     S { #[cfg(test)] field: 1 }
+        // }
+        attributes::outer_attributes(p);
+
         match p.current() {
-            // test record_literal_field_with_attr
-            // fn main() {
-            //     S { #[cfg(test)] field: 1 }
-            // }
-            IDENT | INT_NUMBER | T![#] => {
-                let m = p.start();
-                attributes::outer_attributes(p);
-                name_ref_or_index(p);
-                if p.eat(T![:]) {
-                    expr(p);
+            IDENT | INT_NUMBER => {
+                // test_err record_literal_before_ellipsis_recovery
+                // fn main() {
+                //     S { field ..S::default() }
+                // }
+                if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) {
+                    name_ref_or_index(p);
+                    p.expect(T![:]);
                 }
+                expr(p);
                 m.complete(p, RECORD_FIELD);
             }
             T![.] if p.at(T![..]) => {
+                m.abandon(p);
                 p.bump(T![..]);
                 expr(p);
             }
-            T!['{'] => error_block(p, "expected a field"),
-            _ => p.err_and_bump("expected identifier"),
+            T!['{'] => {
+                error_block(p, "expected a field");
+                m.abandon(p);
+            }
+            _ => {
+                p.err_and_bump("expected identifier");
+                m.abandon(p);
+            }
         }
         if !p.at(T!['}']) {
             p.expect(T![,]);

@@ -187,6 +187,38 @@ impl ast::StructDef {
     }
 }
 
+impl ast::RecordField {
+    pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordField> {
+        eprintln!("field_name = {}", field_name);
+        dbg!(field_name.syntax().ancestors().nth(6));
+        let candidate =
+            field_name.syntax().parent().and_then(ast::RecordField::cast).or_else(|| {
+                field_name.syntax().ancestors().nth(4).and_then(ast::RecordField::cast)
+            })?;
+        if candidate.field_name().as_ref() == Some(field_name) {
+            Some(candidate)
+        } else {
+            None
+        }
+    }
+
+    /// Deals with field init shorthand
+    pub fn field_name(&self) -> Option<ast::NameRef> {
+        if let Some(name_ref) = self.name_ref() {
+            return Some(name_ref);
+        }
+        if let Some(ast::Expr::PathExpr(expr)) = self.expr() {
+            let path = expr.path()?;
+            let segment = path.segment()?;
+            let name_ref = segment.name_ref()?;
+            if path.qualifier().is_none() {
+                return Some(name_ref);
+            }
+        }
+        None
+    }
+}
+
 impl ast::EnumVariant {
     pub fn parent_enum(&self) -> ast::EnumDef {
         self.syntax()

@@ -0,0 +1,49 @@
+SOURCE_FILE@[0; 45)
+  FN_DEF@[0; 44)
+    FN_KW@[0; 2) "fn"
+    WHITESPACE@[2; 3) " "
+    NAME@[3; 7)
+      IDENT@[3; 7) "main"
+    PARAM_LIST@[7; 9)
+      L_PAREN@[7; 8) "("
+      R_PAREN@[8; 9) ")"
+    WHITESPACE@[9; 10) " "
+    BLOCK_EXPR@[10; 44)
+      BLOCK@[10; 44)
+        L_CURLY@[10; 11) "{"
+        WHITESPACE@[11; 16) "\n    "
+        RECORD_LIT@[16; 42)
+          PATH@[16; 17)
+            PATH_SEGMENT@[16; 17)
+              NAME_REF@[16; 17)
+                IDENT@[16; 17) "S"
+          WHITESPACE@[17; 18) " "
+          RECORD_FIELD_LIST@[18; 42)
+            L_CURLY@[18; 19) "{"
+            WHITESPACE@[19; 20) " "
+            RECORD_FIELD@[20; 40)
+              NAME_REF@[20; 25)
+                IDENT@[20; 25) "field"
+              WHITESPACE@[25; 26) " "
+              RANGE_EXPR@[26; 40)
+                DOT2@[26; 28) ".."
+                CALL_EXPR@[28; 40)
+                  PATH_EXPR@[28; 38)
+                    PATH@[28; 38)
+                      PATH@[28; 29)
+                        PATH_SEGMENT@[28; 29)
+                          NAME_REF@[28; 29)
+                            IDENT@[28; 29) "S"
+                      COLON2@[29; 31) "::"
+                      PATH_SEGMENT@[31; 38)
+                        NAME_REF@[31; 38)
+                          IDENT@[31; 38) "default"
+                  ARG_LIST@[38; 40)
+                    L_PAREN@[38; 39) "("
+                    R_PAREN@[39; 40) ")"
+          WHITESPACE@[40; 41) " "
+          R_CURLY@[41; 42) "}"
+        WHITESPACE@[42; 43) "\n"
+        R_CURLY@[43; 44) "}"
+  WHITESPACE@[44; 45) "\n"
+error [25; 25): expected COLON

@@ -0,0 +1,3 @@
+fn main() {
+    S { field ..S::default() }
+}

@@ -35,8 +35,11 @@ SOURCE_FILE@[0; 112)
               L_CURLY@[27; 28) "{"
               WHITESPACE@[28; 29) " "
               RECORD_FIELD@[29; 30)
-                NAME_REF@[29; 30)
-                  IDENT@[29; 30) "x"
+                PATH_EXPR@[29; 30)
+                  PATH@[29; 30)
+                    PATH_SEGMENT@[29; 30)
+                      NAME_REF@[29; 30)
+                        IDENT@[29; 30) "x"
               COMMA@[30; 31) ","
               WHITESPACE@[31; 32) " "
               RECORD_FIELD@[32; 37)
@@ -62,8 +65,11 @@ SOURCE_FILE@[0; 112)
               L_CURLY@[48; 49) "{"
               WHITESPACE@[49; 50) " "
               RECORD_FIELD@[50; 51)
-                NAME_REF@[50; 51)
-                  IDENT@[50; 51) "x"
+                PATH_EXPR@[50; 51)
+                  PATH@[50; 51)
+                    PATH_SEGMENT@[50; 51)
+                      NAME_REF@[50; 51)
+                        IDENT@[50; 51) "x"
               COMMA@[51; 52) ","
               WHITESPACE@[52; 53) " "
               RECORD_FIELD@[53; 58)

@@ -395,7 +395,7 @@ pub fn skip_slow_tests() -> bool {
     should_skip
 }
 
-const REWRITE: bool = true;
+const REWRITE: bool = false;
 
 /// Asserts that `expected` and `actual` strings are equal. If they differ only
 /// in trailing or leading whitespace the test won't fail and