Auto merge of #17195 - Veykril:unsafe-attr, r=Veykril

Implement unsafe attribute parsing
bors 2024-05-13 09:50:52 +00:00
commit 067d9d995b
8 changed files with 534 additions and 11 deletions
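
For context, this is the surface syntax the change teaches rust-analyzer to handle: Rust 2024's unsafe attributes wrap the attribute path and its input in unsafe(...). A minimal sketch using `export_name`, the same attribute the runnables test below exercises (the `simple_ident`/`simple::path` inputs in the new parser fixture are dummy paths that only exist to cover the grammar):

// Rust 2024: attributes with safety implications, such as `export_name`,
// must be wrapped in `unsafe(...)`; previously this was spelled
// `#[export_name = "main"]`.
#[unsafe(export_name = "main")]
fn __cortex_m_rt_main_trampoline_unsafe() {}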


@@ -252,8 +252,18 @@ impl Attr {
Some(Attr { id, path, input, ctxt: span.ctx })
}
fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
let ctxt = tt.first()?.first_span().ctx;
fn from_tt(db: &dyn ExpandDatabase, mut tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
if matches!(tt,
[tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, .. })), ..]
if text == "unsafe"
) {
match tt.get(1) {
Some(tt::TokenTree::Subtree(subtree)) => tt = &subtree.token_trees,
_ => return None,
}
}
let first = &tt.first()?;
let ctxt = first.first_span().ctx;
let path_end = tt
.iter()
.position(|tt| {
@@ -435,7 +445,7 @@ fn inner_attributes(
// Input subtree is: `(cfg, $(attr),+)`
// Split it up into a `cfg` subtree and the `attr` subtrees.
pub fn parse_cfg_attr_input(
fn parse_cfg_attr_input(
subtree: &Subtree,
) -> Option<(&[tt::TokenTree], impl Iterator<Item = &[tt::TokenTree]>)> {
let mut parts = subtree
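
The core of the change in `Attr::from_tt` above is a pre-pass that strips the `unsafe(...)` wrapper before the existing path/input splitting runs: if the token stream starts with an `unsafe` identifier followed by a delimited subtree, processing continues inside that subtree, while a bare `unsafe` with nothing usable after it is rejected. A self-contained sketch of just that unwrapping step, using a simplified token-tree enum rather than the real `tt` crate types (all names here are illustrative):

// Simplified stand-ins for the real `tt::TokenTree` / `tt::Subtree` types.
#[derive(Debug)]
enum Tt {
    Ident(String),
    Subtree(Vec<Tt>),
}

/// If the attribute input starts with `unsafe` followed by a delimited
/// subtree, return the subtree's contents; otherwise return the input
/// unchanged. A bare `unsafe` with no subtree is rejected, mirroring the
/// `return None` in the hunk above.
fn strip_unsafe_wrapper(tt: &[Tt]) -> Option<&[Tt]> {
    match tt {
        [Tt::Ident(kw), rest @ ..] if kw == "unsafe" => match rest.first() {
            Some(Tt::Subtree(inner)) => Some(inner.as_slice()),
            _ => None,
        },
        _ => Some(tt),
    }
}

fn main() {
    // `unsafe(simple_ident)` unwraps to the inner `simple_ident` tokens.
    let wrapped = vec![
        Tt::Ident("unsafe".into()),
        Tt::Subtree(vec![Tt::Ident("simple_ident".into())]),
    ];
    println!("{:?}", strip_unsafe_wrapper(&wrapped));

    // A plain `simple_ident` passes through untouched.
    let plain = vec![Tt::Ident("simple_ident".into())];
    println!("{:?}", strip_unsafe_wrapper(&plain));
}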


@@ -609,6 +609,9 @@ fn main() {}
#[export_name = "main"]
fn __cortex_m_rt_main_trampoline() {}
#[unsafe(export_name = "main")]
fn __cortex_m_rt_main_trampoline_unsafe() {}
#[test]
fn test_foo() {}
@@ -628,13 +631,14 @@ mod not_a_root {
"#,
expect![[r#"
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..253, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"\", kind: Module })",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 78..102, focus_range: 89..97, name: \"test_foo\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 104..155, focus_range: 136..150, name: \"test_full_path\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 157..191, focus_range: 178..186, name: \"test_foo\", kind: Function })",
"(Bench, NavigationTarget { file_id: FileId(0), full_range: 193..215, focus_range: 205..210, name: \"bench\", kind: Function })",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 78..154, focus_range: 113..149, name: \"__cortex_m_rt_main_trampoline_unsafe\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 156..180, focus_range: 167..175, name: \"test_foo\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 182..233, focus_range: 214..228, name: \"test_full_path\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 235..269, focus_range: 256..264, name: \"test_foo\", kind: Function })",
"(Bench, NavigationTarget { file_id: FileId(0), full_range: 271..293, focus_range: 283..288, name: \"bench\", kind: Function })",
]
"#]],
);


@@ -36,8 +36,33 @@ fn attr(p: &mut Parser<'_>, inner: bool) {
attr.complete(p, ATTR);
}
// test metas
// #![simple_ident]
// #![simple::path]
// #![simple_ident_expr = ""]
// #![simple::path::Expr = ""]
// #![simple_ident_tt(a b c)]
// #![simple_ident_tt[a b c]]
// #![simple_ident_tt{a b c}]
// #![simple::path::tt(a b c)]
// #![simple::path::tt[a b c]]
// #![simple::path::tt{a b c}]
// #![unsafe(simple_ident)]
// #![unsafe(simple::path)]
// #![unsafe(simple_ident_expr = "")]
// #![unsafe(simple::path::Expr = "")]
// #![unsafe(simple_ident_tt(a b c))]
// #![unsafe(simple_ident_tt[a b c])]
// #![unsafe(simple_ident_tt{a b c})]
// #![unsafe(simple::path::tt(a b c))]
// #![unsafe(simple::path::tt[a b c])]
// #![unsafe(simple::path::tt{a b c})]
pub(super) fn meta(p: &mut Parser<'_>) {
let meta = p.start();
let is_unsafe = p.eat(T![unsafe]);
if is_unsafe {
p.expect(T!['(']);
}
paths::use_path(p);
match p.current() {
@@ -50,6 +75,9 @@ pub(super) fn meta(p: &mut Parser<'_>) {
T!['('] | T!['['] | T!['{'] => items::token_tree(p),
_ => {}
}
if is_unsafe {
p.expect(T![')']);
}
meta.complete(p, META);
}


@@ -0,0 +1,457 @@
SOURCE_FILE
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_expr"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
STRING "\"\""
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "Expr"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
STRING "\"\""
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_tt"
TOKEN_TREE
L_PAREN "("
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_tt"
TOKEN_TREE
L_BRACK "["
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_BRACK "]"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_tt"
TOKEN_TREE
L_CURLY "{"
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_CURLY "}"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "tt"
TOKEN_TREE
L_PAREN "("
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "tt"
TOKEN_TREE
L_BRACK "["
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_BRACK "]"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "tt"
TOKEN_TREE
L_CURLY "{"
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_CURLY "}"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_expr"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
STRING "\"\""
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "Expr"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
STRING "\"\""
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_tt"
TOKEN_TREE
L_PAREN "("
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_PAREN ")"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_tt"
TOKEN_TREE
L_BRACK "["
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_BRACK "]"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple_ident_tt"
TOKEN_TREE
L_CURLY "{"
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_CURLY "}"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "tt"
TOKEN_TREE
L_PAREN "("
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_PAREN ")"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "tt"
TOKEN_TREE
L_BRACK "["
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_BRACK "]"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"
ATTR
POUND "#"
BANG "!"
L_BRACK "["
META
UNSAFE_KW "unsafe"
L_PAREN "("
PATH
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "simple"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "path"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "tt"
TOKEN_TREE
L_CURLY "{"
IDENT "a"
WHITESPACE " "
IDENT "b"
WHITESPACE " "
IDENT "c"
R_CURLY "}"
R_PAREN ")"
R_BRACK "]"
WHITESPACE "\n"


@@ -0,0 +1,20 @@
#![simple_ident]
#![simple::path]
#![simple_ident_expr = ""]
#![simple::path::Expr = ""]
#![simple_ident_tt(a b c)]
#![simple_ident_tt[a b c]]
#![simple_ident_tt{a b c}]
#![simple::path::tt(a b c)]
#![simple::path::tt[a b c]]
#![simple::path::tt{a b c}]
#![unsafe(simple_ident)]
#![unsafe(simple::path)]
#![unsafe(simple_ident_expr = "")]
#![unsafe(simple::path::Expr = "")]
#![unsafe(simple_ident_tt(a b c))]
#![unsafe(simple_ident_tt[a b c])]
#![unsafe(simple_ident_tt{a b c})]
#![unsafe(simple::path::tt(a b c))]
#![unsafe(simple::path::tt[a b c])]
#![unsafe(simple::path::tt{a b c})]


@@ -314,7 +314,9 @@ Attr =
'#' '!'? '[' Meta ']'
Meta =
Path ('=' Expr | TokenTree)?
'unsafe' '(' Path ('=' Expr | TokenTree)? ')'
| Path ('=' Expr | TokenTree)?
//****************************//
// Statements and Expressions //
@@ -391,7 +393,6 @@ FormatArgsExpr =
FormatArgsArg =
(Name '=')? Expr
# MacroCallExpr
MacroExpr =
MacroCall


@@ -778,7 +778,10 @@ impl Meta {
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
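
With the accessors generated above, downstream code can detect the wrapper purely syntactically. A hedged sketch of a consumer, assuming rust-analyzer's `syntax` crate is in scope; `is_unsafe_attr_meta` is an illustrative helper, not something added by this PR:

use syntax::ast;

/// True if this `Meta` node uses the Rust 2024 wrapper, i.e.
/// `#[unsafe(export_name = "main")]` rather than `#[export_name = "main"]`.
/// Relies only on the accessors shown in the hunk above.
fn is_unsafe_attr_meta(meta: &ast::Meta) -> bool {
    meta.unsafe_token().is_some()
        && meta.l_paren_token().is_some()
        && meta.r_paren_token().is_some()
}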


@@ -9,7 +9,7 @@ use crate::{
pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
if let Some(parent) = block.syntax().parent() {
match parent.kind() {
FN | EXPR_STMT | STMT_LIST => return,
FN | EXPR_STMT | STMT_LIST | MACRO_STMTS => return,
_ => {}
}
}