Merge branch 'master' into feature/issue/1856

# Conflicts:
#	crates/ra_assists/src/ast_editor.rs
Alexander Andreev 2019-09-30 12:07:26 +03:00
commit 81efd696cc
53 changed files with 1180 additions and 957 deletions

View file

@ -4,7 +4,7 @@ use hir::db::HirDatabase;
use ra_db::FileRange;
use ra_fmt::{leading_indent, reindent};
use ra_syntax::{
algo::{find_covering_element, find_node_at_offset},
algo::{self, find_covering_element, find_node_at_offset},
AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit,
TokenAtOffset,
};
@ -179,6 +179,10 @@ impl AssistBuilder {
&mut self.edit
}
pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
}
fn build(self) -> AssistAction {
AssistAction {
edit: self.edit.finish(),
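For orientation, a minimal sketch of how an assist can use the new `replace_ast` helper; the assist id, the `old_node`/`new_node` values, and `cursor_position` are placeholders, not code from this commit:

ctx.add_action(AssistId("example_assist"), "example", |edit| {
    // `old_node` is the node currently in the file, `new_node` is a rewritten
    // copy; replace_ast diffs the two trees and records the resulting text edit.
    edit.replace_ast(old_node, new_node);
    edit.set_cursor(cursor_position);
});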

View file

@ -15,7 +15,7 @@ pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| {
let derive_attr = nominal
.attrs()
.filter_map(|x| x.as_call())
.filter_map(|x| x.as_simple_call())
.filter(|(name, _arg)| name == "derive")
.map(|(_name, arg)| arg)
.next();

View file

@ -2,11 +2,11 @@
use hir::{db::HirDatabase, HasSource};
use ra_syntax::{
ast::{self, make, AstNode, NameOwner},
ast::{self, edit, make, AstNode, NameOwner},
SmolStr,
};
use crate::{ast_editor::AstEditor, Assist, AssistCtx, AssistId};
use crate::{Assist, AssistCtx, AssistId};
#[derive(PartialEq)]
enum AddMissingImplMembersMode {
@ -77,30 +77,26 @@ fn add_missing_impl_members_inner(
ctx.add_action(AssistId(assist_id), label, |edit| {
let n_existing_items = impl_item_list.impl_items().count();
let items = missing_items.into_iter().map(|it| match it {
ast::ImplItem::FnDef(def) => strip_docstring(add_body(def).into()),
_ => strip_docstring(it),
});
let mut ast_editor = AstEditor::new(impl_item_list);
ast_editor.append_items(items);
let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap();
let cursor_position = first_new_item.syntax().text_range().start();
ast_editor.into_text_edit(edit.text_edit_builder());
let items = missing_items
.into_iter()
.map(|it| match it {
ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)),
_ => it,
})
.map(|it| edit::strip_attrs_and_docs(&it));
let new_impl_item_list = impl_item_list.append_items(items);
let cursor_position = {
let first_new_item = new_impl_item_list.impl_items().nth(n_existing_items).unwrap();
first_new_item.syntax().text_range().start()
};
edit.replace_ast(impl_item_list, new_impl_item_list);
edit.set_cursor(cursor_position);
});
ctx.build()
}
fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem {
let mut ast_editor = AstEditor::new(item);
ast_editor.strip_attrs_and_docs();
ast_editor.ast().to_owned()
}
fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
if fn_def.body().is_none() {
fn_def.with_body(make::block_from_expr(make::expr_unimplemented()))

View file

@ -2,12 +2,12 @@
use hir::db::HirDatabase;
use ra_syntax::{
ast::{self, make, AstNode, NameOwner, TypeBoundsOwner},
ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner},
SyntaxElement,
SyntaxKind::*,
};
use crate::{ast_editor::AstEditor, Assist, AssistCtx, AssistId};
use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn move_bounds_to_where_clause(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let type_param_list = ctx.node_at_offset::<ast::TypeParamList>()?;
@ -41,14 +41,12 @@ pub(crate) fn move_bounds_to_where_clause(mut ctx: AssistCtx<impl HirDatabase>)
.type_params()
.filter(|it| it.type_bound_list().is_some())
.map(|type_param| {
let without_bounds =
AstEditor::new(type_param.clone()).remove_bounds().ast().clone();
let without_bounds = type_param.remove_bounds();
(type_param, without_bounds)
});
let mut ast_editor = AstEditor::new(type_param_list.clone());
ast_editor.replace_descendants(new_params);
ast_editor.into_text_edit(edit.text_edit_builder());
let new_type_param_list = edit::replace_descendants(&type_param_list, new_params);
edit.replace_ast(type_param_list.clone(), new_type_param_list);
let where_clause = {
let predicates = type_param_list.type_params().filter_map(build_predicate);

View file

@ -1,247 +0,0 @@
//! FIXME: write short doc here
use std::{iter, ops::RangeInclusive};
use arrayvec::ArrayVec;
use rustc_hash::FxHashMap;
use ra_fmt::leading_indent;
use ra_syntax::{
algo,
ast::{self, make::tokens, TypeBoundsOwner},
AstNode, Direction, InsertPosition, SyntaxElement,
SyntaxKind::*,
T,
};
use ra_text_edit::TextEditBuilder;
pub struct AstEditor<N: AstNode> {
original_ast: N,
ast: N,
}
impl<N: AstNode> AstEditor<N> {
pub fn new(node: N) -> AstEditor<N>
where
N: Clone,
{
AstEditor { original_ast: node.clone(), ast: node }
}
pub fn into_text_edit(self, builder: &mut TextEditBuilder) {
for (from, to) in algo::diff(&self.original_ast.syntax(), self.ast().syntax()) {
builder.replace(from.text_range(), to.to_string())
}
}
pub fn ast(&self) -> &N {
&self.ast
}
pub fn replace_descendants<T: AstNode>(
&mut self,
replacement_map: impl Iterator<Item = (T, T)>,
) -> &mut Self {
let map = replacement_map
.map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
.collect::<FxHashMap<_, _>>();
let new_syntax = algo::replace_descendants(self.ast.syntax(), &map);
self.ast = N::cast(new_syntax).unwrap();
self
}
#[must_use]
fn insert_children(
&self,
position: InsertPosition<SyntaxElement>,
mut to_insert: impl Iterator<Item = SyntaxElement>,
) -> N {
let new_syntax = algo::insert_children(self.ast().syntax(), position, &mut to_insert);
N::cast(new_syntax).unwrap()
}
#[must_use]
fn replace_children(
&self,
to_delete: RangeInclusive<SyntaxElement>,
mut to_insert: impl Iterator<Item = SyntaxElement>,
) -> N {
let new_syntax = algo::replace_children(self.ast().syntax(), to_delete, &mut to_insert);
N::cast(new_syntax).unwrap()
}
fn do_make_multiline(&mut self) {
let l_curly =
match self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) {
Some(it) => it,
None => return,
};
let sibling = match l_curly.next_sibling_or_token() {
Some(it) => it,
None => return,
};
let existing_ws = match sibling.as_token() {
None => None,
Some(tok) if tok.kind() != WHITESPACE => None,
Some(ws) => {
if ws.text().contains('\n') {
return;
}
Some(ws.clone())
}
};
let indent = leading_indent(self.ast().syntax()).unwrap_or("".into());
let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
let to_insert = iter::once(ws.ws().into());
self.ast = match existing_ws {
None => self.insert_children(InsertPosition::After(l_curly), to_insert),
Some(ws) => {
self.replace_children(RangeInclusive::new(ws.clone().into(), ws.into()), to_insert)
}
};
}
}
impl AstEditor<ast::RecordFieldList> {
pub fn append_field(&mut self, field: &ast::RecordField) {
self.insert_field(InsertPosition::Last, field)
}
pub fn insert_field(
&mut self,
position: InsertPosition<&'_ ast::RecordField>,
field: &ast::RecordField,
) {
let is_multiline = self.ast().syntax().text().contains_char('\n');
let ws;
let space = if is_multiline {
ws = tokens::WsBuilder::new(&format!(
"\n{} ",
leading_indent(self.ast().syntax()).unwrap_or("".into())
));
ws.ws()
} else {
tokens::single_space()
};
let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
to_insert.push(space.into());
to_insert.push(field.syntax().clone().into());
to_insert.push(tokens::comma().into());
macro_rules! after_l_curly {
() => {{
let anchor = match self.l_curly() {
Some(it) => it,
None => return,
};
InsertPosition::After(anchor)
}};
}
macro_rules! after_field {
($anchor:expr) => {
if let Some(comma) = $anchor
.syntax()
.siblings_with_tokens(Direction::Next)
.find(|it| it.kind() == T![,])
{
InsertPosition::After(comma)
} else {
to_insert.insert(0, tokens::comma().into());
InsertPosition::After($anchor.syntax().clone().into())
}
};
};
let position = match position {
InsertPosition::First => after_l_curly!(),
InsertPosition::Last => {
if !is_multiline {
// don't insert comma before curly
to_insert.pop();
}
match self.ast().fields().last() {
Some(it) => after_field!(it),
None => after_l_curly!(),
}
}
InsertPosition::Before(anchor) => {
InsertPosition::Before(anchor.syntax().clone().into())
}
InsertPosition::After(anchor) => after_field!(anchor),
};
self.ast = self.insert_children(position, to_insert.iter().cloned());
}
fn l_curly(&self) -> Option<SyntaxElement> {
self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
}
}
impl AstEditor<ast::ItemList> {
pub fn append_items(&mut self, items: impl Iterator<Item = ast::ImplItem>) {
if !self.ast().syntax().text().contains_char('\n') {
self.do_make_multiline();
}
items.for_each(|it| self.append_item(it));
}
pub fn append_item(&mut self, item: ast::ImplItem) {
let (indent, position) = match self.ast().impl_items().last() {
Some(it) => (
leading_indent(it.syntax()).unwrap_or_default().to_string(),
InsertPosition::After(it.syntax().clone().into()),
),
None => match self.l_curly() {
Some(it) => (
" ".to_string() + &leading_indent(self.ast().syntax()).unwrap_or_default(),
InsertPosition::After(it),
),
None => return,
},
};
let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
let to_insert: ArrayVec<[SyntaxElement; 2]> =
[ws.ws().into(), item.syntax().clone().into()].into();
self.ast = self.insert_children(position, to_insert.into_iter());
}
fn l_curly(&self) -> Option<SyntaxElement> {
self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
}
}
impl AstEditor<ast::ImplItem> {
pub fn strip_attrs_and_docs(&mut self) {
while let Some(start) = self
.ast()
.syntax()
.children_with_tokens()
.find(|it| it.kind() == ATTR || it.kind() == COMMENT)
{
let end = match &start.next_sibling_or_token() {
Some(el) if el.kind() == WHITESPACE => el.clone(),
Some(_) | None => start.clone(),
};
self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty());
}
}
}
impl AstEditor<ast::TypeParam> {
pub fn remove_bounds(&mut self) -> &mut Self {
let colon = match self.ast.colon_token() {
Some(it) => it,
None => return self,
};
let end = match self.ast.type_bound_list() {
Some(it) => it.syntax().clone().into(),
None => colon.clone().into(),
};
self.ast = self.replace_children(RangeInclusive::new(colon.into(), end), iter::empty());
self
}
}

View file

@ -7,7 +7,6 @@
mod assist_ctx;
mod marks;
pub mod ast_editor;
use hir::db::HirDatabase;
use itertools::Itertools;

View file

@ -32,11 +32,10 @@ pub trait CheckCanceled {
fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
where
Self: Sized,
Self: Sized + panic::RefUnwindSafe,
F: FnOnce(&Self) -> T + panic::UnwindSafe,
{
let this = panic::AssertUnwindSafe(self);
panic::catch_unwind(|| f(*this)).map_err(|err| match err.downcast::<Canceled>() {
panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() {
Ok(canceled) => *canceled,
Err(payload) => panic::resume_unwind(payload),
})
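A hedged sketch of the calling convention, assuming `db` is a database type implementing `CheckCanceled`; `expensive_query` is illustrative:

// Cancellation unwinds out of the closure via `Canceled::throw()`;
// catch_canceled turns that unwind back into a plain Result for the caller.
match db.catch_canceled(|db| expensive_query(db)) {
    Ok(value) => { /* use the computed value */ }
    Err(_canceled) => { /* the query was canceled mid-way; bail out or retry */ }
}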

View file

@ -153,7 +153,7 @@ impl LangItems {
fn lang_item_name<T: AttrsOwner>(node: &T) -> Option<SmolStr> {
node.attrs()
.filter_map(|a| a.as_key_value())
.filter_map(|a| a.as_simple_key_value())
.filter(|(key, _)| key == "lang")
.map(|(_, val)| val)
.nth(0)

View file

@ -355,8 +355,7 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
let name = m.name().map(|it| it.as_name());
let ast_id = self.source_ast_id_map.ast_id(&m);
let export = m.has_atom_attr("macro_export")
|| m.attrs().filter_map(|x| x.as_call()).any(|(name, _)| name == "macro_export");
let export = m.attrs().filter_map(|x| x.simple_name()).any(|name| name == "macro_export");
let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export });
self.push_item(current_module, RawItem::Macro(m));
@ -387,7 +386,7 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
fn extract_mod_path_attribute(module: &ast::Module) -> Option<SmolStr> {
module.attrs().into_iter().find_map(|attr| {
attr.as_key_value().and_then(|(name, value)| {
attr.as_simple_key_value().and_then(|(name, value)| {
let is_path = name == "path";
if is_path {
Some(value)

View file

@ -1,9 +1,8 @@
//! Trait solving using Chalk.
use std::sync::Arc;
use std::sync::{Arc, Mutex};
use chalk_ir::cast::Cast;
use log::debug;
use parking_lot::Mutex;
use ra_db::salsa;
use ra_prof::profile;
use rustc_hash::FxHashSet;
@ -38,7 +37,14 @@ impl TraitSolver {
) -> Option<chalk_solve::Solution> {
let context = ChalkContext { db, krate: self.krate };
debug!("solve goal: {:?}", goal);
let solution = self.inner.lock().solve(&context, goal);
let mut solver = match self.inner.lock() {
Ok(it) => it,
// Our cancellation works via unwinding, but, as chalk is not
// panic-safe, we need to make sure to propagate the cancellation.
// Ideally, we should also make chalk panic-safe.
Err(_) => ra_db::Canceled::throw(),
};
let solution = solver.solve(&context, goal);
debug!("solve({:?}) => {:?}", goal, solution);
solution
}
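For background on why `lock()` needs an error arm at all: unlike the `parking_lot` mutex this replaces, `std::sync::Mutex` is poisoned when a thread panics while holding it, and every later `lock()` then returns `Err`. A self-contained illustration:

use std::sync::Mutex;

fn main() {
    let m = Mutex::new(0);
    // Simulate a cancellation unwinding while the solver lock is held.
    let _ = std::panic::catch_unwind(|| {
        let _guard = m.lock().unwrap();
        panic!("canceled");
    });
    // The mutex is now poisoned; in the change above this Err case is mapped
    // back into ra_db::Canceled::throw().
    assert!(m.lock().is_err());
}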

View file

@ -4,10 +4,10 @@ use std::cell::RefCell;
use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink};
use itertools::Itertools;
use ra_assists::ast_editor::AstEditor;
use ra_db::SourceDatabase;
use ra_prof::profile;
use ra_syntax::{
algo,
ast::{self, make, AstNode},
Location, SyntaxNode, TextRange, T,
};
@ -58,15 +58,15 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
})
})
.on::<hir::diagnostics::MissingFields, _>(|d| {
let node = d.ast(db);
let mut ast_editor = AstEditor::new(node);
let mut field_list = d.ast(db);
for f in d.missed_fields.iter() {
let field = make::record_field(make::name_ref(&f.to_string()), Some(make::expr_unit()));
ast_editor.append_field(&field);
field_list = field_list.append_field(&field);
}
let mut builder = TextEditBuilder::default();
ast_editor.into_text_edit(&mut builder);
algo::diff(&d.ast(db).syntax(), &field_list.syntax()).into_text_edit(&mut builder);
let fix =
SourceChange::source_file_edit_from("fill struct fields", file_id, builder.finish());
res.borrow_mut().push(Diagnostic {

View file

@ -79,7 +79,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
node_range: node.syntax().text_range(),
kind: node.syntax().kind(),
detail,
deprecated: node.attrs().filter_map(|x| x.as_named()).any(|x| x == "deprecated"),
deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"),
})
}

View file

@ -19,7 +19,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.keyword\.unsafe { color: #DFAF8F; }
.keyword\.control { color: #F0DFAF; font-weight: bold; }
</style>
<pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span>
<pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute text">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span>
<span class="keyword">struct</span> <span class="type">Foo</span> {
<span class="keyword">pub</span> <span class="field">x</span>: <span class="type">i32</span>,
<span class="keyword">pub</span> <span class="field">y</span>: <span class="type">i32</span>,

View file

@ -24,8 +24,23 @@ fn attribute(p: &mut Parser, inner: bool) {
p.bump(T![!]);
}
if p.at(T!['[']) {
items::token_tree(p);
if p.eat(T!['[']) {
paths::use_path(p);
match p.current() {
T![=] => {
p.bump(T![=]);
if expressions::literal(p).is_none() {
p.error("expected literal");
}
}
T!['('] | T!['['] | T!['{'] => items::token_tree(p),
_ => {}
}
if !p.eat(T![']']) {
p.error("expected `]`");
}
} else {
p.error("expected `[`");
}

View file

@ -5,6 +5,7 @@ pub mod visit;
use std::ops::RangeInclusive;
use itertools::Itertools;
use ra_text_edit::TextEditBuilder;
use rustc_hash::FxHashMap;
use crate::{
@ -65,6 +66,18 @@ pub enum InsertPosition<T> {
After(T),
}
pub struct TreeDiff {
replacements: FxHashMap<SyntaxElement, SyntaxElement>,
}
impl TreeDiff {
pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
for (from, to) in self.replacements.iter() {
builder.replace(from.text_range(), to.to_string())
}
}
}
/// Finds the minimal diff, which, applied to `from`, will result in `to`.
///
/// Specifically, returns a map whose keys are descendants of `from` and values
@ -72,12 +85,12 @@ pub enum InsertPosition<T> {
///
/// A trivial solution is a singleton map `{ from: to }`, but this function
/// tries to find a more fine-grained diff.
pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> FxHashMap<SyntaxElement, SyntaxElement> {
pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
let mut buf = FxHashMap::default();
// FIXME: this is both horribly inefficient and gives a larger than
// necessary diff. I bet there's a cool algorithm to diff trees properly.
go(&mut buf, from.clone().into(), to.clone().into());
return buf;
return TreeDiff { replacements: buf };
fn go(
buf: &mut FxHashMap<SyntaxElement, SyntaxElement>,
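A minimal sketch of the intended call pattern, with `old` and `new` standing in for any two syntax nodes produced by the caller:

let mut builder = TextEditBuilder::default();
// Diff the two trees and lower the replacements into ordinary text edits.
algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut builder);
let text_edit = builder.finish();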

View file

@ -5,7 +5,7 @@ mod traits;
mod tokens;
mod extensions;
mod expr_extensions;
mod edit;
pub mod edit;
pub mod make;
use std::marker::PhantomData;

View file

@ -1,14 +1,21 @@
//! This module contains functions for editing syntax trees. As the trees are
//! immutable, all functions here return a fresh copy of the tree, instead of
//! doing an in-place modification.
use std::{iter, ops::RangeInclusive};
use arrayvec::ArrayVec;
use std::ops::RangeInclusive;
use rustc_hash::FxHashMap;
use crate::{
algo,
ast::{self, make, AstNode},
InsertPosition, SyntaxElement,
ast::{
self,
make::{self, tokens},
AstNode, TypeBoundsOwner,
},
AstToken, Direction, InsertPosition, SmolStr, SyntaxElement,
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode, T,
};
impl ast::FnDef {
@ -31,6 +38,218 @@ impl ast::FnDef {
}
}
impl ast::ItemList {
#[must_use]
pub fn append_items(&self, items: impl Iterator<Item = ast::ImplItem>) -> ast::ItemList {
let mut res = self.clone();
if !self.syntax().text().contains_char('\n') {
res = res.make_multiline();
}
items.for_each(|it| res = res.append_item(it));
res
}
#[must_use]
pub fn append_item(&self, item: ast::ImplItem) -> ast::ItemList {
let (indent, position) = match self.impl_items().last() {
Some(it) => (
leading_indent(it.syntax()).unwrap_or_default().to_string(),
InsertPosition::After(it.syntax().clone().into()),
),
None => match self.l_curly() {
Some(it) => (
" ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(),
InsertPosition::After(it),
),
None => return self.clone(),
},
};
let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
let to_insert: ArrayVec<[SyntaxElement; 2]> =
[ws.ws().into(), item.syntax().clone().into()].into();
insert_children(self, position, to_insert.into_iter())
}
fn l_curly(&self) -> Option<SyntaxElement> {
self.syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
}
fn make_multiline(&self) -> ast::ItemList {
let l_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) {
Some(it) => it,
None => return self.clone(),
};
let sibling = match l_curly.next_sibling_or_token() {
Some(it) => it,
None => return self.clone(),
};
let existing_ws = match sibling.as_token() {
None => None,
Some(tok) if tok.kind() != WHITESPACE => None,
Some(ws) => {
if ws.text().contains('\n') {
return self.clone();
}
Some(ws.clone())
}
};
let indent = leading_indent(self.syntax()).unwrap_or("".into());
let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
let to_insert = iter::once(ws.ws().into());
match existing_ws {
None => insert_children(self, InsertPosition::After(l_curly), to_insert),
Some(ws) => {
replace_children(self, RangeInclusive::new(ws.clone().into(), ws.into()), to_insert)
}
}
}
}
impl ast::RecordFieldList {
#[must_use]
pub fn append_field(&self, field: &ast::RecordField) -> ast::RecordFieldList {
self.insert_field(InsertPosition::Last, field)
}
#[must_use]
pub fn insert_field(
&self,
position: InsertPosition<&'_ ast::RecordField>,
field: &ast::RecordField,
) -> ast::RecordFieldList {
let is_multiline = self.syntax().text().contains_char('\n');
let ws;
let space = if is_multiline {
ws = tokens::WsBuilder::new(&format!(
"\n{} ",
leading_indent(self.syntax()).unwrap_or("".into())
));
ws.ws()
} else {
tokens::single_space()
};
let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
to_insert.push(space.into());
to_insert.push(field.syntax().clone().into());
to_insert.push(tokens::comma().into());
macro_rules! after_l_curly {
() => {{
let anchor = match self.l_curly() {
Some(it) => it,
None => return self.clone(),
};
InsertPosition::After(anchor)
}};
}
macro_rules! after_field {
($anchor:expr) => {
if let Some(comma) = $anchor
.syntax()
.siblings_with_tokens(Direction::Next)
.find(|it| it.kind() == T![,])
{
InsertPosition::After(comma)
} else {
to_insert.insert(0, tokens::comma().into());
InsertPosition::After($anchor.syntax().clone().into())
}
};
};
let position = match position {
InsertPosition::First => after_l_curly!(),
InsertPosition::Last => {
if !is_multiline {
// don't insert comma before curly
to_insert.pop();
}
match self.fields().last() {
Some(it) => after_field!(it),
None => after_l_curly!(),
}
}
InsertPosition::Before(anchor) => {
InsertPosition::Before(anchor.syntax().clone().into())
}
InsertPosition::After(anchor) => after_field!(anchor),
};
insert_children(self, position, to_insert.iter().cloned())
}
fn l_curly(&self) -> Option<SyntaxElement> {
self.syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
}
}
impl ast::TypeParam {
#[must_use]
pub fn remove_bounds(&self) -> ast::TypeParam {
let colon = match self.colon_token() {
Some(it) => it,
None => return self.clone(),
};
let end = match self.type_bound_list() {
Some(it) => it.syntax().clone().into(),
None => colon.clone().into(),
};
replace_children(self, RangeInclusive::new(colon.into(), end), iter::empty())
}
}
#[must_use]
pub fn strip_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
N::cast(strip_attrs_and_docs_inner(node.syntax().clone())).unwrap()
}
fn strip_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
while let Some(start) =
node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT)
{
let end = match &start.next_sibling_or_token() {
Some(el) if el.kind() == WHITESPACE => el.clone(),
Some(_) | None => start.clone(),
};
node = algo::replace_children(&node, RangeInclusive::new(start, end), &mut iter::empty());
}
node
}
#[must_use]
pub fn replace_descendants<N: AstNode, D: AstNode>(
parent: &N,
replacement_map: impl Iterator<Item = (D, D)>,
) -> N {
let map = replacement_map
.map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
.collect::<FxHashMap<_, _>>();
let new_syntax = algo::replace_descendants(parent.syntax(), &map);
N::cast(new_syntax).unwrap()
}
// Note this is copy-pasted from fmt. It seems like fmt should be a separate
// crate, but basic tree building should be this crate. However, tree building
// might want to call into fmt...
fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
let prev_tokens = std::iter::successors(node.first_token(), |token| token.prev_token());
for token in prev_tokens {
if let Some(ws) = ast::Whitespace::cast(token.clone()) {
let ws_text = ws.text();
if let Some(pos) = ws_text.rfind('\n') {
return Some(ws_text[pos + 1..].into());
}
}
if token.text().contains('\n') {
break;
}
}
None
}
#[must_use]
fn insert_children<N: AstNode>(
parent: &N,
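Together with `make`, these methods support a purely functional editing style. A hedged sketch, where `fields` is an existing `ast::RecordFieldList` obtained from a parsed file:

// Build a fresh field and append it; a new RecordFieldList node is returned
// and the original list is left untouched.
let field = make::record_field(make::name_ref("x"), Some(make::expr_unit()));
let new_fields = fields.append_field(&field);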

View file

@ -1,10 +1,8 @@
//! Various extension methods to ast Nodes, which are hard to code-generate.
//! Extensions for various expressions live in a sibling `expr_extensions` module.
use itertools::Itertools;
use crate::{
ast::{self, child_opt, children, AstNode, SyntaxNode},
ast::{self, child_opt, children, AstChildren, AstNode, AttrInput, SyntaxNode},
SmolStr, SyntaxElement,
SyntaxKind::*,
SyntaxToken, T,
@ -38,62 +36,37 @@ fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
}
impl ast::Attr {
pub fn is_inner(&self) -> bool {
let tt = match self.value() {
None => return false,
Some(tt) => tt,
};
let prev = match tt.syntax().prev_sibling() {
None => return false,
Some(prev) => prev,
};
prev.kind() == T![!]
}
pub fn as_atom(&self) -> Option<SmolStr> {
let tt = self.value()?;
let (_bra, attr, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
if attr.kind() == IDENT {
Some(attr.as_token()?.text().clone())
} else {
None
pub fn as_simple_atom(&self) -> Option<SmolStr> {
match self.input() {
None => self.simple_name(),
Some(_) => None,
}
}
pub fn as_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
let tt = self.value()?;
let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
let args = ast::TokenTree::cast(args.as_node()?.clone())?;
if attr.kind() == IDENT {
Some((attr.as_token()?.text().clone(), args))
} else {
None
pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
match self.input() {
Some(AttrInput::TokenTree(tt)) => Some((self.simple_name()?, tt)),
_ => None,
}
}
pub fn as_named(&self) -> Option<SmolStr> {
let tt = self.value()?;
let attr = tt.syntax().children_with_tokens().nth(1)?;
if attr.kind() == IDENT {
Some(attr.as_token()?.text().clone())
} else {
None
pub fn as_simple_key_value(&self) -> Option<(SmolStr, SmolStr)> {
match self.input() {
Some(AttrInput::Literal(lit)) => {
let key = self.simple_name()?;
// FIXME: escape? raw string?
let value = lit.syntax().first_token()?.text().trim_matches('"').into();
Some((key, value))
}
_ => None,
}
}
pub fn as_key_value(&self) -> Option<(SmolStr, SmolStr)> {
let tt = self.value()?;
let tt_node = tt.syntax();
let attr = tt_node.children_with_tokens().nth(1)?;
if attr.kind() == IDENT {
let key = attr.as_token()?.text().clone();
let val_node = tt_node.children_with_tokens().find(|t| t.kind() == STRING)?;
let val = val_node.as_token()?.text().trim_start_matches('"').trim_end_matches('"');
Some((key, SmolStr::new(val)))
} else {
None
pub fn simple_name(&self) -> Option<SmolStr> {
let path = self.path()?;
match (path.segment(), path.qualifier()) {
(Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
_ => None,
}
}
}
@ -203,6 +176,16 @@ impl ast::ImplBlock {
}
}
impl ast::AttrsOwner for ast::ImplItem {
fn attrs(&self) -> AstChildren<ast::Attr> {
match self {
ast::ImplItem::FnDef(it) => it.attrs(),
ast::ImplItem::TypeAliasDef(it) => it.attrs(),
ast::ImplItem::ConstDef(it) => it.attrs(),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructKind {
Tuple(ast::TupleFieldDefList),
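A hedged sketch of the new helpers applied to an arbitrary `node` implementing `AttrsOwner`; the attribute names are illustrative:

// `#[inline]`          -> simple_name()/as_simple_atom() yield Some("inline")
// `#[derive(Debug)]`   -> as_simple_call() yields Some(("derive", token_tree))
// `#[path = "a.rs"]`   -> as_simple_key_value() yields Some(("path", "a.rs"))
let module_path = node
    .attrs()
    .filter_map(|attr| attr.as_simple_key_value())
    .find(|(key, _value)| key == "path")
    .map(|(_key, value)| value);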

View file

@ -166,11 +166,52 @@ impl AstNode for Attr {
}
}
impl Attr {
pub fn value(&self) -> Option<TokenTree> {
pub fn path(&self) -> Option<Path> {
AstChildren::new(&self.syntax).next()
}
pub fn input(&self) -> Option<AttrInput> {
AstChildren::new(&self.syntax).next()
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
Literal(Literal),
TokenTree(TokenTree),
}
impl From<Literal> for AttrInput {
fn from(node: Literal) -> AttrInput {
AttrInput::Literal(node)
}
}
impl From<TokenTree> for AttrInput {
fn from(node: TokenTree) -> AttrInput {
AttrInput::TokenTree(node)
}
}
impl AstNode for AttrInput {
fn can_cast(kind: SyntaxKind) -> bool {
match kind {
LITERAL | TOKEN_TREE => true,
_ => false,
}
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
let res = match syntax.kind() {
LITERAL => AttrInput::Literal(Literal { syntax }),
TOKEN_TREE => AttrInput::TokenTree(TokenTree { syntax }),
_ => return None,
};
Some(res)
}
fn syntax(&self) -> &SyntaxNode {
match self {
AttrInput::Literal(it) => &it.syntax,
AttrInput::TokenTree(it) => &it.syntax,
}
}
}
impl AttrInput {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AwaitExpr {
pub(crate) syntax: SyntaxNode,
}

View file

@ -99,7 +99,7 @@ pub trait AttrsOwner: AstNode {
children(self)
}
fn has_atom_attr(&self, atom: &str) -> bool {
self.attrs().filter_map(|x| x.as_atom()).any(|x| x == atom)
self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
}
}

View file

@ -576,7 +576,8 @@ Grammar(
traits: [ "NameOwner", "AttrsOwner","DocCommentsOwner" ],
options: [ "TokenTree", "Path" ],
),
"Attr": ( options: [ ["value", "TokenTree"] ] ),
"AttrInput": ( enum: [ "Literal", "TokenTree" ] ),
"Attr": ( options: [ "Path", [ "input", "AttrInput" ] ] ),
"TokenTree": (),
"TypeParamList": (
collections: [

View file

@ -2,20 +2,22 @@ SOURCE_FILE@[0; 54)
FN_DEF@[0; 31)
ATTR@[0; 18)
POUND@[0; 1) "#"
TOKEN_TREE@[1; 18)
L_BRACK@[1; 2) "["
IDENT@[2; 5) "foo"
TOKEN_TREE@[5; 17)
L_PAREN@[5; 6) "("
IDENT@[6; 9) "foo"
COMMA@[9; 10) ","
WHITESPACE@[10; 11) " "
PLUS@[11; 12) "+"
COMMA@[12; 13) ","
WHITESPACE@[13; 14) " "
INT_NUMBER@[14; 16) "92"
R_PAREN@[16; 17) ")"
R_BRACK@[17; 18) "]"
L_BRACK@[1; 2) "["
PATH@[2; 5)
PATH_SEGMENT@[2; 5)
NAME_REF@[2; 5)
IDENT@[2; 5) "foo"
TOKEN_TREE@[5; 17)
L_PAREN@[5; 6) "("
IDENT@[6; 9) "foo"
COMMA@[9; 10) ","
WHITESPACE@[10; 11) " "
PLUS@[11; 12) "+"
COMMA@[12; 13) ","
WHITESPACE@[13; 14) " "
INT_NUMBER@[14; 16) "92"
R_PAREN@[16; 17) ")"
R_BRACK@[17; 18) "]"
WHITESPACE@[18; 19) "\n"
FN_KW@[19; 21) "fn"
WHITESPACE@[21; 22) " "
@ -33,24 +35,26 @@ SOURCE_FILE@[0; 54)
WHITESPACE@[31; 34) "\n\n\n"
ATTR@[34; 53)
POUND@[34; 35) "#"
TOKEN_TREE@[35; 53)
L_BRACK@[35; 36) "["
IDENT@[36; 39) "foo"
TOKEN_TREE@[39; 53)
L_PAREN@[39; 40) "("
WHITESPACE@[40; 41) "\n"
FN_KW@[41; 43) "fn"
WHITESPACE@[43; 44) " "
IDENT@[44; 47) "foo"
TOKEN_TREE@[47; 49)
L_PAREN@[47; 48) "("
R_PAREN@[48; 49) ")"
WHITESPACE@[49; 50) " "
TOKEN_TREE@[50; 53)
L_CURLY@[50; 51) "{"
WHITESPACE@[51; 52) "\n"
R_CURLY@[52; 53) "}"
L_BRACK@[35; 36) "["
PATH@[36; 39)
PATH_SEGMENT@[36; 39)
NAME_REF@[36; 39)
IDENT@[36; 39) "foo"
TOKEN_TREE@[39; 53)
L_PAREN@[39; 40) "("
WHITESPACE@[40; 41) "\n"
FN_KW@[41; 43) "fn"
WHITESPACE@[43; 44) " "
IDENT@[44; 47) "foo"
TOKEN_TREE@[47; 49)
L_PAREN@[47; 48) "("
R_PAREN@[48; 49) ")"
WHITESPACE@[49; 50) " "
TOKEN_TREE@[50; 53)
L_CURLY@[50; 51) "{"
WHITESPACE@[51; 52) "\n"
R_CURLY@[52; 53) "}"
WHITESPACE@[53; 54) "\n"
error 53: expected R_PAREN
error 53: expected R_BRACK
error 53: expected `]`
error 53: expected an item

View file

@ -60,10 +60,12 @@ SOURCE_FILE@[0; 349)
FN_DEF@[95; 348)
ATTR@[95; 102)
POUND@[95; 96) "#"
TOKEN_TREE@[96; 102)
L_BRACK@[96; 97) "["
IDENT@[97; 101) "test"
R_BRACK@[101; 102) "]"
L_BRACK@[96; 97) "["
PATH@[97; 101)
PATH_SEGMENT@[97; 101)
NAME_REF@[97; 101)
IDENT@[97; 101) "test"
R_BRACK@[101; 102) "]"
WHITESPACE@[102; 103) "\n"
FN_KW@[103; 105) "fn"
WHITESPACE@[105; 106) " "

View file

@ -28,14 +28,16 @@ SOURCE_FILE@[0; 350)
ATTR@[39; 83)
POUND@[39; 40) "#"
EXCL@[40; 41) "!"
TOKEN_TREE@[41; 83)
L_BRACK@[41; 42) "["
IDENT@[42; 45) "doc"
TOKEN_TREE@[45; 82)
L_PAREN@[45; 46) "("
STRING@[46; 81) "\"Inner attributes not ..."
R_PAREN@[81; 82) ")"
R_BRACK@[82; 83) "]"
L_BRACK@[41; 42) "["
PATH@[42; 45)
PATH_SEGMENT@[42; 45)
NAME_REF@[42; 45)
IDENT@[42; 45) "doc"
TOKEN_TREE@[45; 82)
L_PAREN@[45; 46) "("
STRING@[46; 81) "\"Inner attributes not ..."
R_PAREN@[81; 82) ")"
R_BRACK@[82; 83) "]"
WHITESPACE@[83; 92) "\n "
COMMENT@[92; 122) "//! Nor are ModuleDoc ..."
WHITESPACE@[122; 127) "\n "
@ -57,26 +59,30 @@ SOURCE_FILE@[0; 350)
ATTR@[152; 171)
POUND@[152; 153) "#"
EXCL@[153; 154) "!"
TOKEN_TREE@[154; 171)
L_BRACK@[154; 155) "["
IDENT@[155; 158) "doc"
TOKEN_TREE@[158; 170)
L_PAREN@[158; 159) "("
STRING@[159; 169) "\"Nor here\""
R_PAREN@[169; 170) ")"
R_BRACK@[170; 171) "]"
L_BRACK@[154; 155) "["
PATH@[155; 158)
PATH_SEGMENT@[155; 158)
NAME_REF@[155; 158)
IDENT@[155; 158) "doc"
TOKEN_TREE@[158; 170)
L_PAREN@[158; 159) "("
STRING@[159; 169) "\"Nor here\""
R_PAREN@[169; 170) ")"
R_BRACK@[170; 171) "]"
WHITESPACE@[171; 180) "\n "
ATTR@[180; 212)
POUND@[180; 181) "#"
EXCL@[181; 182) "!"
TOKEN_TREE@[182; 212)
L_BRACK@[182; 183) "["
IDENT@[183; 186) "doc"
TOKEN_TREE@[186; 211)
L_PAREN@[186; 187) "("
STRING@[187; 210) "\"We error on each attr\""
R_PAREN@[210; 211) ")"
R_BRACK@[211; 212) "]"
L_BRACK@[182; 183) "["
PATH@[183; 186)
PATH_SEGMENT@[183; 186)
NAME_REF@[183; 186)
IDENT@[183; 186) "doc"
TOKEN_TREE@[186; 211)
L_PAREN@[186; 187) "("
STRING@[187; 210) "\"We error on each attr\""
R_PAREN@[210; 211) ")"
R_BRACK@[211; 212) "]"
WHITESPACE@[212; 221) "\n "
COMMENT@[221; 251) "//! Nor are ModuleDoc ..."
WHITESPACE@[251; 256) "\n "
@ -96,14 +102,16 @@ SOURCE_FILE@[0; 350)
ATTR@[283; 302)
POUND@[283; 284) "#"
EXCL@[284; 285) "!"
TOKEN_TREE@[285; 302)
L_BRACK@[285; 286) "["
IDENT@[286; 289) "doc"
TOKEN_TREE@[289; 301)
L_PAREN@[289; 290) "("
STRING@[290; 300) "\"Nor here\""
R_PAREN@[300; 301) ")"
R_BRACK@[301; 302) "]"
L_BRACK@[285; 286) "["
PATH@[286; 289)
PATH_SEGMENT@[286; 289)
NAME_REF@[286; 289)
IDENT@[286; 289) "doc"
TOKEN_TREE@[289; 301)
L_PAREN@[289; 290) "("
STRING@[290; 300) "\"Nor here\""
R_PAREN@[300; 301) ")"
R_BRACK@[301; 302) "]"
WHITESPACE@[302; 311) "\n "
COMMENT@[311; 341) "//! Nor are ModuleDoc ..."
WHITESPACE@[341; 346) "\n "

View file

@ -135,14 +135,16 @@ SOURCE_FILE@[0; 293)
MATCH_ARM@[210; 250)
ATTR@[210; 222)
POUND@[210; 211) "#"
TOKEN_TREE@[211; 222)
L_BRACK@[211; 212) "["
IDENT@[212; 215) "cfg"
TOKEN_TREE@[215; 221)
L_PAREN@[215; 216) "("
IDENT@[216; 220) "test"
R_PAREN@[220; 221) ")"
R_BRACK@[221; 222) "]"
L_BRACK@[211; 212) "["
PATH@[212; 215)
PATH_SEGMENT@[212; 215)
NAME_REF@[212; 215)
IDENT@[212; 215) "cfg"
TOKEN_TREE@[215; 221)
L_PAREN@[215; 216) "("
IDENT@[216; 220) "test"
R_PAREN@[220; 221) ")"
R_BRACK@[221; 222) "]"
WHITESPACE@[222; 231) "\n "
ATTR@[231; 232)
POUND@[231; 232) "#"

View file

@ -47,14 +47,16 @@ SOURCE_FILE@[0; 89)
MATCH_ARM@[68; 80)
ATTR@[68; 80)
POUND@[68; 69) "#"
TOKEN_TREE@[69; 80)
L_BRACK@[69; 70) "["
IDENT@[70; 73) "cfg"
TOKEN_TREE@[73; 79)
L_PAREN@[73; 74) "("
IDENT@[74; 78) "test"
R_PAREN@[78; 79) ")"
R_BRACK@[79; 80) "]"
L_BRACK@[69; 70) "["
PATH@[70; 73)
PATH_SEGMENT@[70; 73)
NAME_REF@[70; 73)
IDENT@[70; 73) "cfg"
TOKEN_TREE@[73; 79)
L_PAREN@[73; 74) "("
IDENT@[74; 78) "test"
R_PAREN@[78; 79) ")"
R_BRACK@[79; 80) "]"
WHITESPACE@[80; 85) "\n "
R_CURLY@[85; 86) "}"
WHITESPACE@[86; 87) "\n"

View file

@ -15,10 +15,12 @@ SOURCE_FILE@[0; 48)
EXPR_STMT@[14; 25)
ATTR@[14; 18)
POUND@[14; 15) "#"
TOKEN_TREE@[15; 18)
L_BRACK@[15; 16) "["
IDENT@[16; 17) "A"
R_BRACK@[17; 18) "]"
L_BRACK@[15; 16) "["
PATH@[16; 17)
PATH_SEGMENT@[16; 17)
NAME_REF@[16; 17)
IDENT@[16; 17) "A"
R_BRACK@[17; 18) "]"
WHITESPACE@[18; 19) " "
BIN_EXPR@[19; 24)
LITERAL@[19; 20)
@ -33,10 +35,12 @@ SOURCE_FILE@[0; 48)
EXPR_STMT@[29; 45)
ATTR@[29; 33)
POUND@[29; 30) "#"
TOKEN_TREE@[30; 33)
L_BRACK@[30; 31) "["
IDENT@[31; 32) "B"
R_BRACK@[32; 33) "]"
L_BRACK@[30; 31) "["
PATH@[31; 32)
PATH_SEGMENT@[31; 32)
NAME_REF@[31; 32)
IDENT@[31; 32) "B"
R_BRACK@[32; 33) "]"
WHITESPACE@[33; 34) " "
IF_EXPR@[34; 44)
IF_KW@[34; 36) "if"

View file

@ -11,18 +11,20 @@ SOURCE_FILE@[0; 64)
RECORD_FIELD_DEF@[15; 60)
ATTR@[15; 43)
POUND@[15; 16) "#"
TOKEN_TREE@[16; 43)
L_BRACK@[16; 17) "["
IDENT@[17; 22) "serde"
TOKEN_TREE@[22; 42)
L_PAREN@[22; 23) "("
IDENT@[23; 27) "with"
WHITESPACE@[27; 28) " "
EQ@[28; 29) "="
WHITESPACE@[29; 30) " "
STRING@[30; 41) "\"url_serde\""
R_PAREN@[41; 42) ")"
R_BRACK@[42; 43) "]"
L_BRACK@[16; 17) "["
PATH@[17; 22)
PATH_SEGMENT@[17; 22)
NAME_REF@[17; 22)
IDENT@[17; 22) "serde"
TOKEN_TREE@[22; 42)
L_PAREN@[22; 23) "("
IDENT@[23; 27) "with"
WHITESPACE@[27; 28) " "
EQ@[28; 29) "="
WHITESPACE@[29; 30) " "
STRING@[30; 41) "\"url_serde\""
R_PAREN@[41; 42) ")"
R_BRACK@[42; 43) "]"
WHITESPACE@[43; 48) "\n "
VISIBILITY@[48; 51)
PUB_KW@[48; 51) "pub"

View file

@ -11,18 +11,20 @@ SOURCE_FILE@[0; 60)
TUPLE_FIELD_DEF@[15; 55)
ATTR@[15; 43)
POUND@[15; 16) "#"
TOKEN_TREE@[16; 43)
L_BRACK@[16; 17) "["
IDENT@[17; 22) "serde"
TOKEN_TREE@[22; 42)
L_PAREN@[22; 23) "("
IDENT@[23; 27) "with"
WHITESPACE@[27; 28) " "
EQ@[28; 29) "="
WHITESPACE@[29; 30) " "
STRING@[30; 41) "\"url_serde\""
R_PAREN@[41; 42) ")"
R_BRACK@[42; 43) "]"
L_BRACK@[16; 17) "["
PATH@[17; 22)
PATH_SEGMENT@[17; 22)
NAME_REF@[17; 22)
IDENT@[17; 22) "serde"
TOKEN_TREE@[22; 42)
L_PAREN@[22; 23) "("
IDENT@[23; 27) "with"
WHITESPACE@[27; 28) " "
EQ@[28; 29) "="
WHITESPACE@[29; 30) " "
STRING@[30; 41) "\"url_serde\""
R_PAREN@[41; 42) ")"
R_BRACK@[42; 43) "]"
WHITESPACE@[43; 48) "\n "
VISIBILITY@[48; 51)
PUB_KW@[48; 51) "pub"

View file

@ -25,14 +25,16 @@ SOURCE_FILE@[0; 94)
ATTR@[54; 91)
POUND@[54; 55) "#"
EXCL@[55; 56) "!"
TOKEN_TREE@[56; 91)
L_BRACK@[56; 57) "["
IDENT@[57; 60) "doc"
TOKEN_TREE@[60; 90)
L_PAREN@[60; 61) "("
STRING@[61; 89) "\"This is also a doc c ..."
R_PAREN@[89; 90) ")"
R_BRACK@[90; 91) "]"
L_BRACK@[56; 57) "["
PATH@[57; 60)
PATH_SEGMENT@[57; 60)
NAME_REF@[57; 60)
IDENT@[57; 60) "doc"
TOKEN_TREE@[60; 90)
L_PAREN@[60; 61) "("
STRING@[61; 89) "\"This is also a doc c ..."
R_PAREN@[89; 90) ")"
R_BRACK@[90; 91) "]"
WHITESPACE@[91; 92) "\n"
R_CURLY@[92; 93) "}"
WHITESPACE@[93; 94) "\n"

View file

@ -25,38 +25,44 @@ SOURCE_FILE@[0; 139)
ATTR@[34; 60)
POUND@[34; 35) "#"
EXCL@[35; 36) "!"
TOKEN_TREE@[36; 60)
L_BRACK@[36; 37) "["
IDENT@[37; 40) "doc"
TOKEN_TREE@[40; 59)
L_PAREN@[40; 41) "("
STRING@[41; 58) "\"Inner attribute\""
R_PAREN@[58; 59) ")"
R_BRACK@[59; 60) "]"
L_BRACK@[36; 37) "["
PATH@[37; 40)
PATH_SEGMENT@[37; 40)
NAME_REF@[37; 40)
IDENT@[37; 40) "doc"
TOKEN_TREE@[40; 59)
L_PAREN@[40; 41) "("
STRING@[41; 58) "\"Inner attribute\""
R_PAREN@[58; 59) ")"
R_BRACK@[59; 60) "]"
WHITESPACE@[60; 69) "\n "
ATTR@[69; 86)
POUND@[69; 70) "#"
EXCL@[70; 71) "!"
TOKEN_TREE@[71; 86)
L_BRACK@[71; 72) "["
IDENT@[72; 75) "doc"
TOKEN_TREE@[75; 85)
L_PAREN@[75; 76) "("
STRING@[76; 84) "\"Can be\""
R_PAREN@[84; 85) ")"
R_BRACK@[85; 86) "]"
L_BRACK@[71; 72) "["
PATH@[72; 75)
PATH_SEGMENT@[72; 75)
NAME_REF@[72; 75)
IDENT@[72; 75) "doc"
TOKEN_TREE@[75; 85)
L_PAREN@[75; 76) "("
STRING@[76; 84) "\"Can be\""
R_PAREN@[84; 85) ")"
R_BRACK@[85; 86) "]"
WHITESPACE@[86; 95) "\n "
ATTR@[95; 113)
POUND@[95; 96) "#"
EXCL@[96; 97) "!"
TOKEN_TREE@[97; 113)
L_BRACK@[97; 98) "["
IDENT@[98; 101) "doc"
TOKEN_TREE@[101; 112)
L_PAREN@[101; 102) "("
STRING@[102; 111) "\"Stacked\""
R_PAREN@[111; 112) ")"
R_BRACK@[112; 113) "]"
L_BRACK@[97; 98) "["
PATH@[98; 101)
PATH_SEGMENT@[98; 101)
NAME_REF@[98; 101)
IDENT@[98; 101) "doc"
TOKEN_TREE@[101; 112)
L_PAREN@[101; 102) "("
STRING@[102; 111) "\"Stacked\""
R_PAREN@[111; 112) ")"
R_BRACK@[112; 113) "]"
WHITESPACE@[113; 122) "\n "
MATCH_ARM@[122; 129)
PLACEHOLDER_PAT@[122; 123)

View file

@ -25,18 +25,20 @@ SOURCE_FILE@[0; 259)
MATCH_ARM@[34; 74)
ATTR@[34; 58)
POUND@[34; 35) "#"
TOKEN_TREE@[35; 58)
L_BRACK@[35; 36) "["
IDENT@[36; 39) "cfg"
TOKEN_TREE@[39; 57)
L_PAREN@[39; 40) "("
IDENT@[40; 47) "feature"
WHITESPACE@[47; 48) " "
EQ@[48; 49) "="
WHITESPACE@[49; 50) " "
STRING@[50; 56) "\"some\""
R_PAREN@[56; 57) ")"
R_BRACK@[57; 58) "]"
L_BRACK@[35; 36) "["
PATH@[36; 39)
PATH_SEGMENT@[36; 39)
NAME_REF@[36; 39)
IDENT@[36; 39) "cfg"
TOKEN_TREE@[39; 57)
L_PAREN@[39; 40) "("
IDENT@[40; 47) "feature"
WHITESPACE@[47; 48) " "
EQ@[48; 49) "="
WHITESPACE@[49; 50) " "
STRING@[50; 56) "\"some\""
R_PAREN@[56; 57) ")"
R_BRACK@[57; 58) "]"
WHITESPACE@[58; 67) "\n "
PLACEHOLDER_PAT@[67; 68)
UNDERSCORE@[67; 68) "_"
@ -51,18 +53,20 @@ SOURCE_FILE@[0; 259)
MATCH_ARM@[84; 125)
ATTR@[84; 109)
POUND@[84; 85) "#"
TOKEN_TREE@[85; 109)
L_BRACK@[85; 86) "["
IDENT@[86; 89) "cfg"
TOKEN_TREE@[89; 108)
L_PAREN@[89; 90) "("
IDENT@[90; 97) "feature"
WHITESPACE@[97; 98) " "
EQ@[98; 99) "="
WHITESPACE@[99; 100) " "
STRING@[100; 107) "\"other\""
R_PAREN@[107; 108) ")"
R_BRACK@[108; 109) "]"
L_BRACK@[85; 86) "["
PATH@[86; 89)
PATH_SEGMENT@[86; 89)
NAME_REF@[86; 89)
IDENT@[86; 89) "cfg"
TOKEN_TREE@[89; 108)
L_PAREN@[89; 90) "("
IDENT@[90; 97) "feature"
WHITESPACE@[97; 98) " "
EQ@[98; 99) "="
WHITESPACE@[99; 100) " "
STRING@[100; 107) "\"other\""
R_PAREN@[107; 108) ")"
R_BRACK@[108; 109) "]"
WHITESPACE@[109; 118) "\n "
PLACEHOLDER_PAT@[118; 119)
UNDERSCORE@[118; 119) "_"
@ -77,48 +81,54 @@ SOURCE_FILE@[0; 259)
MATCH_ARM@[135; 249)
ATTR@[135; 159)
POUND@[135; 136) "#"
TOKEN_TREE@[136; 159)
L_BRACK@[136; 137) "["
IDENT@[137; 140) "cfg"
TOKEN_TREE@[140; 158)
L_PAREN@[140; 141) "("
IDENT@[141; 148) "feature"
WHITESPACE@[148; 149) " "
EQ@[149; 150) "="
WHITESPACE@[150; 151) " "
STRING@[151; 157) "\"many\""
R_PAREN@[157; 158) ")"
R_BRACK@[158; 159) "]"
L_BRACK@[136; 137) "["
PATH@[137; 140)
PATH_SEGMENT@[137; 140)
NAME_REF@[137; 140)
IDENT@[137; 140) "cfg"
TOKEN_TREE@[140; 158)
L_PAREN@[140; 141) "("
IDENT@[141; 148) "feature"
WHITESPACE@[148; 149) " "
EQ@[149; 150) "="
WHITESPACE@[150; 151) " "
STRING@[151; 157) "\"many\""
R_PAREN@[157; 158) ")"
R_BRACK@[158; 159) "]"
WHITESPACE@[159; 168) "\n "
ATTR@[168; 198)
POUND@[168; 169) "#"
TOKEN_TREE@[169; 198)
L_BRACK@[169; 170) "["
IDENT@[170; 173) "cfg"
TOKEN_TREE@[173; 197)
L_PAREN@[173; 174) "("
IDENT@[174; 181) "feature"
WHITESPACE@[181; 182) " "
EQ@[182; 183) "="
WHITESPACE@[183; 184) " "
STRING@[184; 196) "\"attributes\""
R_PAREN@[196; 197) ")"
R_BRACK@[197; 198) "]"
L_BRACK@[169; 170) "["
PATH@[170; 173)
PATH_SEGMENT@[170; 173)
NAME_REF@[170; 173)
IDENT@[170; 173) "cfg"
TOKEN_TREE@[173; 197)
L_PAREN@[173; 174) "("
IDENT@[174; 181) "feature"
WHITESPACE@[181; 182) " "
EQ@[182; 183) "="
WHITESPACE@[183; 184) " "
STRING@[184; 196) "\"attributes\""
R_PAREN@[196; 197) ")"
R_BRACK@[197; 198) "]"
WHITESPACE@[198; 207) "\n "
ATTR@[207; 233)
POUND@[207; 208) "#"
TOKEN_TREE@[208; 233)
L_BRACK@[208; 209) "["
IDENT@[209; 212) "cfg"
TOKEN_TREE@[212; 232)
L_PAREN@[212; 213) "("
IDENT@[213; 220) "feature"
WHITESPACE@[220; 221) " "
EQ@[221; 222) "="
WHITESPACE@[222; 223) " "
STRING@[223; 231) "\"before\""
R_PAREN@[231; 232) ")"
R_BRACK@[232; 233) "]"
L_BRACK@[208; 209) "["
PATH@[209; 212)
PATH_SEGMENT@[209; 212)
NAME_REF@[209; 212)
IDENT@[209; 212) "cfg"
TOKEN_TREE@[212; 232)
L_PAREN@[212; 213) "("
IDENT@[213; 220) "feature"
WHITESPACE@[220; 221) " "
EQ@[221; 222) "="
WHITESPACE@[222; 223) " "
STRING@[223; 231) "\"before\""
R_PAREN@[231; 232) ")"
R_BRACK@[232; 233) "]"
WHITESPACE@[233; 242) "\n "
PLACEHOLDER_PAT@[242; 243)
UNDERSCORE@[242; 243) "_"

View file

@ -9,14 +9,16 @@ SOURCE_FILE@[0; 64)
LIFETIME_PARAM@[7; 29)
ATTR@[7; 26)
POUND@[7; 8) "#"
TOKEN_TREE@[8; 26)
L_BRACK@[8; 9) "["
IDENT@[9; 15) "derive"
TOKEN_TREE@[15; 25)
L_PAREN@[15; 16) "("
IDENT@[16; 24) "Lifetime"
R_PAREN@[24; 25) ")"
R_BRACK@[25; 26) "]"
L_BRACK@[8; 9) "["
PATH@[9; 15)
PATH_SEGMENT@[9; 15)
NAME_REF@[9; 15)
IDENT@[9; 15) "derive"
TOKEN_TREE@[15; 25)
L_PAREN@[15; 16) "("
IDENT@[16; 24) "Lifetime"
R_PAREN@[24; 25) ")"
R_BRACK@[25; 26) "]"
WHITESPACE@[26; 27) " "
LIFETIME@[27; 29) "\'a"
COMMA@[29; 30) ","
@ -24,14 +26,16 @@ SOURCE_FILE@[0; 64)
TYPE_PARAM@[31; 48)
ATTR@[31; 46)
POUND@[31; 32) "#"
TOKEN_TREE@[32; 46)
L_BRACK@[32; 33) "["
IDENT@[33; 39) "derive"
TOKEN_TREE@[39; 45)
L_PAREN@[39; 40) "("
IDENT@[40; 44) "Type"
R_PAREN@[44; 45) ")"
R_BRACK@[45; 46) "]"
L_BRACK@[32; 33) "["
PATH@[33; 39)
PATH_SEGMENT@[33; 39)
NAME_REF@[33; 39)
IDENT@[33; 39) "derive"
TOKEN_TREE@[39; 45)
L_PAREN@[39; 40) "("
IDENT@[40; 44) "Type"
R_PAREN@[44; 45) ")"
R_BRACK@[45; 46) "]"
WHITESPACE@[46; 47) " "
NAME@[47; 48)
IDENT@[47; 48) "T"

View file

@ -24,14 +24,16 @@ SOURCE_FILE@[0; 46)
RECORD_FIELD@[20; 41)
ATTR@[20; 32)
POUND@[20; 21) "#"
TOKEN_TREE@[21; 32)
L_BRACK@[21; 22) "["
IDENT@[22; 25) "cfg"
TOKEN_TREE@[25; 31)
L_PAREN@[25; 26) "("
IDENT@[26; 30) "test"
R_PAREN@[30; 31) ")"
R_BRACK@[31; 32) "]"
L_BRACK@[21; 22) "["
PATH@[22; 25)
PATH_SEGMENT@[22; 25)
NAME_REF@[22; 25)
IDENT@[22; 25) "cfg"
TOKEN_TREE@[25; 31)
L_PAREN@[25; 26) "("
IDENT@[26; 30) "test"
R_PAREN@[30; 31) ")"
R_BRACK@[31; 32) "]"
WHITESPACE@[32; 33) " "
NAME_REF@[33; 38)
IDENT@[33; 38) "field"

View file

@ -15,10 +15,12 @@ SOURCE_FILE@[0; 82)
EXPR_STMT@[15; 26)
ATTR@[15; 19)
POUND@[15; 16) "#"
TOKEN_TREE@[16; 19)
L_BRACK@[16; 17) "["
IDENT@[17; 18) "A"
R_BRACK@[18; 19) "]"
L_BRACK@[16; 17) "["
PATH@[17; 18)
PATH_SEGMENT@[17; 18)
NAME_REF@[17; 18)
IDENT@[17; 18) "A"
R_BRACK@[18; 19) "]"
WHITESPACE@[19; 20) " "
CALL_EXPR@[20; 25)
PATH_EXPR@[20; 23)
@ -34,10 +36,12 @@ SOURCE_FILE@[0; 82)
EXPR_STMT@[31; 42)
ATTR@[31; 35)
POUND@[31; 32) "#"
TOKEN_TREE@[32; 35)
L_BRACK@[32; 33) "["
IDENT@[33; 34) "B"
R_BRACK@[34; 35) "]"
L_BRACK@[32; 33) "["
PATH@[33; 34)
PATH_SEGMENT@[33; 34)
NAME_REF@[33; 34)
IDENT@[33; 34) "B"
R_BRACK@[34; 35) "]"
WHITESPACE@[35; 36) " "
MACRO_CALL@[36; 42)
PATH@[36; 39)
@ -52,17 +56,21 @@ SOURCE_FILE@[0; 82)
EXPR_STMT@[47; 59)
ATTR@[47; 51)
POUND@[47; 48) "#"
TOKEN_TREE@[48; 51)
L_BRACK@[48; 49) "["
IDENT@[49; 50) "C"
R_BRACK@[50; 51) "]"
L_BRACK@[48; 49) "["
PATH@[49; 50)
PATH_SEGMENT@[49; 50)
NAME_REF@[49; 50)
IDENT@[49; 50) "C"
R_BRACK@[50; 51) "]"
WHITESPACE@[51; 52) " "
ATTR@[52; 56)
POUND@[52; 53) "#"
TOKEN_TREE@[53; 56)
L_BRACK@[53; 54) "["
IDENT@[54; 55) "D"
R_BRACK@[55; 56) "]"
L_BRACK@[53; 54) "["
PATH@[54; 55)
PATH_SEGMENT@[54; 55)
NAME_REF@[54; 55)
IDENT@[54; 55) "D"
R_BRACK@[55; 56) "]"
WHITESPACE@[56; 57) " "
BLOCK_EXPR@[57; 59)
BLOCK@[57; 59)
@ -72,10 +80,12 @@ SOURCE_FILE@[0; 82)
EXPR_STMT@[64; 79)
ATTR@[64; 68)
POUND@[64; 65) "#"
TOKEN_TREE@[65; 68)
L_BRACK@[65; 66) "["
IDENT@[66; 67) "D"
R_BRACK@[67; 68) "]"
L_BRACK@[65; 66) "["
PATH@[66; 67)
PATH_SEGMENT@[66; 67)
NAME_REF@[66; 67)
IDENT@[66; 67) "D"
R_BRACK@[67; 68) "]"
WHITESPACE@[68; 69) " "
RETURN_EXPR@[69; 78)
RETURN_KW@[69; 75) "return"

View file

@ -20,10 +20,12 @@ SOURCE_FILE@[0; 47)
TRY_EXPR@[17; 29)
ATTR@[17; 21)
POUND@[17; 18) "#"
TOKEN_TREE@[18; 21)
L_BRACK@[18; 19) "["
IDENT@[19; 20) "A"
R_BRACK@[20; 21) "]"
L_BRACK@[18; 19) "["
PATH@[19; 20)
PATH_SEGMENT@[19; 20)
NAME_REF@[19; 20)
IDENT@[19; 20) "A"
R_BRACK@[20; 21) "]"
WHITESPACE@[21; 22) " "
MACRO_CALL@[22; 28)
PATH@[22; 25)
@ -41,10 +43,12 @@ SOURCE_FILE@[0; 47)
REF_EXPR@[36; 44)
ATTR@[36; 40)
POUND@[36; 37) "#"
TOKEN_TREE@[37; 40)
L_BRACK@[37; 38) "["
IDENT@[38; 39) "B"
R_BRACK@[39; 40) "]"
L_BRACK@[37; 38) "["
PATH@[38; 39)
PATH_SEGMENT@[38; 39)
NAME_REF@[38; 39)
IDENT@[38; 39) "B"
R_BRACK@[39; 40) "]"
WHITESPACE@[40; 41) " "
AMP@[41; 42) "&"
TUPLE_EXPR@[42; 44)

View file

@ -29,14 +29,16 @@ SOURCE_FILE@[0; 56)
WHITESPACE@[24; 28) "\n "
ATTR@[28; 40)
POUND@[28; 29) "#"
TOKEN_TREE@[29; 40)
L_BRACK@[29; 30) "["
IDENT@[30; 33) "cfg"
TOKEN_TREE@[33; 39)
L_PAREN@[33; 34) "("
IDENT@[34; 38) "test"
R_PAREN@[38; 39) ")"
R_BRACK@[39; 40) "]"
L_BRACK@[29; 30) "["
PATH@[30; 33)
PATH_SEGMENT@[30; 33)
NAME_REF@[30; 33)
IDENT@[30; 33) "cfg"
TOKEN_TREE@[33; 39)
L_PAREN@[33; 34) "("
IDENT@[34; 38) "test"
R_PAREN@[38; 39) ")"
R_BRACK@[39; 40) "]"
WHITESPACE@[40; 44) "\n "
LITERAL@[44; 45)
INT_NUMBER@[44; 45) "1"

View file

@ -33,14 +33,16 @@ SOURCE_FILE@[0; 56)
WHITESPACE@[30; 34) "\n "
ATTR@[34; 46)
POUND@[34; 35) "#"
TOKEN_TREE@[35; 46)
L_BRACK@[35; 36) "["
IDENT@[36; 39) "cfg"
TOKEN_TREE@[39; 45)
L_PAREN@[39; 40) "("
IDENT@[40; 44) "test"
R_PAREN@[44; 45) ")"
R_BRACK@[45; 46) "]"
L_BRACK@[35; 36) "["
PATH@[36; 39)
PATH_SEGMENT@[36; 39)
NAME_REF@[36; 39)
IDENT@[36; 39) "cfg"
TOKEN_TREE@[39; 45)
L_PAREN@[39; 40) "("
IDENT@[40; 44) "test"
R_PAREN@[44; 45) ")"
R_BRACK@[45; 46) "]"
WHITESPACE@[46; 50) "\n "
LITERAL@[50; 51)
INT_NUMBER@[50; 51) "2"

View file

@ -8,10 +8,12 @@ SOURCE_FILE@[0; 26)
L_PAREN@[4; 5) "("
ATTR@[5; 16)
POUND@[5; 6) "#"
TOKEN_TREE@[6; 16)
L_BRACK@[6; 7) "["
IDENT@[7; 15) "must_use"
R_BRACK@[15; 16) "]"
L_BRACK@[6; 7) "["
PATH@[7; 15)
PATH_SEGMENT@[7; 15)
NAME_REF@[7; 15)
IDENT@[7; 15) "must_use"
R_BRACK@[15; 16) "]"
WHITESPACE@[16; 17) " "
SELF_PARAM@[17; 21)
SELF_KW@[17; 21) "self"

View file

@ -8,10 +8,12 @@ SOURCE_FILE@[0; 28)
L_PAREN@[4; 5) "("
ATTR@[5; 13)
POUND@[5; 6) "#"
TOKEN_TREE@[6; 13)
L_BRACK@[6; 7) "["
IDENT@[7; 12) "attr1"
R_BRACK@[12; 13) "]"
L_BRACK@[6; 7) "["
PATH@[7; 12)
PATH_SEGMENT@[7; 12)
NAME_REF@[7; 12)
IDENT@[7; 12) "attr1"
R_BRACK@[12; 13) "]"
WHITESPACE@[13; 14) " "
PARAM@[14; 23)
BIND_PAT@[14; 17)

View file

@ -2,163 +2,183 @@ SOURCE_FILE@[0; 236)
ATTR@[0; 8)
POUND@[0; 1) "#"
EXCL@[1; 2) "!"
TOKEN_TREE@[2; 8)
L_BRACK@[2; 3) "["
IDENT@[3; 7) "attr"
R_BRACK@[7; 8) "]"
L_BRACK@[2; 3) "["
PATH@[3; 7)
PATH_SEGMENT@[3; 7)
NAME_REF@[3; 7)
IDENT@[3; 7) "attr"
R_BRACK@[7; 8) "]"
WHITESPACE@[8; 9) "\n"
ATTR@[9; 23)
POUND@[9; 10) "#"
EXCL@[10; 11) "!"
TOKEN_TREE@[11; 23)
L_BRACK@[11; 12) "["
IDENT@[12; 16) "attr"
TOKEN_TREE@[16; 22)
L_PAREN@[16; 17) "("
TRUE_KW@[17; 21) "true"
R_PAREN@[21; 22) ")"
R_BRACK@[22; 23) "]"
L_BRACK@[11; 12) "["
PATH@[12; 16)
PATH_SEGMENT@[12; 16)
NAME_REF@[12; 16)
IDENT@[12; 16) "attr"
TOKEN_TREE@[16; 22)
L_PAREN@[16; 17) "("
TRUE_KW@[17; 21) "true"
R_PAREN@[21; 22) ")"
R_BRACK@[22; 23) "]"
WHITESPACE@[23; 24) "\n"
ATTR@[24; 39)
POUND@[24; 25) "#"
EXCL@[25; 26) "!"
TOKEN_TREE@[26; 39)
L_BRACK@[26; 27) "["
IDENT@[27; 31) "attr"
TOKEN_TREE@[31; 38)
L_PAREN@[31; 32) "("
IDENT@[32; 37) "ident"
R_PAREN@[37; 38) ")"
R_BRACK@[38; 39) "]"
L_BRACK@[26; 27) "["
PATH@[27; 31)
PATH_SEGMENT@[27; 31)
NAME_REF@[27; 31)
IDENT@[27; 31) "attr"
TOKEN_TREE@[31; 38)
L_PAREN@[31; 32) "("
IDENT@[32; 37) "ident"
R_PAREN@[37; 38) ")"
R_BRACK@[38; 39) "]"
WHITESPACE@[39; 40) "\n"
ATTR@[40; 116)
POUND@[40; 41) "#"
EXCL@[41; 42) "!"
TOKEN_TREE@[42; 116)
L_BRACK@[42; 43) "["
IDENT@[43; 47) "attr"
TOKEN_TREE@[47; 115)
L_PAREN@[47; 48) "("
IDENT@[48; 53) "ident"
COMMA@[53; 54) ","
WHITESPACE@[54; 55) " "
INT_NUMBER@[55; 58) "100"
COMMA@[58; 59) ","
WHITESPACE@[59; 60) " "
TRUE_KW@[60; 64) "true"
COMMA@[64; 65) ","
WHITESPACE@[65; 66) " "
STRING@[66; 72) "\"true\""
COMMA@[72; 73) ","
WHITESPACE@[73; 74) " "
IDENT@[74; 79) "ident"
WHITESPACE@[79; 80) " "
EQ@[80; 81) "="
WHITESPACE@[81; 82) " "
INT_NUMBER@[82; 85) "100"
COMMA@[85; 86) ","
WHITESPACE@[86; 87) " "
IDENT@[87; 92) "ident"
WHITESPACE@[92; 93) " "
EQ@[93; 94) "="
WHITESPACE@[94; 95) " "
STRING@[95; 102) "\"hello\""
COMMA@[102; 103) ","
WHITESPACE@[103; 104) " "
IDENT@[104; 109) "ident"
TOKEN_TREE@[109; 114)
L_PAREN@[109; 110) "("
INT_NUMBER@[110; 113) "100"
R_PAREN@[113; 114) ")"
R_PAREN@[114; 115) ")"
R_BRACK@[115; 116) "]"
L_BRACK@[42; 43) "["
PATH@[43; 47)
PATH_SEGMENT@[43; 47)
NAME_REF@[43; 47)
IDENT@[43; 47) "attr"
TOKEN_TREE@[47; 115)
L_PAREN@[47; 48) "("
IDENT@[48; 53) "ident"
COMMA@[53; 54) ","
WHITESPACE@[54; 55) " "
INT_NUMBER@[55; 58) "100"
COMMA@[58; 59) ","
WHITESPACE@[59; 60) " "
TRUE_KW@[60; 64) "true"
COMMA@[64; 65) ","
WHITESPACE@[65; 66) " "
STRING@[66; 72) "\"true\""
COMMA@[72; 73) ","
WHITESPACE@[73; 74) " "
IDENT@[74; 79) "ident"
WHITESPACE@[79; 80) " "
EQ@[80; 81) "="
WHITESPACE@[81; 82) " "
INT_NUMBER@[82; 85) "100"
COMMA@[85; 86) ","
WHITESPACE@[86; 87) " "
IDENT@[87; 92) "ident"
WHITESPACE@[92; 93) " "
EQ@[93; 94) "="
WHITESPACE@[94; 95) " "
STRING@[95; 102) "\"hello\""
COMMA@[102; 103) ","
WHITESPACE@[103; 104) " "
IDENT@[104; 109) "ident"
TOKEN_TREE@[109; 114)
L_PAREN@[109; 110) "("
INT_NUMBER@[110; 113) "100"
R_PAREN@[113; 114) ")"
R_PAREN@[114; 115) ")"
R_BRACK@[115; 116) "]"
WHITESPACE@[116; 117) "\n"
ATTR@[117; 130)
POUND@[117; 118) "#"
EXCL@[118; 119) "!"
TOKEN_TREE@[119; 130)
L_BRACK@[119; 120) "["
IDENT@[120; 124) "attr"
TOKEN_TREE@[124; 129)
L_PAREN@[124; 125) "("
INT_NUMBER@[125; 128) "100"
R_PAREN@[128; 129) ")"
R_BRACK@[129; 130) "]"
L_BRACK@[119; 120) "["
PATH@[120; 124)
PATH_SEGMENT@[120; 124)
NAME_REF@[120; 124)
IDENT@[120; 124) "attr"
TOKEN_TREE@[124; 129)
L_PAREN@[124; 125) "("
INT_NUMBER@[125; 128) "100"
R_PAREN@[128; 129) ")"
R_BRACK@[129; 130) "]"
WHITESPACE@[130; 131) "\n"
ATTR@[131; 155)
POUND@[131; 132) "#"
EXCL@[132; 133) "!"
TOKEN_TREE@[133; 155)
L_BRACK@[133; 134) "["
IDENT@[134; 138) "attr"
TOKEN_TREE@[138; 154)
L_PAREN@[138; 139) "("
IDENT@[139; 146) "enabled"
WHITESPACE@[146; 147) " "
EQ@[147; 148) "="
WHITESPACE@[148; 149) " "
TRUE_KW@[149; 153) "true"
R_PAREN@[153; 154) ")"
R_BRACK@[154; 155) "]"
L_BRACK@[133; 134) "["
PATH@[134; 138)
PATH_SEGMENT@[134; 138)
NAME_REF@[134; 138)
IDENT@[134; 138) "attr"
TOKEN_TREE@[138; 154)
L_PAREN@[138; 139) "("
IDENT@[139; 146) "enabled"
WHITESPACE@[146; 147) " "
EQ@[147; 148) "="
WHITESPACE@[148; 149) " "
TRUE_KW@[149; 153) "true"
R_PAREN@[153; 154) ")"
R_BRACK@[154; 155) "]"
WHITESPACE@[155; 156) "\n"
ATTR@[156; 173)
POUND@[156; 157) "#"
EXCL@[157; 158) "!"
TOKEN_TREE@[158; 173)
L_BRACK@[158; 159) "["
IDENT@[159; 166) "enabled"
TOKEN_TREE@[166; 172)
L_PAREN@[166; 167) "("
TRUE_KW@[167; 171) "true"
R_PAREN@[171; 172) ")"
R_BRACK@[172; 173) "]"
L_BRACK@[158; 159) "["
PATH@[159; 166)
PATH_SEGMENT@[159; 166)
NAME_REF@[159; 166)
IDENT@[159; 166) "enabled"
TOKEN_TREE@[166; 172)
L_PAREN@[166; 167) "("
TRUE_KW@[167; 171) "true"
R_PAREN@[171; 172) ")"
R_BRACK@[172; 173) "]"
WHITESPACE@[173; 174) "\n"
ATTR@[174; 191)
POUND@[174; 175) "#"
EXCL@[175; 176) "!"
TOKEN_TREE@[176; 191)
L_BRACK@[176; 177) "["
IDENT@[177; 181) "attr"
TOKEN_TREE@[181; 190)
L_PAREN@[181; 182) "("
STRING@[182; 189) "\"hello\""
R_PAREN@[189; 190) ")"
R_BRACK@[190; 191) "]"
L_BRACK@[176; 177) "["
PATH@[177; 181)
PATH_SEGMENT@[177; 181)
NAME_REF@[177; 181)
IDENT@[177; 181) "attr"
TOKEN_TREE@[181; 190)
L_PAREN@[181; 182) "("
STRING@[182; 189) "\"hello\""
R_PAREN@[189; 190) ")"
R_BRACK@[190; 191) "]"
WHITESPACE@[191; 192) "\n"
ATTR@[192; 214)
POUND@[192; 193) "#"
EXCL@[193; 194) "!"
TOKEN_TREE@[194; 214)
L_BRACK@[194; 195) "["
IDENT@[195; 199) "repr"
TOKEN_TREE@[199; 213)
L_PAREN@[199; 200) "("
IDENT@[200; 201) "C"
COMMA@[201; 202) ","
WHITESPACE@[202; 203) " "
IDENT@[203; 208) "align"
WHITESPACE@[208; 209) " "
EQ@[209; 210) "="
WHITESPACE@[210; 211) " "
INT_NUMBER@[211; 212) "4"
R_PAREN@[212; 213) ")"
R_BRACK@[213; 214) "]"
L_BRACK@[194; 195) "["
PATH@[195; 199)
PATH_SEGMENT@[195; 199)
NAME_REF@[195; 199)
IDENT@[195; 199) "repr"
TOKEN_TREE@[199; 213)
L_PAREN@[199; 200) "("
IDENT@[200; 201) "C"
COMMA@[201; 202) ","
WHITESPACE@[202; 203) " "
IDENT@[203; 208) "align"
WHITESPACE@[208; 209) " "
EQ@[209; 210) "="
WHITESPACE@[210; 211) " "
INT_NUMBER@[211; 212) "4"
R_PAREN@[212; 213) ")"
R_BRACK@[213; 214) "]"
WHITESPACE@[214; 215) "\n"
ATTR@[215; 236)
POUND@[215; 216) "#"
EXCL@[216; 217) "!"
TOKEN_TREE@[217; 236)
L_BRACK@[217; 218) "["
IDENT@[218; 222) "repr"
TOKEN_TREE@[222; 235)
L_PAREN@[222; 223) "("
IDENT@[223; 224) "C"
COMMA@[224; 225) ","
WHITESPACE@[225; 226) " "
IDENT@[226; 231) "align"
TOKEN_TREE@[231; 234)
L_PAREN@[231; 232) "("
INT_NUMBER@[232; 233) "4"
R_PAREN@[233; 234) ")"
R_PAREN@[234; 235) ")"
R_BRACK@[235; 236) "]"
L_BRACK@[217; 218) "["
PATH@[218; 222)
PATH_SEGMENT@[218; 222)
NAME_REF@[218; 222)
IDENT@[218; 222) "repr"
TOKEN_TREE@[222; 235)
L_PAREN@[222; 223) "("
IDENT@[223; 224) "C"
COMMA@[224; 225) ","
WHITESPACE@[225; 226) " "
IDENT@[226; 231) "align"
TOKEN_TREE@[231; 234)
L_PAREN@[231; 232) "("
INT_NUMBER@[232; 233) "4"
R_PAREN@[233; 234) ")"
R_PAREN@[234; 235) ")"
R_BRACK@[235; 236) "]"

View file

@ -65,10 +65,12 @@ SOURCE_FILE@[0; 118)
ATTR@[79; 87)
POUND@[79; 80) "#"
EXCL@[80; 81) "!"
TOKEN_TREE@[81; 87)
L_BRACK@[81; 82) "["
IDENT@[82; 86) "attr"
R_BRACK@[86; 87) "]"
L_BRACK@[81; 82) "["
PATH@[82; 86)
PATH_SEGMENT@[82; 86)
NAME_REF@[82; 86)
IDENT@[82; 86) "attr"
R_BRACK@[86; 87) "]"
WHITESPACE@[87; 92) "\n "
MODULE@[92; 98)
MOD_KW@[92; 95) "mod"

View file

@ -1,3 +1,6 @@
#[cfg(test)]
#[ignore]
fn foo() {}
#[path = "a.rs"]
mod b;

View file

@ -1,22 +1,26 @@
SOURCE_FILE@[0; 35)
SOURCE_FILE@[0; 60)
FN_DEF@[0; 34)
ATTR@[0; 12)
POUND@[0; 1) "#"
TOKEN_TREE@[1; 12)
L_BRACK@[1; 2) "["
IDENT@[2; 5) "cfg"
TOKEN_TREE@[5; 11)
L_PAREN@[5; 6) "("
IDENT@[6; 10) "test"
R_PAREN@[10; 11) ")"
R_BRACK@[11; 12) "]"
L_BRACK@[1; 2) "["
PATH@[2; 5)
PATH_SEGMENT@[2; 5)
NAME_REF@[2; 5)
IDENT@[2; 5) "cfg"
TOKEN_TREE@[5; 11)
L_PAREN@[5; 6) "("
IDENT@[6; 10) "test"
R_PAREN@[10; 11) ")"
R_BRACK@[11; 12) "]"
WHITESPACE@[12; 13) "\n"
ATTR@[13; 22)
POUND@[13; 14) "#"
TOKEN_TREE@[14; 22)
L_BRACK@[14; 15) "["
IDENT@[15; 21) "ignore"
R_BRACK@[21; 22) "]"
L_BRACK@[14; 15) "["
PATH@[15; 21)
PATH_SEGMENT@[15; 21)
NAME_REF@[15; 21)
IDENT@[15; 21) "ignore"
R_BRACK@[21; 22) "]"
WHITESPACE@[22; 23) "\n"
FN_KW@[23; 25) "fn"
WHITESPACE@[25; 26) " "
@ -30,4 +34,25 @@ SOURCE_FILE@[0; 35)
BLOCK@[32; 34)
L_CURLY@[32; 33) "{"
R_CURLY@[33; 34) "}"
WHITESPACE@[34; 35) "\n"
WHITESPACE@[34; 36) "\n\n"
MODULE@[36; 59)
ATTR@[36; 52)
POUND@[36; 37) "#"
L_BRACK@[37; 38) "["
PATH@[38; 42)
PATH_SEGMENT@[38; 42)
NAME_REF@[38; 42)
IDENT@[38; 42) "path"
WHITESPACE@[42; 43) " "
EQ@[43; 44) "="
WHITESPACE@[44; 45) " "
LITERAL@[45; 51)
STRING@[45; 51) "\"a.rs\""
R_BRACK@[51; 52) "]"
WHITESPACE@[52; 53) "\n"
MOD_KW@[53; 56) "mod"
WHITESPACE@[56; 57) " "
NAME@[57; 58)
IDENT@[57; 58) "b"
SEMI@[58; 59) ";"
WHITESPACE@[59; 60) "\n"

View file

@ -2,15 +2,17 @@ SOURCE_FILE@[0; 23)
FN_DEF@[0; 22)
ATTR@[0; 10)
POUND@[0; 1) "#"
TOKEN_TREE@[1; 10)
L_BRACK@[1; 2) "["
IDENT@[2; 5) "foo"
TOKEN_TREE@[5; 9)
L_PAREN@[5; 6) "("
IDENT@[6; 7) "a"
COMMA@[7; 8) ","
R_PAREN@[8; 9) ")"
R_BRACK@[9; 10) "]"
L_BRACK@[1; 2) "["
PATH@[2; 5)
PATH_SEGMENT@[2; 5)
NAME_REF@[2; 5)
IDENT@[2; 5) "foo"
TOKEN_TREE@[5; 9)
L_PAREN@[5; 6) "("
IDENT@[6; 7) "a"
COMMA@[7; 8) ","
R_PAREN@[8; 9) ")"
R_BRACK@[9; 10) "]"
WHITESPACE@[10; 11) "\n"
FN_KW@[11; 13) "fn"
WHITESPACE@[13; 14) " "

View file

@ -10,62 +10,73 @@ SOURCE_FILE@[0; 3813)
ATTR@[271; 302)
POUND@[271; 272) "#"
EXCL@[272; 273) "!"
TOKEN_TREE@[273; 302)
L_BRACK@[273; 274) "["
IDENT@[274; 279) "allow"
TOKEN_TREE@[279; 301)
L_PAREN@[279; 280) "("
IDENT@[280; 300) "non_camel_case_types"
R_PAREN@[300; 301) ")"
R_BRACK@[301; 302) "]"
L_BRACK@[273; 274) "["
PATH@[274; 279)
PATH_SEGMENT@[274; 279)
NAME_REF@[274; 279)
IDENT@[274; 279) "allow"
TOKEN_TREE@[279; 301)
L_PAREN@[279; 280) "("
IDENT@[280; 300) "non_camel_case_types"
R_PAREN@[300; 301) ")"
R_BRACK@[301; 302) "]"
WHITESPACE@[302; 303) "\n"
ATTR@[303; 323)
POUND@[303; 304) "#"
EXCL@[304; 305) "!"
TOKEN_TREE@[305; 323)
L_BRACK@[305; 306) "["
IDENT@[306; 311) "allow"
TOKEN_TREE@[311; 322)
L_PAREN@[311; 312) "("
IDENT@[312; 321) "dead_code"
R_PAREN@[321; 322) ")"
R_BRACK@[322; 323) "]"
L_BRACK@[305; 306) "["
PATH@[306; 311)
PATH_SEGMENT@[306; 311)
NAME_REF@[306; 311)
IDENT@[306; 311) "allow"
TOKEN_TREE@[311; 322)
L_PAREN@[311; 312) "("
IDENT@[312; 321) "dead_code"
R_PAREN@[321; 322) ")"
R_BRACK@[322; 323) "]"
WHITESPACE@[323; 324) "\n"
ATTR@[324; 351)
POUND@[324; 325) "#"
EXCL@[325; 326) "!"
TOKEN_TREE@[326; 351)
L_BRACK@[326; 327) "["
IDENT@[327; 332) "allow"
TOKEN_TREE@[332; 350)
L_PAREN@[332; 333) "("
IDENT@[333; 349) "unreachable_code"
R_PAREN@[349; 350) ")"
R_BRACK@[350; 351) "]"
L_BRACK@[326; 327) "["
PATH@[327; 332)
PATH_SEGMENT@[327; 332)
NAME_REF@[327; 332)
IDENT@[327; 332) "allow"
TOKEN_TREE@[332; 350)
L_PAREN@[332; 333) "("
IDENT@[333; 349) "unreachable_code"
R_PAREN@[349; 350) ")"
R_BRACK@[350; 351) "]"
WHITESPACE@[351; 352) "\n"
ATTR@[352; 376)
POUND@[352; 353) "#"
EXCL@[353; 354) "!"
TOKEN_TREE@[354; 376)
L_BRACK@[354; 355) "["
IDENT@[355; 360) "allow"
TOKEN_TREE@[360; 375)
L_PAREN@[360; 361) "("
IDENT@[361; 374) "unused_parens"
R_PAREN@[374; 375) ")"
R_BRACK@[375; 376) "]"
L_BRACK@[354; 355) "["
PATH@[355; 360)
PATH_SEGMENT@[355; 360)
NAME_REF@[355; 360)
IDENT@[355; 360) "allow"
TOKEN_TREE@[360; 375)
L_PAREN@[360; 361) "("
IDENT@[361; 374) "unused_parens"
R_PAREN@[374; 375) ")"
R_BRACK@[375; 376) "]"
WHITESPACE@[376; 378) "\n\n"
ATTR@[378; 405)
POUND@[378; 379) "#"
EXCL@[379; 380) "!"
TOKEN_TREE@[380; 405)
L_BRACK@[380; 381) "["
IDENT@[381; 396) "recursion_limit"
WHITESPACE@[396; 397) " "
EQ@[397; 398) "="
WHITESPACE@[398; 399) " "
L_BRACK@[380; 381) "["
PATH@[381; 396)
PATH_SEGMENT@[381; 396)
NAME_REF@[381; 396)
IDENT@[381; 396) "recursion_limit"
WHITESPACE@[396; 397) " "
EQ@[397; 398) "="
WHITESPACE@[398; 399) " "
LITERAL@[399; 404)
STRING@[399; 404) "\"128\""
R_BRACK@[404; 405) "]"
R_BRACK@[404; 405) "]"
WHITESPACE@[405; 407) "\n\n"
USE_ITEM@[407; 427)
USE_KW@[407; 410) "use"

View file

@ -17,18 +17,20 @@ SOURCE_FILE@[0; 166)
LET_STMT@[77; 163)
ATTR@[77; 106)
POUND@[77; 78) "#"
TOKEN_TREE@[78; 106)
L_BRACK@[78; 79) "["
IDENT@[79; 82) "cfg"
TOKEN_TREE@[82; 105)
L_PAREN@[82; 83) "("
IDENT@[83; 90) "feature"
WHITESPACE@[90; 91) " "
EQ@[91; 92) "="
WHITESPACE@[92; 93) " "
STRING@[93; 104) "\"backtrace\""
R_PAREN@[104; 105) ")"
R_BRACK@[105; 106) "]"
L_BRACK@[78; 79) "["
PATH@[79; 82)
PATH_SEGMENT@[79; 82)
NAME_REF@[79; 82)
IDENT@[79; 82) "cfg"
TOKEN_TREE@[82; 105)
L_PAREN@[82; 83) "("
IDENT@[83; 90) "feature"
WHITESPACE@[90; 91) " "
EQ@[91; 92) "="
WHITESPACE@[92; 93) " "
STRING@[93; 104) "\"backtrace\""
R_PAREN@[104; 105) ")"
R_BRACK@[105; 106) "]"
WHITESPACE@[106; 111) "\n "
LET_KW@[111; 114) "let"
WHITESPACE@[114; 115) " "

View file

@ -15,14 +15,16 @@ SOURCE_FILE@[0; 686)
ATTR@[17; 57)
POUND@[17; 18) "#"
EXCL@[18; 19) "!"
TOKEN_TREE@[19; 57)
L_BRACK@[19; 20) "["
IDENT@[20; 23) "doc"
TOKEN_TREE@[23; 56)
L_PAREN@[23; 24) "("
STRING@[24; 55) "\"Inner attributes all ..."
R_PAREN@[55; 56) ")"
R_BRACK@[56; 57) "]"
L_BRACK@[19; 20) "["
PATH@[20; 23)
PATH_SEGMENT@[20; 23)
NAME_REF@[20; 23)
IDENT@[20; 23) "doc"
TOKEN_TREE@[23; 56)
L_PAREN@[23; 24) "("
STRING@[24; 55) "\"Inner attributes all ..."
R_PAREN@[55; 56) ")"
R_BRACK@[56; 57) "]"
WHITESPACE@[57; 62) "\n "
COMMENT@[62; 97) "//! As are ModuleDoc ..."
WHITESPACE@[97; 102) "\n "
@ -34,26 +36,30 @@ SOURCE_FILE@[0; 686)
ATTR@[112; 180)
POUND@[112; 113) "#"
EXCL@[113; 114) "!"
TOKEN_TREE@[114; 180)
L_BRACK@[114; 115) "["
IDENT@[115; 118) "doc"
TOKEN_TREE@[118; 179)
L_PAREN@[118; 119) "("
STRING@[119; 178) "\"Inner attributes are ..."
R_PAREN@[178; 179) ")"
R_BRACK@[179; 180) "]"
L_BRACK@[114; 115) "["
PATH@[115; 118)
PATH_SEGMENT@[115; 118)
NAME_REF@[115; 118)
IDENT@[115; 118) "doc"
TOKEN_TREE@[118; 179)
L_PAREN@[118; 119) "("
STRING@[119; 178) "\"Inner attributes are ..."
R_PAREN@[178; 179) ")"
R_BRACK@[179; 180) "]"
WHITESPACE@[180; 189) "\n "
ATTR@[189; 244)
POUND@[189; 190) "#"
EXCL@[190; 191) "!"
TOKEN_TREE@[191; 244)
L_BRACK@[191; 192) "["
IDENT@[192; 195) "doc"
TOKEN_TREE@[195; 243)
L_PAREN@[195; 196) "("
STRING@[196; 242) "\"Being validated is n ..."
R_PAREN@[242; 243) ")"
R_BRACK@[243; 244) "]"
L_BRACK@[191; 192) "["
PATH@[192; 195)
PATH_SEGMENT@[192; 195)
NAME_REF@[192; 195)
IDENT@[192; 195) "doc"
TOKEN_TREE@[195; 243)
L_PAREN@[195; 196) "("
STRING@[196; 242) "\"Being validated is n ..."
R_PAREN@[242; 243) ")"
R_BRACK@[243; 244) "]"
WHITESPACE@[244; 253) "\n "
COMMENT@[253; 288) "//! As are ModuleDoc ..."
WHITESPACE@[288; 293) "\n "
@ -67,14 +73,16 @@ SOURCE_FILE@[0; 686)
ATTR@[310; 409)
POUND@[310; 311) "#"
EXCL@[311; 312) "!"
TOKEN_TREE@[312; 409)
L_BRACK@[312; 313) "["
IDENT@[313; 316) "doc"
TOKEN_TREE@[316; 408)
L_PAREN@[316; 317) "("
STRING@[317; 407) "\"Inner attributes are ..."
R_PAREN@[407; 408) ")"
R_BRACK@[408; 409) "]"
L_BRACK@[312; 313) "["
PATH@[313; 316)
PATH_SEGMENT@[313; 316)
NAME_REF@[313; 316)
IDENT@[313; 316) "doc"
TOKEN_TREE@[316; 408)
L_PAREN@[316; 317) "("
STRING@[317; 407) "\"Inner attributes are ..."
R_PAREN@[407; 408) ")"
R_BRACK@[408; 409) "]"
WHITESPACE@[409; 418) "\n "
COMMENT@[418; 453) "//! As are ModuleDoc ..."
WHITESPACE@[453; 458) "\n "
@ -154,14 +162,16 @@ SOURCE_FILE@[0; 686)
ATTR@[612; 639)
POUND@[612; 613) "#"
EXCL@[613; 614) "!"
TOKEN_TREE@[614; 639)
L_BRACK@[614; 615) "["
IDENT@[615; 620) "allow"
TOKEN_TREE@[620; 638)
L_PAREN@[620; 621) "("
IDENT@[621; 637) "unused_variables"
R_PAREN@[637; 638) ")"
R_BRACK@[638; 639) "]"
L_BRACK@[614; 615) "["
PATH@[615; 620)
PATH_SEGMENT@[615; 620)
NAME_REF@[615; 620)
IDENT@[615; 620) "allow"
TOKEN_TREE@[620; 638)
L_PAREN@[620; 621) "("
IDENT@[621; 637) "unused_variables"
R_PAREN@[637; 638) ")"
R_BRACK@[638; 639) "]"
WHITESPACE@[639; 640) " "
COMMENT@[640; 677) "// this is `inner_at ..."
WHITESPACE@[677; 682) "\n "

View file

@ -13,14 +13,16 @@ SOURCE_FILE@[0; 87)
ATTR@[47; 84)
POUND@[47; 48) "#"
EXCL@[48; 49) "!"
TOKEN_TREE@[49; 84)
L_BRACK@[49; 50) "["
IDENT@[50; 53) "doc"
TOKEN_TREE@[53; 83)
L_PAREN@[53; 54) "("
STRING@[54; 82) "\"This is also a doc c ..."
R_PAREN@[82; 83) ")"
R_BRACK@[83; 84) "]"
L_BRACK@[49; 50) "["
PATH@[50; 53)
PATH_SEGMENT@[50; 53)
NAME_REF@[50; 53)
IDENT@[50; 53) "doc"
TOKEN_TREE@[53; 83)
L_PAREN@[53; 54) "("
STRING@[54; 82) "\"This is also a doc c ..."
R_PAREN@[82; 83) ")"
R_BRACK@[83; 84) "]"
WHITESPACE@[84; 85) "\n"
R_CURLY@[85; 86) "}"
WHITESPACE@[86; 87) "\n"

View file

@ -8,17 +8,21 @@ SOURCE_FILE@[0; 519)
L_PAREN@[5; 6) "("
ATTR@[6; 14)
POUND@[6; 7) "#"
TOKEN_TREE@[7; 14)
L_BRACK@[7; 8) "["
IDENT@[8; 13) "attr1"
R_BRACK@[13; 14) "]"
L_BRACK@[7; 8) "["
PATH@[8; 13)
PATH_SEGMENT@[8; 13)
NAME_REF@[8; 13)
IDENT@[8; 13) "attr1"
R_BRACK@[13; 14) "]"
WHITESPACE@[14; 15) " "
ATTR@[15; 23)
POUND@[15; 16) "#"
TOKEN_TREE@[16; 23)
L_BRACK@[16; 17) "["
IDENT@[17; 22) "attr2"
R_BRACK@[22; 23) "]"
L_BRACK@[16; 17) "["
PATH@[17; 22)
PATH_SEGMENT@[17; 22)
NAME_REF@[17; 22)
IDENT@[17; 22) "attr2"
R_BRACK@[22; 23) "]"
WHITESPACE@[23; 24) " "
PARAM@[24; 33)
BIND_PAT@[24; 27)
@ -47,10 +51,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[43; 44) "("
ATTR@[44; 52)
POUND@[44; 45) "#"
TOKEN_TREE@[45; 52)
L_BRACK@[45; 46) "["
IDENT@[46; 51) "attr1"
R_BRACK@[51; 52) "]"
L_BRACK@[45; 46) "["
PATH@[46; 51)
PATH_SEGMENT@[46; 51)
NAME_REF@[46; 51)
IDENT@[46; 51) "attr1"
R_BRACK@[51; 52) "]"
WHITESPACE@[52; 53) " "
PARAM@[53; 58)
BIND_PAT@[53; 54)
@ -105,10 +111,12 @@ SOURCE_FILE@[0; 519)
WHITESPACE@[105; 106) " "
ATTR@[106; 113)
POUND@[106; 107) "#"
TOKEN_TREE@[107; 113)
L_BRACK@[107; 108) "["
IDENT@[108; 112) "attr"
R_BRACK@[112; 113) "]"
L_BRACK@[107; 108) "["
PATH@[108; 112)
PATH_SEGMENT@[108; 112)
NAME_REF@[108; 112)
IDENT@[108; 112) "attr"
R_BRACK@[112; 113) "]"
WHITESPACE@[113; 114) " "
DOTDOTDOT@[114; 117) "..."
R_PAREN@[117; 118) ")"
@ -148,10 +156,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[145; 146) "("
ATTR@[146; 153)
POUND@[146; 147) "#"
TOKEN_TREE@[147; 153)
L_BRACK@[147; 148) "["
IDENT@[148; 152) "attr"
R_BRACK@[152; 153) "]"
L_BRACK@[147; 148) "["
PATH@[148; 152)
PATH_SEGMENT@[148; 152)
NAME_REF@[148; 152)
IDENT@[148; 152) "attr"
R_BRACK@[152; 153) "]"
WHITESPACE@[153; 154) " "
PARAM@[154; 166)
REFERENCE_TYPE@[154; 166)
@ -196,10 +206,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[196; 197) "("
ATTR@[197; 204)
POUND@[197; 198) "#"
TOKEN_TREE@[198; 204)
L_BRACK@[198; 199) "["
IDENT@[199; 203) "attr"
R_BRACK@[203; 204) "]"
L_BRACK@[198; 199) "["
PATH@[199; 203)
PATH_SEGMENT@[199; 203)
NAME_REF@[199; 203)
IDENT@[199; 203) "attr"
R_BRACK@[203; 204) "]"
WHITESPACE@[204; 205) " "
PARAM@[205; 211)
PLACEHOLDER_PAT@[205; 206)
@ -216,10 +228,12 @@ SOURCE_FILE@[0; 519)
ATTR@[213; 221)
POUND@[213; 214) "#"
WHITESPACE@[214; 215) " "
TOKEN_TREE@[215; 221)
L_BRACK@[215; 216) "["
IDENT@[216; 220) "attr"
R_BRACK@[220; 221) "]"
L_BRACK@[215; 216) "["
PATH@[216; 220)
PATH_SEGMENT@[216; 220)
NAME_REF@[216; 220)
IDENT@[216; 220) "attr"
R_BRACK@[220; 221) "]"
WHITESPACE@[221; 222) " "
PARAM@[222; 232)
BIND_PAT@[222; 227)
@ -260,10 +274,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[256; 257) "("
ATTR@[257; 268)
POUND@[257; 258) "#"
TOKEN_TREE@[258; 268)
L_BRACK@[258; 259) "["
IDENT@[259; 267) "must_use"
R_BRACK@[267; 268) "]"
L_BRACK@[258; 259) "["
PATH@[259; 267)
PATH_SEGMENT@[259; 267)
NAME_REF@[259; 267)
IDENT@[259; 267) "must_use"
R_BRACK@[267; 268) "]"
WHITESPACE@[268; 269) " "
SELF_PARAM@[269; 273)
SELF_KW@[269; 273) "self"
@ -283,10 +299,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[288; 289) "("
ATTR@[289; 296)
POUND@[289; 290) "#"
TOKEN_TREE@[290; 296)
L_BRACK@[290; 291) "["
IDENT@[291; 295) "attr"
R_BRACK@[295; 296) "]"
L_BRACK@[290; 291) "["
PATH@[291; 295)
PATH_SEGMENT@[291; 295)
NAME_REF@[291; 295)
IDENT@[291; 295) "attr"
R_BRACK@[295; 296) "]"
WHITESPACE@[296; 297) " "
SELF_PARAM@[297; 301)
SELF_KW@[297; 301) "self"
@ -306,10 +324,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[316; 317) "("
ATTR@[317; 324)
POUND@[317; 318) "#"
TOKEN_TREE@[318; 324)
L_BRACK@[318; 319) "["
IDENT@[319; 323) "attr"
R_BRACK@[323; 324) "]"
L_BRACK@[318; 319) "["
PATH@[319; 323)
PATH_SEGMENT@[319; 323)
NAME_REF@[319; 323)
IDENT@[319; 323) "attr"
R_BRACK@[323; 324) "]"
WHITESPACE@[324; 325) " "
SELF_PARAM@[325; 330)
AMP@[325; 326) "&"
@ -335,10 +355,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[349; 350) "("
ATTR@[350; 357)
POUND@[350; 351) "#"
TOKEN_TREE@[351; 357)
L_BRACK@[351; 352) "["
IDENT@[352; 356) "attr"
R_BRACK@[356; 357) "]"
L_BRACK@[351; 352) "["
PATH@[352; 356)
PATH_SEGMENT@[352; 356)
NAME_REF@[352; 356)
IDENT@[352; 356) "attr"
R_BRACK@[356; 357) "]"
WHITESPACE@[357; 358) " "
SELF_PARAM@[358; 367)
AMP@[358; 359) "&"
@ -366,10 +388,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[386; 387) "("
ATTR@[387; 394)
POUND@[387; 388) "#"
TOKEN_TREE@[388; 394)
L_BRACK@[388; 389) "["
IDENT@[389; 393) "attr"
R_BRACK@[393; 394) "]"
L_BRACK@[388; 389) "["
PATH@[389; 393)
PATH_SEGMENT@[389; 393)
NAME_REF@[389; 393)
IDENT@[389; 393) "attr"
R_BRACK@[393; 394) "]"
WHITESPACE@[394; 395) " "
SELF_PARAM@[395; 403)
AMP@[395; 396) "&"
@ -397,10 +421,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[422; 423) "("
ATTR@[423; 430)
POUND@[423; 424) "#"
TOKEN_TREE@[424; 430)
L_BRACK@[424; 425) "["
IDENT@[425; 429) "attr"
R_BRACK@[429; 430) "]"
L_BRACK@[424; 425) "["
PATH@[425; 429)
PATH_SEGMENT@[425; 429)
NAME_REF@[425; 429)
IDENT@[425; 429) "attr"
R_BRACK@[429; 430) "]"
WHITESPACE@[430; 431) " "
SELF_PARAM@[431; 443)
AMP@[431; 432) "&"
@ -425,10 +451,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[457; 458) "("
ATTR@[458; 465)
POUND@[458; 459) "#"
TOKEN_TREE@[459; 465)
L_BRACK@[459; 460) "["
IDENT@[460; 464) "attr"
R_BRACK@[464; 465) "]"
L_BRACK@[459; 460) "["
PATH@[460; 464)
PATH_SEGMENT@[460; 464)
NAME_REF@[460; 464)
IDENT@[460; 464) "attr"
R_BRACK@[464; 465) "]"
WHITESPACE@[465; 466) " "
SELF_PARAM@[466; 476)
SELF_KW@[466; 470) "self"
@ -455,10 +483,12 @@ SOURCE_FILE@[0; 519)
L_PAREN@[490; 491) "("
ATTR@[491; 498)
POUND@[491; 492) "#"
TOKEN_TREE@[492; 498)
L_BRACK@[492; 493) "["
IDENT@[493; 497) "attr"
R_BRACK@[497; 498) "]"
L_BRACK@[492; 493) "["
PATH@[493; 497)
PATH_SEGMENT@[493; 497)
NAME_REF@[493; 497)
IDENT@[493; 497) "attr"
R_BRACK@[497; 498) "]"
WHITESPACE@[498; 499) " "
SELF_PARAM@[499; 513)
SELF_KW@[499; 503) "self"

View file

@ -4,10 +4,12 @@ SOURCE_FILE@[0; 65)
WHITESPACE@[13; 14) "\n"
ATTR@[14; 29)
POUND@[14; 15) "#"
TOKEN_TREE@[15; 29)
L_BRACK@[15; 16) "["
IDENT@[16; 28) "macro_export"
R_BRACK@[28; 29) "]"
L_BRACK@[15; 16) "["
PATH@[16; 28)
PATH_SEGMENT@[16; 28)
NAME_REF@[16; 28)
IDENT@[16; 28) "macro_export"
R_BRACK@[28; 29) "]"
WHITESPACE@[29; 30) "\n"
PATH@[30; 41)
PATH_SEGMENT@[30; 41)

View file

@ -50,7 +50,7 @@ describe('mapRustDiagnosticToVsCode', () => {
].join('\n')
);
assert.strictEqual(diagnostic.code, 'E0053');
assert.strictEqual(diagnostic.tags, undefined);
assert.deepStrictEqual(diagnostic.tags, []);
// No related information
assert.deepStrictEqual(diagnostic.relatedInformation, []);
@ -115,7 +115,7 @@ describe('mapRustDiagnosticToVsCode', () => {
);
assert.strictEqual(diagnostic.code, 'E0061');
assert.strictEqual(diagnostic.source, 'rustc');
assert.strictEqual(diagnostic.tags, undefined);
assert.deepStrictEqual(diagnostic.tags, []);
// One related information for the original definition
const relatedInformation = diagnostic.relatedInformation;
@ -149,7 +149,7 @@ describe('mapRustDiagnosticToVsCode', () => {
].join('\n')
);
assert.strictEqual(diagnostic.code, 'trivially_copy_pass_by_ref');
assert.strictEqual(diagnostic.tags, undefined);
assert.deepStrictEqual(diagnostic.tags, []);
// One related information for the lint definition
const relatedInformation = diagnostic.relatedInformation;
@ -189,7 +189,7 @@ describe('mapRustDiagnosticToVsCode', () => {
);
assert.strictEqual(diagnostic.code, 'E0308');
assert.strictEqual(diagnostic.source, 'rustc');
assert.strictEqual(diagnostic.tags, undefined);
assert.deepStrictEqual(diagnostic.tags, []);
// No related information
assert.deepStrictEqual(diagnostic.relatedInformation, []);
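Note: the expected `tags` value changes from `undefined` to `[]` in these assertions because the mapper now creates the tag array unconditionally and appends tags one by one. A minimal sketch of that accumulation pattern (standalone illustration with a hypothetical helper name and example lint codes, not the extension's actual module layout):

import * as vscode from 'vscode';

// Hypothetical trimmed-down shape of a Rust diagnostic; the real interface
// lives in the extension's diagnostics utilities.
interface RustDiagnostic {
    code?: { code: string };
}

function collectTags(rd: RustDiagnostic): vscode.DiagnosticTag[] {
    // Start from an empty array, mirroring `vd.tags = []` in the mapper.
    const tags: vscode.DiagnosticTag[] = [];
    const code = rd.code ? rd.code.code : undefined;
    // Example lint codes only; the real list lives in isUnusedOrUnnecessary.
    if (code && ['dead_code', 'unused_variables'].includes(code)) {
        tags.push(vscode.DiagnosticTag.Unnecessary);
    }
    if (code === 'deprecated') {
        tags.push(vscode.DiagnosticTag.Deprecated);
    }
    // A diagnostic with no applicable lint ends up with [], never undefined,
    // which is what the updated deepStrictEqual assertions check.
    return tags;
}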

View file

@ -111,6 +111,17 @@ function isUnusedOrUnnecessary(rd: RustDiagnostic): boolean {
].includes(rd.code.code);
}
/**
* Determines if a diagnostic is related to deprecated code
*/
function isDeprecated(rd: RustDiagnostic): boolean {
if (!rd.code) {
return false;
}
return ['deprecated'].includes(rd.code.code);
}
/**
* Converts a Rust child diagnostic to a VsCode related information
*
@ -200,6 +211,7 @@ export function mapRustDiagnosticToVsCode(
vd.source = source;
vd.code = code;
vd.relatedInformation = [];
vd.tags = [];
for (const secondarySpan of secondarySpans) {
const related = mapSecondarySpanToRelated(secondarySpan);
@ -234,7 +246,11 @@ export function mapRustDiagnosticToVsCode(
}
if (isUnusedOrUnnecessary(rd)) {
vd.tags = [vscode.DiagnosticTag.Unnecessary];
vd.tags.push(vscode.DiagnosticTag.Unnecessary);
}
if (isDeprecated(rd)) {
vd.tags.push(vscode.DiagnosticTag.Deprecated);
}
return {