4131: Switch to text-size r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2020-04-25 10:16:02 +00:00 committed by GitHub
commit 29fc409e7f
439 changed files with 26954 additions and 26996 deletions
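Most of the 439 touched files change mechanically: the `text_unit` crate is replaced by `text-size`, `TextUnit` becomes `TextSize`, and a handful of range helpers are renamed. As an orientation aid, here is a minimal sketch of the old → new mapping (every identifier is taken from the hunks below; the wrapper function itself is hypothetical):

```rust
use text_size::{TextRange, TextSize};

// Hypothetical helper; each line pairs a new `text-size` call with the old
// `text_unit` call it replaces in the diff below.
fn migration_cheat_sheet(text: &str, offset: TextSize) -> TextRange {
    let len = TextSize::of(text);              // was TextUnit::of_str(text)
    let paren = TextSize::of('(');             // was TextUnit::of_char('(')
    let start = offset + TextSize::from(0);    // was TextUnit::from(0)
    let _as_usize: usize = start.into();       // was start.to_usize()
    let range = TextRange::new(start, start + len + paren); // was TextRange::from_to(..)
    let caret = TextRange::empty(start);       // was TextRange::offset_len(start, 0.into())
    assert!(range.contains_range(caret));      // was caret.is_subrange(&range)
    range.cover(caret)                         // was range.extend_to(&caret)
}
```

Ranges also print differently: `{:?}` renders `11..41` where the old output was `[11; 41)`, which is why the inline test snapshots further down are all rewritten and why the test harness switches `{}` to `{:?}`.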

Cargo.lock (generated)
View file

@@ -1180,7 +1180,7 @@ dependencies = [
name = "ra_text_edit"
version = "0.1.0"
dependencies = [
-"text_unit",
+"text-size",
]
[[package]]
@@ -1322,13 +1322,13 @@ dependencies = [
[[package]]
name = "rowan"
-version = "0.9.1"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ea7cadf87a9d8432e85cb4eb86bd2e765ace60c24ef86e79084dcae5d1c5a19"
+checksum = "1e081ed6eacce09e243b619ab90f069c27b0cff8a6d0eb8ad2ec935b65853798"
dependencies = [
"rustc-hash",
"smol_str",
-"text_unit",
+"text-size",
"thin-dst",
]
@@ -1620,14 +1620,14 @@ version = "0.1.0"
dependencies = [
"difference",
"serde_json",
-"text_unit",
+"text-size",
]
[[package]]
-name = "text_unit"
+name = "text-size"
-version = "0.1.10"
+version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20431e104bfecc1a40872578dbc390e10290a0e9c35fffe3ce6f73c15a9dbfc2"
+checksum = "f03e7efdedc3bc78cb2337f1e2785c39e45f5ef762d9e4ebb137fff7380a6d8a"
[[package]]
name = "thin-dst"

View file

@@ -5,7 +5,7 @@ use ra_fmt::{leading_indent, reindent};
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo::{self, find_covering_element, find_node_at_offset},
-AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize,
TokenAtOffset,
};
use ra_text_edit::TextEditBuilder;
@@ -178,7 +178,7 @@ impl<'a> AssistGroup<'a> {
#[derive(Default)]
pub(crate) struct ActionBuilder {
edit: TextEditBuilder,
-cursor_position: Option<TextUnit>,
+cursor_position: Option<TextSize>,
target: Option<TextRange>,
file: AssistFile,
}
@@ -211,12 +211,12 @@ impl ActionBuilder {
}
/// Append specified `text` at the given `offset`
-pub(crate) fn insert(&mut self, offset: TextUnit, text: impl Into<String>) {
+pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
self.edit.insert(offset, text.into())
}
/// Specify desired position of the cursor after the assist is applied.
-pub(crate) fn set_cursor(&mut self, offset: TextUnit) {
+pub(crate) fn set_cursor(&mut self, offset: TextSize) {
self.cursor_position = Some(offset)
}

View file

@@ -2,7 +2,7 @@ use ra_syntax::{
ast::{self, AstNode},
Direction, SmolStr,
SyntaxKind::{IDENT, WHITESPACE},
-TextRange, TextUnit,
+TextRange, TextSize,
};
use stdx::SepBy;
@@ -60,7 +60,6 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
.collect::<Vec<SmolStr>>();
let has_more_derives = !new_attr_input.is_empty();
let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string();
-let new_attr_input_len = new_attr_input.len();
let mut buf = String::new();
buf.push_str("\n\nimpl ");
@@ -70,8 +69,9 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
buf.push_str(" {\n");
let cursor_delta = if has_more_derives {
+let delta = input.syntax().text_range().len() - TextSize::of(&new_attr_input);
edit.replace(input.syntax().text_range(), new_attr_input);
-input.syntax().text_range().len() - TextUnit::from_usize(new_attr_input_len)
+delta
} else {
let attr_range = attr.syntax().text_range();
edit.delete(attr_range);
@@ -81,13 +81,13 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
.next_sibling_or_token()
.filter(|t| t.kind() == WHITESPACE)
.map(|t| t.text_range())
-.unwrap_or_else(|| TextRange::from_to(TextUnit::from(0), TextUnit::from(0)));
+.unwrap_or_else(|| TextRange::new(TextSize::from(0), TextSize::from(0)));
edit.delete(line_break_range);
attr_range.len() + line_break_range.len()
};
-edit.set_cursor(start_offset + TextUnit::of_str(&buf) - cursor_delta);
+edit.set_cursor(start_offset + TextSize::of(&buf) - cursor_delta);
buf.push_str("\n}");
edit.insert(start_offset, buf);
})

View file

@@ -1,7 +1,7 @@
use ra_syntax::{
ast::{self, AstNode, AttrsOwner},
SyntaxKind::{COMMENT, WHITESPACE},
-TextUnit,
+TextSize,
};
use crate::{Assist, AssistCtx, AssistId};
@@ -37,9 +37,9 @@ pub(crate) fn add_derive(ctx: AssistCtx) -> Option<Assist> {
let offset = match derive_attr {
None => {
edit.insert(node_start, "#[derive()]\n");
-node_start + TextUnit::of_str("#[derive(")
+node_start + TextSize::of("#[derive(")
}
-Some(tt) => tt.syntax().text_range().end() - TextUnit::of_char(')'),
+Some(tt) => tt.syntax().text_range().end() - TextSize::of(')'),
};
edit.target(nominal.syntax().text_range());
edit.set_cursor(offset)
@@ -47,7 +47,7 @@ pub(crate) fn add_derive(ctx: AssistCtx) -> Option<Assist> {
}
// Insert `derive` after doc comments.
-fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextUnit> {
+fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextSize> {
let non_ws_child = nominal
.syntax()
.children_with_tokens()

View file

@@ -37,8 +37,8 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> {
let stmt_range = stmt.syntax().text_range();
let eq_range = stmt.eq_token()?.text_range();
// Assist should only be applicable if cursor is between 'let' and '='
-let let_range = TextRange::from_to(stmt_range.start(), eq_range.start());
+let let_range = TextRange::new(stmt_range.start(), eq_range.start());
-let cursor_in_range = ctx.frange.range.is_subrange(&let_range);
+let cursor_in_range = let_range.contains_range(ctx.frange.range);
if !cursor_in_range {
return None;
}
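This hunk also shows the one place where the rename is not purely mechanical: the receiver flips. `text_unit`'s `small.is_subrange(&big)` becomes `big.contains_range(small)` in `text-size`, so the check now reads "the `let` range contains the selection". A minimal sketch of the equivalence, assuming only the `text-size` 1.0 API used in this diff (the helper name and offsets are made up):

```rust
use text_size::{TextRange, TextSize};

// Hypothetical helper mirroring the cursor check above.
fn cursor_between_let_and_eq(selection: TextRange, let_range: TextRange) -> bool {
    // Old (text_unit):  selection.is_subrange(&let_range)
    // New (text-size):  let_range.contains_range(selection)
    let_range.contains_range(selection)
}

fn main() {
    let let_range = TextRange::new(TextSize::from(0), TextSize::from(10));
    let caret = TextRange::empty(TextSize::from(4));
    assert!(cursor_between_let_and_eq(caret, let_range));
}
```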

View file

@@ -1,6 +1,6 @@
use ra_syntax::{
ast::{self, AstNode, NameOwner},
-TextUnit,
+TextSize,
};
use stdx::format_to;
@@ -65,7 +65,7 @@ impl From<{0}> for {1} {{
variant_name
);
edit.insert(start_offset, buf);
-edit.set_cursor(start_offset + TextUnit::of_str("\n\n"));
+edit.set_cursor(start_offset + TextSize::of("\n\n"));
},
)
}

View file

@@ -1,6 +1,6 @@
use ra_syntax::{
ast::{self, AstNode},
-SyntaxKind, SyntaxNode, TextUnit,
+SyntaxKind, SyntaxNode, TextSize,
};
use crate::{Assist, AssistCtx, AssistFile, AssistId};
@@ -69,8 +69,8 @@ pub(crate) fn add_function(ctx: AssistCtx) -> Option<Assist> {
}
struct FunctionTemplate {
-insert_offset: TextUnit,
+insert_offset: TextSize,
-cursor_offset: TextUnit,
+cursor_offset: TextSize,
fn_def: ast::SourceFile,
file: AssistFile,
}
@@ -129,7 +129,7 @@ impl FunctionBuilder {
let fn_def = indent_once.increase_indent(fn_def);
let fn_def = ast::make::add_trailing_newlines(1, fn_def);
let fn_def = indent.increase_indent(fn_def);
-(fn_def, it.syntax().text_range().start() + TextUnit::from_usize(1))
+(fn_def, it.syntax().text_range().start() + TextSize::of('{'))
}
};

View file

@@ -1,6 +1,6 @@
use ra_syntax::{
ast::{self, AstNode, NameOwner, TypeParamsOwner},
-TextUnit,
+TextSize,
};
use stdx::{format_to, SepBy};
@@ -51,7 +51,7 @@ pub(crate) fn add_impl(ctx: AssistCtx) -> Option<Assist> {
format_to!(buf, "<{}>", generic_params)
}
buf.push_str(" {\n");
-edit.set_cursor(start_offset + TextUnit::of_str(&buf));
+edit.set_cursor(start_offset + TextSize::of(&buf));
buf.push_str("\n}");
edit.insert(start_offset, buf);
})

View file

@@ -3,7 +3,7 @@ use ra_syntax::{
ast::{
self, AstNode, NameOwner, StructKind, TypeAscriptionOwner, TypeParamsOwner, VisibilityOwner,
},
-TextUnit, T,
+TextSize, T,
};
use stdx::{format_to, SepBy};
@@ -77,16 +77,16 @@ pub(crate) fn add_new(ctx: AssistCtx) -> Option<Assist> {
.text_range()
.end();
-Some((start, TextUnit::from_usize(1)))
+Some((start, TextSize::of("\n")))
})
.unwrap_or_else(|| {
buf = generate_impl_text(&strukt, &buf);
let start = strukt.syntax().text_range().end();
-(start, TextUnit::from_usize(3))
+(start, TextSize::of("\n}\n"))
});
-edit.set_cursor(start_offset + TextUnit::of_str(&buf) - end_offset);
+edit.set_cursor(start_offset + TextSize::of(&buf) - end_offset);
edit.insert(start_offset, buf);
})
}

View file

@@ -26,7 +26,7 @@ pub(crate) fn apply_demorgan(ctx: AssistCtx) -> Option<Assist> {
let op = expr.op_kind()?;
let op_range = expr.op_token()?.text_range();
let opposite_op = opposite_logic_op(op)?;
-let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
+let cursor_in_range = op_range.contains_range(ctx.frange.range);
if !cursor_in_range {
return None;
}

View file

@@ -5,7 +5,7 @@ use ra_syntax::{
ATTR, COMMENT, CONST_DEF, ENUM_DEF, FN_DEF, MODULE, STRUCT_DEF, TRAIT_DEF, VISIBILITY,
WHITESPACE,
},
-SyntaxNode, TextUnit, T,
+SyntaxNode, TextSize, T,
};
use crate::{Assist, AssistCtx, AssistId};
@@ -67,7 +67,7 @@ fn add_vis(ctx: AssistCtx) -> Option<Assist> {
})
}
-fn vis_offset(node: &SyntaxNode) -> TextUnit {
+fn vis_offset(node: &SyntaxNode) -> TextSize {
node.children_with_tokens()
.skip_while(|it| match it.kind() {
WHITESPACE | COMMENT | ATTR => true,

View file

@@ -23,7 +23,7 @@ pub(crate) fn flip_binexpr(ctx: AssistCtx) -> Option<Assist> {
let rhs = expr.rhs()?.syntax().clone();
let op_range = expr.op_token()?.text_range();
// The assist should be applied only if the cursor is on the operator
-let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
+let cursor_in_range = op_range.contains_range(ctx.frange.range);
if !cursor_in_range {
return None;
}

View file

@@ -52,7 +52,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
.next_sibling_or_token()
.and_then(|it| ast::Whitespace::cast(it.as_token()?.clone()))
{
-TextRange::from_to(
+TextRange::new(
let_stmt.syntax().text_range().start(),
whitespace.syntax().text_range().end(),
)

View file

@@ -4,7 +4,7 @@ use ra_syntax::{
BLOCK_EXPR, BREAK_EXPR, COMMENT, LAMBDA_EXPR, LOOP_EXPR, MATCH_ARM, PATH_EXPR, RETURN_EXPR,
WHITESPACE,
},
-SyntaxNode, TextUnit,
+SyntaxNode, TextSize,
};
use stdx::format_to;
use test_utils::tested_by;
@@ -47,10 +47,10 @@ pub(crate) fn introduce_variable(ctx: AssistCtx) -> Option<Assist> {
let cursor_offset = if wrap_in_block {
buf.push_str("{ let var_name = ");
-TextUnit::of_str("{ let ")
+TextSize::of("{ let ")
} else {
buf.push_str("let var_name = ");
-TextUnit::of_str("let ")
+TextSize::of("let ")
};
format_to!(buf, "{}", expr.syntax());
let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());

View file

@@ -28,7 +28,7 @@ pub(crate) fn invert_if(ctx: AssistCtx) -> Option<Assist> {
let if_keyword = ctx.find_token_at_offset(T![if])?;
let expr = ast::IfExpr::cast(if_keyword.parent())?;
let if_range = if_keyword.text_range();
-let cursor_in_range = ctx.frange.range.is_subrange(&if_range);
+let cursor_in_range = if_range.contains_range(ctx.frange.range);
if !cursor_in_range {
return None;
}

View file

@@ -3,7 +3,7 @@ use std::iter::successors;
use ra_syntax::{
algo::neighbor,
ast::{self, AstNode},
-Direction, TextUnit,
+Direction, TextSize,
};
use crate::{Assist, AssistCtx, AssistId, TextRange};
@@ -42,8 +42,8 @@ pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> {
let current_text_range = current_arm.syntax().text_range();
enum CursorPos {
-InExpr(TextUnit),
+InExpr(TextSize),
-InPat(TextUnit),
+InPat(TextSize),
}
let cursor_pos = ctx.frange.range.start();
let cursor_pos = if current_expr.syntax().text_range().contains(cursor_pos) {
@@ -89,10 +89,10 @@ pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> {
edit.target(current_text_range);
edit.set_cursor(match cursor_pos {
-CursorPos::InExpr(back_offset) => start + TextUnit::from_usize(arm.len()) - back_offset,
+CursorPos::InExpr(back_offset) => start + TextSize::of(&arm) - back_offset,
CursorPos::InPat(offset) => offset,
});
-edit.replace(TextRange::from_to(start, end), arm);
+edit.replace(TextRange::new(start, end), arm);
})
}

View file

@@ -1,7 +1,7 @@
use ra_syntax::{
ast,
ast::{AstNode, AstToken, IfExpr, MatchArm},
-TextUnit,
+TextSize,
};
use crate::{Assist, AssistCtx, AssistId};
@@ -49,16 +49,16 @@ pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx) -> Option<Assist> {
edit.delete(ele);
ele.len()
} else {
-TextUnit::from(0)
+TextSize::from(0)
}
}
-_ => TextUnit::from(0),
+_ => TextSize::from(0),
};
edit.delete(guard.syntax().text_range());
edit.replace_node_and_indent(arm_expr.syntax(), buf);
edit.set_cursor(
-arm_expr.syntax().text_range().start() + TextUnit::from(3) - offseting_amount,
+arm_expr.syntax().text_range().start() + TextSize::from(3) - offseting_amount,
);
})
}
@@ -123,7 +123,7 @@ pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
}
edit.insert(match_pat.syntax().text_range().end(), buf);
-edit.set_cursor(match_pat.syntax().text_range().end() + TextUnit::from(1));
+edit.set_cursor(match_pat.syntax().text_range().end() + TextSize::from(1));
},
)
}

View file

@@ -2,7 +2,7 @@ use ra_syntax::{
ast::{self, HasStringValue},
AstToken,
SyntaxKind::{RAW_STRING, STRING},
-TextUnit,
+TextSize,
};
use crate::{Assist, AssistCtx, AssistId};
@@ -81,7 +81,7 @@ pub(crate) fn add_hash(ctx: AssistCtx) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING)?;
ctx.add_assist(AssistId("add_hash"), "Add # to raw string", |edit| {
edit.target(token.text_range());
-edit.insert(token.text_range().start() + TextUnit::of_char('r'), "#");
+edit.insert(token.text_range().start() + TextSize::of('r'), "#");
edit.insert(token.text_range().end(), "#");
})
}

View file

@@ -1,6 +1,6 @@
use ra_syntax::{
ast::{self, AstNode},
-TextUnit, T,
+TextSize, T,
};
use crate::{Assist, AssistCtx, AssistId};
@@ -38,9 +38,9 @@ pub(crate) fn remove_dbg(ctx: AssistCtx) -> Option<Assist> {
let offset_start = file_range
.start()
.checked_sub(macro_range.start())
-.unwrap_or_else(|| TextUnit::from(0));
+.unwrap_or_else(|| TextSize::from(0));
-let dbg_size = TextUnit::of_str("dbg!(");
+let dbg_size = TextSize::of("dbg!(");
if offset_start > dbg_size {
file_range.start() - dbg_size
@@ -53,7 +53,7 @@ pub(crate) fn remove_dbg(ctx: AssistCtx) -> Option<Assist> {
let macro_args = macro_call.token_tree()?.syntax().clone();
let text = macro_args.text();
-let without_parens = TextUnit::of_char('(')..text.len() - TextUnit::of_char(')');
+let without_parens = TextSize::of('(')..text.len() - TextSize::of(')');
text.slice(without_parens).to_string()
};

View file

@@ -27,6 +27,6 @@ pub(crate) fn remove_mut(ctx: AssistCtx) -> Option<Assist> {
ctx.add_assist(AssistId("remove_mut"), "Remove `mut` keyword", |edit| {
edit.set_cursor(delete_from);
-edit.delete(TextRange::from_to(delete_from, delete_to));
+edit.delete(TextRange::new(delete_from, delete_to));
})
}

View file

@@ -43,7 +43,7 @@ pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist>
if let Some(last) = path.segment() {
// Here we are assuming the assist will provide a correct use statement
// so we can delete the path qualifier
-edit.delete(TextRange::from_to(
+edit.delete(TextRange::new(
path.syntax().text_range().start(),
last.syntax().text_range().start(),
));

View file

@@ -19,7 +19,7 @@ pub mod ast_transform;
use ra_db::{FileId, FileRange};
use ra_ide_db::RootDatabase;
-use ra_syntax::{TextRange, TextUnit};
+use ra_syntax::{TextRange, TextSize};
use ra_text_edit::TextEdit;
pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
@@ -51,7 +51,7 @@ impl AssistLabel {
#[derive(Debug, Clone)]
pub struct AssistAction {
pub edit: TextEdit,
-pub cursor_position: Option<TextUnit>,
+pub cursor_position: Option<TextSize>,
// FIXME: This belongs to `AssistLabel`
pub target: Option<TextRange>,
pub file: AssistFile,
@@ -104,7 +104,7 @@ pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssi
.flat_map(|it| it.0)
.map(|it| it.into_resolved().unwrap())
.collect::<Vec<_>>();
-a.sort_by_key(|it| it.action.target.map_or(TextUnit::from(!0u32), |it| it.len()));
+a.sort_by_key(|it| it.action.target.map_or(TextSize::from(!0u32), |it| it.len()));
a
}
@@ -308,8 +308,7 @@ mod tests {
let before = "struct Foo { <|>bar: u32 }";
let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = helpers::with_single_file(&before);
-let frange =
-    FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
+let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
let assists = resolved_assists(&db, frange);
let mut assists = assists.iter();

View file

@@ -6,7 +6,7 @@ pub mod fixture;
use std::{panic, sync::Arc};
use ra_prof::profile;
-use ra_syntax::{ast, Parse, SourceFile, TextRange, TextUnit};
+use ra_syntax::{ast, Parse, SourceFile, TextRange, TextSize};
pub use crate::{
cancellation::Canceled,
@@ -75,7 +75,7 @@ impl<T: salsa::Database> CheckCanceled for T {
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
-pub offset: TextUnit,
+pub offset: TextSize,
}
#[derive(Clone, Copy, Debug)]

View file

@@ -14,7 +14,7 @@ use ra_db::{FileId, FileRange};
use ra_prof::profile;
use ra_syntax::{
algo::{find_node_at_offset, skip_trivia_token},
-ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use rustc_hash::{FxHashMap, FxHashSet};
@@ -95,7 +95,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
let token = successors(Some(parent.with_value(token)), |token| {
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
let tt = macro_call.token_tree()?;
-if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
+if !tt.syntax().text_range().contains_range(token.value.text_range()) {
return None;
}
let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
@@ -114,7 +114,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn descend_node_at_offset<N: ast::AstNode>(
&self,
node: &SyntaxNode,
-offset: TextUnit,
+offset: TextSize,
) -> Option<N> {
// Handle macro token cases
node.token_at_offset(offset)
@@ -142,7 +142,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn ancestors_at_offset_with_macros(
&self,
node: &SyntaxNode,
-offset: TextUnit,
+offset: TextSize,
) -> impl Iterator<Item = SyntaxNode> + '_ {
node.token_at_offset(offset)
.map(|token| self.ancestors_with_macros(token.parent()))
@@ -154,7 +154,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn find_node_at_offset_with_macros<N: AstNode>(
&self,
node: &SyntaxNode,
-offset: TextUnit,
+offset: TextSize,
) -> Option<N> {
self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
}
@@ -164,7 +164,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn find_node_at_offset_with_descend<N: AstNode>(
&self,
node: &SyntaxNode,
-offset: TextUnit,
+offset: TextSize,
) -> Option<N> {
if let Some(it) = find_node_at_offset(&node, offset) {
return Some(it);
@@ -255,7 +255,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
SemanticsScope { db: self.db, resolver }
}
-pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
+pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db, DB> {
let node = self.find_file(node.clone());
let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
SemanticsScope { db: self.db, resolver }
@@ -271,7 +271,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.analyze2(src.as_ref(), None)
}
-fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
+fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
let _p = profile("Semantics::analyze2");
let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
@@ -463,7 +463,7 @@ fn original_range_opt(
return None;
}
-Some(first.with_value(first.value.text_range().extend_to(&last.value.text_range())))
+Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
})?)
}

View file

@@ -23,7 +23,7 @@ use hir_ty::{
};
use ra_syntax::{
ast::{self, AstNode},
-SyntaxNode, TextRange, TextUnit,
+SyntaxNode, TextRange, TextSize,
};
use crate::{
@@ -50,7 +50,7 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
def: DefWithBodyId,
node: InFile<&SyntaxNode>,
-offset: Option<TextUnit>,
+offset: Option<TextSize>,
) -> SourceAnalyzer {
let (body, source_map) = db.body_with_source_map(def);
let scopes = db.expr_scopes(def);
@@ -318,7 +318,7 @@ fn scope_for_offset(
db: &dyn HirDatabase,
scopes: &ExprScopes,
source_map: &BodySourceMap,
-offset: InFile<TextUnit>,
+offset: InFile<TextSize>,
) -> Option<ScopeId> {
scopes
.scope_by_expr()
@@ -354,7 +354,7 @@ fn adjust(
source_map: &BodySourceMap,
expr_range: TextRange,
file_id: HirFileId,
-offset: TextUnit,
+offset: TextSize,
) -> Option<ScopeId> {
let child_scopes = scopes
.scope_by_expr()
@@ -369,15 +369,15 @@ fn adjust(
let node = source.value.to_node(&root);
Some((node.syntax().text_range(), scope))
})
-.filter(|(range, _)| {
+.filter(|&(range, _)| {
-range.start() <= offset && range.is_subrange(&expr_range) && *range != expr_range
+range.start() <= offset && expr_range.contains_range(range) && range != expr_range
});
child_scopes
-.max_by(|(r1, _), (r2, _)| {
+.max_by(|&(r1, _), &(r2, _)| {
-if r2.is_subrange(&r1) {
+if r1.contains_range(r2) {
std::cmp::Ordering::Greater
-} else if r1.is_subrange(&r2) {
+} else if r2.contains_range(r1) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())

View file

@@ -194,7 +194,7 @@ mod tests {
let (off, code) = extract_offset(code);
let code = {
let mut buf = String::new();
-let off = off.to_usize();
+let off: usize = off.into();
buf.push_str(&code[..off]);
buf.push_str("marker");
buf.push_str(&code[off..]);

View file

@@ -2,7 +2,7 @@
use crate::db::AstDatabase;
use crate::{
ast::{self, AstToken, HasStringValue},
-name, AstId, CrateId, MacroDefId, MacroDefKind, TextUnit,
+name, AstId, CrateId, MacroDefId, MacroDefKind, TextSize,
};
use crate::{quote, EagerMacroId, LazyMacroId, MacroCallId};
@@ -127,7 +127,7 @@ fn stringify_expand(
let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
let macro_args = arg;
let text = macro_args.text();
-let without_parens = TextUnit::of_char('(')..text.len() - TextUnit::of_char(')');
+let without_parens = TextSize::of('(')..text.len() - TextSize::of(')');
text.slice(without_parens).to_string()
};

View file

@@ -22,7 +22,7 @@ use ra_db::{impl_intern_key, salsa, CrateId, FileId};
use ra_syntax::{
algo,
ast::{self, AstNode},
-SyntaxNode, SyntaxToken, TextUnit,
+SyntaxNode, SyntaxToken, TextSize,
};
use crate::ast_id_map::FileAstId;
@@ -348,7 +348,7 @@ impl<N: AstNode> AstId<N> {
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
-/// * `InFile<TextUnit>` -- offset in a file
+/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFile<T> {
pub file_id: HirFileId,

View file

@@ -117,7 +117,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
format_to!(
buf,
-"{}{} '{}': {}\n",
+"{}{:?} '{}': {}\n",
macro_prefix,
range,
ellipsize(text, 15),
@@ -134,7 +134,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
format_to!(
buf,
-"{}{}: expected {}, got {}\n",
+"{}{:?}: expected {}, got {}\n",
macro_prefix,
range,
mismatch.expected.display(&db),
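The `{}` → `{:?}` switch in these format strings is the reason every inline snapshot below is rewritten: `text-size` ranges are formatted through `Debug` as `start..end`, where the old `text_unit` range displayed as `[start; end)`. A minimal sketch, assuming the `text-size` 1.0 API and made-up offsets:

```rust
use text_size::{TextRange, TextSize};

fn main() {
    let range = TextRange::new(TextSize::from(11), TextSize::from(41));
    // Old snapshots read `[11; 41) '…': ()`; the new ones read `11..41 '…': ()`.
    assert_eq!(format!("{:?}", range), "11..41");
}
```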

View file

@ -29,10 +29,10 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 41) '{ ...4 }; }': () 11..41 '{ ...4 }; }': ()
[21; 22) 'a': i32 21..22 'a': i32
[30; 38) '{ 1i64 }': i64 30..38 '{ 1i64 }': i64
[32; 36) '1i64': i64 32..36 '1i64': i64
"###); "###);
} }
@ -63,50 +63,50 @@ fn test2() {
} }
"#), "#),
@r###" @r###"
[31; 32) '_': &[T] 31..32 '_': &[T]
[45; 56) '{ loop {} }': T 45..56 '{ loop {} }': T
[47; 54) 'loop {}': ! 47..54 'loop {}': !
[52; 54) '{}': () 52..54 '{}': ()
[65; 66) '_': S<&[T]> 65..66 '_': S<&[T]>
[82; 93) '{ loop {} }': T 82..93 '{ loop {} }': T
[84; 91) 'loop {}': ! 84..91 'loop {}': !
[89; 91) '{}': () 89..91 '{}': ()
[122; 133) '{ loop {} }': *mut [T; _] 122..133 '{ loop {} }': *mut [T; _]
[124; 131) 'loop {}': ! 124..131 'loop {}': !
[129; 131) '{}': () 129..131 '{}': ()
[160; 173) '{ gen() }': *mut [U] 160..173 '{ gen() }': *mut [U]
[166; 169) 'gen': fn gen<U>() -> *mut [U; _] 166..169 'gen': fn gen<U>() -> *mut [U; _]
[166; 171) 'gen()': *mut [U; _] 166..171 'gen()': *mut [U; _]
[186; 420) '{ ...rr); }': () 186..420 '{ ...rr); }': ()
[196; 199) 'arr': &[u8; _] 196..199 'arr': &[u8; _]
[212; 216) '&[1]': &[u8; _] 212..216 '&[1]': &[u8; _]
[213; 216) '[1]': [u8; _] 213..216 '[1]': [u8; _]
[214; 215) '1': u8 214..215 '1': u8
[227; 228) 'a': &[u8] 227..228 'a': &[u8]
[237; 240) 'arr': &[u8; _] 237..240 'arr': &[u8; _]
[250; 251) 'b': u8 250..251 'b': u8
[254; 255) 'f': fn f<u8>(&[u8]) -> u8 254..255 'f': fn f<u8>(&[u8]) -> u8
[254; 260) 'f(arr)': u8 254..260 'f(arr)': u8
[256; 259) 'arr': &[u8; _] 256..259 'arr': &[u8; _]
[270; 271) 'c': &[u8] 270..271 'c': &[u8]
[280; 287) '{ arr }': &[u8] 280..287 '{ arr }': &[u8]
[282; 285) 'arr': &[u8; _] 282..285 'arr': &[u8; _]
[297; 298) 'd': u8 297..298 'd': u8
[301; 302) 'g': fn g<u8>(S<&[u8]>) -> u8 301..302 'g': fn g<u8>(S<&[u8]>) -> u8
[301; 316) 'g(S { a: arr })': u8 301..316 'g(S { a: arr })': u8
[303; 315) 'S { a: arr }': S<&[u8]> 303..315 'S { a: arr }': S<&[u8]>
[310; 313) 'arr': &[u8; _] 310..313 'arr': &[u8; _]
[326; 327) 'e': [&[u8]; _] 326..327 'e': [&[u8]; _]
[341; 346) '[arr]': [&[u8]; _] 341..346 '[arr]': [&[u8]; _]
[342; 345) 'arr': &[u8; _] 342..345 'arr': &[u8; _]
[356; 357) 'f': [&[u8]; _] 356..357 'f': [&[u8]; _]
[371; 379) '[arr; 2]': [&[u8]; _] 371..379 '[arr; 2]': [&[u8]; _]
[372; 375) 'arr': &[u8; _] 372..375 'arr': &[u8; _]
[377; 378) '2': usize 377..378 '2': usize
[389; 390) 'g': (&[u8], &[u8]) 389..390 'g': (&[u8], &[u8])
[407; 417) '(arr, arr)': (&[u8], &[u8]) 407..417 '(arr, arr)': (&[u8], &[u8])
[408; 411) 'arr': &[u8; _] 408..411 'arr': &[u8; _]
[413; 416) 'arr': &[u8; _] 413..416 'arr': &[u8; _]
"### "###
); );
} }
@ -120,11 +120,11 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 40) '{ ...[1]; }': () 11..40 '{ ...[1]; }': ()
[21; 22) 'x': &[i32] 21..22 'x': &[i32]
[33; 37) '&[1]': &[i32; _] 33..37 '&[1]': &[i32; _]
[34; 37) '[1]': [i32; _] 34..37 '[1]': [i32; _]
[35; 36) '1': i32 35..36 '1': i32
"###); "###);
} }
@ -150,31 +150,31 @@ fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
} }
"#), "#),
@r###" @r###"
[258; 259) 'x': A<[T]> 258..259 'x': A<[T]>
[279; 284) '{ x }': A<[T]> 279..284 '{ x }': A<[T]>
[281; 282) 'x': A<[T]> 281..282 'x': A<[T]>
[296; 297) 'x': B<[T]> 296..297 'x': B<[T]>
[317; 322) '{ x }': B<[T]> 317..322 '{ x }': B<[T]>
[319; 320) 'x': B<[T]> 319..320 'x': B<[T]>
[334; 335) 'x': C<[T]> 334..335 'x': C<[T]>
[355; 360) '{ x }': C<[T]> 355..360 '{ x }': C<[T]>
[357; 358) 'x': C<[T]> 357..358 'x': C<[T]>
[370; 371) 'a': A<[u8; _]> 370..371 'a': A<[u8; _]>
[385; 386) 'b': B<[u8; _]> 385..386 'b': B<[u8; _]>
[400; 401) 'c': C<[u8; _]> 400..401 'c': C<[u8; _]>
[415; 481) '{ ...(c); }': () 415..481 '{ ...(c); }': ()
[425; 426) 'd': A<[{unknown}]> 425..426 'd': A<[{unknown}]>
[429; 433) 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]> 429..433 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
[429; 436) 'foo1(a)': A<[{unknown}]> 429..436 'foo1(a)': A<[{unknown}]>
[434; 435) 'a': A<[u8; _]> 434..435 'a': A<[u8; _]>
[446; 447) 'e': B<[u8]> 446..447 'e': B<[u8]>
[450; 454) 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]> 450..454 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
[450; 457) 'foo2(b)': B<[u8]> 450..457 'foo2(b)': B<[u8]>
[455; 456) 'b': B<[u8; _]> 455..456 'b': B<[u8; _]>
[467; 468) 'f': C<[u8]> 467..468 'f': C<[u8]>
[471; 475) 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]> 471..475 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
[471; 478) 'foo3(c)': C<[u8]> 471..478 'foo3(c)': C<[u8]>
[476; 477) 'c': C<[u8; _]> 476..477 'c': C<[u8; _]>
"### "###
); );
} }
@ -193,24 +193,24 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 12) 'x': &[T] 11..12 'x': &[T]
[28; 39) '{ loop {} }': &[T] 28..39 '{ loop {} }': &[T]
[30; 37) 'loop {}': ! 30..37 'loop {}': !
[35; 37) '{}': () 35..37 '{}': ()
[50; 126) '{ ... }; }': () 50..126 '{ ... }; }': ()
[60; 61) 'x': &[i32] 60..61 'x': &[i32]
[64; 123) 'if tru... }': &[i32] 64..123 'if tru... }': &[i32]
[67; 71) 'true': bool 67..71 'true': bool
[72; 97) '{ ... }': &[i32] 72..97 '{ ... }': &[i32]
[82; 85) 'foo': fn foo<i32>(&[i32]) -> &[i32] 82..85 'foo': fn foo<i32>(&[i32]) -> &[i32]
[82; 91) 'foo(&[1])': &[i32] 82..91 'foo(&[1])': &[i32]
[86; 90) '&[1]': &[i32; _] 86..90 '&[1]': &[i32; _]
[87; 90) '[1]': [i32; _] 87..90 '[1]': [i32; _]
[88; 89) '1': i32 88..89 '1': i32
[103; 123) '{ ... }': &[i32; _] 103..123 '{ ... }': &[i32; _]
[113; 117) '&[1]': &[i32; _] 113..117 '&[1]': &[i32; _]
[114; 117) '[1]': [i32; _] 114..117 '[1]': [i32; _]
[115; 116) '1': i32 115..116 '1': i32
"### "###
); );
} }
@ -229,24 +229,24 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 12) 'x': &[T] 11..12 'x': &[T]
[28; 39) '{ loop {} }': &[T] 28..39 '{ loop {} }': &[T]
[30; 37) 'loop {}': ! 30..37 'loop {}': !
[35; 37) '{}': () 35..37 '{}': ()
[50; 126) '{ ... }; }': () 50..126 '{ ... }; }': ()
[60; 61) 'x': &[i32] 60..61 'x': &[i32]
[64; 123) 'if tru... }': &[i32] 64..123 'if tru... }': &[i32]
[67; 71) 'true': bool 67..71 'true': bool
[72; 92) '{ ... }': &[i32; _] 72..92 '{ ... }': &[i32; _]
[82; 86) '&[1]': &[i32; _] 82..86 '&[1]': &[i32; _]
[83; 86) '[1]': [i32; _] 83..86 '[1]': [i32; _]
[84; 85) '1': i32 84..85 '1': i32
[98; 123) '{ ... }': &[i32] 98..123 '{ ... }': &[i32]
[108; 111) 'foo': fn foo<i32>(&[i32]) -> &[i32] 108..111 'foo': fn foo<i32>(&[i32]) -> &[i32]
[108; 117) 'foo(&[1])': &[i32] 108..117 'foo(&[1])': &[i32]
[112; 116) '&[1]': &[i32; _] 112..116 '&[1]': &[i32; _]
[113; 116) '[1]': [i32; _] 113..116 '[1]': [i32; _]
[114; 115) '1': i32 114..115 '1': i32
"### "###
); );
} }
@ -265,31 +265,31 @@ fn test(i: i32) {
} }
"#), "#),
@r###" @r###"
[11; 12) 'x': &[T] 11..12 'x': &[T]
[28; 39) '{ loop {} }': &[T] 28..39 '{ loop {} }': &[T]
[30; 37) 'loop {}': ! 30..37 'loop {}': !
[35; 37) '{}': () 35..37 '{}': ()
[48; 49) 'i': i32 48..49 'i': i32
[56; 150) '{ ... }; }': () 56..150 '{ ... }; }': ()
[66; 67) 'x': &[i32] 66..67 'x': &[i32]
[70; 147) 'match ... }': &[i32] 70..147 'match ... }': &[i32]
[76; 77) 'i': i32 76..77 'i': i32
[88; 89) '2': i32 88..89 '2': i32
[88; 89) '2': i32 88..89 '2': i32
[93; 96) 'foo': fn foo<i32>(&[i32]) -> &[i32] 93..96 'foo': fn foo<i32>(&[i32]) -> &[i32]
[93; 102) 'foo(&[2])': &[i32] 93..102 'foo(&[2])': &[i32]
[97; 101) '&[2]': &[i32; _] 97..101 '&[2]': &[i32; _]
[98; 101) '[2]': [i32; _] 98..101 '[2]': [i32; _]
[99; 100) '2': i32 99..100 '2': i32
[112; 113) '1': i32 112..113 '1': i32
[112; 113) '1': i32 112..113 '1': i32
[117; 121) '&[1]': &[i32; _] 117..121 '&[1]': &[i32; _]
[118; 121) '[1]': [i32; _] 118..121 '[1]': [i32; _]
[119; 120) '1': i32 119..120 '1': i32
[131; 132) '_': i32 131..132 '_': i32
[136; 140) '&[3]': &[i32; _] 136..140 '&[3]': &[i32; _]
[137; 140) '[3]': [i32; _] 137..140 '[3]': [i32; _]
[138; 139) '3': i32 138..139 '3': i32
"### "###
); );
} }
@ -308,31 +308,31 @@ fn test(i: i32) {
} }
"#), "#),
@r###" @r###"
[11; 12) 'x': &[T] 11..12 'x': &[T]
[28; 39) '{ loop {} }': &[T] 28..39 '{ loop {} }': &[T]
[30; 37) 'loop {}': ! 30..37 'loop {}': !
[35; 37) '{}': () 35..37 '{}': ()
[48; 49) 'i': i32 48..49 'i': i32
[56; 150) '{ ... }; }': () 56..150 '{ ... }; }': ()
[66; 67) 'x': &[i32] 66..67 'x': &[i32]
[70; 147) 'match ... }': &[i32] 70..147 'match ... }': &[i32]
[76; 77) 'i': i32 76..77 'i': i32
[88; 89) '1': i32 88..89 '1': i32
[88; 89) '1': i32 88..89 '1': i32
[93; 97) '&[1]': &[i32; _] 93..97 '&[1]': &[i32; _]
[94; 97) '[1]': [i32; _] 94..97 '[1]': [i32; _]
[95; 96) '1': i32 95..96 '1': i32
[107; 108) '2': i32 107..108 '2': i32
[107; 108) '2': i32 107..108 '2': i32
[112; 115) 'foo': fn foo<i32>(&[i32]) -> &[i32] 112..115 'foo': fn foo<i32>(&[i32]) -> &[i32]
[112; 121) 'foo(&[2])': &[i32] 112..121 'foo(&[2])': &[i32]
[116; 120) '&[2]': &[i32; _] 116..120 '&[2]': &[i32; _]
[117; 120) '[2]': [i32; _] 117..120 '[2]': [i32; _]
[118; 119) '2': i32 118..119 '2': i32
[131; 132) '_': i32 131..132 '_': i32
[136; 140) '&[3]': &[i32; _] 136..140 '&[3]': &[i32; _]
[137; 140) '[3]': [i32; _] 137..140 '[3]': [i32; _]
[138; 139) '3': i32 138..139 '3': i32
"### "###
); );
} }
@ -353,24 +353,24 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 145) '{ ... }; }': () 11..145 '{ ... }; }': ()
[21; 22) 't': &mut i32 21..22 't': &mut i32
[25; 31) '&mut 1': &mut i32 25..31 '&mut 1': &mut i32
[30; 31) '1': i32 30..31 '1': i32
[41; 42) 'x': *const i32 41..42 'x': *const i32
[45; 142) 'match ... }': *const i32 45..142 'match ... }': *const i32
[51; 52) '1': i32 51..52 '1': i32
[63; 64) '1': i32 63..64 '1': i32
[63; 64) '1': i32 63..64 '1': i32
[68; 69) 't': &mut i32 68..69 't': &mut i32
[68; 81) 't as *mut i32': *mut i32 68..81 't as *mut i32': *mut i32
[91; 92) '2': i32 91..92 '2': i32
[91; 92) '2': i32 91..92 '2': i32
[96; 97) 't': &mut i32 96..97 't': &mut i32
[96; 105) 't as &i32': &i32 96..105 't as &i32': &i32
[115; 116) '_': i32 115..116 '_': i32
[120; 121) 't': &mut i32 120..121 't': &mut i32
[120; 135) 't as *const i32': *const i32 120..135 't as *const i32': *const i32
"### "###
); );
} }
@ -384,9 +384,9 @@ fn foo() -> u32 {
} }
"#, true), "#, true),
@r###" @r###"
[17; 40) '{ ...own; }': ! 17..40 '{ ...own; }': !
[23; 37) 'return unknown': ! 23..37 'return unknown': !
[30; 37) 'unknown': u32 30..37 'unknown': u32
"### "###
); );
} }
@ -404,24 +404,24 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[30; 31) 'x': &Foo 30..31 'x': &Foo
[39; 41) '{}': () 39..41 '{}': ()
[52; 133) '{ ...oo); }': () 52..133 '{ ...oo); }': ()
[58; 71) 'takes_ref_foo': fn takes_ref_foo(&Foo) 58..71 'takes_ref_foo': fn takes_ref_foo(&Foo)
[58; 77) 'takes_...(&Foo)': () 58..77 'takes_...(&Foo)': ()
[72; 76) '&Foo': &Foo 72..76 '&Foo': &Foo
[73; 76) 'Foo': Foo 73..76 'Foo': Foo
[83; 96) 'takes_ref_foo': fn takes_ref_foo(&Foo) 83..96 'takes_ref_foo': fn takes_ref_foo(&Foo)
[83; 103) 'takes_...&&Foo)': () 83..103 'takes_...&&Foo)': ()
[97; 102) '&&Foo': &&Foo 97..102 '&&Foo': &&Foo
[98; 102) '&Foo': &Foo 98..102 '&Foo': &Foo
[99; 102) 'Foo': Foo 99..102 'Foo': Foo
[109; 122) 'takes_ref_foo': fn takes_ref_foo(&Foo) 109..122 'takes_ref_foo': fn takes_ref_foo(&Foo)
[109; 130) 'takes_...&&Foo)': () 109..130 'takes_...&&Foo)': ()
[123; 129) '&&&Foo': &&&Foo 123..129 '&&&Foo': &&&Foo
[124; 129) '&&Foo': &&Foo 124..129 '&&Foo': &&Foo
[125; 129) '&Foo': &Foo 125..129 '&Foo': &Foo
[126; 129) 'Foo': Foo 126..129 'Foo': Foo
"### "###
); );
} }
@ -439,26 +439,26 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[29; 30) 'x': &T 29..30 'x': &T
[41; 47) '{ *x }': T 41..47 '{ *x }': T
[43; 45) '*x': T 43..45 '*x': T
[44; 45) 'x': &T 44..45 'x': &T
[58; 127) '{ ...oo); }': () 58..127 '{ ...oo); }': ()
[64; 73) 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo 64..73 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo
[64; 79) 'takes_ref(&Foo)': Foo 64..79 'takes_ref(&Foo)': Foo
[74; 78) '&Foo': &Foo 74..78 '&Foo': &Foo
[75; 78) 'Foo': Foo 75..78 'Foo': Foo
[85; 94) 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo 85..94 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo
[85; 101) 'takes_...&&Foo)': &Foo 85..101 'takes_...&&Foo)': &Foo
[95; 100) '&&Foo': &&Foo 95..100 '&&Foo': &&Foo
[96; 100) '&Foo': &Foo 96..100 '&Foo': &Foo
[97; 100) 'Foo': Foo 97..100 'Foo': Foo
[107; 116) 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo 107..116 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo
[107; 124) 'takes_...&&Foo)': &&Foo 107..124 'takes_...&&Foo)': &&Foo
[117; 123) '&&&Foo': &&&Foo 117..123 '&&&Foo': &&&Foo
[118; 123) '&&Foo': &&Foo 118..123 '&&Foo': &&Foo
[119; 123) '&Foo': &Foo 119..123 '&Foo': &Foo
[120; 123) 'Foo': Foo 120..123 'Foo': Foo
"### "###
); );
} }
@ -478,18 +478,18 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[127; 128) 'x': &str 127..128 'x': &str
[136; 138) '{}': () 136..138 '{}': ()
[169; 180) '{ loop {} }': String 169..180 '{ loop {} }': String
[171; 178) 'loop {}': ! 171..178 'loop {}': !
[176; 178) '{}': () 176..178 '{}': ()
[191; 236) '{ ... }); }': () 191..236 '{ ... }); }': ()
[197; 210) 'takes_ref_str': fn takes_ref_str(&str) 197..210 'takes_ref_str': fn takes_ref_str(&str)
[197; 233) 'takes_...g() })': () 197..233 'takes_...g() })': ()
[211; 232) '&{ ret...ng() }': &String 211..232 '&{ ret...ng() }': &String
[212; 232) '{ retu...ng() }': String 212..232 '{ retu...ng() }': String
[214; 228) 'returns_string': fn returns_string() -> String 214..228 'returns_string': fn returns_string() -> String
[214; 230) 'return...ring()': String 214..230 'return...ring()': String
"### "###
); );
} }
@ -508,19 +508,19 @@ fn foo() {
} }
"#, true), "#, true),
@r###" @r###"
[10; 106) '{ ... }; }': () 10..106 '{ ... }; }': ()
[20; 21) 'x': || -> &u32 20..21 'x': || -> &u32
[24; 103) '|| { ... }': || -> &u32 24..103 '|| { ... }': || -> &u32
[27; 103) '{ ... }': &u32 27..103 '{ ... }': &u32
[37; 82) 'if tru... }': () 37..82 'if tru... }': ()
[40; 44) 'true': bool 40..44 'true': bool
[45; 82) '{ ... }': ! 45..82 '{ ... }': !
[59; 71) 'return &1u32': ! 59..71 'return &1u32': !
[66; 71) '&1u32': &u32 66..71 '&1u32': &u32
[67; 71) '1u32': u32 67..71 '1u32': u32
[91; 97) '&&1u32': &&u32 91..97 '&&1u32': &&u32
[92; 97) '&1u32': &u32 92..97 '&1u32': &u32
[93; 97) '1u32': u32 93..97 '1u32': u32
"### "###
); );
} }
@ -535,12 +535,12 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[8; 9) 'x': u32 8..9 'x': u32
[25; 30) '{ 1 }': isize 25..30 '{ 1 }': isize
[27; 28) '1': isize 27..28 '1': isize
[41; 79) '{ ...foo; }': () 41..79 '{ ...foo; }': ()
[51; 52) 'f': fn(u32) -> isize 51..52 'f': fn(u32) -> isize
[73; 76) 'foo': fn foo(u32) -> isize 73..76 'foo': fn foo(u32) -> isize
"### "###
); );
} }
@ -554,12 +554,12 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[11; 55) '{ ...1 }; }': () 11..55 '{ ...1 }; }': ()
[21; 22) 'f': fn(u32) -> isize 21..22 'f': fn(u32) -> isize
[43; 52) '|x| { 1 }': |u32| -> isize 43..52 '|x| { 1 }': |u32| -> isize
[44; 45) 'x': u32 44..45 'x': u32
[47; 52) '{ 1 }': isize 47..52 '{ 1 }': isize
[49; 50) '1': isize 49..50 '1': isize
"### "###
); );
} }
@ -577,11 +577,11 @@ impl<TT> S<TT> {
} }
"#, true), "#, true),
@r###" @r###"
[51; 55) 'self': &S<TT> 51..55 'self': &S<TT>
[64; 87) '{ ... }': &TT 64..87 '{ ... }': &TT
[74; 81) '&self.t': &TT 74..81 '&self.t': &TT
[75; 79) 'self': &S<TT> 75..79 'self': &S<TT>
[75; 81) 'self.t': TT 75..81 'self.t': TT
"### "###
); );
} }
@ -602,13 +602,13 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[162; 199) '{ ... 3]; }': () 162..199 '{ ... 3]; }': ()
[172; 173) 'f': &[usize] 172..173 'f': &[usize]
[186; 196) '&[1, 2, 3]': &[usize; _] 186..196 '&[1, 2, 3]': &[usize; _]
[187; 196) '[1, 2, 3]': [usize; _] 187..196 '[1, 2, 3]': [usize; _]
[188; 189) '1': usize 188..189 '1': usize
[191; 192) '2': usize 191..192 '2': usize
[194; 195) '3': usize 194..195 '3': usize
"### "###
); );
} }
@ -642,19 +642,19 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[388; 573) '{ ...bj2; }': () 388..573 '{ ...bj2; }': ()
[398; 401) 'obj': &dyn Baz<i8, i16> 398..401 'obj': &dyn Baz<i8, i16>
[423; 425) '&S': &S<i8, i16> 423..425 '&S': &S<i8, i16>
[424; 425) 'S': S<i8, i16> 424..425 'S': S<i8, i16>
[435; 438) 'obj': &dyn Bar<usize, i8, i16> 435..438 'obj': &dyn Bar<usize, i8, i16>
[460; 463) 'obj': &dyn Baz<i8, i16> 460..463 'obj': &dyn Baz<i8, i16>
[473; 476) 'obj': &dyn Foo<i8, usize> 473..476 'obj': &dyn Foo<i8, usize>
[495; 498) 'obj': &dyn Bar<usize, i8, i16> 495..498 'obj': &dyn Bar<usize, i8, i16>
[508; 512) 'obj2': &dyn Baz<i8, i16> 508..512 'obj2': &dyn Baz<i8, i16>
[534; 536) '&S': &S<i8, i16> 534..536 '&S': &S<i8, i16>
[535; 536) 'S': S<i8, i16> 535..536 'S': S<i8, i16>
[546; 547) '_': &dyn Foo<i8, usize> 546..547 '_': &dyn Foo<i8, usize>
[566; 570) 'obj2': &dyn Baz<i8, i16> 566..570 'obj2': &dyn Baz<i8, i16>
"### "###
); );
} }
@ -687,12 +687,12 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[292; 348) '{ ...obj; }': () 292..348 '{ ...obj; }': ()
[302; 305) 'obj': &dyn D 302..305 'obj': &dyn D
[316; 318) '&S': &S 316..318 '&S': &S
[317; 318) 'S': S 317..318 'S': S
[328; 331) 'obj': &dyn A 328..331 'obj': &dyn A
[342; 345) 'obj': &dyn D 342..345 'obj': &dyn D
"### "###
); );
} }

View file

@ -67,12 +67,12 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 17) '{Foo(v...,2,])}': Foo !0..17 '{Foo(v...,2,])}': Foo
![1; 4) 'Foo': Foo({unknown}) -> Foo !1..4 'Foo': Foo({unknown}) -> Foo
![1; 16) 'Foo(vec![1,2,])': Foo !1..16 'Foo(vec![1,2,])': Foo
![5; 15) 'vec![1,2,]': {unknown} !5..15 'vec![1,2,]': {unknown}
[156; 182) '{ ...,2); }': () 156..182 '{ ...,2); }': ()
[166; 167) 'x': Foo 166..167 'x': Foo
"### "###
); );
} }
@ -100,14 +100,14 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 17) '{Foo(v...,2,])}': Foo !0..17 '{Foo(v...,2,])}': Foo
![1; 4) 'Foo': Foo({unknown}) -> Foo !1..4 'Foo': Foo({unknown}) -> Foo
![1; 16) 'Foo(vec![1,2,])': Foo !1..16 'Foo(vec![1,2,])': Foo
![5; 15) 'vec![1,2,]': {unknown} !5..15 'vec![1,2,]': {unknown}
[195; 251) '{ ...,2); }': () 195..251 '{ ...,2); }': ()
[205; 206) 'x': Foo 205..206 'x': Foo
[228; 229) 'y': {unknown} 228..229 'y': {unknown}
[232; 248) 'crate:...!(1,2)': {unknown} 232..248 'crate:...!(1,2)': {unknown}
"### "###
); );
} }
@ -131,11 +131,11 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 5) '42i32': i32 !0..5 '42i32': i32
![0; 5) '42i32': i32 !0..5 '42i32': i32
[111; 164) '{ ...!(); }': () 111..164 '{ ...!(); }': ()
[121; 122) 'x': i32 121..122 'x': i32
[148; 149) 'y': i32 148..149 'y': i32
"### "###
); );
} }
@ -172,51 +172,51 @@ fn spam() {
} }
"#), "#),
@r###" @r###"
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
![0; 6) '1isize': isize !0..6 '1isize': isize
[54; 457) '{ ...!(); }': ! 54..457 '{ ...!(); }': !
[88; 109) 'spam!(...am!())': {unknown} 88..109 'spam!(...am!())': {unknown}
[115; 134) 'for _ ...!() {}': () 115..134 'for _ ...!() {}': ()
[119; 120) '_': {unknown} 119..120 '_': {unknown}
[132; 134) '{}': () 132..134 '{}': ()
[139; 149) '|| spam!()': || -> isize 139..149 '|| spam!()': || -> isize
[155; 171) 'while ...!() {}': () 155..171 'while ...!() {}': ()
[169; 171) '{}': () 169..171 '{}': ()
[176; 189) 'break spam!()': ! 176..189 'break spam!()': !
[195; 209) 'return spam!()': ! 195..209 'return spam!()': !
[215; 269) 'match ... }': isize 215..269 'match ... }': isize
[239; 240) '_': isize 239..240 '_': isize
[274; 290) 'spam!(...am!())': {unknown} 274..290 'spam!(...am!())': {unknown}
[296; 318) 'Spam {...m!() }': {unknown} 296..318 'Spam {...m!() }': {unknown}
[324; 340) 'spam!(...am!()]': {unknown} 324..340 'spam!(...am!()]': {unknown}
[365; 381) 'spam!(... usize': usize 365..381 'spam!(... usize': usize
[387; 395) '&spam!()': &isize 387..395 '&spam!()': &isize
[401; 409) '-spam!()': isize 401..409 '-spam!()': isize
[415; 431) 'spam!(...pam!()': {unknown} 415..431 'spam!(...pam!()': {unknown}
[437; 454) 'spam!(...pam!()': isize 437..454 'spam!(...pam!()': isize
"### "###
); );
} }
@ -244,9 +244,9 @@ fn foo() {
} }
"#), "#),
@r###" @r###"
![0; 5) '42i32': i32 !0..5 '42i32': i32
[171; 206) '{ ...32); }': () 171..206 '{ ...32); }': ()
[181; 184) 'foo': i32 181..184 'foo': i32
"### "###
); );
} }
@ -357,12 +357,12 @@ fn main() {
} }
"#), "#),
@r###" @r###"
[159; 164) '{ 0 }': u64 159..164 '{ 0 }': u64
[161; 162) '0': u64 161..162 '0': u64
[175; 197) '{ ...f(); }': () 175..197 '{ ...f(); }': ()
[185; 187) '_a': u64 185..187 '_a': u64
[191; 192) 'f': fn f() -> u64 191..192 'f': fn f() -> u64
[191; 194) 'f()': u64 191..194 'f()': u64
"### "###
); );
} }
@ -379,10 +379,10 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 6) '1usize': usize !0..6 '1usize': usize
[11; 90) '{ ...!(); }': () 11..90 '{ ...!(); }': ()
[17; 66) 'macro_... }': {unknown} 17..66 'macro_... }': {unknown}
[75; 77) '_a': usize 75..77 '_a': usize
"### "###
); );
} }
@ -399,9 +399,9 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 1) '0': i32 !0..1 '0': i32
[64; 88) '{ ...!(); }': () 64..88 '{ ...!(); }': ()
[74; 75) 'x': i32 74..75 'x': i32
"### "###
); );
} }
@ -418,9 +418,9 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 2) '""': &str !0..2 '""': &str
[64; 88) '{ ...!(); }': () 64..88 '{ ...!(); }': ()
[74; 75) 'x': &str 74..75 'x': &str
"### "###
); );
} }
@ -437,9 +437,9 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 1) '0': i32 !0..1 '0': i32
[66; 92) '{ ...!(); }': () 66..92 '{ ...!(); }': ()
[76; 77) 'x': i32 76..77 'x': i32
"### "###
); );
} }
@ -456,9 +456,9 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 13) '"helloworld!"': &str !0..13 '"helloworld!"': &str
[66; 122) '{ ...")); }': () 66..122 '{ ...")); }': ()
[76; 77) 'x': &str 76..77 'x': &str
"### "###
); );
} }
@ -591,9 +591,9 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 13) '"helloworld!"': &str !0..13 '"helloworld!"': &str
[104; 161) '{ ...")); }': () 104..161 '{ ...")); }': ()
[114; 115) 'x': &str 114..115 'x': &str
"### "###
); );
} }
@ -611,9 +611,9 @@ fn main() {
} }
"#), "#),
@r###" @r###"
![0; 5) '"bar"': &str !0..5 '"bar"': &str
[88; 116) '{ ...o"); }': () 88..116 '{ ...o"); }': ()
[98; 99) 'x': &str 98..99 'x': &str
"### "###
); );
} }
@ -703,12 +703,12 @@ fn main() {
} }
"#), "#),
@r###" @r###"
[52; 111) '{ ... }; }': () 52..111 '{ ... }; }': ()
[62; 63) 'x': u32 62..63 'x': u32
[66; 108) 'match ... }': u32 66..108 'match ... }': u32
[72; 74) '()': () 72..74 '()': ()
[85; 92) 'unit!()': () 85..92 'unit!()': ()
[96; 101) '92u32': u32 96..101 '92u32': u32
"### "###
); );
} }

View file

@ -22,14 +22,14 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[45; 49) 'self': &[T] 45..49 'self': &[T]
[56; 79) '{ ... }': T 56..79 '{ ... }': T
[66; 73) 'loop {}': ! 66..73 'loop {}': !
[71; 73) '{}': () 71..73 '{}': ()
[133; 160) '{ ...o"); }': () 133..160 '{ ...o"); }': ()
[139; 149) '<[_]>::foo': fn foo<u8>(&[u8]) -> u8 139..149 '<[_]>::foo': fn foo<u8>(&[u8]) -> u8
[139; 157) '<[_]>:..."foo")': u8 139..157 '<[_]>:..."foo")': u8
[150; 156) 'b"foo"': &[u8] 150..156 'b"foo"': &[u8]
"### "###
); );
} }
@ -51,15 +51,15 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[49; 75) '{ ... }': A 49..75 '{ ... }': A
[59; 69) 'A { x: 0 }': A 59..69 'A { x: 0 }': A
[66; 67) '0': u32 66..67 '0': u32
[88; 122) '{ ...a.x; }': () 88..122 '{ ...a.x; }': ()
[98; 99) 'a': A 98..99 'a': A
[102; 108) 'A::new': fn new() -> A 102..108 'A::new': fn new() -> A
[102; 110) 'A::new()': A 102..110 'A::new()': A
[116; 117) 'a': A 116..117 'a': A
[116; 119) 'a.x': u32 116..119 'a.x': u32
"### "###
); );
} }
@ -86,19 +86,19 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[47; 67) '{ ... }': A 47..67 '{ ... }': A
[57; 61) 'A::B': A 57..61 'A::B': A
[88; 108) '{ ... }': A 88..108 '{ ... }': A
[98; 102) 'A::C': A 98..102 'A::C': A
[121; 178) '{ ... c; }': () 121..178 '{ ... c; }': ()
[131; 132) 'a': A 131..132 'a': A
[135; 139) 'A::b': fn b() -> A 135..139 'A::b': fn b() -> A
[135; 141) 'A::b()': A 135..141 'A::b()': A
[147; 148) 'a': A 147..148 'a': A
[158; 159) 'c': A 158..159 'c': A
[162; 166) 'A::c': fn c() -> A 162..166 'A::c': fn c() -> A
[162; 168) 'A::c()': A 162..168 'A::c()': A
[174; 175) 'c': A 174..175 'c': A
"### "###
); );
} }
@ -130,22 +130,22 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[56; 64) '{ A {} }': A 56..64 '{ A {} }': A
[58; 62) 'A {}': A 58..62 'A {}': A
[126; 132) '{ 99 }': u32 126..132 '{ 99 }': u32
[128; 130) '99': u32 128..130 '99': u32
[202; 210) '{ C {} }': C 202..210 '{ C {} }': C
[204; 208) 'C {}': C 204..208 'C {}': C
[241; 325) '{ ...g(); }': () 241..325 '{ ...g(); }': ()
[251; 252) 'x': A 251..252 'x': A
[255; 266) 'a::A::thing': fn thing() -> A 255..266 'a::A::thing': fn thing() -> A
[255; 268) 'a::A::thing()': A 255..268 'a::A::thing()': A
[278; 279) 'y': u32 278..279 'y': u32
[282; 293) 'b::B::thing': fn thing() -> u32 282..293 'b::B::thing': fn thing() -> u32
[282; 295) 'b::B::thing()': u32 282..295 'b::B::thing()': u32
[305; 306) 'z': C 305..306 'z': C
[309; 320) 'c::C::thing': fn thing() -> C 309..320 'c::C::thing': fn thing() -> C
[309; 322) 'c::C::thing()': C 309..322 'c::C::thing()': C
"### "###
); );
} }
@ -169,15 +169,15 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[64; 67) 'val': T 64..67 'val': T
[82; 109) '{ ... }': Gen<T> 82..109 '{ ... }': Gen<T>
[92; 103) 'Gen { val }': Gen<T> 92..103 'Gen { val }': Gen<T>
[98; 101) 'val': T 98..101 'val': T
[123; 155) '{ ...32); }': () 123..155 '{ ...32); }': ()
[133; 134) 'a': Gen<u32> 133..134 'a': Gen<u32>
[137; 146) 'Gen::make': fn make<u32>(u32) -> Gen<u32> 137..146 'Gen::make': fn make<u32>(u32) -> Gen<u32>
[137; 152) 'Gen::make(0u32)': Gen<u32> 137..152 'Gen::make(0u32)': Gen<u32>
[147; 151) '0u32': u32 147..151 '0u32': u32
"### "###
); );
} }
@ -201,13 +201,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[80; 104) '{ ... }': Gen<T> 80..104 '{ ... }': Gen<T>
[90; 98) 'loop { }': ! 90..98 'loop { }': !
[95; 98) '{ }': () 95..98 '{ }': ()
[118; 146) '{ ...e(); }': () 118..146 '{ ...e(); }': ()
[128; 129) 'a': Gen<u32> 128..129 'a': Gen<u32>
[132; 141) 'Gen::make': fn make<u32>() -> Gen<u32> 132..141 'Gen::make': fn make<u32>() -> Gen<u32>
[132; 143) 'Gen::make()': Gen<u32> 132..143 'Gen::make()': Gen<u32>
"### "###
); );
} }
@ -255,13 +255,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[76; 100) '{ ... }': Gen<T> 76..100 '{ ... }': Gen<T>
[86; 94) 'loop { }': ! 86..94 'loop { }': !
[91; 94) '{ }': () 91..94 '{ }': ()
[114; 149) '{ ...e(); }': () 114..149 '{ ...e(); }': ()
[124; 125) 'a': Gen<u32> 124..125 'a': Gen<u32>
[128; 144) 'Gen::<...::make': fn make<u32>() -> Gen<u32> 128..144 'Gen::<...::make': fn make<u32>() -> Gen<u32>
[128; 146) 'Gen::<...make()': Gen<u32> 128..146 'Gen::<...make()': Gen<u32>
"### "###
); );
} }
@ -286,13 +286,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[102; 126) '{ ... }': Gen<u32, T> 102..126 '{ ... }': Gen<u32, T>
[112; 120) 'loop { }': ! 112..120 'loop { }': !
[117; 120) '{ }': () 117..120 '{ }': ()
[140; 180) '{ ...e(); }': () 140..180 '{ ...e(); }': ()
[150; 151) 'a': Gen<u32, u64> 150..151 'a': Gen<u32, u64>
[154; 175) 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64> 154..175 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
[154; 177) 'Gen::<...make()': Gen<u32, u64> 154..177 'Gen::<...make()': Gen<u32, u64>
"### "###
); );
} }
@ -340,13 +340,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[31; 35) 'self': &Self 31..35 'self': &Self
[110; 114) 'self': &Self 110..114 'self': &Self
[170; 228) '{ ...i128 }': () 170..228 '{ ...i128 }': ()
[176; 178) 'S1': S1 176..178 'S1': S1
[176; 187) 'S1.method()': u32 176..187 'S1.method()': u32
[203; 205) 'S2': S2 203..205 'S2': S2
[203; 214) 'S2.method()': i128 203..214 'S2.method()': i128
"### "###
); );
} }
@ -387,14 +387,14 @@ mod bar_test {
} }
"#), "#),
@r###" @r###"
[63; 67) 'self': &Self 63..67 'self': &Self
[169; 173) 'self': &Self 169..173 'self': &Self
[300; 337) '{ ... }': () 300..337 '{ ... }': ()
[310; 311) 'S': S 310..311 'S': S
[310; 320) 'S.method()': u32 310..320 'S.method()': u32
[416; 454) '{ ... }': () 416..454 '{ ... }': ()
[426; 427) 'S': S 426..427 'S': S
[426; 436) 'S.method()': i128 426..436 'S.method()': i128
"### "###
); );
} }
@ -414,10 +414,10 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[33; 37) 'self': &Self 33..37 'self': &Self
[92; 111) '{ ...d(); }': () 92..111 '{ ...d(); }': ()
[98; 99) 'S': S 98..99 'S': S
[98; 108) 'S.method()': u32 98..108 'S.method()': u32
"### "###
); );
} }
@ -443,17 +443,17 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[43; 47) 'self': &Self 43..47 'self': &Self
[82; 86) 'self': &Self 82..86 'self': &Self
[210; 361) '{ ..., i8 }': () 210..361 '{ ..., i8 }': ()
[216; 218) 'S1': S1 216..218 'S1': S1
[216; 228) 'S1.method1()': (u8, u16, u32) 216..228 'S1.method1()': (u8, u16, u32)
[250; 252) 'S1': S1 250..252 'S1': S1
[250; 262) 'S1.method2()': (u32, u16, u8) 250..262 'S1.method2()': (u32, u16, u8)
[284; 286) 'S2': S2 284..286 'S2': S2
[284; 296) 'S2.method1()': (i8, i16, {unknown}) 284..296 'S2.method1()': (i8, i16, {unknown})
[324; 326) 'S2': S2 324..326 'S2': S2
[324; 336) 'S2.method2()': ({unknown}, i16, i8) 324..336 'S2.method2()': ({unknown}, i16, i8)
"### "###
); );
} }
@ -473,12 +473,12 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[33; 37) 'self': &Self 33..37 'self': &Self
[102; 127) '{ ...d(); }': () 102..127 '{ ...d(); }': ()
[108; 109) 'S': S<u32>(u32) -> S<u32> 108..109 'S': S<u32>(u32) -> S<u32>
[108; 115) 'S(1u32)': S<u32> 108..115 'S(1u32)': S<u32>
[108; 124) 'S(1u32...thod()': u32 108..124 'S(1u32...thod()': u32
[110; 114) '1u32': u32 110..114 '1u32': u32
"### "###
); );
} }
@ -499,16 +499,16 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[87; 193) '{ ...t(); }': () 87..193 '{ ...t(); }': ()
[97; 99) 's1': S 97..99 's1': S
[105; 121) 'Defaul...efault': fn default<S>() -> S 105..121 'Defaul...efault': fn default<S>() -> S
[105; 123) 'Defaul...ault()': S 105..123 'Defaul...ault()': S
[133; 135) 's2': S 133..135 's2': S
[138; 148) 'S::default': fn default<S>() -> S 138..148 'S::default': fn default<S>() -> S
[138; 150) 'S::default()': S 138..150 'S::default()': S
[160; 162) 's3': S 160..162 's3': S
[165; 188) '<S as ...efault': fn default<S>() -> S 165..188 '<S as ...efault': fn default<S>() -> S
[165; 190) '<S as ...ault()': S 165..190 '<S as ...ault()': S
"### "###
); );
} }
@ -531,16 +531,16 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[127; 211) '{ ...e(); }': () 127..211 '{ ...e(); }': ()
[137; 138) 'a': u32 137..138 'a': u32
[141; 148) 'S::make': fn make<S, u32>() -> u32 141..148 'S::make': fn make<S, u32>() -> u32
[141; 150) 'S::make()': u32 141..150 'S::make()': u32
[160; 161) 'b': u64 160..161 'b': u64
[164; 178) 'G::<u64>::make': fn make<G<u64>, u64>() -> u64 164..178 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
[164; 180) 'G::<u6...make()': u64 164..180 'G::<u6...make()': u64
[190; 191) 'c': f64 190..191 'c': f64
[199; 206) 'G::make': fn make<G<f64>, f64>() -> f64 199..206 'G::make': fn make<G<f64>, f64>() -> f64
[199; 208) 'G::make()': f64 199..208 'G::make()': f64
"### "###
); );
} }
@ -565,22 +565,22 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[135; 313) '{ ...e(); }': () 135..313 '{ ...e(); }': ()
[145; 146) 'a': (u32, i64) 145..146 'a': (u32, i64)
[149; 163) 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64) 149..163 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
[149; 165) 'S::mak...i64>()': (u32, i64) 149..165 'S::mak...i64>()': (u32, i64)
[175; 176) 'b': (u32, i64) 175..176 'b': (u32, i64)
[189; 196) 'S::make': fn make<S, u32, i64>() -> (u32, i64) 189..196 'S::make': fn make<S, u32, i64>() -> (u32, i64)
[189; 198) 'S::make()': (u32, i64) 189..198 'S::make()': (u32, i64)
[208; 209) 'c': (u32, i64) 208..209 'c': (u32, i64)
[212; 233) 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64) 212..233 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
[212; 235) 'G::<u3...i64>()': (u32, i64) 212..235 'G::<u3...i64>()': (u32, i64)
[245; 246) 'd': (u32, i64) 245..246 'd': (u32, i64)
[259; 273) 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64) 259..273 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
[259; 275) 'G::mak...i64>()': (u32, i64) 259..275 'G::mak...i64>()': (u32, i64)
[285; 286) 'e': (u32, i64) 285..286 'e': (u32, i64)
[301; 308) 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64) 301..308 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
[301; 310) 'G::make()': (u32, i64) 301..310 'G::make()': (u32, i64)
"### "###
); );
} }
@ -599,10 +599,10 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[101; 127) '{ ...e(); }': () 101..127 '{ ...e(); }': ()
[111; 112) 'a': (S<i32>, i64) 111..112 'a': (S<i32>, i64)
[115; 122) 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64) 115..122 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
[115; 124) 'S::make()': (S<i32>, i64) 115..124 'S::make()': (S<i32>, i64)
"### "###
); );
} }
@ -623,13 +623,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[131; 203) '{ ...e(); }': () 131..203 '{ ...e(); }': ()
[141; 142) 'a': (S<u64>, i64) 141..142 'a': (S<u64>, i64)
[158; 165) 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64) 158..165 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
[158; 167) 'S::make()': (S<u64>, i64) 158..167 'S::make()': (S<u64>, i64)
[177; 178) 'b': (S<u32>, i32) 177..178 'b': (S<u32>, i32)
[191; 198) 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32) 191..198 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
[191; 200) 'S::make()': (S<u32>, i32) 191..200 'S::make()': (S<u32>, i32)
"### "###
); );
} }
@ -649,13 +649,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[107; 211) '{ ...>(); }': () 107..211 '{ ...>(); }': ()
[117; 118) 'a': (S<u64>, i64, u8) 117..118 'a': (S<u64>, i64, u8)
[121; 150) '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8) 121..150 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
[121; 152) '<S as ...<u8>()': (S<u64>, i64, u8) 121..152 '<S as ...<u8>()': (S<u64>, i64, u8)
[162; 163) 'b': (S<u64>, i64, u8) 162..163 'b': (S<u64>, i64, u8)
[182; 206) 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8) 182..206 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
[182; 208) 'Trait:...<u8>()': (S<u64>, i64, u8) 182..208 'Trait:...<u8>()': (S<u64>, i64, u8)
"### "###
); );
} }
@ -672,11 +672,11 @@ fn test<T: Trait>(t: T) {
} }
"#), "#),
@r###" @r###"
[30; 34) 'self': &Self 30..34 'self': &Self
[64; 65) 't': T 64..65 't': T
[70; 89) '{ ...d(); }': () 70..89 '{ ...d(); }': ()
[76; 77) 't': T 76..77 't': T
[76; 86) 't.method()': u32 76..86 't.method()': u32
"### "###
); );
} }
@ -693,11 +693,11 @@ fn test<U, T: Trait<U>>(t: T) {
} }
"#), "#),
@r###" @r###"
[33; 37) 'self': &Self 33..37 'self': &Self
[71; 72) 't': T 71..72 't': T
[77; 96) '{ ...d(); }': () 77..96 '{ ...d(); }': ()
[83; 84) 't': T 83..84 't': T
[83; 93) 't.method()': U 83..93 't.method()': U
"### "###
); );
} }
@ -719,18 +719,18 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[29; 33) 'self': Self 29..33 'self': Self
[111; 202) '{ ...(S); }': () 111..202 '{ ...(S); }': ()
[121; 122) 'x': u32 121..122 'x': u32
[130; 131) 'S': S 130..131 'S': S
[130; 138) 'S.into()': u32 130..138 'S.into()': u32
[148; 149) 'y': u64 148..149 'y': u64
[157; 158) 'S': S 157..158 'S': S
[157; 165) 'S.into()': u64 157..165 'S.into()': u64
[175; 176) 'z': u64 175..176 'z': u64
[179; 196) 'Into::...::into': fn into<S, u64>(S) -> u64 179..196 'Into::...::into': fn into<S, u64>(S) -> u64
[179; 199) 'Into::...nto(S)': u64 179..199 'Into::...nto(S)': u64
[197; 198) 'S': S 197..198 'S': S
"### "###
); );
} }


@ -30,54 +30,54 @@ fn test(x: &i32) {
} }
"#), "#),
@r###" @r###"
[9; 10) 'x': &i32 9..10 'x': &i32
[18; 369) '{ ...o_x; }': () 18..369 '{ ...o_x; }': ()
[28; 29) 'y': &i32 28..29 'y': &i32
[32; 33) 'x': &i32 32..33 'x': &i32
[43; 45) '&z': &i32 43..45 '&z': &i32
[44; 45) 'z': i32 44..45 'z': i32
[48; 49) 'x': &i32 48..49 'x': &i32
[59; 60) 'a': i32 59..60 'a': i32
[63; 64) 'z': i32 63..64 'z': i32
[74; 80) '(c, d)': (i32, &str) 74..80 '(c, d)': (i32, &str)
[75; 76) 'c': i32 75..76 'c': i32
[78; 79) 'd': &str 78..79 'd': &str
[83; 95) '(1, "hello")': (i32, &str) 83..95 '(1, "hello")': (i32, &str)
[84; 85) '1': i32 84..85 '1': i32
[87; 94) '"hello"': &str 87..94 '"hello"': &str
[102; 152) 'for (e... }': () 102..152 'for (e... }': ()
[106; 112) '(e, f)': ({unknown}, {unknown}) 106..112 '(e, f)': ({unknown}, {unknown})
[107; 108) 'e': {unknown} 107..108 'e': {unknown}
[110; 111) 'f': {unknown} 110..111 'f': {unknown}
[116; 125) 'some_iter': {unknown} 116..125 'some_iter': {unknown}
[126; 152) '{ ... }': () 126..152 '{ ... }': ()
[140; 141) 'g': {unknown} 140..141 'g': {unknown}
[144; 145) 'e': {unknown} 144..145 'e': {unknown}
[158; 205) 'if let... }': () 158..205 'if let... }': ()
[165; 170) '[val]': [{unknown}] 165..170 '[val]': [{unknown}]
[166; 169) 'val': {unknown} 166..169 'val': {unknown}
[173; 176) 'opt': [{unknown}] 173..176 'opt': [{unknown}]
[177; 205) '{ ... }': () 177..205 '{ ... }': ()
[191; 192) 'h': {unknown} 191..192 'h': {unknown}
[195; 198) 'val': {unknown} 195..198 'val': {unknown}
[215; 221) 'lambda': |u64, u64, i32| -> i32 215..221 'lambda': |u64, u64, i32| -> i32
[224; 256) '|a: u6...b; c }': |u64, u64, i32| -> i32 224..256 '|a: u6...b; c }': |u64, u64, i32| -> i32
[225; 226) 'a': u64 225..226 'a': u64
[233; 234) 'b': u64 233..234 'b': u64
[236; 237) 'c': i32 236..237 'c': i32
[244; 256) '{ a + b; c }': i32 244..256 '{ a + b; c }': i32
[246; 247) 'a': u64 246..247 'a': u64
[246; 251) 'a + b': u64 246..251 'a + b': u64
[250; 251) 'b': u64 250..251 'b': u64
[253; 254) 'c': i32 253..254 'c': i32
[267; 279) 'ref ref_to_x': &&i32 267..279 'ref ref_to_x': &&i32
[282; 283) 'x': &i32 282..283 'x': &i32
[293; 302) 'mut mut_x': &i32 293..302 'mut mut_x': &i32
[305; 306) 'x': &i32 305..306 'x': &i32
[316; 336) 'ref mu...f_to_x': &mut &i32 316..336 'ref mu...f_to_x': &mut &i32
[339; 340) 'x': &i32 339..340 'x': &i32
[350; 351) 'k': &mut &i32 350..351 'k': &mut &i32
[354; 366) 'mut_ref_to_x': &mut &i32 354..366 'mut_ref_to_x': &mut &i32
"### "###
); );
} }
@ -97,47 +97,47 @@ fn test(x: &i32) {
} }
"#, true), "#, true),
@r###" @r###"
[18; 29) '{ loop {} }': T 18..29 '{ loop {} }': T
[20; 27) 'loop {}': ! 20..27 'loop {}': !
[25; 27) '{}': () 25..27 '{}': ()
[38; 39) 'x': &i32 38..39 'x': &i32
[47; 209) '{ ...) {} }': () 47..209 '{ ...) {} }': ()
[53; 76) 'if let...y() {}': () 53..76 'if let...y() {}': ()
[60; 65) '"foo"': &str 60..65 '"foo"': &str
[60; 65) '"foo"': &str 60..65 '"foo"': &str
[68; 71) 'any': fn any<&str>() -> &str 68..71 'any': fn any<&str>() -> &str
[68; 73) 'any()': &str 68..73 'any()': &str
[74; 76) '{}': () 74..76 '{}': ()
[81; 100) 'if let...y() {}': () 81..100 'if let...y() {}': ()
[88; 89) '1': i32 88..89 '1': i32
[88; 89) '1': i32 88..89 '1': i32
[92; 95) 'any': fn any<i32>() -> i32 92..95 'any': fn any<i32>() -> i32
[92; 97) 'any()': i32 92..97 'any()': i32
[98; 100) '{}': () 98..100 '{}': ()
[105; 127) 'if let...y() {}': () 105..127 'if let...y() {}': ()
[112; 116) '1u32': u32 112..116 '1u32': u32
[112; 116) '1u32': u32 112..116 '1u32': u32
[119; 122) 'any': fn any<u32>() -> u32 119..122 'any': fn any<u32>() -> u32
[119; 124) 'any()': u32 119..124 'any()': u32
[125; 127) '{}': () 125..127 '{}': ()
[132; 154) 'if let...y() {}': () 132..154 'if let...y() {}': ()
[139; 143) '1f32': f32 139..143 '1f32': f32
[139; 143) '1f32': f32 139..143 '1f32': f32
[146; 149) 'any': fn any<f32>() -> f32 146..149 'any': fn any<f32>() -> f32
[146; 151) 'any()': f32 146..151 'any()': f32
[152; 154) '{}': () 152..154 '{}': ()
[159; 180) 'if let...y() {}': () 159..180 'if let...y() {}': ()
[166; 169) '1.0': f64 166..169 '1.0': f64
[166; 169) '1.0': f64 166..169 '1.0': f64
[172; 175) 'any': fn any<f64>() -> f64 172..175 'any': fn any<f64>() -> f64
[172; 177) 'any()': f64 172..177 'any()': f64
[178; 180) '{}': () 178..180 '{}': ()
[185; 207) 'if let...y() {}': () 185..207 'if let...y() {}': ()
[192; 196) 'true': bool 192..196 'true': bool
[192; 196) 'true': bool 192..196 'true': bool
[199; 202) 'any': fn any<bool>() -> bool 199..202 'any': fn any<bool>() -> bool
[199; 204) 'any()': bool 199..204 'any()': bool
[205; 207) '{}': () 205..207 '{}': ()
"### "###
); );
} }
@ -152,16 +152,16 @@ fn test(x: &i32) {
} }
"#, true), "#, true),
@r###" @r###"
[9; 10) 'x': &i32 9..10 'x': &i32
[18; 76) '{ ...2 {} }': () 18..76 '{ ...2 {} }': ()
[24; 46) 'if let...u32 {}': () 24..46 'if let...u32 {}': ()
[31; 36) '1..76': u32 31..36 '1..76': u32
[39; 43) '2u32': u32 39..43 '2u32': u32
[44; 46) '{}': () 44..46 '{}': ()
[51; 74) 'if let...u32 {}': () 51..74 'if let...u32 {}': ()
[58; 64) '1..=76': u32 58..64 '1..=76': u32
[67; 71) '2u32': u32 67..71 '2u32': u32
[72; 74) '{}': () 72..74 '{}': ()
"### "###
); );
} }
@ -178,19 +178,19 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[28; 79) '{ ...(1); }': () 28..79 '{ ...(1); }': ()
[38; 42) 'A(n)': A<i32> 38..42 'A(n)': A<i32>
[40; 41) 'n': &i32 40..41 'n': &i32
[45; 50) '&A(1)': &A<i32> 45..50 '&A(1)': &A<i32>
[46; 47) 'A': A<i32>(i32) -> A<i32> 46..47 'A': A<i32>(i32) -> A<i32>
[46; 50) 'A(1)': A<i32> 46..50 'A(1)': A<i32>
[48; 49) '1': i32 48..49 '1': i32
[60; 64) 'A(n)': A<i32> 60..64 'A(n)': A<i32>
[62; 63) 'n': &mut i32 62..63 'n': &mut i32
[67; 76) '&mut A(1)': &mut A<i32> 67..76 '&mut A(1)': &mut A<i32>
[72; 73) 'A': A<i32>(i32) -> A<i32> 72..73 'A': A<i32>(i32) -> A<i32>
[72; 76) 'A(1)': A<i32> 72..76 'A(1)': A<i32>
[74; 75) '1': i32 74..75 '1': i32
"### "###
); );
} }
@ -206,18 +206,18 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 57) '{ ...= v; }': () 11..57 '{ ...= v; }': ()
[21; 22) 'v': &(i32, &i32) 21..22 'v': &(i32, &i32)
[25; 33) '&(1, &2)': &(i32, &i32) 25..33 '&(1, &2)': &(i32, &i32)
[26; 33) '(1, &2)': (i32, &i32) 26..33 '(1, &2)': (i32, &i32)
[27; 28) '1': i32 27..28 '1': i32
[30; 32) '&2': &i32 30..32 '&2': &i32
[31; 32) '2': i32 31..32 '2': i32
[43; 50) '(_, &w)': (i32, &i32) 43..50 '(_, &w)': (i32, &i32)
[44; 45) '_': i32 44..45 '_': i32
[47; 49) '&w': &i32 47..49 '&w': &i32
[48; 49) 'w': i32 48..49 'w': i32
[53; 54) 'v': &(i32, &i32) 53..54 'v': &(i32, &i32)
"### "###
); );
} }
@ -242,30 +242,30 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 210) '{ ... } }': () 11..210 '{ ... } }': ()
[21; 26) 'slice': &[f64] 21..26 'slice': &[f64]
[37; 43) '&[0.0]': &[f64; _] 37..43 '&[0.0]': &[f64; _]
[38; 43) '[0.0]': [f64; _] 38..43 '[0.0]': [f64; _]
[39; 42) '0.0': f64 39..42 '0.0': f64
[49; 208) 'match ... }': () 49..208 'match ... }': ()
[55; 60) 'slice': &[f64] 55..60 'slice': &[f64]
[71; 74) '&[]': &[f64] 71..74 '&[]': &[f64]
[72; 74) '[]': [f64] 72..74 '[]': [f64]
[78; 80) '{}': () 78..80 '{}': ()
[90; 94) '&[a]': &[f64] 90..94 '&[a]': &[f64]
[91; 94) '[a]': [f64] 91..94 '[a]': [f64]
[92; 93) 'a': f64 92..93 'a': f64
[98; 124) '{ ... }': () 98..124 '{ ... }': ()
[112; 113) 'a': f64 112..113 'a': f64
[134; 141) '&[b, c]': &[f64] 134..141 '&[b, c]': &[f64]
[135; 141) '[b, c]': [f64] 135..141 '[b, c]': [f64]
[136; 137) 'b': f64 136..137 'b': f64
[139; 140) 'c': f64 139..140 'c': f64
[145; 186) '{ ... }': () 145..186 '{ ... }': ()
[159; 160) 'b': f64 159..160 'b': f64
[174; 175) 'c': f64 174..175 'c': f64
[195; 196) '_': &[f64] 195..196 '_': &[f64]
[200; 202) '{}': () 200..202 '{}': ()
"### "###
); );
} }
@ -288,25 +288,25 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 180) '{ ... } }': () 11..180 '{ ... } }': ()
[21; 24) 'arr': [f64; _] 21..24 'arr': [f64; _]
[37; 47) '[0.0, 1.0]': [f64; _] 37..47 '[0.0, 1.0]': [f64; _]
[38; 41) '0.0': f64 38..41 '0.0': f64
[43; 46) '1.0': f64 43..46 '1.0': f64
[53; 178) 'match ... }': () 53..178 'match ... }': ()
[59; 62) 'arr': [f64; _] 59..62 'arr': [f64; _]
[73; 81) '[1.0, a]': [f64; _] 73..81 '[1.0, a]': [f64; _]
[74; 77) '1.0': f64 74..77 '1.0': f64
[74; 77) '1.0': f64 74..77 '1.0': f64
[79; 80) 'a': f64 79..80 'a': f64
[85; 111) '{ ... }': () 85..111 '{ ... }': ()
[99; 100) 'a': f64 99..100 'a': f64
[121; 127) '[b, c]': [f64; _] 121..127 '[b, c]': [f64; _]
[122; 123) 'b': f64 122..123 'b': f64
[125; 126) 'c': f64 125..126 'c': f64
[131; 172) '{ ... }': () 131..172 '{ ... }': ()
[145; 146) 'b': f64 145..146 'b': f64
[160; 161) 'c': f64 160..161 'c': f64
"### "###
); );
} }
@ -339,31 +339,31 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[68; 289) '{ ... d; }': () 68..289 '{ ... d; }': ()
[78; 79) 'e': E 78..79 'e': E
[82; 95) 'E::A { x: 3 }': E 82..95 'E::A { x: 3 }': E
[92; 93) '3': usize 92..93 '3': usize
[106; 113) 'S(y, z)': S 106..113 'S(y, z)': S
[108; 109) 'y': u32 108..109 'y': u32
[111; 112) 'z': E 111..112 'z': E
[116; 119) 'foo': S 116..119 'foo': S
[129; 148) 'E::A {..._var }': E 129..148 'E::A {..._var }': E
[139; 146) 'new_var': usize 139..146 'new_var': usize
[151; 152) 'e': E 151..152 'e': E
[159; 245) 'match ... }': usize 159..245 'match ... }': usize
[165; 166) 'e': E 165..166 'e': E
[177; 187) 'E::A { x }': E 177..187 'E::A { x }': E
[184; 185) 'x': usize 184..185 'x': usize
[191; 192) 'x': usize 191..192 'x': usize
[202; 206) 'E::B': E 202..206 'E::B': E
[210; 213) 'foo': bool 210..213 'foo': bool
[217; 218) '1': usize 217..218 '1': usize
[228; 232) 'E::B': E 228..232 'E::B': E
[236; 238) '10': usize 236..238 '10': usize
[256; 275) 'ref d ...{ .. }': &E 256..275 'ref d ...{ .. }': &E
[264; 275) 'E::A { .. }': E 264..275 'E::A { .. }': E
[278; 279) 'e': E 278..279 'e': E
[285; 286) 'd': &E 285..286 'd': &E
"### "###
); );
} }
@ -391,23 +391,23 @@ fn test(a1: A<u32>, o: Option<u64>) {
} }
"#), "#),
@r###" @r###"
[79; 81) 'a1': A<u32> 79..81 'a1': A<u32>
[91; 92) 'o': Option<u64> 91..92 'o': Option<u64>
[107; 244) '{ ... }; }': () 107..244 '{ ... }; }': ()
[117; 128) 'A { x: x2 }': A<u32> 117..128 'A { x: x2 }': A<u32>
[124; 126) 'x2': u32 124..126 'x2': u32
[131; 133) 'a1': A<u32> 131..133 'a1': A<u32>
[143; 161) 'A::<i6...: x3 }': A<i64> 143..161 'A::<i6...: x3 }': A<i64>
[157; 159) 'x3': i64 157..159 'x3': i64
[164; 174) 'A { x: 1 }': A<i64> 164..174 'A { x: 1 }': A<i64>
[171; 172) '1': i64 171..172 '1': i64
[180; 241) 'match ... }': u64 180..241 'match ... }': u64
[186; 187) 'o': Option<u64> 186..187 'o': Option<u64>
[198; 213) 'Option::Some(t)': Option<u64> 198..213 'Option::Some(t)': Option<u64>
[211; 212) 't': u64 211..212 't': u64
[217; 218) 't': u64 217..218 't': u64
[228; 229) '_': Option<u64> 228..229 '_': Option<u64>
[233; 234) '1': u64 233..234 '1': u64
"### "###
); );
} }
@ -431,27 +431,27 @@ fn test() {
} }
"#, true), "#, true),
@r###" @r###"
[74; 75) '1': usize 74..75 '1': usize
[88; 310) '{ ...atch }': () 88..310 '{ ...atch }': ()
[98; 99) 'a': Option<u32> 98..99 'a': Option<u32>
[115; 119) 'None': Option<u32> 115..119 'None': Option<u32>
[129; 130) 'b': Option<i64> 129..130 'b': Option<i64>
[146; 183) 'match ... }': Option<i64> 146..183 'match ... }': Option<i64>
[152; 153) 'a': Option<u32> 152..153 'a': Option<u32>
[164; 168) 'None': Option<u32> 164..168 'None': Option<u32>
[172; 176) 'None': Option<i64> 172..176 'None': Option<i64>
[193; 194) '_': () 193..194 '_': ()
[201; 224) 'match ... Foo }': Foo 201..224 'match ... Foo }': Foo
[207; 209) '()': () 207..209 '()': ()
[212; 215) 'Foo': Foo 212..215 'Foo': Foo
[219; 222) 'Foo': Foo 219..222 'Foo': Foo
[255; 256) '_': () 255..256 '_': ()
[263; 286) 'match ... Bar }': usize 263..286 'match ... Bar }': usize
[269; 271) '()': () 269..271 '()': ()
[274; 277) 'Bar': usize 274..277 'Bar': usize
[281; 284) 'Bar': usize 281..284 'Bar': usize
[201; 224): expected (), got Foo 201..224: expected (), got Foo
[263; 286): expected (), got usize 263..286: expected (), got usize
"### "###
); );
} }
@ -469,15 +469,15 @@ fn main() {
} }
} }
"#), @" "#), @"
[28; 32) 'self': &S 28..32 'self': &S
[42; 51) '{ false }': bool 42..51 '{ false }': bool
[44; 49) 'false': bool 44..49 'false': bool
[65; 116) '{ ... } }': () 65..116 '{ ... } }': ()
[71; 114) 'match ... }': () 71..114 'match ... }': ()
[77; 78) 'S': S 77..78 'S': S
[89; 90) 's': S 89..90 's': S
[94; 95) 's': S 94..95 's': S
[94; 101) 's.foo()': bool 94..101 's.foo()': bool
[105; 107) '()': () 105..107 '()': ()
") ")
} }


@ -14,11 +14,11 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 37) '{ l... {}; }': () 11..37 '{ l... {}; }': ()
[20; 21) 'x': () 20..21 'x': ()
[24; 34) 'if true {}': () 24..34 'if true {}': ()
[27; 31) 'true': bool 27..31 'true': bool
[32; 34) '{}': () 32..34 '{}': ()
"### "###
); );
} }
@ -34,10 +34,10 @@ fn test(x: X) {
} }
"#), "#),
@r###" @r###"
[20; 21) 'x': X 20..21 'x': X
[26; 47) '{ ...eld; }': () 26..47 '{ ...eld; }': ()
[32; 33) 'x': X 32..33 'x': X
[32; 44) 'x.some_field': {unknown} 32..44 'x.some_field': {unknown}
"### "###
); );
} }
@ -55,14 +55,14 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 89) '{ ... } }': () 11..89 '{ ... } }': ()
[17; 21) 'X {}': {unknown} 17..21 'X {}': {unknown}
[27; 87) 'match ... }': () 27..87 'match ... }': ()
[33; 34) 'x': {unknown} 33..34 'x': {unknown}
[45; 52) 'A::B {}': {unknown} 45..52 'A::B {}': {unknown}
[56; 58) '()': () 56..58 '()': ()
[68; 74) 'A::Y()': {unknown} 68..74 'A::Y()': {unknown}
[78; 80) '()': () 78..80 '()': ()
"### "###
); );
} }
@ -77,12 +77,12 @@ fn quux() {
} }
"#), "#),
@r###" @r###"
[11; 41) '{ ...+ y; }': () 11..41 '{ ...+ y; }': ()
[21; 22) 'y': i32 21..22 'y': i32
[25; 27) '92': i32 25..27 '92': i32
[33; 34) '1': i32 33..34 '1': i32
[33; 38) '1 + y': i32 33..38 '1 + y': i32
[37; 38) 'y': i32 37..38 'y': i32
"### "###
); );
} }
@ -99,13 +99,13 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 48) '{ ...&y]; }': () 11..48 '{ ...&y]; }': ()
[21; 22) 'y': &{unknown} 21..22 'y': &{unknown}
[25; 32) 'unknown': &{unknown} 25..32 'unknown': &{unknown}
[38; 45) '[y, &y]': [&&{unknown}; _] 38..45 '[y, &y]': [&&{unknown}; _]
[39; 40) 'y': &{unknown} 39..40 'y': &{unknown}
[42; 44) '&y': &&{unknown} 42..44 '&y': &&{unknown}
[43; 44) 'y': &{unknown} 43..44 'y': &{unknown}
"### "###
); );
} }
@ -123,20 +123,20 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[11; 80) '{ ...x)]; }': () 11..80 '{ ...x)]; }': ()
[21; 22) 'x': &&{unknown} 21..22 'x': &&{unknown}
[25; 32) 'unknown': &&{unknown} 25..32 'unknown': &&{unknown}
[42; 43) 'y': &&{unknown} 42..43 'y': &&{unknown}
[46; 53) 'unknown': &&{unknown} 46..53 'unknown': &&{unknown}
[59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _] 59..77 '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _]
[60; 66) '(x, y)': (&&&{unknown}, &&&{unknown}) 60..66 '(x, y)': (&&&{unknown}, &&&{unknown})
[61; 62) 'x': &&{unknown} 61..62 'x': &&{unknown}
[64; 65) 'y': &&{unknown} 64..65 'y': &&{unknown}
[68; 76) '(&y, &x)': (&&&{unknown}, &&&{unknown}) 68..76 '(&y, &x)': (&&&{unknown}, &&&{unknown})
[69; 71) '&y': &&&{unknown} 69..71 '&y': &&&{unknown}
[70; 71) 'y': &&{unknown} 70..71 'y': &&{unknown}
[73; 75) '&x': &&&{unknown} 73..75 '&x': &&&{unknown}
[74; 75) 'x': &&{unknown} 74..75 'x': &&{unknown}
"### "###
); );
} }
@ -158,12 +158,12 @@ fn write() {
} }
"#), "#),
@r###" @r###"
[54; 139) '{ ... } }': () 54..139 '{ ... } }': ()
[60; 137) 'match ... }': () 60..137 'match ... }': ()
[66; 83) 'someth...nknown': Maybe<{unknown}> 66..83 'someth...nknown': Maybe<{unknown}>
[94; 124) 'Maybe:...thing)': Maybe<{unknown}> 94..124 'Maybe:...thing)': Maybe<{unknown}>
[106; 123) 'ref mu...ething': &mut {unknown} 106..123 'ref mu...ething': &mut {unknown}
[128; 130) '()': () 128..130 '()': ()
"### "###
); );
} }
@ -179,13 +179,13 @@ fn test_line_buffer() {
} }
"#), "#),
@r###" @r###"
[23; 53) '{ ...n']; }': () 23..53 '{ ...n']; }': ()
[29; 50) '&[0, b...b'\n']': &[u8; _] 29..50 '&[0, b...b'\n']': &[u8; _]
[30; 50) '[0, b'...b'\n']': [u8; _] 30..50 '[0, b'...b'\n']': [u8; _]
[31; 32) '0': u8 31..32 '0': u8
[34; 39) 'b'\n'': u8 34..39 'b'\n'': u8
[41; 42) '1': u8 41..42 '1': u8
[44; 49) 'b'\n'': u8 44..49 'b'\n'': u8
"### "###
); );
} }
@ -202,14 +202,14 @@ pub fn compute() {
} }
"#), "#),
@r###" @r###"
[18; 108) '{ ... } }': () 18..108 '{ ... } }': ()
[24; 106) 'match ... }': () 24..106 'match ... }': ()
[30; 37) 'nope!()': {unknown} 30..37 'nope!()': {unknown}
[48; 94) 'SizeSk...tail }': {unknown} 48..94 'SizeSk...tail }': {unknown}
[82; 86) 'true': bool 82..86 'true': bool
[82; 86) 'true': bool 82..86 'true': bool
[88; 92) 'tail': {unknown} 88..92 'tail': {unknown}
[98; 100) '{}': () 98..100 '{}': ()
"### "###
); );
} }
@ -226,14 +226,14 @@ pub fn primitive_type() {
} }
"#), "#),
@r###" @r###"
[25; 106) '{ ... } }': () 25..106 '{ ... } }': ()
[31; 104) 'match ... }': () 31..104 'match ... }': ()
[37; 42) '*self': {unknown} 37..42 '*self': {unknown}
[38; 42) 'self': {unknown} 38..42 'self': {unknown}
[53; 91) 'Borrow...), ..}': {unknown} 53..91 'Borrow...), ..}': {unknown}
[74; 86) 'Primitive(p)': {unknown} 74..86 'Primitive(p)': {unknown}
[84; 85) 'p': {unknown} 84..85 'p': {unknown}
[95; 97) '{}': () 95..97 '{}': ()
"### "###
); );
} }
@ -260,29 +260,29 @@ fn extra_compiler_flags() {
} }
"#), "#),
@r###" @r###"
[27; 323) '{ ... } }': () 27..323 '{ ... } }': ()
[33; 321) 'for co... }': () 33..321 'for co... }': ()
[37; 44) 'content': &{unknown} 37..44 'content': &{unknown}
[48; 61) 'doesnt_matter': {unknown} 48..61 'doesnt_matter': {unknown}
[62; 321) '{ ... }': () 62..321 '{ ... }': ()
[76; 80) 'name': &&{unknown} 76..80 'name': &&{unknown}
[83; 167) 'if doe... }': &&{unknown} 83..167 'if doe... }': &&{unknown}
[86; 99) 'doesnt_matter': bool 86..99 'doesnt_matter': bool
[100; 129) '{ ... }': &&{unknown} 100..129 '{ ... }': &&{unknown}
[114; 119) 'first': &&{unknown} 114..119 'first': &&{unknown}
[135; 167) '{ ... }': &&{unknown} 135..167 '{ ... }': &&{unknown}
[149; 157) '&content': &&{unknown} 149..157 '&content': &&{unknown}
[150; 157) 'content': &{unknown} 150..157 'content': &{unknown}
[182; 189) 'content': &{unknown} 182..189 'content': &{unknown}
[192; 314) 'if ICE... }': &{unknown} 192..314 'if ICE... }': &{unknown}
[195; 232) 'ICE_RE..._VALUE': {unknown} 195..232 'ICE_RE..._VALUE': {unknown}
[195; 248) 'ICE_RE...&name)': bool 195..248 'ICE_RE...&name)': bool
[242; 247) '&name': &&&{unknown} 242..247 '&name': &&&{unknown}
[243; 247) 'name': &&{unknown} 243..247 'name': &&{unknown}
[249; 277) '{ ... }': &&{unknown} 249..277 '{ ... }': &&{unknown}
[263; 267) 'name': &&{unknown} 263..267 'name': &&{unknown}
[283; 314) '{ ... }': &{unknown} 283..314 '{ ... }': &{unknown}
[297; 304) 'content': &{unknown} 297..304 'content': &{unknown}
"### "###
); );
} }
@ -303,11 +303,11 @@ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
} }
"#), "#),
@r###" @r###"
[92; 106) 'query_response': Canonical<QueryResponse<R>> 92..106 'query_response': Canonical<QueryResponse<R>>
[137; 167) '{ ...lue; }': () 137..167 '{ ...lue; }': ()
[143; 164) '&query....value': &QueryResponse<R> 143..164 '&query....value': &QueryResponse<R>
[144; 158) 'query_response': Canonical<QueryResponse<R>> 144..158 'query_response': Canonical<QueryResponse<R>>
[144; 164) 'query_....value': QueryResponse<R> 144..164 'query_....value': QueryResponse<R>
"### "###
); );
} }
@ -322,9 +322,9 @@ fn test() {
} }
"#), "#),
@r###" @r###"
![0; 4) '0u32': u32 !0..4 '0u32': u32
[45; 70) '{ ...()); }': () 45..70 '{ ...()); }': ()
[55; 56) 'a': u32 55..56 'a': u32
"### "###
); );
} }
@ -345,10 +345,10 @@ pub fn main_loop() {
} }
"#), "#),
@r###" @r###"
[144; 146) '{}': () 144..146 '{}': ()
[169; 198) '{ ...t(); }': () 169..198 '{ ...t(); }': ()
[175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher> 175..193 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
[175; 195) 'FxHash...ault()': HashSet<{unknown}, FxHasher> 175..195 'FxHash...ault()': HashSet<{unknown}, FxHasher>
"### "###
); );
} }
@ -375,13 +375,13 @@ fn issue_2669() {
}"# }"#
), ),
@r###" @r###"
[147; 262) '{ ... }': () 147..262 '{ ... }': ()
[161; 164) 'end': fn end<{unknown}>() 161..164 'end': fn end<{unknown}>()
[161; 166) 'end()': () 161..166 'end()': ()
[199; 252) '{ ... }': () 199..252 '{ ... }': ()
[221; 223) '_x': ! 221..223 '_x': !
[230; 237) 'loop {}': ! 230..237 'loop {}': !
[235; 237) '{}': () 235..237 '{}': ()
"### "###
) )
} }
@ -396,9 +396,9 @@ fn test() {
} }
"#), "#),
@r###" @r###"
[26; 53) '{ ...oo() }': () 26..53 '{ ...oo() }': ()
[32; 49) '<Trait...>::foo': {unknown} 32..49 '<Trait...>::foo': {unknown}
[32; 51) '<Trait...:foo()': () 32..51 '<Trait...:foo()': ()
"### "###
); );
} }
@ -496,13 +496,13 @@ fn foo(params: &[usize]) {
} }
"#), "#),
@r###" @r###"
[8; 14) 'params': &[usize] 8..14 'params': &[usize]
[26; 81) '{ ... } }': () 26..81 '{ ... } }': ()
[32; 79) 'match ... }': () 32..79 'match ... }': ()
[38; 44) 'params': &[usize] 38..44 'params': &[usize]
[55; 67) '[ps @ .., _]': [usize] 55..67 '[ps @ .., _]': [usize]
[65; 66) '_': usize 65..66 '_': usize
[71; 73) '{}': () 71..73 '{}': ()
"### "###
); );
} }
@ -523,13 +523,13 @@ fn foo(b: Bar) {
} }
"#), "#),
@r###" @r###"
[36; 37) 'b': Bar 36..37 'b': Bar
[44; 96) '{ ... } }': () 44..96 '{ ... } }': ()
[50; 94) 'match ... }': () 50..94 'match ... }': ()
[56; 57) 'b': Bar 56..57 'b': Bar
[68; 81) 'Bar { a: .. }': Bar 68..81 'Bar { a: .. }': Bar
[77; 79) '..': bool 77..79 '..': bool
[85; 87) '{}': () 85..87 '{}': ()
"### "###
); );
} }
@ -564,13 +564,13 @@ where
} }
"#), "#),
@r###" @r###"
[66; 70) 'self': Self 66..70 'self': Self
[268; 272) 'self': Self 268..272 'self': Self
[467; 471) 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> 467..471 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
[489; 523) '{ ... }': () 489..523 '{ ... }': ()
[499; 503) 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> 499..503 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
[499; 509) 'self.order': O 499..509 'self.order': O
[499; 516) 'self.o...into()': dyn QueryFragment<DB> 499..516 'self.o...into()': dyn QueryFragment<DB>
"### "###
); );
} }

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -183,8 +183,8 @@ mod tests {
call<|>ee(); call<|>ee();
} }
"#, "#,
"callee FN_DEF FileId(1) [0; 14) [3; 9)", "callee FN_DEF FileId(1) 0..14 3..9",
&["caller FN_DEF FileId(1) [15; 44) [18; 24) : [[33; 39)]"], &["caller FN_DEF FileId(1) 15..44 18..24 : [33..39]"],
&[], &[],
); );
} }
@ -199,8 +199,8 @@ mod tests {
callee(); callee();
} }
"#, "#,
"callee FN_DEF FileId(1) [0; 14) [3; 9)", "callee FN_DEF FileId(1) 0..14 3..9",
&["caller FN_DEF FileId(1) [15; 44) [18; 24) : [[33; 39)]"], &["caller FN_DEF FileId(1) 15..44 18..24 : [33..39]"],
&[], &[],
); );
} }
@ -216,8 +216,8 @@ mod tests {
callee(); callee();
} }
"#, "#,
"callee FN_DEF FileId(1) [0; 14) [3; 9)", "callee FN_DEF FileId(1) 0..14 3..9",
&["caller FN_DEF FileId(1) [15; 58) [18; 24) : [[33; 39), [47; 53)]"], &["caller FN_DEF FileId(1) 15..58 18..24 : [33..39, 47..53]"],
&[], &[],
); );
} }
@ -236,10 +236,10 @@ mod tests {
callee(); callee();
} }
"#, "#,
"callee FN_DEF FileId(1) [0; 14) [3; 9)", "callee FN_DEF FileId(1) 0..14 3..9",
&[ &[
"caller1 FN_DEF FileId(1) [15; 45) [18; 25) : [[34; 40)]", "caller1 FN_DEF FileId(1) 15..45 18..25 : [34..40]",
"caller2 FN_DEF FileId(1) [46; 76) [49; 56) : [[65; 71)]", "caller2 FN_DEF FileId(1) 46..76 49..56 : [65..71]",
], ],
&[], &[],
); );
@ -260,8 +260,8 @@ mod tests {
//- /foo/mod.rs //- /foo/mod.rs
pub fn callee() {} pub fn callee() {}
"#, "#,
"callee FN_DEF FileId(2) [0; 18) [7; 13)", "callee FN_DEF FileId(2) 0..18 7..13",
&["caller FN_DEF FileId(1) [26; 55) [29; 35) : [[44; 50)]"], &["caller FN_DEF FileId(1) 26..55 29..35 : [44..50]"],
&[], &[],
); );
} }
@ -277,9 +277,9 @@ mod tests {
callee(); callee();
} }
"#, "#,
"caller FN_DEF FileId(1) [15; 58) [18; 24)", "caller FN_DEF FileId(1) 15..58 18..24",
&[], &[],
&["callee FN_DEF FileId(1) [0; 14) [3; 9) : [[33; 39), [47; 53)]"], &["callee FN_DEF FileId(1) 0..14 3..9 : [33..39, 47..53]"],
); );
} }
@ -298,9 +298,9 @@ mod tests {
//- /foo/mod.rs //- /foo/mod.rs
pub fn callee() {} pub fn callee() {}
"#, "#,
"caller FN_DEF FileId(1) [26; 55) [29; 35)", "caller FN_DEF FileId(1) 26..55 29..35",
&[], &[],
&["callee FN_DEF FileId(2) [0; 18) [7; 13) : [[44; 50)]"], &["callee FN_DEF FileId(2) 0..18 7..13 : [44..50]"],
); );
} }
@ -321,9 +321,9 @@ mod tests {
} }
"#, "#,
"caller2 FN_DEF FileId(1) [32; 63) [35; 42)", "caller2 FN_DEF FileId(1) 32..63 35..42",
&["caller1 FN_DEF FileId(1) [0; 31) [3; 10) : [[19; 26)]"], &["caller1 FN_DEF FileId(1) 0..31 3..10 : [19..26]"],
&["caller3 FN_DEF FileId(1) [64; 80) [67; 74) : [[51; 58)]"], &["caller3 FN_DEF FileId(1) 64..80 67..74 : [51..58]"],
); );
} }
} }
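The call-hierarchy expectations above follow the pattern "<name> <syntax kind> <file id> <full range> <focus range>", optionally followed by " : [<call site ranges>]", and only the range rendering changed with the move to text-size. A hedged sketch of how such a label could be assembled (nav_target_label is a hypothetical helper for illustration, not the actual test support code):

use text_size::{TextRange, TextSize};

// Hypothetical helper: builds a label in the shape used by these expectations.
fn nav_target_label(name: &str, kind: &str, file_id: u32, full: TextRange, focus: TextRange) -> String {
    // With text-size, {:?} on a TextRange yields "start..end".
    format!("{} {} FileId({}) {:?} {:?}", name, kind, file_id, full, focus)
}

fn main() {
    let full = TextRange::new(TextSize::from(0), TextSize::from(14));
    let focus = TextRange::new(TextSize::from(3), TextSize::from(9));
    assert_eq!(nav_target_label("callee", "FN_DEF", 1, full, focus), "callee FN_DEF FileId(1) 0..14 3..9");
}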


@ -126,7 +126,7 @@ impl FnCallNode {
ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)),
ast::MethodCallExpr(it) => { ast::MethodCallExpr(it) => {
let arg_list = it.arg_list()?; let arg_list = it.arg_list()?;
if !syntax.text_range().is_subrange(&arg_list.syntax().text_range()) { if !arg_list.syntax().text_range().contains_range(syntax.text_range()) {
return None; return None;
} }
Some(FnCallNode::MethodCallExpr(it)) Some(FnCallNode::MethodCallExpr(it))
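The hunk above shows a hand-written call site updated for the same migration: text_unit's a.is_subrange(&b) asks "is a inside b?", while text-size expresses it as b.contains_range(a), with receiver and argument swapped. A small sketch of the equivalence, assuming the text-size 1.0 API (illustrative, not code from this PR):

use text_size::{TextRange, TextSize};

// Returns true when `child` lies entirely within `parent`.
// text_unit spelled this as child.is_subrange(&parent);
// text-size flips the receiver: parent.contains_range(child).
fn is_inside(child: TextRange, parent: TextRange) -> bool {
    parent.contains_range(child)
}

fn main() {
    let parent = TextRange::new(TextSize::from(0), TextSize::from(14));
    let child = TextRange::new(TextSize::from(3), TextSize::from(9));
    assert!(is_inside(child, parent));
    assert!(!is_inside(parent, child));
}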


@ -147,190 +147,190 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "allow", label: "allow",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "allow(${0:lint})", insert: "allow(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "cfg", label: "cfg",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "cfg(${0:predicate})", insert: "cfg(${0:predicate})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "cfg_attr", label: "cfg_attr",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "cfg_attr(${1:predicate}, ${0:attr})", insert: "cfg_attr(${1:predicate}, ${0:attr})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "deny", label: "deny",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "deny(${0:lint})", insert: "deny(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "deprecated", label: "deprecated",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "deprecated = \"${0:reason}\"", insert: "deprecated = \"${0:reason}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "derive", label: "derive",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "derive(${0:Debug})", insert: "derive(${0:Debug})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "doc", label: "doc",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "doc = \"${0:docs}\"", insert: "doc = \"${0:docs}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "forbid", label: "forbid",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "forbid(${0:lint})", insert: "forbid(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "ignore", label: "ignore",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "ignore(${0:lint})", insert: "ignore(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "inline", label: "inline",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "inline(${0:lint})", insert: "inline(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "link", label: "link",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "link", insert: "link",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "link_name", label: "link_name",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "link_name = \"${0:symbol_name}\"", insert: "link_name = \"${0:symbol_name}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "macro_export", label: "macro_export",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "macro_export", insert: "macro_export",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "macro_use", label: "macro_use",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "macro_use", insert: "macro_use",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "must_use", label: "must_use",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "must_use = \"${0:reason}\"", insert: "must_use = \"${0:reason}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "no_mangle", label: "no_mangle",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "no_mangle", insert: "no_mangle",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "non_exhaustive", label: "non_exhaustive",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "non_exhaustive", insert: "non_exhaustive",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "path", label: "path",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "path =\"${0:path}\"", insert: "path =\"${0:path}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "proc_macro", label: "proc_macro",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "proc_macro", insert: "proc_macro",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "proc_macro_attribute", label: "proc_macro_attribute",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "proc_macro_attribute", insert: "proc_macro_attribute",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "proc_macro_derive", label: "proc_macro_derive",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "proc_macro_derive(${0:Trait})", insert: "proc_macro_derive(${0:Trait})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "repr", label: "repr",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "repr(${0:C})", insert: "repr(${0:C})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "should_panic", label: "should_panic",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "expected = \"${0:reason}\"", insert: "expected = \"${0:reason}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "target_feature", label: "target_feature",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "target_feature = \"${0:feature}\"", insert: "target_feature = \"${0:feature}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "test", label: "test",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "test", insert: "test",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "used", label: "used",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "used", insert: "used",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "warn", label: "warn",
source_range: [19; 19), source_range: 19..19,
delete: [19; 19), delete: 19..19,
insert: "warn(${0:lint})", insert: "warn(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
@ -351,232 +351,232 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "allow", label: "allow",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "allow(${0:lint})", insert: "allow(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "cfg", label: "cfg",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "cfg(${0:predicate})", insert: "cfg(${0:predicate})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "cfg_attr", label: "cfg_attr",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "cfg_attr(${1:predicate}, ${0:attr})", insert: "cfg_attr(${1:predicate}, ${0:attr})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "deny", label: "deny",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "deny(${0:lint})", insert: "deny(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "deprecated", label: "deprecated",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "deprecated = \"${0:reason}\"", insert: "deprecated = \"${0:reason}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "derive", label: "derive",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "derive(${0:Debug})", insert: "derive(${0:Debug})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "doc", label: "doc",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "doc = \"${0:docs}\"", insert: "doc = \"${0:docs}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "feature", label: "feature",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "feature(${0:flag})", insert: "feature(${0:flag})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "forbid", label: "forbid",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "forbid(${0:lint})", insert: "forbid(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "global_allocator", label: "global_allocator",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "global_allocator", insert: "global_allocator",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "ignore", label: "ignore",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "ignore(${0:lint})", insert: "ignore(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "inline", label: "inline",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "inline(${0:lint})", insert: "inline(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "link", label: "link",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "link", insert: "link",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "link_name", label: "link_name",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "link_name = \"${0:symbol_name}\"", insert: "link_name = \"${0:symbol_name}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "macro_export", label: "macro_export",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "macro_export", insert: "macro_export",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "macro_use", label: "macro_use",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "macro_use", insert: "macro_use",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "must_use", label: "must_use",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "must_use = \"${0:reason}\"", insert: "must_use = \"${0:reason}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "no_mangle", label: "no_mangle",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "no_mangle", insert: "no_mangle",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "no_std", label: "no_std",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "no_std", insert: "no_std",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "non_exhaustive", label: "non_exhaustive",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "non_exhaustive", insert: "non_exhaustive",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "panic_handler", label: "panic_handler",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "panic_handler", insert: "panic_handler",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "path", label: "path",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "path =\"${0:path}\"", insert: "path =\"${0:path}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "proc_macro", label: "proc_macro",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "proc_macro", insert: "proc_macro",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "proc_macro_attribute", label: "proc_macro_attribute",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "proc_macro_attribute", insert: "proc_macro_attribute",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "proc_macro_derive", label: "proc_macro_derive",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "proc_macro_derive(${0:Trait})", insert: "proc_macro_derive(${0:Trait})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "recursion_limit", label: "recursion_limit",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "recursion_limit = ${0:128}", insert: "recursion_limit = ${0:128}",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "repr", label: "repr",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "repr(${0:C})", insert: "repr(${0:C})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "should_panic", label: "should_panic",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "expected = \"${0:reason}\"", insert: "expected = \"${0:reason}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "target_feature", label: "target_feature",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "target_feature = \"${0:feature}\"", insert: "target_feature = \"${0:feature}\"",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "test", label: "test",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "test", insert: "test",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "used", label: "used",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "used", insert: "used",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "warn", label: "warn",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "warn(${0:lint})", insert: "warn(${0:lint})",
kind: Attribute, kind: Attribute,
}, },
CompletionItem { CompletionItem {
label: "windows_subsystem", label: "windows_subsystem",
source_range: [20; 20), source_range: 20..20,
delete: [20; 20), delete: 20..20,
insert: "windows_subsystem = \"${0:subsystem}\"", insert: "windows_subsystem = \"${0:subsystem}\"",
kind: Attribute, kind: Attribute,
}, },


@ -94,8 +94,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [94; 94), source_range: 94..94,
delete: [94; 94), delete: 94..94,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -125,8 +125,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [187; 187), source_range: 187..187,
delete: [187; 187), delete: 187..187,
insert: "foo()$0", insert: "foo()$0",
kind: Method, kind: Method,
lookup: "foo", lookup: "foo",
@ -134,8 +134,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [187; 187), source_range: 187..187,
delete: [187; 187), delete: 187..187,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "(u32,)", detail: "(u32,)",
@ -165,8 +165,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [126; 126), source_range: 126..126,
delete: [126; 126), delete: 126..126,
insert: "foo()$0", insert: "foo()$0",
kind: Method, kind: Method,
lookup: "foo", lookup: "foo",
@ -174,8 +174,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [126; 126), source_range: 126..126,
delete: [126; 126), delete: 126..126,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "(u32, i32)", detail: "(u32, i32)",
@ -222,24 +222,24 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "crate_field", label: "crate_field",
source_range: [313; 313), source_range: 313..313,
delete: [313; 313), delete: 313..313,
insert: "crate_field", insert: "crate_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
CompletionItem { CompletionItem {
label: "pub_field", label: "pub_field",
source_range: [313; 313), source_range: 313..313,
delete: [313; 313), delete: 313..313,
insert: "pub_field", insert: "pub_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
CompletionItem { CompletionItem {
label: "super_field", label: "super_field",
source_range: [313; 313), source_range: 313..313,
delete: [313; 313), delete: 313..313,
insert: "super_field", insert: "super_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -267,8 +267,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [144; 144), source_range: 144..144,
delete: [144; 144), delete: 144..144,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -300,8 +300,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [243; 243), source_range: 243..243,
delete: [243; 243), delete: 243..243,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -333,8 +333,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [256; 256), source_range: 256..256,
delete: [256; 256), delete: 256..256,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -362,8 +362,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [151; 151), source_range: 151..151,
delete: [151; 151), delete: 151..151,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -391,8 +391,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [155; 155), source_range: 155..155,
delete: [155; 155), delete: 155..155,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -423,8 +423,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [219; 219), source_range: 219..219,
delete: [219; 219), delete: 219..219,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -475,8 +475,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [249; 249), source_range: 249..249,
delete: [249; 249), delete: 249..249,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",
@ -502,16 +502,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "0", label: "0",
source_range: [75; 75), source_range: 75..75,
delete: [75; 75), delete: 75..75,
insert: "0", insert: "0",
kind: Field, kind: Field,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "1", label: "1",
source_range: [75; 75), source_range: 75..75,
delete: [75; 75), delete: 75..75,
insert: "1", insert: "1",
kind: Field, kind: Field,
detail: "f64", detail: "f64",
@ -545,8 +545,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "blah()", label: "blah()",
source_range: [299; 300), source_range: 299..300,
delete: [299; 300), delete: 299..300,
insert: "blah()$0", insert: "blah()$0",
kind: Method, kind: Method,
lookup: "blah", lookup: "blah",
@ -572,8 +572,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [106; 106), source_range: 106..106,
delete: [106; 106), delete: 106..106,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -606,8 +606,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "await", label: "await",
source_range: [74; 74), source_range: 74..74,
delete: [74; 74), delete: 74..74,
insert: "await", insert: "await",
detail: "expr.await", detail: "expr.await",
}, },
@ -638,15 +638,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "A", label: "A",
source_range: [217; 217), source_range: 217..217,
delete: [217; 217), delete: 217..217,
insert: "A", insert: "A",
kind: Const, kind: Const,
}, },
CompletionItem { CompletionItem {
label: "b", label: "b",
source_range: [217; 217), source_range: 217..217,
delete: [217; 217), delete: 217..217,
insert: "b", insert: "b",
kind: Module, kind: Module,
}, },
@ -671,8 +671,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [156; 157), source_range: 156..157,
delete: [156; 157), delete: 156..157,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -698,8 +698,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [156; 157), source_range: 156..157,
delete: [156; 157), delete: 156..157,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -726,8 +726,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [156; 156), source_range: 156..156,
delete: [156; 156), delete: 156..156,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -753,8 +753,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [162; 163), source_range: 162..163,
delete: [162; 163), delete: 162..163,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -790,8 +790,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [552; 552), source_range: 552..552,
delete: [552; 552), delete: 552..552,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -820,8 +820,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_method()", label: "the_method()",
source_range: [201; 201), source_range: 201..201,
delete: [201; 201), delete: 201..201,
insert: "the_method()$0", insert: "the_method()$0",
kind: Method, kind: Method,
lookup: "the_method", lookup: "the_method",

View file

@ -75,8 +75,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "file_id: FileId", label: "file_id: FileId",
source_range: [110; 114), source_range: 110..114,
delete: [110; 114), delete: 110..114,
insert: "file_id: FileId", insert: "file_id: FileId",
lookup: "file_id", lookup: "file_id",
}, },
@ -99,8 +99,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "file_id: FileId", label: "file_id: FileId",
source_range: [110; 114), source_range: 110..114,
delete: [110; 114), delete: 110..114,
insert: "file_id: FileId", insert: "file_id: FileId",
lookup: "file_id", lookup: "file_id",
}, },
@ -126,8 +126,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "file_id: FileId", label: "file_id: FileId",
source_range: [289; 293), source_range: 289..293,
delete: [289; 293), delete: 289..293,
insert: "file_id: FileId", insert: "file_id: FileId",
lookup: "file_id", lookup: "file_id",
}, },

View file

@ -97,7 +97,7 @@ fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
} }
}; };
if let Some(body) = loop_body { if let Some(body) = loop_body {
if leaf.text_range().is_subrange(&body.syntax().text_range()) { if body.syntax().text_range().contains_range(leaf.text_range()) {
return true; return true;
} }
} }
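
The one non-snapshot change in this file is the range-containment check: text_unit's leaf.is_subrange(&body) becomes text-size's body.contains_range(leaf), flipping receiver and argument while keeping the same meaning. A small sketch under the text-size 1.0 API; the concrete ranges are made up for illustration:

    use text_size::{TextRange, TextSize};

    fn main() {
        let body = TextRange::new(TextSize::from(10), TextSize::from(50));
        let leaf = TextRange::new(TextSize::from(20), TextSize::from(25));
        // Old: leaf.is_subrange(&body)   (method on the smaller range)
        // New: body.contains_range(leaf) (method on the enclosing range)
        assert!(body.contains_range(leaf));
    }
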
@ -140,22 +140,22 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "crate", label: "crate",
source_range: [21; 21), source_range: 21..21,
delete: [21; 21), delete: 21..21,
insert: "crate::", insert: "crate::",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "self", label: "self",
source_range: [21; 21), source_range: 21..21,
delete: [21; 21), delete: 21..21,
insert: "self", insert: "self",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "super", label: "super",
source_range: [21; 21), source_range: 21..21,
delete: [21; 21), delete: 21..21,
insert: "super::", insert: "super::",
kind: Keyword, kind: Keyword,
}, },
@ -173,15 +173,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "self", label: "self",
source_range: [24; 24), source_range: 24..24,
delete: [24; 24), delete: 24..24,
insert: "self", insert: "self",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "super", label: "super",
source_range: [24; 24), source_range: 24..24,
delete: [24; 24), delete: 24..24,
insert: "super::", insert: "super::",
kind: Keyword, kind: Keyword,
}, },
@ -199,15 +199,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "self", label: "self",
source_range: [28; 28), source_range: 28..28,
delete: [28; 28), delete: 28..28,
insert: "self", insert: "self",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "super", label: "super",
source_range: [28; 28), source_range: 28..28,
delete: [28; 28), delete: 28..28,
insert: "super::", insert: "super::",
kind: Keyword, kind: Keyword,
}, },
@ -230,36 +230,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "return;", insert: "return;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -284,50 +284,50 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "else", label: "else",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "else {$0}", insert: "else {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "else if", label: "else if",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "else if $0 {}", insert: "else if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "return;", insert: "return;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [108; 108), source_range: 108..108,
delete: [108; 108), delete: 108..108,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -351,36 +351,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [56; 56), source_range: 56..56,
delete: [56; 56), delete: 56..56,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [56; 56), source_range: 56..56,
delete: [56; 56), delete: 56..56,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [56; 56), source_range: 56..56,
delete: [56; 56), delete: 56..56,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [56; 56), source_range: 56..56,
delete: [56; 56), delete: 56..56,
insert: "return $0;", insert: "return $0;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [56; 56), source_range: 56..56,
delete: [56; 56), delete: 56..56,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -400,36 +400,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "return;", insert: "return;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -454,36 +454,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [97; 97), source_range: 97..97,
delete: [97; 97), delete: 97..97,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [97; 97), source_range: 97..97,
delete: [97; 97), delete: 97..97,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [97; 97), source_range: 97..97,
delete: [97; 97), delete: 97..97,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [97; 97), source_range: 97..97,
delete: [97; 97), delete: 97..97,
insert: "return $0", insert: "return $0",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [97; 97), source_range: 97..97,
delete: [97; 97), delete: 97..97,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -508,36 +508,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "return $0;", insert: "return $0;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -560,36 +560,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "return $0;", insert: "return $0;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -612,50 +612,50 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "break", label: "break",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "break;", insert: "break;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "continue", label: "continue",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "continue;", insert: "continue;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "return $0;", insert: "return $0;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [63; 63), source_range: 63..63,
delete: [63; 63), delete: 63..63,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -676,36 +676,36 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [68; 68), source_range: 68..68,
delete: [68; 68), delete: 68..68,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [68; 68), source_range: 68..68,
delete: [68; 68), delete: 68..68,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [68; 68), source_range: 68..68,
delete: [68; 68), delete: 68..68,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [68; 68), source_range: 68..68,
delete: [68; 68), delete: 68..68,
insert: "return $0;", insert: "return $0;",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [68; 68), source_range: 68..68,
delete: [68; 68), delete: 68..68,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },
@ -732,50 +732,50 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "break", label: "break",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "break", insert: "break",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "continue", label: "continue",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "continue", insert: "continue",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "if $0 {}", insert: "if $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "loop", label: "loop",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "loop {$0}", insert: "loop {$0}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "match $0 {}", insert: "match $0 {}",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "return", label: "return",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "return", insert: "return",
kind: Keyword, kind: Keyword,
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [122; 124), source_range: 122..124,
delete: [122; 124), delete: 122..124,
insert: "while $0 {}", insert: "while $0 {}",
kind: Keyword, kind: Keyword,
}, },

View file

@ -42,8 +42,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo!(…)", label: "foo!(…)",
source_range: [46; 46), source_range: 46..46,
delete: [46; 46), delete: 46..46,
insert: "foo!($0)", insert: "foo!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
@ -82,8 +82,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "vec![…]", label: "vec![…]",
source_range: [280; 280), source_range: 280..280,
delete: [280; 280), delete: 280..280,
insert: "vec![$0]", insert: "vec![$0]",
kind: Macro, kind: Macro,
detail: "macro_rules! vec", detail: "macro_rules! vec",
@ -119,8 +119,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo! {…}", label: "foo! {…}",
source_range: [163; 163), source_range: 163..163,
delete: [163; 163), delete: 163..163,
insert: "foo! {$0}", insert: "foo! {$0}",
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
@ -130,8 +130,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [163; 163), source_range: 163..163,
delete: [163; 163), delete: 163..163,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",

View file

@ -63,37 +63,37 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Bar", label: "Bar",
source_range: [246; 246), source_range: 246..246,
delete: [246; 246), delete: 246..246,
insert: "Bar", insert: "Bar",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "E", label: "E",
source_range: [246; 246), source_range: 246..246,
delete: [246; 246), delete: 246..246,
insert: "E", insert: "E",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "X", label: "X",
source_range: [246; 246), source_range: 246..246,
delete: [246; 246), delete: 246..246,
insert: "X", insert: "X",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Z", label: "Z",
source_range: [246; 246), source_range: 246..246,
delete: [246; 246), delete: 246..246,
insert: "Z", insert: "Z",
kind: Const, kind: Const,
}, },
CompletionItem { CompletionItem {
label: "m", label: "m",
source_range: [246; 246), source_range: 246..246,
delete: [246; 246), delete: 246..246,
insert: "m", insert: "m",
kind: Module, kind: Module,
}, },
@ -119,15 +119,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "E", label: "E",
source_range: [151; 151), source_range: 151..151,
delete: [151; 151), delete: 151..151,
insert: "E", insert: "E",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "m!(…)", label: "m!(…)",
source_range: [151; 151), source_range: 151..151,
delete: [151; 151), delete: 151..151,
insert: "m!($0)", insert: "m!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! m", detail: "macro_rules! m",

View file

@ -2,7 +2,7 @@
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
TextRange, TextUnit, TextRange, TextSize,
}; };
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
@ -115,7 +115,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String { fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {
if receiver_is_ambiguous_float_literal { if receiver_is_ambiguous_float_literal {
let text = receiver.syntax().text(); let text = receiver.syntax().text();
let without_dot = ..text.len() - TextUnit::of_char('.'); let without_dot = ..text.len() - TextSize::of('.');
text.slice(without_dot).to_string() text.slice(without_dot).to_string()
} else { } else {
receiver.to_string() receiver.to_string()
@ -143,7 +143,7 @@ fn postfix_snippet(
let edit = { let edit = {
let receiver_syntax = receiver.syntax(); let receiver_syntax = receiver.syntax();
let receiver_range = ctx.sema.original_range(receiver_syntax).range; let receiver_range = ctx.sema.original_range(receiver_syntax).range;
let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end());
TextEdit::replace(delete_range, snippet.to_string()) TextEdit::replace(delete_range, snippet.to_string())
}; };
CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label) CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label)
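
Two renames carry most of this hunk: TextUnit::of_char becomes TextSize::of, and TextRange::from_to becomes TextRange::new. A sketch of the replacement calls, assuming text-size 1.0; the offsets are illustrative, not taken from the code above:

    use text_size::{TextRange, TextSize};

    fn main() {
        // TextSize::of accepts chars and string slices and returns their UTF-8 length.
        let dot = TextSize::of('.');
        assert_eq!(dot, TextSize::from(1));

        // TextRange::new(start, end) replaces TextRange::from_to(start, end).
        let delete_range = TextRange::new(TextSize::from(85), TextSize::from(89));
        assert_eq!(delete_range.len(), TextSize::from(4));
    }
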
@ -176,57 +176,57 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "Box::new(bar)", insert: "Box::new(bar)",
detail: "Box::new(expr)", detail: "Box::new(expr)",
}, },
CompletionItem { CompletionItem {
label: "dbg", label: "dbg",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "dbg!(bar)", insert: "dbg!(bar)",
detail: "dbg!(expr)", detail: "dbg!(expr)",
}, },
CompletionItem { CompletionItem {
label: "if", label: "if",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "if bar {$0}", insert: "if bar {$0}",
detail: "if expr {}", detail: "if expr {}",
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "match bar {\n ${1:_} => {$0\\},\n}", insert: "match bar {\n ${1:_} => {$0\\},\n}",
detail: "match expr {}", detail: "match expr {}",
}, },
CompletionItem { CompletionItem {
label: "not", label: "not",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "!bar", insert: "!bar",
detail: "!expr", detail: "!expr",
}, },
CompletionItem { CompletionItem {
label: "ref", label: "ref",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "&bar", insert: "&bar",
detail: "&expr", detail: "&expr",
}, },
CompletionItem { CompletionItem {
label: "refm", label: "refm",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "&mut bar", insert: "&mut bar",
detail: "&mut expr", detail: "&mut expr",
}, },
CompletionItem { CompletionItem {
label: "while", label: "while",
source_range: [89; 89), source_range: 89..89,
delete: [85; 89), delete: 85..89,
insert: "while bar {\n$0\n}", insert: "while bar {\n$0\n}",
detail: "while expr {}", detail: "while expr {}",
}, },
@ -250,43 +250,43 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [91; 91), source_range: 91..91,
delete: [87; 91), delete: 87..91,
insert: "Box::new(bar)", insert: "Box::new(bar)",
detail: "Box::new(expr)", detail: "Box::new(expr)",
}, },
CompletionItem { CompletionItem {
label: "dbg", label: "dbg",
source_range: [91; 91), source_range: 91..91,
delete: [87; 91), delete: 87..91,
insert: "dbg!(bar)", insert: "dbg!(bar)",
detail: "dbg!(expr)", detail: "dbg!(expr)",
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [91; 91), source_range: 91..91,
delete: [87; 91), delete: 87..91,
insert: "match bar {\n ${1:_} => {$0\\},\n}", insert: "match bar {\n ${1:_} => {$0\\},\n}",
detail: "match expr {}", detail: "match expr {}",
}, },
CompletionItem { CompletionItem {
label: "not", label: "not",
source_range: [91; 91), source_range: 91..91,
delete: [87; 91), delete: 87..91,
insert: "!bar", insert: "!bar",
detail: "!expr", detail: "!expr",
}, },
CompletionItem { CompletionItem {
label: "ref", label: "ref",
source_range: [91; 91), source_range: 91..91,
delete: [87; 91), delete: 87..91,
insert: "&bar", insert: "&bar",
detail: "&expr", detail: "&expr",
}, },
CompletionItem { CompletionItem {
label: "refm", label: "refm",
source_range: [91; 91), source_range: 91..91,
delete: [87; 91), delete: 87..91,
insert: "&mut bar", insert: "&mut bar",
detail: "&mut expr", detail: "&mut expr",
}, },
@ -309,43 +309,43 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [52; 52), source_range: 52..52,
delete: [49; 52), delete: 49..52,
insert: "Box::new(42)", insert: "Box::new(42)",
detail: "Box::new(expr)", detail: "Box::new(expr)",
}, },
CompletionItem { CompletionItem {
label: "dbg", label: "dbg",
source_range: [52; 52), source_range: 52..52,
delete: [49; 52), delete: 49..52,
insert: "dbg!(42)", insert: "dbg!(42)",
detail: "dbg!(expr)", detail: "dbg!(expr)",
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [52; 52), source_range: 52..52,
delete: [49; 52), delete: 49..52,
insert: "match 42 {\n ${1:_} => {$0\\},\n}", insert: "match 42 {\n ${1:_} => {$0\\},\n}",
detail: "match expr {}", detail: "match expr {}",
}, },
CompletionItem { CompletionItem {
label: "not", label: "not",
source_range: [52; 52), source_range: 52..52,
delete: [49; 52), delete: 49..52,
insert: "!42", insert: "!42",
detail: "!expr", detail: "!expr",
}, },
CompletionItem { CompletionItem {
label: "ref", label: "ref",
source_range: [52; 52), source_range: 52..52,
delete: [49; 52), delete: 49..52,
insert: "&42", insert: "&42",
detail: "&expr", detail: "&expr",
}, },
CompletionItem { CompletionItem {
label: "refm", label: "refm",
source_range: [52; 52), source_range: 52..52,
delete: [49; 52), delete: 49..52,
insert: "&mut 42", insert: "&mut 42",
detail: "&mut expr", detail: "&mut expr",
}, },
@ -370,43 +370,43 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [149; 150), source_range: 149..150,
delete: [145; 150), delete: 145..150,
insert: "Box::new(bar)", insert: "Box::new(bar)",
detail: "Box::new(expr)", detail: "Box::new(expr)",
}, },
CompletionItem { CompletionItem {
label: "dbg", label: "dbg",
source_range: [149; 150), source_range: 149..150,
delete: [145; 150), delete: 145..150,
insert: "dbg!(bar)", insert: "dbg!(bar)",
detail: "dbg!(expr)", detail: "dbg!(expr)",
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [149; 150), source_range: 149..150,
delete: [145; 150), delete: 145..150,
insert: "match bar {\n ${1:_} => {$0\\},\n}", insert: "match bar {\n ${1:_} => {$0\\},\n}",
detail: "match expr {}", detail: "match expr {}",
}, },
CompletionItem { CompletionItem {
label: "not", label: "not",
source_range: [149; 150), source_range: 149..150,
delete: [145; 150), delete: 145..150,
insert: "!bar", insert: "!bar",
detail: "!expr", detail: "!expr",
}, },
CompletionItem { CompletionItem {
label: "ref", label: "ref",
source_range: [149; 150), source_range: 149..150,
delete: [145; 150), delete: 145..150,
insert: "&bar", insert: "&bar",
detail: "&expr", detail: "&expr",
}, },
CompletionItem { CompletionItem {
label: "refm", label: "refm",
source_range: [149; 150), source_range: 149..150,
delete: [145; 150), delete: 145..150,
insert: "&mut bar", insert: "&mut bar",
detail: "&mut expr", detail: "&mut expr",
}, },
@ -429,43 +429,43 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "box", label: "box",
source_range: [56; 56), source_range: 56..56,
delete: [49; 56), delete: 49..56,
insert: "Box::new(&&&&42)", insert: "Box::new(&&&&42)",
detail: "Box::new(expr)", detail: "Box::new(expr)",
}, },
CompletionItem { CompletionItem {
label: "dbg", label: "dbg",
source_range: [56; 56), source_range: 56..56,
delete: [49; 56), delete: 49..56,
insert: "dbg!(&&&&42)", insert: "dbg!(&&&&42)",
detail: "dbg!(expr)", detail: "dbg!(expr)",
}, },
CompletionItem { CompletionItem {
label: "match", label: "match",
source_range: [56; 56), source_range: 56..56,
delete: [49; 56), delete: 49..56,
insert: "match &&&&42 {\n ${1:_} => {$0\\},\n}", insert: "match &&&&42 {\n ${1:_} => {$0\\},\n}",
detail: "match expr {}", detail: "match expr {}",
}, },
CompletionItem { CompletionItem {
label: "not", label: "not",
source_range: [56; 56), source_range: 56..56,
delete: [53; 56), delete: 53..56,
insert: "!42", insert: "!42",
detail: "!expr", detail: "!expr",
}, },
CompletionItem { CompletionItem {
label: "ref", label: "ref",
source_range: [56; 56), source_range: 56..56,
delete: [53; 56), delete: 53..56,
insert: "&42", insert: "&42",
detail: "&expr", detail: "&expr",
}, },
CompletionItem { CompletionItem {
label: "refm", label: "refm",
source_range: [56; 56), source_range: 56..56,
delete: [53; 56), delete: 53..56,
insert: "&mut 42", insert: "&mut 42",
detail: "&mut expr", detail: "&mut expr",
}, },

View file

@ -162,8 +162,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "my", label: "my",
source_range: [27; 29), source_range: 27..29,
delete: [27; 29), delete: 27..29,
insert: "my", insert: "my",
kind: Module, kind: Module,
documentation: Documentation( documentation: Documentation(
@ -193,15 +193,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [31; 31), source_range: 31..31,
delete: [31; 31), delete: 31..31,
insert: "Foo", insert: "Foo",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "PublicBar", label: "PublicBar",
source_range: [31; 31), source_range: 31..31,
delete: [31; 31), delete: 31..31,
insert: "PublicBar", insert: "PublicBar",
kind: Struct, kind: Struct,
}, },
@ -226,8 +226,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Bar", label: "Bar",
source_range: [30; 30), source_range: 30..30,
delete: [30; 30), delete: 30..30,
insert: "Bar", insert: "Bar",
kind: Struct, kind: Struct,
}, },
@ -252,15 +252,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Spam", label: "Spam",
source_range: [11; 13), source_range: 11..13,
delete: [11; 13), delete: 11..13,
insert: "Spam", insert: "Spam",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [11; 13), source_range: 11..13,
delete: [11; 13), delete: 11..13,
insert: "foo", insert: "foo",
kind: Module, kind: Module,
}, },
@ -285,15 +285,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Spam", label: "Spam",
source_range: [12; 14), source_range: 12..14,
delete: [12; 14), delete: 12..14,
insert: "Spam", insert: "Spam",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [12; 14), source_range: 12..14,
delete: [12; 14), delete: 12..14,
insert: "foo", insert: "foo",
kind: Module, kind: Module,
}, },
@ -322,8 +322,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Spam", label: "Spam",
source_range: [23; 25), source_range: 23..25,
delete: [23; 25), delete: 23..25,
insert: "Spam", insert: "Spam",
kind: Struct, kind: Struct,
}, },
@ -352,8 +352,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Bar(…)", label: "Bar(…)",
source_range: [116; 116), source_range: 116..116,
delete: [116; 116), delete: 116..116,
insert: "Bar($0)", insert: "Bar($0)",
kind: EnumVariant, kind: EnumVariant,
lookup: "Bar", lookup: "Bar",
@ -365,8 +365,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [116; 116), source_range: 116..116,
delete: [116; 116), delete: 116..116,
insert: "Foo", insert: "Foo",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
@ -402,8 +402,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Bar(…)", label: "Bar(…)",
source_range: [180; 180), source_range: 180..180,
delete: [180; 180), delete: 180..180,
insert: "Bar($0)", insert: "Bar($0)",
kind: EnumVariant, kind: EnumVariant,
lookup: "Bar", lookup: "Bar",
@ -415,8 +415,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [180; 180), source_range: 180..180,
delete: [180; 180), delete: 180..180,
insert: "Foo", insert: "Foo",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
@ -426,8 +426,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "S(…)", label: "S(…)",
source_range: [180; 180), source_range: 180..180,
delete: [180; 180), delete: 180..180,
insert: "S($0)", insert: "S($0)",
kind: EnumVariant, kind: EnumVariant,
lookup: "S", lookup: "S",
@ -463,8 +463,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [100; 100), source_range: 100..100,
delete: [100; 100), delete: 100..100,
insert: "m()$0", insert: "m()$0",
kind: Function, kind: Function,
lookup: "m", lookup: "m",
@ -499,8 +499,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [105; 105), source_range: 105..105,
delete: [105; 105), delete: 105..105,
insert: "m()$0", insert: "m()$0",
kind: Method, kind: Method,
lookup: "m", lookup: "m",
@ -535,8 +535,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "C", label: "C",
source_range: [107; 107), source_range: 107..107,
delete: [107; 107), delete: 107..107,
insert: "C", insert: "C",
kind: Const, kind: Const,
detail: "const C: i32 = 42;", detail: "const C: i32 = 42;",
@ -570,8 +570,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "T", label: "T",
source_range: [101; 101), source_range: 101..101,
delete: [101; 101), delete: 101..101,
insert: "T", insert: "T",
kind: TypeAlias, kind: TypeAlias,
detail: "type T = i32;", detail: "type T = i32;",
@ -610,24 +610,24 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "PUBLIC_CONST", label: "PUBLIC_CONST",
source_range: [302; 302), source_range: 302..302,
delete: [302; 302), delete: 302..302,
insert: "PUBLIC_CONST", insert: "PUBLIC_CONST",
kind: Const, kind: Const,
detail: "pub(super) const PUBLIC_CONST: u32 = 1;", detail: "pub(super) const PUBLIC_CONST: u32 = 1;",
}, },
CompletionItem { CompletionItem {
label: "PublicType", label: "PublicType",
source_range: [302; 302), source_range: 302..302,
delete: [302; 302), delete: 302..302,
insert: "PublicType", insert: "PublicType",
kind: TypeAlias, kind: TypeAlias,
detail: "pub(super) type PublicType = u32;", detail: "pub(super) type PublicType = u32;",
}, },
CompletionItem { CompletionItem {
label: "public_method()", label: "public_method()",
source_range: [302; 302), source_range: 302..302,
delete: [302; 302), delete: 302..302,
insert: "public_method()$0", insert: "public_method()$0",
kind: Function, kind: Function,
lookup: "public_method", lookup: "public_method",
@ -659,8 +659,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [100; 100), source_range: 100..100,
delete: [100; 100), delete: 100..100,
insert: "m()$0", insert: "m()$0",
kind: Function, kind: Function,
lookup: "m", lookup: "m",
@ -695,8 +695,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [101; 101), source_range: 101..101,
delete: [101; 101), delete: 101..101,
insert: "m()$0", insert: "m()$0",
kind: Function, kind: Function,
lookup: "m", lookup: "m",
@ -728,8 +728,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [9; 9), source_range: 9..9,
delete: [9; 9), delete: 9..9,
insert: "bar", insert: "bar",
kind: Module, kind: Module,
}, },
@ -756,8 +756,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [73; 73), source_range: 73..73,
delete: [73; 73), delete: 73..73,
insert: "m()$0", insert: "m()$0",
kind: Function, kind: Function,
lookup: "m", lookup: "m",
@ -792,8 +792,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [99; 99), source_range: 99..99,
delete: [99; 99), delete: 99..99,
insert: "m()$0", insert: "m()$0",
kind: Function, kind: Function,
lookup: "m", lookup: "m",
@ -828,8 +828,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m()", label: "m()",
source_range: [110; 110), source_range: 110..110,
delete: [110; 110), delete: 110..110,
insert: "m()$0", insert: "m()$0",
kind: Function, kind: Function,
lookup: "m", lookup: "m",
@ -862,8 +862,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar()", label: "bar()",
source_range: [185; 185), source_range: 185..185,
delete: [185; 185), delete: 185..185,
insert: "bar()$0", insert: "bar()$0",
kind: Function, kind: Function,
lookup: "bar", lookup: "bar",
@ -871,8 +871,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [185; 185), source_range: 185..185,
delete: [185; 185), delete: 185..185,
insert: "foo()$0", insert: "foo()$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -902,16 +902,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo!(…)", label: "foo!(…)",
source_range: [179; 179), source_range: 179..179,
delete: [179; 179), delete: 179..179,
insert: "foo!($0)", insert: "foo!($0)",
kind: Macro, kind: Macro,
detail: "#[macro_export]\nmacro_rules! foo", detail: "#[macro_export]\nmacro_rules! foo",
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [179; 179), source_range: 179..179,
delete: [179; 179), delete: 179..179,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -947,22 +947,22 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "RIGHT_CONST", label: "RIGHT_CONST",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "RIGHT_CONST", insert: "RIGHT_CONST",
kind: Const, kind: Const,
}, },
CompletionItem { CompletionItem {
label: "RightType", label: "RightType",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "RightType", insert: "RightType",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "right_fn()", label: "right_fn()",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "right_fn()$0", insert: "right_fn()$0",
kind: Function, kind: Function,
lookup: "right_fn", lookup: "right_fn",
@ -986,8 +986,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [93; 94), source_range: 93..94,
delete: [93; 94), delete: 93..94,
insert: "foo()$0", insert: "foo()$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -995,8 +995,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [93; 94), source_range: 93..94,
delete: [93; 94), delete: 93..94,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -1025,15 +1025,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "z", label: "z",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "z", insert: "z",
kind: Module, kind: Module,
}, },
CompletionItem { CompletionItem {
label: "z()", label: "z()",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "z()$0", insert: "z()$0",
kind: Function, kind: Function,
lookup: "z", lookup: "z",
@ -1064,8 +1064,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "new()", label: "new()",
source_range: [292; 292), source_range: 292..292,
delete: [292; 292), delete: 292..292,
insert: "new()$0", insert: "new()$0",
kind: Function, kind: Function,
lookup: "new", lookup: "new",

View file

@ -44,8 +44,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [117; 118), source_range: 117..118,
delete: [117; 118), delete: 117..118,
insert: "foo", insert: "foo",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -73,16 +73,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [161; 161), source_range: 161..161,
delete: [161; 161), delete: 161..161,
insert: "bar", insert: "bar",
kind: Field, kind: Field,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [161; 161), source_range: 161..161,
delete: [161; 161), delete: 161..161,
insert: "foo", insert: "foo",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -109,8 +109,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [171; 172), source_range: 171..172,
delete: [171; 172), delete: 171..172,
insert: "foo", insert: "foo",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -145,16 +145,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [372; 372), source_range: 372..372,
delete: [372; 372), delete: 372..372,
insert: "bar", insert: "bar",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
CompletionItem { CompletionItem {
label: "baz", label: "baz",
source_range: [372; 372), source_range: 372..372,
delete: [372; 372), delete: 372..372,
insert: "baz", insert: "baz",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -190,8 +190,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [142; 145), source_range: 142..145,
delete: [142; 145), delete: 142..145,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -215,8 +215,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [83; 86), source_range: 83..86,
delete: [83; 86), delete: 83..86,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -241,8 +241,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "a", label: "a",
source_range: [119; 119), source_range: 119..119,
delete: [119; 119), delete: 119..119,
insert: "a", insert: "a",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -267,8 +267,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "b", label: "b",
source_range: [119; 119), source_range: 119..119,
delete: [119; 119), delete: 119..119,
insert: "b", insert: "b",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -292,8 +292,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "a", label: "a",
source_range: [93; 93), source_range: 93..93,
delete: [93; 93), delete: 93..93,
insert: "a", insert: "a",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -317,8 +317,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [137; 140), source_range: 137..140,
delete: [137; 140), delete: 137..140,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -352,16 +352,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [302; 302), source_range: 302..302,
delete: [302; 302), delete: 302..302,
insert: "bar", insert: "bar",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
CompletionItem { CompletionItem {
label: "baz", label: "baz",
source_range: [302; 302), source_range: 302..302,
delete: [302; 302), delete: 302..302,
insert: "baz", insert: "baz",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -393,8 +393,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo2", label: "foo2",
source_range: [221; 221), source_range: 221..221,
delete: [221; 221), delete: 221..221,
insert: "foo2", insert: "foo2",
kind: Field, kind: Field,
detail: "u32", detail: "u32",

View file

@ -67,15 +67,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "pd", label: "pd",
source_range: [17; 17), source_range: 17..17,
delete: [17; 17), delete: 17..17,
insert: "eprintln!(\"$0 = {:?}\", $0);", insert: "eprintln!(\"$0 = {:?}\", $0);",
kind: Snippet, kind: Snippet,
}, },
CompletionItem { CompletionItem {
label: "ppd", label: "ppd",
source_range: [17; 17), source_range: 17..17,
delete: [17; 17), delete: 17..17,
insert: "eprintln!(\"$0 = {:#?}\", $0);", insert: "eprintln!(\"$0 = {:#?}\", $0);",
kind: Snippet, kind: Snippet,
}, },
@ -111,23 +111,23 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Test function", label: "Test function",
source_range: [78; 78), source_range: 78..78,
delete: [78; 78), delete: 78..78,
insert: "#[test]\nfn ${1:feature}() {\n $0\n}", insert: "#[test]\nfn ${1:feature}() {\n $0\n}",
kind: Snippet, kind: Snippet,
lookup: "tfn", lookup: "tfn",
}, },
CompletionItem { CompletionItem {
label: "macro_rules", label: "macro_rules",
source_range: [78; 78), source_range: 78..78,
delete: [78; 78), delete: 78..78,
insert: "macro_rules! $1 {\n\t($2) => {\n\t\t$0\n\t};\n}", insert: "macro_rules! $1 {\n\t($2) => {\n\t\t$0\n\t};\n}",
kind: Snippet, kind: Snippet,
}, },
CompletionItem { CompletionItem {
label: "pub(crate)", label: "pub(crate)",
source_range: [78; 78), source_range: 78..78,
delete: [78; 78), delete: 78..78,
insert: "pub(crate) $0", insert: "pub(crate) $0",
kind: Snippet, kind: Snippet,
}, },

View file

@ -141,7 +141,7 @@ fn add_function_impl(
} else { } else {
CompletionItemKind::Function CompletionItemKind::Function
}; };
let range = TextRange::from_to(fn_def_node.text_range().start(), ctx.source_range().end()); let range = TextRange::new(fn_def_node.text_range().start(), ctx.source_range().end());
match ctx.config.snippet_cap { match ctx.config.snippet_cap {
Some(cap) => { Some(cap) => {
@ -167,7 +167,7 @@ fn add_type_alias_impl(
let snippet = format!("type {} = ", alias_name); let snippet = format!("type {} = ", alias_name);
let range = TextRange::from_to(type_def_node.text_range().start(), ctx.source_range().end()); let range = TextRange::new(type_def_node.text_range().start(), ctx.source_range().end());
CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone())
.text_edit(TextEdit::replace(range, snippet)) .text_edit(TextEdit::replace(range, snippet))
@ -188,8 +188,7 @@ fn add_const_impl(
if let Some(const_name) = const_name { if let Some(const_name) = const_name {
let snippet = make_const_compl_syntax(&const_.source(ctx.db).value); let snippet = make_const_compl_syntax(&const_.source(ctx.db).value);
let range = let range = TextRange::new(const_def_node.text_range().start(), ctx.source_range().end());
TextRange::from_to(const_def_node.text_range().start(), ctx.source_range().end());
CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone())
.text_edit(TextEdit::replace(range, snippet)) .text_edit(TextEdit::replace(range, snippet))
@ -216,7 +215,7 @@ fn make_const_compl_syntax(const_: &ast::ConstDef) -> String {
.map_or(const_end, |f| f.text_range().start()); .map_or(const_end, |f| f.text_range().start());
let len = end - start; let len = end - start;
let range = TextRange::from_to(0.into(), len); let range = TextRange::new(0.into(), len);
let syntax = const_.syntax().text().slice(range).to_string(); let syntax = const_.syntax().text().slice(range).to_string();
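
The same from_to → new rename repeats through this file; the last call also anchors a range at offset zero via 0.into(). Assuming the pinned text-size version exposes up_to (as later releases do), that constructor would express the same range, though the diff keeps the explicit form:

    use text_size::{TextRange, TextSize};

    fn main() {
        let len = TextSize::from(24);
        // Explicit form used in the diff...
        let a = TextRange::new(TextSize::from(0), len);
        // ...and the equivalent convenience constructor, if available.
        let b = TextRange::up_to(len);
        assert_eq!(a, b);
    }
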
@ -254,24 +253,24 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "const TEST_CONST: u16 = ", label: "const TEST_CONST: u16 = ",
source_range: [209; 210), source_range: 209..210,
delete: [209; 210), delete: 209..210,
insert: "const TEST_CONST: u16 = ", insert: "const TEST_CONST: u16 = ",
kind: Const, kind: Const,
lookup: "TEST_CONST", lookup: "TEST_CONST",
}, },
CompletionItem { CompletionItem {
label: "fn test()", label: "fn test()",
source_range: [209; 210), source_range: 209..210,
delete: [209; 210), delete: 209..210,
insert: "fn test() {\n $0\n}", insert: "fn test() {\n $0\n}",
kind: Function, kind: Function,
lookup: "test", lookup: "test",
}, },
CompletionItem { CompletionItem {
label: "type TestType = ", label: "type TestType = ",
source_range: [209; 210), source_range: 209..210,
delete: [209; 210), delete: 209..210,
insert: "type TestType = ", insert: "type TestType = ",
kind: TypeAlias, kind: TypeAlias,
lookup: "TestType", lookup: "TestType",
@ -320,8 +319,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "fn test()", label: "fn test()",
source_range: [139; 140), source_range: 139..140,
delete: [139; 140), delete: 139..140,
insert: "fn test() {\n $0\n}", insert: "fn test() {\n $0\n}",
kind: Function, kind: Function,
lookup: "test", lookup: "test",
@ -349,8 +348,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "fn foo()", label: "fn foo()",
source_range: [141; 142), source_range: 141..142,
delete: [138; 142), delete: 138..142,
insert: "fn foo() {\n $0\n}", insert: "fn foo() {\n $0\n}",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -381,8 +380,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "fn foo_bar()", label: "fn foo_bar()",
source_range: [200; 201), source_range: 200..201,
delete: [197; 201), delete: 197..201,
insert: "fn foo_bar() {\n $0\n}", insert: "fn foo_bar() {\n $0\n}",
kind: Function, kind: Function,
lookup: "foo_bar", lookup: "foo_bar",
@ -432,8 +431,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "fn foo()", label: "fn foo()",
source_range: [144; 145), source_range: 144..145,
delete: [141; 145), delete: 141..145,
insert: "fn foo<T>() {\n $0\n}", insert: "fn foo<T>() {\n $0\n}",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -461,8 +460,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "fn foo()", label: "fn foo()",
source_range: [166; 167), source_range: 166..167,
delete: [163; 167), delete: 163..167,
insert: "fn foo<T>()\nwhere T: Into<String> {\n $0\n}", insert: "fn foo<T>()\nwhere T: Into<String> {\n $0\n}",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -488,8 +487,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "type SomeType = ", label: "type SomeType = ",
source_range: [124; 125), source_range: 124..125,
delete: [119; 125), delete: 119..125,
insert: "type SomeType = ", insert: "type SomeType = ",
kind: TypeAlias, kind: TypeAlias,
lookup: "SomeType", lookup: "SomeType",
@ -515,8 +514,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "const SOME_CONST: u16 = ", label: "const SOME_CONST: u16 = ",
source_range: [133; 134), source_range: 133..134,
delete: [127; 134), delete: 127..134,
insert: "const SOME_CONST: u16 = ", insert: "const SOME_CONST: u16 = ",
kind: Const, kind: Const,
lookup: "SOME_CONST", lookup: "SOME_CONST",
@ -542,8 +541,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "const SOME_CONST: u16 = ", label: "const SOME_CONST: u16 = ",
source_range: [138; 139), source_range: 138..139,
delete: [132; 139), delete: 132..139,
insert: "const SOME_CONST: u16 = ", insert: "const SOME_CONST: u16 = ",
kind: Const, kind: Const,
lookup: "SOME_CONST", lookup: "SOME_CONST",

View file

@ -85,8 +85,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "collections", label: "collections",
source_range: [21; 24), source_range: 21..24,
delete: [21; 24), delete: 21..24,
insert: "collections", insert: "collections",
}, },
] ]
@ -157,8 +157,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Enum", label: "Enum",
source_range: [231; 233), source_range: 231..233,
delete: [231; 233), delete: 231..233,
insert: "Enum", insert: "Enum",
kind: Enum, kind: Enum,
}, },
@ -183,8 +183,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "quux(…)", label: "quux(…)",
source_range: [91; 91), source_range: 91..91,
delete: [91; 91), delete: 91..91,
insert: "quux(${1:x})$0", insert: "quux(${1:x})$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -193,16 +193,16 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "x", label: "x",
source_range: [91; 91), source_range: 91..91,
delete: [91; 91), delete: 91..91,
insert: "x", insert: "x",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "y", label: "y",
source_range: [91; 91), source_range: 91..91,
delete: [91; 91), delete: 91..91,
insert: "y", insert: "y",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
@ -232,23 +232,23 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "a", label: "a",
source_range: [242; 242), source_range: 242..242,
delete: [242; 242), delete: 242..242,
insert: "a", insert: "a",
kind: Binding, kind: Binding,
}, },
CompletionItem { CompletionItem {
label: "b", label: "b",
source_range: [242; 242), source_range: 242..242,
delete: [242; 242), delete: 242..242,
insert: "b", insert: "b",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "quux()", label: "quux()",
source_range: [242; 242), source_range: 242..242,
delete: [242; 242), delete: 242..242,
insert: "quux()$0", insert: "quux()$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -275,8 +275,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "quux()", label: "quux()",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "quux()$0", insert: "quux()$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -284,8 +284,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "x", label: "x",
source_range: [95; 95), source_range: 95..95,
delete: [95; 95), delete: 95..95,
insert: "x", insert: "x",
kind: Binding, kind: Binding,
}, },
@ -308,15 +308,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "T", label: "T",
source_range: [52; 52), source_range: 52..52,
delete: [52; 52), delete: 52..52,
insert: "T", insert: "T",
kind: TypeParam, kind: TypeParam,
}, },
CompletionItem { CompletionItem {
label: "quux()", label: "quux()",
source_range: [52; 52), source_range: 52..52,
delete: [52; 52), delete: 52..52,
insert: "quux()$0", insert: "quux()$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -341,22 +341,22 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Self", label: "Self",
source_range: [54; 54), source_range: 54..54,
delete: [54; 54), delete: 54..54,
insert: "Self", insert: "Self",
kind: TypeParam, kind: TypeParam,
}, },
CompletionItem { CompletionItem {
label: "T", label: "T",
source_range: [54; 54), source_range: 54..54,
delete: [54; 54), delete: 54..54,
insert: "T", insert: "T",
kind: TypeParam, kind: TypeParam,
}, },
CompletionItem { CompletionItem {
label: "X<…>", label: "X<…>",
source_range: [54; 54), source_range: 54..54,
delete: [54; 54), delete: 54..54,
insert: "X<$0>", insert: "X<$0>",
kind: Struct, kind: Struct,
lookup: "X", lookup: "X",
@ -380,15 +380,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Self", label: "Self",
source_range: [48; 48), source_range: 48..48,
delete: [48; 48), delete: 48..48,
insert: "Self", insert: "Self",
kind: TypeParam, kind: TypeParam,
}, },
CompletionItem { CompletionItem {
label: "X", label: "X",
source_range: [48; 48), source_range: 48..48,
delete: [48; 48), delete: 48..48,
insert: "X", insert: "X",
kind: Enum, kind: Enum,
}, },
@ -413,22 +413,22 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Baz", label: "Baz",
source_range: [105; 105), source_range: 105..105,
delete: [105; 105), delete: 105..105,
insert: "Baz", insert: "Baz",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [105; 105), source_range: 105..105,
delete: [105; 105), delete: 105..105,
insert: "Foo", insert: "Foo",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "quux()", label: "quux()",
source_range: [105; 105), source_range: 105..105,
delete: [105; 105), delete: 105..105,
insert: "quux()$0", insert: "quux()$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -455,8 +455,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "other_crate", label: "other_crate",
source_range: [4; 4), source_range: 4..4,
delete: [4; 4), delete: 4..4,
insert: "other_crate", insert: "other_crate",
kind: Module, kind: Module,
}, },
@ -481,15 +481,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Bar", label: "Bar",
source_range: [117; 117), source_range: 117..117,
delete: [117; 117), delete: 117..117,
insert: "Bar", insert: "Bar",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "quux()", label: "quux()",
source_range: [117; 117), source_range: 117..117,
delete: [117; 117), delete: 117..117,
insert: "quux()$0", insert: "quux()$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -513,15 +513,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [55; 55), source_range: 55..55,
delete: [55; 55), delete: 55..55,
insert: "Foo", insert: "Foo",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "x()", label: "x()",
source_range: [55; 55), source_range: 55..55,
delete: [55; 55), delete: 55..55,
insert: "x()$0", insert: "x()$0",
kind: Function, kind: Function,
lookup: "x", lookup: "x",
@ -550,16 +550,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar", label: "bar",
source_range: [146; 146), source_range: 146..146,
delete: [146; 146), delete: 146..146,
insert: "bar", insert: "bar",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [146; 146), source_range: 146..146,
delete: [146; 146), delete: 146..146,
insert: "foo()$0", insert: "foo()$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -578,15 +578,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Self", label: "Self",
source_range: [25; 25), source_range: 25..25,
delete: [25; 25), delete: 25..25,
insert: "Self", insert: "Self",
kind: TypeParam, kind: TypeParam,
}, },
CompletionItem { CompletionItem {
label: "self", label: "self",
source_range: [25; 25), source_range: 25..25,
delete: [25; 25), delete: 25..25,
insert: "self", insert: "self",
kind: Binding, kind: Binding,
detail: "&{unknown}", detail: "&{unknown}",
@ -617,15 +617,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Option", label: "Option",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "Option", insert: "Option",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "foo()$0", insert: "foo()$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -633,8 +633,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "std", label: "std",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "std", insert: "std",
kind: Module, kind: Module,
}, },
@ -672,22 +672,22 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "String", label: "String",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "String", insert: "String",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "core", label: "core",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "core", insert: "core",
kind: Module, kind: Module,
}, },
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "foo()$0", insert: "foo()$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -695,8 +695,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "std", label: "std",
source_range: [18; 18), source_range: 18..18,
delete: [18; 18), delete: 18..18,
insert: "std", insert: "std",
kind: Module, kind: Module,
}, },
@ -742,46 +742,46 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "bar!(…)", label: "bar!(…)",
source_range: [252; 252), source_range: 252..252,
delete: [252; 252), delete: 252..252,
insert: "bar!($0)", insert: "bar!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! bar", detail: "macro_rules! bar",
}, },
CompletionItem { CompletionItem {
label: "baz!(…)", label: "baz!(…)",
source_range: [252; 252), source_range: 252..252,
delete: [252; 252), delete: 252..252,
insert: "baz!($0)", insert: "baz!($0)",
kind: Macro, kind: Macro,
detail: "#[macro_export]\nmacro_rules! baz", detail: "#[macro_export]\nmacro_rules! baz",
}, },
CompletionItem { CompletionItem {
label: "foo!(…)", label: "foo!(…)",
source_range: [252; 252), source_range: 252..252,
delete: [252; 252), delete: 252..252,
insert: "foo!($0)", insert: "foo!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
}, },
CompletionItem { CompletionItem {
label: "m1", label: "m1",
source_range: [252; 252), source_range: 252..252,
delete: [252; 252), delete: 252..252,
insert: "m1", insert: "m1",
kind: Module, kind: Module,
}, },
CompletionItem { CompletionItem {
label: "m2", label: "m2",
source_range: [252; 252), source_range: 252..252,
delete: [252; 252), delete: 252..252,
insert: "m2", insert: "m2",
kind: Module, kind: Module,
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [252; 252), source_range: 252..252,
delete: [252; 252), delete: 252..252,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -811,16 +811,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo!(…)", label: "foo!(…)",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "foo!($0)", insert: "foo!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
}, },
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [49; 49), source_range: 49..49,
delete: [49; 49), delete: 49..49,
insert: "foo()$0", insert: "foo()$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -850,16 +850,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo!(…)", label: "foo!(…)",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "foo!($0)", insert: "foo!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [57; 57), source_range: 57..57,
delete: [57; 57), delete: 57..57,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -889,16 +889,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo!(…)", label: "foo!(…)",
source_range: [50; 50), source_range: 50..50,
delete: [50; 50), delete: 50..50,
insert: "foo!($0)", insert: "foo!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! foo", detail: "macro_rules! foo",
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [50; 50), source_range: 50..50,
delete: [50; 50), delete: 50..50,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -925,8 +925,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "frobnicate()", label: "frobnicate()",
source_range: [23; 24), source_range: 23..24,
delete: [23; 24), delete: 23..24,
insert: "frobnicate()$0", insert: "frobnicate()$0",
kind: Function, kind: Function,
lookup: "frobnicate", lookup: "frobnicate",
@ -934,8 +934,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [23; 24), source_range: 23..24,
delete: [23; 24), delete: 23..24,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -962,16 +962,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m!(…)", label: "m!(…)",
source_range: [145; 145), source_range: 145..145,
delete: [145; 145), delete: 145..145,
insert: "m!($0)", insert: "m!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! m", detail: "macro_rules! m",
}, },
CompletionItem { CompletionItem {
label: "quux(…)", label: "quux(…)",
source_range: [145; 145), source_range: 145..145,
delete: [145; 145), delete: 145..145,
insert: "quux(${1:x})$0", insert: "quux(${1:x})$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -980,16 +980,16 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "x", label: "x",
source_range: [145; 145), source_range: 145..145,
delete: [145; 145), delete: 145..145,
insert: "x", insert: "x",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "y", label: "y",
source_range: [145; 145), source_range: 145..145,
delete: [145; 145), delete: 145..145,
insert: "y", insert: "y",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
@ -1015,16 +1015,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m!(…)", label: "m!(…)",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "m!($0)", insert: "m!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! m", detail: "macro_rules! m",
}, },
CompletionItem { CompletionItem {
label: "quux(…)", label: "quux(…)",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "quux(${1:x})$0", insert: "quux(${1:x})$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -1033,16 +1033,16 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "x", label: "x",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "x", insert: "x",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "y", label: "y",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "y", insert: "y",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
@ -1068,16 +1068,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "m!(…)", label: "m!(…)",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "m!($0)", insert: "m!($0)",
kind: Macro, kind: Macro,
detail: "macro_rules! m", detail: "macro_rules! m",
}, },
CompletionItem { CompletionItem {
label: "quux(…)", label: "quux(…)",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "quux(${1:x})$0", insert: "quux(${1:x})$0",
kind: Function, kind: Function,
lookup: "quux", lookup: "quux",
@ -1086,16 +1086,16 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "x", label: "x",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "x", insert: "x",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
}, },
CompletionItem { CompletionItem {
label: "y", label: "y",
source_range: [145; 146), source_range: 145..146,
delete: [145; 146), delete: 145..146,
insert: "y", insert: "y",
kind: Binding, kind: Binding,
detail: "i32", detail: "i32",
@ -1121,14 +1121,14 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Quux", label: "Quux",
source_range: [82; 82), source_range: 82..82,
delete: [82; 82), delete: 82..82,
insert: "Quux", insert: "Quux",
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [82; 82), source_range: 82..82,
delete: [82; 82), delete: 82..82,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -1162,31 +1162,31 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [248; 250), source_range: 248..250,
delete: [248; 250), delete: 248..250,
insert: "Foo", insert: "Foo",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "Foo::Bar", label: "Foo::Bar",
source_range: [248; 250), source_range: 248..250,
delete: [248; 250), delete: 248..250,
insert: "Foo::Bar", insert: "Foo::Bar",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Foo::Baz", label: "Foo::Baz",
source_range: [248; 250), source_range: 248..250,
delete: [248; 250), delete: 248..250,
insert: "Foo::Baz", insert: "Foo::Baz",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Foo::Quux", label: "Foo::Quux",
source_range: [248; 250), source_range: 248..250,
delete: [248; 250), delete: 248..250,
insert: "Foo::Quux", insert: "Foo::Quux",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
@ -1220,31 +1220,31 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [219; 221), source_range: 219..221,
delete: [219; 221), delete: 219..221,
insert: "Foo", insert: "Foo",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "Foo::Bar", label: "Foo::Bar",
source_range: [219; 221), source_range: 219..221,
delete: [219; 221), delete: 219..221,
insert: "Foo::Bar", insert: "Foo::Bar",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Foo::Baz", label: "Foo::Baz",
source_range: [219; 221), source_range: 219..221,
delete: [219; 221), delete: 219..221,
insert: "Foo::Baz", insert: "Foo::Baz",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Foo::Quux", label: "Foo::Quux",
source_range: [219; 221), source_range: 219..221,
delete: [219; 221), delete: 219..221,
insert: "Foo::Quux", insert: "Foo::Quux",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
@ -1274,39 +1274,39 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [185; 186), source_range: 185..186,
delete: [185; 186), delete: 185..186,
insert: "Foo", insert: "Foo",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "Foo::Bar", label: "Foo::Bar",
source_range: [185; 186), source_range: 185..186,
delete: [185; 186), delete: 185..186,
insert: "Foo::Bar", insert: "Foo::Bar",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Foo::Baz", label: "Foo::Baz",
source_range: [185; 186), source_range: 185..186,
delete: [185; 186), delete: 185..186,
insert: "Foo::Baz", insert: "Foo::Baz",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Foo::Quux", label: "Foo::Quux",
source_range: [185; 186), source_range: 185..186,
delete: [185; 186), delete: 185..186,
insert: "Foo::Quux", insert: "Foo::Quux",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [185; 186), source_range: 185..186,
delete: [185; 186), delete: 185..186,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -1333,8 +1333,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "f()", label: "f()",
source_range: [98; 99), source_range: 98..99,
delete: [98; 99), delete: 98..99,
insert: "f()$0", insert: "f()$0",
kind: Function, kind: Function,
lookup: "f", lookup: "f",
@ -1342,15 +1342,15 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "m", label: "m",
source_range: [98; 99), source_range: 98..99,
delete: [98; 99), delete: 98..99,
insert: "m", insert: "m",
kind: Module, kind: Module,
}, },
CompletionItem { CompletionItem {
label: "m::E::V", label: "m::E::V",
source_range: [98; 99), source_range: 98..99,
delete: [98; 99), delete: 98..99,
insert: "m::E::V", insert: "m::E::V",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",

View file

@ -7,7 +7,7 @@ use ra_syntax::{
algo::{find_covering_element, find_node_at_offset}, algo::{find_covering_element, find_node_at_offset},
ast, AstNode, ast, AstNode,
SyntaxKind::*, SyntaxKind::*,
SyntaxNode, SyntaxToken, TextRange, TextUnit, SyntaxNode, SyntaxToken, TextRange, TextSize,
}; };
use ra_text_edit::AtomTextEdit; use ra_text_edit::AtomTextEdit;
@ -20,7 +20,7 @@ pub(crate) struct CompletionContext<'a> {
pub(super) sema: Semantics<'a, RootDatabase>, pub(super) sema: Semantics<'a, RootDatabase>,
pub(super) db: &'a RootDatabase, pub(super) db: &'a RootDatabase,
pub(super) config: &'a CompletionConfig, pub(super) config: &'a CompletionConfig,
pub(super) offset: TextUnit, pub(super) offset: TextSize,
/// The token before the cursor, in the original file. /// The token before the cursor, in the original file.
pub(super) original_token: SyntaxToken, pub(super) original_token: SyntaxToken,
/// The token before the cursor, in the macro-expanded file. /// The token before the cursor, in the macro-expanded file.
@ -167,7 +167,7 @@ impl<'a> CompletionContext<'a> {
match self.token.kind() { match self.token.kind() {
// workaround when completion is triggered by trigger characters. // workaround when completion is triggered by trigger characters.
IDENT => self.original_token.text_range(), IDENT => self.original_token.text_range(),
_ => TextRange::offset_len(self.offset, 0.into()), _ => TextRange::empty(self.offset),
} }
} }
@ -190,7 +190,7 @@ impl<'a> CompletionContext<'a> {
&mut self, &mut self,
original_file: &SyntaxNode, original_file: &SyntaxNode,
file_with_fake_ident: SyntaxNode, file_with_fake_ident: SyntaxNode,
offset: TextUnit, offset: TextSize,
) { ) {
// First, let's try to complete a reference to some declaration. // First, let's try to complete a reference to some declaration.
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) { if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) {
@ -224,7 +224,8 @@ impl<'a> CompletionContext<'a> {
} }
if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) { if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) {
if let Some(pat) = let_stmt.pat() { if let Some(pat) = let_stmt.pat() {
if bind_pat.syntax().text_range().is_subrange(&pat.syntax().text_range()) { if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range())
{
self.is_pat_binding_or_const = false; self.is_pat_binding_or_const = false;
} }
} }
@ -246,7 +247,7 @@ impl<'a> CompletionContext<'a> {
&mut self, &mut self,
original_file: &SyntaxNode, original_file: &SyntaxNode,
name_ref: ast::NameRef, name_ref: ast::NameRef,
offset: TextUnit, offset: TextSize,
) { ) {
self.name_ref_syntax = self.name_ref_syntax =
find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); find_node_at_offset(&original_file, name_ref.syntax().text_range().start());

View file

@ -452,8 +452,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [121; 123), source_range: 121..123,
delete: [121; 123), delete: 121..123,
insert: "Foo", insert: "Foo",
kind: EnumVariant, kind: EnumVariant,
detail: "{ x: i32, y: i32 }", detail: "{ x: i32, y: i32 }",
@ -478,8 +478,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo(…)", label: "Foo(…)",
source_range: [115; 117), source_range: 115..117,
delete: [115; 117), delete: 115..117,
insert: "Foo($0)", insert: "Foo($0)",
kind: EnumVariant, kind: EnumVariant,
lookup: "Foo", lookup: "Foo",
@ -506,8 +506,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Foo", label: "Foo",
source_range: [104; 106), source_range: 104..106,
delete: [104; 106), delete: 104..106,
insert: "Foo", insert: "Foo",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
@ -534,8 +534,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [203; 206), source_range: 203..206,
delete: [203; 206), delete: 203..206,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -543,8 +543,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "something_deprecated()", label: "something_deprecated()",
source_range: [203; 206), source_range: 203..206,
delete: [203; 206), delete: 203..206,
insert: "something_deprecated()$0", insert: "something_deprecated()$0",
kind: Function, kind: Function,
lookup: "something_deprecated", lookup: "something_deprecated",
@ -553,8 +553,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "something_else_deprecated()", label: "something_else_deprecated()",
source_range: [203; 206), source_range: 203..206,
delete: [203; 206), delete: 203..206,
insert: "something_else_deprecated()$0", insert: "something_else_deprecated()$0",
kind: Function, kind: Function,
lookup: "something_else_deprecated", lookup: "something_else_deprecated",
@ -580,8 +580,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [61; 64), source_range: 61..64,
delete: [61; 64), delete: 61..64,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -589,8 +589,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "no_args()", label: "no_args()",
source_range: [61; 64), source_range: 61..64,
delete: [61; 64), delete: 61..64,
insert: "no_args()$0", insert: "no_args()$0",
kind: Function, kind: Function,
lookup: "no_args", lookup: "no_args",
@ -610,8 +610,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [80; 85), source_range: 80..85,
delete: [80; 85), delete: 80..85,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -619,8 +619,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "with_args(…)", label: "with_args(…)",
source_range: [80; 85), source_range: 80..85,
delete: [80; 85), delete: 80..85,
insert: "with_args(${1:x}, ${2:y})$0", insert: "with_args(${1:x}, ${2:y})$0",
kind: Function, kind: Function,
lookup: "with_args", lookup: "with_args",
@ -646,8 +646,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo()", label: "foo()",
source_range: [163; 164), source_range: 163..164,
delete: [163; 164), delete: 163..164,
insert: "foo()$0", insert: "foo()$0",
kind: Method, kind: Method,
lookup: "foo", lookup: "foo",
@ -674,23 +674,23 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "None", label: "None",
source_range: [144; 147), source_range: 144..147,
delete: [144; 147), delete: 144..147,
insert: "None", insert: "None",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Option", label: "Option",
source_range: [144; 147), source_range: 144..147,
delete: [144; 147), delete: 144..147,
insert: "Option", insert: "Option",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "Some(…)", label: "Some(…)",
source_range: [144; 147), source_range: 144..147,
delete: [144; 147), delete: 144..147,
insert: "Some($0)", insert: "Some($0)",
kind: EnumVariant, kind: EnumVariant,
lookup: "Some", lookup: "Some",
@ -699,8 +699,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [144; 147), source_range: 144..147,
delete: [144; 147), delete: 144..147,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -725,23 +725,23 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "None", label: "None",
source_range: [185; 188), source_range: 185..188,
delete: [185; 188), delete: 185..188,
insert: "None", insert: "None",
kind: EnumVariant, kind: EnumVariant,
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
label: "Option", label: "Option",
source_range: [185; 188), source_range: 185..188,
delete: [185; 188), delete: 185..188,
insert: "Option", insert: "Option",
kind: Enum, kind: Enum,
}, },
CompletionItem { CompletionItem {
label: "Some(…)", label: "Some(…)",
source_range: [185; 188), source_range: 185..188,
delete: [185; 188), delete: 185..188,
insert: "Some($0)", insert: "Some($0)",
kind: EnumVariant, kind: EnumVariant,
lookup: "Some", lookup: "Some",
@ -771,8 +771,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo(…)", label: "foo(…)",
source_range: [171; 172), source_range: 171..172,
delete: [171; 172), delete: 171..172,
insert: "foo(${1:x})$0", insert: "foo(${1:x})$0",
kind: Method, kind: Method,
lookup: "foo", lookup: "foo",
@ -806,8 +806,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo(…)", label: "foo(…)",
source_range: [171; 172), source_range: 171..172,
delete: [171; 172), delete: 171..172,
insert: "foo($0)", insert: "foo($0)",
kind: Method, kind: Method,
lookup: "foo", lookup: "foo",
@ -833,8 +833,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "foo", label: "foo",
source_range: [40; 41), source_range: 40..41,
delete: [40; 41), delete: 40..41,
insert: "foo", insert: "foo",
kind: Function, kind: Function,
detail: "pub fn foo()", detail: "pub fn foo()",
@ -860,16 +860,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "frobnicate", label: "frobnicate",
source_range: [35; 39), source_range: 35..39,
delete: [35; 39), delete: 35..39,
insert: "frobnicate", insert: "frobnicate",
kind: Function, kind: Function,
detail: "fn frobnicate()", detail: "fn frobnicate()",
}, },
CompletionItem { CompletionItem {
label: "main", label: "main",
source_range: [35; 39), source_range: 35..39,
delete: [35; 39), delete: 35..39,
insert: "main", insert: "main",
kind: Function, kind: Function,
detail: "fn main()", detail: "fn main()",
@ -892,8 +892,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "new", label: "new",
source_range: [67; 69), source_range: 67..69,
delete: [67; 69), delete: 67..69,
insert: "new", insert: "new",
kind: Function, kind: Function,
detail: "fn new() -> Foo", detail: "fn new() -> Foo",
@ -917,16 +917,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Vec<…>", label: "Vec<…>",
source_range: [61; 63), source_range: 61..63,
delete: [61; 63), delete: 61..63,
insert: "Vec<$0>", insert: "Vec<$0>",
kind: Struct, kind: Struct,
lookup: "Vec", lookup: "Vec",
}, },
CompletionItem { CompletionItem {
label: "foo(…)", label: "foo(…)",
source_range: [61; 63), source_range: 61..63,
delete: [61; 63), delete: 61..63,
insert: "foo(${1:xs})$0", insert: "foo(${1:xs})$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -947,16 +947,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Vec<…>", label: "Vec<…>",
source_range: [64; 66), source_range: 64..66,
delete: [64; 66), delete: 64..66,
insert: "Vec<$0>", insert: "Vec<$0>",
kind: TypeAlias, kind: TypeAlias,
lookup: "Vec", lookup: "Vec",
}, },
CompletionItem { CompletionItem {
label: "foo(…)", label: "foo(…)",
source_range: [64; 66), source_range: 64..66,
delete: [64; 66), delete: 64..66,
insert: "foo(${1:xs})$0", insert: "foo(${1:xs})$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -977,15 +977,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Vec", label: "Vec",
source_range: [68; 70), source_range: 68..70,
delete: [68; 70), delete: 68..70,
insert: "Vec", insert: "Vec",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "foo(…)", label: "foo(…)",
source_range: [68; 70), source_range: 68..70,
delete: [68; 70), delete: 68..70,
insert: "foo(${1:xs})$0", insert: "foo(${1:xs})$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -1006,15 +1006,15 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "Vec", label: "Vec",
source_range: [61; 63), source_range: 61..63,
delete: [61; 63), delete: 61..63,
insert: "Vec", insert: "Vec",
kind: Struct, kind: Struct,
}, },
CompletionItem { CompletionItem {
label: "foo(…)", label: "foo(…)",
source_range: [61; 63), source_range: 61..63,
delete: [61; 63), delete: 61..63,
insert: "foo(${1:xs})$0", insert: "foo(${1:xs})$0",
kind: Function, kind: Function,
lookup: "foo", lookup: "foo",
@ -1046,8 +1046,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "frobnicate!", label: "frobnicate!",
source_range: [9; 9), source_range: 9..9,
delete: [9; 9), delete: 9..9,
insert: "frobnicate", insert: "frobnicate",
kind: Macro, kind: Macro,
detail: "#[macro_export]\nmacro_rules! frobnicate", detail: "#[macro_export]\nmacro_rules! frobnicate",
@ -1072,16 +1072,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "frobnicate!", label: "frobnicate!",
source_range: [56; 60), source_range: 56..60,
delete: [56; 60), delete: 56..60,
insert: "frobnicate", insert: "frobnicate",
kind: Macro, kind: Macro,
detail: "macro_rules! frobnicate", detail: "macro_rules! frobnicate",
}, },
CompletionItem { CompletionItem {
label: "main()", label: "main()",
source_range: [56; 60), source_range: 56..60,
delete: [56; 60), delete: 56..60,
insert: "main()$0", insert: "main()$0",
kind: Function, kind: Function,
lookup: "main", lookup: "main",
@ -1109,24 +1109,24 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "another_field", label: "another_field",
source_range: [201; 201), source_range: 201..201,
delete: [201; 201), delete: 201..201,
insert: "another_field", insert: "another_field",
kind: Field, kind: Field,
detail: "i64", detail: "i64",
}, },
CompletionItem { CompletionItem {
label: "my_string", label: "my_string",
source_range: [201; 201), source_range: 201..201,
delete: [201; 201), delete: 201..201,
insert: "my_string", insert: "my_string",
kind: Field, kind: Field,
detail: "{unknown}", detail: "{unknown}",
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [201; 201), source_range: 201..201,
delete: [201; 201), delete: 201..201,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1153,16 +1153,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "another_field", label: "another_field",
source_range: [208; 208), source_range: 208..208,
delete: [208; 208), delete: 208..208,
insert: "another_field", insert: "another_field",
kind: Field, kind: Field,
detail: "i64", detail: "i64",
}, },
CompletionItem { CompletionItem {
label: "another_good_type", label: "another_good_type",
source_range: [208; 208), source_range: 208..208,
delete: [208; 208), delete: 208..208,
insert: "another_good_type", insert: "another_good_type",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1170,8 +1170,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [208; 208), source_range: 208..208,
delete: [208; 208), delete: 208..208,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1201,16 +1201,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "another_field", label: "another_field",
source_range: [270; 270), source_range: 270..270,
delete: [270; 270), delete: 270..270,
insert: "another_field", insert: "another_field",
kind: Field, kind: Field,
detail: "i64", detail: "i64",
}, },
CompletionItem { CompletionItem {
label: "another_good_type", label: "another_good_type",
source_range: [270; 270), source_range: 270..270,
delete: [270; 270), delete: 270..270,
insert: "another_good_type", insert: "another_good_type",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1218,8 +1218,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [270; 270), source_range: 270..270,
delete: [270; 270), delete: 270..270,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1249,8 +1249,8 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "another_field", label: "another_field",
source_range: [336; 336), source_range: 336..336,
delete: [336; 336), delete: 336..336,
insert: "another_field", insert: "another_field",
kind: Field, kind: Field,
detail: "i64", detail: "i64",
@ -1258,16 +1258,16 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "another_good_type", label: "another_good_type",
source_range: [336; 336), source_range: 336..336,
delete: [336; 336), delete: 336..336,
insert: "another_good_type", insert: "another_good_type",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [336; 336), source_range: 336..336,
delete: [336; 336), delete: 336..336,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1296,16 +1296,16 @@ mod tests {
[ [
CompletionItem { CompletionItem {
label: "another_field", label: "another_field",
source_range: [328; 328), source_range: 328..328,
delete: [328; 328), delete: 328..328,
insert: "another_field", insert: "another_field",
kind: Field, kind: Field,
detail: "i64", detail: "i64",
}, },
CompletionItem { CompletionItem {
label: "another_good_type", label: "another_good_type",
source_range: [328; 328), source_range: 328..328,
delete: [328; 328), delete: 328..328,
insert: "another_good_type", insert: "another_good_type",
kind: Field, kind: Field,
detail: "u32", detail: "u32",
@ -1313,8 +1313,8 @@ mod tests {
}, },
CompletionItem { CompletionItem {
label: "the_field", label: "the_field",
source_range: [328; 328), source_range: 328..328,
delete: [328; 328), delete: 328..328,
insert: "the_field", insert: "the_field",
kind: Field, kind: Field,
detail: "u32", detail: "u32",

View file

@ -171,7 +171,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] {
let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
let end = use_tree_list_node.text_range().end(); let end = use_tree_list_node.text_range().end();
let range = TextRange::from_to(start, end); let range = TextRange::new(start, end);
return Some(TextEdit::delete(range)); return Some(TextEdit::delete(range));
} }
None None
@ -277,7 +277,7 @@ mod tests {
assert!( assert!(
diagnostic.range.start() <= file_position.offset diagnostic.range.start() <= file_position.offset
&& diagnostic.range.end() >= file_position.offset, && diagnostic.range.end() >= file_position.offset,
"diagnostic range {} does not touch cursor position {}", "diagnostic range {:?} does not touch cursor position {:?}",
diagnostic.range, diagnostic.range,
file_position.offset file_position.offset
); );
@ -603,7 +603,7 @@ mod tests {
[ [
Diagnostic { Diagnostic {
message: "unresolved module", message: "unresolved module",
range: [0; 8), range: 0..8,
fix: Some( fix: Some(
SourceChange { SourceChange {
label: "create module", label: "create module",
@ -652,7 +652,7 @@ mod tests {
[ [
Diagnostic { Diagnostic {
message: "Missing structure fields:\n- b", message: "Missing structure fields:\n- b",
range: [224; 233), range: 224..233,
fix: Some( fix: Some(
SourceChange { SourceChange {
label: "fill struct fields", label: "fill struct fields",
@ -664,7 +664,7 @@ mod tests {
edit: TextEdit { edit: TextEdit {
atoms: [ atoms: [
AtomTextEdit { AtomTextEdit {
delete: [3; 9), delete: 3..9,
insert: "{a:42, b: ()}", insert: "{a:42, b: ()}",
}, },
], ],
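The `{}` → `{:?}` switch in the assert message above suggests that, unlike the old `text_unit` types, `TextRange` and `TextSize` only provide `Debug` formatting; a hedged sketch of what the message now prints:

```rust
use text_size::{TextRange, TextSize};

fn main() {
    let range = TextRange::new(TextSize::from(0), TextSize::from(8));
    let offset = TextSize::from(3);

    // Prints: diagnostic range 0..8 does not touch cursor position 3
    println!("diagnostic range {:?} does not touch cursor position {:?}", range, offset);
}
```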

View file

@ -224,8 +224,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "Foo", label: "Foo",
navigation_range: [8; 11), navigation_range: 8..11,
node_range: [1; 26), node_range: 1..26,
kind: STRUCT_DEF, kind: STRUCT_DEF,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -235,8 +235,8 @@ fn very_obsolete() {}
0, 0,
), ),
label: "x", label: "x",
navigation_range: [18; 19), navigation_range: 18..19,
node_range: [18; 24), node_range: 18..24,
kind: RECORD_FIELD_DEF, kind: RECORD_FIELD_DEF,
detail: Some( detail: Some(
"i32", "i32",
@ -246,8 +246,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "m", label: "m",
navigation_range: [32; 33), navigation_range: 32..33,
node_range: [28; 158), node_range: 28..158,
kind: MODULE, kind: MODULE,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -257,8 +257,8 @@ fn very_obsolete() {}
2, 2,
), ),
label: "bar1", label: "bar1",
navigation_range: [43; 47), navigation_range: 43..47,
node_range: [40; 52), node_range: 40..52,
kind: FN_DEF, kind: FN_DEF,
detail: Some( detail: Some(
"fn()", "fn()",
@ -270,8 +270,8 @@ fn very_obsolete() {}
2, 2,
), ),
label: "bar2", label: "bar2",
navigation_range: [60; 64), navigation_range: 60..64,
node_range: [57; 81), node_range: 57..81,
kind: FN_DEF, kind: FN_DEF,
detail: Some( detail: Some(
"fn<T>(t: T) -> T", "fn<T>(t: T) -> T",
@ -283,8 +283,8 @@ fn very_obsolete() {}
2, 2,
), ),
label: "bar3", label: "bar3",
navigation_range: [89; 93), navigation_range: 89..93,
node_range: [86; 156), node_range: 86..156,
kind: FN_DEF, kind: FN_DEF,
detail: Some( detail: Some(
"fn<A, B>(a: A, b: B) -> Vec< u32 >", "fn<A, B>(a: A, b: B) -> Vec< u32 >",
@ -294,8 +294,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "E", label: "E",
navigation_range: [165; 166), navigation_range: 165..166,
node_range: [160; 180), node_range: 160..180,
kind: ENUM_DEF, kind: ENUM_DEF,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -305,8 +305,8 @@ fn very_obsolete() {}
6, 6,
), ),
label: "X", label: "X",
navigation_range: [169; 170), navigation_range: 169..170,
node_range: [169; 170), node_range: 169..170,
kind: ENUM_VARIANT, kind: ENUM_VARIANT,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -316,8 +316,8 @@ fn very_obsolete() {}
6, 6,
), ),
label: "Y", label: "Y",
navigation_range: [172; 173), navigation_range: 172..173,
node_range: [172; 178), node_range: 172..178,
kind: ENUM_VARIANT, kind: ENUM_VARIANT,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -325,8 +325,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "T", label: "T",
navigation_range: [186; 187), navigation_range: 186..187,
node_range: [181; 193), node_range: 181..193,
kind: TYPE_ALIAS_DEF, kind: TYPE_ALIAS_DEF,
detail: Some( detail: Some(
"()", "()",
@ -336,8 +336,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "S", label: "S",
navigation_range: [201; 202), navigation_range: 201..202,
node_range: [194; 213), node_range: 194..213,
kind: STATIC_DEF, kind: STATIC_DEF,
detail: Some( detail: Some(
"i32", "i32",
@ -347,8 +347,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "C", label: "C",
navigation_range: [220; 221), navigation_range: 220..221,
node_range: [214; 232), node_range: 214..232,
kind: CONST_DEF, kind: CONST_DEF,
detail: Some( detail: Some(
"i32", "i32",
@ -358,8 +358,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "impl E", label: "impl E",
navigation_range: [239; 240), navigation_range: 239..240,
node_range: [234; 243), node_range: 234..243,
kind: IMPL_DEF, kind: IMPL_DEF,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -367,8 +367,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "impl fmt::Debug for E", label: "impl fmt::Debug for E",
navigation_range: [265; 266), navigation_range: 265..266,
node_range: [245; 269), node_range: 245..269,
kind: IMPL_DEF, kind: IMPL_DEF,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -376,8 +376,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "mc", label: "mc",
navigation_range: [284; 286), navigation_range: 284..286,
node_range: [271; 303), node_range: 271..303,
kind: MACRO_CALL, kind: MACRO_CALL,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -385,8 +385,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "mcexp", label: "mcexp",
navigation_range: [334; 339), navigation_range: 334..339,
node_range: [305; 356), node_range: 305..356,
kind: MACRO_CALL, kind: MACRO_CALL,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -394,8 +394,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "mcexp", label: "mcexp",
navigation_range: [387; 392), navigation_range: 387..392,
node_range: [358; 409), node_range: 358..409,
kind: MACRO_CALL, kind: MACRO_CALL,
detail: None, detail: None,
deprecated: false, deprecated: false,
@ -403,8 +403,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "obsolete", label: "obsolete",
navigation_range: [428; 436), navigation_range: 428..436,
node_range: [411; 441), node_range: 411..441,
kind: FN_DEF, kind: FN_DEF,
detail: Some( detail: Some(
"fn()", "fn()",
@ -414,8 +414,8 @@ fn very_obsolete() {}
StructureNode { StructureNode {
parent: None, parent: None,
label: "very_obsolete", label: "very_obsolete",
navigation_range: [481; 494), navigation_range: 481..494,
node_range: [443; 499), node_range: 443..499,
kind: FN_DEF, kind: FN_DEF,
detail: Some( detail: Some(
"fn()", "fn()",

View file

@ -9,7 +9,7 @@ use ra_syntax::{
ast::{self, AstNode, AstToken}, ast::{self, AstNode, AstToken},
Direction, NodeOrToken, Direction, NodeOrToken,
SyntaxKind::{self, *}, SyntaxKind::{self, *},
SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T,
}; };
use crate::FileRange; use crate::FileRange;
@ -121,10 +121,10 @@ fn extend_tokens_from_range(
let mut first_token = skip_trivia_token(first_token, Direction::Next)?; let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
let mut last_token = skip_trivia_token(last_token, Direction::Prev)?; let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
while !first_token.text_range().is_subrange(&original_range) { while !original_range.contains_range(first_token.text_range()) {
first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?; first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
} }
while !last_token.text_range().is_subrange(&original_range) { while !original_range.contains_range(last_token.text_range()) {
last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?; last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
} }
@ -161,8 +161,8 @@ fn extend_tokens_from_range(
.take_while(validate) .take_while(validate)
.last()?; .last()?;
let range = first.text_range().extend_to(&last.text_range()); let range = first.text_range().cover(last.text_range());
if original_range.is_subrange(&range) && original_range != range { if range.contains_range(original_range) && original_range != range {
Some(range) Some(range)
} else { } else {
None None
@ -176,7 +176,7 @@ fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
fn extend_single_word_in_comment_or_string( fn extend_single_word_in_comment_or_string(
leaf: &SyntaxToken, leaf: &SyntaxToken,
offset: TextUnit, offset: TextSize,
) -> Option<TextRange> { ) -> Option<TextRange> {
let text: &str = leaf.text(); let text: &str = leaf.text();
let cursor_position: u32 = (offset - leaf.text_range().start()).into(); let cursor_position: u32 = (offset - leaf.text_range().start()).into();
@ -190,10 +190,10 @@ fn extend_single_word_in_comment_or_string(
let start_idx = before.rfind(non_word_char)? as u32; let start_idx = before.rfind(non_word_char)? as u32;
let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32;
let from: TextUnit = (start_idx + 1).into(); let from: TextSize = (start_idx + 1).into();
let to: TextUnit = (cursor_position + end_idx).into(); let to: TextSize = (cursor_position + end_idx).into();
let range = TextRange::from_to(from, to); let range = TextRange::new(from, to);
if range.is_empty() { if range.is_empty() {
None None
} else { } else {
@ -201,24 +201,24 @@ fn extend_single_word_in_comment_or_string(
} }
} }
fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange { fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange {
let ws_text = ws.text(); let ws_text = ws.text();
let suffix = TextRange::from_to(offset, ws.text_range().end()) - ws.text_range().start(); let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
let prefix = TextRange::from_to(ws.text_range().start(), offset) - ws.text_range().start(); let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
let ws_suffix = &ws_text.as_str()[suffix]; let ws_suffix = &ws_text.as_str()[suffix];
let ws_prefix = &ws_text.as_str()[prefix]; let ws_prefix = &ws_text.as_str()[prefix];
if ws_text.contains('\n') && !ws_suffix.contains('\n') { if ws_text.contains('\n') && !ws_suffix.contains('\n') {
if let Some(node) = ws.next_sibling_or_token() { if let Some(node) = ws.next_sibling_or_token() {
let start = match ws_prefix.rfind('\n') { let start = match ws_prefix.rfind('\n') {
Some(idx) => ws.text_range().start() + TextUnit::from((idx + 1) as u32), Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32),
None => node.text_range().start(), None => node.text_range().start(),
}; };
let end = if root.text().char_at(node.text_range().end()) == Some('\n') { let end = if root.text().char_at(node.text_range().end()) == Some('\n') {
node.text_range().end() + TextUnit::of_char('\n') node.text_range().end() + TextSize::of('\n')
} else { } else {
node.text_range().end() node.text_range().end()
}; };
return TextRange::from_to(start, end); return TextRange::new(start, end);
} }
} }
ws.text_range() ws.text_range()
@ -270,13 +270,10 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
.filter(|node| is_single_line_ws(node)) .filter(|node| is_single_line_ws(node))
.unwrap_or(delimiter_node); .unwrap_or(delimiter_node);
return Some(TextRange::from_to(node.text_range().start(), final_node.text_range().end())); return Some(TextRange::new(node.text_range().start(), final_node.text_range().end()));
} }
if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) { if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) {
return Some(TextRange::from_to( return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end()));
delimiter_node.text_range().start(),
node.text_range().end(),
));
} }
None None
@ -286,10 +283,7 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
let prev = adj_comments(&comment, Direction::Prev); let prev = adj_comments(&comment, Direction::Prev);
let next = adj_comments(&comment, Direction::Next); let next = adj_comments(&comment, Direction::Next);
if prev != next { if prev != next {
Some(TextRange::from_to( Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end()))
prev.syntax().text_range().start(),
next.syntax().text_range().end(),
))
} else { } else {
None None
} }
@ -322,7 +316,7 @@ mod tests {
fn do_check(before: &str, afters: &[&str]) { fn do_check(before: &str, afters: &[&str]) {
let (cursor, before) = extract_offset(before); let (cursor, before) = extract_offset(before);
let (analysis, file_id) = single_file(&before); let (analysis, file_id) = single_file(&before);
let range = TextRange::offset_len(cursor, 0.into()); let range = TextRange::empty(cursor);
let mut frange = FileRange { file_id, range }; let mut frange = FileRange { file_id, range };
for &after in afters { for &after in afters {

View file

@ -141,7 +141,7 @@ fn contiguous_range_for_group_unless(
} }
if first != &last { if first != &last {
Some(TextRange::from_to(first.text_range().start(), last.text_range().end())) Some(TextRange::new(first.text_range().start(), last.text_range().end()))
} else { } else {
// The group consists of only one element, therefore it cannot be folded // The group consists of only one element, therefore it cannot be folded
None None
@ -187,10 +187,7 @@ fn contiguous_range_for_comment(
} }
if first != last { if first != last {
Some(TextRange::from_to( Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end()))
first.syntax().text_range().start(),
last.syntax().text_range().end(),
))
} else { } else {
// The group consists of only one element, therefore it cannot be folded // The group consists of only one element, therefore it cannot be folded
None None

View file

@ -140,7 +140,7 @@ mod tests {
struct Foo; struct Foo;
enum E { X(Foo<|>) } enum E { X(Foo<|>) }
", ",
"Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..11 7..10",
"struct Foo;|Foo", "struct Foo;|Foo",
); );
} }
@ -153,7 +153,7 @@ mod tests {
struct Foo; struct Foo;
enum E { X(<|>Foo) } enum E { X(<|>Foo) }
", ",
"Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..11 7..10",
"struct Foo;|Foo", "struct Foo;|Foo",
); );
} }
@ -174,7 +174,7 @@ mod tests {
//- /b.rs //- /b.rs
struct Foo; struct Foo;
", ",
"Foo STRUCT_DEF FileId(2) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(2) 0..11 7..10",
"struct Foo;|Foo", "struct Foo;|Foo",
); );
} }
@ -189,7 +189,7 @@ mod tests {
//- /foo.rs //- /foo.rs
// empty // empty
", ",
"foo SOURCE_FILE FileId(2) [0; 10)", "foo SOURCE_FILE FileId(2) 0..10",
"// empty\n\n", "// empty\n\n",
); );
@ -201,7 +201,7 @@ mod tests {
//- /foo/mod.rs //- /foo/mod.rs
// empty // empty
", ",
"foo SOURCE_FILE FileId(2) [0; 10)", "foo SOURCE_FILE FileId(2) 0..10",
"// empty\n\n", "// empty\n\n",
); );
} }
@ -218,7 +218,7 @@ mod tests {
<|>foo!(); <|>foo!();
} }
", ",
"foo MACRO_CALL FileId(1) [0; 33) [13; 16)", "foo MACRO_CALL FileId(1) 0..33 13..16",
"macro_rules! foo { () => { () } }|foo", "macro_rules! foo { () => { () } }|foo",
); );
} }
@ -238,7 +238,7 @@ mod tests {
#[macro_export] #[macro_export]
macro_rules! foo { () => { () } } macro_rules! foo { () => { () } }
", ",
"foo MACRO_CALL FileId(2) [0; 49) [29; 32)", "foo MACRO_CALL FileId(2) 0..49 29..32",
"#[macro_export]\nmacro_rules! foo { () => { () } }|foo", "#[macro_export]\nmacro_rules! foo { () => { () } }|foo",
); );
} }
@ -254,7 +254,7 @@ mod tests {
#[macro_export] #[macro_export]
macro_rules! foo { () => { () } } macro_rules! foo { () => { () } }
", ",
"foo MACRO_CALL FileId(2) [0; 49) [29; 32)", "foo MACRO_CALL FileId(2) 0..49 29..32",
"#[macro_export]\nmacro_rules! foo { () => { () } }|foo", "#[macro_export]\nmacro_rules! foo { () => { () } }|foo",
); );
} }
@ -274,7 +274,7 @@ mod tests {
<|>foo(); <|>foo();
} }
", ",
"foo FN_DEF FileId(1) [64; 80) [75; 78)", "foo FN_DEF FileId(1) 64..80 75..78",
"define_fn!(foo);|foo", "define_fn!(foo);|foo",
); );
} }
@ -294,7 +294,7 @@ mod tests {
<|>foo(); <|>foo();
} }
", ",
"foo FN_DEF FileId(1) [51; 64) [51; 64)", "foo FN_DEF FileId(1) 51..64 51..64",
"define_fn!();|define_fn!();", "define_fn!();|define_fn!();",
); );
} }
@ -312,7 +312,7 @@ mod tests {
} }
} }
", ",
"foo MACRO_CALL FileId(1) [0; 28) [13; 16)", "foo MACRO_CALL FileId(1) 0..28 13..16",
"macro_rules! foo {() => {0}}|foo", "macro_rules! foo {() => {0}}|foo",
); );
} }
@ -330,7 +330,7 @@ mod tests {
} }
} }
", ",
"foo MACRO_CALL FileId(1) [0; 28) [13; 16)", "foo MACRO_CALL FileId(1) 0..28 13..16",
"macro_rules! foo {() => {0}}|foo", "macro_rules! foo {() => {0}}|foo",
); );
} }
@ -350,7 +350,7 @@ mod tests {
foo.frobnicate<|>(); foo.frobnicate<|>();
} }
", ",
"frobnicate FN_DEF FileId(1) [27; 51) [30; 40)", "frobnicate FN_DEF FileId(1) 27..51 30..40",
"fn frobnicate(&self) { }|frobnicate", "fn frobnicate(&self) { }|frobnicate",
); );
} }
@ -369,7 +369,7 @@ mod tests {
foo.spam<|>; foo.spam<|>;
} }
", ",
"spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", "spam RECORD_FIELD_DEF FileId(1) 17..26 17..21",
"spam: u32|spam", "spam: u32|spam",
); );
} }
@ -390,7 +390,7 @@ mod tests {
} }
} }
", ",
"spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", "spam RECORD_FIELD_DEF FileId(1) 17..26 17..21",
"spam: u32|spam", "spam: u32|spam",
); );
} }
@ -409,7 +409,7 @@ mod tests {
let Foo { spam<|>: _, } = foo let Foo { spam<|>: _, } = foo
} }
", ",
"spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", "spam RECORD_FIELD_DEF FileId(1) 17..26 17..21",
"spam: u32|spam", "spam: u32|spam",
); );
} }
@ -426,7 +426,7 @@ mod tests {
Foo { spam<|>: m!() } Foo { spam<|>: m!() }
} }
", ",
"spam RECORD_FIELD_DEF FileId(1) [45; 54) [45; 49)", "spam RECORD_FIELD_DEF FileId(1) 45..54 45..49",
"spam: u32|spam", "spam: u32|spam",
); );
} }
@ -443,7 +443,7 @@ mod tests {
foo.<|>0; foo.<|>0;
} }
", ",
"TUPLE_FIELD_DEF FileId(1) [11; 14)", "TUPLE_FIELD_DEF FileId(1) 11..14",
"u32", "u32",
); );
} }
@ -462,7 +462,7 @@ mod tests {
Foo::frobnicate<|>(); Foo::frobnicate<|>();
} }
", ",
"frobnicate FN_DEF FileId(1) [27; 46) [30; 40)", "frobnicate FN_DEF FileId(1) 27..46 30..40",
"fn frobnicate() { }|frobnicate", "fn frobnicate() { }|frobnicate",
); );
} }
@ -480,7 +480,7 @@ mod tests {
Foo::frobnicate<|>(); Foo::frobnicate<|>();
} }
", ",
"frobnicate FN_DEF FileId(1) [16; 32) [19; 29)", "frobnicate FN_DEF FileId(1) 16..32 19..29",
"fn frobnicate();|frobnicate", "fn frobnicate();|frobnicate",
); );
} }
@ -500,7 +500,7 @@ mod tests {
Foo::frobnicate<|>(); Foo::frobnicate<|>();
} }
", ",
"frobnicate FN_DEF FileId(1) [30; 46) [33; 43)", "frobnicate FN_DEF FileId(1) 30..46 33..43",
"fn frobnicate();|frobnicate", "fn frobnicate();|frobnicate",
); );
} }
@ -517,7 +517,7 @@ mod tests {
} }
} }
", ",
"impl IMPL_DEF FileId(1) [12; 73)", "impl IMPL_DEF FileId(1) 12..73",
"impl Foo {...}", "impl Foo {...}",
); );
@ -531,7 +531,7 @@ mod tests {
} }
} }
", ",
"impl IMPL_DEF FileId(1) [12; 73)", "impl IMPL_DEF FileId(1) 12..73",
"impl Foo {...}", "impl Foo {...}",
); );
@ -545,7 +545,7 @@ mod tests {
} }
} }
", ",
"impl IMPL_DEF FileId(1) [15; 75)", "impl IMPL_DEF FileId(1) 15..75",
"impl Foo {...}", "impl Foo {...}",
); );
@ -558,7 +558,7 @@ mod tests {
} }
} }
", ",
"impl IMPL_DEF FileId(1) [15; 62)", "impl IMPL_DEF FileId(1) 15..62",
"impl Foo {...}", "impl Foo {...}",
); );
} }
@ -578,7 +578,7 @@ mod tests {
} }
} }
", ",
"impl IMPL_DEF FileId(1) [49; 115)", "impl IMPL_DEF FileId(1) 49..115",
"impl Make for Foo {...}", "impl Make for Foo {...}",
); );
@ -595,7 +595,7 @@ mod tests {
} }
} }
", ",
"impl IMPL_DEF FileId(1) [49; 115)", "impl IMPL_DEF FileId(1) 49..115",
"impl Make for Foo {...}", "impl Make for Foo {...}",
); );
} }
@ -607,7 +607,7 @@ mod tests {
//- /lib.rs //- /lib.rs
struct Foo<|> { value: u32 } struct Foo<|> { value: u32 }
", ",
"Foo STRUCT_DEF FileId(1) [0; 25) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..25 7..10",
"struct Foo { value: u32 }|Foo", "struct Foo { value: u32 }|Foo",
); );
@ -618,7 +618,7 @@ mod tests {
field<|>: string, field<|>: string,
} }
"#, "#,
"field RECORD_FIELD_DEF FileId(1) [17; 30) [17; 22)", "field RECORD_FIELD_DEF FileId(1) 17..30 17..22",
"field: string|field", "field: string|field",
); );
@ -627,7 +627,7 @@ mod tests {
//- /lib.rs //- /lib.rs
fn foo_test<|>() { } fn foo_test<|>() { }
", ",
"foo_test FN_DEF FileId(1) [0; 17) [3; 11)", "foo_test FN_DEF FileId(1) 0..17 3..11",
"fn foo_test() { }|foo_test", "fn foo_test() { }|foo_test",
); );
@ -638,7 +638,7 @@ mod tests {
Variant, Variant,
} }
", ",
"Foo ENUM_DEF FileId(1) [0; 25) [5; 8)", "Foo ENUM_DEF FileId(1) 0..25 5..8",
"enum Foo {...}|Foo", "enum Foo {...}|Foo",
); );
@ -651,7 +651,7 @@ mod tests {
Variant3, Variant3,
} }
", ",
"Variant2 ENUM_VARIANT FileId(1) [29; 37) [29; 37)", "Variant2 ENUM_VARIANT FileId(1) 29..37 29..37",
"Variant2|Variant2", "Variant2|Variant2",
); );
@ -660,7 +660,7 @@ mod tests {
//- /lib.rs //- /lib.rs
static INNER<|>: &str = ""; static INNER<|>: &str = "";
"#, "#,
"INNER STATIC_DEF FileId(1) [0; 24) [7; 12)", "INNER STATIC_DEF FileId(1) 0..24 7..12",
"static INNER: &str = \"\";|INNER", "static INNER: &str = \"\";|INNER",
); );
@ -669,7 +669,7 @@ mod tests {
//- /lib.rs //- /lib.rs
const INNER<|>: &str = ""; const INNER<|>: &str = "";
"#, "#,
"INNER CONST_DEF FileId(1) [0; 23) [6; 11)", "INNER CONST_DEF FileId(1) 0..23 6..11",
"const INNER: &str = \"\";|INNER", "const INNER: &str = \"\";|INNER",
); );
@ -678,7 +678,7 @@ mod tests {
//- /lib.rs //- /lib.rs
type Thing<|> = Option<()>; type Thing<|> = Option<()>;
"#, "#,
"Thing TYPE_ALIAS_DEF FileId(1) [0; 24) [5; 10)", "Thing TYPE_ALIAS_DEF FileId(1) 0..24 5..10",
"type Thing = Option<()>;|Thing", "type Thing = Option<()>;|Thing",
); );
@ -687,7 +687,7 @@ mod tests {
//- /lib.rs //- /lib.rs
trait Foo<|> { } trait Foo<|> { }
"#, "#,
"Foo TRAIT_DEF FileId(1) [0; 13) [6; 9)", "Foo TRAIT_DEF FileId(1) 0..13 6..9",
"trait Foo { }|Foo", "trait Foo { }|Foo",
); );
@ -696,7 +696,7 @@ mod tests {
//- /lib.rs //- /lib.rs
mod bar<|> { } mod bar<|> { }
"#, "#,
"bar MODULE FileId(1) [0; 11) [4; 7)", "bar MODULE FileId(1) 0..11 4..7",
"mod bar { }|bar", "mod bar { }|bar",
); );
} }
@ -717,7 +717,7 @@ mod tests {
} }
mod confuse_index { fn foo(); } mod confuse_index { fn foo(); }
", ",
"foo FN_DEF FileId(1) [52; 63) [55; 58)", "foo FN_DEF FileId(1) 52..63 55..58",
"fn foo() {}|foo", "fn foo() {}|foo",
); );
} }
@ -746,7 +746,7 @@ mod tests {
format!(\"{}\", fo<|>o()) format!(\"{}\", fo<|>o())
} }
", ",
"foo FN_DEF FileId(1) [398; 415) [401; 404)", "foo FN_DEF FileId(1) 398..415 401..404",
"fn foo() -> i8 {}|foo", "fn foo() -> i8 {}|foo",
); );
} }
@ -760,7 +760,7 @@ mod tests {
t: <|>T, t: <|>T,
} }
", ",
"T TYPE_PARAM FileId(1) [11; 12)", "T TYPE_PARAM FileId(1) 11..12",
"T", "T",
); );
} }
@ -782,7 +782,7 @@ mod tests {
}); });
} }
", ",
"x BIND_PAT FileId(1) [69; 70)", "x BIND_PAT FileId(1) 69..70",
"x", "x",
); );
@ -801,7 +801,7 @@ mod tests {
}); });
} }
", ",
"y BIND_PAT FileId(1) [98; 99)", "y BIND_PAT FileId(1) 98..99",
"y", "y",
); );
} }
@ -818,7 +818,7 @@ mod tests {
} }
} }
", ",
"x BIND_PAT FileId(1) [39; 40)", "x BIND_PAT FileId(1) 39..40",
"x", "x",
); );
} }
@ -833,7 +833,7 @@ mod tests {
<|>foo!(); <|>foo!();
} }
", ",
"foo MACRO_CALL FileId(1) [15; 48) [28; 31)", "foo MACRO_CALL FileId(1) 15..48 28..31",
"macro_rules! foo { () => { () } }|foo", "macro_rules! foo { () => { () } }|foo",
); );
} }
@ -850,7 +850,7 @@ mod tests {
Foo { x<|> }; Foo { x<|> };
} }
", ",
"x BIND_PAT FileId(1) [42; 43)", "x BIND_PAT FileId(1) 42..43",
"x", "x",
) )
} }
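The churn in the expected strings above is purely presentational: text-size formats a range as start..end where text_unit printed [start; end). A minimal standalone sketch of that rendering, using the constructors that appear in this diff (the concrete offsets and the exact Debug string are illustrative assumptions):

    use text_size::{TextRange, TextSize};

    fn main() {
        // The expected navigation strings above rely on TextRange's Debug output,
        // which now prints start..end instead of the old [start; end) form.
        let full_range = TextRange::new(5.into(), 39.into());
        assert_eq!(format!("{:?}", full_range), "5..39");
    }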


@ -68,7 +68,7 @@ mod tests {
f<|> f<|>
} }
", ",
"Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..11 7..10",
); );
} }
@ -83,7 +83,7 @@ mod tests {
f<|> f<|>
} }
", ",
"Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..11 7..10",
); );
} }
@ -102,7 +102,7 @@ mod tests {
} }
} }
", ",
"Foo STRUCT_DEF FileId(1) [52; 65) [59; 62)", "Foo STRUCT_DEF FileId(1) 52..65 59..62",
); );
} }
@ -114,7 +114,7 @@ mod tests {
struct Foo; struct Foo;
fn foo(<|>f: Foo) {} fn foo(<|>f: Foo) {}
", ",
"Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..11 7..10",
); );
} }
@ -130,7 +130,7 @@ mod tests {
bar.<|>0; bar.<|>0;
} }
", ",
"Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", "Foo STRUCT_DEF FileId(1) 0..11 7..10",
); );
} }
} }


@ -275,7 +275,7 @@ mod tests {
", ",
); );
let hover = analysis.hover(position).unwrap().unwrap(); let hover = analysis.hover(position).unwrap().unwrap();
assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into())); assert_eq!(hover.range, TextRange::new(95.into(), 100.into()));
assert_eq!(trim_markup_opt(hover.info.first()), Some("u32")); assert_eq!(trim_markup_opt(hover.info.first()), Some("u32"));
} }


@ -86,7 +86,7 @@ mod tests {
struct Foo<|>; struct Foo<|>;
impl Foo {} impl Foo {}
", ",
&["impl IMPL_DEF FileId(1) [12; 23)"], &["impl IMPL_DEF FileId(1) 12..23"],
); );
} }
@ -99,7 +99,7 @@ mod tests {
impl Foo {} impl Foo {}
impl Foo {} impl Foo {}
", ",
&["impl IMPL_DEF FileId(1) [12; 23)", "impl IMPL_DEF FileId(1) [24; 35)"], &["impl IMPL_DEF FileId(1) 12..23", "impl IMPL_DEF FileId(1) 24..35"],
); );
} }
@ -116,7 +116,7 @@ mod tests {
impl super::Foo {} impl super::Foo {}
} }
", ",
&["impl IMPL_DEF FileId(1) [24; 42)", "impl IMPL_DEF FileId(1) [57; 75)"], &["impl IMPL_DEF FileId(1) 24..42", "impl IMPL_DEF FileId(1) 57..75"],
); );
} }
@ -133,7 +133,7 @@ mod tests {
//- /b.rs //- /b.rs
impl crate::Foo {} impl crate::Foo {}
", ",
&["impl IMPL_DEF FileId(2) [0; 18)", "impl IMPL_DEF FileId(3) [0; 18)"], &["impl IMPL_DEF FileId(2) 0..18", "impl IMPL_DEF FileId(3) 0..18"],
); );
} }
@ -146,7 +146,7 @@ mod tests {
struct Foo; struct Foo;
impl T for Foo {} impl T for Foo {}
", ",
&["impl IMPL_DEF FileId(1) [23; 40)"], &["impl IMPL_DEF FileId(1) 23..40"],
); );
} }
@ -164,7 +164,7 @@ mod tests {
//- /b.rs //- /b.rs
impl crate::T for crate::Foo {} impl crate::T for crate::Foo {}
", ",
&["impl IMPL_DEF FileId(2) [0; 31)", "impl IMPL_DEF FileId(3) [0; 31)"], &["impl IMPL_DEF FileId(2) 0..31", "impl IMPL_DEF FileId(3) 0..31"],
); );
} }
@ -180,9 +180,9 @@ mod tests {
impl T for &Foo {} impl T for &Foo {}
", ",
&[ &[
"impl IMPL_DEF FileId(1) [23; 34)", "impl IMPL_DEF FileId(1) 23..34",
"impl IMPL_DEF FileId(1) [35; 52)", "impl IMPL_DEF FileId(1) 35..52",
"impl IMPL_DEF FileId(1) [53; 71)", "impl IMPL_DEF FileId(1) 53..71",
], ],
); );
} }
@ -195,7 +195,7 @@ mod tests {
#[derive(Copy)] #[derive(Copy)]
struct Foo<|>; struct Foo<|>;
", ",
&["impl IMPL_DEF FileId(1) [0; 15)"], &["impl IMPL_DEF FileId(1) 0..15"],
); );
} }
} }


@ -322,12 +322,12 @@ mod tests {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: true, type_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: true, type_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [106; 107), range: 106..107,
kind: ParameterHint, kind: ParameterHint,
label: "a", label: "a",
}, },
InlayHint { InlayHint {
range: [109; 110), range: 109..110,
kind: ParameterHint, kind: ParameterHint,
label: "b", label: "b",
}, },
@ -358,7 +358,7 @@ mod tests {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ type_hints: true, parameter_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ type_hints: true, parameter_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [97; 99), range: 97..99,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
@ -382,12 +382,12 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [69; 71), range: 69..71,
kind: TypeHint, kind: TypeHint,
label: "Test<i32>", label: "Test<i32>",
}, },
InlayHint { InlayHint {
range: [105; 111), range: 105..111,
kind: TypeHint, kind: TypeHint,
label: "&Test<i32>", label: "&Test<i32>",
}, },
@ -439,57 +439,57 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [193; 197), range: 193..197,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [236; 244), range: 236..244,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [275; 279), range: 275..279,
kind: TypeHint, kind: TypeHint,
label: "&str", label: "&str",
}, },
InlayHint { InlayHint {
range: [539; 543), range: 539..543,
kind: TypeHint, kind: TypeHint,
label: "(i32, char)", label: "(i32, char)",
}, },
InlayHint { InlayHint {
range: [566; 567), range: 566..567,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [570; 571), range: 570..571,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [573; 574), range: 573..574,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [577; 578), range: 577..578,
kind: TypeHint, kind: TypeHint,
label: "f64", label: "f64",
}, },
InlayHint { InlayHint {
range: [580; 581), range: 580..581,
kind: TypeHint, kind: TypeHint,
label: "f64", label: "f64",
}, },
InlayHint { InlayHint {
range: [584; 585), range: 584..585,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [627; 628), range: 627..628,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
@ -519,47 +519,47 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [21; 30), range: 21..30,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [57; 66), range: 57..66,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [115; 123), range: 115..123,
kind: TypeHint, kind: TypeHint,
label: "|…| -> i32", label: "|…| -> i32",
}, },
InlayHint { InlayHint {
range: [127; 128), range: 127..128,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [130; 131), range: 130..131,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [133; 134), range: 133..134,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [136; 137), range: 136..137,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [201; 213), range: 201..213,
kind: TypeHint, kind: TypeHint,
label: "&|…| -> i32", label: "&|…| -> i32",
}, },
InlayHint { InlayHint {
range: [236; 245), range: 236..245,
kind: TypeHint, kind: TypeHint,
label: "|| -> i32", label: "|| -> i32",
}, },
@ -583,12 +583,12 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [21; 30), range: 21..30,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [44; 53), range: 44..53,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
@ -633,57 +633,57 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [188; 192), range: 188..192,
kind: TypeHint, kind: TypeHint,
label: "CustomOption<Test>", label: "CustomOption<Test>",
}, },
InlayHint { InlayHint {
range: [267; 271), range: 267..271,
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<Test>", label: "&CustomOption<Test>",
}, },
InlayHint { InlayHint {
range: [300; 304), range: 300..304,
kind: TypeHint, kind: TypeHint,
label: "&Test", label: "&Test",
}, },
InlayHint { InlayHint {
range: [341; 342), range: 341..342,
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<u32>", label: "&CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [344; 345), range: 344..345,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [387; 388), range: 387..388,
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<u32>", label: "&CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [393; 394), range: 393..394,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [441; 442), range: 441..442,
kind: TypeHint, kind: TypeHint,
label: "&u32", label: "&u32",
}, },
InlayHint { InlayHint {
range: [448; 449), range: 448..449,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [500; 501), range: 500..501,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [543; 544), range: 543..544,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
@ -728,57 +728,57 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [188; 192), range: 188..192,
kind: TypeHint, kind: TypeHint,
label: "CustomOption<Test>", label: "CustomOption<Test>",
}, },
InlayHint { InlayHint {
range: [273; 277), range: 273..277,
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<Test>", label: "&CustomOption<Test>",
}, },
InlayHint { InlayHint {
range: [309; 313), range: 309..313,
kind: TypeHint, kind: TypeHint,
label: "&Test", label: "&Test",
}, },
InlayHint { InlayHint {
range: [353; 354), range: 353..354,
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<u32>", label: "&CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [356; 357), range: 356..357,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [402; 403), range: 402..403,
kind: TypeHint, kind: TypeHint,
label: "&CustomOption<u32>", label: "&CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [408; 409), range: 408..409,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [459; 460), range: 459..460,
kind: TypeHint, kind: TypeHint,
label: "&u32", label: "&u32",
}, },
InlayHint { InlayHint {
range: [466; 467), range: 466..467,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [521; 522), range: 521..522,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
InlayHint { InlayHint {
range: [567; 568), range: 567..568,
kind: TypeHint, kind: TypeHint,
label: "&u8", label: "&u8",
}, },
@ -823,52 +823,52 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [252; 256), range: 252..256,
kind: TypeHint, kind: TypeHint,
label: "CustomOption<Test>", label: "CustomOption<Test>",
}, },
InlayHint { InlayHint {
range: [277; 281), range: 277..281,
kind: TypeHint, kind: TypeHint,
label: "Test", label: "Test",
}, },
InlayHint { InlayHint {
range: [310; 311), range: 310..311,
kind: TypeHint, kind: TypeHint,
label: "CustomOption<u32>", label: "CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [313; 314), range: 313..314,
kind: TypeHint, kind: TypeHint,
label: "u8", label: "u8",
}, },
InlayHint { InlayHint {
range: [348; 349), range: 348..349,
kind: TypeHint, kind: TypeHint,
label: "CustomOption<u32>", label: "CustomOption<u32>",
}, },
InlayHint { InlayHint {
range: [354; 355), range: 354..355,
kind: TypeHint, kind: TypeHint,
label: "u8", label: "u8",
}, },
InlayHint { InlayHint {
range: [394; 395), range: 394..395,
kind: TypeHint, kind: TypeHint,
label: "u32", label: "u32",
}, },
InlayHint { InlayHint {
range: [401; 402), range: 401..402,
kind: TypeHint, kind: TypeHint,
label: "u8", label: "u8",
}, },
InlayHint { InlayHint {
range: [445; 446), range: 445..446,
kind: TypeHint, kind: TypeHint,
label: "u8", label: "u8",
}, },
InlayHint { InlayHint {
range: [480; 481), range: 480..481,
kind: TypeHint, kind: TypeHint,
label: "u8", label: "u8",
}, },
@ -895,17 +895,17 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig { max_length: Some(8), ..Default::default() }).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig { max_length: Some(8), ..Default::default() }).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [74; 75), range: 74..75,
kind: TypeHint, kind: TypeHint,
label: "Smol<u32>", label: "Smol<u32>",
}, },
InlayHint { InlayHint {
range: [98; 99), range: 98..99,
kind: TypeHint, kind: TypeHint,
label: "VeryLongOuterName<…>", label: "VeryLongOuterName<…>",
}, },
InlayHint { InlayHint {
range: [137; 138), range: 137..138,
kind: TypeHint, kind: TypeHint,
label: "Smol<Smol<…>>", label: "Smol<Smol<…>>",
}, },
@ -983,77 +983,77 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig::default()).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [798; 809), range: 798..809,
kind: TypeHint, kind: TypeHint,
label: "i32", label: "i32",
}, },
InlayHint { InlayHint {
range: [842; 843), range: 842..843,
kind: ParameterHint, kind: ParameterHint,
label: "foo", label: "foo",
}, },
InlayHint { InlayHint {
range: [845; 846), range: 845..846,
kind: ParameterHint, kind: ParameterHint,
label: "bar", label: "bar",
}, },
InlayHint { InlayHint {
range: [848; 855), range: 848..855,
kind: ParameterHint, kind: ParameterHint,
label: "msg", label: "msg",
}, },
InlayHint { InlayHint {
range: [860; 871), range: 860..871,
kind: ParameterHint, kind: ParameterHint,
label: "last", label: "last",
}, },
InlayHint { InlayHint {
range: [914; 917), range: 914..917,
kind: ParameterHint, kind: ParameterHint,
label: "param", label: "param",
}, },
InlayHint { InlayHint {
range: [937; 939), range: 937..939,
kind: ParameterHint, kind: ParameterHint,
label: "&self", label: "&self",
}, },
InlayHint { InlayHint {
range: [941; 945), range: 941..945,
kind: ParameterHint, kind: ParameterHint,
label: "param", label: "param",
}, },
InlayHint { InlayHint {
range: [980; 989), range: 980..989,
kind: ParameterHint, kind: ParameterHint,
label: "file_id", label: "file_id",
}, },
InlayHint { InlayHint {
range: [999; 1012), range: 999..1012,
kind: ParameterHint, kind: ParameterHint,
label: "name", label: "name",
}, },
InlayHint { InlayHint {
range: [1022; 1026), range: 1022..1026,
kind: ParameterHint, kind: ParameterHint,
label: "focus_range", label: "focus_range",
}, },
InlayHint { InlayHint {
range: [1036; 1048), range: 1036..1048,
kind: ParameterHint, kind: ParameterHint,
label: "full_range", label: "full_range",
}, },
InlayHint { InlayHint {
range: [1058; 1071), range: 1058..1071,
kind: ParameterHint, kind: ParameterHint,
label: "kind", label: "kind",
}, },
InlayHint { InlayHint {
range: [1081; 1085), range: 1081..1085,
kind: ParameterHint, kind: ParameterHint,
label: "docs", label: "docs",
}, },
InlayHint { InlayHint {
range: [1095; 1099), range: 1095..1099,
kind: ParameterHint, kind: ParameterHint,
label: "description", label: "description",
}, },
@ -1184,12 +1184,12 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [232; 269), range: 232..269,
kind: ChainingHint, kind: ChainingHint,
label: "B", label: "B",
}, },
InlayHint { InlayHint {
range: [232; 239), range: 232..239,
kind: ChainingHint, kind: ChainingHint,
label: "A", label: "A",
}, },
@ -1238,12 +1238,12 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [252; 323), range: 252..323,
kind: ChainingHint, kind: ChainingHint,
label: "C", label: "C",
}, },
InlayHint { InlayHint {
range: [252; 300), range: 252..300,
kind: ChainingHint, kind: ChainingHint,
label: "B", label: "B",
}, },
@ -1276,12 +1276,12 @@ fn main() {
assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsConfig{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [403; 452), range: 403..452,
kind: ChainingHint, kind: ChainingHint,
label: "B<X<i32, bool>>", label: "B<X<i32, bool>>",
}, },
InlayHint { InlayHint {
range: [403; 422), range: 403..422,
kind: ChainingHint, kind: ChainingHint,
label: "A<X<i32, bool>>", label: "A<X<i32, bool>>",
}, },


@ -7,7 +7,7 @@ use ra_syntax::{
ast::{self, AstNode, AstToken}, ast::{self, AstNode, AstToken},
Direction, NodeOrToken, SourceFile, Direction, NodeOrToken, SourceFile,
SyntaxKind::{self, WHITESPACE}, SyntaxKind::{self, WHITESPACE},
SyntaxNode, SyntaxToken, TextRange, TextUnit, T, SyntaxNode, SyntaxToken, TextRange, TextSize, T,
}; };
use ra_text_edit::{TextEdit, TextEditBuilder}; use ra_text_edit::{TextEdit, TextEditBuilder};
@ -19,7 +19,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
None => return TextEditBuilder::default().finish(), None => return TextEditBuilder::default().finish(),
Some(pos) => pos, Some(pos) => pos,
}; };
TextRange::offset_len(range.start() + pos, TextUnit::of_char('\n')) TextRange::at(range.start() + pos, TextSize::of('\n'))
} else { } else {
range range
}; };
@ -30,13 +30,13 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
}; };
let mut edit = TextEditBuilder::default(); let mut edit = TextEditBuilder::default();
for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) { for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
let range = match range.intersection(&token.text_range()) { let range = match range.intersect(token.text_range()) {
Some(range) => range, Some(range) => range,
None => continue, None => continue,
} - token.text_range().start(); } - token.text_range().start();
let text = token.text(); let text = token.text();
for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
let pos: TextUnit = (pos as u32).into(); let pos: TextSize = (pos as u32).into();
let off = token.text_range().start() + range.start() + pos; let off = token.text_range().start() + range.start() + pos;
if !edit.invalidates_offset(off) { if !edit.invalidates_offset(off) {
remove_newline(&mut edit, &token, off); remove_newline(&mut edit, &token, off);
@ -47,16 +47,16 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
edit.finish() edit.finish()
} }
fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) { fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) {
if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
// The node is either the first or the last in the file // The node is either the first or the last in the file
let suff = &token.text()[TextRange::from_to( let suff = &token.text()[TextRange::new(
offset - token.text_range().start() + TextUnit::of_char('\n'), offset - token.text_range().start() + TextSize::of('\n'),
TextUnit::of_str(token.text()), TextSize::of(token.text().as_str()),
)]; )];
let spaces = suff.bytes().take_while(|&b| b == b' ').count(); let spaces = suff.bytes().take_while(|&b| b == b' ').count();
edit.replace(TextRange::offset_len(offset, ((spaces + 1) as u32).into()), " ".to_string()); edit.replace(TextRange::at(offset, ((spaces + 1) as u32).into()), " ".to_string());
return; return;
} }
@ -65,7 +65,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
let next = token.next_sibling_or_token().unwrap(); let next = token.next_sibling_or_token().unwrap();
if is_trailing_comma(prev.kind(), next.kind()) { if is_trailing_comma(prev.kind(), next.kind()) {
// Removes: trailing comma, newline (incl. surrounding whitespace) // Removes: trailing comma, newline (incl. surrounding whitespace)
edit.delete(TextRange::from_to(prev.text_range().start(), token.text_range().end())); edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end()));
return; return;
} }
if prev.kind() == T![,] && next.kind() == T!['}'] { if prev.kind() == T![,] && next.kind() == T!['}'] {
@ -76,7 +76,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
" " " "
}; };
edit.replace( edit.replace(
TextRange::from_to(prev.text_range().start(), token.text_range().end()), TextRange::new(prev.text_range().start(), token.text_range().end()),
space.to_string(), space.to_string(),
); );
return; return;
@ -87,9 +87,9 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
next.as_token().cloned().and_then(ast::Comment::cast), next.as_token().cloned().and_then(ast::Comment::cast),
) { ) {
// Removes: newline (incl. surrounding whitespace), start of the next comment // Removes: newline (incl. surrounding whitespace), start of the next comment
edit.delete(TextRange::from_to( edit.delete(TextRange::new(
token.text_range().start(), token.text_range().start(),
next.syntax().text_range().start() + TextUnit::of_str(next.prefix()), next.syntax().text_range().start() + TextSize::of(next.prefix()),
)); ));
return; return;
} }
@ -170,7 +170,7 @@ mod tests {
fn check_join_lines(before: &str, after: &str) { fn check_join_lines(before: &str, after: &str) {
check_action(before, after, |file, offset| { check_action(before, after, |file, offset| {
let range = TextRange::offset_len(offset, 0.into()); let range = TextRange::empty(offset);
let res = join_lines(file, range); let res = join_lines(file, range);
Some(res) Some(res)
}) })
@ -420,10 +420,10 @@ fn foo() {
check_join_lines( check_join_lines(
r" r"
<|>use ra_syntax::{ <|>use ra_syntax::{
TextUnit, TextRange, TextSize, TextRange,
};", };",
r" r"
<|>use ra_syntax::{TextUnit, TextRange, <|>use ra_syntax::{TextSize, TextRange,
};", };",
); );
} }
@ -434,11 +434,11 @@ fn foo() {
check_join_lines( check_join_lines(
r" r"
use ra_syntax::{ use ra_syntax::{
<|> TextUnit, TextRange <|> TextSize, TextRange
};", };",
r" r"
use ra_syntax::{ use ra_syntax::{
<|> TextUnit, TextRange};", <|> TextSize, TextRange};",
); );
} }
@ -448,11 +448,11 @@ use ra_syntax::{
check_join_lines( check_join_lines(
r" r"
use ra_syntax::{ use ra_syntax::{
<|> TextUnit, TextRange, <|> TextSize, TextRange,
};", };",
r" r"
use ra_syntax::{ use ra_syntax::{
<|> TextUnit, TextRange};", <|> TextSize, TextRange};",
); );
} }
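Taken together, the join_lines hunks above are a pure rename of the range API, with no behavioural change. A small self-contained sketch of the new names as they are used in this file (the offsets and strings are invented for illustration):

    use text_size::{TextRange, TextSize};

    fn main() {
        let offset = TextSize::from(10u32);
        // TextRange::offset_len(offset, TextUnit::of_char('\n')) becomes:
        let newline = TextRange::at(offset, TextSize::of('\n'));
        // TextRange::from_to(start, end) becomes:
        let span = TextRange::new(offset, offset + TextSize::of("foo"));
        // TextRange::offset_len(offset, 0.into()) becomes:
        let empty = TextRange::empty(offset);
        assert!(empty.is_empty());
        // intersection(&other) becomes intersect(other), taking the range by value.
        assert_eq!(newline.intersect(span), Some(newline));
    }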


@ -60,7 +60,7 @@ use ra_ide_db::{
symbol_index::{self, FileSymbol}, symbol_index::{self, FileSymbol},
LineIndexDatabase, LineIndexDatabase,
}; };
use ra_syntax::{SourceFile, TextRange, TextUnit}; use ra_syntax::{SourceFile, TextRange, TextSize};
use crate::display::ToNav; use crate::display::ToNav;
@ -265,7 +265,7 @@ impl Analysis {
/// Returns position of the matching brace (all types of braces are /// Returns position of the matching brace (all types of braces are
/// supported). /// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancelable<Option<TextUnit>> { pub fn matching_brace(&self, position: FilePosition) -> Cancelable<Option<TextSize>> {
self.with_db(|db| { self.with_db(|db| {
let parse = db.parse(position.file_id); let parse = db.parse(position.file_id);
let file = parse.tree(); let file = parse.tree();


@ -1,8 +1,8 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T}; use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextSize, T};
pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { pub fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
const BRACES: &[SyntaxKind] = const BRACES: &[SyntaxKind] =
&[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]]; &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
let (brace_node, brace_idx) = file let (brace_node, brace_idx) = file


@ -76,7 +76,7 @@ mod tests {
", ",
); );
let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); let nav = analysis.parent_module(pos).unwrap().pop().unwrap();
nav.assert_match("foo MODULE FileId(1) [0; 8)"); nav.assert_match("foo MODULE FileId(1) 0..8");
} }
#[test] #[test]
@ -95,7 +95,7 @@ mod tests {
", ",
); );
let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); let nav = analysis.parent_module(pos).unwrap().pop().unwrap();
nav.assert_match("foo MODULE FileId(1) [0; 8)"); nav.assert_match("foo MODULE FileId(1) 0..8");
} }
#[test] #[test]
@ -111,7 +111,7 @@ mod tests {
", ",
); );
let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); let nav = analysis.parent_module(pos).unwrap().pop().unwrap();
nav.assert_match("baz MODULE FileId(1) [32; 44)"); nav.assert_match("baz MODULE FileId(1) 32..44");
} }
#[test] #[test]


@ -214,8 +214,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"Foo STRUCT_DEF FileId(1) [5; 39) [12; 15) Other", "Foo STRUCT_DEF FileId(1) 5..39 12..15 Other",
&["FileId(1) [138; 141) StructLiteral"], &["FileId(1) 138..141 StructLiteral"],
); );
} }
@ -231,8 +231,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"Foo STRUCT_DEF FileId(1) [5; 18) [12; 15) Other", "Foo STRUCT_DEF FileId(1) 5..18 12..15 Other",
&["FileId(1) [54; 57) Other", "FileId(1) [71; 74) StructLiteral"], &["FileId(1) 54..57 Other", "FileId(1) 71..74 StructLiteral"],
); );
} }
@ -248,8 +248,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"Foo STRUCT_DEF FileId(1) [5; 21) [12; 15) Other", "Foo STRUCT_DEF FileId(1) 5..21 12..15 Other",
&["FileId(1) [81; 84) StructLiteral"], &["FileId(1) 81..84 StructLiteral"],
); );
} }
@ -266,8 +266,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"Foo STRUCT_DEF FileId(1) [5; 21) [12; 15) Other", "Foo STRUCT_DEF FileId(1) 5..21 12..15 Other",
&["FileId(1) [71; 74) StructLiteral"], &["FileId(1) 71..74 StructLiteral"],
); );
} }
@ -289,12 +289,12 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"i BIND_PAT FileId(1) [33; 34) Other Write", "i BIND_PAT FileId(1) 33..34 Other Write",
&[ &[
"FileId(1) [67; 68) Other Write", "FileId(1) 67..68 Other Write",
"FileId(1) [71; 72) Other Read", "FileId(1) 71..72 Other Read",
"FileId(1) [101; 102) Other Write", "FileId(1) 101..102 Other Write",
"FileId(1) [127; 128) Other Write", "FileId(1) 127..128 Other Write",
], ],
); );
} }
@ -315,8 +315,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"spam BIND_PAT FileId(1) [44; 48) Other", "spam BIND_PAT FileId(1) 44..48 Other",
&["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"], &["FileId(1) 71..75 Other Read", "FileId(1) 78..82 Other Read"],
); );
} }
@ -328,11 +328,7 @@ mod tests {
}"#; }"#;
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(refs, "i BIND_PAT FileId(1) 12..13 Other", &["FileId(1) 38..39 Other Read"]);
refs,
"i BIND_PAT FileId(1) [12; 13) Other",
&["FileId(1) [38; 39) Other Read"],
);
} }
#[test] #[test]
@ -343,11 +339,7 @@ mod tests {
}"#; }"#;
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(refs, "i BIND_PAT FileId(1) 12..13 Other", &["FileId(1) 38..39 Other Read"]);
refs,
"i BIND_PAT FileId(1) [12; 13) Other",
&["FileId(1) [38; 39) Other Read"],
);
} }
#[test] #[test]
@ -366,8 +358,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"spam RECORD_FIELD_DEF FileId(1) [66; 79) [70; 74) Other", "spam RECORD_FIELD_DEF FileId(1) 66..79 70..74 Other",
&["FileId(1) [152; 156) Other Read"], &["FileId(1) 152..156 Other Read"],
); );
} }
@ -382,7 +374,7 @@ mod tests {
"#; "#;
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result(refs, "f FN_DEF FileId(1) [88; 104) [91; 92) Other", &[]); check_result(refs, "f FN_DEF FileId(1) 88..104 91..92 Other", &[]);
} }
#[test] #[test]
@ -397,7 +389,7 @@ mod tests {
"#; "#;
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result(refs, "B ENUM_VARIANT FileId(1) [83; 84) [83; 84) Other", &[]); check_result(refs, "B ENUM_VARIANT FileId(1) 83..84 83..84 Other", &[]);
} }
#[test] #[test]
@ -438,8 +430,8 @@ mod tests {
let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); let refs = analysis.find_all_refs(pos, None).unwrap().unwrap();
check_result( check_result(
refs, refs,
"Foo STRUCT_DEF FileId(2) [16; 50) [27; 30) Other", "Foo STRUCT_DEF FileId(2) 16..50 27..30 Other",
&["FileId(1) [52; 55) StructLiteral", "FileId(3) [77; 80) StructLiteral"], &["FileId(1) 52..55 StructLiteral", "FileId(3) 77..80 StructLiteral"],
); );
} }
@ -466,11 +458,7 @@ mod tests {
let (analysis, pos) = analysis_and_position(code); let (analysis, pos) = analysis_and_position(code);
let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); let refs = analysis.find_all_refs(pos, None).unwrap().unwrap();
check_result( check_result(refs, "foo SOURCE_FILE FileId(2) 0..35 Other", &["FileId(1) 13..16 Other"]);
refs,
"foo SOURCE_FILE FileId(2) [0; 35) Other",
&["FileId(1) [13; 16) Other"],
);
} }
#[test] #[test]
@ -497,8 +485,8 @@ mod tests {
let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); let refs = analysis.find_all_refs(pos, None).unwrap().unwrap();
check_result( check_result(
refs, refs,
"Foo STRUCT_DEF FileId(3) [0; 41) [18; 21) Other", "Foo STRUCT_DEF FileId(3) 0..41 18..21 Other",
&["FileId(2) [20; 23) Other", "FileId(2) [46; 49) StructLiteral"], &["FileId(2) 20..23 Other", "FileId(2) 46..49 StructLiteral"],
); );
} }
@ -525,16 +513,16 @@ mod tests {
let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); let refs = analysis.find_all_refs(pos, None).unwrap().unwrap();
check_result( check_result(
refs, refs,
"quux FN_DEF FileId(1) [18; 34) [25; 29) Other", "quux FN_DEF FileId(1) 18..34 25..29 Other",
&["FileId(2) [16; 20) StructLiteral", "FileId(3) [16; 20) StructLiteral"], &["FileId(2) 16..20 StructLiteral", "FileId(3) 16..20 StructLiteral"],
); );
let refs = let refs =
analysis.find_all_refs(pos, Some(SearchScope::single_file(bar))).unwrap().unwrap(); analysis.find_all_refs(pos, Some(SearchScope::single_file(bar))).unwrap().unwrap();
check_result( check_result(
refs, refs,
"quux FN_DEF FileId(1) [18; 34) [25; 29) Other", "quux FN_DEF FileId(1) 18..34 25..29 Other",
&["FileId(3) [16; 20) StructLiteral"], &["FileId(3) 16..20 StructLiteral"],
); );
} }
@ -552,8 +540,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"m1 MACRO_CALL FileId(1) [9; 63) [46; 48) Other", "m1 MACRO_CALL FileId(1) 9..63 46..48 Other",
&["FileId(1) [96; 98) StructLiteral", "FileId(1) [114; 116) StructLiteral"], &["FileId(1) 96..98 StructLiteral", "FileId(1) 114..116 StructLiteral"],
); );
} }
@ -568,8 +556,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"i BIND_PAT FileId(1) [40; 41) Other Write", "i BIND_PAT FileId(1) 40..41 Other Write",
&["FileId(1) [59; 60) Other Write", "FileId(1) [63; 64) Other Read"], &["FileId(1) 59..60 Other Write", "FileId(1) 63..64 Other Read"],
); );
} }
@ -588,8 +576,8 @@ mod tests {
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(
refs, refs,
"f RECORD_FIELD_DEF FileId(1) [32; 38) [32; 33) Other", "f RECORD_FIELD_DEF FileId(1) 32..38 32..33 Other",
&["FileId(1) [96; 97) Other Read", "FileId(1) [117; 118) Other Write"], &["FileId(1) 96..97 Other Read", "FileId(1) 117..118 Other Write"],
); );
} }
@ -602,11 +590,7 @@ mod tests {
}"#; }"#;
let refs = get_all_refs(code); let refs = get_all_refs(code);
check_result( check_result(refs, "i BIND_PAT FileId(1) 36..37 Other", &["FileId(1) 51..52 Other Write"]);
refs,
"i BIND_PAT FileId(1) [36; 37) Other",
&["FileId(1) [51; 52) Other Write"],
);
} }
fn get_all_refs(text: &str) -> ReferenceSearchResult { fn get_all_refs(text: &str) -> ReferenceSearchResult {


@ -54,15 +54,12 @@ fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFil
ReferenceKind::StructFieldShorthandForField => { ReferenceKind::StructFieldShorthandForField => {
replacement_text.push_str(new_name); replacement_text.push_str(new_name);
replacement_text.push_str(": "); replacement_text.push_str(": ");
TextRange::from_to( TextRange::new(reference.file_range.range.start(), reference.file_range.range.start())
reference.file_range.range.start(),
reference.file_range.range.start(),
)
} }
ReferenceKind::StructFieldShorthandForLocal => { ReferenceKind::StructFieldShorthandForLocal => {
replacement_text.push_str(": "); replacement_text.push_str(": ");
replacement_text.push_str(new_name); replacement_text.push_str(new_name);
TextRange::from_to(reference.file_range.range.end(), reference.file_range.range.end()) TextRange::new(reference.file_range.range.end(), reference.file_range.range.end())
} }
_ => { _ => {
replacement_text.push_str(new_name); replacement_text.push_str(new_name);
@ -528,7 +525,7 @@ mod tests {
@r###" @r###"
Some( Some(
RangeInfo { RangeInfo {
range: [4; 7), range: 4..7,
info: SourceChange { info: SourceChange {
label: "rename", label: "rename",
source_file_edits: [ source_file_edits: [
@ -539,7 +536,7 @@ mod tests {
edit: TextEdit { edit: TextEdit {
atoms: [ atoms: [
AtomTextEdit { AtomTextEdit {
delete: [4; 7), delete: 4..7,
insert: "foo2", insert: "foo2",
}, },
], ],
@ -580,7 +577,7 @@ mod tests {
@r###" @r###"
Some( Some(
RangeInfo { RangeInfo {
range: [4; 7), range: 4..7,
info: SourceChange { info: SourceChange {
label: "rename", label: "rename",
source_file_edits: [ source_file_edits: [
@ -591,7 +588,7 @@ mod tests {
edit: TextEdit { edit: TextEdit {
atoms: [ atoms: [
AtomTextEdit { AtomTextEdit {
delete: [4; 7), delete: 4..7,
insert: "foo2", insert: "foo2",
}, },
], ],
@ -663,7 +660,7 @@ mod tests {
@r###" @r###"
Some( Some(
RangeInfo { RangeInfo {
range: [8; 11), range: 8..11,
info: SourceChange { info: SourceChange {
label: "rename", label: "rename",
source_file_edits: [ source_file_edits: [
@ -674,7 +671,7 @@ mod tests {
edit: TextEdit { edit: TextEdit {
atoms: [ atoms: [
AtomTextEdit { AtomTextEdit {
delete: [8; 11), delete: 8..11,
insert: "foo2", insert: "foo2",
}, },
], ],
@ -687,7 +684,7 @@ mod tests {
edit: TextEdit { edit: TextEdit {
atoms: [ atoms: [
AtomTextEdit { AtomTextEdit {
delete: [27; 30), delete: 27..30,
insert: "foo2", insert: "foo2",
}, },
], ],


@ -164,11 +164,11 @@ mod tests {
@r###" @r###"
[ [
Runnable { Runnable {
range: [1; 21), range: 1..21,
kind: Bin, kind: Bin,
}, },
Runnable { Runnable {
range: [22; 46), range: 22..46,
kind: Test { kind: Test {
test_id: Path( test_id: Path(
"test_foo", "test_foo",
@ -179,7 +179,7 @@ mod tests {
}, },
}, },
Runnable { Runnable {
range: [47; 81), range: 47..81,
kind: Test { kind: Test {
test_id: Path( test_id: Path(
"test_foo", "test_foo",
@ -211,13 +211,13 @@ mod tests {
@r###" @r###"
[ [
Runnable { Runnable {
range: [1; 59), range: 1..59,
kind: TestMod { kind: TestMod {
path: "test_mod", path: "test_mod",
}, },
}, },
Runnable { Runnable {
range: [28; 57), range: 28..57,
kind: Test { kind: Test {
test_id: Path( test_id: Path(
"test_mod::test_foo1", "test_mod::test_foo1",
@ -251,13 +251,13 @@ mod tests {
@r###" @r###"
[ [
Runnable { Runnable {
range: [23; 85), range: 23..85,
kind: TestMod { kind: TestMod {
path: "foo::test_mod", path: "foo::test_mod",
}, },
}, },
Runnable { Runnable {
range: [46; 79), range: 46..79,
kind: Test { kind: Test {
test_id: Path( test_id: Path(
"foo::test_mod::test_foo1", "foo::test_mod::test_foo1",
@ -293,13 +293,13 @@ mod tests {
@r###" @r###"
[ [
Runnable { Runnable {
range: [41; 115), range: 41..115,
kind: TestMod { kind: TestMod {
path: "foo::bar::test_mod", path: "foo::bar::test_mod",
}, },
}, },
Runnable { Runnable {
range: [68; 105), range: 68..105,
kind: Test { kind: Test {
test_id: Path( test_id: Path(
"foo::bar::test_mod::test_foo1", "foo::bar::test_mod::test_foo1",


@ -6,7 +6,7 @@
use ra_db::RelativePathBuf; use ra_db::RelativePathBuf;
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
use crate::{FileId, FilePosition, SourceRootId, TextUnit}; use crate::{FileId, FilePosition, SourceRootId, TextSize};
#[derive(Debug)] #[derive(Debug)]
pub struct SourceChange { pub struct SourceChange {
@ -104,7 +104,7 @@ pub enum FileSystemEdit {
pub(crate) struct SingleFileChange { pub(crate) struct SingleFileChange {
pub label: String, pub label: String,
pub edit: TextEdit, pub edit: TextEdit,
pub cursor_position: Option<TextUnit>, pub cursor_position: Option<TextSize>,
} }
impl SingleFileChange { impl SingleFileChange {


@ -61,16 +61,16 @@ impl HighlightedRangeStack {
let prev = self.stack.last_mut().unwrap(); let prev = self.stack.last_mut().unwrap();
let needs_flattening = !children.is_empty() let needs_flattening = !children.is_empty()
&& !prev.is_empty() && !prev.is_empty()
&& children.first().unwrap().range.is_subrange(&prev.last().unwrap().range); && prev.last().unwrap().range.contains_range(children.first().unwrap().range);
if !needs_flattening { if !needs_flattening {
prev.extend(children); prev.extend(children);
} else { } else {
let mut parent = prev.pop().unwrap(); let mut parent = prev.pop().unwrap();
for ele in children { for ele in children {
assert!(ele.range.is_subrange(&parent.range)); assert!(parent.range.contains_range(ele.range));
let mut cloned = parent.clone(); let mut cloned = parent.clone();
parent.range = TextRange::from_to(parent.range.start(), ele.range.start()); parent.range = TextRange::new(parent.range.start(), ele.range.start());
cloned.range = TextRange::from_to(ele.range.end(), cloned.range.end()); cloned.range = TextRange::new(ele.range.end(), cloned.range.end());
if !parent.range.is_empty() { if !parent.range.is_empty() {
prev.push(parent); prev.push(parent);
} }
@ -152,7 +152,7 @@ pub(crate) fn highlight(
}; };
// Element outside of the viewport, no need to highlight // Element outside of the viewport, no need to highlight
if range_to_highlight.intersection(&event_range).is_none() { if range_to_highlight.intersect(event_range).is_none() {
continue; continue;
} }
@ -309,7 +309,7 @@ fn macro_call_range(macro_call: &ast::MacroCall) -> Option<TextRange> {
} }
} }
Some(TextRange::from_to(range_start, range_end)) Some(TextRange::new(range_start, range_end))
} }
fn highlight_element( fn highlight_element(
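A brief sketch of the nesting check changed above: the child-side is_subrange(&parent) call becomes contains_range(child) on the parent, and the parent is still split with two TextRange::new calls (the offsets here are made up for illustration):

    use text_size::{TextRange, TextSize};

    fn main() {
        let parent = TextRange::new(TextSize::from(0u32), TextSize::from(10u32));
        let child = TextRange::new(TextSize::from(3u32), TextSize::from(5u32));
        // Old: child.is_subrange(&parent); new: receiver and argument swap roles.
        assert!(parent.contains_range(child));
        // The highlight stack splits the parent around the child, as in the hunk above.
        let before = TextRange::new(parent.start(), child.start());
        let after = TextRange::new(child.end(), parent.end());
        assert_eq!(before, TextRange::new(TextSize::from(0u32), TextSize::from(3u32)));
        assert_eq!(after, TextRange::new(TextSize::from(5u32), TextSize::from(10u32)));
    }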


@ -1,11 +1,9 @@
//! Renders a bit of code as HTML. //! Renders a bit of code as HTML.
use ra_db::SourceDatabase; use ra_db::SourceDatabase;
use ra_syntax::{AstNode, TextUnit}; use ra_syntax::{AstNode, TextRange, TextSize};
use crate::{FileId, RootDatabase}; use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
use super::highlight;
pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
let parse = db.parse(file_id); let parse = db.parse(file_id);
@ -23,17 +21,17 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
let ranges = highlight(db, file_id, None); let ranges = highlight(db, file_id, None);
let text = parse.tree().syntax().to_string(); let text = parse.tree().syntax().to_string();
let mut prev_pos = TextUnit::from(0); let mut prev_pos = TextSize::from(0);
let mut buf = String::new(); let mut buf = String::new();
buf.push_str(&STYLE); buf.push_str(&STYLE);
buf.push_str("<pre><code>"); buf.push_str("<pre><code>");
for range in &ranges { for range in &ranges {
if range.range.start() > prev_pos { if range.range.start() > prev_pos {
let curr = &text[prev_pos.to_usize()..range.range.start().to_usize()]; let curr = &text[TextRange::new(prev_pos, range.range.start())];
let text = html_escape(curr); let text = html_escape(curr);
buf.push_str(&text); buf.push_str(&text);
} }
let curr = &text[range.range.start().to_usize()..range.range.end().to_usize()]; let curr = &text[TextRange::new(range.range.start(), range.range.end())];
let class = range.highlight.to_string().replace('.', " "); let class = range.highlight.to_string().replace('.', " ");
let color = match (rainbow, range.binding_hash) { let color = match (rainbow, range.binding_hash) {
@ -47,7 +45,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
prev_pos = range.range.end(); prev_pos = range.range.end();
} }
// Add the remaining (non-highlighted) text // Add the remaining (non-highlighted) text
let curr = &text[prev_pos.to_usize()..]; let curr = &text[TextRange::new(prev_pos, TextSize::of(&text))];
let text = html_escape(curr); let text = html_escape(curr);
buf.push_str(&text); buf.push_str(&text);
buf.push_str("</code></pre>"); buf.push_str("</code></pre>");
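The HTML renderer above also stops converting offsets to usize by hand: a str can be indexed directly with a TextRange, and TextSize::of gives the length of a str. A minimal sketch mirroring those two calls (the sample text is an assumption):

    use text_size::{TextRange, TextSize};

    fn main() {
        let text = "let x = 92;";
        // &text[start.to_usize()..end.to_usize()] becomes &text[TextRange::new(start, end)].
        let range = TextRange::new(TextSize::from(4u32), TextSize::from(5u32));
        assert_eq!(&text[range], "x");
        // TextUnit::of_str(text) becomes TextSize::of(text).
        assert_eq!(TextSize::of(text), TextSize::from(11u32));
    }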


@ -126,7 +126,7 @@ fn test_ranges() {
// The "x" // The "x"
let highlights = &analysis let highlights = &analysis
.highlight_range(FileRange { file_id, range: TextRange::offset_len(82.into(), 1.into()) }) .highlight_range(FileRange { file_id, range: TextRange::at(82.into(), 1.into()) })
.unwrap(); .unwrap();
assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); assert_eq!(&highlights[0].highlight.to_string(), "field.declaration");


@ -5,7 +5,7 @@ use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
algo, AstNode, NodeOrToken, SourceFile, algo, AstNode, NodeOrToken, SourceFile,
SyntaxKind::{RAW_STRING, STRING}, SyntaxKind::{RAW_STRING, STRING},
SyntaxToken, TextRange, TextUnit, SyntaxToken, TextRange, TextSize,
}; };
pub use ra_db::FileId; pub use ra_db::FileId;
@ -66,13 +66,10 @@ fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<St
let len = len.min(node_len); let len = len.min(node_len);
// Ensure our slice is inside the actual string // Ensure our slice is inside the actual string
let end = if start + len < TextUnit::of_str(&text) { let end =
start + len if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start };
} else {
TextUnit::of_str(&text) - start
};
let text = &text[TextRange::from_to(start, end)]; let text = &text[TextRange::new(start, end)];
// Remove possible extra string quotes from the start // Remove possible extra string quotes from the start
// and the end of the string // and the end of the string
@ -112,20 +109,20 @@ mod tests {
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
SOURCE_FILE@[0; 11) SOURCE_FILE@0..11
FN_DEF@[0; 11) FN_DEF@0..11
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
WHITESPACE@[2; 3) " " WHITESPACE@2..3 " "
NAME@[3; 6) NAME@3..6
IDENT@[3; 6) "foo" IDENT@3..6 "foo"
PARAM_LIST@[6; 8) PARAM_LIST@6..8
L_PAREN@[6; 7) "(" L_PAREN@6..7 "("
R_PAREN@[7; 8) ")" R_PAREN@7..8 ")"
WHITESPACE@[8; 9) " " WHITESPACE@8..9 " "
BLOCK_EXPR@[9; 11) BLOCK_EXPR@9..11
BLOCK@[9; 11) BLOCK@9..11
L_CURLY@[9; 10) "{" L_CURLY@9..10 "{"
R_CURLY@[10; 11) "}" R_CURLY@10..11 "}"
"# "#
.trim() .trim()
); );
@ -145,37 +142,37 @@ fn test() {
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
SOURCE_FILE@[0; 60) SOURCE_FILE@0..60
FN_DEF@[0; 60) FN_DEF@0..60
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
WHITESPACE@[2; 3) " " WHITESPACE@2..3 " "
NAME@[3; 7) NAME@3..7
IDENT@[3; 7) "test" IDENT@3..7 "test"
PARAM_LIST@[7; 9) PARAM_LIST@7..9
L_PAREN@[7; 8) "(" L_PAREN@7..8 "("
R_PAREN@[8; 9) ")" R_PAREN@8..9 ")"
WHITESPACE@[9; 10) " " WHITESPACE@9..10 " "
BLOCK_EXPR@[10; 60) BLOCK_EXPR@10..60
BLOCK@[10; 60) BLOCK@10..60
L_CURLY@[10; 11) "{" L_CURLY@10..11 "{"
WHITESPACE@[11; 16) "\n " WHITESPACE@11..16 "\n "
EXPR_STMT@[16; 58) EXPR_STMT@16..58
MACRO_CALL@[16; 57) MACRO_CALL@16..57
PATH@[16; 22) PATH@16..22
PATH_SEGMENT@[16; 22) PATH_SEGMENT@16..22
NAME_REF@[16; 22) NAME_REF@16..22
IDENT@[16; 22) "assert" IDENT@16..22 "assert"
BANG@[22; 23) "!" BANG@22..23 "!"
TOKEN_TREE@[23; 57) TOKEN_TREE@23..57
L_PAREN@[23; 24) "(" L_PAREN@23..24 "("
STRING@[24; 52) "\"\n fn foo() {\n ..." STRING@24..52 "\"\n fn foo() {\n ..."
COMMA@[52; 53) "," COMMA@52..53 ","
WHITESPACE@[53; 54) " " WHITESPACE@53..54 " "
STRING@[54; 56) "\"\"" STRING@54..56 "\"\""
R_PAREN@[56; 57) ")" R_PAREN@56..57 ")"
SEMICOLON@[57; 58) ";" SEMICOLON@57..58 ";"
WHITESPACE@[58; 59) "\n" WHITESPACE@58..59 "\n"
R_CURLY@[59; 60) "}" R_CURLY@59..60 "}"
"# "#
.trim() .trim()
); );
@ -189,19 +186,19 @@ SOURCE_FILE@[0; 60)
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
FN_DEF@[0; 11) FN_DEF@0..11
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
WHITESPACE@[2; 3) " " WHITESPACE@2..3 " "
NAME@[3; 6) NAME@3..6
IDENT@[3; 6) "foo" IDENT@3..6 "foo"
PARAM_LIST@[6; 8) PARAM_LIST@6..8
L_PAREN@[6; 7) "(" L_PAREN@6..7 "("
R_PAREN@[7; 8) ")" R_PAREN@7..8 ")"
WHITESPACE@[8; 9) " " WHITESPACE@8..9 " "
BLOCK_EXPR@[9; 11) BLOCK_EXPR@9..11
BLOCK@[9; 11) BLOCK@9..11
L_CURLY@[9; 10) "{" L_CURLY@9..10 "{"
R_CURLY@[10; 11) "}" R_CURLY@10..11 "}"
"# "#
.trim() .trim()
); );
@ -220,21 +217,21 @@ FN_DEF@[0; 11)
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
EXPR_STMT@[16; 58) EXPR_STMT@16..58
MACRO_CALL@[16; 57) MACRO_CALL@16..57
PATH@[16; 22) PATH@16..22
PATH_SEGMENT@[16; 22) PATH_SEGMENT@16..22
NAME_REF@[16; 22) NAME_REF@16..22
IDENT@[16; 22) "assert" IDENT@16..22 "assert"
BANG@[22; 23) "!" BANG@22..23 "!"
TOKEN_TREE@[23; 57) TOKEN_TREE@23..57
L_PAREN@[23; 24) "(" L_PAREN@23..24 "("
STRING@[24; 52) "\"\n fn foo() {\n ..." STRING@24..52 "\"\n fn foo() {\n ..."
COMMA@[52; 53) "," COMMA@52..53 ","
WHITESPACE@[53; 54) " " WHITESPACE@53..54 " "
STRING@[54; 56) "\"\"" STRING@54..56 "\"\""
R_PAREN@[56; 57) ")" R_PAREN@56..57 ")"
SEMICOLON@[57; 58) ";" SEMICOLON@57..58 ";"
"# "#
.trim() .trim()
); );
@ -257,21 +254,21 @@ fn bar() {
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
SOURCE_FILE@[0; 12) SOURCE_FILE@0..12
FN_DEF@[0; 12) FN_DEF@0..12
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
WHITESPACE@[2; 3) " " WHITESPACE@2..3 " "
NAME@[3; 6) NAME@3..6
IDENT@[3; 6) "foo" IDENT@3..6 "foo"
PARAM_LIST@[6; 8) PARAM_LIST@6..8
L_PAREN@[6; 7) "(" L_PAREN@6..7 "("
R_PAREN@[7; 8) ")" R_PAREN@7..8 ")"
WHITESPACE@[8; 9) " " WHITESPACE@8..9 " "
BLOCK_EXPR@[9; 12) BLOCK_EXPR@9..12
BLOCK@[9; 12) BLOCK@9..12
L_CURLY@[9; 10) "{" L_CURLY@9..10 "{"
WHITESPACE@[10; 11) "\n" WHITESPACE@10..11 "\n"
R_CURLY@[11; 12) "}" R_CURLY@11..12 "}"
"# "#
.trim() .trim()
); );
@ -292,21 +289,21 @@ fn bar() {
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
SOURCE_FILE@[0; 12) SOURCE_FILE@0..12
FN_DEF@[0; 12) FN_DEF@0..12
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
WHITESPACE@[2; 3) " " WHITESPACE@2..3 " "
NAME@[3; 6) NAME@3..6
IDENT@[3; 6) "foo" IDENT@3..6 "foo"
PARAM_LIST@[6; 8) PARAM_LIST@6..8
L_PAREN@[6; 7) "(" L_PAREN@6..7 "("
R_PAREN@[7; 8) ")" R_PAREN@7..8 ")"
WHITESPACE@[8; 9) " " WHITESPACE@8..9 " "
BLOCK_EXPR@[9; 12) BLOCK_EXPR@9..12
BLOCK@[9; 12) BLOCK@9..12
L_CURLY@[9; 10) "{" L_CURLY@9..10 "{"
WHITESPACE@[10; 11) "\n" WHITESPACE@10..11 "\n"
R_CURLY@[11; 12) "}" R_CURLY@11..12 "}"
"# "#
.trim() .trim()
); );
@ -326,36 +323,36 @@ fn bar() {
assert_eq_text!( assert_eq_text!(
syn.trim(), syn.trim(),
r#" r#"
SOURCE_FILE@[0; 25) SOURCE_FILE@0..25
FN_DEF@[0; 12) FN_DEF@0..12
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
WHITESPACE@[2; 3) " " WHITESPACE@2..3 " "
NAME@[3; 6) NAME@3..6
IDENT@[3; 6) "foo" IDENT@3..6 "foo"
PARAM_LIST@[6; 8) PARAM_LIST@6..8
L_PAREN@[6; 7) "(" L_PAREN@6..7 "("
R_PAREN@[7; 8) ")" R_PAREN@7..8 ")"
WHITESPACE@[8; 9) " " WHITESPACE@8..9 " "
BLOCK_EXPR@[9; 12) BLOCK_EXPR@9..12
BLOCK@[9; 12) BLOCK@9..12
L_CURLY@[9; 10) "{" L_CURLY@9..10 "{"
WHITESPACE@[10; 11) "\n" WHITESPACE@10..11 "\n"
R_CURLY@[11; 12) "}" R_CURLY@11..12 "}"
WHITESPACE@[12; 13) "\n" WHITESPACE@12..13 "\n"
FN_DEF@[13; 25) FN_DEF@13..25
FN_KW@[13; 15) "fn" FN_KW@13..15 "fn"
WHITESPACE@[15; 16) " " WHITESPACE@15..16 " "
NAME@[16; 19) NAME@16..19
IDENT@[16; 19) "bar" IDENT@16..19 "bar"
PARAM_LIST@[19; 21) PARAM_LIST@19..21
L_PAREN@[19; 20) "(" L_PAREN@19..20 "("
R_PAREN@[20; 21) ")" R_PAREN@20..21 ")"
WHITESPACE@[21; 22) " " WHITESPACE@21..22 " "
BLOCK_EXPR@[22; 25) BLOCK_EXPR@22..25
BLOCK@[22; 25) BLOCK@22..25
L_CURLY@[22; 23) "{" L_CURLY@22..23 "{"
WHITESPACE@[23; 24) "\n" WHITESPACE@23..24 "\n"
R_CURLY@[24; 25) "}" R_CURLY@24..25 "}"
"# "#
.trim() .trim()
); );


@ -1,11 +1,11 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_syntax::{SourceFile, TextUnit}; use ra_syntax::{SourceFile, TextSize};
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
pub use test_utils::*; pub use test_utils::*;
pub fn check_action<F: Fn(&SourceFile, TextUnit) -> Option<TextEdit>>( pub fn check_action<F: Fn(&SourceFile, TextSize) -> Option<TextEdit>>(
before: &str, before: &str,
after: &str, after: &str,
f: F, f: F,


@ -21,7 +21,7 @@ use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
algo::find_node_at_offset, algo::find_node_at_offset,
ast::{self, AstToken}, ast::{self, AstToken},
AstNode, SourceFile, TextRange, TextUnit, AstNode, SourceFile, TextRange, TextSize,
}; };
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
@ -45,7 +45,7 @@ pub(crate) fn on_char_typed(
fn on_char_typed_inner( fn on_char_typed_inner(
file: &SourceFile, file: &SourceFile,
offset: TextUnit, offset: TextSize,
char_typed: char, char_typed: char,
) -> Option<SingleFileChange> { ) -> Option<SingleFileChange> {
assert!(TRIGGER_CHARS.contains(char_typed)); assert!(TRIGGER_CHARS.contains(char_typed));
@ -60,7 +60,7 @@ fn on_char_typed_inner(
/// Returns an edit which should be applied after `=` was typed. Primarily, /// Returns an edit which should be applied after `=` was typed. Primarily,
/// this works when adding `let =`. /// this works when adding `let =`.
// FIXME: use a snippet completion instead of this hack here. // FIXME: use a snippet completion instead of this hack here.
fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> { fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<SingleFileChange> {
assert_eq!(file.syntax().text().char_at(offset), Some('=')); assert_eq!(file.syntax().text().char_at(offset), Some('='));
let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
if let_stmt.semicolon_token().is_some() { if let_stmt.semicolon_token().is_some() {
@ -86,7 +86,7 @@ fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange>
} }
/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> { fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option<SingleFileChange> {
assert_eq!(file.syntax().text().char_at(offset), Some('.')); assert_eq!(file.syntax().text().char_at(offset), Some('.'));
let whitespace = let whitespace =
file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?; file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?;
@ -96,34 +96,29 @@ fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange>
let newline = text.rfind('\n')?; let newline = text.rfind('\n')?;
&text[newline + 1..] &text[newline + 1..]
}; };
let current_indent_len = TextUnit::of_str(current_indent); let current_indent_len = TextSize::of(current_indent);
// Make sure dot is a part of call chain // Make sure dot is a part of call chain
let field_expr = ast::FieldExpr::cast(whitespace.syntax().parent())?; let field_expr = ast::FieldExpr::cast(whitespace.syntax().parent())?;
let prev_indent = leading_indent(field_expr.syntax())?; let prev_indent = leading_indent(field_expr.syntax())?;
let target_indent = format!(" {}", prev_indent); let target_indent = format!(" {}", prev_indent);
let target_indent_len = TextUnit::of_str(&target_indent); let target_indent_len = TextSize::of(&target_indent);
if current_indent_len == target_indent_len { if current_indent_len == target_indent_len {
return None; return None;
} }
Some(SingleFileChange { Some(SingleFileChange {
label: "reindent dot".to_string(), label: "reindent dot".to_string(),
edit: TextEdit::replace( edit: TextEdit::replace(TextRange::new(offset - current_indent_len, offset), target_indent),
TextRange::from_to(offset - current_indent_len, offset), cursor_position: Some(offset + target_indent_len - current_indent_len + TextSize::of('.')),
target_indent,
),
cursor_position: Some(
offset + target_indent_len - current_indent_len + TextUnit::of_char('.'),
),
}) })
} }
/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }` /// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`
fn on_arrow_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> { fn on_arrow_typed(file: &SourceFile, offset: TextSize) -> Option<SingleFileChange> {
let file_text = file.syntax().text(); let file_text = file.syntax().text();
assert_eq!(file_text.char_at(offset), Some('>')); assert_eq!(file_text.char_at(offset), Some('>'));
let after_arrow = offset + TextUnit::of_char('>'); let after_arrow = offset + TextSize::of('>');
if file_text.char_at(after_arrow) != Some('{') { if file_text.char_at(after_arrow) != Some('{') {
return None; return None;
} }

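The typing handlers above show the core rename: `TextUnit::of_str` and `TextUnit::of_char` collapse into the single generic `TextSize::of`, and `TextRange::from_to` becomes `TextRange::new`. A standalone sketch of the `on_dot_typed` arithmetic (assuming text-size 1.0 as used here; the offsets are made up):

    use text_size::{TextRange, TextSize};

    fn main() {
        let prev_indent = "    ";
        let target_indent = format!("    {}", prev_indent);

        // One generic constructor instead of of_str / of_char:
        let target_len = TextSize::of(target_indent.as_str());
        let dot = TextSize::of('.');

        // Offsets and lengths combine with plain arithmetic, as in on_dot_typed.
        let offset = TextSize::from(40);
        let current_len = TextSize::of(prev_indent);
        let replace_range = TextRange::new(offset - current_len, offset);
        let new_cursor = offset + target_len - current_len + dot;

        assert_eq!(replace_range.len(), current_len);
        assert_eq!(u32::from(new_cursor), 45);
    }
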
View file

@ -7,7 +7,7 @@ use ra_syntax::{
ast::{self, AstToken}, ast::{self, AstToken},
AstNode, SmolStr, SourceFile, AstNode, SmolStr, SourceFile,
SyntaxKind::*, SyntaxKind::*,
SyntaxToken, TextUnit, TokenAtOffset, SyntaxToken, TextSize, TokenAtOffset,
}; };
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
@ -28,7 +28,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
let prefix = comment.prefix(); let prefix = comment.prefix();
let comment_range = comment.syntax().text_range(); let comment_range = comment.syntax().text_range();
if position.offset < comment_range.start() + TextUnit::of_str(prefix) { if position.offset < comment_range.start() + TextSize::of(prefix) {
return None; return None;
} }
@ -39,7 +39,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
let indent = node_indent(&file, comment.syntax())?; let indent = node_indent(&file, comment.syntax())?;
let inserted = format!("\n{}{} ", indent, prefix); let inserted = format!("\n{}{} ", indent, prefix);
let cursor_position = position.offset + TextUnit::of_str(&inserted); let cursor_position = position.offset + TextSize::of(&inserted);
let edit = TextEdit::insert(position.offset, inserted); let edit = TextEdit::insert(position.offset, inserted);
Some( Some(

View file

@ -1,14 +1,14 @@
//! `LineIndex` maps flat `TextUnit` offsets into `(Line, Column)` //! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
//! representation. //! representation.
use std::iter; use std::iter;
use ra_syntax::{TextRange, TextUnit}; use ra_syntax::{TextRange, TextSize};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use superslice::Ext; use superslice::Ext;
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex { pub struct LineIndex {
pub(crate) newlines: Vec<TextUnit>, pub(crate) newlines: Vec<TextSize>,
pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>, pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
} }
@ -22,12 +22,12 @@ pub struct LineCol {
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct Utf16Char { pub(crate) struct Utf16Char {
pub(crate) start: TextUnit, pub(crate) start: TextSize,
pub(crate) end: TextUnit, pub(crate) end: TextSize,
} }
impl Utf16Char { impl Utf16Char {
fn len(&self) -> TextUnit { fn len(&self) -> TextSize {
self.end - self.start self.end - self.start
} }
} }
@ -42,7 +42,8 @@ impl LineIndex {
let mut curr_col = 0.into(); let mut curr_col = 0.into();
let mut line = 0; let mut line = 0;
for c in text.chars() { for c in text.chars() {
curr_row += TextUnit::of_char(c); let c_len = TextSize::of(c);
curr_row += c_len;
if c == '\n' { if c == '\n' {
newlines.push(curr_row); newlines.push(curr_row);
@ -58,12 +59,11 @@ impl LineIndex {
continue; continue;
} }
let char_len = TextUnit::of_char(c); if !c.is_ascii() {
if char_len > TextUnit::from_usize(1) { utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len });
utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + char_len });
} }
curr_col += char_len; curr_col += c_len;
} }
// Save any utf-16 characters seen in the last line // Save any utf-16 characters seen in the last line
@ -74,7 +74,7 @@ impl LineIndex {
LineIndex { newlines, utf16_lines } LineIndex { newlines, utf16_lines }
} }
pub fn line_col(&self, offset: TextUnit) -> LineCol { pub fn line_col(&self, offset: TextSize) -> LineCol {
let line = self.newlines.upper_bound(&offset) - 1; let line = self.newlines.upper_bound(&offset) - 1;
let line_start_offset = self.newlines[line]; let line_start_offset = self.newlines[line];
let col = offset - line_start_offset; let col = offset - line_start_offset;
@ -82,7 +82,7 @@ impl LineIndex {
LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 }
} }
pub fn offset(&self, line_col: LineCol) -> TextUnit { pub fn offset(&self, line_col: LineCol) -> TextSize {
//FIXME: return Result //FIXME: return Result
let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16);
self.newlines[line_col.line as usize] + col self.newlines[line_col.line as usize] + col
@ -97,35 +97,31 @@ impl LineIndex {
all.clone() all.clone()
.zip(all.skip(1)) .zip(all.skip(1))
.map(|(lo, hi)| TextRange::from_to(lo, hi)) .map(|(lo, hi)| TextRange::new(lo, hi))
.filter(|it| !it.is_empty()) .filter(|it| !it.is_empty())
} }
fn utf8_to_utf16_col(&self, line: u32, col: TextUnit) -> usize { fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
let mut res: usize = col.into();
if let Some(utf16_chars) = self.utf16_lines.get(&line) { if let Some(utf16_chars) = self.utf16_lines.get(&line) {
let mut correction = 0;
for c in utf16_chars { for c in utf16_chars {
if col >= c.end { if c.end <= col {
correction += c.len().to_usize() - 1; res -= usize::from(c.len()) - 1;
} else { } else {
// From here on, all utf16 characters come *after* the character we are mapping, // From here on, all utf16 characters come *after* the character we are mapping,
// so we don't need to take them into account // so we don't need to take them into account
break; break;
} }
} }
col.to_usize() - correction
} else {
col.to_usize()
} }
res
} }
fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit { fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
let mut col: TextUnit = col.into();
if let Some(utf16_chars) = self.utf16_lines.get(&line) { if let Some(utf16_chars) = self.utf16_lines.get(&line) {
for c in utf16_chars { for c in utf16_chars {
if col >= c.start { if col >= u32::from(c.start) {
col += c.len() - TextUnit::from_usize(1); col += u32::from(c.len()) - 1;
} else { } else {
// From here on, all utf16 characters come *after* the character we are mapping, // From here on, all utf16 characters come *after* the character we are mapping,
// so we don't need to take them into account // so we don't need to take them into account
@ -134,12 +130,12 @@ impl LineIndex {
} }
} }
col col.into()
} }
} }
#[cfg(test)] #[cfg(test)]
mod test_line_index { mod tests {
use super::*; use super::*;
#[test] #[test]
@ -200,10 +196,10 @@ const C: char = 'メ';
assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
// UTF-16 to UTF-8, no changes // UTF-16 to UTF-8, no changes
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15)); assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
// UTF-16 to UTF-8 // UTF-16 to UTF-8
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21)); assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
} }
#[test] #[test]
@ -228,18 +224,18 @@ const C: char = \"メ メ\";
assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
// UTF-16 to UTF-8 // UTF-16 to UTF-8
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15)); assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20)); assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20));
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23)); assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(23));
assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15)); assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
} }
#[test] #[test]
fn test_splitlines() { fn test_splitlines() {
fn r(lo: u32, hi: u32) -> TextRange { fn r(lo: u32, hi: u32) -> TextRange {
TextRange::from_to(lo.into(), hi.into()) TextRange::new(lo.into(), hi.into())
} }
let text = "a\nbb\nccc\n"; let text = "a\nbb\nccc\n";

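The interesting change in `LineIndex::new` is that the old width test `TextUnit::of_char(c) > TextUnit::from_usize(1)` becomes `!c.is_ascii()`; the two are equivalent because a char is ASCII exactly when it occupies one UTF-8 byte. A standalone check of that equivalence (text-size 1.0 assumed; not part of the patch):

    use text_size::TextSize;

    fn main() {
        // Old check: `TextUnit::of_char(c) > TextUnit::from_usize(1)`.
        // New check: `!c.is_ascii()`. Both mean "this char is more than one
        // byte of UTF-8", i.e. it needs a Utf16Char entry in the LineIndex.
        for c in "aßメ💩".chars() {
            let multi_byte = usize::from(TextSize::of(c)) > 1;
            assert_eq!(multi_byte, !c.is_ascii());
        }
    }
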
View file

@ -1,20 +1,22 @@
//! Code actions can specify desirable final position of the cursor. //! Code actions can specify desirable final position of the cursor.
//! //!
//! The position is specified as a `TextUnit` in the final file. We need to send //! The position is specified as a `TextSize` in the final file. We need to send
//! it in `(Line, Column)` coordinate though. However, we only have a LineIndex //! it in `(Line, Column)` coordinate though. However, we only have a LineIndex
//! for a file pre-edit! //! for a file pre-edit!
//! //!
//! Code in this module applies this "to (Line, Column) after edit" //! Code in this module applies this "to (Line, Column) after edit"
//! transformation. //! transformation.
use ra_syntax::{TextRange, TextUnit}; use std::convert::TryInto;
use ra_syntax::{TextRange, TextSize};
use ra_text_edit::{AtomTextEdit, TextEdit}; use ra_text_edit::{AtomTextEdit, TextEdit};
use crate::line_index::{LineCol, LineIndex, Utf16Char}; use crate::line_index::{LineCol, LineIndex, Utf16Char};
pub fn translate_offset_with_edit( pub fn translate_offset_with_edit(
line_index: &LineIndex, line_index: &LineIndex,
offset: TextUnit, offset: TextSize,
text_edit: &TextEdit, text_edit: &TextEdit,
) -> LineCol { ) -> LineCol {
let mut state = Edits::from_text_edit(&text_edit); let mut state = Edits::from_text_edit(&text_edit);
@ -84,7 +86,7 @@ pub fn translate_offset_with_edit(
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
enum Step { enum Step {
Newline(TextUnit), Newline(TextSize),
Utf16Char(TextRange), Utf16Char(TextRange),
} }
@ -92,7 +94,7 @@ enum Step {
struct LineIndexStepIter<'a> { struct LineIndexStepIter<'a> {
line_index: &'a LineIndex, line_index: &'a LineIndex,
next_newline_idx: usize, next_newline_idx: usize,
utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, utf16_chars: Option<(TextSize, std::slice::Iter<'a, Utf16Char>)>,
} }
impl LineIndexStepIter<'_> { impl LineIndexStepIter<'_> {
@ -111,7 +113,7 @@ impl Iterator for LineIndexStepIter<'_> {
.as_mut() .as_mut()
.and_then(|(newline, x)| { .and_then(|(newline, x)| {
let x = x.next()?; let x = x.next()?;
Some(Step::Utf16Char(TextRange::from_to(*newline + x.start, *newline + x.end))) Some(Step::Utf16Char(TextRange::new(*newline + x.start, *newline + x.end)))
}) })
.or_else(|| { .or_else(|| {
let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?; let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?;
@ -129,7 +131,7 @@ impl Iterator for LineIndexStepIter<'_> {
#[derive(Debug)] #[derive(Debug)]
struct OffsetStepIter<'a> { struct OffsetStepIter<'a> {
text: &'a str, text: &'a str,
offset: TextUnit, offset: TextSize,
} }
impl Iterator for OffsetStepIter<'_> { impl Iterator for OffsetStepIter<'_> {
@ -139,16 +141,17 @@ impl Iterator for OffsetStepIter<'_> {
.text .text
.char_indices() .char_indices()
.filter_map(|(i, c)| { .filter_map(|(i, c)| {
let i: TextSize = i.try_into().unwrap();
let char_len = TextSize::of(c);
if c == '\n' { if c == '\n' {
let next_offset = self.offset + TextUnit::from_usize(i + 1); let next_offset = self.offset + i + char_len;
let next = Step::Newline(next_offset); let next = Step::Newline(next_offset);
Some((next, next_offset)) Some((next, next_offset))
} else { } else {
let char_len = TextUnit::of_char(c); if !c.is_ascii() {
if char_len > TextUnit::from_usize(1) { let start = self.offset + i;
let start = self.offset + TextUnit::from_usize(i);
let end = start + char_len; let end = start + char_len;
let next = Step::Utf16Char(TextRange::from_to(start, end)); let next = Step::Utf16Char(TextRange::new(start, end));
let next_offset = end; let next_offset = end;
Some((next, next_offset)) Some((next, next_offset))
} else { } else {
@ -157,7 +160,7 @@ impl Iterator for OffsetStepIter<'_> {
} }
}) })
.next()?; .next()?;
let next_idx = (next_offset - self.offset).to_usize(); let next_idx: usize = (next_offset - self.offset).into();
self.text = &self.text[next_idx..]; self.text = &self.text[next_idx..];
self.offset = next_offset; self.offset = next_offset;
Some(next) Some(next)
@ -195,7 +198,7 @@ impl<'a> Edits<'a> {
match self.edits.split_first() { match self.edits.split_first() {
Some((next, rest)) => { Some((next, rest)) => {
let delete = self.translate_range(next.delete); let delete = self.translate_range(next.delete);
let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64; let diff = next.insert.len() as i64 - usize::from(next.delete.len()) as i64;
self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff }); self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff });
self.edits = rest; self.edits = rest;
} }
@ -244,15 +247,15 @@ impl<'a> Edits<'a> {
} else { } else {
let start = self.translate(range.start()); let start = self.translate(range.start());
let end = self.translate(range.end()); let end = self.translate(range.end());
TextRange::from_to(start, end) TextRange::new(start, end)
} }
} }
fn translate(&self, x: TextUnit) -> TextUnit { fn translate(&self, x: TextSize) -> TextSize {
if self.acc_diff == 0 { if self.acc_diff == 0 {
x x
} else { } else {
TextUnit::from((x.to_usize() as i64 + self.acc_diff) as u32) TextSize::from((usize::from(x) as i64 + self.acc_diff) as u32)
} }
} }
@ -271,29 +274,29 @@ impl<'a> Edits<'a> {
#[derive(Debug)] #[derive(Debug)]
struct RunningLineCol { struct RunningLineCol {
line: u32, line: u32,
last_newline: TextUnit, last_newline: TextSize,
col_adjust: TextUnit, col_adjust: TextSize,
} }
impl RunningLineCol { impl RunningLineCol {
fn new() -> RunningLineCol { fn new() -> RunningLineCol {
RunningLineCol { line: 0, last_newline: TextUnit::from(0), col_adjust: TextUnit::from(0) } RunningLineCol { line: 0, last_newline: TextSize::from(0), col_adjust: TextSize::from(0) }
} }
fn to_line_col(&self, offset: TextUnit) -> LineCol { fn to_line_col(&self, offset: TextSize) -> LineCol {
LineCol { LineCol {
line: self.line, line: self.line,
col_utf16: ((offset - self.last_newline) - self.col_adjust).into(), col_utf16: ((offset - self.last_newline) - self.col_adjust).into(),
} }
} }
fn add_line(&mut self, newline: TextUnit) { fn add_line(&mut self, newline: TextSize) {
self.line += 1; self.line += 1;
self.last_newline = newline; self.last_newline = newline;
self.col_adjust = TextUnit::from(0); self.col_adjust = TextSize::from(0);
} }
fn adjust_col(&mut self, range: TextRange) { fn adjust_col(&mut self, range: TextRange) {
self.col_adjust += range.len() - TextUnit::from(1); self.col_adjust += range.len() - TextSize::from(1);
} }
} }

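With `from_usize`/`to_usize` gone, this file settles on the standard conversion traits: `usize -> TextSize` goes through `TryInto` (the value may not fit in u32), while `TextSize -> usize` uses plain `From`/`Into`. A standalone round trip in the style of `OffsetStepIter` (text-size 1.0 assumed; not part of the patch):

    use std::convert::TryInto;
    use text_size::{TextRange, TextSize};

    fn main() {
        let text = "a💩b";
        for (i, c) in text.char_indices() {
            // Byte index -> TextSize is fallible, so try_into...
            let start: TextSize = i.try_into().unwrap();
            let range = TextRange::at(start, TextSize::of(c));
            // ...while TextSize -> usize is an ordinary From/Into conversion.
            let bytes = &text.as_bytes()[usize::from(range.start())..usize::from(range.end())];
            assert_eq!(bytes.len(), c.len_utf8());
        }
    }
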
View file

@ -4,13 +4,13 @@
//! get a super-set of matches. Then, we confirm each match using precise //! get a super-set of matches. Then, we confirm each match using precise
//! name resolution. //! name resolution.
use std::mem; use std::{convert::TryInto, mem};
use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility}; use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility};
use once_cell::unsync::Lazy; use once_cell::unsync::Lazy;
use ra_db::{FileId, FileRange, SourceDatabaseExt}; use ra_db::{FileId, FileRange, SourceDatabaseExt};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::{ast, match_ast, AstNode, TextRange, TextUnit}; use ra_syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use test_utils::tested_by; use test_utils::tested_by;
@ -85,7 +85,7 @@ impl SearchScope {
match (r1, r2) { match (r1, r2) {
(None, r) | (r, None) => Some(r), (None, r) | (r, None) => Some(r),
(Some(r1), Some(r2)) => { (Some(r1), Some(r2)) => {
let r = r1.intersection(&r2)?; let r = r1.intersect(r2)?;
Some(Some(r)) Some(Some(r))
} }
} }
@ -201,13 +201,13 @@ impl Definition {
for (file_id, search_range) in search_scope { for (file_id, search_range) in search_scope {
let text = db.file_text(file_id); let text = db.file_text(file_id);
let search_range = let search_range =
search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text))); search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str())));
let sema = Semantics::new(db); let sema = Semantics::new(db);
let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
for (idx, _) in text.match_indices(pat) { for (idx, _) in text.match_indices(pat) {
let offset = TextUnit::from_usize(idx); let offset: TextSize = idx.try_into().unwrap();
if !search_range.contains_inclusive(offset) { if !search_range.contains_inclusive(offset) {
tested_by!(search_filters_by_range; force); tested_by!(search_filters_by_range; force);
continue; continue;

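Two renames to keep in mind when reading this hunk: `TextRange::offset_len(0.into(), len)` becomes the clearer `TextRange::up_to(len)`, and `r1.intersection(&r2)` becomes `r1.intersect(r2)`, taking the other range by value. A standalone sketch (text-size 1.0 assumed; not part of the patch):

    use text_size::{TextRange, TextSize};

    fn main() {
        let text = "fn main() {}";

        // Whole-file range: up_to(end) is 0..end.
        let whole = TextRange::up_to(TextSize::of(text));
        assert_eq!(whole, TextRange::new(0.into(), TextSize::of(text)));

        // intersect takes the other range by value and returns Option<TextRange>.
        let a = TextRange::new(2.into(), 9.into());
        let b = TextRange::new(7.into(), 12.into());
        assert_eq!(a.intersect(b), Some(TextRange::new(7.into(), 9.into())));

        // contains_inclusive accepts the end offset as well.
        assert!(whole.contains_inclusive(TextSize::of(text)));
    }
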
View file

@ -5,7 +5,7 @@ use ra_syntax::{
ast::{self, make::tokens::doc_comment}, ast::{self, make::tokens::doc_comment},
tokenize, AstToken, Parse, SmolStr, SyntaxKind, tokenize, AstToken, Parse, SmolStr, SyntaxKind,
SyntaxKind::*, SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextUnit, Token as RawToken, T, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
}; };
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use tt::buffer::{Cursor, TokenBuffer}; use tt::buffer::{Cursor, TokenBuffer};
@ -99,11 +99,11 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
let mut conv = RawConvertor { let mut conv = RawConvertor {
text, text,
offset: TextUnit::default(), offset: TextSize::default(),
inner: tokens.iter(), inner: tokens.iter(),
id_alloc: TokenIdAlloc { id_alloc: TokenIdAlloc {
map: Default::default(), map: Default::default(),
global_offset: TextUnit::default(), global_offset: TextSize::default(),
next_id: 0, next_id: 0,
}, },
}; };
@ -227,7 +227,7 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
struct TokenIdAlloc { struct TokenIdAlloc {
map: TokenMap, map: TokenMap,
global_offset: TextUnit, global_offset: TextSize,
next_id: u32, next_id: u32,
} }
@ -266,7 +266,7 @@ impl TokenIdAlloc {
/// A Raw Token (straight from the lexer) convertor /// A Raw Token (straight from the lexer) convertor
struct RawConvertor<'a> { struct RawConvertor<'a> {
text: &'a str, text: &'a str,
offset: TextUnit, offset: TextSize,
id_alloc: TokenIdAlloc, id_alloc: TokenIdAlloc,
inner: std::slice::Iter<'a, RawToken>, inner: std::slice::Iter<'a, RawToken>,
} }
@ -314,7 +314,7 @@ trait TokenConvertor {
} }
result.push(if k.is_punct() { result.push(if k.is_punct() {
assert_eq!(range.len().to_usize(), 1); assert_eq!(range.len(), TextSize::of('.'));
let delim = match k { let delim = match k {
T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
@ -381,8 +381,8 @@ trait TokenConvertor {
k if k.is_keyword() => make_leaf!(Ident), k if k.is_keyword() => make_leaf!(Ident),
k if k.is_literal() => make_leaf!(Literal), k if k.is_literal() => make_leaf!(Literal),
LIFETIME => { LIFETIME => {
let char_unit = TextUnit::from_usize(1); let char_unit = TextSize::of('\'');
let r = TextRange::offset_len(range.start(), char_unit); let r = TextRange::at(range.start(), char_unit);
let apostrophe = tt::Leaf::from(tt::Punct { let apostrophe = tt::Leaf::from(tt::Punct {
char: '\'', char: '\'',
spacing: tt::Spacing::Joint, spacing: tt::Spacing::Joint,
@ -390,8 +390,7 @@ trait TokenConvertor {
}); });
result.push(apostrophe.into()); result.push(apostrophe.into());
let r = let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
TextRange::offset_len(range.start() + char_unit, range.len() - char_unit);
let ident = tt::Leaf::from(tt::Ident { let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text()[1..]), text: SmolStr::new(&token.to_text()[1..]),
id: self.id_alloc().alloc(r), id: self.id_alloc().alloc(r),
@ -440,7 +439,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
fn bump(&mut self) -> Option<(Self::Token, TextRange)> { fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
let token = self.inner.next()?; let token = self.inner.next()?;
let range = TextRange::offset_len(self.offset, token.len); let range = TextRange::at(self.offset, token.len);
self.offset += token.len; self.offset += token.len;
Some(((*token, &self.text[range]), range)) Some(((*token, &self.text[range]), range))
@ -450,7 +449,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
let token = self.inner.as_slice().get(0).cloned(); let token = self.inner.as_slice().get(0).cloned();
token.map(|it| { token.map(|it| {
let range = TextRange::offset_len(self.offset, it.len); let range = TextRange::at(self.offset, it.len);
(it, &self.text[range]) (it, &self.text[range])
}) })
} }
@ -464,11 +463,11 @@ struct Convertor {
id_alloc: TokenIdAlloc, id_alloc: TokenIdAlloc,
current: Option<SyntaxToken>, current: Option<SyntaxToken>,
range: TextRange, range: TextRange,
punct_offset: Option<(SyntaxToken, TextUnit)>, punct_offset: Option<(SyntaxToken, TextSize)>,
} }
impl Convertor { impl Convertor {
fn new(node: &SyntaxNode, global_offset: TextUnit) -> Convertor { fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor {
Convertor { Convertor {
id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
current: node.first_token(), current: node.first_token(),
@ -481,7 +480,7 @@ impl Convertor {
#[derive(Debug)] #[derive(Debug)]
enum SynToken { enum SynToken {
Ordiniary(SyntaxToken), Ordiniary(SyntaxToken),
Punch(SyntaxToken, TextUnit), Punch(SyntaxToken, TextSize),
} }
impl SynToken { impl SynToken {
@ -500,7 +499,7 @@ impl SrcToken for SynToken {
fn to_char(&self) -> Option<char> { fn to_char(&self) -> Option<char> {
match self { match self {
SynToken::Ordiniary(_) => None, SynToken::Ordiniary(_) => None,
SynToken::Punch(it, i) => it.text().chars().nth(i.to_usize()), SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
} }
} }
fn to_text(&self) -> SmolStr { fn to_text(&self) -> SmolStr {
@ -516,26 +515,26 @@ impl TokenConvertor for Convertor {
fn bump(&mut self) -> Option<(Self::Token, TextRange)> { fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
if let Some((punct, offset)) = self.punct_offset.clone() { if let Some((punct, offset)) = self.punct_offset.clone() {
if offset.to_usize() + 1 < punct.text().len() { if usize::from(offset) + 1 < punct.text().len() {
let offset = offset + TextUnit::from_usize(1); let offset = offset + TextSize::of('.');
let range = punct.text_range(); let range = punct.text_range();
self.punct_offset = Some((punct.clone(), offset)); self.punct_offset = Some((punct.clone(), offset));
let range = TextRange::offset_len(range.start() + offset, TextUnit::from_usize(1)); let range = TextRange::at(range.start() + offset, TextSize::of('.'));
return Some((SynToken::Punch(punct, offset), range)); return Some((SynToken::Punch(punct, offset), range));
} }
} }
let curr = self.current.clone()?; let curr = self.current.clone()?;
if !curr.text_range().is_subrange(&self.range) { if !&self.range.contains_range(curr.text_range()) {
return None; return None;
} }
self.current = curr.next_token(); self.current = curr.next_token();
let token = if curr.kind().is_punct() { let token = if curr.kind().is_punct() {
let range = curr.text_range(); let range = curr.text_range();
let range = TextRange::offset_len(range.start(), TextUnit::from_usize(1)); let range = TextRange::at(range.start(), TextSize::of('.'));
self.punct_offset = Some((curr.clone(), TextUnit::from_usize(0))); self.punct_offset = Some((curr.clone(), 0.into()));
(SynToken::Punch(curr, TextUnit::from_usize(0)), range) (SynToken::Punch(curr, 0.into()), range)
} else { } else {
self.punct_offset = None; self.punct_offset = None;
let range = curr.text_range(); let range = curr.text_range();
@ -547,19 +546,19 @@ impl TokenConvertor for Convertor {
fn peek(&self) -> Option<Self::Token> { fn peek(&self) -> Option<Self::Token> {
if let Some((punct, mut offset)) = self.punct_offset.clone() { if let Some((punct, mut offset)) = self.punct_offset.clone() {
offset = offset + TextUnit::from_usize(1); offset = offset + TextSize::of('.');
if offset.to_usize() < punct.text().len() { if usize::from(offset) < punct.text().len() {
return Some(SynToken::Punch(punct, offset)); return Some(SynToken::Punch(punct, offset));
} }
} }
let curr = self.current.clone()?; let curr = self.current.clone()?;
if !curr.text_range().is_subrange(&self.range) { if !self.range.contains_range(curr.text_range()) {
return None; return None;
} }
let token = if curr.kind().is_punct() { let token = if curr.kind().is_punct() {
SynToken::Punch(curr, TextUnit::from_usize(0)) SynToken::Punch(curr, 0.into())
} else { } else {
SynToken::Ordiniary(curr) SynToken::Ordiniary(curr)
}; };
@ -574,8 +573,8 @@ impl TokenConvertor for Convertor {
struct TtTreeSink<'a> { struct TtTreeSink<'a> {
buf: String, buf: String,
cursor: Cursor<'a>, cursor: Cursor<'a>,
open_delims: FxHashMap<tt::TokenId, TextUnit>, open_delims: FxHashMap<tt::TokenId, TextSize>,
text_pos: TextUnit, text_pos: TextSize,
inner: SyntaxTreeBuilder, inner: SyntaxTreeBuilder,
token_map: TokenMap, token_map: TokenMap,
@ -641,7 +640,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
} }
tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
}; };
let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text)); let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
self.token_map.insert(id, range); self.token_map.insert(id, range);
self.cursor = self.cursor.bump(); self.cursor = self.cursor.bump();
text text
@ -658,10 +657,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
self.cursor = self.cursor.bump(); self.cursor = self.cursor.bump();
if let Some(id) = parent.delimiter.map(|it| it.id) { if let Some(id) = parent.delimiter.map(|it| it.id) {
if let Some(open_delim) = self.open_delims.get(&id) { if let Some(open_delim) = self.open_delims.get(&id) {
let open_range = let open_range = TextRange::at(*open_delim, TextSize::of('('));
TextRange::offset_len(*open_delim, TextUnit::from_usize(1)); let close_range = TextRange::at(self.text_pos, TextSize::of('('));
let close_range =
TextRange::offset_len(self.text_pos, TextUnit::from_usize(1));
self.token_map.insert_delim(id, open_range, close_range); self.token_map.insert_delim(id, open_range, close_range);
} }
} }
@ -672,7 +669,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
} }
}; };
self.buf += &text; self.buf += &text;
self.text_pos += TextUnit::of_str(&text); self.text_pos += TextSize::of(text.as_str());
} }
let text = SmolStr::new(self.buf.as_str()); let text = SmolStr::new(self.buf.as_str());
@ -690,7 +687,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
// other parts of RA such that we don't add whitespace here. // other parts of RA such that we don't add whitespace here.
if curr.spacing == tt::Spacing::Alone && curr.char != ';' { if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
self.inner.token(WHITESPACE, " ".into()); self.inner.token(WHITESPACE, " ".into());
self.text_pos += TextUnit::of_char(' '); self.text_pos += TextSize::of(' ');
} }
} }
} }

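Two idioms from this file recur through the rest of the patch: `TextRange::offset_len(offset, len)` becomes `TextRange::at(offset, len)`, and one-byte lengths are spelled `TextSize::of('.')` (or `of('\'')`) instead of the opaque `TextUnit::from_usize(1)`. A standalone sketch of the lifetime-splitting case (text-size 1.0 assumed; the token text here is a made-up example):

    use text_size::{TextRange, TextSize};

    fn main() {
        // Pretend this is the text of a LIFETIME token starting at offset 10.
        let token_text = "'static";
        let token_range = TextRange::at(TextSize::from(10), TextSize::of(token_text));

        // Split it the way the convertor does: an apostrophe leaf plus an ident leaf.
        let quote = TextSize::of('\'');
        let apostrophe_range = TextRange::at(token_range.start(), quote);
        let ident_range =
            TextRange::at(token_range.start() + quote, token_range.len() - quote);

        assert_eq!(apostrophe_range, TextRange::new(10.into(), 11.into()));
        assert_eq!(ident_range, TextRange::new(11.into(), 17.into()));
        assert!(token_range.contains_range(ident_range));
    }
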
View file

@ -257,30 +257,30 @@ fn test_expr_order() {
let dump = format!("{:#?}", expanded); let dump = format!("{:#?}", expanded);
assert_eq_text!( assert_eq_text!(
dump.trim(), dump.trim(),
r#"MACRO_ITEMS@[0; 15) r#"MACRO_ITEMS@0..15
FN_DEF@[0; 15) FN_DEF@0..15
FN_KW@[0; 2) "fn" FN_KW@0..2 "fn"
NAME@[2; 5) NAME@2..5
IDENT@[2; 5) "bar" IDENT@2..5 "bar"
PARAM_LIST@[5; 7) PARAM_LIST@5..7
L_PAREN@[5; 6) "(" L_PAREN@5..6 "("
R_PAREN@[6; 7) ")" R_PAREN@6..7 ")"
BLOCK_EXPR@[7; 15) BLOCK_EXPR@7..15
BLOCK@[7; 15) BLOCK@7..15
L_CURLY@[7; 8) "{" L_CURLY@7..8 "{"
EXPR_STMT@[8; 14) EXPR_STMT@8..14
BIN_EXPR@[8; 13) BIN_EXPR@8..13
BIN_EXPR@[8; 11) BIN_EXPR@8..11
LITERAL@[8; 9) LITERAL@8..9
INT_NUMBER@[8; 9) "1" INT_NUMBER@8..9 "1"
PLUS@[9; 10) "+" PLUS@9..10 "+"
LITERAL@[10; 11) LITERAL@10..11
INT_NUMBER@[10; 11) "1" INT_NUMBER@10..11 "1"
STAR@[11; 12) "*" STAR@11..12 "*"
LITERAL@[12; 13) LITERAL@12..13
INT_NUMBER@[12; 13) "2" INT_NUMBER@12..13 "2"
SEMICOLON@[13; 14) ";" SEMICOLON@13..14 ";"
R_CURLY@[14; 15) "}""#, R_CURLY@14..15 "}""#,
); );
} }
@ -490,39 +490,39 @@ fn test_expand_to_item_list() {
assert_eq!( assert_eq!(
format!("{:#?}", tree).trim(), format!("{:#?}", tree).trim(),
r#" r#"
MACRO_ITEMS@[0; 40) MACRO_ITEMS@0..40
STRUCT_DEF@[0; 20) STRUCT_DEF@0..20
STRUCT_KW@[0; 6) "struct" STRUCT_KW@0..6 "struct"
NAME@[6; 9) NAME@6..9
IDENT@[6; 9) "Foo" IDENT@6..9 "Foo"
RECORD_FIELD_DEF_LIST@[9; 20) RECORD_FIELD_DEF_LIST@9..20
L_CURLY@[9; 10) "{" L_CURLY@9..10 "{"
RECORD_FIELD_DEF@[10; 19) RECORD_FIELD_DEF@10..19
NAME@[10; 15) NAME@10..15
IDENT@[10; 15) "field" IDENT@10..15 "field"
COLON@[15; 16) ":" COLON@15..16 ":"
PATH_TYPE@[16; 19) PATH_TYPE@16..19
PATH@[16; 19) PATH@16..19
PATH_SEGMENT@[16; 19) PATH_SEGMENT@16..19
NAME_REF@[16; 19) NAME_REF@16..19
IDENT@[16; 19) "u32" IDENT@16..19 "u32"
R_CURLY@[19; 20) "}" R_CURLY@19..20 "}"
STRUCT_DEF@[20; 40) STRUCT_DEF@20..40
STRUCT_KW@[20; 26) "struct" STRUCT_KW@20..26 "struct"
NAME@[26; 29) NAME@26..29
IDENT@[26; 29) "Bar" IDENT@26..29 "Bar"
RECORD_FIELD_DEF_LIST@[29; 40) RECORD_FIELD_DEF_LIST@29..40
L_CURLY@[29; 30) "{" L_CURLY@29..30 "{"
RECORD_FIELD_DEF@[30; 39) RECORD_FIELD_DEF@30..39
NAME@[30; 35) NAME@30..35
IDENT@[30; 35) "field" IDENT@30..35 "field"
COLON@[35; 36) ":" COLON@35..36 ":"
PATH_TYPE@[36; 39) PATH_TYPE@36..39
PATH@[36; 39) PATH@36..39
PATH_SEGMENT@[36; 39) PATH_SEGMENT@36..39
NAME_REF@[36; 39) NAME_REF@36..39
IDENT@[36; 39) "u32" IDENT@36..39 "u32"
R_CURLY@[39; 40) "}""# R_CURLY@39..40 "}""#
.trim() .trim()
); );
} }
@ -623,37 +623,37 @@ fn test_tt_to_stmts() {
assert_eq!( assert_eq!(
format!("{:#?}", stmts).trim(), format!("{:#?}", stmts).trim(),
r#"MACRO_STMTS@[0; 15) r#"MACRO_STMTS@0..15
LET_STMT@[0; 7) LET_STMT@0..7
LET_KW@[0; 3) "let" LET_KW@0..3 "let"
BIND_PAT@[3; 4) BIND_PAT@3..4
NAME@[3; 4) NAME@3..4
IDENT@[3; 4) "a" IDENT@3..4 "a"
EQ@[4; 5) "=" EQ@4..5 "="
LITERAL@[5; 6) LITERAL@5..6
INT_NUMBER@[5; 6) "0" INT_NUMBER@5..6 "0"
SEMICOLON@[6; 7) ";" SEMICOLON@6..7 ";"
EXPR_STMT@[7; 14) EXPR_STMT@7..14
BIN_EXPR@[7; 13) BIN_EXPR@7..13
PATH_EXPR@[7; 8) PATH_EXPR@7..8
PATH@[7; 8) PATH@7..8
PATH_SEGMENT@[7; 8) PATH_SEGMENT@7..8
NAME_REF@[7; 8) NAME_REF@7..8
IDENT@[7; 8) "a" IDENT@7..8 "a"
EQ@[8; 9) "=" EQ@8..9 "="
BIN_EXPR@[9; 13) BIN_EXPR@9..13
LITERAL@[9; 11) LITERAL@9..11
INT_NUMBER@[9; 11) "10" INT_NUMBER@9..11 "10"
PLUS@[11; 12) "+" PLUS@11..12 "+"
LITERAL@[12; 13) LITERAL@12..13
INT_NUMBER@[12; 13) "1" INT_NUMBER@12..13 "1"
SEMICOLON@[13; 14) ";" SEMICOLON@13..14 ";"
EXPR_STMT@[14; 15) EXPR_STMT@14..15
PATH_EXPR@[14; 15) PATH_EXPR@14..15
PATH@[14; 15) PATH@14..15
PATH_SEGMENT@[14; 15) PATH_SEGMENT@14..15
NAME_REF@[14; 15) NAME_REF@14..15
IDENT@[14; 15) "a""#, IDENT@14..15 "a""#,
); );
} }
@ -974,20 +974,20 @@ fn test_tt_composite2() {
let res = format!("{:#?}", &node); let res = format!("{:#?}", &node);
assert_eq_text!( assert_eq_text!(
res.trim(), res.trim(),
r###"MACRO_ITEMS@[0; 10) r###"MACRO_ITEMS@0..10
MACRO_CALL@[0; 10) MACRO_CALL@0..10
PATH@[0; 3) PATH@0..3
PATH_SEGMENT@[0; 3) PATH_SEGMENT@0..3
NAME_REF@[0; 3) NAME_REF@0..3
IDENT@[0; 3) "abs" IDENT@0..3 "abs"
BANG@[3; 4) "!" BANG@3..4 "!"
TOKEN_TREE@[4; 10) TOKEN_TREE@4..10
L_PAREN@[4; 5) "(" L_PAREN@4..5 "("
EQ@[5; 6) "=" EQ@5..6 "="
R_ANGLE@[6; 7) ">" R_ANGLE@6..7 ">"
WHITESPACE@[7; 8) " " WHITESPACE@7..8 " "
POUND@[8; 9) "#" POUND@8..9 "#"
R_PAREN@[9; 10) ")""### R_PAREN@9..10 ")""###
); );
} }
@ -1113,69 +1113,69 @@ fn test_vec() {
assert_eq!( assert_eq!(
format!("{:#?}", tree).trim(), format!("{:#?}", tree).trim(),
r#"BLOCK_EXPR@[0; 45) r#"BLOCK_EXPR@0..45
BLOCK@[0; 45) BLOCK@0..45
L_CURLY@[0; 1) "{" L_CURLY@0..1 "{"
LET_STMT@[1; 20) LET_STMT@1..20
LET_KW@[1; 4) "let" LET_KW@1..4 "let"
BIND_PAT@[4; 8) BIND_PAT@4..8
MUT_KW@[4; 7) "mut" MUT_KW@4..7 "mut"
NAME@[7; 8) NAME@7..8
IDENT@[7; 8) "v" IDENT@7..8 "v"
EQ@[8; 9) "=" EQ@8..9 "="
CALL_EXPR@[9; 19) CALL_EXPR@9..19
PATH_EXPR@[9; 17) PATH_EXPR@9..17
PATH@[9; 17) PATH@9..17
PATH@[9; 12) PATH@9..12
PATH_SEGMENT@[9; 12) PATH_SEGMENT@9..12
NAME_REF@[9; 12) NAME_REF@9..12
IDENT@[9; 12) "Vec" IDENT@9..12 "Vec"
COLON2@[12; 14) "::" COLON2@12..14 "::"
PATH_SEGMENT@[14; 17) PATH_SEGMENT@14..17
NAME_REF@[14; 17) NAME_REF@14..17
IDENT@[14; 17) "new" IDENT@14..17 "new"
ARG_LIST@[17; 19) ARG_LIST@17..19
L_PAREN@[17; 18) "(" L_PAREN@17..18 "("
R_PAREN@[18; 19) ")" R_PAREN@18..19 ")"
SEMICOLON@[19; 20) ";" SEMICOLON@19..20 ";"
EXPR_STMT@[20; 33) EXPR_STMT@20..33
METHOD_CALL_EXPR@[20; 32) METHOD_CALL_EXPR@20..32
PATH_EXPR@[20; 21) PATH_EXPR@20..21
PATH@[20; 21) PATH@20..21
PATH_SEGMENT@[20; 21) PATH_SEGMENT@20..21
NAME_REF@[20; 21) NAME_REF@20..21
IDENT@[20; 21) "v" IDENT@20..21 "v"
DOT@[21; 22) "." DOT@21..22 "."
NAME_REF@[22; 26) NAME_REF@22..26
IDENT@[22; 26) "push" IDENT@22..26 "push"
ARG_LIST@[26; 32) ARG_LIST@26..32
L_PAREN@[26; 27) "(" L_PAREN@26..27 "("
LITERAL@[27; 31) LITERAL@27..31
INT_NUMBER@[27; 31) "1u32" INT_NUMBER@27..31 "1u32"
R_PAREN@[31; 32) ")" R_PAREN@31..32 ")"
SEMICOLON@[32; 33) ";" SEMICOLON@32..33 ";"
EXPR_STMT@[33; 43) EXPR_STMT@33..43
METHOD_CALL_EXPR@[33; 42) METHOD_CALL_EXPR@33..42
PATH_EXPR@[33; 34) PATH_EXPR@33..34
PATH@[33; 34) PATH@33..34
PATH_SEGMENT@[33; 34) PATH_SEGMENT@33..34
NAME_REF@[33; 34) NAME_REF@33..34
IDENT@[33; 34) "v" IDENT@33..34 "v"
DOT@[34; 35) "." DOT@34..35 "."
NAME_REF@[35; 39) NAME_REF@35..39
IDENT@[35; 39) "push" IDENT@35..39 "push"
ARG_LIST@[39; 42) ARG_LIST@39..42
L_PAREN@[39; 40) "(" L_PAREN@39..40 "("
LITERAL@[40; 41) LITERAL@40..41
INT_NUMBER@[40; 41) "2" INT_NUMBER@40..41 "2"
R_PAREN@[41; 42) ")" R_PAREN@41..42 ")"
SEMICOLON@[42; 43) ";" SEMICOLON@42..43 ";"
PATH_EXPR@[43; 44) PATH_EXPR@43..44
PATH@[43; 44) PATH@43..44
PATH_SEGMENT@[43; 44) PATH_SEGMENT@43..44
NAME_REF@[43; 44) NAME_REF@43..44
IDENT@[43; 44) "v" IDENT@43..44 "v"
R_CURLY@[44; 45) "}""# R_CURLY@44..45 "}""#
); );
} }
@ -1798,45 +1798,45 @@ fn test_no_space_after_semi_colon() {
let dump = format!("{:#?}", expanded); let dump = format!("{:#?}", expanded);
assert_eq_text!( assert_eq_text!(
dump.trim(), dump.trim(),
r###"MACRO_ITEMS@[0; 52) r###"MACRO_ITEMS@0..52
MODULE@[0; 26) MODULE@0..26
ATTR@[0; 21) ATTR@0..21
POUND@[0; 1) "#" POUND@0..1 "#"
L_BRACK@[1; 2) "[" L_BRACK@1..2 "["
PATH@[2; 5) PATH@2..5
PATH_SEGMENT@[2; 5) PATH_SEGMENT@2..5
NAME_REF@[2; 5) NAME_REF@2..5
IDENT@[2; 5) "cfg" IDENT@2..5 "cfg"
TOKEN_TREE@[5; 20) TOKEN_TREE@5..20
L_PAREN@[5; 6) "(" L_PAREN@5..6 "("
IDENT@[6; 13) "feature" IDENT@6..13 "feature"
EQ@[13; 14) "=" EQ@13..14 "="
STRING@[14; 19) "\"std\"" STRING@14..19 "\"std\""
R_PAREN@[19; 20) ")" R_PAREN@19..20 ")"
R_BRACK@[20; 21) "]" R_BRACK@20..21 "]"
MOD_KW@[21; 24) "mod" MOD_KW@21..24 "mod"
NAME@[24; 25) NAME@24..25
IDENT@[24; 25) "m" IDENT@24..25 "m"
SEMICOLON@[25; 26) ";" SEMICOLON@25..26 ";"
MODULE@[26; 52) MODULE@26..52
ATTR@[26; 47) ATTR@26..47
POUND@[26; 27) "#" POUND@26..27 "#"
L_BRACK@[27; 28) "[" L_BRACK@27..28 "["
PATH@[28; 31) PATH@28..31
PATH_SEGMENT@[28; 31) PATH_SEGMENT@28..31
NAME_REF@[28; 31) NAME_REF@28..31
IDENT@[28; 31) "cfg" IDENT@28..31 "cfg"
TOKEN_TREE@[31; 46) TOKEN_TREE@31..46
L_PAREN@[31; 32) "(" L_PAREN@31..32 "("
IDENT@[32; 39) "feature" IDENT@32..39 "feature"
EQ@[39; 40) "=" EQ@39..40 "="
STRING@[40; 45) "\"std\"" STRING@40..45 "\"std\""
R_PAREN@[45; 46) ")" R_PAREN@45..46 ")"
R_BRACK@[46; 47) "]" R_BRACK@46..47 "]"
MOD_KW@[47; 50) "mod" MOD_KW@47..50 "mod"
NAME@[50; 51) NAME@50..51
IDENT@[50; 51) "f" IDENT@50..51 "f"
SEMICOLON@[51; 52) ";""###, SEMICOLON@51..52 ";""###,
); );
} }

View file

@ -12,7 +12,7 @@ doctest = false
[dependencies] [dependencies]
itertools = "0.9.0" itertools = "0.9.0"
rowan = "0.9.1" rowan = "0.10.0"
rustc_lexer = { version = "652.0.0", package = "rustc-ap-rustc_lexer" } rustc_lexer = { version = "652.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0" rustc-hash = "1.1.0"
arrayvec = "0.5.1" arrayvec = "0.5.1"

View file

@ -11,7 +11,7 @@ use rustc_hash::FxHashMap;
use crate::{ use crate::{
AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
SyntaxToken, TextRange, TextUnit, SyntaxToken, TextRange, TextSize,
}; };
/// Returns ancestors of the node at the offset, sorted by length. This should /// Returns ancestors of the node at the offset, sorted by length. This should
@ -21,7 +21,7 @@ use crate::{
/// t.parent().ancestors())`. /// t.parent().ancestors())`.
pub fn ancestors_at_offset( pub fn ancestors_at_offset(
node: &SyntaxNode, node: &SyntaxNode,
offset: TextUnit, offset: TextSize,
) -> impl Iterator<Item = SyntaxNode> { ) -> impl Iterator<Item = SyntaxNode> {
node.token_at_offset(offset) node.token_at_offset(offset)
.map(|token| token.parent().ancestors()) .map(|token| token.parent().ancestors())
@ -37,7 +37,7 @@ pub fn ancestors_at_offset(
/// ``` /// ```
/// ///
/// then the shorter node will be silently preferred. /// then the shorter node will be silently preferred.
pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<N> { pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextSize) -> Option<N> {
ancestors_at_offset(syntax, offset).find_map(N::cast) ancestors_at_offset(syntax, offset).find_map(N::cast)
} }
@ -180,7 +180,7 @@ fn _insert_children(
position: InsertPosition<SyntaxElement>, position: InsertPosition<SyntaxElement>,
to_insert: &mut dyn Iterator<Item = SyntaxElement>, to_insert: &mut dyn Iterator<Item = SyntaxElement>,
) -> SyntaxNode { ) -> SyntaxNode {
let mut delta = TextUnit::default(); let mut delta = TextSize::default();
let to_insert = to_insert.map(|element| { let to_insert = to_insert.map(|element| {
delta += element.text_range().len(); delta += element.text_range().len();
to_green_element(element) to_green_element(element)
@ -347,7 +347,7 @@ fn with_children(
parent: &SyntaxNode, parent: &SyntaxNode,
new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
) -> SyntaxNode { ) -> SyntaxNode {
let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>(); let len = new_children.iter().map(|it| it.text_len()).sum::<TextSize>();
let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), new_children); let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), new_children);
let new_root_node = parent.replace_with(new_node); let new_root_node = parent.replace_with(new_node);
let new_root_node = SyntaxNode::new_root(new_root_node); let new_root_node = SyntaxNode::new_root(new_root_node);
@ -355,7 +355,7 @@ fn with_children(
// FIXME: use a more elegant way to re-fetch the node (#1185), make // FIXME: use a more elegant way to re-fetch the node (#1185), make
// `range` private afterwards // `range` private afterwards
let mut ptr = SyntaxNodePtr::new(parent); let mut ptr = SyntaxNodePtr::new(parent);
ptr.range = TextRange::offset_len(ptr.range.start(), len); ptr.range = TextRange::at(ptr.range.start(), len);
ptr.to_node(&new_root_node) ptr.to_node(&new_root_node)
} }

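`TextSize` keeps the numeric conveniences the old type had: `Default` is the zero offset and `Sum` lets element lengths fold directly, which is what `_insert_children` and `with_children` rely on above. A small standalone sketch (text-size 1.0 assumed; not part of the patch):

    use text_size::{TextRange, TextSize};

    fn main() {
        let pieces = ["fn ", "main", "()", " ", "{}"];

        // Sum<TextSize> folds lengths without manual accumulation...
        let total: TextSize = pieces.iter().map(|p| TextSize::of(*p)).sum();

        // ...and Default is the zero offset, handy as a running counter.
        let mut delta = TextSize::default();
        for p in &pieces {
            delta += TextSize::of(*p);
        }

        assert_eq!(total, delta);
        assert_eq!(TextRange::up_to(total).len(), TextSize::from(12));
    }
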
View file

@ -1,8 +1,10 @@
//! There are many AstNodes, but only a few tokens, so we hand-write them here. //! There are many AstNodes, but only a few tokens, so we hand-write them here.
use std::convert::{TryFrom, TryInto};
use crate::{ use crate::{
ast::{AstToken, Comment, RawString, String, Whitespace}, ast::{AstToken, Comment, RawString, String, Whitespace},
TextRange, TextUnit, TextRange, TextSize,
}; };
impl Comment { impl Comment {
@ -94,14 +96,14 @@ impl QuoteOffsets {
return None; return None;
} }
let start = TextUnit::from(0); let start = TextSize::from(0);
let left_quote = TextUnit::from_usize(left_quote) + TextUnit::of_char('"'); let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
let right_quote = TextUnit::from_usize(right_quote); let right_quote = TextSize::try_from(right_quote).unwrap();
let end = TextUnit::of_str(literal); let end = TextSize::of(literal);
let res = QuoteOffsets { let res = QuoteOffsets {
quotes: [TextRange::from_to(start, left_quote), TextRange::from_to(right_quote, end)], quotes: [TextRange::new(start, left_quote), TextRange::new(right_quote, end)],
contents: TextRange::from_to(left_quote, right_quote), contents: TextRange::new(left_quote, right_quote),
}; };
Some(res) Some(res)
} }
@ -168,7 +170,7 @@ impl HasStringValue for RawString {
impl RawString { impl RawString {
pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> { pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
let contents_range = self.text_range_between_quotes()?; let contents_range = self.text_range_between_quotes()?;
assert!(range.is_subrange(&TextRange::offset_len(0.into(), contents_range.len()))); assert!(TextRange::up_to(contents_range.len()).contains_range(range));
Some(range + contents_range.start()) Some(range + contents_range.start())
} }
} }
@ -459,7 +461,7 @@ pub trait HasFormatSpecifier: AstToken {
while let Some((r, Ok(next_char))) = chars.peek() { while let Some((r, Ok(next_char))) = chars.peek() {
if next_char.is_ascii_digit() { if next_char.is_ascii_digit() {
chars.next(); chars.next();
range = range.extend_to(r); range = range.cover(*r);
} else { } else {
break; break;
} }
@ -477,7 +479,7 @@ pub trait HasFormatSpecifier: AstToken {
while let Some((r, Ok(next_char))) = chars.peek() { while let Some((r, Ok(next_char))) = chars.peek() {
if *next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() { if *next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() {
chars.next(); chars.next();
range = range.extend_to(r); range = range.cover(*r);
} else { } else {
break; break;
} }
@ -498,10 +500,8 @@ impl HasFormatSpecifier for String {
let mut res = Vec::with_capacity(text.len()); let mut res = Vec::with_capacity(text.len());
rustc_lexer::unescape::unescape_str(text, &mut |range, unescaped_char| { rustc_lexer::unescape::unescape_str(text, &mut |range, unescaped_char| {
res.push(( res.push((
TextRange::from_to( TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
TextUnit::from_usize(range.start), + offset,
TextUnit::from_usize(range.end),
) + offset,
unescaped_char, unescaped_char,
)) ))
}); });
@ -520,13 +520,7 @@ impl HasFormatSpecifier for RawString {
let mut res = Vec::with_capacity(text.len()); let mut res = Vec::with_capacity(text.len());
for (idx, c) in text.char_indices() { for (idx, c) in text.char_indices() {
res.push(( res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c)));
TextRange::from_to(
TextUnit::from_usize(idx),
TextUnit::from_usize(idx + c.len_utf8()),
) + offset,
Ok(c),
));
} }
Some(res) Some(res)
} }

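`extend_to(&r)` is now `cover(r)`: the smallest range containing both, taken by value. The raw-string path also builds per-char ranges with `TextRange::at(idx, TextSize::of(c))` instead of computing `idx + c.len_utf8()` by hand. A standalone sketch of both (text-size 1.0 assumed; the literal text is a made-up example):

    use std::convert::TryInto;
    use text_size::{TextRange, TextSize};

    fn main() {
        let text = "r\"メ\"";

        // Per-char ranges, as in HasFormatSpecifier for RawString.
        let mut ranges = Vec::new();
        for (idx, c) in text.char_indices() {
            let start: TextSize = idx.try_into().unwrap();
            ranges.push(TextRange::at(start, TextSize::of(c)));
        }

        // cover() folds them back into the range of the whole literal.
        let all = ranges.iter().copied().fold(ranges[0], |acc, r| acc.cover(r));
        assert_eq!(all, TextRange::up_to(TextSize::of(text)));
    }
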
View file

@ -1,8 +1,13 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use crate::{validation, AstNode, SourceFile, TextRange, TextUnit}; use std::{
convert::TryInto,
str::{self, FromStr},
};
use ra_text_edit::AtomTextEdit; use ra_text_edit::AtomTextEdit;
use std::str::{self, FromStr};
use crate::{validation, AstNode, SourceFile, TextRange};
fn check_file_invariants(file: &SourceFile) { fn check_file_invariants(file: &SourceFile) {
let root = file.syntax(); let root = file.syntax();
@ -34,10 +39,8 @@ impl CheckReparse {
let text = lines.collect::<Vec<_>>().join("\n"); let text = lines.collect::<Vec<_>>().join("\n");
let text = format!("{}{}{}", PREFIX, text, SUFFIX); let text = format!("{}{}{}", PREFIX, text, SUFFIX);
text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
let delete = TextRange::offset_len( let delete =
TextUnit::from_usize(delete_start), TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
TextUnit::from_usize(delete_len),
);
let edited_text = let edited_text =
format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]); format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
let edit = AtomTextEdit { delete, insert }; let edit = AtomTextEdit { delete, insert };

View file

@ -55,7 +55,7 @@ pub use crate::{
}, },
}; };
pub use ra_parser::{SyntaxKind, T}; pub use ra_parser::{SyntaxKind, T};
pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit, TokenAtOffset, WalkEvent}; pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent};
/// `Parse` is the result of the parsing: a syntax tree and a collection of /// `Parse` is the result of the parsing: a syntax tree and a collection of
/// errors. /// errors.
@ -266,7 +266,7 @@ fn api_walkthrough() {
assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR); assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
// And text range: // And text range:
assert_eq!(expr_syntax.text_range(), TextRange::from_to(32.into(), 37.into())); assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into()));
// You can get node's text as a `SyntaxText` object, which will traverse the // You can get node's text as a `SyntaxText` object, which will traverse the
// tree collecting token's text: // tree collecting token's text:

View file

@ -1,10 +1,12 @@
//! Lexer analyzes raw input string and produces lexemes (tokens). //! Lexer analyzes raw input string and produces lexemes (tokens).
//! It is just a bridge to `rustc_lexer`. //! It is just a bridge to `rustc_lexer`.
use std::convert::TryInto;
use crate::{ use crate::{
SyntaxError, SyntaxError,
SyntaxKind::{self, *}, SyntaxKind::{self, *},
TextRange, TextUnit, T, TextRange, TextSize, T,
}; };
/// A token of Rust source. /// A token of Rust source.
@ -13,7 +15,7 @@ pub struct Token {
/// The kind of token. /// The kind of token.
pub kind: SyntaxKind, pub kind: SyntaxKind,
/// The length of the token. /// The length of the token.
pub len: TextUnit, pub len: TextSize,
} }
/// Break a string up into its component tokens. /// Break a string up into its component tokens.
@ -28,18 +30,19 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
let mut tokens = Vec::new(); let mut tokens = Vec::new();
let mut errors = Vec::new(); let mut errors = Vec::new();
let mut offset: usize = rustc_lexer::strip_shebang(text) let mut offset = match rustc_lexer::strip_shebang(text) {
.map(|shebang_len| { Some(shebang_len) => {
tokens.push(Token { kind: SHEBANG, len: TextUnit::from_usize(shebang_len) }); tokens.push(Token { kind: SHEBANG, len: shebang_len.try_into().unwrap() });
shebang_len shebang_len
}) }
.unwrap_or(0); None => 0,
};
let text_without_shebang = &text[offset..]; let text_without_shebang = &text[offset..];
for rustc_token in rustc_lexer::tokenize(text_without_shebang) { for rustc_token in rustc_lexer::tokenize(text_without_shebang) {
let token_len = TextUnit::from_usize(rustc_token.len); let token_len: TextSize = rustc_token.len.try_into().unwrap();
let token_range = TextRange::offset_len(TextUnit::from_usize(offset), token_len); let token_range = TextRange::at(offset.try_into().unwrap(), token_len);
let (syntax_kind, err_message) = let (syntax_kind, err_message) =
rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]); rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]);
@ -65,7 +68,7 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
/// Beware that unescape errors are not checked at tokenization time. /// Beware that unescape errors are not checked at tokenization time.
pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> { pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> {
lex_first_token(text) lex_first_token(text)
.filter(|(token, _)| token.len == TextUnit::of_str(text)) .filter(|(token, _)| token.len == TextSize::of(text))
.map(|(token, error)| (token.kind, error)) .map(|(token, error)| (token.kind, error))
} }
@ -75,7 +78,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
/// Beware that unescape errors are not checked at tokenization time. /// Beware that unescape errors are not checked at tokenization time.
pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
lex_first_token(text) lex_first_token(text)
.filter(|(token, error)| !error.is_some() && token.len == TextUnit::of_str(text)) .filter(|(token, error)| !error.is_some() && token.len == TextSize::of(text))
.map(|(token, _error)| token.kind) .map(|(token, _error)| token.kind)
} }
@ -96,10 +99,9 @@ fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
let rustc_token = rustc_lexer::first_token(text); let rustc_token = rustc_lexer::first_token(text);
let (syntax_kind, err_message) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text); let (syntax_kind, err_message) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text);
let token = Token { kind: syntax_kind, len: TextUnit::from_usize(rustc_token.len) }; let token = Token { kind: syntax_kind, len: rustc_token.len.try_into().unwrap() };
let optional_error = err_message.map(|err_message| { let optional_error = err_message
SyntaxError::new(err_message, TextRange::from_to(0.into(), TextUnit::of_str(text))) .map(|err_message| SyntaxError::new(err_message, TextRange::up_to(TextSize::of(text))));
});
Some((token, optional_error)) Some((token, optional_error))
} }

View file

@ -19,7 +19,7 @@ use crate::{
syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode}, syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
SyntaxError, SyntaxError,
SyntaxKind::*, SyntaxKind::*,
TextRange, TextUnit, T, TextRange, TextSize, T,
}; };
pub(crate) fn incremental_reparse( pub(crate) fn incremental_reparse(
@ -176,7 +176,7 @@ fn merge_errors(
if old_err_range.end() <= range_before_reparse.start() { if old_err_range.end() <= range_before_reparse.start() {
res.push(old_err); res.push(old_err);
} else if old_err_range.start() >= range_before_reparse.end() { } else if old_err_range.start() >= range_before_reparse.end() {
let inserted_len = TextUnit::of_str(&edit.insert); let inserted_len = TextSize::of(&edit.insert);
res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len())); res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len()));
// Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug) // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug)
} }

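`merge_errors` shows that range arithmetic carries over unchanged: a whole `TextRange` shifts by adding or subtracting a `TextSize`, which is how old errors are relocated past the reparsed region. A standalone sketch of that relocation (text-size 1.0 assumed; the numbers are made up):

    use text_size::{TextRange, TextSize};

    fn main() {
        // An old error at 30..34, an edit that deletes 10..14 and inserts 2 bytes.
        let old_err_range = TextRange::new(30.into(), 34.into());
        let deleted = TextRange::new(10.into(), 14.into());
        let inserted_len = TextSize::of("ab");

        // Shift right by the inserted text, then left by the deleted text,
        // parenthesised the same way as in merge_errors to avoid underflow.
        let relocated = (old_err_range + inserted_len) - deleted.len();
        assert_eq!(relocated, TextRange::new(28.into(), 32.into()));
    }
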
View file

@ -3,7 +3,7 @@
use ra_parser::Token as PToken; use ra_parser::Token as PToken;
use ra_parser::TokenSource; use ra_parser::TokenSource;
use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextUnit}; use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize};
pub(crate) struct TextTokenSource<'t> { pub(crate) struct TextTokenSource<'t> {
text: &'t str, text: &'t str,
@ -15,7 +15,7 @@ pub(crate) struct TextTokenSource<'t> {
/// 0 7 10 /// 0 7 10
/// ``` /// ```
/// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]` /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
start_offsets: Vec<TextUnit>, start_offsets: Vec<TextSize>,
/// non-whitespace/comment tokens /// non-whitespace/comment tokens
/// ```non-rust /// ```non-rust
/// struct Foo {} /// struct Foo {}
@ -51,12 +51,12 @@ impl<'t> TokenSource for TextTokenSource<'t> {
if pos >= self.tokens.len() { if pos >= self.tokens.len() {
return false; return false;
} }
let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len); let range = TextRange::at(self.start_offsets[pos], self.tokens[pos].len);
self.text[range] == *kw self.text[range] == *kw
} }
} }
fn mk_token(pos: usize, start_offsets: &[TextUnit], tokens: &[Token]) -> PToken { fn mk_token(pos: usize, start_offsets: &[TextSize], tokens: &[Token]) -> PToken {
let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF); let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF);
let is_jointed_to_next = if pos + 1 < start_offsets.len() { let is_jointed_to_next = if pos + 1 < start_offsets.len() {
start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1] start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1]

View file

@ -9,7 +9,7 @@ use crate::{
syntax_node::GreenNode, syntax_node::GreenNode,
SmolStr, SyntaxError, SmolStr, SyntaxError,
SyntaxKind::{self, *}, SyntaxKind::{self, *},
SyntaxTreeBuilder, TextRange, TextUnit, SyntaxTreeBuilder, TextRange, TextSize,
}; };
/// Bridges the parser with our specific syntax tree representation. /// Bridges the parser with our specific syntax tree representation.
@ -18,7 +18,7 @@ use crate::{
pub(crate) struct TextTreeSink<'a> { pub(crate) struct TextTreeSink<'a> {
text: &'a str, text: &'a str,
tokens: &'a [Token], tokens: &'a [Token],
text_pos: TextUnit, text_pos: TextSize,
token_pos: usize, token_pos: usize,
state: State, state: State,
inner: SyntaxTreeBuilder, inner: SyntaxTreeBuilder,
@ -42,7 +42,7 @@ impl<'a> TreeSink for TextTreeSink<'a> {
let len = self.tokens[self.token_pos..self.token_pos + n_tokens] let len = self.tokens[self.token_pos..self.token_pos + n_tokens]
.iter() .iter()
.map(|it| it.len) .map(|it| it.len)
.sum::<TextUnit>(); .sum::<TextSize>();
self.do_token(kind, len, n_tokens); self.do_token(kind, len, n_tokens);
} }
@ -62,12 +62,12 @@ impl<'a> TreeSink for TextTreeSink<'a> {
self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
let mut trivia_end = let mut trivia_end =
self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>(); self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextSize>();
let n_attached_trivias = { let n_attached_trivias = {
let leading_trivias = leading_trivias.iter().rev().map(|it| { let leading_trivias = leading_trivias.iter().rev().map(|it| {
let next_end = trivia_end - it.len; let next_end = trivia_end - it.len;
let range = TextRange::from_to(next_end, trivia_end); let range = TextRange::new(next_end, trivia_end);
trivia_end = next_end; trivia_end = next_end;
(it.kind, &self.text[range]) (it.kind, &self.text[range])
}); });
@ -132,8 +132,8 @@ impl<'a> TextTreeSink<'a> {
} }
} }
fn do_token(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) { fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
let range = TextRange::offset_len(self.text_pos, len); let range = TextRange::at(self.text_pos, len);
let text: SmolStr = self.text[range].into(); let text: SmolStr = self.text[range].into();
self.text_pos += len; self.text_pos += len;
self.token_pos += n_tokens; self.token_pos += n_tokens;
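
The trivia walk above uses `TextRange::new(start, end)` (replacing `TextRange::from_to`) together with `TextSize` subtraction and its `Sum` impl. A hedged sketch with made-up token lengths:

```rust
use text_size::{TextRange, TextSize};

fn main() {
    // Hypothetical trailing-trivia lengths, summed the same way as `leading_trivias`.
    let lens = [TextSize::from(1u32), TextSize::from(2u32), TextSize::from(3u32)];
    let total: TextSize = lens.iter().copied().sum();
    assert_eq!(total, TextSize::from(6u32));

    // Walk the trivia back to front, as the reversed iterator above does.
    let mut end = TextSize::from(10u32);
    for len in lens.iter().rev().copied() {
        let start = end - len;                   // TextSize - TextSize -> TextSize
        let range = TextRange::new(start, end);  // 7..10, then 5..7, then 4..5
        assert_eq!(range.len(), len);
        end = start;
    }
}
```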

View file

@ -24,7 +24,7 @@ impl SyntaxNodePtr {
pub fn to_node(&self, root: &SyntaxNode) -> SyntaxNode { pub fn to_node(&self, root: &SyntaxNode) -> SyntaxNode {
assert!(root.parent().is_none()); assert!(root.parent().is_none());
successors(Some(root.clone()), |node| { successors(Some(root.clone()), |node| {
node.children().find(|it| self.range.is_subrange(&it.text_range())) node.children().find(|it| it.text_range().contains_range(self.range))
}) })
.find(|it| it.text_range() == self.range && it.kind() == self.kind) .find(|it| it.text_range() == self.range && it.kind() == self.kind)
.unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self)) .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
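
`is_subrange` becomes `contains_range`, with the receiver flipped: the enclosing range is now `self`. A quick sketch, assuming the new crate's API as used in this hunk:

```rust
use text_size::{TextRange, TextSize};

fn main() {
    let node_range = TextRange::new(TextSize::from(0u32), TextSize::from(10u32));
    let ptr_range = TextRange::at(TextSize::from(2u32), TextSize::from(3u32)); // 2..5

    // Old: ptr_range.is_subrange(&node_range)
    // New: the containing range is the receiver.
    assert!(node_range.contains_range(ptr_range));
    assert!(!ptr_range.contains_range(node_range));
}
```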

View file

@ -2,7 +2,7 @@
use std::fmt; use std::fmt;
use crate::{TextRange, TextUnit}; use crate::{TextRange, TextSize};
/// Represents the result of unsuccessful tokenization, parsing /// Represents the result of unsuccessful tokenization, parsing
/// or tree validation. /// or tree validation.
@ -23,8 +23,8 @@ impl SyntaxError {
pub fn new(message: impl Into<String>, range: TextRange) -> Self { pub fn new(message: impl Into<String>, range: TextRange) -> Self {
Self(message.into(), range) Self(message.into(), range)
} }
pub fn new_at_offset(message: impl Into<String>, offset: TextUnit) -> Self { pub fn new_at_offset(message: impl Into<String>, offset: TextSize) -> Self {
Self(message.into(), TextRange::offset_len(offset, 0.into())) Self(message.into(), TextRange::empty(offset))
} }
pub fn range(&self) -> TextRange { pub fn range(&self) -> TextRange {
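
`TextRange::empty(offset)` is the `text-size` spelling of a zero-length range at `offset`, replacing `TextRange::offset_len(offset, 0.into())`. A sketch:

```rust
use text_size::{TextRange, TextSize};

fn main() {
    let offset = TextSize::from(5u32);
    let empty = TextRange::empty(offset);
    assert_eq!(empty, TextRange::at(offset, TextSize::from(0u32)));
    assert_eq!(empty.start(), empty.end());
    assert_eq!(empty.len(), TextSize::from(0u32));
}
```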

View file

@ -8,7 +8,7 @@
use rowan::{GreenNodeBuilder, Language}; use rowan::{GreenNodeBuilder, Language};
use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextUnit}; use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
pub(crate) use rowan::{GreenNode, GreenToken}; pub(crate) use rowan::{GreenNode, GreenToken};
@ -69,7 +69,7 @@ impl SyntaxTreeBuilder {
self.inner.finish_node() self.inner.finish_node()
} }
pub fn error(&mut self, error: ra_parser::ParseError, text_pos: TextUnit) { pub fn error(&mut self, error: ra_parser::ParseError, text_pos: TextSize) {
self.errors.push(SyntaxError::new_at_offset(error.0, text_pos)) self.errors.push(SyntaxError::new_at_offset(error.0, text_pos))
} }
} }

View file

@ -5,7 +5,7 @@ use std::{
use test_utils::{collect_rust_files, dir_tests, project_dir, read_text}; use test_utils::{collect_rust_files, dir_tests, project_dir, read_text};
use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token}; use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token};
#[test] #[test]
fn lexer_tests() { fn lexer_tests() {
@ -121,12 +121,12 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
let mut acc = String::new(); let mut acc = String::new();
let mut offset = TextUnit::from_usize(0); let mut offset: TextSize = 0.into();
for token in tokens { for token in tokens {
let token_len = token.len; let token_len = token.len;
let token_text = &text[TextRange::offset_len(offset, token.len)]; let token_text = &text[TextRange::at(offset, token.len)];
offset += token.len; offset += token.len;
writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap(); writeln!(acc, "{:?} {:?} {:?}", token.kind, token_len, token_text).unwrap();
} }
for err in errors { for err in errors {
writeln!(acc, "> error{:?} token({:?}) msg({})", err.range(), &text[err.range()], err) writeln!(acc, "> error{:?} token({:?}) msg({})", err.range(), &text[err.range()], err)
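
The `{}` → `{:?}` change for `token_len` reflects that `TextSize` is now printed via `Debug`, and `TextUnit::from_usize(0)` becomes a plain `0.into()`. A sketch; that `Debug` renders the bare number is an assumption confirmed only by the unchanged `CHAR 1 ...` expected outputs further down:

```rust
use text_size::TextSize;

fn main() {
    let offset: TextSize = 0.into();     // replaces TextUnit::from_usize(0)
    let token_len = TextSize::of("\'");  // 1 byte
    // Debug formatting prints the raw number, so the "CHAR 1 ..." lines stay unchanged.
    assert_eq!(format!("{:?} {:?}", offset, token_len), "0 1");
}
```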

View file

@ -2,12 +2,14 @@
mod block; mod block;
use std::convert::TryFrom;
use rustc_lexer::unescape; use rustc_lexer::unescape;
use crate::{ use crate::{
ast, match_ast, AstNode, SyntaxError, ast, match_ast, AstNode, SyntaxError,
SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST_DEF, FN_DEF, INT_NUMBER, STRING, TYPE_ALIAS_DEF}, SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST_DEF, FN_DEF, INT_NUMBER, STRING, TYPE_ALIAS_DEF},
SyntaxNode, SyntaxToken, TextUnit, T, SyntaxNode, SyntaxToken, TextSize, T,
}; };
fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str { fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
@ -112,7 +114,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
// FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205) // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| { let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
let off = token.text_range().start() + TextUnit::from_usize(off + prefix_len); let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off)); acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
}; };
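
`TextSize` is backed by `u32`, so the old `TextUnit::from_usize` becomes a fallible `TryFrom<usize>` conversion, hence the new `use std::convert::TryFrom;` import in this file. A sketch with hypothetical `off`/`prefix_len` values:

```rust
use std::convert::TryFrom;
use text_size::TextSize;

fn main() {
    let prefix_len: usize = 2; // e.g. a literal prefix length (hypothetical value)
    let off: usize = 5;        // escape-error offset from the unescaper (hypothetical value)

    // Fails only if the combined offset does not fit in u32.
    let delta = TextSize::try_from(off + prefix_len).unwrap();
    assert_eq!(u32::from(delta), 7);
}
```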

View file

@ -1,2 +1,2 @@
CHAR 1 "\'" CHAR 1 "\'"
> error[0; 1) token("\'") msg(Missing trailing `'` symbol to terminate the character literal) > error0..1 token("\'") msg(Missing trailing `'` symbol to terminate the character literal)

View file

@ -1,2 +1,2 @@
CHAR 5 "\'🦀" CHAR 5 "\'🦀"
> error[0; 5) token("\'🦀") msg(Missing trailing `'` symbol to terminate the character literal) > error0..5 token("\'🦀") msg(Missing trailing `'` symbol to terminate the character literal)

View file

@ -1,2 +1,2 @@
CHAR 5 "\'\\x7f" CHAR 5 "\'\\x7f"
> error[0; 5) token("\'\\x7f") msg(Missing trailing `'` symbol to terminate the character literal) > error0..5 token("\'\\x7f") msg(Missing trailing `'` symbol to terminate the character literal)

View file

@ -1,2 +1,2 @@
CHAR 9 "\'\\u{20AA}" CHAR 9 "\'\\u{20AA}"
> error[0; 9) token("\'\\u{20AA}") msg(Missing trailing `'` symbol to terminate the character literal) > error0..9 token("\'\\u{20AA}") msg(Missing trailing `'` symbol to terminate the character literal)

View file

@ -1,2 +1,2 @@
CHAR 2 "\' " CHAR 2 "\' "
> error[0; 2) token("\' ") msg(Missing trailing `'` symbol to terminate the character literal) > error0..2 token("\' ") msg(Missing trailing `'` symbol to terminate the character literal)
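
The expected-output changes above are purely a formatting difference: `text_unit` printed ranges as `[start; end)`, while `text-size`'s `Debug` impl prints `start..end`. A quick check, grounded in the test data:

```rust
use text_size::{TextRange, TextSize};

fn main() {
    let range = TextRange::new(TextSize::from(0u32), TextSize::from(1u32));
    // text_unit would have rendered this as "[0; 1)".
    assert_eq!(format!("{:?}", range), "0..1");
}
```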

Some files were not shown because too many files have changed in this diff.