Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 04:53:34 +00:00
Simplify proto conversion
Trait based infra in conv.rs is significantly more complicated than what we actually need here.
This commit is contained in:
parent 4578154b60 · commit 1586bab0b9
8 changed files with 807 additions and 939 deletions
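The diff below replaces the generic `Conv`/`ConvWith`/`TryConvWith` traits from `conv.rs` with plainly named functions in `to_proto` and `from_proto` modules. The sketch here contrasts the two styles; it is a minimal, self-contained illustration, and the `LineIndex`, `TextSize`, and `Position` types are simplified stand-ins, not the real rust-analyzer or lsp_types definitions.

// Simplified stand-ins, for illustration only.
pub struct LineIndex;                                   // the real one maps offsets to line/column pairs
pub struct TextSize(pub u32);                           // a byte offset into a file
pub struct Position { pub line: u64, pub character: u64 }

// Before: one generic trait, implemented once per (type, context) pair.
pub trait ConvWith<CTX> {
    type Output;
    fn conv_with(self, ctx: CTX) -> Self::Output;
}

impl ConvWith<&LineIndex> for TextSize {
    type Output = Position;
    fn conv_with(self, _line_index: &LineIndex) -> Position {
        // The real impl asks the line index for a LineCol; hard-coded here.
        Position { line: 0, character: u64::from(self.0) }
    }
}

// After: a plain function with an explicit name and signature (to_proto style).
pub fn position(_line_index: &LineIndex, offset: TextSize) -> Position {
    Position { line: 0, character: u64::from(offset.0) }
}

fn main() {
    let line_index = LineIndex;
    let a = TextSize(5).conv_with(&line_index); // trait style: generic call, direction implicit
    let b = position(&line_index, TextSize(5)); // function style: conversion named at the call site
    assert_eq!((a.line, a.character), (b.line, b.character));
}

The function style trades a generic `.conv_with(ctx)` for call sites that name the exact conversion and its direction, which is the simplification the commit message refers to.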
@@ -11,7 +11,7 @@ use ra_syntax::{
    TextRange,
};
use crate::FileSymbol;
use crate::{FileRange, FileSymbol};
use super::short_label::ShortLabel;
@@ -22,10 +22,11 @@ use super::short_label::ShortLabel;
/// code, like a function or a struct, but this is not strictly required.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NavigationTarget {
    // FIXME: use FileRange?
    file_id: FileId,
    full_range: TextRange,
    name: SmolStr,
    kind: SyntaxKind,
    full_range: TextRange,
    focus_range: Option<TextRange>,
    container_name: Option<SmolStr>,
    description: Option<String>,
@@ -63,6 +64,10 @@ impl NavigationTarget {
        self.file_id
    }

    pub fn file_range(&self) -> FileRange {
        FileRange { file_id: self.file_id, range: self.full_range }
    }

    pub fn full_range(&self) -> TextRange {
        self.full_range
    }
@@ -75,6 +75,7 @@ impl TextEdit {
        self.indels.is_empty()
    }

    // FIXME: impl IntoIter instead
    pub fn as_indels(&self) -> &[Indel] {
        &self.indels
    }
@@ -1,726 +0,0 @@
//! Convenience module responsible for translating between rust-analyzer's types
//! and LSP types.

use lsp_types::{
    self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation,
    Location, LocationLink, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel,
    Position, Range, RenameFile, ResourceOp, SemanticTokenModifier, SemanticTokenType,
    SignatureInformation, SymbolKind, TextDocumentEdit, TextDocumentIdentifier, TextDocumentItem,
    TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit,
};
use ra_ide::{
    translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
    FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HighlightModifier, HighlightTag,
    InlayHint, InlayKind, InsertTextFormat, LineCol, LineIndex, NavigationTarget, RangeInfo,
    ReferenceAccess, Severity, SourceChange, SourceFileEdit,
};
use ra_syntax::{SyntaxKind, TextRange, TextSize};
use ra_text_edit::{Indel, TextEdit};
use ra_vfs::LineEndings;

use crate::{
    req,
    semantic_tokens::{self, ModifierSet, CONSTANT, CONTROL_FLOW, MUTABLE, UNSAFE},
    world::WorldSnapshot,
    Result,
};
use semantic_tokens::{
    ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, FORMAT_SPECIFIER, LIFETIME, TYPE_ALIAS, UNION,
    UNRESOLVED_REFERENCE,
};

pub trait Conv {
    type Output;
    fn conv(self) -> Self::Output;
}

pub trait ConvWith<CTX> {
    type Output;
    fn conv_with(self, ctx: CTX) -> Self::Output;
}

pub trait TryConvWith<CTX> {
    type Output;
    fn try_conv_with(self, ctx: CTX) -> Result<Self::Output>;
}

impl Conv for SyntaxKind {
    type Output = SymbolKind;

    fn conv(self) -> <Self as Conv>::Output {
        match self {
            SyntaxKind::FN_DEF => SymbolKind::Function,
            SyntaxKind::STRUCT_DEF => SymbolKind::Struct,
            SyntaxKind::ENUM_DEF => SymbolKind::Enum,
            SyntaxKind::ENUM_VARIANT => SymbolKind::EnumMember,
            SyntaxKind::TRAIT_DEF => SymbolKind::Interface,
            SyntaxKind::MACRO_CALL => SymbolKind::Function,
            SyntaxKind::MODULE => SymbolKind::Module,
            SyntaxKind::TYPE_ALIAS_DEF => SymbolKind::TypeParameter,
            SyntaxKind::RECORD_FIELD_DEF => SymbolKind::Field,
            SyntaxKind::STATIC_DEF => SymbolKind::Constant,
            SyntaxKind::CONST_DEF => SymbolKind::Constant,
            SyntaxKind::IMPL_DEF => SymbolKind::Object,
            _ => SymbolKind::Variable,
        }
    }
}

impl Conv for ReferenceAccess {
    type Output = ::lsp_types::DocumentHighlightKind;

    fn conv(self) -> Self::Output {
        use lsp_types::DocumentHighlightKind;
        match self {
            ReferenceAccess::Read => DocumentHighlightKind::Read,
            ReferenceAccess::Write => DocumentHighlightKind::Write,
        }
    }
}

impl Conv for CompletionItemKind {
    type Output = ::lsp_types::CompletionItemKind;

    fn conv(self) -> <Self as Conv>::Output {
        use lsp_types::CompletionItemKind::*;
        match self {
            CompletionItemKind::Keyword => Keyword,
            CompletionItemKind::Snippet => Snippet,
            CompletionItemKind::Module => Module,
            CompletionItemKind::Function => Function,
            CompletionItemKind::Struct => Struct,
            CompletionItemKind::Enum => Enum,
            CompletionItemKind::EnumVariant => EnumMember,
            CompletionItemKind::BuiltinType => Struct,
            CompletionItemKind::Binding => Variable,
            CompletionItemKind::Field => Field,
            CompletionItemKind::Trait => Interface,
            CompletionItemKind::TypeAlias => Struct,
            CompletionItemKind::Const => Constant,
            CompletionItemKind::Static => Value,
            CompletionItemKind::Method => Method,
            CompletionItemKind::TypeParam => TypeParameter,
            CompletionItemKind::Macro => Method,
            CompletionItemKind::Attribute => EnumMember,
        }
    }
}

impl Conv for Severity {
    type Output = DiagnosticSeverity;
    fn conv(self) -> DiagnosticSeverity {
        match self {
            Severity::Error => DiagnosticSeverity::Error,
            Severity::WeakWarning => DiagnosticSeverity::Hint,
        }
    }
}

impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
    type Output = ::lsp_types::CompletionItem;

    fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> ::lsp_types::CompletionItem {
        let mut additional_text_edits = Vec::new();
        let mut text_edit = None;
        // LSP does not allow arbitrary edits in completion, so we have to do a
        // non-trivial mapping here.
        for indel in self.text_edit().as_indels() {
            if indel.delete.contains_range(self.source_range()) {
                text_edit = Some(if indel.delete == self.source_range() {
                    indel.conv_with((ctx.0, ctx.1))
                } else {
                    assert!(self.source_range().end() == indel.delete.end());
                    let range1 = TextRange::new(indel.delete.start(), self.source_range().start());
                    let range2 = self.source_range();
                    let edit1 = Indel::replace(range1, String::new());
                    let edit2 = Indel::replace(range2, indel.insert.clone());
                    additional_text_edits.push(edit1.conv_with((ctx.0, ctx.1)));
                    edit2.conv_with((ctx.0, ctx.1))
                })
            } else {
                assert!(self.source_range().intersect(indel.delete).is_none());
                additional_text_edits.push(indel.conv_with((ctx.0, ctx.1)));
            }
        }
        let text_edit = text_edit.unwrap();

        let mut res = lsp_types::CompletionItem {
            label: self.label().to_string(),
            detail: self.detail().map(|it| it.to_string()),
            filter_text: Some(self.lookup().to_string()),
            kind: self.kind().map(|it| it.conv()),
            text_edit: Some(text_edit.into()),
            additional_text_edits: Some(additional_text_edits),
            documentation: self.documentation().map(|it| it.conv()),
            deprecated: Some(self.deprecated()),
            command: if self.trigger_call_info() {
                let cmd = lsp_types::Command {
                    title: "triggerParameterHints".into(),
                    command: "editor.action.triggerParameterHints".into(),
                    arguments: None,
                };
                Some(cmd)
            } else {
                None
            },
            ..Default::default()
        };

        if self.score().is_some() {
            res.preselect = Some(true)
        }

        if self.deprecated() {
            res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated])
        }

        res.insert_text_format = Some(match self.insert_text_format() {
            InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet,
            InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText,
        });

        res
    }
}

impl ConvWith<&LineIndex> for Position {
    type Output = TextSize;

    fn conv_with(self, line_index: &LineIndex) -> TextSize {
        let line_col = LineCol { line: self.line as u32, col_utf16: self.character as u32 };
        line_index.offset(line_col)
    }
}

impl ConvWith<&LineIndex> for TextSize {
    type Output = Position;

    fn conv_with(self, line_index: &LineIndex) -> Position {
        let line_col = line_index.line_col(self);
        Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16))
    }
}

impl ConvWith<&LineIndex> for TextRange {
    type Output = Range;

    fn conv_with(self, line_index: &LineIndex) -> Range {
        Range::new(self.start().conv_with(line_index), self.end().conv_with(line_index))
    }
}

impl ConvWith<&LineIndex> for Range {
    type Output = TextRange;

    fn conv_with(self, line_index: &LineIndex) -> TextRange {
        TextRange::new(self.start.conv_with(line_index), self.end.conv_with(line_index))
    }
}

impl Conv for ra_ide::Documentation {
    type Output = lsp_types::Documentation;
    fn conv(self) -> Documentation {
        Documentation::MarkupContent(MarkupContent {
            kind: MarkupKind::Markdown,
            value: crate::markdown::format_docs(self.as_str()),
        })
    }
}

impl ConvWith<bool> for ra_ide::FunctionSignature {
    type Output = lsp_types::SignatureInformation;
    fn conv_with(self, concise: bool) -> Self::Output {
        let (label, documentation, params) = if concise {
            let mut params = self.parameters;
            if self.has_self_param {
                params.remove(0);
            }
            (params.join(", "), None, params)
        } else {
            (self.to_string(), self.doc.map(|it| it.conv()), self.parameters)
        };

        let parameters: Vec<ParameterInformation> = params
            .into_iter()
            .map(|param| ParameterInformation {
                label: ParameterLabel::Simple(param),
                documentation: None,
            })
            .collect();

        SignatureInformation { label, documentation, parameters: Some(parameters) }
    }
}

impl ConvWith<(&LineIndex, LineEndings)> for TextEdit {
    type Output = Vec<lsp_types::TextEdit>;

    fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> Vec<lsp_types::TextEdit> {
        self.as_indels().iter().map_conv_with(ctx).collect()
    }
}

impl ConvWith<(&LineIndex, LineEndings)> for &Indel {
    type Output = lsp_types::TextEdit;

    fn conv_with(
        self,
        (line_index, line_endings): (&LineIndex, LineEndings),
    ) -> lsp_types::TextEdit {
        let mut new_text = self.insert.clone();
        if line_endings == LineEndings::Dos {
            new_text = new_text.replace('\n', "\r\n");
        }
        lsp_types::TextEdit { range: self.delete.conv_with(line_index), new_text }
    }
}

pub(crate) struct FoldConvCtx<'a> {
    pub(crate) text: &'a str,
    pub(crate) line_index: &'a LineIndex,
    pub(crate) line_folding_only: bool,
}

impl ConvWith<&FoldConvCtx<'_>> for Fold {
    type Output = lsp_types::FoldingRange;

    fn conv_with(self, ctx: &FoldConvCtx) -> lsp_types::FoldingRange {
        let kind = match self.kind {
            FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
            FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
            FoldKind::Mods => None,
            FoldKind::Block => None,
        };

        let range = self.range.conv_with(&ctx.line_index);

        if ctx.line_folding_only {
            // Clients with line_folding_only == true (such as VSCode) will fold the whole end line
            // even if it contains text not in the folding range. To prevent that we exclude
            // range.end.line from the folding region if there is more text after range.end
            // on the same line.
            let has_more_text_on_end_line = ctx.text
                [TextRange::new(self.range.end(), TextSize::of(ctx.text))]
                .chars()
                .take_while(|it| *it != '\n')
                .any(|it| !it.is_whitespace());

            let end_line = if has_more_text_on_end_line {
                range.end.line.saturating_sub(1)
            } else {
                range.end.line
            };

            lsp_types::FoldingRange {
                start_line: range.start.line,
                start_character: None,
                end_line,
                end_character: None,
                kind,
            }
        } else {
            lsp_types::FoldingRange {
                start_line: range.start.line,
                start_character: Some(range.start.character),
                end_line: range.end.line,
                end_character: Some(range.end.character),
                kind,
            }
        }
    }
}

impl ConvWith<&LineIndex> for InlayHint {
    type Output = req::InlayHint;
    fn conv_with(self, line_index: &LineIndex) -> Self::Output {
        req::InlayHint {
            label: self.label.to_string(),
            range: self.range.conv_with(line_index),
            kind: match self.kind {
                InlayKind::ParameterHint => req::InlayKind::ParameterHint,
                InlayKind::TypeHint => req::InlayKind::TypeHint,
                InlayKind::ChainingHint => req::InlayKind::ChainingHint,
            },
        }
    }
}

impl Conv for Highlight {
    type Output = (u32, u32);

    fn conv(self) -> Self::Output {
        let mut mods = ModifierSet::default();
        let type_ = match self.tag {
            HighlightTag::Struct => SemanticTokenType::STRUCT,
            HighlightTag::Enum => SemanticTokenType::ENUM,
            HighlightTag::Union => UNION,
            HighlightTag::TypeAlias => TYPE_ALIAS,
            HighlightTag::Trait => SemanticTokenType::INTERFACE,
            HighlightTag::BuiltinType => BUILTIN_TYPE,
            HighlightTag::SelfType => SemanticTokenType::TYPE,
            HighlightTag::Field => SemanticTokenType::MEMBER,
            HighlightTag::Function => SemanticTokenType::FUNCTION,
            HighlightTag::Module => SemanticTokenType::NAMESPACE,
            HighlightTag::Constant => {
                mods |= CONSTANT;
                mods |= SemanticTokenModifier::STATIC;
                SemanticTokenType::VARIABLE
            }
            HighlightTag::Static => {
                mods |= SemanticTokenModifier::STATIC;
                SemanticTokenType::VARIABLE
            }
            HighlightTag::EnumVariant => ENUM_MEMBER,
            HighlightTag::Macro => SemanticTokenType::MACRO,
            HighlightTag::Local => SemanticTokenType::VARIABLE,
            HighlightTag::TypeParam => SemanticTokenType::TYPE_PARAMETER,
            HighlightTag::Lifetime => LIFETIME,
            HighlightTag::ByteLiteral | HighlightTag::NumericLiteral => SemanticTokenType::NUMBER,
            HighlightTag::CharLiteral | HighlightTag::StringLiteral => SemanticTokenType::STRING,
            HighlightTag::Comment => SemanticTokenType::COMMENT,
            HighlightTag::Attribute => ATTRIBUTE,
            HighlightTag::Keyword => SemanticTokenType::KEYWORD,
            HighlightTag::UnresolvedReference => UNRESOLVED_REFERENCE,
            HighlightTag::FormatSpecifier => FORMAT_SPECIFIER,
        };

        for modifier in self.modifiers.iter() {
            let modifier = match modifier {
                HighlightModifier::Definition => SemanticTokenModifier::DECLARATION,
                HighlightModifier::ControlFlow => CONTROL_FLOW,
                HighlightModifier::Mutable => MUTABLE,
                HighlightModifier::Unsafe => UNSAFE,
            };
            mods |= modifier;
        }

        (semantic_tokens::type_index(type_), mods.0)
    }
}

impl<T: ConvWith<CTX>, CTX> ConvWith<CTX> for Option<T> {
    type Output = Option<T::Output>;

    fn conv_with(self, ctx: CTX) -> Self::Output {
        self.map(|x| ConvWith::conv_with(x, ctx))
    }
}

impl TryConvWith<&WorldSnapshot> for &Url {
    type Output = FileId;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
        world.uri_to_file_id(self)
    }
}

impl TryConvWith<&WorldSnapshot> for FileId {
    type Output = Url;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<Url> {
        world.file_id_to_uri(self)
    }
}

impl TryConvWith<&WorldSnapshot> for &TextDocumentItem {
    type Output = FileId;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
        self.uri.try_conv_with(world)
    }
}

impl TryConvWith<&WorldSnapshot> for &VersionedTextDocumentIdentifier {
    type Output = FileId;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
        self.uri.try_conv_with(world)
    }
}

impl TryConvWith<&WorldSnapshot> for &TextDocumentIdentifier {
    type Output = FileId;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
        world.uri_to_file_id(&self.uri)
    }
}

impl TryConvWith<&WorldSnapshot> for &TextDocumentPositionParams {
    type Output = FilePosition;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<FilePosition> {
        let file_id = self.text_document.try_conv_with(world)?;
        let line_index = world.analysis().file_line_index(file_id)?;
        let offset = self.position.conv_with(&line_index);
        Ok(FilePosition { file_id, offset })
    }
}

impl TryConvWith<&WorldSnapshot> for (&TextDocumentIdentifier, Range) {
    type Output = FileRange;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileRange> {
        let file_id = self.0.try_conv_with(world)?;
        let line_index = world.analysis().file_line_index(file_id)?;
        let range = self.1.conv_with(&line_index);
        Ok(FileRange { file_id, range })
    }
}

impl<T: TryConvWith<CTX>, CTX: Copy> TryConvWith<CTX> for Vec<T> {
    type Output = Vec<<T as TryConvWith<CTX>>::Output>;
    fn try_conv_with(self, ctx: CTX) -> Result<Self::Output> {
        let mut res = Vec::with_capacity(self.len());
        for item in self {
            res.push(item.try_conv_with(ctx)?);
        }
        Ok(res)
    }
}

impl TryConvWith<&WorldSnapshot> for SourceChange {
    type Output = req::SourceChange;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::SourceChange> {
        let cursor_position = match self.cursor_position {
            None => None,
            Some(pos) => {
                let line_index = world.analysis().file_line_index(pos.file_id)?;
                let edit = self
                    .source_file_edits
                    .iter()
                    .find(|it| it.file_id == pos.file_id)
                    .map(|it| &it.edit);
                let line_col = match edit {
                    Some(edit) => translate_offset_with_edit(&*line_index, pos.offset, edit),
                    None => line_index.line_col(pos.offset),
                };
                let position =
                    Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16));
                Some(TextDocumentPositionParams {
                    text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?),
                    position,
                })
            }
        };
        let mut document_changes: Vec<DocumentChangeOperation> = Vec::new();
        for resource_op in self.file_system_edits.try_conv_with(world)? {
            document_changes.push(DocumentChangeOperation::Op(resource_op));
        }
        for text_document_edit in self.source_file_edits.try_conv_with(world)? {
            document_changes.push(DocumentChangeOperation::Edit(text_document_edit));
        }
        let workspace_edit = WorkspaceEdit {
            changes: None,
            document_changes: Some(DocumentChanges::Operations(document_changes)),
        };
        Ok(req::SourceChange { label: self.label, workspace_edit, cursor_position })
    }
}

impl TryConvWith<&WorldSnapshot> for SourceFileEdit {
    type Output = TextDocumentEdit;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<TextDocumentEdit> {
        let text_document = VersionedTextDocumentIdentifier {
            uri: self.file_id.try_conv_with(world)?,
            version: None,
        };
        let line_index = world.analysis().file_line_index(self.file_id)?;
        let line_endings = world.file_line_endings(self.file_id);
        let edits =
            self.edit.as_indels().iter().map_conv_with((&line_index, line_endings)).collect();
        Ok(TextDocumentEdit { text_document, edits })
    }
}

impl TryConvWith<&WorldSnapshot> for FileSystemEdit {
    type Output = ResourceOp;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<ResourceOp> {
        let res = match self {
            FileSystemEdit::CreateFile { source_root, path } => {
                let uri = world.path_to_uri(source_root, &path)?;
                ResourceOp::Create(CreateFile { uri, options: None })
            }
            FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
                let old_uri = world.file_id_to_uri(src)?;
                let new_uri = world.path_to_uri(dst_source_root, &dst_path)?;
                ResourceOp::Rename(RenameFile { old_uri, new_uri, options: None })
            }
        };
        Ok(res)
    }
}

impl TryConvWith<&WorldSnapshot> for &NavigationTarget {
    type Output = Location;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<Location> {
        let line_index = world.analysis().file_line_index(self.file_id())?;
        let range = self.range();
        to_location(self.file_id(), range, &world, &line_index)
    }
}

impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<NavigationTarget>) {
    type Output = LocationLink;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<LocationLink> {
        let (src_file_id, target) = self;

        let target_uri = target.info.file_id().try_conv_with(world)?;
        let src_line_index = world.analysis().file_line_index(src_file_id)?;
        let tgt_line_index = world.analysis().file_line_index(target.info.file_id())?;

        let target_range = target.info.full_range().conv_with(&tgt_line_index);

        let target_selection_range = target
            .info
            .focus_range()
            .map(|it| it.conv_with(&tgt_line_index))
            .unwrap_or(target_range);

        let res = LocationLink {
            origin_selection_range: Some(target.range.conv_with(&src_line_index)),
            target_uri,
            target_range,
            target_selection_range,
        };
        Ok(res)
    }
}

impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<Vec<NavigationTarget>>) {
    type Output = req::GotoDefinitionResponse;
    fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::GotoTypeDefinitionResponse> {
        let (file_id, RangeInfo { range, info: navs }) = self;
        let links = navs
            .into_iter()
            .map(|nav| (file_id, RangeInfo::new(range, nav)))
            .try_conv_with_to_vec(world)?;
        if world.config.client_caps.location_link {
            Ok(links.into())
        } else {
            let locations: Vec<Location> = links
                .into_iter()
                .map(|link| Location { uri: link.target_uri, range: link.target_selection_range })
                .collect();
            Ok(locations.into())
        }
    }
}

pub fn to_call_hierarchy_item(
    file_id: FileId,
    range: TextRange,
    world: &WorldSnapshot,
    line_index: &LineIndex,
    nav: NavigationTarget,
) -> Result<lsp_types::CallHierarchyItem> {
    Ok(lsp_types::CallHierarchyItem {
        name: nav.name().to_string(),
        kind: nav.kind().conv(),
        tags: None,
        detail: nav.description().map(|it| it.to_string()),
        uri: file_id.try_conv_with(&world)?,
        range: nav.range().conv_with(&line_index),
        selection_range: range.conv_with(&line_index),
    })
}

pub fn to_location(
    file_id: FileId,
    range: TextRange,
    world: &WorldSnapshot,
    line_index: &LineIndex,
) -> Result<Location> {
    let url = file_id.try_conv_with(world)?;
    let loc = Location::new(url, range.conv_with(line_index));
    Ok(loc)
}

pub trait MapConvWith<CTX>: Sized {
    type Output;

    fn map_conv_with(self, ctx: CTX) -> ConvWithIter<Self, CTX> {
        ConvWithIter { iter: self, ctx }
    }
}

impl<CTX, I> MapConvWith<CTX> for I
where
    I: Iterator,
    I::Item: ConvWith<CTX>,
{
    type Output = <I::Item as ConvWith<CTX>>::Output;
}

pub struct ConvWithIter<I, CTX> {
    iter: I,
    ctx: CTX,
}

impl<I, CTX> Iterator for ConvWithIter<I, CTX>
where
    I: Iterator,
    I::Item: ConvWith<CTX>,
    CTX: Copy,
{
    type Item = <I::Item as ConvWith<CTX>>::Output;

    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|item| item.conv_with(self.ctx))
    }
}

pub trait TryConvWithToVec<CTX>: Sized {
    type Output;

    fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>>;
}

impl<I, CTX> TryConvWithToVec<CTX> for I
where
    I: Iterator,
    I::Item: TryConvWith<CTX>,
    CTX: Copy,
{
    type Output = <I::Item as TryConvWith<CTX>>::Output;

    fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>> {
        self.map(|it| it.try_conv_with(ctx)).collect()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use test_utils::extract_ranges;

    #[test]
    fn conv_fold_line_folding_only_fixup() {
        let text = r#"<fold>mod a;
mod b;
mod c;</fold>

fn main() <fold>{
    if cond <fold>{
        a::do_a();
    }</fold> else <fold>{
        b::do_b();
    }</fold>
}</fold>"#;

        let (ranges, text) = extract_ranges(text, "fold");
        assert_eq!(ranges.len(), 4);
        let folds = vec![
            Fold { range: ranges[0], kind: FoldKind::Mods },
            Fold { range: ranges[1], kind: FoldKind::Block },
            Fold { range: ranges[2], kind: FoldKind::Block },
            Fold { range: ranges[3], kind: FoldKind::Block },
        ];

        let line_index = LineIndex::new(&text);
        let ctx = FoldConvCtx { text: &text, line_index: &line_index, line_folding_only: true };
        let converted: Vec<_> = folds.into_iter().map_conv_with(&ctx).collect();

        let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
        assert_eq!(converted.len(), expected_lines.len());
        for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
            assert_eq!(folding_range.start_line, *start_line);
            assert_eq!(folding_range.start_character, None);
            assert_eq!(folding_range.end_line, *end_line);
            assert_eq!(folding_range.end_character, None);
        }
    }
}
crates/rust-analyzer/src/from_proto.rs (new file, 42 lines)
@@ -0,0 +1,42 @@
//! Conversion lsp_types types to rust-analyzer specific ones.
use ra_db::{FileId, FilePosition, FileRange};
use ra_ide::{LineCol, LineIndex};
use ra_syntax::{TextRange, TextSize};

use crate::{world::WorldSnapshot, Result};

pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize {
    let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 };
    line_index.offset(line_col)
}

pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> TextRange {
    let start = offset(line_index, range.start);
    let end = offset(line_index, range.end);
    TextRange::new(start, end)
}

pub(crate) fn file_id(world: &WorldSnapshot, url: &lsp_types::Url) -> Result<FileId> {
    world.uri_to_file_id(url)
}

pub(crate) fn file_position(
    world: &WorldSnapshot,
    tdpp: lsp_types::TextDocumentPositionParams,
) -> Result<FilePosition> {
    let file_id = file_id(world, &tdpp.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let offset = offset(&*line_index, tdpp.position);
    Ok(FilePosition { file_id, offset })
}

pub(crate) fn file_range(
    world: &WorldSnapshot,
    text_document_identifier: lsp_types::TextDocumentIdentifier,
    range: lsp_types::Range,
) -> Result<FileRange> {
    let file_id = file_id(world, &text_document_identifier.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let range = text_range(&line_index, range);
    Ok(FileRange { file_id, range })
}
@@ -20,9 +20,11 @@ macro_rules! eprintln {
mod vfs_glob;
mod caps;
mod cargo_target_spec;
mod conv;
mod to_proto;
mod from_proto;
mod main_loop;
mod markdown;
// TODO: rename to lsp_ext
pub mod req;
pub mod config;
mod world;
@@ -37,8 +37,8 @@ use threadpool::ThreadPool;

use crate::{
    config::{Config, FilesWatcher},
    conv::{ConvWith, TryConvWith},
    diagnostics::DiagnosticTask,
    from_proto,
    main_loop::{
        pending_requests::{PendingRequest, PendingRequests},
        subscriptions::Subscriptions,
@@ -584,7 +584,7 @@ fn on_notification(
        Ok(params) => {
            let DidChangeTextDocumentParams { text_document, content_changes } = params;
            let world = state.snapshot();
            let file_id = text_document.try_conv_with(&world)?;
            let file_id = from_proto::file_id(&world, &text_document.uri)?;
            let line_index = world.analysis().file_line_index(file_id)?;
            let uri = text_document.uri;
            let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
@@ -694,7 +694,7 @@ fn apply_document_changes(
            line_index = Cow::Owned(LineIndex::new(&old_text));
        }
        index_valid = IndexValid::UpToLineExclusive(range.start.line);
        let range = range.conv_with(&line_index);
        let range = from_proto::text_range(&line_index, range);
        let mut text = old_text.to_owned();
        match std::panic::catch_unwind(move || {
            text.replace_range(Range::<usize>::from(range), &change.text);
@@ -22,6 +22,7 @@ use ra_ide::{
    Assist, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
};
use ra_prof::profile;
use ra_project_model::TargetKind;
use ra_syntax::{AstNode, SyntaxKind, TextRange, TextSize};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
@@ -31,18 +32,14 @@ use stdx::format_to;
use crate::{
    cargo_target_spec::CargoTargetSpec,
    config::RustfmtConfig,
    conv::{
        to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith,
        TryConvWithToVec,
    },
    diagnostics::DiagnosticTask,
    from_json,
    from_json, from_proto,
    req::{self, InlayHint, InlayHintsParams},
    semantic_tokens::SemanticTokensBuilder,
    to_proto,
    world::WorldSnapshot,
    LspError, Result,
};
use ra_project_model::TargetKind;

pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> {
    let _p = profile("handle_analyzer_status");
@@ -58,9 +55,9 @@ pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> {

pub fn handle_syntax_tree(world: WorldSnapshot, params: req::SyntaxTreeParams) -> Result<String> {
    let _p = profile("handle_syntax_tree");
    let id = params.text_document.try_conv_with(&world)?;
    let id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(id)?;
    let text_range = params.range.map(|p| p.conv_with(&line_index));
    let text_range = params.range.map(|r| from_proto::text_range(&line_index, r));
    let res = world.analysis().syntax_tree(id, text_range)?;
    Ok(res)
}
@@ -70,9 +67,9 @@ pub fn handle_expand_macro(
    params: req::ExpandMacroParams,
) -> Result<Option<req::ExpandedMacro>> {
    let _p = profile("handle_expand_macro");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let offset = params.position.map(|p| p.conv_with(&line_index));
    let offset = params.position.map(|p| from_proto::offset(&line_index, p));

    match offset {
        None => Ok(None),
@@ -88,16 +85,16 @@ pub fn handle_selection_range(
    params: req::SelectionRangeParams,
) -> Result<Option<Vec<req::SelectionRange>>> {
    let _p = profile("handle_selection_range");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let res: Result<Vec<req::SelectionRange>> = params
        .positions
        .into_iter()
        .map_conv_with(&line_index)
        .map(|position| {
            let offset = from_proto::offset(&line_index, position);
            let mut ranges = Vec::new();
            {
                let mut range = TextRange::new(position, position);
                let mut range = TextRange::new(offset, offset);
                loop {
                    ranges.push(range);
                    let frange = FileRange { file_id, range };
@@ -110,12 +107,12 @@ pub fn handle_selection_range(
            }
        }
        let mut range = req::SelectionRange {
            range: ranges.last().unwrap().conv_with(&line_index),
            range: to_proto::range(&line_index, *ranges.last().unwrap()),
            parent: None,
        };
        for r in ranges.iter().rev().skip(1) {
        for &r in ranges.iter().rev().skip(1) {
            range = req::SelectionRange {
                range: r.conv_with(&line_index),
                range: to_proto::range(&line_index, r),
                parent: Some(Box::new(range)),
            }
        }
@@ -131,22 +128,19 @@ pub fn handle_find_matching_brace(
    params: req::FindMatchingBraceParams,
) -> Result<Vec<Position>> {
    let _p = profile("handle_find_matching_brace");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let res = params
        .offsets
        .into_iter()
        .map_conv_with(&line_index)
        .map(|offset| {
            if let Ok(Some(matching_brace_offset)) =
                world.analysis().matching_brace(FilePosition { file_id, offset })
            {
                matching_brace_offset
            } else {
                offset
            }
        .map(|position| {
            let offset = from_proto::offset(&line_index, position);
            let offset = match world.analysis().matching_brace(FilePosition { file_id, offset }) {
                Ok(Some(matching_brace_offset)) => matching_brace_offset,
                Err(_) | Ok(None) => offset,
            };
            to_proto::position(&line_index, offset)
        })
        .map_conv_with(&line_index)
        .collect();
    Ok(res)
}
@@ -156,8 +150,9 @@ pub fn handle_join_lines(
    params: req::JoinLinesParams,
) -> Result<req::SourceChange> {
    let _p = profile("handle_join_lines");
    let frange = (&params.text_document, params.range).try_conv_with(&world)?;
    world.analysis().join_lines(frange)?.try_conv_with(&world)
    let frange = from_proto::file_range(&world, params.text_document, params.range)?;
    let source_change = world.analysis().join_lines(frange)?;
    to_proto::source_change(&world, source_change)
}

pub fn handle_on_enter(
@@ -165,10 +160,10 @@ pub fn handle_on_enter(
    params: req::TextDocumentPositionParams,
) -> Result<Option<req::SourceChange>> {
    let _p = profile("handle_on_enter");
    let position = params.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params)?;
    match world.analysis().on_enter(position)? {
        None => Ok(None),
        Some(edit) => Ok(Some(edit.try_conv_with(&world)?)),
        Some(source_change) => to_proto::source_change(&world, source_change).map(Some),
    }
}

@@ -178,7 +173,7 @@ pub fn handle_on_type_formatting(
    params: req::DocumentOnTypeFormattingParams,
) -> Result<Option<Vec<TextEdit>>> {
    let _p = profile("handle_on_type_formatting");
    let mut position = params.text_document_position.try_conv_with(&world)?;
    let mut position = from_proto::file_position(&world, params.text_document_position)?;
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let line_endings = world.file_line_endings(position.file_id);

@@ -208,7 +203,7 @@ pub fn handle_on_type_formatting(
    // This should be a single-file edit
    let edit = edit.source_file_edits.pop().unwrap();

    let change: Vec<TextEdit> = edit.edit.conv_with((&line_index, line_endings));
    let change = to_proto::text_edit_vec(&line_index, line_endings, edit.edit);
    Ok(Some(change))
}

@@ -217,9 +212,8 @@ pub fn handle_document_symbol(
    params: req::DocumentSymbolParams,
) -> Result<Option<req::DocumentSymbolResponse>> {
    let _p = profile("handle_document_symbol");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let url = file_id.try_conv_with(&world)?;

    let mut parents: Vec<(DocumentSymbol, Option<usize>)> = Vec::new();

@@ -227,10 +221,10 @@ pub fn handle_document_symbol(
        let doc_symbol = DocumentSymbol {
            name: symbol.label,
            detail: symbol.detail,
            kind: symbol.kind.conv(),
            kind: to_proto::symbol_kind(symbol.kind),
            deprecated: Some(symbol.deprecated),
            range: symbol.node_range.conv_with(&line_index),
            selection_range: symbol.navigation_range.conv_with(&line_index),
            range: to_proto::range(&line_index, symbol.node_range),
            selection_range: to_proto::range(&line_index, symbol.navigation_range),
            children: None,
        };
        parents.push((doc_symbol, symbol.parent));
@@ -249,34 +243,35 @@ pub fn handle_document_symbol(
        }
    }

    if world.config.client_caps.hierarchical_symbols {
        Ok(Some(document_symbols.into()))
    let res = if world.config.client_caps.hierarchical_symbols {
        document_symbols.into()
    } else {
        let url = to_proto::url(&world, file_id)?;
        let mut symbol_information = Vec::<SymbolInformation>::new();
        for symbol in document_symbols {
            flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
        }
        symbol_information.into()
    };
    return Ok(Some(res));

        Ok(Some(symbol_information.into()))
    }
}
fn flatten_document_symbol(
    symbol: &DocumentSymbol,
    container_name: Option<String>,
    url: &Url,
    res: &mut Vec<SymbolInformation>,
) {
    res.push(SymbolInformation {
        name: symbol.name.clone(),
        kind: symbol.kind,
        deprecated: symbol.deprecated,
        location: Location::new(url.clone(), symbol.range),
        container_name: container_name,
    });

    fn flatten_document_symbol(
        symbol: &DocumentSymbol,
        container_name: Option<String>,
        url: &Url,
        res: &mut Vec<SymbolInformation>,
    ) {
        res.push(SymbolInformation {
            name: symbol.name.clone(),
            kind: symbol.kind,
            deprecated: symbol.deprecated,
            location: Location::new(url.clone(), symbol.range),
            container_name: container_name,
        });

    for child in symbol.children.iter().flatten() {
        flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
        for child in symbol.children.iter().flatten() {
            flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
        }
    }
}
@@ -313,8 +308,8 @@ pub fn handle_workspace_symbol(
    for nav in world.analysis().symbol_search(query)? {
        let info = SymbolInformation {
            name: nav.name().to_string(),
            kind: nav.kind().conv(),
            location: nav.try_conv_with(world)?,
            kind: to_proto::symbol_kind(nav.kind()),
            location: to_proto::location(world, nav.file_range())?,
            container_name: nav.container_name().map(|v| v.to_string()),
            deprecated: None,
        };
@@ -329,12 +324,16 @@ pub fn handle_goto_definition(
    params: req::GotoDefinitionParams,
) -> Result<Option<req::GotoDefinitionResponse>> {
    let _p = profile("handle_goto_definition");
    let position = params.text_document_position_params.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let nav_info = match world.analysis().goto_definition(position)? {
        None => return Ok(None),
        Some(it) => it,
    };
    let res = (position.file_id, nav_info).try_conv_with(&world)?;
    let res = to_proto::goto_definition_response(
        &world,
        FileRange { file_id: position.file_id, range: nav_info.range },
        nav_info.info,
    )?;
    Ok(Some(res))
}

@@ -343,12 +342,16 @@ pub fn handle_goto_implementation(
    params: req::GotoImplementationParams,
) -> Result<Option<req::GotoImplementationResponse>> {
    let _p = profile("handle_goto_implementation");
    let position = params.text_document_position_params.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let nav_info = match world.analysis().goto_implementation(position)? {
        None => return Ok(None),
        Some(it) => it,
    };
    let res = (position.file_id, nav_info).try_conv_with(&world)?;
    let res = to_proto::goto_definition_response(
        &world,
        FileRange { file_id: position.file_id, range: nav_info.range },
        nav_info.info,
    )?;
    Ok(Some(res))
}

@@ -357,12 +360,16 @@ pub fn handle_goto_type_definition(
    params: req::GotoTypeDefinitionParams,
) -> Result<Option<req::GotoTypeDefinitionResponse>> {
    let _p = profile("handle_goto_type_definition");
    let position = params.text_document_position_params.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let nav_info = match world.analysis().goto_type_definition(position)? {
        None => return Ok(None),
        Some(it) => it,
    };
    let res = (position.file_id, nav_info).try_conv_with(&world)?;
    let res = to_proto::goto_definition_response(
        &world,
        FileRange { file_id: position.file_id, range: nav_info.range },
        nav_info.info,
    )?;
    Ok(Some(res))
}

@@ -371,8 +378,13 @@ pub fn handle_parent_module(
    params: req::TextDocumentPositionParams,
) -> Result<Vec<Location>> {
    let _p = profile("handle_parent_module");
    let position = params.try_conv_with(&world)?;
    world.analysis().parent_module(position)?.iter().try_conv_with_to_vec(&world)
    let position = from_proto::file_position(&world, params)?;
    world
        .analysis()
        .parent_module(position)?
        .into_iter()
        .map(|it| to_proto::location(&world, it.file_range()))
        .collect::<Result<Vec<_>>>()
}

pub fn handle_runnables(
@@ -380,9 +392,9 @@ pub fn handle_runnables(
    params: req::RunnablesParams,
) -> Result<Vec<req::Runnable>> {
    let _p = profile("handle_runnables");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let offset = params.position.map(|it| it.conv_with(&line_index));
    let offset = params.position.map(|it| from_proto::offset(&line_index, it));
    let mut res = Vec::new();
    let workspace_root = world.workspace_root_for(file_id);
    let cargo_spec = CargoTargetSpec::for_file(&world, file_id)?;
@@ -439,7 +451,7 @@ pub fn handle_completion(
    params: req::CompletionParams,
) -> Result<Option<req::CompletionResponse>> {
    let _p = profile("handle_completion");
    let position = params.text_document_position.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position)?;
    let completion_triggered_after_single_colon = {
        let mut res = false;
        if let Some(ctx) = params.context {
@@ -468,8 +480,10 @@ pub fn handle_completion(
    };
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let line_endings = world.file_line_endings(position.file_id);
    let items: Vec<CompletionItem> =
        items.into_iter().map(|item| item.conv_with((&line_index, line_endings))).collect();
    let items: Vec<CompletionItem> = items
        .into_iter()
        .map(|item| to_proto::completion_item(&line_index, line_endings, item))
        .collect();

    Ok(Some(items.into()))
}
@@ -479,17 +493,16 @@ pub fn handle_folding_range(
    params: FoldingRangeParams,
) -> Result<Option<Vec<FoldingRange>>> {
    let _p = profile("handle_folding_range");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let folds = world.analysis().folding_ranges(file_id)?;
    let text = world.analysis().file_text(file_id)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let ctx = FoldConvCtx {
        text: &text,
        line_index: &line_index,
        line_folding_only: world.config.client_caps.line_folding_only,
    };
    let res = Some(folds.into_iter().map_conv_with(&ctx).collect());
    Ok(res)
    let line_folding_only = world.config.client_caps.line_folding_only;
    let res = folds
        .into_iter()
        .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
        .collect();
    Ok(Some(res))
}

pub fn handle_signature_help(
@@ -497,34 +510,34 @@ pub fn handle_signature_help(
    params: req::SignatureHelpParams,
) -> Result<Option<req::SignatureHelp>> {
    let _p = profile("handle_signature_help");
    let position = params.text_document_position_params.try_conv_with(&world)?;
    if let Some(call_info) = world.analysis().call_info(position)? {
        let concise = !world.config.call_info_full;
        let mut active_parameter = call_info.active_parameter.map(|it| it as i64);
        if concise && call_info.signature.has_self_param {
            active_parameter = active_parameter.map(|it| it.saturating_sub(1));
        }
        let sig_info = call_info.signature.conv_with(concise);

        Ok(Some(req::SignatureHelp {
            signatures: vec![sig_info],
            active_signature: Some(0),
            active_parameter,
        }))
    } else {
        Ok(None)
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let call_info = match world.analysis().call_info(position)? {
        None => return Ok(None),
        Some(it) => it,
    };
    let concise = !world.config.call_info_full;
    let mut active_parameter = call_info.active_parameter.map(|it| it as i64);
    if concise && call_info.signature.has_self_param {
        active_parameter = active_parameter.map(|it| it.saturating_sub(1));
    }
    let sig_info = to_proto::signature_information(call_info.signature, concise);

    Ok(Some(req::SignatureHelp {
        signatures: vec![sig_info],
        active_signature: Some(0),
        active_parameter,
    }))
}

pub fn handle_hover(world: WorldSnapshot, params: req::HoverParams) -> Result<Option<Hover>> {
    let _p = profile("handle_hover");
    let position = params.text_document_position_params.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let info = match world.analysis().hover(position)? {
        None => return Ok(None),
        Some(info) => info,
    };
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let range = info.range.conv_with(&line_index);
    let range = to_proto::range(&line_index, info.range);
    let res = Hover {
        contents: HoverContents::Markup(MarkupContent {
            kind: MarkupKind::Markdown,
@@ -540,7 +553,7 @@ pub fn handle_prepare_rename(
    params: req::TextDocumentPositionParams,
) -> Result<Option<PrepareRenameResponse>> {
    let _p = profile("handle_prepare_rename");
    let position = params.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params)?;

    let optional_change = world.analysis().rename(position, "dummy")?;
    let range = match optional_change {
@@ -548,15 +561,14 @@ pub fn handle_prepare_rename(
        Some(it) => it.range,
    };

    let file_id = params.text_document.try_conv_with(&world)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let range = range.conv_with(&line_index);
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let range = to_proto::range(&line_index, range);
    Ok(Some(PrepareRenameResponse::Range(range)))
}

pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
    let _p = profile("handle_rename");
    let position = params.text_document_position.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position)?;

    if params.new_name.is_empty() {
        return Err(LspError::new(
@@ -567,14 +579,13 @@ pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
    }

    let optional_change = world.analysis().rename(position, &*params.new_name)?;
    let change = match optional_change {
    let source_change = match optional_change {
        None => return Ok(None),
        Some(it) => it.info,
    };

    let source_change_req = change.try_conv_with(&world)?;

    Ok(Some(source_change_req.workspace_edit))
    let source_change = to_proto::source_change(&world, source_change)?;
    Ok(Some(source_change.workspace_edit))
}

pub fn handle_references(
@@ -582,7 +593,7 @@ pub fn handle_references(
    params: req::ReferenceParams,
) -> Result<Option<Vec<Location>>> {
    let _p = profile("handle_references");
    let position = params.text_document_position.try_conv_with(&world)?;
    let position = from_proto::file_position(&world, params.text_document_position)?;

    let refs = match world.analysis().find_all_refs(position, None)? {
        None => return Ok(None),
@@ -591,33 +602,13 @@ pub fn handle_references(

    let locations = if params.context.include_declaration {
        refs.into_iter()
            .filter_map(|reference| {
                let line_index =
                    world.analysis().file_line_index(reference.file_range.file_id).ok()?;
                to_location(
                    reference.file_range.file_id,
                    reference.file_range.range,
                    &world,
                    &line_index,
                )
                .ok()
            })
            .filter_map(|reference| to_proto::location(&world, reference.file_range).ok())
            .collect()
    } else {
        // Only iterate over the references if include_declaration was false
        refs.references()
            .iter()
            .filter_map(|reference| {
                let line_index =
                    world.analysis().file_line_index(reference.file_range.file_id).ok()?;
                to_location(
                    reference.file_range.file_id,
                    reference.file_range.range,
                    &world,
                    &line_index,
                )
                .ok()
            })
            .filter_map(|reference| to_proto::location(&world, reference.file_range).ok())
            .collect()
    };

@@ -629,12 +620,12 @@ pub fn handle_formatting(
    params: DocumentFormattingParams,
) -> Result<Option<Vec<TextEdit>>> {
    let _p = profile("handle_formatting");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let file = world.analysis().file_text(file_id)?;
    let crate_ids = world.analysis().crate_for(file_id)?;

    let file_line_index = world.analysis().file_line_index(file_id)?;
    let end_position = TextSize::of(file.as_str()).conv_with(&file_line_index);
    let end_position = to_proto::position(&file_line_index, TextSize::of(file.as_str()));

    let mut rustfmt = match &world.config.rustfmt {
        RustfmtConfig::Rustfmt { extra_args } => {
@@ -700,33 +691,14 @@ pub fn handle_formatting(
    }]))
}

fn create_single_code_action(assist: Assist, world: &WorldSnapshot) -> Result<CodeAction> {
    let arg = to_value(assist.source_change.try_conv_with(world)?)?;
    let title = assist.label;
    let command = Command {
        title: title.clone(),
        command: "rust-analyzer.applySourceChange".to_string(),
        arguments: Some(vec![arg]),
    };

    Ok(CodeAction {
        title,
        kind: Some(String::new()),
        diagnostics: None,
        edit: None,
        command: Some(command),
        is_preferred: None,
    })
}

pub fn handle_code_action(
    world: WorldSnapshot,
    params: req::CodeActionParams,
) -> Result<Option<CodeActionResponse>> {
    let _p = profile("handle_code_action");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let range = params.range.conv_with(&line_index);
    let range = from_proto::text_range(&line_index, params.range);

    let diagnostics = world.analysis().diagnostics(file_id)?;
    let mut res = CodeActionResponse::default();
@@ -739,7 +711,7 @@ pub fn handle_code_action(

    for source_edit in fixes_from_diagnostics {
        let title = source_edit.label.clone();
        let edit = source_edit.try_conv_with(&world)?;
        let edit = to_proto::source_change(&world, source_edit)?;

        let command = Command {
            title,
@@ -758,7 +730,7 @@ pub fn handle_code_action(
    }

    for fix in world.check_fixes.get(&file_id).into_iter().flatten() {
        let fix_range = fix.range.conv_with(&line_index);
        let fix_range = from_proto::text_range(&line_index, fix.range);
        if fix_range.intersect(range).is_none() {
            continue;
        }
@@ -779,21 +751,21 @@ pub fn handle_code_action(
                .1
                .push(assist),
            None => {
                res.push(create_single_code_action(assist, &world)?.into());
                res.push(to_proto::code_action(&world, assist)?.into());
            }
        }
    }

    for (group_label, (idx, assists)) in grouped_assists {
        if assists.len() == 1 {
            res[idx] =
                create_single_code_action(assists.into_iter().next().unwrap(), &world)?.into();
            res[idx] = to_proto::code_action(&world, assists.into_iter().next().unwrap())?.into();
        } else {
            let title = group_label;

            let mut arguments = Vec::with_capacity(assists.len());
            for assist in assists {
                arguments.push(to_value(assist.source_change.try_conv_with(&world)?)?);
                let source_change = to_proto::source_change(&world, assist.source_change)?;
                arguments.push(to_value(source_change)?);
            }

            let command = Some(Command {
@@ -838,7 +810,7 @@ pub fn handle_code_lens(
    params: req::CodeLensParams,
) -> Result<Option<Vec<CodeLens>>> {
    let _p = profile("handle_code_lens");
    let file_id = params.text_document.try_conv_with(&world)?;
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
    let line_index = world.analysis().file_line_index(file_id)?;

    let mut lenses: Vec<CodeLens> = Default::default();
@@ -902,7 +874,7 @@ pub fn handle_code_lens(
            _ => false,
        })
        .map(|it| {
            let range = it.node_range.conv_with(&line_index);
            let range = to_proto::range(&line_index, it.node_range);
            let pos = range.start;
            let lens_params = req::GotoImplementationParams {
                text_document_position_params: req::TextDocumentPositionParams::new(
@@ -979,34 +951,33 @@ pub fn handle_document_highlight(
    params: req::DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> {
    let _p = profile("handle_document_highlight");
    let file_id = params.text_document_position_params.text_document.try_conv_with(&world)?;
    let line_index = world.analysis().file_line_index(file_id)?;
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let line_index = world.analysis().file_line_index(position.file_id)?;

    let refs = match world.analysis().find_all_refs(
        params.text_document_position_params.try_conv_with(&world)?,
        Some(SearchScope::single_file(file_id)),
    )? {
    let refs = match world
        .analysis()
        .find_all_refs(position, Some(SearchScope::single_file(position.file_id)))?
    {
        None => return Ok(None),
        Some(refs) => refs,
    };

    Ok(Some(
        refs.into_iter()
            .filter(|reference| reference.file_range.file_id == file_id)
            .map(|reference| DocumentHighlight {
                range: reference.file_range.range.conv_with(&line_index),
                kind: reference.access.map(|it| it.conv()),
            })
            .collect(),
    ))
    let res = refs
        .into_iter()
        .filter(|reference| reference.file_range.file_id == position.file_id)
        .map(|reference| DocumentHighlight {
            range: to_proto::range(&line_index, reference.file_range.range),
            kind: reference.access.map(to_proto::document_highlight_kind),
        })
        .collect();
    Ok(Some(res))
}

pub fn handle_ssr(world: WorldSnapshot, params: req::SsrParams) -> Result<req::SourceChange> {
    let _p = profile("handle_ssr");
    world
        .analysis()
        .structural_search_replace(&params.query, params.parse_only)??
        .try_conv_with(&world)
    let source_change =
        world.analysis().structural_search_replace(&params.query, params.parse_only)??;
    to_proto::source_change(&world, source_change)
}

pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> {
@@ -1017,8 +988,8 @@ pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> {
        .diagnostics(file_id)?
        .into_iter()
        .map(|d| Diagnostic {
|
||||
range: d.range.conv_with(&line_index),
|
||||
severity: Some(d.severity.conv()),
|
||||
range: to_proto::range(&line_index, d.range),
|
||||
severity: Some(to_proto::diagnostic_severity(d.severity)),
|
||||
code: None,
|
||||
source: Some("rust-analyzer".to_string()),
|
||||
message: d.message,
|
||||
|
@ -1045,7 +1016,7 @@ fn to_lsp_runnable(
|
|||
RunnableKind::Bin => "run binary".to_string(),
|
||||
};
|
||||
Ok(req::Runnable {
|
||||
range: runnable.range.conv_with(&line_index),
|
||||
range: to_proto::range(&line_index, runnable.range),
|
||||
label,
|
||||
bin: "cargo".to_string(),
|
||||
args,
|
||||
|
@ -1064,13 +1035,13 @@ pub fn handle_inlay_hints(
|
|||
params: InlayHintsParams,
|
||||
) -> Result<Vec<InlayHint>> {
|
||||
let _p = profile("handle_inlay_hints");
|
||||
let file_id = params.text_document.try_conv_with(&world)?;
|
||||
let file_id = from_proto::file_id(&world, ¶ms.text_document.uri)?;
|
||||
let analysis = world.analysis();
|
||||
let line_index = analysis.file_line_index(file_id)?;
|
||||
Ok(analysis
|
||||
.inlay_hints(file_id, &world.config.inlay_hints)?
|
||||
.into_iter()
|
||||
.map_conv_with(&line_index)
|
||||
.map(|it| to_proto::inlay_int(&line_index, it))
|
||||
.collect())
|
||||
}
|
||||
|
||||
|
@ -1079,21 +1050,19 @@ pub fn handle_call_hierarchy_prepare(
|
|||
params: CallHierarchyPrepareParams,
|
||||
) -> Result<Option<Vec<CallHierarchyItem>>> {
|
||||
let _p = profile("handle_call_hierarchy_prepare");
|
||||
let position = params.text_document_position_params.try_conv_with(&world)?;
|
||||
let file_id = position.file_id;
|
||||
let position = from_proto::file_position(&world, params.text_document_position_params)?;
|
||||
|
||||
let nav_info = match world.analysis().call_hierarchy(position)? {
|
||||
None => return Ok(None),
|
||||
Some(it) => it,
|
||||
};
|
||||
|
||||
let line_index = world.analysis().file_line_index(file_id)?;
|
||||
let RangeInfo { range, info: navs } = nav_info;
|
||||
let RangeInfo { range: _, info: navs } = nav_info;
|
||||
let res = navs
|
||||
.into_iter()
|
||||
.filter(|it| it.kind() == SyntaxKind::FN_DEF)
|
||||
.filter_map(|it| to_call_hierarchy_item(file_id, range, &world, &line_index, it).ok())
|
||||
.collect();
|
||||
.map(|it| to_proto::call_hierarchy_item(&world, it))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
Ok(Some(res))
|
||||
}
|
||||
|
@ -1106,7 +1075,7 @@ pub fn handle_call_hierarchy_incoming(
|
|||
let item = params.item;
|
||||
|
||||
let doc = TextDocumentIdentifier::new(item.uri);
|
||||
let frange: FileRange = (&doc, item.range).try_conv_with(&world)?;
|
||||
let frange = from_proto::file_range(&world, doc, item.range)?;
|
||||
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
|
||||
|
||||
let call_items = match world.analysis().incoming_calls(fpos)? {
|
||||
|
@ -1119,11 +1088,14 @@ pub fn handle_call_hierarchy_incoming(
|
|||
for call_item in call_items.into_iter() {
|
||||
let file_id = call_item.target.file_id();
|
||||
let line_index = world.analysis().file_line_index(file_id)?;
|
||||
let range = call_item.target.range();
|
||||
let item = to_call_hierarchy_item(file_id, range, &world, &line_index, call_item.target)?;
|
||||
let item = to_proto::call_hierarchy_item(&world, call_item.target)?;
|
||||
res.push(CallHierarchyIncomingCall {
|
||||
from: item,
|
||||
from_ranges: call_item.ranges.iter().map(|it| it.conv_with(&line_index)).collect(),
|
||||
from_ranges: call_item
|
||||
.ranges
|
||||
.into_iter()
|
||||
.map(|it| to_proto::range(&line_index, it))
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1138,7 +1110,7 @@ pub fn handle_call_hierarchy_outgoing(
|
|||
let item = params.item;
|
||||
|
||||
let doc = TextDocumentIdentifier::new(item.uri);
|
||||
let frange: FileRange = (&doc, item.range).try_conv_with(&world)?;
|
||||
let frange = from_proto::file_range(&world, doc, item.range)?;
|
||||
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
|
||||
|
||||
let call_items = match world.analysis().outgoing_calls(fpos)? {
|
||||
|
@ -1151,11 +1123,14 @@ pub fn handle_call_hierarchy_outgoing(
|
|||
for call_item in call_items.into_iter() {
|
||||
let file_id = call_item.target.file_id();
|
||||
let line_index = world.analysis().file_line_index(file_id)?;
|
||||
let range = call_item.target.range();
|
||||
let item = to_call_hierarchy_item(file_id, range, &world, &line_index, call_item.target)?;
|
||||
let item = to_proto::call_hierarchy_item(&world, call_item.target)?;
|
||||
res.push(CallHierarchyOutgoingCall {
|
||||
to: item,
|
||||
from_ranges: call_item.ranges.iter().map(|it| it.conv_with(&line_index)).collect(),
|
||||
from_ranges: call_item
|
||||
.ranges
|
||||
.into_iter()
|
||||
.map(|it| to_proto::range(&line_index, it))
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1168,19 +1143,20 @@ pub fn handle_semantic_tokens(
|
|||
) -> Result<Option<SemanticTokensResult>> {
|
||||
let _p = profile("handle_semantic_tokens");
|
||||
|
||||
let file_id = params.text_document.try_conv_with(&world)?;
|
||||
let file_id = from_proto::file_id(&world, ¶ms.text_document.uri)?;
|
||||
let text = world.analysis().file_text(file_id)?;
|
||||
let line_index = world.analysis().file_line_index(file_id)?;
|
||||
|
||||
let mut builder = SemanticTokensBuilder::default();
|
||||
|
||||
for highlight_range in world.analysis().highlight(file_id)?.into_iter() {
|
||||
let (token_index, modifier_bitset) = highlight_range.highlight.conv();
|
||||
let (token_index, modifier_bitset) =
|
||||
to_proto::token_type_index_modifiers_bitself(highlight_range.highlight);
|
||||
for mut range in line_index.lines(highlight_range.range) {
|
||||
if text[range].ends_with('\n') {
|
||||
range = TextRange::new(range.start(), range.end() - TextSize::of('\n'));
|
||||
}
|
||||
let range = range.conv_with(&line_index);
|
||||
let range = to_proto::range(&line_index, range);
|
||||
builder.push(range, token_index, modifier_bitset);
|
||||
}
|
||||
}
|
||||
|
@ -1196,14 +1172,16 @@ pub fn handle_semantic_tokens_range(
|
|||
) -> Result<Option<SemanticTokensRangeResult>> {
|
||||
let _p = profile("handle_semantic_tokens_range");
|
||||
|
||||
let frange = (¶ms.text_document, params.range).try_conv_with(&world)?;
|
||||
let frange = from_proto::file_range(&world, params.text_document, params.range)?;
|
||||
let line_index = world.analysis().file_line_index(frange.file_id)?;
|
||||
|
||||
let mut builder = SemanticTokensBuilder::default();
|
||||
|
||||
for highlight_range in world.analysis().highlight_range(frange)?.into_iter() {
|
||||
let (token_type, token_modifiers) = highlight_range.highlight.conv();
|
||||
builder.push(highlight_range.range.conv_with(&line_index), token_type, token_modifiers);
|
||||
let (token_type, token_modifiers) =
|
||||
to_proto::token_type_index_modifiers_bitself(highlight_range.highlight);
|
||||
let range = to_proto::range(&line_index, highlight_range.range);
|
||||
builder.push(range, token_type, token_modifiers);
|
||||
}
|
||||
|
||||
let tokens = builder.build();
|
||||
|
|
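The hunks above replace trait-method call sites such as params.text_document.try_conv_with(&world)? and range.conv_with(&line_index) with plain function calls like from_proto::file_id(&world, &params.text_document.uri)? and to_proto::range(&line_index, range). A minimal sketch of the two shapes, using stand-in types rather than the actual rust-analyzer definitions, shows why the free-function form needs less machinery:

// Sketch only: `LineIndex`, `TextSize` and `Position` are stand-ins, not the
// real rust-analyzer / lsp_types definitions.
struct LineIndex;
struct TextSize(u32);
#[derive(Debug)]
struct Position { line: u64, character: u64 }

impl LineIndex {
    fn line_col(&self, offset: TextSize) -> (u32, u32) {
        (offset.0 / 80, offset.0 % 80) // pretend every line is 80 columns wide
    }
}

// Old shape: a generic conversion trait with a context parameter,
// implemented once per (type, context) pair.
trait ConvWith<CTX> {
    type Output;
    fn conv_with(self, ctx: CTX) -> Self::Output;
}

impl ConvWith<&LineIndex> for TextSize {
    type Output = Position;
    fn conv_with(self, ctx: &LineIndex) -> Position {
        let (line, col) = ctx.line_col(self);
        Position { line: line.into(), character: col.into() }
    }
}

// New shape: an ordinary function, like to_proto::position in the file below.
fn position(line_index: &LineIndex, offset: TextSize) -> Position {
    let (line, col) = line_index.line_col(offset);
    Position { line: line.into(), character: col.into() }
}

fn main() {
    let line_index = LineIndex;
    let via_trait = TextSize(200).conv_with(&line_index);
    let via_fn = position(&line_index, TextSize(200));
    println!("{:?} / {:?}", via_trait, via_fn); // same value either way
}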
566
crates/rust-analyzer/src/to_proto.rs
Normal file
|
@ -0,0 +1,566 @@
|
|||
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
|
||||
use ra_db::{FileId, FileRange};
|
||||
use ra_ide::{
|
||||
translate_offset_with_edit, Assist, CompletionItem, CompletionItemKind, Documentation,
|
||||
FileSystemEdit, Fold, FoldKind, FunctionSignature, Highlight, HighlightModifier, HighlightTag,
|
||||
InlayHint, InlayKind, InsertTextFormat, LineIndex, NavigationTarget, ReferenceAccess, Severity,
|
||||
SourceChange, SourceFileEdit,
|
||||
};
|
||||
use ra_syntax::{SyntaxKind, TextRange, TextSize};
|
||||
use ra_text_edit::{Indel, TextEdit};
|
||||
use ra_vfs::LineEndings;
|
||||
|
||||
use crate::{req, semantic_tokens, world::WorldSnapshot, Result};
|
||||
|
||||
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
|
||||
let line_col = line_index.line_col(offset);
|
||||
let line = u64::from(line_col.line);
|
||||
let character = u64::from(line_col.col_utf16);
|
||||
lsp_types::Position::new(line, character)
|
||||
}
|
||||
|
||||
pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
|
||||
let start = position(line_index, range.start());
|
||||
let end = position(line_index, range.end());
|
||||
lsp_types::Range::new(start, end)
|
||||
}
|
||||
|
||||
pub(crate) fn symbol_kind(syntax_kind: SyntaxKind) -> lsp_types::SymbolKind {
|
||||
match syntax_kind {
|
||||
SyntaxKind::FN_DEF => lsp_types::SymbolKind::Function,
|
||||
SyntaxKind::STRUCT_DEF => lsp_types::SymbolKind::Struct,
|
||||
SyntaxKind::ENUM_DEF => lsp_types::SymbolKind::Enum,
|
||||
SyntaxKind::ENUM_VARIANT => lsp_types::SymbolKind::EnumMember,
|
||||
SyntaxKind::TRAIT_DEF => lsp_types::SymbolKind::Interface,
|
||||
SyntaxKind::MACRO_CALL => lsp_types::SymbolKind::Function,
|
||||
SyntaxKind::MODULE => lsp_types::SymbolKind::Module,
|
||||
SyntaxKind::TYPE_ALIAS_DEF => lsp_types::SymbolKind::TypeParameter,
|
||||
SyntaxKind::RECORD_FIELD_DEF => lsp_types::SymbolKind::Field,
|
||||
SyntaxKind::STATIC_DEF => lsp_types::SymbolKind::Constant,
|
||||
SyntaxKind::CONST_DEF => lsp_types::SymbolKind::Constant,
|
||||
SyntaxKind::IMPL_DEF => lsp_types::SymbolKind::Object,
|
||||
_ => lsp_types::SymbolKind::Variable,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn document_highlight_kind(
|
||||
reference_access: ReferenceAccess,
|
||||
) -> lsp_types::DocumentHighlightKind {
|
||||
match reference_access {
|
||||
ReferenceAccess::Read => lsp_types::DocumentHighlightKind::Read,
|
||||
ReferenceAccess::Write => lsp_types::DocumentHighlightKind::Write,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
|
||||
match severity {
|
||||
Severity::Error => lsp_types::DiagnosticSeverity::Error,
|
||||
Severity::WeakWarning => lsp_types::DiagnosticSeverity::Hint,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
|
||||
let value = crate::markdown::format_docs(documentation.as_str());
|
||||
let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
|
||||
lsp_types::Documentation::MarkupContent(markup_content)
|
||||
}
|
||||
|
||||
pub(crate) fn insert_text_format(
|
||||
insert_text_format: InsertTextFormat,
|
||||
) -> lsp_types::InsertTextFormat {
|
||||
match insert_text_format {
|
||||
InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet,
|
||||
InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn completion_item_kind(
|
||||
completion_item_kind: CompletionItemKind,
|
||||
) -> lsp_types::CompletionItemKind {
|
||||
match completion_item_kind {
|
||||
CompletionItemKind::Keyword => lsp_types::CompletionItemKind::Keyword,
|
||||
CompletionItemKind::Snippet => lsp_types::CompletionItemKind::Snippet,
|
||||
CompletionItemKind::Module => lsp_types::CompletionItemKind::Module,
|
||||
CompletionItemKind::Function => lsp_types::CompletionItemKind::Function,
|
||||
CompletionItemKind::Struct => lsp_types::CompletionItemKind::Struct,
|
||||
CompletionItemKind::Enum => lsp_types::CompletionItemKind::Enum,
|
||||
CompletionItemKind::EnumVariant => lsp_types::CompletionItemKind::EnumMember,
|
||||
CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::Struct,
|
||||
CompletionItemKind::Binding => lsp_types::CompletionItemKind::Variable,
|
||||
CompletionItemKind::Field => lsp_types::CompletionItemKind::Field,
|
||||
CompletionItemKind::Trait => lsp_types::CompletionItemKind::Interface,
|
||||
CompletionItemKind::TypeAlias => lsp_types::CompletionItemKind::Struct,
|
||||
CompletionItemKind::Const => lsp_types::CompletionItemKind::Constant,
|
||||
CompletionItemKind::Static => lsp_types::CompletionItemKind::Value,
|
||||
CompletionItemKind::Method => lsp_types::CompletionItemKind::Method,
|
||||
CompletionItemKind::TypeParam => lsp_types::CompletionItemKind::TypeParameter,
|
||||
CompletionItemKind::Macro => lsp_types::CompletionItemKind::Method,
|
||||
CompletionItemKind::Attribute => lsp_types::CompletionItemKind::EnumMember,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn text_edit(
|
||||
line_index: &LineIndex,
|
||||
line_endings: LineEndings,
|
||||
indel: Indel,
|
||||
) -> lsp_types::TextEdit {
|
||||
let range = range(line_index, indel.delete);
|
||||
let new_text = match line_endings {
|
||||
LineEndings::Unix => indel.insert,
|
||||
LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
|
||||
};
|
||||
lsp_types::TextEdit { range, new_text }
|
||||
}
|
||||
|
||||
pub(crate) fn text_edit_vec(
|
||||
line_index: &LineIndex,
|
||||
line_endings: LineEndings,
|
||||
text_edit: TextEdit,
|
||||
) -> Vec<lsp_types::TextEdit> {
|
||||
text_edit
|
||||
.as_indels()
|
||||
.iter()
|
||||
.map(|it| self::text_edit(line_index, line_endings, it.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub(crate) fn completion_item(
|
||||
line_index: &LineIndex,
|
||||
line_endings: LineEndings,
|
||||
completion_item: CompletionItem,
|
||||
) -> lsp_types::CompletionItem {
|
||||
let mut additional_text_edits = Vec::new();
|
||||
let mut text_edit = None;
|
||||
// LSP does not allow arbitrary edits in completion, so we have to do a
|
||||
// non-trivial mapping here.
|
||||
let source_range = completion_item.source_range();
|
||||
for indel in completion_item.text_edit().as_indels() {
|
||||
if indel.delete.contains_range(source_range) {
|
||||
text_edit = Some(if indel.delete == source_range {
|
||||
self::text_edit(line_index, line_endings, indel.clone())
|
||||
} else {
|
||||
assert!(source_range.end() == indel.delete.end());
|
||||
let range1 = TextRange::new(indel.delete.start(), source_range.start());
|
||||
let range2 = source_range;
|
||||
let indel1 = Indel::replace(range1, String::new());
|
||||
let indel2 = Indel::replace(range2, indel.insert.clone());
|
||||
additional_text_edits.push(self::text_edit(line_index, line_endings, indel1));
|
||||
self::text_edit(line_index, line_endings, indel2)
|
||||
})
|
||||
} else {
|
||||
assert!(source_range.intersect(indel.delete).is_none());
|
||||
let text_edit = self::text_edit(line_index, line_endings, indel.clone());
|
||||
additional_text_edits.push(text_edit);
|
||||
}
|
||||
}
|
||||
let text_edit = text_edit.unwrap();
|
||||
|
||||
let mut res = lsp_types::CompletionItem {
|
||||
label: completion_item.label().to_string(),
|
||||
detail: completion_item.detail().map(|it| it.to_string()),
|
||||
filter_text: Some(completion_item.lookup().to_string()),
|
||||
kind: completion_item.kind().map(completion_item_kind),
|
||||
text_edit: Some(text_edit.into()),
|
||||
additional_text_edits: Some(additional_text_edits),
|
||||
documentation: completion_item.documentation().map(documentation),
|
||||
deprecated: Some(completion_item.deprecated()),
|
||||
command: if completion_item.trigger_call_info() {
|
||||
let cmd = lsp_types::Command {
|
||||
title: "triggerParameterHints".into(),
|
||||
command: "editor.action.triggerParameterHints".into(),
|
||||
arguments: None,
|
||||
};
|
||||
Some(cmd)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if completion_item.score().is_some() {
|
||||
res.preselect = Some(true)
|
||||
}
|
||||
|
||||
if completion_item.deprecated() {
|
||||
res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated])
|
||||
}
|
||||
|
||||
res.insert_text_format = Some(insert_text_format(completion_item.insert_text_format()));
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
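The completion_item conversion above has to respect the LSP restriction noted in its comment: the primary text_edit may only touch the completion's source range, so an engine edit that starts earlier is split into a prefix deletion (pushed to additional_text_edits) plus an edit over the source range itself. A standalone sketch of that splitting rule, with made-up (start, end) offsets standing in for TextRange/Indel:

// Stand-in for Indel: half-open (start, end) offsets plus the replacement
// text. Mirrors only the splitting branch of completion_item above.
#[derive(Clone, Debug, PartialEq)]
struct Edit { start: u32, end: u32, insert: String }

// Returns (primary_edit, optional_prefix_deletion), assuming the same
// invariant as above: the edit ends exactly where the source range ends.
fn split_for_completion(indel: Edit, source_range: (u32, u32)) -> (Edit, Option<Edit>) {
    let (src_start, src_end) = source_range;
    if (indel.start, indel.end) == (src_start, src_end) {
        (indel, None) // covers exactly the source range: use it as-is
    } else {
        assert_eq!(indel.end, src_end);
        let prefix = Edit { start: indel.start, end: src_start, insert: String::new() };
        let primary = Edit { start: src_start, end: src_end, insert: indel.insert };
        (primary, Some(prefix))
    }
}

fn main() {
    // Made-up offsets: the engine wants to replace 10..16 with new text,
    // but the client-visible source range is only 14..16.
    let indel = Edit { start: 10, end: 16, insert: "new_text".into() };
    let (primary, extra) = split_for_completion(indel, (14, 16));
    assert_eq!(primary, Edit { start: 14, end: 16, insert: "new_text".into() });
    assert_eq!(extra, Some(Edit { start: 10, end: 14, insert: String::new() }));
}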
pub(crate) fn signature_information(
|
||||
signature: FunctionSignature,
|
||||
concise: bool,
|
||||
) -> lsp_types::SignatureInformation {
|
||||
let (label, documentation, params) = if concise {
|
||||
let mut params = signature.parameters;
|
||||
if signature.has_self_param {
|
||||
params.remove(0);
|
||||
}
|
||||
(params.join(", "), None, params)
|
||||
} else {
|
||||
(signature.to_string(), signature.doc.map(documentation), signature.parameters)
|
||||
};
|
||||
|
||||
let parameters: Vec<lsp_types::ParameterInformation> = params
|
||||
.into_iter()
|
||||
.map(|param| lsp_types::ParameterInformation {
|
||||
label: lsp_types::ParameterLabel::Simple(param),
|
||||
documentation: None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
lsp_types::SignatureInformation { label, documentation, parameters: Some(parameters) }
|
||||
}
|
||||
|
||||
pub(crate) fn inlay_int(line_index: &LineIndex, inlay_hint: InlayHint) -> req::InlayHint {
|
||||
req::InlayHint {
|
||||
label: inlay_hint.label.to_string(),
|
||||
range: range(line_index, inlay_hint.range),
|
||||
kind: match inlay_hint.kind {
|
||||
InlayKind::ParameterHint => req::InlayKind::ParameterHint,
|
||||
InlayKind::TypeHint => req::InlayKind::TypeHint,
|
||||
InlayKind::ChainingHint => req::InlayKind::ChainingHint,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: this is wrong
|
||||
pub(crate) fn token_type_index_modifiers_bitself(highlight: Highlight) -> (u32, u32) {
|
||||
let mut mods = semantic_tokens::ModifierSet::default();
|
||||
let type_ = match highlight.tag {
|
||||
HighlightTag::Struct => lsp_types::SemanticTokenType::STRUCT,
|
||||
HighlightTag::Enum => lsp_types::SemanticTokenType::ENUM,
|
||||
HighlightTag::Union => semantic_tokens::UNION,
|
||||
HighlightTag::TypeAlias => semantic_tokens::TYPE_ALIAS,
|
||||
HighlightTag::Trait => lsp_types::SemanticTokenType::INTERFACE,
|
||||
HighlightTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
|
||||
HighlightTag::SelfType => lsp_types::SemanticTokenType::TYPE,
|
||||
HighlightTag::Field => lsp_types::SemanticTokenType::MEMBER,
|
||||
HighlightTag::Function => lsp_types::SemanticTokenType::FUNCTION,
|
||||
HighlightTag::Module => lsp_types::SemanticTokenType::NAMESPACE,
|
||||
HighlightTag::Constant => {
|
||||
mods |= semantic_tokens::CONSTANT;
|
||||
mods |= lsp_types::SemanticTokenModifier::STATIC;
|
||||
lsp_types::SemanticTokenType::VARIABLE
|
||||
}
|
||||
HighlightTag::Static => {
|
||||
mods |= lsp_types::SemanticTokenModifier::STATIC;
|
||||
lsp_types::SemanticTokenType::VARIABLE
|
||||
}
|
||||
HighlightTag::EnumVariant => semantic_tokens::ENUM_MEMBER,
|
||||
HighlightTag::Macro => lsp_types::SemanticTokenType::MACRO,
|
||||
HighlightTag::Local => lsp_types::SemanticTokenType::VARIABLE,
|
||||
HighlightTag::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
|
||||
HighlightTag::Lifetime => semantic_tokens::LIFETIME,
|
||||
HighlightTag::ByteLiteral | HighlightTag::NumericLiteral => {
|
||||
lsp_types::SemanticTokenType::NUMBER
|
||||
}
|
||||
HighlightTag::CharLiteral | HighlightTag::StringLiteral => {
|
||||
lsp_types::SemanticTokenType::STRING
|
||||
}
|
||||
HighlightTag::Comment => lsp_types::SemanticTokenType::COMMENT,
|
||||
HighlightTag::Attribute => semantic_tokens::ATTRIBUTE,
|
||||
HighlightTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
|
||||
HighlightTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
|
||||
HighlightTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
|
||||
};
|
||||
|
||||
for modifier in highlight.modifiers.iter() {
|
||||
let modifier = match modifier {
|
||||
HighlightModifier::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
|
||||
HighlightModifier::ControlFlow => semantic_tokens::CONTROL_FLOW,
|
||||
HighlightModifier::Mutable => semantic_tokens::MUTABLE,
|
||||
HighlightModifier::Unsafe => semantic_tokens::UNSAFE,
|
||||
};
|
||||
mods |= modifier;
|
||||
}
|
||||
|
||||
(semantic_tokens::type_index(type_), mods.0)
|
||||
}
|
||||
|
||||
pub(crate) fn folding_range(
|
||||
text: &str,
|
||||
line_index: &LineIndex,
|
||||
line_folding_only: bool,
|
||||
fold: Fold,
|
||||
) -> lsp_types::FoldingRange {
|
||||
let kind = match fold.kind {
|
||||
FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
|
||||
FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
|
||||
FoldKind::Mods | FoldKind::Block => None,
|
||||
};
|
||||
|
||||
let range = range(line_index, fold.range);
|
||||
|
||||
if line_folding_only {
|
||||
// Clients with line_folding_only == true (such as VSCode) will fold the whole end line
|
||||
// even if it contains text not in the folding range. To prevent that we exclude
|
||||
// range.end.line from the folding region if there is more text after range.end
|
||||
// on the same line.
|
||||
let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
|
||||
.chars()
|
||||
.take_while(|it| *it != '\n')
|
||||
.any(|it| !it.is_whitespace());
|
||||
|
||||
let end_line = if has_more_text_on_end_line {
|
||||
range.end.line.saturating_sub(1)
|
||||
} else {
|
||||
range.end.line
|
||||
};
|
||||
|
||||
lsp_types::FoldingRange {
|
||||
start_line: range.start.line,
|
||||
start_character: None,
|
||||
end_line,
|
||||
end_character: None,
|
||||
kind,
|
||||
}
|
||||
} else {
|
||||
lsp_types::FoldingRange {
|
||||
start_line: range.start.line,
|
||||
start_character: Some(range.start.character),
|
||||
end_line: range.end.line,
|
||||
end_character: Some(range.end.character),
|
||||
kind,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn url(world: &WorldSnapshot, file_id: FileId) -> Result<lsp_types::Url> {
|
||||
world.file_id_to_uri(file_id)
|
||||
}
|
||||
|
||||
pub(crate) fn text_document_identifier(
|
||||
world: &WorldSnapshot,
|
||||
file_id: FileId,
|
||||
) -> Result<lsp_types::TextDocumentIdentifier> {
|
||||
let res = lsp_types::TextDocumentIdentifier { uri: url(world, file_id)? };
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub(crate) fn versioned_text_document_identifier(
|
||||
world: &WorldSnapshot,
|
||||
file_id: FileId,
|
||||
version: Option<i64>,
|
||||
) -> Result<lsp_types::VersionedTextDocumentIdentifier> {
|
||||
let res = lsp_types::VersionedTextDocumentIdentifier { uri: url(world, file_id)?, version };
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub(crate) fn location(world: &WorldSnapshot, frange: FileRange) -> Result<lsp_types::Location> {
|
||||
let url = url(world, frange.file_id)?;
|
||||
let line_index = world.analysis().file_line_index(frange.file_id)?;
|
||||
let range = range(&line_index, frange.range);
|
||||
let loc = lsp_types::Location::new(url, range);
|
||||
Ok(loc)
|
||||
}
|
||||
|
||||
pub(crate) fn location_link(
|
||||
world: &WorldSnapshot,
|
||||
src: FileRange,
|
||||
target: NavigationTarget,
|
||||
) -> Result<lsp_types::LocationLink> {
|
||||
let src_location = location(world, src)?;
|
||||
let (target_uri, target_range, target_selection_range) = location_info(world, target)?;
|
||||
let res = lsp_types::LocationLink {
|
||||
origin_selection_range: Some(src_location.range),
|
||||
target_uri,
|
||||
target_range,
|
||||
target_selection_range,
|
||||
};
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
fn location_info(
|
||||
world: &WorldSnapshot,
|
||||
target: NavigationTarget,
|
||||
) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
|
||||
let line_index = world.analysis().file_line_index(target.file_id())?;
|
||||
|
||||
let target_uri = url(world, target.file_id())?;
|
||||
let target_range = range(&line_index, target.full_range());
|
||||
let target_selection_range =
|
||||
target.focus_range().map(|it| range(&line_index, it)).unwrap_or(target_range);
|
||||
Ok((target_uri, target_range, target_selection_range))
|
||||
}
|
||||
|
||||
pub(crate) fn goto_definition_response(
|
||||
world: &WorldSnapshot,
|
||||
src: FileRange,
|
||||
targets: Vec<NavigationTarget>,
|
||||
) -> Result<lsp_types::GotoDefinitionResponse> {
|
||||
if world.config.client_caps.location_link {
|
||||
let links = targets
|
||||
.into_iter()
|
||||
.map(|nav| location_link(world, src, nav))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
Ok(links.into())
|
||||
} else {
|
||||
let locations = targets
|
||||
.into_iter()
|
||||
.map(|nav| {
|
||||
location(
|
||||
world,
|
||||
FileRange {
|
||||
file_id: nav.file_id(),
|
||||
range: nav.focus_range().unwrap_or(nav.range()),
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
Ok(locations.into())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn text_document_edit(
|
||||
world: &WorldSnapshot,
|
||||
source_file_edit: SourceFileEdit,
|
||||
) -> Result<lsp_types::TextDocumentEdit> {
|
||||
let text_document = versioned_text_document_identifier(world, source_file_edit.file_id, None)?;
|
||||
let line_index = world.analysis().file_line_index(source_file_edit.file_id)?;
|
||||
let line_endings = world.file_line_endings(source_file_edit.file_id);
|
||||
let edits = source_file_edit
|
||||
.edit
|
||||
.as_indels()
|
||||
.iter()
|
||||
.map(|it| text_edit(&line_index, line_endings, it.clone()))
|
||||
.collect();
|
||||
Ok(lsp_types::TextDocumentEdit { text_document, edits })
|
||||
}
|
||||
|
||||
pub(crate) fn resource_op(
|
||||
world: &WorldSnapshot,
|
||||
file_system_edit: FileSystemEdit,
|
||||
) -> Result<lsp_types::ResourceOp> {
|
||||
let res = match file_system_edit {
|
||||
FileSystemEdit::CreateFile { source_root, path } => {
|
||||
let uri = world.path_to_uri(source_root, &path)?;
|
||||
lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None })
|
||||
}
|
||||
FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
|
||||
let old_uri = world.file_id_to_uri(src)?;
|
||||
let new_uri = world.path_to_uri(dst_source_root, &dst_path)?;
|
||||
lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None })
|
||||
}
|
||||
};
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub(crate) fn source_change(
|
||||
world: &WorldSnapshot,
|
||||
source_change: SourceChange,
|
||||
) -> Result<req::SourceChange> {
|
||||
let cursor_position = match source_change.cursor_position {
|
||||
None => None,
|
||||
Some(pos) => {
|
||||
let line_index = world.analysis().file_line_index(pos.file_id)?;
|
||||
let edit = source_change
|
||||
.source_file_edits
|
||||
.iter()
|
||||
.find(|it| it.file_id == pos.file_id)
|
||||
.map(|it| &it.edit);
|
||||
let line_col = match edit {
|
||||
Some(edit) => translate_offset_with_edit(&*line_index, pos.offset, edit),
|
||||
None => line_index.line_col(pos.offset),
|
||||
};
|
||||
let position =
|
||||
lsp_types::Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16));
|
||||
Some(lsp_types::TextDocumentPositionParams {
|
||||
text_document: text_document_identifier(world, pos.file_id)?,
|
||||
position,
|
||||
})
|
||||
}
|
||||
};
|
||||
let mut document_changes: Vec<lsp_types::DocumentChangeOperation> = Vec::new();
|
||||
for op in source_change.file_system_edits {
|
||||
let op = resource_op(&world, op)?;
|
||||
document_changes.push(lsp_types::DocumentChangeOperation::Op(op));
|
||||
}
|
||||
for edit in source_change.source_file_edits {
|
||||
let edit = text_document_edit(&world, edit)?;
|
||||
document_changes.push(lsp_types::DocumentChangeOperation::Edit(edit));
|
||||
}
|
||||
let workspace_edit = lsp_types::WorkspaceEdit {
|
||||
changes: None,
|
||||
document_changes: Some(lsp_types::DocumentChanges::Operations(document_changes)),
|
||||
};
|
||||
Ok(req::SourceChange { label: source_change.label, workspace_edit, cursor_position })
|
||||
}
|
||||
|
||||
pub fn call_hierarchy_item(
|
||||
world: &WorldSnapshot,
|
||||
target: NavigationTarget,
|
||||
) -> Result<lsp_types::CallHierarchyItem> {
|
||||
let name = target.name().to_string();
|
||||
let detail = target.description().map(|it| it.to_string());
|
||||
let kind = symbol_kind(target.kind());
|
||||
let (uri, range, selection_range) = location_info(world, target)?;
|
||||
Ok(lsp_types::CallHierarchyItem { name, kind, tags: None, detail, uri, range, selection_range })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use test_utils::extract_ranges;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn conv_fold_line_folding_only_fixup() {
|
||||
let text = r#"<fold>mod a;
|
||||
mod b;
|
||||
mod c;</fold>
|
||||
|
||||
fn main() <fold>{
|
||||
if cond <fold>{
|
||||
a::do_a();
|
||||
}</fold> else <fold>{
|
||||
b::do_b();
|
||||
}</fold>
|
||||
}</fold>"#;
|
||||
|
||||
let (ranges, text) = extract_ranges(text, "fold");
|
||||
assert_eq!(ranges.len(), 4);
|
||||
let folds = vec![
|
||||
Fold { range: ranges[0], kind: FoldKind::Mods },
|
||||
Fold { range: ranges[1], kind: FoldKind::Block },
|
||||
Fold { range: ranges[2], kind: FoldKind::Block },
|
||||
Fold { range: ranges[3], kind: FoldKind::Block },
|
||||
];
|
||||
|
||||
let line_index = LineIndex::new(&text);
|
||||
let converted: Vec<lsp_types::FoldingRange> =
|
||||
folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
|
||||
|
||||
let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
|
||||
assert_eq!(converted.len(), expected_lines.len());
|
||||
for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
|
||||
assert_eq!(folding_range.start_line, *start_line);
|
||||
assert_eq!(folding_range.start_character, None);
|
||||
assert_eq!(folding_range.end_line, *end_line);
|
||||
assert_eq!(folding_range.end_character, None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn code_action(world: &WorldSnapshot, assist: Assist) -> Result<lsp_types::CodeAction> {
|
||||
let source_change = source_change(&world, assist.source_change)?;
|
||||
let arg = serde_json::to_value(source_change)?;
|
||||
let title = assist.label;
|
||||
let command = lsp_types::Command {
|
||||
title: title.clone(),
|
||||
command: "rust-analyzer.applySourceChange".to_string(),
|
||||
arguments: Some(vec![arg]),
|
||||
};
|
||||
|
||||
Ok(lsp_types::CodeAction {
|
||||
title,
|
||||
kind: Some(String::new()),
|
||||
diagnostics: None,
|
||||
edit: None,
|
||||
command: Some(command),
|
||||
is_preferred: None,
|
||||
})
|
||||
}
|
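Taken together, the rewritten handlers earlier in the diff converge on one shape: decode the LSP parameters with a from_proto helper, run the analysis query, then encode the result with a to_proto helper. A minimal, self-contained sketch of that shape, with stand-in types rather than the real WorldSnapshot / lsp_types definitions:

// Stand-ins only: `World`, `Params`, `Highlight`, `LspHighlight` are not the
// real rust-analyzer or lsp_types definitions.
struct World;
struct Params { offset: u32 }
struct Highlight { start: u32, end: u32 }
#[derive(Debug)]
struct LspHighlight { line: u32, col_start: u32, col_end: u32 }

mod from_proto {
    // Decode protocol parameters into analyzer-side values.
    pub fn offset(params: &super::Params) -> u32 {
        params.offset
    }
}

mod to_proto {
    // Encode analyzer-side values back into protocol types.
    pub fn highlight(h: super::Highlight) -> super::LspHighlight {
        // Stand-in conversion: pretend every line is 80 columns wide.
        super::LspHighlight { line: h.start / 80, col_start: h.start % 80, col_end: h.end % 80 }
    }
}

impl World {
    fn highlights_at(&self, offset: u32) -> Vec<Highlight> {
        vec![Highlight { start: offset, end: offset + 3 }]
    }
}

fn handle_document_highlight(world: &World, params: Params) -> Vec<LspHighlight> {
    let offset = from_proto::offset(&params);          // lsp -> analyzer
    world.highlights_at(offset)                        // run the query
        .into_iter()
        .map(to_proto::highlight)                      // analyzer -> lsp
        .collect()
}

fn main() {
    let res = handle_document_highlight(&World, Params { offset: 165 });
    println!("{:?}", res);
}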