4418: Refactor protocol handling r=matklad a=matklad

bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2020-05-10 17:28:59 +00:00 committed by GitHub
commit 348cbc054c
11 changed files with 999 additions and 1123 deletions
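At a glance, the refactor replaces the generic Conv/ConvWith/TryConvWith traits (the deleted conv.rs below) with direction-named modules, from_proto and to_proto. A hedged before/after sketch, excerpted from the handlers diff further down rather than a runnable snippet:

```rust
// Before: trait-based conversions, dispatched on the context type (deleted conv.rs).
let position = params.text_document_position_params.try_conv_with(&world)?;
let range = info.range.conv_with(&line_index);

// After: plain functions named by conversion direction (new from_proto / to_proto modules).
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let range = to_proto::range(&line_index, info.range);
```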

View file

@ -11,7 +11,7 @@ use ra_syntax::{
TextRange,
};
use crate::FileSymbol;
use crate::{FileRange, FileSymbol};
use super::short_label::ShortLabel;
@ -22,10 +22,11 @@ use super::short_label::ShortLabel;
/// code, like a function or a struct, but this is not strictly required.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NavigationTarget {
// FIXME: use FileRange?
file_id: FileId,
full_range: TextRange,
name: SmolStr,
kind: SyntaxKind,
full_range: TextRange,
focus_range: Option<TextRange>,
container_name: Option<SmolStr>,
description: Option<String>,
@ -63,6 +64,10 @@ impl NavigationTarget {
self.file_id
}
pub fn file_range(&self) -> FileRange {
FileRange { file_id: self.file_id, range: self.full_range }
}
pub fn full_range(&self) -> TextRange {
self.full_range
}

View file

@ -75,6 +75,7 @@ impl TextEdit {
self.indels.is_empty()
}
// FIXME: impl IntoIter instead
pub fn as_indels(&self) -> &[Indel] {
&self.indels
}
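The FIXME above suggests exposing iteration directly rather than through as_indels(). A hypothetical sketch of that direction, assuming TextEdit stores `indels: Vec<Indel>` as in ra_text_edit; this is not part of the commit:

```rust
// Hypothetical: allow `for indel in &text_edit { … }` instead of text_edit.as_indels().iter().
impl<'a> IntoIterator for &'a TextEdit {
    type Item = &'a Indel;
    type IntoIter = std::slice::Iter<'a, Indel>;

    fn into_iter(self) -> Self::IntoIter {
        self.indels.iter()
    }
}
```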

View file

@ -1,726 +0,0 @@
//! Convenience module responsible for translating between rust-analyzer's types
//! and LSP types.
use lsp_types::{
self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation,
Location, LocationLink, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel,
Position, Range, RenameFile, ResourceOp, SemanticTokenModifier, SemanticTokenType,
SignatureInformation, SymbolKind, TextDocumentEdit, TextDocumentIdentifier, TextDocumentItem,
TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit,
};
use ra_ide::{
translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HighlightModifier, HighlightTag,
InlayHint, InlayKind, InsertTextFormat, LineCol, LineIndex, NavigationTarget, RangeInfo,
ReferenceAccess, Severity, SourceChange, SourceFileEdit,
};
use ra_syntax::{SyntaxKind, TextRange, TextSize};
use ra_text_edit::{Indel, TextEdit};
use ra_vfs::LineEndings;
use crate::{
req,
semantic_tokens::{self, ModifierSet, CONSTANT, CONTROL_FLOW, MUTABLE, UNSAFE},
world::WorldSnapshot,
Result,
};
use semantic_tokens::{
ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, FORMAT_SPECIFIER, LIFETIME, TYPE_ALIAS, UNION,
UNRESOLVED_REFERENCE,
};
pub trait Conv {
type Output;
fn conv(self) -> Self::Output;
}
pub trait ConvWith<CTX> {
type Output;
fn conv_with(self, ctx: CTX) -> Self::Output;
}
pub trait TryConvWith<CTX> {
type Output;
fn try_conv_with(self, ctx: CTX) -> Result<Self::Output>;
}
impl Conv for SyntaxKind {
type Output = SymbolKind;
fn conv(self) -> <Self as Conv>::Output {
match self {
SyntaxKind::FN_DEF => SymbolKind::Function,
SyntaxKind::STRUCT_DEF => SymbolKind::Struct,
SyntaxKind::ENUM_DEF => SymbolKind::Enum,
SyntaxKind::ENUM_VARIANT => SymbolKind::EnumMember,
SyntaxKind::TRAIT_DEF => SymbolKind::Interface,
SyntaxKind::MACRO_CALL => SymbolKind::Function,
SyntaxKind::MODULE => SymbolKind::Module,
SyntaxKind::TYPE_ALIAS_DEF => SymbolKind::TypeParameter,
SyntaxKind::RECORD_FIELD_DEF => SymbolKind::Field,
SyntaxKind::STATIC_DEF => SymbolKind::Constant,
SyntaxKind::CONST_DEF => SymbolKind::Constant,
SyntaxKind::IMPL_DEF => SymbolKind::Object,
_ => SymbolKind::Variable,
}
}
}
impl Conv for ReferenceAccess {
type Output = ::lsp_types::DocumentHighlightKind;
fn conv(self) -> Self::Output {
use lsp_types::DocumentHighlightKind;
match self {
ReferenceAccess::Read => DocumentHighlightKind::Read,
ReferenceAccess::Write => DocumentHighlightKind::Write,
}
}
}
impl Conv for CompletionItemKind {
type Output = ::lsp_types::CompletionItemKind;
fn conv(self) -> <Self as Conv>::Output {
use lsp_types::CompletionItemKind::*;
match self {
CompletionItemKind::Keyword => Keyword,
CompletionItemKind::Snippet => Snippet,
CompletionItemKind::Module => Module,
CompletionItemKind::Function => Function,
CompletionItemKind::Struct => Struct,
CompletionItemKind::Enum => Enum,
CompletionItemKind::EnumVariant => EnumMember,
CompletionItemKind::BuiltinType => Struct,
CompletionItemKind::Binding => Variable,
CompletionItemKind::Field => Field,
CompletionItemKind::Trait => Interface,
CompletionItemKind::TypeAlias => Struct,
CompletionItemKind::Const => Constant,
CompletionItemKind::Static => Value,
CompletionItemKind::Method => Method,
CompletionItemKind::TypeParam => TypeParameter,
CompletionItemKind::Macro => Method,
CompletionItemKind::Attribute => EnumMember,
}
}
}
impl Conv for Severity {
type Output = DiagnosticSeverity;
fn conv(self) -> DiagnosticSeverity {
match self {
Severity::Error => DiagnosticSeverity::Error,
Severity::WeakWarning => DiagnosticSeverity::Hint,
}
}
}
impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
type Output = ::lsp_types::CompletionItem;
fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> ::lsp_types::CompletionItem {
let mut additional_text_edits = Vec::new();
let mut text_edit = None;
// LSP does not allow arbitrary edits in completion, so we have to do a
// non-trivial mapping here.
for indel in self.text_edit().as_indels() {
if indel.delete.contains_range(self.source_range()) {
text_edit = Some(if indel.delete == self.source_range() {
indel.conv_with((ctx.0, ctx.1))
} else {
assert!(self.source_range().end() == indel.delete.end());
let range1 = TextRange::new(indel.delete.start(), self.source_range().start());
let range2 = self.source_range();
let edit1 = Indel::replace(range1, String::new());
let edit2 = Indel::replace(range2, indel.insert.clone());
additional_text_edits.push(edit1.conv_with((ctx.0, ctx.1)));
edit2.conv_with((ctx.0, ctx.1))
})
} else {
assert!(self.source_range().intersect(indel.delete).is_none());
additional_text_edits.push(indel.conv_with((ctx.0, ctx.1)));
}
}
let text_edit = text_edit.unwrap();
let mut res = lsp_types::CompletionItem {
label: self.label().to_string(),
detail: self.detail().map(|it| it.to_string()),
filter_text: Some(self.lookup().to_string()),
kind: self.kind().map(|it| it.conv()),
text_edit: Some(text_edit.into()),
additional_text_edits: Some(additional_text_edits),
documentation: self.documentation().map(|it| it.conv()),
deprecated: Some(self.deprecated()),
command: if self.trigger_call_info() {
let cmd = lsp_types::Command {
title: "triggerParameterHints".into(),
command: "editor.action.triggerParameterHints".into(),
arguments: None,
};
Some(cmd)
} else {
None
},
..Default::default()
};
if self.score().is_some() {
res.preselect = Some(true)
}
if self.deprecated() {
res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated])
}
res.insert_text_format = Some(match self.insert_text_format() {
InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet,
InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText,
});
res
}
}
impl ConvWith<&LineIndex> for Position {
type Output = TextSize;
fn conv_with(self, line_index: &LineIndex) -> TextSize {
let line_col = LineCol { line: self.line as u32, col_utf16: self.character as u32 };
line_index.offset(line_col)
}
}
impl ConvWith<&LineIndex> for TextSize {
type Output = Position;
fn conv_with(self, line_index: &LineIndex) -> Position {
let line_col = line_index.line_col(self);
Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16))
}
}
impl ConvWith<&LineIndex> for TextRange {
type Output = Range;
fn conv_with(self, line_index: &LineIndex) -> Range {
Range::new(self.start().conv_with(line_index), self.end().conv_with(line_index))
}
}
impl ConvWith<&LineIndex> for Range {
type Output = TextRange;
fn conv_with(self, line_index: &LineIndex) -> TextRange {
TextRange::new(self.start.conv_with(line_index), self.end.conv_with(line_index))
}
}
impl Conv for ra_ide::Documentation {
type Output = lsp_types::Documentation;
fn conv(self) -> Documentation {
Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,
value: crate::markdown::format_docs(self.as_str()),
})
}
}
impl ConvWith<bool> for ra_ide::FunctionSignature {
type Output = lsp_types::SignatureInformation;
fn conv_with(self, concise: bool) -> Self::Output {
let (label, documentation, params) = if concise {
let mut params = self.parameters;
if self.has_self_param {
params.remove(0);
}
(params.join(", "), None, params)
} else {
(self.to_string(), self.doc.map(|it| it.conv()), self.parameters)
};
let parameters: Vec<ParameterInformation> = params
.into_iter()
.map(|param| ParameterInformation {
label: ParameterLabel::Simple(param),
documentation: None,
})
.collect();
SignatureInformation { label, documentation, parameters: Some(parameters) }
}
}
impl ConvWith<(&LineIndex, LineEndings)> for TextEdit {
type Output = Vec<lsp_types::TextEdit>;
fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> Vec<lsp_types::TextEdit> {
self.as_indels().iter().map_conv_with(ctx).collect()
}
}
impl ConvWith<(&LineIndex, LineEndings)> for &Indel {
type Output = lsp_types::TextEdit;
fn conv_with(
self,
(line_index, line_endings): (&LineIndex, LineEndings),
) -> lsp_types::TextEdit {
let mut new_text = self.insert.clone();
if line_endings == LineEndings::Dos {
new_text = new_text.replace('\n', "\r\n");
}
lsp_types::TextEdit { range: self.delete.conv_with(line_index), new_text }
}
}
pub(crate) struct FoldConvCtx<'a> {
pub(crate) text: &'a str,
pub(crate) line_index: &'a LineIndex,
pub(crate) line_folding_only: bool,
}
impl ConvWith<&FoldConvCtx<'_>> for Fold {
type Output = lsp_types::FoldingRange;
fn conv_with(self, ctx: &FoldConvCtx) -> lsp_types::FoldingRange {
let kind = match self.kind {
FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
FoldKind::Mods => None,
FoldKind::Block => None,
};
let range = self.range.conv_with(&ctx.line_index);
if ctx.line_folding_only {
// Clients with line_folding_only == true (such as VSCode) will fold the whole end line
// even if it contains text not in the folding range. To prevent that we exclude
// range.end.line from the folding region if there is more text after range.end
// on the same line.
let has_more_text_on_end_line = ctx.text
[TextRange::new(self.range.end(), TextSize::of(ctx.text))]
.chars()
.take_while(|it| *it != '\n')
.any(|it| !it.is_whitespace());
let end_line = if has_more_text_on_end_line {
range.end.line.saturating_sub(1)
} else {
range.end.line
};
lsp_types::FoldingRange {
start_line: range.start.line,
start_character: None,
end_line,
end_character: None,
kind,
}
} else {
lsp_types::FoldingRange {
start_line: range.start.line,
start_character: Some(range.start.character),
end_line: range.end.line,
end_character: Some(range.end.character),
kind,
}
}
}
}
impl ConvWith<&LineIndex> for InlayHint {
type Output = req::InlayHint;
fn conv_with(self, line_index: &LineIndex) -> Self::Output {
req::InlayHint {
label: self.label.to_string(),
range: self.range.conv_with(line_index),
kind: match self.kind {
InlayKind::ParameterHint => req::InlayKind::ParameterHint,
InlayKind::TypeHint => req::InlayKind::TypeHint,
InlayKind::ChainingHint => req::InlayKind::ChainingHint,
},
}
}
}
impl Conv for Highlight {
type Output = (u32, u32);
fn conv(self) -> Self::Output {
let mut mods = ModifierSet::default();
let type_ = match self.tag {
HighlightTag::Struct => SemanticTokenType::STRUCT,
HighlightTag::Enum => SemanticTokenType::ENUM,
HighlightTag::Union => UNION,
HighlightTag::TypeAlias => TYPE_ALIAS,
HighlightTag::Trait => SemanticTokenType::INTERFACE,
HighlightTag::BuiltinType => BUILTIN_TYPE,
HighlightTag::SelfType => SemanticTokenType::TYPE,
HighlightTag::Field => SemanticTokenType::MEMBER,
HighlightTag::Function => SemanticTokenType::FUNCTION,
HighlightTag::Module => SemanticTokenType::NAMESPACE,
HighlightTag::Constant => {
mods |= CONSTANT;
mods |= SemanticTokenModifier::STATIC;
SemanticTokenType::VARIABLE
}
HighlightTag::Static => {
mods |= SemanticTokenModifier::STATIC;
SemanticTokenType::VARIABLE
}
HighlightTag::EnumVariant => ENUM_MEMBER,
HighlightTag::Macro => SemanticTokenType::MACRO,
HighlightTag::Local => SemanticTokenType::VARIABLE,
HighlightTag::TypeParam => SemanticTokenType::TYPE_PARAMETER,
HighlightTag::Lifetime => LIFETIME,
HighlightTag::ByteLiteral | HighlightTag::NumericLiteral => SemanticTokenType::NUMBER,
HighlightTag::CharLiteral | HighlightTag::StringLiteral => SemanticTokenType::STRING,
HighlightTag::Comment => SemanticTokenType::COMMENT,
HighlightTag::Attribute => ATTRIBUTE,
HighlightTag::Keyword => SemanticTokenType::KEYWORD,
HighlightTag::UnresolvedReference => UNRESOLVED_REFERENCE,
HighlightTag::FormatSpecifier => FORMAT_SPECIFIER,
};
for modifier in self.modifiers.iter() {
let modifier = match modifier {
HighlightModifier::Definition => SemanticTokenModifier::DECLARATION,
HighlightModifier::ControlFlow => CONTROL_FLOW,
HighlightModifier::Mutable => MUTABLE,
HighlightModifier::Unsafe => UNSAFE,
};
mods |= modifier;
}
(semantic_tokens::type_index(type_), mods.0)
}
}
impl<T: ConvWith<CTX>, CTX> ConvWith<CTX> for Option<T> {
type Output = Option<T::Output>;
fn conv_with(self, ctx: CTX) -> Self::Output {
self.map(|x| ConvWith::conv_with(x, ctx))
}
}
impl TryConvWith<&WorldSnapshot> for &Url {
type Output = FileId;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
world.uri_to_file_id(self)
}
}
impl TryConvWith<&WorldSnapshot> for FileId {
type Output = Url;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<Url> {
world.file_id_to_uri(self)
}
}
impl TryConvWith<&WorldSnapshot> for &TextDocumentItem {
type Output = FileId;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
self.uri.try_conv_with(world)
}
}
impl TryConvWith<&WorldSnapshot> for &VersionedTextDocumentIdentifier {
type Output = FileId;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
self.uri.try_conv_with(world)
}
}
impl TryConvWith<&WorldSnapshot> for &TextDocumentIdentifier {
type Output = FileId;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
world.uri_to_file_id(&self.uri)
}
}
impl TryConvWith<&WorldSnapshot> for &TextDocumentPositionParams {
type Output = FilePosition;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<FilePosition> {
let file_id = self.text_document.try_conv_with(world)?;
let line_index = world.analysis().file_line_index(file_id)?;
let offset = self.position.conv_with(&line_index);
Ok(FilePosition { file_id, offset })
}
}
impl TryConvWith<&WorldSnapshot> for (&TextDocumentIdentifier, Range) {
type Output = FileRange;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileRange> {
let file_id = self.0.try_conv_with(world)?;
let line_index = world.analysis().file_line_index(file_id)?;
let range = self.1.conv_with(&line_index);
Ok(FileRange { file_id, range })
}
}
impl<T: TryConvWith<CTX>, CTX: Copy> TryConvWith<CTX> for Vec<T> {
type Output = Vec<<T as TryConvWith<CTX>>::Output>;
fn try_conv_with(self, ctx: CTX) -> Result<Self::Output> {
let mut res = Vec::with_capacity(self.len());
for item in self {
res.push(item.try_conv_with(ctx)?);
}
Ok(res)
}
}
impl TryConvWith<&WorldSnapshot> for SourceChange {
type Output = req::SourceChange;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::SourceChange> {
let cursor_position = match self.cursor_position {
None => None,
Some(pos) => {
let line_index = world.analysis().file_line_index(pos.file_id)?;
let edit = self
.source_file_edits
.iter()
.find(|it| it.file_id == pos.file_id)
.map(|it| &it.edit);
let line_col = match edit {
Some(edit) => translate_offset_with_edit(&*line_index, pos.offset, edit),
None => line_index.line_col(pos.offset),
};
let position =
Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16));
Some(TextDocumentPositionParams {
text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?),
position,
})
}
};
let mut document_changes: Vec<DocumentChangeOperation> = Vec::new();
for resource_op in self.file_system_edits.try_conv_with(world)? {
document_changes.push(DocumentChangeOperation::Op(resource_op));
}
for text_document_edit in self.source_file_edits.try_conv_with(world)? {
document_changes.push(DocumentChangeOperation::Edit(text_document_edit));
}
let workspace_edit = WorkspaceEdit {
changes: None,
document_changes: Some(DocumentChanges::Operations(document_changes)),
};
Ok(req::SourceChange { label: self.label, workspace_edit, cursor_position })
}
}
impl TryConvWith<&WorldSnapshot> for SourceFileEdit {
type Output = TextDocumentEdit;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<TextDocumentEdit> {
let text_document = VersionedTextDocumentIdentifier {
uri: self.file_id.try_conv_with(world)?,
version: None,
};
let line_index = world.analysis().file_line_index(self.file_id)?;
let line_endings = world.file_line_endings(self.file_id);
let edits =
self.edit.as_indels().iter().map_conv_with((&line_index, line_endings)).collect();
Ok(TextDocumentEdit { text_document, edits })
}
}
impl TryConvWith<&WorldSnapshot> for FileSystemEdit {
type Output = ResourceOp;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<ResourceOp> {
let res = match self {
FileSystemEdit::CreateFile { source_root, path } => {
let uri = world.path_to_uri(source_root, &path)?;
ResourceOp::Create(CreateFile { uri, options: None })
}
FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
let old_uri = world.file_id_to_uri(src)?;
let new_uri = world.path_to_uri(dst_source_root, &dst_path)?;
ResourceOp::Rename(RenameFile { old_uri, new_uri, options: None })
}
};
Ok(res)
}
}
impl TryConvWith<&WorldSnapshot> for &NavigationTarget {
type Output = Location;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<Location> {
let line_index = world.analysis().file_line_index(self.file_id())?;
let range = self.range();
to_location(self.file_id(), range, &world, &line_index)
}
}
impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<NavigationTarget>) {
type Output = LocationLink;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<LocationLink> {
let (src_file_id, target) = self;
let target_uri = target.info.file_id().try_conv_with(world)?;
let src_line_index = world.analysis().file_line_index(src_file_id)?;
let tgt_line_index = world.analysis().file_line_index(target.info.file_id())?;
let target_range = target.info.full_range().conv_with(&tgt_line_index);
let target_selection_range = target
.info
.focus_range()
.map(|it| it.conv_with(&tgt_line_index))
.unwrap_or(target_range);
let res = LocationLink {
origin_selection_range: Some(target.range.conv_with(&src_line_index)),
target_uri,
target_range,
target_selection_range,
};
Ok(res)
}
}
impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<Vec<NavigationTarget>>) {
type Output = req::GotoDefinitionResponse;
fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::GotoTypeDefinitionResponse> {
let (file_id, RangeInfo { range, info: navs }) = self;
let links = navs
.into_iter()
.map(|nav| (file_id, RangeInfo::new(range, nav)))
.try_conv_with_to_vec(world)?;
if world.config.client_caps.location_link {
Ok(links.into())
} else {
let locations: Vec<Location> = links
.into_iter()
.map(|link| Location { uri: link.target_uri, range: link.target_selection_range })
.collect();
Ok(locations.into())
}
}
}
pub fn to_call_hierarchy_item(
file_id: FileId,
range: TextRange,
world: &WorldSnapshot,
line_index: &LineIndex,
nav: NavigationTarget,
) -> Result<lsp_types::CallHierarchyItem> {
Ok(lsp_types::CallHierarchyItem {
name: nav.name().to_string(),
kind: nav.kind().conv(),
tags: None,
detail: nav.description().map(|it| it.to_string()),
uri: file_id.try_conv_with(&world)?,
range: nav.range().conv_with(&line_index),
selection_range: range.conv_with(&line_index),
})
}
pub fn to_location(
file_id: FileId,
range: TextRange,
world: &WorldSnapshot,
line_index: &LineIndex,
) -> Result<Location> {
let url = file_id.try_conv_with(world)?;
let loc = Location::new(url, range.conv_with(line_index));
Ok(loc)
}
pub trait MapConvWith<CTX>: Sized {
type Output;
fn map_conv_with(self, ctx: CTX) -> ConvWithIter<Self, CTX> {
ConvWithIter { iter: self, ctx }
}
}
impl<CTX, I> MapConvWith<CTX> for I
where
I: Iterator,
I::Item: ConvWith<CTX>,
{
type Output = <I::Item as ConvWith<CTX>>::Output;
}
pub struct ConvWithIter<I, CTX> {
iter: I,
ctx: CTX,
}
impl<I, CTX> Iterator for ConvWithIter<I, CTX>
where
I: Iterator,
I::Item: ConvWith<CTX>,
CTX: Copy,
{
type Item = <I::Item as ConvWith<CTX>>::Output;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map(|item| item.conv_with(self.ctx))
}
}
pub trait TryConvWithToVec<CTX>: Sized {
type Output;
fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>>;
}
impl<I, CTX> TryConvWithToVec<CTX> for I
where
I: Iterator,
I::Item: TryConvWith<CTX>,
CTX: Copy,
{
type Output = <I::Item as TryConvWith<CTX>>::Output;
fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>> {
self.map(|it| it.try_conv_with(ctx)).collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
use test_utils::extract_ranges;
#[test]
fn conv_fold_line_folding_only_fixup() {
let text = r#"<fold>mod a;
mod b;
mod c;</fold>
fn main() <fold>{
if cond <fold>{
a::do_a();
}</fold> else <fold>{
b::do_b();
}</fold>
}</fold>"#;
let (ranges, text) = extract_ranges(text, "fold");
assert_eq!(ranges.len(), 4);
let folds = vec![
Fold { range: ranges[0], kind: FoldKind::Mods },
Fold { range: ranges[1], kind: FoldKind::Block },
Fold { range: ranges[2], kind: FoldKind::Block },
Fold { range: ranges[3], kind: FoldKind::Block },
];
let line_index = LineIndex::new(&text);
let ctx = FoldConvCtx { text: &text, line_index: &line_index, line_folding_only: true };
let converted: Vec<_> = folds.into_iter().map_conv_with(&ctx).collect();
let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
assert_eq!(converted.len(), expected_lines.len());
for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
assert_eq!(folding_range.start_line, *start_line);
assert_eq!(folding_range.start_character, None);
assert_eq!(folding_range.end_line, *end_line);
assert_eq!(folding_range.end_character, None);
}
}
}
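The line_folding_only fixup in the Fold conversion above (exercised by the test at the end of this file) reduces to one check on the text that follows the fold's end offset. A standalone sketch of that check, using only std and illustrative helpers in place of LineIndex/TextRange:

```rust
// If anything other than whitespace follows the fold's end offset on the same line,
// report the fold as ending one line earlier, so a line-only client does not hide that text.
fn adjusted_end_line(text: &str, fold_end: usize) -> usize {
    let end_line = text[..fold_end].matches('\n').count();
    let has_more_text_on_end_line = text[fold_end..]
        .chars()
        .take_while(|&c| c != '\n')
        .any(|c| !c.is_whitespace());
    if has_more_text_on_end_line {
        end_line.saturating_sub(1)
    } else {
        end_line
    }
}

fn main() {
    // The `if` block's fold ends just before ` else {`, which shares its line with more
    // text, so the reported end line is pulled back from 2 to 1.
    let text = "if cond {\n    do_a();\n} else {\n    do_b();\n}\n";
    assert_eq!(adjusted_end_line(text, text.find("} else").unwrap() + 1), 1);
}
```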

View file

@ -0,0 +1,42 @@
//! Conversion of lsp_types types to rust-analyzer specific ones.
use ra_db::{FileId, FilePosition, FileRange};
use ra_ide::{LineCol, LineIndex};
use ra_syntax::{TextRange, TextSize};
use crate::{world::WorldSnapshot, Result};
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize {
let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 };
line_index.offset(line_col)
}
pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> TextRange {
let start = offset(line_index, range.start);
let end = offset(line_index, range.end);
TextRange::new(start, end)
}
pub(crate) fn file_id(world: &WorldSnapshot, url: &lsp_types::Url) -> Result<FileId> {
world.uri_to_file_id(url)
}
pub(crate) fn file_position(
world: &WorldSnapshot,
tdpp: lsp_types::TextDocumentPositionParams,
) -> Result<FilePosition> {
let file_id = file_id(world, &tdpp.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let offset = offset(&*line_index, tdpp.position);
Ok(FilePosition { file_id, offset })
}
pub(crate) fn file_range(
world: &WorldSnapshot,
text_document_identifier: lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
) -> Result<FileRange> {
let file_id = file_id(world, &text_document_identifier.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let range = text_range(&line_index, range);
Ok(FileRange { file_id, range })
}
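from_proto::offset goes through LineCol::col_utf16 because LSP positions count UTF-16 code units rather than bytes. A standalone sketch of what that conversion has to account for, using only std (the real LineIndex precomputes line starts instead of rescanning the text):

```rust
// Convert an LSP (line, UTF-16 character) position into a byte offset in `text`.
fn lsp_position_to_offset(text: &str, line: u32, character_utf16: u32) -> usize {
    // Byte offset of the first character of the requested line.
    let line_start: usize = text.split_inclusive('\n').take(line as usize).map(str::len).sum();
    let mut utf16_units = 0u32;
    for (byte_idx, c) in text[line_start..].char_indices() {
        if utf16_units >= character_utf16 || c == '\n' {
            return line_start + byte_idx;
        }
        utf16_units += c.len_utf16() as u32;
    }
    text.len()
}

fn main() {
    // 'ß' is one UTF-16 code unit but two bytes, so character 2 lands at byte offset 3.
    assert_eq!(lsp_position_to_offset("aßc\n", 0, 2), 3);
}
```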

View file

@ -20,10 +20,11 @@ macro_rules! eprintln {
mod vfs_glob;
mod caps;
mod cargo_target_spec;
mod conv;
mod to_proto;
mod from_proto;
mod main_loop;
mod markdown;
pub mod req;
pub mod lsp_ext;
pub mod config;
mod world;
mod diagnostics;

View file

@ -1,25 +1,12 @@
//! Defines `rust-analyzer` specific custom messages.
//! rust-analyzer extensions to the LSP.
use std::path::PathBuf;
use lsp_types::request::Request;
use lsp_types::{Location, Position, Range, TextDocumentIdentifier};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
pub use lsp_types::{
notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens,
CodeLensParams, CompletionParams, CompletionResponse, ConfigurationItem, ConfigurationParams,
DiagnosticTag, DidChangeConfigurationParams, DidChangeWatchedFilesParams,
DidChangeWatchedFilesRegistrationOptions, DocumentHighlightParams,
DocumentOnTypeFormattingParams, DocumentSymbolParams, DocumentSymbolResponse,
FileSystemWatcher, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams,
InitializeResult, MessageType, PartialResultParams, ProgressParams, ProgressParamsValue,
ProgressToken, PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams,
SelectionRange, SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
SignatureHelp, SignatureHelpParams, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
};
use std::path::PathBuf;
pub enum AnalyzerStatus {}
impl Request for AnalyzerStatus {
@ -91,7 +78,7 @@ pub struct FindMatchingBraceParams {
pub enum ParentModule {}
impl Request for ParentModule {
type Params = TextDocumentPositionParams;
type Params = lsp_types::TextDocumentPositionParams;
type Result = Vec<Location>;
const METHOD: &'static str = "rust-analyzer/parentModule";
}
@ -114,7 +101,7 @@ pub struct JoinLinesParams {
pub enum OnEnter {}
impl Request for OnEnter {
type Params = TextDocumentPositionParams;
type Params = lsp_types::TextDocumentPositionParams;
type Result = Option<SourceChange>;
const METHOD: &'static str = "rust-analyzer/onEnter";
}
@ -150,8 +137,8 @@ pub struct Runnable {
#[serde(rename_all = "camelCase")]
pub struct SourceChange {
pub label: String,
pub workspace_edit: WorkspaceEdit,
pub cursor_position: Option<TextDocumentPositionParams>,
pub workspace_edit: lsp_types::WorkspaceEdit,
pub cursor_position: Option<lsp_types::TextDocumentPositionParams>,
}
pub enum InlayHints {}
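Every extension in this module follows the same shape as AnalyzerStatus, ParentModule, and OnEnter above: an uninhabited marker enum plus an impl of lsp_types::request::Request with a rust-analyzer/… method name. A purely hypothetical declaration, shown only to illustrate the pattern (not part of this commit):

```rust
// Hypothetical extension request; the name and method string are invented.
pub enum HypotheticalMemoryUsage {}

impl Request for HypotheticalMemoryUsage {
    type Params = ();
    type Result = String;
    const METHOD: &'static str = "rust-analyzer/hypotheticalMemoryUsage";
}
```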

View file

@ -37,13 +37,12 @@ use threadpool::ThreadPool;
use crate::{
config::{Config, FilesWatcher},
conv::{ConvWith, TryConvWith},
diagnostics::DiagnosticTask,
from_proto, lsp_ext,
main_loop::{
pending_requests::{PendingRequest, PendingRequests},
subscriptions::Subscriptions,
},
req,
world::{WorldSnapshot, WorldState},
Result,
};
@ -104,7 +103,7 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
if project_roots.is_empty() && config.notifications.cargo_toml_not_found {
show_message(
req::MessageType::Error,
lsp_types::MessageType::Error,
format!(
"rust-analyzer failed to discover workspace, no Cargo.toml found, dirs searched: {}",
ws_roots.iter().format_with(", ", |it, f| f(&it.display()))
@ -124,7 +123,7 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
.map_err(|err| {
log::error!("failed to load workspace: {:#}", err);
show_message(
req::MessageType::Error,
lsp_types::MessageType::Error,
format!("rust-analyzer failed to load workspace: {:#}", err),
&connection.sender,
);
@ -142,23 +141,25 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
.collect::<std::result::Result<Vec<_>, _>>()?;
if let FilesWatcher::Client = config.files.watcher {
let registration_options = req::DidChangeWatchedFilesRegistrationOptions {
let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
watchers: workspaces
.iter()
.flat_map(ProjectWorkspace::to_roots)
.filter(PackageRoot::is_member)
.map(|root| format!("{}/**/*.rs", root.path().display()))
.map(|glob_pattern| req::FileSystemWatcher { glob_pattern, kind: None })
.map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
.collect(),
};
let registration = req::Registration {
let registration = lsp_types::Registration {
id: "file-watcher".to_string(),
method: "workspace/didChangeWatchedFiles".to_string(),
register_options: Some(serde_json::to_value(registration_options).unwrap()),
};
let params = req::RegistrationParams { registrations: vec![registration] };
let request =
request_new::<req::RegisterCapability>(loop_state.next_request_id(), params);
let params = lsp_types::RegistrationParams { registrations: vec![registration] };
let request = request_new::<lsp_types::request::RegisterCapability>(
loop_state.next_request_id(),
params,
);
connection.sender.send(request.into()).unwrap();
}
@ -258,14 +259,14 @@ impl fmt::Debug for Event {
match self {
Event::Msg(Message::Notification(not)) => {
if notification_is::<req::DidOpenTextDocument>(not)
|| notification_is::<req::DidChangeTextDocument>(not)
if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
|| notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
{
return debug_verbose_not(not, f);
}
}
Event::Task(Task::Notify(not)) => {
if notification_is::<req::PublishDiagnostics>(not) {
if notification_is::<lsp_types::notification::PublishDiagnostics>(not) {
return debug_verbose_not(not, f);
}
}
@ -450,7 +451,7 @@ fn loop_turn(
log::error!("overly long loop turn: {:?}", loop_duration);
if env::var("RA_PROFILE").is_ok() {
show_message(
req::MessageType::Error,
lsp_types::MessageType::Error,
format!("overly long loop turn: {:?}", loop_duration),
&connection.sender,
);
@ -500,45 +501,51 @@ fn on_request(
request_received,
};
pool_dispatcher
.on_sync::<req::CollectGarbage>(|s, ()| Ok(s.collect_garbage()))?
.on_sync::<req::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))?
.on_sync::<req::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))?
.on_sync::<req::SelectionRangeRequest>(|s, p| {
.on_sync::<lsp_ext::CollectGarbage>(|s, ()| Ok(s.collect_garbage()))?
.on_sync::<lsp_ext::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))?
.on_sync::<lsp_ext::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))?
.on_sync::<lsp_types::request::SelectionRangeRequest>(|s, p| {
handlers::handle_selection_range(s.snapshot(), p)
})?
.on_sync::<req::FindMatchingBrace>(|s, p| {
.on_sync::<lsp_ext::FindMatchingBrace>(|s, p| {
handlers::handle_find_matching_brace(s.snapshot(), p)
})?
.on::<req::AnalyzerStatus>(handlers::handle_analyzer_status)?
.on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
.on::<req::ExpandMacro>(handlers::handle_expand_macro)?
.on::<req::OnTypeFormatting>(handlers::handle_on_type_formatting)?
.on::<req::DocumentSymbolRequest>(handlers::handle_document_symbol)?
.on::<req::WorkspaceSymbol>(handlers::handle_workspace_symbol)?
.on::<req::GotoDefinition>(handlers::handle_goto_definition)?
.on::<req::GotoImplementation>(handlers::handle_goto_implementation)?
.on::<req::GotoTypeDefinition>(handlers::handle_goto_type_definition)?
.on::<req::ParentModule>(handlers::handle_parent_module)?
.on::<req::Runnables>(handlers::handle_runnables)?
.on::<req::Completion>(handlers::handle_completion)?
.on::<req::CodeActionRequest>(handlers::handle_code_action)?
.on::<req::CodeLensRequest>(handlers::handle_code_lens)?
.on::<req::CodeLensResolve>(handlers::handle_code_lens_resolve)?
.on::<req::FoldingRangeRequest>(handlers::handle_folding_range)?
.on::<req::SignatureHelpRequest>(handlers::handle_signature_help)?
.on::<req::HoverRequest>(handlers::handle_hover)?
.on::<req::PrepareRenameRequest>(handlers::handle_prepare_rename)?
.on::<req::Rename>(handlers::handle_rename)?
.on::<req::References>(handlers::handle_references)?
.on::<req::Formatting>(handlers::handle_formatting)?
.on::<req::DocumentHighlightRequest>(handlers::handle_document_highlight)?
.on::<req::InlayHints>(handlers::handle_inlay_hints)?
.on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
.on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
.on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
.on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
.on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
.on::<req::Ssr>(handlers::handle_ssr)?
.on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)?
.on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)?
.on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)?
.on::<lsp_ext::ParentModule>(handlers::handle_parent_module)?
.on::<lsp_ext::Runnables>(handlers::handle_runnables)?
.on::<lsp_ext::InlayHints>(handlers::handle_inlay_hints)?
.on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting)?
.on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)?
.on::<lsp_types::request::WorkspaceSymbol>(handlers::handle_workspace_symbol)?
.on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)?
.on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)?
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)?
.on::<lsp_types::request::Completion>(handlers::handle_completion)?
.on::<lsp_types::request::CodeActionRequest>(handlers::handle_code_action)?
.on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)?
.on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)?
.on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)?
.on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)?
.on::<lsp_types::request::HoverRequest>(handlers::handle_hover)?
.on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)?
.on::<lsp_types::request::Rename>(handlers::handle_rename)?
.on::<lsp_types::request::References>(handlers::handle_references)?
.on::<lsp_types::request::Formatting>(handlers::handle_formatting)?
.on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)?
.on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
.on::<lsp_types::request::CallHierarchyIncomingCalls>(
handlers::handle_call_hierarchy_incoming,
)?
.on::<lsp_types::request::CallHierarchyOutgoingCalls>(
handlers::handle_call_hierarchy_outgoing,
)?
.on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
.on::<lsp_types::request::SemanticTokensRangeRequest>(
handlers::handle_semantic_tokens_range,
)?
.on::<lsp_ext::Ssr>(handlers::handle_ssr)?
.finish();
Ok(())
}
@ -549,7 +556,7 @@ fn on_notification(
loop_state: &mut LoopState,
not: Notification,
) -> Result<()> {
let not = match notification_cast::<req::Cancel>(not) {
let not = match notification_cast::<lsp_types::notification::Cancel>(not) {
Ok(params) => {
let id: RequestId = match params.id {
NumberOrString::Number(id) => id.into(),
@ -567,7 +574,7 @@ fn on_notification(
}
Err(not) => not,
};
let not = match notification_cast::<req::DidOpenTextDocument>(not) {
let not = match notification_cast::<lsp_types::notification::DidOpenTextDocument>(not) {
Ok(params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
@ -580,11 +587,11 @@ fn on_notification(
}
Err(not) => not,
};
let not = match notification_cast::<req::DidChangeTextDocument>(not) {
let not = match notification_cast::<lsp_types::notification::DidChangeTextDocument>(not) {
Ok(params) => {
let DidChangeTextDocumentParams { text_document, content_changes } = params;
let world = state.snapshot();
let file_id = text_document.try_conv_with(&world)?;
let file_id = from_proto::file_id(&world, &text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let uri = text_document.uri;
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
@ -595,7 +602,7 @@ fn on_notification(
}
Err(not) => not,
};
let not = match notification_cast::<req::DidSaveTextDocument>(not) {
let not = match notification_cast::<lsp_types::notification::DidSaveTextDocument>(not) {
Ok(_params) => {
if let Some(flycheck) = &state.flycheck {
flycheck.update();
@ -604,7 +611,7 @@ fn on_notification(
}
Err(not) => not,
};
let not = match notification_cast::<req::DidCloseTextDocument>(not) {
let not = match notification_cast::<lsp_types::notification::DidCloseTextDocument>(not) {
Ok(params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
@ -612,22 +619,22 @@ fn on_notification(
loop_state.subscriptions.remove_sub(FileId(file_id.0));
}
let params =
req::PublishDiagnosticsParams { uri, diagnostics: Vec::new(), version: None };
let not = notification_new::<req::PublishDiagnostics>(params);
lsp_types::PublishDiagnosticsParams { uri, diagnostics: Vec::new(), version: None };
let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params);
msg_sender.send(not.into()).unwrap();
return Ok(());
}
Err(not) => not,
};
let not = match notification_cast::<req::DidChangeConfiguration>(not) {
let not = match notification_cast::<lsp_types::notification::DidChangeConfiguration>(not) {
Ok(_) => {
// As stated in https://github.com/microsoft/language-server-protocol/issues/676,
// this notification's parameters should be ignored and the actual config queried separately.
let request_id = loop_state.next_request_id();
let request = request_new::<req::WorkspaceConfiguration>(
let request = request_new::<lsp_types::request::WorkspaceConfiguration>(
request_id.clone(),
req::ConfigurationParams {
items: vec![req::ConfigurationItem {
lsp_types::ConfigurationParams {
items: vec![lsp_types::ConfigurationItem {
scope_uri: None,
section: Some("rust-analyzer".to_string()),
}],
@ -640,7 +647,7 @@ fn on_notification(
}
Err(not) => not,
};
let not = match notification_cast::<req::DidChangeWatchedFiles>(not) {
let not = match notification_cast::<lsp_types::notification::DidChangeWatchedFiles>(not) {
Ok(params) => {
let mut vfs = state.vfs.write();
for change in params.changes {
@ -694,7 +701,7 @@ fn apply_document_changes(
line_index = Cow::Owned(LineIndex::new(&old_text));
}
index_valid = IndexValid::UpToLineExclusive(range.start.line);
let range = range.conv_with(&line_index);
let range = from_proto::text_range(&line_index, range);
let mut text = old_text.to_owned();
match std::panic::catch_unwind(move || {
text.replace_range(Range::<usize>::from(range), &change.text);
@ -742,11 +749,11 @@ fn on_check_task(
}
CheckTask::Status(progress) => {
let params = req::ProgressParams {
token: req::ProgressToken::String("rustAnalyzer/cargoWatcher".to_string()),
value: req::ProgressParamsValue::WorkDone(progress),
let params = lsp_types::ProgressParams {
token: lsp_types::ProgressToken::String("rustAnalyzer/cargoWatcher".to_string()),
value: lsp_types::ProgressParamsValue::WorkDone(progress),
};
let not = notification_new::<req::Progress>(params);
let not = notification_new::<lsp_types::notification::Progress>(params);
task_sender.send(Task::Notify(not)).unwrap();
}
};
@ -768,8 +775,8 @@ fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state:
};
let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect();
let params = req::PublishDiagnosticsParams { uri, diagnostics, version: None };
let not = notification_new::<req::PublishDiagnostics>(params);
let params = lsp_types::PublishDiagnosticsParams { uri, diagnostics, version: None };
let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params);
msg_sender.send(not.into()).unwrap();
}
}
@ -782,10 +789,10 @@ fn send_startup_progress(sender: &Sender<Message>, loop_state: &mut LoopState) {
match (prev, loop_state.workspace_loaded) {
(None, false) => {
let work_done_progress_create = request_new::<req::WorkDoneProgressCreate>(
let work_done_progress_create = request_new::<lsp_types::request::WorkDoneProgressCreate>(
loop_state.next_request_id(),
WorkDoneProgressCreateParams {
token: req::ProgressToken::String("rustAnalyzer/startup".into()),
token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()),
},
);
sender.send(work_done_progress_create.into()).unwrap();
@ -817,9 +824,10 @@ fn send_startup_progress(sender: &Sender<Message>, loop_state: &mut LoopState) {
}
fn send_startup_progress_notif(sender: &Sender<Message>, work_done_progress: WorkDoneProgress) {
let notif = notification_new::<req::Progress>(req::ProgressParams {
token: req::ProgressToken::String("rustAnalyzer/startup".into()),
value: req::ProgressParamsValue::WorkDone(work_done_progress),
let notif =
notification_new::<lsp_types::notification::Progress>(lsp_types::ProgressParams {
token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()),
value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress),
});
sender.send(notif.into()).unwrap();
}
@ -842,7 +850,7 @@ impl<'a> PoolDispatcher<'a> {
f: fn(&mut WorldState, R::Params) -> Result<R::Result>,
) -> Result<&mut Self>
where
R: req::Request + 'static,
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + panic::UnwindSafe + 'static,
R::Result: Serialize + 'static,
{
@ -865,7 +873,7 @@ impl<'a> PoolDispatcher<'a> {
/// Dispatches the request onto thread pool
fn on<R>(&mut self, f: fn(WorldSnapshot, R::Params) -> Result<R::Result>) -> Result<&mut Self>
where
R: req::Request + 'static,
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + Send + 'static,
R::Result: Serialize + 'static,
{
@ -891,7 +899,7 @@ impl<'a> PoolDispatcher<'a> {
fn parse<R>(&mut self) -> Option<(RequestId, R::Params)>
where
R: req::Request + 'static,
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + 'static,
{
let req = self.req.take()?;
@ -928,7 +936,7 @@ impl<'a> PoolDispatcher<'a> {
fn result_to_task<R>(id: RequestId, result: Result<R::Result>) -> Task
where
R: req::Request + 'static,
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + 'static,
R::Result: Serialize + 'static,
{
@ -984,10 +992,14 @@ fn update_file_notifications_on_threadpool(
}
}
pub fn show_message(typ: req::MessageType, message: impl Into<String>, sender: &Sender<Message>) {
pub fn show_message(
typ: lsp_types::MessageType,
message: impl Into<String>,
sender: &Sender<Message>,
) {
let message = message.into();
let params = req::ShowMessageParams { typ, message };
let not = notification_new::<req::ShowMessage>(params);
let params = lsp_types::ShowMessageParams { typ, message };
let not = notification_new::<lsp_types::notification::ShowMessage>(params);
sender.send(not.into()).unwrap();
}
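The on_notification chain above depends on notification_cast returning the notification unchanged when the method does not match, so the casts can be tried one after another. A simplified, self-contained sketch of that idiom with stand-in types (not lsp-server's real API):

```rust
// Stand-ins for lsp-server's Notification and notification_cast, for illustration only.
struct Notification {
    method: &'static str,
    payload: String,
}

fn cast(not: Notification, method: &str) -> Result<String, Notification> {
    if not.method == method {
        Ok(not.payload)
    } else {
        Err(not) // not this kind of notification; hand it back so the next cast can try
    }
}

fn on_notification(not: Notification) {
    let not = match cast(not, "textDocument/didSave") {
        Ok(uri) => {
            println!("saved {}", uri);
            return;
        }
        Err(not) => not,
    };
    match cast(not, "$/cancelRequest") {
        Ok(id) => println!("cancelled request {}", id),
        Err(not) => println!("unhandled notification: {}", not.method),
    }
}

fn main() {
    on_notification(Notification { method: "$/cancelRequest", payload: "42".to_string() });
}
```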

View file

@ -22,6 +22,7 @@ use ra_ide::{
Assist, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
};
use ra_prof::profile;
use ra_project_model::TargetKind;
use ra_syntax::{AstNode, SyntaxKind, TextRange, TextSize};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
@ -31,18 +32,13 @@ use stdx::format_to;
use crate::{
cargo_target_spec::CargoTargetSpec,
config::RustfmtConfig,
conv::{
to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith,
TryConvWithToVec,
},
diagnostics::DiagnosticTask,
from_json,
req::{self, InlayHint, InlayHintsParams},
semantic_tokens::SemanticTokensBuilder,
from_json, from_proto,
lsp_ext::{self, InlayHint, InlayHintsParams},
to_proto,
world::WorldSnapshot,
LspError, Result,
};
use ra_project_model::TargetKind;
pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> {
let _p = profile("handle_analyzer_status");
@ -56,48 +52,51 @@ pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> {
Ok(buf)
}
pub fn handle_syntax_tree(world: WorldSnapshot, params: req::SyntaxTreeParams) -> Result<String> {
pub fn handle_syntax_tree(
world: WorldSnapshot,
params: lsp_ext::SyntaxTreeParams,
) -> Result<String> {
let _p = profile("handle_syntax_tree");
let id = params.text_document.try_conv_with(&world)?;
let id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(id)?;
let text_range = params.range.map(|p| p.conv_with(&line_index));
let text_range = params.range.map(|r| from_proto::text_range(&line_index, r));
let res = world.analysis().syntax_tree(id, text_range)?;
Ok(res)
}
pub fn handle_expand_macro(
world: WorldSnapshot,
params: req::ExpandMacroParams,
) -> Result<Option<req::ExpandedMacro>> {
params: lsp_ext::ExpandMacroParams,
) -> Result<Option<lsp_ext::ExpandedMacro>> {
let _p = profile("handle_expand_macro");
let file_id = params.text_document.try_conv_with(&world)?;
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let offset = params.position.map(|p| p.conv_with(&line_index));
let offset = params.position.map(|p| from_proto::offset(&line_index, p));
match offset {
None => Ok(None),
Some(offset) => {
let res = world.analysis().expand_macro(FilePosition { file_id, offset })?;
Ok(res.map(|it| req::ExpandedMacro { name: it.name, expansion: it.expansion }))
Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
}
}
}
pub fn handle_selection_range(
world: WorldSnapshot,
params: req::SelectionRangeParams,
) -> Result<Option<Vec<req::SelectionRange>>> {
params: lsp_types::SelectionRangeParams,
) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = profile("handle_selection_range");
let file_id = params.text_document.try_conv_with(&world)?;
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let res: Result<Vec<req::SelectionRange>> = params
let res: Result<Vec<lsp_types::SelectionRange>> = params
.positions
.into_iter()
.map_conv_with(&line_index)
.map(|position| {
let offset = from_proto::offset(&line_index, position);
let mut ranges = Vec::new();
{
let mut range = TextRange::new(position, position);
let mut range = TextRange::new(offset, offset);
loop {
ranges.push(range);
let frange = FileRange { file_id, range };
@ -109,13 +108,13 @@ pub fn handle_selection_range(
}
}
}
let mut range = req::SelectionRange {
range: ranges.last().unwrap().conv_with(&line_index),
let mut range = lsp_types::SelectionRange {
range: to_proto::range(&line_index, *ranges.last().unwrap()),
parent: None,
};
for r in ranges.iter().rev().skip(1) {
range = req::SelectionRange {
range: r.conv_with(&line_index),
for &r in ranges.iter().rev().skip(1) {
range = lsp_types::SelectionRange {
range: to_proto::range(&line_index, r),
parent: Some(Box::new(range)),
}
}
@ -128,57 +127,55 @@ pub fn handle_selection_range(
pub fn handle_find_matching_brace(
world: WorldSnapshot,
params: req::FindMatchingBraceParams,
params: lsp_ext::FindMatchingBraceParams,
) -> Result<Vec<Position>> {
let _p = profile("handle_find_matching_brace");
let file_id = params.text_document.try_conv_with(&world)?;
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let res = params
.offsets
.into_iter()
.map_conv_with(&line_index)
.map(|offset| {
if let Ok(Some(matching_brace_offset)) =
world.analysis().matching_brace(FilePosition { file_id, offset })
{
matching_brace_offset
} else {
offset
}
.map(|position| {
let offset = from_proto::offset(&line_index, position);
let offset = match world.analysis().matching_brace(FilePosition { file_id, offset }) {
Ok(Some(matching_brace_offset)) => matching_brace_offset,
Err(_) | Ok(None) => offset,
};
to_proto::position(&line_index, offset)
})
.map_conv_with(&line_index)
.collect();
Ok(res)
}
pub fn handle_join_lines(
world: WorldSnapshot,
params: req::JoinLinesParams,
) -> Result<req::SourceChange> {
params: lsp_ext::JoinLinesParams,
) -> Result<lsp_ext::SourceChange> {
let _p = profile("handle_join_lines");
let frange = (&params.text_document, params.range).try_conv_with(&world)?;
world.analysis().join_lines(frange)?.try_conv_with(&world)
let frange = from_proto::file_range(&world, params.text_document, params.range)?;
let source_change = world.analysis().join_lines(frange)?;
to_proto::source_change(&world, source_change)
}
pub fn handle_on_enter(
world: WorldSnapshot,
params: req::TextDocumentPositionParams,
) -> Result<Option<req::SourceChange>> {
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<lsp_ext::SourceChange>> {
let _p = profile("handle_on_enter");
let position = params.try_conv_with(&world)?;
let position = from_proto::file_position(&world, params)?;
match world.analysis().on_enter(position)? {
None => Ok(None),
Some(edit) => Ok(Some(edit.try_conv_with(&world)?)),
Some(source_change) => to_proto::source_change(&world, source_change).map(Some),
}
}
// Don't forget to add new trigger characters to `ServerCapabilities` in `caps.rs`.
pub fn handle_on_type_formatting(
world: WorldSnapshot,
params: req::DocumentOnTypeFormattingParams,
params: lsp_types::DocumentOnTypeFormattingParams,
) -> Result<Option<Vec<TextEdit>>> {
let _p = profile("handle_on_type_formatting");
let mut position = params.text_document_position.try_conv_with(&world)?;
let mut position = from_proto::file_position(&world, params.text_document_position)?;
let line_index = world.analysis().file_line_index(position.file_id)?;
let line_endings = world.file_line_endings(position.file_id);
@ -208,18 +205,17 @@ pub fn handle_on_type_formatting(
// This should be a single-file edit
let edit = edit.source_file_edits.pop().unwrap();
let change: Vec<TextEdit> = edit.edit.conv_with((&line_index, line_endings));
let change = to_proto::text_edit_vec(&line_index, line_endings, edit.edit);
Ok(Some(change))
}
pub fn handle_document_symbol(
world: WorldSnapshot,
params: req::DocumentSymbolParams,
) -> Result<Option<req::DocumentSymbolResponse>> {
params: lsp_types::DocumentSymbolParams,
) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = profile("handle_document_symbol");
let file_id = params.text_document.try_conv_with(&world)?;
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let url = file_id.try_conv_with(&world)?;
let mut parents: Vec<(DocumentSymbol, Option<usize>)> = Vec::new();
@ -227,10 +223,10 @@ pub fn handle_document_symbol(
let doc_symbol = DocumentSymbol {
name: symbol.label,
detail: symbol.detail,
kind: symbol.kind.conv(),
kind: to_proto::symbol_kind(symbol.kind),
deprecated: Some(symbol.deprecated),
range: symbol.node_range.conv_with(&line_index),
selection_range: symbol.navigation_range.conv_with(&line_index),
range: to_proto::range(&line_index, symbol.node_range),
selection_range: to_proto::range(&line_index, symbol.navigation_range),
children: None,
};
parents.push((doc_symbol, symbol.parent));
@ -249,17 +245,17 @@ pub fn handle_document_symbol(
}
}
if world.config.client_caps.hierarchical_symbols {
Ok(Some(document_symbols.into()))
let res = if world.config.client_caps.hierarchical_symbols {
document_symbols.into()
} else {
let url = to_proto::url(&world, file_id)?;
let mut symbol_information = Vec::<SymbolInformation>::new();
for symbol in document_symbols {
flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
}
Ok(Some(symbol_information.into()))
}
}
symbol_information.into()
};
return Ok(Some(res));
fn flatten_document_symbol(
symbol: &DocumentSymbol,
@ -279,10 +275,11 @@ fn flatten_document_symbol(
flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
}
}
}
pub fn handle_workspace_symbol(
world: WorldSnapshot,
params: req::WorkspaceSymbolParams,
params: lsp_types::WorkspaceSymbolParams,
) -> Result<Option<Vec<SymbolInformation>>> {
let _p = profile("handle_workspace_symbol");
let all_symbols = params.query.contains('#');
@ -313,8 +310,8 @@ pub fn handle_workspace_symbol(
for nav in world.analysis().symbol_search(query)? {
let info = SymbolInformation {
name: nav.name().to_string(),
kind: nav.kind().conv(),
location: nav.try_conv_with(world)?,
kind: to_proto::symbol_kind(nav.kind()),
location: to_proto::location(world, nav.file_range())?,
container_name: nav.container_name().map(|v| v.to_string()),
deprecated: None,
};
@ -326,63 +323,80 @@ pub fn handle_workspace_symbol(
pub fn handle_goto_definition(
world: WorldSnapshot,
params: req::GotoDefinitionParams,
) -> Result<Option<req::GotoDefinitionResponse>> {
params: lsp_types::GotoDefinitionParams,
) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile("handle_goto_definition");
let position = params.text_document_position_params.try_conv_with(&world)?;
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().goto_definition(position)? {
None => return Ok(None),
Some(it) => it,
};
let res = (position.file_id, nav_info).try_conv_with(&world)?;
let res = to_proto::goto_definition_response(
&world,
FileRange { file_id: position.file_id, range: nav_info.range },
nav_info.info,
)?;
Ok(Some(res))
}
pub fn handle_goto_implementation(
world: WorldSnapshot,
params: req::GotoImplementationParams,
) -> Result<Option<req::GotoImplementationResponse>> {
params: lsp_types::request::GotoImplementationParams,
) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = profile("handle_goto_implementation");
let position = params.text_document_position_params.try_conv_with(&world)?;
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().goto_implementation(position)? {
None => return Ok(None),
Some(it) => it,
};
let res = (position.file_id, nav_info).try_conv_with(&world)?;
let res = to_proto::goto_definition_response(
&world,
FileRange { file_id: position.file_id, range: nav_info.range },
nav_info.info,
)?;
Ok(Some(res))
}
pub fn handle_goto_type_definition(
world: WorldSnapshot,
params: req::GotoTypeDefinitionParams,
) -> Result<Option<req::GotoTypeDefinitionResponse>> {
params: lsp_types::request::GotoTypeDefinitionParams,
) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = profile("handle_goto_type_definition");
let position = params.text_document_position_params.try_conv_with(&world)?;
let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().goto_type_definition(position)? {
None => return Ok(None),
Some(it) => it,
};
let res = (position.file_id, nav_info).try_conv_with(&world)?;
let res = to_proto::goto_definition_response(
&world,
FileRange { file_id: position.file_id, range: nav_info.range },
nav_info.info,
)?;
Ok(Some(res))
}
pub fn handle_parent_module(
world: WorldSnapshot,
params: req::TextDocumentPositionParams,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Vec<Location>> {
let _p = profile("handle_parent_module");
let position = params.try_conv_with(&world)?;
world.analysis().parent_module(position)?.iter().try_conv_with_to_vec(&world)
let position = from_proto::file_position(&world, params)?;
world
.analysis()
.parent_module(position)?
.into_iter()
.map(|it| to_proto::location(&world, it.file_range()))
.collect::<Result<Vec<_>>>()
}
pub fn handle_runnables(
world: WorldSnapshot,
params: req::RunnablesParams,
) -> Result<Vec<req::Runnable>> {
params: lsp_ext::RunnablesParams,
) -> Result<Vec<lsp_ext::Runnable>> {
let _p = profile("handle_runnables");
let file_id = params.text_document.try_conv_with(&world)?;
let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let offset = params.position.map(|it| it.conv_with(&line_index));
let offset = params.position.map(|it| from_proto::offset(&line_index, it));
let mut res = Vec::new();
let workspace_root = world.workspace_root_for(file_id);
let cargo_spec = CargoTargetSpec::for_file(&world, file_id)?;
@ -408,7 +422,7 @@ pub fn handle_runnables(
match cargo_spec {
Some(spec) => {
for &cmd in ["check", "test"].iter() {
res.push(req::Runnable {
res.push(lsp_ext::Runnable {
range: Default::default(),
label: format!("cargo {} -p {}", cmd, spec.package),
bin: "cargo".to_string(),
@ -420,7 +434,7 @@ pub fn handle_runnables(
}
}
None => {
res.push(req::Runnable {
res.push(lsp_ext::Runnable {
range: Default::default(),
label: "cargo check --workspace".to_string(),
bin: "cargo".to_string(),
@ -436,10 +450,10 @@ pub fn handle_runnables(
pub fn handle_completion(
world: WorldSnapshot,
params: req::CompletionParams,
) -> Result<Option<req::CompletionResponse>> {
params: lsp_types::CompletionParams,
) -> Result<Option<lsp_types::CompletionResponse>> {
let _p = profile("handle_completion");
let position = params.text_document_position.try_conv_with(&world)?;
let position = from_proto::file_position(&world, params.text_document_position)?;
let completion_triggered_after_single_colon = {
let mut res = false;
if let Some(ctx) = params.context {
@ -468,8 +482,10 @@ pub fn handle_completion(
};
let line_index = world.analysis().file_line_index(position.file_id)?;
let line_endings = world.file_line_endings(position.file_id);
    let items: Vec<CompletionItem> = items
        .into_iter()
        .map(|item| to_proto::completion_item(&line_index, line_endings, item))
        .collect();
Ok(Some(items.into()))
}
@ -479,52 +495,51 @@ pub fn handle_folding_range(
params: FoldingRangeParams,
) -> Result<Option<Vec<FoldingRange>>> {
let _p = profile("handle_folding_range");
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let folds = world.analysis().folding_ranges(file_id)?;
let text = world.analysis().file_text(file_id)?;
let line_index = world.analysis().file_line_index(file_id)?;
    let line_folding_only = world.config.client_caps.line_folding_only;
    let res = folds
        .into_iter()
        .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
        .collect();
    Ok(Some(res))
}
pub fn handle_signature_help(
world: WorldSnapshot,
    params: lsp_types::SignatureHelpParams,
) -> Result<Option<lsp_types::SignatureHelp>> {
let _p = profile("handle_signature_help");
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let call_info = match world.analysis().call_info(position)? {
        None => return Ok(None),
        Some(it) => it,
    };
let concise = !world.config.call_info_full;
let mut active_parameter = call_info.active_parameter.map(|it| it as i64);
if concise && call_info.signature.has_self_param {
active_parameter = active_parameter.map(|it| it.saturating_sub(1));
}
    let sig_info = to_proto::signature_information(call_info.signature, concise);
    Ok(Some(lsp_types::SignatureHelp {
signatures: vec![sig_info],
active_signature: Some(0),
active_parameter,
}))
}
pub fn handle_hover(world: WorldSnapshot, params: lsp_types::HoverParams) -> Result<Option<Hover>> {
let _p = profile("handle_hover");
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
let info = match world.analysis().hover(position)? {
None => return Ok(None),
Some(info) => info,
};
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let range = to_proto::range(&line_index, info.range);
let res = Hover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
@ -537,10 +552,10 @@ pub fn handle_hover(world: WorldSnapshot, params: req::HoverParams) -> Result<Op
pub fn handle_prepare_rename(
world: WorldSnapshot,
    params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<PrepareRenameResponse>> {
let _p = profile("handle_prepare_rename");
    let position = from_proto::file_position(&world, params)?;
let optional_change = world.analysis().rename(position, "dummy")?;
let range = match optional_change {
@ -548,15 +563,14 @@ pub fn handle_prepare_rename(
Some(it) => it.range,
};
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let range = to_proto::range(&line_index, range);
Ok(Some(PrepareRenameResponse::Range(range)))
}
pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
let _p = profile("handle_rename");
    let position = from_proto::file_position(&world, params.text_document_position)?;
if params.new_name.is_empty() {
return Err(LspError::new(
@ -567,22 +581,21 @@ pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Optio
}
let optional_change = world.analysis().rename(position, &*params.new_name)?;
    let source_change = match optional_change {
None => return Ok(None),
Some(it) => it.info,
};
    let source_change = to_proto::source_change(&world, source_change)?;
    Ok(Some(source_change.workspace_edit))
}
pub fn handle_references(
world: WorldSnapshot,
    params: lsp_types::ReferenceParams,
) -> Result<Option<Vec<Location>>> {
let _p = profile("handle_references");
    let position = from_proto::file_position(&world, params.text_document_position)?;
let refs = match world.analysis().find_all_refs(position, None)? {
None => return Ok(None),
@ -591,33 +604,13 @@ pub fn handle_references(
let locations = if params.context.include_declaration {
refs.into_iter()
            .filter_map(|reference| to_proto::location(&world, reference.file_range).ok())
.collect()
} else {
// Only iterate over the references if include_declaration was false
refs.references()
.iter()
            .filter_map(|reference| to_proto::location(&world, reference.file_range).ok())
.collect()
};
@ -629,12 +622,12 @@ pub fn handle_formatting(
params: DocumentFormattingParams,
) -> Result<Option<Vec<TextEdit>>> {
let _p = profile("handle_formatting");
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let file = world.analysis().file_text(file_id)?;
let crate_ids = world.analysis().crate_for(file_id)?;
let file_line_index = world.analysis().file_line_index(file_id)?;
    let end_position = to_proto::position(&file_line_index, TextSize::of(file.as_str()));
let mut rustfmt = match &world.config.rustfmt {
RustfmtConfig::Rustfmt { extra_args } => {
@ -700,33 +693,14 @@ pub fn handle_formatting(
}]))
}
pub fn handle_code_action(
world: WorldSnapshot,
    params: lsp_types::CodeActionParams,
) -> Result<Option<CodeActionResponse>> {
let _p = profile("handle_code_action");
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
    let range = from_proto::text_range(&line_index, params.range);
let diagnostics = world.analysis().diagnostics(file_id)?;
let mut res = CodeActionResponse::default();
@ -739,7 +713,7 @@ pub fn handle_code_action(
for source_edit in fixes_from_diagnostics {
let title = source_edit.label.clone();
        let edit = to_proto::source_change(&world, source_edit)?;
let command = Command {
title,
@ -758,7 +732,7 @@ pub fn handle_code_action(
}
for fix in world.check_fixes.get(&file_id).into_iter().flatten() {
        let fix_range = from_proto::text_range(&line_index, fix.range);
if fix_range.intersect(range).is_none() {
continue;
}
@ -779,21 +753,21 @@ pub fn handle_code_action(
.1
.push(assist),
None => {
                res.push(to_proto::code_action(&world, assist)?.into());
}
}
}
for (group_label, (idx, assists)) in grouped_assists {
if assists.len() == 1 {
            res[idx] = to_proto::code_action(&world, assists.into_iter().next().unwrap())?.into();
} else {
let title = group_label;
let mut arguments = Vec::with_capacity(assists.len());
for assist in assists {
                let source_change = to_proto::source_change(&world, assist.source_change)?;
                arguments.push(to_value(source_change)?);
}
let command = Some(Command {
@ -835,10 +809,10 @@ pub fn handle_code_action(
pub fn handle_code_lens(
world: WorldSnapshot,
    params: lsp_types::CodeLensParams,
) -> Result<Option<Vec<CodeLens>>> {
let _p = profile("handle_code_lens");
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?;
let mut lenses: Vec<CodeLens> = Default::default();
@ -902,10 +876,10 @@ pub fn handle_code_lens(
_ => false,
})
.map(|it| {
            let range = to_proto::range(&line_index, it.node_range);
let pos = range.start;
            let lens_params = lsp_types::request::GotoImplementationParams {
                text_document_position_params: lsp_types::TextDocumentPositionParams::new(
params.text_document.clone(),
pos,
),
@ -926,7 +900,7 @@ pub fn handle_code_lens(
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum CodeLensResolveData {
    Impls(lsp_types::request::GotoImplementationParams),
}
pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> {
@ -937,9 +911,9 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re
Some(CodeLensResolveData::Impls(lens_params)) => {
let locations: Vec<Location> =
match handle_goto_implementation(world, lens_params.clone())? {
                    Some(lsp_types::GotoDefinitionResponse::Scalar(loc)) => vec![loc],
                    Some(lsp_types::GotoDefinitionResponse::Array(locs)) => locs,
                    Some(lsp_types::GotoDefinitionResponse::Link(links)) => links
.into_iter()
.map(|link| Location::new(link.target_uri, link.target_selection_range))
.collect(),
@ -976,37 +950,39 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re
pub fn handle_document_highlight(
world: WorldSnapshot,
    params: lsp_types::DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> {
let _p = profile("handle_document_highlight");
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
    let line_index = world.analysis().file_line_index(position.file_id)?;
    let refs = match world
        .analysis()
        .find_all_refs(position, Some(SearchScope::single_file(position.file_id)))?
    {
None => return Ok(None),
Some(refs) => refs,
};
    let res = refs
        .into_iter()
        .filter(|reference| reference.file_range.file_id == position.file_id)
.map(|reference| DocumentHighlight {
            range: to_proto::range(&line_index, reference.file_range.range),
            kind: reference.access.map(to_proto::document_highlight_kind),
})
        .collect();
    Ok(Some(res))
}
pub fn handle_ssr(
    world: WorldSnapshot,
    params: lsp_ext::SsrParams,
) -> Result<lsp_ext::SourceChange> {
let _p = profile("handle_ssr");
    let source_change =
        world.analysis().structural_search_replace(&params.query, params.parse_only)??;
    to_proto::source_change(&world, source_change)
}
pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> {
@ -1017,8 +993,8 @@ pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<Dia
.diagnostics(file_id)?
.into_iter()
.map(|d| Diagnostic {
            range: to_proto::range(&line_index, d.range),
            severity: Some(to_proto::diagnostic_severity(d.severity)),
code: None,
source: Some("rust-analyzer".to_string()),
message: d.message,
@ -1033,7 +1009,7 @@ fn to_lsp_runnable(
world: &WorldSnapshot,
file_id: FileId,
runnable: Runnable,
) -> Result<lsp_ext::Runnable> {
let spec = CargoTargetSpec::for_file(world, file_id)?;
let (args, extra_args) = CargoTargetSpec::runnable_args(spec, &runnable.kind)?;
let line_index = world.analysis().file_line_index(file_id)?;
@ -1044,8 +1020,8 @@ fn to_lsp_runnable(
RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
RunnableKind::Bin => "run binary".to_string(),
};
    Ok(lsp_ext::Runnable {
        range: to_proto::range(&line_index, runnable.range),
label,
bin: "cargo".to_string(),
args,
@ -1064,13 +1040,13 @@ pub fn handle_inlay_hints(
params: InlayHintsParams,
) -> Result<Vec<InlayHint>> {
let _p = profile("handle_inlay_hints");
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let analysis = world.analysis();
let line_index = analysis.file_line_index(file_id)?;
Ok(analysis
.inlay_hints(file_id, &world.config.inlay_hints)?
.into_iter()
        .map(|it| to_proto::inlay_int(&line_index, it))
.collect())
}
@ -1079,21 +1055,19 @@ pub fn handle_call_hierarchy_prepare(
params: CallHierarchyPrepareParams,
) -> Result<Option<Vec<CallHierarchyItem>>> {
let _p = profile("handle_call_hierarchy_prepare");
    let position = from_proto::file_position(&world, params.text_document_position_params)?;
let nav_info = match world.analysis().call_hierarchy(position)? {
None => return Ok(None),
Some(it) => it,
};
    let RangeInfo { range: _, info: navs } = nav_info;
let res = navs
.into_iter()
.filter(|it| it.kind() == SyntaxKind::FN_DEF)
        .map(|it| to_proto::call_hierarchy_item(&world, it))
        .collect::<Result<Vec<_>>>()?;
Ok(Some(res))
}
@ -1106,7 +1080,7 @@ pub fn handle_call_hierarchy_incoming(
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
    let frange = from_proto::file_range(&world, doc, item.range)?;
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let call_items = match world.analysis().incoming_calls(fpos)? {
@ -1119,11 +1093,14 @@ pub fn handle_call_hierarchy_incoming(
for call_item in call_items.into_iter() {
let file_id = call_item.target.file_id();
let line_index = world.analysis().file_line_index(file_id)?;
        let item = to_proto::call_hierarchy_item(&world, call_item.target)?;
res.push(CallHierarchyIncomingCall {
from: item,
            from_ranges: call_item
                .ranges
                .into_iter()
                .map(|it| to_proto::range(&line_index, it))
                .collect(),
});
}
@ -1138,7 +1115,7 @@ pub fn handle_call_hierarchy_outgoing(
let item = params.item;
let doc = TextDocumentIdentifier::new(item.uri);
    let frange = from_proto::file_range(&world, doc, item.range)?;
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let call_items = match world.analysis().outgoing_calls(fpos)? {
@ -1151,11 +1128,14 @@ pub fn handle_call_hierarchy_outgoing(
for call_item in call_items.into_iter() {
let file_id = call_item.target.file_id();
let line_index = world.analysis().file_line_index(file_id)?;
        let item = to_proto::call_hierarchy_item(&world, call_item.target)?;
res.push(CallHierarchyOutgoingCall {
to: item,
            from_ranges: call_item
                .ranges
                .into_iter()
                .map(|it| to_proto::range(&line_index, it))
                .collect(),
});
}
@ -1168,26 +1148,13 @@ pub fn handle_semantic_tokens(
) -> Result<Option<SemanticTokensResult>> {
let _p = profile("handle_semantic_tokens");
    let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let text = world.analysis().file_text(file_id)?;
let line_index = world.analysis().file_line_index(file_id)?;
    let highlights = world.analysis().highlight(file_id)?;
    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
    Ok(Some(semantic_tokens.into()))
}
pub fn handle_semantic_tokens_range(
@ -1196,17 +1163,11 @@ pub fn handle_semantic_tokens_range(
) -> Result<Option<SemanticTokensRangeResult>> {
let _p = profile("handle_semantic_tokens_range");
    let frange = from_proto::file_range(&world, params.text_document, params.range)?;
let text = world.analysis().file_text(frange.file_id)?;
let line_index = world.analysis().file_line_index(frange.file_id)?;
    let highlights = world.analysis().highlight_range(frange)?;
    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
    Ok(Some(semantic_tokens.into()))
}

View file

@ -0,0 +1,592 @@
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
use ra_db::{FileId, FileRange};
use ra_ide::{
translate_offset_with_edit, Assist, CompletionItem, CompletionItemKind, Documentation,
FileSystemEdit, Fold, FoldKind, FunctionSignature, Highlight, HighlightModifier, HighlightTag,
HighlightedRange, InlayHint, InlayKind, InsertTextFormat, LineIndex, NavigationTarget,
ReferenceAccess, Severity, SourceChange, SourceFileEdit,
};
use ra_syntax::{SyntaxKind, TextRange, TextSize};
use ra_text_edit::{Indel, TextEdit};
use ra_vfs::LineEndings;
use crate::{lsp_ext, semantic_tokens, world::WorldSnapshot, Result};
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
let line_col = line_index.line_col(offset);
let line = u64::from(line_col.line);
let character = u64::from(line_col.col_utf16);
lsp_types::Position::new(line, character)
}
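// Editor note (added for illustration, not part of the original source): LSP
// positions count UTF-16 code units, which is why `col_utf16` is used above
// rather than a byte offset; a character outside the BMP such as 𐐀 occupies
// two UTF-16 units but four UTF-8 bytes, so the two columns can differ.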
pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
let start = position(line_index, range.start());
let end = position(line_index, range.end());
lsp_types::Range::new(start, end)
}
pub(crate) fn symbol_kind(syntax_kind: SyntaxKind) -> lsp_types::SymbolKind {
match syntax_kind {
SyntaxKind::FN_DEF => lsp_types::SymbolKind::Function,
SyntaxKind::STRUCT_DEF => lsp_types::SymbolKind::Struct,
SyntaxKind::ENUM_DEF => lsp_types::SymbolKind::Enum,
SyntaxKind::ENUM_VARIANT => lsp_types::SymbolKind::EnumMember,
SyntaxKind::TRAIT_DEF => lsp_types::SymbolKind::Interface,
SyntaxKind::MACRO_CALL => lsp_types::SymbolKind::Function,
SyntaxKind::MODULE => lsp_types::SymbolKind::Module,
SyntaxKind::TYPE_ALIAS_DEF => lsp_types::SymbolKind::TypeParameter,
SyntaxKind::RECORD_FIELD_DEF => lsp_types::SymbolKind::Field,
SyntaxKind::STATIC_DEF => lsp_types::SymbolKind::Constant,
SyntaxKind::CONST_DEF => lsp_types::SymbolKind::Constant,
SyntaxKind::IMPL_DEF => lsp_types::SymbolKind::Object,
_ => lsp_types::SymbolKind::Variable,
}
}
pub(crate) fn document_highlight_kind(
reference_access: ReferenceAccess,
) -> lsp_types::DocumentHighlightKind {
match reference_access {
ReferenceAccess::Read => lsp_types::DocumentHighlightKind::Read,
ReferenceAccess::Write => lsp_types::DocumentHighlightKind::Write,
}
}
pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
match severity {
Severity::Error => lsp_types::DiagnosticSeverity::Error,
Severity::WeakWarning => lsp_types::DiagnosticSeverity::Hint,
}
}
pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
let value = crate::markdown::format_docs(documentation.as_str());
let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
lsp_types::Documentation::MarkupContent(markup_content)
}
pub(crate) fn insert_text_format(
insert_text_format: InsertTextFormat,
) -> lsp_types::InsertTextFormat {
match insert_text_format {
InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet,
InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText,
}
}
pub(crate) fn completion_item_kind(
completion_item_kind: CompletionItemKind,
) -> lsp_types::CompletionItemKind {
match completion_item_kind {
CompletionItemKind::Keyword => lsp_types::CompletionItemKind::Keyword,
CompletionItemKind::Snippet => lsp_types::CompletionItemKind::Snippet,
CompletionItemKind::Module => lsp_types::CompletionItemKind::Module,
CompletionItemKind::Function => lsp_types::CompletionItemKind::Function,
CompletionItemKind::Struct => lsp_types::CompletionItemKind::Struct,
CompletionItemKind::Enum => lsp_types::CompletionItemKind::Enum,
CompletionItemKind::EnumVariant => lsp_types::CompletionItemKind::EnumMember,
CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::Struct,
CompletionItemKind::Binding => lsp_types::CompletionItemKind::Variable,
CompletionItemKind::Field => lsp_types::CompletionItemKind::Field,
CompletionItemKind::Trait => lsp_types::CompletionItemKind::Interface,
CompletionItemKind::TypeAlias => lsp_types::CompletionItemKind::Struct,
CompletionItemKind::Const => lsp_types::CompletionItemKind::Constant,
CompletionItemKind::Static => lsp_types::CompletionItemKind::Value,
CompletionItemKind::Method => lsp_types::CompletionItemKind::Method,
CompletionItemKind::TypeParam => lsp_types::CompletionItemKind::TypeParameter,
CompletionItemKind::Macro => lsp_types::CompletionItemKind::Method,
CompletionItemKind::Attribute => lsp_types::CompletionItemKind::EnumMember,
}
}
pub(crate) fn text_edit(
line_index: &LineIndex,
line_endings: LineEndings,
indel: Indel,
) -> lsp_types::TextEdit {
let range = range(line_index, indel.delete);
let new_text = match line_endings {
LineEndings::Unix => indel.insert,
LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
};
lsp_types::TextEdit { range, new_text }
}
pub(crate) fn text_edit_vec(
line_index: &LineIndex,
line_endings: LineEndings,
text_edit: TextEdit,
) -> Vec<lsp_types::TextEdit> {
text_edit
.as_indels()
.iter()
.map(|it| self::text_edit(line_index, line_endings, it.clone()))
.collect()
}
pub(crate) fn completion_item(
line_index: &LineIndex,
line_endings: LineEndings,
completion_item: CompletionItem,
) -> lsp_types::CompletionItem {
let mut additional_text_edits = Vec::new();
let mut text_edit = None;
// LSP does not allow arbitrary edits in completion, so we have to do a
// non-trivial mapping here.
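    // Illustrative sketch of that mapping (names and offsets invented for the
    // example): with a source range covering `frob` and an indel that replaces
    // `self.frob` with the full new text, the `self.` prefix becomes a separate
    // deleting edit pushed to `additional_text_edits`, while the primary
    // `text_edit` is narrowed to the source range and carries the whole insert.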
let source_range = completion_item.source_range();
for indel in completion_item.text_edit().as_indels() {
if indel.delete.contains_range(source_range) {
text_edit = Some(if indel.delete == source_range {
self::text_edit(line_index, line_endings, indel.clone())
} else {
assert!(source_range.end() == indel.delete.end());
let range1 = TextRange::new(indel.delete.start(), source_range.start());
let range2 = source_range;
let indel1 = Indel::replace(range1, String::new());
let indel2 = Indel::replace(range2, indel.insert.clone());
additional_text_edits.push(self::text_edit(line_index, line_endings, indel1));
self::text_edit(line_index, line_endings, indel2)
})
} else {
assert!(source_range.intersect(indel.delete).is_none());
let text_edit = self::text_edit(line_index, line_endings, indel.clone());
additional_text_edits.push(text_edit);
}
}
let text_edit = text_edit.unwrap();
let mut res = lsp_types::CompletionItem {
label: completion_item.label().to_string(),
detail: completion_item.detail().map(|it| it.to_string()),
filter_text: Some(completion_item.lookup().to_string()),
kind: completion_item.kind().map(completion_item_kind),
text_edit: Some(text_edit.into()),
additional_text_edits: Some(additional_text_edits),
documentation: completion_item.documentation().map(documentation),
deprecated: Some(completion_item.deprecated()),
command: if completion_item.trigger_call_info() {
let cmd = lsp_types::Command {
title: "triggerParameterHints".into(),
command: "editor.action.triggerParameterHints".into(),
arguments: None,
};
Some(cmd)
} else {
None
},
..Default::default()
};
if completion_item.score().is_some() {
res.preselect = Some(true)
}
if completion_item.deprecated() {
res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated])
}
res.insert_text_format = Some(insert_text_format(completion_item.insert_text_format()));
res
}
pub(crate) fn signature_information(
signature: FunctionSignature,
concise: bool,
) -> lsp_types::SignatureInformation {
let (label, documentation, params) = if concise {
let mut params = signature.parameters;
if signature.has_self_param {
params.remove(0);
}
(params.join(", "), None, params)
} else {
(signature.to_string(), signature.doc.map(documentation), signature.parameters)
};
let parameters: Vec<lsp_types::ParameterInformation> = params
.into_iter()
.map(|param| lsp_types::ParameterInformation {
label: lsp_types::ParameterLabel::Simple(param),
documentation: None,
})
.collect();
lsp_types::SignatureInformation { label, documentation, parameters: Some(parameters) }
}
pub(crate) fn inlay_int(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ext::InlayHint {
lsp_ext::InlayHint {
label: inlay_hint.label.to_string(),
range: range(line_index, inlay_hint.range),
kind: match inlay_hint.kind {
InlayKind::ParameterHint => lsp_ext::InlayKind::ParameterHint,
InlayKind::TypeHint => lsp_ext::InlayKind::TypeHint,
InlayKind::ChainingHint => lsp_ext::InlayKind::ChainingHint,
},
}
}
pub(crate) fn semantic_tokens(
text: &str,
line_index: &LineIndex,
highlights: Vec<HighlightedRange>,
) -> lsp_types::SemanticTokens {
let mut builder = semantic_tokens::SemanticTokensBuilder::default();
for highlight_range in highlights {
let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
let token_index = semantic_tokens::type_index(type_);
let modifier_bitset = mods.0;
for mut text_range in line_index.lines(highlight_range.range) {
if text[text_range].ends_with('\n') {
text_range =
TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
}
let range = range(&line_index, text_range);
builder.push(range, token_index, modifier_bitset);
}
}
builder.build()
}
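// Editor note (assumption about intent, not from the original sources): the
// per-line splitting above exists because the semantic-token encoding is line
// oriented, so a single highlight that spans several lines is emitted as one
// token per line, with the trailing '\n' trimmed from each line's range.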
fn semantic_token_type_and_modifiers(
highlight: Highlight,
) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
let mut mods = semantic_tokens::ModifierSet::default();
let type_ = match highlight.tag {
HighlightTag::Struct => lsp_types::SemanticTokenType::STRUCT,
HighlightTag::Enum => lsp_types::SemanticTokenType::ENUM,
HighlightTag::Union => semantic_tokens::UNION,
HighlightTag::TypeAlias => semantic_tokens::TYPE_ALIAS,
HighlightTag::Trait => lsp_types::SemanticTokenType::INTERFACE,
HighlightTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
HighlightTag::SelfType => lsp_types::SemanticTokenType::TYPE,
HighlightTag::Field => lsp_types::SemanticTokenType::MEMBER,
HighlightTag::Function => lsp_types::SemanticTokenType::FUNCTION,
HighlightTag::Module => lsp_types::SemanticTokenType::NAMESPACE,
HighlightTag::Constant => {
mods |= semantic_tokens::CONSTANT;
mods |= lsp_types::SemanticTokenModifier::STATIC;
lsp_types::SemanticTokenType::VARIABLE
}
HighlightTag::Static => {
mods |= lsp_types::SemanticTokenModifier::STATIC;
lsp_types::SemanticTokenType::VARIABLE
}
HighlightTag::EnumVariant => semantic_tokens::ENUM_MEMBER,
HighlightTag::Macro => lsp_types::SemanticTokenType::MACRO,
HighlightTag::Local => lsp_types::SemanticTokenType::VARIABLE,
HighlightTag::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
HighlightTag::Lifetime => semantic_tokens::LIFETIME,
HighlightTag::ByteLiteral | HighlightTag::NumericLiteral => {
lsp_types::SemanticTokenType::NUMBER
}
HighlightTag::CharLiteral | HighlightTag::StringLiteral => {
lsp_types::SemanticTokenType::STRING
}
HighlightTag::Comment => lsp_types::SemanticTokenType::COMMENT,
HighlightTag::Attribute => semantic_tokens::ATTRIBUTE,
HighlightTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
HighlightTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
HighlightTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
};
for modifier in highlight.modifiers.iter() {
let modifier = match modifier {
HighlightModifier::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
HighlightModifier::ControlFlow => semantic_tokens::CONTROL_FLOW,
HighlightModifier::Mutable => semantic_tokens::MUTABLE,
HighlightModifier::Unsafe => semantic_tokens::UNSAFE,
};
mods |= modifier;
}
(type_, mods)
}
pub(crate) fn folding_range(
text: &str,
line_index: &LineIndex,
line_folding_only: bool,
fold: Fold,
) -> lsp_types::FoldingRange {
let kind = match fold.kind {
FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
FoldKind::Mods | FoldKind::Block => None,
};
let range = range(line_index, fold.range);
if line_folding_only {
// Clients with line_folding_only == true (such as VSCode) will fold the whole end line
// even if it contains text not in the folding range. To prevent that we exclude
// range.end.line from the folding region if there is more text after range.end
// on the same line.
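        // For example (illustrative only): folding an if-block whose closing
        // brace is followed by ` else {` on the same line would otherwise hide
        // that `else`, so the fold is reported as ending one line earlier.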
let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
.chars()
.take_while(|it| *it != '\n')
.any(|it| !it.is_whitespace());
let end_line = if has_more_text_on_end_line {
range.end.line.saturating_sub(1)
} else {
range.end.line
};
lsp_types::FoldingRange {
start_line: range.start.line,
start_character: None,
end_line,
end_character: None,
kind,
}
} else {
lsp_types::FoldingRange {
start_line: range.start.line,
start_character: Some(range.start.character),
end_line: range.end.line,
end_character: Some(range.end.character),
kind,
}
}
}
pub(crate) fn url(world: &WorldSnapshot, file_id: FileId) -> Result<lsp_types::Url> {
world.file_id_to_uri(file_id)
}
pub(crate) fn text_document_identifier(
world: &WorldSnapshot,
file_id: FileId,
) -> Result<lsp_types::TextDocumentIdentifier> {
let res = lsp_types::TextDocumentIdentifier { uri: url(world, file_id)? };
Ok(res)
}
pub(crate) fn versioned_text_document_identifier(
world: &WorldSnapshot,
file_id: FileId,
version: Option<i64>,
) -> Result<lsp_types::VersionedTextDocumentIdentifier> {
let res = lsp_types::VersionedTextDocumentIdentifier { uri: url(world, file_id)?, version };
Ok(res)
}
pub(crate) fn location(world: &WorldSnapshot, frange: FileRange) -> Result<lsp_types::Location> {
let url = url(world, frange.file_id)?;
let line_index = world.analysis().file_line_index(frange.file_id)?;
let range = range(&line_index, frange.range);
let loc = lsp_types::Location::new(url, range);
Ok(loc)
}
pub(crate) fn location_link(
world: &WorldSnapshot,
src: FileRange,
target: NavigationTarget,
) -> Result<lsp_types::LocationLink> {
let src_location = location(world, src)?;
let (target_uri, target_range, target_selection_range) = location_info(world, target)?;
let res = lsp_types::LocationLink {
origin_selection_range: Some(src_location.range),
target_uri,
target_range,
target_selection_range,
};
Ok(res)
}
fn location_info(
world: &WorldSnapshot,
target: NavigationTarget,
) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
let line_index = world.analysis().file_line_index(target.file_id())?;
let target_uri = url(world, target.file_id())?;
let target_range = range(&line_index, target.full_range());
let target_selection_range =
target.focus_range().map(|it| range(&line_index, it)).unwrap_or(target_range);
Ok((target_uri, target_range, target_selection_range))
}
pub(crate) fn goto_definition_response(
world: &WorldSnapshot,
src: FileRange,
targets: Vec<NavigationTarget>,
) -> Result<lsp_types::GotoDefinitionResponse> {
if world.config.client_caps.location_link {
let links = targets
.into_iter()
.map(|nav| location_link(world, src, nav))
.collect::<Result<Vec<_>>>()?;
Ok(links.into())
} else {
let locations = targets
.into_iter()
.map(|nav| {
location(
world,
FileRange {
file_id: nav.file_id(),
range: nav.focus_range().unwrap_or(nav.range()),
},
)
})
.collect::<Result<Vec<_>>>()?;
Ok(locations.into())
}
}
pub(crate) fn text_document_edit(
world: &WorldSnapshot,
source_file_edit: SourceFileEdit,
) -> Result<lsp_types::TextDocumentEdit> {
let text_document = versioned_text_document_identifier(world, source_file_edit.file_id, None)?;
let line_index = world.analysis().file_line_index(source_file_edit.file_id)?;
let line_endings = world.file_line_endings(source_file_edit.file_id);
let edits = source_file_edit
.edit
.as_indels()
.iter()
.map(|it| text_edit(&line_index, line_endings, it.clone()))
.collect();
Ok(lsp_types::TextDocumentEdit { text_document, edits })
}
pub(crate) fn resource_op(
world: &WorldSnapshot,
file_system_edit: FileSystemEdit,
) -> Result<lsp_types::ResourceOp> {
let res = match file_system_edit {
FileSystemEdit::CreateFile { source_root, path } => {
let uri = world.path_to_uri(source_root, &path)?;
lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None })
}
FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
let old_uri = world.file_id_to_uri(src)?;
let new_uri = world.path_to_uri(dst_source_root, &dst_path)?;
lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None })
}
};
Ok(res)
}
pub(crate) fn source_change(
world: &WorldSnapshot,
source_change: SourceChange,
) -> Result<lsp_ext::SourceChange> {
let cursor_position = match source_change.cursor_position {
None => None,
Some(pos) => {
let line_index = world.analysis().file_line_index(pos.file_id)?;
let edit = source_change
.source_file_edits
.iter()
.find(|it| it.file_id == pos.file_id)
.map(|it| &it.edit);
let line_col = match edit {
Some(edit) => translate_offset_with_edit(&*line_index, pos.offset, edit),
None => line_index.line_col(pos.offset),
};
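            // Editor note (assumption about intent): translating the offset
            // through the pending edit for this file means the reported cursor
            // position refers to the text as it will look after the edit is
            // applied, not to the original document.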
let position =
lsp_types::Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16));
Some(lsp_types::TextDocumentPositionParams {
text_document: text_document_identifier(world, pos.file_id)?,
position,
})
}
};
let mut document_changes: Vec<lsp_types::DocumentChangeOperation> = Vec::new();
for op in source_change.file_system_edits {
let op = resource_op(&world, op)?;
document_changes.push(lsp_types::DocumentChangeOperation::Op(op));
}
for edit in source_change.source_file_edits {
let edit = text_document_edit(&world, edit)?;
document_changes.push(lsp_types::DocumentChangeOperation::Edit(edit));
}
let workspace_edit = lsp_types::WorkspaceEdit {
changes: None,
document_changes: Some(lsp_types::DocumentChanges::Operations(document_changes)),
};
Ok(lsp_ext::SourceChange { label: source_change.label, workspace_edit, cursor_position })
}
pub fn call_hierarchy_item(
world: &WorldSnapshot,
target: NavigationTarget,
) -> Result<lsp_types::CallHierarchyItem> {
let name = target.name().to_string();
let detail = target.description().map(|it| it.to_string());
let kind = symbol_kind(target.kind());
let (uri, range, selection_range) = location_info(world, target)?;
Ok(lsp_types::CallHierarchyItem { name, kind, tags: None, detail, uri, range, selection_range })
}
#[cfg(test)]
mod tests {
use test_utils::extract_ranges;
use super::*;
#[test]
fn conv_fold_line_folding_only_fixup() {
let text = r#"<fold>mod a;
mod b;
mod c;</fold>
fn main() <fold>{
if cond <fold>{
a::do_a();
}</fold> else <fold>{
b::do_b();
}</fold>
}</fold>"#;
let (ranges, text) = extract_ranges(text, "fold");
assert_eq!(ranges.len(), 4);
let folds = vec![
Fold { range: ranges[0], kind: FoldKind::Mods },
Fold { range: ranges[1], kind: FoldKind::Block },
Fold { range: ranges[2], kind: FoldKind::Block },
Fold { range: ranges[3], kind: FoldKind::Block },
];
let line_index = LineIndex::new(&text);
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
assert_eq!(converted.len(), expected_lines.len());
for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
assert_eq!(folding_range.start_line, *start_line);
assert_eq!(folding_range.start_character, None);
assert_eq!(folding_range.end_line, *end_line);
assert_eq!(folding_range.end_character, None);
}
}
}
pub(crate) fn code_action(world: &WorldSnapshot, assist: Assist) -> Result<lsp_types::CodeAction> {
let source_change = source_change(&world, assist.source_change)?;
let arg = serde_json::to_value(source_change)?;
let title = assist.label;
let command = lsp_types::Command {
title: title.clone(),
command: "rust-analyzer.applySourceChange".to_string(),
arguments: Some(vec![arg]),
};
Ok(lsp_types::CodeAction {
title,
kind: Some(String::new()),
diagnostics: None,
edit: None,
command: Some(command),
is_preferred: None,
})
}

View file

@ -3,15 +3,16 @@ mod support;
use std::{collections::HashMap, path::PathBuf, time::Instant};
use lsp_types::{
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoDefinition, GotoTypeDefinition, HoverRequest,
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
DocumentFormattingParams, FormattingOptions, GotoDefinitionParams, HoverParams,
PartialResultParams, Position, Range, TextDocumentItem, TextDocumentPositionParams,
WorkDoneProgressParams,
};
use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
use serde_json::json;
use tempfile::TempDir;
use test_utils::skip_slow_tests;

View file

@ -13,15 +13,15 @@ use lsp_types::{
request::Shutdown,
DidOpenTextDocumentParams, TextDocumentIdentifier, TextDocumentItem, Url, WorkDoneProgress,
};
use lsp_types::{ProgressParams, ProgressParamsValue};
use serde::Serialize;
use serde_json::{to_string_pretty, Value};
use tempfile::TempDir;
use test_utils::{find_mismatch, parse_fixture};
use rust_analyzer::{
config::{ClientCapsConfig, Config},
    main_loop,
};
pub struct Project<'a> {
@ -206,7 +206,7 @@ impl Server {
Message::Notification(n) if n.method == "$/progress" => {
match n.clone().extract::<ProgressParams>("$/progress").unwrap() {
ProgressParams {
                    token: lsp_types::ProgressToken::String(ref token),
value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(_)),
} if token == "rustAnalyzer/startup" => true,
_ => false,