Cleanup URL handling

Aleksey Kladov 2020-06-13 11:00:06 +02:00
parent b56ad148db
commit 50bbf7233d
6 changed files with 134 additions and 160 deletions

View file

@@ -1,10 +1,6 @@
//! This module provides the functionality needed to convert diagnostics from
//! `cargo check` json format to the LSP diagnostic format.
use std::{
collections::HashMap,
path::{Component, Path, Prefix},
str::FromStr,
};
use std::{collections::HashMap, path::Path};
use lsp_types::{
Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, Location,
@@ -13,7 +9,7 @@ use lsp_types::{
use ra_flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion};
use stdx::format_to;
use crate::{lsp_ext, Result};
use crate::{lsp_ext, to_proto::url_from_abs_path};
/// Converts a Rust level string to a LSP severity
fn map_level_to_severity(val: DiagnosticLevel) -> Option<DiagnosticSeverity> {
@@ -65,7 +61,7 @@ fn map_span_to_location(span: &DiagnosticSpan, workspace_root: &Path) -> Locatio
fn map_span_to_location_naive(span: &DiagnosticSpan, workspace_root: &Path) -> Location {
let mut file_name = workspace_root.to_path_buf();
file_name.push(&span.file_name);
let uri = url_from_path_with_drive_lowercasing(file_name).unwrap();
let uri = url_from_abs_path(&file_name);
// FIXME: this doesn't handle UTF16 offsets correctly
let range = Range::new(
@@ -275,70 +271,16 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
.collect()
}
/// Returns a `Url` object from a given path; drive letters are lowercased if present.
/// This only happens when processing Windows paths.
///
/// For non-Windows paths, this is essentially the same as `Url::from_file_path`.
pub fn url_from_path_with_drive_lowercasing(path: impl AsRef<Path>) -> Result<Url> {
let component_has_windows_drive = path.as_ref().components().any(|comp| {
if let Component::Prefix(c) = comp {
return matches!(c.kind(), Prefix::Disk(_) | Prefix::VerbatimDisk(_));
}
false
});
// VSCode expects drive letters to be lowercased, where rust will uppercase the drive letters.
let res = if component_has_windows_drive {
let url_original = Url::from_file_path(&path)
.map_err(|_| format!("can't convert path to url: {}", path.as_ref().display()))?;
let drive_partition: Vec<&str> = url_original.as_str().rsplitn(2, ':').collect();
// There is a drive partition, but we never found a colon.
// This should not happen, but in this case we just pass it through.
if drive_partition.len() == 1 {
return Ok(url_original);
}
let joined = drive_partition[1].to_ascii_lowercase() + ":" + drive_partition[0];
let url = Url::from_str(&joined).expect("This came from a valid `Url`");
url
} else {
Url::from_file_path(&path)
.map_err(|_| format!("can't convert path to url: {}", path.as_ref().display()))?
};
Ok(res)
}
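
For orientation, a worked trace of the removed helper, as a standalone sketch (not part of the diff itself), assuming the Windows path C:\Test:

fn main() {
    // On Windows, Url::from_file_path(r"C:\Test") yields "file:///C:/Test",
    // while VS Code expects the lowercase form "file:///c:/Test".
    let url_original = "file:///C:/Test";
    // rsplitn(2, ':') splits at the *last* colon, i.e. the drive colon.
    let drive_partition: Vec<&str> = url_original.rsplitn(2, ':').collect();
    assert_eq!(drive_partition, ["/Test", "file:///C"]);
    // Lowercasing the left half and re-joining produces the expected URL.
    let joined = drive_partition[1].to_ascii_lowercase() + ":" + drive_partition[0];
    assert_eq!(joined, "file:///c:/Test");
}
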
#[cfg(test)]
#[cfg(not(windows))]
mod tests {
use super::*;
// `Url` is not able to parse windows paths on unix machines.
#[test]
#[cfg(target_os = "windows")]
fn test_lowercase_drive_letter_with_drive() {
let url = url_from_path_with_drive_lowercasing("C:\\Test").unwrap();
assert_eq!(url.to_string(), "file:///c:/Test");
}
#[test]
#[cfg(target_os = "windows")]
fn test_drive_without_colon_passthrough() {
let url = url_from_path_with_drive_lowercasing(r#"\\localhost\C$\my_dir"#).unwrap();
assert_eq!(url.to_string(), "file://localhost/C$/my_dir");
}
#[cfg(not(windows))]
fn parse_diagnostic(val: &str) -> ra_flycheck::Diagnostic {
serde_json::from_str::<ra_flycheck::Diagnostic>(val).unwrap()
}
#[test]
#[cfg(not(windows))]
fn snap_rustc_incompatible_type_for_trait() {
let diag = parse_diagnostic(
r##"{
@@ -392,7 +334,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_rustc_unused_variable() {
let diag = parse_diagnostic(
r##"{
@@ -475,7 +416,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_rustc_wrong_number_of_parameters() {
let diag = parse_diagnostic(
r##"{
@@ -600,7 +540,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_clippy_pass_by_ref() {
let diag = parse_diagnostic(
r##"{
@@ -721,7 +660,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_rustc_mismatched_type() {
let diag = parse_diagnostic(
r##"{
@@ -765,7 +703,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_handles_macro_location() {
let diag = parse_diagnostic(
r##"{
@@ -1037,7 +974,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_macro_compiler_error() {
let diag = parse_diagnostic(
r##"{
@@ -1267,7 +1203,6 @@ mod tests {
}
#[test]
#[cfg(not(windows))]
fn snap_multi_line_fix() {
let diag = parse_diagnostic(
r##"{

View file

@@ -17,7 +17,7 @@ pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Tex
}
pub(crate) fn file_id(world: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
world.uri_to_file_id(url)
world.url_to_file_id(url)
}
pub(crate) fn file_position(

View file

@@ -22,10 +22,9 @@ use stdx::format_to;
use crate::{
config::Config,
diagnostics::{
to_proto::url_from_path_with_drive_lowercasing, CheckFixes, DiagnosticCollection,
},
diagnostics::{CheckFixes, DiagnosticCollection},
main_loop::pending_requests::{CompletedRequest, LatestRequests},
to_proto::url_from_abs_path,
vfs_glob::{Glob, RustPackageFilterBuilder},
LspError, Result,
};
@@ -274,8 +273,8 @@ impl GlobalStateSnapshot {
&self.analysis
}
pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> {
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
pub fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?;
let file = self.vfs.read().path2file(&path).ok_or_else(|| {
// Show warning as this file is outside current workspace
// FIXME: just handle such files, and remove `LspError::UNKNOWN_FILE`.
@@ -287,11 +286,8 @@ impl GlobalStateSnapshot {
Ok(FileId(file.0))
}
pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> {
let path = self.vfs.read().file2path(VfsFile(id.0));
let url = url_from_path_with_drive_lowercasing(path)?;
Ok(url)
pub fn file_id_to_url(&self, id: FileId) -> Url {
file_id_to_url(&self.vfs.read(), id)
}
pub fn file_id_to_path(&self, id: FileId) -> PathBuf {
@@ -302,12 +298,10 @@ impl GlobalStateSnapshot {
self.vfs.read().file_line_endings(VfsFile(id.0))
}
pub fn path_to_uri(&self, root: SourceRootId, path: &RelativePathBuf) -> Result<Url> {
pub fn path_to_url(&self, root: SourceRootId, path: &RelativePathBuf) -> Url {
let base = self.vfs.read().root2path(VfsRoot(root.0));
let path = path.to_path(base);
let url = Url::from_file_path(&path)
.map_err(|_| format!("can't convert path to url: {}", path.display()))?;
Ok(url)
url_from_abs_path(&path)
}
pub fn status(&self) -> String {
@@ -335,3 +329,8 @@ impl GlobalStateSnapshot {
self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path))
}
}
pub(crate) fn file_id_to_url(vfs: &Vfs, id: FileId) -> Url {
let path = vfs.file2path(VfsFile(id.0));
url_from_abs_path(&path)
}
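
A hypothetical in-crate usage sketch (not part of the diff) showing how the renamed conversions compose: FileId-to-Url is now infallible because every vfs file has an absolute path, while Url-to-FileId remains fallible since a URL may point outside the workspace.

// Hypothetical helper; GlobalStateSnapshot, FileId and Result are the types used above.
fn assert_url_roundtrip(snap: &GlobalStateSnapshot, file_id: FileId) -> Result<()> {
    let url = snap.file_id_to_url(file_id); // infallible
    let same = snap.url_to_file_id(&url)?; // fallible: the file may be outside the workspace
    assert_eq!(file_id, same);
    Ok(())
}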

View file

@@ -27,7 +27,7 @@ use ra_flycheck::{CheckTask, Status};
use ra_ide::{Canceled, FileId, LibraryData, LineIndex, SourceRootId};
use ra_prof::profile;
use ra_project_model::{PackageRoot, ProjectWorkspace};
use ra_vfs::{VfsFile, VfsTask, Watch};
use ra_vfs::{VfsTask, Watch};
use relative_path::RelativePathBuf;
use rustc_hash::FxHashSet;
use serde::{de::DeserializeOwned, Serialize};
@@ -35,9 +35,9 @@ use threadpool::ThreadPool;
use crate::{
config::{Config, FilesWatcher, LinkedProject},
diagnostics::{to_proto::url_from_path_with_drive_lowercasing, DiagnosticTask},
diagnostics::DiagnosticTask,
from_proto,
global_state::{GlobalState, GlobalStateSnapshot},
global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot},
lsp_ext,
main_loop::{
pending_requests::{PendingRequest, PendingRequests},
@@ -801,17 +801,9 @@ fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state:
let subscriptions = state.diagnostics.handle_task(task);
for file_id in subscriptions {
let path = state.vfs.read().file2path(VfsFile(file_id.0));
let uri = match url_from_path_with_drive_lowercasing(&path) {
Ok(uri) => uri,
Err(err) => {
log::error!("Couldn't convert path to url ({}): {}", err, path.display());
continue;
}
};
let url = file_id_to_url(&state.vfs.read(), file_id);
let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect();
let params = lsp_types::PublishDiagnosticsParams { uri, diagnostics, version: None };
let params = lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version: None };
let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params);
msg_sender.send(not.into()).unwrap();
}

View file

@@ -258,7 +258,7 @@ pub fn handle_document_symbol(
let res = if snap.config.client_caps.hierarchical_symbols {
document_symbols.into()
} else {
let url = to_proto::url(&snap, file_id)?;
let url = to_proto::url(&snap, file_id);
let mut symbol_information = Vec::<SymbolInformation>::new();
for symbol in document_symbols {
flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
@@ -1160,7 +1160,7 @@ fn show_impl_command_link(
) -> Option<lsp_ext::CommandLinkGroup> {
if snap.config.hover.implementations {
if let Some(nav_data) = snap.analysis().goto_implementation(*position).unwrap_or(None) {
let uri = to_proto::url(snap, position.file_id).ok()?;
let uri = to_proto::url(snap, position.file_id);
let line_index = snap.analysis().file_line_index(position.file_id).ok()?;
let position = to_proto::position(&line_index, position.offset);
let locations: Vec<_> = nav_data

View file

@@ -1,4 +1,7 @@
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
use std::path::{self, Path};
use itertools::Itertools;
use ra_db::{FileId, FileRange};
use ra_ide::{
Assist, CompletionItem, CompletionItemKind, Documentation, FileSystemEdit, Fold, FoldKind,
@@ -385,24 +388,55 @@ pub(crate) fn folding_range(
}
}
pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> Result<lsp_types::Url> {
snap.file_id_to_uri(file_id)
pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url {
snap.file_id_to_url(file_id)
}
/// Returns a `Url` object from a given path; drive letters are lowercased if present.
/// This only happens when processing Windows paths.
///
/// For non-Windows paths, this is essentially the same as `Url::from_file_path`.
pub(crate) fn url_from_abs_path(path: &Path) -> lsp_types::Url {
assert!(path.is_absolute());
let url = lsp_types::Url::from_file_path(path).unwrap();
match path.components().next() {
Some(path::Component::Prefix(prefix)) if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) =>
{
// Need to lowercase drive letter
}
_ => return url,
}
let driver_letter_range = {
let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() {
Some(it) => it,
None => return url,
};
let start = scheme.len() + ':'.len_utf8();
start..(start + drive_letter.len())
};
// Note: lowercasing the `path` itself doesn't help, the `Url::parse`
// machinery *also* canonicalizes the drive letter. So, just massage the
// string in place.
let mut url = url.into_string();
url[driver_letter_range].make_ascii_lowercase();
lsp_types::Url::parse(&url).unwrap()
}
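
To make the index arithmetic above concrete, here is a standalone sketch (not part of the diff) of the same slicing applied to a literal URL string:

fn main() {
    // "file:///C:/Test" is what Url::from_file_path(r"C:\Test") produces on Windows.
    let url = "file:///C:/Test";
    // splitn(3, ':') yields ("file", "///C", "/Test").
    let mut parts = url.splitn(3, ':');
    let (scheme, drive_letter) = (parts.next().unwrap(), parts.next().unwrap());
    let start = scheme.len() + ':'.len_utf8(); // 5
    let range = start..(start + drive_letter.len()); // 5..9, i.e. the "///C" slice
    let mut url = url.to_string();
    url[range].make_ascii_lowercase(); // only the `C` is affected
    assert_eq!(url, "file:///c:/Test");
}
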
pub(crate) fn versioned_text_document_identifier(
snap: &GlobalStateSnapshot,
file_id: FileId,
version: Option<i64>,
) -> Result<lsp_types::VersionedTextDocumentIdentifier> {
let res = lsp_types::VersionedTextDocumentIdentifier { uri: url(snap, file_id)?, version };
Ok(res)
) -> lsp_types::VersionedTextDocumentIdentifier {
lsp_types::VersionedTextDocumentIdentifier { uri: url(snap, file_id), version }
}
pub(crate) fn location(
snap: &GlobalStateSnapshot,
frange: FileRange,
) -> Result<lsp_types::Location> {
let url = url(snap, frange.file_id)?;
let url = url(snap, frange.file_id);
let line_index = snap.analysis().file_line_index(frange.file_id)?;
let range = range(&line_index, frange.range);
let loc = lsp_types::Location::new(url, range);
@@ -438,7 +472,7 @@ fn location_info(
) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
let line_index = snap.analysis().file_line_index(target.file_id())?;
let target_uri = url(snap, target.file_id())?;
let target_uri = url(snap, target.file_id());
let target_range = range(&line_index, target.full_range());
let target_selection_range =
target.focus_range().map(|it| range(&line_index, it)).unwrap_or(target_range);
@@ -478,7 +512,7 @@ pub(crate) fn snippet_text_document_edit(
is_snippet: bool,
source_file_edit: SourceFileEdit,
) -> Result<lsp_ext::SnippetTextDocumentEdit> {
let text_document = versioned_text_document_identifier(snap, source_file_edit.file_id, None)?;
let text_document = versioned_text_document_identifier(snap, source_file_edit.file_id, None);
let line_index = snap.analysis().file_line_index(source_file_edit.file_id)?;
let line_endings = snap.file_line_endings(source_file_edit.file_id);
let edits = source_file_edit
@@ -492,19 +526,18 @@ pub(crate) fn snippet_text_document_edit(
pub(crate) fn resource_op(
snap: &GlobalStateSnapshot,
file_system_edit: FileSystemEdit,
) -> Result<lsp_types::ResourceOp> {
let res = match file_system_edit {
) -> lsp_types::ResourceOp {
match file_system_edit {
FileSystemEdit::CreateFile { source_root, path } => {
let uri = snap.path_to_uri(source_root, &path)?;
let uri = snap.path_to_url(source_root, &path);
lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None })
}
FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
let old_uri = snap.file_id_to_uri(src)?;
let new_uri = snap.path_to_uri(dst_source_root, &dst_path)?;
let old_uri = snap.file_id_to_url(src);
let new_uri = snap.path_to_url(dst_source_root, &dst_path);
lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None })
}
};
Ok(res)
}
}
pub(crate) fn snippet_workspace_edit(
@@ -513,7 +546,7 @@ pub(crate) fn snippet_workspace_edit(
) -> Result<lsp_ext::SnippetWorkspaceEdit> {
let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
for op in source_change.file_system_edits {
let op = resource_op(&snap, op)?;
let op = resource_op(&snap, op);
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Op(op));
}
for edit in source_change.source_file_edits {
@@ -568,7 +601,7 @@ impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
}
}
pub fn call_hierarchy_item(
pub(crate) fn call_hierarchy_item(
snap: &GlobalStateSnapshot,
target: NavigationTarget,
) -> Result<lsp_types::CallHierarchyItem> {
@@ -579,50 +612,6 @@ pub fn call_hierarchy_item(
Ok(lsp_types::CallHierarchyItem { name, kind, tags: None, detail, uri, range, selection_range })
}
#[cfg(test)]
mod tests {
use test_utils::extract_ranges;
use super::*;
#[test]
fn conv_fold_line_folding_only_fixup() {
let text = r#"<fold>mod a;
mod b;
mod c;</fold>
fn main() <fold>{
if cond <fold>{
a::do_a();
}</fold> else <fold>{
b::do_b();
}</fold>
}</fold>"#;
let (ranges, text) = extract_ranges(text, "fold");
assert_eq!(ranges.len(), 4);
let folds = vec![
Fold { range: ranges[0], kind: FoldKind::Mods },
Fold { range: ranges[1], kind: FoldKind::Block },
Fold { range: ranges[2], kind: FoldKind::Block },
Fold { range: ranges[3], kind: FoldKind::Block },
];
let line_index = LineIndex::new(&text);
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
assert_eq!(converted.len(), expected_lines.len());
for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
assert_eq!(folding_range.start_line, *start_line);
assert_eq!(folding_range.start_character, None);
assert_eq!(folding_range.end_line, *end_line);
assert_eq!(folding_range.end_character, None);
}
}
}
pub(crate) fn unresolved_code_action(
snap: &GlobalStateSnapshot,
assist: Assist,
@@ -676,3 +665,62 @@ pub(crate) fn runnable(
},
})
}
#[cfg(test)]
mod tests {
use test_utils::extract_ranges;
use super::*;
#[test]
fn conv_fold_line_folding_only_fixup() {
let text = r#"<fold>mod a;
mod b;
mod c;</fold>
fn main() <fold>{
if cond <fold>{
a::do_a();
}</fold> else <fold>{
b::do_b();
}</fold>
}</fold>"#;
let (ranges, text) = extract_ranges(text, "fold");
assert_eq!(ranges.len(), 4);
let folds = vec![
Fold { range: ranges[0], kind: FoldKind::Mods },
Fold { range: ranges[1], kind: FoldKind::Block },
Fold { range: ranges[2], kind: FoldKind::Block },
Fold { range: ranges[3], kind: FoldKind::Block },
];
let line_index = LineIndex::new(&text);
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
assert_eq!(converted.len(), expected_lines.len());
for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
assert_eq!(folding_range.start_line, *start_line);
assert_eq!(folding_range.start_character, None);
assert_eq!(folding_range.end_line, *end_line);
assert_eq!(folding_range.end_character, None);
}
}
// `Url` is not able to parse windows paths on unix machines.
#[test]
#[cfg(target_os = "windows")]
fn test_lowercase_drive_letter_with_drive() {
let url = url_from_abs_path(Path::new("C:\\Test"));
assert_eq!(url.to_string(), "file:///c:/Test");
}
#[test]
#[cfg(target_os = "windows")]
fn test_drive_without_colon_passthrough() {
let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#));
assert_eq!(url.to_string(), "file://localhost/C$/my_dir");
}
}