Lukas Wirth 2024-02-28 14:36:29 +01:00 committed by Lukas Wirth
parent c8fdcea85c
commit ed7e9aa5d8
7 changed files with 77 additions and 76 deletions

View file

@@ -1,4 +1,4 @@
-use hir::{self, HasVisibility};
+use hir::HasVisibility;
 use ide_db::{
     assists::{AssistId, AssistKind},
     defs::Definition,

View file

@@ -54,33 +54,33 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
     }
 }
 
-struct LiteralFormatter<S>(bridge::Literal<S, Symbol>);
-
-impl<S> LiteralFormatter<S> {
-    /// Invokes the callback with a `&[&str]` consisting of each part of the
-    /// literal's representation. This is done to allow the `ToString` and
-    /// `Display` implementations to borrow references to symbol values, and
-    /// both be optimized to reduce overhead.
-    fn with_stringify_parts<R>(
-        &self,
-        interner: SymbolInternerRef,
-        f: impl FnOnce(&[&str]) -> R,
-    ) -> R {
-        /// Returns a string containing exactly `num` '#' characters.
-        /// Uses a 256-character source string literal which is always safe to
-        /// index with a `u8` index.
-        fn get_hashes_str(num: u8) -> &'static str {
-            const HASHES: &str = "\
+/// Invokes the callback with a `&[&str]` consisting of each part of the
+/// literal's representation. This is done to allow the `ToString` and
+/// `Display` implementations to borrow references to symbol values, and
+/// both be optimized to reduce overhead.
+fn literal_with_stringify_parts<S, R>(
+    literal: &bridge::Literal<S, Symbol>,
+    interner: SymbolInternerRef,
+    f: impl FnOnce(&[&str]) -> R,
+) -> R {
+    /// Returns a string containing exactly `num` '#' characters.
+    /// Uses a 256-character source string literal which is always safe to
+    /// index with a `u8` index.
+    fn get_hashes_str(num: u8) -> &'static str {
+        const HASHES: &str = "\
 ################################################################\
 ################################################################\
 ################################################################\
 ################################################################\
-            ";
-            const _: () = assert!(HASHES.len() == 256);
-            &HASHES[..num as usize]
-        }
+        ";
+        const _: () = assert!(HASHES.len() == 256);
+        &HASHES[..num as usize]
+    }
 
-        self.with_symbol_and_suffix(interner, |symbol, suffix| match self.0.kind {
+    {
+        let symbol = &*literal.symbol.text(interner);
+        let suffix = &*literal.suffix.map(|s| s.text(interner)).unwrap_or_default();
+        match literal.kind {
             bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]),
             bridge::LitKind::Char => f(&["'", symbol, "'", suffix]),
             bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]),
@@ -101,16 +101,6 @@ impl<S> LiteralFormatter<S> {
             bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => {
                 f(&[symbol, suffix])
             }
-        })
-    }
-
-    fn with_symbol_and_suffix<R>(
-        &self,
-        interner: SymbolInternerRef,
-        f: impl FnOnce(&str, &str) -> R,
-    ) -> R {
-        let symbol = self.0.symbol.text(interner);
-        let suffix = self.0.suffix.map(|s| s.text(interner)).unwrap_or_default();
-        f(symbol.as_str(), suffix.as_str())
+        }
     }
 }
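
Aside: the helper above hands the caller a borrowed `&[&str]` instead of building a `String` itself. A standalone sketch of that parts-callback pattern (not part of this commit; names and values are illustrative):

    // Illustrative only: the producer lends string slices for each piece of the
    // literal and the caller chooses how to assemble them, so the producer
    // allocates no intermediate owned strings.
    fn char_literal_parts<R>(symbol: &str, suffix: &str, f: impl FnOnce(&[&str]) -> R) -> R {
        f(&["'", symbol, "'", suffix])
    }

    fn main() {
        // A caller can concatenate the parts directly into one allocation...
        let text: String = char_literal_parts("a", "", |parts| parts.concat());
        assert_eq!(text, "'a'");
        // ...or compute something without allocating at all.
        let len: usize = char_literal_parts("a", "i32", |parts| parts.iter().map(|p| p.len()).sum());
        assert_eq!(len, 6);
    }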

View file

@@ -15,8 +15,8 @@ use proc_macro::bridge::{self, server};
 use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
 
 use crate::server::{
-    delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter,
-    Symbol, SymbolInternerRef, SYMBOL_INTERNER,
+    delim_to_external, delim_to_internal, literal_with_stringify_parts,
+    token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
 };
 
 mod tt {
     pub use ::tt::*;
@@ -180,12 +180,11 @@ impl server::TokenStream for RaSpanServer {
             }
             bridge::TokenTree::Literal(literal) => {
-                let literal = LiteralFormatter(literal);
-                let text = literal.with_stringify_parts(self.interner, |parts| {
+                let text = literal_with_stringify_parts(&literal, self.interner, |parts| {
                     ::tt::SmolStr::from_iter(parts.iter().copied())
                 });
-                let literal = tt::Literal { text, span: literal.0.span };
+                let literal = tt::Literal { text, span: literal.span };
                 let leaf: tt::Leaf = tt::Leaf::from(literal);
                 let tree = tt::TokenTree::from(leaf);
                 Self::TokenStream::from_iter(iter::once(tree))
@@ -251,10 +250,17 @@ impl server::TokenStream for RaSpanServer {
             .into_iter()
             .map(|tree| match tree {
                 tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-                    bridge::TokenTree::Ident(bridge::Ident {
-                        sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")),
-                        is_raw: ident.text.starts_with("r#"),
-                        span: ident.span,
+                    bridge::TokenTree::Ident(match ident.text.strip_prefix("r#") {
+                        Some(text) => bridge::Ident {
+                            sym: Symbol::intern(self.interner, text),
+                            is_raw: true,
+                            span: ident.span,
+                        },
+                        None => bridge::Ident {
+                            sym: Symbol::intern(self.interner, &ident.text),
+                            is_raw: false,
+                            span: ident.span,
+                        },
                     })
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
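Aside: the rewrite above replaces `trim_start_matches("r#")` plus `starts_with("r#")` with a single `strip_prefix("r#")` match. A quick standalone comparison of the two standard-library methods (not part of this commit):

    fn main() {
        // `strip_prefix` removes at most one prefix and reports whether it was present.
        assert_eq!("r#struct".strip_prefix("r#"), Some("struct"));
        assert_eq!("ident".strip_prefix("r#"), None);
        // `trim_start_matches` strips the pattern repeatedly, so the two differ
        // on (pathological) repeated prefixes.
        assert_eq!("r#r#x".trim_start_matches("r#"), "x");
        assert_eq!("r#r#x".strip_prefix("r#"), Some("r#x"));
    }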
@@ -285,11 +291,12 @@ impl server::TokenStream for RaSpanServer {
 }
 
 impl server::SourceFile for RaSpanServer {
+    // FIXME these are all stubs
     fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
-        // FIXME
         true
     }
     fn path(&mut self, _file: &Self::SourceFile) -> String {
-        // FIXME
         String::new()
     }
     fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
@@ -306,11 +313,15 @@ impl server::Span for RaSpanServer {
         SourceFile {}
     }
     fn save_span(&mut self, _span: Self::Span) -> usize {
-        // FIXME stub, requires builtin quote! implementation
+        // FIXME, quote is incompatible with third-party tools
+        // This is called by the quote proc-macro which is expanded when the proc-macro is compiled
+        // As such, r-a will never observe this
         0
     }
     fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
-        // FIXME stub, requires builtin quote! implementation
+        // FIXME, quote is incompatible with third-party tools
+        // This is called by the expansion of quote!, r-a will observe this, but we don't have
+        // access to the spans that were encoded
         self.call_site
     }
     /// Recent feature, not yet in the proc_macro

View file

@@ -8,8 +8,8 @@ use std::{
 use proc_macro::bridge::{self, server};
 
 use crate::server::{
-    delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter,
-    Symbol, SymbolInternerRef, SYMBOL_INTERNER,
+    delim_to_external, delim_to_internal, literal_with_stringify_parts,
+    token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER,
 };
 
 mod tt {
     pub use proc_macro_api::msg::TokenId;
@@ -171,12 +171,12 @@ impl server::TokenStream for TokenIdServer {
             }
             bridge::TokenTree::Literal(literal) => {
-                let literal = LiteralFormatter(literal);
-                let text = literal.with_stringify_parts(self.interner, |parts| {
+                let text = literal_with_stringify_parts(&literal, self.interner, |parts| {
                     ::tt::SmolStr::from_iter(parts.iter().copied())
                 });
-                let literal = tt::Literal { text, span: literal.0.span };
+                let literal = tt::Literal { text, span: literal.span };
                 let leaf = tt::Leaf::from(literal);
                 let tree = TokenTree::from(leaf);
                 Self::TokenStream::from_iter(iter::once(tree))

View file

@@ -91,7 +91,7 @@ pub(crate) fn handle_did_change_text_document(
     if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
         let Some(DocumentData { version, data }) = state.mem_docs.get_mut(&path) else {
-            tracing::error!("unexpected DidChangeTextDocument: {}", path);
+            tracing::error!(?path, "unexpected DidChangeTextDocument");
             return Ok(());
         };
         // The version passed in DidChangeTextDocument is the version after all edits are applied
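
Aside: `?path` uses tracing's field syntax to record `path` as a structured field (captured via `Debug`) rather than interpolating it into the message string. A minimal sketch, assuming the `tracing` and `tracing-subscriber` crates as dependencies (not part of this commit):

    use std::path::PathBuf;

    fn main() {
        // Install a simple formatting subscriber so the event is printed somewhere.
        tracing_subscriber::fmt::init();
        let path = PathBuf::from("/tmp/example.rs");
        // Recorded as a structured `path` field, so log consumers can filter on it.
        tracing::error!(?path, "unexpected DidChangeTextDocument");
    }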

View file

@@ -144,6 +144,16 @@ This option does not take effect until rust-analyzer is restarted.
 --
 Compilation target override (target triple).
 --
+[[rust-analyzer.cargo.targetDir]]rust-analyzer.cargo.targetDir (default: `null`)::
++
+--
+Optional path to a rust-analyzer specific target directory.
+This prevents rust-analyzer's `cargo check` and initial build-script and proc-macro
+building from locking the `Cargo.lock` at the expense of duplicating build artifacts.
+
+Set to `true` to use a subdirectory of the existing target directory or
+set to a path relative to the workspace to use that path.
+--
 [[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
 +
 --
@@ -814,16 +824,6 @@ Command to be executed instead of 'cargo' for runnables.
 Additional arguments to be passed to cargo for runnables such as
 tests or binaries. For example, it may be `--release`.
 --
-[[rust-analyzer.rust.analyzerTargetDir]]rust-analyzer.rust.analyzerTargetDir (default: `null`)::
-+
---
-Optional path to a rust-analyzer specific target directory.
-This prevents rust-analyzer's `cargo check` from locking the `Cargo.lock`
-at the expense of duplicating build artifacts.
-
-Set to `true` to use a subdirectory of the existing target directory or
-set to a path relative to the workspace to use that path.
---
 [[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
 +
 --
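
Aside: the two hunks above move the option from `rust-analyzer.rust.analyzerTargetDir` to `rust-analyzer.cargo.targetDir`. For illustration only, either accepted form could be set in a client's settings JSON (the values below are examples, not defaults):

    { "rust-analyzer.cargo.targetDir": true }

or, to use a workspace-relative path:

    { "rust-analyzer.cargo.targetDir": "target/rust-analyzer" }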

View file

@@ -671,6 +671,21 @@
                     "string"
                 ]
             },
+            "rust-analyzer.cargo.targetDir": {
+                "markdownDescription": "Optional path to a rust-analyzer specific target directory.\nThis prevents rust-analyzer's `cargo check` and initial build-script and proc-macro\nbuilding from locking the `Cargo.lock` at the expense of duplicating build artifacts.\n\nSet to `true` to use a subdirectory of the existing target directory or\nset to a path relative to the workspace to use that path.",
+                "default": null,
+                "anyOf": [
+                    {
+                        "type": "null"
+                    },
+                    {
+                        "type": "boolean"
+                    },
+                    {
+                        "type": "string"
+                    }
+                ]
+            },
             "rust-analyzer.cargo.unsetTest": {
                 "markdownDescription": "Unsets the implicit `#[cfg(test)]` for the specified crates.",
                 "default": [
@@ -1543,21 +1558,6 @@
                     "type": "string"
                 }
             },
-            "rust-analyzer.rust.analyzerTargetDir": {
-                "markdownDescription": "Optional path to a rust-analyzer specific target directory.\nThis prevents rust-analyzer's `cargo check` from locking the `Cargo.lock`\nat the expense of duplicating build artifacts.\n\nSet to `true` to use a subdirectory of the existing target directory or\nset to a path relative to the workspace to use that path.",
-                "default": null,
-                "anyOf": [
-                    {
-                        "type": "null"
-                    },
-                    {
-                        "type": "boolean"
-                    },
-                    {
-                        "type": "string"
-                    }
-                ]
-            },
             "rust-analyzer.rustc.source": {
                 "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
                 "default": null,