mirror of https://github.com/rust-analyzer/rust-analyzer (synced 2024-12-25 12:33:33 +00:00)
Merge #8795

8795: Allow semantic tokens for strings to be disabled r=matklad a=djrenren

Fixes https://github.com/rust-analyzer/rust-analyzer/issues/7111

Pretty straightforward change, but open to any suggestions if there's a more
recommended testing strategy than what I went with.

Co-authored-by: John Renner <john@jrenner.net>
Commit f9d4a9eaee
6 changed files with 78 additions and 10 deletions
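Before the diff itself: the whole feature is one boolean that a client sends in its rust-analyzer configuration. A hedged sketch of the payload in Rust (the "semanticStringTokens" key is taken from this diff; wrapping it in initialization options is an assumption about the editor side):

    use serde_json::json;

    fn main() {
        // Hypothetical client payload: only the key itself comes from this PR.
        // How an editor delivers it (initializationOptions vs.
        // didChangeConfiguration) varies by client and is assumed here.
        let config = json!({ "semanticStringTokens": false });
        println!("{}", config);
    }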
crates/rust-analyzer/src/config.rs

@@ -208,6 +208,13 @@ config_data! {
         /// Advanced option, fully override the command rust-analyzer uses for
         /// formatting.
         rustfmt_overrideCommand: Option<Vec<String>> = "null",
+
+        /// Use semantic tokens for strings.
+        ///
+        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+        /// By disabling semantic tokens for strings, other grammars can be used to highlight
+        /// their contents.
+        semanticStringTokens: bool = "true",
     }
 }
@@ -381,6 +388,9 @@ impl Config {
     pub fn line_folding_only(&self) -> bool {
         try_or!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?, false)
     }
+    pub fn semantic_strings(&self) -> bool {
+        self.data.semanticStringTokens
+    }
     pub fn hierarchical_symbols(&self) -> bool {
         try_or!(
             self.caps
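The "true" on the right-hand side of the new config_data! entry is a JSON literal, and the typed getter above simply exposes the parsed field. A minimal sketch of that parsing assumption (not the macro's real expansion, which also feeds the generated docs and package.json further down):

    fn main() {
        // Assumption: config_data! defaults are JSON strings deserialized via
        // serde_json into the typed field that semantic_strings() returns.
        let default: bool = serde_json::from_str("true").unwrap();
        assert!(default); // semanticStringTokens defaults to enabled
    }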
crates/rust-analyzer/src/handlers.rs

@@ -1394,7 +1394,9 @@ pub(crate) fn handle_semantic_tokens_full(
     let line_index = snap.file_line_index(file_id)?;

     let highlights = snap.analysis.highlight(file_id)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);

     // Unconditionally cache the tokens
     snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1413,8 +1415,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
     let line_index = snap.file_line_index(file_id)?;

     let highlights = snap.analysis.highlight(file_id)?;
-
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);

     let mut cache = snap.semantic_tokens_cache.lock();
     let cached_tokens = cache.entry(params.text_document.uri).or_default();
@@ -1443,7 +1446,9 @@ pub(crate) fn handle_semantic_tokens_range(
     let line_index = snap.file_line_index(frange.file_id)?;

     let highlights = snap.analysis.highlight_range(frange)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
     Ok(Some(semantic_tokens.into()))
 }
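All three handlers gain the same two lines: read the flag from config, pass it into the conversion. A hypothetical consolidation sketch, not part of the PR (GlobalStateSnapshot, LineIndex, and HlRange are rust-analyzer internals, referenced only to show the shape):

    // Hypothetical helper capturing the pattern repeated in the three handlers.
    fn build_semantic_tokens(
        snap: &GlobalStateSnapshot,
        text: &str,
        line_index: &LineIndex,
        highlights: Vec<HlRange>,
    ) -> lsp_types::SemanticTokens {
        let semantic_strings = snap.config.semantic_strings();
        to_proto::semantic_tokens(text, line_index, highlights, semantic_strings)
    }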
crates/rust-analyzer/src/to_proto.rs

@@ -381,6 +381,7 @@ pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HlRange>,
+    include_strings: bool,
 ) -> lsp_types::SemanticTokens {
     let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@@ -389,8 +390,11 @@ pub(crate) fn semantic_tokens(
         if highlight_range.highlight.is_empty() {
             continue;
         }
-        let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        let token_index = semantic_tokens::type_index(type_);
+        let (typ, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+        if !include_strings && typ == lsp_types::SemanticTokenType::STRING {
+            continue;
+        }
+        let token_index = semantic_tokens::type_index(typ);
         let modifier_bitset = mods.0;

         for mut text_range in line_index.index.lines(highlight_range.range) {
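The early continue above is the entire mechanism: with strings excluded, STRING-typed ranges never reach the token builder, so the client's regular grammar highlights them instead. A self-contained restatement of that predicate against lsp-types (the helper name and main are illustrative):

    use lsp_types::SemanticTokenType;

    // Mirrors the diff's condition: a highlight survives the loop iff strings
    // are included or its token type is not STRING.
    fn keep(include_strings: bool, typ: &SemanticTokenType) -> bool {
        include_strings || *typ != SemanticTokenType::STRING
    }

    fn main() {
        assert!(keep(true, &SemanticTokenType::STRING));
        assert!(!keep(false, &SemanticTokenType::STRING));
        assert!(keep(false, &SemanticTokenType::FUNCTION)); // non-strings unaffected
    }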
crates/rust-analyzer/tests/rust-analyzer/main.rs

@@ -18,15 +18,16 @@ use lsp_types::{
     notification::DidOpenTextDocument,
     request::{
         CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
-        WillRenameFiles,
+        SemanticTokensRangeRequest, WillRenameFiles,
     },
     CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
     DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
-    PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
-    TextDocumentPositionParams, WorkDoneProgressParams,
+    PartialResultParams, Position, Range, RenameFilesParams, SemanticTokens,
+    SemanticTokensRangeParams, TextDocumentItem, TextDocumentPositionParams,
+    WorkDoneProgressParams,
 };
 use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
-use serde_json::json;
+use serde_json::{from_value, json};
 use test_utils::skip_slow_tests;

 use crate::{
@@ -37,6 +38,40 @@ use crate::{
 const PROFILE: &str = "";
 // const PROFILE: &'static str = "*@3>100";

+#[test]
+fn can_disable_semantic_strings() {
+    if skip_slow_tests() {
+        return;
+    }
+
+    [true, false].iter().for_each(|semantic_strings| {
+        let server = Project::with_fixture(
+            r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+const foo: &'static str = "hi";
+"#,
+        )
+        .with_config(serde_json::json!({ "semanticStringTokens": semantic_strings }))
+        .server()
+        .wait_until_workspace_is_loaded();
+
+        let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
+            text_document: server.doc_id("src/lib.rs"),
+            partial_result_params: PartialResultParams::default(),
+            work_done_progress_params: WorkDoneProgressParams::default(),
+            range: Range::new(Position::new(0, 26), Position::new(0, 30)),
+        });
+
+        let tok_res: SemanticTokens = from_value(res).expect("invalid server response");
+        assert!(tok_res.data.len() == *semantic_strings as usize);
+    });
+}
+
 #[test]
 fn completes_items_from_standard_library() {
     if skip_slow_tests() {
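The test's assertion leans on Rust's bool-to-integer cast: the fixture's "hi" is the only string token in the requested range, so the server should return exactly one token with the option on and zero with it off. The arithmetic in isolation:

    fn main() {
        // `*semantic_strings as usize` in the test above evaluates to:
        assert_eq!(true as usize, 1);  // option on: the one STRING token remains
        assert_eq!(false as usize, 0); // option off: it is filtered out
    }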
docs/user/generated_config.adoc

@@ -332,3 +332,12 @@ Additional arguments to `rustfmt`.
 Advanced option, fully override the command rust-analyzer uses for
 formatting.
 --
+[[rust-analyzer.semanticStringTokens]]rust-analyzer.semanticStringTokens (default: `true`)::
++
+--
+Use semantic tokens for strings.
+
+In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+By disabling semantic tokens for strings, other grammars can be used to highlight
+their contents.
+--
editors/code/package.json

@@ -778,6 +778,11 @@
                     "type": "string"
                 }
             },
+            "rust-analyzer.semanticStringTokens": {
+                "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
+                "default": true,
+                "type": "boolean"
+            },
             "$generated-end": false
         }
     },