Auto merge of #13084 - Veykril:highlight-config, r=Veykril

Add some more highlighting configurations

The following aspects of highlighting can now be enabled/disabled:
- doc comment injection (enabled by default)
- punctuation highlighting (disabled by default)
- operator highlighting (enabled by default)
- punctuation specialized highlighting (disabled by default)
- operator specialized highlighting (disabled by default)
- macro call bang highlighting (disabled by default)

This PR also changes our `attribute` semantic token type to the `decorator` type which landed upstream (but not yet in lsp-types).

Specialized highlighting is disabled by default, as every client will have to ship something to map these tokens back to the standard punctuation/operator token (we do this in VSCode via the inheritance mapping, for example). That is a fair amount of maintenance work and not something every client wants to do, which pushes the burden onto the user. As this is a rather niche use case in the first place, it will just stay disabled by default.
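
To make that maintenance cost concrete, here is a hypothetical sketch (not something from this PR) of the kind of fold-back table a client without such a mechanism would need; the names are a non-exhaustive sample of the token types registered in `semantic_tokens.rs` further down:

```rust
// Hypothetical client-side helper: fold rust-analyzer's specialized token
// types back into the standard LSP "punctuation"/"operator" types.
// The listed names are only a sample, not the full set.
fn fold_specialized(token_type: &str) -> &str {
    match token_type {
        // specialized punctuation -> plain "punctuation"
        "angle" | "attributeBracket" | "parenthesis" | "macroBang" => "punctuation",
        // specialized operators -> plain "operator"
        "arithmetic" | "bitwise" | "logical" | "comparison" => "operator",
        other => other,
    }
}
```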

Punctuation highlighting is disabled by default. Punctuation can usually be handled by the client's native syntactic highlighting, so there is no loss in quality. The main reason, though, is that punctuation adds a lot of extra token data to send over, and a lot of clients struggle with applying it, so disabling it improves the UX for many people. Note that we still highlight punctuation with special meaning as that special entity (the never type `!` will still be tagged as a builtin type where it occurs as such).

Separate highlighting of the macro call bang `!` is disabled by default, as I don't think people liked that change much, though at the same time I feel like not many people even noticed it (I prefer it being separate, but that's not enough reason for it to be enabled by default, I believe :^) )
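
For reference, a rough sketch (assembled by hand from the `HighlightConfig` struct and the new two-argument `Analysis::highlight` in the diff below, not copied from the PR) of what the defaults described above amount to for a caller of the `ide` crate:

```rust
use ide::{Analysis, HighlightConfig};

fn main() {
    // Defaults as described above: strings, operators and doc-comment
    // injection stay on; punctuation, both specializations and the
    // macro bang are off.
    let config = HighlightConfig {
        strings: true,
        punctuation: false,
        specialize_punctuation: false,
        operator: true,
        specialize_operator: false,
        inject_doc_comment: true,
        macro_bang: false,
        syntactic_name_ref_highlighting: false,
    };

    let (analysis, file_id) = Analysis::from_single_file("fn main() {}".to_owned());
    let ranges = analysis.highlight(config, file_id).unwrap();
    println!("{} highlight ranges", ranges.len());
}
```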

cc https://github.com/rust-lang/rust-analyzer/issues/12783 https://github.com/rust-lang/rust-analyzer/issues/13066
This commit is contained in:
bors 2022-08-23 12:39:57 +00:00
commit f045f14626
14 changed files with 279 additions and 83 deletions

Cargo.lock generated
View file

@ -871,9 +871,9 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.93.0"
version = "0.93.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70c74e2173b2b31f8655d33724b4b45ac13f439386f66290f539c22b144c2212"
checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
dependencies = [
"bitflags",
"serde",

View file

@ -98,7 +98,7 @@ pub use crate::{
static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
syntax_highlighting::{
tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
HlRange,
HighlightConfig, HlRange,
},
};
pub use hir::{Documentation, Semantics};
@ -517,8 +517,12 @@ impl Analysis {
}
/// Computes syntax highlighting for the given file
pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
pub fn highlight(
&self,
highlight_config: HighlightConfig,
file_id: FileId,
) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
}
/// Computes all ranges to highlight for a given item in a file.
@ -533,9 +537,13 @@ impl Analysis {
}
/// Computes syntax highlighting for the given file range.
pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
pub fn highlight_range(
&self,
highlight_config: HighlightConfig,
frange: FileRange,
) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| {
syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
})
}

View file

@ -14,7 +14,7 @@ mod html;
mod tests;
use hir::{Name, Semantics};
use ide_db::{FxHashMap, RootDatabase};
use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{
ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
};
@ -24,7 +24,7 @@ use crate::{
escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
macro_::MacroHighlighter, tags::Highlight,
},
FileId, HlMod, HlTag,
FileId, HlMod, HlOperator, HlPunct, HlTag,
};
pub(crate) use html::highlight_as_html;
@ -36,6 +36,26 @@ pub struct HlRange {
pub binding_hash: Option<u64>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct HighlightConfig {
/// Whether to highlight strings
pub strings: bool,
/// Whether to highlight punctuation
pub punctuation: bool,
/// Whether to specialize punctuation highlights
pub specialize_punctuation: bool,
/// Whether to highlight operators
pub operator: bool,
/// Whether to specialize operator highlights
pub specialize_operator: bool,
/// Whether to inject highlights into doc comments
pub inject_doc_comment: bool,
/// Whether to highlight the macro call bang
pub macro_bang: bool,
/// Whether to highlight unresolved things by their syntax
pub syntactic_name_ref_highlighting: bool,
}
// Feature: Semantic Syntax Highlighting
//
// rust-analyzer highlights the code semantically.
@ -155,9 +175,9 @@ pub struct HlRange {
// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
pub(crate) fn highlight(
db: &RootDatabase,
config: HighlightConfig,
file_id: FileId,
range_to_highlight: Option<TextRange>,
syntactic_name_ref_highlighting: bool,
) -> Vec<HlRange> {
let _p = profile::span("highlight");
let sema = Semantics::new(db);
@ -183,26 +203,18 @@ pub(crate) fn highlight(
Some(it) => it.krate(),
None => return hl.to_vec(),
};
traverse(
&mut hl,
&sema,
file_id,
&root,
krate,
range_to_highlight,
syntactic_name_ref_highlighting,
);
traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
hl.to_vec()
}
fn traverse(
hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>,
config: HighlightConfig,
file_id: FileId,
root: &SyntaxNode,
krate: hir::Crate,
range_to_highlight: TextRange,
syntactic_name_ref_highlighting: bool,
) {
let is_unlinked = sema.to_module_def(file_id).is_none();
let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
@ -323,9 +335,11 @@ fn traverse(
Enter(it) => it,
Leave(NodeOrToken::Token(_)) => continue,
Leave(NodeOrToken::Node(node)) => {
// Doc comment highlighting injection, we do this when leaving the node
// so that we overwrite the highlighting of the doc comment itself.
inject::doc_comment(hl, sema, file_id, &node);
if config.inject_doc_comment {
// Doc comment highlighting injection, we do this when leaving the node
// so that we overwrite the highlighting of the doc comment itself.
inject::doc_comment(hl, sema, config, file_id, &node);
}
continue;
}
};
@ -400,7 +414,8 @@ fn traverse(
let string_to_highlight = ast::String::cast(descended_token.clone());
if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
if string.is_raw() {
if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() {
if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
{
continue;
}
}
@ -421,7 +436,7 @@ fn traverse(
sema,
krate,
&mut bindings_shadow_count,
syntactic_name_ref_highlighting,
config.syntactic_name_ref_highlighting,
name_like,
),
NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
@ -439,6 +454,29 @@ fn traverse(
// something unresolvable. FIXME: There should be a way to prevent that
continue;
}
// apply config filtering
match &mut highlight.tag {
HlTag::StringLiteral if !config.strings => continue,
// If punctuation is disabled, make the macro bang part of the macro call again.
tag @ HlTag::Punctuation(HlPunct::MacroBang) => {
if !config.macro_bang {
*tag = HlTag::Symbol(SymbolKind::Macro);
} else if !config.specialize_punctuation {
*tag = HlTag::Punctuation(HlPunct::Other);
}
}
HlTag::Punctuation(_) if !config.punctuation => continue,
tag @ HlTag::Punctuation(_) if !config.specialize_punctuation => {
*tag = HlTag::Punctuation(HlPunct::Other);
}
HlTag::Operator(_) if !config.operator && highlight.mods.is_empty() => continue,
tag @ HlTag::Operator(_) if !config.specialize_operator => {
*tag = HlTag::Operator(HlOperator::Other);
}
_ => (),
}
if inside_attribute {
highlight |= HlMod::Attribute
}
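
The `match` on `highlight.tag` above is where the new options actually take effect. As an illustration only, here is a hypothetical standalone helper mirroring its punctuation arms (`None` meaning the range is dropped; not code from this PR):

```rust
use ide::{HighlightConfig, HlPunct, HlTag};
use ide_db::SymbolKind;

// Hypothetical helper mirroring the punctuation arms of the filter above.
fn filter_punctuation(config: &HighlightConfig, tag: HlTag) -> Option<HlTag> {
    match tag {
        // The macro bang is handled first: folded back into the macro name,
        // degraded to generic punctuation, or kept as `macroBang`.
        HlTag::Punctuation(HlPunct::MacroBang) => Some(if !config.macro_bang {
            HlTag::Symbol(SymbolKind::Macro)
        } else if !config.specialize_punctuation {
            HlTag::Punctuation(HlPunct::Other)
        } else {
            tag
        }),
        // Other punctuation is dropped entirely when disabled...
        HlTag::Punctuation(_) if !config.punctuation => None,
        // ...and degraded to the generic kind when specialization is off.
        HlTag::Punctuation(_) if !config.specialize_punctuation => {
            Some(HlTag::Punctuation(HlPunct::Other))
        }
        other => Some(other),
    }
}
```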

View file

@ -5,7 +5,10 @@ use oorandom::Rand32;
use stdx::format_to;
use syntax::AstNode;
use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
use crate::{
syntax_highlighting::{highlight, HighlightConfig},
FileId, RootDatabase,
};
pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
let parse = db.parse(file_id);
@ -20,7 +23,21 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
)
}
let hl_ranges = highlight(db, file_id, None, false);
let hl_ranges = highlight(
db,
HighlightConfig {
strings: true,
punctuation: true,
specialize_punctuation: true,
specialize_operator: true,
operator: true,
inject_doc_comment: true,
macro_bang: true,
syntactic_name_ref_highlighting: false,
},
file_id,
None,
);
let text = parse.tree().syntax().to_string();
let mut buf = String::new();
buf.push_str(STYLE);

View file

@ -15,13 +15,14 @@ use syntax::{
use crate::{
doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
syntax_highlighting::{highlights::Highlights, injector::Injector},
syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig},
Analysis, HlMod, HlRange, HlTag, RootDatabase,
};
pub(super) fn ra_fixture(
hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>,
config: HighlightConfig,
literal: &ast::String,
expanded: &ast::String,
) -> Option<()> {
@ -63,7 +64,13 @@ pub(super) fn ra_fixture(
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
for mut hl_range in analysis
.highlight(
HighlightConfig { syntactic_name_ref_highlighting: false, ..config },
tmp_file_id,
)
.unwrap()
{
for range in inj.map_range_up(hl_range.range) {
if let Some(range) = literal.map_range_up(range) {
hl_range.range = range;
@ -86,6 +93,7 @@ const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
pub(super) fn doc_comment(
hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>,
config: HighlightConfig,
src_file_id: FileId,
node: &SyntaxNode,
) {
@ -206,7 +214,14 @@ pub(super) fn doc_comment(
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) {
if let Ok(ranges) = analysis.with_db(|db| {
super::highlight(
db,
HighlightConfig { syntactic_name_ref_highlighting: true, ..config },
tmp_file_id,
None,
)
}) {
for HlRange { range, highlight, binding_hash } in ranges {
for range in inj.map_range_up(range) {
hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });

View file

@ -199,7 +199,7 @@ impl fmt::Display for HlTag {
}
impl HlMod {
const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[
const ALL: &'static [HlMod; 19] = &[
HlMod::Associated,
HlMod::Async,
HlMod::Attribute,
@ -296,7 +296,7 @@ impl Highlight {
Highlight { tag, mods: HlMods::default() }
}
pub fn is_empty(&self) -> bool {
self.tag == HlTag::None && self.mods == HlMods::default()
self.tag == HlTag::None && self.mods.is_empty()
}
}
@ -330,6 +330,10 @@ impl ops::BitOr<HlMod> for Highlight {
}
impl HlMods {
pub fn is_empty(&self) -> bool {
self.0 == 0
}
pub fn contains(self, m: HlMod) -> bool {
self.0 & m.mask() == m.mask()
}
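
The new `HlMods::is_empty` backs the `!config.operator && highlight.mods.is_empty()` arm in the traverse filter earlier in this diff: operators that carry modifiers are still emitted even when operator tokens are disabled. A hypothetical in-crate test sketching that (field visibility assumed to be as it is inside the `ide` crate):

```rust
#[cfg(test)]
mod hl_mods_sketch {
    use crate::{HlMod, HlOperator, HlTag, Highlight};

    #[test]
    fn operators_with_modifiers_are_not_empty() {
        // A plain operator carries no modifiers and is skipped when
        // `operator: false`; one tagged `unsafe` still gets emitted.
        let plain = Highlight::new(HlTag::Operator(HlOperator::Other));
        let tagged = Highlight::new(HlTag::Operator(HlOperator::Other)) | HlMod::Unsafe;

        assert!(plain.mods.is_empty());
        assert!(!tagged.mods.is_empty());
    }
}
```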

View file

@ -4,7 +4,18 @@ use expect_test::{expect_file, ExpectFile};
use ide_db::SymbolKind;
use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
use crate::{fixture, FileRange, HlTag, TextRange};
use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange};
const HL_CONFIG: HighlightConfig = HighlightConfig {
strings: true,
punctuation: true,
specialize_punctuation: true,
specialize_operator: true,
operator: true,
inject_doc_comment: true,
macro_bang: true,
syntactic_name_ref_highlighting: false,
};
#[test]
fn attributes() {
@ -996,7 +1007,10 @@ struct Foo {
// The "x"
let highlights = &analysis
.highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
.highlight_range(
HL_CONFIG,
FileRange { file_id, range: TextRange::at(45.into(), 1.into()) },
)
.unwrap();
assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
@ -1011,7 +1025,7 @@ macro_rules! test {}
}"#
.trim(),
);
let _ = analysis.highlight(file_id).unwrap();
let _ = analysis.highlight(HL_CONFIG, file_id).unwrap();
}
/// Highlights the code given by the `ra_fixture` argument, renders the
@ -1035,7 +1049,7 @@ fn benchmark_syntax_highlighting_long_struct() {
let hash = {
let _pt = bench("syntax highlighting long struct");
analysis
.highlight(file_id)
.highlight(HL_CONFIG, file_id)
.unwrap()
.iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
@ -1061,7 +1075,7 @@ fn syntax_highlighting_not_quadratic() {
let time = Instant::now();
let hash = analysis
.highlight(file_id)
.highlight(HL_CONFIG, file_id)
.unwrap()
.iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
@ -1086,7 +1100,7 @@ fn benchmark_syntax_highlighting_parser() {
let hash = {
let _pt = bench("syntax highlighting parser");
analysis
.highlight(file_id)
.highlight(HL_CONFIG, file_id)
.unwrap()
.iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))

View file

@ -22,8 +22,8 @@ anyhow = "1.0.57"
crossbeam-channel = "0.5.5"
dissimilar = "1.0.4"
itertools = "0.10.3"
lsp-types = { version = "0.93.0", features = ["proposed"] }
scip = "0.1.1"
lsp-types = { version = "0.93.1", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.2.4"
oorandom = "11.1.3"
@ -89,5 +89,5 @@ in-rust-tree = [
"proc-macro-srv/sysroot-abi",
"sourcegen/in-rust-tree",
"ide/in-rust-tree",
"syntax/in-rust-tree"
"syntax/in-rust-tree",
]

View file

@ -12,8 +12,8 @@ use std::{ffi::OsString, fmt, iter, path::PathBuf};
use flycheck::FlycheckConfig;
use ide::{
AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig,
Snippet, SnippetScope,
HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig,
JoinLinesConfig, Snippet, SnippetScope,
};
use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
@ -385,6 +385,34 @@ config_data! {
/// available on a nightly build.
rustfmt_rangeFormatting_enable: bool = "false",
/// Inject additional highlighting into doc comments.
///
/// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
/// doc links.
semanticHighlighting_doc_comment_inject_enable: bool = "true",
/// Use semantic tokens for operators.
///
/// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
/// they are tagged with modifiers.
semanticHighlighting_operator_enable: bool = "true",
/// Use specialized semantic tokens for operators.
///
/// When enabled, rust-analyzer will emit special token types for operator tokens instead
/// of the generic `operator` token type.
semanticHighlighting_operator_specialization_enable: bool = "false",
/// Use semantic tokens for punctuations.
///
/// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
/// they are tagged with modifiers or have a special role.
semanticHighlighting_punctuation_enable: bool = "false",
/// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
/// calls.
semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
/// Use specialized semantic tokens for punctuations.
///
/// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
/// of the generic `punctuation` token type.
semanticHighlighting_punctuation_specialization_enable: bool = "false",
/// Use semantic tokens for strings.
///
/// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
@ -1171,8 +1199,19 @@ impl Config {
}
}
pub fn highlighting_strings(&self) -> bool {
self.data.semanticHighlighting_strings_enable
pub fn highlighting_config(&self) -> HighlightConfig {
HighlightConfig {
strings: self.data.semanticHighlighting_strings_enable,
punctuation: self.data.semanticHighlighting_punctuation_enable,
specialize_punctuation: self
.data
.semanticHighlighting_punctuation_specialization_enable,
macro_bang: self.data.semanticHighlighting_punctuation_separate_macro_bang,
operator: self.data.semanticHighlighting_operator_enable,
specialize_operator: self.data.semanticHighlighting_operator_specialization_enable,
inject_doc_comment: self.data.semanticHighlighting_doc_comment_inject_enable,
syntactic_name_ref_highlighting: false,
}
}
pub fn hover(&self) -> HoverConfig {

View file

@ -1504,10 +1504,8 @@ pub(crate) fn handle_semantic_tokens_full(
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
let highlight_strings = snap.config.highlighting_strings();
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
// Unconditionally cache the tokens
snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@ -1525,10 +1523,8 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
let highlight_strings = snap.config.highlighting_strings();
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let mut cache = snap.semantic_tokens_cache.lock();
let cached_tokens = cache.entry(params.text_document.uri).or_default();
@ -1556,10 +1552,8 @@ pub(crate) fn handle_semantic_tokens_range(
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
let highlights = snap.analysis.highlight_range(frange)?;
let highlight_strings = snap.config.highlighting_strings();
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
Ok(Some(semantic_tokens.into()))
}

View file

@ -12,26 +12,26 @@ macro_rules! define_semantic_token_types {
$(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
SemanticTokenType::COMMENT,
SemanticTokenType::KEYWORD,
SemanticTokenType::STRING,
SemanticTokenType::NUMBER,
SemanticTokenType::REGEXP,
SemanticTokenType::OPERATOR,
SemanticTokenType::NAMESPACE,
SemanticTokenType::TYPE,
SemanticTokenType::STRUCT,
SemanticTokenType::CLASS,
SemanticTokenType::INTERFACE,
SemanticTokenType::ENUM,
SemanticTokenType::COMMENT,
SemanticTokenType::ENUM_MEMBER,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::ENUM,
SemanticTokenType::FUNCTION,
SemanticTokenType::METHOD,
SemanticTokenType::PROPERTY,
SemanticTokenType::INTERFACE,
SemanticTokenType::KEYWORD,
SemanticTokenType::MACRO,
SemanticTokenType::VARIABLE,
SemanticTokenType::METHOD,
SemanticTokenType::NAMESPACE,
SemanticTokenType::NUMBER,
SemanticTokenType::OPERATOR,
SemanticTokenType::PARAMETER,
SemanticTokenType::PROPERTY,
SemanticTokenType::REGEXP,
SemanticTokenType::STRING,
SemanticTokenType::STRUCT,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::TYPE,
SemanticTokenType::VARIABLE,
$($ident),*
];
};
@ -40,7 +40,6 @@ macro_rules! define_semantic_token_types {
define_semantic_token_types![
(ANGLE, "angle"),
(ARITHMETIC, "arithmetic"),
(ATTRIBUTE, "attribute"),
(ATTRIBUTE_BRACKET, "attributeBracket"),
(BITWISE, "bitwise"),
(BOOLEAN, "boolean"),
@ -63,7 +62,6 @@ define_semantic_token_types![
(LIFETIME, "lifetime"),
(LOGICAL, "logical"),
(MACRO_BANG, "macroBang"),
(OPERATOR, "operator"),
(PARENTHESIS, "parenthesis"),
(PUNCTUATION, "punctuation"),
(SELF_KEYWORD, "selfKeyword"),
@ -82,11 +80,7 @@ macro_rules! define_semantic_token_modifiers {
pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
SemanticTokenModifier::DOCUMENTATION,
SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::DEFINITION,
SemanticTokenModifier::STATIC,
SemanticTokenModifier::ABSTRACT,
SemanticTokenModifier::DEPRECATED,
SemanticTokenModifier::READONLY,
SemanticTokenModifier::DEFAULT_LIBRARY,
$($ident),*
];

View file

@ -517,7 +517,6 @@ pub(crate) fn semantic_tokens(
text: &str,
line_index: &LineIndex,
highlights: Vec<HlRange>,
highlight_strings: bool,
) -> lsp_types::SemanticTokens {
let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@ -526,10 +525,8 @@ pub(crate) fn semantic_tokens(
if highlight_range.highlight.is_empty() {
continue;
}
let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
continue;
}
let token_index = semantic_tokens::type_index(ty);
let modifier_bitset = mods.0;
@ -561,7 +558,7 @@ fn semantic_token_type_and_modifiers(
let mut mods = semantic_tokens::ModifierSet::default();
let type_ = match highlight.tag {
HlTag::Symbol(symbol) => match symbol {
SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
SymbolKind::Attribute => lsp_types::SemanticTokenType::DECORATOR,
SymbolKind::Derive => semantic_tokens::DERIVE,
SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
@ -616,7 +613,7 @@ fn semantic_token_type_and_modifiers(
HlOperator::Arithmetic => semantic_tokens::ARITHMETIC,
HlOperator::Logical => semantic_tokens::LOGICAL,
HlOperator::Comparison => semantic_tokens::COMPARISON,
HlOperator::Other => semantic_tokens::OPERATOR,
HlOperator::Other => lsp_types::SemanticTokenType::OPERATOR,
},
HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING,
HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,

View file

@ -587,6 +587,52 @@ Enables the use of rustfmt's unstable range formatting command for the
`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
available on a nightly build.
--
[[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`)::
+
--
Inject additional highlighting into doc comments.
When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
doc links.
--
[[rust-analyzer.semanticHighlighting.operator.enable]]rust-analyzer.semanticHighlighting.operator.enable (default: `true`)::
+
--
Use semantic tokens for operators.
When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
they are tagged with modifiers.
--
[[rust-analyzer.semanticHighlighting.operator.specialization.enable]]rust-analyzer.semanticHighlighting.operator.specialization.enable (default: `false`)::
+
--
Use specialized semantic tokens for operators.
When enabled, rust-analyzer will emit special token types for operator tokens instead
of the generic `operator` token type.
--
[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
+
--
Use semantic tokens for punctuations.
When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
they are tagged with modifiers or have a special role.
--
[[rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang]]rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang (default: `false`)::
+
--
When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
calls.
--
[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
+
--
Use specialized semantic tokens for punctuations.
When enabled, rust-analyzer will emit special token types for punctuation tokens instead
of the generic `punctuation` token type.
--
[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
+
--

View file

@ -1084,6 +1084,36 @@
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
"markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
"default": true,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.operator.enable": {
"markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
"default": true,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.operator.specialization.enable": {
"markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.punctuation.enable": {
"markdownDescription": "Use semantic tokens for punctuations.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
"markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
"markdownDescription": "Use specialized semantic tokens for punctuations.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.strings.enable": {
"markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
"default": true,