Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-25 04:23:25 +00:00
Merge #5526
5526: Handle semantic token deltas r=kjeremy a=kjeremy

This basically takes the naive approach: we always compute the full token set, but save space sending it over the wire, which apparently solves some GC problems with vscode.

This is waiting for https://github.com/gluon-lang/lsp-types/pull/174 to be merged. I am also unsure of the best way to stash the tokens into `DocumentData` in a safe manner.

Co-authored-by: kjeremy <kjeremy@gmail.com>
Co-authored-by: Jeremy Kolb <kjeremy@gmail.com>
commit f1d507270c
7 changed files with 208 additions and 14 deletions
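
In short: the server still recomputes the full token list on every request, caches the resulting SemanticTokens per document URI, and, when the client hands back the result_id of its previous response, answers the SemanticTokensEditsRequest with edits covering only the changed slice. Below is a minimal sketch of that diffing step; it assumes the `diff_tokens` helper added in this commit is in scope, and the `tok` shorthand and token values are made up purely for illustration.

use lsp_types::{SemanticToken, SemanticTokensEdit};

fn tok(delta_line: u32, delta_start: u32, length: u32, token_type: u32) -> SemanticToken {
    SemanticToken { delta_line, delta_start, length, token_type, token_modifiers_bitset: 0 }
}

fn main() {
    // Tokens cached from the previous full `textDocument/semanticTokens` response.
    let cached = vec![tok(0, 0, 3, 1), tok(1, 4, 2, 0)];
    // Freshly computed tokens after an edit: only the second token's length changed.
    let current = vec![tok(0, 0, 3, 1), tok(1, 4, 5, 0)];

    // `diff_tokens` (added in this commit) trims the common prefix and suffix and
    // emits a single edit; offsets count u32 values, and each encoded token is
    // 5 u32s wide.
    let edits: Vec<SemanticTokensEdit> = diff_tokens(&cached, &current);
    assert_eq!(edits[0].start, 5);        // the unchanged first token is skipped
    assert_eq!(edits[0].delete_count, 5); // the one changed token is replaced
    assert_eq!(edits[0].data, Some(vec![tok(1, 4, 5, 0)]));
}
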
@@ -76,7 +76,9 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabilities {
                 token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
             },
-            document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
+            document_provider: Some(SemanticTokensDocumentProvider::Edits {
+                edits: Some(true),
+            }),
             range_provider: Some(true),
             work_done_progress_options: Default::default(),
         }

@@ -1,9 +1,9 @@
 //! In-memory document information.
 
 /// Information about a document that the Language Client
-// knows about.
-// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
-// client notifications.
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
 #[derive(Debug, Clone)]
 pub(crate) struct DocumentData {
     pub version: Option<i64>,

@@ -7,8 +7,8 @@ use std::{sync::Arc, time::Instant};
 
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use lsp_types::Url;
-use parking_lot::RwLock;
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
 use ra_db::{CrateId, VfsPath};
 use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
 use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};

@@ -71,6 +71,7 @@ pub(crate) struct GlobalState {
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) status: Status,
     pub(crate) source_root_config: SourceRootConfig,

@@ -86,6 +87,7 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) check_fixes: CheckFixes,
     pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
     mem_docs: FxHashMap<VfsPath, DocumentData>,
+    pub semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
 }

@@ -120,6 +122,7 @@ impl GlobalState {
             analysis_host,
             diagnostics: Default::default(),
             mem_docs: FxHashMap::default(),
+            semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
             status: Status::default(),
             source_root_config: SourceRootConfig::default(),

@@ -186,6 +189,7 @@ impl GlobalState {
             latest_requests: Arc::clone(&self.latest_requests),
             check_fixes: Arc::clone(&self.diagnostics.check_fixes),
             mem_docs: self.mem_docs.clone(),
+            semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
         }
     }

@@ -13,9 +13,10 @@ use lsp_types::{
     CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
     CodeActionKind, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams,
     DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, HoverContents, Location,
-    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensParams,
-    SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
-    SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+    Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensEditResult,
+    SemanticTokensEditsParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
+    TextDocumentIdentifier, Url, WorkspaceEdit,
 };
 use ra_ide::{
     FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,

@@ -1179,6 +1180,40 @@ pub(crate) fn handle_semantic_tokens(
     let highlights = snap.analysis.highlight(file_id)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
 
+    // Unconditionally cache the tokens
+    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+    Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_edits(
+    snap: GlobalStateSnapshot,
+    params: SemanticTokensEditsParams,
+) -> Result<Option<SemanticTokensEditResult>> {
+    let _p = profile("handle_semantic_tokens_edits");
+
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let text = snap.analysis.file_text(file_id)?;
+    let line_index = snap.analysis.file_line_index(file_id)?;
+
+    let highlights = snap.analysis.highlight(file_id)?;
+
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+    let mut cache = snap.semantic_tokens_cache.lock();
+    let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+    if let Some(prev_id) = &cached_tokens.result_id {
+        if *prev_id == params.previous_result_id {
+            let edits = to_proto::semantic_token_edits(&cached_tokens, &semantic_tokens);
+            *cached_tokens = semantic_tokens;
+            return Ok(Some(edits.into()));
+        }
+    }
+
+    *cached_tokens = semantic_tokens.clone();
+
     Ok(Some(semantic_tokens.into()))
 }

@@ -386,6 +386,9 @@ impl GlobalState {
                 handlers::handle_call_hierarchy_outgoing,
             )?
             .on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+            .on::<lsp_types::request::SemanticTokensEditsRequest>(
+                handlers::handle_semantic_tokens_edits,
+            )?
             .on::<lsp_types::request::SemanticTokensRangeRequest>(
                 handlers::handle_semantic_tokens_range,
             )?

@@ -443,6 +446,8 @@ impl GlobalState {
                     None => log::error!("orphan DidCloseTextDocument: {}", path),
                 }
+
+                this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
 
                 if let Some(path) = path.as_path() {
                     this.loader.handle.invalidate(path.to_path_buf());
                 }

@@ -2,7 +2,10 @@
 
 use std::ops;
 
-use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
+use lsp_types::{
+    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+    SemanticTokensEdit,
+};
 
 macro_rules! define_semantic_token_types {
     ($(($ident:ident, $string:literal)),*$(,)?) => {

@@ -89,14 +92,18 @@ impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
 /// Tokens are encoded relative to each other.
 ///
 /// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
-#[derive(Default)]
 pub(crate) struct SemanticTokensBuilder {
+    id: String,
     prev_line: u32,
     prev_char: u32,
     data: Vec<SemanticToken>,
 }
 
 impl SemanticTokensBuilder {
+    pub fn new(id: String) -> Self {
+        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
+    }
+
     /// Push a new token onto the builder
     pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
         let mut push_line = range.start.line as u32;

@@ -127,10 +134,136 @@ impl SemanticTokensBuilder {
     }
 
     pub fn build(self) -> SemanticTokens {
-        SemanticTokens { result_id: None, data: self.data }
+        SemanticTokens { result_id: Some(self.id), data: self.data }
+    }
+}
+
+pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+    let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+    let (_, old) = old.split_at(offset);
+    let (_, new) = new.split_at(offset);
+
+    let offset_from_end =
+        new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+    let (old, _) = old.split_at(old.len() - offset_from_end);
+    let (new, _) = new.split_at(new.len() - offset_from_end);
+
+    if old.is_empty() && new.is_empty() {
+        vec![]
+    } else {
+        // The lsp data field is actually a byte-diff but we
+        // travel in tokens so `start` and `delete_count` are in multiples of the
+        // serialized size of `SemanticToken`.
+        vec![SemanticTokensEdit {
+            start: 5 * offset as u32,
+            delete_count: 5 * old.len() as u32,
+            data: Some(new.into()),
+        }]
     }
 }
 
 pub fn type_index(type_: SemanticTokenType) -> u32 {
     SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
+        SemanticToken {
+            delta_line: t.0,
+            delta_start: t.1,
+            length: t.2,
+            token_type: t.3,
+            token_modifiers_bitset: t.4,
+        }
+    }
+
+    #[test]
+    fn test_diff_insert_at_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 10,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_at_beginning() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 0,
+                delete_count: 0,
+                data: Some(vec![from((11, 12, 13, 14, 15))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_insert_in_middle() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(
+            edits[0],
+            SemanticTokensEdit {
+                start: 5,
+                delete_count: 0,
+                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
+            }
+        );
+    }
+
+    #[test]
+    fn test_diff_remove_from_end() {
+        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_beginning() {
+        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
+    }
+
+    #[test]
+    fn test_diff_remove_from_middle() {
+        let before = [
+            from((1, 2, 3, 4, 5)),
+            from((10, 20, 30, 40, 50)),
+            from((60, 70, 80, 90, 100)),
+            from((6, 7, 8, 9, 10)),
+        ];
+        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+        let edits = diff_tokens(&before, &after);
+        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
+    }
+}

@@ -1,5 +1,8 @@
 //! Conversion of rust-analyzer specific types to lsp_types equivalents.
-use std::path::{self, Path};
+use std::{
+    path::{self, Path},
+    sync::atomic::{AtomicU32, Ordering},
+};
 
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};

@@ -303,12 +306,15 @@ pub(crate) fn inlay_hint(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ext::InlayHint {
     }
 }
 
+static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+
 pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HighlightedRange>,
 ) -> lsp_types::SemanticTokens {
-    let mut builder = semantic_tokens::SemanticTokensBuilder::default();
+    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
+    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
 
     for highlight_range in highlights {
         let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);

@@ -328,6 +334,15 @@ pub(crate) fn semantic_tokens(
     builder.build()
 }
 
+pub(crate) fn semantic_token_edits(
+    previous: &lsp_types::SemanticTokens,
+    current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensEdits {
+    let result_id = current.result_id.clone();
+    let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
+    lsp_types::SemanticTokensEdits { result_id, edits }
+}
+
 fn semantic_token_type_and_modifiers(
     highlight: Highlight,
 ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {