5526: Handle semantic token deltas r=kjeremy a=kjeremy

This takes the naive approach: we always compute the full token set, but we save space over the wire by sending only the edits, which apparently alleviates some GC problems in VS Code.

This is waiting for https://github.com/gluon-lang/lsp-types/pull/174 to be merged. I am also unsure of the best way to stash the tokens into `DocumentData` in a safe manner.
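
To make the payload savings concrete, here is a minimal sketch of what the diffing does. The token values are invented, and `diff_tokens` refers to the helper added to `semantic_tokens.rs` in this PR (it is not a public API):

```rust
use lsp_types::{SemanticToken, SemanticTokensEdit};

// Hypothetical before/after streams where only one token changed. Instead of
// re-sending every token, `diff_tokens` (added in semantic_tokens.rs below)
// trims the common prefix/suffix and returns a single edit.
fn example() {
    let tok = |d: [u32; 5]| SemanticToken {
        delta_line: d[0],
        delta_start: d[1],
        length: d[2],
        token_type: d[3],
        token_modifiers_bitset: d[4],
    };
    let before = [tok([0, 0, 3, 1, 0]), tok([1, 4, 5, 2, 0])];
    let after = [tok([0, 0, 3, 1, 0]), tok([1, 4, 7, 2, 0])];

    let edits: Vec<SemanticTokensEdit> = diff_tokens(&before, &after);
    // One edit replacing only the second token; `start`/`delete_count` are
    // counted in u32s (5 per token), so both come out as 5.
    assert_eq!(edits.len(), 1);
    assert_eq!(edits[0].start, 5);
    assert_eq!(edits[0].delete_count, 5);
}
```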

Co-authored-by: kjeremy <kjeremy@gmail.com>
Co-authored-by: Jeremy Kolb <kjeremy@gmail.com>
bors[bot] 2020-08-06 01:44:38 +00:00 committed by GitHub
commit f1d507270c
7 changed files with 208 additions and 14 deletions

View file

@@ -76,7 +76,9 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti
token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.to_vec(),
},
document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
document_provider: Some(SemanticTokensDocumentProvider::Edits {
edits: Some(true),
}),
range_provider: Some(true),
work_done_progress_options: Default::default(),
}

View file

@@ -1,9 +1,9 @@
//! In-memory document information.
/// Information about a document that the Language Client
// knows about.
// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
// client notifications.
/// knows about.
/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
/// client notifications.
#[derive(Debug, Clone)]
pub(crate) struct DocumentData {
pub version: Option<i64>,

View file

@@ -7,8 +7,8 @@ use std::{sync::Arc, time::Instant};
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use lsp_types::Url;
use parking_lot::RwLock;
use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock};
use ra_db::{CrateId, VfsPath};
use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
@@ -71,6 +71,7 @@ pub(crate) struct GlobalState {
pub(crate) analysis_host: AnalysisHost,
pub(crate) diagnostics: DiagnosticCollection,
pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) status: Status,
pub(crate) source_root_config: SourceRootConfig,
@@ -86,6 +87,7 @@ pub(crate) struct GlobalStateSnapshot {
pub(crate) check_fixes: CheckFixes,
pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
mem_docs: FxHashMap<VfsPath, DocumentData>,
pub semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
}
@@ -120,6 +122,7 @@ impl GlobalState {
analysis_host,
diagnostics: Default::default(),
mem_docs: FxHashMap::default(),
semantic_tokens_cache: Arc::new(Default::default()),
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
status: Status::default(),
source_root_config: SourceRootConfig::default(),
@@ -186,6 +189,7 @@ impl GlobalState {
latest_requests: Arc::clone(&self.latest_requests),
check_fixes: Arc::clone(&self.diagnostics.check_fixes),
mem_docs: self.mem_docs.clone(),
semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
}
}

View file

@@ -13,9 +13,10 @@ use lsp_types::{
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
CodeActionKind, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams,
DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, HoverContents, Location,
Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensParams,
SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensEditResult,
SemanticTokensEditsParams, SemanticTokensParams, SemanticTokensRangeParams,
SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
TextDocumentIdentifier, Url, WorkspaceEdit,
};
use ra_ide::{
FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
@@ -1179,6 +1180,40 @@ pub(crate) fn handle_semantic_tokens(
let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
// Unconditionally cache the tokens
snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
Ok(Some(semantic_tokens.into()))
}
pub(crate) fn handle_semantic_tokens_edits(
snap: GlobalStateSnapshot,
params: SemanticTokensEditsParams,
) -> Result<Option<SemanticTokensEditResult>> {
let _p = profile("handle_semantic_tokens_edits");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.analysis.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let mut cache = snap.semantic_tokens_cache.lock();
let cached_tokens = cache.entry(params.text_document.uri).or_default();
if let Some(prev_id) = &cached_tokens.result_id {
if *prev_id == params.previous_result_id {
let edits = to_proto::semantic_token_edits(&cached_tokens, &semantic_tokens);
*cached_tokens = semantic_tokens;
return Ok(Some(edits.into()));
}
}
*cached_tokens = semantic_tokens.clone();
Ok(Some(semantic_tokens.into()))
}
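
A distilled sketch of the decision this handler makes (not the exact code; `respond` is a made-up helper name, and `to_proto` is the crate module used above). Note that on a cache miss `or_default()` inserts an empty `SemanticTokens` whose `result_id` is `None`, so the comparison fails and the client receives a full token set:

```rust
use lsp_types::{SemanticTokens, SemanticTokensEditResult};

// Sketch: send edits only when the client's previous_result_id matches the
// result_id of the tokens cached for this document; otherwise fall back to a
// full token response, updating the cache either way.
fn respond(
    cached: &mut SemanticTokens,
    previous_result_id: &str,
    current: SemanticTokens,
) -> SemanticTokensEditResult {
    match cached.result_id.clone() {
        Some(prev) if prev == previous_result_id => {
            let edits = to_proto::semantic_token_edits(cached, &current);
            *cached = current;
            edits.into()
        }
        _ => {
            *cached = current.clone();
            current.into()
        }
    }
}
```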

View file

@@ -386,6 +386,9 @@ impl GlobalState {
handlers::handle_call_hierarchy_outgoing,
)?
.on::<lsp_types::request::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
.on::<lsp_types::request::SemanticTokensEditsRequest>(
handlers::handle_semantic_tokens_edits,
)?
.on::<lsp_types::request::SemanticTokensRangeRequest>(
handlers::handle_semantic_tokens_range,
)?
@@ -443,6 +446,8 @@ impl GlobalState {
None => log::error!("orphan DidCloseTextDocument: {}", path),
}
this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
if let Some(path) = path.as_path() {
this.loader.handle.invalidate(path.to_path_buf());
}

View file

@@ -2,7 +2,10 @@
use std::ops;
use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
use lsp_types::{
Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
SemanticTokensEdit,
};
macro_rules! define_semantic_token_types {
($(($ident:ident, $string:literal)),*$(,)?) => {
@@ -89,14 +92,18 @@ impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
/// Tokens are encoded relative to each other.
///
/// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
#[derive(Default)]
pub(crate) struct SemanticTokensBuilder {
id: String,
prev_line: u32,
prev_char: u32,
data: Vec<SemanticToken>,
}
impl SemanticTokensBuilder {
pub fn new(id: String) -> Self {
SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
}
/// Push a new token onto the builder
pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
let mut push_line = range.start.line as u32;
@@ -127,10 +134,136 @@ impl SemanticTokensBuilder {
}
pub fn build(self) -> SemanticTokens {
SemanticTokens { result_id: None, data: self.data }
SemanticTokens { result_id: Some(self.id), data: self.data }
}
}
pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
let (_, old) = old.split_at(offset);
let (_, new) = new.split_at(offset);
let offset_from_end =
new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
let (old, _) = old.split_at(old.len() - offset_from_end);
let (new, _) = new.split_at(new.len() - offset_from_end);
if old.is_empty() && new.is_empty() {
vec![]
} else {
// The LSP `data` field is a flat array of `u32`s (five per token) and edit
// positions index into that array, but we travel in whole tokens, so `start`
// and `delete_count` are in multiples of the serialized size of `SemanticToken`.
vec![SemanticTokensEdit {
start: 5 * offset as u32,
delete_count: 5 * old.len() as u32,
data: Some(new.into()),
}]
}
}
pub fn type_index(type_: SemanticTokenType) -> u32 {
SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
}
#[cfg(test)]
mod tests {
use super::*;
fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
SemanticToken {
delta_line: t.0,
delta_start: t.1,
length: t.2,
token_type: t.3,
token_modifiers_bitset: t.4,
}
}
#[test]
fn test_diff_insert_at_end() {
let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
let edits = diff_tokens(&before, &after);
assert_eq!(
edits[0],
SemanticTokensEdit {
start: 10,
delete_count: 0,
data: Some(vec![from((11, 12, 13, 14, 15))])
}
);
}
#[test]
fn test_diff_insert_at_beginning() {
let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let edits = diff_tokens(&before, &after);
assert_eq!(
edits[0],
SemanticTokensEdit {
start: 0,
delete_count: 0,
data: Some(vec![from((11, 12, 13, 14, 15))])
}
);
}
#[test]
fn test_diff_insert_in_middle() {
let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let after = [
from((1, 2, 3, 4, 5)),
from((10, 20, 30, 40, 50)),
from((60, 70, 80, 90, 100)),
from((6, 7, 8, 9, 10)),
];
let edits = diff_tokens(&before, &after);
assert_eq!(
edits[0],
SemanticTokensEdit {
start: 5,
delete_count: 0,
data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
}
);
}
#[test]
fn test_diff_remove_from_end() {
let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let edits = diff_tokens(&before, &after);
assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
}
#[test]
fn test_diff_remove_from_beginning() {
let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let edits = diff_tokens(&before, &after);
assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
}
#[test]
fn test_diff_remove_from_middle() {
let before = [
from((1, 2, 3, 4, 5)),
from((10, 20, 30, 40, 50)),
from((60, 70, 80, 90, 100)),
from((6, 7, 8, 9, 10)),
];
let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
let edits = diff_tokens(&before, &after);
assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
}
}
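
For readers unfamiliar with the relative encoding referenced in the builder's doc comment, a small worked example (a sketch with invented positions; `SemanticTokensBuilder` is the type above and `2` stands in for an arbitrary index into `SUPPORTED_TYPES`):

```rust
use lsp_types::{Position, Range};

// Three hypothetical highlights pushed through the builder. Each stored token
// is relative to the previous one, which is what keeps the suffix of the
// token stream stable under local edits and makes `diff_tokens` cheap.
fn encoding_example() -> lsp_types::SemanticTokens {
    let mut builder = SemanticTokensBuilder::new("1".into());
    builder.push(Range::new(Position::new(2, 5), Position::new(2, 8)), 2, 0);
    builder.push(Range::new(Position::new(2, 12), Position::new(2, 16)), 2, 0);
    builder.push(Range::new(Position::new(4, 0), Position::new(4, 2)), 2, 0);
    // Encoded (delta_line, delta_start, length) triples:
    //   (2, 5, 3)  -- first token, relative to the start of the file
    //   (0, 7, 4)  -- same line, start relative to the previous token's start
    //   (2, 0, 2)  -- two lines down, start relative to the line start
    builder.build()
}
```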

View file

@@ -1,5 +1,8 @@
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
use std::path::{self, Path};
use std::{
path::{self, Path},
sync::atomic::{AtomicU32, Ordering},
};
use itertools::Itertools;
use ra_db::{FileId, FileRange};
@@ -303,12 +306,15 @@ pub(crate) fn inlay_hint(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ex
}
}
static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
pub(crate) fn semantic_tokens(
text: &str,
line_index: &LineIndex,
highlights: Vec<HighlightedRange>,
) -> lsp_types::SemanticTokens {
let mut builder = semantic_tokens::SemanticTokensBuilder::default();
let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
for highlight_range in highlights {
let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
@@ -328,6 +334,15 @@ pub(crate) fn semantic_tokens(
builder.build()
}
pub(crate) fn semantic_token_edits(
previous: &lsp_types::SemanticTokens,
current: &lsp_types::SemanticTokens,
) -> lsp_types::SemanticTokensEdits {
let result_id = current.result_id.clone();
let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
lsp_types::SemanticTokensEdits { result_id, edits }
}
fn semantic_token_type_and_modifiers(
highlight: Highlight,
) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {