Address PR comments

Jeremy Kolb 2020-08-05 21:35:35 -04:00
parent fcfd7cb1e3
commit 195111d769
4 changed files with 12 additions and 18 deletions

View file

@@ -3,15 +3,12 @@
 //!
 //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
 
-use std::{
-    sync::{Arc, Mutex},
-    time::Instant,
-};
+use std::{sync::Arc, time::Instant};
 
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
 use lsp_types::{SemanticTokens, Url};
-use parking_lot::RwLock;
+use parking_lot::{Mutex, RwLock};
 use ra_db::{CrateId, VfsPath};
 use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
 use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
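
A note on the import swap above, from std::sync::Mutex to parking_lot::Mutex: parking_lot locks are not subject to poisoning, so lock() returns the guard directly rather than a Result, which is what lets the call sites in the later files drop .unwrap(). A minimal standalone sketch of the difference, not part of this commit; it assumes parking_lot is already a dependency, which the existing RwLock import suggests:

    // Contrast of the two mutex APIs; the HashMap stands in for the cached state.
    use std::collections::HashMap;

    use parking_lot::Mutex;

    fn main() {
        // std::sync::Mutex: lock() returns a Result because a panic while the
        // lock is held poisons it, so callers must unwrap (or handle) the error.
        let std_cache = std::sync::Mutex::new(HashMap::new());
        std_cache.lock().unwrap().insert("file:///a.rs".to_string(), 1u32);

        // parking_lot::Mutex: no poisoning, lock() is infallible, no unwrap().
        let pl_cache = Mutex::new(HashMap::new());
        pl_cache.lock().insert("file:///a.rs".to_string(), 1u32);

        assert_eq!(pl_cache.lock().get("file:///a.rs"), Some(&1));
    }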

View file

@@ -1187,10 +1187,7 @@ pub(crate) fn handle_semantic_tokens(
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
 
     // Unconditionally cache the tokens
-    snap.semantic_tokens_cache
-        .lock()
-        .unwrap()
-        .insert(params.text_document.uri, semantic_tokens.clone());
+    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
 
     Ok(Some(semantic_tokens.into()))
 }
@@ -1209,7 +1206,7 @@ pub(crate) fn handle_semantic_tokens_edits(
 
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
 
-    let mut cache = snap.semantic_tokens_cache.lock().unwrap();
+    let mut cache = snap.semantic_tokens_cache.lock();
     let cached_tokens = cache.entry(params.text_document.uri).or_default();
 
     if let Some(prev_id) = &cached_tokens.result_id {
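
The entry(...).or_default() call in the second hunk leans on the cache being a map keyed by document URI whose values implement Default, so the first request for a document gets an empty slot to fill. A small sketch of that pattern, not the actual GlobalStateSnapshot field; the key type and the stand-in token struct here are assumptions for illustration:

    use std::collections::HashMap;

    use parking_lot::Mutex;

    // Stand-in for the cached value; the real code stores lsp_types::SemanticTokens.
    #[derive(Default)]
    struct SemanticTokens {
        result_id: Option<String>,
        data: Vec<u32>,
    }

    fn main() {
        // Hypothetical cache shaped like the one the handler locks:
        // document URI (String here for brevity) -> last tokens sent to the client.
        let cache: Mutex<HashMap<String, SemanticTokens>> = Mutex::new(HashMap::new());

        let mut guard = cache.lock();
        // First access creates a Default (empty) entry; later accesses reuse it.
        let cached = guard.entry("file:///main.rs".to_string()).or_default();
        assert!(cached.result_id.is_none());
        cached.result_id = Some("1".to_string());
        assert!(cached.data.is_empty());
    }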

View file

@@ -452,7 +452,7 @@ impl GlobalState {
                        None => log::error!("orphan DidCloseTextDocument: {}", path),
                    }
 
-                   this.semantic_tokens_cache.lock().unwrap().remove(&params.text_document.uri);
+                   this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
 
                    if let Some(path) = path.as_path() {
                        this.loader.handle.invalidate(path.to_path_buf());

View file

@@ -1,6 +1,8 @@
 //! Conversion of rust-analyzer specific types to lsp_types equivalents.
-use std::path::{self, Path};
-use std::time::SystemTime;
+use std::{
+    path::{self, Path},
+    sync::atomic::{AtomicU32, Ordering},
+};
 
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -304,16 +306,14 @@ pub(crate) fn inlay_hint(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ex
     }
 }
 
+static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
 pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HighlightedRange>,
 ) -> lsp_types::SemanticTokens {
-    let id = match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
-        Ok(d) => d.as_millis().to_string(),
-        Err(_) => String::new(),
-    };
+    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
 
     for highlight_range in highlights {
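
The last hunk replaces the wall-clock result_id with a process-wide atomic counter, so each semantic tokens response gets a distinct, strictly increasing id even if two requests land in the same millisecond or the system clock jumps backwards. A standalone sketch of the same pattern; the next_result_id helper is illustrative and not from the commit:

    use std::sync::atomic::{AtomicU32, Ordering};

    // Same shape as the counter added above: starts at 1 and only grows.
    static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);

    // Illustrative helper (not in the commit): hand out the next id as a String,
    // the form that SemanticTokensBuilder::new takes in the hunk above.
    fn next_result_id() -> String {
        // fetch_add returns the value before the increment, so ids run 1, 2, 3, ...
        TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string()
    }

    fn main() {
        assert_eq!(next_result_id(), "1");
        assert_eq!(next_result_id(), "2");
    }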