mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-28 04:45:05 +00:00
internal: Arc<String> -> Arc<str>
This commit is contained in:
parent
63e3bf118d
commit
f00dcf9a69
16 changed files with 34 additions and 36 deletions
|
@@ -12,7 +12,7 @@ use crate::{CrateGraph, ProcMacros, SourceDatabaseExt, SourceRoot, SourceRootId}
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct Change {
|
pub struct Change {
|
||||||
pub roots: Option<Vec<SourceRoot>>,
|
pub roots: Option<Vec<SourceRoot>>,
|
||||||
pub files_changed: Vec<(FileId, Option<Arc<String>>)>,
|
pub files_changed: Vec<(FileId, Option<Arc<str>>)>,
|
||||||
pub crate_graph: Option<CrateGraph>,
|
pub crate_graph: Option<CrateGraph>,
|
||||||
pub proc_macros: Option<ProcMacros>,
|
pub proc_macros: Option<ProcMacros>,
|
||||||
}
|
}
|
||||||
|
@@ -42,7 +42,7 @@ impl Change {
|
||||||
self.roots = Some(roots);
|
self.roots = Some(roots);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<String>>) {
|
pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
|
||||||
self.files_changed.push((file_id, new_text))
|
self.files_changed.push((file_id, new_text))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -72,7 +72,7 @@ impl Change {
|
||||||
let source_root = db.source_root(source_root_id);
|
let source_root = db.source_root(source_root_id);
|
||||||
let durability = durability(&source_root);
|
let durability = durability(&source_root);
|
||||||
// XXX: can't actually remove the file, just reset the text
|
// XXX: can't actually remove the file, just reset the text
|
||||||
let text = text.unwrap_or_default();
|
let text = text.unwrap_or_else(|| Arc::from(""));
|
||||||
db.set_file_text_with_durability(file_id, text, durability)
|
db.set_file_text_with_durability(file_id, text, durability)
|
||||||
}
|
}
|
||||||
if let Some(crate_graph) = self.crate_graph {
|
if let Some(crate_graph) = self.crate_graph {
|
||||||
|
|
|
@@ -190,7 +190,7 @@ impl ChangeFixture {
|
||||||
default_target_data_layout = meta.target_data_layout;
|
default_target_data_layout = meta.target_data_layout;
|
||||||
}
|
}
|
||||||
|
|
||||||
change.change_file(file_id, Some(Arc::new(text)));
|
change.change_file(file_id, Some(Arc::from(text)));
|
||||||
let path = VfsPath::new_virtual_path(meta.path);
|
let path = VfsPath::new_virtual_path(meta.path);
|
||||||
file_set.insert(file_id, path);
|
file_set.insert(file_id, path);
|
||||||
files.push(file_id);
|
files.push(file_id);
|
||||||
|
@@ -240,7 +240,7 @@ impl ChangeFixture {
|
||||||
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
|
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
|
||||||
roots.push(SourceRoot::new_library(fs));
|
roots.push(SourceRoot::new_library(fs));
|
||||||
|
|
||||||
change.change_file(core_file, Some(Arc::new(mini_core.source_code())));
|
change.change_file(core_file, Some(Arc::from(mini_core.source_code())));
|
||||||
|
|
||||||
let all_crates = crate_graph.crates_in_topological_order();
|
let all_crates = crate_graph.crates_in_topological_order();
|
||||||
|
|
||||||
|
@@ -279,7 +279,7 @@ impl ChangeFixture {
|
||||||
);
|
);
|
||||||
roots.push(SourceRoot::new_library(fs));
|
roots.push(SourceRoot::new_library(fs));
|
||||||
|
|
||||||
change.change_file(proc_lib_file, Some(Arc::new(source)));
|
change.change_file(proc_lib_file, Some(Arc::from(source)));
|
||||||
|
|
||||||
let all_crates = crate_graph.crates_in_topological_order();
|
let all_crates = crate_graph.crates_in_topological_order();
|
||||||
|
|
||||||
|
|
|
@@ -57,7 +57,7 @@ pub const DEFAULT_LRU_CAP: usize = 128;
|
||||||
|
|
||||||
pub trait FileLoader {
|
pub trait FileLoader {
|
||||||
/// Text of the file.
|
/// Text of the file.
|
||||||
fn file_text(&self, file_id: FileId) -> Arc<String>;
|
fn file_text(&self, file_id: FileId) -> Arc<str>;
|
||||||
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
|
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
|
||||||
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
|
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
|
||||||
}
|
}
|
||||||
|
@@ -90,7 +90,7 @@ fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFil
|
||||||
#[salsa::query_group(SourceDatabaseExtStorage)]
|
#[salsa::query_group(SourceDatabaseExtStorage)]
|
||||||
pub trait SourceDatabaseExt: SourceDatabase {
|
pub trait SourceDatabaseExt: SourceDatabase {
|
||||||
#[salsa::input]
|
#[salsa::input]
|
||||||
fn file_text(&self, file_id: FileId) -> Arc<String>;
|
fn file_text(&self, file_id: FileId) -> Arc<str>;
|
||||||
/// Path to a file, relative to the root of its source root.
|
/// Path to a file, relative to the root of its source root.
|
||||||
/// Source root of the file.
|
/// Source root of the file.
|
||||||
#[salsa::input]
|
#[salsa::input]
|
||||||
|
@@ -118,7 +118,7 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHas
|
||||||
pub struct FileLoaderDelegate<T>(pub T);
|
pub struct FileLoaderDelegate<T>(pub T);
|
||||||
|
|
||||||
impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
|
impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
|
||||||
fn file_text(&self, file_id: FileId) -> Arc<String> {
|
fn file_text(&self, file_id: FileId) -> Arc<str> {
|
||||||
SourceDatabaseExt::file_text(self.0, file_id)
|
SourceDatabaseExt::file_text(self.0, file_id)
|
||||||
}
|
}
|
||||||
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
||||||
|
|
|
@@ -15,7 +15,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
|
||||||
});
|
});
|
||||||
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
|
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
|
||||||
}
|
}
|
||||||
db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string()));
|
db.set_file_text(pos.file_id, Arc::from(ra_fixture_change));
|
||||||
|
|
||||||
{
|
{
|
||||||
let events = db.log_executed(|| {
|
let events = db.log_executed(|| {
|
||||||
|
@@ -96,7 +96,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
|
||||||
});
|
});
|
||||||
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
|
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
|
||||||
}
|
}
|
||||||
db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));
|
db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
|
||||||
|
|
||||||
{
|
{
|
||||||
let events = db.log_executed(|| {
|
let events = db.log_executed(|| {
|
||||||
|
@@ -150,7 +150,7 @@ fn quux() { 92 }
|
||||||
m!(Y);
|
m!(Y);
|
||||||
m!(Z);
|
m!(Z);
|
||||||
"#;
|
"#;
|
||||||
db.set_file_text(pos.file_id, Arc::new(new_text.to_string()));
|
db.set_file_text(pos.file_id, Arc::from(new_text));
|
||||||
|
|
||||||
{
|
{
|
||||||
let events = db.log_executed(|| {
|
let events = db.log_executed(|| {
|
||||||
|
|
|
@@ -71,7 +71,7 @@ impl fmt::Debug for TestDB {
|
||||||
impl panic::RefUnwindSafe for TestDB {}
|
impl panic::RefUnwindSafe for TestDB {}
|
||||||
|
|
||||||
impl FileLoader for TestDB {
|
impl FileLoader for TestDB {
|
||||||
fn file_text(&self, file_id: FileId) -> Arc<String> {
|
fn file_text(&self, file_id: FileId) -> Arc<str> {
|
||||||
FileLoaderDelegate(self).file_text(file_id)
|
FileLoaderDelegate(self).file_text(file_id)
|
||||||
}
|
}
|
||||||
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
||||||
|
|
|
@@ -76,7 +76,7 @@ impl salsa::ParallelDatabase for TestDB {
|
||||||
impl panic::RefUnwindSafe for TestDB {}
|
impl panic::RefUnwindSafe for TestDB {}
|
||||||
|
|
||||||
impl FileLoader for TestDB {
|
impl FileLoader for TestDB {
|
||||||
fn file_text(&self, file_id: FileId) -> Arc<String> {
|
fn file_text(&self, file_id: FileId) -> Arc<str> {
|
||||||
FileLoaderDelegate(self).file_text(file_id)
|
FileLoaderDelegate(self).file_text(file_id)
|
||||||
}
|
}
|
||||||
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
||||||
|
|
|
@@ -572,10 +572,9 @@ fn salsa_bug() {
|
||||||
let x = 1;
|
let x = 1;
|
||||||
x.push(1);
|
x.push(1);
|
||||||
}
|
}
|
||||||
"
|
";
|
||||||
.to_string();
|
|
||||||
|
|
||||||
db.set_file_text(pos.file_id, Arc::new(new_text));
|
db.set_file_text(pos.file_id, Arc::from(new_text));
|
||||||
|
|
||||||
let module = db.module_for_file(pos.file_id);
|
let module = db.module_for_file(pos.file_id);
|
||||||
let crate_def_map = module.def_map(&db);
|
let crate_def_map = module.def_map(&db);
|
||||||
|
|
|
@@ -33,10 +33,9 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
|
||||||
+
|
+
|
||||||
1
|
1
|
||||||
}
|
}
|
||||||
"
|
";
|
||||||
.to_string();
|
|
||||||
|
|
||||||
db.set_file_text(pos.file_id, Arc::new(new_text));
|
db.set_file_text(pos.file_id, Arc::from(new_text));
|
||||||
|
|
||||||
{
|
{
|
||||||
let events = db.log_executed(|| {
|
let events = db.log_executed(|| {
|
||||||
|
|
|
@@ -113,7 +113,7 @@ impl Upcast<dyn HirDatabase> for RootDatabase {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FileLoader for RootDatabase {
|
impl FileLoader for RootDatabase {
|
||||||
fn file_text(&self, file_id: FileId) -> Arc<String> {
|
fn file_text(&self, file_id: FileId) -> Arc<str> {
|
||||||
FileLoaderDelegate(self).file_text(file_id)
|
FileLoaderDelegate(self).file_text(file_id)
|
||||||
}
|
}
|
||||||
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
|
||||||
|
|
|
@@ -438,11 +438,11 @@ impl<'a> FindUsages<'a> {
|
||||||
fn scope_files<'a>(
|
fn scope_files<'a>(
|
||||||
sema: &'a Semantics<'_, RootDatabase>,
|
sema: &'a Semantics<'_, RootDatabase>,
|
||||||
scope: &'a SearchScope,
|
scope: &'a SearchScope,
|
||||||
) -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a {
|
) -> impl Iterator<Item = (Arc<str>, FileId, TextRange)> + 'a {
|
||||||
scope.entries.iter().map(|(&file_id, &search_range)| {
|
scope.entries.iter().map(|(&file_id, &search_range)| {
|
||||||
let text = sema.db.file_text(file_id);
|
let text = sema.db.file_text(file_id);
|
||||||
let search_range =
|
let search_range =
|
||||||
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
|
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
|
||||||
|
|
||||||
(text, file_id, search_range)
|
(text, file_id, search_range)
|
||||||
})
|
})
|
||||||
|
@@ -553,7 +553,7 @@ impl<'a> FindUsages<'a> {
|
||||||
|
|
||||||
let text = sema.db.file_text(file_id);
|
let text = sema.db.file_text(file_id);
|
||||||
let search_range =
|
let search_range =
|
||||||
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
|
search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
|
||||||
|
|
||||||
let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
|
let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
|
||||||
let finder = &Finder::new("self");
|
let finder = &Finder::new("self");
|
||||||
|
|
|
@@ -224,7 +224,7 @@ impl<'db> MatchFinder<'db> {
|
||||||
let file = self.sema.parse(file_id);
|
let file = self.sema.parse(file_id);
|
||||||
let mut res = Vec::new();
|
let mut res = Vec::new();
|
||||||
let file_text = self.sema.db.file_text(file_id);
|
let file_text = self.sema.db.file_text(file_id);
|
||||||
let mut remaining_text = file_text.as_str();
|
let mut remaining_text = &*file_text;
|
||||||
let mut base = 0;
|
let mut base = 0;
|
||||||
let len = snippet.len() as u32;
|
let len = snippet.len() as u32;
|
||||||
while let Some(offset) = remaining_text.find(snippet) {
|
while let Some(offset) = remaining_text.find(snippet) {
|
||||||
|
|
|
@@ -244,7 +244,7 @@ impl Analysis {
|
||||||
Err("Analysis::from_single_file has no target layout".into()),
|
Err("Analysis::from_single_file has no target layout".into()),
|
||||||
None,
|
None,
|
||||||
);
|
);
|
||||||
change.change_file(file_id, Some(Arc::new(text)));
|
change.change_file(file_id, Some(Arc::from(text)));
|
||||||
change.set_crate_graph(crate_graph);
|
change.set_crate_graph(crate_graph);
|
||||||
host.apply_change(change);
|
host.apply_change(change);
|
||||||
(host.analysis(), file_id)
|
(host.analysis(), file_id)
|
||||||
|
@@ -263,7 +263,7 @@ impl Analysis {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Gets the text of the source file.
|
/// Gets the text of the source file.
|
||||||
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
|
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
|
||||||
self.with_db(|db| db.file_text(file_id))
|
self.with_db(|db| db.file_text(file_id))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -164,8 +164,8 @@ impl fmt::Display for FilesStats {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl StatCollect<FileId, Arc<String>> for FilesStats {
|
impl StatCollect<FileId, Arc<str>> for FilesStats {
|
||||||
fn collect_entry(&mut self, _: FileId, value: Option<Arc<String>>) {
|
fn collect_entry(&mut self, _: FileId, value: Option<Arc<str>>) {
|
||||||
self.total += 1;
|
self.total += 1;
|
||||||
self.size += value.unwrap().len();
|
self.size += value.unwrap().len();
|
||||||
}
|
}
|
||||||
|
|
|
@@ -162,9 +162,9 @@ fn load_crate_graph(
|
||||||
let changes = vfs.take_changes();
|
let changes = vfs.take_changes();
|
||||||
for file in changes {
|
for file in changes {
|
||||||
if file.exists() {
|
if file.exists() {
|
||||||
let contents = vfs.file_contents(file.file_id).to_vec();
|
let contents = vfs.file_contents(file.file_id);
|
||||||
if let Ok(text) = String::from_utf8(contents) {
|
if let Ok(text) = std::str::from_utf8(contents) {
|
||||||
analysis_change.change_file(file.file_id, Some(Arc::new(text)))
|
analysis_change.change_file(file.file_id, Some(Arc::from(text)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -269,7 +269,7 @@ impl GlobalState {
|
||||||
String::from_utf8(bytes).ok().and_then(|text| {
|
String::from_utf8(bytes).ok().and_then(|text| {
|
||||||
let (text, line_endings) = LineEndings::normalize(text);
|
let (text, line_endings) = LineEndings::normalize(text);
|
||||||
line_endings_map.insert(file.file_id, line_endings);
|
line_endings_map.insert(file.file_id, line_endings);
|
||||||
Some(Arc::new(text))
|
Some(Arc::from(text))
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
|
|
@@ -65,7 +65,7 @@ fn integrated_highlighting_benchmark() {
|
||||||
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
|
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
|
||||||
text.push_str("\npub fn _dummy() {}\n");
|
text.push_str("\npub fn _dummy() {}\n");
|
||||||
let mut change = Change::new();
|
let mut change = Change::new();
|
||||||
change.change_file(file_id, Some(Arc::new(text)));
|
change.change_file(file_id, Some(Arc::from(text)));
|
||||||
host.apply_change(change);
|
host.apply_change(change);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -121,7 +121,7 @@ fn integrated_completion_benchmark() {
|
||||||
patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
|
patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
|
||||||
+ "sel".len();
|
+ "sel".len();
|
||||||
let mut change = Change::new();
|
let mut change = Change::new();
|
||||||
change.change_file(file_id, Some(Arc::new(text)));
|
change.change_file(file_id, Some(Arc::from(text)));
|
||||||
host.apply_change(change);
|
host.apply_change(change);
|
||||||
completion_offset
|
completion_offset
|
||||||
};
|
};
|
||||||
|
@@ -160,7 +160,7 @@ fn integrated_completion_benchmark() {
|
||||||
patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
|
patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
|
||||||
+ "self.".len();
|
+ "self.".len();
|
||||||
let mut change = Change::new();
|
let mut change = Change::new();
|
||||||
change.change_file(file_id, Some(Arc::new(text)));
|
change.change_file(file_id, Some(Arc::from(text)));
|
||||||
host.apply_change(change);
|
host.apply_change(change);
|
||||||
completion_offset
|
completion_offset
|
||||||
};
|
};
|
||||||
|
|
Loading…
Reference in a new issue