Mirror of https://github.com/rust-lang/rust-analyzer (synced 2025-01-13 13:48:50 +00:00)
Merge pull request #4548 from bnjjj/fix_4464
add support of feature flag for runnables
Commit 130318b823
9 changed files with 234 additions and 19 deletions
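In short: every runnable (a binary `main`, a `#[test]` function, a doc test, or a test module) now records the `#[cfg(...)]` predicates it is gated behind, and the LSP layer turns any `feature = "..."` predicates among them into `--features` arguments for the generated Cargo command. A minimal illustration of the intended effect, using the same fixture shape as the new tests in this PR (the rest of the Cargo invocation depends on the target spec as before):

    // A test that only compiles when the "foo" feature is enabled.
    #[test]
    #[cfg(feature = "foo")]
    fn test_foo1() {}

Previously the runnable for `test_foo1` carried no feature information, so running it on a crate where `foo` is not a default feature produced a Cargo invocation that could not see the test; with this change the generated args also include `--features foo`.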
Cargo.lock (generated): 3 additions

@@ -1372,17 +1372,20 @@ dependencies = [
  "lsp-types",
  "parking_lot",
  "pico-args",
+ "ra_cfg",
  "ra_db",
  "ra_flycheck",
  "ra_hir",
  "ra_hir_def",
  "ra_hir_ty",
  "ra_ide",
+ "ra_mbe",
  "ra_proc_macro_srv",
  "ra_prof",
  "ra_project_model",
  "ra_syntax",
  "ra_text_edit",
+ "ra_tt",
  "ra_vfs",
  "rand",
  "relative-path",

@@ -88,13 +88,17 @@ fn next_cfg_expr(it: &mut SliceIter<tt::TokenTree>) -> Option<CfgExpr> {
 mod tests {
     use super::*;
 
-    use mbe::ast_to_token_tree;
+    use mbe::{ast_to_token_tree, TokenMap};
     use ra_syntax::ast::{self, AstNode};
 
-    fn assert_parse_result(input: &str, expected: CfgExpr) {
+    fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        let (tt, _) = ast_to_token_tree(&tt).unwrap();
+        ast_to_token_tree(&tt).unwrap()
+    }
+
+    fn assert_parse_result(input: &str, expected: CfgExpr) {
+        let (tt, _) = get_token_tree_generated(input);
         assert_eq!(parse_cfg(&tt), expected);
     }
 

@@ -62,6 +62,7 @@ pub use crate::{
 
 pub use hir_def::{
     adt::StructKind,
+    attr::Attrs,
     body::scope::ExprScopes,
     builtin_type::BuiltinType,
     docs::Documentation,

@@ -81,7 +81,7 @@ impl Attrs {
         }
     }
 
-    fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs {
+    pub fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs {
         let hygiene = Hygiene::new(db.upcast(), owner.file_id);
         Attrs::new(owner.value, &hygiene)
     }

@@ -29,7 +29,7 @@ use crate::{
     AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
 };
 
-/// A subset of Exander that only deals with cfg attributes. We only need it to
+/// A subset of Expander that only deals with cfg attributes. We only need it to
 /// avoid cyclic queries in crate def map during enum processing.
 pub(crate) struct CfgExpander {
     cfg_options: CfgOptions,

@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{AsAssocItem, Semantics};
+use hir::{AsAssocItem, Attrs, HirFileId, InFile, Semantics};
 use itertools::Itertools;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{

@@ -10,12 +10,14 @@ use ra_syntax::{
 
 use crate::FileId;
 use ast::DocCommentsOwner;
+use ra_cfg::CfgExpr;
 use std::fmt::Display;
 
 #[derive(Debug)]
 pub struct Runnable {
     pub range: TextRange,
     pub kind: RunnableKind,
+    pub cfg_exprs: Vec<CfgExpr>,
 }
 
 #[derive(Debug)]

@@ -45,20 +47,24 @@ pub enum RunnableKind {
 pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(file_id);
-    source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
+    source_file.syntax().descendants().filter_map(|i| runnable(&sema, i, file_id)).collect()
 }
 
-fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
+fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode, file_id: FileId) -> Option<Runnable> {
     match_ast! {
         match item {
-            ast::FnDef(it) => runnable_fn(sema, it),
-            ast::Module(it) => runnable_mod(sema, it),
+            ast::FnDef(it) => runnable_fn(sema, it, file_id),
+            ast::Module(it) => runnable_mod(sema, it, file_id),
             _ => None,
         }
     }
 }
 
-fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
+fn runnable_fn(
+    sema: &Semantics<RootDatabase>,
+    fn_def: ast::FnDef,
+    file_id: FileId,
+) -> Option<Runnable> {
     let name_string = fn_def.name()?.text().to_string();
 
     let kind = if name_string == "main" {

@@ -111,7 +117,12 @@ fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
             return None;
         }
     };
-    Some(Runnable { range: fn_def.syntax().text_range(), kind })
+
+    let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &fn_def));
+    let cfg_exprs =
+        attrs.by_key("cfg").tt_values().map(|subtree| ra_cfg::parse_cfg(subtree)).collect();
+
+    Some(Runnable { range: fn_def.syntax().text_range(), kind, cfg_exprs })
 }
 
 #[derive(Debug)]

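For reference, and not part of the diff: a small sketch of the attribute-to-CfgExpr step that `runnable_fn` now performs, written in the same shape as the `get_token_tree_generated` helpers added elsewhere in this PR. It assumes a crate with access to `ra_cfg`, `ra_syntax`, and `ra_mbe` (imported as `mbe`):

    use mbe::ast_to_token_tree;
    use ra_cfg::{parse_cfg, CfgExpr};
    use ra_syntax::ast::{self, AstNode};

    // Find the first `#[cfg(...)]` / `#![cfg(...)]` token tree in `input` and lower it
    // to a CfgExpr, as runnable_fn does via Attrs::by_key("cfg").tt_values().
    fn cfg_expr_of(input: &str) -> CfgExpr {
        let source_file = ast::SourceFile::parse(input).ok().unwrap();
        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
        let (tt, _) = ast_to_token_tree(&tt).unwrap();
        parse_cfg(&tt)
    }

    // cfg_expr_of(r#"#![cfg(feature = "foo")]"#)
    //     => KeyValue { key: "feature", value: "foo" }
    // cfg_expr_of(r#"#![cfg(all(feature = "foo", feature = "bar"))]"#)
    //     => All([KeyValue { .. }, KeyValue { .. }])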
@@ -147,7 +158,11 @@ fn has_doc_test(fn_def: &ast::FnDef) -> bool {
     fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```"))
 }
 
-fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
+fn runnable_mod(
+    sema: &Semantics<RootDatabase>,
+    module: ast::Module,
+    file_id: FileId,
+) -> Option<Runnable> {
     let has_test_function = module
         .item_list()?
         .items()

@@ -160,11 +175,20 @@ fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
         return None;
     }
     let range = module.syntax().text_range();
-    let module = sema.to_def(&module)?;
+    let module_def = sema.to_def(&module)?;
 
-    let path =
-        module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
-    Some(Runnable { range, kind: RunnableKind::TestMod { path } })
+    let path = module_def
+        .path_to_root(sema.db)
+        .into_iter()
+        .rev()
+        .filter_map(|it| it.name(sema.db))
+        .join("::");
+
+    let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &module));
+    let cfg_exprs =
+        attrs.by_key("cfg").tt_values().map(|subtree| ra_cfg::parse_cfg(subtree)).collect();
+
+    Some(Runnable { range, kind: RunnableKind::TestMod { path }, cfg_exprs })
 }
 
 #[cfg(test)]

@@ -196,6 +220,7 @@ mod tests {
             Runnable {
                 range: 1..21,
                 kind: Bin,
+                cfg_exprs: [],
             },
             Runnable {
                 range: 22..46,

@@ -207,6 +232,7 @@ mod tests {
                         ignore: false,
                     },
                 },
+                cfg_exprs: [],
             },
             Runnable {
                 range: 47..81,

@@ -218,6 +244,7 @@ mod tests {
                         ignore: true,
                     },
                 },
+                cfg_exprs: [],
             },
         ]
         "###

@@ -245,6 +272,7 @@ mod tests {
             Runnable {
                 range: 1..21,
                 kind: Bin,
+                cfg_exprs: [],
             },
             Runnable {
                 range: 22..64,

@@ -253,6 +281,7 @@ mod tests {
                         "foo",
                     ),
                 },
+                cfg_exprs: [],
             },
         ]
         "###

@@ -283,6 +312,7 @@ mod tests {
             Runnable {
                 range: 1..21,
                 kind: Bin,
+                cfg_exprs: [],
             },
             Runnable {
                 range: 51..105,

@@ -291,6 +321,7 @@ mod tests {
                         "Data::foo",
                     ),
                 },
+                cfg_exprs: [],
             },
         ]
         "###

@@ -318,6 +349,7 @@ mod tests {
                 kind: TestMod {
                     path: "test_mod",
                 },
+                cfg_exprs: [],
             },
             Runnable {
                 range: 28..57,

@@ -329,6 +361,7 @@ mod tests {
                         ignore: false,
                     },
                 },
+                cfg_exprs: [],
             },
         ]
         "###

@@ -358,6 +391,7 @@ mod tests {
                 kind: TestMod {
                     path: "foo::test_mod",
                 },
+                cfg_exprs: [],
             },
             Runnable {
                 range: 46..79,

@@ -369,6 +403,7 @@ mod tests {
                         ignore: false,
                     },
                 },
+                cfg_exprs: [],
             },
         ]
         "###

@@ -400,6 +435,7 @@ mod tests {
                 kind: TestMod {
                     path: "foo::bar::test_mod",
                 },
+                cfg_exprs: [],
             },
             Runnable {
                 range: 68..105,

@@ -411,6 +447,89 @@ mod tests {
                         ignore: false,
                     },
                 },
+                cfg_exprs: [],
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
+    fn test_runnables_with_feature() {
+        let (analysis, pos) = analysis_and_position(
+            r#"
+        //- /lib.rs crate:foo cfg:feature=foo
+        <|> //empty
+        #[test]
+        #[cfg(feature = "foo")]
+        fn test_foo1() {}
+        "#,
+        );
+        let runnables = analysis.runnables(pos.file_id).unwrap();
+        assert_debug_snapshot!(&runnables,
+        @r###"
+        [
+            Runnable {
+                range: 1..58,
+                kind: Test {
+                    test_id: Name(
+                        "test_foo1",
+                    ),
+                    attr: TestAttr {
+                        ignore: false,
+                    },
+                },
+                cfg_exprs: [
+                    KeyValue {
+                        key: "feature",
+                        value: "foo",
+                    },
+                ],
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
+    fn test_runnables_with_features() {
+        let (analysis, pos) = analysis_and_position(
+            r#"
+        //- /lib.rs crate:foo cfg:feature=foo,feature=bar
+        <|> //empty
+        #[test]
+        #[cfg(all(feature = "foo", feature = "bar"))]
+        fn test_foo1() {}
+        "#,
+        );
+        let runnables = analysis.runnables(pos.file_id).unwrap();
+        assert_debug_snapshot!(&runnables,
+        @r###"
+        [
+            Runnable {
+                range: 1..80,
+                kind: Test {
+                    test_id: Name(
+                        "test_foo1",
+                    ),
+                    attr: TestAttr {
+                        ignore: false,
+                    },
+                },
+                cfg_exprs: [
+                    All(
+                        [
+                            KeyValue {
+                                key: "feature",
+                                value: "foo",
+                            },
+                            KeyValue {
+                                key: "feature",
+                                value: "bar",
+                            },
+                        ],
+                    ),
+                ],
             },
         ]
         "###

@@ -40,6 +40,7 @@ ra_project_model = { path = "../ra_project_model" }
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
 ra_vfs = "0.6.0"
+ra_cfg = { path = "../ra_cfg"}
 
 # This should only be used in CLI
 ra_db = { path = "../ra_db" }

@@ -55,6 +56,8 @@ winapi = "0.3.8"
 tempfile = "3.1.0"
 insta = "0.16.0"
 test_utils = { path = "../test_utils" }
+mbe = { path = "../ra_mbe", package = "ra_mbe" }
+tt = { path = "../ra_tt", package = "ra_tt" }
 
 [features]
 jemalloc = [ "ra_prof/jemalloc" ]

@@ -4,6 +4,7 @@ use ra_ide::{FileId, RunnableKind, TestId};
 use ra_project_model::{self, ProjectWorkspace, TargetKind};
 
 use crate::{world::WorldSnapshot, Result};
+use ra_syntax::SmolStr;
 
 /// Abstract representation of Cargo target.
 ///

@@ -20,6 +21,7 @@ impl CargoTargetSpec {
     pub(crate) fn runnable_args(
         spec: Option<CargoTargetSpec>,
         kind: &RunnableKind,
+        features_needed: &Vec<SmolStr>,
     ) -> Result<(Vec<String>, Vec<String>)> {
         let mut args = Vec::new();
         let mut extra_args = Vec::new();

@@ -73,6 +75,12 @@ impl CargoTargetSpec {
                 }
             }
         }
+
+        features_needed.iter().for_each(|feature| {
+            args.push("--features".to_string());
+            args.push(feature.to_string());
+        });
+
         Ok((args, extra_args))
     }
 

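A standalone sketch (plain Rust, hypothetical starting arguments) of what the loop added to `runnable_args` contributes: each collected feature becomes a `--features <name>` pair appended to the Cargo args.

    fn main() {
        // Features collected from the runnable's cfg_exprs (hypothetical values).
        let features_needed = vec!["foo".to_string(), "bar".to_string()];
        let mut args: Vec<String> = vec!["test".to_string()];

        // Same shape as the loop added to CargoTargetSpec::runnable_args above.
        features_needed.iter().for_each(|feature| {
            args.push("--features".to_string());
            args.push(feature.to_string());
        });

        assert_eq!(args, ["test", "--features", "foo", "--features", "bar"]);
    }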
@@ -17,13 +17,14 @@ use lsp_types::{
     SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
     SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, Url, WorkspaceEdit,
 };
+use ra_cfg::CfgExpr;
 use ra_ide::{
     FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
     TextEdit,
 };
 use ra_prof::profile;
 use ra_project_model::TargetKind;
-use ra_syntax::{AstNode, SyntaxKind, TextRange, TextSize};
+use ra_syntax::{AstNode, SmolStr, SyntaxKind, TextRange, TextSize};
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use serde_json::to_value;

@@ -978,7 +979,12 @@ fn to_lsp_runnable(
 ) -> Result<lsp_ext::Runnable> {
     let spec = CargoTargetSpec::for_file(world, file_id)?;
     let target = spec.as_ref().map(|s| s.target.clone());
-    let (args, extra_args) = CargoTargetSpec::runnable_args(spec, &runnable.kind)?;
+    let mut features_needed = vec![];
+    for cfg_expr in &runnable.cfg_exprs {
+        collect_minimal_features_needed(cfg_expr, &mut features_needed);
+    }
+    let (args, extra_args) =
+        CargoTargetSpec::runnable_args(spec, &runnable.kind, &features_needed)?;
     let line_index = world.analysis().file_line_index(file_id)?;
     let label = match &runnable.kind {
         RunnableKind::Test { test_id, .. } => format!("test {}", test_id),

@@ -1004,6 +1010,26 @@ fn to_lsp_runnable(
     })
 }
 
+/// Fill minimal features needed
+fn collect_minimal_features_needed(cfg_expr: &CfgExpr, features: &mut Vec<SmolStr>) {
+    match cfg_expr {
+        CfgExpr::KeyValue { key, value } if key == "feature" => features.push(value.clone()),
+        CfgExpr::All(preds) => {
+            preds.iter().for_each(|cfg| collect_minimal_features_needed(cfg, features));
+        }
+        CfgExpr::Any(preds) => {
+            for cfg in preds {
+                let len_features = features.len();
+                collect_minimal_features_needed(cfg, features);
+                if len_features != features.len() {
+                    break;
+                }
+            }
+        }
+        _ => {}
+    }
+}
+
 pub fn handle_inlay_hints(
     world: WorldSnapshot,
     params: InlayHintsParams,

|
||||||
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
|
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
|
||||||
Ok(Some(semantic_tokens.into()))
|
Ok(Some(semantic_tokens.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
use mbe::{ast_to_token_tree, TokenMap};
|
||||||
|
use ra_cfg::parse_cfg;
|
||||||
|
use ra_syntax::{
|
||||||
|
ast::{self, AstNode},
|
||||||
|
SmolStr,
|
||||||
|
};
|
||||||
|
|
||||||
|
fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
|
||||||
|
let source_file = ast::SourceFile::parse(input).ok().unwrap();
|
||||||
|
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
|
||||||
|
ast_to_token_tree(&tt).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_cfg_expr_minimal_features_needed() {
|
||||||
|
let (subtree, _) = get_token_tree_generated(r#"#![cfg(feature = "baz")]"#);
|
||||||
|
let cfg_expr = parse_cfg(&subtree);
|
||||||
|
let mut min_features = vec![];
|
||||||
|
collect_minimal_features_needed(&cfg_expr, &mut min_features);
|
||||||
|
|
||||||
|
assert_eq!(min_features, vec![SmolStr::new("baz")]);
|
||||||
|
|
||||||
|
let (subtree, _) =
|
||||||
|
get_token_tree_generated(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#);
|
||||||
|
let cfg_expr = parse_cfg(&subtree);
|
||||||
|
|
||||||
|
let mut min_features = vec![];
|
||||||
|
collect_minimal_features_needed(&cfg_expr, &mut min_features);
|
||||||
|
assert_eq!(min_features, vec![SmolStr::new("baz"), SmolStr::new("foo")]);
|
||||||
|
|
||||||
|
let (subtree, _) =
|
||||||
|
get_token_tree_generated(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#);
|
||||||
|
let cfg_expr = parse_cfg(&subtree);
|
||||||
|
|
||||||
|
let mut min_features = vec![];
|
||||||
|
collect_minimal_features_needed(&cfg_expr, &mut min_features);
|
||||||
|
assert_eq!(min_features, vec![SmolStr::new("baz")]);
|
||||||
|
|
||||||
|
let (subtree, _) = get_token_tree_generated(r#"#![cfg(foo)]"#);
|
||||||
|
let cfg_expr = parse_cfg(&subtree);
|
||||||
|
|
||||||
|
let mut min_features = vec![];
|
||||||
|
collect_minimal_features_needed(&cfg_expr, &mut min_features);
|
||||||
|
assert!(min_features.is_empty());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|