mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-13 21:54:42 +00:00
2501: Fix coercion from &Foo to an inference variable in a reference r=matklad a=flodiebold

We didn't try to unify within the reference, but we should.

2502: Delay legacy macro expansion r=matklad a=edwin0cheng

This PR makes the following changes:

* Delay legacy macro expansion so that all item-collecting macro expansion is concentrated in one place.
* Add `MacroDirective` to replace 3-tuples.
* After this refactoring, no macro is expanded recursively, so we can remove `MacroStackMonitor` and enforce the expansion limit through the fix-point loop count.

2503: Code: check whether the LSP binary is in PATH r=matklad a=lnicola

I'm not really sure about the TS changes. I just made a couple of functions async and it seems to work.

Co-authored-by: Florian Diebold <flodiebold@gmail.com>
Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
Co-authored-by: Laurențiu Nicola <lnicola@dend.ro>
Commit d0ad30ad97
9 changed files with 140 additions and 158 deletions
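As background for the coercion fix in 2501: the test added further down (`coerce_autoderef_generic`) exercises calls like `takes_ref(&Foo)`, where the parameter type `&T` still contains an unconstrained inference variable; unification must now look inside the reference so `T` is inferred as `Foo`, `&Foo`, and so on. The sketch below only restates that fixture in a form that compiles on its own (the `Copy` bound and the named bindings are additions for that purpose); it is illustrative, not code from this commit.

```rust
// Restates the scenario from the new coercion test: passing `&Foo`, `&&Foo`, ...
// to a `&T` parameter must unify inside the reference, inferring `T` accordingly.
#[derive(Clone, Copy, Debug)]
struct Foo;

// Same shape as the test fixture; a `Copy` bound is added so `*x` is allowed
// outside the inference-only test harness.
fn takes_ref<T: Copy>(x: &T) -> T {
    *x
}

fn main() {
    let foo = Foo;
    let r = &foo;
    let rr = &r;
    let a: Foo = takes_ref(&foo); // T inferred as Foo
    let b: &Foo = takes_ref(&r); // T inferred as &Foo
    let c: &&Foo = takes_ref(&rr); // T inferred as &&Foo
    println!("{:?} {:?} {:?}", a, b, c);
}
```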
@@ -37,7 +37,8 @@ $ cargo xtask install
 $ cargo xtask install --server
 ```
 
-For non-standard setup of VS Code and other editors, see [./docs/user](./docs/user).
+For non-standard setup of VS Code and other editors, or if the language server
+cannot start, see [./docs/user](./docs/user).
 
 ## Documentation
@@ -12,7 +12,7 @@ use hir_expand::{
 use ra_cfg::CfgOptions;
 use ra_db::{CrateId, FileId};
 use ra_syntax::ast;
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashMap;
 use test_utils::tested_by;
 
 use crate::{
@@ -63,42 +63,12 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
         unexpanded_macros: Vec::new(),
         unexpanded_attribute_macros: Vec::new(),
         mod_dirs: FxHashMap::default(),
-        macro_stack_monitor: MacroStackMonitor::default(),
-        poison_macros: FxHashSet::default(),
         cfg_options,
     };
     collector.collect();
     collector.finish()
 }
 
-#[derive(Default)]
-struct MacroStackMonitor {
-    counts: FxHashMap<MacroDefId, u32>,
-
-    /// Mainly use for test
-    validator: Option<Box<dyn Fn(u32) -> bool>>,
-}
-
-impl MacroStackMonitor {
-    fn increase(&mut self, macro_def_id: MacroDefId) {
-        *self.counts.entry(macro_def_id).or_default() += 1;
-    }
-
-    fn decrease(&mut self, macro_def_id: MacroDefId) {
-        *self.counts.entry(macro_def_id).or_default() -= 1;
-    }
-
-    fn is_poison(&self, macro_def_id: MacroDefId) -> bool {
-        let cur = *self.counts.get(&macro_def_id).unwrap_or(&0);
-
-        if let Some(validator) = &self.validator {
-            validator(cur)
-        } else {
-            cur > 100
-        }
-    }
-}
-
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 enum PartialResolvedImport {
     /// None of any namespaces is resolved
@@ -127,6 +97,14 @@ struct ImportDirective {
     status: PartialResolvedImport,
 }
 
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct MacroDirective {
+    module_id: LocalModuleId,
+    ast_id: AstId<ast::MacroCall>,
+    path: Path,
+    legacy: Option<MacroCallId>,
+}
+
 /// Walks the tree of module recursively
 struct DefCollector<'a, DB> {
     db: &'a DB,
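To make the refactoring in 2502 easier to follow, here is a minimal, self-contained model of the queue-and-iterate scheme that `MacroDirective` enables: macro calls are recorded as directives (legacy macros carry a pre-resolved call id) and drained in a fix-point loop, and the loop count, rather than a per-macro recursion counter, bounds runaway expansion. All names here (`Directive`, `EXPANSION_LIMIT`, the `u32` call ids) are invented for the sketch; this is not rust-analyzer's actual collector code.

```rust
// Simplified model of "delay and iterate": queue macro calls as directives and
// drain them in a fix-point loop instead of expanding them recursively.

#[derive(Debug)]
struct Directive {
    path: String,
    // Macros already resolved in the legacy (textual) scope carry a
    // pre-computed call id; only their expansion is delayed.
    legacy: Option<u32>,
}

const EXPANSION_LIMIT: usize = 128;

fn resolve(path: &str) -> Option<u32> {
    // Stand-in for path resolution; pretend only `foo` resolves.
    if path == "foo" {
        Some(0)
    } else {
        None
    }
}

fn main() {
    let mut unexpanded = vec![
        Directive { path: "foo".into(), legacy: None },
        Directive { path: "bar".into(), legacy: Some(1) },
    ];

    let mut rounds = 0;
    loop {
        let mut progress = false;
        let mut still_unexpanded = Vec::new();

        for directive in unexpanded.drain(..) {
            // Legacy directives already carry a call id; others must resolve first.
            match directive.legacy.or_else(|| resolve(&directive.path)) {
                Some(id) => {
                    println!("expanding {} as call {}", directive.path, id);
                    progress = true;
                    // A real collector would push any macro calls produced by the
                    // expansion back onto the queue here.
                }
                None => still_unexpanded.push(directive),
            }
        }

        unexpanded = still_unexpanded;
        rounds += 1;
        // Fixed point reached, or the loop-count limit replaces the old
        // per-macro recursion counter (`MacroStackMonitor`).
        if !progress || unexpanded.is_empty() || rounds >= EXPANSION_LIMIT {
            break;
        }
    }
}
```

The actual collector keeps this shape: resolution re-runs until `ReachedFixedPoint::Yes`, and legacy expansions now enter the same queue instead of recursing.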
@@ -134,25 +112,9 @@ struct DefCollector<'a, DB> {
     glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>,
     unresolved_imports: Vec<ImportDirective>,
     resolved_imports: Vec<ImportDirective>,
-    unexpanded_macros: Vec<(LocalModuleId, AstId<ast::MacroCall>, Path)>,
+    unexpanded_macros: Vec<MacroDirective>,
     unexpanded_attribute_macros: Vec<(LocalModuleId, AstId<ast::ModuleItem>, Path)>,
     mod_dirs: FxHashMap<LocalModuleId, ModDir>,
-
-    /// Some macro use `$tt:tt which mean we have to handle the macro perfectly
-    /// To prevent stack overflow, we add a deep counter here for prevent that.
-    macro_stack_monitor: MacroStackMonitor,
-    /// Some macros are not well-behavior, which leads to infinite loop
-    /// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
-    /// We mark it down and skip it in collector
-    ///
-    /// FIXME:
-    /// Right now it only handle a poison macro in a single crate,
-    /// such that if other crate try to call that macro,
-    /// the whole process will do again until it became poisoned in that crate.
-    /// We should handle this macro set globally
-    /// However, do we want to put it as a global variable?
-    poison_macros: FxHashSet<MacroDefId>,
-
     cfg_options: &'a CfgOptions,
 }
@@ -556,18 +518,24 @@ where
             std::mem::replace(&mut self.unexpanded_attribute_macros, Vec::new());
         let mut resolved = Vec::new();
         let mut res = ReachedFixedPoint::Yes;
-        macros.retain(|(module_id, ast_id, path)| {
+        macros.retain(|directive| {
+            if let Some(call_id) = directive.legacy {
+                res = ReachedFixedPoint::No;
+                resolved.push((directive.module_id, call_id));
+                return false;
+            }
+
             let resolved_res = self.def_map.resolve_path_fp_with_macro(
                 self.db,
                 ResolveMode::Other,
-                *module_id,
-                path,
+                directive.module_id,
+                &directive.path,
                 BuiltinShadowMode::Module,
             );
 
             if let Some(def) = resolved_res.resolved_def.take_macros() {
-                let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(*ast_id));
-                resolved.push((*module_id, call_id, def));
+                let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(directive.ast_id));
+                resolved.push((directive.module_id, call_id));
                 res = ReachedFixedPoint::No;
                 return false;
             }
@@ -579,7 +547,7 @@ where
 
             if let Some(def) = resolved_res {
                 let call_id = def.as_call_id(self.db, MacroCallKind::Attr(*ast_id));
-                resolved.push((*module_id, call_id, def));
+                resolved.push((*module_id, call_id));
                 res = ReachedFixedPoint::No;
                 return false;
             }
@@ -590,8 +558,8 @@ where
         self.unexpanded_macros = macros;
         self.unexpanded_attribute_macros = attribute_macros;
 
-        for (module_id, macro_call_id, macro_def_id) in resolved {
-            self.collect_macro_expansion(module_id, macro_call_id, macro_def_id);
+        for (module_id, macro_call_id) in resolved {
+            self.collect_macro_expansion(module_id, macro_call_id);
         }
 
         res
@@ -611,36 +579,18 @@ where
         None
     }
 
-    fn collect_macro_expansion(
-        &mut self,
-        module_id: LocalModuleId,
-        macro_call_id: MacroCallId,
-        macro_def_id: MacroDefId,
-    ) {
-        if self.poison_macros.contains(&macro_def_id) {
-            return;
-        }
-
-        self.macro_stack_monitor.increase(macro_def_id);
-
-        if !self.macro_stack_monitor.is_poison(macro_def_id) {
-            let file_id: HirFileId = macro_call_id.as_file();
-            let raw_items = self.db.raw_items(file_id);
-            let mod_dir = self.mod_dirs[&module_id].clone();
-            ModCollector {
-                def_collector: &mut *self,
-                file_id,
-                module_id,
-                raw_items: &raw_items,
-                mod_dir,
-            }
-            .collect(raw_items.items());
-        } else {
-            log::error!("Too deep macro expansion: {:?}", macro_call_id);
-            self.poison_macros.insert(macro_def_id);
-        }
-
-        self.macro_stack_monitor.decrease(macro_def_id);
+    fn collect_macro_expansion(&mut self, module_id: LocalModuleId, macro_call_id: MacroCallId) {
+        let file_id: HirFileId = macro_call_id.as_file();
+        let raw_items = self.db.raw_items(file_id);
+        let mod_dir = self.mod_dirs[&module_id].clone();
+        ModCollector {
+            def_collector: &mut *self,
+            file_id,
+            module_id,
+            raw_items: &raw_items,
+            mod_dir,
+        }
+        .collect(raw_items.items());
     }
 
     fn finish(self) -> CrateDefMap {
@@ -908,15 +858,20 @@ where
            return;
        }
 
-        // Case 2: try to resolve in legacy scope and expand macro_rules, triggering
-        // recursive item collection.
+        // Case 2: try to resolve in legacy scope and expand macro_rules
        if let Some(macro_def) = mac.path.as_ident().and_then(|name| {
            self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
        }) {
            let macro_call_id =
                macro_def.as_call_id(self.def_collector.db, MacroCallKind::FnLike(ast_id));
 
-            self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, macro_def);
+            self.def_collector.unexpanded_macros.push(MacroDirective {
+                module_id: self.module_id,
+                path: mac.path.clone(),
+                ast_id,
+                legacy: Some(macro_call_id),
+            });
+
            return;
        }
 
@@ -926,7 +881,13 @@ where
        if path.is_ident() {
            path.kind = PathKind::Self_;
        }
-        self.def_collector.unexpanded_macros.push((self.module_id, ast_id, path));
+
+        self.def_collector.unexpanded_macros.push(MacroDirective {
+            module_id: self.module_id,
+            path,
+            ast_id,
+            legacy: None,
+        });
    }
 
    fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
@@ -951,19 +912,13 @@ fn is_macro_rules(path: &Path) -> bool {
 
 #[cfg(test)]
 mod tests {
+    use crate::{db::DefDatabase, test_db::TestDB};
     use ra_arena::Arena;
     use ra_db::{fixture::WithFixture, SourceDatabase};
-    use rustc_hash::FxHashSet;
-
-    use crate::{db::DefDatabase, test_db::TestDB};
 
     use super::*;
 
-    fn do_collect_defs(
-        db: &impl DefDatabase,
-        def_map: CrateDefMap,
-        monitor: MacroStackMonitor,
-    ) -> (CrateDefMap, FxHashSet<MacroDefId>) {
+    fn do_collect_defs(db: &impl DefDatabase, def_map: CrateDefMap) -> CrateDefMap {
         let mut collector = DefCollector {
             db,
             def_map,
@@ -973,19 +928,13 @@ mod tests {
             unexpanded_macros: Vec::new(),
             unexpanded_attribute_macros: Vec::new(),
             mod_dirs: FxHashMap::default(),
-            macro_stack_monitor: monitor,
-            poison_macros: FxHashSet::default(),
             cfg_options: &CfgOptions::default(),
         };
         collector.collect();
-        (collector.def_map, collector.poison_macros)
+        collector.def_map
     }
 
-    fn do_limited_resolve(
-        code: &str,
-        limit: u32,
-        poison_limit: u32,
-    ) -> (CrateDefMap, FxHashSet<MacroDefId>) {
+    fn do_resolve(code: &str) -> CrateDefMap {
         let (db, _file_id) = TestDB::with_single_file(&code);
         let krate = db.test_crate();
 
@@ -1003,59 +952,18 @@ mod tests {
                 diagnostics: Vec::new(),
             }
         };
 
-        let mut monitor = MacroStackMonitor::default();
-        monitor.validator = Some(Box::new(move |count| {
-            assert!(count < limit);
-            count >= poison_limit
-        }));
-
-        do_collect_defs(&db, def_map, monitor)
+        do_collect_defs(&db, def_map)
     }
 
     #[test]
-    fn test_macro_expand_limit_width() {
-        do_limited_resolve(
+    fn test_macro_expand_will_stop() {
+        do_resolve(
             r#"
         macro_rules! foo {
             ($($ty:ty)*) => { foo!($($ty)*, $($ty)*); }
         }
         foo!(KABOOM);
         "#,
-            16,
-            1000,
         );
     }
 
-    #[test]
-    fn test_macro_expand_poisoned() {
-        let (_, poison_macros) = do_limited_resolve(
-            r#"
-        macro_rules! foo {
-            ($ty:ty) => { foo!($ty); }
-        }
-        foo!(KABOOM);
-        "#,
-            100,
-            16,
-        );
-
-        assert_eq!(poison_macros.len(), 1);
-    }
-
-    #[test]
-    fn test_macro_expand_normal() {
-        let (_, poison_macros) = do_limited_resolve(
-            r#"
-        macro_rules! foo {
-            ($ident:ident) => { struct $ident {} }
-        }
-        foo!(Bar);
-        "#,
-            16,
-            16,
-        );
-
-        assert_eq!(poison_macros.len(), 0);
-    }
 }
@@ -332,7 +332,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 // It will not recurse to `coerce`.
                 return self.table.unify_substs(st1, st2, 0);
             }
-            _ => {}
+            _ => {
+                if self.table.unify_inner_trivial(&derefed_ty, &to_ty) {
+                    return true;
+                }
+            }
         }
     }
 
@@ -403,3 +403,40 @@ fn test() {
     "###
     );
 }
+
+#[test]
+fn coerce_autoderef_generic() {
+    assert_snapshot!(
+        infer_with_mismatches(r#"
+struct Foo;
+fn takes_ref<T>(x: &T) -> T { *x }
+fn test() {
+    takes_ref(&Foo);
+    takes_ref(&&Foo);
+    takes_ref(&&&Foo);
+}
+"#, true),
+        @r###"
+    [29; 30) 'x': &T
+    [41; 47) '{ *x }': T
+    [43; 45) '*x': T
+    [44; 45) 'x': &T
+    [58; 127) '{ ...oo); }': ()
+    [64; 73) 'takes_ref': fn takes_ref<Foo>(&T) -> T
+    [64; 79) 'takes_ref(&Foo)': Foo
+    [74; 78) '&Foo': &Foo
+    [75; 78) 'Foo': Foo
+    [85; 94) 'takes_ref': fn takes_ref<&Foo>(&T) -> T
+    [85; 101) 'takes_...&&Foo)': &Foo
+    [95; 100) '&&Foo': &&Foo
+    [96; 100) '&Foo': &Foo
+    [97; 100) 'Foo': Foo
+    [107; 116) 'takes_ref': fn takes_ref<&&Foo>(&T) -> T
+    [107; 124) 'takes_...&&Foo)': &&Foo
+    [117; 123) '&&&Foo': &&&Foo
+    [118; 123) '&&Foo': &&Foo
+    [119; 123) '&Foo': &Foo
+    [120; 123) 'Foo': Foo
+    "###
+    );
+}
@@ -204,4 +204,19 @@ Installation:
 
 * You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
 
 * Note that `ra_lsp_server` binary must be in `$PATH` for this to work. If it's not the case, you can specify full path to the binary, which is typically `.cargo/bin/ra_lsp_server`.
+
+### Setting up the `PATH` variable
+
+On Unix systems, `rustup` adds `~/.cargo/bin` to `PATH` by modifying the shell's
+startup file. Depending on your configuration, your Desktop Environment might not
+actually load it. If you find that `rust-analyzer` only runs when starting the
+editor from the terminal, you will have to set up your `PATH` variable manually.
+
+There are a couple of ways to do that:
+
+- for Code, set `rust-analyzer.raLspServerPath` to `~/.cargo/bin` (the `~` is
+  automatically resolved by the extension)
+- copy the binary to a location that is already in `PATH`, e.g. `/usr/local/bin`
+- on Linux, use PAM to configure the `PATH` variable, by e.g. putting
+  `PATH DEFAULT=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:@{HOME}/.cargo/bin:@{HOME}/.local/bin`
+  in your `~/.pam_environment` file; note that this might interfere with other
+  defaults set by the system administrator via `/etc/environment`.
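The new docs section above describes the `PATH` problem in prose; as a rough illustration of the kind of check the extension now performs through the `lookpath` package (see the `server.ts` hunk further down), here is a std-only Rust sketch that searches `PATH` for `ra_lsp_server`. The helper name and the omission of Windows `PATHEXT` handling are simplifications for this sketch, not part of the commit.

```rust
// Illustrative only: look for a binary in each directory listed in PATH.
use std::env;
use std::path::PathBuf;

fn find_in_path(binary: &str) -> Option<PathBuf> {
    let path = env::var_os("PATH")?;
    env::split_paths(&path)
        .map(|dir| dir.join(binary))
        .find(|candidate| candidate.is_file())
}

fn main() {
    match find_in_path("ra_lsp_server") {
        Some(location) => println!("found language server at {}", location.display()),
        None => eprintln!("ra_lsp_server is not on PATH; see the setup notes above"),
    }
}
```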
5 editors/code/package-lock.json generated
@@ -763,6 +763,11 @@
         "chalk": "^2.0.1"
       }
     },
+    "lookpath": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/lookpath/-/lookpath-1.0.3.tgz",
+      "integrity": "sha512-XIdgzlX26g10XnzyZdO/4obybEmfGnZyWQZ2DgmmEfVB79X+n3lhUoIzMe501C6s7RmCpAo66OPegWc+CsxYMg=="
+    },
     "magic-string": {
       "version": "0.25.3",
       "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.3.tgz",
@@ -31,6 +31,7 @@
     "singleQuote": true
   },
   "dependencies": {
+    "lookpath": "^1.0.3",
     "seedrandom": "^3.0.1",
     "vscode-languageclient": "^5.3.0-next.4"
   },
@@ -14,7 +14,7 @@ import * as events from './events';
 import * as notifications from './notifications';
 import { Server } from './server';
 
-export function activate(context: vscode.ExtensionContext) {
+export async function activate(context: vscode.ExtensionContext) {
     function disposeOnDeactivation(disposable: vscode.Disposable) {
         context.subscriptions.push(disposable);
     }
@@ -159,7 +159,11 @@ export function activate(context: vscode.ExtensionContext) {
     });
 
     // Start the language server, finally!
-    startServer();
+    try {
+        await startServer();
+    } catch (e) {
+        vscode.window.showErrorMessage(e.message);
+    }
 
     if (Server.config.displayInlayHints) {
         const hintsUpdater = new HintsUpdater();
@@ -204,10 +208,10 @@ export function deactivate(): Thenable<void> {
     return Server.client.stop();
 }
 
-async function reloadServer(startServer: () => void) {
+async function reloadServer(startServer: () => Promise<void>) {
     if (Server.client != null) {
         vscode.window.showInformationMessage('Reloading rust-analyzer...');
         await Server.client.stop();
-        startServer();
+        await startServer();
     }
 }
@@ -1,3 +1,4 @@
+import { lookpath } from 'lookpath';
 import { homedir } from 'os';
 import * as lc from 'vscode-languageclient';
 
@@ -17,7 +18,7 @@ export class Server {
     public static config = new Config();
     public static client: lc.LanguageClient;
 
-    public static start(
+    public static async start(
         notificationHandlers: Iterable<[string, lc.GenericNotificationHandler]>
     ) {
         // '.' Is the fallback if no folder is open
@@ -27,8 +28,14 @@ export class Server {
             folder = workspace.workspaceFolders[0].uri.fsPath.toString();
         }
 
+        const command = expandPathResolving(this.config.raLspServerPath);
+        if (!(await lookpath(command))) {
+            throw new Error(
+                `Cannot find rust-analyzer server \`${command}\` in PATH.`
+            );
+        }
         const run: lc.Executable = {
-            command: expandPathResolving(this.config.raLspServerPath),
+            command,
             options: { cwd: folder }
         };
         const serverOptions: lc.ServerOptions = {