Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-27 20:35:09 +00:00

Commit d6f9e8806a: Remove unintentional changes
Parent: dc32f756e0

6 changed files with 13 additions and 16 deletions
Cargo.lock (generated)

@@ -1163,7 +1163,6 @@ dependencies = [
  "ra_ide_db",
  "ra_parser",
  "ra_prof",
- "ra_project_model",
  "ra_ssr",
  "ra_syntax",
  "ra_text_edit",
crates/ra_hir/src/code_model.rs

@@ -186,17 +186,18 @@ impl ModuleDef {
         module.visibility_of(db, self)
     }

-    pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
         match self {
-            ModuleDef::Module(m) => m.name(db),
-            ModuleDef::Function(m) => Some(m.name(db)),
-            ModuleDef::Adt(m) => Some(m.name(db)),
-            ModuleDef::EnumVariant(m) => Some(m.name(db)),
-            ModuleDef::Const(m) => m.name(db),
-            ModuleDef::Static(m) => m.name(db),
-            ModuleDef::Trait(m) => Some(m.name(db)),
-            ModuleDef::TypeAlias(m) => Some(m.name(db)),
-            ModuleDef::BuiltinType(m) => Some(m.as_name()),
+            ModuleDef::Adt(it) => Some(it.name(db)),
+            ModuleDef::Trait(it) => Some(it.name(db)),
+            ModuleDef::Function(it) => Some(it.name(db)),
+            ModuleDef::EnumVariant(it) => Some(it.name(db)),
+            ModuleDef::TypeAlias(it) => Some(it.name(db)),
+            ModuleDef::Module(it) => it.name(db),
+            ModuleDef::Const(it) => it.name(db),
+            ModuleDef::Static(it) => it.name(db),
+            ModuleDef::BuiltinType(it) => Some(it.as_name()),
         }
     }

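Note: the reverted signature takes `self` by value rather than `&self`. `ModuleDef` derives `Copy` in rust-analyzer, so a by-value receiver costs nothing and avoids a needless borrow; the `it` binding name matches the crate's prevailing style. A minimal standalone sketch of the pattern, using a made-up `Def` enum rather than rust-analyzer's actual type:

// Sketch only: a small Copy enum, like hir's ModuleDef, can take
// `self` by value. `Def` and `id` are hypothetical names.
#[derive(Clone, Copy)]
enum Def {
    Module(u32),
    Function(u32),
}

impl Def {
    // By-value receiver: the enum is copied into the method.
    fn id(self) -> u32 {
        match self {
            Def::Module(it) => it,
            Def::Function(it) => it,
        }
    }
}

fn main() {
    let f = Def::Function(42);
    let m = Def::Module(1);
    assert_eq!(f.id(), 42);
    assert_eq!(m.id(), 1);
    assert_eq!((&f).id(), 42); // calls still work through a reference
}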
crates/ra_ide/Cargo.toml

@@ -35,7 +35,6 @@ ra_prof = { path = "../ra_prof" }
 test_utils = { path = "../test_utils" }
 ra_assists = { path = "../ra_assists" }
 ra_ssr = { path = "../ra_ssr" }
-ra_project_model = { path = "../ra_project_model" }
 ra_hir_def = { path = "../ra_hir_def" }
 ra_tt = { path = "../ra_tt" }
 ra_parser = { path = "../ra_parser" }
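This Cargo.toml removal is the source of the Cargo.lock hunk above: dropping `ra_project_model` from the crate's `[dependencies]` causes cargo to regenerate the lock file's dependency list for this crate without that entry.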
crates/ra_proc_macro_srv/src/tests/mod.rs

@@ -6,7 +6,6 @@ use test_utils::assert_eq_text;
 use utils::*;

 #[test]
-#[ignore]
 fn test_derive_serialize_proc_macro() {
     assert_expand(
         "serde_derive",
@@ -18,7 +17,6 @@ fn test_derive_serialize_proc_macro() {
 }

 #[test]
-#[ignore]
 fn test_derive_serialize_proc_macro_failed() {
     assert_expand(
         "serde_derive",
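Both hunks drop `#[ignore]`, so these serde_derive expansion tests run again under a plain `cargo test`. For reference, this is the standard test-harness attribute; a sketch with made-up test names:

// `#[ignore]` semantics in Rust's built-in test harness:
// ignored tests are skipped by `cargo test` and run only with
// `cargo test -- --ignored` (or `-- --include-ignored`).
#[test]
#[ignore]
fn expensive_test() {
    // long-running work would go here
}

#[test]
fn cheap_test() {
    assert_eq!(1 + 1, 2);
}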
crates/ra_syntax/src/lib.rs

@@ -47,7 +47,7 @@ use crate::syntax_node::GreenNode;
 pub use crate::{
     algo::InsertPosition,
     ast::{AstNode, AstToken},
-    parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, parse_text, tokenize, Token},
+    parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
     ptr::{AstPtr, SyntaxNodePtr},
     syntax_error::SyntaxError,
     syntax_node::{
crates/ra_syntax/src/parsing.rs

@@ -15,7 +15,7 @@ pub use lexer::*;
 pub(crate) use self::reparsing::incremental_reparse;
 use ra_parser::SyntaxKind;

-pub fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let (tokens, lexer_errors) = tokenize(&text);

     let mut token_source = TextTokenSource::new(text, &tokens);
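This hunk pairs with the lib.rs change above: `parse_text` is removed from the crate's public re-exports and narrowed to `pub(crate)`, so it stays callable anywhere inside ra_syntax while downstream crates keep going through the public parsing entry points (such as `SourceFile::parse`). A minimal sketch of the visibility pattern, with made-up names:

mod parsing {
    // `pub(crate)` items are visible throughout the defining crate,
    // but never to downstream crates.
    pub(crate) fn parse_text(text: &str) -> usize {
        text.len() // stand-in for real parsing work
    }
}

// The public surface stays narrow: downstream code calls this
// wrapper instead of the crate-private parser.
pub fn parse(text: &str) -> usize {
    parsing::parse_text(text)
}

fn main() {
    assert_eq!(parse("fn main() {}"), 12);
}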