Deny unreachable-pub

It's very useful when `pub` is equivalent to "this is part of the crate's
public API" — let's enforce this!

Ideally, we should enforce it for local `cargo test`, and only during
CI, but that needs https://github.com/rust-lang/cargo/issues/5034.
This commit is contained in:
Aleksey Kladov 2020-11-02 13:13:32 +01:00
parent e7f90866bc
commit b610118453
34 changed files with 157 additions and 158 deletions

View file

@ -12,7 +12,7 @@ env:
CARGO_NET_RETRY: 10 CARGO_NET_RETRY: 10
CI: 1 CI: 1
RUST_BACKTRACE: short RUST_BACKTRACE: short
RUSTFLAGS: -D warnings RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10 RUSTUP_MAX_RETRIES: 10
jobs: jobs:

View file

@ -7,7 +7,7 @@ on:
env: env:
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10 CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10 RUSTUP_MAX_RETRIES: 10
jobs: jobs:

View file

@ -11,7 +11,7 @@ on:
env: env:
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10 CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10 RUSTUP_MAX_RETRIES: 10
jobs: jobs:

View file

@ -7,7 +7,7 @@ on:
env: env:
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10 CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10 RUSTUP_MAX_RETRIES: 10
jobs: jobs:

View file

@ -12,9 +12,7 @@ use cfg::CfgOptions;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr; use syntax::SmolStr;
use tt::TokenExpander; use tt::TokenExpander;
use vfs::{file_set::FileSet, VfsPath}; use vfs::{file_set::FileSet, FileId, VfsPath};
pub use vfs::FileId;
/// Files are grouped into source roots. A source root is a directory on the /// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a /// file systems which is watched for changes. Typically it corresponds to a

View file

@ -14,11 +14,11 @@ pub use crate::{
change::Change, change::Change,
input::{ input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, Dependency, Edition, Env,
FileId, ProcMacroId, SourceRoot, SourceRootId, ProcMacroId, SourceRoot, SourceRootId,
}, },
}; };
pub use salsa; pub use salsa;
pub use vfs::{file_set::FileSet, VfsPath}; pub use vfs::{file_set::FileSet, FileId, VfsPath};
#[macro_export] #[macro_export]
macro_rules! impl_intern_key { macro_rules! impl_intern_key {

View file

@ -45,14 +45,14 @@ pub(crate) struct LowerCtx {
} }
impl LowerCtx { impl LowerCtx {
pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self { pub(crate) fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) } LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) }
} }
pub fn with_hygiene(hygiene: &Hygiene) -> Self { pub(crate) fn with_hygiene(hygiene: &Hygiene) -> Self {
LowerCtx { hygiene: hygiene.clone() } LowerCtx { hygiene: hygiene.clone() }
} }
pub fn lower_path(&self, ast: ast::Path) -> Option<Path> { pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, &self.hygiene) Path::from_src(ast, &self.hygiene)
} }
} }

View file

@ -486,12 +486,12 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
/// Helper wrapper for `AstId` with `ModPath` /// Helper wrapper for `AstId` with `ModPath`
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
struct AstIdWithPath<T: ast::AstNode> { struct AstIdWithPath<T: ast::AstNode> {
pub ast_id: AstId<T>, ast_id: AstId<T>,
pub path: path::ModPath, path: path::ModPath,
} }
impl<T: ast::AstNode> AstIdWithPath<T> { impl<T: ast::AstNode> AstIdWithPath<T> {
pub fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> { fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
} }
} }

View file

@ -122,13 +122,13 @@ enum ImportSource {
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
struct Import { struct Import {
pub path: ModPath, path: ModPath,
pub alias: Option<ImportAlias>, alias: Option<ImportAlias>,
pub visibility: RawVisibility, visibility: RawVisibility,
pub is_glob: bool, is_glob: bool,
pub is_prelude: bool, is_prelude: bool,
pub is_extern_crate: bool, is_extern_crate: bool,
pub is_macro_use: bool, is_macro_use: bool,
source: ImportSource, source: ImportSource,
} }

View file

@ -25,7 +25,7 @@ use crate::{db::DefDatabase, ModuleDefId};
crate::db::DefDatabaseStorage crate::db::DefDatabaseStorage
)] )]
#[derive(Default)] #[derive(Default)]
pub struct TestDB { pub(crate) struct TestDB {
storage: salsa::Storage<TestDB>, storage: salsa::Storage<TestDB>,
events: Mutex<Option<Vec<salsa::Event>>>, events: Mutex<Option<Vec<salsa::Event>>>,
} }
@ -72,7 +72,7 @@ impl FileLoader for TestDB {
} }
impl TestDB { impl TestDB {
pub fn module_for_file(&self, file_id: FileId) -> crate::ModuleId { pub(crate) fn module_for_file(&self, file_id: FileId) -> crate::ModuleId {
for &krate in self.relevant_crates(file_id).iter() { for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules.iter() { for (local_id, data) in crate_def_map.modules.iter() {
@ -84,13 +84,13 @@ impl TestDB {
panic!("Can't find module for file") panic!("Can't find module for file")
} }
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> { pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new()); *self.events.lock().unwrap() = Some(Vec::new());
f(); f();
self.events.lock().unwrap().take().unwrap() self.events.lock().unwrap().take().unwrap()
} }
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> { pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
let events = self.log(f); let events = self.log(f);
events events
.into_iter() .into_iter()
@ -105,7 +105,7 @@ impl TestDB {
.collect() .collect()
} }
pub fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> { pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
let mut files = Vec::new(); let mut files = Vec::new();
let crate_graph = self.crate_graph(); let crate_graph = self.crate_graph();
for krate in crate_graph.iter() { for krate in crate_graph.iter() {
@ -129,7 +129,7 @@ impl TestDB {
.collect() .collect()
} }
pub fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) { pub(crate) fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
let crate_graph = self.crate_graph(); let crate_graph = self.crate_graph();
for krate in crate_graph.iter() { for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
@ -148,7 +148,7 @@ impl TestDB {
} }
} }
pub fn check_diagnostics(&self) { pub(crate) fn check_diagnostics(&self) {
let db: &TestDB = self; let db: &TestDB = self;
let annotations = db.extract_annotations(); let annotations = db.extract_annotations();
assert!(!annotations.is_empty()); assert!(!annotations.is_empty());

View file

@ -6,7 +6,7 @@
/// Converts an identifier to an UpperCamelCase form. /// Converts an identifier to an UpperCamelCase form.
/// Returns `None` if the string is already is UpperCamelCase. /// Returns `None` if the string is already is UpperCamelCase.
pub fn to_camel_case(ident: &str) -> Option<String> { pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
if is_camel_case(ident) { if is_camel_case(ident) {
return None; return None;
} }
@ -59,7 +59,7 @@ pub fn to_camel_case(ident: &str) -> Option<String> {
/// Converts an identifier to a lower_snake_case form. /// Converts an identifier to a lower_snake_case form.
/// Returns `None` if the string is already in lower_snake_case. /// Returns `None` if the string is already in lower_snake_case.
pub fn to_lower_snake_case(ident: &str) -> Option<String> { pub(crate) fn to_lower_snake_case(ident: &str) -> Option<String> {
if is_lower_snake_case(ident) { if is_lower_snake_case(ident) {
return None; return None;
} else if is_upper_snake_case(ident) { } else if is_upper_snake_case(ident) {
@ -71,7 +71,7 @@ pub fn to_lower_snake_case(ident: &str) -> Option<String> {
/// Converts an identifier to an UPPER_SNAKE_CASE form. /// Converts an identifier to an UPPER_SNAKE_CASE form.
/// Returns `None` if the string is already is UPPER_SNAKE_CASE. /// Returns `None` if the string is already is UPPER_SNAKE_CASE.
pub fn to_upper_snake_case(ident: &str) -> Option<String> { pub(crate) fn to_upper_snake_case(ident: &str) -> Option<String> {
if is_upper_snake_case(ident) { if is_upper_snake_case(ident) {
return None; return None;
} else if is_lower_snake_case(ident) { } else if is_lower_snake_case(ident) {

View file

@ -17,17 +17,10 @@ use crate::{
ApplicationTy, InferenceResult, Ty, TypeCtor, ApplicationTy, InferenceResult, Ty, TypeCtor,
}; };
pub use hir_def::{ pub(crate) use hir_def::{
body::{ body::{Body, BodySourceMap},
scope::{ExprScopes, ScopeEntry, ScopeId}, expr::{Expr, ExprId, MatchArm, Pat, PatId},
Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource, LocalFieldId, VariantId,
},
expr::{
ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
},
src::HasSource,
LocalFieldId, Lookup, VariantId,
}; };
pub(super) struct ExprValidator<'a, 'b: 'a> { pub(super) struct ExprValidator<'a, 'b: 'a> {

View file

@ -59,12 +59,12 @@ impl<'a, 'b> UnsafeValidator<'a, 'b> {
} }
} }
pub struct UnsafeExpr { pub(crate) struct UnsafeExpr {
pub expr: ExprId, pub(crate) expr: ExprId,
pub inside_unsafe_block: bool, pub(crate) inside_unsafe_block: bool,
} }
pub fn unsafe_expressions( pub(crate) fn unsafe_expressions(
db: &dyn HirDatabase, db: &dyn HirDatabase,
infer: &InferenceResult, infer: &InferenceResult,
def: DefWithBodyId, def: DefWithBodyId,

View file

@ -214,9 +214,9 @@ struct InferenceContext<'a> {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct BreakableContext { struct BreakableContext {
pub may_break: bool, may_break: bool,
pub break_ty: Ty, break_ty: Ty,
pub label: Option<name::Name>, label: Option<name::Name>,
} }
fn find_breakable<'c>( fn find_breakable<'c>(

View file

@ -107,7 +107,7 @@ impl<'a> InferenceContext<'a> {
} }
} }
pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> { pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
match ty.callable_sig(self.db) { match ty.callable_sig(self.db) {
Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
None => self.callable_sig_from_fn_trait(ty, num_args), None => self.callable_sig_from_fn_trait(ty, num_args),

View file

@ -127,7 +127,7 @@ where
} }
impl<T> Canonicalized<T> { impl<T> Canonicalized<T> {
pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty { pub(super) fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
ty.walk_mut_binders( ty.walk_mut_binders(
&mut |ty, binders| { &mut |ty, binders| {
if let &mut Ty::Bound(bound) = ty { if let &mut Ty::Bound(bound) = ty {
@ -141,7 +141,11 @@ impl<T> Canonicalized<T> {
ty ty
} }
pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) { pub(super) fn apply_solution(
&self,
ctx: &mut InferenceContext<'_>,
solution: Canonical<Substs>,
) {
// the solution may contain new variables, which we need to convert to new inference vars // the solution may contain new variables, which we need to convert to new inference vars
let new_vars = Substs( let new_vars = Substs(
solution solution
@ -164,7 +168,7 @@ impl<T> Canonicalized<T> {
} }
} }
pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> { pub(crate) fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
let mut table = InferenceTable::new(); let mut table = InferenceTable::new();
let vars = Substs( let vars = Substs(
tys.kinds tys.kinds
@ -199,41 +203,46 @@ pub(crate) struct InferenceTable {
} }
impl InferenceTable { impl InferenceTable {
pub fn new() -> Self { pub(crate) fn new() -> Self {
InferenceTable { var_unification_table: InPlaceUnificationTable::new() } InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
} }
pub fn new_type_var(&mut self) -> Ty { pub(crate) fn new_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
} }
pub fn new_integer_var(&mut self) -> Ty { pub(crate) fn new_integer_var(&mut self) -> Ty {
Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
} }
pub fn new_float_var(&mut self) -> Ty { pub(crate) fn new_float_var(&mut self) -> Ty {
Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
} }
pub fn new_maybe_never_type_var(&mut self) -> Ty { pub(crate) fn new_maybe_never_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::MaybeNeverTypeVar( Ty::Infer(InferTy::MaybeNeverTypeVar(
self.var_unification_table.new_key(TypeVarValue::Unknown), self.var_unification_table.new_key(TypeVarValue::Unknown),
)) ))
} }
pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty { pub(crate) fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
self.resolve_ty_completely_inner(&mut Vec::new(), ty) self.resolve_ty_completely_inner(&mut Vec::new(), ty)
} }
pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty { pub(crate) fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
self.resolve_ty_as_possible_inner(&mut Vec::new(), ty) self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
} }
pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
self.unify_inner(ty1, ty2, 0) self.unify_inner(ty1, ty2, 0)
} }
pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool { pub(crate) fn unify_substs(
&mut self,
substs1: &Substs,
substs2: &Substs,
depth: usize,
) -> bool {
substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth)) substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
} }
@ -331,7 +340,7 @@ impl InferenceTable {
/// If `ty` is a type variable with known type, returns that type; /// If `ty` is a type variable with known type, returns that type;
/// otherwise, return ty. /// otherwise, return ty.
pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> { pub(crate) fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
let mut ty = Cow::Borrowed(ty); let mut ty = Cow::Borrowed(ty);
// The type variable could resolve to a int/float variable. Hence try // The type variable could resolve to a int/float variable. Hence try
// resolving up to three times; each type of variable shouldn't occur // resolving up to three times; each type of variable shouldn't occur

View file

@ -1,6 +1,5 @@
//! The type system. We currently use this to infer types for completion, hover //! The type system. We currently use this to infer types for completion, hover
//! information and various assists. //! information and various assists.
#[allow(unused)] #[allow(unused)]
macro_rules! eprintln { macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
@ -1115,5 +1114,5 @@ pub struct ReturnTypeImplTraits {
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub(crate) struct ReturnTypeImplTrait { pub(crate) struct ReturnTypeImplTrait {
pub bounds: Binders<Vec<GenericPredicate>>, pub(crate) bounds: Binders<Vec<GenericPredicate>>,
} }

View file

@ -21,7 +21,7 @@ use test_utils::extract_annotations;
crate::db::HirDatabaseStorage crate::db::HirDatabaseStorage
)] )]
#[derive(Default)] #[derive(Default)]
pub struct TestDB { pub(crate) struct TestDB {
storage: salsa::Storage<TestDB>, storage: salsa::Storage<TestDB>,
events: Mutex<Option<Vec<salsa::Event>>>, events: Mutex<Option<Vec<salsa::Event>>>,
} }
@ -113,13 +113,13 @@ impl TestDB {
} }
impl TestDB { impl TestDB {
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> { pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new()); *self.events.lock().unwrap() = Some(Vec::new());
f(); f();
self.events.lock().unwrap().take().unwrap() self.events.lock().unwrap().take().unwrap()
} }
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> { pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
let events = self.log(f); let events = self.log(f);
events events
.into_iter() .into_iter()

View file

@ -8,12 +8,12 @@ use super::{from_chalk, Interner, TypeAliasAsAssocType};
use crate::{db::HirDatabase, CallableDefId}; use crate::{db::HirDatabase, CallableDefId};
use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId}; use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId};
pub use unsafe_tls::{set_current_program, with_current_program}; pub(crate) use unsafe_tls::{set_current_program, with_current_program};
pub struct DebugContext<'a>(&'a dyn HirDatabase); pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase);
impl DebugContext<'_> { impl DebugContext<'_> {
pub fn debug_struct_id( pub(crate) fn debug_struct_id(
&self, &self,
id: super::AdtId, id: super::AdtId,
f: &mut fmt::Formatter<'_>, f: &mut fmt::Formatter<'_>,
@ -26,7 +26,7 @@ impl DebugContext<'_> {
write!(f, "{}", name) write!(f, "{}", name)
} }
pub fn debug_trait_id( pub(crate) fn debug_trait_id(
&self, &self,
id: super::TraitId, id: super::TraitId,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -36,7 +36,7 @@ impl DebugContext<'_> {
write!(fmt, "{}", trait_data.name) write!(fmt, "{}", trait_data.name)
} }
pub fn debug_assoc_type_id( pub(crate) fn debug_assoc_type_id(
&self, &self,
id: super::AssocTypeId, id: super::AssocTypeId,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -51,7 +51,7 @@ impl DebugContext<'_> {
write!(fmt, "{}::{}", trait_data.name, type_alias_data.name) write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
} }
pub fn debug_opaque_ty_id( pub(crate) fn debug_opaque_ty_id(
&self, &self,
opaque_ty_id: chalk_ir::OpaqueTyId<Interner>, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -59,7 +59,7 @@ impl DebugContext<'_> {
fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish() fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
} }
pub fn debug_alias( pub(crate) fn debug_alias(
&self, &self,
alias_ty: &AliasTy<Interner>, alias_ty: &AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -70,7 +70,7 @@ impl DebugContext<'_> {
} }
} }
pub fn debug_projection_ty( pub(crate) fn debug_projection_ty(
&self, &self,
projection_ty: &chalk_ir::ProjectionTy<Interner>, projection_ty: &chalk_ir::ProjectionTy<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -95,7 +95,7 @@ impl DebugContext<'_> {
write!(fmt, ">::{}", type_alias_data.name) write!(fmt, ">::{}", type_alias_data.name)
} }
pub fn debug_opaque_ty( pub(crate) fn debug_opaque_ty(
&self, &self,
opaque_ty: &chalk_ir::OpaqueTy<Interner>, opaque_ty: &chalk_ir::OpaqueTy<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -103,7 +103,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", opaque_ty.opaque_ty_id) write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
} }
pub fn debug_ty( pub(crate) fn debug_ty(
&self, &self,
ty: &chalk_ir::Ty<Interner>, ty: &chalk_ir::Ty<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -111,7 +111,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", ty.data(&Interner)) write!(fmt, "{:?}", ty.data(&Interner))
} }
pub fn debug_lifetime( pub(crate) fn debug_lifetime(
&self, &self,
lifetime: &Lifetime<Interner>, lifetime: &Lifetime<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -119,7 +119,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", lifetime.data(&Interner)) write!(fmt, "{:?}", lifetime.data(&Interner))
} }
pub fn debug_generic_arg( pub(crate) fn debug_generic_arg(
&self, &self,
parameter: &GenericArg<Interner>, parameter: &GenericArg<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -127,7 +127,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", parameter.data(&Interner).inner_debug()) write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
} }
pub fn debug_goal( pub(crate) fn debug_goal(
&self, &self,
goal: &Goal<Interner>, goal: &Goal<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -136,7 +136,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", goal_data) write!(fmt, "{:?}", goal_data)
} }
pub fn debug_goals( pub(crate) fn debug_goals(
&self, &self,
goals: &Goals<Interner>, goals: &Goals<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -144,7 +144,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", goals.debug(&Interner)) write!(fmt, "{:?}", goals.debug(&Interner))
} }
pub fn debug_program_clause_implication( pub(crate) fn debug_program_clause_implication(
&self, &self,
pci: &ProgramClauseImplication<Interner>, pci: &ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -152,7 +152,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", pci.debug(&Interner)) write!(fmt, "{:?}", pci.debug(&Interner))
} }
pub fn debug_substitution( pub(crate) fn debug_substitution(
&self, &self,
substitution: &chalk_ir::Substitution<Interner>, substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -160,7 +160,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", substitution.debug(&Interner)) write!(fmt, "{:?}", substitution.debug(&Interner))
} }
pub fn debug_separator_trait_ref( pub(crate) fn debug_separator_trait_ref(
&self, &self,
separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>, separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -168,7 +168,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", separator_trait_ref.debug(&Interner)) write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
} }
pub fn debug_fn_def_id( pub(crate) fn debug_fn_def_id(
&self, &self,
fn_def_id: chalk_ir::FnDefId<Interner>, fn_def_id: chalk_ir::FnDefId<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -190,7 +190,7 @@ impl DebugContext<'_> {
} }
} }
pub fn debug_const( pub(crate) fn debug_const(
&self, &self,
_constant: &chalk_ir::Const<Interner>, _constant: &chalk_ir::Const<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -198,42 +198,42 @@ impl DebugContext<'_> {
write!(fmt, "const") write!(fmt, "const")
} }
pub fn debug_variable_kinds( pub(crate) fn debug_variable_kinds(
&self, &self,
variable_kinds: &chalk_ir::VariableKinds<Interner>, variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result { ) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.as_slice(&Interner)) write!(fmt, "{:?}", variable_kinds.as_slice(&Interner))
} }
pub fn debug_variable_kinds_with_angles( pub(crate) fn debug_variable_kinds_with_angles(
&self, &self,
variable_kinds: &chalk_ir::VariableKinds<Interner>, variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result { ) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner)) write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner))
} }
pub fn debug_canonical_var_kinds( pub(crate) fn debug_canonical_var_kinds(
&self, &self,
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>, canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result { ) -> fmt::Result {
write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner)) write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner))
} }
pub fn debug_program_clause( pub(crate) fn debug_program_clause(
&self, &self,
clause: &chalk_ir::ProgramClause<Interner>, clause: &chalk_ir::ProgramClause<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result { ) -> fmt::Result {
write!(fmt, "{:?}", clause.data(&Interner)) write!(fmt, "{:?}", clause.data(&Interner))
} }
pub fn debug_program_clauses( pub(crate) fn debug_program_clauses(
&self, &self,
clauses: &chalk_ir::ProgramClauses<Interner>, clauses: &chalk_ir::ProgramClauses<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result { ) -> fmt::Result {
write!(fmt, "{:?}", clauses.as_slice(&Interner)) write!(fmt, "{:?}", clauses.as_slice(&Interner))
} }
pub fn debug_quantified_where_clauses( pub(crate) fn debug_quantified_where_clauses(
&self, &self,
clauses: &chalk_ir::QuantifiedWhereClauses<Interner>, clauses: &chalk_ir::QuantifiedWhereClauses<Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
@ -249,7 +249,7 @@ mod unsafe_tls {
scoped_thread_local!(static PROGRAM: DebugContext); scoped_thread_local!(static PROGRAM: DebugContext);
pub fn with_current_program<R>( pub(crate) fn with_current_program<R>(
op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R, op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
) -> R { ) -> R {
if PROGRAM.is_set() { if PROGRAM.is_set() {
@ -259,7 +259,7 @@ mod unsafe_tls {
} }
} }
pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R pub(crate) fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
where where
OP: FnOnce() -> R, OP: FnOnce() -> R,
{ {

View file

@ -61,16 +61,16 @@ macro_rules! err {
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(super) struct Match { pub(super) struct Match {
pub bindings: Bindings, pub(super) bindings: Bindings,
/// We currently just keep the first error and count the rest to compare matches. /// We currently just keep the first error and count the rest to compare matches.
pub err: Option<ExpandError>, pub(super) err: Option<ExpandError>,
pub err_count: usize, pub(super) err_count: usize,
/// How many top-level token trees were left to match. /// How many top-level token trees were left to match.
pub unmatched_tts: usize, pub(super) unmatched_tts: usize,
} }
impl Match { impl Match {
pub fn add_err(&mut self, err: ExpandError) { pub(super) fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take(); let prev_err = self.err.take();
self.err = prev_err.or(Some(err)); self.err = prev_err.or(Some(err));
self.err_count += 1; self.err_count += 1;

View file

@ -7,9 +7,9 @@ use tt::buffer::{Cursor, TokenBuffer};
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
struct TtToken { struct TtToken {
pub kind: SyntaxKind, kind: SyntaxKind,
pub is_joint_to_next: bool, is_joint_to_next: bool,
pub text: SmolStr, text: SmolStr,
} }
pub(crate) struct SubtreeTokenSource<'a> { pub(crate) struct SubtreeTokenSource<'a> {
@ -30,7 +30,7 @@ impl<'a> SubtreeTokenSource<'a> {
} }
impl<'a> SubtreeTokenSource<'a> { impl<'a> SubtreeTokenSource<'a> {
pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
let cursor = buffer.begin(); let cursor = buffer.begin();
let mut res = SubtreeTokenSource { let mut res = SubtreeTokenSource {

View file

@ -30,7 +30,7 @@ pub(crate) struct ProcMacroProcessThread {
} }
impl ProcMacroProcessSrv { impl ProcMacroProcessSrv {
pub fn run( pub(crate) fn run(
process_path: PathBuf, process_path: PathBuf,
args: impl IntoIterator<Item = impl AsRef<OsStr>>, args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> { ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> {
@ -48,7 +48,7 @@ impl ProcMacroProcessSrv {
Ok((thread, srv)) Ok((thread, srv))
} }
pub fn find_proc_macros( pub(crate) fn find_proc_macros(
&self, &self,
dylib_path: &Path, dylib_path: &Path,
) -> Result<Vec<(String, ProcMacroKind)>, tt::ExpansionError> { ) -> Result<Vec<(String, ProcMacroKind)>, tt::ExpansionError> {
@ -58,7 +58,7 @@ impl ProcMacroProcessSrv {
Ok(result.macros) Ok(result.macros)
} }
pub fn custom_derive( pub(crate) fn custom_derive(
&self, &self,
dylib_path: &Path, dylib_path: &Path,
subtree: &Subtree, subtree: &Subtree,
@ -75,7 +75,7 @@ impl ProcMacroProcessSrv {
Ok(result.expansion) Ok(result.expansion)
} }
pub fn send_task<R>(&self, req: Request) -> Result<R, tt::ExpansionError> pub(crate) fn send_task<R>(&self, req: Request) -> Result<R, tt::ExpansionError>
where where
R: TryFrom<Response, Error = &'static str>, R: TryFrom<Response, Error = &'static str>,
{ {

View file

@ -75,18 +75,18 @@ struct TokenIdDef(u32);
#[serde(remote = "Delimiter")] #[serde(remote = "Delimiter")]
struct DelimiterDef { struct DelimiterDef {
#[serde(with = "TokenIdDef")] #[serde(with = "TokenIdDef")]
pub id: TokenId, id: TokenId,
#[serde(with = "DelimiterKindDef")] #[serde(with = "DelimiterKindDef")]
pub kind: DelimiterKind, kind: DelimiterKind,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(remote = "Subtree")] #[serde(remote = "Subtree")]
struct SubtreeDef { struct SubtreeDef {
#[serde(default, with = "opt_delimiter_def")] #[serde(default, with = "opt_delimiter_def")]
pub delimiter: Option<Delimiter>, delimiter: Option<Delimiter>,
#[serde(with = "vec_token_tree")] #[serde(with = "vec_token_tree")]
pub token_trees: Vec<TokenTree>, token_trees: Vec<TokenTree>,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -112,19 +112,19 @@ enum LeafDef {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(remote = "Literal")] #[serde(remote = "Literal")]
struct LiteralDef { struct LiteralDef {
pub text: SmolStr, text: SmolStr,
#[serde(with = "TokenIdDef")] #[serde(with = "TokenIdDef")]
pub id: TokenId, id: TokenId,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(remote = "Punct")] #[serde(remote = "Punct")]
struct PunctDef { struct PunctDef {
pub char: char, char: char,
#[serde(with = "SpacingDef")] #[serde(with = "SpacingDef")]
pub spacing: Spacing, spacing: Spacing,
#[serde(with = "TokenIdDef")] #[serde(with = "TokenIdDef")]
pub id: TokenId, id: TokenId,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
@ -137,16 +137,16 @@ enum SpacingDef {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(remote = "Ident")] #[serde(remote = "Ident")]
struct IdentDef { struct IdentDef {
pub text: SmolStr, text: SmolStr,
#[serde(with = "TokenIdDef")] #[serde(with = "TokenIdDef")]
pub id: TokenId, id: TokenId,
} }
mod opt_delimiter_def { mod opt_delimiter_def {
use super::{Delimiter, DelimiterDef}; use super::{Delimiter, DelimiterDef};
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S>(value: &Option<Delimiter>, serializer: S) -> Result<S::Ok, S::Error> pub(super) fn serialize<S>(value: &Option<Delimiter>, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
@ -155,7 +155,7 @@ mod opt_delimiter_def {
value.as_ref().map(Helper).serialize(serializer) value.as_ref().map(Helper).serialize(serializer)
} }
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Delimiter>, D::Error> pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Option<Delimiter>, D::Error>
where where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
@ -170,7 +170,7 @@ mod opt_subtree_def {
use super::{Subtree, SubtreeDef}; use super::{Subtree, SubtreeDef};
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S>(value: &Option<Subtree>, serializer: S) -> Result<S::Ok, S::Error> pub(super) fn serialize<S>(value: &Option<Subtree>, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
@ -179,7 +179,7 @@ mod opt_subtree_def {
value.as_ref().map(Helper).serialize(serializer) value.as_ref().map(Helper).serialize(serializer)
} }
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Subtree>, D::Error> pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Option<Subtree>, D::Error>
where where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
@ -194,7 +194,7 @@ mod vec_token_tree {
use super::{TokenTree, TokenTreeDef}; use super::{TokenTree, TokenTreeDef};
use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer}; use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S>(value: &Vec<TokenTree>, serializer: S) -> Result<S::Ok, S::Error> pub(super) fn serialize<S>(value: &Vec<TokenTree>, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
@ -209,7 +209,7 @@ mod vec_token_tree {
seq.end() seq.end()
} }
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TokenTree>, D::Error> pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TokenTree>, D::Error>
where where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {

View file

@ -9,6 +9,7 @@
//! RA than `proc-macro2` token stream. //! RA than `proc-macro2` token stream.
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)… //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
#![allow(unreachable_pub)]
#[allow(dead_code)] #[allow(dead_code)]
#[doc(hidden)] #[doc(hidden)]

View file

@ -27,7 +27,7 @@ pub fn init_from(spec: &str) {
filter.install(); filter.install();
} }
pub type Label = &'static str; type Label = &'static str;
/// This function starts a profiling scope in the current execution stack with a given description. /// This function starts a profiling scope in the current execution stack with a given description.
/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop. /// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop.
@ -173,7 +173,7 @@ impl ProfileStack {
true true
} }
pub fn pop(&mut self, label: Label, detail: Option<String>) { fn pop(&mut self, label: Label, detail: Option<String>) {
let start = self.starts.pop().unwrap(); let start = self.starts.pop().unwrap();
let duration = start.elapsed(); let duration = start.elapsed();
self.messages.finish(Message { duration, label, detail }); self.messages.finish(Message { duration, label, detail });

View file

@ -4,15 +4,15 @@ use std::ops;
use arena::Arena; use arena::Arena;
#[derive(Default)] #[derive(Default)]
pub struct Tree<T> { pub(crate) struct Tree<T> {
nodes: Arena<Node<T>>, nodes: Arena<Node<T>>,
current_path: Vec<(Idx<T>, Option<Idx<T>>)>, current_path: Vec<(Idx<T>, Option<Idx<T>>)>,
} }
pub type Idx<T> = arena::Idx<Node<T>>; pub(crate) type Idx<T> = arena::Idx<Node<T>>;
impl<T> Tree<T> { impl<T> Tree<T> {
pub fn start(&mut self) pub(crate) fn start(&mut self)
where where
T: Default, T: Default,
{ {
@ -30,19 +30,19 @@ impl<T> Tree<T> {
self.current_path.push((me, None)); self.current_path.push((me, None));
} }
pub fn finish(&mut self, data: T) { pub(crate) fn finish(&mut self, data: T) {
let (me, _last_child) = self.current_path.pop().unwrap(); let (me, _last_child) = self.current_path.pop().unwrap();
self.nodes[me].data = data; self.nodes[me].data = data;
} }
pub fn root(&self) -> Option<Idx<T>> { pub(crate) fn root(&self) -> Option<Idx<T>> {
self.nodes.iter().next().map(|(idx, _)| idx) self.nodes.iter().next().map(|(idx, _)| idx)
} }
pub fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ { pub(crate) fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ {
NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child } NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child }
} }
pub fn clear(&mut self) { pub(crate) fn clear(&mut self) {
self.nodes.clear(); self.nodes.clear();
self.current_path.clear(); self.current_path.clear();
} }
@ -55,7 +55,7 @@ impl<T> ops::Index<Idx<T>> for Tree<T> {
} }
} }
pub struct Node<T> { pub(crate) struct Node<T> {
data: T, data: T,
first_child: Option<Idx<T>>, first_child: Option<Idx<T>>,
next_sibling: Option<Idx<T>>, next_sibling: Option<Idx<T>>,

View file

@ -17,7 +17,7 @@ use crate::{
pub use self::{ pub use self::{
expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp}, expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
generated::*, generated::{nodes::*, tokens::*},
node_ext::{ node_ext::{
AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
StructKind, TypeBoundKind, VisibilityKind, StructKind, TypeBoundKind, VisibilityKind,

View file

@ -1,8 +1,8 @@
//! This file is actually hand-written, but the submodules are indeed generated. //! This file is actually hand-written, but the submodules are indeed generated.
#[rustfmt::skip] #[rustfmt::skip]
mod nodes; pub(crate) mod nodes;
#[rustfmt::skip] #[rustfmt::skip]
mod tokens; pub(crate) mod tokens;
use crate::{ use crate::{
AstNode, AstNode,
@ -10,7 +10,7 @@ use crate::{
SyntaxNode, SyntaxNode,
}; };
pub use {nodes::*, tokens::*}; pub(crate) use nodes::*;
// Stmt is the only nested enum, so it's easier to just hand-write it // Stmt is the only nested enum, so it's easier to just hand-write it
impl AstNode for Stmt { impl AstNode for Stmt {

View file

@ -46,16 +46,19 @@ use text_edit::Indel;
pub use crate::{ pub use crate::{
algo::InsertPosition, algo::InsertPosition,
ast::{AstNode, AstToken}, ast::{AstNode, AstToken},
parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token}, parsing::lexer::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
ptr::{AstPtr, SyntaxNodePtr}, ptr::{AstPtr, SyntaxNodePtr},
syntax_error::SyntaxError, syntax_error::SyntaxError,
syntax_node::{ syntax_node::{
Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder, SyntaxTreeBuilder,
}, },
}; };
pub use parser::{SyntaxKind, T}; pub use parser::{SyntaxKind, T};
pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent}; pub use rowan::{
Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
WalkEvent,
};
/// `Parse` is the result of the parsing: a syntax tree and a collection of /// `Parse` is the result of the parsing: a syntax tree and a collection of
/// errors. /// errors.

View file

@ -1,7 +1,7 @@
//! Lexing, bridging to parser (which does the actual parsing) and //! Lexing, bridging to parser (which does the actual parsing) and
//! incremental reparsing. //! incremental reparsing.
mod lexer; pub(crate) mod lexer;
mod text_token_source; mod text_token_source;
mod text_tree_sink; mod text_tree_sink;
mod reparsing; mod reparsing;
@ -10,7 +10,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
use text_token_source::TextTokenSource; use text_token_source::TextTokenSource;
use text_tree_sink::TextTreeSink; use text_tree_sink::TextTreeSink;
pub use lexer::*; pub(crate) use lexer::*;
pub(crate) use self::reparsing::incremental_reparse; pub(crate) use self::reparsing::incremental_reparse;
use parser::SyntaxKind; use parser::SyntaxKind;

View file

@ -65,7 +65,7 @@ fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Tok
impl<'t> TextTokenSource<'t> { impl<'t> TextTokenSource<'t> {
/// Generate input from tokens(expect comment and whitespace). /// Generate input from tokens(expect comment and whitespace).
pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
let token_offset_pairs: Vec<_> = raw_tokens let token_offset_pairs: Vec<_> = raw_tokens
.iter() .iter()
.filter_map({ .filter_map({

View file

@ -10,9 +10,7 @@ use rowan::{GreenNodeBuilder, Language};
use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize}; use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
pub use rowan::GreenNode; pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
pub(crate) use rowan::GreenToken;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum RustLanguage {} pub enum RustLanguage {}
@ -34,8 +32,6 @@ pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>; pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>; pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
pub use rowan::{Direction, NodeOrToken};
#[derive(Default)] #[derive(Default)]
pub struct SyntaxTreeBuilder { pub struct SyntaxTreeBuilder {
errors: Vec<SyntaxError>, errors: Vec<SyntaxError>,

View file

@ -287,7 +287,7 @@ impl VirtualPath {
Some(res) Some(res)
} }
pub fn name_and_extension(&self) -> Option<(&str, Option<&str>)> { pub(crate) fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
let file_path = if self.0.ends_with('/') { &self.0[..&self.0.len() - 1] } else { &self.0 }; let file_path = if self.0.ends_with('/') { &self.0[..&self.0.len() - 1] } else { &self.0 };
let file_name = match file_path.rfind('/') { let file_name = match file_path.rfind('/') {
Some(position) => &file_path[position + 1..], Some(position) => &file_path[position + 1..],

View file

@ -45,15 +45,15 @@ pub fn generate_parser_tests(mode: Mode) -> Result<()> {
#[derive(Debug)] #[derive(Debug)]
struct Test { struct Test {
pub name: String, name: String,
pub text: String, text: String,
pub ok: bool, ok: bool,
} }
#[derive(Default, Debug)] #[derive(Default, Debug)]
struct Tests { struct Tests {
pub ok: HashMap<String, Test>, ok: HashMap<String, Test>,
pub err: HashMap<String, Test>, err: HashMap<String, Test>,
} }
fn collect_tests(s: &str) -> Vec<Test> { fn collect_tests(s: &str) -> Vec<Test> {