6438: Deny unreachable-pub r=matklad a=matklad

It's very useful when `pub` is equivalent to "this is the crate's public
API", so let's enforce this!

Ideally, we wouldn't enforce it for local `cargo test` runs and would only
enforce it during CI, but that needs https://github.com/rust-lang/cargo/issues/5034.


Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2020-11-02 13:08:53 +00:00 committed by GitHub
commit 731b38fa3c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 157 additions and 158 deletions

View file

@ -12,7 +12,7 @@ env:
CARGO_NET_RETRY: 10
CI: 1
RUST_BACKTRACE: short
RUSTFLAGS: -D warnings
RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10
jobs:

View file

@ -7,7 +7,7 @@ on:
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings
RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10
jobs:

View file

@ -11,7 +11,7 @@ on:
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings
RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10
jobs:

View file

@ -7,7 +7,7 @@ on:
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
RUSTFLAGS: -D warnings
RUSTFLAGS: "-D warnings " # -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10
jobs:

View file

@ -12,9 +12,7 @@ use cfg::CfgOptions;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use tt::TokenExpander;
use vfs::{file_set::FileSet, VfsPath};
pub use vfs::FileId;
use vfs::{file_set::FileSet, FileId, VfsPath};
/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a

View file

@ -14,11 +14,11 @@ pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, Dependency, Edition, Env,
FileId, ProcMacroId, SourceRoot, SourceRootId,
ProcMacroId, SourceRoot, SourceRootId,
},
};
pub use salsa;
pub use vfs::{file_set::FileSet, VfsPath};
pub use vfs::{file_set::FileSet, FileId, VfsPath};
#[macro_export]
macro_rules! impl_intern_key {

View file

@ -45,14 +45,14 @@ pub(crate) struct LowerCtx {
}
impl LowerCtx {
pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
pub(crate) fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) }
}
pub fn with_hygiene(hygiene: &Hygiene) -> Self {
pub(crate) fn with_hygiene(hygiene: &Hygiene) -> Self {
LowerCtx { hygiene: hygiene.clone() }
}
pub fn lower_path(&self, ast: ast::Path) -> Option<Path> {
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, &self.hygiene)
}
}

View file

@ -486,12 +486,12 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
/// Helper wrapper for `AstId` with `ModPath`
#[derive(Clone, Debug, Eq, PartialEq)]
struct AstIdWithPath<T: ast::AstNode> {
pub ast_id: AstId<T>,
pub path: path::ModPath,
ast_id: AstId<T>,
path: path::ModPath,
}
impl<T: ast::AstNode> AstIdWithPath<T> {
pub fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
}
}

View file

@ -122,13 +122,13 @@ enum ImportSource {
#[derive(Clone, Debug, Eq, PartialEq)]
struct Import {
pub path: ModPath,
pub alias: Option<ImportAlias>,
pub visibility: RawVisibility,
pub is_glob: bool,
pub is_prelude: bool,
pub is_extern_crate: bool,
pub is_macro_use: bool,
path: ModPath,
alias: Option<ImportAlias>,
visibility: RawVisibility,
is_glob: bool,
is_prelude: bool,
is_extern_crate: bool,
is_macro_use: bool,
source: ImportSource,
}

View file

@ -25,7 +25,7 @@ use crate::{db::DefDatabase, ModuleDefId};
crate::db::DefDatabaseStorage
)]
#[derive(Default)]
pub struct TestDB {
pub(crate) struct TestDB {
storage: salsa::Storage<TestDB>,
events: Mutex<Option<Vec<salsa::Event>>>,
}
@ -72,7 +72,7 @@ impl FileLoader for TestDB {
}
impl TestDB {
pub fn module_for_file(&self, file_id: FileId) -> crate::ModuleId {
pub(crate) fn module_for_file(&self, file_id: FileId) -> crate::ModuleId {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules.iter() {
@ -84,13 +84,13 @@ impl TestDB {
panic!("Can't find module for file")
}
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new());
f();
self.events.lock().unwrap().take().unwrap()
}
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
let events = self.log(f);
events
.into_iter()
@ -105,7 +105,7 @@ impl TestDB {
.collect()
}
pub fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
@ -129,7 +129,7 @@ impl TestDB {
.collect()
}
pub fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
pub(crate) fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate);
@ -148,7 +148,7 @@ impl TestDB {
}
}
pub fn check_diagnostics(&self) {
pub(crate) fn check_diagnostics(&self) {
let db: &TestDB = self;
let annotations = db.extract_annotations();
assert!(!annotations.is_empty());

View file

@ -6,7 +6,7 @@
/// Converts an identifier to an UpperCamelCase form.
/// Returns `None` if the string is already in UpperCamelCase.
pub fn to_camel_case(ident: &str) -> Option<String> {
pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
if is_camel_case(ident) {
return None;
}
@ -59,7 +59,7 @@ pub fn to_camel_case(ident: &str) -> Option<String> {
/// Converts an identifier to a lower_snake_case form.
/// Returns `None` if the string is already in lower_snake_case.
pub fn to_lower_snake_case(ident: &str) -> Option<String> {
pub(crate) fn to_lower_snake_case(ident: &str) -> Option<String> {
if is_lower_snake_case(ident) {
return None;
} else if is_upper_snake_case(ident) {
@ -71,7 +71,7 @@ pub fn to_lower_snake_case(ident: &str) -> Option<String> {
/// Converts an identifier to an UPPER_SNAKE_CASE form.
/// Returns `None` if the string is already in UPPER_SNAKE_CASE.
pub fn to_upper_snake_case(ident: &str) -> Option<String> {
pub(crate) fn to_upper_snake_case(ident: &str) -> Option<String> {
if is_upper_snake_case(ident) {
return None;
} else if is_lower_snake_case(ident) {

View file

@ -17,17 +17,10 @@ use crate::{
ApplicationTy, InferenceResult, Ty, TypeCtor,
};
pub use hir_def::{
body::{
scope::{ExprScopes, ScopeEntry, ScopeId},
Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource,
},
expr::{
ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
},
src::HasSource,
LocalFieldId, Lookup, VariantId,
pub(crate) use hir_def::{
body::{Body, BodySourceMap},
expr::{Expr, ExprId, MatchArm, Pat, PatId},
LocalFieldId, VariantId,
};
pub(super) struct ExprValidator<'a, 'b: 'a> {

View file

@ -59,12 +59,12 @@ impl<'a, 'b> UnsafeValidator<'a, 'b> {
}
}
pub struct UnsafeExpr {
pub expr: ExprId,
pub inside_unsafe_block: bool,
pub(crate) struct UnsafeExpr {
pub(crate) expr: ExprId,
pub(crate) inside_unsafe_block: bool,
}
pub fn unsafe_expressions(
pub(crate) fn unsafe_expressions(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,

View file

@ -214,9 +214,9 @@ struct InferenceContext<'a> {
#[derive(Clone, Debug)]
struct BreakableContext {
pub may_break: bool,
pub break_ty: Ty,
pub label: Option<name::Name>,
may_break: bool,
break_ty: Ty,
label: Option<name::Name>,
}
fn find_breakable<'c>(

View file

@ -107,7 +107,7 @@ impl<'a> InferenceContext<'a> {
}
}
pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
match ty.callable_sig(self.db) {
Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
None => self.callable_sig_from_fn_trait(ty, num_args),

View file

@ -127,7 +127,7 @@ where
}
impl<T> Canonicalized<T> {
pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
pub(super) fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
ty.walk_mut_binders(
&mut |ty, binders| {
if let &mut Ty::Bound(bound) = ty {
@ -141,7 +141,11 @@ impl<T> Canonicalized<T> {
ty
}
pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) {
pub(super) fn apply_solution(
&self,
ctx: &mut InferenceContext<'_>,
solution: Canonical<Substs>,
) {
// the solution may contain new variables, which we need to convert to new inference vars
let new_vars = Substs(
solution
@ -164,7 +168,7 @@ impl<T> Canonicalized<T> {
}
}
pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
pub(crate) fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
let mut table = InferenceTable::new();
let vars = Substs(
tys.kinds
@ -199,41 +203,46 @@ pub(crate) struct InferenceTable {
}
impl InferenceTable {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
}
pub fn new_type_var(&mut self) -> Ty {
pub(crate) fn new_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
pub fn new_integer_var(&mut self) -> Ty {
pub(crate) fn new_integer_var(&mut self) -> Ty {
Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
pub fn new_float_var(&mut self) -> Ty {
pub(crate) fn new_float_var(&mut self) -> Ty {
Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
pub fn new_maybe_never_type_var(&mut self) -> Ty {
pub(crate) fn new_maybe_never_type_var(&mut self) -> Ty {
Ty::Infer(InferTy::MaybeNeverTypeVar(
self.var_unification_table.new_key(TypeVarValue::Unknown),
))
}
pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
pub(crate) fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
self.resolve_ty_completely_inner(&mut Vec::new(), ty)
}
pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
pub(crate) fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
}
pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
self.unify_inner(ty1, ty2, 0)
}
pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
pub(crate) fn unify_substs(
&mut self,
substs1: &Substs,
substs2: &Substs,
depth: usize,
) -> bool {
substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
}
@ -331,7 +340,7 @@ impl InferenceTable {
/// If `ty` is a type variable with known type, returns that type;
/// otherwise, return ty.
pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
pub(crate) fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
let mut ty = Cow::Borrowed(ty);
// The type variable could resolve to a int/float variable. Hence try
// resolving up to three times; each type of variable shouldn't occur

View file

@ -1,6 +1,5 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
#[allow(unused)]
macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
@ -1115,5 +1114,5 @@ pub struct ReturnTypeImplTraits {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub(crate) struct ReturnTypeImplTrait {
pub bounds: Binders<Vec<GenericPredicate>>,
pub(crate) bounds: Binders<Vec<GenericPredicate>>,
}

View file

@ -21,7 +21,7 @@ use test_utils::extract_annotations;
crate::db::HirDatabaseStorage
)]
#[derive(Default)]
pub struct TestDB {
pub(crate) struct TestDB {
storage: salsa::Storage<TestDB>,
events: Mutex<Option<Vec<salsa::Event>>>,
}
@ -113,13 +113,13 @@ impl TestDB {
}
impl TestDB {
pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new());
f();
self.events.lock().unwrap().take().unwrap()
}
pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
let events = self.log(f);
events
.into_iter()

View file

@ -8,12 +8,12 @@ use super::{from_chalk, Interner, TypeAliasAsAssocType};
use crate::{db::HirDatabase, CallableDefId};
use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId};
pub use unsafe_tls::{set_current_program, with_current_program};
pub(crate) use unsafe_tls::{set_current_program, with_current_program};
pub struct DebugContext<'a>(&'a dyn HirDatabase);
pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase);
impl DebugContext<'_> {
pub fn debug_struct_id(
pub(crate) fn debug_struct_id(
&self,
id: super::AdtId,
f: &mut fmt::Formatter<'_>,
@ -26,7 +26,7 @@ impl DebugContext<'_> {
write!(f, "{}", name)
}
pub fn debug_trait_id(
pub(crate) fn debug_trait_id(
&self,
id: super::TraitId,
fmt: &mut fmt::Formatter<'_>,
@ -36,7 +36,7 @@ impl DebugContext<'_> {
write!(fmt, "{}", trait_data.name)
}
pub fn debug_assoc_type_id(
pub(crate) fn debug_assoc_type_id(
&self,
id: super::AssocTypeId,
fmt: &mut fmt::Formatter<'_>,
@ -51,7 +51,7 @@ impl DebugContext<'_> {
write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
}
pub fn debug_opaque_ty_id(
pub(crate) fn debug_opaque_ty_id(
&self,
opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -59,7 +59,7 @@ impl DebugContext<'_> {
fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
}
pub fn debug_alias(
pub(crate) fn debug_alias(
&self,
alias_ty: &AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -70,7 +70,7 @@ impl DebugContext<'_> {
}
}
pub fn debug_projection_ty(
pub(crate) fn debug_projection_ty(
&self,
projection_ty: &chalk_ir::ProjectionTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -95,7 +95,7 @@ impl DebugContext<'_> {
write!(fmt, ">::{}", type_alias_data.name)
}
pub fn debug_opaque_ty(
pub(crate) fn debug_opaque_ty(
&self,
opaque_ty: &chalk_ir::OpaqueTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -103,7 +103,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
}
pub fn debug_ty(
pub(crate) fn debug_ty(
&self,
ty: &chalk_ir::Ty<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -111,7 +111,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", ty.data(&Interner))
}
pub fn debug_lifetime(
pub(crate) fn debug_lifetime(
&self,
lifetime: &Lifetime<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -119,7 +119,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", lifetime.data(&Interner))
}
pub fn debug_generic_arg(
pub(crate) fn debug_generic_arg(
&self,
parameter: &GenericArg<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -127,7 +127,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
}
pub fn debug_goal(
pub(crate) fn debug_goal(
&self,
goal: &Goal<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -136,7 +136,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", goal_data)
}
pub fn debug_goals(
pub(crate) fn debug_goals(
&self,
goals: &Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -144,7 +144,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", goals.debug(&Interner))
}
pub fn debug_program_clause_implication(
pub(crate) fn debug_program_clause_implication(
&self,
pci: &ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -152,7 +152,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", pci.debug(&Interner))
}
pub fn debug_substitution(
pub(crate) fn debug_substitution(
&self,
substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -160,7 +160,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", substitution.debug(&Interner))
}
pub fn debug_separator_trait_ref(
pub(crate) fn debug_separator_trait_ref(
&self,
separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -168,7 +168,7 @@ impl DebugContext<'_> {
write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
}
pub fn debug_fn_def_id(
pub(crate) fn debug_fn_def_id(
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -190,7 +190,7 @@ impl DebugContext<'_> {
}
}
pub fn debug_const(
pub(crate) fn debug_const(
&self,
_constant: &chalk_ir::Const<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -198,42 +198,42 @@ impl DebugContext<'_> {
write!(fmt, "const")
}
pub fn debug_variable_kinds(
pub(crate) fn debug_variable_kinds(
&self,
variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.as_slice(&Interner))
}
pub fn debug_variable_kinds_with_angles(
pub(crate) fn debug_variable_kinds_with_angles(
&self,
variable_kinds: &chalk_ir::VariableKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner))
}
pub fn debug_canonical_var_kinds(
pub(crate) fn debug_canonical_var_kinds(
&self,
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner))
}
pub fn debug_program_clause(
pub(crate) fn debug_program_clause(
&self,
clause: &chalk_ir::ProgramClause<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clause.data(&Interner))
}
pub fn debug_program_clauses(
pub(crate) fn debug_program_clauses(
&self,
clauses: &chalk_ir::ProgramClauses<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> fmt::Result {
write!(fmt, "{:?}", clauses.as_slice(&Interner))
}
pub fn debug_quantified_where_clauses(
pub(crate) fn debug_quantified_where_clauses(
&self,
clauses: &chalk_ir::QuantifiedWhereClauses<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -249,7 +249,7 @@ mod unsafe_tls {
scoped_thread_local!(static PROGRAM: DebugContext);
pub fn with_current_program<R>(
pub(crate) fn with_current_program<R>(
op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
) -> R {
if PROGRAM.is_set() {
@ -259,7 +259,7 @@ mod unsafe_tls {
}
}
pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
pub(crate) fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
where
OP: FnOnce() -> R,
{

View file

@ -61,16 +61,16 @@ macro_rules! err {
#[derive(Debug, Default)]
pub(super) struct Match {
pub bindings: Bindings,
pub(super) bindings: Bindings,
/// We currently just keep the first error and count the rest to compare matches.
pub err: Option<ExpandError>,
pub err_count: usize,
pub(super) err: Option<ExpandError>,
pub(super) err_count: usize,
/// How many top-level token trees were left to match.
pub unmatched_tts: usize,
pub(super) unmatched_tts: usize,
}
impl Match {
pub fn add_err(&mut self, err: ExpandError) {
pub(super) fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take();
self.err = prev_err.or(Some(err));
self.err_count += 1;

View file

@ -7,9 +7,9 @@ use tt::buffer::{Cursor, TokenBuffer};
#[derive(Debug, Clone, Eq, PartialEq)]
struct TtToken {
pub kind: SyntaxKind,
pub is_joint_to_next: bool,
pub text: SmolStr,
kind: SyntaxKind,
is_joint_to_next: bool,
text: SmolStr,
}
pub(crate) struct SubtreeTokenSource<'a> {
@ -30,7 +30,7 @@ impl<'a> SubtreeTokenSource<'a> {
}
impl<'a> SubtreeTokenSource<'a> {
pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
let cursor = buffer.begin();
let mut res = SubtreeTokenSource {

View file

@ -30,7 +30,7 @@ pub(crate) struct ProcMacroProcessThread {
}
impl ProcMacroProcessSrv {
pub fn run(
pub(crate) fn run(
process_path: PathBuf,
args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> {
@ -48,7 +48,7 @@ impl ProcMacroProcessSrv {
Ok((thread, srv))
}
pub fn find_proc_macros(
pub(crate) fn find_proc_macros(
&self,
dylib_path: &Path,
) -> Result<Vec<(String, ProcMacroKind)>, tt::ExpansionError> {
@ -58,7 +58,7 @@ impl ProcMacroProcessSrv {
Ok(result.macros)
}
pub fn custom_derive(
pub(crate) fn custom_derive(
&self,
dylib_path: &Path,
subtree: &Subtree,
@ -75,7 +75,7 @@ impl ProcMacroProcessSrv {
Ok(result.expansion)
}
pub fn send_task<R>(&self, req: Request) -> Result<R, tt::ExpansionError>
pub(crate) fn send_task<R>(&self, req: Request) -> Result<R, tt::ExpansionError>
where
R: TryFrom<Response, Error = &'static str>,
{

View file

@ -75,18 +75,18 @@ struct TokenIdDef(u32);
#[serde(remote = "Delimiter")]
struct DelimiterDef {
#[serde(with = "TokenIdDef")]
pub id: TokenId,
id: TokenId,
#[serde(with = "DelimiterKindDef")]
pub kind: DelimiterKind,
kind: DelimiterKind,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Subtree")]
struct SubtreeDef {
#[serde(default, with = "opt_delimiter_def")]
pub delimiter: Option<Delimiter>,
delimiter: Option<Delimiter>,
#[serde(with = "vec_token_tree")]
pub token_trees: Vec<TokenTree>,
token_trees: Vec<TokenTree>,
}
#[derive(Serialize, Deserialize)]
@ -112,19 +112,19 @@ enum LeafDef {
#[derive(Serialize, Deserialize)]
#[serde(remote = "Literal")]
struct LiteralDef {
pub text: SmolStr,
text: SmolStr,
#[serde(with = "TokenIdDef")]
pub id: TokenId,
id: TokenId,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Punct")]
struct PunctDef {
pub char: char,
char: char,
#[serde(with = "SpacingDef")]
pub spacing: Spacing,
spacing: Spacing,
#[serde(with = "TokenIdDef")]
pub id: TokenId,
id: TokenId,
}
#[derive(Serialize, Deserialize)]
@ -137,16 +137,16 @@ enum SpacingDef {
#[derive(Serialize, Deserialize)]
#[serde(remote = "Ident")]
struct IdentDef {
pub text: SmolStr,
text: SmolStr,
#[serde(with = "TokenIdDef")]
pub id: TokenId,
id: TokenId,
}
mod opt_delimiter_def {
use super::{Delimiter, DelimiterDef};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S>(value: &Option<Delimiter>, serializer: S) -> Result<S::Ok, S::Error>
pub(super) fn serialize<S>(value: &Option<Delimiter>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
@ -155,7 +155,7 @@ mod opt_delimiter_def {
value.as_ref().map(Helper).serialize(serializer)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Delimiter>, D::Error>
pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Option<Delimiter>, D::Error>
where
D: Deserializer<'de>,
{
@ -170,7 +170,7 @@ mod opt_subtree_def {
use super::{Subtree, SubtreeDef};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S>(value: &Option<Subtree>, serializer: S) -> Result<S::Ok, S::Error>
pub(super) fn serialize<S>(value: &Option<Subtree>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
@ -179,7 +179,7 @@ mod opt_subtree_def {
value.as_ref().map(Helper).serialize(serializer)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Subtree>, D::Error>
pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Option<Subtree>, D::Error>
where
D: Deserializer<'de>,
{
@ -194,7 +194,7 @@ mod vec_token_tree {
use super::{TokenTree, TokenTreeDef};
use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer};
pub fn serialize<S>(value: &Vec<TokenTree>, serializer: S) -> Result<S::Ok, S::Error>
pub(super) fn serialize<S>(value: &Vec<TokenTree>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
@ -209,7 +209,7 @@ mod vec_token_tree {
seq.end()
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TokenTree>, D::Error>
pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TokenTree>, D::Error>
where
D: Deserializer<'de>,
{

View file

@ -9,6 +9,7 @@
//! RA than `proc-macro2` token stream.
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
#![allow(unreachable_pub)]
#[allow(dead_code)]
#[doc(hidden)]

View file

@ -27,7 +27,7 @@ pub fn init_from(spec: &str) {
filter.install();
}
pub type Label = &'static str;
type Label = &'static str;
/// This function starts a profiling scope in the current execution stack with a given description.
/// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop.
@ -173,7 +173,7 @@ impl ProfileStack {
true
}
pub fn pop(&mut self, label: Label, detail: Option<String>) {
fn pop(&mut self, label: Label, detail: Option<String>) {
let start = self.starts.pop().unwrap();
let duration = start.elapsed();
self.messages.finish(Message { duration, label, detail });

View file

@ -4,15 +4,15 @@ use std::ops;
use arena::Arena;
#[derive(Default)]
pub struct Tree<T> {
pub(crate) struct Tree<T> {
nodes: Arena<Node<T>>,
current_path: Vec<(Idx<T>, Option<Idx<T>>)>,
}
pub type Idx<T> = arena::Idx<Node<T>>;
pub(crate) type Idx<T> = arena::Idx<Node<T>>;
impl<T> Tree<T> {
pub fn start(&mut self)
pub(crate) fn start(&mut self)
where
T: Default,
{
@ -30,19 +30,19 @@ impl<T> Tree<T> {
self.current_path.push((me, None));
}
pub fn finish(&mut self, data: T) {
pub(crate) fn finish(&mut self, data: T) {
let (me, _last_child) = self.current_path.pop().unwrap();
self.nodes[me].data = data;
}
pub fn root(&self) -> Option<Idx<T>> {
pub(crate) fn root(&self) -> Option<Idx<T>> {
self.nodes.iter().next().map(|(idx, _)| idx)
}
pub fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ {
pub(crate) fn children(&self, idx: Idx<T>) -> impl Iterator<Item = Idx<T>> + '_ {
NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child }
}
pub fn clear(&mut self) {
pub(crate) fn clear(&mut self) {
self.nodes.clear();
self.current_path.clear();
}
@ -55,7 +55,7 @@ impl<T> ops::Index<Idx<T>> for Tree<T> {
}
}
pub struct Node<T> {
pub(crate) struct Node<T> {
data: T,
first_child: Option<Idx<T>>,
next_sibling: Option<Idx<T>>,

View file

@ -17,7 +17,7 @@ use crate::{
pub use self::{
expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
generated::*,
generated::{nodes::*, tokens::*},
node_ext::{
AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
StructKind, TypeBoundKind, VisibilityKind,

View file

@ -1,8 +1,8 @@
//! This file is actually hand-written, but the submodules are indeed generated.
#[rustfmt::skip]
mod nodes;
pub(crate) mod nodes;
#[rustfmt::skip]
mod tokens;
pub(crate) mod tokens;
use crate::{
AstNode,
@ -10,7 +10,7 @@ use crate::{
SyntaxNode,
};
pub use {nodes::*, tokens::*};
pub(crate) use nodes::*;
// Stmt is the only nested enum, so it's easier to just hand-write it
impl AstNode for Stmt {

View file

@ -46,16 +46,19 @@ use text_edit::Indel;
pub use crate::{
algo::InsertPosition,
ast::{AstNode, AstToken},
parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
parsing::lexer::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
ptr::{AstPtr, SyntaxNodePtr},
syntax_error::SyntaxError,
syntax_node::{
Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode,
SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
SyntaxTreeBuilder,
},
};
pub use parser::{SyntaxKind, T};
pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent};
pub use rowan::{
Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
WalkEvent,
};
/// `Parse` is the result of the parsing: a syntax tree and a collection of
/// errors.

View file

@ -1,7 +1,7 @@
//! Lexing, bridging to parser (which does the actual parsing) and
//! incremental reparsing.
mod lexer;
pub(crate) mod lexer;
mod text_token_source;
mod text_tree_sink;
mod reparsing;
@ -10,7 +10,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
use text_token_source::TextTokenSource;
use text_tree_sink::TextTreeSink;
pub use lexer::*;
pub(crate) use lexer::*;
pub(crate) use self::reparsing::incremental_reparse;
use parser::SyntaxKind;

View file

@ -65,7 +65,7 @@ fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Tok
impl<'t> TextTokenSource<'t> {
/// Generate input from tokens (except comments and whitespace).
pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
let token_offset_pairs: Vec<_> = raw_tokens
.iter()
.filter_map({

View file

@ -10,9 +10,7 @@ use rowan::{GreenNodeBuilder, Language};
use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
pub use rowan::GreenNode;
pub(crate) use rowan::GreenToken;
pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum RustLanguage {}
@ -34,8 +32,6 @@ pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
pub use rowan::{Direction, NodeOrToken};
#[derive(Default)]
pub struct SyntaxTreeBuilder {
errors: Vec<SyntaxError>,

View file

@ -287,7 +287,7 @@ impl VirtualPath {
Some(res)
}
pub fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
pub(crate) fn name_and_extension(&self) -> Option<(&str, Option<&str>)> {
let file_path = if self.0.ends_with('/') { &self.0[..&self.0.len() - 1] } else { &self.0 };
let file_name = match file_path.rfind('/') {
Some(position) => &file_path[position + 1..],

View file

@ -45,15 +45,15 @@ pub fn generate_parser_tests(mode: Mode) -> Result<()> {
#[derive(Debug)]
struct Test {
pub name: String,
pub text: String,
pub ok: bool,
name: String,
text: String,
ok: bool,
}
#[derive(Default, Debug)]
struct Tests {
pub ok: HashMap<String, Test>,
pub err: HashMap<String, Test>,
ok: HashMap<String, Test>,
err: HashMap<String, Test>,
}
fn collect_tests(s: &str) -> Vec<Test> {