Skip to content

Commit

Permalink
Fix violations of non_camel_case_types
Browse files Browse the repository at this point in the history
  • Loading branch information
sjwang05 committed Oct 12, 2023
1 parent 9401707 commit 7dde3ef
Show file tree
Hide file tree
Showing 12 changed files with 188 additions and 189 deletions.
224 changes: 112 additions & 112 deletions compiler/rustc_data_structures/src/stable_hasher.rs

Large diffs are not rendered by default.

28 changes: 14 additions & 14 deletions compiler/rustc_data_structures/src/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,16 @@
//! | `Lock<T>` | `RefCell<T>` | `RefCell<T>` or |
//! | | | `parking_lot::Mutex<T>` |
//! | `RwLock<T>` | `RefCell<T>` | `parking_lot::RwLock<T>` |
//! | `MTLock<T>` [^1] | `T` | `Lock<T>` |
//! | `MTLockRef<'a, T>` [^2] | `&'a mut MTLock<T>` | `&'a MTLock<T>` |
//! | `MtLock<T>` [^1] | `T` | `Lock<T>` |
//! | `MtLockRef<'a, T>` [^2] | `&'a mut MtLock<T>` | `&'a MtLock<T>` |
//! | | | |
//! | `ParallelIterator` | `Iterator` | `rayon::iter::ParallelIterator` |
//!
//! [^1] `MTLock` is similar to `Lock`, but the serial version avoids the cost
//! [^1] `MtLock` is similar to `Lock`, but the serial version avoids the cost
//! of a `RefCell`. This is appropriate when interior mutability is not
//! required.
//!
//! [^2] `MTLockRef` is a typedef.
//! [^2] `MtLockRef` is a typedef.

pub use crate::marker::*;
use std::collections::HashMap;
Expand Down Expand Up @@ -212,15 +212,15 @@ cfg_if! {

use std::cell::RefCell as InnerRwLock;

pub type MTLockRef<'a, T> = &'a mut MTLock<T>;
pub type MtLockRef<'a, T> = &'a mut MtLock<T>;

#[derive(Debug, Default)]
pub struct MTLock<T>(T);
pub struct MtLock<T>(T);

impl<T> MTLock<T> {
impl<T> MtLock<T> {
#[inline(always)]
pub fn new(inner: T) -> Self {
MTLock(inner)
MtLock(inner)
}

#[inline(always)]
Expand All @@ -245,10 +245,10 @@ cfg_if! {
}

// FIXME: Probably a bad idea (in the threaded case)
impl<T: Clone> Clone for MTLock<T> {
impl<T: Clone> Clone for MtLock<T> {
#[inline]
fn clone(&self) -> Self {
MTLock(self.0.clone())
MtLock(self.0.clone())
}
}
} else {
Expand All @@ -269,15 +269,15 @@ cfg_if! {
pub use std::sync::Arc as Lrc;
pub use std::sync::Weak as Weak;

pub type MTLockRef<'a, T> = &'a MTLock<T>;
pub type MtLockRef<'a, T> = &'a MtLock<T>;

#[derive(Debug, Default)]
pub struct MTLock<T>(Lock<T>);
pub struct MtLock<T>(Lock<T>);

impl<T> MTLock<T> {
impl<T> MtLock<T> {
#[inline(always)]
pub fn new(inner: T) -> Self {
MTLock(Lock::new(inner))
MtLock(Lock::new(inner))
}

#[inline(always)]
Expand Down
14 changes: 7 additions & 7 deletions compiler/rustc_monomorphize/src/collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@
//! regardless of whether it is actually needed or not.

use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::{par_for_each_in, MTLock, MTLockRef};
use rustc_data_structures::sync::{par_for_each_in, MtLock, MtLockRef};
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
Expand Down Expand Up @@ -263,13 +263,13 @@ pub fn collect_crate_mono_items(

debug!("building mono item graph, beginning at roots");

let mut visited = MTLock::new(FxHashSet::default());
let mut usage_map = MTLock::new(UsageMap::new());
let mut visited = MtLock::new(FxHashSet::default());
let mut usage_map = MtLock::new(UsageMap::new());
let recursion_limit = tcx.recursion_limit();

{
let visited: MTLockRef<'_, _> = &mut visited;
let usage_map: MTLockRef<'_, _> = &mut usage_map;
let visited: MtLockRef<'_, _> = &mut visited;
let usage_map: MtLockRef<'_, _> = &mut usage_map;

tcx.sess.time("monomorphization_collector_graph_walk", || {
par_for_each_in(roots, |root| {
Expand Down Expand Up @@ -333,10 +333,10 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionMode) -> Vec<MonoItem<
fn collect_items_rec<'tcx>(
tcx: TyCtxt<'tcx>,
starting_item: Spanned<MonoItem<'tcx>>,
visited: MTLockRef<'_, FxHashSet<MonoItem<'tcx>>>,
visited: MtLockRef<'_, FxHashSet<MonoItem<'tcx>>>,
recursion_depths: &mut DefIdMap<usize>,
recursion_limit: Limit,
usage_map: MTLockRef<'_, UsageMap<'tcx>>,
usage_map: MtLockRef<'_, UsageMap<'tcx>>,
) {
if !visited.lock_mut().insert(starting_item.node) {
// We've been here already, no need to search again.
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_smir/src/rustc_internal/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ where
(Ok(Ok(())), Some(ControlFlow::Break(value))) => Err(CompilerError::Interrupted(value)),
(Ok(Ok(_)), None) => Err(CompilerError::Skipped),
(Ok(Err(_)), _) => Err(CompilerError::CompilationFailed),
(Err(_), _) => Err(CompilerError::ICE),
(Err(_), _) => Err(CompilerError::Ice),
}
}
}
Expand Down
3 changes: 1 addition & 2 deletions compiler/stable_mir/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,7 @@ pub type ImplTraitDecls = Vec<ImplDef>;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CompilerError<T> {
/// Internal compiler error (I.e.: Compiler crashed).
#[allow(non_camel_case_types)]
ICE,
Ice,
/// Compilation failed.
CompilationFailed,
/// Compilation was interrupted.
Expand Down
6 changes: 3 additions & 3 deletions library/alloc/src/collections/btree/map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use super::dedup_sorted_iter::DedupSortedIter;
use super::navigate::{LazyLeafRange, LeafRange};
use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
use super::search::{SearchBound, SearchResult::*};
use super::set_val::SetValZST;
use super::set_val::SetValZst;

mod entry;

Expand Down Expand Up @@ -288,7 +288,7 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
}
}

impl<K, Q: ?Sized, A: Allocator + Clone> super::Recover<Q> for BTreeMap<K, SetValZST, A>
impl<K, Q: ?Sized, A: Allocator + Clone> super::Recover<Q> for BTreeMap<K, SetValZst, A>
where
K: Borrow<Q> + Ord,
Q: Ord,
Expand Down Expand Up @@ -335,7 +335,7 @@ where
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}
.insert(SetValZST::default());
.insert(SetValZst::default());
None
}
}
Expand Down
20 changes: 10 additions & 10 deletions library/alloc/src/collections/btree/set.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};

use super::map::{BTreeMap, Keys};
use super::merge_iter::MergeIterInner;
use super::set_val::SetValZST;
use super::set_val::SetValZst;
use super::Recover;

use crate::alloc::{Allocator, Global};
Expand Down Expand Up @@ -76,7 +76,7 @@ pub struct BTreeSet<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
map: BTreeMap<T, SetValZST, A>,
map: BTreeMap<T, SetValZst, A>,
}

#[stable(feature = "rust1", since = "1.0.0")]
Expand Down Expand Up @@ -130,7 +130,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for BTreeSet<T, A> {
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
iter: Keys<'a, T, SetValZST>,
iter: Keys<'a, T, SetValZst>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
Expand All @@ -152,7 +152,7 @@ pub struct IntoIter<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
iter: super::map::IntoIter<T, SetValZST, A>,
iter: super::map::IntoIter<T, SetValZst, A>,
}

/// An iterator over a sub-range of items in a `BTreeSet`.
Expand All @@ -165,7 +165,7 @@ pub struct IntoIter<
#[derive(Debug)]
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, T: 'a> {
iter: super::map::Range<'a, T, SetValZST>,
iter: super::map::Range<'a, T, SetValZst>,
}

/// A lazy iterator producing elements in the difference of `BTreeSet`s.
Expand Down Expand Up @@ -900,7 +900,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
where
T: Ord,
{
self.map.insert(value, SetValZST::default()).is_none()
self.map.insert(value, SetValZst::default()).is_none()
}

/// Adds a value to the set, replacing the existing element, if any, that is
Expand Down Expand Up @@ -1197,7 +1197,7 @@ impl<T: Ord> FromIterator<T> for BTreeSet<T> {

impl<T: Ord, A: Allocator + Clone> BTreeSet<T, A> {
fn from_sorted_iter<I: Iterator<Item = T>>(iter: I, alloc: A) -> BTreeSet<T, A> {
let iter = iter.map(|k| (k, SetValZST::default()));
let iter = iter.map(|k| (k, SetValZst::default()));
let map = BTreeMap::bulk_build_from_sorted_iter(iter, alloc);
BTreeSet { map }
}
Expand All @@ -1221,7 +1221,7 @@ impl<T: Ord, const N: usize> From<[T; N]> for BTreeSet<T> {

// use stable sort to preserve the insertion order.
arr.sort();
let iter = IntoIterator::into_iter(arr).map(|k| (k, SetValZST::default()));
let iter = IntoIterator::into_iter(arr).map(|k| (k, SetValZst::default()));
let map = BTreeMap::bulk_build_from_sorted_iter(iter, Global);
BTreeSet { map }
}
Expand Down Expand Up @@ -1272,7 +1272,7 @@ pub struct ExtractIf<
F: 'a + FnMut(&T) -> bool,
{
pred: F,
inner: super::map::ExtractIfInner<'a, T, SetValZST>,
inner: super::map::ExtractIfInner<'a, T, SetValZst>,
/// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`.
alloc: A,
}
Expand All @@ -1297,7 +1297,7 @@ where

fn next(&mut self) -> Option<T> {
let pred = &mut self.pred;
let mut mapped_pred = |k: &T, _v: &mut SetValZST| pred(k);
let mut mapped_pred = |k: &T, _v: &mut SetValZst| pred(k);
self.inner.next(&mut mapped_pred, self.alloc.clone()).map(|(k, _)| k)
}

Expand Down
5 changes: 2 additions & 3 deletions library/alloc/src/collections/btree/set_val.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,8 @@
/// Used instead of `()` to differentiate between:
/// * `BTreeMap<T, ()>` (possible user-defined map)
/// * `BTreeMap<T, SetValZst>` (internal set representation)
#[allow(non_camel_case_types)]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Default)]
pub struct SetValZST;
pub struct SetValZst;

/// A trait to differentiate between `BTreeMap` and `BTreeSet` values.
/// Returns `true` only for type `SetValZST`, `false` for all other types (blanket implementation).
Expand All @@ -23,7 +22,7 @@ impl<V> IsSetVal for V {
}

// Specialization
impl IsSetVal for SetValZST {
impl IsSetVal for SetValZst {
fn is_set_val() -> bool {
true
}
Expand Down
11 changes: 5 additions & 6 deletions library/core/src/intrinsics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2478,16 +2478,15 @@ extern "rust-intrinsic" {
/// `unreachable_unchecked` is actually being reached. The bug is in *crate A*,
/// which violates the principle that a `const fn` must behave the same at
/// compile-time and at run-time. The unsafe code in crate B is fine.
#[allow(non_camel_case_types)]
#[rustc_const_unstable(feature = "const_eval_select", issue = "none")]
pub fn const_eval_select<ARG: Tuple, F, G, RET>(
arg: ARG,
pub fn const_eval_select<Arg: Tuple, F, G, Ret>(
arg: Arg,
called_in_const: F,
called_at_rt: G,
) -> RET
) -> Ret
where
G: FnOnce<ARG, Output = RET>,
F: FnOnce<ARG, Output = RET>;
G: FnOnce<Arg, Output = Ret>,
F: FnOnce<Arg, Output = Ret>;

/// This method creates a pointer to any `Some` value. If the argument is
/// `None`, an invalid within-bounds pointer (that is still acceptable for
Expand Down
2 changes: 2 additions & 0 deletions library/core/src/iter/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -368,6 +368,8 @@ macro_rules! impl_fold_via_try_fold {
impl_fold_via_try_fold! { @internal spec_rfold -> spec_try_rfold }
};
(@internal $fold:ident -> $try_fold:ident) => {
// `A` and `F` are already used as generic parameters in impls where this macro appears,
// hence the strange names
#[allow(non_camel_case_types)]
#[inline]
fn $fold<AAA, FFF>(mut self, init: AAA, fold: FFF) -> AAA
Expand Down
28 changes: 14 additions & 14 deletions library/std/src/sys/personality/dwarf/eh.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,15 @@ pub const DW_EH_PE_indirect: u8 = 0x80;

#[allow(non_camel_case_types)]
#[derive(Copy, Clone)]
pub struct EHContext<'a> {
pub struct EhContext<'a> {
pub ip: usize, // Current instruction pointer
pub func_start: usize, // Address of the current function
pub get_text_start: &'a dyn Fn() -> usize, // Get address of the code section
pub get_data_start: &'a dyn Fn() -> usize, // Get address of the data section
}

#[allow(non_camel_case_types)]
pub enum EHAction {
pub enum EhAction {
None,
Cleanup(usize),
Catch(usize),
Expand All @@ -55,9 +55,9 @@ pub enum EHAction {

pub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = "ios", target_arch = "arm"));

pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result<EHAction, ()> {
pub unsafe fn find_eh_action(lsda: *const u8, context: &EhContext<'_>) -> Result<EhAction, ()> {
if lsda.is_null() {
return Ok(EHAction::None);
return Ok(EhAction::None);
}

let func_start = context.func_start;
Expand Down Expand Up @@ -95,22 +95,22 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result
}
if ip < func_start + cs_start + cs_len {
if cs_lpad == 0 {
return Ok(EHAction::None);
return Ok(EhAction::None);
} else {
let lpad = lpad_base + cs_lpad;
return Ok(interpret_cs_action(action_table as *mut u8, cs_action_entry, lpad));
}
}
}
// Ip is not present in the table. This indicates a nounwind call.
Ok(EHAction::Terminate)
Ok(EhAction::Terminate)
} else {
// SjLj version:
// The "IP" is an index into the call-site table, with two exceptions:
// -1 means 'no-action', and 0 means 'terminate'.
match ip as isize {
-1 => return Ok(EHAction::None),
0 => return Ok(EHAction::Terminate),
-1 => return Ok(EhAction::None),
0 => return Ok(EhAction::Terminate),
_ => (),
}
let mut idx = ip;
Expand All @@ -132,24 +132,24 @@ unsafe fn interpret_cs_action(
action_table: *mut u8,
cs_action_entry: u64,
lpad: usize,
) -> EHAction {
) -> EhAction {
if cs_action_entry == 0 {
// If cs_action_entry is 0 then this is a cleanup (Drop::drop). We run these
// for both Rust panics and foreign exceptions.
EHAction::Cleanup(lpad)
EhAction::Cleanup(lpad)
} else {
// If lpad != 0 and cs_action_entry != 0, we have to check ttype_index.
// If ttype_index == 0 under the condition, we take cleanup action.
let action_record = (action_table as *mut u8).offset(cs_action_entry as isize - 1);
let mut action_reader = DwarfReader::new(action_record);
let ttype_index = action_reader.read_sleb128();
if ttype_index == 0 {
EHAction::Cleanup(lpad)
EhAction::Cleanup(lpad)
} else if ttype_index > 0 {
// Stop unwinding Rust panics at catch_unwind.
EHAction::Catch(lpad)
EhAction::Catch(lpad)
} else {
EHAction::Filter(lpad)
EhAction::Filter(lpad)
}
}
}
Expand All @@ -161,7 +161,7 @@ fn round_up(unrounded: usize, align: usize) -> Result<usize, ()> {

unsafe fn read_encoded_pointer(
reader: &mut DwarfReader,
context: &EHContext<'_>,
context: &EhContext<'_>,
encoding: u8,
) -> Result<usize, ()> {
if encoding == DW_EH_PE_omit {
Expand Down
Loading

0 comments on commit 7dde3ef

Please sign in to comment.