Auto merge of rust-lang#76376 - Dylan-DPC:rollup-8chsbw9, r=Dylan-DPC
Rollup of 11 pull requests

Successful merges:

 - rust-lang#75695 (Add a regression test for issue-72793)
 - rust-lang#75741 (Refactor byteorder to std in rustc_middle)
 - rust-lang#75954 (Unstable Book: add links to tracking issues for FFI features)
 - rust-lang#75994 (`impl Rc::new_cyclic`)
 - rust-lang#76060 (Link vec doc to & reference)
 - rust-lang#76078 (Remove disambiguators from intra doc link text)
 - rust-lang#76082 (Fix intra-doc links on pub re-exports)
 - rust-lang#76254 (Fold length constant in Rvalue::Repeat)
 - rust-lang#76258 (x.py check checks tests/examples/benches)
 - rust-lang#76263 (inliner: Check for codegen fn attributes compatibility)
 - rust-lang#76285 (Move jointness censoring to proc_macro)

Failed merges:

r? @ghost
bors committed Sep 5, 2020
2 parents 81a769f + 85cee57 commit 7d289ae
Showing 31 changed files with 714 additions and 86 deletions.
1 change: 0 additions & 1 deletion Cargo.lock
@@ -3722,7 +3722,6 @@ name = "rustc_middle"
version = "0.0.0"
dependencies = [
"bitflags",
"byteorder",
"chalk-ir",
"measureme",
"polonius-engine",
4 changes: 2 additions & 2 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -403,8 +403,8 @@ impl Cursor {
self.index = index;
}

- pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
- self.stream.0[self.index..].get(n).map(|(tree, _)| tree.clone())
+ pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
+ self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
}
}

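
The tokenstream.rs change switches `Cursor::look_ahead` from cloning the peeked tree to handing out a borrow. A minimal sketch of the same slice-backed lookahead pattern, using an illustrative `Cursor<T>` rather than the real rustc type:

```rust
// Illustrative slice-backed cursor whose lookahead borrows instead of cloning.
struct Cursor<T> {
    items: Vec<T>,
    index: usize,
}

impl<T> Cursor<T> {
    // Peek `n` items past the current position without advancing or cloning.
    fn look_ahead(&self, n: usize) -> Option<&T> {
        self.items[self.index..].get(n)
    }
}

fn main() {
    let cursor = Cursor { items: vec!["a", "b", "c"], index: 1 };
    assert_eq!(cursor.look_ahead(0), Some(&"b"));
    assert_eq!(cursor.look_ahead(2), None);
}
```

Callers that still need an owned tree can clone at the call site; callers that only inspect the peeked token, like the jointness check in the next file, pay nothing.
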
22 changes: 17 additions & 5 deletions compiler/rustc_expand/src/proc_macro_server.rs
@@ -47,15 +47,26 @@ impl ToInternal<token::DelimToken> for Delimiter {
}
}

- impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
- for TokenTree<Group, Punct, Ident, Literal>
+ impl
+ FromInternal<(
+ TreeAndJoint,
+ Option<&'_ tokenstream::TokenTree>,
+ &'_ ParseSess,
+ &'_ mut Vec<Self>,
+ )> for TokenTree<Group, Punct, Ident, Literal>
{
fn from_internal(
- ((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>),
+ ((tree, is_joint), look_ahead, sess, stack): (
+ TreeAndJoint,
+ Option<&tokenstream::TokenTree>,
+ &ParseSess,
+ &mut Vec<Self>,
+ ),
) -> Self {
use rustc_ast::token::*;

- let joint = is_joint == Joint;
+ let joint = is_joint == Joint
+ && matches!(look_ahead, Some(tokenstream::TokenTree::Token(t)) if t.is_op());
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
@@ -445,7 +456,8 @@ impl server::TokenStreamIter for Rustc<'_> {
loop {
let tree = iter.stack.pop().or_else(|| {
let next = iter.cursor.next_with_joint()?;
- Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
+ let lookahead = iter.cursor.look_ahead(0);
+ Some(TokenTree::from_internal((next, lookahead, self.sess, &mut iter.stack)))
})?;
// A hack used to pass AST fragments to attribute and derive macros
// as a single nonterminal token instead of a token stream.
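
The new `joint` computation is the "jointness censoring" moved here from the lexer: a token is reported to a proc macro as joint only when the lexer marked it joint *and* the immediately following token tree is itself an operator token. A rough standalone sketch of that predicate, with simplified stand-in types (`Spacing`, `TokenTree`, and `is_op` here are illustrative, not the rustc definitions):

```rust
// Simplified stand-ins for rustc's token types; illustration only.
#[derive(Clone, Copy, PartialEq)]
enum Spacing {
    Joint,
    Alone,
}

enum TokenTree {
    Op(char),      // an operator token such as '+', '=' or '<'
    Ident(String), // any non-operator token
}

impl TokenTree {
    fn is_op(&self) -> bool {
        matches!(self, TokenTree::Op(_))
    }
}

/// A punct is only reported as joint when the lexer said it was joint *and* the
/// next token is itself an operator (so `+=` stays joint, while `+x` does not).
fn censored_joint(spacing: Spacing, look_ahead: Option<&TokenTree>) -> bool {
    spacing == Spacing::Joint && matches!(look_ahead, Some(t) if t.is_op())
}

fn main() {
    let eq = TokenTree::Op('=');
    let ident = TokenTree::Ident("x".to_string());
    assert!(censored_joint(Spacing::Joint, Some(&eq)));
    assert!(!censored_joint(Spacing::Joint, Some(&ident)));
    assert!(!censored_joint(Spacing::Alone, Some(&eq)));
    assert!(!censored_joint(Spacing::Joint, None));
}
```
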
1 change: 0 additions & 1 deletion compiler/rustc_middle/Cargo.toml
@@ -26,7 +26,6 @@ rustc_index = { path = "../rustc_index" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
- byteorder = { version = "1.3" }
chalk-ir = "0.21.0"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
measureme = "0.7.1"
12 changes: 4 additions & 8 deletions compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -345,10 +345,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {

/// Reads a *non-ZST* scalar.
///
- /// ZSTs can't be read for two reasons:
- /// * byte-order cannot work with zero-element buffers;
- /// * in order to obtain a `Pointer`, we need to check for ZSTness anyway due to integer
- /// pointers being valid for ZSTs.
+ /// ZSTs can't be read because in order to obtain a `Pointer`, we need to check
+ /// for ZSTness anyway due to integer pointers being valid for ZSTs.
///
/// It is the caller's responsibility to check bounds and alignment beforehand.
/// Most likely, you want to call `InterpCx::read_scalar` instead of this method.
@@ -397,10 +395,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {

/// Writes a *non-ZST* scalar.
///
- /// ZSTs can't be read for two reasons:
- /// * byte-order cannot work with zero-element buffers;
- /// * in order to obtain a `Pointer`, we need to check for ZSTness anyway due to integer
- /// pointers being valid for ZSTs.
+ /// ZSTs can't be read because in order to obtain a `Pointer`, we need to check
+ /// for ZSTness anyway due to integer pointers being valid for ZSTs.
///
/// It is the caller's responsibility to check bounds and alignment beforehand.
/// Most likely, you want to call `InterpCx::write_scalar` instead of this method.
32 changes: 23 additions & 9 deletions compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -98,10 +98,10 @@ mod value;
use std::convert::TryFrom;
use std::fmt;
use std::io;
+ use std::io::{Read, Write};
use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};

- use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
use rustc_ast::LitKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{HashMapExt, Lock};
@@ -561,19 +561,33 @@ pub fn write_target_uint(
mut target: &mut [u8],
data: u128,
) -> Result<(), io::Error> {
- let len = target.len();
+ // This u128 holds an "any-size uint" (since smaller uints can fits in it)
+ // So we do not write all bytes of the u128, just the "payload".
match endianness {
- Endian::Little => target.write_uint128::<LittleEndian>(data, len),
- Endian::Big => target.write_uint128::<BigEndian>(data, len),
- }
+ Endian::Little => target.write(&data.to_le_bytes())?,
+ Endian::Big => target.write(&data.to_be_bytes()[16 - target.len()..])?,
+ };
+ debug_assert!(target.len() == 0); // We should have filled the target buffer.
+ Ok(())
}

#[inline]
pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
- match endianness {
- Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
- Endian::Big => source.read_uint128::<BigEndian>(source.len()),
- }
+ // This u128 holds an "any-size uint" (since smaller uints can fits in it)
+ let mut buf = [0u8; std::mem::size_of::<u128>()];
+ // So we do not read exactly 16 bytes into the u128, just the "payload".
+ let uint = match endianness {
+ Endian::Little => {
+ source.read(&mut buf)?;
+ Ok(u128::from_le_bytes(buf))
+ }
+ Endian::Big => {
+ source.read(&mut buf[16 - source.len()..])?;
+ Ok(u128::from_be_bytes(buf))
+ }
+ };
+ debug_assert!(source.len() == 0); // We should have consumed the source buffer.
+ uint
}

////////////////////////////////////////////////////////////////////////////////
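
The rewritten helpers drop the `byteorder` crate: `u128::to_le_bytes`/`to_be_bytes` produce the full 16-byte representation, only the `target.len()`-byte "payload" is pushed through `io::Write` for `&mut [u8]` (which advances the slice as it writes), and the mirror-image trick is used on the read side. A self-contained sketch of the big-endian round trip under those assumptions (`write_target_uint_be` and `read_target_uint_be` are simplified illustrations, not the rustc functions):

```rust
use std::io::{Read, Write};

// Write the low `target.len()` bytes of `data`, most significant byte first.
fn write_target_uint_be(mut target: &mut [u8], data: u128) -> std::io::Result<()> {
    // For big-endian the payload is the *tail* of the 16-byte representation.
    target.write(&data.to_be_bytes()[16 - target.len()..])?;
    debug_assert!(target.is_empty()); // `Write` for `&mut [u8]` advances the slice.
    Ok(())
}

fn read_target_uint_be(mut source: &[u8]) -> std::io::Result<u128> {
    let mut buf = [0u8; 16];
    // Fill only the tail of the buffer so the value stays right-aligned (high bytes zero).
    source.read(&mut buf[16 - source.len()..])?;
    debug_assert!(source.is_empty()); // `Read` for `&[u8]` advances the slice.
    Ok(u128::from_be_bytes(buf))
}

fn main() -> std::io::Result<()> {
    let mut bytes = [0u8; 2];
    write_target_uint_be(&mut bytes, 0xBEEF)?;
    assert_eq!(bytes, [0xBE, 0xEF]);
    assert_eq!(read_target_uint_be(&bytes)?, 0xBEEF);
    Ok(())
}
```

On the little-endian side the payload sits at the front of the byte array, which is why the committed code can pass `&data.to_le_bytes()` unsliced and let the shorter destination take only the bytes that fit.
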
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/mir/type_foldable.rs
@@ -175,7 +175,7 @@ impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
use crate::mir::Rvalue::*;
match *self {
Use(ref op) => Use(op.fold_with(folder)),
- Repeat(ref op, len) => Repeat(op.fold_with(folder), len),
+ Repeat(ref op, len) => Repeat(op.fold_with(folder), len.fold_with(folder)),
ThreadLocalRef(did) => ThreadLocalRef(did.fold_with(folder)),
Ref(region, bk, ref place) => {
Ref(region.fold_with(folder), bk, place.fold_with(folder))
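
The one-line change makes the MIR type folder recurse into the array length of `Rvalue::Repeat` instead of carrying it over unchanged; the length is a full constant rather than a plain number, so it can itself contain things the folder needs to rewrite. A toy illustration of that bug class, using a made-up `Fold` trait rather than rustc's `TypeFoldable`:

```rust
// A made-up folder that rewrites every integer reachable from a value.
trait Fold {
    fn fold_with(self, f: fn(i64) -> i64) -> Self;
}

#[derive(Debug, PartialEq, Clone, Copy)]
struct Const(i64);

impl Fold for Const {
    fn fold_with(self, f: fn(i64) -> i64) -> Self {
        Const(f(self.0))
    }
}

#[derive(Debug, PartialEq)]
enum Rvalue {
    // (operand, array length), mirroring the shape of `Rvalue::Repeat`.
    Repeat(Const, Const),
}

impl Fold for Rvalue {
    fn fold_with(self, f: fn(i64) -> i64) -> Self {
        match self {
            // The buggy arm read `Repeat(op.fold_with(f), len)`, silently skipping `len`;
            // the fix is to fold every component, length included.
            Rvalue::Repeat(op, len) => Rvalue::Repeat(op.fold_with(f), len.fold_with(f)),
        }
    }
}

fn main() {
    let folded = Rvalue::Repeat(Const(1), Const(4)).fold_with(|n| n * 10);
    assert_eq!(folded, Rvalue::Repeat(Const(10), Const(40)));
}
```
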
22 changes: 17 additions & 5 deletions compiler/rustc_mir/src/transform/inline.rs
@@ -4,7 +4,7 @@ use rustc_attr as attr;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
- use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
+ use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::{Subst, SubstsRef};
@@ -45,7 +45,8 @@ impl<'tcx> MirPass<'tcx> for Inline {
// based function.
debug!("function inlining is disabled when compiling with `instrument_coverage`");
} else {
- Inliner { tcx, source }.run_pass(body);
+ Inliner { tcx, source, codegen_fn_attrs: tcx.codegen_fn_attrs(source.def_id()) }
+ .run_pass(body);
}
}
}
@@ -54,6 +55,7 @@ impl<'tcx> MirPass<'tcx> for Inline {
struct Inliner<'tcx> {
tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
+ codegen_fn_attrs: &'tcx CodegenFnAttrs,
}

impl Inliner<'tcx> {
@@ -242,9 +244,19 @@ impl Inliner<'tcx> {
return false;
}

- // Avoid inlining functions marked as no_sanitize if sanitizer is enabled,
- // since instrumentation might be enabled and performed on the caller.
- if self.tcx.sess.opts.debugging_opts.sanitizer.intersects(codegen_fn_attrs.no_sanitize) {
+ let self_features = &self.codegen_fn_attrs.target_features;
+ let callee_features = &codegen_fn_attrs.target_features;
+ if callee_features.iter().any(|feature| !self_features.contains(feature)) {
+ debug!("`callee has extra target features - not inlining");
+ return false;
+ }
+
+ let self_no_sanitize =
+ self.codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
+ let callee_no_sanitize =
+ codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
+ if self_no_sanitize != callee_no_sanitize {
+ debug!("`callee has incompatible no_sanitize attribute - not inlining");
return false;
}

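
Taken together, the new inliner checks require that the callee's `#[target_feature]` set be a subset of the caller's, and that, restricted to the sanitizers actually enabled, both sides make the same `#[no_sanitize]` choices. A standalone sketch of the two predicates using plain string sets (`FnAttrs` and `may_inline` are hypothetical; rustc uses `Symbol` lists and `SanitizerSet` bitflags instead):

```rust
use std::collections::HashSet;

// Hypothetical, simplified view of the per-function attributes the inliner compares.
struct FnAttrs {
    target_features: HashSet<String>,
    no_sanitize: HashSet<String>, // e.g. {"address", "thread"}
}

fn may_inline(caller: &FnAttrs, callee: &FnAttrs, enabled_sanitizers: &HashSet<String>) -> bool {
    // The callee's target features must be a subset of the caller's; otherwise the
    // inlined body could rely on instructions the caller was not compiled to assume.
    if !callee.target_features.is_subset(&caller.target_features) {
        return false;
    }
    // Restricted to the sanitizers that are actually enabled, both functions must
    // opt out of exactly the same ones, or instrumentation decisions would leak.
    let caller_off: HashSet<_> = caller.no_sanitize.intersection(enabled_sanitizers).collect();
    let callee_off: HashSet<_> = callee.no_sanitize.intersection(enabled_sanitizers).collect();
    caller_off == callee_off
}

fn main() {
    let caller = FnAttrs {
        target_features: vec!["avx2".to_string()].into_iter().collect(),
        no_sanitize: HashSet::new(),
    };
    let callee = FnAttrs {
        target_features: vec!["avx2".to_string(), "avx512f".to_string()].into_iter().collect(),
        no_sanitize: vec!["address".to_string()].into_iter().collect(),
    };
    let enabled: HashSet<String> = vec!["address".to_string()].into_iter().collect();
    // The callee needs `avx512f` and opts out of an enabled sanitizer: not inlinable here.
    assert!(!may_inline(&caller, &callee, &enabled));
}
```
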
5 changes: 1 addition & 4 deletions compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -262,10 +262,7 @@ impl<'a> TokenTreesReader<'a> {
}
_ => {
let tt = TokenTree::Token(self.token.take());
- let mut is_joint = self.bump();
- if !self.token.is_op() {
- is_joint = NonJoint;
- }
+ let is_joint = self.bump();
Ok((tt, is_joint))
}
}
10 changes: 5 additions & 5 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -822,15 +822,15 @@ impl<'a> Parser<'a> {
}

let frame = &self.token_cursor.frame;
- looker(&match frame.tree_cursor.look_ahead(dist - 1) {
+ match frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
- TokenTree::Token(token) => token,
+ TokenTree::Token(token) => looker(token),
TokenTree::Delimited(dspan, delim, _) => {
- Token::new(token::OpenDelim(delim), dspan.open)
+ looker(&Token::new(token::OpenDelim(delim.clone()), dspan.open))
}
},
- None => Token::new(token::CloseDelim(frame.delim), frame.span.close),
- })
+ None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
+ }
}

/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
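
Because the tree cursor's `look_ahead` now returns a reference, the parser applies `looker` inside each match arm instead of to the result of the match: the `Delimited` and `None` arms build a temporary `Token` that only lives for the duration of that arm, so the borrow handed to the closure has to be created where the temporary exists. A minimal sketch of that pattern with hypothetical types (`Token`, `Peeked`, and `with_peeked` are illustrative, not the real parser API):

```rust
// Hypothetical stand-ins for the parser's token types.
struct Token(String);

enum Peeked<'a> {
    Existing(&'a Token), // a token we can borrow from the stream
    EndOfStream,         // no token: a synthetic one must be built on the spot
}

fn with_peeked<R>(peeked: Peeked<'_>, looker: impl FnOnce(&Token) -> R) -> R {
    match peeked {
        // Borrowed case: no clone, just pass the reference through.
        Peeked::Existing(token) => looker(token),
        // Synthetic case: the temporary only lives for this arm, so the
        // callback has to be invoked here rather than after the match.
        Peeked::EndOfStream => looker(&Token("<eof>".to_string())),
    }
}

fn main() {
    let tok = Token("fn".to_string());
    let len = with_peeked(Peeked::Existing(&tok), |t| t.0.len());
    assert_eq!(len, 2);
    let eof = with_peeked(Peeked::EndOfStream, |t| t.0.clone());
    assert_eq!(eof, "<eof>");
}
```
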
44 changes: 44 additions & 0 deletions library/alloc/src/rc.rs
@@ -325,6 +325,50 @@ impl<T> Rc<T> {
)
}

/// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
/// to upgrade the weak reference before this function returns will result
/// in a `None` value. However, the weak reference may be cloned freely and
/// stored for use at a later time.
#[unstable(feature = "arc_new_cyclic", issue = "75861")]
pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Rc<T> {
// Construct the inner in the "uninitialized" state with a single
// weak reference.
let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
strong: Cell::new(0),
weak: Cell::new(1),
value: mem::MaybeUninit::<T>::uninit(),
})
.into();

let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();

let weak = Weak { ptr: init_ptr };

// It's important we don't give up ownership of the weak pointer, or
// else the memory might be freed by the time `data_fn` returns. If
// we really wanted to pass ownership, we could create an additional
// weak pointer for ourselves, but this would result in additional
// updates to the weak reference count which might not be necessary
// otherwise.
let data = data_fn(&weak);

unsafe {
let inner = init_ptr.as_ptr();
ptr::write(&raw mut (*inner).value, data);

let prev_value = (*inner).strong.get();
debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
(*inner).strong.set(1);
}

let strong = Rc::from_inner(init_ptr);

// Strong references should collectively own a shared weak reference,
// so don't run the destructor for our old weak reference.
mem::forget(weak);
strong
}

/// Constructs a new `Rc` with uninitialized contents.
///
/// # Examples
66 changes: 66 additions & 0 deletions library/alloc/src/rc/tests.rs
@@ -434,3 +434,69 @@ fn test_array_from_slice() {
let a: Result<Rc<[u32; 2]>, _> = r.clone().try_into();
assert!(a.is_err());
}

#[test]
fn test_rc_cyclic_with_zero_refs() {
struct ZeroRefs {
inner: Weak<ZeroRefs>,
}

let zero_refs = Rc::new_cyclic(|inner| {
assert_eq!(inner.strong_count(), 0);
assert!(inner.upgrade().is_none());
ZeroRefs { inner: Weak::new() }
});

assert_eq!(Rc::strong_count(&zero_refs), 1);
assert_eq!(Rc::weak_count(&zero_refs), 0);
assert_eq!(zero_refs.inner.strong_count(), 0);
assert_eq!(zero_refs.inner.weak_count(), 0);
}

#[test]
fn test_rc_cyclic_with_one_ref() {
struct OneRef {
inner: Weak<OneRef>,
}

let one_ref = Rc::new_cyclic(|inner| {
assert_eq!(inner.strong_count(), 0);
assert!(inner.upgrade().is_none());
OneRef { inner: inner.clone() }
});

assert_eq!(Rc::strong_count(&one_ref), 1);
assert_eq!(Rc::weak_count(&one_ref), 1);

let one_ref2 = Weak::upgrade(&one_ref.inner).unwrap();
assert!(Rc::ptr_eq(&one_ref, &one_ref2));

assert_eq!(one_ref.inner.strong_count(), 2);
assert_eq!(one_ref.inner.weak_count(), 1);
}

#[test]
fn test_rc_cyclic_with_two_ref() {
struct TwoRefs {
inner: Weak<TwoRefs>,
inner1: Weak<TwoRefs>,
}

let two_refs = Rc::new_cyclic(|inner| {
assert_eq!(inner.strong_count(), 0);
assert!(inner.upgrade().is_none());
TwoRefs { inner: inner.clone(), inner1: inner.clone() }
});

assert_eq!(Rc::strong_count(&two_refs), 1);
assert_eq!(Rc::weak_count(&two_refs), 2);

let two_ref3 = Weak::upgrade(&two_refs.inner).unwrap();
assert!(Rc::ptr_eq(&two_refs, &two_ref3));

let two_ref2 = Weak::upgrade(&two_refs.inner1).unwrap();
assert!(Rc::ptr_eq(&two_refs, &two_ref2));

assert_eq!(Rc::strong_count(&two_refs), 3);
assert_eq!(Rc::weak_count(&two_refs), 2);
}
4 changes: 3 additions & 1 deletion library/alloc/src/vec.rs
@@ -159,7 +159,7 @@ use crate::raw_vec::RawVec;
/// # Slicing
///
/// A `Vec` can be mutable. Slices, on the other hand, are read-only objects.
- /// To get a slice, use `&`. Example:
+ /// To get a [slice], use [`&`]. Example:
///
/// ```
/// fn read_slice(slice: &[usize]) {
@@ -287,6 +287,8 @@ use crate::raw_vec::RawVec;
/// [`insert`]: Vec::insert
/// [`reserve`]: Vec::reserve
/// [owned slice]: Box
+ /// [slice]: ../../std/primitive.slice.html
+ /// [`&`]: ../../std/primitive.reference.html
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
pub struct Vec<T> {
2 changes: 1 addition & 1 deletion src/bootstrap/builder.rs
@@ -382,7 +382,7 @@ impl<'a> Builder<'a> {
native::Lld
),
Kind::Check | Kind::Clippy | Kind::Fix | Kind::Format => {
- describe!(check::Std, check::Rustc, check::Rustdoc, check::Clippy)
+ describe!(check::Std, check::Rustc, check::Rustdoc, check::Clippy, check::Bootstrap)
}
Kind::Test => describe!(
crate::toolstate::ToolStateCheck,