diff --git a/RELEASES.md b/RELEASES.md index 971a63b240f3e..08040f4815836 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1060,7 +1060,7 @@ Version 1.52.1 (2021-05-10) This release disables incremental compilation, unless the user has explicitly opted in via the newly added RUSTC_FORCE_INCREMENTAL=1 environment variable. -This is due to the widespread, and frequently occuring, breakage encountered by +This is due to the widespread, and frequently occurring, breakage encountered by Rust users due to newly enabled incremental verification in 1.52.0. Notably, Rust users **should** upgrade to 1.52.0 or 1.52.1: the bugs that are detected by newly added incremental verification are still present in past stable versions, diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index 974e59b65ec91..be3f6a12706f8 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -1064,7 +1064,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { let val_type = value.get_type(); match (type_is_pointer(val_type), type_is_pointer(dest_ty)) { (false, true) => { - // NOTE: Projecting a field of a pointer type will attemp a cast from a signed char to + // NOTE: Projecting a field of a pointer type will attempt a cast from a signed char to // a pointer, which is not supported by gccjit. return self.cx.context.new_cast(None, self.inttoptr(value, val_type.make_pointer()), dest_ty); }, diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 3b06587061d9d..e9d13a4ebaf8c 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -369,12 +369,12 @@ pub fn target_cpu(sess: &Session) -> &str { /// The list of LLVM features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`, /// `--target` and similar). pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec { - // Features that come earlier are overriden by conflicting features later in the string. + // Features that come earlier are overridden by conflicting features later in the string. // Typically we'll want more explicit settings to override the implicit ones, so: // - // * Features from -Ctarget-cpu=*; are overriden by [^1] - // * Features implied by --target; are overriden by - // * Features from -Ctarget-feature; are overriden by + // * Features from -Ctarget-cpu=*; are overridden by [^1] + // * Features implied by --target; are overridden by + // * Features from -Ctarget-feature; are overridden by // * function specific features. // // [^1]: target-cpu=native is handled here, other target-cpu values are handled implicitly @@ -383,7 +383,7 @@ pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec> InterpCx<'mir, 'tcx, M> { let val = self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal)?; // This can still fail: - // * During ConstProp, with `TooGeneric` or since the `requried_consts` were not all + // * During ConstProp, with `TooGeneric` or since the `required_consts` were not all // checked yet. // * During CTFE, since promoteds in `const`/`static` initializer bodies can fail. 
diff --git a/compiler/rustc_driver/src/lib.rs b/compiler/rustc_driver/src/lib.rs index b36ce63dda756..43c9e9296b90a 100644 --- a/compiler/rustc_driver/src/lib.rs +++ b/compiler/rustc_driver/src/lib.rs @@ -1228,7 +1228,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) { /// /// A custom rustc driver can skip calling this to set up a custom ICE hook. pub fn install_ice_hook() { - // If the user has not explicitly overriden "RUST_BACKTRACE", then produce + // If the user has not explicitly overridden "RUST_BACKTRACE", then produce // full backtraces. When a compiler ICE happens, we want to gather // as much information as possible to present in the issue opened // by the user. Compiler developers and other rustc users can diff --git a/compiler/rustc_error_codes/src/error_codes/E0772.md b/compiler/rustc_error_codes/src/error_codes/E0772.md index 262e52351ef01..3b73abaf776c2 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0772.md +++ b/compiler/rustc_error_codes/src/error_codes/E0772.md @@ -45,7 +45,7 @@ trait object's internal data to be accessed safely from any trait methods. This rule also goes for any lifetime any struct made into a trait object may have. In the implementation for `dyn Person`, the `'2` lifetime representing the -internal data was ommitted, meaning that the compiler inferred the lifetime +internal data was omitted, meaning that the compiler inferred the lifetime `'static`. As a result, the implementation's `is_cool` is inferred by the compiler to look like this: diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 1b97618050939..4af376b983c94 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1841,7 +1841,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { self.flat_map_node(node) } - fn flat_map_stmt(&mut self, mut node: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { + fn flat_map_stmt(&mut self, node: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { // FIXME: invocations in semicolon-less expressions positions are expanded as expressions, // changing that requires some compatibility measures. if node.is_expr() { @@ -1863,7 +1863,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { self.cx.current_expansion.is_trailing_mac = false; res } - _ => assign_id!(self, &mut node.id, || noop_flat_map_stmt(node, self)), + _ => noop_flat_map_stmt(node, self), }; } diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs index a419612e315df..bb36dfd793d4a 100644 --- a/compiler/rustc_expand/src/mbe/macro_parser.rs +++ b/compiler/rustc_expand/src/mbe/macro_parser.rs @@ -154,7 +154,7 @@ type NamedMatchVec = SmallVec<[NamedMatch; 4]>; /// lifetime. By separating `'tt` from `'root`, we can show that. #[derive(Clone)] struct MatcherPos<'root, 'tt> { - /// The token or sequence of tokens that make up the matcher + /// The token or sequence of tokens that make up the matcher. `elts` is short for "elements". top_elts: TokenTreeOrTokenTreeSlice<'tt>, /// The position of the "dot" in this matcher @@ -184,17 +184,8 @@ struct MatcherPos<'root, 'tt> { /// in this matcher. match_hi: usize, - // The following fields are used if we are matching a repetition. If we aren't, they should be - // `None`. - /// The KleeneOp of this sequence if we are in a repetition. - seq_op: Option, - - /// The separator if we are in a repetition. - sep: Option, - - /// The "parent" matcher position if we are in a repetition. 
That is, the matcher position just - /// before we enter the sequence. - up: Option>, + /// This field is only used if we are matching a repetition. + repetition: Option>, /// Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from /// a delimited token tree (e.g., something wrapped in `(` `)`) or to get the contents of a doc @@ -207,7 +198,38 @@ struct MatcherPos<'root, 'tt> { stack: SmallVec<[MatcherTtFrame<'tt>; 1]>, } +// This type is used a lot. Make sure it doesn't unintentionally get bigger. +#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] +rustc_data_structures::static_assert_size!(MatcherPos<'_, '_>, 192); + impl<'root, 'tt> MatcherPos<'root, 'tt> { + /// Generates the top-level matcher position in which the "dot" is before the first token of + /// the matcher `ms`. + fn new(ms: &'tt [TokenTree]) -> Self { + let match_idx_hi = count_names(ms); + MatcherPos { + // Start with the top level matcher given to us. + top_elts: TtSeq(ms), + + // The "dot" is before the first token of the matcher. + idx: 0, + + // Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in + // `top_elts`. `match_lo` for `top_elts` is 0 and `match_hi` is `match_idx_hi`. + // `match_cur` is 0 since we haven't actually matched anything yet. + matches: create_matches(match_idx_hi), + match_lo: 0, + match_cur: 0, + match_hi: match_idx_hi, + + // Haven't descended into any delimiters, so this is empty. + stack: smallvec![], + + // Haven't descended into any sequences, so this is `None`. + repetition: None, + } + } + /// Adds `m` as a named match for the `idx`-th metavar. fn push_match(&mut self, idx: usize, m: NamedMatch) { let matches = Lrc::make_mut(&mut self.matches[idx]); @@ -215,6 +237,19 @@ impl<'root, 'tt> MatcherPos<'root, 'tt> { } } +#[derive(Clone)] +struct MatcherPosRepetition<'root, 'tt> { + /// The KleeneOp of this sequence. + seq_op: mbe::KleeneOp, + + /// The separator. + sep: Option, + + /// The "parent" matcher position. That is, the matcher position just before we enter the + /// sequence. + up: MatcherPosHandle<'root, 'tt>, +} + // Lots of MatcherPos instances are created at runtime. Allocating them on the // heap is slow. Furthermore, using SmallVec to allocate them all // on the stack is also slow, because MatcherPos is quite a large type and @@ -258,6 +293,12 @@ impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> { } } +enum EofItems<'root, 'tt> { + None, + One(MatcherPosHandle<'root, 'tt>), + Multiple, +} + /// Represents the possible results of an attempted parse. crate enum ParseResult { /// Parsed successfully. @@ -300,35 +341,6 @@ fn create_matches(len: usize) -> Box<[Lrc]> { .into_boxed_slice() } -/// Generates the top-level matcher position in which the "dot" is before the first token of the -/// matcher `ms`. -fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree]) -> MatcherPos<'root, 'tt> { - let match_idx_hi = count_names(ms); - let matches = create_matches(match_idx_hi); - MatcherPos { - // Start with the top level matcher given to us - top_elts: TtSeq(ms), // "elts" is an abbr. for "elements" - // The "dot" is before the first token of the matcher - idx: 0, - - // Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in `top_elts`. - // `match_lo` for `top_elts` is 0 and `match_hi` is `matches.len()`. `match_cur` is 0 since - // we haven't actually matched anything yet. 
- matches, - match_lo: 0, - match_cur: 0, - match_hi: match_idx_hi, - - // Haven't descended into any delimiters, so empty stack - stack: smallvec![], - - // Haven't descended into any sequences, so both of these are `None`. - seq_op: None, - sep: None, - up: None, - } -} - /// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`: /// so it is associated with a single ident in a parse, and all /// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type @@ -475,10 +487,10 @@ fn inner_parse_loop<'root, 'tt>( sess: &ParseSess, cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, next_items: &mut Vec>, - eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, + eof_items: &mut EofItems<'root, 'tt>, token: &Token, -) -> ParseResult<()> { +) -> Result<(), (rustc_span::Span, String)> { // Pop items from `cur_items` until it is empty. while let Some(mut item) = cur_items.pop() { // When unzipped trees end, remove them. This corresponds to backtracking out of a @@ -504,7 +516,7 @@ fn inner_parse_loop<'root, 'tt>( // We are repeating iff there is a parent. If the matcher is inside of a repetition, // then we could be at the end of a sequence or at the beginning of the next // repetition. - if item.up.is_some() { + if let Some(repetition) = &item.repetition { // At this point, regardless of whether there is a separator, we should add all // matches from the complete repetition of the sequence to the shared, top-level // `matches` list (actually, `up.matches`, which could itself not be the top-level, @@ -515,7 +527,7 @@ fn inner_parse_loop<'root, 'tt>( // NOTE: removing the condition `idx == len` allows trailing separators. if idx == len { // Get the `up` matcher - let mut new_pos = item.up.clone().unwrap(); + let mut new_pos = repetition.up.clone(); // Add matches from this repetition to the `matches` of `up` for idx in item.match_lo..item.match_hi { @@ -530,32 +542,33 @@ fn inner_parse_loop<'root, 'tt>( } // Check if we need a separator. - if idx == len && item.sep.is_some() { + if idx == len && repetition.sep.is_some() { // We have a separator, and it is the current token. We can advance past the // separator token. - if item.sep.as_ref().map_or(false, |sep| token_name_eq(token, sep)) { + if repetition.sep.as_ref().map_or(false, |sep| token_name_eq(token, sep)) { item.idx += 1; next_items.push(item); } - } - // We don't need a separator. Move the "dot" back to the beginning of the matcher - // and try to match again UNLESS we are only allowed to have _one_ repetition. - else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) { + } else if repetition.seq_op != mbe::KleeneOp::ZeroOrOne { + // We don't need a separator. Move the "dot" back to the beginning of the + // matcher and try to match again UNLESS we are only allowed to have _one_ + // repetition. item.match_cur = item.match_lo; item.idx = 0; cur_items.push(item); } + } else { + // If we are not in a repetition, then being at the end of a matcher means that we + // have reached the potential end of the input. + *eof_items = match eof_items { + EofItems::None => EofItems::One(item), + EofItems::One(_) | EofItems::Multiple => EofItems::Multiple, + } } - // If we are not in a repetition, then being at the end of a matcher means that we have - // reached the potential end of the input. - else { - eof_items.push(item); - } - } - // We are in the middle of a matcher. 
- else { - // Look at what token in the matcher we are trying to match the current token (`token`) - // against. Depending on that, we may generate new items. + } else { + // We are in the middle of a matcher. Look at what token in the matcher we are trying + // to match the current token (`token`) against. Depending on that, we may generate new + // items. match item.top_elts.get_tt(idx) { // Need to descend into a sequence TokenTree::Sequence(sp, seq) => { @@ -578,14 +591,16 @@ fn inner_parse_loop<'root, 'tt>( let matches = create_matches(item.matches.len()); cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos { stack: smallvec![], - sep: seq.separator.clone(), - seq_op: Some(seq.kleene.op), idx: 0, matches, match_lo: item.match_cur, match_cur: item.match_cur, match_hi: item.match_cur + seq.num_captures, - up: Some(item), + repetition: Some(MatcherPosRepetition { + up: item, + sep: seq.separator.clone(), + seq_op: seq.kleene.op, + }), top_elts: Tt(TokenTree::Sequence(sp, seq)), }))); } @@ -593,7 +608,7 @@ fn inner_parse_loop<'root, 'tt>( // We need to match a metavar (but the identifier is invalid)... this is an error TokenTree::MetaVarDecl(span, _, None) => { if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() { - return Error(span, "missing fragment specifier".to_string()); + return Err((span, "missing fragment specifier".to_string())); } } @@ -641,7 +656,7 @@ fn inner_parse_loop<'root, 'tt>( } // Yay a successful parse (so far)! - Success(()) + Ok(()) } /// Use the given sequence of token trees (`ms`) as a matcher. Match the token @@ -659,17 +674,18 @@ pub(super) fn parse_tt( // // This MatcherPos instance is allocated on the stack. All others -- and // there are frequently *no* others! -- are allocated on the heap. - let mut initial = initial_matcher_pos(ms); + let mut initial = MatcherPos::new(ms); let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)]; let mut next_items = Vec::new(); loop { + assert!(next_items.is_empty()); + // Matcher positions black-box parsed by parser.rs (`parser`) let mut bb_items = SmallVec::new(); // Matcher positions that would be valid if the macro invocation was over now - let mut eof_items = SmallVec::new(); - assert!(next_items.is_empty()); + let mut eof_items = EofItems::None; // Process `cur_items` until either we have finished the input or we need to get some // parsing from the black-box parser done. The result is that `next_items` will contain a @@ -678,37 +694,34 @@ pub(super) fn parse_tt( parser.sess, &mut cur_items, &mut next_items, - &mut eof_items, &mut bb_items, + &mut eof_items, &parser.token, ) { - Success(_) => {} - Failure(token, msg) => return Failure(token, msg), - Error(sp, msg) => return Error(sp, msg), - ErrorReported => return ErrorReported, + Ok(()) => {} + Err((sp, msg)) => return Error(sp, msg), } // inner parse loop handled all cur_items, so it's empty assert!(cur_items.is_empty()); - // We need to do some post processing after the `inner_parser_loop`. + // We need to do some post processing after the `inner_parse_loop`. // // Error messages here could be improved with links to original rules. // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise, // either the parse is ambiguous (which should never happen) or there is a syntax error. 
if parser.token == token::Eof { - if eof_items.len() == 1 { - let matches = - eof_items[0].matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap()); - return nameize(parser.sess, ms, matches); - } else if eof_items.len() > 1 { - return Error( - parser.token.span, - "ambiguity: multiple successful parses".to_string(), - ); - } else { - return Failure( + return match eof_items { + EofItems::One(mut eof_item) => { + let matches = + eof_item.matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap()); + nameize(parser.sess, ms, matches) + } + EofItems::Multiple => { + Error(parser.token.span, "ambiguity: multiple successful parses".to_string()) + } + EofItems::None => Failure( Token::new( token::Eof, if parser.token.span.is_dummy() { @@ -718,12 +731,12 @@ pub(super) fn parse_tt( }, ), "missing tokens in macro arguments", - ); - } + ), + }; } - // Performance hack: eof_items may share matchers via Rc with other things that we want - // to modify. Dropping eof_items now may drop these refcounts to 1, preventing an - // unnecessary implicit clone later in Rc::make_mut. + // Performance hack: `eof_items` may share matchers via `Rc` with other things that we want + // to modify. Dropping `eof_items` now may drop these refcounts to 1, preventing an + // unnecessary implicit clone later in `Rc::make_mut`. drop(eof_items); // If there are no possible next positions AND we aren't waiting for the black-box parser, @@ -731,9 +744,10 @@ pub(super) fn parse_tt( if bb_items.is_empty() && next_items.is_empty() { return Failure(parser.token.clone(), "no rules expected this token in macro call"); } - // Another possibility is that we need to call out to parse some rust nonterminal - // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong. - else if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 { + + if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 { + // We need to call out to parse some rust nonterminal (black-box) parser. But something + // is wrong, because there is not EXACTLY ONE of these. let nts = bb_items .iter() .map(|item| match item.top_elts.get_tt(item.idx) { @@ -755,15 +769,15 @@ pub(super) fn parse_tt( ), ); } - // Dump all possible `next_items` into `cur_items` for the next iteration. - else if !next_items.is_empty() { - // Now process the next token + + if !next_items.is_empty() { + // Dump all possible `next_items` into `cur_items` for the next iteration. Then process + // the next token. cur_items.extend(next_items.drain(..)); parser.to_mut().bump(); - } - // Finally, we have the case where we need to call the black-box parser to get some - // nonterminal. - else { + } else { + // Finally, we have the case where we need to call the black-box parser to get some + // nonterminal. assert_eq!(bb_items.len(), 1); let mut item = bb_items.pop().unwrap(); diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 54000527c15b9..760dea77f9c2b 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -233,7 +233,7 @@ pub(super) fn transcribe<'a>( } else { // Other variables are emitted into the output stream as groups with // `Delimiter::None` to maintain parsing priorities. - // `Interpolated` is currenty used for such groups in rustc parser. + // `Interpolated` is currently used for such groups in rustc parser. 
marker.visit_span(&mut sp); TokenTree::token(token::Interpolated(nt.clone()), sp) }; diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 869cada400f0f..0aef5982cff50 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -847,7 +847,7 @@ impl server::Span for Rustc<'_, '_> { /// the `quote` proc-macro. This will save the span of /// "hello" into the metadata of `my_proc_macro`. As a result, /// the body of `my_proc_macro` (after expansion) will end - /// up containg a call that looks like this: + /// up containing a call that looks like this: /// `proc_macro::Ident::new("hello", proc_macro::Span::recover_proc_macro_span(0))` /// /// where `0` is the id returned by this function. diff --git a/compiler/rustc_incremental/src/persist/load.rs b/compiler/rustc_incremental/src/persist/load.rs index 870c3f8068245..908a936142475 100644 --- a/compiler/rustc_incremental/src/persist/load.rs +++ b/compiler/rustc_incremental/src/persist/load.rs @@ -27,7 +27,7 @@ pub enum LoadResult { }, /// The file either didn't exist or was produced by an incompatible compiler version. DataOutOfDate, - /// An error occured. + /// An error occurred. Error { #[allow(missing_docs)] message: String, diff --git a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs index 3bc30f0220d40..16f7504cbbe58 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs @@ -928,7 +928,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { /// performing that replacement, we'll turn all remaining infer type params to use their name from /// their definition, and replace all the `[type error]`s back to being infer so they display in /// the output as `_`. If we didn't go through `[type error]`, we would either show all type params -/// by their name *or* `_`, neither of which is desireable: we want to show all types that we could +/// by their name *or* `_`, neither of which is desirable: we want to show all types that we could /// infer as `_` to reduce verbosity and avoid telling the user about unnecessary type annotations. 
struct ResolvedTypeParamEraser<'tcx> { tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs index e0420291aa3f5..4710eae6189a5 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs @@ -58,7 +58,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { bug!("Node not an impl."); }; - // Next, let's figure out the set of trait objects with implict static bounds + // Next, let's figure out the set of trait objects with implicit static bounds let ty = self.tcx().type_of(*impl_def_id); let mut v = super::static_impl_trait::TraitObjectVisitor(FxHashSet::default()); v.visit_ty(ty); diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index c3e168c472729..06f73c1c91600 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -26,7 +26,7 @@ use rustc_middle::mir::interpret; use rustc_middle::thir; use rustc_middle::traits::specialization_graph; use rustc_middle::ty::codec::TyEncoder; -use rustc_middle::ty::fast_reject::{self, SimplifiedType, SimplifyParams}; +use rustc_middle::ty::fast_reject::{self, SimplifiedType, TreatParams}; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, SymbolName, Ty, TyCtxt}; use rustc_serialize::{opaque, Encodable, Encoder}; @@ -2043,7 +2043,7 @@ impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplsVisitor<'tcx> { let simplified_self_ty = fast_reject::simplify_type( self.tcx, trait_ref.self_ty(), - SimplifyParams::No, + TreatParams::AsPlaceholders, ); self.impls diff --git a/compiler/rustc_middle/src/ty/fast_reject.rs b/compiler/rustc_middle/src/ty/fast_reject.rs index 3c1ac66e2d136..c0dd4db2945dc 100644 --- a/compiler/rustc_middle/src/ty/fast_reject.rs +++ b/compiler/rustc_middle/src/ty/fast_reject.rs @@ -49,9 +49,14 @@ where } #[derive(PartialEq, Eq, Debug, Clone, Copy)] -pub enum SimplifyParams { - Yes, - No, +pub enum TreatParams { + /// Treat parameters as bound types in the given environment. + /// + /// For this to be correct the input has to be fully normalized + /// in its param env as it may otherwise cause us to ignore + /// potentially applying impls. + AsBoundTypes, + AsPlaceholders, } /// Tries to simplify a type by only returning the outermost injective¹ layer, if one exists. @@ -59,26 +64,21 @@ pub enum SimplifyParams { /// The idea is to get something simple that we can use to quickly decide if two types could unify, /// for example during method lookup. /// -/// A special case here are parameters and projections. Projections can be normalized to -/// a different type, meaning that `::Assoc` and `u8` can be unified, even though -/// their outermost layer is different while parameters like `T` of impls are later replaced -/// with an inference variable, which then also allows unification with other types. +/// A special case here are parameters and projections, which are only injective +/// if they are treated as bound types. /// -/// When using `SimplifyParams::Yes`, we still return a simplified type for params and projections², -/// the reasoning for this can be seen at the places doing this. +/// For example when storing impls based on their simplified self type, we treat +/// generic parameters as placeholders. 
We must not simplify them here, +/// as they can unify with any other type. /// +/// With projections we have to be even more careful, as even when treating them as bound types +/// this is still only correct if they are fully normalized. /// -/// ¹ meaning that if two outermost layers are different, then the whole types are also different. -/// ² FIXME(@lcnr): this seems like it can actually end up being unsound with the way it's used during -/// candidate selection. We do not consider non blanket impls for `<_ as Trait>::Assoc` even -/// though `_` can be inferred to a concrete type later at which point a concrete impl -/// could actually apply. After experimenting for about an hour I wasn't able to cause any issues -/// this way so I am not going to change this until we actually find an issue as I am really -/// interesting in getting an actual test for this. -pub fn simplify_type( - tcx: TyCtxt<'_>, - ty: Ty<'_>, - can_simplify_params: SimplifyParams, +/// ¹ meaning that if the outermost layers are different, then the whole types are also different. +pub fn simplify_type<'tcx>( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + treat_params: TreatParams, ) -> Option { match *ty.kind() { ty::Bool => Some(BoolSimplifiedType), @@ -91,7 +91,7 @@ pub fn simplify_type( ty::Array(..) => Some(ArraySimplifiedType), ty::Slice(..) => Some(SliceSimplifiedType), ty::RawPtr(ptr) => Some(PtrSimplifiedType(ptr.mutbl)), - ty::Dynamic(ref trait_info, ..) => match trait_info.principal_def_id() { + ty::Dynamic(trait_info, ..) => match trait_info.principal_def_id() { Some(principal_def_id) if !tcx.trait_is_auto(principal_def_id) => { Some(TraitSimplifiedType(principal_def_id)) } @@ -100,24 +100,21 @@ pub fn simplify_type( ty::Ref(_, _, mutbl) => Some(RefSimplifiedType(mutbl)), ty::FnDef(def_id, _) | ty::Closure(def_id, _) => Some(ClosureSimplifiedType(def_id)), ty::Generator(def_id, _, _) => Some(GeneratorSimplifiedType(def_id)), - ty::GeneratorWitness(ref tys) => { - Some(GeneratorWitnessSimplifiedType(tys.skip_binder().len())) - } + ty::GeneratorWitness(tys) => Some(GeneratorWitnessSimplifiedType(tys.skip_binder().len())), ty::Never => Some(NeverSimplifiedType), - ty::Tuple(ref tys) => Some(TupleSimplifiedType(tys.len())), - ty::FnPtr(ref f) => Some(FunctionSimplifiedType(f.skip_binder().inputs().len())), - ty::Projection(_) | ty::Param(_) => { - if can_simplify_params == SimplifyParams::Yes { - // In normalized types, projections don't unify with - // anything. when lazy normalization happens, this - // will change. It would still be nice to have a way - // to deal with known-not-to-unify-with-anything - // projections (e.g., the likes of <__S as Encoder>::Error). + ty::Tuple(tys) => Some(TupleSimplifiedType(tys.len())), + ty::FnPtr(f) => Some(FunctionSimplifiedType(f.skip_binder().inputs().len())), + ty::Param(_) | ty::Projection(_) => match treat_params { + // When treated as bound types, projections don't unify with + // anything as long as they are fully normalized. + // + // We will have to be careful with lazy normalization here. + TreatParams::AsBoundTypes => { + debug!("treating `{}` as a bound type", ty); Some(ParameterSimplifiedType) - } else { - None } - } + TreatParams::AsPlaceholders => None, + }, ty::Opaque(def_id, _) => Some(OpaqueSimplifiedType(def_id)), ty::Foreign(def_id) => Some(ForeignSimplifiedType(def_id)), ty::Placeholder(..) | ty::Bound(..) 
| ty::Infer(_) | ty::Error(_) => None, diff --git a/compiler/rustc_middle/src/ty/trait_def.rs b/compiler/rustc_middle/src/ty/trait_def.rs index 6100eb48a1863..8ebeca50c4174 100644 --- a/compiler/rustc_middle/src/ty/trait_def.rs +++ b/compiler/rustc_middle/src/ty/trait_def.rs @@ -1,5 +1,5 @@ use crate::traits::specialization_graph; -use crate::ty::fast_reject::{self, SimplifiedType, SimplifyParams}; +use crate::ty::fast_reject::{self, SimplifiedType, TreatParams}; use crate::ty::fold::TypeFoldable; use crate::ty::{Ident, Ty, TyCtxt}; use rustc_hir as hir; @@ -150,7 +150,7 @@ impl<'tcx> TyCtxt<'tcx> { self_ty: Ty<'tcx>, ) -> impl Iterator + 'tcx { let impls = self.trait_impls_of(def_id); - if let Some(simp) = fast_reject::simplify_type(self, self_ty, SimplifyParams::No) { + if let Some(simp) = fast_reject::simplify_type(self, self_ty, TreatParams::AsPlaceholders) { if let Some(impls) = impls.non_blanket_impls.get(&simp) { return impls.iter().copied(); } @@ -180,14 +180,14 @@ impl<'tcx> TyCtxt<'tcx> { } } - // Note that we're using `SimplifyParams::Yes` to query `non_blanket_impls` while using - // `SimplifyParams::No` while actually adding them. + // Note that we're using `TreatParams::AsBoundTypes` to query `non_blanket_impls` while using + // `TreatParams::AsPlaceholders` while actually adding them. // // This way, when searching for some impl for `T: Trait`, we do not look at any impls // whose outer level is not a parameter or projection. Especially for things like // `T: Clone` this is incredibly useful as we would otherwise look at all the impls // of `Clone` for `Option`, `Vec`, `ConcreteType` and so on. - if let Some(simp) = fast_reject::simplify_type(self, self_ty, SimplifyParams::Yes) { + if let Some(simp) = fast_reject::simplify_type(self, self_ty, TreatParams::AsBoundTypes) { if let Some(impls) = impls.non_blanket_impls.get(&simp) { for &impl_def_id in impls { if let result @ Some(_) = f(impl_def_id) { @@ -247,7 +247,7 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait } if let Some(simplified_self_ty) = - fast_reject::simplify_type(tcx, impl_self_ty, SimplifyParams::No) + fast_reject::simplify_type(tcx, impl_self_ty, TreatParams::AsPlaceholders) { impls.non_blanket_impls.entry(simplified_self_ty).or_default().push(impl_def_id); } else { diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 086f0249114ba..3342bd146c4ee 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -3207,7 +3207,7 @@ impl<'a> Resolver<'a> { } } } - ImportKind::ExternCrate { source, target, .. 
} => { + ImportKind::ExternCrate { source, target } => { suggestion = Some(format!( "extern crate {} as {};", source.unwrap_or(target.name), diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index 018d1eefef7d2..63efa951f9674 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -20,7 +20,7 @@ use rustc_hir::CRATE_HIR_ID; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::TraitEngine; use rustc_middle::traits::specialization_graph::OverlapMode; -use rustc_middle::ty::fast_reject::{self, SimplifyParams}; +use rustc_middle::ty::fast_reject::{self, TreatParams}; use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::subst::Subst; use rustc_middle::ty::{self, Ty, TyCtxt}; @@ -87,8 +87,8 @@ where impl2_ref.iter().flat_map(|tref| tref.substs.types()), ) .any(|(ty1, ty2)| { - let t1 = fast_reject::simplify_type(tcx, ty1, SimplifyParams::No); - let t2 = fast_reject::simplify_type(tcx, ty2, SimplifyParams::No); + let t1 = fast_reject::simplify_type(tcx, ty1, TreatParams::AsPlaceholders); + let t2 = fast_reject::simplify_type(tcx, ty2, TreatParams::AsPlaceholders); if let (Some(t1), Some(t2)) = (t1, t2) { // Simplified successfully diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 092ef5f11e9a2..8af4606db8520 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -36,7 +36,7 @@ use rustc_infer::infer::LateBoundRegionConversionTime; use rustc_middle::dep_graph::{DepKind, DepNodeIndex}; use rustc_middle::mir::interpret::ErrorHandled; use rustc_middle::thir::abstract_const::NotConstEvaluatable; -use rustc_middle::ty::fast_reject::{self, SimplifyParams}; +use rustc_middle::ty::fast_reject::{self, TreatParams}; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::relate::TypeRelation; use rustc_middle::ty::subst::{GenericArgKind, Subst, SubstsRef}; @@ -2176,8 +2176,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn fast_reject_trait_refs( &mut self, - obligation: &TraitObligation<'_>, - impl_trait_ref: &ty::TraitRef<'_>, + obligation: &TraitObligation<'tcx>, + impl_trait_ref: &ty::TraitRef<'tcx>, ) -> bool { // We can avoid creating type variables and doing the full // substitution if we find that any of the input types, when @@ -2193,10 +2193,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let simplified_obligation_ty = fast_reject::simplify_type( self.tcx(), obligation_ty, - SimplifyParams::Yes, + TreatParams::AsBoundTypes, + ); + let simplified_impl_ty = fast_reject::simplify_type( + self.tcx(), + impl_ty, + TreatParams::AsPlaceholders, ); - let simplified_impl_ty = - fast_reject::simplify_type(self.tcx(), impl_ty, SimplifyParams::No); simplified_obligation_ty.is_some() && simplified_impl_ty.is_some() diff --git a/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs b/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs index e31a9b200e873..8b23dcfe3808a 100644 --- a/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs +++ b/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs @@ -2,7 +2,7 @@ use super::OverlapError; use crate::traits; use rustc_hir::def_id::DefId; -use rustc_middle::ty::fast_reject::{self, SimplifiedType, SimplifyParams}; +use 
rustc_middle::ty::fast_reject::{self, SimplifiedType, TreatParams}; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, TyCtxt, TypeFoldable}; @@ -49,7 +49,9 @@ impl ChildrenExt<'_> for Children { /// Insert an impl into this set of children without comparing to any existing impls. fn insert_blindly(&mut self, tcx: TyCtxt<'_>, impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); - if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), SimplifyParams::No) { + if let Some(st) = + fast_reject::simplify_type(tcx, trait_ref.self_ty(), TreatParams::AsPlaceholders) + { debug!("insert_blindly: impl_def_id={:?} st={:?}", impl_def_id, st); self.non_blanket_impls.entry(st).or_default().push(impl_def_id) } else { @@ -64,7 +66,9 @@ impl ChildrenExt<'_> for Children { fn remove_existing(&mut self, tcx: TyCtxt<'_>, impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); let vec: &mut Vec; - if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), SimplifyParams::No) { + if let Some(st) = + fast_reject::simplify_type(tcx, trait_ref.self_ty(), TreatParams::AsPlaceholders) + { debug!("remove_existing: impl_def_id={:?} st={:?}", impl_def_id, st); vec = self.non_blanket_impls.get_mut(&st).unwrap(); } else { @@ -312,7 +316,8 @@ impl GraphExt for Graph { let mut parent = trait_def_id; let mut last_lint = None; - let simplified = fast_reject::simplify_type(tcx, trait_ref.self_ty(), SimplifyParams::No); + let simplified = + fast_reject::simplify_type(tcx, trait_ref.self_ty(), TreatParams::AsPlaceholders); // Descend the specialization tree, where `parent` is the current parent node. loop { diff --git a/compiler/rustc_typeck/src/check/method/suggest.rs b/compiler/rustc_typeck/src/check/method/suggest.rs index 1a345a303caea..0ae2dfa180b9e 100644 --- a/compiler/rustc_typeck/src/check/method/suggest.rs +++ b/compiler/rustc_typeck/src/check/method/suggest.rs @@ -12,7 +12,7 @@ use rustc_hir::lang_items::LangItem; use rustc_hir::{ExprKind, Node, QPath}; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_middle::traits::util::supertraits; -use rustc_middle::ty::fast_reject::{simplify_type, SimplifyParams}; +use rustc_middle::ty::fast_reject::{simplify_type, TreatParams}; use rustc_middle::ty::print::with_crate_prefix; use rustc_middle::ty::ToPolyTraitRef; use rustc_middle::ty::{self, DefIdTree, ToPredicate, Ty, TyCtxt, TypeFoldable}; @@ -1777,7 +1777,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // FIXME: Even though negative bounds are not implemented, we could maybe handle // cases where a positive bound implies a negative impl. 
(candidates, Vec::new()) - } else if let Some(simp_rcvr_ty) = simplify_type(self.tcx, rcvr_ty, SimplifyParams::Yes) + } else if let Some(simp_rcvr_ty) = + simplify_type(self.tcx, rcvr_ty, TreatParams::AsBoundTypes) { let mut potential_candidates = Vec::new(); let mut explicitly_negative = Vec::new(); @@ -1792,7 +1793,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .any(|imp_did| { let imp = self.tcx.impl_trait_ref(imp_did).unwrap(); let imp_simp = - simplify_type(self.tcx, imp.self_ty(), SimplifyParams::Yes); + simplify_type(self.tcx, imp.self_ty(), TreatParams::AsBoundTypes); imp_simp.map_or(false, |s| s == simp_rcvr_ty) }) { diff --git a/library/core/src/iter/adapters/map.rs b/library/core/src/iter/adapters/map.rs index b2ed82508dd29..d2077a63e150a 100644 --- a/library/core/src/iter/adapters/map.rs +++ b/library/core/src/iter/adapters/map.rs @@ -38,7 +38,7 @@ use crate::ops::Try; /// } /// ``` /// -/// This will print "('a', 1), ('b', 2), ('c', 3)". +/// This will print `('a', 1), ('b', 2), ('c', 3)`. /// /// Now consider this twist where we add a call to `rev`. This version will /// print `('c', 1), ('b', 2), ('a', 3)`. Note that the letters are reversed, diff --git a/library/std/src/sys/unix/thread.rs b/library/std/src/sys/unix/thread.rs index cf8cf5ad49f73..ff01ce2733329 100644 --- a/library/std/src/sys/unix/thread.rs +++ b/library/std/src/sys/unix/thread.rs @@ -279,10 +279,15 @@ pub fn available_parallelism() -> io::Result<NonZeroUsize> { ))] { #[cfg(any(target_os = "android", target_os = "linux"))] { + let quota = cgroup2_quota().max(1); let mut set: libc::cpu_set_t = unsafe { mem::zeroed() }; - if unsafe { libc::sched_getaffinity(0, mem::size_of::<libc::cpu_set_t>(), &mut set) } == 0 { - let count = unsafe { libc::CPU_COUNT(&set) }; - return Ok(unsafe { NonZeroUsize::new_unchecked(count as usize) }); + unsafe { + if libc::sched_getaffinity(0, mem::size_of::<libc::cpu_set_t>(), &mut set) == 0 { + let count = libc::CPU_COUNT(&set) as usize; + let count = count.min(quota); + // SAFETY: affinity mask can't be empty and the quota gets clamped to a minimum of 1 + return Ok(NonZeroUsize::new_unchecked(count)); + } } } match unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) } { @@ -368,6 +373,80 @@ pub fn available_parallelism() -> io::Result<NonZeroUsize> { } } +/// Returns cgroup CPU quota in core-equivalents, rounded down, or usize::MAX if the quota cannot +/// be determined or is not set.
+#[cfg(any(target_os = "android", target_os = "linux"))] +fn cgroup2_quota() -> usize { + use crate::ffi::OsString; + use crate::fs::{try_exists, File}; + use crate::io::Read; + use crate::os::unix::ffi::OsStringExt; + use crate::path::PathBuf; + + let mut quota = usize::MAX; + + let _: Option<()> = try { + let mut buf = Vec::with_capacity(128); + // find our place in the cgroup hierarchy + File::open("/proc/self/cgroup").ok()?.read_to_end(&mut buf).ok()?; + let cgroup_path = buf + .split(|&c| c == b'\n') + .filter_map(|line| { + let mut fields = line.splitn(3, |&c| c == b':'); + // expect cgroupv2 which has an empty 2nd field + if fields.nth(1) != Some(b"") { + return None; + } + let path = fields.last()?; + // skip leading slash + Some(path[1..].to_owned()) + }) + .next()?; + let cgroup_path = PathBuf::from(OsString::from_vec(cgroup_path)); + + let mut path = PathBuf::with_capacity(128); + let mut read_buf = String::with_capacity(20); + + let cgroup_mount = "/sys/fs/cgroup"; + + path.push(cgroup_mount); + path.push(&cgroup_path); + + path.push("cgroup.controllers"); + + // skip if we're not looking at cgroup2 + if matches!(try_exists(&path), Err(_) | Ok(false)) { + return usize::MAX; + }; + + path.pop(); + + while path.starts_with(cgroup_mount) { + path.push("cpu.max"); + + read_buf.clear(); + + if File::open(&path).and_then(|mut f| f.read_to_string(&mut read_buf)).is_ok() { + let raw_quota = read_buf.lines().next()?; + let mut raw_quota = raw_quota.split(' '); + let limit = raw_quota.next()?; + let period = raw_quota.next()?; + match (limit.parse::(), period.parse::()) { + (Ok(limit), Ok(period)) => { + quota = quota.min(limit / period); + } + _ => {} + } + } + + path.pop(); // pop filename + path.pop(); // pop dir + } + }; + + quota +} + #[cfg(all( not(target_os = "linux"), not(target_os = "freebsd"), diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index beb606099341e..09d1e714ab6dd 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -1524,7 +1524,10 @@ fn _assert_sync_and_send() { /// /// On Linux: /// - It may overcount the amount of parallelism available when limited by a -/// process-wide affinity mask, or when affected by cgroup limits. +/// process-wide affinity mask or cgroup quotas and cgroup2 fs or `sched_getaffinity()` can't be +/// queried, e.g. due to sandboxing. +/// - It may undercount the amount of parallelism if the current thread's affinity mask +/// does not reflect the process' cpuset, e.g. due to pinned threads. /// /// On all targets: /// - It may overcount the amount of parallelism available when running in a VM diff --git a/src/test/rustdoc/macro-higher-kinded-function.rs b/src/test/rustdoc/macro-higher-kinded-function.rs new file mode 100644 index 0000000000000..02a4305644e7d --- /dev/null +++ b/src/test/rustdoc/macro-higher-kinded-function.rs @@ -0,0 +1,21 @@ +#![crate_name = "foo"] + +pub struct TyCtxt<'tcx>(&'tcx u8); + +macro_rules! 
gen { + ($(($name:ident, $tcx:lifetime, [$k:ty], [$r:ty]))*) => { + pub struct Providers { + $(pub $name: for<$tcx> fn(TyCtxt<$tcx>, $k) -> $r,)* + } + } +} + +// @has 'foo/struct.Providers.html' +// @has - '//*[@class="docblock item-decl"]//code' "pub a: for<'tcx> fn(_: TyCtxt<'tcx>, _: u8) -> i8," +// @has - '//*[@class="docblock item-decl"]//code' "pub b: for<'tcx> fn(_: TyCtxt<'tcx>, _: u16) -> i16," +// @has - '//*[@id="structfield.a"]/code' "a: for<'tcx> fn(_: TyCtxt<'tcx>, _: u8) -> i8" +// @has - '//*[@id="structfield.b"]/code' "b: for<'tcx> fn(_: TyCtxt<'tcx>, _: u16) -> i16" +gen! { + (a, 'tcx, [u8], [i8]) + (b, 'tcx, [u16], [i16]) +} diff --git a/src/test/ui/check-cfg/stmt-no-ice.rs b/src/test/ui/check-cfg/stmt-no-ice.rs new file mode 100644 index 0000000000000..cf76487ed46fb --- /dev/null +++ b/src/test/ui/check-cfg/stmt-no-ice.rs @@ -0,0 +1,10 @@ +// This test checks that there is no ICE with this code +// +// check-pass +// compile-flags:--check-cfg=names() -Z unstable-options + +fn main() { + #[cfg(crossbeam_loom)] + //~^ WARNING unexpected `cfg` condition name + {} +} diff --git a/src/test/ui/check-cfg/stmt-no-ice.stderr b/src/test/ui/check-cfg/stmt-no-ice.stderr new file mode 100644 index 0000000000000..da65b596911d2 --- /dev/null +++ b/src/test/ui/check-cfg/stmt-no-ice.stderr @@ -0,0 +1,10 @@ +warning: unexpected `cfg` condition name + --> $DIR/stmt-no-ice.rs:7:11 + | +LL | #[cfg(crossbeam_loom)] + | ^^^^^^^^^^^^^^ + | + = note: `#[warn(unexpected_cfgs)]` on by default + +warning: 1 warning emitted +
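
The fast_reject changes above rename `SimplifyParams::Yes`/`No` to the more descriptive `TreatParams::AsBoundTypes`/`AsPlaceholders`. A standalone Rust sketch of the underlying idea follows; the types and names are toy illustrations, not rustc's internal API. A type is reduced to a key for its outermost layer, and a generic parameter only gets a key when treated as a bound type, because as a placeholder it could still unify with anything and must never be fast-rejected.

// Toy model of the fast-reject idea; names are illustrative, not rustc's.
#[derive(Debug, PartialEq)]
enum Key {
    Bool,
    Adt(&'static str),
    Param,
}

#[derive(Clone, Copy)]
enum TreatParams {
    AsBoundTypes,
    AsPlaceholders,
}

// Map a (toy) type to the key of its outermost layer, if that layer is injective.
fn simplify(ty: &'static str, treat_params: TreatParams) -> Option<Key> {
    match ty {
        "bool" => Some(Key::Bool),
        "T" => match treat_params {
            // A fully normalized bound parameter only matches itself.
            TreatParams::AsBoundTypes => Some(Key::Param),
            // A placeholder may later unify with any type: produce no key,
            // so callers never fast-reject based on it.
            TreatParams::AsPlaceholders => None,
        },
        name => Some(Key::Adt(name)),
    }
}

fn main() {
    // Storing impls: parameters are placeholders, so `impl<T> Trait for T` gets no key.
    assert_eq!(simplify("T", TreatParams::AsPlaceholders), None);
    // Looking up `T: Trait` in its own environment: `T` acts as a bound type.
    assert_eq!(simplify("T", TreatParams::AsBoundTypes), Some(Key::Param));
    // Two different concrete outer layers can never unify, so they fast-reject.
    assert_ne!(
        simplify("Vec", TreatParams::AsPlaceholders),
        simplify("bool", TreatParams::AsPlaceholders)
    );
}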
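
The `available_parallelism` changes above clamp the affinity-derived CPU count to the cgroup v2 quota. A minimal caller-side sketch using only the public `std::thread::available_parallelism` API; the fallback to one worker and the pool-sizing use are illustrative assumptions.

use std::num::NonZeroUsize;
use std::thread;

fn main() {
    // Fall back to a single worker if the parallelism cannot be determined,
    // e.g. when sandboxing hides /sys/fs/cgroup or sched_getaffinity.
    let workers: NonZeroUsize =
        thread::available_parallelism().unwrap_or(NonZeroUsize::new(1).unwrap());

    // Size a (hypothetical) worker pool from the quota- and affinity-aware count.
    println!("spawning {} workers", workers.get());
}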