From f9c16997dc016a3ef1456f56df2ab564a1c48cb2 Mon Sep 17 00:00:00 2001 From: Jonathan Pallant Date: Mon, 25 Nov 2024 16:06:02 +0000 Subject: [PATCH 001/142] dist: Re-work how we describe the licence of Rust in our distributions ) add COPYRIGHT*.html files to the rustc binary distribution ) add contents of LICENSE folder to dist tarballs, because some of our in-tree licences will require that the license text is reproduced. ) The wording of COPYRIGHT is adjusted to not include license text (`reuse` ensures that it's in the LICENSE folder) ) A blanket copyright notice is added to LICENCE-MIT as required by the text. The general approach is that the license statements are now compiled using a tool in CI (generate-copyright), and you get either: * the source code (COPYRIGHT, LICENCE-APACHE, LICENCE-MIT, REUSE.toml and the LICENCES folder), or * the compiled version (COPYRIGHT.html, COPYRIGHT-library.html and the LICENCES folder). --- COPYRIGHT | 392 +----------------- LICENSE-MIT | 2 + src/bootstrap/src/core/build_steps/dist.rs | 25 +- src/bootstrap/src/core/build_steps/run.rs | 4 +- .../templates/COPYRIGHT-library.html | 23 +- .../templates/COPYRIGHT.html | 26 +- 6 files changed, 77 insertions(+), 395 deletions(-) diff --git a/COPYRIGHT b/COPYRIGHT index 428a438a086e9..4dad74f234eaa 100644 --- a/COPYRIGHT +++ b/COPYRIGHT @@ -3,6 +3,7 @@ Short version for non-lawyers: The Rust Project is dual-licensed under Apache 2.0 and MIT terms. +It is Copyright (c) The Rust Project Contributors. Longer version: @@ -11,374 +12,23 @@ copyright assignment is required to contribute to the Rust project. Some files include explicit copyright notices and/or license notices. For full authorship information, see the version control history or -https://thanks.rust-lang.org - -Except as otherwise noted (below and/or in individual files), Rust is -licensed under the Apache License, Version 2.0 or - or the MIT license - or , at your option. - - -The Rust Project includes packages written by third parties. -The following third party packages are included, and carry -their own copyright notices and license terms: - -* LLVM, located in src/llvm-project, is licensed under the following - terms. - - ============================================================================== - The LLVM Project is under the Apache License v2.0 with LLVM Exceptions: - ============================================================================== - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - - ---- LLVM Exceptions to the Apache 2.0 License ---- - - As an exception, if, as a result of your compiling your source code, portions - of this Software are embedded into an Object form of such source code, you - may redistribute such embedded portions in such Object form without complying - with the conditions of Sections 4(a), 4(b) and 4(d) of the License. 
- - In addition, if you combine or link compiled forms of this Software with - software that is licensed under the GPLv2 ("Combined Software") and if a - court of competent jurisdiction determines that the patent provision (Section - 3), the indemnity provision (Section 9) or other Section of the License - conflicts with the conditions of the GPLv2, you may retroactively and - prospectively choose to deem waived or otherwise exclude such Section(s) of - the License, but only in their entirety and only with respect to the Combined - Software. - - ============================================================================== - Software from third parties included in the LLVM Project: - ============================================================================== - The LLVM Project contains third party software which is under different license - terms. All such code will be identified clearly using at least one of two - mechanisms: - 1) It will be in a separate directory tree with its own `LICENSE.txt` or - `LICENSE` file at the top containing the specific license and restrictions - which apply to that software, or - 2) It will contain specific license and restriction terms at the top of every - file. - - ============================================================================== - Legacy LLVM License (https://llvm.org/docs/DeveloperPolicy.html#legacy): - ============================================================================== - University of Illinois/NCSA - Open Source License - - Copyright (c) 2003-2019 University of Illinois at Urbana-Champaign. - All rights reserved. - - Developed by: - - LLVM Team - - University of Illinois at Urbana-Champaign - - http://llvm.org - - Permission is hereby granted, free of charge, to any person obtaining a copy of - this software and associated documentation files (the "Software"), to deal with - the Software without restriction, including without limitation the rights to - use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies - of the Software, and to permit persons to whom the Software is furnished to do - so, subject to the following conditions: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimers. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimers in the - documentation and/or other materials provided with the distribution. - - * Neither the names of the LLVM Team, University of Illinois at - Urbana-Champaign, nor the names of its contributors may be used to - endorse or promote products derived from this Software without specific - prior written permission. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS - FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE - SOFTWARE. - -* Portions of the FFI code for interacting with the native ABI - is derived from the Clay programming language, which carries - the following license. - - Copyright (C) 2008-2010 Tachyon Technologies. - All rights reserved. 
- - Redistribution and use in source and binary forms, with - or without modification, are permitted provided that the - following conditions are met: - - 1. Redistributions of source code must retain the above - copyright notice, this list of conditions and the - following disclaimer. - - 2. Redistributions in binary form must reproduce the - above copyright notice, this list of conditions and - the following disclaimer in the documentation and/or - other materials provided with the distribution. - - THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR - IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - DEVELOPERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, - INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF - USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - OF SUCH DAMAGE. - -* Portions of internationalization code use code or data from Unicode, which - carry the following license: - - UNICODE LICENSE V3 - - COPYRIGHT AND PERMISSION NOTICE - - Copyright © 1991-2024 Unicode, Inc. - - NOTICE TO USER: Carefully read the following legal agreement. BY - DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING DATA FILES, AND/OR - SOFTWARE, YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE - TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT - DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. - - Permission is hereby granted, free of charge, to any person obtaining a - copy of data files and any associated documentation (the "Data Files") or - software and any associated documentation (the "Software") to deal in the - Data Files or Software without restriction, including without limitation - the rights to use, copy, modify, merge, publish, distribute, and/or sell - copies of the Data Files or Software, and to permit persons to whom the - Data Files or Software are furnished to do so, provided that either (a) - this copyright and permission notice appear with all copies of the Data - Files or Software, or (b) this copyright and permission notice appear in - associated Documentation. - - THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY - KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF - THIRD PARTY RIGHTS. - - IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE - BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, - OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, - WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, - ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA - FILES OR SOFTWARE. - - Except as contained in this notice, the name of a copyright holder shall - not be used in advertising or otherwise to promote the sale, use or other - dealings in these Data Files or Software without prior written - authorization of the copyright holder. + + +Except as otherwise noted, Rust is licensed under the Apache License, Version +2.0 or or the MIT +license or , at your option. 
+ +We track licenses for third-party materials in two ways: + +* We use [REUSE](https://reuse.software) to track license information for + in-tree source files - both those authored by the Rust project and those + authored by third parties. See `REUSE.toml`, and our cached output of the + `reuse` tool which is committed to `license-metadata.json`. +* We use `cargo` to track license information for out-of-tree dependencies. + +These two sources of information are collected by the tool `generate-copyright` +into a file called `COPYRIGHT.html`, which is shipped with each binary release +of Rust. Please refer to that file for detailed information as to the components of +any given Rust release. We also produce a `COPYRIGHT-library.html` file which only +covers the subset of source code used in the Rust Standard Library, as opposed +to the toolchain as a whole. diff --git a/LICENSE-MIT b/LICENSE-MIT index 31aa79387f27e..a2070c4be7acd 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,3 +1,5 @@ +Copyright (c) The Rust Project Contributors + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index a636c4a9ef131..c26e0e250fb75 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -503,14 +503,22 @@ impl Step for Rustc { // Debugger scripts builder.ensure(DebuggerScripts { sysroot: image.to_owned(), host }); - // Misc license info - let cp = |file: &str| { - builder.install(&builder.src.join(file), &image.join("share/doc/rust"), 0o644); + // HTML copyright files + let file_list = builder.ensure(super::run::GenerateCopyright); + for file in file_list { + builder.install(&file, &image.join("share/doc/rust"), 0o644); + } + + // README + builder.install(&builder.src.join("README.md"), &image.join("share/doc/rust"), 0o644); + + // The REUSE-managed license files + let license = |path: &Path| { + builder.install(path, &image.join("share/doc/rust/licences"), 0o644); }; - cp("COPYRIGHT"); - cp("LICENSE-APACHE"); - cp("LICENSE-MIT"); - cp("README.md"); + for entry in t!(std::fs::read_dir(builder.src.join("LICENSES"))).flatten() { + license(&entry.path()); + } } } } @@ -986,6 +994,7 @@ impl Step for PlainSourceTarball { "CONTRIBUTING.md", "README.md", "RELEASES.md", + "REUSE.toml", "configure", "x.py", "config.example.toml", @@ -993,7 +1002,7 @@ impl Step for PlainSourceTarball { "Cargo.lock", ".gitmodules", ]; - let src_dirs = ["src", "compiler", "library", "tests"]; + let src_dirs = ["src", "compiler", "library", "tests", "LICENSES"]; copy_src_dirs(builder, &builder.src, &src_dirs, &[], plain_dst_src); diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index c76504761beb2..0a6aea26359f0 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -196,7 +196,7 @@ impl Step for CollectLicenseMetadata { pub struct GenerateCopyright; impl Step for GenerateCopyright { - type Output = PathBuf; + type Output = Vec; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -220,7 +220,7 @@ impl Step for GenerateCopyright { cmd.env("CARGO", &builder.initial_cargo); cmd.run(builder); - dest + vec![dest, dest_libstd] } } diff --git a/src/tools/generate-copyright/templates/COPYRIGHT-library.html 
b/src/tools/generate-copyright/templates/COPYRIGHT-library.html index 2c1eba741db0a..590a84dd9312e 100644 --- a/src/tools/generate-copyright/templates/COPYRIGHT-library.html +++ b/src/tools/generate-copyright/templates/COPYRIGHT-library.html @@ -8,20 +8,31 @@

Copyright notices for The Rust Standard Library

-

This file describes the copyright and licensing information for the Rust
-Standard Library source code within The Rust Project git tree, and the
-third-party dependencies used when building the Rust Standard Library.

-

Table of Contents

+

Short version for non-lawyers

+
+The Rust Standard Library is dual-licensed under Apache 2.0 and MIT terms.
+
+

Longer version

+ +

Copyrights in the Rust Standard Library are retained by their contributors. No copyright assignment is required to contribute to the Rust project.

+ +

Some files include explicit copyright notices and/or license notices. For full authorship information, see the version control history or https://thanks.rust-lang.org.

+ +

Except as otherwise noted (below and/or in individual files), the Rust Standard Library is licensed under the Apache License, Version 2.0 or the MIT license, at your option.

+ +

This file describes the copyright and licensing information for the source code within The Rust Project git tree related to the Rust Standard Library, and the third-party dependencies used when building the Rust Standard Library.

+

In-tree files

-

The following licenses cover the in-tree source files that were used in this
-release:

+

The following licenses cover the in-tree source files that were used in this release:

{{ in_tree|safe }} diff --git a/src/tools/generate-copyright/templates/COPYRIGHT.html b/src/tools/generate-copyright/templates/COPYRIGHT.html index ccb177a54d419..a0ed7bfb8d0f3 100644 --- a/src/tools/generate-copyright/templates/COPYRIGHT.html +++ b/src/tools/generate-copyright/templates/COPYRIGHT.html @@ -8,27 +8,37 @@

Copyright notices for The Rust Toolchain

-

This file describes the copyright and licensing information for the source
-code within The Rust Project git tree, and the third-party dependencies used
-when building the Rust toolchain (including the Rust Standard Library).

-

Table of Contents

+

Short version for non-lawyers

+
+The Rust Project is dual-licensed under Apache 2.0 and MIT terms.
+
+

Longer version

+ +

Copyrights in the Rust project are retained by their contributors. No copyright assignment is required to contribute to the Rust project.

+ +

Some files include explicit copyright notices and/or license notices. For full authorship information, see the version control history or https://thanks.rust-lang.org.

+ +

Except as otherwise noted (below and/or in individual files), Rust is licensed under the Apache License, Version 2.0 or the MIT license, at your option.

+ +

This file describes the copyright and licensing information for the source code within The Rust Project git tree, and the third-party dependencies used when building the Rust toolchain (including the Rust Standard Library).

+

In-tree files

-

The following licenses cover the in-tree source files that were used in this
-release:

+

The following licenses cover the in-tree source files that were used in this release:

{{ in_tree|safe }}

Out-of-tree dependencies

-

The following licenses cover the out-of-tree crates that were used in this
-release:

+

The following licenses cover the out-of-tree crates that were used in this release:

{% for (key, value) in dependencies %}

📦 {{key.name}}-{{key.version}}
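The two COPYRIGHT templates above are filled in by the `generate-copyright` tool and shipped by the `Rustc` dist step earlier in this patch. As a rough, std-only illustration of the new install behaviour (the real code goes through `builder.install` inside bootstrap; the helper name `copy_licenses` and the example paths below are invented for this sketch), the step now ships every file that REUSE keeps in the top-level `LICENSES` directory into `share/doc/rust/licences`, next to the generated HTML files:

    use std::fs;
    use std::io;
    use std::path::Path;

    /// Illustrative stand-in for the new `dist.rs` logic: copy the
    /// REUSE-managed licence texts into the image alongside the generated
    /// COPYRIGHT*.html files.
    fn copy_licenses(src_root: &Path, image: &Path) -> io::Result<()> {
        // Same destination directory the dist step uses.
        let dest = image.join("share/doc/rust/licences");
        fs::create_dir_all(&dest)?;
        for entry in fs::read_dir(src_root.join("LICENSES"))? {
            let entry = entry?;
            fs::copy(entry.path(), dest.join(entry.file_name()))?;
        }
        Ok(())
    }

    fn main() -> io::Result<()> {
        // Hypothetical paths, for illustration only.
        copy_licenses(Path::new("."), Path::new("build/image"))
    }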

From 70293476acadcb432317a8856628e05548ce6d82 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Thu, 12 Dec 2024 16:55:28 +0100 Subject: [PATCH 002/142] Enable "jump to def" feature on prelude variants --- src/librustdoc/html/highlight.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 48a537ad5e7ad..62cf2b63f7fd6 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -337,7 +337,7 @@ enum Class { Ident(Span), Lifetime, PreludeTy(Span), - PreludeVal, + PreludeVal(Span), QuestionMark, Decoration(&'static str), } @@ -385,7 +385,7 @@ impl Class { Class::Ident(_) => "", Class::Lifetime => "lifetime", Class::PreludeTy(_) => "prelude-ty", - Class::PreludeVal => "prelude-val", + Class::PreludeVal(_) => "prelude-val", Class::QuestionMark => "question-mark", Class::Decoration(kind) => kind, } @@ -395,7 +395,11 @@ impl Class { /// a "span" (a tuple representing `(lo, hi)` equivalent of `Span`). fn get_span(self) -> Option { match self { - Self::Ident(sp) | Self::Self_(sp) | Self::Macro(sp) | Self::PreludeTy(sp) => Some(sp), + Self::Ident(sp) + | Self::Self_(sp) + | Self::Macro(sp) + | Self::PreludeTy(sp) + | Self::PreludeVal(sp) => Some(sp), Self::Comment | Self::DocComment | Self::Attribute @@ -406,7 +410,6 @@ impl Class { | Self::Number | Self::Bool | Self::Lifetime - | Self::PreludeVal | Self::QuestionMark | Self::Decoration(_) => None, } @@ -851,7 +854,9 @@ impl<'src> Classifier<'src> { TokenKind::Ident => match get_real_ident_class(text, false) { None => match text { "Option" | "Result" => Class::PreludeTy(self.new_span(before, text)), - "Some" | "None" | "Ok" | "Err" => Class::PreludeVal, + "Some" | "None" | "Ok" | "Err" => { + Class::PreludeVal(self.new_span(before, text)) + } // "union" is a weak keyword and is only considered as a keyword when declaring // a union type. 
"union" if self.check_if_is_union_keyword() => Class::KeyWord, From f6525a621ae815a981aff1c9d2e05ef631e62bd5 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Thu, 12 Dec 2024 16:55:53 +0100 Subject: [PATCH 003/142] Enable "jump to def" feature on patterns --- src/librustdoc/html/render/span_map.rs | 27 +++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs index 81d79a6be9683..e9250cf7edd10 100644 --- a/src/librustdoc/html/render/span_map.rs +++ b/src/librustdoc/html/render/span_map.rs @@ -4,7 +4,7 @@ use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::{ExprKind, HirId, Item, ItemKind, Mod, Node}; +use rustc_hir::{ExprKind, HirId, Item, ItemKind, Mod, Node, Pat, PatKind, QPath}; use rustc_middle::hir::nested_filter; use rustc_middle::ty::TyCtxt; use rustc_span::hygiene::MacroKind; @@ -189,6 +189,27 @@ impl SpanMapVisitor<'_> { self.matches.insert(span, link); } } + + fn handle_pat(&mut self, p: &Pat<'_>) { + match p.kind { + PatKind::Binding(_, _, _, Some(p)) => self.handle_pat(p), + PatKind::Struct(qpath, _, _) + | PatKind::TupleStruct(qpath, _, _) + | PatKind::Path(qpath) => match qpath { + QPath::TypeRelative(_, path) if matches!(path.res, Res::Err) => { + self.handle_call(path.hir_id, Some(p.hir_id), qpath.span()); + } + QPath::Resolved(_, path) => self.handle_path(path), + _ => {} + }, + PatKind::Or(pats) => { + for pat in pats { + self.handle_pat(pat); + } + } + _ => {} + } + } } impl<'tcx> Visitor<'tcx> for SpanMapVisitor<'tcx> { @@ -206,6 +227,10 @@ impl<'tcx> Visitor<'tcx> for SpanMapVisitor<'tcx> { intravisit::walk_path(self, path); } + fn visit_pat(&mut self, p: &Pat<'tcx>) { + self.handle_pat(p); + } + fn visit_mod(&mut self, m: &'tcx Mod<'tcx>, span: Span, id: HirId) { // To make the difference between "mod foo {}" and "mod foo;". In case we "import" another // file, we want to link to it. Otherwise no need to create a link. From 140640a8cf7650f0c574f444c300e4c7cbf92cf3 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Thu, 12 Dec 2024 17:08:36 +0100 Subject: [PATCH 004/142] Add regression test for patterns support in "jump to def" feature --- tests/rustdoc/jump-to-def-pats.rs | 52 +++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 tests/rustdoc/jump-to-def-pats.rs diff --git a/tests/rustdoc/jump-to-def-pats.rs b/tests/rustdoc/jump-to-def-pats.rs new file mode 100644 index 0000000000000..147902b44cf5c --- /dev/null +++ b/tests/rustdoc/jump-to-def-pats.rs @@ -0,0 +1,52 @@ +// This test ensures that patterns also get a link generated. + +//@ compile-flags: -Zunstable-options --generate-link-to-definition + +#![crate_name = "foo"] + +//@ has 'src/foo/jump-to-def-pats.rs.html' + +use std::fmt; + +pub enum MyEnum { + Ok(T), + Err(E), + Some(T), + None, +} + +pub enum X { + A, +} + +pub fn foo() -> Result<(), ()> { + // FIXME: would be nice to be able to check both the class and the href at the same time so + // we could check the text as well... 
+ //@ has - '//a[@class="prelude-val"]/@href' '{{channel}}/core/result/enum.Result.html#variant.Ok' + //@ has - '//a[@href="{{channel}}/core/result/enum.Result.html#variant.Ok"]' 'Ok' + Ok(()) +} + +impl fmt::Display for MyEnum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + //@ has - '//a[@href="#12"]' 'Self::Ok' + Self::Ok(_) => f.write_str("MyEnum::Ok"), + //@ has - '//a[@href="#13"]' 'MyEnum::Err' + MyEnum::Err(_) => f.write_str("MyEnum::Err"), + //@ has - '//a[@href="#14"]' 'Self::Some' + Self::Some(_) => f.write_str("MyEnum::Some"), + //@ has - '//a[@href="#15"]' 'Self::None' + Self::None => f.write_str("MyEnum::None"), + } + } +} + +impl X { + fn p(&self) -> &str { + match self { + //@ has - '//a[@href="#19"]' 'Self::A' + Self::A => "X::A", + } + } +} From 12a12c9a8399bd7f4eacb42d76a7480df5623989 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Thu, 12 Dec 2024 17:09:38 +0100 Subject: [PATCH 005/142] Rename `handle_call` into `infer_id` --- src/librustdoc/html/render/span_map.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs index e9250cf7edd10..2570596a49ca8 100644 --- a/src/librustdoc/html/render/span_map.rs +++ b/src/librustdoc/html/render/span_map.rs @@ -170,7 +170,7 @@ impl SpanMapVisitor<'_> { true } - fn handle_call(&mut self, hir_id: HirId, expr_hir_id: Option, span: Span) { + fn infer_id(&mut self, hir_id: HirId, expr_hir_id: Option, span: Span) { let hir = self.tcx.hir(); let body_id = hir.enclosing_body_owner(hir_id); // FIXME: this is showing error messages for parts of the code that are not @@ -197,7 +197,7 @@ impl SpanMapVisitor<'_> { | PatKind::TupleStruct(qpath, _, _) | PatKind::Path(qpath) => match qpath { QPath::TypeRelative(_, path) if matches!(path.res, Res::Err) => { - self.handle_call(path.hir_id, Some(p.hir_id), qpath.span()); + self.infer_id(path.hir_id, Some(p.hir_id), qpath.span()); } QPath::Resolved(_, path) => self.handle_path(path), _ => {} @@ -253,9 +253,9 @@ impl<'tcx> Visitor<'tcx> for SpanMapVisitor<'tcx> { fn visit_expr(&mut self, expr: &'tcx rustc_hir::Expr<'tcx>) { match expr.kind { ExprKind::MethodCall(segment, ..) => { - self.handle_call(segment.hir_id, Some(expr.hir_id), segment.ident.span) + self.infer_id(segment.hir_id, Some(expr.hir_id), segment.ident.span) } - ExprKind::Call(call, ..) => self.handle_call(call.hir_id, None, call.span), + ExprKind::Call(call, ..) => self.infer_id(call.hir_id, None, call.span), _ => { if self.handle_macro(expr.span) { // We don't want to go deeper into the macro. From 878a79691caa5b02bb8a91f10866903a4af8b523 Mon Sep 17 00:00:00 2001 From: LemonJ <1632798336@qq.com> Date: Thu, 19 Dec 2024 13:54:37 +0800 Subject: [PATCH 006/142] Add missing safety descriptions to Arc's 'from_raw','increment_strong_count','decrement_strong_count' --- library/alloc/src/sync.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 6cf41a3fa4e16..dd899c8a71d0d 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -1376,6 +1376,8 @@ impl Arc { /// different types. See [`mem::transmute`][transmute] for more information /// on what restrictions apply in this case. /// + /// The raw pointer must point to a block of memory allocated by the global allocator + /// /// The user of `from_raw` has to make sure a specific value of `T` is only /// dropped once. 
/// @@ -1431,7 +1433,8 @@ impl Arc { /// /// The pointer must have been obtained through `Arc::into_raw`, and the /// associated `Arc` instance must be valid (i.e. the strong count must be at - /// least 1) for the duration of this method. + /// least 1) for the duration of this method, and `ptr` must point to a block of memory + /// allocated by the global allocator. /// /// # Examples /// @@ -1465,7 +1468,8 @@ impl Arc { /// /// The pointer must have been obtained through `Arc::into_raw`, and the /// associated `Arc` instance must be valid (i.e. the strong count must be at - /// least 1) when invoking this method. This method can be used to release the final + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by the global allocator. This method can be used to release the final /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been /// released. /// From fc9a14d31a5b79bccfbc7d64e64163ddbb8a4a06 Mon Sep 17 00:00:00 2001 From: lcnr Date: Wed, 18 Dec 2024 10:30:40 +0100 Subject: [PATCH 007/142] cleanup promoteds move check --- compiler/rustc_borrowck/src/lib.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 19b5c8689c840..a3034ad1e6564 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -183,9 +183,6 @@ fn do_mir_borrowck<'tcx>( let location_table = LocationTable::new(body); let move_data = MoveData::gather_moves(body, tcx, |_| true); - let promoted_move_data = promoted - .iter_enumerated() - .map(|(idx, body)| (idx, MoveData::gather_moves(body, tcx, |_| true))); let flow_inits = MaybeInitializedPlaces::new(tcx, body, &move_data) .iterate_to_fixpoint(tcx, body, Some("borrowck")) @@ -235,10 +232,14 @@ fn do_mir_borrowck<'tcx>( false }; - for (idx, move_data) in promoted_move_data { + // While promoteds should mostly be correct by construction, we need to check them for + // invalid moves to detect moving out of arrays:`struct S; fn main() { &([S][0]); }`. + for promoted_body in &promoted { use rustc_middle::mir::visit::Visitor; - - let promoted_body = &promoted[idx]; + // This assumes that we won't use some of the fields of the `promoted_mbcx` + // when detecting and reporting move errors. While it would be nice to move + // this check out of `MirBorrowckCtxt`, actually doing so is far from trivial. 
+ let move_data = MoveData::gather_moves(promoted_body, tcx, |_| true); let mut promoted_mbcx = MirBorrowckCtxt { infcx: &infcx, body: promoted_body, @@ -262,9 +263,6 @@ fn do_mir_borrowck<'tcx>( move_errors: Vec::new(), diags, }; - MoveVisitor { ctxt: &mut promoted_mbcx }.visit_body(promoted_body); - promoted_mbcx.report_move_errors(); - struct MoveVisitor<'a, 'b, 'infcx, 'tcx> { ctxt: &'a mut MirBorrowckCtxt<'b, 'infcx, 'tcx>, } @@ -276,6 +274,8 @@ fn do_mir_borrowck<'tcx>( } } } + MoveVisitor { ctxt: &mut promoted_mbcx }.visit_body(promoted_body); + promoted_mbcx.report_move_errors(); } let mut mbcx = MirBorrowckCtxt { From c89f0dc01d08cbc79a7bcb8a308585fb1a6544f1 Mon Sep 17 00:00:00 2001 From: Caio Date: Sun, 22 Dec 2024 17:12:42 -0300 Subject: [PATCH 008/142] Adjust syntax --- compiler/rustc_data_structures/src/flock.rs | 25 +++ .../rustc_data_structures/src/profiling.rs | 63 ++++++ .../rustc_span/src/analyze_source_file.rs | 160 +++++++++++++++ library/core/src/macros/mod.rs | 52 +++++ library/core/tests/lib.rs | 3 + library/core/tests/macros.rs | 53 +++-- library/core/tests/macros_bootstrap.rs | 193 ++++++++++++++++++ 7 files changed, 528 insertions(+), 21 deletions(-) create mode 100644 library/core/tests/macros_bootstrap.rs diff --git a/compiler/rustc_data_structures/src/flock.rs b/compiler/rustc_data_structures/src/flock.rs index e03962a54ecab..292a33d56469a 100644 --- a/compiler/rustc_data_structures/src/flock.rs +++ b/compiler/rustc_data_structures/src/flock.rs @@ -4,6 +4,7 @@ //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. +#[cfg(bootstrap)] cfg_match! { cfg(target_os = "linux") => { mod linux; @@ -27,4 +28,28 @@ cfg_match! { } } +#[cfg(not(bootstrap))] +cfg_match! { + target_os = "linux" => { + mod linux; + use linux as imp; + } + target_os = "redox" => { + mod linux; + use linux as imp; + } + unix => { + mod unix; + use unix as imp; + } + windows => { + mod windows; + use self::windows as imp; + } + _ => { + mod unsupported; + use unsupported as imp; + } +} + pub use imp::Lock; diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs index 19050746c2f18..18e98e6c39fa9 100644 --- a/compiler/rustc_data_structures/src/profiling.rs +++ b/compiler/rustc_data_structures/src/profiling.rs @@ -860,6 +860,7 @@ fn get_thread_id() -> u32 { } // Memory reporting +#[cfg(bootstrap)] cfg_match! { cfg(windows) => { pub fn get_resident_set_size() -> Option { @@ -921,5 +922,67 @@ cfg_match! { } } +#[cfg(not(bootstrap))] +cfg_match! 
{ + windows => { + pub fn get_resident_set_size() -> Option { + use std::mem; + + use windows::{ + Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS}, + Win32::System::Threading::GetCurrentProcess, + }; + + let mut pmc = PROCESS_MEMORY_COUNTERS::default(); + let pmc_size = mem::size_of_val(&pmc); + unsafe { + K32GetProcessMemoryInfo( + GetCurrentProcess(), + &mut pmc, + pmc_size as u32, + ) + } + .ok() + .ok()?; + + Some(pmc.WorkingSetSize) + } + } + target_os = "macos" => { + pub fn get_resident_set_size() -> Option { + use libc::{c_int, c_void, getpid, proc_pidinfo, proc_taskinfo, PROC_PIDTASKINFO}; + use std::mem; + const PROC_TASKINFO_SIZE: c_int = mem::size_of::() as c_int; + + unsafe { + let mut info: proc_taskinfo = mem::zeroed(); + let info_ptr = &mut info as *mut proc_taskinfo as *mut c_void; + let pid = getpid() as c_int; + let ret = proc_pidinfo(pid, PROC_PIDTASKINFO, 0, info_ptr, PROC_TASKINFO_SIZE); + if ret == PROC_TASKINFO_SIZE { + Some(info.pti_resident_size as usize) + } else { + None + } + } + } + } + unix => { + pub fn get_resident_set_size() -> Option { + let field = 1; + let contents = fs::read("/proc/self/statm").ok()?; + let contents = String::from_utf8(contents).ok()?; + let s = contents.split_whitespace().nth(field)?; + let npages = s.parse::().ok()?; + Some(npages * 4096) + } + } + _ => { + pub fn get_resident_set_size() -> Option { + None + } + } +} + #[cfg(test)] mod tests; diff --git a/compiler/rustc_span/src/analyze_source_file.rs b/compiler/rustc_span/src/analyze_source_file.rs index 28ce883daee9a..fba205665803c 100644 --- a/compiler/rustc_span/src/analyze_source_file.rs +++ b/compiler/rustc_span/src/analyze_source_file.rs @@ -29,6 +29,7 @@ pub(crate) fn analyze_source_file(src: &str) -> (Vec, Vec { fn analyze_source_file_dispatch( @@ -185,6 +186,165 @@ cfg_match! { } } } + +#[cfg(not(bootstrap))] +cfg_match! { + any(target_arch = "x86", target_arch = "x86_64") => { + fn analyze_source_file_dispatch( + src: &str, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + ) { + if is_x86_feature_detected!("sse2") { + unsafe { + analyze_source_file_sse2(src, lines, multi_byte_chars); + } + } else { + analyze_source_file_generic( + src, + src.len(), + RelativeBytePos::from_u32(0), + lines, + multi_byte_chars, + ); + } + } + + /// Checks 16 byte chunks of text at a time. If the chunk contains + /// something other than printable ASCII characters and newlines, the + /// function falls back to the generic implementation. Otherwise it uses + /// SSE2 intrinsics to quickly find all newlines. + #[target_feature(enable = "sse2")] + unsafe fn analyze_source_file_sse2( + src: &str, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + ) { + #[cfg(target_arch = "x86")] + use std::arch::x86::*; + #[cfg(target_arch = "x86_64")] + use std::arch::x86_64::*; + + const CHUNK_SIZE: usize = 16; + + let src_bytes = src.as_bytes(); + + let chunk_count = src.len() / CHUNK_SIZE; + + // This variable keeps track of where we should start decoding a + // chunk. If a multi-byte character spans across chunk boundaries, + // we need to skip that part in the next chunk because we already + // handled it. + let mut intra_chunk_offset = 0; + + for chunk_index in 0..chunk_count { + let ptr = src_bytes.as_ptr() as *const __m128i; + // We don't know if the pointer is aligned to 16 bytes, so we + // use `loadu`, which supports unaligned loading. 
+ let chunk = unsafe { _mm_loadu_si128(ptr.add(chunk_index)) }; + + // For character in the chunk, see if its byte value is < 0, which + // indicates that it's part of a UTF-8 char. + let multibyte_test = unsafe { _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)) }; + // Create a bit mask from the comparison results. + let multibyte_mask = unsafe { _mm_movemask_epi8(multibyte_test) }; + + // If the bit mask is all zero, we only have ASCII chars here: + if multibyte_mask == 0 { + assert!(intra_chunk_offset == 0); + + // Check if there are any control characters in the chunk. All + // control characters that we can encounter at this point have a + // byte value less than 32 or ... + let control_char_test0 = unsafe { _mm_cmplt_epi8(chunk, _mm_set1_epi8(32)) }; + let control_char_mask0 = unsafe { _mm_movemask_epi8(control_char_test0) }; + + // ... it's the ASCII 'DEL' character with a value of 127. + let control_char_test1 = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127)) }; + let control_char_mask1 = unsafe { _mm_movemask_epi8(control_char_test1) }; + + let control_char_mask = control_char_mask0 | control_char_mask1; + + if control_char_mask != 0 { + // Check for newlines in the chunk + let newlines_test = unsafe { _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)) }; + let newlines_mask = unsafe { _mm_movemask_epi8(newlines_test) }; + + if control_char_mask == newlines_mask { + // All control characters are newlines, record them + let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32; + let output_offset = RelativeBytePos::from_usize(chunk_index * CHUNK_SIZE + 1); + + loop { + let index = newlines_mask.trailing_zeros(); + + if index >= CHUNK_SIZE as u32 { + // We have arrived at the end of the chunk. + break; + } + + lines.push(RelativeBytePos(index) + output_offset); + + // Clear the bit, so we can find the next one. + newlines_mask &= (!1) << index; + } + + // We are done for this chunk. All control characters were + // newlines and we took care of those. + continue; + } else { + // Some of the control characters are not newlines, + // fall through to the slow path below. + } + } else { + // No control characters, nothing to record for this chunk + continue; + } + } + + // The slow path. + // There are control chars in here, fallback to generic decoding. + let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; + intra_chunk_offset = analyze_source_file_generic( + &src[scan_start..], + CHUNK_SIZE - intra_chunk_offset, + RelativeBytePos::from_usize(scan_start), + lines, + multi_byte_chars, + ); + } + + // There might still be a tail left to analyze + let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset; + if tail_start < src.len() { + analyze_source_file_generic( + &src[tail_start..], + src.len() - tail_start, + RelativeBytePos::from_usize(tail_start), + lines, + multi_byte_chars, + ); + } + } + } + _ => { + // The target (or compiler version) does not support SSE2 ... + fn analyze_source_file_dispatch( + src: &str, + lines: &mut Vec, + multi_byte_chars: &mut Vec, + ) { + analyze_source_file_generic( + src, + src.len(), + RelativeBytePos::from_u32(0), + lines, + multi_byte_chars, + ); + } + } +} + // `scan_len` determines the number of bytes in `src` to scan. Note that the // function can read past `scan_len` if a multi-byte character start within the // range but extends past it. The overflow is returned by the function. 
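The `#[cfg(bootstrap)]` / `#[cfg(not(bootstrap))]` duplication in the hunks above exists because this patch changes the `cfg_match!` arm syntax: predicates are now written bare instead of being wrapped in `cfg(...)`. A minimal sketch of the difference, adapted from the doc example and tests later in this patch (assumes a nightly compiler, since the macro is still gated behind `#![feature(cfg_match)]`):

    #![feature(cfg_match)]

    // Old (bootstrap) arm syntax wrapped every predicate in `cfg(...)`:
    //
    //     cfg_match! {
    //         cfg(unix) => { fn foo() { /* unix specific functionality */ } }
    //         _ => { fn foo() { /* fallback implementation */ } }
    //     }
    //
    // The reworked macro takes the predicate bare:
    cfg_match! {
        unix => {
            fn foo() { /* unix specific functionality */ }
        }
        target_pointer_width = "32" => {
            fn foo() { /* non-unix, 32-bit functionality */ }
        }
        _ => {
            fn foo() { /* fallback implementation */ }
        }
    }

    fn main() {
        // Exactly one `foo` survives cfg-stripping on any given target.
        foo();
    }

The reworked macro also accepts an expression form, `cfg_match!({ ... })`, which the new `_expression` test in `library/core/tests/macros.rs` exercises.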
diff --git a/library/core/src/macros/mod.rs b/library/core/src/macros/mod.rs index bff7ad98df3fa..611647c427b76 100644 --- a/library/core/src/macros/mod.rs +++ b/library/core/src/macros/mod.rs @@ -224,6 +224,7 @@ pub macro assert_matches { /// } /// } /// ``` +#[cfg(bootstrap)] #[unstable(feature = "cfg_match", issue = "115585")] #[rustc_diagnostic_item = "cfg_match"] pub macro cfg_match { @@ -284,6 +285,57 @@ pub macro cfg_match { } } +/// A macro for defining `#[cfg]` match-like statements. +/// +/// It is similar to the `if/elif` C preprocessor macro by allowing definition of a cascade of +/// `#[cfg]` cases, emitting the implementation which matches first. +/// +/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code +/// without having to rewrite each clause multiple times. +/// +/// Trailing `_` wildcard match arms are **optional** and they indicate a fallback branch when +/// all previous declarations do not evaluate to true. +/// +/// # Example +/// +/// ``` +/// #![feature(cfg_match)] +/// +/// cfg_match! { +/// unix => { +/// fn foo() { /* unix specific functionality */ } +/// } +/// target_pointer_width = "32" => { +/// fn foo() { /* non-unix, 32-bit functionality */ } +/// } +/// _ => { +/// fn foo() { /* fallback implementation */ } +/// } +/// } +/// ``` +#[cfg(not(bootstrap))] +#[unstable(feature = "cfg_match", issue = "115585")] +#[rustc_diagnostic_item = "cfg_match"] +pub macro cfg_match { + ({ $($tt:tt)* }) => {{ + cfg_match! { $($tt)* } + }}, + (_ => { $($output:tt)* }) => { + $($output)* + }, + ( + $cfg:meta => $output:tt + $($( $rest:tt )+)? + ) => { + #[cfg($cfg)] + cfg_match! { _ => $output } + $( + #[cfg(not($cfg))] + cfg_match! { $($rest)+ } + )? + }, +} + /// Asserts that a boolean expression is `true` at runtime. /// /// This will invoke the [`panic!`] macro if the provided expression cannot be diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs index 89b65eefd027e..24c78b01c5c5d 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -149,7 +149,10 @@ mod intrinsics; mod io; mod iter; mod lazy; +#[cfg(not(bootstrap))] mod macros; +#[cfg(bootstrap)] +mod macros_bootstrap; mod manually_drop; mod mem; mod net; diff --git a/library/core/tests/macros.rs b/library/core/tests/macros.rs index fdb4ea2941285..b30a40b7df28e 100644 --- a/library/core/tests/macros.rs +++ b/library/core/tests/macros.rs @@ -10,7 +10,7 @@ struct Struct; impl Trait for Struct { cfg_match! { - cfg(feature = "blah") => { + feature = "blah" => { fn blah(&self) { unimplemented!(); } @@ -47,21 +47,21 @@ fn matches_leading_pipe() { #[test] fn cfg_match_basic() { cfg_match! { - cfg(target_pointer_width = "64") => { fn f0_() -> bool { true }} + target_pointer_width = "64" => { fn f0_() -> bool { true }} } cfg_match! { - cfg(unix) => { fn f1_() -> bool { true }} - cfg(any(target_os = "macos", target_os = "linux")) => { fn f1_() -> bool { false }} + unix => { fn f1_() -> bool { true } } + any(target_os = "macos", target_os = "linux") => { fn f1_() -> bool { false }} } cfg_match! { - cfg(target_pointer_width = "32") => { fn f2_() -> bool { false }} - cfg(target_pointer_width = "64") => { fn f2_() -> bool { true }} + target_pointer_width = "32" => { fn f2_() -> bool { false } } + target_pointer_width = "64" => { fn f2_() -> bool { true } } } cfg_match! 
{ - cfg(target_pointer_width = "16") => { fn f3_() -> i32 { 1 }} + target_pointer_width = "16" => { fn f3_() -> i32 { 1 } } _ => { fn f3_() -> i32 { 2 }} } @@ -83,7 +83,7 @@ fn cfg_match_basic() { #[test] fn cfg_match_debug_assertions() { cfg_match! { - cfg(debug_assertions) => { + debug_assertions => { assert!(cfg!(debug_assertions)); assert_eq!(4, 2+2); } @@ -98,13 +98,13 @@ fn cfg_match_debug_assertions() { #[test] fn cfg_match_no_duplication_on_64() { cfg_match! { - cfg(windows) => { + windows => { fn foo() {} } - cfg(unix) => { + unix => { fn foo() {} } - cfg(target_pointer_width = "64") => { + target_pointer_width = "64" => { fn foo() {} } } @@ -114,7 +114,7 @@ fn cfg_match_no_duplication_on_64() { #[test] fn cfg_match_options() { cfg_match! { - cfg(test) => { + test => { use core::option::Option as Option2; fn works1() -> Option2 { Some(1) } } @@ -122,26 +122,26 @@ fn cfg_match_options() { } cfg_match! { - cfg(feature = "foo") => { fn works2() -> bool { false } } - cfg(test) => { fn works2() -> bool { true } } + feature = "foo" => { fn works2() -> bool { false } } + test => { fn works2() -> bool { true } } _ => { fn works2() -> bool { false } } } cfg_match! { - cfg(feature = "foo") => { fn works3() -> bool { false } } + feature = "foo" => { fn works3() -> bool { false } } _ => { fn works3() -> bool { true } } } cfg_match! { - cfg(test) => { + test => { use core::option::Option as Option3; fn works4() -> Option3 { Some(1) } } } cfg_match! { - cfg(feature = "foo") => { fn works5() -> bool { false } } - cfg(test) => { fn works5() -> bool { true } } + feature = "foo" => { fn works5() -> bool { false } } + test => { fn works5() -> bool { true } } } assert!(works1().is_some()); @@ -154,7 +154,7 @@ fn cfg_match_options() { #[test] fn cfg_match_two_functions() { cfg_match! { - cfg(target_pointer_width = "64") => { + target_pointer_width = "64" => { fn foo1() {} fn bar1() {} } @@ -178,7 +178,7 @@ fn cfg_match_two_functions() { fn _accepts_expressions() -> i32 { cfg_match! { - cfg(unix) => { 1 } + unix => { 1 } _ => { 2 } } } @@ -189,7 +189,18 @@ fn _allows_stmt_expr_attributes() { let one = 1; let two = 2; cfg_match! { - cfg(unix) => { one * two; } + unix => { one * two; } _ => { one + two; } } } + +fn _expression() { + let _ = cfg_match!({ + windows => { + " XP" + } + _ => { + "" + } + }); +} diff --git a/library/core/tests/macros_bootstrap.rs b/library/core/tests/macros_bootstrap.rs new file mode 100644 index 0000000000000..f10ef862c5dd9 --- /dev/null +++ b/library/core/tests/macros_bootstrap.rs @@ -0,0 +1,193 @@ +#![allow(unused_must_use)] + +#[allow(dead_code)] +trait Trait { + fn blah(&self); +} + +#[allow(dead_code)] +struct Struct; + +impl Trait for Struct { + cfg_match! { + cfg(feature = "blah") => { + fn blah(&self) { + unimplemented!(); + } + } + _ => { + fn blah(&self) { + unimplemented!(); + } + } + } +} + +#[test] +fn assert_eq_trailing_comma() { + assert_eq!(1, 1,); +} + +#[test] +fn assert_escape() { + assert!(r#"☃\backslash"#.contains("\\")); +} + +#[test] +fn assert_ne_trailing_comma() { + assert_ne!(1, 2,); +} + +#[rustfmt::skip] +#[test] +fn matches_leading_pipe() { + matches!(1, | 1 | 2 | 3); +} + +#[test] +fn cfg_match_basic() { + cfg_match! { + cfg(target_pointer_width = "64") => { fn f0_() -> bool { true }} + } + + cfg_match! { + cfg(unix) => { fn f1_() -> bool { true }} + cfg(any(target_os = "macos", target_os = "linux")) => { fn f1_() -> bool { false }} + } + + cfg_match! 
{ + cfg(target_pointer_width = "32") => { fn f2_() -> bool { false }} + cfg(target_pointer_width = "64") => { fn f2_() -> bool { true }} + } + + cfg_match! { + cfg(target_pointer_width = "16") => { fn f3_() -> i32 { 1 }} + _ => { fn f3_() -> i32 { 2 }} + } + + #[cfg(target_pointer_width = "64")] + assert!(f0_()); + + #[cfg(unix)] + assert!(f1_()); + + #[cfg(target_pointer_width = "32")] + assert!(!f2_()); + #[cfg(target_pointer_width = "64")] + assert!(f2_()); + + #[cfg(not(target_pointer_width = "16"))] + assert_eq!(f3_(), 2); +} + +#[test] +fn cfg_match_debug_assertions() { + cfg_match! { + cfg(debug_assertions) => { + assert!(cfg!(debug_assertions)); + assert_eq!(4, 2+2); + } + _ => { + assert!(cfg!(not(debug_assertions))); + assert_eq!(10, 5+5); + } + } +} + +#[cfg(target_pointer_width = "64")] +#[test] +fn cfg_match_no_duplication_on_64() { + cfg_match! { + cfg(windows) => { + fn foo() {} + } + cfg(unix) => { + fn foo() {} + } + cfg(target_pointer_width = "64") => { + fn foo() {} + } + } + foo(); +} + +#[test] +fn cfg_match_options() { + cfg_match! { + cfg(test) => { + use core::option::Option as Option2; + fn works1() -> Option2 { Some(1) } + } + _ => { fn works1() -> Option { None } } + } + + cfg_match! { + cfg(feature = "foo") => { fn works2() -> bool { false } } + cfg(test) => { fn works2() -> bool { true } } + _ => { fn works2() -> bool { false } } + } + + cfg_match! { + cfg(feature = "foo") => { fn works3() -> bool { false } } + _ => { fn works3() -> bool { true } } + } + + cfg_match! { + cfg(test) => { + use core::option::Option as Option3; + fn works4() -> Option3 { Some(1) } + } + } + + cfg_match! { + cfg(feature = "foo") => { fn works5() -> bool { false } } + cfg(test) => { fn works5() -> bool { true } } + } + + assert!(works1().is_some()); + assert!(works2()); + assert!(works3()); + assert!(works4().is_some()); + assert!(works5()); +} + +#[test] +fn cfg_match_two_functions() { + cfg_match! { + cfg(target_pointer_width = "64") => { + fn foo1() {} + fn bar1() {} + } + _ => { + fn foo2() {} + fn bar2() {} + } + } + + #[cfg(target_pointer_width = "64")] + { + foo1(); + bar1(); + } + #[cfg(not(target_pointer_width = "64"))] + { + foo2(); + bar2(); + } +} + +fn _accepts_expressions() -> i32 { + cfg_match! { + cfg(unix) => { 1 } + _ => { 2 } + } +} + +fn _allows_stmt_expr_attributes() { + let one = 1; + let two = 2; + cfg_match! { + cfg(unix) => { one * two; } + _ => { one + two; } + } +} From f7f2fa540f59384481d0cf1e2de799883219087f Mon Sep 17 00:00:00 2001 From: Jonathan Pallant Date: Mon, 6 Jan 2025 11:02:30 +0000 Subject: [PATCH 009/142] Ensure generate-copyright is executed from the project root. 
--- src/bootstrap/src/core/build_steps/run.rs | 3 +++ src/tools/generate-copyright/src/main.rs | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index 0a6aea26359f0..1c2a8dbaf90ed 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -218,6 +218,9 @@ impl Step for GenerateCopyright { cmd.env("DEST_LIBSTD", &dest_libstd); cmd.env("OUT_DIR", &builder.out); cmd.env("CARGO", &builder.initial_cargo); + // it is important that generate-copyright runs from the root of the + // source tree, because it uses relative paths + cmd.current_dir(&builder.src); cmd.run(builder); vec![dest, dest_libstd] diff --git a/src/tools/generate-copyright/src/main.rs b/src/tools/generate-copyright/src/main.rs index f83d16d0cabf3..0a446ecff5b85 100644 --- a/src/tools/generate-copyright/src/main.rs +++ b/src/tools/generate-copyright/src/main.rs @@ -8,7 +8,10 @@ mod cargo_metadata; /// The entry point to the binary. /// -/// You should probably let `bootstrap` execute this program instead of running it directly. +/// You should probably let `bootstrap` execute this program instead of running +/// it directly. It assumes that the current working directory is the root of a +/// Rust git repository checkout, and constructs a bunch of relative paths based +/// on that assumption. /// /// Run `x.py run generate-copyright` fn main() -> Result<(), Error> { From e7bd340ba41a327510f36eff96776aaba73f0f60 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Thu, 31 Oct 2024 11:39:27 +0100 Subject: [PATCH 010/142] Switch missing_abi lint to warn-by-default. --- compiler/rustc_lint_defs/src/builtin.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 8399f4c12f4f5..0718842f1bb0e 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -3595,7 +3595,7 @@ declare_lint! { /// /// [Other ABIs]: https://doc.rust-lang.org/reference/items/external-blocks.html#abi pub MISSING_ABI, - Allow, + Warn, "No declared ABI for extern declaration" } From 585c9765a203df7cc7b497d848aae4ba979fcb39 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Thu, 31 Oct 2024 11:39:45 +0100 Subject: [PATCH 011/142] Update tests. 
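(Illustrative sketch only, not taken from the test suite; the function names are
made up. It shows the two shapes the updates below converge on now that
`missing_abi` warns by default.)

    // Warns under the new default: an `extern` fn or block with no explicit ABI.
    // extern fn implicit() {}

    // Most tests now spell the ABI out...
    extern "C" fn explicit() {}

    // ...and tests that deliberately exercise the implicit ABI opt out locally.
    #[allow(missing_abi)]
    extern fn intentional_implicit() {}

    fn main() {
        explicit();
        intentional_implicit();
    }
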
--- .../rustc_codegen_gcc/example/mini_core.rs | 2 +- .../example/mini_core_hello_world.rs | 4 +-- .../rustc_codegen_gcc/example/mod_bench.rs | 2 +- .../rustc_codegen_gcc/example/std_example.rs | 2 +- src/tools/clippy/tests/ui/boxed_local.rs | 1 + src/tools/clippy/tests/ui/boxed_local.stderr | 4 +-- .../clippy/tests/ui/doc/doc-fixable.fixed | 2 +- src/tools/clippy/tests/ui/doc/doc-fixable.rs | 2 +- .../missing_const_for_fn/could_be_const.fixed | 1 + .../ui/missing_const_for_fn/could_be_const.rs | 1 + .../could_be_const.stderr | 16 +++++----- tests/ui/associated-types/issue-91231.rs | 2 +- tests/ui/attributes/key-value-expansion.rs | 2 +- tests/ui/borrowck/issue-92157.rs | 2 +- .../coherence-negative-impls-copy.rs | 2 +- .../cfg_accessible-not_sure.rs | 2 +- .../validation-ice-extern-type-field.rs | 2 +- tests/ui/delegation/glob-non-impl.rs | 2 +- .../ui/extern/extern-type-diag-not-similar.rs | 4 +-- tests/ui/extern/issue-10025.rs | 2 +- tests/ui/extern/issue-95829.rs | 2 +- tests/ui/extern/issue-95829.stderr | 8 ++--- tests/ui/extern/not-in-block.rs | 1 + tests/ui/extern/not-in-block.stderr | 4 +-- .../issue-91370-foreign-fn-block-impl.rs | 2 +- .../issue-91370-foreign-fn-block-impl.stderr | 4 +-- ...d-rustc_legacy_const_generics-arguments.rs | 2 +- tests/ui/link-native-libs/issue-109144.rs | 2 +- .../suggest-libname-only-1.stderr | 10 +++++- .../suggest-libname-only-2.stderr | 10 +++++- tests/ui/lint/function-item-references.rs | 2 +- tests/ui/lint/lint-ctypes.rs | 2 +- tests/ui/lint/lint-ctypes.stderr | 4 +-- tests/ui/offset-of/offset-of-dst-field.rs | 2 +- tests/ui/parser/bad-fn-ptr-qualifier.fixed | 8 ++--- tests/ui/parser/bad-fn-ptr-qualifier.rs | 8 ++--- tests/ui/parser/bad-fn-ptr-qualifier.stderr | 32 +++++++++---------- tests/ui/parser/bad-lit-suffixes.rs | 4 +-- tests/ui/parser/bad-lit-suffixes.stderr | 16 +++++++++- .../issue-87217-keyword-order/wrong-unsafe.rs | 1 + .../wrong-unsafe.stderr | 2 +- tests/ui/parser/item-kw-case-mismatch.fixed | 2 +- tests/ui/parser/item-kw-case-mismatch.rs | 2 +- tests/ui/parser/item-kw-case-mismatch.stderr | 8 ++--- tests/ui/parser/lit-err-in-macro.rs | 2 +- tests/ui/parser/lit-err-in-macro.stderr | 14 +++++++- .../recover/recover-const-async-fn-ptr.rs | 8 ++--- .../recover/recover-const-async-fn-ptr.stderr | 32 +++++++++---------- .../recover/recover-fn-ptr-with-generics.rs | 2 +- .../recover-fn-ptr-with-generics.stderr | 16 +++++----- .../recover-missing-semi-before-item.fixed | 2 +- .../recover-missing-semi-before-item.rs | 2 +- .../recover-missing-semi-before-item.stderr | 2 +- tests/ui/proc-macro/inner-attrs.stderr | 10 +++++- tests/ui/proc-macro/issue-66286.rs | 2 +- tests/ui/proc-macro/issue-66286.stderr | 10 +++--- .../link-ordinal-not-foreign-fn.rs | 2 +- .../unsafe-attributes-fix.fixed | 10 +++--- .../unsafe-attributes-fix.rs | 10 +++--- ...ric-monomorphisation-extern-nonnull-ptr.rs | 2 +- tests/ui/statics/uninhabited-static.rs | 2 +- tests/ui/unpretty/expanded-exhaustive.rs | 8 ++--- tests/ui/unpretty/expanded-exhaustive.stdout | 8 ++--- 63 files changed, 197 insertions(+), 142 deletions(-) diff --git a/compiler/rustc_codegen_gcc/example/mini_core.rs b/compiler/rustc_codegen_gcc/example/mini_core.rs index cdd151613df84..685e03c07c189 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core.rs @@ -681,7 +681,7 @@ impl Index for [T] { } } -extern { +extern "C" { type VaListImpl; } diff --git a/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs 
b/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs index dcfa34cb729d8..1d51e0a1856ba 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs @@ -258,13 +258,13 @@ fn main() { assert_eq!(((|()| 42u8) as fn(()) -> u8)(()), 42); - extern { + extern "C" { #[linkage = "weak"] static ABC: *const u8; } { - extern { + extern "C" { #[linkage = "weak"] static ABC: *const u8; } diff --git a/compiler/rustc_codegen_gcc/example/mod_bench.rs b/compiler/rustc_codegen_gcc/example/mod_bench.rs index cae911c1073d5..e8a9cade74745 100644 --- a/compiler/rustc_codegen_gcc/example/mod_bench.rs +++ b/compiler/rustc_codegen_gcc/example/mod_bench.rs @@ -3,7 +3,7 @@ #![allow(internal_features)] #[link(name = "c")] -extern {} +extern "C" {} #[panic_handler] fn panic_handler(_: &core::panic::PanicInfo<'_>) -> ! { diff --git a/compiler/rustc_codegen_gcc/example/std_example.rs b/compiler/rustc_codegen_gcc/example/std_example.rs index 9e43b4635f0d3..5fa1e0afb0603 100644 --- a/compiler/rustc_codegen_gcc/example/std_example.rs +++ b/compiler/rustc_codegen_gcc/example/std_example.rs @@ -7,7 +7,7 @@ use std::arch::x86_64::*; use std::io::Write; use std::ops::Coroutine; -extern { +extern "C" { pub fn printf(format: *const i8, ...) -> i32; } diff --git a/src/tools/clippy/tests/ui/boxed_local.rs b/src/tools/clippy/tests/ui/boxed_local.rs index e2c27e585fce8..4f361f5162be4 100644 --- a/src/tools/clippy/tests/ui/boxed_local.rs +++ b/src/tools/clippy/tests/ui/boxed_local.rs @@ -173,6 +173,7 @@ mod issue_3739 { /// This shouldn't warn for `boxed_local` as it is intended to called from non-Rust code. pub extern "C" fn do_not_warn_me(_c_pointer: Box) -> () {} +#[allow(missing_abi)] #[rustfmt::skip] // Forces rustfmt to not add ABI pub extern fn do_not_warn_me_no_abi(_c_pointer: Box) -> () {} diff --git a/src/tools/clippy/tests/ui/boxed_local.stderr b/src/tools/clippy/tests/ui/boxed_local.stderr index d3156c820b2c5..08fe375afb235 100644 --- a/src/tools/clippy/tests/ui/boxed_local.stderr +++ b/src/tools/clippy/tests/ui/boxed_local.stderr @@ -14,13 +14,13 @@ LL | pub fn new(_needs_name: Box>) -> () {} | ^^^^^^^^^^^ error: local variable doesn't need to be boxed here - --> tests/ui/boxed_local.rs:188:44 + --> tests/ui/boxed_local.rs:189:44 | LL | fn default_impl_x(self: Box, x: Box) -> u32 { | ^ error: local variable doesn't need to be boxed here - --> tests/ui/boxed_local.rs:196:16 + --> tests/ui/boxed_local.rs:197:16 | LL | fn foo(x: Box) {} | ^ diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed index 5cf5c608a85ce..83574a5cd98d0 100644 --- a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed +++ b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed @@ -240,7 +240,7 @@ fn parenthesized_word() {} /// UXes fn plural_acronym_test() {} -extern { +extern "C" { /// `foo()` fn in_extern(); } diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.rs b/src/tools/clippy/tests/ui/doc/doc-fixable.rs index 420211c653902..20fe89cdc5369 100644 --- a/src/tools/clippy/tests/ui/doc/doc-fixable.rs +++ b/src/tools/clippy/tests/ui/doc/doc-fixable.rs @@ -240,7 +240,7 @@ fn parenthesized_word() {} /// UXes fn plural_acronym_test() {} -extern { +extern "C" { /// foo() fn in_extern(); } diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed index 014fbb85c7a33..dd9dedcdd0441 100644 --- 
a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed @@ -149,6 +149,7 @@ mod msrv { //~^ ERROR: this could be a `const fn` #[rustfmt::skip] + #[allow(missing_abi)] const extern fn implicit_c() {} //~^ ERROR: this could be a `const fn` diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs index 4f7c2cbcf0b42..f974478540cd5 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs @@ -149,6 +149,7 @@ mod msrv { //~^ ERROR: this could be a `const fn` #[rustfmt::skip] + #[allow(missing_abi)] extern fn implicit_c() {} //~^ ERROR: this could be a `const fn` diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr index cc7dfd0888d04..33836bdfe9f8a 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr @@ -222,7 +222,7 @@ LL | const extern "C" fn c() {} | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:152:9 + --> tests/ui/missing_const_for_fn/could_be_const.rs:153:9 | LL | extern fn implicit_c() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -233,7 +233,7 @@ LL | const extern fn implicit_c() {} | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:169:9 + --> tests/ui/missing_const_for_fn/could_be_const.rs:170:9 | LL | / pub fn new(strings: Vec) -> Self { LL | | Self { strings } @@ -246,7 +246,7 @@ LL | pub const fn new(strings: Vec) -> Self { | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:174:9 + --> tests/ui/missing_const_for_fn/could_be_const.rs:175:9 | LL | / pub fn empty() -> Self { LL | | Self { strings: Vec::new() } @@ -259,7 +259,7 @@ LL | pub const fn empty() -> Self { | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:185:9 + --> tests/ui/missing_const_for_fn/could_be_const.rs:186:9 | LL | / pub fn new(text: String) -> Self { LL | | let vec = Vec::new(); @@ -273,7 +273,7 @@ LL | pub const fn new(text: String) -> Self { | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:204:5 + --> tests/ui/missing_const_for_fn/could_be_const.rs:205:5 | LL | fn alias_ty_is_projection(bar: <() as FooTrait>::Foo) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -284,7 +284,7 @@ LL | const fn alias_ty_is_projection(bar: <() as FooTrait>::Foo) {} | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:208:5 + --> tests/ui/missing_const_for_fn/could_be_const.rs:209:5 | LL | extern "C-unwind" fn c_unwind() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -295,7 +295,7 @@ LL | const extern "C-unwind" fn c_unwind() {} | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:210:5 + --> tests/ui/missing_const_for_fn/could_be_const.rs:211:5 | LL | extern "system" fn system() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -306,7 +306,7 @@ LL | const extern "system" fn system() {} | +++++ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/could_be_const.rs:212:5 + --> tests/ui/missing_const_for_fn/could_be_const.rs:213:5 | LL | extern 
"system-unwind" fn system_unwind() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/associated-types/issue-91231.rs b/tests/ui/associated-types/issue-91231.rs index d1c99fd44fa02..a04ab60a7fcdb 100644 --- a/tests/ui/associated-types/issue-91231.rs +++ b/tests/ui/associated-types/issue-91231.rs @@ -3,7 +3,7 @@ #![feature(extern_types)] #![allow(dead_code)] -extern { +extern "C" { type Extern; } diff --git a/tests/ui/attributes/key-value-expansion.rs b/tests/ui/attributes/key-value-expansion.rs index e5700a7593562..9034e0a739823 100644 --- a/tests/ui/attributes/key-value-expansion.rs +++ b/tests/ui/attributes/key-value-expansion.rs @@ -39,7 +39,7 @@ bug!(); macro_rules! doc_comment { ($x:expr) => { #[doc = $x] - extern {} + extern "C" {} }; } diff --git a/tests/ui/borrowck/issue-92157.rs b/tests/ui/borrowck/issue-92157.rs index 3a6f8908b219f..a2b685cdf6ede 100644 --- a/tests/ui/borrowck/issue-92157.rs +++ b/tests/ui/borrowck/issue-92157.rs @@ -5,7 +5,7 @@ #[cfg(target_os = "linux")] #[link(name = "c")] -extern {} +extern "C" {} #[lang = "start"] fn start(_main: fn() -> T, _argc: isize, _argv: *const *const u8) -> isize { diff --git a/tests/ui/coherence/coherence-negative-impls-copy.rs b/tests/ui/coherence/coherence-negative-impls-copy.rs index 377d750f8baa4..c0d852180a518 100644 --- a/tests/ui/coherence/coherence-negative-impls-copy.rs +++ b/tests/ui/coherence/coherence-negative-impls-copy.rs @@ -18,7 +18,7 @@ impl !Copy for WithDrop {} struct Type; trait Trait {} -extern { +extern "C" { type ExternType; } diff --git a/tests/ui/conditional-compilation/cfg_accessible-not_sure.rs b/tests/ui/conditional-compilation/cfg_accessible-not_sure.rs index e357d3c6cb56f..2ac57f356740f 100644 --- a/tests/ui/conditional-compilation/cfg_accessible-not_sure.rs +++ b/tests/ui/conditional-compilation/cfg_accessible-not_sure.rs @@ -63,7 +63,7 @@ const B: bool = true; // ForeignType::unresolved - error -extern { +extern "C" { type ForeignType; } diff --git a/tests/ui/consts/const-eval/validation-ice-extern-type-field.rs b/tests/ui/consts/const-eval/validation-ice-extern-type-field.rs index 3502409d576c2..8a32b170c40a4 100644 --- a/tests/ui/consts/const-eval/validation-ice-extern-type-field.rs +++ b/tests/ui/consts/const-eval/validation-ice-extern-type-field.rs @@ -1,6 +1,6 @@ #![feature(extern_types)] -extern { +extern "C" { type Opaque; } diff --git a/tests/ui/delegation/glob-non-impl.rs b/tests/ui/delegation/glob-non-impl.rs index d523731eeb35f..e3a4061fb15a6 100644 --- a/tests/ui/delegation/glob-non-impl.rs +++ b/tests/ui/delegation/glob-non-impl.rs @@ -11,7 +11,7 @@ trait OtherTrait { reuse Trait::*; //~ ERROR glob delegation is only supported in impls } -extern { +extern "C" { reuse Trait::*; //~ ERROR delegation is not supported in `extern` blocks } diff --git a/tests/ui/extern/extern-type-diag-not-similar.rs b/tests/ui/extern/extern-type-diag-not-similar.rs index 39d00a6c1bc31..cd3eec9f1f7b8 100644 --- a/tests/ui/extern/extern-type-diag-not-similar.rs +++ b/tests/ui/extern/extern-type-diag-not-similar.rs @@ -4,11 +4,11 @@ // because they are both extern types. 
#![feature(extern_types)] -extern { +extern "C" { type ShouldNotBeMentioned; } -extern { +extern "C" { type Foo; } diff --git a/tests/ui/extern/issue-10025.rs b/tests/ui/extern/issue-10025.rs index 140012f4a1613..9be0f616fd2d6 100644 --- a/tests/ui/extern/issue-10025.rs +++ b/tests/ui/extern/issue-10025.rs @@ -1,5 +1,5 @@ //@ run-pass -#![allow(dead_code)] +#![allow(dead_code, missing_abi)] unsafe extern fn foo() {} unsafe extern "C" fn bar() {} diff --git a/tests/ui/extern/issue-95829.rs b/tests/ui/extern/issue-95829.rs index c5ae4c6826597..493d53d2532f2 100644 --- a/tests/ui/extern/issue-95829.rs +++ b/tests/ui/extern/issue-95829.rs @@ -1,6 +1,6 @@ //@ edition:2018 -extern { +extern "C" { async fn L() { //~ ERROR: incorrect function inside `extern` block //~^ ERROR: functions in `extern` blocks cannot have `async` qualifier async fn M() {} diff --git a/tests/ui/extern/issue-95829.stderr b/tests/ui/extern/issue-95829.stderr index 2f396b8cc0415..2acd0fa3a2650 100644 --- a/tests/ui/extern/issue-95829.stderr +++ b/tests/ui/extern/issue-95829.stderr @@ -1,8 +1,8 @@ error: incorrect function inside `extern` block --> $DIR/issue-95829.rs:4:14 | -LL | extern { - | ------ `extern` blocks define existing foreign functions and functions inside of them cannot have a body +LL | extern "C" { + | ---------- `extern` blocks define existing foreign functions and functions inside of them cannot have a body LL | async fn L() { | ______________^___- | | | @@ -18,8 +18,8 @@ LL | | } error: functions in `extern` blocks cannot have `async` qualifier --> $DIR/issue-95829.rs:4:5 | -LL | extern { - | ------ in this `extern` block +LL | extern "C" { + | ---------- in this `extern` block LL | async fn L() { | ^^^^^ help: remove the `async` qualifier diff --git a/tests/ui/extern/not-in-block.rs b/tests/ui/extern/not-in-block.rs index d3bcafdef7b3c..c21616350902e 100644 --- a/tests/ui/extern/not-in-block.rs +++ b/tests/ui/extern/not-in-block.rs @@ -1,4 +1,5 @@ #![crate_type = "lib"] +#![allow(missing_abi)] extern fn none_fn(x: bool) -> i32; //~^ ERROR free function without a body diff --git a/tests/ui/extern/not-in-block.stderr b/tests/ui/extern/not-in-block.stderr index 2544949ab17a8..f86c279a23424 100644 --- a/tests/ui/extern/not-in-block.stderr +++ b/tests/ui/extern/not-in-block.stderr @@ -1,5 +1,5 @@ error: free function without a body - --> $DIR/not-in-block.rs:3:1 + --> $DIR/not-in-block.rs:4:1 | LL | extern fn none_fn(x: bool) -> i32; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -14,7 +14,7 @@ LL | extern { fn none_fn(x: bool) -> i32; } | ~~~~~~~~ + error: free function without a body - --> $DIR/not-in-block.rs:5:1 + --> $DIR/not-in-block.rs:6:1 | LL | extern "C" fn c_fn(x: bool) -> i32; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/foreign/issue-91370-foreign-fn-block-impl.rs b/tests/ui/foreign/issue-91370-foreign-fn-block-impl.rs index 2ac3ca29355d8..e8634de86ea49 100644 --- a/tests/ui/foreign/issue-91370-foreign-fn-block-impl.rs +++ b/tests/ui/foreign/issue-91370-foreign-fn-block-impl.rs @@ -1,6 +1,6 @@ // Regression test for issue #91370. 
-extern { +extern "C" { //~^ `extern` blocks define existing foreign functions fn f() { //~^ incorrect function inside `extern` block diff --git a/tests/ui/foreign/issue-91370-foreign-fn-block-impl.stderr b/tests/ui/foreign/issue-91370-foreign-fn-block-impl.stderr index fea2ab61e929f..155fdf9d09ab0 100644 --- a/tests/ui/foreign/issue-91370-foreign-fn-block-impl.stderr +++ b/tests/ui/foreign/issue-91370-foreign-fn-block-impl.stderr @@ -1,8 +1,8 @@ error: incorrect function inside `extern` block --> $DIR/issue-91370-foreign-fn-block-impl.rs:5:8 | -LL | extern { - | ------ `extern` blocks define existing foreign functions and functions inside of them cannot have a body +LL | extern "C" { + | ---------- `extern` blocks define existing foreign functions and functions inside of them cannot have a body LL | LL | fn f() { | ________^___- diff --git a/tests/ui/invalid/invalid-rustc_legacy_const_generics-arguments.rs b/tests/ui/invalid/invalid-rustc_legacy_const_generics-arguments.rs index 6eabd9b1015b7..73a0363904aeb 100644 --- a/tests/ui/invalid/invalid-rustc_legacy_const_generics-arguments.rs +++ b/tests/ui/invalid/invalid-rustc_legacy_const_generics-arguments.rs @@ -21,7 +21,7 @@ struct S; #[rustc_legacy_const_generics(0usize)] //~ ERROR suffixed literals are not allowed in attributes fn foo6() {} -extern { +extern "C" { #[rustc_legacy_const_generics(1)] //~ ERROR attribute should be applied to a function fn foo7(); //~ ERROR foreign items may not have const parameters } diff --git a/tests/ui/link-native-libs/issue-109144.rs b/tests/ui/link-native-libs/issue-109144.rs index 2f740e5538973..6970a4989bb53 100644 --- a/tests/ui/link-native-libs/issue-109144.rs +++ b/tests/ui/link-native-libs/issue-109144.rs @@ -1,4 +1,4 @@ #![crate_type = "lib"] #[link(kind = "static", modifiers = "+whole-archive,+bundle")] //~^ ERROR `#[link]` attribute requires a `name = "string"` argument -extern {} +extern "C" {} diff --git a/tests/ui/link-native-libs/suggest-libname-only-1.stderr b/tests/ui/link-native-libs/suggest-libname-only-1.stderr index e142835a9d64c..aae8f7de966a0 100644 --- a/tests/ui/link-native-libs/suggest-libname-only-1.stderr +++ b/tests/ui/link-native-libs/suggest-libname-only-1.stderr @@ -1,6 +1,14 @@ +warning: extern declarations without an explicit ABI are deprecated + --> $DIR/suggest-libname-only-1.rs:7:1 + | +LL | extern { } + | ^^^^^^ help: explicitly specify the C ABI: `extern "C"` + | + = note: `#[warn(missing_abi)]` on by default + error: could not find native static library `libfoo.a`, perhaps an -L flag is missing? | = help: only provide the library name `foo`, not the full filename -error: aborting due to 1 previous error +error: aborting due to 1 previous error; 1 warning emitted diff --git a/tests/ui/link-native-libs/suggest-libname-only-2.stderr b/tests/ui/link-native-libs/suggest-libname-only-2.stderr index 392d2f01f619b..a2adaee3f97f5 100644 --- a/tests/ui/link-native-libs/suggest-libname-only-2.stderr +++ b/tests/ui/link-native-libs/suggest-libname-only-2.stderr @@ -1,6 +1,14 @@ +warning: extern declarations without an explicit ABI are deprecated + --> $DIR/suggest-libname-only-2.rs:7:1 + | +LL | extern { } + | ^^^^^^ help: explicitly specify the C ABI: `extern "C"` + | + = note: `#[warn(missing_abi)]` on by default + error: could not find native static library `bar.lib`, perhaps an -L flag is missing? 
| = help: only provide the library name `bar`, not the full filename -error: aborting due to 1 previous error +error: aborting due to 1 previous error; 1 warning emitted diff --git a/tests/ui/lint/function-item-references.rs b/tests/ui/lint/function-item-references.rs index 918d72e28a918..4f2fc4de8632e 100644 --- a/tests/ui/lint/function-item-references.rs +++ b/tests/ui/lint/function-item-references.rs @@ -11,7 +11,7 @@ fn baz(x: u32, y: u32) -> u32 { x + y } unsafe fn unsafe_fn() { } extern "C" fn c_fn() { } unsafe extern "C" fn unsafe_c_fn() { } -unsafe extern fn variadic(_x: u32, _args: ...) { } +unsafe extern "C" fn variadic(_x: u32, _args: ...) { } fn take_generic_ref<'a, T>(_x: &'a T) { } fn take_generic_array(_x: [T; N]) { } fn multiple_generic(_x: T, _y: U) { } diff --git a/tests/ui/lint/lint-ctypes.rs b/tests/ui/lint/lint-ctypes.rs index dae07930aba60..6dd9be10a48f8 100644 --- a/tests/ui/lint/lint-ctypes.rs +++ b/tests/ui/lint/lint-ctypes.rs @@ -72,7 +72,7 @@ extern "C" { pub fn transparent_fn(p: TransparentBadFn); //~ ERROR: uses type `Box` pub fn raw_array(arr: [u8; 8]); //~ ERROR: uses type `[u8; 8]` - pub fn no_niche_a(a: Option>); + pub fn no_niche_a(a: Option>); //~^ ERROR: uses type `Option>` pub fn no_niche_b(b: Option>); //~^ ERROR: uses type `Option>` diff --git a/tests/ui/lint/lint-ctypes.stderr b/tests/ui/lint/lint-ctypes.stderr index 2c81c7b8e4b67..8137ae868d356 100644 --- a/tests/ui/lint/lint-ctypes.stderr +++ b/tests/ui/lint/lint-ctypes.stderr @@ -227,8 +227,8 @@ LL | pub fn raw_array(arr: [u8; 8]); error: `extern` block uses type `Option>`, which is not FFI-safe --> $DIR/lint-ctypes.rs:75:26 | -LL | pub fn no_niche_a(a: Option>); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | pub fn no_niche_a(a: Option>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint diff --git a/tests/ui/offset-of/offset-of-dst-field.rs b/tests/ui/offset-of/offset-of-dst-field.rs index 5ae15f323577f..2e0bdb151e180 100644 --- a/tests/ui/offset-of/offset-of-dst-field.rs +++ b/tests/ui/offset-of/offset-of-dst-field.rs @@ -16,7 +16,7 @@ struct Beta { z: dyn Trait, } -extern { +extern "C" { type Extern; } diff --git a/tests/ui/parser/bad-fn-ptr-qualifier.fixed b/tests/ui/parser/bad-fn-ptr-qualifier.fixed index 558a27cd4562d..e2a2f9486b785 100644 --- a/tests/ui/parser/bad-fn-ptr-qualifier.fixed +++ b/tests/ui/parser/bad-fn-ptr-qualifier.fixed @@ -4,9 +4,9 @@ pub type T0 = fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type T1 = extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` -pub type T2 = unsafe extern fn(); //~ ERROR an `fn` pointer type cannot be `const` +pub type T2 = unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type T3 = fn(); //~ ERROR an `fn` pointer type cannot be `async` -pub type T4 = extern fn(); //~ ERROR an `fn` pointer type cannot be `async` +pub type T4 = extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` pub type T5 = unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` pub type T6 = unsafe extern "C" fn(); //~^ ERROR an `fn` pointer type cannot be `const` @@ -14,9 +14,9 @@ pub type T6 = unsafe extern "C" fn(); pub type FTT0 = for<'a> fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type FTT1 = for<'a> extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` -pub type FTT2 = for<'a> unsafe extern fn(); 
//~ ERROR an `fn` pointer type cannot be `const` +pub type FTT2 = for<'a> unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type FTT3 = for<'a> fn(); //~ ERROR an `fn` pointer type cannot be `async` -pub type FTT4 = for<'a> extern fn(); //~ ERROR an `fn` pointer type cannot be `async` +pub type FTT4 = for<'a> extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` pub type FTT5 = for<'a> unsafe extern "C" fn(); //~^ ERROR an `fn` pointer type cannot be `async` pub type FTT6 = for<'a> unsafe extern "C" fn(); diff --git a/tests/ui/parser/bad-fn-ptr-qualifier.rs b/tests/ui/parser/bad-fn-ptr-qualifier.rs index 9750f480935f2..f2611c93b1763 100644 --- a/tests/ui/parser/bad-fn-ptr-qualifier.rs +++ b/tests/ui/parser/bad-fn-ptr-qualifier.rs @@ -4,9 +4,9 @@ pub type T0 = const fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type T1 = const extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` -pub type T2 = const unsafe extern fn(); //~ ERROR an `fn` pointer type cannot be `const` +pub type T2 = const unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type T3 = async fn(); //~ ERROR an `fn` pointer type cannot be `async` -pub type T4 = async extern fn(); //~ ERROR an `fn` pointer type cannot be `async` +pub type T4 = async extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` pub type T5 = async unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` pub type T6 = const async unsafe extern "C" fn(); //~^ ERROR an `fn` pointer type cannot be `const` @@ -14,9 +14,9 @@ pub type T6 = const async unsafe extern "C" fn(); pub type FTT0 = for<'a> const fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type FTT1 = for<'a> const extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` -pub type FTT2 = for<'a> const unsafe extern fn(); //~ ERROR an `fn` pointer type cannot be `const` +pub type FTT2 = for<'a> const unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` pub type FTT3 = for<'a> async fn(); //~ ERROR an `fn` pointer type cannot be `async` -pub type FTT4 = for<'a> async extern fn(); //~ ERROR an `fn` pointer type cannot be `async` +pub type FTT4 = for<'a> async extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` pub type FTT5 = for<'a> async unsafe extern "C" fn(); //~^ ERROR an `fn` pointer type cannot be `async` pub type FTT6 = for<'a> const async unsafe extern "C" fn(); diff --git a/tests/ui/parser/bad-fn-ptr-qualifier.stderr b/tests/ui/parser/bad-fn-ptr-qualifier.stderr index 523ee47b0c942..ddc8bac678cfb 100644 --- a/tests/ui/parser/bad-fn-ptr-qualifier.stderr +++ b/tests/ui/parser/bad-fn-ptr-qualifier.stderr @@ -29,15 +29,15 @@ LL + pub type T1 = extern "C" fn(); error: an `fn` pointer type cannot be `const` --> $DIR/bad-fn-ptr-qualifier.rs:7:15 | -LL | pub type T2 = const unsafe extern fn(); - | -----^^^^^^^^^^^^^^^^^^^ +LL | pub type T2 = const unsafe extern "C" fn(); + | -----^^^^^^^^^^^^^^^^^^^^^^^ | | | `const` because of this | help: remove the `const` qualifier | -LL - pub type T2 = const unsafe extern fn(); -LL + pub type T2 = unsafe extern fn(); +LL - pub type T2 = const unsafe extern "C" fn(); +LL + pub type T2 = unsafe extern "C" fn(); | error: an `fn` pointer type cannot be `async` @@ -57,15 +57,15 @@ LL + pub type T3 = fn(); error: an `fn` pointer type cannot be `async` --> $DIR/bad-fn-ptr-qualifier.rs:9:15 | -LL | pub type T4 = async extern fn(); - | -----^^^^^^^^^^^^ +LL | pub type T4 = async extern "C" fn(); + | 
-----^^^^^^^^^^^^^^^^ | | | `async` because of this | help: remove the `async` qualifier | -LL - pub type T4 = async extern fn(); -LL + pub type T4 = extern fn(); +LL - pub type T4 = async extern "C" fn(); +LL + pub type T4 = extern "C" fn(); | error: an `fn` pointer type cannot be `async` @@ -141,15 +141,15 @@ LL + pub type FTT1 = for<'a> extern "C" fn(); error: an `fn` pointer type cannot be `const` --> $DIR/bad-fn-ptr-qualifier.rs:17:17 | -LL | pub type FTT2 = for<'a> const unsafe extern fn(); - | ^^^^^^^^-----^^^^^^^^^^^^^^^^^^^ +LL | pub type FTT2 = for<'a> const unsafe extern "C" fn(); + | ^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^ | | | `const` because of this | help: remove the `const` qualifier | -LL - pub type FTT2 = for<'a> const unsafe extern fn(); -LL + pub type FTT2 = for<'a> unsafe extern fn(); +LL - pub type FTT2 = for<'a> const unsafe extern "C" fn(); +LL + pub type FTT2 = for<'a> unsafe extern "C" fn(); | error: an `fn` pointer type cannot be `async` @@ -169,15 +169,15 @@ LL + pub type FTT3 = for<'a> fn(); error: an `fn` pointer type cannot be `async` --> $DIR/bad-fn-ptr-qualifier.rs:19:17 | -LL | pub type FTT4 = for<'a> async extern fn(); - | ^^^^^^^^-----^^^^^^^^^^^^ +LL | pub type FTT4 = for<'a> async extern "C" fn(); + | ^^^^^^^^-----^^^^^^^^^^^^^^^^ | | | `async` because of this | help: remove the `async` qualifier | -LL - pub type FTT4 = for<'a> async extern fn(); -LL + pub type FTT4 = for<'a> extern fn(); +LL - pub type FTT4 = for<'a> async extern "C" fn(); +LL + pub type FTT4 = for<'a> extern "C" fn(); | error: an `fn` pointer type cannot be `async` diff --git a/tests/ui/parser/bad-lit-suffixes.rs b/tests/ui/parser/bad-lit-suffixes.rs index c614f49388574..f29dc53d322b9 100644 --- a/tests/ui/parser/bad-lit-suffixes.rs +++ b/tests/ui/parser/bad-lit-suffixes.rs @@ -1,10 +1,10 @@ #![feature(rustc_attrs)] -extern +extern //~ WARN missing_abi "C"suffix //~ ERROR suffixes on string literals are invalid fn foo() {} -extern +extern //~ WARN missing_abi "C"suffix //~ ERROR suffixes on string literals are invalid {} diff --git a/tests/ui/parser/bad-lit-suffixes.stderr b/tests/ui/parser/bad-lit-suffixes.stderr index b5dacdf7d0d3d..121db2058f109 100644 --- a/tests/ui/parser/bad-lit-suffixes.stderr +++ b/tests/ui/parser/bad-lit-suffixes.stderr @@ -49,6 +49,20 @@ LL | #[rustc_layout_scalar_valid_range_start(0suffix)] | = help: the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.) 
+warning: extern declarations without an explicit ABI are deprecated + --> $DIR/bad-lit-suffixes.rs:3:1 + | +LL | extern + | ^^^^^^ help: explicitly specify the C ABI: `extern "C"` + | + = note: `#[warn(missing_abi)]` on by default + +warning: extern declarations without an explicit ABI are deprecated + --> $DIR/bad-lit-suffixes.rs:7:1 + | +LL | extern + | ^^^^^^ help: explicitly specify the C ABI: `extern "C"` + error: suffixes on string literals are invalid --> $DIR/bad-lit-suffixes.rs:12:5 | @@ -149,5 +163,5 @@ LL | 1.0e10suffix; | = help: valid suffixes are `f32` and `f64` -error: aborting due to 21 previous errors +error: aborting due to 21 previous errors; 2 warnings emitted diff --git a/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.rs b/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.rs index 34b687a0f5261..5ecf804de09fe 100644 --- a/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.rs +++ b/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.rs @@ -1,4 +1,5 @@ //@ edition:2018 +#![allow(missing_abi)] // There is an order to respect for keywords before a function: // `, const, async, unsafe, extern, ""` diff --git a/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.stderr b/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.stderr index 0e9f7c51e1a45..232da9acef309 100644 --- a/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.stderr +++ b/tests/ui/parser/issues/issue-87217-keyword-order/wrong-unsafe.stderr @@ -1,5 +1,5 @@ error: expected `fn`, found keyword `unsafe` - --> $DIR/wrong-unsafe.rs:9:8 + --> $DIR/wrong-unsafe.rs:10:8 | LL | extern unsafe fn test() {} | -------^^^^^^ diff --git a/tests/ui/parser/item-kw-case-mismatch.fixed b/tests/ui/parser/item-kw-case-mismatch.fixed index f5afa482712f6..4ee8f9c19dc27 100644 --- a/tests/ui/parser/item-kw-case-mismatch.fixed +++ b/tests/ui/parser/item-kw-case-mismatch.fixed @@ -25,7 +25,7 @@ const unsafe fn _e() {} //~| ERROR keyword `unsafe` is written in the wrong case //~| ERROR keyword `fn` is written in the wrong case -unsafe extern fn _f() {} +unsafe extern "C" fn _f() {} //~^ ERROR keyword `unsafe` is written in the wrong case //~| ERROR keyword `extern` is written in the wrong case diff --git a/tests/ui/parser/item-kw-case-mismatch.rs b/tests/ui/parser/item-kw-case-mismatch.rs index ea224e08a0085..6c858b848cf2c 100644 --- a/tests/ui/parser/item-kw-case-mismatch.rs +++ b/tests/ui/parser/item-kw-case-mismatch.rs @@ -25,7 +25,7 @@ CONST UNSAFE FN _e() {} //~| ERROR keyword `unsafe` is written in the wrong case //~| ERROR keyword `fn` is written in the wrong case -unSAFE EXTern fn _f() {} +unSAFE EXTern "C" fn _f() {} //~^ ERROR keyword `unsafe` is written in the wrong case //~| ERROR keyword `extern` is written in the wrong case diff --git a/tests/ui/parser/item-kw-case-mismatch.stderr b/tests/ui/parser/item-kw-case-mismatch.stderr index 0abc59e064a00..36df72b5cad1c 100644 --- a/tests/ui/parser/item-kw-case-mismatch.stderr +++ b/tests/ui/parser/item-kw-case-mismatch.stderr @@ -111,23 +111,23 @@ LL | CONST UNSAFE fn _e() {} error: keyword `unsafe` is written in the wrong case --> $DIR/item-kw-case-mismatch.rs:28:1 | -LL | unSAFE EXTern fn _f() {} +LL | unSAFE EXTern "C" fn _f() {} | ^^^^^^ | help: write it in the correct case | -LL | unsafe EXTern fn _f() {} +LL | unsafe EXTern "C" fn _f() {} | ~~~~~~ error: keyword `extern` is written in the wrong case --> $DIR/item-kw-case-mismatch.rs:28:8 | -LL | unSAFE EXTern fn _f() {} +LL | unSAFE EXTern "C" 
fn _f() {} | ^^^^^^ | help: write it in the correct case | -LL | unSAFE extern fn _f() {} +LL | unSAFE extern "C" fn _f() {} | ~~~~~~ error: keyword `extern` is written in the wrong case diff --git a/tests/ui/parser/lit-err-in-macro.rs b/tests/ui/parser/lit-err-in-macro.rs index cff8ee6b40ca6..ca117ac4a15d4 100644 --- a/tests/ui/parser/lit-err-in-macro.rs +++ b/tests/ui/parser/lit-err-in-macro.rs @@ -1,6 +1,6 @@ macro_rules! f { ($abi:literal) => { - extern $abi fn f() {} + extern $abi fn f() {} //~ WARN missing_abi } } diff --git a/tests/ui/parser/lit-err-in-macro.stderr b/tests/ui/parser/lit-err-in-macro.stderr index 12e6d51906030..fc2603d0b104c 100644 --- a/tests/ui/parser/lit-err-in-macro.stderr +++ b/tests/ui/parser/lit-err-in-macro.stderr @@ -4,5 +4,17 @@ error: suffixes on string literals are invalid LL | f!("Foo"__); | ^^^^^^^ invalid suffix `__` -error: aborting due to 1 previous error +warning: extern declarations without an explicit ABI are deprecated + --> $DIR/lit-err-in-macro.rs:3:9 + | +LL | extern $abi fn f() {} + | ^^^^^^ help: explicitly specify the C ABI: `extern "C"` +... +LL | f!("Foo"__); + | ----------- in this macro invocation + | + = note: `#[warn(missing_abi)]` on by default + = note: this warning originates in the macro `f` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 1 previous error; 1 warning emitted diff --git a/tests/ui/parser/recover/recover-const-async-fn-ptr.rs b/tests/ui/parser/recover/recover-const-async-fn-ptr.rs index 2d8a3858aa603..45d753495999b 100644 --- a/tests/ui/parser/recover/recover-const-async-fn-ptr.rs +++ b/tests/ui/parser/recover/recover-const-async-fn-ptr.rs @@ -2,9 +2,9 @@ type T0 = const fn(); //~ ERROR an `fn` pointer type cannot be `const` type T1 = const extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` -type T2 = const unsafe extern fn(); //~ ERROR an `fn` pointer type cannot be `const` +type T2 = const unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` type T3 = async fn(); //~ ERROR an `fn` pointer type cannot be `async` -type T4 = async extern fn(); //~ ERROR an `fn` pointer type cannot be `async` +type T4 = async extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` type T5 = async unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` type T6 = const async unsafe extern "C" fn(); //~^ ERROR an `fn` pointer type cannot be `const` @@ -12,9 +12,9 @@ type T6 = const async unsafe extern "C" fn(); type FT0 = for<'a> const fn(); //~ ERROR an `fn` pointer type cannot be `const` type FT1 = for<'a> const extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` -type FT2 = for<'a> const unsafe extern fn(); //~ ERROR an `fn` pointer type cannot be `const` +type FT2 = for<'a> const unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `const` type FT3 = for<'a> async fn(); //~ ERROR an `fn` pointer type cannot be `async` -type FT4 = for<'a> async extern fn(); //~ ERROR an `fn` pointer type cannot be `async` +type FT4 = for<'a> async extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` type FT5 = for<'a> async unsafe extern "C" fn(); //~ ERROR an `fn` pointer type cannot be `async` type FT6 = for<'a> const async unsafe extern "C" fn(); //~^ ERROR an `fn` pointer type cannot be `const` diff --git a/tests/ui/parser/recover/recover-const-async-fn-ptr.stderr b/tests/ui/parser/recover/recover-const-async-fn-ptr.stderr index 8e5b76163ade1..9112a0e135a50 100644 --- 
a/tests/ui/parser/recover/recover-const-async-fn-ptr.stderr +++ b/tests/ui/parser/recover/recover-const-async-fn-ptr.stderr @@ -29,15 +29,15 @@ LL + type T1 = extern "C" fn(); error: an `fn` pointer type cannot be `const` --> $DIR/recover-const-async-fn-ptr.rs:5:11 | -LL | type T2 = const unsafe extern fn(); - | -----^^^^^^^^^^^^^^^^^^^ +LL | type T2 = const unsafe extern "C" fn(); + | -----^^^^^^^^^^^^^^^^^^^^^^^ | | | `const` because of this | help: remove the `const` qualifier | -LL - type T2 = const unsafe extern fn(); -LL + type T2 = unsafe extern fn(); +LL - type T2 = const unsafe extern "C" fn(); +LL + type T2 = unsafe extern "C" fn(); | error: an `fn` pointer type cannot be `async` @@ -57,15 +57,15 @@ LL + type T3 = fn(); error: an `fn` pointer type cannot be `async` --> $DIR/recover-const-async-fn-ptr.rs:7:11 | -LL | type T4 = async extern fn(); - | -----^^^^^^^^^^^^ +LL | type T4 = async extern "C" fn(); + | -----^^^^^^^^^^^^^^^^ | | | `async` because of this | help: remove the `async` qualifier | -LL - type T4 = async extern fn(); -LL + type T4 = extern fn(); +LL - type T4 = async extern "C" fn(); +LL + type T4 = extern "C" fn(); | error: an `fn` pointer type cannot be `async` @@ -141,15 +141,15 @@ LL + type FT1 = for<'a> extern "C" fn(); error: an `fn` pointer type cannot be `const` --> $DIR/recover-const-async-fn-ptr.rs:15:12 | -LL | type FT2 = for<'a> const unsafe extern fn(); - | ^^^^^^^^-----^^^^^^^^^^^^^^^^^^^ +LL | type FT2 = for<'a> const unsafe extern "C" fn(); + | ^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^ | | | `const` because of this | help: remove the `const` qualifier | -LL - type FT2 = for<'a> const unsafe extern fn(); -LL + type FT2 = for<'a> unsafe extern fn(); +LL - type FT2 = for<'a> const unsafe extern "C" fn(); +LL + type FT2 = for<'a> unsafe extern "C" fn(); | error: an `fn` pointer type cannot be `async` @@ -169,15 +169,15 @@ LL + type FT3 = for<'a> fn(); error: an `fn` pointer type cannot be `async` --> $DIR/recover-const-async-fn-ptr.rs:17:12 | -LL | type FT4 = for<'a> async extern fn(); - | ^^^^^^^^-----^^^^^^^^^^^^ +LL | type FT4 = for<'a> async extern "C" fn(); + | ^^^^^^^^-----^^^^^^^^^^^^^^^^ | | | `async` because of this | help: remove the `async` qualifier | -LL - type FT4 = for<'a> async extern fn(); -LL + type FT4 = for<'a> extern fn(); +LL - type FT4 = for<'a> async extern "C" fn(); +LL + type FT4 = for<'a> extern "C" fn(); | error: an `fn` pointer type cannot be `async` diff --git a/tests/ui/parser/recover/recover-fn-ptr-with-generics.rs b/tests/ui/parser/recover/recover-fn-ptr-with-generics.rs index 76c56a715d2a4..ab2cfc961dff1 100644 --- a/tests/ui/parser/recover/recover-fn-ptr-with-generics.rs +++ b/tests/ui/parser/recover/recover-fn-ptr-with-generics.rs @@ -19,7 +19,7 @@ fn main() { type Hmm = fn<>(); //~^ ERROR function pointer types may not have generic parameters - let _: extern fn<'a: 'static>(); + let _: extern "C" fn<'a: 'static>(); //~^ ERROR function pointer types may not have generic parameters //~| ERROR bounds cannot be used in this context diff --git a/tests/ui/parser/recover/recover-fn-ptr-with-generics.stderr b/tests/ui/parser/recover/recover-fn-ptr-with-generics.stderr index 6b6cb2d6bdde7..9023856ef2486 100644 --- a/tests/ui/parser/recover/recover-fn-ptr-with-generics.stderr +++ b/tests/ui/parser/recover/recover-fn-ptr-with-generics.stderr @@ -59,15 +59,15 @@ LL | type Hmm = fn<>(); | ^^ error: function pointer types may not have generic parameters - --> $DIR/recover-fn-ptr-with-generics.rs:22:21 + --> 
$DIR/recover-fn-ptr-with-generics.rs:22:25 | -LL | let _: extern fn<'a: 'static>(); - | ^^^^^^^^^^^^^ +LL | let _: extern "C" fn<'a: 'static>(); + | ^^^^^^^^^^^^^ | help: consider moving the lifetime parameter to a `for` parameter list | -LL - let _: extern fn<'a: 'static>(); -LL + let _: for<'a> extern fn(); +LL - let _: extern "C" fn<'a: 'static>(); +LL + let _: for<'a> extern "C" fn(); | error: function pointer types may not have generic parameters @@ -101,10 +101,10 @@ LL | type Identity = fn(T) -> T; | ^ not found in this scope error: bounds cannot be used in this context - --> $DIR/recover-fn-ptr-with-generics.rs:22:26 + --> $DIR/recover-fn-ptr-with-generics.rs:22:30 | -LL | let _: extern fn<'a: 'static>(); - | ^^^^^^^ +LL | let _: extern "C" fn<'a: 'static>(); + | ^^^^^^^ error: aborting due to 12 previous errors diff --git a/tests/ui/parser/recover/recover-missing-semi-before-item.fixed b/tests/ui/parser/recover/recover-missing-semi-before-item.fixed index 6f85452c6fb96..871fa0d24eca7 100644 --- a/tests/ui/parser/recover/recover-missing-semi-before-item.fixed +++ b/tests/ui/parser/recover/recover-missing-semi-before-item.fixed @@ -28,7 +28,7 @@ fn for_fn() { fn for_extern() { let foo = 3; //~ ERROR expected `;`, found keyword `extern` - extern fn foo() {} + extern "C" fn foo() {} } fn for_impl() { diff --git a/tests/ui/parser/recover/recover-missing-semi-before-item.rs b/tests/ui/parser/recover/recover-missing-semi-before-item.rs index f75945b55c2b0..de92603a74c68 100644 --- a/tests/ui/parser/recover/recover-missing-semi-before-item.rs +++ b/tests/ui/parser/recover/recover-missing-semi-before-item.rs @@ -28,7 +28,7 @@ fn for_fn() { fn for_extern() { let foo = 3 //~ ERROR expected `;`, found keyword `extern` - extern fn foo() {} + extern "C" fn foo() {} } fn for_impl() { diff --git a/tests/ui/parser/recover/recover-missing-semi-before-item.stderr b/tests/ui/parser/recover/recover-missing-semi-before-item.stderr index 61c43f2f18998..3b55cd9ddd086 100644 --- a/tests/ui/parser/recover/recover-missing-semi-before-item.stderr +++ b/tests/ui/parser/recover/recover-missing-semi-before-item.stderr @@ -35,7 +35,7 @@ error: expected `;`, found keyword `extern` | LL | let foo = 3 | ^ help: add `;` here -LL | extern fn foo() {} +LL | extern "C" fn foo() {} | ------ unexpected token error: expected `;`, found keyword `impl` diff --git a/tests/ui/proc-macro/inner-attrs.stderr b/tests/ui/proc-macro/inner-attrs.stderr index ee8732c650dc9..3ab180be821a9 100644 --- a/tests/ui/proc-macro/inner-attrs.stderr +++ b/tests/ui/proc-macro/inner-attrs.stderr @@ -22,5 +22,13 @@ error: expected non-macro inner attribute, found attribute macro `print_attr` LL | #![print_attr] | ^^^^^^^^^^ not a non-macro inner attribute -error: aborting due to 4 previous errors +warning: extern declarations without an explicit ABI are deprecated + --> $DIR/inner-attrs.rs:82:1 + | +LL | extern { + | ^^^^^^ help: explicitly specify the C ABI: `extern "C"` + | + = note: `#[warn(missing_abi)]` on by default + +error: aborting due to 4 previous errors; 1 warning emitted diff --git a/tests/ui/proc-macro/issue-66286.rs b/tests/ui/proc-macro/issue-66286.rs index 57d1af26e934d..882f87fa4ac22 100644 --- a/tests/ui/proc-macro/issue-66286.rs +++ b/tests/ui/proc-macro/issue-66286.rs @@ -5,7 +5,7 @@ extern crate issue_66286; #[issue_66286::vec_ice] -pub extern fn foo(_: Vec(u32)) -> u32 { +pub extern "C" fn foo(_: Vec(u32)) -> u32 { //~^ ERROR: parenthesized type parameters may only be used with a `Fn` trait 0 } diff --git 
a/tests/ui/proc-macro/issue-66286.stderr b/tests/ui/proc-macro/issue-66286.stderr index fc4c2062fd7bc..c92bed1b56344 100644 --- a/tests/ui/proc-macro/issue-66286.stderr +++ b/tests/ui/proc-macro/issue-66286.stderr @@ -1,13 +1,13 @@ error[E0214]: parenthesized type parameters may only be used with a `Fn` trait - --> $DIR/issue-66286.rs:8:22 + --> $DIR/issue-66286.rs:8:26 | -LL | pub extern fn foo(_: Vec(u32)) -> u32 { - | ^^^^^^^^ only `Fn` traits may use parentheses +LL | pub extern "C" fn foo(_: Vec(u32)) -> u32 { + | ^^^^^^^^ only `Fn` traits may use parentheses | help: use angle brackets instead | -LL | pub extern fn foo(_: Vec) -> u32 { - | ~ ~ +LL | pub extern "C" fn foo(_: Vec) -> u32 { + | ~ ~ error: aborting due to 1 previous error diff --git a/tests/ui/rfcs/rfc-2627-raw-dylib/link-ordinal-not-foreign-fn.rs b/tests/ui/rfcs/rfc-2627-raw-dylib/link-ordinal-not-foreign-fn.rs index f33a3d62e2688..5982c771033d0 100644 --- a/tests/ui/rfcs/rfc-2627-raw-dylib/link-ordinal-not-foreign-fn.rs +++ b/tests/ui/rfcs/rfc-2627-raw-dylib/link-ordinal-not-foreign-fn.rs @@ -11,7 +11,7 @@ fn test() {} static mut imported_val: i32 = 123; #[link(name = "exporter", kind = "raw-dylib")] -extern { +extern "C" { #[link_ordinal(13)] fn imported_function(); diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed index 586881d180767..8b179f7ef933e 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed @@ -4,7 +4,7 @@ macro_rules! tt { ($e:tt) => { #$e - extern fn foo() {} + extern "C" fn foo() {} } } @@ -13,7 +13,7 @@ macro_rules! ident { #[unsafe($e)] //~^ ERROR: unsafe attribute used without unsafe //~| WARN this is accepted in the current edition - extern fn bar() {} + extern "C" fn bar() {} } } @@ -22,21 +22,21 @@ macro_rules! ident2 { #[unsafe($e = $l)] //~^ ERROR: unsafe attribute used without unsafe //~| WARN this is accepted in the current edition - extern fn bars() {} + extern "C" fn bars() {} } } macro_rules! meta { ($m:meta) => { #[$m] - extern fn baz() {} + extern "C" fn baz() {} } } macro_rules! meta2 { ($m:meta) => { #[$m] - extern fn baw() {} + extern "C" fn baw() {} } } diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs index 03e122c7d57e7..34e5a6b96e389 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs @@ -4,7 +4,7 @@ macro_rules! tt { ($e:tt) => { #$e - extern fn foo() {} + extern "C" fn foo() {} } } @@ -13,7 +13,7 @@ macro_rules! ident { #[$e] //~^ ERROR: unsafe attribute used without unsafe //~| WARN this is accepted in the current edition - extern fn bar() {} + extern "C" fn bar() {} } } @@ -22,21 +22,21 @@ macro_rules! ident2 { #[$e = $l] //~^ ERROR: unsafe attribute used without unsafe //~| WARN this is accepted in the current edition - extern fn bars() {} + extern "C" fn bars() {} } } macro_rules! meta { ($m:meta) => { #[$m] - extern fn baz() {} + extern "C" fn baz() {} } } macro_rules! 
meta2 { ($m:meta) => { #[$m] - extern fn baw() {} + extern "C" fn baw() {} } } diff --git a/tests/ui/simd/type-generic-monomorphisation-extern-nonnull-ptr.rs b/tests/ui/simd/type-generic-monomorphisation-extern-nonnull-ptr.rs index a969295c9f9cd..65c57c42530a1 100644 --- a/tests/ui/simd/type-generic-monomorphisation-extern-nonnull-ptr.rs +++ b/tests/ui/simd/type-generic-monomorphisation-extern-nonnull-ptr.rs @@ -6,7 +6,7 @@ use std::ptr::NonNull; -extern { +extern "C" { type Extern; } diff --git a/tests/ui/statics/uninhabited-static.rs b/tests/ui/statics/uninhabited-static.rs index a0f83f450792a..0f7c5ae6ef4a3 100644 --- a/tests/ui/statics/uninhabited-static.rs +++ b/tests/ui/statics/uninhabited-static.rs @@ -2,7 +2,7 @@ #![deny(uninhabited_static)] enum Void {} -extern { +extern "C" { static VOID: Void; //~ ERROR static of uninhabited type //~| WARN: previously accepted static NEVER: !; //~ ERROR static of uninhabited type diff --git a/tests/ui/unpretty/expanded-exhaustive.rs b/tests/ui/unpretty/expanded-exhaustive.rs index e052627e71cfe..15c710c041837 100644 --- a/tests/ui/unpretty/expanded-exhaustive.rs +++ b/tests/ui/unpretty/expanded-exhaustive.rs @@ -452,15 +452,15 @@ mod items { /// ItemKind::Fn mod item_fn { pub const unsafe extern "C" fn f() {} - pub async unsafe extern fn g() {} + pub async unsafe extern "C" fn g() {} fn h<'a, T>() where T: 'a {} trait TraitItems { - unsafe extern fn f(); + unsafe extern "C" fn f(); } impl TraitItems for _ { - default unsafe extern fn f() {} + default unsafe extern "C" fn f() {} } } @@ -472,7 +472,7 @@ mod items { /// ItemKind::ForeignMod mod item_foreign_mod { unsafe extern "C++" {} - unsafe extern {} + unsafe extern "C" {} } /// ItemKind::GlobalAsm diff --git a/tests/ui/unpretty/expanded-exhaustive.stdout b/tests/ui/unpretty/expanded-exhaustive.stdout index 132d00cd8edd5..f649ae34c9bf0 100644 --- a/tests/ui/unpretty/expanded-exhaustive.stdout +++ b/tests/ui/unpretty/expanded-exhaustive.stdout @@ -433,13 +433,13 @@ mod items { /// ItemKind::Fn mod item_fn { pub const unsafe extern "C" fn f() {} - pub async unsafe extern fn g() {} + pub async unsafe extern "C" fn g() {} fn h<'a, T>() where T: 'a {} trait TraitItems { - unsafe extern fn f(); + unsafe extern "C" fn f(); } impl TraitItems for _ { - default unsafe extern fn f() {} + default unsafe extern "C" fn f() {} } } /// ItemKind::Mod @@ -447,7 +447,7 @@ mod items { /// ItemKind::ForeignMod mod item_foreign_mod { unsafe extern "C++" {} - unsafe extern {} + unsafe extern "C" {} } /// ItemKind::GlobalAsm mod item_global_asm { From 3c3186148e4a66a507e606f191c35ed49d873b08 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Jan 2025 00:28:47 +0000 Subject: [PATCH 012/142] Don't allow transmuting ZSTs in dispatch_from_dyn impl --- compiler/rustc_hir_analysis/messages.ftl | 2 +- .../src/coherence/builtin.rs | 17 +++++++--- .../ui/invalid_dispatch_from_dyn_impls.stderr | 4 +-- .../self/dispatch-from-dyn-zst-transmute.rs | 34 +++++++++++++++++++ .../dispatch-from-dyn-zst-transmute.stderr | 16 +++++++++ 5 files changed, 66 insertions(+), 7 deletions(-) create mode 100644 tests/ui/self/dispatch-from-dyn-zst-transmute.rs create mode 100644 tests/ui/self/dispatch-from-dyn-zst-transmute.stderr diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index 0c3ed9b5c609b..d7ab6eca84b3d 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -135,7 +135,7 @@ hir_analysis_dispatch_from_dyn_multi = implementing the 
`DispatchFromDyn` trait hir_analysis_dispatch_from_dyn_repr = structs implementing `DispatchFromDyn` may not have `#[repr(packed)]` or `#[repr(C)]` -hir_analysis_dispatch_from_dyn_zst = the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment, and nothing else +hir_analysis_dispatch_from_dyn_zst = the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment that don't mention type/const generics, and nothing else .note = extra field `{$name}` of type `{$ty}` is not allowed hir_analysis_drop_impl_negative = negative `Drop` impls are not supported diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index 3b98f358b1e95..760c09a1e7263 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -259,16 +259,25 @@ fn visit_implementation_of_dispatch_from_dyn(checker: &Checker<'_>) -> Result<() let coerced_fields = fields .iter() .filter(|field| { + // Ignore PhantomData fields + if tcx.type_of(field.did).instantiate_identity().is_phantom_data() { + return false; + } + let ty_a = field.ty(tcx, args_a); let ty_b = field.ty(tcx, args_b); + // Allow 1-ZSTs that don't mention type params. + // + // Allowing type params here would allow us to possibly transmute + // between ZSTs, which may be used to create library unsoundness. if let Ok(layout) = tcx.layout_of(infcx.typing_env(param_env).as_query_input(ty_a)) + && layout.is_1zst() + && !ty_a.has_non_region_param() { - if layout.is_1zst() { - // ignore 1-ZST fields - return false; - } + // ignore 1-ZST fields + return false; } if ty_a == ty_b { diff --git a/tests/ui/invalid_dispatch_from_dyn_impls.stderr b/tests/ui/invalid_dispatch_from_dyn_impls.stderr index 168ed37d0e69f..02718334c733e 100644 --- a/tests/ui/invalid_dispatch_from_dyn_impls.stderr +++ b/tests/ui/invalid_dispatch_from_dyn_impls.stderr @@ -1,4 +1,4 @@ -error[E0378]: the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment, and nothing else +error[E0378]: the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment that don't mention type/const generics, and nothing else --> $DIR/invalid_dispatch_from_dyn_impls.rs:10:1 | LL | / impl DispatchFromDyn> for WrapperWithExtraField @@ -35,7 +35,7 @@ LL | | where LL | | T: Unsize, | |_________________^ -error[E0378]: the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment, and nothing else +error[E0378]: the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, ZST fields with 1 byte alignment that don't mention type/const generics, and nothing else --> $DIR/invalid_dispatch_from_dyn_impls.rs:46:1 | LL | / impl DispatchFromDyn> for OverAligned diff --git a/tests/ui/self/dispatch-from-dyn-zst-transmute.rs b/tests/ui/self/dispatch-from-dyn-zst-transmute.rs new file mode 100644 index 0000000000000..57c255b4d7bca --- /dev/null +++ b/tests/ui/self/dispatch-from-dyn-zst-transmute.rs @@ -0,0 +1,34 @@ +#![feature(arbitrary_self_types)] +#![feature(unsize)] +#![feature(dispatch_from_dyn)] + +use std::marker::PhantomData; +use std::marker::Unsize; +use std::ops::DispatchFromDyn; +use std::ops::Deref; + +struct 
IsSendToken(PhantomData T>); + +struct Foo<'a, U: ?Sized> { + token: IsSendToken, + ptr: &'a U, +} + +impl<'a, T, U> DispatchFromDyn> for Foo<'a, T> +//~^ ERROR implementing the `DispatchFromDyn` trait requires multiple coercions +where + T: Unsize + ?Sized, + U: ?Sized {} + +trait Bar { + fn f(self: Foo<'_, Self>); +} + +impl Deref for Foo<'_, U> { + type Target = U; + fn deref(&self) -> &U { + self.ptr + } +} + +fn main() {} diff --git a/tests/ui/self/dispatch-from-dyn-zst-transmute.stderr b/tests/ui/self/dispatch-from-dyn-zst-transmute.stderr new file mode 100644 index 0000000000000..5a8ae88b5f1b8 --- /dev/null +++ b/tests/ui/self/dispatch-from-dyn-zst-transmute.stderr @@ -0,0 +1,16 @@ +error[E0378]: implementing the `DispatchFromDyn` trait requires multiple coercions + --> $DIR/dispatch-from-dyn-zst-transmute.rs:17:1 + | +LL | / impl<'a, T, U> DispatchFromDyn> for Foo<'a, T> +LL | | +LL | | where +LL | | T: Unsize + ?Sized, +LL | | U: ?Sized {} + | |_____________^ + | + = note: the trait `DispatchFromDyn` may only be implemented for a coercion between structures with a single field being coerced + = note: currently, 2 fields need coercions: `token` (`IsSendToken` to `IsSendToken`), `ptr` (`&'a T` to `&'a U`) + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0378`. From 11bc805369b7e2aa641e885c137b2c86ce701e2c Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Jan 2025 00:29:22 +0000 Subject: [PATCH 013/142] Don't allow DispatchFromDyn impls that transmute ZST to non-ZST --- .../src/coherence/builtin.rs | 26 +++++++++---------- ...patch-from-dyn-zst-transmute-zst-nonzst.rs | 25 ++++++++++++++++++ ...h-from-dyn-zst-transmute-zst-nonzst.stderr | 16 ++++++++++++ 3 files changed, 54 insertions(+), 13 deletions(-) create mode 100644 tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.rs create mode 100644 tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.stderr diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index 760c09a1e7263..a661e588b952b 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -267,20 +267,20 @@ fn visit_implementation_of_dispatch_from_dyn(checker: &Checker<'_>) -> Result<() let ty_a = field.ty(tcx, args_a); let ty_b = field.ty(tcx, args_b); - // Allow 1-ZSTs that don't mention type params. - // - // Allowing type params here would allow us to possibly transmute - // between ZSTs, which may be used to create library unsoundness. - if let Ok(layout) = - tcx.layout_of(infcx.typing_env(param_env).as_query_input(ty_a)) - && layout.is_1zst() - && !ty_a.has_non_region_param() - { - // ignore 1-ZST fields - return false; - } - if ty_a == ty_b { + // Allow 1-ZSTs that don't mention type params. + // + // Allowing type params here would allow us to possibly transmute + // between ZSTs, which may be used to create library unsoundness. 
+ if let Ok(layout) = + tcx.layout_of(infcx.typing_env(param_env).as_query_input(ty_a)) + && layout.is_1zst() + && !ty_a.has_non_region_param() + { + // ignore 1-ZST fields + return false; + } + res = Err(tcx.dcx().emit_err(errors::DispatchFromDynZST { span, name: field.name, diff --git a/tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.rs b/tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.rs new file mode 100644 index 0000000000000..71f198f7dc70e --- /dev/null +++ b/tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.rs @@ -0,0 +1,25 @@ +// We used to allow erroneous `DispatchFromDyn` impls whose RHS type contained +// fields that weren't ZSTs. I don't believe this was possible to abuse, but +// it's at least nice to give users better errors. + +#![feature(arbitrary_self_types)] +#![feature(unsize)] +#![feature(dispatch_from_dyn)] + +use std::marker::Unsize; +use std::ops::DispatchFromDyn; + +struct Dispatchable { + _ptr: Box, + z: Z, +} + +impl DispatchFromDyn> for Dispatchable +//~^ ERROR implementing the `DispatchFromDyn` trait requires multiple coercions +where + T: Unsize + ?Sized, + U: ?Sized, +{ +} + +fn main() {} diff --git a/tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.stderr b/tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.stderr new file mode 100644 index 0000000000000..1f13c51f67996 --- /dev/null +++ b/tests/ui/self/dispatch-from-dyn-zst-transmute-zst-nonzst.stderr @@ -0,0 +1,16 @@ +error[E0378]: implementing the `DispatchFromDyn` trait requires multiple coercions + --> $DIR/dispatch-from-dyn-zst-transmute-zst-nonzst.rs:17:1 + | +LL | / impl DispatchFromDyn> for Dispatchable +LL | | +LL | | where +LL | | T: Unsize + ?Sized, +LL | | U: ?Sized, + | |______________^ + | + = note: the trait `DispatchFromDyn` may only be implemented for a coercion between structures with a single field being coerced + = note: currently, 2 fields need coercions: `_ptr` (`Box` to `Box`), `z` (`()` to `i32`) + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0378`. 
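For contrast with the rejected impls exercised by the two new tests above, here is a minimal sketch of the kind of `DispatchFromDyn` impl the tightened checks still accept: a single coerced pointer field, plus a 1-ZST that mentions no type or const generics and a `PhantomData` field, both of which the filter skips. This is illustrative only (the type and field names are invented, not taken from the patches) and it assumes the same nightly feature gates the new tests enable.

#![feature(unsize)]
#![feature(dispatch_from_dyn)]
#![allow(dead_code)]

use std::marker::{PhantomData, Unsize};
use std::ops::{Deref, DispatchFromDyn};

// A 1-ZST that names no type/const generics, so the check still ignores it.
struct Tag;

struct Thin<'a, T: ?Sized> {
    ptr: &'a T,                   // the one field that actually gets coerced
    _tag: Tag,                    // parameter-free 1-ZST: filtered out
    _marker: PhantomData<&'a ()>, // PhantomData: filtered out explicitly
}

// Accepted: exactly one field needs a coercion (`&'a T` to `&'a U`).
impl<'a, T, U> DispatchFromDyn<Thin<'a, U>> for Thin<'a, T>
where
    T: Unsize<U> + ?Sized,
    U: ?Sized,
{
}

impl<'a, T: ?Sized> Deref for Thin<'a, T> {
    type Target = T;
    fn deref(&self) -> &T {
        self.ptr
    }
}

fn main() {}

A real smart pointer would normally also implement `CoerceUnsized`, but that is a separate check from the one modified here.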
From be2369708a0fd53fc1ed80cbcca25f24e12a781d Mon Sep 17 00:00:00 2001 From: spore Date: Wed, 8 Jan 2025 18:31:29 +0800 Subject: [PATCH 014/142] Detect overflow when the literal is larger than i128::MAX --- compiler/rustc_lint/src/types/literal.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_lint/src/types/literal.rs b/compiler/rustc_lint/src/types/literal.rs index 83942918e3b58..c3e558a3e20a9 100644 --- a/compiler/rustc_lint/src/types/literal.rs +++ b/compiler/rustc_lint/src/types/literal.rs @@ -206,12 +206,15 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<&'static ty::Uint(_) => Some(Integer::fit_unsigned(val).uint_ty_str()), ty::Int(_) if negative => Some(Integer::fit_signed(-(val as i128)).int_ty_str()), ty::Int(int) => { - let signed = Integer::fit_signed(val as i128); let unsigned = Integer::fit_unsigned(val); - Some(if Some(unsigned.size().bits()) == int.bit_width() { - unsigned.uint_ty_str() + Some(if let Ok(signed) = i128::try_from(val).map(Integer::fit_signed) { + if Some(unsigned.size().bits()) == int.bit_width() { + unsigned.uint_ty_str() + } else { + signed.int_ty_str() + } } else { - signed.int_ty_str() + unsigned.uint_ty_str() }) } _ => None, From 330be171440eb4efe479c5b1fcfd079153c5159d Mon Sep 17 00:00:00 2001 From: spore Date: Wed, 8 Jan 2025 19:00:10 +0800 Subject: [PATCH 015/142] Detect overflow when the literal is negative --- compiler/rustc_lint/src/types/literal.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_lint/src/types/literal.rs b/compiler/rustc_lint/src/types/literal.rs index c3e558a3e20a9..13eca89fba67b 100644 --- a/compiler/rustc_lint/src/types/literal.rs +++ b/compiler/rustc_lint/src/types/literal.rs @@ -204,7 +204,13 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<&'static match t.kind() { ty::Uint(ty::UintTy::Usize) | ty::Int(ty::IntTy::Isize) => None, ty::Uint(_) => Some(Integer::fit_unsigned(val).uint_ty_str()), - ty::Int(_) if negative => Some(Integer::fit_signed(-(val as i128)).int_ty_str()), + ty::Int(_) if negative => { + if val > i128::MAX as u128 + 1 { + None + } else { + Some(Integer::fit_signed(val.wrapping_neg() as i128).int_ty_str()) + } + } ty::Int(int) => { let unsigned = Integer::fit_unsigned(val); Some(if let Ok(signed) = i128::try_from(val).map(Integer::fit_signed) { From 1517a41c57b328b2e89641d79b1272e7004f7514 Mon Sep 17 00:00:00 2001 From: spore Date: Wed, 8 Jan 2025 19:00:35 +0800 Subject: [PATCH 016/142] Add test cases and test for `HELP` --- tests/ui/lint/type-overflow.rs | 26 +++++++++++++++ tests/ui/lint/type-overflow.stderr | 51 ++++++++++++++++++++++++++---- 2 files changed, 70 insertions(+), 7 deletions(-) diff --git a/tests/ui/lint/type-overflow.rs b/tests/ui/lint/type-overflow.rs index 7239e1c983764..0848d676e237c 100644 --- a/tests/ui/lint/type-overflow.rs +++ b/tests/ui/lint/type-overflow.rs @@ -3,20 +3,46 @@ fn main() { let error = 255i8; //~WARNING literal out of range for `i8` + //~^ HELP consider using the type `u8` instead let ok = 0b1000_0001; // should be ok -> i32 let ok = 0b0111_1111i8; // should be ok -> 127i8 let fail = 0b1000_0001i8; //~WARNING literal out of range for `i8` + //~^ HELP consider using the type `u8` instead + //~| HELP consider using the type `u8` for the literal and cast it to `i8` let fail = 0x8000_0000_0000_0000i64; //~WARNING literal out of range for `i64` + //~^ HELP consider using the type `u64` instead + //~| HELP consider using the type `u64` for 
the literal and cast it to `i64` let fail = 0x1_FFFF_FFFFu32; //~WARNING literal out of range for `u32` + //~^ HELP consider using the type `u64` instead let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000; //~^ WARNING literal out of range for `i128` + //~| HELP consider using the type `u128` instead + //~| HELP consider using the type `u128` for the literal and cast it to `i128` + + let fail = 0x8000_0000_0000_0000_0000_0000_0000_0000; + //~^ WARNING literal out of range for `i32` + //~| HELP consider using the type `u128` instead + + let fail = -0x8000_0000_0000_0000_0000_0000_0000_0000; + //~^ WARNING literal out of range for `i32` + //~| HELP consider using the type `i128` instead + + let fail = -0x8000_0000_0000_0000_0000_0000_0000_0001i128; + //~^ WARNING literal out of range for `i128` + + let fail = 340282366920938463463374607431768211455i8; + //~^ WARNING literal out of range for `i8` + //~| HELP consider using the type `u128` instead let fail = 0x8FFF_FFFF_FFFF_FFFE; //~WARNING literal out of range for `i32` + //~| HELP consider using the type `i128` instead + //~| HELP let fail = -0b1111_1111i8; //~WARNING literal out of range for `i8` + //~| HELP consider using the type `i16` instead } diff --git a/tests/ui/lint/type-overflow.stderr b/tests/ui/lint/type-overflow.stderr index e7c90dcc81bb2..dbbe81e5c182d 100644 --- a/tests/ui/lint/type-overflow.stderr +++ b/tests/ui/lint/type-overflow.stderr @@ -13,7 +13,7 @@ LL | #![warn(overflowing_literals)] | ^^^^^^^^^^^^^^^^^^^^ warning: literal out of range for `i8` - --> $DIR/type-overflow.rs:10:16 + --> $DIR/type-overflow.rs:11:16 | LL | let fail = 0b1000_0001i8; | ^^^^^^^^^^^^^ @@ -29,7 +29,7 @@ LL | let fail = 0b1000_0001u8 as i8; | ~~~~~~~~~~~~~~~~~~~ warning: literal out of range for `i64` - --> $DIR/type-overflow.rs:12:16 + --> $DIR/type-overflow.rs:15:16 | LL | let fail = 0x8000_0000_0000_0000i64; | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -45,7 +45,7 @@ LL | let fail = 0x8000_0000_0000_0000u64 as i64; | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ warning: literal out of range for `u32` - --> $DIR/type-overflow.rs:14:16 + --> $DIR/type-overflow.rs:19:16 | LL | let fail = 0x1_FFFF_FFFFu32; | ^^^^^^^^^^^^^^^^ help: consider using the type `u64` instead: `0x1_FFFF_FFFFu64` @@ -53,7 +53,7 @@ LL | let fail = 0x1_FFFF_FFFFu32; = note: the literal `0x1_FFFF_FFFFu32` (decimal `8589934591`) does not fit into the type `u32` and will become `4294967295u32` warning: literal out of range for `i128` - --> $DIR/type-overflow.rs:16:22 + --> $DIR/type-overflow.rs:22:22 | LL | let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -66,7 +66,44 @@ LL | let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000u128 as i128; | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ warning: literal out of range for `i32` - --> $DIR/type-overflow.rs:19:16 + --> $DIR/type-overflow.rs:27:16 + | +LL | let fail = 0x8000_0000_0000_0000_0000_0000_0000_0000; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the literal `0x8000_0000_0000_0000_0000_0000_0000_0000` (decimal `170141183460469231731687303715884105728`) does not fit into the type `i32` and will become `0i32` + = help: consider using the type `u128` instead + +warning: literal out of range for `i32` + --> $DIR/type-overflow.rs:31:17 + | +LL | let fail = -0x8000_0000_0000_0000_0000_0000_0000_0000; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the literal `0x8000_0000_0000_0000_0000_0000_0000_0000` (decimal 
`170141183460469231731687303715884105728`) does not fit into the type `i32` + = note: and the value `-0x8000_0000_0000_0000_0000_0000_0000_0000` will become `0i32` + = help: consider using the type `i128` instead + +warning: literal out of range for `i128` + --> $DIR/type-overflow.rs:35:17 + | +LL | let fail = -0x8000_0000_0000_0000_0000_0000_0000_0001i128; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the literal `0x8000_0000_0000_0000_0000_0000_0000_0001i128` (decimal `170141183460469231731687303715884105729`) does not fit into the type `i128` + = note: and the value `-0x8000_0000_0000_0000_0000_0000_0000_0001i128` will become `170141183460469231731687303715884105727i128` + +warning: literal out of range for `i8` + --> $DIR/type-overflow.rs:38:16 + | +LL | let fail = 340282366920938463463374607431768211455i8; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the literal `340282366920938463463374607431768211455i8` does not fit into the type `i8` whose range is `-128..=127` + = help: consider using the type `u128` instead + +warning: literal out of range for `i32` + --> $DIR/type-overflow.rs:42:16 | LL | let fail = 0x8FFF_FFFF_FFFF_FFFE; | ^^^^^^^^^^^^^^^^^^^^^ @@ -79,7 +116,7 @@ LL | let fail = 0x8FFF_FFFF_FFFF_FFFEu32 as i32; | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ warning: literal out of range for `i8` - --> $DIR/type-overflow.rs:21:17 + --> $DIR/type-overflow.rs:46:17 | LL | let fail = -0b1111_1111i8; | ^^^^^^^^^^^^^ help: consider using the type `i16` instead: `0b1111_1111i16` @@ -87,5 +124,5 @@ LL | let fail = -0b1111_1111i8; = note: the literal `0b1111_1111i8` (decimal `255`) does not fit into the type `i8` = note: and the value `-0b1111_1111i8` will become `1i8` -warning: 7 warnings emitted +warning: 11 warnings emitted From c04e65dbc5f936fdd4dc53ba7b7a058165aad157 Mon Sep 17 00:00:00 2001 From: spore Date: Thu, 9 Jan 2025 02:02:40 +0800 Subject: [PATCH 017/142] Extract integer conversion into a function --- compiler/rustc_lint/src/types/literal.rs | 38 ++++++++++++++---------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/compiler/rustc_lint/src/types/literal.rs b/compiler/rustc_lint/src/types/literal.rs index 13eca89fba67b..4faf345bcd58f 100644 --- a/compiler/rustc_lint/src/types/literal.rs +++ b/compiler/rustc_lint/src/types/literal.rs @@ -204,29 +204,35 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<&'static match t.kind() { ty::Uint(ty::UintTy::Usize) | ty::Int(ty::IntTy::Isize) => None, ty::Uint(_) => Some(Integer::fit_unsigned(val).uint_ty_str()), - ty::Int(_) if negative => { - if val > i128::MAX as u128 + 1 { - None - } else { - Some(Integer::fit_signed(val.wrapping_neg() as i128).int_ty_str()) - } - } ty::Int(int) => { - let unsigned = Integer::fit_unsigned(val); - Some(if let Ok(signed) = i128::try_from(val).map(Integer::fit_signed) { - if Some(unsigned.size().bits()) == int.bit_width() { - unsigned.uint_ty_str() - } else { - signed.int_ty_str() - } + let signed = literal_to_i128(val, negative).map(|v| Integer::fit_signed(v)); + if negative { + signed.map(Integer::int_ty_str) } else { - unsigned.uint_ty_str() - }) + let unsigned = Integer::fit_unsigned(val); + Some(if let Some(signed) = signed { + if Some(unsigned.size().bits()) == int.bit_width() { + unsigned.uint_ty_str() + } else { + signed.int_ty_str() + } + } else { + unsigned.uint_ty_str() + }) + } } _ => None, } } +fn literal_to_i128(val: u128, negative: bool) -> Option { + if negative { + (val <= i128::MAX as u128 + 1).then(|| val.wrapping_neg() as 
i128) + } else { + val.try_into().ok() + } +} + fn lint_int_literal<'tcx>( cx: &LateContext<'tcx>, type_limits: &TypeLimits, From 91b6b4e038b339982f8f7b79325a44f1a8cbb511 Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Thu, 19 Dec 2024 08:57:49 +0000 Subject: [PATCH 018/142] Harden `Ty` constructors a bit in debug mode `Ty::new` wasn't used anywhere outside this module `Ty::new_adt` shouldn't ever be used for anything but adts. This hasn't caught any bugs, but seems good to check anyway --- compiler/rustc_middle/src/ty/sty.rs | 39 +++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 2980964898c3a..bf37ae05c8251 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -401,7 +401,7 @@ impl<'tcx> Ty<'tcx> { /// The more specific methods will often optimize their creation. #[allow(rustc::usage_of_ty_tykind)] #[inline] - pub fn new(tcx: TyCtxt<'tcx>, st: TyKind<'tcx>) -> Ty<'tcx> { + fn new(tcx: TyCtxt<'tcx>, st: TyKind<'tcx>) -> Ty<'tcx> { tcx.mk_ty_from_kind(st) } @@ -613,6 +613,41 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn new_adt(tcx: TyCtxt<'tcx>, def: AdtDef<'tcx>, args: GenericArgsRef<'tcx>) -> Ty<'tcx> { tcx.debug_assert_args_compatible(def.did(), args); + if cfg!(debug_assertions) { + match tcx.def_kind(def.did()) { + DefKind::Struct | DefKind::Union | DefKind::Enum => {} + DefKind::Mod + | DefKind::Variant + | DefKind::Trait + | DefKind::TyAlias + | DefKind::ForeignTy + | DefKind::TraitAlias + | DefKind::AssocTy + | DefKind::TyParam + | DefKind::Fn + | DefKind::Const + | DefKind::ConstParam + | DefKind::Static { .. } + | DefKind::Ctor(..) + | DefKind::AssocFn + | DefKind::AssocConst + | DefKind::Macro(..) + | DefKind::ExternCrate + | DefKind::Use + | DefKind::ForeignMod + | DefKind::AnonConst + | DefKind::InlineConst + | DefKind::OpaqueTy + | DefKind::Field + | DefKind::LifetimeParam + | DefKind::GlobalAsm + | DefKind::Impl { .. 
} + | DefKind::Closure + | DefKind::SyntheticCoroutineBody => { + bug!("not an adt: {def:?} ({:?})", tcx.def_kind(def.did())) + } + } + } Ty::new(tcx, Adt(def, args)) } @@ -772,7 +807,7 @@ impl<'tcx> Ty<'tcx> { } } }); - Ty::new(tcx, Adt(adt_def, args)) + Ty::new_adt(tcx, adt_def, args) } #[inline] From 37f2998c6e7eeac6a5e4f1aaec9ab353bb2f7b0b Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Thu, 19 Dec 2024 08:57:49 +0000 Subject: [PATCH 019/142] Use trait definition cycle detection for trait alias definitions, too --- .../src/collect/predicates_of.rs | 2 +- .../infinite/infinite-trait-alias-recursion.rs | 2 +- .../infinite-trait-alias-recursion.stderr | 8 ++++---- .../ui/traits/alias/infinite_normalization.rs | 11 +++++++++++ .../traits/alias/infinite_normalization.stderr | 18 ++++++++++++++++++ 5 files changed, 35 insertions(+), 6 deletions(-) create mode 100644 tests/ui/traits/alias/infinite_normalization.rs create mode 100644 tests/ui/traits/alias/infinite_normalization.stderr diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 0c19e2e4c5184..02ccdd834ae1c 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -653,7 +653,7 @@ pub(super) fn implied_predicates_with_filter<'tcx>( } } } - PredicateFilter::SelfAndAssociatedTypeBounds => { + PredicateFilter::All | PredicateFilter::SelfAndAssociatedTypeBounds => { for &(pred, span) in implied_bounds { debug!("superbound: {:?}", pred); if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() diff --git a/tests/ui/infinite/infinite-trait-alias-recursion.rs b/tests/ui/infinite/infinite-trait-alias-recursion.rs index ec86744e68c53..0bd4624de3f1e 100644 --- a/tests/ui/infinite/infinite-trait-alias-recursion.rs +++ b/tests/ui/infinite/infinite-trait-alias-recursion.rs @@ -1,7 +1,7 @@ #![feature(trait_alias)] trait T1 = T2; -//~^ ERROR cycle detected when computing the super predicates of `T1` +//~^ ERROR cycle detected when computing the implied predicates of `T1` trait T2 = T3; diff --git a/tests/ui/infinite/infinite-trait-alias-recursion.stderr b/tests/ui/infinite/infinite-trait-alias-recursion.stderr index b3980cb935efe..5b0cbd5823155 100644 --- a/tests/ui/infinite/infinite-trait-alias-recursion.stderr +++ b/tests/ui/infinite/infinite-trait-alias-recursion.stderr @@ -1,20 +1,20 @@ -error[E0391]: cycle detected when computing the super predicates of `T1` +error[E0391]: cycle detected when computing the implied predicates of `T1` --> $DIR/infinite-trait-alias-recursion.rs:3:12 | LL | trait T1 = T2; | ^^ | -note: ...which requires computing the super predicates of `T2`... +note: ...which requires computing the implied predicates of `T2`... --> $DIR/infinite-trait-alias-recursion.rs:6:12 | LL | trait T2 = T3; | ^^ -note: ...which requires computing the super predicates of `T3`... +note: ...which requires computing the implied predicates of `T3`... 
--> $DIR/infinite-trait-alias-recursion.rs:8:12 | LL | trait T3 = T1 + T3; | ^^ - = note: ...which again requires computing the super predicates of `T1`, completing the cycle + = note: ...which again requires computing the implied predicates of `T1`, completing the cycle = note: trait aliases cannot be recursive note: cycle used when checking that `T1` is well-formed --> $DIR/infinite-trait-alias-recursion.rs:3:1 diff --git a/tests/ui/traits/alias/infinite_normalization.rs b/tests/ui/traits/alias/infinite_normalization.rs new file mode 100644 index 0000000000000..848afc1efb23a --- /dev/null +++ b/tests/ui/traits/alias/infinite_normalization.rs @@ -0,0 +1,11 @@ +//! This test used to get stuck in an infinite +//! recursion during normalization. +//! +//! issue: https://github.com/rust-lang/rust/issues/133901 + +#![feature(trait_alias)] +fn foo>() {} +trait Baz = Baz>; +//~^ ERROR: cycle detected when computing the implied predicates of `Baz` + +fn main() {} diff --git a/tests/ui/traits/alias/infinite_normalization.stderr b/tests/ui/traits/alias/infinite_normalization.stderr new file mode 100644 index 0000000000000..5fe423609e5ac --- /dev/null +++ b/tests/ui/traits/alias/infinite_normalization.stderr @@ -0,0 +1,18 @@ +error[E0391]: cycle detected when computing the implied predicates of `Baz` + --> $DIR/infinite_normalization.rs:8:16 + | +LL | trait Baz = Baz>; + | ^^^^^^^^^^^^^^ + | + = note: ...which immediately requires computing the implied predicates of `Baz` again + = note: trait aliases cannot be recursive +note: cycle used when computing normalized predicates of `foo` + --> $DIR/infinite_normalization.rs:7:1 + | +LL | fn foo>() {} + | ^^^^^^^^^^^^^^^^^^^^^ + = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0391`. From c9946376cf5a49e12e02f724c23b10db3a17c24e Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Wed, 8 Jan 2025 08:53:10 +0000 Subject: [PATCH 020/142] Only treat plain literal patterns as short --- src/tools/rustfmt/src/patterns.rs | 37 +++++++++++++++-------- src/tools/rustfmt/tests/source/pattern.rs | 10 ++++++ src/tools/rustfmt/tests/target/pattern.rs | 8 +++++ 3 files changed, 43 insertions(+), 12 deletions(-) diff --git a/src/tools/rustfmt/src/patterns.rs b/src/tools/rustfmt/src/patterns.rs index b55469f332aa1..1d88726d94586 100644 --- a/src/tools/rustfmt/src/patterns.rs +++ b/src/tools/rustfmt/src/patterns.rs @@ -31,18 +31,31 @@ use crate::utils::{format_mutability, mk_sp, mk_sp_lo_plus_one, rewrite_ident}; /// - `[small, ntp]` /// - unary tuple constructor `([small, ntp])` /// - `&[small]` -pub(crate) fn is_short_pattern(pat: &ast::Pat, pat_str: &str) -> bool { +pub(crate) fn is_short_pattern( + context: &RewriteContext<'_>, + pat: &ast::Pat, + pat_str: &str, +) -> bool { // We also require that the pattern is reasonably 'small' with its literal width. 
- pat_str.len() <= 20 && !pat_str.contains('\n') && is_short_pattern_inner(pat) + pat_str.len() <= 20 && !pat_str.contains('\n') && is_short_pattern_inner(context, pat) } -fn is_short_pattern_inner(pat: &ast::Pat) -> bool { - match pat.kind { - ast::PatKind::Rest - | ast::PatKind::Never - | ast::PatKind::Wild - | ast::PatKind::Err(_) - | ast::PatKind::Expr(_) => true, +fn is_short_pattern_inner(context: &RewriteContext<'_>, pat: &ast::Pat) -> bool { + match &pat.kind { + ast::PatKind::Rest | ast::PatKind::Never | ast::PatKind::Wild | ast::PatKind::Err(_) => { + true + } + ast::PatKind::Expr(expr) => match &expr.kind { + ast::ExprKind::Lit(_) => true, + ast::ExprKind::Unary(ast::UnOp::Neg, expr) => match &expr.kind { + ast::ExprKind::Lit(_) => true, + _ => unreachable!(), + }, + ast::ExprKind::ConstBlock(_) | ast::ExprKind::Path(..) => { + context.config.style_edition() <= StyleEdition::Edition2024 + } + _ => unreachable!(), + }, ast::PatKind::Ident(_, _, ref pat) => pat.is_none(), ast::PatKind::Struct(..) | ast::PatKind::MacCall(..) @@ -57,8 +70,8 @@ fn is_short_pattern_inner(pat: &ast::Pat) -> bool { ast::PatKind::Box(ref p) | PatKind::Deref(ref p) | ast::PatKind::Ref(ref p, _) - | ast::PatKind::Paren(ref p) => is_short_pattern_inner(&*p), - PatKind::Or(ref pats) => pats.iter().all(|p| is_short_pattern_inner(p)), + | ast::PatKind::Paren(ref p) => is_short_pattern_inner(context, &*p), + PatKind::Or(ref pats) => pats.iter().all(|p| is_short_pattern_inner(context, p)), } } @@ -96,7 +109,7 @@ impl Rewrite for Pat { let use_mixed_layout = pats .iter() .zip(pat_strs.iter()) - .all(|(pat, pat_str)| is_short_pattern(pat, pat_str)); + .all(|(pat, pat_str)| is_short_pattern(context, pat, pat_str)); let items: Vec<_> = pat_strs.into_iter().map(ListItem::from_str).collect(); let tactic = if use_mixed_layout { DefinitiveListTactic::Mixed diff --git a/src/tools/rustfmt/tests/source/pattern.rs b/src/tools/rustfmt/tests/source/pattern.rs index f06d03cadf2f7..ed6ad690fa98d 100644 --- a/src/tools/rustfmt/tests/source/pattern.rs +++ b/src/tools/rustfmt/tests/source/pattern.rs @@ -88,3 +88,13 @@ fn issue3728() { | c; foo((1,)); } + +fn literals() { + match 42 { + const { 1 + 2 } | 4 + | 6 => {} + 10 | 11 | 12 + | 13 | 14 => {} + _ => {} + } +} \ No newline at end of file diff --git a/src/tools/rustfmt/tests/target/pattern.rs b/src/tools/rustfmt/tests/target/pattern.rs index 576018ac623fd..e867f65929dd9 100644 --- a/src/tools/rustfmt/tests/target/pattern.rs +++ b/src/tools/rustfmt/tests/target/pattern.rs @@ -96,3 +96,11 @@ fn issue3728() { let foo = |(c,)| c; foo((1,)); } + +fn literals() { + match 42 { + const { 1 + 2 } | 4 | 6 => {} + 10 | 11 | 12 | 13 | 14 => {} + _ => {} + } +} From ad7bb20344921c5f3dd42b29074a357562d5aa01 Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Fri, 10 Jan 2025 09:10:41 +0000 Subject: [PATCH 021/142] Stable Hash: Ignore all HirIds that just identify the node itself --- compiler/rustc_hir/src/hir.rs | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index dd96b30fefc36..f05222fabda6e 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -34,6 +34,7 @@ use crate::intravisit::FnKind; #[derive(Debug, Copy, Clone, HashStable_Generic)] pub struct Lifetime { + #[stable_hasher(ignore)] pub hir_id: HirId, /// Either "`'a`", referring to a named lifetime definition, @@ -214,6 +215,7 @@ impl Path<'_> { pub struct PathSegment<'hir> { /// The identifier portion of 
this path segment. pub ident: Ident, + #[stable_hasher(ignore)] pub hir_id: HirId, pub res: Res, @@ -304,6 +306,7 @@ pub enum ConstArgKind<'hir> { #[derive(Clone, Copy, Debug, HashStable_Generic)] pub struct InferArg { + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, } @@ -592,6 +595,7 @@ pub enum GenericParamKind<'hir> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct GenericParam<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub name: ParamName, @@ -850,6 +854,7 @@ impl<'hir> Generics<'hir> { /// A single predicate in a where-clause. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct WherePredicate<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, pub kind: &'hir WherePredicateKind<'hir>, @@ -1521,6 +1526,7 @@ impl fmt::Debug for DotDotPos { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct PatExpr<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, pub kind: PatExprKind<'hir>, @@ -1610,6 +1616,7 @@ pub enum PatKind<'hir> { /// A statement. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Stmt<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub kind: StmtKind<'hir>, pub span: Span, @@ -1641,6 +1648,7 @@ pub struct LetStmt<'hir> { pub init: Option<&'hir Expr<'hir>>, /// Else block for a `let...else` binding. pub els: Option<&'hir Block<'hir>>, + #[stable_hasher(ignore)] pub hir_id: HirId, pub span: Span, /// Can be `ForLoopDesugar` if the `let` statement is part of a `for` loop @@ -1937,6 +1945,7 @@ pub type Lit = Spanned; /// `const N: usize = { ... }` with `tcx.hir().opt_const_param_default_param_def_id(..)` #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct AnonConst { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub body: BodyId, @@ -1946,6 +1955,7 @@ pub struct AnonConst { /// An inline constant expression `const { something }`. #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct ConstBlock { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub body: BodyId, @@ -1961,6 +1971,7 @@ pub struct ConstBlock { /// [rust lang reference]: https://doc.rust-lang.org/reference/expressions.html #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Expr<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub kind: ExprKind<'hir>, pub span: Span, @@ -2839,6 +2850,7 @@ pub enum ImplItemKind<'hir> { /// * the `f(..): Bound` in `Trait` (feature `return_type_notation`) #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct AssocItemConstraint<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub ident: Ident, pub gen_args: &'hir GenericArgs<'hir>, @@ -2907,6 +2919,7 @@ impl<'hir> AssocItemConstraintKind<'hir> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Ty<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub kind: TyKind<'hir>, pub span: Span, @@ -3102,6 +3115,7 @@ pub struct UnsafeBinderTy<'hir> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct OpaqueTy<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub bounds: GenericBounds<'hir>, @@ -3138,6 +3152,7 @@ impl PreciseCapturingArg<'_> { /// since resolve_bound_vars operates on `Lifetime`s. #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct PreciseCapturingNonLifetimeArg { + #[stable_hasher(ignore)] pub hir_id: HirId, pub ident: Ident, pub res: Res, @@ -3311,6 +3326,7 @@ impl InlineAsm<'_> { /// Represents a parameter in a function header. 
#[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct Param<'hir> { + #[stable_hasher(ignore)] pub hir_id: HirId, pub pat: &'hir Pat<'hir>, pub ty_span: Span, @@ -3468,6 +3484,7 @@ pub struct Variant<'hir> { /// Name of the variant. pub ident: Ident, /// Id of the variant (not the constructor, see `VariantData::ctor_hir_id()`). + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, /// Fields and constructor id of the variant. @@ -3540,6 +3557,7 @@ pub struct FieldDef<'hir> { pub span: Span, pub vis_span: Span, pub ident: Ident, + #[stable_hasher(ignore)] pub hir_id: HirId, pub def_id: LocalDefId, pub ty: &'hir Ty<'hir>, @@ -3564,11 +3582,11 @@ pub enum VariantData<'hir> { /// A tuple variant. /// /// E.g., `Bar(..)` as in `enum Foo { Bar(..) }`. - Tuple(&'hir [FieldDef<'hir>], HirId, LocalDefId), + Tuple(&'hir [FieldDef<'hir>], #[stable_hasher(ignore)] HirId, LocalDefId), /// A unit variant. /// /// E.g., `Bar = ..` as in `enum Foo { Bar = .. }`. - Unit(HirId, LocalDefId), + Unit(#[stable_hasher(ignore)] HirId, LocalDefId), } impl<'hir> VariantData<'hir> { From 4a857557563a06a70c1c7533cf4220f42b37198a Mon Sep 17 00:00:00 2001 From: spore Date: Sat, 11 Jan 2025 13:05:15 +0800 Subject: [PATCH 022/142] Minor simplification Apply eta-reduction on map to simplify code and make the style more consistent --- compiler/rustc_lint/src/types/literal.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_lint/src/types/literal.rs b/compiler/rustc_lint/src/types/literal.rs index 4faf345bcd58f..f7a59437bbc27 100644 --- a/compiler/rustc_lint/src/types/literal.rs +++ b/compiler/rustc_lint/src/types/literal.rs @@ -205,7 +205,7 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<&'static ty::Uint(ty::UintTy::Usize) | ty::Int(ty::IntTy::Isize) => None, ty::Uint(_) => Some(Integer::fit_unsigned(val).uint_ty_str()), ty::Int(int) => { - let signed = literal_to_i128(val, negative).map(|v| Integer::fit_signed(v)); + let signed = literal_to_i128(val, negative).map(Integer::fit_signed); if negative { signed.map(Integer::int_ty_str) } else { From 2f5a3d4b06a0a9e8749c2e361758ec517df0c96a Mon Sep 17 00:00:00 2001 From: Yuri Astrakhan Date: Tue, 10 Dec 2024 15:45:23 -0500 Subject: [PATCH 023/142] Convert `struct FromBytesWithNulError` into enum One of `CStr` constructors, `CStr::from_bytes_with_nul(bytes: &[u8])` handles 3 cases: 1. `bytes` has one NULL as the last value - creates CStr 2. `bytes` has no NULL - error 3. `bytes` has a NULL in some other position - error The 3rd case is error that may require lossy conversion, but the 2nd case can easily be handled by the user code. Unfortunately, this function returns an opaque `FromBytesWithNulError` error in both 2nd and 3rd case, so the user cannot detect just the 2nd case - having to re-implement the entire function and bring in the `memchr` dependency. In [this code](https://github.com/gquintard/varnish-rs/blob/f86d7a87683b08d2e634d63e77d9dc1d24ed4a13/varnish-sys/src/vcl/ws.rs#L158), my FFI code needs to copy user's `&[u8]` into a C-allocated memory blob in a NUL-terminated `CStr` format. My code must first validate if `&[u8]` has a trailing NUL (case 1), no NUL (adds one on the fly - case 2), or NUL in the middle (3rd case - error). I had to re-implement `from_bytes_with_nul` and add `memchr`dependency just to handle the 2nd case. This PR renames the former `kind` enum from `FromBytesWithNulErrorKind` to `FromBytesWithNulError`, and removes the original struct. 
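As a caller-side illustration of why the split matters, here is a minimal sketch of the pattern the new variants enable, using only the enum shape introduced in this patch (the interior-NUL payload is matched with `{ .. }` so the field name does not matter). It handles case 1 directly, recovers from case 2 by appending the terminator itself, and propagates case 3 as a genuine error; the helper name is invented for the example.

use std::ffi::{CStr, CString, FromBytesWithNulError};

fn to_c_owned(bytes: &[u8]) -> Result<CString, FromBytesWithNulError> {
    match CStr::from_bytes_with_nul(bytes) {
        // case 1: already NUL-terminated
        Ok(cstr) => Ok(cstr.to_owned()),
        // case 2: no NUL at all, so add one ourselves instead of failing
        Err(FromBytesWithNulError::NotNulTerminated) => {
            let mut buf = bytes.to_vec();
            buf.push(0);
            // We just learned there is no interior NUL, so this cannot fail.
            Ok(CString::from_vec_with_nul(buf).expect("no interior NUL"))
        }
        // case 3: a NUL in the middle really is an error
        Err(err @ FromBytesWithNulError::InteriorNul { .. }) => Err(err),
    }
}

fn main() {
    assert_eq!(to_c_owned(b"abc\0").unwrap().as_bytes(), b"abc");
    assert_eq!(to_c_owned(b"abc").unwrap().as_bytes(), b"abc");
    assert!(to_c_owned(b"a\0bc").is_err());
}

The FFI use case described above would copy into C-allocated memory rather than build a `CString`, but the shape of the match is the same, and no second scan of the input is needed to distinguish case 2 from case 3.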
--- library/core/src/ffi/c_str.rs | 50 +++++++++++++---------------------- 1 file changed, 19 insertions(+), 31 deletions(-) diff --git a/library/core/src/ffi/c_str.rs b/library/core/src/ffi/c_str.rs index 8831443a10f17..63915c35288a3 100644 --- a/library/core/src/ffi/c_str.rs +++ b/library/core/src/ffi/c_str.rs @@ -124,37 +124,25 @@ pub struct CStr { /// /// let _: FromBytesWithNulError = CStr::from_bytes_with_nul(b"f\0oo").unwrap_err(); /// ``` -#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[stable(feature = "core_c_str", since = "1.64.0")] -pub struct FromBytesWithNulError { - kind: FromBytesWithNulErrorKind, -} - -#[derive(Clone, PartialEq, Eq, Debug)] -enum FromBytesWithNulErrorKind { - InteriorNul(usize), +pub enum FromBytesWithNulError { + /// Data provided contains an interior nul byte at byte `pos`. + InteriorNul { + /// The position of the interior nul byte. + pos: usize, + }, + /// Data provided is not nul terminated. NotNulTerminated, } -// FIXME: const stability attributes should not be required here, I think -impl FromBytesWithNulError { - const fn interior_nul(pos: usize) -> FromBytesWithNulError { - FromBytesWithNulError { kind: FromBytesWithNulErrorKind::InteriorNul(pos) } - } - const fn not_nul_terminated() -> FromBytesWithNulError { - FromBytesWithNulError { kind: FromBytesWithNulErrorKind::NotNulTerminated } - } -} - #[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")] impl Error for FromBytesWithNulError { #[allow(deprecated)] fn description(&self) -> &str { - match self.kind { - FromBytesWithNulErrorKind::InteriorNul(..) => { - "data provided contains an interior nul byte" - } - FromBytesWithNulErrorKind::NotNulTerminated => "data provided is not nul terminated", + match self { + Self::InteriorNul { .. } => "data provided contains an interior nul byte", + Self::NotNulTerminated => "data provided is not nul terminated", } } } @@ -199,7 +187,7 @@ impl fmt::Display for FromBytesWithNulError { #[allow(deprecated, deprecated_in_future)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.description())?; - if let FromBytesWithNulErrorKind::InteriorNul(pos) = self.kind { + if let Self::InteriorNul { pos } = self { write!(f, " at byte pos {pos}")?; } Ok(()) @@ -349,25 +337,25 @@ impl CStr { /// use std::ffi::CStr; /// /// let cstr = CStr::from_bytes_with_nul(b"hello\0"); - /// assert!(cstr.is_ok()); + /// assert_eq!(cstr, Ok(c"hello")); /// ``` /// /// Creating a `CStr` without a trailing nul terminator is an error: /// /// ``` - /// use std::ffi::CStr; + /// use std::ffi::{CStr, FromBytesWithNulError}; /// /// let cstr = CStr::from_bytes_with_nul(b"hello"); - /// assert!(cstr.is_err()); + /// assert_eq!(cstr, Err(FromBytesWithNulError::NotNulTerminated)); /// ``` /// /// Creating a `CStr` with an interior nul byte is an error: /// /// ``` - /// use std::ffi::CStr; + /// use std::ffi::{CStr, FromBytesWithNulError}; /// /// let cstr = CStr::from_bytes_with_nul(b"he\0llo\0"); - /// assert!(cstr.is_err()); + /// assert_eq!(cstr, Err(FromBytesWithNulError::InteriorNul { pos: 2 })); /// ``` #[stable(feature = "cstr_from_bytes", since = "1.10.0")] #[rustc_const_stable(feature = "const_cstr_methods", since = "1.72.0")] @@ -379,8 +367,8 @@ impl CStr { // of the byte slice. 
Ok(unsafe { Self::from_bytes_with_nul_unchecked(bytes) }) } - Some(nul_pos) => Err(FromBytesWithNulError::interior_nul(nul_pos)), - None => Err(FromBytesWithNulError::not_nul_terminated()), + Some(pos) => Err(FromBytesWithNulError::InteriorNul { pos }), + None => Err(FromBytesWithNulError::NotNulTerminated), } } From 86b86fa8fb89aca91aa176a0727b9f233cd14353 Mon Sep 17 00:00:00 2001 From: Yuri Astrakhan Date: Sat, 11 Jan 2025 02:56:58 -0500 Subject: [PATCH 024/142] Rename `pos` to `position` --- library/core/src/ffi/c_str.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/library/core/src/ffi/c_str.rs b/library/core/src/ffi/c_str.rs index 63915c35288a3..7180593edf0d0 100644 --- a/library/core/src/ffi/c_str.rs +++ b/library/core/src/ffi/c_str.rs @@ -127,10 +127,10 @@ pub struct CStr { #[derive(Clone, Copy, PartialEq, Eq, Debug)] #[stable(feature = "core_c_str", since = "1.64.0")] pub enum FromBytesWithNulError { - /// Data provided contains an interior nul byte at byte `pos`. + /// Data provided contains an interior nul byte at byte `position`. InteriorNul { /// The position of the interior nul byte. - pos: usize, + position: usize, }, /// Data provided is not nul terminated. NotNulTerminated, @@ -187,8 +187,8 @@ impl fmt::Display for FromBytesWithNulError { #[allow(deprecated, deprecated_in_future)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.description())?; - if let Self::InteriorNul { pos } = self { - write!(f, " at byte pos {pos}")?; + if let Self::InteriorNul { position } = self { + write!(f, " at byte pos {position}")?; } Ok(()) } @@ -355,7 +355,7 @@ impl CStr { /// use std::ffi::{CStr, FromBytesWithNulError}; /// /// let cstr = CStr::from_bytes_with_nul(b"he\0llo\0"); - /// assert_eq!(cstr, Err(FromBytesWithNulError::InteriorNul { pos: 2 })); + /// assert_eq!(cstr, Err(FromBytesWithNulError::InteriorNul { position: 2 })); /// ``` #[stable(feature = "cstr_from_bytes", since = "1.10.0")] #[rustc_const_stable(feature = "const_cstr_methods", since = "1.72.0")] @@ -367,7 +367,7 @@ impl CStr { // of the byte slice. Ok(unsafe { Self::from_bytes_with_nul_unchecked(bytes) }) } - Some(pos) => Err(FromBytesWithNulError::InteriorNul { pos }), + Some(position) => Err(FromBytesWithNulError::InteriorNul { position }), None => Err(FromBytesWithNulError::NotNulTerminated), } } From d4057e8df01a2d1b580952e1be4099347bd5c27a Mon Sep 17 00:00:00 2001 From: binarycat Date: Fri, 10 Jan 2025 16:45:32 -0600 Subject: [PATCH 025/142] re-add --disable-minification to rustdoc this also makes the rust.docs-minification option work as advertised in config.toml nothing fancy this time, this is intended to be perma-unstable. it's only really here for the benefit of rustdoc devs. 
mitegates https://github.com/rust-lang/rust/issues/135345 --- src/librustdoc/config.rs | 6 ++++++ src/librustdoc/html/render/write_shared.rs | 10 +++++++++- src/librustdoc/lib.rs | 9 ++++++++- .../rustdoc-default-output/output-default.stdout | 2 +- 4 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index af3c7cc7be345..80bc6cebd2aa9 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -303,6 +303,8 @@ pub(crate) struct RenderOptions { pub(crate) include_parts_dir: Vec, /// Where to write crate-info pub(crate) parts_out_dir: Option, + /// disable minification of CSS/JS + pub(crate) disable_minification: bool, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -781,6 +783,9 @@ impl Options { let unstable_features = rustc_feature::UnstableFeatures::from_environment(crate_name.as_deref()); + + let disable_minification = matches.opt_present("disable-minification"); + let options = Options { bin_crate, proc_macro_crate, @@ -857,6 +862,7 @@ impl Options { should_merge, include_parts_dir, parts_out_dir, + disable_minification, }; Some((input, options, render_options)) } diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs index ce10e5ecc2441..fb6f3bc2c76ed 100644 --- a/src/librustdoc/html/render/write_shared.rs +++ b/src/librustdoc/html/render/write_shared.rs @@ -207,7 +207,15 @@ fn write_static_files( if opt.emit.is_empty() || opt.emit.contains(&EmitType::Toolchain) { static_files::for_each(|f: &static_files::StaticFile| { let filename = static_dir.join(f.output_filename()); - fs::write(&filename, f.minified()).map_err(|e| PathError::new(e, &filename)) + let contents: &[u8]; + let contents_vec: Vec; + if opt.disable_minification { + contents = f.bytes; + } else { + contents_vec = f.minified(); + contents = &contents_vec; + }; + fs::write(&filename, contents).map_err(|e| PathError::new(e, &filename)) })?; } diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 96ca96ee6bc15..330b2507de89b 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -651,8 +651,15 @@ fn opts() -> Vec { "", "add arguments to be used when compiling doctests", ), + opt( + Unstable, + FlagMulti, + "", + "disable-minification", + "diable the minification of CSS/JS files", + "", + ), // deprecated / removed options - opt(Unstable, FlagMulti, "", "disable-minification", "removed", ""), opt( Stable, Multi, diff --git a/tests/run-make/rustdoc-default-output/output-default.stdout b/tests/run-make/rustdoc-default-output/output-default.stdout index c2d9309ba2e6d..adf7ee794fd28 100644 --- a/tests/run-make/rustdoc-default-output/output-default.stdout +++ b/tests/run-make/rustdoc-default-output/output-default.stdout @@ -194,7 +194,7 @@ Options: --doctest-compilation-args add arguments to be used when compiling doctests --disable-minification - removed + diable the minification of CSS/JS files --plugin-path DIR removed, see issue #44136 for From 0f1e253066e8deab86b9f071c94b746116e4b819 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 12 Jan 2025 00:23:37 +0000 Subject: [PATCH 026/142] cargo update compiler & tools dependencies: Locking 27 packages to latest compatible versions Updating bitflags v2.6.0 -> v2.7.0 Updating clap v4.5.23 -> v4.5.26 Updating clap_builder v4.5.23 -> v4.5.26 Updating clap_complete v4.5.40 -> v4.5.42 Updating clap_derive v4.5.18 -> v4.5.24 Updating handlebars v6.2.0 -> v6.3.0 Updating libz-sys v1.1.20 -> v1.1.21 Updating linux-raw-sys v0.4.14 -> v0.4.15 
Updating phf v0.11.2 -> v0.11.3 Updating phf_codegen v0.11.2 -> v0.11.3 Updating phf_generator v0.11.2 -> v0.11.3 Updating phf_shared v0.11.2 -> v0.11.3 Updating pin-project-lite v0.2.15 -> v0.2.16 Updating proc-macro2 v1.0.92 -> v1.0.93 Updating rustix v0.38.42 -> v0.38.43 Updating serde_json v1.0.134 -> v1.0.135 Adding siphasher v1.0.1 Updating syn v2.0.94 -> v2.0.96 Updating thiserror v2.0.9 -> v2.0.11 Updating thiserror-impl v2.0.9 -> v2.0.11 Updating tokio v1.42.0 -> v1.43.0 Updating uuid v1.11.0 -> v1.11.1 Updating wasm-encoder v0.222.0 -> v0.223.0 Adding wasmparser v0.223.0 Updating wast v222.0.0 -> v223.0.0 Updating wat v1.222.0 -> v1.223.0 Updating xattr v1.3.1 -> v1.4.0 note: pass `--verbose` to see 39 unchanged dependencies behind latest library dependencies: Locking 0 packages to latest compatible versions note: pass `--verbose` to see 4 unchanged dependencies behind latest rustbook dependencies: Locking 27 packages to latest compatible versions Updating bitflags v2.6.0 -> v2.7.0 Updating cc v1.2.7 -> v1.2.8 Updating clap v4.5.23 -> v4.5.26 Updating clap_builder v4.5.23 -> v4.5.26 Updating clap_complete v4.5.40 -> v4.5.42 Updating clap_derive v4.5.18 -> v4.5.24 Adding darling v0.20.10 Adding darling_core v0.20.10 Adding darling_macro v0.20.10 Adding derive_builder v0.20.2 Adding derive_builder_core v0.20.2 Adding derive_builder_macro v0.20.2 Updating handlebars v6.2.0 -> v6.3.0 Adding ident_case v1.0.1 Updating linux-raw-sys v0.4.14 -> v0.4.15 Updating phf v0.11.2 -> v0.11.3 Updating phf_codegen v0.11.2 -> v0.11.3 Updating phf_generator v0.11.2 -> v0.11.3 Updating phf_shared v0.11.2 -> v0.11.3 Updating proc-macro2 v1.0.92 -> v1.0.93 Updating rustix v0.38.42 -> v0.38.43 Updating serde_json v1.0.134 -> v1.0.135 Adding siphasher v1.0.1 Updating syn v2.0.94 -> v2.0.96 Updating thiserror v2.0.9 -> v2.0.11 Updating thiserror-impl v2.0.9 -> v2.0.11 Updating winnow v0.6.22 -> v0.6.24 --- Cargo.lock | 210 ++++++++++++++++++---------------- src/tools/rustbook/Cargo.lock | 183 ++++++++++++++++++++--------- 2 files changed, 244 insertions(+), 149 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 117f1475e000a..85e5e6c97105c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -254,9 +254,9 @@ dependencies = [ [[package]] name = "bitflags" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" [[package]] name = "blake3" @@ -407,7 +407,7 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror 2.0.9", + "thiserror 2.0.11", ] [[package]] @@ -481,9 +481,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.23" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" +checksum = "a8eb5e908ef3a6efbe1ed62520fb7287959888c88485abe072543190ecc66783" dependencies = [ "clap_builder", "clap_derive", @@ -501,9 +501,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.23" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" +checksum = "96b01801b5fc6a0a232407abc821660c9c6d25a1cafc0d4f85f29fb8d9afc121" dependencies = [ "anstream", "anstyle", @@ -514,23 +514,23 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.40" +version = "4.5.42" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac2e663e3e3bed2d32d065a8404024dad306e699a04263ec59919529f803aee9" +checksum = "33a7e468e750fa4b6be660e8b5651ad47372e8fb114030b594c2d75d48c5ffd0" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -561,7 +561,7 @@ dependencies = [ "rustc_tools_util", "serde", "serde_json", - "syn 2.0.94", + "syn 2.0.96", "tempfile", "termize", "tokio", @@ -671,7 +671,7 @@ dependencies = [ "nom", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -896,7 +896,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -907,7 +907,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -944,7 +944,7 @@ checksum = "62d671cc41a825ebabc75757b62d3d168c577f9149b2d49ece1dad1f72119d25" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -965,7 +965,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -975,7 +975,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -987,7 +987,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -1065,7 +1065,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -1362,7 +1362,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -1502,17 +1502,18 @@ dependencies = [ [[package]] name = "handlebars" -version = "6.2.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4ccde012831f9a071a637b0d4e31df31c0f6c525784b35ae76a9ac6bc1e315" +checksum = "3d6b224b95c1e668ac0270325ad563b2eef1469fbbb8959bc7c692c844b813d9" dependencies = [ + "derive_builder", "log", "num-order", "pest", "pest_derive", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.11", ] [[package]] @@ -1596,7 +1597,7 @@ dependencies = [ "markup5ever", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -1785,7 +1786,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -2085,9 +2086,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.20" +version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" +checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" dependencies = [ "cc", "libc", @@ -2114,9 +2115,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" @@ -2717,7 +2718,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 2.0.9", + "thiserror 2.0.11", "ucd-trie", ] @@ -2741,7 +2742,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -2757,21 +2758,21 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", ] [[package]] name = "phf_codegen" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ - "phf_generator 0.11.2", - "phf_shared 0.11.2", + "phf_generator 0.11.3", + "phf_shared 0.11.3", ] [[package]] @@ -2786,11 +2787,11 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", "rand", ] @@ -2800,23 +2801,23 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher", + "siphasher 1.0.1", ] [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -2886,9 +2887,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -3151,7 +3152,7 @@ dependencies = [ "rinja_parser", "rustc-hash 2.1.0", "serde", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -3792,7 +3793,7 @@ dependencies = [ "fluent-syntax", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "unic-langid", ] @@ -3927,7 +3928,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.94", + "syn 2.0.96", ] [[package]] @@ -4075,7 +4076,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "synstructure", ] @@ -4663,7 +4664,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "synstructure", ] @@ -4752,7 +4753,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -4786,9 +4787,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.42" +version = "0.38.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" dependencies = [ "bitflags", "errno", @@ -4889,14 +4890,14 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] name = "serde_json" -version = "1.0.134" +version = "1.0.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" dependencies = [ "indexmap", "itoa", @@ -4969,6 +4970,12 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + [[package]] name = "slab" version = "0.4.9" @@ -5147,9 +5154,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.94" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ "proc-macro2", "quote", @@ -5164,7 +5171,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -5292,11 +5299,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.9" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" dependencies = [ - "thiserror-impl 2.0.9", + "thiserror-impl 2.0.11", ] [[package]] @@ -5307,18 +5314,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] name = "thiserror-impl" -version = "2.0.9" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -5441,9 +5448,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.42.0" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" dependencies = [ "backtrace", "bytes", @@ -5519,7 +5526,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -5690,7 +5697,7 @@ checksum = "1ed7f4237ba393424195053097c1516bd4590dc82b84f2f97c5c69e12704555b" dependencies = [ "proc-macro-hack", "quote", - "syn 2.0.94", + "syn 2.0.96", "unic-langid-impl", ] @@ -5828,9 +5835,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "b913a3b5fe84142e269d63cc62b64319ccaf89b748fc31fe025177f767a756c4" dependencies = [ "getrandom", ] @@ -5896,7 +5903,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "wasm-bindgen-shared", ] @@ -5918,7 +5925,7 @@ checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5968,12 +5975,12 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.222.0" +version = "0.223.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3432682105d7e994565ef928ccf5856cf6af4ba3dddebedb737f61caed70f956" +checksum = "7e636076193fa68103e937ac951b5f2f587624097017d764b8984d9c0f149464" dependencies = [ "leb128", - "wasmparser 0.222.0", + "wasmparser 0.223.0", ] [[package]] @@ -6011,6 +6018,15 @@ name = "wasmparser" version = "0.222.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4adf50fde1b1a49c1add6a80d47aea500c88db70551805853aa8b88f3ea27ab5" +dependencies = [ + "bitflags", +] + +[[package]] +name = "wasmparser" +version = "0.223.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5a99faceb1a5a84dd6084ec4bfa4b2ab153b5793b43fd8f58b89232634afc35" dependencies = [ "bitflags", "indexmap", @@ -6019,22 +6035,22 @@ dependencies = [ [[package]] name = "wast" -version = "222.0.0" +version = "223.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce7191f4b7da0dd300cc32476abae6457154e4625d9b1bc26890828a9a26f6e" +checksum = "d59b2ba8a2ff9f06194b7be9524f92e45e70149f4dacc0d0c7ad92b59ac875e4" dependencies = [ "bumpalo", "leb128", "memchr", "unicode-width 0.2.0", - "wasm-encoder 0.222.0", + "wasm-encoder 0.223.0", ] [[package]] name = "wat" -version = "1.222.0" +version = "1.223.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fde61b4b52f9a84ae31b5e8902a2cd3162ea45d8bf564c729c3288fe52f4334" +checksum = "662786915c427e4918ff01eabb3c4756d4d947cd8f635761526b4cc9da2eaaad" dependencies = [ "wast", ] @@ -6100,7 +6116,7 @@ dependencies = [ "rayon", "serde", "serde_json", - "syn 2.0.94", + "syn 2.0.96", "windows-metadata", ] @@ -6133,7 +6149,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -6144,7 +6160,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -6376,9 +6392,9 @@ 
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "xattr" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" +checksum = "e105d177a3871454f754b33bb0ee637ecaaac997446375fd3e5d43a2ed00c909" dependencies = [ "libc", "linux-raw-sys", @@ -6423,7 +6439,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "synstructure", ] @@ -6445,7 +6461,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] [[package]] @@ -6465,7 +6481,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", "synstructure", ] @@ -6488,5 +6504,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.94", + "syn 2.0.96", ] diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock index b5b360db252fe..e05b3c33ca34a 100644 --- a/src/tools/rustbook/Cargo.lock +++ b/src/tools/rustbook/Cargo.lock @@ -123,9 +123,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" [[package]] name = "block-buffer" @@ -161,9 +161,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "cc" -version = "1.2.7" +version = "1.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" +checksum = "ad0cf6e91fde44c773c6ee7ec6bba798504641a8bc2eb7e37a04ffbf4dfaa55a" dependencies = [ "shlex", ] @@ -190,9 +190,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.23" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" +checksum = "a8eb5e908ef3a6efbe1ed62520fb7287959888c88485abe072543190ecc66783" dependencies = [ "clap_builder", "clap_derive", @@ -200,9 +200,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.23" +version = "4.5.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" +checksum = "96b01801b5fc6a0a232407abc821660c9c6d25a1cafc0d4f85f29fb8d9afc121" dependencies = [ "anstream", "anstyle", @@ -213,18 +213,18 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.40" +version = "4.5.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac2e663e3e3bed2d32d065a8404024dad306e699a04263ec59919529f803aee9" +checksum = "33a7e468e750fa4b6be660e8b5651ad47372e8fb114030b594c2d75d48c5ffd0" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = 
"54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" dependencies = [ "heck", "proc-macro2", @@ -278,6 +278,41 @@ dependencies = [ "typenum", ] +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn", +] + [[package]] name = "dateparser" version = "0.2.1" @@ -301,6 +336,37 @@ dependencies = [ "winapi", ] +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + [[package]] name = "digest" version = "0.10.7" @@ -452,17 +518,18 @@ dependencies = [ [[package]] name = "handlebars" -version = "6.2.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4ccde012831f9a071a637b0d4e31df31c0f6c525784b35ae76a9ac6bc1e315" +checksum = "3d6b224b95c1e668ac0270325ad563b2eef1469fbbb8959bc7c692c844b813d9" dependencies = [ + "derive_builder", "log", "num-order", "pest", "pest_derive", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.11", ] [[package]] @@ -653,6 +720,12 @@ dependencies = [ "syn", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "1.0.3" @@ -739,9 +812,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" @@ -1006,7 +1079,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 2.0.9", + "thiserror 2.0.11", "ucd-trie", ] @@ -1046,21 +1119,21 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", ] [[package]] name = "phf_codegen" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ - "phf_generator 0.11.2", - "phf_shared 0.11.2", + "phf_generator 0.11.3", + "phf_shared 0.11.3", ] [[package]] @@ -1075,11 +1148,11 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", "rand", ] @@ -1089,16 +1162,16 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher", + "siphasher 1.0.1", ] [[package]] @@ -1133,9 +1206,9 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -1146,7 +1219,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "memchr", "pulldown-cmark-escape 0.10.1", "unicase", @@ -1158,7 +1231,7 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "679341d22c78c6c649893cbd6c3278dcbe9fc4faa62fea3a9296ae2b50c14625" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "memchr", "pulldown-cmark-escape 0.11.0", "unicase", @@ -1170,7 +1243,7 @@ version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "getopts", "memchr", "pulldown-cmark-escape 0.11.0", @@ -1252,7 +1325,7 @@ version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", ] [[package]] @@ -1298,11 +1371,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.42" +version = "0.38.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" 
dependencies = [ - "bitflags 2.6.0", + "bitflags 2.7.0", "errno", "libc", "linux-raw-sys", @@ -1358,9 +1431,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.134" +version = "1.0.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" +checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" dependencies = [ "itoa", "memchr", @@ -1400,6 +1473,12 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + [[package]] name = "smallvec" version = "1.13.2" @@ -1446,9 +1525,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.94" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ "proc-macro2", "quote", @@ -1538,11 +1617,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.9" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" dependencies = [ - "thiserror-impl 2.0.9", + "thiserror-impl 2.0.11", ] [[package]] @@ -1558,9 +1637,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.9" +version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", @@ -1882,9 +1961,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.22" +version = "0.6.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980" +checksum = "c8d71a593cc5c42ad7876e2c1fda56f314f3754c084128833e64f1345ff8a03a" dependencies = [ "memchr", ] From 5055864071a821f505a16c7afc8c6eea2f0807b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Mon, 30 Dec 2024 20:35:25 +0000 Subject: [PATCH 027/142] disable NLL liveness optimization when using polonius in NLLs some locals are marked live at all points if one of their regions escapes the function but that doesn't work in a flow-sensitive setting like polonius --- .../src/type_check/liveness/mod.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_borrowck/src/type_check/liveness/mod.rs b/compiler/rustc_borrowck/src/type_check/liveness/mod.rs index f23602d03588f..4e0b2a4e29681 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/mod.rs @@ -38,11 +38,19 @@ pub(super) fn generate<'a, 'tcx>( ) { debug!("liveness::generate"); - let free_regions = regions_that_outlive_free_regions( - typeck.infcx.num_region_vars(), - &typeck.universal_regions, - 
&typeck.constraints.outlives_constraints, - ); + // NLLs can avoid computing some liveness data here because its constraints are + // location-insensitive, but that doesn't work in polonius: locals whose type contains a region + // that outlives a free region are not necessarily live everywhere in a flow-sensitive setting, + // unlike NLLs. + let free_regions = if !typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() { + regions_that_outlive_free_regions( + typeck.infcx.num_region_vars(), + &typeck.universal_regions, + &typeck.constraints.outlives_constraints, + ) + } else { + typeck.universal_regions.universal_regions_iter().collect() + }; let (relevant_live_locals, boring_locals) = compute_relevant_live_locals(typeck.tcx(), &free_regions, body); From 0c978bc4e6632972a484f3b39c83dee1e151daa4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 31 Dec 2024 16:17:28 +0000 Subject: [PATCH 028/142] introduce reachability to the constraint graph --- compiler/rustc_borrowck/src/nll.rs | 10 +-- .../src/polonius/loan_liveness.rs | 82 +++++++++++++++++++ compiler/rustc_borrowck/src/polonius/mod.rs | 30 +++++-- 3 files changed, 112 insertions(+), 10 deletions(-) create mode 100644 compiler/rustc_borrowck/src/polonius/loan_liveness.rs diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index aa0bfd7214729..590cbef05d53e 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -103,7 +103,7 @@ pub(crate) fn compute_regions<'a, 'tcx>( constraints, universal_region_relations, opaque_type_values, - mut polonius_context, + polonius_context, } = type_check::type_check( infcx, body, @@ -142,10 +142,10 @@ pub(crate) fn compute_regions<'a, 'tcx>( location_map, ); - // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives - // constraints. - let localized_outlives_constraints = polonius_context.as_mut().map(|polonius_context| { - polonius_context.create_localized_constraints(infcx.tcx, ®ioncx, body) + // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints + // and use them to compute loan liveness. + let localized_outlives_constraints = polonius_context.as_ref().map(|polonius_context| { + polonius_context.compute_loan_liveness(infcx.tcx, ®ioncx, body, borrow_set) }); // If requested: dump NLL facts, and run legacy polonius analysis. diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs new file mode 100644 index 0000000000000..796a63da79d6e --- /dev/null +++ b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs @@ -0,0 +1,82 @@ +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; +use rustc_middle::mir::Body; +use rustc_middle::ty::{RegionVid, TyCtxt}; +use rustc_mir_dataflow::points::PointIndex; + +use super::{LiveLoans, LocalizedOutlivesConstraintSet}; +use crate::BorrowSet; +use crate::region_infer::values::LivenessValues; + +/// With the full graph of constraints, we can compute loan reachability, and trace loan liveness +/// throughout the CFG. 
+pub(super) fn compute_loan_liveness<'tcx>( + _tcx: TyCtxt<'tcx>, + _body: &Body<'tcx>, + liveness: &LivenessValues, + borrow_set: &BorrowSet<'tcx>, + localized_outlives_constraints: &LocalizedOutlivesConstraintSet, +) -> LiveLoans { + let mut live_loans = LiveLoans::new(borrow_set.len()); + let graph = index_constraints(&localized_outlives_constraints); + let mut visited = FxHashSet::default(); + let mut stack = Vec::new(); + + // Compute reachability per loan by traversing each loan's subgraph starting from where it is + // introduced. + for (loan_idx, loan) in borrow_set.iter_enumerated() { + visited.clear(); + stack.clear(); + + let start_node = LocalizedNode { + region: loan.region, + point: liveness.point_from_location(loan.reserve_location), + }; + stack.push(start_node); + + while let Some(node) = stack.pop() { + if !visited.insert(node) { + continue; + } + + // Record the loan as being live on entry to this point. + live_loans.insert(node.point, loan_idx); + + for succ in outgoing_edges(&graph, node) { + stack.push(succ); + } + } + } + + live_loans +} + +/// The localized constraint graph is currently the per-node map of its physical edges. In the +/// future, we'll add logical edges to model constraints that hold at all points in the CFG. +type LocalizedConstraintGraph = FxHashMap>; + +/// A node in the graph to be traversed, one of the two vertices of a localized outlives constraint. +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +struct LocalizedNode { + region: RegionVid, + point: PointIndex, +} + +/// Index the outlives constraints into a graph of edges per node. +fn index_constraints(constraints: &LocalizedOutlivesConstraintSet) -> LocalizedConstraintGraph { + let mut edges = LocalizedConstraintGraph::default(); + for constraint in &constraints.outlives { + let source = LocalizedNode { region: constraint.source, point: constraint.from }; + let target = LocalizedNode { region: constraint.target, point: constraint.to }; + edges.entry(source).or_default().insert(target); + } + + edges +} + +/// Returns the outgoing edges of a given node, not its transitive closure. +fn outgoing_edges( + graph: &LocalizedConstraintGraph, + node: LocalizedNode, +) -> impl Iterator + use<'_> { + graph.get(&node).into_iter().flat_map(|edges| edges.iter().copied()) +} diff --git a/compiler/rustc_borrowck/src/polonius/mod.rs b/compiler/rustc_borrowck/src/polonius/mod.rs index 7d0f9397021be..313ed5d66493b 100644 --- a/compiler/rustc_borrowck/src/polonius/mod.rs +++ b/compiler/rustc_borrowck/src/polonius/mod.rs @@ -37,6 +37,7 @@ mod constraints; mod dump; pub(crate) mod legacy; mod liveness_constraints; +mod loan_liveness; mod typeck_constraints; use std::collections::BTreeMap; @@ -49,8 +50,12 @@ use rustc_mir_dataflow::points::PointIndex; pub(crate) use self::constraints::*; pub(crate) use self::dump::dump_polonius_mir; use self::liveness_constraints::create_liveness_constraints; +use self::loan_liveness::compute_loan_liveness; use self::typeck_constraints::convert_typeck_constraints; -use crate::RegionInferenceContext; +use crate::dataflow::BorrowIndex; +use crate::{BorrowSet, RegionInferenceContext}; + +pub(crate) type LiveLoans = SparseBitMatrix; /// This struct holds the data needed to create the Polonius localized constraints. 
pub(crate) struct PoloniusContext { @@ -82,14 +87,20 @@ impl PoloniusContext { Self { live_region_variances: BTreeMap::new(), live_regions: None } } - /// Creates a constraint set for `-Zpolonius=next` by: + /// Computes live loans using the set of loans model for `-Zpolonius=next`. + /// + /// First, creates a constraint graph combining regions and CFG points, by: /// - converting NLL typeck constraints to be localized /// - encoding liveness constraints - pub(crate) fn create_localized_constraints<'tcx>( + /// + /// Then, this graph is traversed, and combined with kills, reachability is recorded as loan + /// liveness, to be used by the loan scope and active loans computations. + pub(crate) fn compute_loan_liveness<'tcx>( &self, tcx: TyCtxt<'tcx>, regioncx: &RegionInferenceContext<'tcx>, body: &Body<'tcx>, + borrow_set: &BorrowSet<'tcx>, ) -> LocalizedOutlivesConstraintSet { let mut localized_outlives_constraints = LocalizedOutlivesConstraintSet::default(); convert_typeck_constraints( @@ -113,8 +124,17 @@ impl PoloniusContext { &mut localized_outlives_constraints, ); - // FIXME: here, we can trace loan reachability in the constraint graph and record this as loan - // liveness for the next step in the chain, the NLL loan scope and active loans computations. + // Now that we have a complete graph, we can compute reachability to trace the liveness of + // loans for the next step in the chain, the NLL loan scope and active loans computations. + let _live_loans = compute_loan_liveness( tcx, body, regioncx.liveness_constraints(), borrow_set, &localized_outlives_constraints, ); + // FIXME: record the live loans in the regioncx's liveness constraints, where the + // location-insensitive variant's data is stored. localized_outlives_constraints } From 67a1bb1554c96c887c0b61bd21a21826aa3b4d8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 31 Dec 2024 16:39:17 +0000 Subject: [PATCH 029/142] replace location-insensitive analysis with location-sensitive analysis we're in the endgame now set up the location-sensitive analysis end to end: - stop recording inflowing loans and loan liveness in liveness - replace location-insensitive liveness data with live loans computed by reachability - remove equivalence between polonius scopes and NLL scopes, and only run one scope computation --- compiler/rustc_borrowck/src/dataflow.rs | 63 ++++++++----------- compiler/rustc_borrowck/src/nll.rs | 2 +- compiler/rustc_borrowck/src/polonius/mod.rs | 7 +-- .../rustc_borrowck/src/region_infer/mod.rs | 37 +++++------ .../rustc_borrowck/src/region_infer/values.rs | 57 ++++------------- .../src/type_check/liveness/trace.rs | 33 +--------- 6 files changed, 59 insertions(+), 140 deletions(-) diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs index a7a6f2da509c1..3a200642ce58c 100644 --- a/compiler/rustc_borrowck/src/dataflow.rs +++ b/compiler/rustc_borrowck/src/dataflow.rs @@ -325,10 +325,17 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { let sccs = self.regioncx.constraint_sccs(); let universal_regions = self.regioncx.universal_regions(); + // The loop below was useful for the location-insensitive analysis but shouldn't be + // impactful in the location-sensitive case. It seems that it does, however, as without it a + // handful of tests fail. 
That likely means some liveness or outlives data related to choice + // regions is missing + // FIXME: investigate the impact of loans traversing applied member constraints and why some + // tests fail otherwise. + // // We first handle the cases where the loan doesn't go out of scope, depending on the // issuing region's successors. for successor in graph::depth_first_search(&self.regioncx.region_graph(), issuing_region) { - // 1. Via applied member constraints + // Via applied member constraints // // The issuing region can flow into the choice regions, and they are either: // - placeholders or free regions themselves, @@ -346,14 +353,6 @@ impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { return; } } - - // 2. Via regions that are live at all points: placeholders and free regions. - // - // If the issuing region outlives such a region, its loan escapes the function and - // cannot go out of scope. We can early return. - if self.regioncx.is_region_live_at_all_points(successor) { - return; - } } let first_block = loan_issued_at.block; @@ -461,34 +460,26 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { regioncx: &RegionInferenceContext<'tcx>, borrow_set: &'a BorrowSet<'tcx>, ) -> Self { - let mut borrows_out_of_scope_at_location = - calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set); - - // The in-tree polonius analysis computes loans going out of scope using the set-of-loans - // model, and makes sure they're identical to the existing computation of the set-of-points - // model. - if tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { - let mut polonius_prec = PoloniusOutOfScopePrecomputer::new(body, regioncx); - for (loan_idx, loan_data) in borrow_set.iter_enumerated() { - let issuing_region = loan_data.region; - let loan_issued_at = loan_data.reserve_location; - - polonius_prec.precompute_loans_out_of_scope( - loan_idx, - issuing_region, - loan_issued_at, - ); - } - - assert_eq!( - borrows_out_of_scope_at_location, polonius_prec.loans_out_of_scope_at_location, - "polonius loan scopes differ from NLL borrow scopes, for body {:?}", - body.span, - ); - - borrows_out_of_scope_at_location = polonius_prec.loans_out_of_scope_at_location; - } + let borrows_out_of_scope_at_location = + if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set) + } else { + // The in-tree polonius analysis computes loans going out of scope using the + // set-of-loans model. + let mut polonius_prec = PoloniusOutOfScopePrecomputer::new(body, regioncx); + for (loan_idx, loan_data) in borrow_set.iter_enumerated() { + let issuing_region = loan_data.region; + let loan_issued_at = loan_data.reserve_location; + + polonius_prec.precompute_loans_out_of_scope( + loan_idx, + issuing_region, + loan_issued_at, + ); + } + polonius_prec.loans_out_of_scope_at_location + }; Borrows { tcx, body, borrow_set, borrows_out_of_scope_at_location } } diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index 590cbef05d53e..35264bd1a7075 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -145,7 +145,7 @@ pub(crate) fn compute_regions<'a, 'tcx>( // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints // and use them to compute loan liveness. 
let localized_outlives_constraints = polonius_context.as_ref().map(|polonius_context| { - polonius_context.compute_loan_liveness(infcx.tcx, ®ioncx, body, borrow_set) + polonius_context.compute_loan_liveness(infcx.tcx, &mut regioncx, body, borrow_set) }); // If requested: dump NLL facts, and run legacy polonius analysis. diff --git a/compiler/rustc_borrowck/src/polonius/mod.rs b/compiler/rustc_borrowck/src/polonius/mod.rs index 313ed5d66493b..52a5f75d8a239 100644 --- a/compiler/rustc_borrowck/src/polonius/mod.rs +++ b/compiler/rustc_borrowck/src/polonius/mod.rs @@ -98,7 +98,7 @@ impl PoloniusContext { pub(crate) fn compute_loan_liveness<'tcx>( &self, tcx: TyCtxt<'tcx>, - regioncx: &RegionInferenceContext<'tcx>, + regioncx: &mut RegionInferenceContext<'tcx>, body: &Body<'tcx>, borrow_set: &BorrowSet<'tcx>, ) -> LocalizedOutlivesConstraintSet { @@ -126,15 +126,14 @@ impl PoloniusContext { // Now that we have a complete graph, we can compute reachability to trace the liveness of // loans for the next step in the chain, the NLL loan scope and active loans computations. - let _live_loans = compute_loan_liveness( + let live_loans = compute_loan_liveness( tcx, body, regioncx.liveness_constraints(), borrow_set, &localized_outlives_constraints, ); - // FIXME: record the live loans in the regioncx's liveness constraints, where the - // location-insensitive variant's data is stored. + regioncx.record_live_loans(live_loans); localized_outlives_constraints } diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index c177538ee177a..d2268c4779d63 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -31,6 +31,7 @@ use crate::constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstra use crate::dataflow::BorrowIndex; use crate::diagnostics::{RegionErrorKind, RegionErrors, UniverseInfo}; use crate::member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}; +use crate::polonius::LiveLoans; use crate::polonius::legacy::PoloniusOutput; use crate::region_infer::reverse_sccs::ReverseSccGraph; use crate::region_infer::values::{LivenessValues, RegionElement, RegionValues, ToElementIndex}; @@ -2171,28 +2172,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.constraint_graph.region_graph(&self.constraints, self.universal_regions().fr_static) } - /// Returns whether the given region is considered live at all points: whether it is a - /// placeholder or a free region. - pub(crate) fn is_region_live_at_all_points(&self, region: RegionVid) -> bool { - // FIXME: there must be a cleaner way to find this information. At least, when - // higher-ranked subtyping is abstracted away from the borrowck main path, we'll only - // need to check whether this is a universal region. - let origin = self.region_definition(region).origin; - let live_at_all_points = matches!( - origin, - NllRegionVariableOrigin::Placeholder(_) | NllRegionVariableOrigin::FreeRegion - ); - live_at_all_points - } - - /// Returns whether the `loan_idx` is live at the given `location`: whether its issuing - /// region is contained within the type of a variable that is live at this point. - /// Note: for now, the sets of live loans is only available when using `-Zpolonius=next`. 
- pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, location: Location) -> bool { - let point = self.liveness_constraints.point_from_location(location); - self.liveness_constraints.is_loan_live_at(loan_idx, point) - } - /// Returns the representative `RegionVid` for a given SCC. /// See `RegionTracker` for how a region variable ID is chosen. /// @@ -2208,6 +2187,20 @@ impl<'tcx> RegionInferenceContext<'tcx> { pub(crate) fn liveness_constraints(&self) -> &LivenessValues { &self.liveness_constraints } + + /// When using `-Zpolonius=next`, records the given live loans for the loan scopes and active + /// loans dataflow computations. + pub(crate) fn record_live_loans(&mut self, live_loans: LiveLoans) { + self.liveness_constraints.record_live_loans(live_loans); + } + + /// Returns whether the `loan_idx` is live at the given `location`: whether its issuing + /// region is contained within the type of a variable that is live at this point. + /// Note: for now, the sets of live loans is only available when using `-Zpolonius=next`. + pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, location: Location) -> bool { + let point = self.liveness_constraints.point_from_location(location); + self.liveness_constraints.is_loan_live_at(loan_idx, point) + } } impl<'tcx> RegionDefinition<'tcx> { diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs index 11fb125ca2281..f1bcb353dc61c 100644 --- a/compiler/rustc_borrowck/src/region_infer/values.rs +++ b/compiler/rustc_borrowck/src/region_infer/values.rs @@ -11,6 +11,7 @@ use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex}; use tracing::debug; use crate::BorrowIndex; +use crate::polonius::LiveLoans; rustc_index::newtype_index! { /// A single integer representing a `ty::Placeholder`. @@ -50,29 +51,8 @@ pub(crate) struct LivenessValues { /// region is live, only that it is. points: Option>, - /// When using `-Zpolonius=next`, for each point: the loans flowing into the live regions at - /// that point. - pub(crate) loans: Option, -} - -/// Data used to compute the loans that are live at a given point in the CFG, when using -/// `-Zpolonius=next`. -pub(crate) struct LiveLoans { - /// The set of loans that flow into a given region. When individual regions are marked as live - /// in the CFG, these inflowing loans are recorded as live. - pub(crate) inflowing_loans: SparseBitMatrix, - - /// The set of loans that are live at a given point in the CFG. - pub(crate) live_loans: SparseBitMatrix, -} - -impl LiveLoans { - pub(crate) fn new(num_loans: usize) -> Self { - LiveLoans { - live_loans: SparseBitMatrix::new(num_loans), - inflowing_loans: SparseBitMatrix::new(num_loans), - } - } + /// When using `-Zpolonius=next`, the set of loans that are live at a given point in the CFG. + live_loans: Option, } impl LivenessValues { @@ -82,7 +62,7 @@ impl LivenessValues { live_regions: None, points: Some(SparseIntervalMatrix::new(location_map.num_points())), location_map, - loans: None, + live_loans: None, } } @@ -95,7 +75,7 @@ impl LivenessValues { live_regions: Some(Default::default()), points: None, location_map, - loans: None, + live_loans: None, } } @@ -129,13 +109,6 @@ impl LivenessValues { } else if self.location_map.point_in_range(point) { self.live_regions.as_mut().unwrap().insert(region); } - - // When available, record the loans flowing into this region as live at the given point. 
- if let Some(loans) = self.loans.as_mut() { - if let Some(inflowing) = loans.inflowing_loans.row(region) { - loans.live_loans.union_row(point, inflowing); - } - } } /// Records `region` as being live at all the given `points`. @@ -146,17 +119,6 @@ impl LivenessValues { } else if points.iter().any(|point| self.location_map.point_in_range(point)) { self.live_regions.as_mut().unwrap().insert(region); } - - // When available, record the loans flowing into this region as live at the given points. - if let Some(loans) = self.loans.as_mut() { - if let Some(inflowing) = loans.inflowing_loans.row(region) { - if !inflowing.is_empty() { - for point in points.iter() { - loans.live_loans.union_row(point, inflowing); - } - } - } - } } /// Records `region` as being live at all the control-flow points. @@ -213,12 +175,17 @@ impl LivenessValues { self.location_map.to_location(point) } + /// When using `-Zpolonius=next`, records the given live loans for the loan scopes and active + /// loans dataflow computations. + pub(crate) fn record_live_loans(&mut self, live_loans: LiveLoans) { + self.live_loans = Some(live_loans); + } + /// When using `-Zpolonius=next`, returns whether the `loan_idx` is live at the given `point`. pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, point: PointIndex) -> bool { - self.loans + self.live_loans .as_ref() .expect("Accessing live loans requires `-Zpolonius=next`") - .live_loans .contains(point, loan_idx) } } diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index 4c0d3138f2d37..c564d85616e25 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -16,7 +16,7 @@ use rustc_trait_selection::traits::query::type_op::{DropckOutlives, TypeOp, Type use tracing::debug; use crate::polonius; -use crate::region_infer::values::{self, LiveLoans}; +use crate::region_infer::values; use crate::type_check::liveness::local_use_map::LocalUseMap; use crate::type_check::{NormalizeLocation, TypeChecker}; @@ -44,37 +44,6 @@ pub(super) fn trace<'a, 'tcx>( boring_locals: Vec, ) { let local_use_map = &LocalUseMap::build(&relevant_live_locals, location_map, body); - - // When using `-Zpolonius=next`, compute the set of loans that can reach a given region. - if typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() { - let borrow_set = &typeck.borrow_set; - let mut live_loans = LiveLoans::new(borrow_set.len()); - let outlives_constraints = &typeck.constraints.outlives_constraints; - let graph = outlives_constraints.graph(typeck.infcx.num_region_vars()); - let region_graph = - graph.region_graph(outlives_constraints, typeck.universal_regions.fr_static); - - // Traverse each issuing region's constraints, and record the loan as flowing into the - // outlived region. - for (loan, issuing_region_data) in borrow_set.iter_enumerated() { - for succ in rustc_data_structures::graph::depth_first_search( - ®ion_graph, - issuing_region_data.region, - ) { - // We don't need to mention that a loan flows into its issuing region. - if succ == issuing_region_data.region { - continue; - } - - live_loans.inflowing_loans.insert(succ, loan); - } - } - - // Store the inflowing loans in the liveness constraints: they will be used to compute live - // loans when liveness data is recorded there. 
- typeck.constraints.liveness_constraints.loans = Some(live_loans); - }; - let cx = LivenessContext { typeck, body, From 550cf1f4a4ccee7ef2e49eab7e7b6dd4d2930712 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 31 Dec 2024 17:32:26 +0000 Subject: [PATCH 030/142] handle kills in reachability --- .../src/polonius/loan_liveness.rs | 133 +++++++++++++++++- 1 file changed, 126 insertions(+), 7 deletions(-) diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs index 796a63da79d6e..aacc342e0f123 100644 --- a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs +++ b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs @@ -1,22 +1,34 @@ +use std::collections::{BTreeMap, BTreeSet}; + use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; -use rustc_middle::mir::Body; +use rustc_middle::mir::visit::Visitor; +use rustc_middle::mir::{ + Body, Local, Location, Place, Rvalue, Statement, StatementKind, Terminator, TerminatorKind, +}; use rustc_middle::ty::{RegionVid, TyCtxt}; use rustc_mir_dataflow::points::PointIndex; use super::{LiveLoans, LocalizedOutlivesConstraintSet}; -use crate::BorrowSet; +use crate::dataflow::BorrowIndex; use crate::region_infer::values::LivenessValues; +use crate::{BorrowSet, PlaceConflictBias, places_conflict}; -/// With the full graph of constraints, we can compute loan reachability, and trace loan liveness -/// throughout the CFG. +/// With the full graph of constraints, we can compute loan reachability, stop at kills, and trace +/// loan liveness throughout the CFG. pub(super) fn compute_loan_liveness<'tcx>( - _tcx: TyCtxt<'tcx>, - _body: &Body<'tcx>, + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, liveness: &LivenessValues, borrow_set: &BorrowSet<'tcx>, localized_outlives_constraints: &LocalizedOutlivesConstraintSet, ) -> LiveLoans { let mut live_loans = LiveLoans::new(borrow_set.len()); + + // FIXME: it may be preferable for kills to be encoded in the edges themselves, to simplify and + // likely make traversal (and constraint generation) more efficient. We also display kills on + // edges when visualizing the constraint graph anyways. + let kills = collect_kills(body, tcx, borrow_set); + let graph = index_constraints(&localized_outlives_constraints); let mut visited = FxHashSet::default(); let mut stack = Vec::new(); @@ -41,7 +53,16 @@ pub(super) fn compute_loan_liveness<'tcx>( // Record the loan as being live on entry to this point. live_loans.insert(node.point, loan_idx); + // Continuing traversal will depend on whether the loan is killed at this point. + let current_location = liveness.location_from_point(node.point); + let is_loan_killed = + kills.get(¤t_location).is_some_and(|kills| kills.contains(&loan_idx)); + for succ in outgoing_edges(&graph, node) { + // If the loan is killed at this point, it is killed _on exit_. + if is_loan_killed { + continue; + } stack.push(succ); } } @@ -61,7 +82,7 @@ struct LocalizedNode { point: PointIndex, } -/// Index the outlives constraints into a graph of edges per node. +/// Traverses the constraints and returns the indexable graph of edges per node. fn index_constraints(constraints: &LocalizedOutlivesConstraintSet) -> LocalizedConstraintGraph { let mut edges = LocalizedConstraintGraph::default(); for constraint in &constraints.outlives { @@ -80,3 +101,101 @@ fn outgoing_edges( ) -> impl Iterator + use<'_> { graph.get(&node).into_iter().flat_map(|edges| edges.iter().copied()) } + +/// Traverses the MIR and collects kills. 
+fn collect_kills<'tcx>( + body: &Body<'tcx>, + tcx: TyCtxt<'tcx>, + borrow_set: &BorrowSet<'tcx>, +) -> BTreeMap> { + let mut collector = KillsCollector { borrow_set, tcx, body, kills: BTreeMap::default() }; + for (block, data) in body.basic_blocks.iter_enumerated() { + collector.visit_basic_block_data(block, data); + } + collector.kills +} + +struct KillsCollector<'a, 'tcx> { + body: &'a Body<'tcx>, + tcx: TyCtxt<'tcx>, + borrow_set: &'a BorrowSet<'tcx>, + + /// The set of loans killed at each location. + kills: BTreeMap>, +} + +// This visitor has a similar structure to the `Borrows` dataflow computation with respect to kills, +// and the datalog polonius fact generation for the `loan_killed_at` relation. +impl<'tcx> KillsCollector<'_, 'tcx> { + /// Records the borrows on the specified place as `killed`. For example, when assigning to a + /// local, or on a call's return destination. + fn record_killed_borrows_for_place(&mut self, place: Place<'tcx>, location: Location) { + let other_borrows_of_local = self + .borrow_set + .local_map + .get(&place.local) + .into_iter() + .flat_map(|bs| bs.iter()) + .copied(); + + // If the borrowed place is a local with no projections, all other borrows of this + // local must conflict. This is purely an optimization so we don't have to call + // `places_conflict` for every borrow. + if place.projection.is_empty() { + if !self.body.local_decls[place.local].is_ref_to_static() { + self.kills.entry(location).or_default().extend(other_borrows_of_local); + } + return; + } + + // By passing `PlaceConflictBias::NoOverlap`, we conservatively assume that any given + // pair of array indices are not equal, so that when `places_conflict` returns true, we + // will be assured that two places being compared definitely denotes the same sets of + // locations. + let definitely_conflicting_borrows = other_borrows_of_local.filter(|&i| { + places_conflict( + self.tcx, + self.body, + self.borrow_set[i].borrowed_place, + place, + PlaceConflictBias::NoOverlap, + ) + }); + + self.kills.entry(location).or_default().extend(definitely_conflicting_borrows); + } + + /// Records the borrows on the specified local as `killed`. + fn record_killed_borrows_for_local(&mut self, local: Local, location: Location) { + if let Some(borrow_indices) = self.borrow_set.local_map.get(&local) { + self.kills.entry(location).or_default().extend(borrow_indices.iter()); + } + } +} + +impl<'tcx> Visitor<'tcx> for KillsCollector<'_, 'tcx> { + fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { + // Make sure there are no remaining borrows for locals that have gone out of scope. + if let StatementKind::StorageDead(local) = statement.kind { + self.record_killed_borrows_for_local(local, location); + } + + self.super_statement(statement, location); + } + + fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { + // When we see `X = ...`, then kill borrows of `(*X).foo` and so forth. + self.record_killed_borrows_for_place(*place, location); + self.super_assign(place, rvalue, location); + } + + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { + // A `Call` terminator's return value can be a local which has borrows, so we need to record + // those as killed as well. + if let TerminatorKind::Call { destination, .. 
} = terminator.kind { + self.record_killed_borrows_for_place(destination, location); + } + + self.super_terminator(terminator, location); + } +} From 0f3dd33e1d39f242c2da4d13c650b14b11c0d3c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 31 Dec 2024 17:37:58 +0000 Subject: [PATCH 031/142] deal with naive reachability weakness it's a bit mind-bending --- .../src/polonius/loan_liveness.rs | 95 ++++++++++++++++--- 1 file changed, 82 insertions(+), 13 deletions(-) diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs index aacc342e0f123..c519453652fe8 100644 --- a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs +++ b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs @@ -53,15 +53,69 @@ pub(super) fn compute_loan_liveness<'tcx>( // Record the loan as being live on entry to this point. live_loans.insert(node.point, loan_idx); - // Continuing traversal will depend on whether the loan is killed at this point. + // Here, we have a conundrum. There's currently a weakness in our theory, in that + // we're using a single notion of reachability to represent what used to be _two_ + // different transitive closures. It didn't seem impactful when coming up with the + // single-graph and reachability through space (regions) + time (CFG) concepts, but in + // practice the combination of time-traveling with kills is more impactful than + // initially anticipated. + // + // Kills should prevent a loan from reaching its successor points in the CFG, but not + // while time-traveling: we're not actually at that CFG point, but looking for + // predecessor regions that contain the loan. One of the two TCs we had pushed the + // transitive subset edges to each point instead of having backward edges, and the + // problem didn't exist before. In the abstract, naive reachability is not enough to + // model this, we'd need a slightly different solution. For example, maybe with a + // two-step traversal: + // - at each point we first traverse the subgraph (and possibly time-travel) looking for + // exit nodes while ignoring kills, + // - and then when we're back at the current point, we continue normally. + // + // Another (less annoying) subtlety is that kills and the loan use-map are + // flow-insensitive. Kills can actually appear in places before a loan is introduced, or + // at a location that is actually unreachable in the CFG from the introduction point, + // and these can also be encountered during time-traveling. + // + // The simplest change that made sense to "fix" the issues above is taking into + // account kills that are: + // - reachable from the introduction point + // - encountered during forward traversal. Note that this is not transitive like the + // two-step traversal described above: only kills encountered on exit via a backward + // edge are ignored. + // + // In our test suite, there are a couple of cases where kills are encountered while + // time-traveling, however as far as we can tell, always in cases where they would be + // unreachable. We have reason to believe that this is a property of the single-graph + // approach (but haven't proved it yet): + // - reachable kills while time-traveling would also be encountered via regular + // traversal + // - it makes _some_ sense to ignore unreachable kills, but subtleties around dead code + // in general need to be better thought through (like they were for NLLs). 
+ // - ignoring kills is a conservative approximation: the loan is still live and could + // cause false positive errors at another place access. Soundness issues in this + // domain should look more like the absence of reachability instead. + // + // This is enough in practice to pass tests, and therefore is what we have implemented + // for now. + // + // FIXME: all of the above. Analyze potential unsoundness, possibly in concert with a + // borrowck implementation in a-mir-formality, fuzzing, or manually crafting + // counter-examples. + + // Continuing traversal will depend on whether the loan is killed at this point, and + // whether we're time-traveling. let current_location = liveness.location_from_point(node.point); let is_loan_killed = kills.get(¤t_location).is_some_and(|kills| kills.contains(&loan_idx)); for succ in outgoing_edges(&graph, node) { - // If the loan is killed at this point, it is killed _on exit_. + // If the loan is killed at this point, it is killed _on exit_. But only during + // forward traversal. if is_loan_killed { - continue; + let destination = liveness.location_from_point(succ.point); + if current_location.is_predecessor_of(destination, body) { + continue; + } } stack.push(succ); } @@ -130,6 +184,16 @@ impl<'tcx> KillsCollector<'_, 'tcx> { /// Records the borrows on the specified place as `killed`. For example, when assigning to a /// local, or on a call's return destination. fn record_killed_borrows_for_place(&mut self, place: Place<'tcx>, location: Location) { + // For the reasons described in graph traversal, we also filter out kills + // unreachable from the loan's introduction point, as they would stop traversal when + // e.g. checking for reachability in the subset graph through invariance constraints + // higher up. + let filter_unreachable_kills = |loan| { + let introduction = self.borrow_set[loan].reserve_location; + let reachable = introduction.is_predecessor_of(location, self.body); + reachable + }; + let other_borrows_of_local = self .borrow_set .local_map @@ -143,7 +207,10 @@ impl<'tcx> KillsCollector<'_, 'tcx> { // `places_conflict` for every borrow. if place.projection.is_empty() { if !self.body.local_decls[place.local].is_ref_to_static() { - self.kills.entry(location).or_default().extend(other_borrows_of_local); + self.kills + .entry(location) + .or_default() + .extend(other_borrows_of_local.filter(|&loan| filter_unreachable_kills(loan))); } return; } @@ -152,15 +219,17 @@ impl<'tcx> KillsCollector<'_, 'tcx> { // pair of array indices are not equal, so that when `places_conflict` returns true, we // will be assured that two places being compared definitely denotes the same sets of // locations. 
- let definitely_conflicting_borrows = other_borrows_of_local.filter(|&i| { - places_conflict( - self.tcx, - self.body, - self.borrow_set[i].borrowed_place, - place, - PlaceConflictBias::NoOverlap, - ) - }); + let definitely_conflicting_borrows = other_borrows_of_local + .filter(|&i| { + places_conflict( + self.tcx, + self.body, + self.borrow_set[i].borrowed_place, + place, + PlaceConflictBias::NoOverlap, + ) + }) + .filter(|&loan| filter_unreachable_kills(loan)); self.kills.entry(location).or_default().extend(definitely_conflicting_borrows); } From 735013d281c991187f327da0c660a026deb255de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 31 Dec 2024 17:39:00 +0000 Subject: [PATCH 032/142] remove location-insensitive ICE test --- tests/crashes/127628.rs | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 tests/crashes/127628.rs diff --git a/tests/crashes/127628.rs b/tests/crashes/127628.rs deleted file mode 100644 index f11ab3f7e8d85..0000000000000 --- a/tests/crashes/127628.rs +++ /dev/null @@ -1,14 +0,0 @@ -//@ known-bug: #127628 -//@ compile-flags: -Zpolonius=next - -use std::io::{self, Read}; - -pub struct Container<'a> { - reader: &'a mut dyn Read, -} - -impl<'a> Container { - pub fn wrap<'s>(reader: &'s mut dyn io::Read) -> Container<'s> { - Container { reader: reader } - } -} From 8ac045dd4c1aad7eca07522a4dd9c22181674728 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Fri, 10 Jan 2025 08:42:00 +0000 Subject: [PATCH 033/142] move out of scope precomputer code this addresses review comments while: - keeping the symmetry between the NLL and Polonius out of scope precomputers - keeping the unstable `calculate_borrows_out_of_scope_at_location` function to avoid churn for consumers --- compiler/rustc_borrowck/src/dataflow.rs | 66 ++++++++++++------------- 1 file changed, 32 insertions(+), 34 deletions(-) diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs index 3a200642ce58c..7511a55b03ae5 100644 --- a/compiler/rustc_borrowck/src/dataflow.rs +++ b/compiler/rustc_borrowck/src/dataflow.rs @@ -187,19 +187,28 @@ struct OutOfScopePrecomputer<'a, 'tcx> { borrows_out_of_scope_at_location: FxIndexMap>, } -impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> { - fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self { - OutOfScopePrecomputer { +impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> { + fn compute( + body: &Body<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + borrow_set: &BorrowSet<'tcx>, + ) -> FxIndexMap> { + let mut prec = OutOfScopePrecomputer { visited: DenseBitSet::new_empty(body.basic_blocks.len()), visit_stack: vec![], body, regioncx, borrows_out_of_scope_at_location: FxIndexMap::default(), + }; + for (borrow_index, borrow_data) in borrow_set.iter_enumerated() { + let borrow_region = borrow_data.region; + let location = borrow_data.reserve_location; + prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location); } + + prec.borrows_out_of_scope_at_location } -} -impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> { fn precompute_borrows_out_of_scope( &mut self, borrow_index: BorrowIndex, @@ -280,15 +289,7 @@ pub fn calculate_borrows_out_of_scope_at_location<'tcx>( regioncx: &RegionInferenceContext<'tcx>, borrow_set: &BorrowSet<'tcx>, ) -> FxIndexMap> { - let mut prec = OutOfScopePrecomputer::new(body, regioncx); - for (borrow_index, borrow_data) in borrow_set.iter_enumerated() { - let borrow_region = borrow_data.region; - let location = 
borrow_data.reserve_location; - - prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location); - } - - prec.borrows_out_of_scope_at_location + OutOfScopePrecomputer::compute(body, regioncx, borrow_set) } struct PoloniusOutOfScopePrecomputer<'a, 'tcx> { @@ -300,19 +301,30 @@ struct PoloniusOutOfScopePrecomputer<'a, 'tcx> { loans_out_of_scope_at_location: FxIndexMap>, } -impl<'a, 'tcx> PoloniusOutOfScopePrecomputer<'a, 'tcx> { - fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self { - Self { +impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { + fn compute( + body: &Body<'tcx>, + regioncx: &RegionInferenceContext<'tcx>, + borrow_set: &BorrowSet<'tcx>, + ) -> FxIndexMap> { + // The in-tree polonius analysis computes loans going out of scope using the + // set-of-loans model. + let mut prec = PoloniusOutOfScopePrecomputer { visited: DenseBitSet::new_empty(body.basic_blocks.len()), visit_stack: vec![], body, regioncx, loans_out_of_scope_at_location: FxIndexMap::default(), + }; + for (loan_idx, loan_data) in borrow_set.iter_enumerated() { + let issuing_region = loan_data.region; + let loan_issued_at = loan_data.reserve_location; + prec.precompute_loans_out_of_scope(loan_idx, issuing_region, loan_issued_at); } + + prec.loans_out_of_scope_at_location } -} -impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> { /// Loans are in scope while they are live: whether they are contained within any live region. /// In the location-insensitive analysis, a loan will be contained in a region if the issuing /// region can reach it in the subset graph. So this is a reachability problem. @@ -464,21 +476,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set) } else { - // The in-tree polonius analysis computes loans going out of scope using the - // set-of-loans model. 
- let mut polonius_prec = PoloniusOutOfScopePrecomputer::new(body, regioncx); - for (loan_idx, loan_data) in borrow_set.iter_enumerated() { - let issuing_region = loan_data.region; - let loan_issued_at = loan_data.reserve_location; - - polonius_prec.precompute_loans_out_of_scope( - loan_idx, - issuing_region, - loan_issued_at, - ); - } - - polonius_prec.loans_out_of_scope_at_location + PoloniusOutOfScopePrecomputer::compute(body, regioncx, borrow_set) }; Borrows { tcx, body, borrow_set, borrows_out_of_scope_at_location } } From f52724c917216a8c9126479b0e0c097afe447593 Mon Sep 17 00:00:00 2001 From: spore Date: Sun, 12 Jan 2025 19:50:01 +0800 Subject: [PATCH 034/142] Add comment on case to mark the original issue --- tests/ui/lint/type-overflow.rs | 2 +- tests/ui/lint/type-overflow.stderr | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ui/lint/type-overflow.rs b/tests/ui/lint/type-overflow.rs index 0848d676e237c..102d042773b99 100644 --- a/tests/ui/lint/type-overflow.rs +++ b/tests/ui/lint/type-overflow.rs @@ -28,7 +28,7 @@ fn main() { //~^ WARNING literal out of range for `i32` //~| HELP consider using the type `u128` instead - let fail = -0x8000_0000_0000_0000_0000_0000_0000_0000; + let fail = -0x8000_0000_0000_0000_0000_0000_0000_0000; // issue #131849 //~^ WARNING literal out of range for `i32` //~| HELP consider using the type `i128` instead diff --git a/tests/ui/lint/type-overflow.stderr b/tests/ui/lint/type-overflow.stderr index dbbe81e5c182d..720044c68ebda 100644 --- a/tests/ui/lint/type-overflow.stderr +++ b/tests/ui/lint/type-overflow.stderr @@ -77,7 +77,7 @@ LL | let fail = 0x8000_0000_0000_0000_0000_0000_0000_0000; warning: literal out of range for `i32` --> $DIR/type-overflow.rs:31:17 | -LL | let fail = -0x8000_0000_0000_0000_0000_0000_0000_0000; +LL | let fail = -0x8000_0000_0000_0000_0000_0000_0000_0000; // issue #131849 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: the literal `0x8000_0000_0000_0000_0000_0000_0000_0000` (decimal `170141183460469231731687303715884105728`) does not fit into the type `i32` From 74e2e8b59834aede7aafee57e00ede07ae13739b Mon Sep 17 00:00:00 2001 From: spore Date: Sun, 12 Jan 2025 20:18:53 +0800 Subject: [PATCH 035/142] Suggest the smallest fitting type instead Changes the behavior of the `overflowing_literals` suggestion so that it always suggests the smallest type regardless of the original type size. 
--- compiler/rustc_lint/src/types/literal.rs | 4 ++-- tests/ui/lint/type-overflow.rs | 2 +- tests/ui/lint/type-overflow.stderr | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_lint/src/types/literal.rs b/compiler/rustc_lint/src/types/literal.rs index f7a59437bbc27..4b5163522f866 100644 --- a/compiler/rustc_lint/src/types/literal.rs +++ b/compiler/rustc_lint/src/types/literal.rs @@ -204,14 +204,14 @@ fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<&'static match t.kind() { ty::Uint(ty::UintTy::Usize) | ty::Int(ty::IntTy::Isize) => None, ty::Uint(_) => Some(Integer::fit_unsigned(val).uint_ty_str()), - ty::Int(int) => { + ty::Int(_) => { let signed = literal_to_i128(val, negative).map(Integer::fit_signed); if negative { signed.map(Integer::int_ty_str) } else { let unsigned = Integer::fit_unsigned(val); Some(if let Some(signed) = signed { - if Some(unsigned.size().bits()) == int.bit_width() { + if unsigned.size() < signed.size() { unsigned.uint_ty_str() } else { signed.int_ty_str() diff --git a/tests/ui/lint/type-overflow.rs b/tests/ui/lint/type-overflow.rs index 102d042773b99..1e74a8925f621 100644 --- a/tests/ui/lint/type-overflow.rs +++ b/tests/ui/lint/type-overflow.rs @@ -40,7 +40,7 @@ fn main() { //~| HELP consider using the type `u128` instead let fail = 0x8FFF_FFFF_FFFF_FFFE; //~WARNING literal out of range for `i32` - //~| HELP consider using the type `i128` instead + //~| HELP consider using the type `u64` instead //~| HELP let fail = -0b1111_1111i8; //~WARNING literal out of range for `i8` diff --git a/tests/ui/lint/type-overflow.stderr b/tests/ui/lint/type-overflow.stderr index 720044c68ebda..9fdb05ed1c036 100644 --- a/tests/ui/lint/type-overflow.stderr +++ b/tests/ui/lint/type-overflow.stderr @@ -109,7 +109,7 @@ LL | let fail = 0x8FFF_FFFF_FFFF_FFFE; | ^^^^^^^^^^^^^^^^^^^^^ | = note: the literal `0x8FFF_FFFF_FFFF_FFFE` (decimal `10376293541461622782`) does not fit into the type `i32` and will become `-2i32` - = help: consider using the type `i128` instead + = help: consider using the type `u64` instead help: to use as a negative number (decimal `-2`), consider using the type `u32` for the literal and cast it to `i32` | LL | let fail = 0x8FFF_FFFF_FFFF_FFFEu32 as i32; From 9cbd17778a6587804edf38c38221dbabc6ae5b44 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sun, 12 Jan 2025 03:20:09 +0000 Subject: [PATCH 036/142] Detect unstable lint docs that dont enable their feature --- src/tools/lint-docs/src/lib.rs | 38 +++++++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/src/tools/lint-docs/src/lib.rs b/src/tools/lint-docs/src/lib.rs index 8c7ff08ccd72c..5aae50aebc3d6 100644 --- a/src/tools/lint-docs/src/lib.rs +++ b/src/tools/lint-docs/src/lib.rs @@ -480,26 +480,44 @@ impl<'a> LintExtractor<'a> { .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string()) .collect(); if matches.is_empty() { - // Some lints override their code to something else (E0566). - // Try to find something that looks like it could be our lint. - let matches: Vec<_> = msgs.iter().filter(|msg| - matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name))) - .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string()) - .collect(); - if matches.is_empty() { + // Try to detect if an unstable lint forgot to enable a `#![feature(..)]`. 
+ if name != "test_unstable_lint" && msgs.iter().any(|msg| { + matches!(&msg["code"]["code"], serde_json::Value::String(s) if s=="unknown_lints") + && matches!(&msg["message"], serde_json::Value::String(s) if s.contains(name)) + }) { let rendered: Vec<&str> = msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect(); let non_json: Vec<&str> = stderr.lines().filter(|line| !line.starts_with('{')).collect(); Err(format!( - "did not find lint `{}` in output of example, got:\n{}\n{}", + "lint `{}` is unstable and must be enabled in example. see:\n{}\n{}", name, + rendered.join("\n"), non_json.join("\n"), - rendered.join("\n") ) .into()) } else { - Ok(matches.join("\n")) + // Some lints override their code to something else (E0566). + // Try to find something that looks like it could be our lint. + let matches: Vec<_> = msgs.iter().filter(|msg| + matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name))) + .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string()) + .collect(); + if matches.is_empty() { + let rendered: Vec<&str> = + msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect(); + let non_json: Vec<&str> = + stderr.lines().filter(|line| !line.starts_with('{')).collect(); + Err(format!( + "did not find lint `{}` in output of example, got:\n{}\n{}", + name, + non_json.join("\n"), + rendered.join("\n") + ) + .into()) + } else { + Ok(matches.join("\n")) + } } } else { Ok(matches.join("\n")) From 7f743c7d1f90ebc0f9b00d3850dbe30524e8ab44 Mon Sep 17 00:00:00 2001 From: Audrey Dutcher Date: Sun, 29 Dec 2024 22:56:03 -0800 Subject: [PATCH 037/142] bootstrap: do not rely on LIBRARY_PATH env variable Clang will not respect this value in cross configurations. --- src/bootstrap/src/core/build_steps/test.rs | 14 ++++++++---- src/bootstrap/src/core/builder/cargo.rs | 12 ++++++----- src/bootstrap/src/utils/helpers.rs | 25 ---------------------- 3 files changed, 17 insertions(+), 34 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 7d4404fa97b5c..c7c4cc7c9ee96 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -24,8 +24,8 @@ use crate::core::config::flags::{Subcommand, get_completion}; use crate::utils::build_stamp::{self, BuildStamp}; use crate::utils::exec::{BootstrapCommand, command}; use crate::utils::helpers::{ - self, LldThreads, add_link_lib_path, add_rustdoc_cargo_linker_args, dylib_path, dylib_path_var, - linker_args, linker_flags, t, target_supports_cranelift_backend, up_to_date, + self, LldThreads, add_rustdoc_cargo_linker_args, dylib_path, dylib_path_var, linker_args, + linker_flags, t, target_supports_cranelift_backend, up_to_date, }; use crate::utils::render_tests::{add_flags_and_try_run_tests, try_run_tests}; use crate::{CLang, DocTests, GitRepo, Mode, PathSet, envify}; @@ -1975,11 +1975,17 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the // Tests that use compiler libraries may inherit the `-lLLVM` link // requirement, but the `-L` library path is not propagated across // separate compilations. We can add LLVM's library path to the - // platform-specific environment variable as a workaround. + // rustc args as a workaround. 
if !builder.config.dry_run() && suite.ends_with("fulldeps") { let llvm_libdir = command(&llvm_config).arg("--libdir").run_capture_stdout(builder).stdout(); - add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cmd); + let mut rustflags = env::var("RUSTFLAGS").unwrap_or_default(); + if target.is_msvc() { + rustflags.push_str(&format!("-Clink-arg=-LIBPATH:{llvm_libdir}")); + } else { + rustflags.push_str(&format!("-Clink-arg=-L{llvm_libdir}")); + } + cmd.env("RUSTFLAGS", rustflags); } if !builder.config.dry_run() && matches!(mode, "run-make" | "coverage-run") { diff --git a/src/bootstrap/src/core/builder/cargo.rs b/src/bootstrap/src/core/builder/cargo.rs index 2692a129ef3b9..f9fb19ddb0952 100644 --- a/src/bootstrap/src/core/builder/cargo.rs +++ b/src/bootstrap/src/core/builder/cargo.rs @@ -8,9 +8,7 @@ use crate::core::build_steps::{compile, test}; use crate::core::config::SplitDebuginfo; use crate::core::config::flags::Color; use crate::utils::build_stamp; -use crate::utils::helpers::{ - self, LldThreads, add_link_lib_path, check_cfg_arg, linker_args, linker_flags, -}; +use crate::utils::helpers::{self, LldThreads, check_cfg_arg, linker_args, linker_flags}; use crate::{ BootstrapCommand, CLang, Compiler, DocTests, DryRun, EXTRA_CHECK_CFGS, GitRepo, Mode, TargetSelection, command, prepare_behaviour_dump_dir, t, @@ -947,12 +945,16 @@ impl Builder<'_> { // Tools that use compiler libraries may inherit the `-lLLVM` link // requirement, but the `-L` library path is not propagated across // separate Cargo projects. We can add LLVM's library path to the - // platform-specific environment variable as a workaround. + // rustc args as a workaround. if mode == Mode::ToolRustc || mode == Mode::Codegen { if let Some(llvm_config) = self.llvm_config(target) { let llvm_libdir = command(llvm_config).arg("--libdir").run_capture_stdout(self).stdout(); - add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cargo); + if target.is_msvc() { + rustflags.arg(&format!("-Clink-arg=-LIBPATH:{llvm_libdir}")); + } else { + rustflags.arg(&format!("-Clink-arg=-L{llvm_libdir}")); + } } } diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs index 516c314024b28..a1b1748c85b53 100644 --- a/src/bootstrap/src/utils/helpers.rs +++ b/src/bootstrap/src/utils/helpers.rs @@ -106,31 +106,6 @@ pub fn add_dylib_path(path: Vec, cmd: &mut BootstrapCommand) { cmd.env(dylib_path_var(), t!(env::join_paths(list))); } -/// Adds a list of lookup paths to `cmd`'s link library lookup path. -pub fn add_link_lib_path(path: Vec, cmd: &mut BootstrapCommand) { - let mut list = link_lib_path(); - for path in path { - list.insert(0, path); - } - cmd.env(link_lib_path_var(), t!(env::join_paths(list))); -} - -/// Returns the environment variable which the link library lookup path -/// resides in for this platform. -fn link_lib_path_var() -> &'static str { - if cfg!(target_env = "msvc") { "LIB" } else { "LIBRARY_PATH" } -} - -/// Parses the `link_lib_path_var()` environment variable, returning a list of -/// paths that are members of this lookup path. -fn link_lib_path() -> Vec { - let var = match env::var_os(link_lib_path_var()) { - Some(v) => v, - None => return vec![], - }; - env::split_paths(&var).collect() -} - pub struct TimeIt(bool, Instant); /// Returns an RAII structure that prints out how long it took to drop. 
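Taken together, the bootstrap change above boils down to handing the LLVM libdir to
rustc as an explicit linker search path instead of exporting it through
LIBRARY_PATH/LIB. A minimal sketch of that substitution (the helper name and the
libdir path here are illustrative only, not bootstrap API; the real value comes from
`llvm-config --libdir` as in the diff):

    // Sketch only: mirrors the strings built in test.rs and cargo.rs above.
    fn llvm_link_arg(llvm_libdir: &str, is_msvc: bool) -> String {
        if is_msvc {
            format!("-Clink-arg=-LIBPATH:{llvm_libdir}")
        } else {
            format!("-Clink-arg=-L{llvm_libdir}")
        }
    }

    fn main() {
        // e.g. appended to RUSTFLAGS: "-Clink-arg=-L/usr/lib/llvm/lib"
        println!("{}", llvm_link_arg("/usr/lib/llvm/lib", false));
    }

A `-L` passed on the link line this way does not rely on the C compiler driver
consulting environment variables, which is what breaks with clang in cross
configurations.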
From e2fcdb8d360acd561e2b6425e41009d9fff2d989 Mon Sep 17 00:00:00 2001 From: Noah Lev Date: Sun, 12 Jan 2025 20:09:51 -0800 Subject: [PATCH 038/142] rustdoc: Extract `AttributesExt::cfg` trait method as function It's never overridden, so it shouldn't be on the trait. --- src/librustdoc/clean/inline.rs | 17 ++-- src/librustdoc/clean/mod.rs | 2 +- src/librustdoc/clean/types.rs | 147 +++++++++++++++++---------------- src/librustdoc/doctest/rust.rs | 5 +- 4 files changed, 88 insertions(+), 83 deletions(-) diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 019a888bd2f62..a17e0b1e4ccb1 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -17,12 +17,12 @@ use rustc_span::symbol::{Symbol, sym}; use thin_vec::{ThinVec, thin_vec}; use tracing::{debug, trace}; -use super::Item; +use super::{Item, extract_cfg_from_attrs}; use crate::clean::{ - self, Attributes, AttributesExt, ImplKind, ItemId, Type, clean_bound_vars, clean_generics, - clean_impl_item, clean_middle_assoc_item, clean_middle_field, clean_middle_ty, - clean_poly_fn_sig, clean_trait_ref_with_constraints, clean_ty, clean_ty_alias_inner_type, - clean_ty_generics, clean_variant_def, utils, + self, Attributes, ImplKind, ItemId, Type, clean_bound_vars, clean_generics, clean_impl_item, + clean_middle_assoc_item, clean_middle_field, clean_middle_ty, clean_poly_fn_sig, + clean_trait_ref_with_constraints, clean_ty, clean_ty_alias_inner_type, clean_ty_generics, + clean_variant_def, utils, }; use crate::core::DocContext; use crate::formats::item_type::ItemType; @@ -408,10 +408,13 @@ pub(crate) fn merge_attrs( } else { Attributes::from_hir(&both) }, - both.cfg(cx.tcx, &cx.cache.hidden_cfg), + extract_cfg_from_attrs(&both[..], cx.tcx, &cx.cache.hidden_cfg), ) } else { - (Attributes::from_hir(old_attrs), old_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg)) + ( + Attributes::from_hir(old_attrs), + extract_cfg_from_attrs(&old_attrs[..], cx.tcx, &cx.cache.hidden_cfg), + ) } } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 2ed2df799dd1c..a36927c47ebab 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -200,7 +200,7 @@ fn generate_item_with_correct_attrs( target_attrs.iter().map(|attr| (Cow::Borrowed(attr), None)).collect() }; - let cfg = attrs.cfg(cx.tcx, &cx.cache.hidden_cfg); + let cfg = extract_cfg_from_attrs(&attrs[..], cx.tcx, &cx.cache.hidden_cfg); let attrs = Attributes::from_hir_iter(attrs.iter().map(|(attr, did)| (&**attr, *did)), false); let name = renamed.or(Some(name)); diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index dcee96978d2b5..704caeb0025cf 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -479,7 +479,7 @@ impl Item { name, kind, Attributes::from_hir(hir_attrs), - hir_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg), + extract_cfg_from_attrs(hir_attrs, cx.tcx, &cx.cache.hidden_cfg), ) } @@ -990,95 +990,98 @@ pub(crate) trait AttributesExt { fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_>; fn iter(&self) -> Self::Attributes<'_>; +} - fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet) -> Option> { - let sess = tcx.sess; - let doc_cfg_active = tcx.features().doc_cfg(); - let doc_auto_cfg_active = tcx.features().doc_auto_cfg(); - - fn single(it: T) -> Option { - let mut iter = it.into_iter(); - let item = iter.next()?; - if iter.next().is_some() { - return None; - } - Some(item) +pub fn extract_cfg_from_attrs( + attrs: &A, + tcx: TyCtxt<'_>, + hidden_cfg: 
&FxHashSet, +) -> Option> { + let sess = tcx.sess; + let doc_cfg_active = tcx.features().doc_cfg(); + let doc_auto_cfg_active = tcx.features().doc_auto_cfg(); + + fn single(it: T) -> Option { + let mut iter = it.into_iter(); + let item = iter.next()?; + if iter.next().is_some() { + return None; } + Some(item) + } - let mut cfg = if doc_cfg_active || doc_auto_cfg_active { - let mut doc_cfg = self + let mut cfg = if doc_cfg_active || doc_auto_cfg_active { + let mut doc_cfg = attrs + .iter() + .filter(|attr| attr.has_name(sym::doc)) + .flat_map(|attr| attr.meta_item_list().unwrap_or_default()) + .filter(|attr| attr.has_name(sym::cfg)) + .peekable(); + if doc_cfg.peek().is_some() && doc_cfg_active { + doc_cfg + .filter_map(|attr| Cfg::parse(&attr).ok()) + .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg) + } else if doc_auto_cfg_active { + // If there is no `doc(cfg())`, then we retrieve the `cfg()` attributes (because + // `doc(cfg())` overrides `cfg()`). + attrs .iter() - .filter(|attr| attr.has_name(sym::doc)) - .flat_map(|attr| attr.meta_item_list().unwrap_or_default()) .filter(|attr| attr.has_name(sym::cfg)) - .peekable(); - if doc_cfg.peek().is_some() && doc_cfg_active { - doc_cfg - .filter_map(|attr| Cfg::parse(&attr).ok()) - .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg) - } else if doc_auto_cfg_active { - // If there is no `doc(cfg())`, then we retrieve the `cfg()` attributes (because - // `doc(cfg())` overrides `cfg()`). - self.iter() - .filter(|attr| attr.has_name(sym::cfg)) - .filter_map(|attr| single(attr.meta_item_list()?)) - .filter_map(|attr| { - Cfg::parse_without(attr.meta_item()?, hidden_cfg).ok().flatten() - }) - .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg) - } else { - Cfg::True - } + .filter_map(|attr| single(attr.meta_item_list()?)) + .filter_map(|attr| Cfg::parse_without(attr.meta_item()?, hidden_cfg).ok().flatten()) + .fold(Cfg::True, |cfg, new_cfg| cfg & new_cfg) } else { Cfg::True - }; - - for attr in self.iter() { - // #[doc] - if attr.doc_str().is_none() && attr.has_name(sym::doc) { - // #[doc(...)] - if let Some(list) = attr.meta_item_list() { - for item in list { - // #[doc(hidden)] - if !item.has_name(sym::cfg) { - continue; - } - // #[doc(cfg(...))] - if let Some(cfg_mi) = item - .meta_item() - .and_then(|item| rustc_expand::config::parse_cfg(item, sess)) - { - match Cfg::parse(cfg_mi) { - Ok(new_cfg) => cfg &= new_cfg, - Err(e) => { - sess.dcx().span_err(e.span, e.msg); - } + } + } else { + Cfg::True + }; + + for attr in attrs.iter() { + // #[doc] + if attr.doc_str().is_none() && attr.has_name(sym::doc) { + // #[doc(...)] + if let Some(list) = attr.meta_item_list() { + for item in list { + // #[doc(hidden)] + if !item.has_name(sym::cfg) { + continue; + } + // #[doc(cfg(...))] + if let Some(cfg_mi) = item + .meta_item() + .and_then(|item| rustc_expand::config::parse_cfg(item, sess)) + { + match Cfg::parse(cfg_mi) { + Ok(new_cfg) => cfg &= new_cfg, + Err(e) => { + sess.dcx().span_err(e.span, e.msg); } } } } } } + } - // treat #[target_feature(enable = "feat")] attributes as if they were - // #[doc(cfg(target_feature = "feat"))] attributes as well - for attr in self.lists(sym::target_feature) { - if attr.has_name(sym::enable) { - if attr.value_str().is_some() { - // Clone `enable = "feat"`, change to `target_feature = "feat"`. - // Unwrap is safe because `value_str` succeeded above. 
- let mut meta = attr.meta_item().unwrap().clone(); - meta.path = ast::Path::from_ident(Ident::with_dummy_span(sym::target_feature)); - - if let Ok(feat_cfg) = Cfg::parse(&ast::MetaItemInner::MetaItem(meta)) { - cfg &= feat_cfg; - } + // treat #[target_feature(enable = "feat")] attributes as if they were + // #[doc(cfg(target_feature = "feat"))] attributes as well + for attr in attrs.lists(sym::target_feature) { + if attr.has_name(sym::enable) { + if attr.value_str().is_some() { + // Clone `enable = "feat"`, change to `target_feature = "feat"`. + // Unwrap is safe because `value_str` succeeded above. + let mut meta = attr.meta_item().unwrap().clone(); + meta.path = ast::Path::from_ident(Ident::with_dummy_span(sym::target_feature)); + + if let Ok(feat_cfg) = Cfg::parse(&ast::MetaItemInner::MetaItem(meta)) { + cfg &= feat_cfg; } } } - - if cfg == Cfg::True { None } else { Some(Arc::new(cfg)) } } + + if cfg == Cfg::True { None } else { Some(Arc::new(cfg)) } } impl AttributesExt for [hir::Attribute] { diff --git a/src/librustdoc/doctest/rust.rs b/src/librustdoc/doctest/rust.rs index 0903baddabebe..5986694a25793 100644 --- a/src/librustdoc/doctest/rust.rs +++ b/src/librustdoc/doctest/rust.rs @@ -13,8 +13,7 @@ use rustc_span::source_map::SourceMap; use rustc_span::{BytePos, DUMMY_SP, FileName, Pos, Span}; use super::{DocTestVisitor, ScrapedDocTest}; -use crate::clean::Attributes; -use crate::clean::types::AttributesExt; +use crate::clean::{Attributes, extract_cfg_from_attrs}; use crate::html::markdown::{self, ErrorCodes, LangString, MdRelLine}; struct RustCollector { @@ -97,7 +96,7 @@ impl HirCollector<'_> { nested: F, ) { let ast_attrs = self.tcx.hir().attrs(self.tcx.local_def_id_to_hir_id(def_id)); - if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) { + if let Some(ref cfg) = extract_cfg_from_attrs(ast_attrs, self.tcx, &FxHashSet::default()) { if !cfg.matches(&self.tcx.sess.psess, Some(self.tcx.features())) { return; } From 5ccd6c04e5b0e81c8a0fdd8c0e9363be49ef053d Mon Sep 17 00:00:00 2001 From: Noah Lev Date: Sun, 12 Jan 2025 20:19:20 -0800 Subject: [PATCH 039/142] rustdoc: Extract `AttributesExt::lists` trait method as function The two implementations were identical, so there's no need to use a trait method. --- src/librustdoc/clean/mod.rs | 10 ++++---- src/librustdoc/clean/types.rs | 43 +++++++++++++---------------------- src/librustdoc/visit_ast.rs | 6 ++--- 3 files changed, 24 insertions(+), 35 deletions(-) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index a36927c47ebab..ed0c566d53df8 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -186,8 +186,7 @@ fn generate_item_with_correct_attrs( // For glob re-exports the item may or may not exist to be re-exported (potentially the cfgs // on the path up until the glob can be removed, and only cfgs on the globbed item itself // matter), for non-inlined re-exports see #85043. 
- let is_inline = inline::load_attrs(cx, import_id.to_def_id()) - .lists(sym::doc) + let is_inline = hir_attr_lists(inline::load_attrs(cx, import_id.to_def_id()), sym::doc) .get_word_attr(sym::inline) .is_some() || (is_glob_import(cx.tcx, import_id) @@ -979,13 +978,14 @@ fn clean_proc_macro<'tcx>( ) -> ItemKind { let attrs = cx.tcx.hir().attrs(item.hir_id()); if kind == MacroKind::Derive - && let Some(derive_name) = attrs.lists(sym::proc_macro_derive).find_map(|mi| mi.ident()) + && let Some(derive_name) = + hir_attr_lists(attrs, sym::proc_macro_derive).find_map(|mi| mi.ident()) { *name = derive_name.name; } let mut helpers = Vec::new(); - for mi in attrs.lists(sym::proc_macro_derive) { + for mi in hir_attr_lists(attrs, sym::proc_macro_derive) { if !mi.has_name(sym::attributes) { continue; } @@ -2985,7 +2985,7 @@ fn clean_use_statement_inner<'tcx>( let visibility = cx.tcx.visibility(import.owner_id); let attrs = cx.tcx.hir().attrs(import.hir_id()); - let inline_attr = attrs.lists(sym::doc).get_word_attr(sym::inline); + let inline_attr = hir_attr_lists(attrs, sym::doc).get_word_attr(sym::inline); let pub_underscore = visibility.is_public() && name == kw::Underscore; let current_mod = cx.tcx.parent_module_from_def_id(import.owner_id.def_id); let import_def_id = import.owner_id.def_id; diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 704caeb0025cf..70be5742c08c2 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -980,18 +980,24 @@ pub(crate) struct Module { } pub(crate) trait AttributesExt { - type AttributeIterator<'a>: Iterator - where - Self: 'a; type Attributes<'a>: Iterator where Self: 'a; - fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_>; - fn iter(&self) -> Self::Attributes<'_>; } +pub fn hir_attr_lists( + attrs: &A, + name: Symbol, +) -> impl Iterator + use<'_, A> { + attrs + .iter() + .filter(move |attr| attr.has_name(name)) + .filter_map(ast::attr::AttributeExt::meta_item_list) + .flatten() +} + pub fn extract_cfg_from_attrs( attrs: &A, tcx: TyCtxt<'_>, @@ -1066,7 +1072,7 @@ pub fn extract_cfg_from_attrs( // treat #[target_feature(enable = "feat")] attributes as if they were // #[doc(cfg(target_feature = "feat"))] attributes as well - for attr in attrs.lists(sym::target_feature) { + for attr in hir_attr_lists(attrs, sym::target_feature) { if attr.has_name(sym::enable) { if attr.value_str().is_some() { // Clone `enable = "feat"`, change to `target_feature = "feat"`. 
@@ -1085,38 +1091,19 @@ pub fn extract_cfg_from_attrs( } impl AttributesExt for [hir::Attribute] { - type AttributeIterator<'a> = impl Iterator + 'a; type Attributes<'a> = impl Iterator + 'a; - fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_> { - self.iter() - .filter(move |attr| attr.has_name(name)) - .filter_map(ast::attr::AttributeExt::meta_item_list) - .flatten() - } - fn iter(&self) -> Self::Attributes<'_> { self.iter() } } impl AttributesExt for [(Cow<'_, hir::Attribute>, Option)] { - type AttributeIterator<'a> - = impl Iterator + 'a - where - Self: 'a; type Attributes<'a> = impl Iterator + 'a where Self: 'a; - fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_> { - AttributesExt::iter(self) - .filter(move |attr| attr.has_name(name)) - .filter_map(hir::Attribute::meta_item_list) - .flatten() - } - fn iter(&self) -> Self::Attributes<'_> { self.iter().map(move |(attr, _)| match attr { Cow::Borrowed(attr) => *attr, @@ -1188,7 +1175,7 @@ pub(crate) struct Attributes { impl Attributes { pub(crate) fn lists(&self, name: Symbol) -> impl Iterator + '_ { - self.other_attrs.lists(name) + hir_attr_lists(&self.other_attrs[..], name) } pub(crate) fn has_doc_flag(&self, flag: Symbol) -> bool { @@ -1255,7 +1242,9 @@ impl Attributes { pub(crate) fn get_doc_aliases(&self) -> Box<[Symbol]> { let mut aliases = FxIndexSet::default(); - for attr in self.other_attrs.lists(sym::doc).filter(|a| a.has_name(sym::alias)) { + for attr in + hir_attr_lists(&self.other_attrs[..], sym::doc).filter(|a| a.has_name(sym::alias)) + { if let Some(values) = attr.meta_item_list() { for l in values { if let Some(lit) = l.lit() diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 794ea54b3ef24..d46b0dee36cb2 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -19,7 +19,7 @@ use tracing::debug; use crate::clean::cfg::Cfg; use crate::clean::utils::{inherits_doc_hidden, should_ignore_res}; -use crate::clean::{AttributesExt, NestedAttributesExt, reexport_chain}; +use crate::clean::{NestedAttributesExt, hir_attr_lists, reexport_chain}; use crate::core; /// This module is used to store stuff from Rust's AST in a more convenient @@ -247,8 +247,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { let document_hidden = self.cx.render_options.document_hidden; let use_attrs = tcx.hir().attrs(tcx.local_def_id_to_hir_id(def_id)); // Don't inline `doc(hidden)` imports so they can be stripped at a later stage. - let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline) - || (document_hidden && use_attrs.lists(sym::doc).has_word(sym::hidden)); + let is_no_inline = hir_attr_lists(use_attrs, sym::doc).has_word(sym::no_inline) + || (document_hidden && hir_attr_lists(use_attrs, sym::doc).has_word(sym::hidden)); if is_no_inline { return false; From f92b32c5f64ea900f2cd6936f648f407daee0d25 Mon Sep 17 00:00:00 2001 From: Noah Lev Date: Sun, 12 Jan 2025 20:31:53 -0800 Subject: [PATCH 040/142] rustdoc: Eliminate `AttributesExt` The new code is more explicit and avoids trait magic that added needless complexity to this part of rustdoc. 
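Concretely, the shape of the change is to drop the `AttributesExt` trait (and its
associated iterator types) in favour of free functions that are generic over any
clonable iterator of attributes. A minimal sketch with stand-in types (`Attr` and
`Cfg` are placeholders here, not rustdoc's real `hir::Attribute`/`Cfg`):

    struct Attr;
    struct Cfg;

    // Stands in for `extract_cfg_from_attrs`: any `Clone` iterator of attribute
    // references will do, so slices and (attr, id) pair lists no longer each
    // need a trait impl.
    fn extract_cfg<'a, I>(attrs: I) -> Option<Cfg>
    where
        I: Iterator<Item = &'a Attr> + Clone,
    {
        // The real function walks `attrs.clone()` several times; this sketch
        // only checks that at least one attribute is present.
        attrs.clone().next().map(|_| Cfg)
    }

    fn main() {
        let attrs = [Attr, Attr];
        assert!(extract_cfg(attrs.iter()).is_some());
    }

The call sites were already migrated to `extract_cfg_from_attrs(attrs.iter(), ...)`
in the two preceding patches; this one removes the now-empty trait.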
--- src/librustdoc/clean/inline.rs | 4 +-- src/librustdoc/clean/mod.rs | 10 +++++-- src/librustdoc/clean/types.rs | 51 +++++++--------------------------- src/librustdoc/doctest/rust.rs | 4 ++- 4 files changed, 23 insertions(+), 46 deletions(-) diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index a17e0b1e4ccb1..3d51ab1967d44 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -408,12 +408,12 @@ pub(crate) fn merge_attrs( } else { Attributes::from_hir(&both) }, - extract_cfg_from_attrs(&both[..], cx.tcx, &cx.cache.hidden_cfg), + extract_cfg_from_attrs(both.iter(), cx.tcx, &cx.cache.hidden_cfg), ) } else { ( Attributes::from_hir(old_attrs), - extract_cfg_from_attrs(&old_attrs[..], cx.tcx, &cx.cache.hidden_cfg), + extract_cfg_from_attrs(old_attrs.iter(), cx.tcx, &cx.cache.hidden_cfg), ) } } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index ed0c566d53df8..ed064768c70c6 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -198,8 +198,14 @@ fn generate_item_with_correct_attrs( // We only keep the item's attributes. target_attrs.iter().map(|attr| (Cow::Borrowed(attr), None)).collect() }; - - let cfg = extract_cfg_from_attrs(&attrs[..], cx.tcx, &cx.cache.hidden_cfg); + let cfg = extract_cfg_from_attrs( + attrs.iter().map(move |(attr, _)| match attr { + Cow::Borrowed(attr) => *attr, + Cow::Owned(attr) => attr, + }), + cx.tcx, + &cx.cache.hidden_cfg, + ); let attrs = Attributes::from_hir_iter(attrs.iter().map(|(attr, did)| (&**attr, *did)), false); let name = renamed.or(Some(name)); diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 70be5742c08c2..edec6777c2e18 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -1,4 +1,3 @@ -use std::borrow::Cow; use std::hash::Hash; use std::path::PathBuf; use std::sync::{Arc, OnceLock as OnceCell}; @@ -479,7 +478,7 @@ impl Item { name, kind, Attributes::from_hir(hir_attrs), - extract_cfg_from_attrs(hir_attrs, cx.tcx, &cx.cache.hidden_cfg), + extract_cfg_from_attrs(hir_attrs.iter(), cx.tcx, &cx.cache.hidden_cfg), ) } @@ -979,27 +978,19 @@ pub(crate) struct Module { pub(crate) span: Span, } -pub(crate) trait AttributesExt { - type Attributes<'a>: Iterator - where - Self: 'a; - - fn iter(&self) -> Self::Attributes<'_>; -} - -pub fn hir_attr_lists( - attrs: &A, +pub(crate) fn hir_attr_lists<'a, I: IntoIterator>( + attrs: I, name: Symbol, -) -> impl Iterator + use<'_, A> { +) -> impl Iterator + use<'a, I> { attrs - .iter() + .into_iter() .filter(move |attr| attr.has_name(name)) .filter_map(ast::attr::AttributeExt::meta_item_list) .flatten() } -pub fn extract_cfg_from_attrs( - attrs: &A, +pub(crate) fn extract_cfg_from_attrs<'a, I: Iterator + Clone>( + attrs: I, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet, ) -> Option> { @@ -1018,7 +1009,7 @@ pub fn extract_cfg_from_attrs( let mut cfg = if doc_cfg_active || doc_auto_cfg_active { let mut doc_cfg = attrs - .iter() + .clone() .filter(|attr| attr.has_name(sym::doc)) .flat_map(|attr| attr.meta_item_list().unwrap_or_default()) .filter(|attr| attr.has_name(sym::cfg)) @@ -1031,7 +1022,7 @@ pub fn extract_cfg_from_attrs( // If there is no `doc(cfg())`, then we retrieve the `cfg()` attributes (because // `doc(cfg())` overrides `cfg()`). 
attrs - .iter() + .clone() .filter(|attr| attr.has_name(sym::cfg)) .filter_map(|attr| single(attr.meta_item_list()?)) .filter_map(|attr| Cfg::parse_without(attr.meta_item()?, hidden_cfg).ok().flatten()) @@ -1043,7 +1034,7 @@ pub fn extract_cfg_from_attrs( Cfg::True }; - for attr in attrs.iter() { + for attr in attrs.clone() { // #[doc] if attr.doc_str().is_none() && attr.has_name(sym::doc) { // #[doc(...)] @@ -1090,28 +1081,6 @@ pub fn extract_cfg_from_attrs( if cfg == Cfg::True { None } else { Some(Arc::new(cfg)) } } -impl AttributesExt for [hir::Attribute] { - type Attributes<'a> = impl Iterator + 'a; - - fn iter(&self) -> Self::Attributes<'_> { - self.iter() - } -} - -impl AttributesExt for [(Cow<'_, hir::Attribute>, Option)] { - type Attributes<'a> - = impl Iterator + 'a - where - Self: 'a; - - fn iter(&self) -> Self::Attributes<'_> { - self.iter().map(move |(attr, _)| match attr { - Cow::Borrowed(attr) => *attr, - Cow::Owned(attr) => attr, - }) - } -} - pub(crate) trait NestedAttributesExt { /// Returns `true` if the attribute list contains a specific `word` fn has_word(self, word: Symbol) -> bool diff --git a/src/librustdoc/doctest/rust.rs b/src/librustdoc/doctest/rust.rs index 5986694a25793..bd292efeb7ea2 100644 --- a/src/librustdoc/doctest/rust.rs +++ b/src/librustdoc/doctest/rust.rs @@ -96,7 +96,9 @@ impl HirCollector<'_> { nested: F, ) { let ast_attrs = self.tcx.hir().attrs(self.tcx.local_def_id_to_hir_id(def_id)); - if let Some(ref cfg) = extract_cfg_from_attrs(ast_attrs, self.tcx, &FxHashSet::default()) { + if let Some(ref cfg) = + extract_cfg_from_attrs(ast_attrs.iter(), self.tcx, &FxHashSet::default()) + { if !cfg.matches(&self.tcx.sess.psess, Some(self.tcx.features())) { return; } From 3d547641218cc24fecc3d745550c973d12092877 Mon Sep 17 00:00:00 2001 From: Kajetan Puchalski Date: Tue, 3 Dec 2024 13:46:26 +0000 Subject: [PATCH 041/142] ci: Enable opt-dist for dist-aarch64-linux builds Enable optimised AArch64 dist builds with the opt-dist pipeline. For the time being, disable bolt on aarch64 due to upstream bolt bugs. 
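The aarch64 gating itself is small and lives in opt-dist; a simplified sketch of the
two decisions taken below (the function name is illustrative and the host triples
are just example values):

    // Sketch of the policy in this patch: no BOLT at all on aarch64 hosts for
    // now, and a non-cdsplit split strategy once BOLT is re-enabled there.
    fn bolt_policy(host_triple: &str) -> (bool, &'static str) {
        let is_aarch64 = host_triple.starts_with("aarch64");
        let use_bolt = !is_aarch64;
        let split_strategy = if is_aarch64 { "profile2" } else { "cdsplit" };
        (use_bolt, split_strategy)
    }

    fn main() {
        assert_eq!(bolt_policy("aarch64-unknown-linux-gnu"), (false, "profile2"));
        assert_eq!(bolt_policy("x86_64-unknown-linux-gnu"), (true, "cdsplit"));
    }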
--- .../dist-aarch64-linux/Dockerfile | 5 +++- src/tools/opt-dist/src/bolt.rs | 13 +++++++-- src/tools/opt-dist/src/main.rs | 28 ++++++++++++++----- 3 files changed, 36 insertions(+), 10 deletions(-) diff --git a/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile b/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile index 8b0fb0710d4dc..6f33c63218182 100644 --- a/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile +++ b/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile @@ -91,9 +91,12 @@ ENV RUST_CONFIGURE_ARGS \ --set rust.debug-assertions=false \ --set rust.jemalloc \ --set rust.use-lld=true \ + --set rust.lto=thin \ --set rust.codegen-units=1 -ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS +ENV SCRIPT python3 ../x.py build --set rust.debug=true opt-dist && \ + ./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \ + --host $HOSTS --target $HOSTS --include-default-paths build-manifest bootstrap ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=clang ENV LIBCURL_NO_PKG_CONFIG 1 diff --git a/src/tools/opt-dist/src/bolt.rs b/src/tools/opt-dist/src/bolt.rs index d9efbb8d880e7..0f1fda38115c7 100644 --- a/src/tools/opt-dist/src/bolt.rs +++ b/src/tools/opt-dist/src/bolt.rs @@ -1,6 +1,7 @@ use anyhow::Context; use camino::{Utf8Path, Utf8PathBuf}; +use crate::environment::Environment; use crate::exec::cmd; use crate::training::BoltProfile; use crate::utils::io::copy_file; @@ -45,13 +46,21 @@ pub fn with_bolt_instrumented anyhow::Result, R>( } /// Optimizes the file at `path` with BOLT in-place using the given `profile`. -pub fn bolt_optimize(path: &Utf8Path, profile: &BoltProfile) -> anyhow::Result<()> { +pub fn bolt_optimize( + path: &Utf8Path, + profile: &BoltProfile, + env: &Environment, +) -> anyhow::Result<()> { // Copy the artifact to a new location, so that we do not use the same input and output file. // BOLT cannot handle optimizing when the input and output is the same file, because it performs // in-place patching. let temp_path = tempfile::NamedTempFile::new()?.into_temp_path(); copy_file(path, &temp_path)?; + // FIXME: cdsplit in llvm-bolt is currently broken on AArch64, drop this once it's fixed upstream + let split_strategy = + if env.host_tuple().starts_with("aarch64") { "profile2" } else { "cdsplit" }; + cmd(&["llvm-bolt"]) .arg(temp_path.display()) .arg("-data") @@ -65,7 +74,7 @@ pub fn bolt_optimize(path: &Utf8Path, profile: &BoltProfile) -> anyhow::Result<( // Split function code into hot and code regions .arg("-split-functions") // Split using best available strategy (three-way splitting, Cache-Directed Sort) - .arg("-split-strategy=cdsplit") + .arg(format!("-split-strategy={split_strategy}")) // Split as many basic blocks as possible .arg("-split-all-cold") // Move jump tables to a separate section diff --git a/src/tools/opt-dist/src/main.rs b/src/tools/opt-dist/src/main.rs index c871200f3cfcb..aa05b5f0e76ca 100644 --- a/src/tools/opt-dist/src/main.rs +++ b/src/tools/opt-dist/src/main.rs @@ -146,6 +146,21 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> let target_triple = std::env::var("PGO_HOST").expect("PGO_HOST environment variable missing"); + let is_aarch64 = target_triple.starts_with("aarch64"); + + let mut skip_tests = vec![ + // Fails because of linker errors, as of June 2023. 
+ "tests/ui/process/nofile-limit.rs".to_string(), + ]; + + if is_aarch64 { + skip_tests.extend([ + // Those tests fail only inside of Docker on aarch64, as of December 2024 + "tests/ui/consts/promoted_running_out_of_memory_issue-130687.rs".to_string(), + "tests/ui/consts/large_const_alloc.rs".to_string(), + ]); + } + let checkout_dir = Utf8PathBuf::from("/checkout"); let env = EnvironmentBuilder::default() .host_tuple(target_triple) @@ -155,11 +170,9 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> .artifact_dir(Utf8PathBuf::from("/tmp/tmp-multistage/opt-artifacts")) .build_dir(checkout_dir.join("obj")) .shared_llvm(true) - .use_bolt(true) - .skipped_tests(vec![ - // Fails because of linker errors, as of June 2023. - "tests/ui/process/nofile-limit.rs".to_string(), - ]) + // FIXME: Enable bolt for aarch64 once it's fixed upstream. Broken as of December 2024. + .use_bolt(!is_aarch64) + .skipped_tests(skip_tests) .build()?; (env, shared.build_args) @@ -304,7 +317,8 @@ fn execute_pipeline( // the final dist build. However, when BOLT optimizes an artifact, it does so *in-place*, // therefore it will actually optimize all the hard links, which means that the final // packaged `libLLVM.so` file *will* be BOLT optimized. - bolt_optimize(&llvm_lib, &llvm_profile).context("Could not optimize LLVM with BOLT")?; + bolt_optimize(&llvm_lib, &llvm_profile, env) + .context("Could not optimize LLVM with BOLT")?; let rustc_lib = io::find_file_in_dir(&libdir, "librustc_driver", ".so")?; @@ -319,7 +333,7 @@ fn execute_pipeline( print_free_disk_space()?; // Now optimize the library with BOLT. - bolt_optimize(&rustc_lib, &rustc_profile) + bolt_optimize(&rustc_lib, &rustc_profile, env) .context("Could not optimize rustc with BOLT")?; // LLVM is not being cleared here, we want to use the BOLT-optimized LLVM From ebd5ce1828dd2dfdef35786e1374c7ca30f1b0c9 Mon Sep 17 00:00:00 2001 From: binarycat Date: Thu, 9 Jan 2025 14:16:05 -0600 Subject: [PATCH 042/142] for purely return-type based searches, deprioritize clone-like functions A clone-like function in a function that takes as an argument the type that it returns. However, functions that return a type variable are not counted as clone-line. Because we're not unifying the whole function at once, a function like `U -> T` would otherwise be counted as "clone-like" because the generics will just unify with anything when done seperatly. Co-authored-by: Michael Howell --- src/librustdoc/html/static/js/search.js | 46 +++++++++++++++++++++++ tests/rustdoc-js-std/return-based-sort.js | 30 +++++++++++++++ 2 files changed, 76 insertions(+) create mode 100644 tests/rustdoc-js-std/return-based-sort.js diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index 5fd5eb1447843..0a0550ab82f9b 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -2717,9 +2717,26 @@ class DocSearch { const normalizedUserQuery = parsedQuery.userQuery.toLowerCase(); const isMixedCase = normalizedUserQuery !== userQuery; const result_list = []; + const isReturnTypeQuery = parsedQuery.elems.length === 0 || + typeInfo === "returned"; for (const result of results.values()) { result.item = this.searchIndex[result.id]; result.word = this.searchIndex[result.id].word; + if (isReturnTypeQuery) { + // we are doing a return-type based search, + // deprioritize "clone-like" results, + // ie. functions that also take the queried type as an argument. 
+ const hasType = result.item && result.item.type; + if (!hasType) { + continue; + } + const inputs = result.item.type.inputs; + const where_clause = result.item.type.where_clause; + if (containsTypeFromQuery(inputs, where_clause)) { + result.path_dist *= 100; + result.dist *= 100; + } + } result_list.push(result); } @@ -3540,6 +3557,35 @@ class DocSearch { return false; } + /** + * This function checks if the given list contains any + * (non-generic) types mentioned in the query. + * + * @param {Array} list - A list of function types. + * @param {[FunctionType]} where_clause - Trait bounds for generic items. + */ + function containsTypeFromQuery(list, where_clause) { + if (!list) return false; + for (const ty of parsedQuery.returned) { + // negative type ids are generics + if (ty.id < 0) { + continue; + } + if (checkIfInList(list, ty, where_clause, null, 0)) { + return true; + } + } + for (const ty of parsedQuery.elems) { + if (ty.id < 0) { + continue; + } + if (checkIfInList(list, ty, where_clause, null, 0)) { + return true; + } + } + return false; + } + /** * This function checks if the object (`row`) matches the given type (`elem`) and its * generics (if any). diff --git a/tests/rustdoc-js-std/return-based-sort.js b/tests/rustdoc-js-std/return-based-sort.js new file mode 100644 index 0000000000000..30baf1cd36e58 --- /dev/null +++ b/tests/rustdoc-js-std/return-based-sort.js @@ -0,0 +1,30 @@ +// test that `clone`-like functions are sorted lower when +// a search is based soley on return type + +const FILTER_CRATE = "core"; + +const EXPECTED = [ + { + 'query': '-> AllocError', + 'others': [ + { 'path': 'core::alloc::Allocator', 'name': 'allocate' }, + { 'path': 'core::alloc::AllocError', 'name': 'clone' }, + ], + }, + { + 'query': 'AllocError', + 'returned': [ + { 'path': 'core::alloc::Allocator', 'name': 'allocate' }, + { 'path': 'core::alloc::AllocError', 'name': 'clone' }, + ], + }, + { + 'query': '-> &str', + 'others': [ + // type_name_of_val should not be consider clone-like + { 'path': 'core::any', 'name': 'type_name_of_val' }, + // this returns `Option<&str>`, and thus should be sorted lower + { 'path': 'core::str::Split', 'name': 'next' }, + ], + }, +] From 377dbc96a6d5b7d22b75d4bd98fd00d9e1bb194e Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Tue, 14 Jan 2025 01:50:53 +0000 Subject: [PATCH 043/142] Leak check in impossible_predicates to avoid monomorphizing impossible instances --- .../rustc_trait_selection/src/traits/mod.rs | 15 +++++-- .../impossible-method-modulo-binders-2.rs | 39 ++++++++++++++++++ .../impossible-method-modulo-binders.rs | 40 +++++++++++++++++++ 3 files changed, 91 insertions(+), 3 deletions(-) create mode 100644 tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs create mode 100644 tests/ui/traits/trait-upcasting/impossible-method-modulo-binders.rs diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index 1dcd0d0dfb830..da16a74209965 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -714,9 +714,18 @@ pub fn impossible_predicates<'tcx>(tcx: TyCtxt<'tcx>, predicates: Vec( diff --git a/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs b/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs new file mode 100644 index 0000000000000..bc43360553194 --- /dev/null +++ b/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs @@ -0,0 +1,39 @@ +//@ build-pass +//@ 
revisions: current next +//@ ignore-compare-mode-next-solver (explicit revisions) +//@[next] compile-flags: -Znext-solver + +#![allow(coherence_leak_check)] + +type A = fn(&'static ()); +type B = fn(&()); + +trait Bound: From> { +} +impl Bound for String {} + +trait Trt { + fn __(&self, x: T) where T: Bound { + T::from(()); + } +} + +impl Trt for S {} + +type GetAssoc = ::Ty; + +trait WithAssoc { + type Ty; +} + +impl WithAssoc for B { + type Ty = String; +} + +impl WithAssoc for A { + type Ty = (); +} + +fn main() { + let x: &'static dyn Trt = &(); +} diff --git a/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders.rs b/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders.rs new file mode 100644 index 0000000000000..63ad1c0a06082 --- /dev/null +++ b/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders.rs @@ -0,0 +1,40 @@ +//@ build-pass +//@ revisions: current next +//@ ignore-compare-mode-next-solver (explicit revisions) +//@[next] compile-flags: -Znext-solver + +trait Foo {} +impl Foo for fn(&'static ()) {} + +trait Bar { + type Assoc: Default; +} +impl Bar for T { + type Assoc = usize; +} +impl Bar for fn(&()) { + type Assoc = (); +} + +fn needs_foo() -> usize { + needs_bar::() +} + +fn needs_bar() -> ::Assoc { + Default::default() +} + +trait Evil { + fn bad(&self) + where + T: Foo, + { + needs_foo::(); + } +} + +impl Evil for () {} + +fn main() { + let x: &dyn Evil = &(); +} From 2c4aee92fa65e74f23ad7853937db8d2c4bfa6c8 Mon Sep 17 00:00:00 2001 From: Aditya Kumar Date: Sun, 29 Dec 2024 07:01:54 +0000 Subject: [PATCH 044/142] Made `Path::name` only have item name rather than full name --- src/librustdoc/json/conversions.rs | 2 +- src/rustdoc-json-types/lib.rs | 2 +- tests/rustdoc-json/return_private.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs index 7f072aa7e2fc4..093f8eff8398f 100644 --- a/src/librustdoc/json/conversions.rs +++ b/src/librustdoc/json/conversions.rs @@ -620,7 +620,7 @@ impl FromClean for Type { impl FromClean for Path { fn from_clean(path: clean::Path, renderer: &JsonRenderer<'_>) -> Path { Path { - name: path.whole_name(), + name: path.last_opt().map_or(String::from(""), |s| String::from(s.as_str())), id: renderer.id_from_item_default(path.def_id().into()), args: path.segments.last().map(|args| Box::new(args.clone().args.into_json(renderer))), } diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs index 84b33e3d86068..5a99977ded5eb 100644 --- a/src/rustdoc-json-types/lib.rs +++ b/src/rustdoc-json-types/lib.rs @@ -30,7 +30,7 @@ pub type FxHashMap = HashMap; // re-export for use in src/librustdoc /// This integer is incremented with every breaking change to the API, /// and is returned along with the JSON blob as [`Crate::format_version`]. /// Consuming code should assert that this value matches the format version(s) that it supports. -pub const FORMAT_VERSION: u32 = 37; +pub const FORMAT_VERSION: u32 = 38; /// The root of the emitted JSON blob. 
/// diff --git a/tests/rustdoc-json/return_private.rs b/tests/rustdoc-json/return_private.rs index 0b341e2bda7d2..c238a536e0d8d 100644 --- a/tests/rustdoc-json/return_private.rs +++ b/tests/rustdoc-json/return_private.rs @@ -6,7 +6,7 @@ mod secret { } //@ has "$.index[*][?(@.name=='get_secret')].inner.function" -//@ is "$.index[*][?(@.name=='get_secret')].inner.function.sig.output.resolved_path.name" \"secret::Secret\" +//@ is "$.index[*][?(@.name=='get_secret')].inner.function.sig.output.resolved_path.name" \"Secret\" pub fn get_secret() -> secret::Secret { secret::Secret } From 94ffced66747a94554e0e0fa420eb21abdced095 Mon Sep 17 00:00:00 2001 From: lcnr Date: Tue, 14 Jan 2025 10:06:22 +0100 Subject: [PATCH 045/142] add note to test --- .../trait-upcasting/impossible-method-modulo-binders-2.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs b/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs index bc43360553194..c03b5145aa73b 100644 --- a/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs +++ b/tests/ui/traits/trait-upcasting/impossible-method-modulo-binders-2.rs @@ -2,7 +2,7 @@ //@ revisions: current next //@ ignore-compare-mode-next-solver (explicit revisions) //@[next] compile-flags: -Znext-solver - +// Regression test for #135462. #![allow(coherence_leak_check)] type A = fn(&'static ()); From 3a66b97b9bd4992c4743a8e4d2be55df127b3a25 Mon Sep 17 00:00:00 2001 From: Kleis Auke Wolthuizen Date: Tue, 14 Jan 2025 10:49:32 +0100 Subject: [PATCH 046/142] Remove remnant of asmjs See: https://github.com/rust-lang/rust/issues/131467#issuecomment-2529314603. --- src/ci/docker/scripts/emscripten.sh | 24 ------------------------ 1 file changed, 24 deletions(-) delete mode 100644 src/ci/docker/scripts/emscripten.sh diff --git a/src/ci/docker/scripts/emscripten.sh b/src/ci/docker/scripts/emscripten.sh deleted file mode 100644 index 8b2b39ee1629e..0000000000000 --- a/src/ci/docker/scripts/emscripten.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -set -ex - -hide_output() { - set +x - on_err=" -echo ERROR: An error was encountered with the build. -cat /tmp/build.log -exit 1 -" - trap "$on_err" ERR - bash -c "while true; do sleep 30; echo \$(date) - building ...; done" & - PING_LOOP_PID=$! 
- "$@" &> /tmp/build.log - trap - ERR - kill $PING_LOOP_PID - rm -f /tmp/build.log - set -x -} - -git clone https://github.com/emscripten-core/emsdk.git /emsdk-portable -cd /emsdk-portable -hide_output ./emsdk install 3.1.68 -./emsdk activate 3.1.68 From a907c56a77b6e9328441373c39517d38ef045ecf Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Fri, 13 Dec 2024 12:19:46 +0000 Subject: [PATCH 047/142] Add hir::HeaderSafety to make follow up commits simpler --- compiler/rustc_ast_lowering/src/delegation.rs | 4 +-- compiler/rustc_ast_lowering/src/item.rs | 6 ++++- compiler/rustc_hir/src/hir.rs | 25 +++++++++++++++++-- compiler/rustc_hir_analysis/src/collect.rs | 17 ++++++++----- compiler/rustc_hir_pretty/src/lib.rs | 8 ++++-- compiler/rustc_hir_typeck/src/coercion.rs | 7 +++--- compiler/rustc_hir_typeck/src/lib.rs | 2 +- .../src/middle/codegen_fn_attrs.rs | 3 +++ compiler/rustc_middle/src/ty/mod.rs | 1 + .../rustc_mir_build/src/check_unsafety.rs | 23 +++++++++++++---- compiler/rustc_resolve/src/late.rs | 7 +++--- src/librustdoc/clean/mod.rs | 2 +- src/librustdoc/clean/types.rs | 6 ++--- src/librustdoc/html/format.rs | 8 ++++++ src/librustdoc/html/render/print_item.rs | 2 +- src/tools/clippy/clippy_lints/src/derive.rs | 2 +- .../clippy_lints/src/doc/missing_headers.rs | 2 +- .../src/functions/misnamed_getters.rs | 2 +- .../src/functions/not_unsafe_ptr_arg_deref.rs | 6 ++--- .../clippy_lints/src/inherent_to_string.rs | 2 +- .../clippy/clippy_lints/src/methods/mod.rs | 2 +- .../clippy_lints/src/new_without_default.rs | 2 +- src/tools/clippy/clippy_lints/src/ptr.rs | 2 +- 23 files changed, 101 insertions(+), 40 deletions(-) diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index 758f1dc1c3558..4bd0c0b413fc7 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -198,7 +198,7 @@ impl<'hir> LoweringContext<'_, 'hir> { Asyncness::No => hir::IsAsync::NotAsync, }; hir::FnHeader { - safety: sig.safety, + safety: sig.safety.into(), constness: self.tcx.constness(sig_id), asyncness, abi: sig.abi, @@ -384,7 +384,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn generate_header_error(&self) -> hir::FnHeader { hir::FnHeader { - safety: hir::Safety::Safe, + safety: hir::Safety::Safe.into(), constness: hir::Constness::NotConst, asyncness: hir::IsAsync::NotAsync, abi: abi::Abi::Rust, diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 6fce911693863..9aaa0dfb69c74 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -1358,8 +1358,12 @@ impl<'hir> LoweringContext<'_, 'hir> { } else { hir::IsAsync::NotAsync }; + + let safety = self.lower_safety(h.safety, default_safety); + let safety = safety.into(); + hir::FnHeader { - safety: self.lower_safety(h.safety, default_safety), + safety, asyncness, constness: self.lower_constness(h.constness), abi: self.lower_extern(h.ext), diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index dd96b30fefc36..f115dfaaa1eb7 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -3762,9 +3762,20 @@ impl fmt::Display for Constness { } } +#[derive(Copy, Clone, Debug, HashStable_Generic, PartialEq, Eq)] +pub enum HeaderSafety { + Normal(Safety), +} + +impl From for HeaderSafety { + fn from(v: Safety) -> Self { + Self::Normal(v) + } +} + #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct FnHeader { - pub safety: Safety, + pub 
safety: HeaderSafety, pub constness: Constness, pub asyncness: IsAsync, pub abi: ExternAbi, @@ -3780,7 +3791,17 @@ impl FnHeader { } pub fn is_unsafe(&self) -> bool { - self.safety.is_unsafe() + self.safety().is_unsafe() + } + + pub fn is_safe(&self) -> bool { + self.safety().is_safe() + } + + pub fn safety(&self) -> Safety { + match self.safety { + HeaderSafety::Normal(safety) => safety, + } } } diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index d41b03640b696..86c6532c97ddb 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -1336,7 +1336,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn { icx.lowerer().lower_fn_ty( hir_id, - sig.header.safety, + sig.header.safety(), sig.header.abi, sig.decl, Some(generics), @@ -1351,13 +1351,18 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn kind: TraitItemKind::Fn(FnSig { header, decl, span: _ }, _), generics, .. - }) => { - icx.lowerer().lower_fn_ty(hir_id, header.safety, header.abi, decl, Some(generics), None) - } + }) => icx.lowerer().lower_fn_ty( + hir_id, + header.safety(), + header.abi, + decl, + Some(generics), + None, + ), ForeignItem(&hir::ForeignItem { kind: ForeignItemKind::Fn(sig, _, _), .. }) => { let abi = tcx.hir().get_foreign_abi(hir_id); - compute_sig_of_foreign_fn_decl(tcx, def_id, sig.decl, abi, sig.header.safety) + compute_sig_of_foreign_fn_decl(tcx, def_id, sig.decl, abi, sig.header.safety()) } Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor().is_some() => { @@ -1405,7 +1410,7 @@ fn lower_fn_sig_recovering_infer_ret_ty<'tcx>( icx.lowerer().lower_fn_ty( icx.tcx().local_def_id_to_hir_id(def_id), - sig.header.safety, + sig.header.safety(), sig.header.abi, sig.decl, Some(generics), diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index 39b6823cf0e0c..1afc4bedee093 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -2407,7 +2407,7 @@ impl<'a> State<'a> { self.print_fn( decl, hir::FnHeader { - safety, + safety: safety.into(), abi, constness: hir::Constness::NotConst, asyncness: hir::IsAsync::NotAsync, @@ -2423,12 +2423,16 @@ impl<'a> State<'a> { fn print_fn_header_info(&mut self, header: hir::FnHeader) { self.print_constness(header.constness); + let safety = match header.safety { + hir::HeaderSafety::Normal(safety) => safety, + }; + match header.asyncness { hir::IsAsync::NotAsync => {} hir::IsAsync::Async(_) => self.word_nbsp("async"), } - self.print_safety(header.safety); + self.print_safety(safety); if header.abi != ExternAbi::Rust { self.word_nbsp("extern"); diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index 9ebc7a4657e84..11a0b3b917eaa 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -932,10 +932,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return Err(TypeError::ForceInlineCast); } - // Safe `#[target_feature]` functions are not assignable to safe fn pointers - // (RFC 2396). + // Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396), + // report a better error than a safety mismatch. + // FIXME(target_feature): do this inside `coerce_from_safe_fn`. 
if b_hdr.safety.is_safe() - && !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() + && self.tcx.codegen_fn_attrs(def_id).safe_target_features { return Err(TypeError::TargetFeatureCast(def_id)); } diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index a406ec9a8fbb4..cb21961f36b2d 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -139,7 +139,7 @@ fn typeck_with_fallback<'tcx>( // type that has an infer in it, lower the type directly so that it'll // be correctly filled with infer. We'll use this inference to provide // a suggestion later on. - fcx.lowerer().lower_fn_ty(id, header.safety, header.abi, decl, None, None) + fcx.lowerer().lower_fn_ty(id, header.safety(), header.abi, decl, None, None) } else { tcx.fn_sig(def_id).instantiate_identity() }; diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index 16d868300db3b..e05f42af6fd24 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -30,6 +30,8 @@ pub struct CodegenFnAttrs { /// features (only enabled features are supported right now). /// Implied target features have already been applied. pub target_features: Vec, + /// Whether the function was declared safe, but has target features + pub safe_target_features: bool, /// The `#[linkage = "..."]` attribute on Rust-defined items and the value we found. pub linkage: Option, /// The `#[linkage = "..."]` attribute on foreign items and the value we found. @@ -150,6 +152,7 @@ impl CodegenFnAttrs { link_name: None, link_ordinal: None, target_features: vec![], + safe_target_features: false, linkage: None, import_linkage: None, link_section: None, diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index bf0ccdc0f1013..d2875fb37944a 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -222,6 +222,7 @@ pub struct DelegationFnSig { pub param_count: usize, pub has_self: bool, pub c_variadic: bool, + pub target_feature: bool, } #[derive(Clone, Copy, Debug)] diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index f7071eb139f35..0ecc8d84f2af7 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -478,19 +478,27 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { return; // don't visit the whole expression } ExprKind::Call { fun, ty: _, args: _, from_hir_call: _, fn_span: _ } => { - if self.thir[fun].ty.fn_sig(self.tcx).safety().is_unsafe() { - let func_id = if let ty::FnDef(func_id, _) = self.thir[fun].ty.kind() { + let fn_ty = self.thir[fun].ty; + let sig = fn_ty.fn_sig(self.tcx); + let (callee_features, safe_target_features): (&[_], _) = match fn_ty.kind() { + ty::FnDef(func_id, ..) 
=> { + let cg_attrs = self.tcx.codegen_fn_attrs(func_id); + (&cg_attrs.target_features, cg_attrs.safe_target_features) + } + _ => (&[], false), + }; + if sig.safety().is_unsafe() && !safe_target_features { + let func_id = if let ty::FnDef(func_id, _) = fn_ty.kind() { Some(*func_id) } else { None }; self.requires_unsafe(expr.span, CallToUnsafeFunction(func_id)); - } else if let &ty::FnDef(func_did, _) = self.thir[fun].ty.kind() { + } else if let &ty::FnDef(func_did, _) = fn_ty.kind() { // If the called function has target features the calling function hasn't, // the call requires `unsafe`. Don't check this on wasm // targets, though. For more information on wasm see the // is_like_wasm check in hir_analysis/src/collect.rs - let callee_features = &self.tcx.codegen_fn_attrs(func_did).target_features; if !self.tcx.sess.target.options.is_like_wasm && !callee_features.iter().all(|feature| { self.body_target_features.iter().any(|f| f.name == feature.name) @@ -1111,7 +1119,12 @@ pub(crate) fn check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) { let hir_id = tcx.local_def_id_to_hir_id(def); let safety_context = tcx.hir().fn_sig_by_hir_id(hir_id).map_or(SafetyContext::Safe, |fn_sig| { - if fn_sig.header.safety.is_unsafe() { SafetyContext::UnsafeFn } else { SafetyContext::Safe } + match fn_sig.header.safety { + hir::HeaderSafety::Normal(safety) => match safety { + hir::Safety::Unsafe => SafetyContext::UnsafeFn, + hir::Safety::Safe => SafetyContext::Safe, + }, + } }); let body_target_features = &tcx.body_codegen_attrs(def.to_def_id()).target_features; let mut warnings = Vec::new(); diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 7324d3fe7862d..c4aeaf478bd10 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -5019,12 +5019,13 @@ struct ItemInfoCollector<'a, 'ra, 'tcx> { } impl ItemInfoCollector<'_, '_, '_> { - fn collect_fn_info(&mut self, sig: &FnSig, id: NodeId) { + fn collect_fn_info(&mut self, sig: &FnSig, id: NodeId, attrs: &[Attribute]) { let sig = DelegationFnSig { header: sig.header, param_count: sig.decl.inputs.len(), has_self: sig.decl.has_self(), c_variadic: sig.decl.c_variadic(), + target_feature: attrs.iter().any(|attr| attr.has_name(sym::target_feature)), }; self.r.delegation_fn_sigs.insert(self.r.local_def_id(id), sig); } @@ -5043,7 +5044,7 @@ impl<'ast> Visitor<'ast> for ItemInfoCollector<'_, '_, '_> { | ItemKind::Trait(box Trait { ref generics, .. }) | ItemKind::TraitAlias(ref generics, _) => { if let ItemKind::Fn(box Fn { ref sig, .. }) = &item.kind { - self.collect_fn_info(sig, item.id); + self.collect_fn_info(sig, item.id, &item.attrs); } let def_id = self.r.local_def_id(item.id); @@ -5076,7 +5077,7 @@ impl<'ast> Visitor<'ast> for ItemInfoCollector<'_, '_, '_> { fn visit_assoc_item(&mut self, item: &'ast AssocItem, ctxt: AssocCtxt) { if let AssocItemKind::Fn(box Fn { ref sig, .. 
}) = &item.kind { - self.collect_fn_info(sig, item.id); + self.collect_fn_info(sig, item.id, &item.attrs); } visit::walk_assoc_item(self, item, ctxt); } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 2ed2df799dd1c..f619b2c66ac9e 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -3094,7 +3094,7 @@ fn clean_maybe_renamed_foreign_item<'tcx>( let kind = match item.kind { hir::ForeignItemKind::Fn(sig, names, generics) => ForeignFunctionItem( clean_function(cx, &sig, generics, FunctionArgs::Names(names)), - sig.header.safety, + sig.header.safety(), ), hir::ForeignItemKind::Static(ty, mutability, safety) => ForeignStaticItem( Static { type_: Box::new(clean_ty(ty, cx)), mutability, expr: None }, diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index dcee96978d2b5..f6462191734aa 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -668,7 +668,7 @@ impl Item { ty::Asyncness::Yes => hir::IsAsync::Async(DUMMY_SP), ty::Asyncness::No => hir::IsAsync::NotAsync, }; - hir::FnHeader { safety: sig.safety(), abi: sig.abi(), constness, asyncness } + hir::FnHeader { safety: sig.safety().into(), abi: sig.abi(), constness, asyncness } } let header = match self.kind { ItemKind::ForeignFunctionItem(_, safety) => { @@ -676,9 +676,9 @@ impl Item { let abi = tcx.fn_sig(def_id).skip_binder().abi(); hir::FnHeader { safety: if abi == ExternAbi::RustIntrinsic { - intrinsic_operation_unsafety(tcx, def_id.expect_local()) + intrinsic_operation_unsafety(tcx, def_id.expect_local()).into() } else { - safety + safety.into() }, abi, constness: if tcx.is_const_fn(def_id) { diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 621abd535010f..17d6abfbd0413 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -1637,6 +1637,14 @@ impl PrintWithSpace for hir::Safety { } } +impl PrintWithSpace for hir::HeaderSafety { + fn print_with_space(&self) -> &str { + match self { + hir::HeaderSafety::Normal(safety) => safety.print_with_space(), + } + } +} + impl PrintWithSpace for hir::IsAsync { fn print_with_space(&self) -> &str { match self { diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index e8230e63c0f60..1376bdb2e90f4 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -469,7 +469,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl let unsafety_flag = match myitem.kind { clean::FunctionItem(_) | clean::ForeignFunctionItem(..) 
- if myitem.fn_header(tcx).unwrap().safety.is_unsafe() => + if myitem.fn_header(tcx).unwrap().is_unsafe() => { "" } diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs index 7c2f5efd8dd44..91ddbb44ff898 100644 --- a/src/tools/clippy/clippy_lints/src/derive.rs +++ b/src/tools/clippy/clippy_lints/src/derive.rs @@ -419,7 +419,7 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> { id: LocalDefId, ) -> Self::Result { if let Some(header) = kind.header() - && header.safety.is_unsafe() + && header.is_unsafe() { ControlFlow::Break(()) } else { diff --git a/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs b/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs index 3e2b7055de4d0..8e2af6bf14a62 100644 --- a/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs +++ b/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs @@ -32,7 +32,7 @@ pub fn check( } let span = cx.tcx.def_span(owner_id); - match (headers.safety, sig.header.safety) { + match (headers.safety, sig.header.safety()) { (false, Safety::Unsafe) => span_lint( cx, MISSING_SAFETY_DOC, diff --git a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs index 017571c38db6e..854fe144c2919 100644 --- a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs +++ b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs @@ -34,7 +34,7 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: ImplicitSelfKind::None => return, }; - let name = if sig.header.safety.is_unsafe() { + let name = if sig.header.is_unsafe() { name.strip_suffix("_unchecked").unwrap_or(name) } else { name diff --git a/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs b/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs index 3ded8dc301271..8a74951ef63e7 100644 --- a/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs +++ b/src/tools/clippy/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs @@ -20,8 +20,8 @@ pub(super) fn check_fn<'tcx>( def_id: LocalDefId, ) { let safety = match kind { - intravisit::FnKind::ItemFn(_, _, hir::FnHeader { safety, .. 
}) => safety, - intravisit::FnKind::Method(_, sig) => sig.header.safety, + intravisit::FnKind::ItemFn(_, _, header) => header.safety(), + intravisit::FnKind::Method(_, sig) => sig.header.safety(), intravisit::FnKind::Closure => return, }; @@ -31,7 +31,7 @@ pub(super) fn check_fn<'tcx>( pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) { if let hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(eid)) = item.kind { let body = cx.tcx.hir().body(eid); - check_raw_ptr(cx, sig.header.safety, sig.decl, body, item.owner_id.def_id); + check_raw_ptr(cx, sig.header.safety(), sig.decl, body, item.owner_id.def_id); } } diff --git a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs index e096dd2517593..415b47adac5a1 100644 --- a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs +++ b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs @@ -95,7 +95,7 @@ impl<'tcx> LateLintPass<'tcx> for InherentToString { if let ImplItemKind::Fn(ref signature, _) = impl_item.kind // #11201 && let header = signature.header - && header.safety.is_safe() + && header.is_safe() && header.abi == Abi::Rust && impl_item.ident.name == sym::to_string && let decl = signature.decl diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs index 51351f6b7cd1e..3965c4d40878d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mod.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs @@ -5309,7 +5309,7 @@ fn lint_binary_expr_with_method_call(cx: &LateContext<'_>, info: &mut BinaryExpr } const FN_HEADER: hir::FnHeader = hir::FnHeader { - safety: hir::Safety::Safe, + safety: hir::HeaderSafety::Normal(hir::Safety::Safe), constness: hir::Constness::NotConst, asyncness: hir::IsAsync::NotAsync, abi: rustc_target::spec::abi::Abi::Rust, diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs index abdce69e76458..688374b5676ce 100644 --- a/src/tools/clippy/clippy_lints/src/new_without_default.rs +++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs @@ -75,7 +75,7 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { if let hir::ImplItemKind::Fn(ref sig, _) = impl_item.kind { let name = impl_item.ident.name; let id = impl_item.owner_id; - if sig.header.safety.is_unsafe() { + if sig.header.is_unsafe() { // can't be implemented for unsafe new return; } diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs index a86926d8416c6..506adf0f2cc5e 100644 --- a/src/tools/clippy/clippy_lints/src/ptr.rs +++ b/src/tools/clippy/clippy_lints/src/ptr.rs @@ -541,7 +541,7 @@ fn check_mut_from_ref<'tcx>(cx: &LateContext<'tcx>, sig: &FnSig<'_>, body: Optio .collect(); if let Some(args) = args && !args.is_empty() - && body.is_none_or(|body| sig.header.safety.is_unsafe() || contains_unsafe_block(cx, body.value)) + && body.is_none_or(|body| sig.header.is_unsafe() || contains_unsafe_block(cx, body.value)) { span_lint_and_then( cx, From 0e5ee891b2b07321175e398153bfa86c667f3494 Mon Sep 17 00:00:00 2001 From: joboet Date: Mon, 25 Nov 2024 09:50:24 +0100 Subject: [PATCH 048/142] Revert "Remove the Arc rt::init allocation for thread info" This reverts commit 0747f2898e83df7e601189c0f31762e84328becb. 
--- library/std/src/rt.rs | 2 +- library/std/src/thread/mod.rs | 170 ++++++++------------------ tests/debuginfo/thread.rs | 8 +- tests/rustdoc/demo-allocator-54478.rs | 1 - 4 files changed, 58 insertions(+), 123 deletions(-) diff --git a/library/std/src/rt.rs b/library/std/src/rt.rs index 24a362072ab61..56952aac6c63d 100644 --- a/library/std/src/rt.rs +++ b/library/std/src/rt.rs @@ -110,7 +110,7 @@ unsafe fn init(argc: isize, argv: *const *const u8, sigpipe: u8) { // handle does not match the current ID, we should attempt to use the // current thread ID here instead of unconditionally creating a new // one. Also see #130210. - let thread = unsafe { Thread::new_main(thread::current_id()) }; + let thread = Thread::new_main(thread::current_id()); if let Err(_thread) = thread::set_current(thread) { // `thread::current` will create a new handle if none has been set yet. // Thus, if someone uses it before main, this call will fail. That's a diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index b6ee00a253a95..81d75641ab595 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -158,12 +158,9 @@ #[cfg(all(test, not(any(target_os = "emscripten", target_os = "wasi"))))] mod tests; -use core::cell::SyncUnsafeCell; -use core::ffi::CStr; -use core::mem::MaybeUninit; - use crate::any::Any; use crate::cell::UnsafeCell; +use crate::ffi::CStr; use crate::marker::PhantomData; use crate::mem::{self, ManuallyDrop, forget}; use crate::num::NonZero; @@ -1259,31 +1256,30 @@ impl ThreadId { // Thread //////////////////////////////////////////////////////////////////////////////// +/// The internal representation of a `Thread`'s name. +enum ThreadName { + Main, + Other(ThreadNameString), + Unnamed, +} + // This module ensures private fields are kept private, which is necessary to enforce the safety requirements. mod thread_name_string { use core::str; + use super::ThreadName; use crate::ffi::{CStr, CString}; /// Like a `String` it's guaranteed UTF-8 and like a `CString` it's null terminated. pub(crate) struct ThreadNameString { inner: CString, } - - impl ThreadNameString { - pub fn as_str(&self) -> &str { - // SAFETY: `self.inner` is only initialised via `String`, which upholds the validity invariant of `str`. - unsafe { str::from_utf8_unchecked(self.inner.to_bytes()) } - } - } - impl core::ops::Deref for ThreadNameString { type Target = CStr; fn deref(&self) -> &CStr { &self.inner } } - impl From for ThreadNameString { fn from(s: String) -> Self { Self { @@ -1291,82 +1287,34 @@ mod thread_name_string { } } } + impl ThreadName { + pub fn as_cstr(&self) -> Option<&CStr> { + match self { + ThreadName::Main => Some(c"main"), + ThreadName::Other(other) => Some(other), + ThreadName::Unnamed => None, + } + } + + pub fn as_str(&self) -> Option<&str> { + // SAFETY: `as_cstr` can only return `Some` for a fixed CStr or a `ThreadNameString`, + // which is guaranteed to be UTF-8. + self.as_cstr().map(|s| unsafe { str::from_utf8_unchecked(s.to_bytes()) }) + } + } } pub(crate) use thread_name_string::ThreadNameString; -static MAIN_THREAD_INFO: SyncUnsafeCell<(MaybeUninit, MaybeUninit)> = - SyncUnsafeCell::new((MaybeUninit::uninit(), MaybeUninit::uninit())); - -/// The internal representation of a `Thread` that is not the main thread. -struct OtherInner { - name: Option, +/// The internal representation of a `Thread` handle +struct Inner { + name: ThreadName, // Guaranteed to be UTF-8 id: ThreadId, parker: Parker, } -/// The internal representation of a `Thread` handle. 
-#[derive(Clone)] -enum Inner { - /// Represents the main thread. May only be constructed by Thread::new_main. - Main(&'static (ThreadId, Parker)), - /// Represents any other thread. - Other(Pin>), -} - impl Inner { - fn id(&self) -> ThreadId { - match self { - Self::Main((thread_id, _)) => *thread_id, - Self::Other(other) => other.id, - } - } - - fn cname(&self) -> Option<&CStr> { - match self { - Self::Main(_) => Some(c"main"), - Self::Other(other) => other.name.as_deref(), - } - } - - fn name(&self) -> Option<&str> { - match self { - Self::Main(_) => Some("main"), - Self::Other(other) => other.name.as_ref().map(ThreadNameString::as_str), - } - } - - fn into_raw(self) -> *const () { - match self { - // Just return the pointer to `MAIN_THREAD_INFO`. - Self::Main(ptr) => crate::ptr::from_ref(ptr).cast(), - Self::Other(arc) => { - // Safety: We only expose an opaque pointer, which maintains the `Pin` invariant. - let inner = unsafe { Pin::into_inner_unchecked(arc) }; - Arc::into_raw(inner) as *const () - } - } - } - - /// # Safety - /// - /// See [`Thread::from_raw`]. - unsafe fn from_raw(ptr: *const ()) -> Self { - // If the pointer is to `MAIN_THREAD_INFO`, we know it is the `Main` variant. - if crate::ptr::eq(ptr.cast(), &MAIN_THREAD_INFO) { - Self::Main(unsafe { &*ptr.cast() }) - } else { - // Safety: Upheld by caller - Self::Other(unsafe { Pin::new_unchecked(Arc::from_raw(ptr as *const OtherInner)) }) - } - } - - fn parker(&self) -> Pin<&Parker> { - match self { - Self::Main((_, parker_ref)) => Pin::static_ref(parker_ref), - Self::Other(inner) => unsafe { - Pin::map_unchecked(inner.as_ref(), |inner| &inner.parker) - }, - } + fn parker(self: Pin<&Self>) -> Pin<&Parker> { + unsafe { Pin::map_unchecked(self, |inner| &inner.parker) } } } @@ -1390,47 +1338,33 @@ impl Inner { /// docs of [`Builder`] and [`spawn`] for more details. /// /// [`thread::current`]: current::current -pub struct Thread(Inner); +pub struct Thread { + inner: Pin>, +} impl Thread { /// Used only internally to construct a thread object without spawning. pub(crate) fn new(id: ThreadId, name: String) -> Thread { - Self::new_inner(id, Some(ThreadNameString::from(name))) + Self::new_inner(id, ThreadName::Other(name.into())) } pub(crate) fn new_unnamed(id: ThreadId) -> Thread { - Self::new_inner(id, None) + Self::new_inner(id, ThreadName::Unnamed) } - /// Used in runtime to construct main thread - /// - /// # Safety - /// - /// This must only ever be called once, and must be called on the main thread. - pub(crate) unsafe fn new_main(thread_id: ThreadId) -> Thread { - // Safety: As this is only called once and on the main thread, nothing else is accessing MAIN_THREAD_INFO - // as the only other read occurs in `main_thread_info` *after* the main thread has been constructed, - // and this function is the only one that constructs the main thread. - // - // Pre-main thread spawning cannot hit this either, as the caller promises that this is only called on the main thread. - let main_thread_info = unsafe { &mut *MAIN_THREAD_INFO.get() }; - - unsafe { Parker::new_in_place((&raw mut main_thread_info.1).cast()) }; - main_thread_info.0.write(thread_id); - - // Store a `'static` ref to the initialised ThreadId and Parker, - // to avoid having to repeatedly prove initialisation. - Self(Inner::Main(unsafe { &*MAIN_THREAD_INFO.get().cast() })) + /// Constructs the thread handle for the main thread. 
+ pub(crate) fn new_main(id: ThreadId) -> Thread { + Self::new_inner(id, ThreadName::Main) } - fn new_inner(id: ThreadId, name: Option) -> Thread { + fn new_inner(id: ThreadId, name: ThreadName) -> Thread { // We have to use `unsafe` here to construct the `Parker` in-place, // which is required for the UNIX implementation. // // SAFETY: We pin the Arc immediately after creation, so its address never // changes. let inner = unsafe { - let mut arc = Arc::::new_uninit(); + let mut arc = Arc::::new_uninit(); let ptr = Arc::get_mut_unchecked(&mut arc).as_mut_ptr(); (&raw mut (*ptr).name).write(name); (&raw mut (*ptr).id).write(id); @@ -1438,7 +1372,7 @@ impl Thread { Pin::new_unchecked(arc.assume_init()) }; - Self(Inner::Other(inner)) + Thread { inner } } /// Like the public [`park`], but callable on any handle. This is used to @@ -1447,7 +1381,7 @@ impl Thread { /// # Safety /// May only be called from the thread to which this handle belongs. pub(crate) unsafe fn park(&self) { - unsafe { self.0.parker().park() } + unsafe { self.inner.as_ref().parker().park() } } /// Like the public [`park_timeout`], but callable on any handle. This is @@ -1456,7 +1390,7 @@ impl Thread { /// # Safety /// May only be called from the thread to which this handle belongs. pub(crate) unsafe fn park_timeout(&self, dur: Duration) { - unsafe { self.0.parker().park_timeout(dur) } + unsafe { self.inner.as_ref().parker().park_timeout(dur) } } /// Atomically makes the handle's token available if it is not already. @@ -1492,7 +1426,7 @@ impl Thread { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn unpark(&self) { - self.0.parker().unpark(); + self.inner.as_ref().parker().unpark(); } /// Gets the thread's unique identifier. @@ -1512,7 +1446,7 @@ impl Thread { #[stable(feature = "thread_id", since = "1.19.0")] #[must_use] pub fn id(&self) -> ThreadId { - self.0.id() + self.inner.id } /// Gets the thread's name. @@ -1555,11 +1489,7 @@ impl Thread { #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub fn name(&self) -> Option<&str> { - self.0.name() - } - - fn cname(&self) -> Option<&CStr> { - self.0.cname() + self.inner.name.as_str() } /// Consumes the `Thread`, returning a raw pointer. @@ -1583,7 +1513,9 @@ impl Thread { /// ``` #[unstable(feature = "thread_raw", issue = "97523")] pub fn into_raw(self) -> *const () { - self.0.into_raw() + // Safety: We only expose an opaque pointer, which maintains the `Pin` invariant. + let inner = unsafe { Pin::into_inner_unchecked(self.inner) }; + Arc::into_raw(inner) as *const () } /// Constructs a `Thread` from a raw pointer. @@ -1605,7 +1537,11 @@ impl Thread { #[unstable(feature = "thread_raw", issue = "97523")] pub unsafe fn from_raw(ptr: *const ()) -> Thread { // Safety: Upheld by caller. - unsafe { Thread(Inner::from_raw(ptr)) } + unsafe { Thread { inner: Pin::new_unchecked(Arc::from_raw(ptr as *const Inner)) } } + } + + fn cname(&self) -> Option<&CStr> { + self.inner.name.as_cstr() } } diff --git a/tests/debuginfo/thread.rs b/tests/debuginfo/thread.rs index dc8cb0832192b..0415f586f5d90 100644 --- a/tests/debuginfo/thread.rs +++ b/tests/debuginfo/thread.rs @@ -12,15 +12,15 @@ // cdb-check:join_handle,d [Type: std::thread::JoinHandle >] // cdb-check: [...] __0 [Type: std::thread::JoinInner >] // -// cdb-command:dx -r3 t,d +// cdb-command:dx t,d // cdb-check:t,d : [...] [Type: std::thread::Thread *] -// cdb-check: [...] __0 : Other [Type: enum2$] -// cdb-check: [...] __0 [Type: core::pin::Pin >] +// cdb-check:[...] 
inner [...][Type: core::pin::Pin >] use std::thread; #[allow(unused_variables)] -fn main() { +fn main() +{ let join_handle = thread::spawn(|| { println!("Initialize a thread"); }); diff --git a/tests/rustdoc/demo-allocator-54478.rs b/tests/rustdoc/demo-allocator-54478.rs index 80acfc0ff58a1..dd98e80f03ade 100644 --- a/tests/rustdoc/demo-allocator-54478.rs +++ b/tests/rustdoc/demo-allocator-54478.rs @@ -40,7 +40,6 @@ //! } //! //! fn main() { -//! drop(String::from("An allocation")); //! assert!(unsafe { HIT }); //! } //! ``` From 14f7f4b7bfcfbfe9a6f778e762cac83420e08007 Mon Sep 17 00:00:00 2001 From: joboet Date: Mon, 25 Nov 2024 13:01:35 +0100 Subject: [PATCH 049/142] std: lazily allocate the main thread handle Thereby, we also allow accessing thread::current before main: as the runtime no longer tries to install its own handle, this will no longer trigger an abort. Rather, the name returned from name will only be "main" after the runtime initialization code has run, but I think that is acceptable. This new approach also requires some changes to the signal handling code, as calling `thread::current` would now allocate when called on the main thread, which is not acceptable. I fixed this by adding a new function (`with_current_name`) that performs all the naming logic without allocation or without initializing the thread ID (which could allocate on some platforms). --- library/std/src/panicking.rs | 39 ++--- library/std/src/rt.rs | 23 +-- .../std/src/sys/pal/unix/stack_overflow.rs | 9 +- .../std/src/sys/pal/windows/stack_overflow.rs | 8 +- library/std/src/thread/current.rs | 36 ++--- library/std/src/thread/mod.rs | 150 ++++++++++++------ 6 files changed, 151 insertions(+), 114 deletions(-) diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index 3b02254548b1c..8e50bf11dd082 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -258,31 +258,34 @@ fn default_hook(info: &PanicHookInfo<'_>) { let location = info.location().unwrap(); let msg = payload_as_str(info.payload()); - let thread = thread::try_current(); - let name = thread.as_ref().and_then(|t| t.name()).unwrap_or(""); let write = #[optimize(size)] |err: &mut dyn crate::io::Write| { // Use a lock to prevent mixed output in multithreading context. // Some platforms also require it when printing a backtrace, like `SymFromAddr` on Windows. let mut lock = backtrace::lock(); - // Try to write the panic message to a buffer first to prevent other concurrent outputs - // interleaving with it. - let mut buffer = [0u8; 512]; - let mut cursor = crate::io::Cursor::new(&mut buffer[..]); - - let write_msg = |dst: &mut dyn crate::io::Write| { - // We add a newline to ensure the panic message appears at the start of a line. - writeln!(dst, "\nthread '{name}' panicked at {location}:\n{msg}") - }; - if write_msg(&mut cursor).is_ok() { - let pos = cursor.position() as usize; - let _ = err.write_all(&buffer[0..pos]); - } else { - // The message did not fit into the buffer, write it directly instead. - let _ = write_msg(err); - }; + thread::with_current_name(|name| { + let name = name.unwrap_or(""); + + // Try to write the panic message to a buffer first to prevent other concurrent outputs + // interleaving with it. + let mut buffer = [0u8; 512]; + let mut cursor = crate::io::Cursor::new(&mut buffer[..]); + + let write_msg = |dst: &mut dyn crate::io::Write| { + // We add a newline to ensure the panic message appears at the start of a line. 
+ writeln!(dst, "\nthread '{name}' panicked at {location}:\n{msg}") + }; + + if write_msg(&mut cursor).is_ok() { + let pos = cursor.position() as usize; + let _ = err.write_all(&buffer[0..pos]); + } else { + // The message did not fit into the buffer, write it directly instead. + let _ = write_msg(err); + }; + }); static FIRST_PANIC: AtomicBool = AtomicBool::new(true); diff --git a/library/std/src/rt.rs b/library/std/src/rt.rs index 56952aac6c63d..384369b0012b5 100644 --- a/library/std/src/rt.rs +++ b/library/std/src/rt.rs @@ -23,7 +23,7 @@ pub use core::panicking::{panic_display, panic_fmt}; #[rustfmt::skip] use crate::any::Any; use crate::sync::Once; -use crate::thread::{self, Thread}; +use crate::thread::{self, main_thread}; use crate::{mem, panic, sys}; // Prints to the "panic output", depending on the platform this may be: @@ -102,24 +102,9 @@ unsafe fn init(argc: isize, argv: *const *const u8, sigpipe: u8) { sys::init(argc, argv, sigpipe) }; - // Set up the current thread handle to give it the right name. - // - // When code running before main uses `ReentrantLock` (for example by - // using `println!`), the thread ID can become initialized before we - // create this handle. Since `set_current` fails when the ID of the - // handle does not match the current ID, we should attempt to use the - // current thread ID here instead of unconditionally creating a new - // one. Also see #130210. - let thread = Thread::new_main(thread::current_id()); - if let Err(_thread) = thread::set_current(thread) { - // `thread::current` will create a new handle if none has been set yet. - // Thus, if someone uses it before main, this call will fail. That's a - // bad idea though, as we then cannot set the main thread name here. - // - // FIXME: detect the main thread in `thread::current` and use the - // correct name there. - rtabort!("code running before main must not use thread::current"); - } + // Remember the main thread ID to give it the correct name. + // SAFETY: this is the only time and place where we call this function. + unsafe { main_thread::set(thread::current_id()) }; } /// Clean up the thread-local runtime state. This *should* be run after all other diff --git a/library/std/src/sys/pal/unix/stack_overflow.rs b/library/std/src/sys/pal/unix/stack_overflow.rs index 69b31da427fcb..db5c6bd3a1c32 100644 --- a/library/std/src/sys/pal/unix/stack_overflow.rs +++ b/library/std/src/sys/pal/unix/stack_overflow.rs @@ -100,10 +100,11 @@ mod imp { // If the faulting address is within the guard page, then we print a // message saying so and abort. if start <= addr && addr < end { - rtprintpanic!( - "\nthread '{}' has overflowed its stack\n", - thread::current().name().unwrap_or("") - ); + thread::with_current_name(|name| { + let name = name.unwrap_or(""); + rtprintpanic!("\nthread '{name}' has overflowed its stack\n"); + }); + rtabort!("stack overflow"); } else { // Unregister ourselves by reverting back to the default behavior. 
diff --git a/library/std/src/sys/pal/windows/stack_overflow.rs b/library/std/src/sys/pal/windows/stack_overflow.rs index 467e21ab56a28..734cd30bed08f 100644 --- a/library/std/src/sys/pal/windows/stack_overflow.rs +++ b/library/std/src/sys/pal/windows/stack_overflow.rs @@ -18,10 +18,10 @@ unsafe extern "system" fn vectored_handler(ExceptionInfo: *mut c::EXCEPTION_POIN let code = rec.ExceptionCode; if code == c::EXCEPTION_STACK_OVERFLOW { - rtprintpanic!( - "\nthread '{}' has overflowed its stack\n", - thread::current().name().unwrap_or("") - ); + thread::with_current_name(|name| { + let name = name.unwrap_or(""); + rtprintpanic!("\nthread '{name}' has overflowed its stack\n"); + }); } c::EXCEPTION_CONTINUE_SEARCH } diff --git a/library/std/src/thread/current.rs b/library/std/src/thread/current.rs index 3d2c288b36085..fa88059fe64e3 100644 --- a/library/std/src/thread/current.rs +++ b/library/std/src/thread/current.rs @@ -15,7 +15,7 @@ local_pointer! { /// /// We store the thread ID so that it never gets destroyed during the lifetime /// of a thread, either using `#[thread_local]` or multiple `local_pointer!`s. -mod id { +pub(super) mod id { use super::*; cfg_if::cfg_if! { @@ -27,7 +27,7 @@ mod id { pub(super) const CHEAP: bool = true; - pub(super) fn get() -> Option { + pub(crate) fn get() -> Option { ID.get() } @@ -44,7 +44,7 @@ mod id { pub(super) const CHEAP: bool = false; - pub(super) fn get() -> Option { + pub(crate) fn get() -> Option { let id0 = ID0.get().addr() as u64; let id16 = ID16.get().addr() as u64; let id32 = ID32.get().addr() as u64; @@ -67,7 +67,7 @@ mod id { pub(super) const CHEAP: bool = false; - pub(super) fn get() -> Option { + pub(crate) fn get() -> Option { let id0 = ID0.get().addr() as u64; let id32 = ID32.get().addr() as u64; ThreadId::from_u64((id32 << 32) + id0) @@ -85,7 +85,7 @@ mod id { pub(super) const CHEAP: bool = true; - pub(super) fn get() -> Option { + pub(crate) fn get() -> Option { let id = ID.get().addr() as u64; ThreadId::from_u64(id) } @@ -112,7 +112,7 @@ mod id { /// Tries to set the thread handle for the current thread. Fails if a handle was /// already set or if the thread ID of `thread` would change an already-set ID. -pub(crate) fn set_current(thread: Thread) -> Result<(), Thread> { +pub(super) fn set_current(thread: Thread) -> Result<(), Thread> { if CURRENT.get() != NONE { return Err(thread); } @@ -140,28 +140,28 @@ pub(crate) fn current_id() -> ThreadId { // to retrieve it from the current thread handle, which will only take one // TLS access. if !id::CHEAP { - let current = CURRENT.get(); - if current > DESTROYED { - unsafe { - let current = ManuallyDrop::new(Thread::from_raw(current)); - return current.id(); - } + if let Some(id) = try_with_current(|t| t.map(|t| t.id())) { + return id; } } id::get_or_init() } -/// Gets a handle to the thread that invokes it, if the handle has been initialized. -pub(crate) fn try_current() -> Option { +/// Gets a reference to the handle of the thread that invokes it, if the handle +/// has been initialized. 
+pub(super) fn try_with_current(f: F) -> R +where + F: FnOnce(Option<&Thread>) -> R, +{ let current = CURRENT.get(); if current > DESTROYED { unsafe { let current = ManuallyDrop::new(Thread::from_raw(current)); - Some((*current).clone()) + f(Some(¤t)) } } else { - None + f(None) } } @@ -176,7 +176,7 @@ pub(crate) fn current_or_unnamed() -> Thread { (*current).clone() } } else if current == DESTROYED { - Thread::new_unnamed(id::get_or_init()) + Thread::new(id::get_or_init(), None) } else { init_current(current) } @@ -221,7 +221,7 @@ fn init_current(current: *mut ()) -> Thread { CURRENT.set(BUSY); // If the thread ID was initialized already, use it. let id = id::get_or_init(); - let thread = Thread::new_unnamed(id); + let thread = Thread::new(id, None); // Make sure that `crate::rt::thread_cleanup` will be run, which will // call `drop_current`. diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index 81d75641ab595..cf7a951d100ec 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -183,7 +183,8 @@ mod current; #[stable(feature = "rust1", since = "1.0.0")] pub use current::current; -pub(crate) use current::{current_id, current_or_unnamed, drop_current, set_current, try_current}; +pub(crate) use current::{current_id, current_or_unnamed, drop_current}; +use current::{set_current, try_with_current}; mod spawnhook; @@ -498,10 +499,7 @@ impl Builder { }); let id = ThreadId::new(); - let my_thread = match name { - Some(name) => Thread::new(id, name), - None => Thread::new_unnamed(id), - }; + let my_thread = Thread::new(id, name); let hooks = if no_hooks { spawnhook::ChildSpawnHooks::default() @@ -1232,7 +1230,7 @@ impl ThreadId { } } - #[cfg(not(target_thread_local))] + #[cfg(any(not(target_thread_local), target_has_atomic = "64"))] fn from_u64(v: u64) -> Option { NonZero::new(v).map(ThreadId) } @@ -1256,30 +1254,16 @@ impl ThreadId { // Thread //////////////////////////////////////////////////////////////////////////////// -/// The internal representation of a `Thread`'s name. -enum ThreadName { - Main, - Other(ThreadNameString), - Unnamed, -} - // This module ensures private fields are kept private, which is necessary to enforce the safety requirements. mod thread_name_string { - use core::str; - - use super::ThreadName; use crate::ffi::{CStr, CString}; + use crate::str; /// Like a `String` it's guaranteed UTF-8 and like a `CString` it's null terminated. pub(crate) struct ThreadNameString { inner: CString, } - impl core::ops::Deref for ThreadNameString { - type Target = CStr; - fn deref(&self) -> &CStr { - &self.inner - } - } + impl From for ThreadNameString { fn from(s: String) -> Self { Self { @@ -1287,27 +1271,91 @@ mod thread_name_string { } } } - impl ThreadName { - pub fn as_cstr(&self) -> Option<&CStr> { - match self { - ThreadName::Main => Some(c"main"), - ThreadName::Other(other) => Some(other), - ThreadName::Unnamed => None, - } + + impl ThreadNameString { + pub fn as_cstr(&self) -> &CStr { + &self.inner } - pub fn as_str(&self) -> Option<&str> { - // SAFETY: `as_cstr` can only return `Some` for a fixed CStr or a `ThreadNameString`, - // which is guaranteed to be UTF-8. - self.as_cstr().map(|s| unsafe { str::from_utf8_unchecked(s.to_bytes()) }) + pub fn as_str(&self) -> &str { + // SAFETY: `ThreadNameString` is guaranteed to be UTF-8. + unsafe { str::from_utf8_unchecked(self.inner.to_bytes()) } + } + } +} + +use thread_name_string::ThreadNameString; + +pub(crate) mod main_thread { + cfg_if::cfg_if! 
{ + if #[cfg(target_has_atomic = "64")] { + use super::ThreadId; + use crate::sync::atomic::AtomicU64; + use crate::sync::atomic::Ordering::Relaxed; + + static MAIN: AtomicU64 = AtomicU64::new(0); + + pub(super) fn get() -> Option { + ThreadId::from_u64(MAIN.load(Relaxed)) + } + + /// # Safety + /// May only be called once. + pub(crate) unsafe fn set(id: ThreadId) { + MAIN.store(id.as_u64().get(), Relaxed) + } + } else { + use super::ThreadId; + use crate::mem::MaybeUninit; + use crate::sync::atomic::AtomicBool; + use crate::sync::atomic::Ordering::{Acquire, Release}; + + static INIT: AtomicBool = AtomicBool::new(false); + static mut MAIN: MaybeUninit = MaybeUninit::uninit(); + + pub(super) fn get() -> Option { + if INIT.load(Acquire) { + Some(unsafe { MAIN.assume_init() }) + } else { + None + } + } + + /// # Safety + /// May only be called once. + pub(crate) unsafe fn set(id: ThreadId) { + unsafe { MAIN = MaybeUninit::new(id) }; + INIT.store(true, Release); + } } } } -pub(crate) use thread_name_string::ThreadNameString; + +pub(crate) fn with_current_name(f: F) -> R +where + F: FnOnce(Option<&str>) -> R, +{ + try_with_current(|thread| { + if let Some(thread) = thread { + if let Some(name) = &thread.inner.name { + return f(Some(name.as_str())); + } else if Some(thread.inner.id) == main_thread::get() { + return f(Some("main")); + } + } else if let Some(main) = main_thread::get() + && let Some(id) = current::id::get() + && id == main + { + return f(Some("main")); + } + + f(None) + }) +} /// The internal representation of a `Thread` handle struct Inner { - name: ThreadName, // Guaranteed to be UTF-8 + name: Option, id: ThreadId, parker: Parker, } @@ -1343,21 +1391,9 @@ pub struct Thread { } impl Thread { - /// Used only internally to construct a thread object without spawning. - pub(crate) fn new(id: ThreadId, name: String) -> Thread { - Self::new_inner(id, ThreadName::Other(name.into())) - } - - pub(crate) fn new_unnamed(id: ThreadId) -> Thread { - Self::new_inner(id, ThreadName::Unnamed) - } - - /// Constructs the thread handle for the main thread. - pub(crate) fn new_main(id: ThreadId) -> Thread { - Self::new_inner(id, ThreadName::Main) - } + pub(crate) fn new(id: ThreadId, name: Option) -> Thread { + let name = name.map(ThreadNameString::from); - fn new_inner(id: ThreadId, name: ThreadName) -> Thread { // We have to use `unsafe` here to construct the `Parker` in-place, // which is required for the UNIX implementation. // @@ -1489,7 +1525,13 @@ impl Thread { #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub fn name(&self) -> Option<&str> { - self.inner.name.as_str() + if let Some(name) = &self.inner.name { + Some(name.as_str()) + } else if main_thread::get() == Some(self.inner.id) { + Some("main") + } else { + None + } } /// Consumes the `Thread`, returning a raw pointer. 
@@ -1541,7 +1583,13 @@ impl Thread { } fn cname(&self) -> Option<&CStr> { - self.inner.name.as_cstr() + if let Some(name) = &self.inner.name { + Some(name.as_cstr()) + } else if main_thread::get() == Some(self.inner.id) { + Some(c"main") + } else { + None + } } } From 1d2525260416349272fc2262818344274d896518 Mon Sep 17 00:00:00 2001 From: joboet Date: Mon, 25 Nov 2024 14:03:02 +0100 Subject: [PATCH 050/142] make sure that the allocator is actually called in allocator test Originally authored by GnomedDev --- tests/rustdoc/demo-allocator-54478.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/rustdoc/demo-allocator-54478.rs b/tests/rustdoc/demo-allocator-54478.rs index dd98e80f03ade..80acfc0ff58a1 100644 --- a/tests/rustdoc/demo-allocator-54478.rs +++ b/tests/rustdoc/demo-allocator-54478.rs @@ -40,6 +40,7 @@ //! } //! //! fn main() { +//! drop(String::from("An allocation")); //! assert!(unsafe { HIT }); //! } //! ``` From 2c28cc45c83961ccfaba790f7dc8e6c1c9d26502 Mon Sep 17 00:00:00 2001 From: joboet Date: Wed, 27 Nov 2024 14:40:09 +0100 Subject: [PATCH 051/142] add comments explaining main thread identification --- library/std/src/thread/current.rs | 3 +++ library/std/src/thread/mod.rs | 26 ++++++++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/library/std/src/thread/current.rs b/library/std/src/thread/current.rs index fa88059fe64e3..414711298f047 100644 --- a/library/std/src/thread/current.rs +++ b/library/std/src/thread/current.rs @@ -156,6 +156,9 @@ where { let current = CURRENT.get(); if current > DESTROYED { + // SAFETY: `Arc` does not contain interior mutability, so it does not + // matter that the address of the handle might be different depending + // on where this is called. unsafe { let current = ManuallyDrop::new(Thread::from_raw(current)); f(Some(¤t)) diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index cf7a951d100ec..59b395336f2e3 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -1286,6 +1286,18 @@ mod thread_name_string { use thread_name_string::ThreadNameString; +/// Store the ID of the main thread. +/// +/// The thread handle for the main thread is created lazily, and this might even +/// happen pre-main. Since not every platform has a way to identify the main +/// thread when that happens – macOS's `pthread_main_np` function being a notable +/// exception – we cannot assign it the right name right then. Instead, in our +/// runtime startup code, we remember the thread ID of the main thread (through +/// this modules `set` function) and use it to identify the main thread from then +/// on. This works reliably and has the additional advantage that we can report +/// the right thread name on main even after the thread handle has been destroyed. +/// Note however that this also means that the name reported in pre-main functions +/// will be incorrect, but that's just something we have to live with. pub(crate) mod main_thread { cfg_if::cfg_if! { if #[cfg(target_has_atomic = "64")] { @@ -1331,21 +1343,35 @@ pub(crate) mod main_thread { } } +/// Run a function with the current thread's name. +/// +/// Modulo thread local accesses, this function is safe to call from signal +/// handlers and in similar circumstances where allocations are not possible. pub(crate) fn with_current_name(f: F) -> R where F: FnOnce(Option<&str>) -> R, { try_with_current(|thread| { if let Some(thread) = thread { + // If there is a current thread handle, try to use the name stored + // there. 
if let Some(name) = &thread.inner.name { return f(Some(name.as_str())); } else if Some(thread.inner.id) == main_thread::get() { + // The main thread doesn't store its name in the handle, we must + // identify it through its ID. Since we already have the `Thread`, + // we can retrieve the ID from it instead of going through another + // thread local. return f(Some("main")); } } else if let Some(main) = main_thread::get() && let Some(id) = current::id::get() && id == main { + // The main thread doesn't always have a thread handle, we must + // identify it through its ID instead. The checks are ordered so + // that the current ID is only loaded if it is actually needed, + // since loading it from TLS might need multiple expensive accesses. return f(Some("main")); } From 1a23a6fd8b27943ffdbac2d5e4eb088cdecb06ff Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sun, 12 Jan 2025 12:40:03 +1100 Subject: [PATCH 052/142] Add wrapper type `ReversedGraph` for swapping successors/predecessors --- .../rustc_data_structures/src/graph/mod.rs | 1 + .../src/graph/reversed.rs | 42 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 compiler/rustc_data_structures/src/graph/reversed.rs diff --git a/compiler/rustc_data_structures/src/graph/mod.rs b/compiler/rustc_data_structures/src/graph/mod.rs index 103ddd917bf84..92035e8bc480f 100644 --- a/compiler/rustc_data_structures/src/graph/mod.rs +++ b/compiler/rustc_data_structures/src/graph/mod.rs @@ -4,6 +4,7 @@ pub mod dominators; pub mod implementation; pub mod iterate; mod reference; +pub mod reversed; pub mod scc; pub mod vec_graph; diff --git a/compiler/rustc_data_structures/src/graph/reversed.rs b/compiler/rustc_data_structures/src/graph/reversed.rs new file mode 100644 index 0000000000000..9b726deaa15b6 --- /dev/null +++ b/compiler/rustc_data_structures/src/graph/reversed.rs @@ -0,0 +1,42 @@ +use crate::graph::{DirectedGraph, Predecessors, Successors}; + +/// View that reverses the direction of edges in its underlying graph, so that +/// successors become predecessors and vice-versa. +/// +/// Because of `impl Graph for &G`, the underlying graph can be +/// wrapped by-reference instead of by-value if desired. +#[derive(Clone, Copy, Debug)] +pub struct ReversedGraph { + pub inner: G, +} + +impl ReversedGraph { + pub fn new(inner: G) -> Self { + Self { inner } + } +} + +impl DirectedGraph for ReversedGraph { + type Node = G::Node; + + fn num_nodes(&self) -> usize { + self.inner.num_nodes() + } +} + +// Implementing `StartNode` is not possible in general, because the start node +// of an underlying graph is instead an _end_ node in the reversed graph. +// But would be possible to define another wrapper type that adds an explicit +// start node to its underlying graph, if desired. + +impl Successors for ReversedGraph { + fn successors(&self, node: Self::Node) -> impl Iterator { + self.inner.predecessors(node) + } +} + +impl Predecessors for ReversedGraph { + fn predecessors(&self, node: Self::Node) -> impl Iterator { + self.inner.successors(node) + } +} From e70112caf86dac4a01739538d3e6f3d163e47642 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sun, 12 Jan 2025 12:44:28 +1100 Subject: [PATCH 053/142] Add `DenseBitSet::union_not` This is similar to the existing `union`, except that bits in the RHS are negated before being incorporated into the LHS. Currently only `DenseBitSet` is supported. Supporting other bitset types is possible, but non-trivial, and currently isn't needed. 
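
A minimal usage sketch of the new method (for illustration only; it mirrors the
unit test added below, and the `usize` index type and the 100-bit domain are
arbitrary choices for the example):

    use rustc_index::bit_set::DenseBitSet;

    let mut a = DenseBitSet::<usize>::new_empty(100);
    let mut b = DenseBitSet::<usize>::new_empty(100);
    a.insert(3);
    b.insert(3); // already set in `a`, so it stays set
    b.insert(7); // clear in `a` and set in `b`, so it ends up clear
    a.union_not(&b);
    // After `a = a | !b`, a bit is clear only if it was clear in `a`
    // and set in `b`; here that means `a` contains 0..100 except 7.
    assert!(a.contains(3) && a.contains(42) && !a.contains(7));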
---
 compiler/rustc_index/src/bit_set.rs       | 30 +++++++++++++++++++++++
 compiler/rustc_index/src/bit_set/tests.rs | 26 ++++++++++++++++++++
 2 files changed, 56 insertions(+)

diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs
index d93707b745d40..f12df831cb503 100644
--- a/compiler/rustc_index/src/bit_set.rs
+++ b/compiler/rustc_index/src/bit_set.rs
@@ -281,6 +281,24 @@ impl<T: Idx> DenseBitSet<T> {
     }
 
     bit_relations_inherent_impls! {}
+
+    /// Sets `self = self | !other`.
+    ///
+    /// FIXME: Incorporate this into [`BitRelations`] and fill out
+    /// implementations for other bitset types, if needed.
+    pub fn union_not(&mut self, other: &DenseBitSet<T>) {
+        assert_eq!(self.domain_size, other.domain_size);
+
+        // FIXME(Zalathar): If we were to forcibly _set_ all excess bits before
+        // the bitwise update, and then clear them again afterwards, we could
+        // quickly and accurately detect whether the update changed anything.
+        // But that's only worth doing if there's an actual use-case.
+
+        bitwise(&mut self.words, &other.words, |a, b| a | !b);
+        // The bitwise update `a | !b` can result in the last word containing
+        // out-of-domain bits, so we need to clear them.
+        self.clear_excess_bits();
+    }
 }
 
 // dense REL dense
@@ -1087,6 +1105,18 @@ impl fmt::Debug for ChunkedBitSet {
     }
 }
 
+/// Sets `out_vec[i] = op(out_vec[i], in_vec[i])` for each index `i` in both
+/// slices. The slices must have the same length.
+///
+/// Returns true if at least one bit in `out_vec` was changed.
+///
+/// ## Warning
+/// Some bitwise operations (e.g. union-not, xor) can set output bits that were
+/// unset in both inputs. If this happens in the last word/chunk of a bitset,
+/// it can cause the bitset to contain out-of-domain values, which need to
+/// be cleared with `clear_excess_bits_in_final_word`. This also makes the
+/// "changed" return value unreliable, because the change might have only
+/// affected excess bits.
 #[inline]
 fn bitwise<Op>(out_vec: &mut [Word], in_vec: &[Word], op: Op) -> bool
 where
diff --git a/compiler/rustc_index/src/bit_set/tests.rs b/compiler/rustc_index/src/bit_set/tests.rs
index 0350740aa811a..eaa4aafe72134 100644
--- a/compiler/rustc_index/src/bit_set/tests.rs
+++ b/compiler/rustc_index/src/bit_set/tests.rs
@@ -75,6 +75,32 @@ fn union_two_sets() {
     assert!(set1.contains(64));
 }
 
+#[test]
+fn union_not() {
+    let mut a = DenseBitSet::<usize>::new_empty(100);
+    let mut b = DenseBitSet::<usize>::new_empty(100);
+
+    a.insert(3);
+    a.insert(5);
+    a.insert(80);
+    a.insert(81);
+
+    b.insert(5); // Already in `a`.
+    b.insert(7);
+    b.insert(63);
+    b.insert(81); // Already in `a`.
+    b.insert(90);
+
+    a.union_not(&b);
+
+    // After union-not, `a` should contain all values in the domain, except for
+    // the ones that are in `b` and were _not_ already in `a`.
+ assert_eq!( + a.iter().collect::>(), + (0usize..100).filter(|&x| !matches!(x, 7 | 63 | 90)).collect::>(), + ); +} + #[test] fn chunked_bitset() { let mut b0 = ChunkedBitSet::::new_empty(0); From 99657aa3389885d40e7aa0f1729186a72a4be8b5 Mon Sep 17 00:00:00 2001 From: lcnr Date: Tue, 14 Jan 2025 11:58:42 +0100 Subject: [PATCH 054/142] mir borrowck: cleanup late-bound region handling --- compiler/rustc_borrowck/src/renumber.rs | 1 - .../rustc_borrowck/src/universal_regions.rs | 65 ++++++++----------- .../binder/nested-closures-regions.stderr | 4 +- .../escape-argument-callee.stderr | 2 +- ...er-to-static-comparing-against-free.stderr | 2 +- ...ail-to-approximate-longer-no-bounds.stderr | 4 +- ...-to-approximate-longer-wrong-bounds.stderr | 4 +- 7 files changed, 34 insertions(+), 48 deletions(-) diff --git a/compiler/rustc_borrowck/src/renumber.rs b/compiler/rustc_borrowck/src/renumber.rs index d83d6ade2032c..e355d2b415b03 100644 --- a/compiler/rustc_borrowck/src/renumber.rs +++ b/compiler/rustc_borrowck/src/renumber.rs @@ -34,7 +34,6 @@ pub(crate) enum RegionCtxt { Location(Location), TyContext(TyContext), Free(Symbol), - Bound(Symbol), LateBound(Symbol), Existential(Option), Placeholder(Symbol), diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index 3dc4569c57b9a..26af86c0cdd49 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -467,15 +467,13 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { self.infcx.tcx.local_parent(self.mir_def), |r| { debug!(?r); - if !indices.indices.contains_key(&r) { - let region_vid = { - let name = r.get_name_or_anon(); - self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) - }; - - debug!(?region_vid); - indices.insert_late_bound_region(r, region_vid.as_var()); - } + let region_vid = { + let name = r.get_name_or_anon(); + self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) + }; + + debug!(?region_vid); + indices.insert_late_bound_region(r, region_vid.as_var()); }, ); @@ -484,21 +482,17 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { self.infcx.num_region_vars() }; - // "Liberate" the late-bound regions. These correspond to - // "local" free regions. + // Converse of above, if this is a function/closure then the late-bound regions declared + // on its signature are local. + // + // We manually loop over `bound_inputs_and_output` instead of using + // `for_each_late_bound_region_in_item` as we may need to add the otherwise + // implicit `ClosureEnv` region. let bound_inputs_and_output = self.compute_inputs_and_output(&indices, defining_ty); - - let inputs_and_output = self.infcx.replace_bound_regions_with_nll_infer_vars( - FR, - self.mir_def, - bound_inputs_and_output, - &mut indices, - ); - // Converse of above, if this is a function/closure then the late-bound regions declared on its - // signature are local. 
- for_each_late_bound_region_in_item(self.infcx.tcx, self.mir_def, |r| { - debug!(?r); - if !indices.indices.contains_key(&r) { + for (idx, bound_var) in bound_inputs_and_output.bound_vars().iter().enumerate() { + if let ty::BoundVariableKind::Region(kind) = bound_var { + let kind = ty::LateParamRegionKind::from_bound(ty::BoundVar::from_usize(idx), kind); + let r = ty::Region::new_late_param(self.infcx.tcx, self.mir_def.to_def_id(), kind); let region_vid = { let name = r.get_name_or_anon(); self.infcx.next_nll_region_var(FR, || RegionCtxt::LateBound(name)) @@ -507,7 +501,12 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { debug!(?region_vid); indices.insert_late_bound_region(r, region_vid.as_var()); } - }); + } + let inputs_and_output = self.infcx.replace_bound_regions_with_nll_infer_vars( + self.mir_def, + bound_inputs_and_output, + &indices, + ); let (unnormalized_output_ty, mut unnormalized_input_tys) = inputs_and_output.split_last().unwrap(); @@ -832,10 +831,9 @@ impl<'tcx> BorrowckInferCtxt<'tcx> { #[instrument(level = "debug", skip(self, indices))] fn replace_bound_regions_with_nll_infer_vars( &self, - origin: NllRegionVariableOrigin, all_outlive_scope: LocalDefId, value: ty::Binder<'tcx, T>, - indices: &mut UniversalRegionIndices<'tcx>, + indices: &UniversalRegionIndices<'tcx>, ) -> T where T: TypeFoldable>, @@ -845,18 +843,7 @@ impl<'tcx> BorrowckInferCtxt<'tcx> { let kind = ty::LateParamRegionKind::from_bound(br.var, br.kind); let liberated_region = ty::Region::new_late_param(self.tcx, all_outlive_scope.to_def_id(), kind); - let region_vid = { - let name = match br.kind.get_name() { - Some(name) => name, - _ => sym::anon, - }; - - self.next_nll_region_var(origin, || RegionCtxt::Bound(name)) - }; - - indices.insert_late_bound_region(liberated_region, region_vid.as_var()); - debug!(?liberated_region, ?region_vid); - region_vid + ty::Region::new_var(self.tcx, indices.to_region_vid(liberated_region)) }); value } @@ -870,7 +857,7 @@ impl<'tcx> UniversalRegionIndices<'tcx> { /// well. These are used for error reporting. 
fn insert_late_bound_region(&mut self, r: ty::Region<'tcx>, vid: ty::RegionVid) { debug!("insert_late_bound_region({:?}, {:?})", r, vid); - self.indices.insert(r, vid); + assert_eq!(self.indices.insert(r, vid), None); } /// Converts `r` into a local inference variable: `r` can either diff --git a/tests/ui/closures/binder/nested-closures-regions.stderr b/tests/ui/closures/binder/nested-closures-regions.stderr index a30339ac67b1c..909cbcaa808a5 100644 --- a/tests/ui/closures/binder/nested-closures-regions.stderr +++ b/tests/ui/closures/binder/nested-closures-regions.stderr @@ -9,7 +9,7 @@ LL | for<'a> || -> () { for<'c> |_: &'a ()| -> () {}; }; extern "rust-call" fn((&(),)), (), ] - = note: late-bound region is '?4 + = note: late-bound region is '?3 = note: late-bound region is '?2 = note: number of external vids: 3 = note: where '?1: '?2 @@ -26,7 +26,7 @@ LL | for<'a> || -> () { for<'c> |_: &'a ()| -> () {}; }; extern "rust-call" fn(()), (), ] - = note: late-bound region is '?2 + = note: late-bound region is '?1 note: no external requirements --> $DIR/nested-closures-regions.rs:7:1 diff --git a/tests/ui/nll/closure-requirements/escape-argument-callee.stderr b/tests/ui/nll/closure-requirements/escape-argument-callee.stderr index a7a59dccf2234..a445534c8d801 100644 --- a/tests/ui/nll/closure-requirements/escape-argument-callee.stderr +++ b/tests/ui/nll/closure-requirements/escape-argument-callee.stderr @@ -17,7 +17,7 @@ LL | let mut closure = expect_sig(|p, y| *p = y); | - - ^^^^^^ assignment requires that `'1` must outlive `'2` | | | | | has type `&'1 i32` - | has type `&'?2 mut &'2 i32` + | has type `&'?1 mut &'2 i32` note: no external requirements --> $DIR/escape-argument-callee.rs:20:1 diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.stderr b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.stderr index 0094d7a50d369..621c1ea083b25 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.stderr +++ b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.stderr @@ -20,7 +20,7 @@ LL | foo(cell, |cell_a, cell_x| { LL | cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure | ^^^^^^^^^^^^^^^^^^^^^^^^ `cell_x` escapes the closure body here | - = note: requirement occurs because of the type `Cell<&'?8 u32>`, which makes the generic argument `&'?8 u32` invariant + = note: requirement occurs because of the type `Cell<&'?9 u32>`, which makes the generic argument `&'?9 u32` invariant = note: the struct `Cell` is invariant over the parameter `T` = help: see for more information about variance diff --git a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.stderr b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.stderr index 4558ff50674f8..f48ed2823ddb1 100644 --- a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.stderr +++ b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.stderr @@ -16,9 +16,9 @@ error: lifetime may not live long enough --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:37:9 | LL | establish_relationships(&cell_a, &cell_b, |_outlives, x, y| { - | --------- - has type `&'?7 Cell<&'1 u32>` + | --------- - has type `&'?6 Cell<&'1 u32>` | | - | has type `&'?5 Cell<&'2 &'?1 u32>` + | has type `&'?4 Cell<&'2 &'?1 u32>` LL | // Only works if 
'x: 'y: LL | demand_y(x, y, x.get()) | ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2` diff --git a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.stderr b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.stderr index 83173ae80c00d..a090e94593fe6 100644 --- a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.stderr +++ b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.stderr @@ -16,9 +16,9 @@ error: lifetime may not live long enough --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:41:9 | LL | establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| { - | ---------- ---------- has type `&'?8 Cell<&'2 &'?2 u32>` + | ---------- ---------- has type `&'?7 Cell<&'2 &'?2 u32>` | | - | has type `&'?6 Cell<&'1 &'?1 u32>` + | has type `&'?5 Cell<&'1 &'?1 u32>` LL | // Only works if 'x: 'y: LL | demand_y(x, y, x.get()) | ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2` From a40c659cefb58dc7eb323fa4a58dfa3ea9ebd158 Mon Sep 17 00:00:00 2001 From: Nikita Popov Date: Tue, 14 Jan 2025 14:46:09 +0100 Subject: [PATCH 055/142] Update to LLVM 19.1.7 --- src/llvm-project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/llvm-project b/src/llvm-project index 59512b0027382..7e8c93c87c611 160000 --- a/src/llvm-project +++ b/src/llvm-project @@ -1 +1 @@ -Subproject commit 59512b00273829823da74050d373b8d46dbca558 +Subproject commit 7e8c93c87c611f21d9bd95100563392f4c18bfe7 From 244316f711175eb072959d5654454b9e51cf3251 Mon Sep 17 00:00:00 2001 From: binarycat Date: Tue, 14 Jan 2025 08:54:55 -0600 Subject: [PATCH 056/142] add disclaimer to --disable-minification --- src/librustdoc/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 330b2507de89b..e9b2bff089fbd 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -656,7 +656,7 @@ fn opts() -> Vec { FlagMulti, "", "disable-minification", - "diable the minification of CSS/JS files", + "diable the minification of CSS/JS files (perma-unstable, do not use with cached files)", "", ), // deprecated / removed options From f5e23d5c7b36d5c24e0133e0c2218810688e691c Mon Sep 17 00:00:00 2001 From: binarycat Date: Tue, 14 Jan 2025 09:51:55 -0600 Subject: [PATCH 057/142] fix typo and unit test --- src/librustdoc/lib.rs | 2 +- tests/run-make/rustdoc-default-output/output-default.stdout | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index e9b2bff089fbd..ba620b6cb6bef 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -656,7 +656,7 @@ fn opts() -> Vec { FlagMulti, "", "disable-minification", - "diable the minification of CSS/JS files (perma-unstable, do not use with cached files)", + "disable the minification of CSS/JS files (perma-unstable, do not use with cached files)", "", ), // deprecated / removed options diff --git a/tests/run-make/rustdoc-default-output/output-default.stdout b/tests/run-make/rustdoc-default-output/output-default.stdout index adf7ee794fd28..c1b246e849ce4 100644 --- a/tests/run-make/rustdoc-default-output/output-default.stdout +++ b/tests/run-make/rustdoc-default-output/output-default.stdout @@ -194,7 +194,8 @@ Options: --doctest-compilation-args add arguments to be used when compiling doctests --disable-minification - diable the minification of CSS/JS files + disable 
the minification of CSS/JS files + (perma-unstable, do not use with cached files) --plugin-path DIR removed, see issue #44136 for From faafa5c3107344adc5e57dd6d6dc74e019bd7daf Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Tue, 14 Jan 2025 16:32:17 +0000 Subject: [PATCH 058/142] Fix legacy symbol mangling of closures --- compiler/rustc_symbol_mangling/src/legacy.rs | 8 ++++---- tests/codegen-units/item-collection/closures.rs | 6 ++++++ 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index 58c0a05df1f2c..879f3fac21fb4 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -19,15 +19,15 @@ pub(super) fn mangle<'tcx>( let def_id = instance.def_id(); // We want to compute the "type" of this item. Unfortunately, some - // kinds of items (e.g., closures) don't have an entry in the - // item-type array. So walk back up the find the closest parent - // that DOES have an entry. + // kinds of items (e.g., synthetic static allocations from const eval) + // don't have a proper implementation for the `type_of` query. So walk + // back up the find the closest parent that DOES have a type. let mut ty_def_id = def_id; let instance_ty; loop { let key = tcx.def_key(ty_def_id); match key.disambiguated_data.data { - DefPathData::TypeNs(_) | DefPathData::ValueNs(_) => { + DefPathData::TypeNs(_) | DefPathData::ValueNs(_) | DefPathData::Closure => { instance_ty = tcx.type_of(ty_def_id).instantiate_identity(); debug!(?instance_ty); break; diff --git a/tests/codegen-units/item-collection/closures.rs b/tests/codegen-units/item-collection/closures.rs index 5fc8023092533..864f98817a874 100644 --- a/tests/codegen-units/item-collection/closures.rs +++ b/tests/codegen-units/item-collection/closures.rs @@ -10,3 +10,9 @@ pub async fn async_fn() {} pub fn closure() { let _ = || {}; } + +//~ MONO_ITEM fn A::{constant#0}::{closure#0} @@ +trait A where + [(); (|| {}, 1).1]: Sized, +{ +} From 98f673e93a47b33a451f6b2cd99e5d5b03b57563 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AE=B8=E6=9D=B0=E5=8F=8B=20Jieyou=20Xu=20=28Joe=29?= <39484203+jieyouxu@users.noreply.github.com> Date: Tue, 14 Jan 2025 04:32:33 +0800 Subject: [PATCH 059/142] tests: port `extern-fn-reachable` to rmake.rs Co-authored-by: binarycat --- .../tidy/src/allowed_run_make_makefiles.txt | 1 - tests/run-make/extern-fn-reachable/Makefile | 26 ----------- tests/run-make/extern-fn-reachable/dylib.rs | 15 ++++++ tests/run-make/extern-fn-reachable/rmake.rs | 46 +++++++++++++++++++ 4 files changed, 61 insertions(+), 27 deletions(-) delete mode 100644 tests/run-make/extern-fn-reachable/Makefile create mode 100644 tests/run-make/extern-fn-reachable/rmake.rs diff --git a/src/tools/tidy/src/allowed_run_make_makefiles.txt b/src/tools/tidy/src/allowed_run_make_makefiles.txt index b20d8678d0eae..575716eb6dd9d 100644 --- a/src/tools/tidy/src/allowed_run_make_makefiles.txt +++ b/src/tools/tidy/src/allowed_run_make_makefiles.txt @@ -1,5 +1,4 @@ run-make/cat-and-grep-sanity-check/Makefile -run-make/extern-fn-reachable/Makefile run-make/jobserver-error/Makefile run-make/split-debuginfo/Makefile run-make/symbol-mangling-hashed/Makefile diff --git a/tests/run-make/extern-fn-reachable/Makefile b/tests/run-make/extern-fn-reachable/Makefile deleted file mode 100644 index 3297251bfd1aa..0000000000000 --- a/tests/run-make/extern-fn-reachable/Makefile +++ /dev/null @@ -1,26 +0,0 @@ -# ignore-cross-compile -include 
../tools.mk - -# ignore-windows-msvc - -NM=nm -D - -ifeq ($(UNAME),Darwin) -NM=nm -gU -endif - -ifdef IS_WINDOWS -NM=nm -g -endif - -# This overrides the LD_LIBRARY_PATH for RUN -TARGET_RPATH_DIR:=$(TARGET_RPATH_DIR):$(TMPDIR) - -all: - $(RUSTC) dylib.rs -o $(TMPDIR)/libdylib.so -C prefer-dynamic - - [ "$$($(NM) $(TMPDIR)/libdylib.so | grep -v __imp_ | grep -c fun1)" -eq "1" ] - [ "$$($(NM) $(TMPDIR)/libdylib.so | grep -v __imp_ | grep -c fun2)" -eq "1" ] - [ "$$($(NM) $(TMPDIR)/libdylib.so | grep -v __imp_ | grep -c fun3)" -eq "1" ] - [ "$$($(NM) $(TMPDIR)/libdylib.so | grep -v __imp_ | grep -c fun4)" -eq "1" ] - [ "$$($(NM) $(TMPDIR)/libdylib.so | grep -v __imp_ | grep -c fun5)" -eq "1" ] diff --git a/tests/run-make/extern-fn-reachable/dylib.rs b/tests/run-make/extern-fn-reachable/dylib.rs index fe0c7023b27b8..42b8270b214a3 100644 --- a/tests/run-make/extern-fn-reachable/dylib.rs +++ b/tests/run-make/extern-fn-reachable/dylib.rs @@ -1,19 +1,34 @@ #![crate_type = "dylib"] #![allow(dead_code)] +// `pub` extern fn here is a Rust nameres visibility concept, and should not affect symbol +// visibility in the dylib. #[no_mangle] pub extern "C" fn fun1() {} + +// (Lack of) `pub` for the extern fn here is a Rust nameres visibility concept, and should not +// affect symbol visibility in the dylib. #[no_mangle] extern "C" fn fun2() {} +// Modules are a Rust nameres concept, and should not affect symbol visibility in the dylib if the +// extern fn is nested inside a module. mod foo { #[no_mangle] pub extern "C" fn fun3() {} } + +// Similarly, the Rust visibility of the containing module is a Rust nameres concept, and should not +// affect symbol visibility in the dylib. pub mod bar { #[no_mangle] pub extern "C" fn fun4() {} } +// Non-extern `#[no_mangle]` fn should induce a symbol visible in the dylib. #[no_mangle] pub fn fun5() {} + +// The Rust visibility of the fn should not affect is symbol visibility in the dylib. +#[no_mangle] +fn fun6() {} diff --git a/tests/run-make/extern-fn-reachable/rmake.rs b/tests/run-make/extern-fn-reachable/rmake.rs new file mode 100644 index 0000000000000..2fc992b14ebb3 --- /dev/null +++ b/tests/run-make/extern-fn-reachable/rmake.rs @@ -0,0 +1,46 @@ +//! Smoke test to check that that symbols of `extern "C"` functions and `#[no_mangle]` rust +//! functions: +//! +//! 1. Are externally visible in the dylib produced. +//! 2. That the symbol visibility is orthogonal to the Rust nameres visibility of the functions +//! involved. + +//@ ignore-cross-compile + +use std::collections::BTreeSet; + +use run_make_support::object::{self, Object}; +use run_make_support::{dynamic_lib_name, is_darwin, path, rfs, rustc}; + +fn main() { + let dylib = dynamic_lib_name("dylib"); + rustc().input("dylib.rs").output(&dylib).arg("-Cprefer-dynamic").run(); + + let expected_symbols = if is_darwin() { + // Mach-O states that all exported symbols should have an underscore as prefix. At the + // same time dlsym will implicitly add it, so outside of compilers, linkers and people + // writing assembly, nobody needs to be aware of this. 
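+        // For instance (an illustrative aside, not something this test relies on): a call like
+        // `libc::dlsym(handle, c"fun1".as_ptr())` on macOS resolves the export that the symbol
+        // table records as `_fun1`.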
+ BTreeSet::from(["_fun1", "_fun2", "_fun3", "_fun4", "_fun5", "_fun6"]) + } else { + BTreeSet::from(["fun1", "fun2", "fun3", "fun4", "fun5", "fun6"]) + }; + + let mut found_symbols = BTreeSet::new(); + + let blob = rfs::read(path(dylib)); + let file = object::File::parse(&*blob).unwrap(); + for export in file.exports().unwrap() { + let sym_name = export.name(); + let sym_name = std::str::from_utf8(sym_name).unwrap(); + found_symbols.insert(sym_name); + } + + println!("expected_symbols = {:?}", expected_symbols); + println!("found_symbols = {:?}", found_symbols); + if !found_symbols.is_superset(&expected_symbols) { + for diff in expected_symbols.difference(&found_symbols) { + eprintln!("missing symbol: {}", diff); + } + panic!("missing expected symbols"); + } +} From c141f506bcf9eb3423f319ec7f047cf6a132af4b Mon Sep 17 00:00:00 2001 From: Yotam Ofek Date: Tue, 14 Jan 2025 17:24:34 +0000 Subject: [PATCH 060/142] =?UTF-8?q?Add=20missing=20closing=20backtick=20in?= =?UTF-8?q?=20commit=20hook=20message=20=F0=9F=90=B8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/bootstrap/src/core/build_steps/format.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bootstrap/src/core/build_steps/format.rs b/src/bootstrap/src/core/build_steps/format.rs index 40c908c3fa996..c7eafadee2df3 100644 --- a/src/bootstrap/src/core/build_steps/format.rs +++ b/src/bootstrap/src/core/build_steps/format.rs @@ -116,7 +116,7 @@ fn print_paths(verb: &str, adjective: Option<&str>, paths: &[String]) { println!("fmt: {verb} {len} {adjective}files"); } if len > 1000 && !CiEnv::is_ci() { - println!("hint: if this number seems too high, try running `git fetch origin master"); + println!("hint: if this number seems too high, try running `git fetch origin master`"); } } From bf545ce2fee9f1fef0fd9e63e46b5aab5de61465 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Tue, 14 Jan 2025 17:54:53 +0000 Subject: [PATCH 061/142] Prefer lower TraitUpcasting candidates --- .../src/solve/trait_goals.rs | 8 +++-- .../rustc_trait_selection/src/solve/select.rs | 4 +++ .../src/traits/select/confirmation.rs | 2 +- .../src/traits/select/mod.rs | 12 ++++++++ compiler/rustc_type_ir/src/solve/mod.rs | 5 ++-- .../prefer-lower-candidates.rs | 29 +++++++++++++++++++ 6 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 tests/ui/traits/trait-upcasting/prefer-lower-candidates.rs diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index d68fca608299a..f4d7c3ce76cf2 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -741,12 +741,14 @@ where a_data.principal(), )); } else if let Some(a_principal) = a_data.principal() { - for new_a_principal in - elaborate::supertraits(self.cx(), a_principal.with_self_ty(cx, a_ty)).skip(1) + for (idx, new_a_principal) in + elaborate::supertraits(self.cx(), a_principal.with_self_ty(cx, a_ty)) + .enumerate() + .skip(1) { responses.extend(self.consider_builtin_upcast_to_principal( goal, - CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting), + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(idx)), a_data, a_region, b_data, diff --git a/compiler/rustc_trait_selection/src/solve/select.rs b/compiler/rustc_trait_selection/src/solve/select.rs index 1661852903c48..b0b6274907d0c 100644 --- a/compiler/rustc_trait_selection/src/solve/select.rs +++ 
b/compiler/rustc_trait_selection/src/solve/select.rs @@ -117,6 +117,10 @@ fn candidate_should_be_dropped_in_favor_of<'tcx>( CandidateSource::BuiltinImpl(BuiltinImplSource::Object(a)), CandidateSource::BuiltinImpl(BuiltinImplSource::Object(b)), ) => a >= b, + ( + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(a)), + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(b)), + ) => a >= b, // Prefer dyn candidates over non-dyn candidates. This is necessary to // handle the unsoundness between `impl Any for T` and `dyn Any: Any`. ( diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index 3619d16cde248..0ccb0fc0615c0 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -1090,7 +1090,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { )? .expect("did not expect ambiguity during confirmation"); - Ok(ImplSource::Builtin(BuiltinImplSource::TraitUpcasting, nested)) + Ok(ImplSource::Builtin(BuiltinImplSource::TraitUpcasting(idx), nested)) } fn confirm_builtin_unsize_candidate( diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 9e7da5eb3689c..5581ea46882c0 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -1895,6 +1895,18 @@ impl<'tcx> SelectionContext<'_, 'tcx> { Some(None) => {} None => return None, } + // Same for upcasting. + let upcast_bound = candidates + .iter() + .filter_map(|c| { + if let TraitUpcastingUnsizeCandidate(i) = c.candidate { Some(i) } else { None } + }) + .try_reduce(|c1, c2| if has_non_region_infer { None } else { Some(c1.min(c2)) }); + match upcast_bound { + Some(Some(index)) => return Some(TraitUpcastingUnsizeCandidate(index)), + Some(None) => {} + None => return None, + } // Finally, handle overlapping user-written impls. let impls = candidates.iter().filter_map(|c| { diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs index 1ae904d50e066..fbb5c7430eb2a 100644 --- a/compiler/rustc_type_ir/src/solve/mod.rs +++ b/compiler/rustc_type_ir/src/solve/mod.rs @@ -177,8 +177,9 @@ pub enum BuiltinImplSource { /// A built-in implementation of `Upcast` for trait objects to other trait objects. /// /// This can be removed when `feature(dyn_upcasting)` is stabilized, since we only - /// use it to detect when upcasting traits in hir typeck. - TraitUpcasting, + /// use it to detect when upcasting traits in hir typeck. The index is only used + /// for winnowing. + TraitUpcasting(usize), /// Unsizing a tuple like `(A, B, ..., X)` to `(A, B, ..., Y)` if `X` unsizes to `Y`. /// /// This can be removed when `feature(tuple_unsizing)` is stabilized, since we only diff --git a/tests/ui/traits/trait-upcasting/prefer-lower-candidates.rs b/tests/ui/traits/trait-upcasting/prefer-lower-candidates.rs new file mode 100644 index 0000000000000..4bc59b09fb5af --- /dev/null +++ b/tests/ui/traits/trait-upcasting/prefer-lower-candidates.rs @@ -0,0 +1,29 @@ +//@ revisions: current next +//@ ignore-compare-mode-next-solver (explicit revisions) +//@[next] compile-flags: -Znext-solver +//@ check-pass + +// Ensure we don't have ambiguity when upcasting to two supertraits +// that are identical modulo normalization. 
+ +#![feature(trait_upcasting)] + +trait Supertrait<T> { + fn method(&self) {} +} +impl<T> Supertrait<T> for () {} + +trait Identity { + type Selff; +} +impl<Selff> Identity for Selff { + type Selff = Selff; +} +trait Trait<P>: Supertrait<()> + Supertrait<<P as Identity>::Selff> {} + +impl<P> Trait<P>
for () {} + +fn main() { + let x: &dyn Trait<()> = &(); + let x: &dyn Supertrait<()> = x; +} From fcc34b2c44baa3e1b578e7a416f7b51c0c0d397e Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Tue, 7 Jan 2025 02:21:16 +0000 Subject: [PATCH 062/142] Update compiler-builtins to 0.1.141 0.1.141 syncs changes from `libm`. Most of the `libm` changes are testing- or configuration-related. --- ...029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch | 4 ++-- library/Cargo.lock | 4 ++-- library/alloc/Cargo.toml | 2 +- library/std/Cargo.toml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch index bf07e455a75f5..b2eeb52633604 100644 --- a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch +++ b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch @@ -16,8 +16,8 @@ index 7165c3e48af..968552ad435 100644 [dependencies] core = { path = "../core" } --compiler_builtins = { version = "=0.1.140", features = ['rustc-dep-of-std'] } -+compiler_builtins = { version = "=0.1.140", features = ['rustc-dep-of-std', 'no-f16-f128'] } +-compiler_builtins = { version = "=0.1.141", features = ['rustc-dep-of-std'] } ++compiler_builtins = { version = "=0.1.141", features = ['rustc-dep-of-std', 'no-f16-f128'] } [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } diff --git a/library/Cargo.lock b/library/Cargo.lock index 207c744ee2248..b01fec9b8f5c4 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -61,9 +61,9 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.140" +version = "0.1.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df14d41c5d172a886df3753d54238eefb0f61c96cbd8b363c33ccc92c457bee3" +checksum = "b5e7a0206befe4e574e37d6d7a0fe82e88fdf54bedb0608f239cb11b7a6aa6be" dependencies = [ "cc", "rustc-std-workspace-core", diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml index 07596fa16f982..36f617b45261a 100644 --- a/library/alloc/Cargo.toml +++ b/library/alloc/Cargo.toml @@ -10,7 +10,7 @@ edition = "2021" [dependencies] core = { path = "../core" } -compiler_builtins = { version = "=0.1.140", features = ['rustc-dep-of-std'] } +compiler_builtins = { version = "=0.1.141", features = ['rustc-dep-of-std'] } [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index e7f7f38cb4154..f34dc185eb195 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -17,7 +17,7 @@ cfg-if = { version = "1.0", features = ['rustc-dep-of-std'] } panic_unwind = { path = "../panic_unwind", optional = true } panic_abort = { path = "../panic_abort" } core = { path = "../core", public = true } -compiler_builtins = { version = "=0.1.140" } +compiler_builtins = { version = "=0.1.141" } unwind = { path = "../unwind" } hashbrown = { version = "0.15", default-features = false, features = [ 'rustc-dep-of-std', From 3d9dcf46362148e453822b5b54719918061aef40 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sun, 12 Jan 2025 03:23:34 +0000 Subject: [PATCH 063/142] Un-spaghettify the control flow of generate_lint_output Co-authored-by: Eric Huss --- src/tools/lint-docs/src/lib.rs | 89 
+++++++++++++++++++--------------- 1 file changed, 49 insertions(+), 40 deletions(-) diff --git a/src/tools/lint-docs/src/lib.rs b/src/tools/lint-docs/src/lib.rs index 5aae50aebc3d6..f6e844657807f 100644 --- a/src/tools/lint-docs/src/lib.rs +++ b/src/tools/lint-docs/src/lib.rs @@ -473,55 +473,64 @@ impl<'a> LintExtractor<'a> { .filter(|line| line.starts_with('{')) .map(serde_json::from_str) .collect::, _>>()?; + // First try to find the messages with the `code` field set to our lint. let matches: Vec<_> = msgs .iter() .filter(|msg| matches!(&msg["code"]["code"], serde_json::Value::String(s) if s==name)) .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string()) .collect(); - if matches.is_empty() { - // Try to detect if an unstable lint forgot to enable a `#![feature(..)]`. - if name != "test_unstable_lint" && msgs.iter().any(|msg| { + if !matches.is_empty() { + return Ok(matches.join("\n")); + } + + // Try to detect if an unstable lint forgot to enable a `#![feature(..)]`. + // Specifically exclude `test_unstable_lint` which exercises this on purpose. + if name != "test_unstable_lint" + && msgs.iter().any(|msg| { matches!(&msg["code"]["code"], serde_json::Value::String(s) if s=="unknown_lints") && matches!(&msg["message"], serde_json::Value::String(s) if s.contains(name)) - }) { - let rendered: Vec<&str> = - msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect(); - let non_json: Vec<&str> = - stderr.lines().filter(|line| !line.starts_with('{')).collect(); - Err(format!( - "lint `{}` is unstable and must be enabled in example. see:\n{}\n{}", - name, - rendered.join("\n"), - non_json.join("\n"), - ) - .into()) - } else { - // Some lints override their code to something else (E0566). - // Try to find something that looks like it could be our lint. - let matches: Vec<_> = msgs.iter().filter(|msg| - matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name))) - .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string()) - .collect(); - if matches.is_empty() { - let rendered: Vec<&str> = - msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect(); - let non_json: Vec<&str> = - stderr.lines().filter(|line| !line.starts_with('{')).collect(); - Err(format!( - "did not find lint `{}` in output of example, got:\n{}\n{}", - name, - non_json.join("\n"), - rendered.join("\n") - ) - .into()) - } else { - Ok(matches.join("\n")) - } - } - } else { - Ok(matches.join("\n")) + }) + { + let rendered: Vec<&str> = + msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect(); + let non_json: Vec<&str> = + stderr.lines().filter(|line| !line.starts_with('{')).collect(); + return Err(format!( + "did not find lint `{}` in output of example (got unknown_lints)\n\ + Is the lint possibly misspelled, or does it need a `#![feature(...)]`?\n\ + Output was:\n\ + {}\n{}", + name, + rendered.join("\n"), + non_json.join("\n"), + ) + .into()); } + + // Some lints override their code to something else (E0566). + // Try to find something that looks like it could be our lint. + let matches: Vec<_> = msgs + .iter() + .filter( + |msg| matches!(&msg["rendered"], serde_json::Value::String(s) if s.contains(name)), + ) + .map(|msg| msg["rendered"].as_str().expect("rendered field should exist").to_string()) + .collect(); + if !matches.is_empty() { + return Ok(matches.join("\n")); + } + + // Otherwise, give a descriptive error. 
+ let rendered: Vec<&str> = msgs.iter().filter_map(|msg| msg["rendered"].as_str()).collect(); + let non_json: Vec<&str> = stderr.lines().filter(|line| !line.starts_with('{')).collect(); + Err(format!( + "did not find lint `{}` in output of example, got:\n{}\n{}", + name, + non_json.join("\n"), + rendered.join("\n") + ) + .into()) } /// Saves the mdbook lint chapters at the given path. From 516a93353d63ab9079e5ea5709f338f14d165272 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sat, 11 Jan 2025 21:48:42 +0000 Subject: [PATCH 064/142] Make sure we can produce ConstArgHasWrongType errors for valtree consts --- .../src/solve/fulfill.rs | 5 ++- ...ype-mismatch-in-nested-goal.current.stderr | 45 +++++++++++++++++++ .../type-mismatch-in-nested-goal.next.stderr | 45 +++++++++++++++++++ .../type-mismatch-in-nested-goal.rs | 17 +++++++ 4 files changed, 110 insertions(+), 2 deletions(-) create mode 100644 tests/ui/const-generics/type-mismatch-in-nested-goal.current.stderr create mode 100644 tests/ui/const-generics/type-mismatch-in-nested-goal.next.stderr create mode 100644 tests/ui/const-generics/type-mismatch-in-nested-goal.rs diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index 7db0f2bb5a7cc..4498beff4ea34 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -264,9 +264,10 @@ fn fulfillment_error_for_no_solution<'tcx>( infcx.tcx.type_of(uv.def).instantiate(infcx.tcx, uv.args) } ty::ConstKind::Param(param_ct) => param_ct.find_ty_from_env(obligation.param_env), - _ => span_bug!( + ty::ConstKind::Value(ty, _) => ty, + kind => span_bug!( obligation.cause.span, - "ConstArgHasWrongType failed but we don't know how to compute type" + "ConstArgHasWrongType failed but we don't know how to compute type for {kind:?}" ), }; FulfillmentErrorCode::Select(SelectionError::ConstArgHasWrongType { diff --git a/tests/ui/const-generics/type-mismatch-in-nested-goal.current.stderr b/tests/ui/const-generics/type-mismatch-in-nested-goal.current.stderr new file mode 100644 index 0000000000000..c6fb07926c802 --- /dev/null +++ b/tests/ui/const-generics/type-mismatch-in-nested-goal.current.stderr @@ -0,0 +1,45 @@ +error: the constant `N` is not of type `bool` + --> $DIR/type-mismatch-in-nested-goal.rs:9:50 + | +LL | fn needs_a(_: [u8; N]) where (): A {} + | ^^^^ expected `bool`, found `usize` + | +note: required by a const generic parameter in `A` + --> $DIR/type-mismatch-in-nested-goal.rs:5:9 + | +LL | trait A {} + | ^^^^^^^^^^^^^ required by this const generic parameter in `A` + +error: the constant `true` is not of type `usize` + --> $DIR/type-mismatch-in-nested-goal.rs:13:13 + | +LL | needs_a([]); + | ------- ^^ expected `usize`, found `bool` + | | + | required by a bound introduced by this call + | +note: required by a const generic parameter in `needs_a` + --> $DIR/type-mismatch-in-nested-goal.rs:9:12 + | +LL | fn needs_a(_: [u8; N]) where (): A {} + | ^^^^^^^^^^^^^^ required by this const generic parameter in `needs_a` + +error[E0308]: mismatched types + --> $DIR/type-mismatch-in-nested-goal.rs:13:13 + | +LL | needs_a([]); + | ------- ^^ expected an array with a size of true, found one with a size of 0 + | | + | arguments to this function are incorrect + | + = note: expected array `[u8; true]` + found array `[_; 0]` +note: function defined here + --> $DIR/type-mismatch-in-nested-goal.rs:9:4 + | +LL | fn needs_a(_: [u8; N]) where (): A {} + | ^^^^^^^ ---------- + +error: 
aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0308`. diff --git a/tests/ui/const-generics/type-mismatch-in-nested-goal.next.stderr b/tests/ui/const-generics/type-mismatch-in-nested-goal.next.stderr new file mode 100644 index 0000000000000..c6fb07926c802 --- /dev/null +++ b/tests/ui/const-generics/type-mismatch-in-nested-goal.next.stderr @@ -0,0 +1,45 @@ +error: the constant `N` is not of type `bool` + --> $DIR/type-mismatch-in-nested-goal.rs:9:50 + | +LL | fn needs_a(_: [u8; N]) where (): A {} + | ^^^^ expected `bool`, found `usize` + | +note: required by a const generic parameter in `A` + --> $DIR/type-mismatch-in-nested-goal.rs:5:9 + | +LL | trait A {} + | ^^^^^^^^^^^^^ required by this const generic parameter in `A` + +error: the constant `true` is not of type `usize` + --> $DIR/type-mismatch-in-nested-goal.rs:13:13 + | +LL | needs_a([]); + | ------- ^^ expected `usize`, found `bool` + | | + | required by a bound introduced by this call + | +note: required by a const generic parameter in `needs_a` + --> $DIR/type-mismatch-in-nested-goal.rs:9:12 + | +LL | fn needs_a(_: [u8; N]) where (): A {} + | ^^^^^^^^^^^^^^ required by this const generic parameter in `needs_a` + +error[E0308]: mismatched types + --> $DIR/type-mismatch-in-nested-goal.rs:13:13 + | +LL | needs_a([]); + | ------- ^^ expected an array with a size of true, found one with a size of 0 + | | + | arguments to this function are incorrect + | + = note: expected array `[u8; true]` + found array `[_; 0]` +note: function defined here + --> $DIR/type-mismatch-in-nested-goal.rs:9:4 + | +LL | fn needs_a(_: [u8; N]) where (): A {} + | ^^^^^^^ ---------- + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0308`. 
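As a stand-alone sketch of why the new `Value` arm is straightforward (the types here are simplified stand-ins, not rustc's real `ty::ConstKind`): a valtree constant already carries its type, so the error path can read it directly instead of reaching the old `span_bug!`.

#[derive(Debug, PartialEq)]
enum ConstKind {
    Unevaluated(&'static str),
    Param(&'static str),
    Value(&'static str),
}

fn const_arg_type(ct: &ConstKind) -> &'static str {
    match ct {
        ConstKind::Unevaluated(ty) => ty,
        ConstKind::Param(ty) => ty,
        // The valtree case simply exposes the type it stores.
        ConstKind::Value(ty) => ty,
    }
}

fn main() {
    assert_eq!(const_arg_type(&ConstKind::Value("bool")), "bool");
}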
diff --git a/tests/ui/const-generics/type-mismatch-in-nested-goal.rs b/tests/ui/const-generics/type-mismatch-in-nested-goal.rs new file mode 100644 index 0000000000000..fd29019f89bc5 --- /dev/null +++ b/tests/ui/const-generics/type-mismatch-in-nested-goal.rs @@ -0,0 +1,17 @@ +//@ revisions: current next +//@[next] compile-flags: -Znext-solver +//@ ignore-compare-mode-next-solver (explicit revisions) + +trait A {} + +impl A for () {} + +fn needs_a(_: [u8; N]) where (): A {} +//~^ ERROR the constant `N` is not of type `bool` + +pub fn main() { + needs_a([]); + //~^ ERROR the constant `true` is not of type `usize` + //~| ERROR mismatched types + // FIXME(const_generics): we should hide this error as we've already errored above +} From 3cd75812c818c5e7855f1be8f6a754c9f7a9f0f8 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Jan 2025 00:46:03 +0000 Subject: [PATCH 065/142] Normalize field before checking PhantomData in coerce/dispatch impl validation --- .../src/coherence/builtin.rs | 23 ++++++++++++++--- ...hantomdata-in-coerce-and-dispatch-impls.rs | 25 +++++++++++++++++++ 2 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 tests/ui/self/phantomdata-in-coerce-and-dispatch-impls.rs diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index a661e588b952b..b43a808ccdc10 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -260,13 +260,22 @@ fn visit_implementation_of_dispatch_from_dyn(checker: &Checker<'_>) -> Result<() .iter() .filter(|field| { // Ignore PhantomData fields - if tcx.type_of(field.did).instantiate_identity().is_phantom_data() { + let unnormalized_ty = tcx.type_of(field.did).instantiate_identity(); + if tcx + .try_normalize_erasing_regions( + ty::TypingEnv::non_body_analysis(tcx, def_a.did()), + unnormalized_ty, + ) + .unwrap_or(unnormalized_ty) + .is_phantom_data() + { return false; } let ty_a = field.ty(tcx, args_a); let ty_b = field.ty(tcx, args_b); + // FIXME: We could do normalization here, but is it really worth it? if ty_a == ty_b { // Allow 1-ZSTs that don't mention type params. 
// @@ -469,8 +478,16 @@ pub(crate) fn coerce_unsized_info<'tcx>( .filter_map(|(i, f)| { let (a, b) = (f.ty(tcx, args_a), f.ty(tcx, args_b)); - if tcx.type_of(f.did).instantiate_identity().is_phantom_data() { - // Ignore PhantomData fields + // Ignore PhantomData fields + let unnormalized_ty = tcx.type_of(f.did).instantiate_identity(); + if tcx + .try_normalize_erasing_regions( + ty::TypingEnv::non_body_analysis(tcx, def_a.did()), + unnormalized_ty, + ) + .unwrap_or(unnormalized_ty) + .is_phantom_data() + { return None; } diff --git a/tests/ui/self/phantomdata-in-coerce-and-dispatch-impls.rs b/tests/ui/self/phantomdata-in-coerce-and-dispatch-impls.rs new file mode 100644 index 0000000000000..9c7e33830f5fb --- /dev/null +++ b/tests/ui/self/phantomdata-in-coerce-and-dispatch-impls.rs @@ -0,0 +1,25 @@ +//@ check-pass + +#![feature(coerce_unsized, dispatch_from_dyn, unsize)] + +use std::marker::Unsize; +use std::ops::{CoerceUnsized, DispatchFromDyn}; +use std::marker::PhantomData; + +trait Mirror { + type Assoc; +} +impl Mirror for T { + type Assoc = T; +} + +struct W { + t: &'static T, + f: as Mirror>::Assoc, +} + +impl CoerceUnsized> for W where T: Unsize {} + +impl DispatchFromDyn> for W where T: Unsize {} + +fn main() {} From 2669f2a7c730766600a079be5fe81b6bc1186c45 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Mon, 13 Jan 2025 02:03:41 +0000 Subject: [PATCH 066/142] Do not consider traits that have unsatisfied const conditions to be conditionally const --- .../src/check_consts/check.rs | 26 +++++++++++++------ tests/ui/issues/issue-25901.rs | 2 +- tests/ui/issues/issue-25901.stderr | 13 ++++++---- .../arbitrary-self-from-method-substs-ice.rs | 2 +- ...bitrary-self-from-method-substs-ice.stderr | 9 +++---- .../const-traits/cross-crate.stocknc.stderr | 8 +++--- 6 files changed, 34 insertions(+), 26 deletions(-) diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index 7a5f6c1726846..6c940124193c7 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -35,6 +35,12 @@ use crate::errors; type QualifResults<'mir, 'tcx, Q> = rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>; +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum ConstConditionsHold { + Yes, + No, +} + #[derive(Default)] pub(crate) struct Qualifs<'mir, 'tcx> { has_mut_interior: Option>, @@ -376,15 +382,15 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { callee: DefId, callee_args: ty::GenericArgsRef<'tcx>, call_span: Span, - ) -> bool { + ) -> Option { let tcx = self.tcx; if !tcx.is_conditionally_const(callee) { - return false; + return None; } let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args); if const_conditions.is_empty() { - return false; + return None; } let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx)); @@ -413,12 +419,13 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { })); let errors = ocx.select_all_or_error(); - if !errors.is_empty() { + if errors.is_empty() { + Some(ConstConditionsHold::Yes) + } else { tcx.dcx() .span_delayed_bug(call_span, "this should have reported a ~const error in HIR"); + Some(ConstConditionsHold::No) } - - true } pub fn check_drop_terminator( @@ -706,7 +713,10 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { trace!("attempting to call a trait method"); let trait_is_const = tcx.is_const_trait(trait_did); - if trait_is_const { + // Only consider a trait to be 
const if the const conditions hold. + // Otherwise, it's really misleading to call something "conditionally" + // const when it's very obviously not conditionally const. + if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) { // Trait calls are always conditionally-const. self.check_op(ops::ConditionallyConstCall { callee, @@ -730,7 +740,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { } // Even if we know the callee, ensure we can use conditionally-const calls. - if has_const_conditions { + if has_const_conditions.is_some() { self.check_op(ops::ConditionallyConstCall { callee, args: fn_args, diff --git a/tests/ui/issues/issue-25901.rs b/tests/ui/issues/issue-25901.rs index bfcee1ac503a2..0ca34da95f55d 100644 --- a/tests/ui/issues/issue-25901.rs +++ b/tests/ui/issues/issue-25901.rs @@ -2,7 +2,7 @@ struct A; struct B; static S: &'static B = &A; -//~^ ERROR cannot perform conditionally-const deref coercion +//~^ ERROR cannot perform non-const deref coercion use std::ops::Deref; diff --git a/tests/ui/issues/issue-25901.stderr b/tests/ui/issues/issue-25901.stderr index a954f38af8397..233b5bfee5075 100644 --- a/tests/ui/issues/issue-25901.stderr +++ b/tests/ui/issues/issue-25901.stderr @@ -1,4 +1,4 @@ -error[E0658]: cannot perform conditionally-const deref coercion on `A` in statics +error[E0015]: cannot perform non-const deref coercion on `A` in statics --> $DIR/issue-25901.rs:4:24 | LL | static S: &'static B = &A; @@ -10,11 +10,14 @@ note: deref defined here | LL | type Target = B; | ^^^^^^^^^^^ +note: impl defined here, but it is not `const` + --> $DIR/issue-25901.rs:9:1 + | +LL | impl Deref for A { + | ^^^^^^^^^^^^^^^^ = note: calls in statics are limited to constant functions, tuple structs and tuple variants - = note: see issue #67792 for more information - = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + = note: consider wrapping this expression in `std::sync::LazyLock::new(|| ...)` error: aborting due to 1 previous error -For more information about this error, try `rustc --explain E0658`. +For more information about this error, try `rustc --explain E0015`. 
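To make the new three-way outcome concrete, here is a small self-contained sketch; the `ConstConditionsHold` enum mirrors the one added in this patch, while the `classify` helper and its boolean parameters are invented stand-ins for the checker's actual trait-solver query. `None` means the callee has no const conditions at all, `Some(Yes)` means they exist and are satisfied (the only case reported as a conditionally-const call), and `Some(No)` now falls through to the ordinary non-const diagnostics.

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}

// Invented helper: the real checker consults the trait solver rather than
// taking booleans.
fn classify(has_const_conditions: bool, conditions_hold: bool) -> Option<ConstConditionsHold> {
    if !has_const_conditions {
        return None;
    }
    if conditions_hold { Some(ConstConditionsHold::Yes) } else { Some(ConstConditionsHold::No) }
}

fn main() {
    assert_eq!(classify(false, false), None);
    assert_eq!(classify(true, true), Some(ConstConditionsHold::Yes));
    assert_eq!(classify(true, false), Some(ConstConditionsHold::No));
}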
diff --git a/tests/ui/self/arbitrary-self-from-method-substs-ice.rs b/tests/ui/self/arbitrary-self-from-method-substs-ice.rs index 2d6df816bb1ec..46e4afd8532e1 100644 --- a/tests/ui/self/arbitrary-self-from-method-substs-ice.rs +++ b/tests/ui/self/arbitrary-self-from-method-substs-ice.rs @@ -11,7 +11,7 @@ impl Foo { //~^ ERROR invalid generic `self` parameter type //~| ERROR destructor of `R` cannot be evaluated at compile-time self.0 - //~^ ERROR cannot perform conditionally-const deref coercion on `R` in constant functions + //~^ ERROR cannot perform non-const deref coercion on `R` in constant functions } } diff --git a/tests/ui/self/arbitrary-self-from-method-substs-ice.stderr b/tests/ui/self/arbitrary-self-from-method-substs-ice.stderr index e6319d5a2c9c4..f217370b024b5 100644 --- a/tests/ui/self/arbitrary-self-from-method-substs-ice.stderr +++ b/tests/ui/self/arbitrary-self-from-method-substs-ice.stderr @@ -1,4 +1,4 @@ -error[E0658]: cannot perform conditionally-const deref coercion on `R` in constant functions +error[E0015]: cannot perform non-const deref coercion on `R` in constant functions --> $DIR/arbitrary-self-from-method-substs-ice.rs:13:9 | LL | self.0 @@ -6,9 +6,6 @@ LL | self.0 | = note: attempting to deref into `Foo` = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants - = note: see issue #67792 for more information - = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date error[E0493]: destructor of `R` cannot be evaluated at compile-time --> $DIR/arbitrary-self-from-method-substs-ice.rs:10:43 @@ -30,5 +27,5 @@ LL | const fn get>(self: R) -> u32 { error: aborting due to 3 previous errors -Some errors have detailed explanations: E0493, E0658, E0801. -For more information about an error, try `rustc --explain E0493`. +Some errors have detailed explanations: E0015, E0493, E0801. +For more information about an error, try `rustc --explain E0015`. diff --git a/tests/ui/traits/const-traits/cross-crate.stocknc.stderr b/tests/ui/traits/const-traits/cross-crate.stocknc.stderr index 2358731c901f6..fb47bf9169fe2 100644 --- a/tests/ui/traits/const-traits/cross-crate.stocknc.stderr +++ b/tests/ui/traits/const-traits/cross-crate.stocknc.stderr @@ -1,13 +1,10 @@ -error[E0658]: cannot call conditionally-const method `::func` in constant functions +error[E0015]: cannot call non-const method `::func` in constant functions --> $DIR/cross-crate.rs:19:14 | LL | NonConst.func(); | ^^^^^^ | = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants - = note: see issue #67792 for more information - = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date error[E0658]: cannot call conditionally-const method `::func` in constant functions --> $DIR/cross-crate.rs:22:11 @@ -22,4 +19,5 @@ LL | Const.func(); error: aborting due to 2 previous errors -For more information about this error, try `rustc --explain E0658`. +Some errors have detailed explanations: E0015, E0658. +For more information about an error, try `rustc --explain E0015`. 
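The stderr updates above amount to the following user-level distinction. This is an illustrative sketch only: the `Func` trait, the `Konst`/`Plain` types and the call sites are invented, it assumes a nightly toolchain with `const_trait_impl`, and the rejected call is left commented out so the sketch itself compiles.

#![feature(const_trait_impl)]

#[const_trait]
trait Func {
    fn func(&self);
}

struct Konst;
impl const Func for Konst {
    fn func(&self) {}
}

struct Plain;
impl Func for Plain {
    fn func(&self) {}
}

// The `~const` bound can actually be satisfied, so this is a genuinely
// conditionally-const call.
const fn call_const<T: ~const Func>(t: &T) {
    t.func();
}

const fn call_plain(_t: &Plain) {
    // _t.func(); // with this change: plain E0015 "cannot call non-const method",
    //            // no longer described as a conditionally-const call
}

fn main() {
    call_const(&Konst);
    call_plain(&Plain);
}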
From b89a6e49329f0813af07fa3f8472fb0333a8bc36 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Jan 2025 18:04:15 +0000 Subject: [PATCH 067/142] Consider more erroneous layouts as LayoutError::ReferencesError to suppress spurious errors --- compiler/rustc_ty_utils/src/layout.rs | 32 +++++++----- tests/ui/enum-discriminant/eval-error.rs | 37 ++++++++++++++ tests/ui/enum-discriminant/eval-error.stderr | 51 +++++++++++++++++++ .../layout/base-layout-is-sized-ice-123078.rs | 1 - .../base-layout-is-sized-ice-123078.stderr | 14 +---- tests/ui/layout/debug.stderr | 2 +- 6 files changed, 111 insertions(+), 26 deletions(-) create mode 100644 tests/ui/enum-discriminant/eval-error.rs create mode 100644 tests/ui/enum-discriminant/eval-error.stderr diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index ab606478c5136..17be0bd0ab9e1 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -105,21 +105,27 @@ fn map_error<'tcx>( // See `tests/ui/layout/trivial-bounds-sized.rs` for an example. assert!(field.layout.is_unsized(), "invalid layout error {err:#?}"); if !field.ty.is_sized(cx.tcx(), cx.typing_env) { - cx.tcx().dcx().delayed_bug(format!( + let guar = cx.tcx().dcx().delayed_bug(format!( "encountered unexpected unsized field in layout of {ty:?}: {field:#?}" )); + LayoutError::ReferencesError(guar) + } else { + LayoutError::Unknown(ty) } - LayoutError::Unknown(ty) } LayoutCalculatorError::EmptyUnion => { // This is always a compile error. - cx.tcx().dcx().delayed_bug(format!("computed layout of empty union: {ty:?}")); - LayoutError::Unknown(ty) + let guar = + cx.tcx().dcx().delayed_bug(format!("computed layout of empty union: {ty:?}")); + LayoutError::ReferencesError(guar) } LayoutCalculatorError::ReprConflict => { // packed enums are the only known trigger of this, but others might arise - cx.tcx().dcx().delayed_bug(format!("computed impossible repr (packed enum?): {ty:?}")); - LayoutError::Unknown(ty) + let guar = cx + .tcx() + .dcx() + .delayed_bug(format!("computed impossible repr (packed enum?): {ty:?}")); + LayoutError::ReferencesError(guar) } }; error(cx, err) @@ -432,8 +438,10 @@ fn layout_of_uncached<'tcx>( ty::Adt(def, args) if def.repr().simd() => { if !def.is_struct() { // Should have yielded E0517 by now. 
- tcx.dcx().delayed_bug("#[repr(simd)] was applied to an ADT that is not a struct"); - return Err(error(cx, LayoutError::Unknown(ty))); + let guar = tcx + .dcx() + .delayed_bug("#[repr(simd)] was applied to an ADT that is not a struct"); + return Err(error(cx, LayoutError::ReferencesError(guar))); } let fields = &def.non_enum_variant().fields; @@ -459,10 +467,10 @@ fn layout_of_uncached<'tcx>( // (should be caught by typeck) for fi in fields { if fi.ty(tcx, args) != f0_ty { - tcx.dcx().delayed_bug( + let guar = tcx.dcx().delayed_bug( "#[repr(simd)] was applied to an ADT with heterogeneous field type", ); - return Err(error(cx, LayoutError::Unknown(ty))); + return Err(error(cx, LayoutError::ReferencesError(guar))); } } @@ -567,11 +575,11 @@ fn layout_of_uncached<'tcx>( if def.is_union() { if def.repr().pack.is_some() && def.repr().align.is_some() { - tcx.dcx().span_delayed_bug( + let guar = tcx.dcx().span_delayed_bug( tcx.def_span(def.did()), "union cannot be packed and aligned", ); - return Err(error(cx, LayoutError::Unknown(ty))); + return Err(error(cx, LayoutError::ReferencesError(guar))); } return Ok(tcx.mk_layout( diff --git a/tests/ui/enum-discriminant/eval-error.rs b/tests/ui/enum-discriminant/eval-error.rs new file mode 100644 index 0000000000000..f2c3b58162765 --- /dev/null +++ b/tests/ui/enum-discriminant/eval-error.rs @@ -0,0 +1,37 @@ +union Foo { + a: str, + //~^ ERROR the size for values of type `str` cannot be known at compilation time + //~| ERROR field must implement `Copy` or be wrapped in `ManuallyDrop<...>` +} + +enum Bar { + Boo = { + let _: Option = None; + 0 + }, +} + +union Foo2 {} +//~^ ERROR unions cannot have zero fields + +enum Bar2 { + Boo = { + let _: Option = None; + 0 + }, +} + +#[repr(u8, packed)] +//~^ ERROR attribute should be applied to a struct or union +enum Foo3 { + A +} + +enum Bar3 { + Boo = { + let _: Option = None; + 0 + }, +} + +fn main() {} diff --git a/tests/ui/enum-discriminant/eval-error.stderr b/tests/ui/enum-discriminant/eval-error.stderr new file mode 100644 index 0000000000000..0f12308de3c1c --- /dev/null +++ b/tests/ui/enum-discriminant/eval-error.stderr @@ -0,0 +1,51 @@ +error: unions cannot have zero fields + --> $DIR/eval-error.rs:14:1 + | +LL | union Foo2 {} + | ^^^^^^^^^^^^^ + +error[E0517]: attribute should be applied to a struct or union + --> $DIR/eval-error.rs:24:12 + | +LL | #[repr(u8, packed)] + | ^^^^^^ +LL | +LL | / enum Foo3 { +LL | | A +LL | | } + | |_- not a struct or union + +error[E0277]: the size for values of type `str` cannot be known at compilation time + --> $DIR/eval-error.rs:2:8 + | +LL | a: str, + | ^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `str` + = note: no field of a union may have a dynamically sized type + = help: change the field's type to have a statically known size +help: borrowed types always have a statically known size + | +LL | a: &str, + | + +help: the `Box` type always has a statically known size and allocates its contents in the heap + | +LL | a: Box, + | ++++ + + +error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union + --> $DIR/eval-error.rs:2:5 + | +LL | a: str, + | ^^^^^^ + | + = note: union fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>` +help: wrap the field type in `ManuallyDrop<...>` + | +LL | a: std::mem::ManuallyDrop, + | +++++++++++++++++++++++ + + +error: aborting due to 4 previous errors + +Some errors have detailed 
explanations: E0277, E0517, E0740. +For more information about an error, try `rustc --explain E0277`. diff --git a/tests/ui/layout/base-layout-is-sized-ice-123078.rs b/tests/ui/layout/base-layout-is-sized-ice-123078.rs index b1c33e1507551..15f11145f845e 100644 --- a/tests/ui/layout/base-layout-is-sized-ice-123078.rs +++ b/tests/ui/layout/base-layout-is-sized-ice-123078.rs @@ -8,7 +8,6 @@ struct S { } const C: S = unsafe { std::mem::transmute(()) }; -//~^ ERROR cannot transmute between types of different sizes, or dependently-sized types const _: [(); { C; 0 diff --git a/tests/ui/layout/base-layout-is-sized-ice-123078.stderr b/tests/ui/layout/base-layout-is-sized-ice-123078.stderr index 455bd2cbf8b6e..9181368533a46 100644 --- a/tests/ui/layout/base-layout-is-sized-ice-123078.stderr +++ b/tests/ui/layout/base-layout-is-sized-ice-123078.stderr @@ -16,16 +16,6 @@ help: the `Box` type always has a statically known size and allocates its conten LL | a: Box<[u8]>, | ++++ + -error[E0512]: cannot transmute between types of different sizes, or dependently-sized types - --> $DIR/base-layout-is-sized-ice-123078.rs:10:23 - | -LL | const C: S = unsafe { std::mem::transmute(()) }; - | ^^^^^^^^^^^^^^^^^^^ - | - = note: source type: `()` (0 bits) - = note: target type: `S` (size can vary because of [u8]) - -error: aborting due to 2 previous errors +error: aborting due to 1 previous error -Some errors have detailed explanations: E0277, E0512. -For more information about an error, try `rustc --explain E0277`. +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/layout/debug.stderr b/tests/ui/layout/debug.stderr index 1fc55511384f2..319c0de26a90e 100644 --- a/tests/ui/layout/debug.stderr +++ b/tests/ui/layout/debug.stderr @@ -590,7 +590,7 @@ LL | type Impossible = (str, str); = help: the trait `Sized` is not implemented for `str` = note: only the last element of a tuple may have a dynamically sized type -error: the type `EmptyUnion` has an unknown layout +error: the type has an unknown layout --> $DIR/debug.rs:83:1 | LL | union EmptyUnion {} From 2743df848b0970ad9d97b6ce8222d4fd64898b61 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sun, 12 Jan 2025 20:52:30 +0000 Subject: [PATCH 068/142] Enforce syntactical stability of const traits in HIR --- compiler/rustc_middle/src/middle/stability.rs | 89 ++++++++++++++++++- compiler/rustc_passes/src/stability.rs | 36 +++++++- compiler/rustc_resolve/src/macros.rs | 1 + library/core/src/intrinsics/fallback.rs | 1 + library/core/src/marker.rs | 1 + library/core/src/ops/arith.rs | 1 + library/core/src/ops/deref.rs | 2 + library/core/src/ops/drop.rs | 1 + tests/ui/consts/promoted-const-drop.rs | 2 +- tests/ui/consts/promoted_const_call.rs | 16 ++-- tests/ui/consts/promoted_const_call.stderr | 37 ++++---- .../missing-const-stability.rs | 1 + .../missing-const-stability.stderr | 14 ++- .../const-traits/auxiliary/staged-api.rs | 1 + .../call-const-trait-method-pass.rs | 2 +- .../const-traits/const-and-non-const-impl.rs | 2 +- tests/ui/traits/const-traits/generic-bound.rs | 2 +- .../const-traits/syntactical-unstable.rs | 34 +++++++ .../const-traits/syntactical-unstable.stderr | 67 ++++++++++++++ .../trait-default-body-stability.rs | 1 + .../trait-default-body-stability.stderr | 4 +- 21 files changed, 278 insertions(+), 37 deletions(-) create mode 100644 tests/ui/traits/const-traits/syntactical-unstable.rs create mode 100644 tests/ui/traits/const-traits/syntactical-unstable.stderr diff --git 
a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index 84c3c2eb49e27..75ca289056e33 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -30,6 +30,14 @@ pub enum StabilityLevel { Stable, } +#[derive(Copy, Clone)] +pub enum UnstableKind { + /// Enforcing regular stability of an item + Regular, + /// Enforcing const stability of an item + Const(Span), +} + /// An entry in the `depr_map`. #[derive(Copy, Clone, HashStable, Debug, Encodable, Decodable)] pub struct DeprecationEntry { @@ -108,10 +116,16 @@ pub fn report_unstable( is_soft: bool, span: Span, soft_handler: impl FnOnce(&'static Lint, Span, String), + kind: UnstableKind, ) { + let qual = match kind { + UnstableKind::Regular => "", + UnstableKind::Const(_) => " const", + }; + let msg = match reason { - Some(r) => format!("use of unstable library feature `{feature}`: {r}"), - None => format!("use of unstable library feature `{feature}`"), + Some(r) => format!("use of unstable{qual} library feature `{feature}`: {r}"), + None => format!("use of unstable{qual} library feature `{feature}`"), }; if is_soft { @@ -121,6 +135,9 @@ pub fn report_unstable( if let Some((inner_types, msg, sugg, applicability)) = suggestion { err.span_suggestion(inner_types, msg, sugg, applicability); } + if let UnstableKind::Const(kw) = kind { + err.span_label(kw, "trait is not stable as const yet"); + } err.emit(); } } @@ -587,6 +604,7 @@ impl<'tcx> TyCtxt<'tcx> { is_soft, span, soft_handler, + UnstableKind::Regular, ), EvalResult::Unmarked => unmarked(span, def_id), } @@ -594,6 +612,73 @@ impl<'tcx> TyCtxt<'tcx> { is_allowed } + /// This function is analogous to `check_optional_stability` but with the logic in + /// `eval_stability_allow_unstable` inlined, and which operating on const stability + /// instead of regular stability. + /// + /// This enforces *syntactical* const stability of const traits. In other words, + /// it enforces the ability to name `~const`/`const` traits in trait bounds in various + /// syntax positions in HIR (including in the trait of an impl header). + pub fn check_const_stability(self, def_id: DefId, span: Span, const_kw_span: Span) { + let is_staged_api = self.lookup_stability(def_id.krate.as_def_id()).is_some(); + if !is_staged_api { + return; + } + + // Only the cross-crate scenario matters when checking unstable APIs + let cross_crate = !def_id.is_local(); + if !cross_crate { + return; + } + + let stability = self.lookup_const_stability(def_id); + debug!( + "stability: \ + inspecting def_id={:?} span={:?} of stability={:?}", + def_id, span, stability + ); + + match stability { + Some(ConstStability { + level: attr::StabilityLevel::Unstable { reason, issue, is_soft, implied_by, .. }, + feature, + .. + }) => { + assert!(!is_soft); + + if span.allows_unstable(feature) { + debug!("body stability: skipping span={:?} since it is internal", span); + return; + } + if self.features().enabled(feature) { + return; + } + + // If this item was previously part of a now-stabilized feature which is still + // enabled (i.e. the user hasn't removed the attribute for the stabilized feature + // yet) then allow use of this item. 
+ if let Some(implied_by) = implied_by + && self.features().enabled(implied_by) + { + return; + } + + report_unstable( + self.sess, + feature, + reason.to_opt_reason(), + issue, + None, + false, + span, + |_, _, _| {}, + UnstableKind::Const(const_kw_span), + ); + } + Some(_) | None => {} + } + } + pub fn lookup_deprecation(self, id: DefId) -> Option { self.lookup_deprecation_entry(id).map(|depr| depr.attr) } diff --git a/compiler/rustc_passes/src/stability.rs b/compiler/rustc_passes/src/stability.rs index 30f9e698521f1..aabc33b015e5e 100644 --- a/compiler/rustc_passes/src/stability.rs +++ b/compiler/rustc_passes/src/stability.rs @@ -593,9 +593,11 @@ impl<'tcx> MissingStabilityAnnotations<'tcx> { } fn check_missing_const_stability(&self, def_id: LocalDefId, span: Span) { - let is_const = self.tcx.is_const_fn(def_id.to_def_id()); + let is_const = self.tcx.is_const_fn(def_id.to_def_id()) + || (self.tcx.def_kind(def_id.to_def_id()) == DefKind::Trait + && self.tcx.is_const_trait(def_id.to_def_id())); - // Reachable const fn must have a stability attribute. + // Reachable const fn/trait must have a stability attribute. if is_const && self.effective_visibilities.is_reachable(def_id) && self.tcx.lookup_const_stability(def_id).is_none() @@ -772,7 +774,13 @@ impl<'tcx> Visitor<'tcx> for Checker<'tcx> { // For implementations of traits, check the stability of each item // individually as it's possible to have a stable trait with unstable // items. - hir::ItemKind::Impl(hir::Impl { of_trait: Some(ref t), self_ty, items, .. }) => { + hir::ItemKind::Impl(hir::Impl { + of_trait: Some(ref t), + self_ty, + items, + constness, + .. + }) => { let features = self.tcx.features(); if features.staged_api() { let attrs = self.tcx.hir().attrs(item.hir_id()); @@ -814,6 +822,16 @@ impl<'tcx> Visitor<'tcx> for Checker<'tcx> { } } + match constness { + rustc_hir::Constness::Const => { + if let Some(def_id) = t.trait_def_id() { + // FIXME(const_trait_impl): Improve the span here. 
+ self.tcx.check_const_stability(def_id, t.path.span, t.path.span); + } + } + rustc_hir::Constness::NotConst => {} + } + for impl_item_ref in *items { let impl_item = self.tcx.associated_item(impl_item_ref.id.owner_id); @@ -829,6 +847,18 @@ impl<'tcx> Visitor<'tcx> for Checker<'tcx> { intravisit::walk_item(self, item); } + fn visit_poly_trait_ref(&mut self, t: &'tcx hir::PolyTraitRef<'tcx>) { + match t.modifiers.constness { + hir::BoundConstness::Always(span) | hir::BoundConstness::Maybe(span) => { + if let Some(def_id) = t.trait_ref.trait_def_id() { + self.tcx.check_const_stability(def_id, t.trait_ref.path.span, span); + } + } + hir::BoundConstness::Never => {} + } + intravisit::walk_poly_trait_ref(self, t); + } + fn visit_path(&mut self, path: &hir::Path<'tcx>, id: hir::HirId) { if let Some(def_id) = path.res.opt_def_id() { let method_span = path.segments.last().map(|s| s.ident.span); diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index b24e343c58df8..25e35fead7e83 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -1031,6 +1031,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { is_soft, span, soft_handler, + stability::UnstableKind::Regular, ); } } diff --git a/library/core/src/intrinsics/fallback.rs b/library/core/src/intrinsics/fallback.rs index 1779126b180ea..70484e4d0f2a1 100644 --- a/library/core/src/intrinsics/fallback.rs +++ b/library/core/src/intrinsics/fallback.rs @@ -8,6 +8,7 @@ #![allow(missing_docs)] #[const_trait] +#[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")] pub trait CarryingMulAdd: Copy + 'static { type Unsigned: Copy + 'static; fn carrying_mul_add( diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index 4af9b666e54fe..01af964a83e26 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -952,6 +952,7 @@ marker_impls! { /// This should be used for `~const` bounds, /// as non-const bounds will always hold for every type. #[unstable(feature = "const_destruct", issue = "133214")] +#[rustc_const_unstable(feature = "const_destruct", issue = "133214")] #[lang = "destruct"] #[rustc_on_unimplemented(message = "can't drop `{Self}`", append_const_msg)] #[rustc_deny_explicit_impl] diff --git a/library/core/src/ops/arith.rs b/library/core/src/ops/arith.rs index 810b906b8715e..fe7ff2d9ede6a 100644 --- a/library/core/src/ops/arith.rs +++ b/library/core/src/ops/arith.rs @@ -65,6 +65,7 @@ /// ``` #[lang = "add"] #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_const_unstable(feature = "const_ops", issue = "90080")] #[rustc_on_unimplemented( on(all(_Self = "{integer}", Rhs = "{float}"), message = "cannot add a float to an integer",), on(all(_Self = "{float}", Rhs = "{integer}"), message = "cannot add an integer to a float",), diff --git a/library/core/src/ops/deref.rs b/library/core/src/ops/deref.rs index ed0d30a0f5026..11490ea2bfcb4 100644 --- a/library/core/src/ops/deref.rs +++ b/library/core/src/ops/deref.rs @@ -134,6 +134,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Deref"] #[const_trait] +#[rustc_const_unstable(feature = "const_deref", issue = "88955")] pub trait Deref { /// The resulting type after dereferencing. 
#[stable(feature = "rust1", since = "1.0.0")] @@ -263,6 +264,7 @@ impl const Deref for &mut T { #[doc(alias = "*")] #[stable(feature = "rust1", since = "1.0.0")] #[const_trait] +#[rustc_const_unstable(feature = "const_deref", issue = "88955")] pub trait DerefMut: ~const Deref { /// Mutably dereferences the value. #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/core/src/ops/drop.rs b/library/core/src/ops/drop.rs index 78b5252195f82..e024b7fb4d301 100644 --- a/library/core/src/ops/drop.rs +++ b/library/core/src/ops/drop.rs @@ -204,6 +204,7 @@ #[lang = "drop"] #[stable(feature = "rust1", since = "1.0.0")] #[const_trait] +#[rustc_const_unstable(feature = "const_destruct", issue = "133214")] pub trait Drop { /// Executes the destructor for this type. /// diff --git a/tests/ui/consts/promoted-const-drop.rs b/tests/ui/consts/promoted-const-drop.rs index e09c30ea7857b..1d1897e15d443 100644 --- a/tests/ui/consts/promoted-const-drop.rs +++ b/tests/ui/consts/promoted-const-drop.rs @@ -1,4 +1,4 @@ -#![feature(const_trait_impl)] +#![feature(const_trait_impl, const_destruct)] struct A(); diff --git a/tests/ui/consts/promoted_const_call.rs b/tests/ui/consts/promoted_const_call.rs index c3920ff7241bc..79cb2ea2a02a0 100644 --- a/tests/ui/consts/promoted_const_call.rs +++ b/tests/ui/consts/promoted_const_call.rs @@ -1,6 +1,4 @@ -//@ known-bug: #103507 - -#![feature(const_trait_impl)] +#![feature(const_trait_impl, const_destruct)] struct Panic; impl const Drop for Panic { fn drop(&mut self) { panic!(); } } @@ -8,15 +6,15 @@ impl const Drop for Panic { fn drop(&mut self) { panic!(); } } pub const fn id(x: T) -> T { x } pub const C: () = { let _: &'static _ = &id(&Panic); - //FIXME ~^ ERROR: temporary value dropped while borrowed - //FIXME ~| ERROR: temporary value dropped while borrowed + //~^ ERROR: temporary value dropped while borrowed + //~| ERROR: temporary value dropped while borrowed }; fn main() { let _: &'static _ = &id(&Panic); - //FIXME ~^ ERROR: temporary value dropped while borrowed - //FIXME ~| ERROR: temporary value dropped while borrowed + //~^ ERROR: temporary value dropped while borrowed + //~| ERROR: temporary value dropped while borrowed let _: &'static _ = &&(Panic, 0).1; - //FIXME~^ ERROR: temporary value dropped while borrowed - //FIXME~| ERROR: temporary value dropped while borrowed + //~^ ERROR: temporary value dropped while borrowed + //~| ERROR: temporary value dropped while borrowed } diff --git a/tests/ui/consts/promoted_const_call.stderr b/tests/ui/consts/promoted_const_call.stderr index 40c6d083b066f..7a9cdd687048d 100644 --- a/tests/ui/consts/promoted_const_call.stderr +++ b/tests/ui/consts/promoted_const_call.stderr @@ -1,17 +1,25 @@ -error[E0493]: destructor of `Panic` cannot be evaluated at compile-time - --> $DIR/promoted_const_call.rs:10:30 +error[E0716]: temporary value dropped while borrowed + --> $DIR/promoted_const_call.rs:8:26 | LL | let _: &'static _ = &id(&Panic); - | ^^^^^ - value is dropped here - | | - | the destructor for this type cannot be evaluated in constants + | ---------- ^^^^^^^^^^ creates a temporary value which is freed while still in use + | | + | type annotation requires that borrow lasts for `'static` +... 
+LL | }; + | - temporary value is freed at the end of this statement + +error[E0716]: temporary value dropped while borrowed + --> $DIR/promoted_const_call.rs:8:30 | - = note: see issue #133214 for more information - = help: add `#![feature(const_destruct)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date +LL | let _: &'static _ = &id(&Panic); + | ---------- ^^^^^ - temporary value is freed at the end of this statement + | | | + | | creates a temporary value which is freed while still in use + | type annotation requires that borrow lasts for `'static` error[E0716]: temporary value dropped while borrowed - --> $DIR/promoted_const_call.rs:16:26 + --> $DIR/promoted_const_call.rs:14:26 | LL | let _: &'static _ = &id(&Panic); | ---------- ^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -22,7 +30,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promoted_const_call.rs:16:30 + --> $DIR/promoted_const_call.rs:14:30 | LL | let _: &'static _ = &id(&Panic); | ---------- ^^^^^ - temporary value is freed at the end of this statement @@ -31,7 +39,7 @@ LL | let _: &'static _ = &id(&Panic); | type annotation requires that borrow lasts for `'static` error[E0716]: temporary value dropped while borrowed - --> $DIR/promoted_const_call.rs:19:26 + --> $DIR/promoted_const_call.rs:17:26 | LL | let _: &'static _ = &&(Panic, 0).1; | ---------- ^^^^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -42,7 +50,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promoted_const_call.rs:19:27 + --> $DIR/promoted_const_call.rs:17:27 | LL | let _: &'static _ = &&(Panic, 0).1; | ---------- ^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -52,7 +60,6 @@ LL | let _: &'static _ = &&(Panic, 0).1; LL | } | - temporary value is freed at the end of this statement -error: aborting due to 5 previous errors +error: aborting due to 6 previous errors -Some errors have detailed explanations: E0493, E0716. -For more information about an error, try `rustc --explain E0493`. +For more information about this error, try `rustc --explain E0716`. 
diff --git a/tests/ui/stability-attribute/missing-const-stability.rs b/tests/ui/stability-attribute/missing-const-stability.rs index 1982073073621..c3e72e8394880 100644 --- a/tests/ui/stability-attribute/missing-const-stability.rs +++ b/tests/ui/stability-attribute/missing-const-stability.rs @@ -22,6 +22,7 @@ impl Foo { #[stable(feature = "stable", since = "1.0.0")] #[const_trait] pub trait Bar { +//~^ ERROR trait has missing const stability attribute #[stable(feature = "stable", since = "1.0.0")] fn fun(); } diff --git a/tests/ui/stability-attribute/missing-const-stability.stderr b/tests/ui/stability-attribute/missing-const-stability.stderr index baa4c34af0600..09461e6fb54d6 100644 --- a/tests/ui/stability-attribute/missing-const-stability.stderr +++ b/tests/ui/stability-attribute/missing-const-stability.stderr @@ -4,8 +4,18 @@ error: function has missing const stability attribute LL | pub const fn foo() {} | ^^^^^^^^^^^^^^^^^^^^^ +error: trait has missing const stability attribute + --> $DIR/missing-const-stability.rs:24:1 + | +LL | / pub trait Bar { +LL | | +LL | | #[stable(feature = "stable", since = "1.0.0")] +LL | | fn fun(); +LL | | } + | |_^ + error: function has missing const stability attribute - --> $DIR/missing-const-stability.rs:36:1 + --> $DIR/missing-const-stability.rs:37:1 | LL | pub const unsafe fn size_of_val(x: *const T) -> usize { 42 } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -16,5 +26,5 @@ error: associated function has missing const stability attribute LL | pub const fn foo() {} | ^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 3 previous errors +error: aborting due to 4 previous errors diff --git a/tests/ui/traits/const-traits/auxiliary/staged-api.rs b/tests/ui/traits/const-traits/auxiliary/staged-api.rs index abe22db702c65..933a25769dca2 100644 --- a/tests/ui/traits/const-traits/auxiliary/staged-api.rs +++ b/tests/ui/traits/const-traits/auxiliary/staged-api.rs @@ -4,6 +4,7 @@ #![stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_const_unstable(feature = "unstable", issue = "none")] #[const_trait] pub trait MyTrait { #[stable(feature = "rust1", since = "1.0.0")] diff --git a/tests/ui/traits/const-traits/call-const-trait-method-pass.rs b/tests/ui/traits/const-traits/call-const-trait-method-pass.rs index b854b422b3a99..3004647ede075 100644 --- a/tests/ui/traits/const-traits/call-const-trait-method-pass.rs +++ b/tests/ui/traits/const-traits/call-const-trait-method-pass.rs @@ -1,6 +1,6 @@ //@ known-bug: #110395 -#![feature(const_trait_impl)] +#![feature(const_trait_impl, const_ops)] struct Int(i32); diff --git a/tests/ui/traits/const-traits/const-and-non-const-impl.rs b/tests/ui/traits/const-traits/const-and-non-const-impl.rs index 6b96fcf0ae31d..85e2c5d3df62a 100644 --- a/tests/ui/traits/const-traits/const-and-non-const-impl.rs +++ b/tests/ui/traits/const-traits/const-and-non-const-impl.rs @@ -1,6 +1,6 @@ //@ known-bug: #110395 -#![feature(const_trait_impl)] +#![feature(const_trait_impl, const_ops)] pub struct Int(i32); diff --git a/tests/ui/traits/const-traits/generic-bound.rs b/tests/ui/traits/const-traits/generic-bound.rs index 5eb236acde22c..99de21471b20c 100644 --- a/tests/ui/traits/const-traits/generic-bound.rs +++ b/tests/ui/traits/const-traits/generic-bound.rs @@ -1,6 +1,6 @@ //@ check-pass -#![feature(const_trait_impl)] +#![feature(const_trait_impl, const_ops)] use std::marker::PhantomData; diff --git a/tests/ui/traits/const-traits/syntactical-unstable.rs 
b/tests/ui/traits/const-traits/syntactical-unstable.rs new file mode 100644 index 0000000000000..e192e80fabd9e --- /dev/null +++ b/tests/ui/traits/const-traits/syntactical-unstable.rs @@ -0,0 +1,34 @@ +//@ aux-build:staged-api.rs + +// Ensure that we enforce const stability of traits in `~const`/`const` bounds. + +#![feature(const_trait_impl)] + +use std::ops::Deref; + +extern crate staged_api; +use staged_api::MyTrait; + +#[const_trait] +trait Foo: ~const MyTrait { + //~^ ERROR use of unstable const library feature `unstable` + type Item: ~const MyTrait; + //~^ ERROR use of unstable const library feature `unstable` +} + +const fn where_clause() where T: ~const MyTrait {} +//~^ ERROR use of unstable const library feature `unstable` + +const fn nested() where T: Deref {} +//~^ ERROR use of unstable const library feature `unstable` + +const fn rpit() -> impl ~const MyTrait { Local } +//~^ ERROR use of unstable const library feature `unstable` + +struct Local; +impl const MyTrait for Local { +//~^ ERROR use of unstable const library feature `unstable` + fn func() {} +} + +fn main() {} diff --git a/tests/ui/traits/const-traits/syntactical-unstable.stderr b/tests/ui/traits/const-traits/syntactical-unstable.stderr new file mode 100644 index 0000000000000..a2ce2f2b6e9d2 --- /dev/null +++ b/tests/ui/traits/const-traits/syntactical-unstable.stderr @@ -0,0 +1,67 @@ +error[E0658]: use of unstable const library feature `unstable` + --> $DIR/syntactical-unstable.rs:13:19 + | +LL | trait Foo: ~const MyTrait { + | ------ ^^^^^^^ + | | + | trait is not stable as const yet + | + = help: add `#![feature(unstable)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable const library feature `unstable` + --> $DIR/syntactical-unstable.rs:19:44 + | +LL | const fn where_clause() where T: ~const MyTrait {} + | ------ ^^^^^^^ + | | + | trait is not stable as const yet + | + = help: add `#![feature(unstable)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable const library feature `unstable` + --> $DIR/syntactical-unstable.rs:22:52 + | +LL | const fn nested() where T: Deref {} + | ------ ^^^^^^^ + | | + | trait is not stable as const yet + | + = help: add `#![feature(unstable)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable const library feature `unstable` + --> $DIR/syntactical-unstable.rs:25:32 + | +LL | const fn rpit() -> impl ~const MyTrait { Local } + | ------ ^^^^^^^ + | | + | trait is not stable as const yet + | + = help: add `#![feature(unstable)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable const library feature `unstable` + --> $DIR/syntactical-unstable.rs:29:12 + | +LL | impl const MyTrait for Local { + | ^^^^^^^ trait is not stable as const yet + | + = help: add `#![feature(unstable)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable const library feature `unstable` + --> $DIR/syntactical-unstable.rs:15:23 + | +LL | type Item: ~const MyTrait; + | ------ ^^^^^^^ + | | + | trait is not stable as const yet + | + = help: add `#![feature(unstable)]` to the crate 
attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 6 previous errors + +For more information about this error, try `rustc --explain E0658`. diff --git a/tests/ui/traits/const-traits/trait-default-body-stability.rs b/tests/ui/traits/const-traits/trait-default-body-stability.rs index 5f7486eb176af..567f1b3c28424 100644 --- a/tests/ui/traits/const-traits/trait-default-body-stability.rs +++ b/tests/ui/traits/const-traits/trait-default-body-stability.rs @@ -38,6 +38,7 @@ impl const FromResidual for T { } #[stable(feature = "foo", since = "1.0")] +#[rustc_const_unstable(feature = "const_tr", issue = "none")] #[const_trait] pub trait Tr { #[stable(feature = "foo", since = "1.0")] diff --git a/tests/ui/traits/const-traits/trait-default-body-stability.stderr b/tests/ui/traits/const-traits/trait-default-body-stability.stderr index 77b81211e8152..a13d9a1e075b2 100644 --- a/tests/ui/traits/const-traits/trait-default-body-stability.stderr +++ b/tests/ui/traits/const-traits/trait-default-body-stability.stderr @@ -17,7 +17,7 @@ LL | impl const FromResidual for T { = note: adding a non-const method body in the future would be a breaking change error[E0015]: `?` is not allowed on `T` in constant functions - --> $DIR/trait-default-body-stability.rs:45:9 + --> $DIR/trait-default-body-stability.rs:46:9 | LL | T? | ^^ @@ -25,7 +25,7 @@ LL | T? = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants error[E0015]: `?` is not allowed on `T` in constant functions - --> $DIR/trait-default-body-stability.rs:45:9 + --> $DIR/trait-default-body-stability.rs:46:9 | LL | T? | ^^ From 5775190dbacd8334e6b146180c9268a33be15a25 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Mon, 30 Dec 2024 18:48:53 +0000 Subject: [PATCH 069/142] Make sure to scrape region constraints from deeply normalizing type outlives assumptions in borrowck --- .../src/type_check/free_region_relations.rs | 67 +++++++++++++------ .../known-type-outlives-has-constraints.rs | 13 ++++ 2 files changed, 61 insertions(+), 19 deletions(-) create mode 100644 tests/ui/traits/next-solver/known-type-outlives-has-constraints.rs diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index ea965eb654583..edf612f4e97a4 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -5,13 +5,14 @@ use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::region_constraints::GenericKind; use rustc_infer::infer::{InferCtxt, outlives}; +use rustc_infer::traits::ScrubbedTraitError; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::ObligationCause; use rustc_middle::traits::query::OutlivesBound; use rustc_middle::ty::{self, RegionVid, Ty, TypeVisitableExt}; use rustc_span::{ErrorGuaranteed, Span}; -use rustc_trait_selection::error_reporting::InferCtxtErrorExt; -use rustc_trait_selection::solve::deeply_normalize; +use rustc_trait_selection::solve::NoSolution; +use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp; use rustc_trait_selection::traits::query::type_op::{self, TypeOp}; use tracing::{debug, instrument}; use type_op::TypeOpOutput; @@ -229,24 +230,14 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { let mut constraints = vec![]; let 
mut known_type_outlives_obligations = vec![]; for bound in param_env.caller_bounds() { - let Some(mut outlives) = bound.as_type_outlives_clause() else { continue }; - - // In the new solver, normalize the type-outlives obligation assumptions. - if self.infcx.next_trait_solver() { - match deeply_normalize( - self.infcx.at(&ObligationCause::misc(span, defining_ty_def_id), param_env), + if let Some(outlives) = bound.as_type_outlives_clause() { + self.normalize_and_push_type_outlives_obligation( outlives, - ) { - Ok(normalized_outlives) => { - outlives = normalized_outlives; - } - Err(e) => { - self.infcx.err_ctxt().report_fulfillment_errors(e); - } - } - } - - known_type_outlives_obligations.push(outlives); + span, + &mut known_type_outlives_obligations, + &mut constraints, + ); + }; } let unnormalized_input_output_tys = self @@ -356,6 +347,44 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { } } + fn normalize_and_push_type_outlives_obligation( + &self, + mut outlives: ty::PolyTypeOutlivesPredicate<'tcx>, + span: Span, + known_type_outlives_obligations: &mut Vec>, + constraints: &mut Vec<&QueryRegionConstraints<'tcx>>, + ) { + // In the new solver, normalize the type-outlives obligation assumptions. + if self.infcx.next_trait_solver() { + let Ok(TypeOpOutput { + output: normalized_outlives, + constraints: constraints_normalize, + error_info: _, + }) = CustomTypeOp::new( + |ocx| { + ocx.deeply_normalize( + &ObligationCause::dummy_with_span(span), + self.param_env, + outlives, + ) + .map_err(|_: Vec>| NoSolution) + }, + "normalize type outlives obligation", + ) + .fully_perform(self.infcx, span) + else { + self.infcx.dcx().delayed_bug(format!("could not normalize {outlives:?}")); + return; + }; + outlives = normalized_outlives; + if let Some(c) = constraints_normalize { + constraints.push(c); + } + } + + known_type_outlives_obligations.push(outlives); + } + /// Update the type of a single local, which should represent /// either the return type of the MIR or one of its arguments. 
At /// the same time, compute and add any implied bounds that come diff --git a/tests/ui/traits/next-solver/known-type-outlives-has-constraints.rs b/tests/ui/traits/next-solver/known-type-outlives-has-constraints.rs new file mode 100644 index 0000000000000..55fea005ea1ab --- /dev/null +++ b/tests/ui/traits/next-solver/known-type-outlives-has-constraints.rs @@ -0,0 +1,13 @@ +//@ compile-flags: -Znext-solver +//@ check-pass + +trait Norm { + type Out; +} +impl<'a, T: 'a> Norm for &'a T { + type Out = T; +} + +fn hello<'a, T: 'a>() where <&'a T as Norm>::Out: 'a {} + +fn main() {} From 6ad9c1dc5453159a06499ba06c6b42eeb9d8dd1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Tue, 14 Jan 2025 10:44:31 +0100 Subject: [PATCH 070/142] Run clippy for rustc_codegen_gcc --- src/bootstrap/src/core/build_steps/clippy.rs | 24 +++++++++++++++----- src/bootstrap/src/core/builder/mod.rs | 1 + 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/clippy.rs b/src/bootstrap/src/core/build_steps/clippy.rs index c3375b69961bf..fe8c89f7a5394 100644 --- a/src/bootstrap/src/core/build_steps/clippy.rs +++ b/src/bootstrap/src/core/build_steps/clippy.rs @@ -334,6 +334,7 @@ lint_any!( CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri"; Clippy, "src/tools/clippy", "clippy"; CollectLicenseMetadata, "src/tools/collect-license-metadata", "collect-license-metadata"; + CodegenGcc, "compiler/rustc_codegen_gcc", "rustc-codegen-gcc"; Compiletest, "src/tools/compiletest", "compiletest"; CoverageDump, "src/tools/coverage-dump", "coverage-dump"; Jsondocck, "src/tools/jsondocck", "jsondocck"; @@ -400,6 +401,12 @@ impl Step for CI { ], forbid: vec![], }; + builder.ensure(Std { + target: self.target, + config: self.config.merge(&library_clippy_cfg), + crates: vec![], + }); + let compiler_clippy_cfg = LintConfig { allow: vec!["clippy::all".into()], warn: vec![], @@ -419,16 +426,21 @@ impl Step for CI { ], forbid: vec![], }; - - builder.ensure(Std { - target: self.target, - config: self.config.merge(&library_clippy_cfg), - crates: vec![], - }); builder.ensure(Rustc { target: self.target, config: self.config.merge(&compiler_clippy_cfg), crates: vec![], }); + + let rustc_codegen_gcc = LintConfig { + allow: vec![], + warn: vec![], + deny: vec!["warnings".into()], + forbid: vec![], + }; + builder.ensure(CodegenGcc { + target: self.target, + config: self.config.merge(&rustc_codegen_gcc), + }); } } diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs index 17de176242732..b293ac4f35154 100644 --- a/src/bootstrap/src/core/builder/mod.rs +++ b/src/bootstrap/src/core/builder/mod.rs @@ -900,6 +900,7 @@ impl<'a> Builder<'a> { clippy::BuildManifest, clippy::CargoMiri, clippy::Clippy, + clippy::CodegenGcc, clippy::CollectLicenseMetadata, clippy::Compiletest, clippy::CoverageDump, From 4f6902d1ef8c4fc1ef1453e426df435faebef78e Mon Sep 17 00:00:00 2001 From: binarycat Date: Tue, 14 Jan 2025 12:25:25 -0600 Subject: [PATCH 071/142] fix underlining of hovered intra-doc links. 
fixes https://github.com/rust-lang/rust/issues/133484 --- src/librustdoc/html/static/css/rustdoc.css | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 27496381b2ce0..a1ab258ff304a 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -942,6 +942,8 @@ rustdoc-toolbar { pre, .rustdoc.src .example-wrap, .example-wrap .src-line-numbers { background-color: var(--code-block-background-color); border-radius: var(--code-block-border-radius); + /* code blocks within links (such as in most intra-doc links) should be underlined */ + text-decoration: inherit; } #main-content { From 62d5562bc6ff3063f41e4712304484ab63399225 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Tue, 14 Jan 2025 23:32:17 +0100 Subject: [PATCH 072/142] Fix clippy lints --- src/librustdoc/clean/cfg.rs | 6 +++--- src/librustdoc/clean/utils.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index c59dce185f4c1..bfa789b1f3912 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -90,11 +90,11 @@ impl Cfg { }, MetaItemKind::List(ref items) => { let orig_len = items.len(); - let sub_cfgs = + let mut sub_cfgs = items.iter().filter_map(|i| Cfg::parse_nested(i, exclude).transpose()); let ret = match name { - sym::all => sub_cfgs.fold(Ok(Cfg::True), |x, y| Ok(x? & y?)), - sym::any => sub_cfgs.fold(Ok(Cfg::False), |x, y| Ok(x? | y?)), + sym::all => sub_cfgs.try_fold(Cfg::True, |x, y| Ok(x & y?)), + sym::any => sub_cfgs.try_fold(Cfg::False, |x, y| Ok(x | y?)), sym::not => { if orig_len == 1 { let mut sub_cfgs = sub_cfgs.collect::>(); diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index 99e88f878fba6..80dc6b7250cce 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -320,7 +320,7 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol { ); return Symbol::intern("()"); } - PatKind::Guard(p, _) => return name_from_pat(&*p), + PatKind::Guard(p, _) => return name_from_pat(p), PatKind::Range(..) => return kw::Underscore, PatKind::Slice(begin, ref mid, end) => { let begin = begin.iter().map(|p| name_from_pat(p).to_string()); From a911da13ddbfbe8cc9c7dec367cba715b6fb7f87 Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Sun, 15 Dec 2024 11:03:37 +0000 Subject: [PATCH 073/142] Use a C-safe return type for `__rust_[ui]128_*` overflowing intrinsics Combined with [1], this will change the overflowing multiplication operations to return an `extern "C"`-safe type. 
Link: https://github.com/rust-lang/compiler-builtins/pull/735 [1] --- .../src/codegen_i128.rs | 18 +-- .../src/compiler_builtins.rs | 2 +- compiler/rustc_codegen_gcc/src/int.rs | 123 +++++++++--------- .../rustc_codegen_gcc/src/intrinsic/mod.rs | 6 +- 4 files changed, 75 insertions(+), 74 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/codegen_i128.rs b/compiler/rustc_codegen_cranelift/src/codegen_i128.rs index 025667e66b2a2..dcfd7ddabbc42 100644 --- a/compiler/rustc_codegen_cranelift/src/codegen_i128.rs +++ b/compiler/rustc_codegen_cranelift/src/codegen_i128.rs @@ -76,20 +76,22 @@ pub(crate) fn maybe_codegen_mul_checked<'tcx>( } let is_signed = type_sign(lhs.layout().ty); - - let out_ty = Ty::new_tup(fx.tcx, &[lhs.layout().ty, fx.tcx.types.bool]); - let out_place = CPlace::new_stack_slot(fx, fx.layout_of(out_ty)); + let oflow_out_place = CPlace::new_stack_slot(fx, fx.layout_of(fx.tcx.types.i32)); let param_types = vec![ - AbiParam::special(fx.pointer_type, ArgumentPurpose::StructReturn), AbiParam::new(types::I128), AbiParam::new(types::I128), + AbiParam::special(fx.pointer_type, ArgumentPurpose::Normal), ]; - let args = [out_place.to_ptr().get_addr(fx), lhs.load_scalar(fx), rhs.load_scalar(fx)]; - fx.lib_call( + let args = [lhs.load_scalar(fx), rhs.load_scalar(fx), oflow_out_place.to_ptr().get_addr(fx)]; + let ret = fx.lib_call( if is_signed { "__rust_i128_mulo" } else { "__rust_u128_mulo" }, param_types, - vec![], + vec![AbiParam::new(types::I128)], &args, ); - Some(out_place.to_cvalue(fx)) + let mul = ret[0]; + let oflow = oflow_out_place.to_cvalue(fx).load_scalar(fx); + let oflow = clif_intcast(fx, oflow, types::I8, false); + let layout = fx.layout_of(Ty::new_tup(fx.tcx, &[lhs.layout().ty, fx.tcx.types.bool])); + Some(CValue::by_val_pair(mul, oflow, layout)) } diff --git a/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs b/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs index f8e3a034421da..2484c10848ed3 100644 --- a/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs +++ b/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs @@ -43,7 +43,7 @@ builtin_functions! { fn __divti3(n: i128, d: i128) -> i128; fn __umodti3(n: u128, d: u128) -> u128; fn __modti3(n: i128, d: i128) -> i128; - fn __rust_u128_mulo(a: u128, b: u128) -> (u128, bool); + fn __rust_u128_mulo(a: u128, b: u128, oflow: &mut i32) -> u128; // floats fn __floattisf(i: i128) -> f32; diff --git a/compiler/rustc_codegen_gcc/src/int.rs b/compiler/rustc_codegen_gcc/src/int.rs index 7b9d1feb8d710..fe6a65bed03b0 100644 --- a/compiler/rustc_codegen_gcc/src/int.rs +++ b/compiler/rustc_codegen_gcc/src/int.rs @@ -322,36 +322,26 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { }, } } else { - match new_kind { - Int(I128) | Uint(U128) => { - let func_name = match oop { - OverflowOp::Add => match new_kind { - Int(I128) => "__rust_i128_addo", - Uint(U128) => "__rust_u128_addo", - _ => unreachable!(), - }, - OverflowOp::Sub => match new_kind { - Int(I128) => "__rust_i128_subo", - Uint(U128) => "__rust_u128_subo", - _ => unreachable!(), - }, - OverflowOp::Mul => match new_kind { - Int(I128) => "__rust_i128_mulo", // TODO(antoyo): use __muloti4d instead? 
- Uint(U128) => "__rust_u128_mulo", - _ => unreachable!(), - }, - }; - return self.operation_with_overflow(func_name, lhs, rhs); - } - _ => match oop { - OverflowOp::Mul => match new_kind { - Int(I32) => "__mulosi4", - Int(I64) => "__mulodi4", - _ => unreachable!(), - }, - _ => unimplemented!("overflow operation for {:?}", new_kind), + let (func_name, width) = match oop { + OverflowOp::Add => match new_kind { + Int(I128) => ("__rust_i128_addo", 128), + Uint(U128) => ("__rust_u128_addo", 128), + _ => unreachable!(), }, - } + OverflowOp::Sub => match new_kind { + Int(I128) => ("__rust_i128_subo", 128), + Uint(U128) => ("__rust_u128_subo", 128), + _ => unreachable!(), + }, + OverflowOp::Mul => match new_kind { + Int(I32) => ("__mulosi4", 32), + Int(I64) => ("__mulodi4", 64), + Int(I128) => ("__rust_i128_mulo", 128), // TODO(antoyo): use __muloti4d instead? + Uint(U128) => ("__rust_u128_mulo", 128), + _ => unreachable!(), + }, + }; + return self.operation_with_overflow(func_name, lhs, rhs, width); }; let intrinsic = self.context.get_builtin_function(name); @@ -364,80 +354,87 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { (res.dereference(self.location).to_rvalue(), overflow) } + /// Non-`__builtin_*` overflow operations with a `fn(T, T, &mut i32) -> T` signature. pub fn operation_with_overflow( &self, func_name: &str, lhs: RValue<'gcc>, rhs: RValue<'gcc>, + width: u64, ) -> (RValue<'gcc>, RValue<'gcc>) { let a_type = lhs.get_type(); let b_type = rhs.get_type(); debug_assert!(a_type.dyncast_array().is_some()); debug_assert!(b_type.dyncast_array().is_some()); + let overflow_type = self.i32_type; + let overflow_param_type = overflow_type.make_pointer(); + let res_type = a_type; + + let overflow_value = + self.current_func().new_local(self.location, overflow_type, "overflow"); + let overflow_addr = overflow_value.get_address(self.location); + let param_a = self.context.new_parameter(self.location, a_type, "a"); let param_b = self.context.new_parameter(self.location, b_type, "b"); - let result_field = self.context.new_field(self.location, a_type, "result"); - let overflow_field = self.context.new_field(self.location, self.bool_type, "overflow"); - - let ret_ty = Ty::new_tup(self.tcx, &[self.tcx.types.i128, self.tcx.types.bool]); + let param_overflow = + self.context.new_parameter(self.location, overflow_param_type, "overflow"); + + let a_elem_type = a_type.dyncast_array().expect("non-array a value"); + debug_assert!(a_elem_type.is_integral()); + let res_ty = match width { + 32 => self.tcx.types.i32, + 64 => self.tcx.types.i64, + 128 => self.tcx.types.i128, + _ => unreachable!("unexpected integer size"), + }; let layout = self .tcx - .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ret_ty)) + .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(res_ty)) .unwrap(); let arg_abi = ArgAbi { layout, mode: PassMode::Direct(ArgAttributes::new()) }; let mut fn_abi = FnAbi { - args: vec![arg_abi.clone(), arg_abi.clone()].into_boxed_slice(), + args: vec![arg_abi.clone(), arg_abi.clone(), arg_abi.clone()].into_boxed_slice(), ret: arg_abi, c_variadic: false, - fixed_count: 2, + fixed_count: 3, conv: Conv::C, can_unwind: false, }; fn_abi.adjust_for_foreign_abi(self.cx, spec::abi::Abi::C { unwind: false }).unwrap(); - let indirect = matches!(fn_abi.ret.mode, PassMode::Indirect { .. 
}); - - let return_type = self - .context - .new_struct_type(self.location, "result_overflow", &[result_field, overflow_field]); - let result = if indirect { - let return_value = - self.current_func().new_local(self.location, return_type.as_type(), "return_value"); - let return_param_type = return_type.as_type().make_pointer(); - let return_param = - self.context.new_parameter(self.location, return_param_type, "return_value"); + let ret_indirect = matches!(fn_abi.ret.mode, PassMode::Indirect { .. }); + + let result = if ret_indirect { + let res_value = self.current_func().new_local(self.location, res_type, "result_value"); + let res_addr = res_value.get_address(self.location); + let res_param_type = res_type.make_pointer(); + let param_res = self.context.new_parameter(self.location, res_param_type, "result"); + let func = self.context.new_function( self.location, FunctionType::Extern, self.type_void(), - &[return_param, param_a, param_b], + &[param_res, param_a, param_b, param_overflow], func_name, false, ); - self.llbb().add_eval( - self.location, - self.context.new_call(self.location, func, &[ - return_value.get_address(self.location), - lhs, - rhs, - ]), - ); - return_value.to_rvalue() + let _void = + self.context.new_call(self.location, func, &[res_addr, lhs, rhs, overflow_addr]); + res_value.to_rvalue() } else { let func = self.context.new_function( self.location, FunctionType::Extern, - return_type.as_type(), - &[param_a, param_b], + res_type, + &[param_a, param_b, param_overflow], func_name, false, ); - self.context.new_call(self.location, func, &[lhs, rhs]) + self.context.new_call(self.location, func, &[lhs, rhs, overflow_addr]) }; - let overflow = result.access_field(self.location, overflow_field); - let int_result = result.access_field(self.location, result_field); - (int_result, overflow) + + (result, self.context.new_cast(self.location, overflow_value, self.bool_type).to_rvalue()) } pub fn gcc_icmp( diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index 42d189d1b7d38..48606f5f91c0f 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -1001,7 +1001,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { 128 => "__rust_i128_addo", _ => unreachable!(), }; - let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs); + let (int_result, overflow) = + self.operation_with_overflow(func_name, lhs, rhs, width); self.llbb().add_assignment(self.location, res, int_result); overflow }; @@ -1071,7 +1072,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { 128 => "__rust_i128_subo", _ => unreachable!(), }; - let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs); + let (int_result, overflow) = + self.operation_with_overflow(func_name, lhs, rhs, width); self.llbb().add_assignment(self.location, res, int_result); overflow }; From f6a2db8e1b6ee82ebf44b82dfc83740051c59d02 Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Wed, 15 Jan 2025 03:58:37 +0000 Subject: [PATCH 074/142] Update compiler-builtins to 0.1.143 0.1.142 fixes an issue parsing optimization flags, and 0.1.143 changes `__rust_[ui]128_*` builtins to use a C-safe signature. 
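
The signature change itself is only about the shape of these symbols: instead of returning a `(u128, bool)` tuple, the overflow flag now comes back through an out-parameter. A local stand-in illustrating the two shapes side by side (not the real builtin; the new parameter layout is taken from the declarations in the diffs above):

    // Old shape: tuple return, which is not `extern "C"`-safe.
    fn rust_u128_mulo_old(a: u128, b: u128) -> (u128, bool) {
        a.overflowing_mul(b)
    }

    // New shape, mirroring `__rust_u128_mulo(a: u128, b: u128, oflow: &mut i32) -> u128`:
    // C-compatible, with the overflow flag written through `oflow`.
    fn rust_u128_mulo_new(a: u128, b: u128, oflow: &mut i32) -> u128 {
        let (product, overflowed) = a.overflowing_mul(b);
        *oflow = overflowed as i32;
        product
    }

    fn main() {
        let mut oflow = 0;
        let p = rust_u128_mulo_new(u128::MAX, 2, &mut oflow);
        assert_eq!((p, oflow != 0), rust_u128_mulo_old(u128::MAX, 2));
    }
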
--- ...029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch | 4 ++-- library/Cargo.lock | 4 ++-- library/alloc/Cargo.toml | 2 +- library/std/Cargo.toml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch index b2eeb52633604..bf58e48515855 100644 --- a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch +++ b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-Disable-f16-and-f128-in-compiler-builtins.patch @@ -16,8 +16,8 @@ index 7165c3e48af..968552ad435 100644 [dependencies] core = { path = "../core" } --compiler_builtins = { version = "=0.1.141", features = ['rustc-dep-of-std'] } -+compiler_builtins = { version = "=0.1.141", features = ['rustc-dep-of-std', 'no-f16-f128'] } +-compiler_builtins = { version = "=0.1.143", features = ['rustc-dep-of-std'] } ++compiler_builtins = { version = "=0.1.143", features = ['rustc-dep-of-std', 'no-f16-f128'] } [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } diff --git a/library/Cargo.lock b/library/Cargo.lock index b01fec9b8f5c4..c8007dd9be046 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -61,9 +61,9 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.141" +version = "0.1.143" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e7a0206befe4e574e37d6d7a0fe82e88fdf54bedb0608f239cb11b7a6aa6be" +checksum = "c85ba2077e3eab3dd81be4ece6b7fb2ad0887c1fb813e9a45400baf75c6c7c29" dependencies = [ "cc", "rustc-std-workspace-core", diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml index 36f617b45261a..96caac890a35c 100644 --- a/library/alloc/Cargo.toml +++ b/library/alloc/Cargo.toml @@ -10,7 +10,7 @@ edition = "2021" [dependencies] core = { path = "../core" } -compiler_builtins = { version = "=0.1.141", features = ['rustc-dep-of-std'] } +compiler_builtins = { version = "=0.1.143", features = ['rustc-dep-of-std'] } [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index f34dc185eb195..da58d7c13bd12 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -17,7 +17,7 @@ cfg-if = { version = "1.0", features = ['rustc-dep-of-std'] } panic_unwind = { path = "../panic_unwind", optional = true } panic_abort = { path = "../panic_abort" } core = { path = "../core", public = true } -compiler_builtins = { version = "=0.1.141" } +compiler_builtins = { version = "=0.1.143" } unwind = { path = "../unwind" } hashbrown = { version = "0.15", default-features = false, features = [ 'rustc-dep-of-std', From c89ee081dd33eead1b4d0c3d287f96f3a664553b Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 15 Jan 2025 04:15:48 +0000 Subject: [PATCH 075/142] Make sure we actually use the right trivial lifetime substs when eagerly monomorphizing drop for structs --- compiler/rustc_monomorphize/src/collector.rs | 22 +++++++++++++++---- compiler/rustc_type_ir/src/binder.rs | 2 +- .../item-collection/drop-glue-eager.rs | 13 +++++++++++ 3 files changed, 32 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 0f3943cfe6a1a..bb603df112942 100644 --- a/compiler/rustc_monomorphize/src/collector.rs 
+++ b/compiler/rustc_monomorphize/src/collector.rs @@ -1410,19 +1410,33 @@ impl<'v> RootCollector<'_, 'v> { && !self.tcx.generics_of(id.owner_id).requires_monomorphization(self.tcx) { debug!("RootCollector: ADT drop-glue for `{id:?}`",); + let id_args = + ty::GenericArgs::for_item(self.tcx, id.owner_id.to_def_id(), |param, _| { + match param.kind { + GenericParamDefKind::Lifetime => { + self.tcx.lifetimes.re_erased.into() + } + GenericParamDefKind::Type { .. } + | GenericParamDefKind::Const { .. } => { + unreachable!( + "`own_requires_monomorphization` check means that \ + we should have no type/const params" + ) + } + } + }); // This type is impossible to instantiate, so we should not try to // generate a `drop_in_place` instance for it. if self.tcx.instantiate_and_check_impossible_predicates(( id.owner_id.to_def_id(), - ty::List::empty(), + id_args, )) { return; } - let ty = self.tcx.erase_regions( - self.tcx.type_of(id.owner_id.to_def_id()).instantiate_identity(), - ); + let ty = + self.tcx.type_of(id.owner_id.to_def_id()).instantiate(self.tcx, id_args); assert!(!ty.has_non_region_param()); visit_drop_use(self.tcx, ty, true, DUMMY_SP, self.output); } diff --git a/compiler/rustc_type_ir/src/binder.rs b/compiler/rustc_type_ir/src/binder.rs index cb59bc608c203..0d0092ea1aa08 100644 --- a/compiler/rustc_type_ir/src/binder.rs +++ b/compiler/rustc_type_ir/src/binder.rs @@ -804,7 +804,7 @@ impl<'a, I: Interner> ArgFolder<'a, I> { #[inline(never)] fn region_param_out_of_range(&self, ebr: I::EarlyParamRegion, r: I::Region) -> ! { panic!( - "const parameter `{:?}` ({:?}/{}) out of range when instantiating args={:?}", + "region parameter `{:?}` ({:?}/{}) out of range when instantiating args={:?}", ebr, r, ebr.index(), diff --git a/tests/codegen-units/item-collection/drop-glue-eager.rs b/tests/codegen-units/item-collection/drop-glue-eager.rs index 77470767ee1a3..c81074de49040 100644 --- a/tests/codegen-units/item-collection/drop-glue-eager.rs +++ b/tests/codegen-units/item-collection/drop-glue-eager.rs @@ -41,3 +41,16 @@ impl<'a> Drop for StructWithDropAndLt<'a> { struct StructWithDropAndLt<'a> { x: &'a i32, } + +// Make sure we don't ICE when checking impossible predicates for the struct. +// Regression test for . +//~ MONO_ITEM fn std::ptr::drop_in_place::> - shim(Some(StructWithLtAndPredicate<'_>)) +struct StructWithLtAndPredicate<'a: 'a> { + x: &'a i32, +} + +// We should be able to monomorphize drops for struct with lifetimes. +impl<'a> Drop for StructWithLtAndPredicate<'a> { + //~ MONO_ITEM fn as std::ops::Drop>::drop + fn drop(&mut self) {} +} From 58d6301cad7ac20407e708cd9d1ec499d1804015 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 23 Dec 2024 00:33:10 -0600 Subject: [PATCH 076/142] Update ReadDir::next in std::sys::pal::unix::fs to use `&raw const (*ptr).field` instead of `ptr.offset(...).cast()`. Also, the macro is only called three times, and all with the same local variable entry_ptr, so just use the local variable directly, and rename the macro to entry_field_ptr. --- library/std/src/sys/pal/unix/fs.rs | 36 +++++++++++------------------- 1 file changed, 13 insertions(+), 23 deletions(-) diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index 54fa31620aced..fdf011c19482a 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -709,7 +709,7 @@ impl Iterator for ReadDir { // thread safety for readdir() as long an individual DIR* is not accessed // concurrently, which is sufficient for Rust. 
super::os::set_errno(0); - let entry_ptr = readdir64(self.inner.dirp.0); + let entry_ptr: *const dirent64 = readdir64(self.inner.dirp.0); if entry_ptr.is_null() { // We either encountered an error, or reached the end. Either way, // the next call to next() should return None. @@ -735,29 +735,19 @@ impl Iterator for ReadDir { // contents were "simply" partially initialized data. // // Like for uninitialized contents, converting entry_ptr to `&dirent64` - // would not be legal. However, unique to dirent64 is that we don't even - // get to use `&raw const (*entry_ptr).d_name` because that operation - // requires the full extent of *entry_ptr to be in bounds of the same - // allocation, which is not necessarily the case here. - // - // Instead we must access fields individually through their offsets. - macro_rules! offset_ptr { - ($entry_ptr:expr, $field:ident) => {{ - const OFFSET: isize = mem::offset_of!(dirent64, $field) as isize; - if true { - // Cast to the same type determined by the else branch. - $entry_ptr.byte_offset(OFFSET).cast::<_>() - } else { - #[allow(deref_nullptr)] - { - &raw const (*ptr::null::()).$field - } - } - }}; + // would not be legal. However, we can use `&raw const (*entry_ptr).d_name` + // to refer the fields individually, because that operation is equivalent + // to `byte_offset` and thus does not require the full extent of `*entry_ptr` + // to be in bounds of the same allocation, only the offset of the field + // being referenced. + macro_rules! entry_field_ptr { + ($field:ident) => { + &raw const (*entry_ptr).$field + }; } // d_name is guaranteed to be null-terminated. - let name = CStr::from_ptr(offset_ptr!(entry_ptr, d_name).cast()); + let name = CStr::from_ptr(entry_field_ptr!(d_name).cast()); let name_bytes = name.to_bytes(); if name_bytes == b"." || name_bytes == b".." 
{ continue; @@ -765,14 +755,14 @@ impl Iterator for ReadDir { #[cfg(not(target_os = "vita"))] let entry = dirent64_min { - d_ino: *offset_ptr!(entry_ptr, d_ino) as u64, + d_ino: *entry_field_ptr!(d_ino) as u64, #[cfg(not(any( target_os = "solaris", target_os = "illumos", target_os = "aix", target_os = "nto", )))] - d_type: *offset_ptr!(entry_ptr, d_type) as u8, + d_type: *entry_field_ptr!(d_type) as u8, }; #[cfg(target_os = "vita")] From 620feadb38165877836400d6a6451437e46ab42f Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 15 Jan 2025 09:21:51 +0100 Subject: [PATCH 077/142] remove some known-bug that do not seem to make sense --- tests/ui/consts/const-block-const-bound.rs | 4 +--- tests/ui/consts/const-block-const-bound.stderr | 4 ++-- tests/ui/consts/issue-94675.rs | 6 +----- tests/ui/consts/issue-94675.stderr | 2 +- 4 files changed, 5 insertions(+), 11 deletions(-) diff --git a/tests/ui/consts/const-block-const-bound.rs b/tests/ui/consts/const-block-const-bound.rs index 596aac09b3114..b4b89a93e759f 100644 --- a/tests/ui/consts/const-block-const-bound.rs +++ b/tests/ui/consts/const-block-const-bound.rs @@ -1,5 +1,3 @@ -//@ known-bug: #103507 - #![allow(unused)] #![feature(const_trait_impl, negative_impls, const_destruct)] @@ -16,6 +14,6 @@ impl Drop for UnconstDrop { fn main() { const { f(UnconstDrop); - //FIXME ~^ ERROR can't drop + //~^ ERROR trait bound `UnconstDrop: const Destruct` is not satisfied } } diff --git a/tests/ui/consts/const-block-const-bound.stderr b/tests/ui/consts/const-block-const-bound.stderr index 0931eff2175bc..14c62fb4d2573 100644 --- a/tests/ui/consts/const-block-const-bound.stderr +++ b/tests/ui/consts/const-block-const-bound.stderr @@ -1,5 +1,5 @@ error[E0277]: the trait bound `UnconstDrop: const Destruct` is not satisfied - --> $DIR/const-block-const-bound.rs:18:11 + --> $DIR/const-block-const-bound.rs:16:11 | LL | f(UnconstDrop); | - ^^^^^^^^^^^ @@ -7,7 +7,7 @@ LL | f(UnconstDrop); | required by a bound introduced by this call | note: required by a bound in `f` - --> $DIR/const-block-const-bound.rs:8:15 + --> $DIR/const-block-const-bound.rs:6:15 | LL | const fn f(x: T) {} | ^^^^^^ required by this bound in `f` diff --git a/tests/ui/consts/issue-94675.rs b/tests/ui/consts/issue-94675.rs index 2e30eebb07b2e..e1c6861c5103b 100644 --- a/tests/ui/consts/issue-94675.rs +++ b/tests/ui/consts/issue-94675.rs @@ -1,5 +1,3 @@ -//@ known-bug: #103507 - #![feature(const_trait_impl, const_vec_string_slice)] struct Foo<'a> { @@ -9,9 +7,7 @@ struct Foo<'a> { impl<'a> Foo<'a> { const fn spam(&mut self, baz: &mut Vec) { self.bar[0] = baz.len(); - //FIXME ~^ ERROR: cannot call - //FIXME ~| ERROR: cannot call - //FIXME ~| ERROR: the trait bound + //~^ ERROR: cannot call } } diff --git a/tests/ui/consts/issue-94675.stderr b/tests/ui/consts/issue-94675.stderr index 8cad13724f20e..63a86b456332f 100644 --- a/tests/ui/consts/issue-94675.stderr +++ b/tests/ui/consts/issue-94675.stderr @@ -1,5 +1,5 @@ error[E0015]: cannot call non-const operator in constant functions - --> $DIR/issue-94675.rs:11:17 + --> $DIR/issue-94675.rs:9:17 | LL | self.bar[0] = baz.len(); | ^^^ From 67a07e01cf8db40d30cea7489d78413bb3b79b00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Wed, 15 Jan 2025 08:06:03 +0000 Subject: [PATCH 078/142] add test for issue 135514 --- ...apping-impls-in-new-solver-issue-135514.rs | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 tests/incremental/overlapping-impls-in-new-solver-issue-135514.rs diff --git 
a/tests/incremental/overlapping-impls-in-new-solver-issue-135514.rs b/tests/incremental/overlapping-impls-in-new-solver-issue-135514.rs new file mode 100644 index 0000000000000..8fcc913fa3719 --- /dev/null +++ b/tests/incremental/overlapping-impls-in-new-solver-issue-135514.rs @@ -0,0 +1,40 @@ +// Regression test for #135514 where the new solver didn't properly record deps for incremental +// compilation, similarly to `track-deps-in-new-solver.rs`. +// +// In this specially crafted example, @steffahn was able to trigger unsoundness with an overlapping +// impl that was accepted during the incremental rebuild. + +//@ revisions: cpass1 cfail2 +//@ compile-flags: -Znext-solver + +pub trait Trait {} + +pub struct S0(T); + +pub struct S(T); +impl Trait for S where S0: Trait {} + +pub struct W; + +pub trait Other { + type Choose; +} + +// first impl +impl Other for T { + type Choose = L; +} + +// second impl +impl Other for S { + //[cfail2]~^ ERROR conflicting implementations of trait + type Choose = R; +} + +#[cfg(cpass1)] +impl Trait for W {} + +#[cfg(cfail2)] +impl Trait for S {} + +fn main() {} From 0922c191d5ce113fed345eeb21e8f5e4130efb2d Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 15 Jan 2025 09:32:30 +0100 Subject: [PATCH 079/142] fix known-bug link in normalize-tait-in-const --- tests/ui/impl-trait/normalize-tait-in-const.rs | 3 ++- tests/ui/impl-trait/normalize-tait-in-const.stderr | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/ui/impl-trait/normalize-tait-in-const.rs b/tests/ui/impl-trait/normalize-tait-in-const.rs index 1fd543b72e7cc..a735ef766737b 100644 --- a/tests/ui/impl-trait/normalize-tait-in-const.rs +++ b/tests/ui/impl-trait/normalize-tait-in-const.rs @@ -1,4 +1,5 @@ -//@ known-bug: #103507 +//! This is a regression test for . 
+//@ known-bug: #110395 #![feature(type_alias_impl_trait)] #![feature(const_trait_impl, const_destruct)] diff --git a/tests/ui/impl-trait/normalize-tait-in-const.stderr b/tests/ui/impl-trait/normalize-tait-in-const.stderr index f4e8a872cec12..c6cd1b139c5b0 100644 --- a/tests/ui/impl-trait/normalize-tait-in-const.stderr +++ b/tests/ui/impl-trait/normalize-tait-in-const.stderr @@ -1,5 +1,5 @@ error: `~const` can only be applied to `#[const_trait]` traits - --> $DIR/normalize-tait-in-const.rs:26:35 + --> $DIR/normalize-tait-in-const.rs:27:35 | LL | const fn with_positive ~const Fn(&'a Alias<'a>) + ~const Destruct>(fun: F) { | ^^^^^^ can't be applied to `Fn` @@ -8,7 +8,7 @@ note: `Fn` can't be used with `~const` because it isn't annotated with `#[const_ --> $SRC_DIR/core/src/ops/function.rs:LL:COL error: `~const` can only be applied to `#[const_trait]` traits - --> $DIR/normalize-tait-in-const.rs:26:35 + --> $DIR/normalize-tait-in-const.rs:27:35 | LL | const fn with_positive ~const Fn(&'a Alias<'a>) + ~const Destruct>(fun: F) { | ^^^^^^ can't be applied to `Fn` @@ -18,7 +18,7 @@ note: `Fn` can't be used with `~const` because it isn't annotated with `#[const_ = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error[E0015]: cannot call non-const closure in constant functions - --> $DIR/normalize-tait-in-const.rs:27:5 + --> $DIR/normalize-tait-in-const.rs:28:5 | LL | fun(filter_positive()); | ^^^^^^^^^^^^^^^^^^^^^^ From 561a097b653c53d264fb60b417ba07c19ee14f4a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 1 Jan 2025 18:45:41 +0100 Subject: [PATCH 080/142] late_report_deprecation: move fast-path closer to the core logic --- compiler/rustc_middle/src/middle/stability.rs | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index 84c3c2eb49e27..999908b9bd80e 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -232,9 +232,18 @@ fn late_report_deprecation( return; } + let is_in_effect = depr.is_in_effect(); + let lint = deprecation_lint(is_in_effect); + + // Calculating message for lint involves calling `self.def_path_str`, + // which will by default invoke the expensive `visible_parent_map` query. + // Skip all that work if the lint is allowed anyway. + if tcx.lint_level_at_node(lint, hir_id).0 == Level::Allow { + return; + } + let def_path = with_no_trimmed_paths!(tcx.def_path_str(def_id)); let def_kind = tcx.def_descr(def_id); - let is_in_effect = depr.is_in_effect(); let method_span = method_span.unwrap_or(span); let suggestion = @@ -250,7 +259,7 @@ fn late_report_deprecation( note: depr.note, since_kind: deprecated_since_kind(is_in_effect, depr.since), }; - tcx.emit_node_span_lint(deprecation_lint(is_in_effect), hir_id, method_span, diag); + tcx.emit_node_span_lint(lint, hir_id, method_span, diag); } /// Result of `TyCtxt::eval_stability`. @@ -360,13 +369,7 @@ impl<'tcx> TyCtxt<'tcx> { // hierarchy. let depr_attr = &depr_entry.attr; if !skip || depr_attr.is_since_rustc_version() { - // Calculating message for lint involves calling `self.def_path_str`. - // Which by default to calculate visible path will invoke expensive `visible_parent_map` query. - // So we skip message calculation altogether, if lint is allowed. 
- let lint = deprecation_lint(depr_attr.is_in_effect()); - if self.lint_level_at_node(lint, id).0 != Level::Allow { - late_report_deprecation(self, depr_attr, span, method_span, id, def_id); - } + late_report_deprecation(self, depr_attr, span, method_span, id, def_id); } }; } From cf0ab86251f1ab3e11132332695eee73ff746e27 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 1 Jan 2025 19:09:01 +0100 Subject: [PATCH 081/142] allowed_through_unstable_modules: support showing a deprecation message when the unstable module name is used --- compiler/rustc_ast/src/attr/mod.rs | 2 + .../src/stability.rs | 15 +- .../src/attributes/stability.rs | 21 +-- compiler/rustc_feature/src/builtin_attrs.rs | 2 +- compiler/rustc_passes/src/stability.rs | 136 +++++++++++++----- src/librustdoc/clean/types.rs | 2 +- src/librustdoc/formats/cache.rs | 2 +- src/librustdoc/passes/propagate_stability.rs | 6 +- .../clippy_lints/src/std_instead_of_core.rs | 2 +- .../allowed-through-unstable.rs | 1 + .../allowed-through-unstable.stderr | 12 +- .../allowed-through-unstable-core.rs | 4 + 12 files changed, 146 insertions(+), 59 deletions(-) diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 97385b2eaab89..51f1858001321 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -723,6 +723,8 @@ impl MetaItemLit { pub trait AttributeExt: Debug { fn id(&self) -> AttrId; + /// For a single-segment attribute (i.e., `#[attr]` and not `#[path::atrr]`), + /// return the name of the attribute, else return the empty identifier. fn name_or_empty(&self) -> Symbol { self.ident().unwrap_or_else(Ident::empty).name } diff --git a/compiler/rustc_attr_data_structures/src/stability.rs b/compiler/rustc_attr_data_structures/src/stability.rs index 3c77d4c766c57..dfda04387ec5a 100644 --- a/compiler/rustc_attr_data_structures/src/stability.rs +++ b/compiler/rustc_attr_data_structures/src/stability.rs @@ -101,6 +101,16 @@ impl PartialConstStability { } } +#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)] +#[derive(HashStable_Generic)] +pub enum AllowedThroughUnstableModules { + /// This does not get a deprecation warning. We still generally would prefer people to use the + /// fully stable path, and a warning will likely be emitted in the future. + WithoutDeprecation, + /// Emit the given deprecation warning. + WithDeprecation(Symbol), +} + /// The available stability levels. #[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)] #[derive(HashStable_Generic)] @@ -137,9 +147,8 @@ pub enum StabilityLevel { Stable { /// Rust release which stabilized this feature. since: StableSince, - /// Is this item allowed to be referred to on stable, despite being contained in unstable - /// modules? - allowed_through_unstable_modules: bool, + /// This is `Some` if this item allowed to be referred to on stable via unstable modules. 
+ allowed_through_unstable_modules: Option, }, } diff --git a/compiler/rustc_attr_parsing/src/attributes/stability.rs b/compiler/rustc_attr_parsing/src/attributes/stability.rs index 89937e1c593d1..bfbe51b27d88d 100644 --- a/compiler/rustc_attr_parsing/src/attributes/stability.rs +++ b/compiler/rustc_attr_parsing/src/attributes/stability.rs @@ -6,8 +6,8 @@ use rustc_ast::MetaItem; use rustc_ast::attr::AttributeExt; use rustc_ast_pretty::pprust; use rustc_attr_data_structures::{ - ConstStability, DefaultBodyStability, Stability, StabilityLevel, StableSince, UnstableReason, - VERSION_PLACEHOLDER, + AllowedThroughUnstableModules, ConstStability, DefaultBodyStability, Stability, StabilityLevel, + StableSince, UnstableReason, VERSION_PLACEHOLDER, }; use rustc_errors::ErrorGuaranteed; use rustc_session::Session; @@ -24,11 +24,16 @@ pub fn find_stability( item_sp: Span, ) -> Option<(Stability, Span)> { let mut stab: Option<(Stability, Span)> = None; - let mut allowed_through_unstable_modules = false; + let mut allowed_through_unstable_modules = None; for attr in attrs { match attr.name_or_empty() { - sym::rustc_allowed_through_unstable_modules => allowed_through_unstable_modules = true, + sym::rustc_allowed_through_unstable_modules => { + allowed_through_unstable_modules = Some(match attr.value_str() { + Some(msg) => AllowedThroughUnstableModules::WithDeprecation(msg), + None => AllowedThroughUnstableModules::WithoutDeprecation, + }) + } sym::unstable => { if stab.is_some() { sess.dcx().emit_err(session_diagnostics::MultipleStabilityLevels { @@ -56,15 +61,15 @@ pub fn find_stability( } } - if allowed_through_unstable_modules { + if let Some(allowed_through_unstable_modules) = allowed_through_unstable_modules { match &mut stab { Some(( Stability { - level: StabilityLevel::Stable { allowed_through_unstable_modules, .. }, + level: StabilityLevel::Stable { allowed_through_unstable_modules: in_stab, .. }, .. }, _, - )) => *allowed_through_unstable_modules = true, + )) => *in_stab = Some(allowed_through_unstable_modules), _ => { sess.dcx() .emit_err(session_diagnostics::RustcAllowedUnstablePairing { span: item_sp }); @@ -283,7 +288,7 @@ fn parse_stability(sess: &Session, attr: &impl AttributeExt) -> Option<(Symbol, match feature { Ok(feature) => { - let level = StabilityLevel::Stable { since, allowed_through_unstable_modules: false }; + let level = StabilityLevel::Stable { since, allowed_through_unstable_modules: None }; Some((feature, level)) } Err(ErrorGuaranteed { .. 
}) => None, diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index c28a4360f6f91..5510e7e09e50c 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -623,7 +623,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ EncodeCrossCrate::No, "allow_internal_unsafe side-steps the unsafe_code lint", ), rustc_attr!( - rustc_allowed_through_unstable_modules, Normal, template!(Word), + rustc_allowed_through_unstable_modules, Normal, template!(Word, NameValueStr: "deprecation message"), WarnFollowing, EncodeCrossCrate::No, "rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \ through unstable paths" diff --git a/compiler/rustc_passes/src/stability.rs b/compiler/rustc_passes/src/stability.rs index 30f9e698521f1..f6c892203f91f 100644 --- a/compiler/rustc_passes/src/stability.rs +++ b/compiler/rustc_passes/src/stability.rs @@ -5,8 +5,8 @@ use std::mem::replace; use std::num::NonZero; use rustc_attr_parsing::{ - self as attr, ConstStability, DeprecatedSince, Stability, StabilityLevel, StableSince, - UnstableReason, VERSION_PLACEHOLDER, + self as attr, AllowedThroughUnstableModules, ConstStability, DeprecatedSince, Stability, + StabilityLevel, StableSince, UnstableReason, VERSION_PLACEHOLDER, }; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::unord::{ExtendUnord, UnordMap, UnordSet}; @@ -20,11 +20,16 @@ use rustc_hir::{FieldDef, Item, ItemKind, TraitRef, Ty, TyKind, Variant}; use rustc_middle::hir::nested_filter; use rustc_middle::middle::lib_features::{FeatureStability, LibFeatures}; use rustc_middle::middle::privacy::EffectiveVisibilities; -use rustc_middle::middle::stability::{AllowUnstable, DeprecationEntry, Index}; +use rustc_middle::middle::stability::{ + AllowUnstable, Deprecated, DeprecationEntry, EvalResult, Index, +}; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_session::lint; -use rustc_session::lint::builtin::{INEFFECTIVE_UNSTABLE_TRAIT_IMPL, USELESS_DEPRECATED}; +use rustc_session::lint::builtin::{ + DEPRECATED, INEFFECTIVE_UNSTABLE_TRAIT_IMPL, USELESS_DEPRECATED, +}; use rustc_span::{Span, Symbol, sym}; use tracing::{debug, info}; @@ -844,42 +849,95 @@ impl<'tcx> Visitor<'tcx> for Checker<'tcx> { }, ); - let is_allowed_through_unstable_modules = |def_id| { - self.tcx.lookup_stability(def_id).is_some_and(|stab| match stab.level { - StabilityLevel::Stable { allowed_through_unstable_modules, .. } => { - allowed_through_unstable_modules + if item_is_allowed { + // The item itself is allowed; check whether the path there is also allowed. + let is_allowed_through_unstable_modules: Option = + self.tcx.lookup_stability(def_id).and_then(|stab| match stab.level { + StabilityLevel::Stable { allowed_through_unstable_modules, .. } => { + allowed_through_unstable_modules + } + _ => None, + }); + + if is_allowed_through_unstable_modules.is_none() { + // Check parent modules stability as well if the item the path refers to is itself + // stable. We only emit warnings for unstable path segments if the item is stable + // or allowed because stability is often inherited, so the most common case is that + // both the segments and the item are unstable behind the same feature flag. 
+ // + // We check here rather than in `visit_path_segment` to prevent visiting the last + // path segment twice + // + // We include special cases via #[rustc_allowed_through_unstable_modules] for items + // that were accidentally stabilized through unstable paths before this check was + // added, such as `core::intrinsics::transmute` + let parents = path.segments.iter().rev().skip(1); + for path_segment in parents { + if let Some(def_id) = path_segment.res.opt_def_id() { + // use `None` for id to prevent deprecation check + self.tcx.check_stability_allow_unstable( + def_id, + None, + path.span, + None, + if is_unstable_reexport(self.tcx, id) { + AllowUnstable::Yes + } else { + AllowUnstable::No + }, + ); + } } - _ => false, - }) - }; - - if item_is_allowed && !is_allowed_through_unstable_modules(def_id) { - // Check parent modules stability as well if the item the path refers to is itself - // stable. We only emit warnings for unstable path segments if the item is stable - // or allowed because stability is often inherited, so the most common case is that - // both the segments and the item are unstable behind the same feature flag. - // - // We check here rather than in `visit_path_segment` to prevent visiting the last - // path segment twice - // - // We include special cases via #[rustc_allowed_through_unstable_modules] for items - // that were accidentally stabilized through unstable paths before this check was - // added, such as `core::intrinsics::transmute` - let parents = path.segments.iter().rev().skip(1); - for path_segment in parents { - if let Some(def_id) = path_segment.res.opt_def_id() { - // use `None` for id to prevent deprecation check - self.tcx.check_stability_allow_unstable( - def_id, - None, - path.span, - None, - if is_unstable_reexport(self.tcx, id) { - AllowUnstable::Yes - } else { - AllowUnstable::No - }, - ); + } else if let Some(AllowedThroughUnstableModules::WithDeprecation(deprecation)) = + is_allowed_through_unstable_modules + { + // Similar to above, but we cannot use `check_stability_allow_unstable` as that would + // immediately show the stability error. We just want to know the result and disaplay + // our own kind of error. + let parents = path.segments.iter().rev().skip(1); + for path_segment in parents { + if let Some(def_id) = path_segment.res.opt_def_id() { + // use `None` for id to prevent deprecation check + let eval_result = self.tcx.eval_stability_allow_unstable( + def_id, + None, + path.span, + None, + if is_unstable_reexport(self.tcx, id) { + AllowUnstable::Yes + } else { + AllowUnstable::No + }, + ); + let is_allowed = matches!(eval_result, EvalResult::Allow); + if !is_allowed { + // Calculating message for lint involves calling `self.def_path_str`, + // which will by default invoke the expensive `visible_parent_map` query. + // Skip all that work if the lint is allowed anyway. + if self.tcx.lint_level_at_node(DEPRECATED, id).0 + == lint::Level::Allow + { + return; + } + // Show a deprecation message. 
+ let def_path = + with_no_trimmed_paths!(self.tcx.def_path_str(def_id)); + let def_kind = self.tcx.def_descr(def_id); + let diag = Deprecated { + sub: None, + kind: def_kind.to_owned(), + path: def_path, + note: Some(deprecation), + since_kind: lint::DeprecatedSinceKind::InEffect, + }; + self.tcx.emit_node_span_lint( + DEPRECATED, + id, + method_span.unwrap_or(path.span), + diag, + ); + } + } } } } diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index dcee96978d2b5..5e7c175657955 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -406,7 +406,7 @@ impl Item { // were never supposed to work at all. let stab = self.stability(tcx)?; if let rustc_attr_parsing::StabilityLevel::Stable { - allowed_through_unstable_modules: true, + allowed_through_unstable_modules: Some(_), .. } = stab.level { diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index cbb3ce6abe6a7..4760e579199aa 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -316,7 +316,7 @@ impl DocFolder for CacheBuilder<'_, '_> { let skip_because_unstable = matches!( item.stability.map(|stab| stab.level), - Some(StabilityLevel::Stable { allowed_through_unstable_modules: true, .. }) + Some(StabilityLevel::Stable { allowed_through_unstable_modules: Some(_), .. }) ); if (!self.cache.stripped_mod && !skip_because_unstable) || self.is_json_output { diff --git a/src/librustdoc/passes/propagate_stability.rs b/src/librustdoc/passes/propagate_stability.rs index febb52a3b00a2..9c958710c4200 100644 --- a/src/librustdoc/passes/propagate_stability.rs +++ b/src/librustdoc/passes/propagate_stability.rs @@ -119,7 +119,7 @@ fn merge_stability( parent_stability: Option, ) -> Option { if let Some(own_stab) = own_stability - && let StabilityLevel::Stable { since: own_since, allowed_through_unstable_modules: false } = + && let StabilityLevel::Stable { since: own_since, allowed_through_unstable_modules: None } = own_stab.level && let Some(parent_stab) = parent_stability && (parent_stab.is_unstable() @@ -127,12 +127,12 @@ fn merge_stability( { parent_stability } else if let Some(mut own_stab) = own_stability - && let StabilityLevel::Stable { since, allowed_through_unstable_modules: true } = + && let StabilityLevel::Stable { since, allowed_through_unstable_modules: Some(_) } = own_stab.level && parent_stability.is_some_and(|stab| stab.is_stable()) { // this property does not apply transitively through re-exports - own_stab.level = StabilityLevel::Stable { since, allowed_through_unstable_modules: false }; + own_stab.level = StabilityLevel::Stable { since, allowed_through_unstable_modules: None }; Some(own_stab) } else { own_stability diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs index 82ff13a5aff16..8ec7bfe9edd3f 100644 --- a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs +++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs @@ -180,7 +180,7 @@ fn is_stable(cx: &LateContext<'_>, mut def_id: DefId, msrv: &Msrv) -> bool { if let Some(stability) = cx.tcx.lookup_stability(def_id) && let StabilityLevel::Stable { since, - allowed_through_unstable_modules: false, + allowed_through_unstable_modules: None, } = stability.level { let stable = match since { diff --git a/tests/ui/stability-attribute/allowed-through-unstable.rs b/tests/ui/stability-attribute/allowed-through-unstable.rs index 29911a70be942..e03417a4dae8d 100644 --- 
a/tests/ui/stability-attribute/allowed-through-unstable.rs +++ b/tests/ui/stability-attribute/allowed-through-unstable.rs @@ -6,4 +6,5 @@ extern crate allowed_through_unstable_core; use allowed_through_unstable_core::unstable_module::OldStableTraitAllowedThoughUnstable; +use allowed_through_unstable_core::unstable_module::OldStableTraitAllowedThoughUnstableWithDeprecation; //~WARN use of deprecated module `allowed_through_unstable_core::unstable_module`: use the new path instead use allowed_through_unstable_core::unstable_module::NewStableTraitNotAllowedThroughUnstable; //~ ERROR use of unstable library feature `unstable_test_feature` diff --git a/tests/ui/stability-attribute/allowed-through-unstable.stderr b/tests/ui/stability-attribute/allowed-through-unstable.stderr index 00eea9f730d66..8d07b0cf9e8f3 100644 --- a/tests/ui/stability-attribute/allowed-through-unstable.stderr +++ b/tests/ui/stability-attribute/allowed-through-unstable.stderr @@ -1,5 +1,13 @@ +warning: use of deprecated module `allowed_through_unstable_core::unstable_module`: use the new path instead + --> $DIR/allowed-through-unstable.rs:9:53 + | +LL | use allowed_through_unstable_core::unstable_module::OldStableTraitAllowedThoughUnstableWithDeprecation; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(deprecated)]` on by default + error[E0658]: use of unstable library feature `unstable_test_feature` - --> $DIR/allowed-through-unstable.rs:9:5 + --> $DIR/allowed-through-unstable.rs:10:5 | LL | use allowed_through_unstable_core::unstable_module::NewStableTraitNotAllowedThroughUnstable; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -8,6 +16,6 @@ LL | use allowed_through_unstable_core::unstable_module::NewStableTraitNotAllowe = help: add `#![feature(unstable_test_feature)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error: aborting due to 1 previous error +error: aborting due to 1 previous error; 1 warning emitted For more information about this error, try `rustc --explain E0658`. 
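[Reader's aid, not part of any patch in this series.] The auxiliary crate hunk that follows only shows the module body, so here is a minimal consolidated sketch of the two forms of the attribute side by side. The crate-level gates (`staged_api`, `rustc_attrs`, the crate `#![stable]`) are my assumption about what the auxiliary crate already enables; this is compiler-internal machinery and only builds on a nightly toolchain.

    // Reader's sketch; crate-level attributes are assumed, not taken from the hunks shown here.
    #![feature(staged_api, rustc_attrs)] // assumed gates for the internal attributes below
    #![allow(internal_features)]
    #![stable(feature = "stable_test_feature", since = "1.2.0")]

    #[unstable(feature = "unstable_test_feature", issue = "none")]
    pub mod unstable_module {
        // Old (word) form: the stable item may keep being named through this
        // unstable module without any diagnostic.
        #[stable(feature = "stable_test_feature", since = "1.2.0")]
        #[rustc_allowed_through_unstable_modules]
        pub trait SilentlyAllowed {}

        // New (name-value) form added by this patch: the path is still allowed,
        // but downstream `use`s of it now fire the `deprecated` lint with this note.
        #[stable(feature = "stable_test_feature", since = "1.2.0")]
        #[rustc_allowed_through_unstable_modules = "use the new path instead"]
        pub trait AllowedWithDeprecation {}
    }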
diff --git a/tests/ui/stability-attribute/auxiliary/allowed-through-unstable-core.rs b/tests/ui/stability-attribute/auxiliary/allowed-through-unstable-core.rs index b597009a309c9..9dfbb451d04bb 100644 --- a/tests/ui/stability-attribute/auxiliary/allowed-through-unstable-core.rs +++ b/tests/ui/stability-attribute/auxiliary/allowed-through-unstable-core.rs @@ -9,6 +9,10 @@ pub mod unstable_module { #[rustc_allowed_through_unstable_modules] pub trait OldStableTraitAllowedThoughUnstable {} + #[stable(feature = "stable_test_feature", since = "1.2.0")] + #[rustc_allowed_through_unstable_modules = "use the new path instead"] + pub trait OldStableTraitAllowedThoughUnstableWithDeprecation {} + #[stable(feature = "stable_test_feature", since = "1.2.0")] pub trait NewStableTraitNotAllowedThroughUnstable {} } From f1c95c9000804cd84206dfd309aa26c67eb13a4c Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 1 Jan 2025 20:22:59 +0100 Subject: [PATCH 082/142] intrinsics: deprecate calling them via the unstable std::intrinsics path --- library/core/src/intrinsics/mod.rs | 24 ++++++++++--- .../clippy/tests/ui/std_instead_of_core.fixed | 2 +- .../clippy/tests/ui/std_instead_of_core.rs | 2 +- src/tools/clippy/tests/ui/transmute.rs | 22 ++++++------ src/tools/clippy/tests/ui/transmute.stderr | 36 +++++++++---------- .../accidental-stable-in-unstable.rs | 1 + .../accidental-stable-in-unstable.stderr | 10 +++++- 7 files changed, 61 insertions(+), 36 deletions(-) diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 7b31bbec7547c..41b2ffad6680d 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -1897,7 +1897,11 @@ pub const fn forget(_: T) { /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_allowed_through_unstable_modules] +#[cfg_attr(bootstrap, rustc_allowed_through_unstable_modules)] +#[cfg_attr( + not(bootstrap), + rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead" +)] #[rustc_const_stable(feature = "const_transmute", since = "1.56.0")] #[rustc_diagnostic_item = "transmute"] #[rustc_nounwind] @@ -4325,7 +4329,11 @@ pub const fn ptr_metadata + ?Sized, M>(_ptr: *cons /// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append #[doc(alias = "memcpy")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_allowed_through_unstable_modules] +#[cfg_attr(bootstrap, rustc_allowed_through_unstable_modules)] +#[cfg_attr( + not(bootstrap), + rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead" +)] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -4429,7 +4437,11 @@ pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: us /// ``` #[doc(alias = "memmove")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_allowed_through_unstable_modules] +#[cfg_attr(bootstrap, rustc_allowed_through_unstable_modules)] +#[cfg_attr( + not(bootstrap), + rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead" +)] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -4512,7 +4524,11 @@ pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { /// ``` #[doc(alias = "memset")] #[stable(feature = "rust1", since = "1.0.0")] 
-#[rustc_allowed_through_unstable_modules] +#[cfg_attr(bootstrap, rustc_allowed_through_unstable_modules)] +#[cfg_attr( + not(bootstrap), + rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead" +)] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces diff --git a/src/tools/clippy/tests/ui/std_instead_of_core.fixed b/src/tools/clippy/tests/ui/std_instead_of_core.fixed index 227b98c683e97..ec158ee02de89 100644 --- a/src/tools/clippy/tests/ui/std_instead_of_core.fixed +++ b/src/tools/clippy/tests/ui/std_instead_of_core.fixed @@ -1,7 +1,7 @@ //@aux-build:proc_macro_derive.rs #![warn(clippy::std_instead_of_core)] -#![allow(unused_imports)] +#![allow(unused_imports, deprecated)] extern crate alloc; diff --git a/src/tools/clippy/tests/ui/std_instead_of_core.rs b/src/tools/clippy/tests/ui/std_instead_of_core.rs index 01bb78dd3bf1d..9c3c1658d8fec 100644 --- a/src/tools/clippy/tests/ui/std_instead_of_core.rs +++ b/src/tools/clippy/tests/ui/std_instead_of_core.rs @@ -1,7 +1,7 @@ //@aux-build:proc_macro_derive.rs #![warn(clippy::std_instead_of_core)] -#![allow(unused_imports)] +#![allow(unused_imports, deprecated)] extern crate alloc; diff --git a/src/tools/clippy/tests/ui/transmute.rs b/src/tools/clippy/tests/ui/transmute.rs index eeea3f080b1c9..7f5bdea4acf3c 100644 --- a/src/tools/clippy/tests/ui/transmute.rs +++ b/src/tools/clippy/tests/ui/transmute.rs @@ -24,31 +24,31 @@ fn my_vec() -> MyVec { #[warn(clippy::useless_transmute)] unsafe fn _generic<'a, T, U: 'a>(t: &'a T) { // FIXME: should lint - // let _: &'a T = core::intrinsics::transmute(t); + // let _: &'a T = core::mem::transmute(t); - let _: &'a U = core::intrinsics::transmute(t); + let _: &'a U = core::mem::transmute(t); - let _: *const T = core::intrinsics::transmute(t); + let _: *const T = core::mem::transmute(t); //~^ ERROR: transmute from a reference to a pointer //~| NOTE: `-D clippy::useless-transmute` implied by `-D warnings` - let _: *mut T = core::intrinsics::transmute(t); + let _: *mut T = core::mem::transmute(t); //~^ ERROR: transmute from a reference to a pointer - let _: *const U = core::intrinsics::transmute(t); + let _: *const U = core::mem::transmute(t); //~^ ERROR: transmute from a reference to a pointer } #[warn(clippy::useless_transmute)] fn useless() { unsafe { - let _: Vec = core::intrinsics::transmute(my_vec()); + let _: Vec = core::mem::transmute(my_vec()); //~^ ERROR: transmute from a type (`std::vec::Vec`) to itself let _: Vec = core::mem::transmute(my_vec()); //~^ ERROR: transmute from a type (`std::vec::Vec`) to itself - let _: Vec = std::intrinsics::transmute(my_vec()); + let _: Vec = std::mem::transmute(my_vec()); //~^ ERROR: transmute from a type (`std::vec::Vec`) to itself let _: Vec = std::mem::transmute(my_vec()); @@ -94,17 +94,17 @@ fn crosspointer() { let int_mut_ptr: *mut Usize = &mut int as *mut Usize; unsafe { - let _: Usize = core::intrinsics::transmute(int_const_ptr); + let _: Usize = core::mem::transmute(int_const_ptr); //~^ ERROR: transmute from a type (`*const Usize`) to the type that it points to ( //~| NOTE: `-D clippy::crosspointer-transmute` implied by `-D warnings` - let _: Usize = core::intrinsics::transmute(int_mut_ptr); + let _: Usize = core::mem::transmute(int_mut_ptr); //~^ ERROR: transmute from a type (`*mut Usize`) to the type that it points to (`U - let _: *const Usize = core::intrinsics::transmute(my_int()); + let _: *const Usize = 
core::mem::transmute(my_int()); //~^ ERROR: transmute from a type (`Usize`) to a pointer to that type (`*const Usi - let _: *mut Usize = core::intrinsics::transmute(my_int()); + let _: *mut Usize = core::mem::transmute(my_int()); //~^ ERROR: transmute from a type (`Usize`) to a pointer to that type (`*mut Usize } } diff --git a/src/tools/clippy/tests/ui/transmute.stderr b/src/tools/clippy/tests/ui/transmute.stderr index 41a10f381dc53..b5032772856ee 100644 --- a/src/tools/clippy/tests/ui/transmute.stderr +++ b/src/tools/clippy/tests/ui/transmute.stderr @@ -1,8 +1,8 @@ error: transmute from a reference to a pointer --> tests/ui/transmute.rs:31:23 | -LL | let _: *const T = core::intrinsics::transmute(t); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T` +LL | let _: *const T = core::mem::transmute(t); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T` | = note: `-D clippy::useless-transmute` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::useless_transmute)]` @@ -10,20 +10,20 @@ LL | let _: *const T = core::intrinsics::transmute(t); error: transmute from a reference to a pointer --> tests/ui/transmute.rs:35:21 | -LL | let _: *mut T = core::intrinsics::transmute(t); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *mut T` +LL | let _: *mut T = core::mem::transmute(t); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *mut T` error: transmute from a reference to a pointer --> tests/ui/transmute.rs:38:23 | -LL | let _: *const U = core::intrinsics::transmute(t); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *const U` +LL | let _: *const U = core::mem::transmute(t); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *const U` error: transmute from a type (`std::vec::Vec`) to itself --> tests/ui/transmute.rs:45:27 | -LL | let _: Vec = core::intrinsics::transmute(my_vec()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: Vec = core::mem::transmute(my_vec()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`std::vec::Vec`) to itself --> tests/ui/transmute.rs:48:27 @@ -34,8 +34,8 @@ LL | let _: Vec = core::mem::transmute(my_vec()); error: transmute from a type (`std::vec::Vec`) to itself --> tests/ui/transmute.rs:51:27 | -LL | let _: Vec = std::intrinsics::transmute(my_vec()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: Vec = std::mem::transmute(my_vec()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`std::vec::Vec`) to itself --> tests/ui/transmute.rs:54:27 @@ -64,8 +64,8 @@ LL | let _: *const usize = std::mem::transmute(1 + 1usize); error: transmute from a type (`*const Usize`) to the type that it points to (`Usize`) --> tests/ui/transmute.rs:97:24 | -LL | let _: Usize = core::intrinsics::transmute(int_const_ptr); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: Usize = core::mem::transmute(int_const_ptr); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::crosspointer-transmute` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::crosspointer_transmute)]` @@ -73,20 +73,20 @@ LL | let _: Usize = core::intrinsics::transmute(int_const_ptr); error: transmute from a type (`*mut Usize`) to the type that it points to (`Usize`) --> tests/ui/transmute.rs:101:24 | -LL | let _: Usize = core::intrinsics::transmute(int_mut_ptr); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: Usize = core::mem::transmute(int_mut_ptr); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`Usize`) to a pointer 
to that type (`*const Usize`) --> tests/ui/transmute.rs:104:31 | -LL | let _: *const Usize = core::intrinsics::transmute(my_int()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: *const Usize = core::mem::transmute(my_int()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`Usize`) to a pointer to that type (`*mut Usize`) --> tests/ui/transmute.rs:107:29 | -LL | let _: *mut Usize = core::intrinsics::transmute(my_int()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: *mut Usize = core::mem::transmute(my_int()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a `u8` to a `bool` --> tests/ui/transmute.rs:114:28 diff --git a/tests/ui/stability-attribute/accidental-stable-in-unstable.rs b/tests/ui/stability-attribute/accidental-stable-in-unstable.rs index 86a9d2066eb58..a36a78ee442ce 100644 --- a/tests/ui/stability-attribute/accidental-stable-in-unstable.rs +++ b/tests/ui/stability-attribute/accidental-stable-in-unstable.rs @@ -8,3 +8,4 @@ use core::unicode::UNICODE_VERSION; //~ ERROR use of unstable library feature `u // Known accidental stabilizations with known users // fully stable @ core::mem::transmute use core::intrinsics::transmute; // depended upon by rand_core +//~^WARN deprecated diff --git a/tests/ui/stability-attribute/accidental-stable-in-unstable.stderr b/tests/ui/stability-attribute/accidental-stable-in-unstable.stderr index 9943e6d7ac6ab..16e3676aa6503 100644 --- a/tests/ui/stability-attribute/accidental-stable-in-unstable.stderr +++ b/tests/ui/stability-attribute/accidental-stable-in-unstable.stderr @@ -7,6 +7,14 @@ LL | use core::unicode::UNICODE_VERSION; = help: add `#![feature(unicode_internals)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error: aborting due to 1 previous error +warning: use of deprecated module `std::intrinsics`: import this function via `std::mem` instead + --> $DIR/accidental-stable-in-unstable.rs:10:23 + | +LL | use core::intrinsics::transmute; // depended upon by rand_core + | ^^^^^^^^^ + | + = note: `#[warn(deprecated)]` on by default + +error: aborting due to 1 previous error; 1 warning emitted For more information about this error, try `rustc --explain E0658`. From 7ae494abd45bb09549bba796ac0c5848b423a38d Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 6 Jan 2025 17:32:57 +0100 Subject: [PATCH 083/142] show deprecation message in rustdoc, too --- src/librustdoc/clean/types.rs | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 5e7c175657955..8088ab4c82c30 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -6,7 +6,9 @@ use std::{fmt, iter}; use arrayvec::ArrayVec; use rustc_abi::{ExternAbi, VariantIdx}; -use rustc_attr_parsing::{ConstStability, Deprecation, Stability, StableSince}; +use rustc_attr_parsing::{ + AllowedThroughUnstableModules, ConstStability, Deprecation, Stability, StableSince, +}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId}; @@ -406,15 +408,19 @@ impl Item { // were never supposed to work at all. let stab = self.stability(tcx)?; if let rustc_attr_parsing::StabilityLevel::Stable { - allowed_through_unstable_modules: Some(_), + allowed_through_unstable_modules: Some(note), .. 
} = stab.level { + let note = match note { + AllowedThroughUnstableModules::WithDeprecation(note) => Some(note), + // FIXME: Would be better to say *something* here about the *path* being + // deprecated rather than the item. + AllowedThroughUnstableModules::WithoutDeprecation => None, + }; Some(Deprecation { - // FIXME(#131676, #135003): when a note is added to this stability tag, - // translate it here since: rustc_attr_parsing::DeprecatedSince::Unspecified, - note: None, + note, suggestion: None, }) } else { From ebbcfd4e7721aaf3211f0e8d3d6e304400c80f78 Mon Sep 17 00:00:00 2001 From: lcnr Date: Wed, 15 Jan 2025 09:58:04 +0100 Subject: [PATCH 084/142] avoid running the overlap check twice --- .../src/traits/coherence.rs | 55 +++++++++++-------- .../occurs-check/associated-type.next.stderr | 2 - .../occurs-check/associated-type.old.stderr | 2 - 3 files changed, 33 insertions(+), 26 deletions(-) diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index 971d3a8110254..190e34618d2af 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -116,28 +116,39 @@ pub fn overlapping_impls( return None; } - let _overlap_with_bad_diagnostics = overlap( - tcx, - TrackAmbiguityCauses::No, - skip_leak_check, - impl1_def_id, - impl2_def_id, - overlap_mode, - )?; - - // In the case where we detect an error, run the check again, but - // this time tracking intercrate ambiguity causes for better - // diagnostics. (These take time and can lead to false errors.) - let overlap = overlap( - tcx, - TrackAmbiguityCauses::Yes, - skip_leak_check, - impl1_def_id, - impl2_def_id, - overlap_mode, - ) - .unwrap(); - Some(overlap) + if tcx.next_trait_solver_in_coherence() { + overlap( + tcx, + TrackAmbiguityCauses::Yes, + skip_leak_check, + impl1_def_id, + impl2_def_id, + overlap_mode, + ) + } else { + let _overlap_with_bad_diagnostics = overlap( + tcx, + TrackAmbiguityCauses::No, + skip_leak_check, + impl1_def_id, + impl2_def_id, + overlap_mode, + )?; + + // In the case where we detect an error, run the check again, but + // this time tracking intercrate ambiguity causes for better + // diagnostics. (These take time and can lead to false errors.) + let overlap = overlap( + tcx, + TrackAmbiguityCauses::Yes, + skip_leak_check, + impl1_def_id, + impl2_def_id, + overlap_mode, + ) + .unwrap(); + Some(overlap) + } } fn fresh_impl_header<'tcx>(infcx: &InferCtxt<'tcx>, impl_def_id: DefId) -> ty::ImplHeader<'tcx> { diff --git a/tests/ui/coherence/occurs-check/associated-type.next.stderr b/tests/ui/coherence/occurs-check/associated-type.next.stderr index 466b991471ed9..25f9523f4e455 100644 --- a/tests/ui/coherence/occurs-check/associated-type.next.stderr +++ b/tests/ui/coherence/occurs-check/associated-type.next.stderr @@ -1,7 +1,5 @@ WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. 
} - WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } - WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } error[E0119]: conflicting implementations of trait `Overlap fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())` --> $DIR/associated-type.rs:32:1 | diff --git a/tests/ui/coherence/occurs-check/associated-type.old.stderr b/tests/ui/coherence/occurs-check/associated-type.old.stderr index 1e0345f4ec05b..e091ddcacb20a 100644 --- a/tests/ui/coherence/occurs-check/associated-type.old.stderr +++ b/tests/ui/coherence/occurs-check/associated-type.old.stderr @@ -1,7 +1,5 @@ WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } - WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } - WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. 
} error[E0119]: conflicting implementations of trait `Overlap fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())` --> $DIR/associated-type.rs:32:1 | From 56178ddc90d50426f35ab5a9adce8419d5dc3cac Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Fri, 13 Dec 2024 12:19:46 +0000 Subject: [PATCH 085/142] Treat safe target_feature functions as unsafe by default --- compiler/rustc_ast_lowering/src/delegation.rs | 15 +++++++++-- compiler/rustc_ast_lowering/src/item.rs | 24 +++++++++++++---- .../rustc_codegen_ssa/src/codegen_attrs.rs | 12 ++++++--- compiler/rustc_hir/src/hir.rs | 11 ++++++++ compiler/rustc_hir_pretty/src/lib.rs | 4 +++ compiler/rustc_hir_typeck/src/coercion.rs | 12 ++++++--- .../rustc_mir_build/src/check_unsafety.rs | 4 +++ src/librustdoc/clean/types.rs | 15 +++++++++-- src/librustdoc/html/format.rs | 1 + .../fn-exception-target-features.stderr | 4 +-- tests/ui/macros/issue-68060.rs | 2 -- tests/ui/macros/issue-68060.stderr | 10 ++----- .../rfc-2396-target_feature-11/fn-ptr.stderr | 8 ++---- .../rfc-2396-target_feature-11/fn-traits.rs | 6 ++--- .../fn-traits.stderr | 27 ++++++++++--------- .../safe-calls.stderr | 12 +++++++++ .../rfc-2396-target_feature-11/trait-impl.rs | 1 + .../trait-impl.stderr | 19 +++++++++++-- tests/ui/target-feature/invalid-attribute.rs | 4 +++ .../target-feature/invalid-attribute.stderr | 24 +++++++++++++---- 20 files changed, 159 insertions(+), 56 deletions(-) diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index 4bd0c0b413fc7..266e77c0e02e6 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -188,7 +188,14 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::FnSig<'hir> { let header = if let Some(local_sig_id) = sig_id.as_local() { match self.resolver.delegation_fn_sigs.get(&local_sig_id) { - Some(sig) => self.lower_fn_header(sig.header, hir::Safety::Safe), + Some(sig) => self.lower_fn_header( + sig.header, + // HACK: we override the default safety instead of generating attributes from the ether. + // We are not forwarding the attributes, as the delegation fn sigs are collected on the ast, + // and here we need the hir attributes. 
+ if sig.target_feature { hir::Safety::Unsafe } else { hir::Safety::Safe }, + &[], + ), None => self.generate_header_error(), } } else { @@ -198,7 +205,11 @@ impl<'hir> LoweringContext<'_, 'hir> { Asyncness::No => hir::IsAsync::NotAsync, }; hir::FnHeader { - safety: sig.safety.into(), + safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features { + hir::HeaderSafety::SafeTargetFeatures + } else { + hir::HeaderSafety::Normal(sig.safety) + }, constness: self.tcx.constness(sig_id), asyncness, abi: sig.abi, diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 9aaa0dfb69c74..61d7da429f8e8 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -231,7 +231,7 @@ impl<'hir> LoweringContext<'_, 'hir> { }); let sig = hir::FnSig { decl, - header: this.lower_fn_header(*header, hir::Safety::Safe), + header: this.lower_fn_header(*header, hir::Safety::Safe, attrs), span: this.lower_span(*fn_sig_span), }; hir::ItemKind::Fn { sig, generics, body: body_id, has_body: body.is_some() } @@ -610,7 +610,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> { let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); let owner_id = hir_id.expect_owner(); - self.lower_attrs(hir_id, &i.attrs); + let attrs = self.lower_attrs(hir_id, &i.attrs); let item = hir::ForeignItem { owner_id, ident: self.lower_ident(i.ident), @@ -634,7 +634,7 @@ impl<'hir> LoweringContext<'_, 'hir> { }); // Unmarked safety in unsafe block defaults to unsafe. - let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe); + let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe, attrs); hir::ForeignItemKind::Fn( hir::FnSig { header, decl, span: self.lower_span(sig.span) }, @@ -776,6 +776,7 @@ impl<'hir> LoweringContext<'_, 'hir> { i.id, FnDeclKind::Trait, sig.header.coroutine_kind, + attrs, ); (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)), false) } @@ -795,6 +796,7 @@ impl<'hir> LoweringContext<'_, 'hir> { i.id, FnDeclKind::Trait, sig.header.coroutine_kind, + attrs, ); (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true) } @@ -911,6 +913,7 @@ impl<'hir> LoweringContext<'_, 'hir> { i.id, if self.is_in_trait_impl { FnDeclKind::Impl } else { FnDeclKind::Inherent }, sig.header.coroutine_kind, + attrs, ); (generics, hir::ImplItemKind::Fn(sig, body_id)) @@ -1339,8 +1342,9 @@ impl<'hir> LoweringContext<'_, 'hir> { id: NodeId, kind: FnDeclKind, coroutine_kind: Option, + attrs: &[hir::Attribute], ) -> (&'hir hir::Generics<'hir>, hir::FnSig<'hir>) { - let header = self.lower_fn_header(sig.header, hir::Safety::Safe); + let header = self.lower_fn_header(sig.header, hir::Safety::Safe, attrs); let itctx = ImplTraitContext::Universal; let (generics, decl) = self.lower_generics(generics, id, itctx, |this| { this.lower_fn_decl(&sig.decl, id, sig.span, kind, coroutine_kind) @@ -1352,6 +1356,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &mut self, h: FnHeader, default_safety: hir::Safety, + attrs: &[hir::Attribute], ) -> hir::FnHeader { let asyncness = if let Some(CoroutineKind::Async { span, .. }) = h.coroutine_kind { hir::IsAsync::Async(span) @@ -1360,7 +1365,16 @@ impl<'hir> LoweringContext<'_, 'hir> { }; let safety = self.lower_safety(h.safety, default_safety); - let safety = safety.into(); + + // Treat safe `#[target_feature]` functions as unsafe, but also remember that we did so. 
+ let safety = if attrs.iter().any(|attr| attr.has_name(sym::target_feature)) + && safety.is_safe() + && !self.tcx.sess.target.is_like_wasm + { + hir::HeaderSafety::SafeTargetFeatures + } else { + safety.into() + }; hir::FnHeader { safety, diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index 37b53bb5bea47..1daa17fbaf347 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -250,10 +250,14 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } } sym::target_feature => { - if !tcx.is_closure_like(did.to_def_id()) - && let Some(fn_sig) = fn_sig() - && fn_sig.skip_binder().safety().is_safe() - { + let Some(sig) = tcx.hir_node_by_def_id(did).fn_sig() else { + tcx.dcx().span_delayed_bug(attr.span, "target_feature applied to non-fn"); + continue; + }; + let safe_target_features = + matches!(sig.header.safety, hir::HeaderSafety::SafeTargetFeatures); + codegen_fn_attrs.safe_target_features = safe_target_features; + if safe_target_features { if tcx.sess.target.is_like_wasm || tcx.sess.opts.actually_rustdoc { // The `#[target_feature]` attribute is allowed on // WebAssembly targets on all functions, including safe diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index f115dfaaa1eb7..740c3ee8c076b 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -3762,8 +3762,18 @@ impl fmt::Display for Constness { } } +/// The actual safety specified in syntax. We may treat +/// its safety differently within the type system to create a +/// "sound by default" system that needs to check this enum +/// explicitly to allow unsafe operations. #[derive(Copy, Clone, Debug, HashStable_Generic, PartialEq, Eq)] pub enum HeaderSafety { + /// A safe function annotated with `#[target_features]`. + /// The type system treats this function as an unsafe function, + /// but safety checking will check this enum to treat it as safe + /// and allow calling other safe target feature functions with + /// the same features without requiring an additional unsafe block. + SafeTargetFeatures, Normal(Safety), } @@ -3800,6 +3810,7 @@ impl FnHeader { pub fn safety(&self) -> Safety { match self.safety { + HeaderSafety::SafeTargetFeatures => Safety::Unsafe, HeaderSafety::Normal(safety) => safety, } } diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index 1afc4bedee093..3ff6acd79fcb6 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -2424,6 +2424,10 @@ impl<'a> State<'a> { self.print_constness(header.constness); let safety = match header.safety { + hir::HeaderSafety::SafeTargetFeatures => { + self.word_nbsp("#[target_feature]"); + hir::Safety::Safe + } hir::HeaderSafety::Normal(safety) => safety, }; diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index 11a0b3b917eaa..ec7c1efa38e2e 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -932,9 +932,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return Err(TypeError::ForceInlineCast); } - // Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396), - // report a better error than a safety mismatch. - // FIXME(target_feature): do this inside `coerce_from_safe_fn`. + let fn_attrs = self.tcx.codegen_fn_attrs(def_id); + if matches!(fn_attrs.inline, InlineAttr::Force { ..
}) { + return Err(TypeError::ForceInlineCast); + } + + // FIXME(target_feature): Safe `#[target_feature]` functions could be cast to safe fn pointers (RFC 2396), + // as you can already write that "cast" in user code by wrapping a target_feature fn call in a closure, + // which is safe. This is sound because you already need to be executing code that is satisfying the target + // feature constraints.. if b_hdr.safety.is_safe() && self.tcx.codegen_fn_attrs(def_id).safe_target_features { diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 0ecc8d84f2af7..ba8802d786cf1 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -1120,6 +1120,10 @@ pub(crate) fn check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) { let hir_id = tcx.local_def_id_to_hir_id(def); let safety_context = tcx.hir().fn_sig_by_hir_id(hir_id).map_or(SafetyContext::Safe, |fn_sig| { match fn_sig.header.safety { + // We typeck the body as safe, but otherwise treat it as unsafe everywhere else. + // Call sites to other SafeTargetFeatures functions are checked explicitly and don't need + // to care about safety of the body. + hir::HeaderSafety::SafeTargetFeatures => SafetyContext::Safe, hir::HeaderSafety::Normal(safety) => match safety { hir::Safety::Unsafe => SafetyContext::UnsafeFn, hir::Safety::Safe => SafetyContext::Safe, diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index f6462191734aa..ff1c0294fb8a1 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -668,14 +668,25 @@ impl Item { ty::Asyncness::Yes => hir::IsAsync::Async(DUMMY_SP), ty::Asyncness::No => hir::IsAsync::NotAsync, }; - hir::FnHeader { safety: sig.safety().into(), abi: sig.abi(), constness, asyncness } + hir::FnHeader { + safety: if tcx.codegen_fn_attrs(def_id).safe_target_features { + hir::HeaderSafety::SafeTargetFeatures + } else { + sig.safety().into() + }, + abi: sig.abi(), + constness, + asyncness, + } } let header = match self.kind { ItemKind::ForeignFunctionItem(_, safety) => { let def_id = self.def_id().unwrap(); let abi = tcx.fn_sig(def_id).skip_binder().abi(); hir::FnHeader { - safety: if abi == ExternAbi::RustIntrinsic { + safety: if tcx.codegen_fn_attrs(def_id).safe_target_features { + hir::HeaderSafety::SafeTargetFeatures + } else if abi == ExternAbi::RustIntrinsic { intrinsic_operation_unsafety(tcx, def_id.expect_local()).into() } else { safety.into() diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 17d6abfbd0413..92935c72b479e 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -1640,6 +1640,7 @@ impl PrintWithSpace for hir::Safety { impl PrintWithSpace for hir::HeaderSafety { fn print_with_space(&self) -> &str { match self { + hir::HeaderSafety::SafeTargetFeatures => "", hir::HeaderSafety::Normal(safety) => safety.print_with_space(), } } diff --git a/tests/ui/async-await/async-closures/fn-exception-target-features.stderr b/tests/ui/async-await/async-closures/fn-exception-target-features.stderr index db5895108bb0e..d3a5f292dd1a7 100644 --- a/tests/ui/async-await/async-closures/fn-exception-target-features.stderr +++ b/tests/ui/async-await/async-closures/fn-exception-target-features.stderr @@ -1,8 +1,8 @@ -error[E0277]: the trait bound `fn() -> Pin + 'static)>> {target_feature}: AsyncFn()` is not satisfied +error[E0277]: the trait bound `unsafe fn() -> Pin + 'static)>> {target_feature}: AsyncFn()` is not 
satisfied --> $DIR/fn-exception-target-features.rs:16:10 | LL | test(target_feature); - | ---- ^^^^^^^^^^^^^^ the trait `AsyncFn()` is not implemented for fn item `fn() -> Pin + 'static)>> {target_feature}` + | ---- ^^^^^^^^^^^^^^ the trait `AsyncFn()` is not implemented for fn item `unsafe fn() -> Pin + 'static)>> {target_feature}` | | | required by a bound introduced by this call | diff --git a/tests/ui/macros/issue-68060.rs b/tests/ui/macros/issue-68060.rs index 1a826bd60e039..4eddb96848cd0 100644 --- a/tests/ui/macros/issue-68060.rs +++ b/tests/ui/macros/issue-68060.rs @@ -3,8 +3,6 @@ fn main() { .map( #[target_feature(enable = "")] //~^ ERROR: attribute should be applied to a function - //~| ERROR: feature named `` is not valid - //~| NOTE: `` is not valid for this target #[track_caller] //~^ ERROR: `#[track_caller]` on closures is currently unstable //~| NOTE: see issue #87417 diff --git a/tests/ui/macros/issue-68060.stderr b/tests/ui/macros/issue-68060.stderr index 5724a9ea43843..ef2246d5bd6bb 100644 --- a/tests/ui/macros/issue-68060.stderr +++ b/tests/ui/macros/issue-68060.stderr @@ -7,14 +7,8 @@ LL | #[target_feature(enable = "")] LL | |_| (), | ------ not a function definition -error: the feature named `` is not valid for this target - --> $DIR/issue-68060.rs:4:30 - | -LL | #[target_feature(enable = "")] - | ^^^^^^^^^^^ `` is not valid for this target - error[E0658]: `#[track_caller]` on closures is currently unstable - --> $DIR/issue-68060.rs:8:13 + --> $DIR/issue-68060.rs:6:13 | LL | #[track_caller] | ^^^^^^^^^^^^^^^ @@ -23,6 +17,6 @@ LL | #[track_caller] = help: add `#![feature(closure_track_caller)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error: aborting due to 3 previous errors +error: aborting due to 2 previous errors For more information about this error, try `rustc --explain E0658`. 
diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr index cc941be5479e3..b65eb889bdd2f 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr @@ -10,13 +10,9 @@ LL | let foo: fn() = foo; | expected due to this | = note: expected fn pointer `fn()` - found fn item `fn() {foo}` - = note: fn items are distinct from fn pointers + found fn item `unsafe fn() {foo}` = note: functions with `#[target_feature]` can only be coerced to `unsafe` function pointers -help: consider casting to a fn pointer - | -LL | let foo: fn() = foo as fn(); - | ~~~~~~~~~~~ + = note: unsafe functions cannot be coerced into safe function pointers error: aborting due to 1 previous error diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs index 3c370a1b8f321..584d97526298d 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs @@ -21,9 +21,9 @@ fn call_once(f: impl FnOnce()) { } fn main() { - call(foo); //~ ERROR expected a `Fn()` closure, found `fn() {foo}` - call_mut(foo); //~ ERROR expected a `FnMut()` closure, found `fn() {foo}` - call_once(foo); //~ ERROR expected a `FnOnce()` closure, found `fn() {foo}` + call(foo); //~ ERROR expected a `Fn()` closure, found `unsafe fn() {foo}` + call_mut(foo); //~ ERROR expected a `FnMut()` closure, found `unsafe fn() {foo}` + call_once(foo); //~ ERROR expected a `FnOnce()` closure, found `unsafe fn() {foo}` call(foo_unsafe); //~^ ERROR expected a `Fn()` closure, found `unsafe fn() {foo_unsafe}` diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr index 4c07f4d6b990d..5d79cd7736f21 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr @@ -1,13 +1,14 @@ -error[E0277]: expected a `Fn()` closure, found `fn() {foo}` +error[E0277]: expected a `Fn()` closure, found `unsafe fn() {foo}` --> $DIR/fn-traits.rs:24:10 | LL | call(foo); - | ---- ^^^ expected an `Fn()` closure, found `fn() {foo}` + | ---- ^^^ call the function in a closure: `|| unsafe { /* code */ }` | | | required by a bound introduced by this call | - = help: the trait `Fn()` is not implemented for fn item `fn() {foo}` - = note: wrap the `fn() {foo}` in a closure with no arguments: `|| { /* code */ }` + = help: the trait `Fn()` is not implemented for fn item `unsafe fn() {foo}` + = note: unsafe function cannot be called generically without an unsafe block + = note: wrap the `unsafe fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call` --> $DIR/fn-traits.rs:11:17 @@ -15,16 +16,17 @@ note: required by a bound in `call` LL | fn call(f: impl Fn()) { | ^^^^ required by this bound in `call` -error[E0277]: expected a `FnMut()` closure, found `fn() {foo}` +error[E0277]: expected a `FnMut()` closure, found `unsafe fn() {foo}` --> $DIR/fn-traits.rs:25:14 | LL | call_mut(foo); - | -------- ^^^ expected an `FnMut()` closure, found `fn() {foo}` + | -------- ^^^ call the function in a closure: `|| unsafe { /* code */ }` | | | required by a bound introduced by this call | - = help: the trait `FnMut()` is not implemented for fn item `fn() {foo}` - = note: wrap the `fn() {foo}` in a closure 
with no arguments: `|| { /* code */ }` + = help: the trait `FnMut()` is not implemented for fn item `unsafe fn() {foo}` + = note: unsafe function cannot be called generically without an unsafe block + = note: wrap the `unsafe fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call_mut` --> $DIR/fn-traits.rs:15:25 @@ -32,16 +34,17 @@ note: required by a bound in `call_mut` LL | fn call_mut(mut f: impl FnMut()) { | ^^^^^^^ required by this bound in `call_mut` -error[E0277]: expected a `FnOnce()` closure, found `fn() {foo}` +error[E0277]: expected a `FnOnce()` closure, found `unsafe fn() {foo}` --> $DIR/fn-traits.rs:26:15 | LL | call_once(foo); - | --------- ^^^ expected an `FnOnce()` closure, found `fn() {foo}` + | --------- ^^^ call the function in a closure: `|| unsafe { /* code */ }` | | | required by a bound introduced by this call | - = help: the trait `FnOnce()` is not implemented for fn item `fn() {foo}` - = note: wrap the `fn() {foo}` in a closure with no arguments: `|| { /* code */ }` + = help: the trait `FnOnce()` is not implemented for fn item `unsafe fn() {foo}` + = note: unsafe function cannot be called generically without an unsafe block + = note: wrap the `unsafe fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call_once` --> $DIR/fn-traits.rs:19:22 diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr index 901bf640845dd..003c20b867502 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr @@ -26,6 +26,9 @@ LL | Quux.avx_bmi2(); error[E0133]: call to function `avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:38:5 | +LL | fn bar() { + | -------- items do not inherit unsafety from separate enclosing items +LL | sse2(); LL | avx_bmi2(); | ^^^^^^^^^^ call to function with `#[target_feature]` | @@ -34,6 +37,9 @@ LL | avx_bmi2(); error[E0133]: call to function `Quux::avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:40:5 | +LL | fn bar() { + | -------- items do not inherit unsafety from separate enclosing items +... LL | Quux.avx_bmi2(); | ^^^^^^^^^^^^^^^ call to function with `#[target_feature]` | @@ -42,6 +48,9 @@ LL | Quux.avx_bmi2(); error[E0133]: call to function `avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:47:5 | +LL | fn baz() { + | -------- items do not inherit unsafety from separate enclosing items +LL | sse2(); LL | avx_bmi2(); | ^^^^^^^^^^ call to function with `#[target_feature]` | @@ -50,6 +59,9 @@ LL | avx_bmi2(); error[E0133]: call to function `Quux::avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:49:5 | +LL | fn baz() { + | -------- items do not inherit unsafety from separate enclosing items +... 
LL | Quux.avx_bmi2(); | ^^^^^^^^^^^^^^^ call to function with `#[target_feature]` | diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.rs b/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.rs index df575b0f6b636..a2ac6ff45fccf 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.rs +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.rs @@ -13,6 +13,7 @@ impl Foo for Bar { #[target_feature(enable = "sse2")] //~^ ERROR cannot be applied to safe trait method fn foo(&self) {} + //~^ ERROR method `foo` has an incompatible type for trait #[target_feature(enable = "sse2")] unsafe fn unsf_foo(&self) {} diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr index 00efbb52f159b..55532fe0168ed 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr @@ -1,3 +1,17 @@ +error[E0053]: method `foo` has an incompatible type for trait + --> $DIR/trait-impl.rs:15:5 + | +LL | fn foo(&self) {} + | ^^^^^^^^^^^^^ expected safe fn, found unsafe fn + | +note: type in trait + --> $DIR/trait-impl.rs:6:5 + | +LL | fn foo(&self); + | ^^^^^^^^^^^^^^ + = note: expected signature `fn(&Bar)` + found signature `unsafe fn(&Bar)` + error: `#[target_feature(..)]` cannot be applied to safe trait method --> $DIR/trait-impl.rs:13:5 | @@ -8,7 +22,7 @@ LL | fn foo(&self) {} | ------------- not an `unsafe` function error: `#[target_feature(..)]` cannot be applied to safe trait method - --> $DIR/trait-impl.rs:22:5 + --> $DIR/trait-impl.rs:23:5 | LL | #[target_feature(enable = "sse2")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot be applied to safe trait method @@ -16,5 +30,6 @@ LL | LL | fn foo(&self) {} | ------------- not an `unsafe` function -error: aborting due to 2 previous errors +error: aborting due to 3 previous errors +For more information about this error, try `rustc --explain E0053`. 
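[Reader's aid, not part of the patch.] A minimal sketch of the coercion rule behind the `fn-ptr.stderr` hunk above, assuming an x86-64 target and, at the time of this series, a nightly compiler with `target_feature_11`: the fn item is now unsafe at the type level, so only an `unsafe fn()` pointer type accepts it.

    #![feature(target_feature_11)] // still a nightly gate at the time of this series

    #[target_feature(enable = "sse2")]
    fn foo() {}

    fn main() {
        // let safe_ptr: fn() = foo; // ERROR: expected fn pointer `fn()`,
        //                           //        found fn item `unsafe fn() {foo}`
        let unsafe_ptr: unsafe fn() = foo; // OK: `#[target_feature]` fns coerce to unsafe fn pointers
        // SAFETY: `sse2` is part of the x86-64 baseline, so the call is fine here.
        unsafe { unsafe_ptr() }
    }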
diff --git a/tests/ui/target-feature/invalid-attribute.rs b/tests/ui/target-feature/invalid-attribute.rs index 2f951c4a00a9e..c0f5b6b2fb292 100644 --- a/tests/ui/target-feature/invalid-attribute.rs +++ b/tests/ui/target-feature/invalid-attribute.rs @@ -97,6 +97,7 @@ impl Foo {} trait Quux { fn foo(); //~ NOTE `foo` from trait + //~^ NOTE: type in trait } impl Quux for Foo { @@ -106,6 +107,9 @@ impl Quux for Foo { //~| NOTE: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date fn foo() {} //~^ NOTE not an `unsafe` function + //~| ERROR: incompatible type for trait + //~| NOTE: expected safe fn, found unsafe fn + //~| NOTE: expected signature `fn()` } fn main() { diff --git a/tests/ui/target-feature/invalid-attribute.stderr b/tests/ui/target-feature/invalid-attribute.stderr index bf48911edec5f..c00f2254ac38b 100644 --- a/tests/ui/target-feature/invalid-attribute.stderr +++ b/tests/ui/target-feature/invalid-attribute.stderr @@ -126,7 +126,7 @@ LL | impl Foo {} | ----------- not a function definition error: attribute should be applied to a function definition - --> $DIR/invalid-attribute.rs:112:5 + --> $DIR/invalid-attribute.rs:116:5 | LL | #[target_feature(enable = "sse2")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -138,7 +138,7 @@ LL | | } | |_____- not a function definition error: attribute should be applied to a function definition - --> $DIR/invalid-attribute.rs:120:5 + --> $DIR/invalid-attribute.rs:124:5 | LL | #[target_feature(enable = "sse2")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -192,8 +192,22 @@ LL | impl Quux for u8 {} LL | fn foo(); | --------- `foo` from trait +error[E0053]: method `foo` has an incompatible type for trait + --> $DIR/invalid-attribute.rs:108:5 + | +LL | fn foo() {} + | ^^^^^^^^ expected safe fn, found unsafe fn + | +note: type in trait + --> $DIR/invalid-attribute.rs:99:5 + | +LL | fn foo(); + | ^^^^^^^^^ + = note: expected signature `fn()` + found signature `unsafe fn()` + error[E0658]: `#[target_feature(..)]` can only be applied to `unsafe` functions - --> $DIR/invalid-attribute.rs:103:5 + --> $DIR/invalid-attribute.rs:104:5 | LL | #[target_feature(enable = "sse2")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -205,7 +219,7 @@ LL | fn foo() {} = help: add `#![feature(target_feature_11)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error: aborting due to 23 previous errors +error: aborting due to 24 previous errors -Some errors have detailed explanations: E0046, E0658. +Some errors have detailed explanations: E0046, E0053, E0658. For more information about an error, try `rustc --explain E0046`. 
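[Reader's aid, not part of the patch.] Before the rendering-only patches that follow, a minimal sketch of the user-visible rule the patch above establishes (same assumptions: x86-64 target, nightly `target_feature_11`): a safe `#[target_feature]` function stays directly callable where the caller already enables the same features, and needs an `unsafe` block everywhere else, which is what the updated `safe-calls` expectations encode.

    #![feature(target_feature_11)] // still a nightly gate at the time of this series

    #[target_feature(enable = "avx2")]
    fn uses_avx2() {}

    #[target_feature(enable = "avx2")]
    fn same_features() {
        // OK without `unsafe`: this caller already guarantees `avx2`.
        uses_avx2();
    }

    fn ordinary_code() {
        // uses_avx2(); // ERROR: call to function `uses_avx2` with `#[target_feature]`
        //              // is unsafe and requires unsafe function or block
        if is_x86_feature_detected!("avx2") {
            // SAFETY: we just checked that `avx2` is available at runtime.
            unsafe { uses_avx2() }
        }
    }

    fn main() {
        ordinary_code();
        if is_x86_feature_detected!("avx2") {
            // SAFETY: as above.
            unsafe { same_features() }
        }
    }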
From e1a8b0da2d142ed156865693cd79b5522769e131 Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Sun, 15 Dec 2024 20:11:19 +0000 Subject: [PATCH 086/142] Hide the internal unsafety of safe target_feature fn items --- .../src/error_reporting/infer/mod.rs | 16 ++++++++++++++-- .../rfc-2396-target_feature-11/fn-ptr.stderr | 2 +- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 53300c95fa709..e3820701c5c26 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -850,8 +850,20 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // unsafe extern "C" for<'a> fn(&'a T) -> &'a T // ^^^^^^ - values.0.push(sig1.safety.prefix_str(), sig1.safety != sig2.safety); - values.1.push(sig2.safety.prefix_str(), sig1.safety != sig2.safety); + let safety = |fn_def, sig: ty::FnSig<'_>| match fn_def { + None => sig.safety.prefix_str(), + Some((did, _)) => { + if self.tcx.codegen_fn_attrs(did).safe_target_features { + "#[target_features] " + } else { + sig.safety.prefix_str() + } + } + }; + let safety1 = safety(fn_def1, sig1); + let safety2 = safety(fn_def2, sig2); + values.0.push(safety1, safety1 != safety2); + values.1.push(safety2, safety1 != safety2); // unsafe extern "C" for<'a> fn(&'a T) -> &'a T // ^^^^^^^^^^ diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr index b65eb889bdd2f..9a80286c35df0 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr @@ -10,7 +10,7 @@ LL | let foo: fn() = foo; | expected due to this | = note: expected fn pointer `fn()` - found fn item `unsafe fn() {foo}` + found fn item `#[target_features] fn() {foo}` = note: functions with `#[target_feature]` can only be coerced to `unsafe` function pointers = note: unsafe functions cannot be coerced into safe function pointers From 50654e538413635de2d50675112f1fd2d9a4e6d2 Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Sun, 15 Dec 2024 20:32:14 +0000 Subject: [PATCH 087/142] Render fn defs with target_features attrs with the attribute --- compiler/rustc_middle/src/ty/print/pretty.rs | 9 ++++++++- .../fn-exception-target-features.stderr | 5 +++-- .../rfc-2396-target_feature-11/fn-traits.rs | 6 +++--- .../fn-traits.stderr | 18 +++++++++--------- 4 files changed, 23 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 9fe1caa4b58f5..ac900edefe12f 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -690,7 +690,14 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { if with_reduced_queries() { p!(print_def_path(def_id, args)); } else { - let sig = self.tcx().fn_sig(def_id).instantiate(self.tcx(), args); + let mut sig = self.tcx().fn_sig(def_id).instantiate(self.tcx(), args); + if self.tcx().codegen_fn_attrs(def_id).safe_target_features { + p!("#[target_features] "); + sig = sig.map_bound(|mut sig| { + sig.safety = hir::Safety::Safe; + sig + }); + } p!(print(sig), " {{", print_value_path(def_id, args), "}}"); } } diff --git a/tests/ui/async-await/async-closures/fn-exception-target-features.stderr b/tests/ui/async-await/async-closures/fn-exception-target-features.stderr index d3a5f292dd1a7..37977b45250ff 100644 --- 
a/tests/ui/async-await/async-closures/fn-exception-target-features.stderr +++ b/tests/ui/async-await/async-closures/fn-exception-target-features.stderr @@ -1,11 +1,12 @@ -error[E0277]: the trait bound `unsafe fn() -> Pin + 'static)>> {target_feature}: AsyncFn()` is not satisfied +error[E0277]: the trait bound `#[target_features] fn() -> Pin + 'static)>> {target_feature}: AsyncFn()` is not satisfied --> $DIR/fn-exception-target-features.rs:16:10 | LL | test(target_feature); - | ---- ^^^^^^^^^^^^^^ the trait `AsyncFn()` is not implemented for fn item `unsafe fn() -> Pin + 'static)>> {target_feature}` + | ---- ^^^^^^^^^^^^^^ unsatisfied trait bound | | | required by a bound introduced by this call | + = help: the trait `AsyncFn()` is not implemented for fn item `#[target_features] fn() -> Pin + 'static)>> {target_feature}` note: required by a bound in `test` --> $DIR/fn-exception-target-features.rs:13:17 | diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs index 584d97526298d..3eae79faf423b 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.rs @@ -21,9 +21,9 @@ fn call_once(f: impl FnOnce()) { } fn main() { - call(foo); //~ ERROR expected a `Fn()` closure, found `unsafe fn() {foo}` - call_mut(foo); //~ ERROR expected a `FnMut()` closure, found `unsafe fn() {foo}` - call_once(foo); //~ ERROR expected a `FnOnce()` closure, found `unsafe fn() {foo}` + call(foo); //~ ERROR expected a `Fn()` closure, found `#[target_features] fn() {foo}` + call_mut(foo); //~ ERROR expected a `FnMut()` closure, found `#[target_features] fn() {foo}` + call_once(foo); //~ ERROR expected a `FnOnce()` closure, found `#[target_features] fn() {foo}` call(foo_unsafe); //~^ ERROR expected a `Fn()` closure, found `unsafe fn() {foo_unsafe}` diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr index 5d79cd7736f21..6c3f5bef80e7c 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr @@ -1,4 +1,4 @@ -error[E0277]: expected a `Fn()` closure, found `unsafe fn() {foo}` +error[E0277]: expected a `Fn()` closure, found `#[target_features] fn() {foo}` --> $DIR/fn-traits.rs:24:10 | LL | call(foo); @@ -6,9 +6,9 @@ LL | call(foo); | | | required by a bound introduced by this call | - = help: the trait `Fn()` is not implemented for fn item `unsafe fn() {foo}` + = help: the trait `Fn()` is not implemented for fn item `#[target_features] fn() {foo}` = note: unsafe function cannot be called generically without an unsafe block - = note: wrap the `unsafe fn() {foo}` in a closure with no arguments: `|| { /* code */ }` + = note: wrap the `#[target_features] fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call` --> $DIR/fn-traits.rs:11:17 @@ -16,7 +16,7 @@ note: required by a bound in `call` LL | fn call(f: impl Fn()) { | ^^^^ required by this bound in `call` -error[E0277]: expected a `FnMut()` closure, found `unsafe fn() {foo}` +error[E0277]: expected a `FnMut()` closure, found `#[target_features] fn() {foo}` --> $DIR/fn-traits.rs:25:14 | LL | call_mut(foo); @@ -24,9 +24,9 @@ LL | call_mut(foo); | | | required by a bound introduced by this call | - = help: the trait `FnMut()` is not implemented for fn item `unsafe fn() {foo}` + = 
help: the trait `FnMut()` is not implemented for fn item `#[target_features] fn() {foo}` = note: unsafe function cannot be called generically without an unsafe block - = note: wrap the `unsafe fn() {foo}` in a closure with no arguments: `|| { /* code */ }` + = note: wrap the `#[target_features] fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call_mut` --> $DIR/fn-traits.rs:15:25 @@ -34,7 +34,7 @@ note: required by a bound in `call_mut` LL | fn call_mut(mut f: impl FnMut()) { | ^^^^^^^ required by this bound in `call_mut` -error[E0277]: expected a `FnOnce()` closure, found `unsafe fn() {foo}` +error[E0277]: expected a `FnOnce()` closure, found `#[target_features] fn() {foo}` --> $DIR/fn-traits.rs:26:15 | LL | call_once(foo); @@ -42,9 +42,9 @@ LL | call_once(foo); | | | required by a bound introduced by this call | - = help: the trait `FnOnce()` is not implemented for fn item `unsafe fn() {foo}` + = help: the trait `FnOnce()` is not implemented for fn item `#[target_features] fn() {foo}` = note: unsafe function cannot be called generically without an unsafe block - = note: wrap the `unsafe fn() {foo}` in a closure with no arguments: `|| { /* code */ }` + = note: wrap the `#[target_features] fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call_once` --> $DIR/fn-traits.rs:19:22 From 33651f49a0999c07f7f33bfd5d6d98bf91390837 Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Mon, 16 Dec 2024 10:25:11 +0000 Subject: [PATCH 088/142] Render fn defs with target_features attrs with the attribute [second site] --- .../src/error_reporting/infer/mod.rs | 52 +++++++++++++------ .../src/error_reporting/infer/region.rs | 8 +-- .../trait-impl.stderr | 20 +++---- .../target-feature/invalid-attribute.stderr | 28 +++++----- 4 files changed, 64 insertions(+), 44 deletions(-) diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index e3820701c5c26..7032f7b9d318e 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -824,9 +824,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn cmp_fn_sig( &self, sig1: &ty::PolyFnSig<'tcx>, - fn_def1: Option<(DefId, &'tcx [ty::GenericArg<'tcx>])>, + fn_def1: Option<(DefId, Option<&'tcx [ty::GenericArg<'tcx>]>)>, sig2: &ty::PolyFnSig<'tcx>, - fn_def2: Option<(DefId, &'tcx [ty::GenericArg<'tcx>])>, + fn_def2: Option<(DefId, Option<&'tcx [ty::GenericArg<'tcx>]>)>, ) -> (DiagStyledString, DiagStyledString) { let sig1 = &(self.normalize_fn_sig)(*sig1); let sig2 = &(self.normalize_fn_sig)(*sig2); @@ -944,23 +944,23 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { (values.1).0.extend(x2.0); } - let fmt = |(did, args)| format!(" {{{}}}", self.tcx.def_path_str_with_args(did, args)); + let fmt = |did, args| format!(" {{{}}}", self.tcx.def_path_str_with_args(did, args)); match (fn_def1, fn_def2) { - (None, None) => {} - (Some(fn_def1), Some(fn_def2)) => { - let path1 = fmt(fn_def1); - let path2 = fmt(fn_def2); + (Some((fn_def1, Some(fn_args1))), Some((fn_def2, Some(fn_args2)))) => { + let path1 = fmt(fn_def1, fn_args1); + let path2 = fmt(fn_def2, fn_args2); let same_path = path1 == path2; values.0.push(path1, !same_path); values.1.push(path2, !same_path); } - (Some(fn_def1), None) => { - 
values.0.push_highlighted(fmt(fn_def1)); + (Some((fn_def1, Some(fn_args1))), None) => { + values.0.push_highlighted(fmt(fn_def1, fn_args1)); } - (None, Some(fn_def2)) => { - values.1.push_highlighted(fmt(fn_def2)); + (None, Some((fn_def2, Some(fn_args2)))) => { + values.1.push_highlighted(fmt(fn_def2, fn_args2)); } + _ => {} } values @@ -1351,17 +1351,22 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { (ty::FnDef(did1, args1), ty::FnDef(did2, args2)) => { let sig1 = self.tcx.fn_sig(*did1).instantiate(self.tcx, args1); let sig2 = self.tcx.fn_sig(*did2).instantiate(self.tcx, args2); - self.cmp_fn_sig(&sig1, Some((*did1, args1)), &sig2, Some((*did2, args2))) + self.cmp_fn_sig( + &sig1, + Some((*did1, Some(args1))), + &sig2, + Some((*did2, Some(args2))), + ) } (ty::FnDef(did1, args1), ty::FnPtr(sig_tys2, hdr2)) => { let sig1 = self.tcx.fn_sig(*did1).instantiate(self.tcx, args1); - self.cmp_fn_sig(&sig1, Some((*did1, args1)), &sig_tys2.with(*hdr2), None) + self.cmp_fn_sig(&sig1, Some((*did1, Some(args1))), &sig_tys2.with(*hdr2), None) } (ty::FnPtr(sig_tys1, hdr1), ty::FnDef(did2, args2)) => { let sig2 = self.tcx.fn_sig(*did2).instantiate(self.tcx, args2); - self.cmp_fn_sig(&sig_tys1.with(*hdr1), None, &sig2, Some((*did2, args2))) + self.cmp_fn_sig(&sig_tys1.with(*hdr1), None, &sig2, Some((*did2, Some(args2)))) } (ty::FnPtr(sig_tys1, hdr1), ty::FnPtr(sig_tys2, hdr2)) => { @@ -1543,7 +1548,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { (false, Mismatch::Fixed("existential projection")) } }; - let Some(vals) = self.values_str(values) else { + let Some(vals) = self.values_str(values, cause) else { // Derived error. Cancel the emitter. // NOTE(eddyb) this was `.cancel()`, but `diag` // is borrowed, so we can't fully defuse it. @@ -1968,7 +1973,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { }) | ObligationCauseCode::BlockTailExpression(.., source)) = code && let hir::MatchSource::TryDesugar(_) = source - && let Some((expected_ty, found_ty, _)) = self.values_str(trace.values) + && let Some((expected_ty, found_ty, _)) = self.values_str(trace.values, &trace.cause) { suggestions.push(TypeErrorAdditionalDiags::TryCannotConvert { found: found_ty.content(), @@ -2097,6 +2102,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn values_str( &self, values: ValuePairs<'tcx>, + cause: &ObligationCause<'tcx>, ) -> Option<(DiagStyledString, DiagStyledString, Option)> { match values { ValuePairs::Regions(exp_found) => self.expected_found_str(exp_found), @@ -2121,7 +2127,19 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if exp_found.references_error() { return None; } - let (exp, fnd) = self.cmp_fn_sig(&exp_found.expected, None, &exp_found.found, None); + let (fn_def1, fn_def2) = if let ObligationCauseCode::CompareImplItem { + impl_item_def_id, + trait_item_def_id, + .. 
+ } = *cause.code() + { + (Some((trait_item_def_id, None)), Some((impl_item_def_id.to_def_id(), None))) + } else { + (None, None) + }; + + let (exp, fnd) = + self.cmp_fn_sig(&exp_found.expected, fn_def1, &exp_found.found, fn_def2); Some((exp, fnd, None)) } } diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs index 98b5fb2052f11..3acca47025cbd 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs @@ -221,7 +221,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { infer::Subtype(ref trace) => RegionOriginNote::WithRequirement { span: trace.cause.span, requirement: ObligationCauseAsDiagArg(trace.cause.clone()), - expected_found: self.values_str(trace.values).map(|(e, f, _)| (e, f)), + expected_found: self.values_str(trace.values, &trace.cause).map(|(e, f, _)| (e, f)), } .add_to_diag(err), infer::Reborrow(span) => { @@ -946,8 +946,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if let infer::Subtype(ref sup_trace) = sup_origin && let infer::Subtype(ref sub_trace) = sub_origin - && let Some((sup_expected, sup_found, _)) = self.values_str(sup_trace.values) - && let Some((sub_expected, sub_found, _)) = self.values_str(sub_trace.values) + && let Some((sup_expected, sup_found, _)) = + self.values_str(sup_trace.values, &sup_trace.cause) + && let Some((sub_expected, sub_found, _)) = + self.values_str(sub_trace.values, &sup_trace.cause) && sub_expected == sup_expected && sub_found == sup_found { diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr index 55532fe0168ed..1ab1fad64ccf9 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/trait-impl.stderr @@ -1,3 +1,12 @@ +error: `#[target_feature(..)]` cannot be applied to safe trait method + --> $DIR/trait-impl.rs:13:5 + | +LL | #[target_feature(enable = "sse2")] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot be applied to safe trait method +LL | +LL | fn foo(&self) {} + | ------------- not an `unsafe` function + error[E0053]: method `foo` has an incompatible type for trait --> $DIR/trait-impl.rs:15:5 | @@ -10,16 +19,7 @@ note: type in trait LL | fn foo(&self); | ^^^^^^^^^^^^^^ = note: expected signature `fn(&Bar)` - found signature `unsafe fn(&Bar)` - -error: `#[target_feature(..)]` cannot be applied to safe trait method - --> $DIR/trait-impl.rs:13:5 - | -LL | #[target_feature(enable = "sse2")] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot be applied to safe trait method -LL | -LL | fn foo(&self) {} - | ------------- not an `unsafe` function + found signature `#[target_features] fn(&Bar)` error: `#[target_feature(..)]` cannot be applied to safe trait method --> $DIR/trait-impl.rs:23:5 diff --git a/tests/ui/target-feature/invalid-attribute.stderr b/tests/ui/target-feature/invalid-attribute.stderr index c00f2254ac38b..10fcf65bb9a83 100644 --- a/tests/ui/target-feature/invalid-attribute.stderr +++ b/tests/ui/target-feature/invalid-attribute.stderr @@ -192,20 +192,6 @@ LL | impl Quux for u8 {} LL | fn foo(); | --------- `foo` from trait -error[E0053]: method `foo` has an incompatible type for trait - --> $DIR/invalid-attribute.rs:108:5 - | -LL | fn foo() {} - | ^^^^^^^^ expected safe fn, found unsafe fn - | -note: type in trait - --> $DIR/invalid-attribute.rs:99:5 - | -LL | fn foo(); - | ^^^^^^^^^ - = note: expected signature `fn()` 
- found signature `unsafe fn()` - error[E0658]: `#[target_feature(..)]` can only be applied to `unsafe` functions --> $DIR/invalid-attribute.rs:104:5 | @@ -219,6 +205,20 @@ LL | fn foo() {} = help: add `#![feature(target_feature_11)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date +error[E0053]: method `foo` has an incompatible type for trait + --> $DIR/invalid-attribute.rs:108:5 + | +LL | fn foo() {} + | ^^^^^^^^ expected safe fn, found unsafe fn + | +note: type in trait + --> $DIR/invalid-attribute.rs:99:5 + | +LL | fn foo(); + | ^^^^^^^^^ + = note: expected signature `fn()` + found signature `#[target_features] fn()` + error: aborting due to 24 previous errors Some errors have detailed explanations: E0046, E0053, E0658. From 1952b87780758652477bcd8ae11acc850781e2b4 Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Sun, 15 Dec 2024 20:26:18 +0000 Subject: [PATCH 089/142] Try to render shorthand differently --- .../src/error_reporting/traits/on_unimplemented.rs | 12 +++++++++--- .../rfcs/rfc-2396-target_feature-11/fn-traits.stderr | 9 +++------ 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs index 51efe39a7bc55..2d248d00066e0 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs @@ -205,9 +205,15 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { if self_ty.is_fn() { let fn_sig = self_ty.fn_sig(self.tcx); - let shortname = match fn_sig.safety() { - hir::Safety::Safe => "fn", - hir::Safety::Unsafe => "unsafe fn", + let shortname = if let ty::FnDef(def_id, _) = self_ty.kind() + && self.tcx.codegen_fn_attrs(def_id).safe_target_features + { + "#[target_feature] fn" + } else { + match fn_sig.safety() { + hir::Safety::Safe => "fn", + hir::Safety::Unsafe => "unsafe fn", + } }; flags.push((sym::_Self, Some(shortname.to_owned()))); } diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr index 6c3f5bef80e7c..2915b9ad1b3d8 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-traits.stderr @@ -2,12 +2,11 @@ error[E0277]: expected a `Fn()` closure, found `#[target_features] fn() {foo}` --> $DIR/fn-traits.rs:24:10 | LL | call(foo); - | ---- ^^^ call the function in a closure: `|| unsafe { /* code */ }` + | ---- ^^^ expected an `Fn()` closure, found `#[target_features] fn() {foo}` | | | required by a bound introduced by this call | = help: the trait `Fn()` is not implemented for fn item `#[target_features] fn() {foo}` - = note: unsafe function cannot be called generically without an unsafe block = note: wrap the `#[target_features] fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call` @@ -20,12 +19,11 @@ error[E0277]: expected a `FnMut()` closure, found `#[target_features] fn() {foo} --> $DIR/fn-traits.rs:25:14 | LL | call_mut(foo); - | -------- ^^^ call the function in a closure: `|| unsafe { /* code */ }` + | -------- ^^^ expected an `FnMut()` closure, found `#[target_features] fn() {foo}` | | | required by a bound introduced by this call | = help: the trait `FnMut()` is not implemented for fn 
item `#[target_features] fn() {foo}` - = note: unsafe function cannot be called generically without an unsafe block = note: wrap the `#[target_features] fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call_mut` @@ -38,12 +36,11 @@ error[E0277]: expected a `FnOnce()` closure, found `#[target_features] fn() {foo --> $DIR/fn-traits.rs:26:15 | LL | call_once(foo); - | --------- ^^^ call the function in a closure: `|| unsafe { /* code */ }` + | --------- ^^^ expected an `FnOnce()` closure, found `#[target_features] fn() {foo}` | | | required by a bound introduced by this call | = help: the trait `FnOnce()` is not implemented for fn item `#[target_features] fn() {foo}` - = note: unsafe function cannot be called generically without an unsafe block = note: wrap the `#[target_features] fn() {foo}` in a closure with no arguments: `|| { /* code */ }` = note: `#[target_feature]` functions do not implement the `Fn` traits note: required by a bound in `call_once` From 767d4fe64e3644ffcd89b3265fdee6659d08786a Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Thu, 9 Jan 2025 09:30:05 +0000 Subject: [PATCH 090/142] Avoid notes that only make sense for unsafe functions --- compiler/rustc_mir_build/src/check_unsafety.rs | 7 +++++-- .../src/error_reporting/infer/note_and_explain.rs | 6 ++++-- .../ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr | 1 - .../rfc-2396-target_feature-11/safe-calls.stderr | 12 ------------ 4 files changed, 9 insertions(+), 17 deletions(-) diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index ba8802d786cf1..6279d0f94af63 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -747,7 +747,10 @@ impl UnsafeOpKind { ) { let parent_id = tcx.hir().get_parent_item(hir_id); let parent_owner = tcx.hir_owner_node(parent_id); - let should_suggest = parent_owner.fn_sig().is_some_and(|sig| sig.header.is_unsafe()); + let should_suggest = parent_owner.fn_sig().is_some_and(|sig| { + // Do not suggest for safe target_feature functions + matches!(sig.header.safety, hir::HeaderSafety::Normal(hir::Safety::Unsafe)) + }); let unsafe_not_inherited_note = if should_suggest { suggest_unsafe_block.then(|| { let body_span = tcx.hir().body(parent_owner.body_id().unwrap()).value.span; @@ -910,7 +913,7 @@ impl UnsafeOpKind { { true } else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) - && sig.header.is_unsafe() + && matches!(sig.header.safety, hir::HeaderSafety::Normal(hir::Safety::Unsafe)) { true } else { diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index 2cfccc57c971a..1dd09fe7aafa2 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -461,9 +461,11 @@ impl Trait for X { (ty::FnPtr(_, hdr), ty::FnDef(def_id, _)) | (ty::FnDef(def_id, _), ty::FnPtr(_, hdr)) => { if tcx.fn_sig(def_id).skip_binder().safety() < hdr.safety { - diag.note( + if !tcx.codegen_fn_attrs(def_id).safe_target_features { + diag.note( "unsafe functions cannot be coerced into safe function pointers", - ); + ); + } } } (ty::Adt(_, _), ty::Adt(def, args)) diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr 
b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr index 9a80286c35df0..a2bda229d10e1 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr @@ -12,7 +12,6 @@ LL | let foo: fn() = foo; = note: expected fn pointer `fn()` found fn item `#[target_features] fn() {foo}` = note: functions with `#[target_feature]` can only be coerced to `unsafe` function pointers - = note: unsafe functions cannot be coerced into safe function pointers error: aborting due to 1 previous error diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr index 003c20b867502..901bf640845dd 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/safe-calls.stderr @@ -26,9 +26,6 @@ LL | Quux.avx_bmi2(); error[E0133]: call to function `avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:38:5 | -LL | fn bar() { - | -------- items do not inherit unsafety from separate enclosing items -LL | sse2(); LL | avx_bmi2(); | ^^^^^^^^^^ call to function with `#[target_feature]` | @@ -37,9 +34,6 @@ LL | avx_bmi2(); error[E0133]: call to function `Quux::avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:40:5 | -LL | fn bar() { - | -------- items do not inherit unsafety from separate enclosing items -... LL | Quux.avx_bmi2(); | ^^^^^^^^^^^^^^^ call to function with `#[target_feature]` | @@ -48,9 +42,6 @@ LL | Quux.avx_bmi2(); error[E0133]: call to function `avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:47:5 | -LL | fn baz() { - | -------- items do not inherit unsafety from separate enclosing items -LL | sse2(); LL | avx_bmi2(); | ^^^^^^^^^^ call to function with `#[target_feature]` | @@ -59,9 +50,6 @@ LL | avx_bmi2(); error[E0133]: call to function `Quux::avx_bmi2` with `#[target_feature]` is unsafe and requires unsafe function or block --> $DIR/safe-calls.rs:49:5 | -LL | fn baz() { - | -------- items do not inherit unsafety from separate enclosing items -... LL | Quux.avx_bmi2(); | ^^^^^^^^^^^^^^^ call to function with `#[target_feature]` | From 896953aee71be9f175a4b48e3235502999229aed Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Wed, 15 Jan 2025 14:19:02 +0300 Subject: [PATCH 091/142] remove outdated FIXME Signed-off-by: onur-ozkan --- src/bootstrap/src/core/build_steps/doc.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index dc09d2d109330..0eb4080c053a7 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -65,8 +65,6 @@ macro_rules! book { // NOTE: When adding a book here, make sure to ALSO build the book by // adding a build step in `src/bootstrap/code/builder/mod.rs`! // NOTE: Make sure to add the corresponding submodule when adding a new book. -// FIXME: Make checking for a submodule automatic somehow (maybe by having a list of all submodules -// and checking against it?). 
book!( CargoBook, "src/tools/cargo/src/doc", "cargo", &[]; ClippyBook, "src/tools/clippy/book", "clippy", &[]; From 0128c910f487e010a0c5693080a680f0d0a56a74 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Wed, 15 Jan 2025 15:42:20 +0100 Subject: [PATCH 092/142] Add GUI test for #135499 --- tests/rustdoc-gui/links-color.goml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/rustdoc-gui/links-color.goml b/tests/rustdoc-gui/links-color.goml index ad1b5e801ca32..8d26b826479dc 100644 --- a/tests/rustdoc-gui/links-color.goml +++ b/tests/rustdoc-gui/links-color.goml @@ -5,6 +5,7 @@ go-to: "file://" + |DOC_PATH| + "/test_docs/index.html" // This is needed so that the text color is computed. show-text: true +// First we check the links of the different items. define-function: ( "check-colors", [theme, mod, macro, struct, enum, trait, fn, type, union, keyword, @@ -36,6 +37,11 @@ define-function: ( }, ALL, ) + move-cursor-to: ".desc a[href='long_code_block_link/index.html']" + assert-css: ( + ".desc a[href='long_code_block_link/index.html']", + {"text-decoration": "underline solid " + |mod|}, + ) }, ) From a61cd86a4e0d88abfc19a13a7959d397e4dd148b Mon Sep 17 00:00:00 2001 From: Oli Scherer Date: Wed, 15 Jan 2025 10:50:07 +0000 Subject: [PATCH 093/142] Methods of const traits are const --- .../src/const_eval/fn_queries.rs | 20 +++++++++++-------- .../src/const_eval/machine.rs | 5 +---- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/compiler/rustc_const_eval/src/const_eval/fn_queries.rs b/compiler/rustc_const_eval/src/const_eval/fn_queries.rs index 8af17d01b0a36..35c3e3ed3150e 100644 --- a/compiler/rustc_const_eval/src/const_eval/fn_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/fn_queries.rs @@ -4,14 +4,18 @@ use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; -fn parent_impl_constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { +fn parent_impl_or_trait_constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { let parent_id = tcx.local_parent(def_id); - if matches!(tcx.def_kind(parent_id), DefKind::Impl { .. }) - && let Some(header) = tcx.impl_trait_header(parent_id) - { - header.constness - } else { - hir::Constness::NotConst + match tcx.def_kind(parent_id) { + DefKind::Impl { of_trait: true } => tcx.impl_trait_header(parent_id).unwrap().constness, + DefKind::Trait => { + if tcx.is_const_trait(parent_id.into()) { + hir::Constness::Const + } else { + hir::Constness::NotConst + } + } + _ => hir::Constness::NotConst, } } @@ -34,7 +38,7 @@ fn constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { // If the function itself is not annotated with `const`, it may still be a `const fn` // if it resides in a const trait impl. - parent_impl_constness(tcx, def_id) + parent_impl_or_trait_constness(tcx, def_id) } else { tcx.dcx().span_bug( tcx.def_span(def_id), diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index ba7fbb254c66d..cfdfbdb7880b2 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -360,10 +360,7 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> { // sensitive check here. But we can at least rule out functions that are not const at // all. That said, we have to allow calling functions inside a trait marked with // #[const_trait]. These *are* const-checked! 
- // FIXME(const_trait_impl): why does `is_const_fn` not classify them as const? - if (!ecx.tcx.is_const_fn(def) && !ecx.tcx.is_const_default_method(def)) - || ecx.tcx.has_attr(def, sym::rustc_do_not_const_check) - { + if !ecx.tcx.is_const_fn(def) || ecx.tcx.has_attr(def, sym::rustc_do_not_const_check) { // We certainly do *not* want to actually call the fn // though, so be sure we return here. throw_unsup_format!("calling non-const function `{}`", instance) From b535a1dd654fd538776249d951593a8712d1b32a Mon Sep 17 00:00:00 2001 From: DJMrTV Date: Tue, 14 Jan 2025 19:26:28 +0100 Subject: [PATCH 094/142] fix typo in typenames of pin documentation --- library/core/src/pin.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/core/src/pin.rs b/library/core/src/pin.rs index 83730285636fb..71044190f0c88 100644 --- a/library/core/src/pin.rs +++ b/library/core/src/pin.rs @@ -331,7 +331,7 @@ //! //! Note that this invariant is enforced by simply making it impossible to call code that would //! perform a move on the pinned value. This is the case since the only way to access that pinned -//! value is through the pinning [Pin]<[&mut] T>>, which in turn restricts our access. +//! value is through the pinning [Pin]<[&mut] T>, which in turn restricts our access. //! //! ## [`Unpin`] //! @@ -379,7 +379,7 @@ //! //! Exposing access to the inner field which you want to remain pinned must then be carefully //! considered as well! Remember, exposing a method that gives access to a -//! [Pin]<[&mut] InnerT>> where InnerT: [Unpin] would allow safe code to +//! [Pin]<[&mut] InnerT> where InnerT: [Unpin] would allow safe code to //! trivially move the inner value out of that pinning pointer, which is precisely what you're //! seeking to prevent! Exposing a field of a pinned value through a pinning pointer is called //! "projecting" a pin, and the more general case of deciding in which cases a pin should be able From c4a5e12567a4f5b540141c31c87f479bb3e73e63 Mon Sep 17 00:00:00 2001 From: Aeon <165953379+AeonSolstice@users.noreply.github.com> Date: Wed, 15 Jan 2025 16:08:22 -0500 Subject: [PATCH 095/142] Clarify note in `std::sync::LazyLock` example --- library/std/src/sync/lazy_lock.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/std/src/sync/lazy_lock.rs b/library/std/src/sync/lazy_lock.rs index 1e4f9b79e0f4a..98c83d8d326ca 100644 --- a/library/std/src/sync/lazy_lock.rs +++ b/library/std/src/sync/lazy_lock.rs @@ -31,7 +31,7 @@ union Data { /// ``` /// use std::sync::LazyLock; /// -/// // n.b. static items do not call [`Drop`] on program termination, so this won't be deallocated. +/// // Note: static items do not call [`Drop`] on program termination, so this won't be deallocated. /// // this is fine, as the OS can deallocate the terminated program faster than we can free memory /// // but tools like valgrind might report "memory leaks" as it isn't obvious this is intentional. /// static DEEP_THOUGHT: LazyLock = LazyLock::new(|| { From e7e52029786aca443a20b95d46dd409cad96103f Mon Sep 17 00:00:00 2001 From: Flakebi Date: Thu, 2 Jan 2025 22:42:10 +0100 Subject: [PATCH 096/142] Add gpu-kernel calling convention The amdgpu-kernel calling convention was reverted in commit f6b21e90d1ec01081bc2619efb68af6788a63d65 due to inactivity in the amdgpu target. Introduce a `gpu-kernel` calling convention that translates to `ptx_kernel` or `amdgpu_kernel`, depending on the target that rust compiles for. 
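As a rough usage sketch (not part of this patch, and the names are
illustrative), a host-callable kernel entry point on a GPU target such as
nvptx64-nvidia-cuda could then be written as follows, mirroring the codegen
test added below:

```rust
// Sketch only: requires the unstable `abi_gpu_kernel` feature gate added here.
#![feature(abi_gpu_kernel)]
#![no_std]

use core::panic::PanicInfo;

// Entry point called by the host; lowered to LLVM's `ptx_kernel` (nvptx64)
// or `amdgpu_kernel` (amdgpu) calling convention.
#[no_mangle]
pub extern "gpu-kernel" fn kernel(_x: i32) {}

// A `no_std` final artifact needs a panic handler.
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
    loop {}
}
```

On non-GPU targets the ABI is rejected, as exercised by the updated
`tests/ui/abi/unsupported.rs`.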
--- compiler/rustc_abi/src/extern_abi/mod.rs | 35 ++++-- .../rustc_codegen_cranelift/src/abi/mod.rs | 6 +- compiler/rustc_codegen_llvm/src/abi.rs | 22 +++- compiler/rustc_codegen_llvm/src/context.rs | 5 +- compiler/rustc_codegen_llvm/src/declare.rs | 2 +- compiler/rustc_codegen_llvm/src/llvm/ffi.rs | 1 + compiler/rustc_feature/src/unstable.rs | 2 + compiler/rustc_middle/src/ty/layout.rs | 1 + .../rustc_smir/src/rustc_internal/internal.rs | 1 + .../rustc_smir/src/rustc_smir/convert/abi.rs | 1 + .../rustc_smir/src/rustc_smir/convert/ty.rs | 1 + compiler/rustc_span/src/symbol.rs | 1 + compiler/rustc_target/src/callconv/mod.rs | 3 + compiler/rustc_target/src/json.rs | 1 + compiler/rustc_target/src/spec/mod.rs | 1 + compiler/rustc_ty_utils/src/abi.rs | 1 + compiler/stable_mir/src/abi.rs | 2 + compiler/stable_mir/src/ty.rs | 1 + tests/codegen/gpu-kernel-abi.rs | 18 +++ tests/ui/abi/unsupported.aarch64.stderr | 64 +++++----- tests/ui/abi/unsupported.arm.stderr | 58 +++++---- tests/ui/abi/unsupported.i686.stderr | 46 ++++--- tests/ui/abi/unsupported.riscv32.stderr | 58 +++++---- tests/ui/abi/unsupported.riscv64.stderr | 58 +++++---- tests/ui/abi/unsupported.rs | 3 + tests/ui/abi/unsupported.x64.stderr | 58 +++++---- .../feature-gate-abi_gpu_kernel.rs | 45 +++++++ .../feature-gate-abi_gpu_kernel.stderr | 114 ++++++++++++++++++ tests/ui/print-calling-conventions.stdout | 1 + 29 files changed, 435 insertions(+), 175 deletions(-) create mode 100644 tests/codegen/gpu-kernel-abi.rs create mode 100644 tests/ui/feature-gates/feature-gate-abi_gpu_kernel.rs create mode 100644 tests/ui/feature-gates/feature-gate-abi_gpu_kernel.stderr diff --git a/compiler/rustc_abi/src/extern_abi/mod.rs b/compiler/rustc_abi/src/extern_abi/mod.rs index 390f2dbc10fdf..1dd22bbef8b52 100644 --- a/compiler/rustc_abi/src/extern_abi/mod.rs +++ b/compiler/rustc_abi/src/extern_abi/mod.rs @@ -45,6 +45,9 @@ pub enum ExternAbi { PtxKernel, Msp430Interrupt, X86Interrupt, + /// An entry-point function called by the GPU's host + // FIXME: should not be callable from Rust on GPU targets, is for host's use only + GpuKernel, EfiApi, AvrInterrupt, AvrNonBlockingInterrupt, @@ -122,6 +125,7 @@ const AbiDatas: &[AbiData] = &[ AbiData { abi: Abi::PtxKernel, name: "ptx-kernel" }, AbiData { abi: Abi::Msp430Interrupt, name: "msp430-interrupt" }, AbiData { abi: Abi::X86Interrupt, name: "x86-interrupt" }, + AbiData { abi: Abi::GpuKernel, name: "gpu-kernel" }, AbiData { abi: Abi::EfiApi, name: "efiapi" }, AbiData { abi: Abi::AvrInterrupt, name: "avr-interrupt" }, AbiData { abi: Abi::AvrNonBlockingInterrupt, name: "avr-non-blocking-interrupt" }, @@ -235,6 +239,10 @@ pub fn is_stable(name: &str) -> Result<(), AbiDisabled> { feature: sym::abi_x86_interrupt, explain: "x86-interrupt ABI is experimental and subject to change", }), + "gpu-kernel" => Err(AbiDisabled::Unstable { + feature: sym::abi_gpu_kernel, + explain: "gpu-kernel ABI is experimental and subject to change", + }), "avr-interrupt" | "avr-non-blocking-interrupt" => Err(AbiDisabled::Unstable { feature: sym::abi_avr_interrupt, explain: "avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change", @@ -289,20 +297,21 @@ impl Abi { PtxKernel => 19, Msp430Interrupt => 20, X86Interrupt => 21, - EfiApi => 22, - AvrInterrupt => 23, - AvrNonBlockingInterrupt => 24, - CCmseNonSecureCall => 25, - CCmseNonSecureEntry => 26, + GpuKernel => 22, + EfiApi => 23, + AvrInterrupt => 24, + AvrNonBlockingInterrupt => 25, + CCmseNonSecureCall => 26, + CCmseNonSecureEntry => 27, // 
Cross-platform ABIs - System { unwind: false } => 27, - System { unwind: true } => 28, - RustIntrinsic => 29, - RustCall => 30, - Unadjusted => 31, - RustCold => 32, - RiscvInterruptM => 33, - RiscvInterruptS => 34, + System { unwind: false } => 28, + System { unwind: true } => 29, + RustIntrinsic => 30, + RustCall => 31, + Unadjusted => 32, + RustCold => 33, + RiscvInterruptM => 34, + RiscvInterruptS => 35, }; debug_assert!( AbiDatas diff --git a/compiler/rustc_codegen_cranelift/src/abi/mod.rs b/compiler/rustc_codegen_cranelift/src/abi/mod.rs index 2466bfe60c7ab..2c99597922e8f 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/mod.rs @@ -65,7 +65,11 @@ pub(crate) fn conv_to_call_conv(sess: &Session, c: Conv, default_call_conv: Call sess.dcx().fatal("C-cmse-nonsecure-entry call conv is not yet implemented"); } - Conv::Msp430Intr | Conv::PtxKernel | Conv::AvrInterrupt | Conv::AvrNonBlockingInterrupt => { + Conv::Msp430Intr + | Conv::PtxKernel + | Conv::GpuKernel + | Conv::AvrInterrupt + | Conv::AvrNonBlockingInterrupt => { unreachable!("tried to use {c:?} call conv which only exists on an unsupported target"); } } diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 1d35138b01348..31ee0eeca11fa 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -1,3 +1,4 @@ +use std::borrow::Borrow; use std::cmp; use libc::c_uint; @@ -312,7 +313,7 @@ impl<'ll, 'tcx> ArgAbiBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { pub(crate) trait FnAbiLlvmExt<'ll, 'tcx> { fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type; fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type; - fn llvm_cconv(&self) -> llvm::CallConv; + fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv; /// Apply attributes to a function declaration/definition. 
fn apply_attrs_llfn( @@ -404,8 +405,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { cx.type_ptr_ext(cx.data_layout().instruction_address_space) } - fn llvm_cconv(&self) -> llvm::CallConv { - self.conv.into() + fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv { + llvm::CallConv::from_conv(self.conv, cx.tcx.sess.target.arch.borrow()) } fn apply_attrs_llfn( @@ -617,7 +618,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } } - let cconv = self.llvm_cconv(); + let cconv = self.llvm_cconv(&bx.cx); if cconv != llvm::CCallConv { llvm::SetInstructionCallConv(callsite, cconv); } @@ -655,8 +656,8 @@ impl<'tcx> AbiBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { } } -impl From for llvm::CallConv { - fn from(conv: Conv) -> Self { +impl llvm::CallConv { + pub fn from_conv(conv: Conv, arch: &str) -> Self { match conv { Conv::C | Conv::Rust @@ -666,6 +667,15 @@ impl From for llvm::CallConv { Conv::Cold => llvm::ColdCallConv, Conv::PreserveMost => llvm::PreserveMost, Conv::PreserveAll => llvm::PreserveAll, + Conv::GpuKernel => { + if arch == "amdgpu" { + llvm::AmdgpuKernel + } else if arch == "nvptx64" { + llvm::PtxKernel + } else { + panic!("Architecture {arch} does not support GpuKernel calling convention"); + } + } Conv::AvrInterrupt => llvm::AvrInterrupt, Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt, Conv::ArmAapcs => llvm::ArmAapcsCallConv, diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index d8fbe51b975a0..6534575184256 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -741,7 +741,10 @@ impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { if self.get_declared_value(entry_name).is_none() { Some(self.declare_entry_fn( entry_name, - self.sess().target.entry_abi.into(), + llvm::CallConv::from_conv( + self.sess().target.entry_abi, + self.sess().target.arch.borrow(), + ), llvm::UnnamedAddr::Global, fn_type, )) diff --git a/compiler/rustc_codegen_llvm/src/declare.rs b/compiler/rustc_codegen_llvm/src/declare.rs index 3ec386f6b076c..c72b5b5611f2b 100644 --- a/compiler/rustc_codegen_llvm/src/declare.rs +++ b/compiler/rustc_codegen_llvm/src/declare.rs @@ -125,7 +125,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> { let llfn = declare_raw_fn( self, name, - fn_abi.llvm_cconv(), + fn_abi.llvm_cconv(self), llvm::UnnamedAddr::Global, llvm::Visibility::Default, fn_abi.llvm_type(self), diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 472d4a3a72b3f..5bac08ee6b0f2 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -120,6 +120,7 @@ pub enum CallConv { X86_Intr = 83, AvrNonBlockingInterrupt = 84, AvrInterrupt = 85, + AmdgpuKernel = 91, } /// Must match the layout of `LLVMLinkage`. diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index d40823d2ed627..97142310d84f1 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -359,6 +359,8 @@ declare_features! ( (unstable, abi_avr_interrupt, "1.45.0", Some(69664)), /// Allows `extern "C-cmse-nonsecure-call" fn()`. (unstable, abi_c_cmse_nonsecure_call, "1.51.0", Some(81391)), + /// Allows `extern "gpu-kernel" fn()`. + (unstable, abi_gpu_kernel, "CURRENT_RUSTC_VERSION", Some(135467)), /// Allows `extern "msp430-interrupt" fn()`. 
(unstable, abi_msp430_interrupt, "1.16.0", Some(38487)), /// Allows `extern "ptx-*" fn()`. diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 6e6da6de749a1..337ef5efbbad3 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -1240,6 +1240,7 @@ pub fn fn_can_unwind(tcx: TyCtxt<'_>, fn_def_id: Option, abi: ExternAbi) PtxKernel | Msp430Interrupt | X86Interrupt + | GpuKernel | EfiApi | AvrInterrupt | AvrNonBlockingInterrupt diff --git a/compiler/rustc_smir/src/rustc_internal/internal.rs b/compiler/rustc_smir/src/rustc_internal/internal.rs index c465367b6b9e0..3bc896dd7efcf 100644 --- a/compiler/rustc_smir/src/rustc_internal/internal.rs +++ b/compiler/rustc_smir/src/rustc_internal/internal.rs @@ -472,6 +472,7 @@ impl RustcInternal for Abi { Abi::PtxKernel => rustc_abi::ExternAbi::PtxKernel, Abi::Msp430Interrupt => rustc_abi::ExternAbi::Msp430Interrupt, Abi::X86Interrupt => rustc_abi::ExternAbi::X86Interrupt, + Abi::GpuKernel => rustc_abi::ExternAbi::GpuKernel, Abi::EfiApi => rustc_abi::ExternAbi::EfiApi, Abi::AvrInterrupt => rustc_abi::ExternAbi::AvrInterrupt, Abi::AvrNonBlockingInterrupt => rustc_abi::ExternAbi::AvrNonBlockingInterrupt, diff --git a/compiler/rustc_smir/src/rustc_smir/convert/abi.rs b/compiler/rustc_smir/src/rustc_smir/convert/abi.rs index b39a15a863326..4a03ff4beae89 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/abi.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/abi.rs @@ -113,6 +113,7 @@ impl<'tcx> Stable<'tcx> for callconv::Conv { Conv::X86VectorCall => CallConvention::X86VectorCall, Conv::X86_64SysV => CallConvention::X86_64SysV, Conv::X86_64Win64 => CallConvention::X86_64Win64, + Conv::GpuKernel => CallConvention::GpuKernel, Conv::AvrInterrupt => CallConvention::AvrInterrupt, Conv::AvrNonBlockingInterrupt => CallConvention::AvrNonBlockingInterrupt, Conv::RiscvInterrupt { .. } => CallConvention::RiscvInterrupt, diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index e15dad78c693f..a7e122639eacf 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -911,6 +911,7 @@ impl<'tcx> Stable<'tcx> for rustc_abi::ExternAbi { ExternAbi::Win64 { unwind } => Abi::Win64 { unwind }, ExternAbi::SysV64 { unwind } => Abi::SysV64 { unwind }, ExternAbi::PtxKernel => Abi::PtxKernel, + ExternAbi::GpuKernel => Abi::GpuKernel, ExternAbi::Msp430Interrupt => Abi::Msp430Interrupt, ExternAbi::X86Interrupt => Abi::X86Interrupt, ExternAbi::EfiApi => Abi::EfiApi, diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 4ecc4201f89d5..01de80093a275 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -379,6 +379,7 @@ symbols! 
{ abi_avr_interrupt, abi_c_cmse_nonsecure_call, abi_efiapi, + abi_gpu_kernel, abi_msp430_interrupt, abi_ptx, abi_riscv_interrupt, diff --git a/compiler/rustc_target/src/callconv/mod.rs b/compiler/rustc_target/src/callconv/mod.rs index 746e81738076f..fc17b72a23b94 100644 --- a/compiler/rustc_target/src/callconv/mod.rs +++ b/compiler/rustc_target/src/callconv/mod.rs @@ -547,6 +547,8 @@ pub enum Conv { PtxKernel, + GpuKernel, + X86Fastcall, X86Intr, X86Stdcall, @@ -866,6 +868,7 @@ impl FromStr for Conv { "X86VectorCall" => Ok(Conv::X86VectorCall), "X86_64SysV" => Ok(Conv::X86_64SysV), "X86_64Win64" => Ok(Conv::X86_64Win64), + "GpuKernel" => Ok(Conv::GpuKernel), "AvrInterrupt" => Ok(Conv::AvrInterrupt), "AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt), "RiscvInterrupt(machine)" => { diff --git a/compiler/rustc_target/src/json.rs b/compiler/rustc_target/src/json.rs index b09d8d724efd4..961cce5d6b9f9 100644 --- a/compiler/rustc_target/src/json.rs +++ b/compiler/rustc_target/src/json.rs @@ -113,6 +113,7 @@ impl ToJson for crate::abi::call::Conv { Self::X86VectorCall => "X86VectorCall", Self::X86_64SysV => "X86_64SysV", Self::X86_64Win64 => "X86_64Win64", + Self::GpuKernel => "GpuKernel", Self::AvrInterrupt => "AvrInterrupt", Self::AvrNonBlockingInterrupt => "AvrNonBlockingInterrupt", Self::RiscvInterrupt { kind } => { diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 02962d55a60ed..48c493cc2e1fc 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -2854,6 +2854,7 @@ impl Target { } Win64 { .. } | SysV64 { .. } => self.arch == "x86_64", PtxKernel => self.arch == "nvptx64", + GpuKernel => ["amdgpu", "nvptx64"].contains(&&self.arch[..]), Msp430Interrupt => self.arch == "msp430", RiscvInterruptM | RiscvInterruptS => ["riscv32", "riscv64"].contains(&&self.arch[..]), AvrInterrupt | AvrNonBlockingInterrupt => self.arch == "avr", diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index b63534880d1c4..c528179ae0e7a 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -293,6 +293,7 @@ fn conv_from_spec_abi(tcx: TyCtxt<'_>, abi: ExternAbi, c_variadic: bool) -> Conv PtxKernel => Conv::PtxKernel, Msp430Interrupt => Conv::Msp430Intr, X86Interrupt => Conv::X86Intr, + GpuKernel => Conv::GpuKernel, AvrInterrupt => Conv::AvrInterrupt, AvrNonBlockingInterrupt => Conv::AvrNonBlockingInterrupt, RiscvInterruptM => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Machine }, diff --git a/compiler/stable_mir/src/abi.rs b/compiler/stable_mir/src/abi.rs index 17e6a852022d7..861b6692b5367 100644 --- a/compiler/stable_mir/src/abi.rs +++ b/compiler/stable_mir/src/abi.rs @@ -442,6 +442,8 @@ pub enum CallConvention { PtxKernel, + GpuKernel, + X86Fastcall, X86Intr, X86Stdcall, diff --git a/compiler/stable_mir/src/ty.rs b/compiler/stable_mir/src/ty.rs index d7eb435e13f40..3434597e7b052 100644 --- a/compiler/stable_mir/src/ty.rs +++ b/compiler/stable_mir/src/ty.rs @@ -1077,6 +1077,7 @@ pub enum Abi { PtxKernel, Msp430Interrupt, X86Interrupt, + GpuKernel, EfiApi, AvrInterrupt, AvrNonBlockingInterrupt, diff --git a/tests/codegen/gpu-kernel-abi.rs b/tests/codegen/gpu-kernel-abi.rs new file mode 100644 index 0000000000000..fba1793649433 --- /dev/null +++ b/tests/codegen/gpu-kernel-abi.rs @@ -0,0 +1,18 @@ +// Checks that the gpu-kernel calling convention correctly translates to LLVM calling conventions. 
+ +//@ revisions: nvptx +//@ [nvptx] compile-flags: --crate-type=rlib --target=nvptx64-nvidia-cuda +//@ [nvptx] needs-llvm-components: nvptx +#![feature(no_core, lang_items, abi_gpu_kernel)] +#![no_core] + +#[lang = "sized"] +trait Sized {} +#[lang = "freeze"] +trait Freeze {} +#[lang = "copy"] +trait Copy {} + +// nvptx: define ptx_kernel void @fun(i32 +#[no_mangle] +pub extern "gpu-kernel" fn fun(_: i32) {} diff --git a/tests/ui/abi/unsupported.aarch64.stderr b/tests/ui/abi/unsupported.aarch64.stderr index 81aa200012feb..2eb6ab08232e6 100644 --- a/tests/ui/abi/unsupported.aarch64.stderr +++ b/tests/ui/abi/unsupported.aarch64.stderr @@ -1,5 +1,5 @@ warning: the calling convention "ptx-kernel" is not supported on this target - --> $DIR/unsupported.rs:35:15 + --> $DIR/unsupported.rs:36:15 | LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:40:1 + --> $DIR/unsupported.rs:41:1 | LL | extern "ptx-kernel" {} | ^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "aapcs" is not supported on this target - --> $DIR/unsupported.rs:49:17 + --> $DIR/unsupported.rs:52:17 | LL | fn aapcs_ptr(f: extern "aapcs" fn()) { | ^^^^^^^^^^^^^^^^^^^ @@ -24,13 +24,13 @@ LL | fn aapcs_ptr(f: extern "aapcs" fn()) { = note: for more information, see issue #130260 error[E0570]: `"aapcs"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:62:1 + --> $DIR/unsupported.rs:65:1 | LL | extern "aapcs" {} | ^^^^^^^^^^^^^^^^^ warning: the calling convention "msp430-interrupt" is not supported on this target - --> $DIR/unsupported.rs:71:18 + --> $DIR/unsupported.rs:74:18 | LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,13 +39,13 @@ LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:76:1 + --> $DIR/unsupported.rs:79:1 | LL | extern "msp430-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "avr-interrupt" is not supported on this target - --> $DIR/unsupported.rs:81:15 + --> $DIR/unsupported.rs:84:15 | LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -54,13 +54,13 @@ LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:86:1 + --> $DIR/unsupported.rs:89:1 | LL | extern "avr-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "riscv-interrupt-m" is not supported on this target - --> $DIR/unsupported.rs:94:17 + --> $DIR/unsupported.rs:97:17 | LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,13 +69,13 @@ LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { = note: for more information, see issue #130260 error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:105:1 + --> $DIR/unsupported.rs:108:1 | LL | extern "riscv-interrupt-m" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "x86-interrupt" is not supported on this target - --> $DIR/unsupported.rs:116:15 + --> $DIR/unsupported.rs:119:15 | LL | fn 
x86_ptr(f: extern "x86-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -84,13 +84,13 @@ LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:127:1 + --> $DIR/unsupported.rs:130:1 | LL | extern "x86-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "thiscall" is not supported on this target - --> $DIR/unsupported.rs:139:20 + --> $DIR/unsupported.rs:142:20 | LL | fn thiscall_ptr(f: extern "thiscall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^ @@ -99,13 +99,13 @@ LL | fn thiscall_ptr(f: extern "thiscall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:152:1 + --> $DIR/unsupported.rs:155:1 | LL | extern "thiscall" {} | ^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "stdcall" is not supported on this target - --> $DIR/unsupported.rs:165:19 + --> $DIR/unsupported.rs:168:19 | LL | fn stdcall_ptr(f: extern "stdcall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^ @@ -114,13 +114,13 @@ LL | fn stdcall_ptr(f: extern "stdcall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:178:1 + --> $DIR/unsupported.rs:181:1 | LL | extern "stdcall" {} | ^^^^^^^^^^^^^^^^^^^ warning: the calling convention "C-cmse-nonsecure-call" is not supported on this target - --> $DIR/unsupported.rs:185:21 + --> $DIR/unsupported.rs:188:21 | LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -129,7 +129,7 @@ LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { = note: for more information, see issue #130260 warning: the calling convention "C-cmse-nonsecure-entry" is not supported on this target - --> $DIR/unsupported.rs:193:22 + --> $DIR/unsupported.rs:196:22 | LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -138,65 +138,71 @@ LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { = note: for more information, see issue #130260 error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:198:1 + --> $DIR/unsupported.rs:201:1 | LL | extern "C-cmse-nonsecure-entry" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:33:1 + --> $DIR/unsupported.rs:34:1 | LL | extern "ptx-kernel" fn ptx() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error[E0570]: `"aapcs"` is not a supported ABI for the current target +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target --> $DIR/unsupported.rs:43:1 | +LL | extern "gpu-kernel" fn gpu() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"aapcs"` is not a supported ABI for the current target + --> $DIR/unsupported.rs:46:1 + | LL | extern "aapcs" fn aapcs() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:69:1 + --> $DIR/unsupported.rs:72:1 | LL | extern "msp430-interrupt" fn msp430() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:79:1 + --> $DIR/unsupported.rs:82:1 | LL | extern "avr-interrupt" fn avr() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: 
`"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:89:1 + --> $DIR/unsupported.rs:92:1 | LL | extern "riscv-interrupt-m" fn riscv() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:111:1 + --> $DIR/unsupported.rs:114:1 | LL | extern "x86-interrupt" fn x86() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:133:1 + --> $DIR/unsupported.rs:136:1 | LL | extern "thiscall" fn thiscall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:159:1 + --> $DIR/unsupported.rs:162:1 | LL | extern "stdcall" fn stdcall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:191:1 + --> $DIR/unsupported.rs:194:1 | LL | extern "C-cmse-nonsecure-entry" fn cmse_entry() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 18 previous errors; 10 warnings emitted +error: aborting due to 19 previous errors; 10 warnings emitted For more information about this error, try `rustc --explain E0570`. diff --git a/tests/ui/abi/unsupported.arm.stderr b/tests/ui/abi/unsupported.arm.stderr index 8e758ee451f53..ee878379cc646 100644 --- a/tests/ui/abi/unsupported.arm.stderr +++ b/tests/ui/abi/unsupported.arm.stderr @@ -1,5 +1,5 @@ warning: the calling convention "ptx-kernel" is not supported on this target - --> $DIR/unsupported.rs:35:15 + --> $DIR/unsupported.rs:36:15 | LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:40:1 + --> $DIR/unsupported.rs:41:1 | LL | extern "ptx-kernel" {} | ^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "msp430-interrupt" is not supported on this target - --> $DIR/unsupported.rs:71:18 + --> $DIR/unsupported.rs:74:18 | LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -24,13 +24,13 @@ LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:76:1 + --> $DIR/unsupported.rs:79:1 | LL | extern "msp430-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "avr-interrupt" is not supported on this target - --> $DIR/unsupported.rs:81:15 + --> $DIR/unsupported.rs:84:15 | LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,13 +39,13 @@ LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:86:1 + --> $DIR/unsupported.rs:89:1 | LL | extern "avr-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "riscv-interrupt-m" is not supported on this target - --> $DIR/unsupported.rs:94:17 + --> $DIR/unsupported.rs:97:17 | LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -54,13 +54,13 @@ LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { = note: for more information, see 
issue #130260 error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:105:1 + --> $DIR/unsupported.rs:108:1 | LL | extern "riscv-interrupt-m" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "x86-interrupt" is not supported on this target - --> $DIR/unsupported.rs:116:15 + --> $DIR/unsupported.rs:119:15 | LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,13 +69,13 @@ LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:127:1 + --> $DIR/unsupported.rs:130:1 | LL | extern "x86-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "thiscall" is not supported on this target - --> $DIR/unsupported.rs:139:20 + --> $DIR/unsupported.rs:142:20 | LL | fn thiscall_ptr(f: extern "thiscall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^ @@ -84,13 +84,13 @@ LL | fn thiscall_ptr(f: extern "thiscall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:152:1 + --> $DIR/unsupported.rs:155:1 | LL | extern "thiscall" {} | ^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "stdcall" is not supported on this target - --> $DIR/unsupported.rs:165:19 + --> $DIR/unsupported.rs:168:19 | LL | fn stdcall_ptr(f: extern "stdcall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^ @@ -99,13 +99,13 @@ LL | fn stdcall_ptr(f: extern "stdcall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:178:1 + --> $DIR/unsupported.rs:181:1 | LL | extern "stdcall" {} | ^^^^^^^^^^^^^^^^^^^ warning: the calling convention "C-cmse-nonsecure-call" is not supported on this target - --> $DIR/unsupported.rs:185:21 + --> $DIR/unsupported.rs:188:21 | LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -114,7 +114,7 @@ LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { = note: for more information, see issue #130260 warning: the calling convention "C-cmse-nonsecure-entry" is not supported on this target - --> $DIR/unsupported.rs:193:22 + --> $DIR/unsupported.rs:196:22 | LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -123,59 +123,65 @@ LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { = note: for more information, see issue #130260 error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:198:1 + --> $DIR/unsupported.rs:201:1 | LL | extern "C-cmse-nonsecure-entry" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:33:1 + --> $DIR/unsupported.rs:34:1 | LL | extern "ptx-kernel" fn ptx() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target + --> $DIR/unsupported.rs:43:1 + | +LL | extern "gpu-kernel" fn gpu() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:69:1 + --> $DIR/unsupported.rs:72:1 | LL | extern "msp430-interrupt" fn msp430() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> 
$DIR/unsupported.rs:79:1 + --> $DIR/unsupported.rs:82:1 | LL | extern "avr-interrupt" fn avr() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:89:1 + --> $DIR/unsupported.rs:92:1 | LL | extern "riscv-interrupt-m" fn riscv() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:111:1 + --> $DIR/unsupported.rs:114:1 | LL | extern "x86-interrupt" fn x86() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:133:1 + --> $DIR/unsupported.rs:136:1 | LL | extern "thiscall" fn thiscall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:159:1 + --> $DIR/unsupported.rs:162:1 | LL | extern "stdcall" fn stdcall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:191:1 + --> $DIR/unsupported.rs:194:1 | LL | extern "C-cmse-nonsecure-entry" fn cmse_entry() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 16 previous errors; 9 warnings emitted +error: aborting due to 17 previous errors; 9 warnings emitted For more information about this error, try `rustc --explain E0570`. diff --git a/tests/ui/abi/unsupported.i686.stderr b/tests/ui/abi/unsupported.i686.stderr index b3c74ad635375..02b2cdd356f7d 100644 --- a/tests/ui/abi/unsupported.i686.stderr +++ b/tests/ui/abi/unsupported.i686.stderr @@ -1,5 +1,5 @@ warning: the calling convention "ptx-kernel" is not supported on this target - --> $DIR/unsupported.rs:35:15 + --> $DIR/unsupported.rs:36:15 | LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:40:1 + --> $DIR/unsupported.rs:41:1 | LL | extern "ptx-kernel" {} | ^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "aapcs" is not supported on this target - --> $DIR/unsupported.rs:49:17 + --> $DIR/unsupported.rs:52:17 | LL | fn aapcs_ptr(f: extern "aapcs" fn()) { | ^^^^^^^^^^^^^^^^^^^ @@ -24,13 +24,13 @@ LL | fn aapcs_ptr(f: extern "aapcs" fn()) { = note: for more information, see issue #130260 error[E0570]: `"aapcs"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:62:1 + --> $DIR/unsupported.rs:65:1 | LL | extern "aapcs" {} | ^^^^^^^^^^^^^^^^^ warning: the calling convention "msp430-interrupt" is not supported on this target - --> $DIR/unsupported.rs:71:18 + --> $DIR/unsupported.rs:74:18 | LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,13 +39,13 @@ LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:76:1 + --> $DIR/unsupported.rs:79:1 | LL | extern "msp430-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "avr-interrupt" is not supported on this target - --> $DIR/unsupported.rs:81:15 + --> $DIR/unsupported.rs:84:15 | LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -54,13 +54,13 @@ LL | fn 
avr_ptr(f: extern "avr-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:86:1 + --> $DIR/unsupported.rs:89:1 | LL | extern "avr-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "riscv-interrupt-m" is not supported on this target - --> $DIR/unsupported.rs:94:17 + --> $DIR/unsupported.rs:97:17 | LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,13 +69,13 @@ LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { = note: for more information, see issue #130260 error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:105:1 + --> $DIR/unsupported.rs:108:1 | LL | extern "riscv-interrupt-m" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "C-cmse-nonsecure-call" is not supported on this target - --> $DIR/unsupported.rs:185:21 + --> $DIR/unsupported.rs:188:21 | LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -84,7 +84,7 @@ LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { = note: for more information, see issue #130260 warning: the calling convention "C-cmse-nonsecure-entry" is not supported on this target - --> $DIR/unsupported.rs:193:22 + --> $DIR/unsupported.rs:196:22 | LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -93,47 +93,53 @@ LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { = note: for more information, see issue #130260 error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:198:1 + --> $DIR/unsupported.rs:201:1 | LL | extern "C-cmse-nonsecure-entry" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:33:1 + --> $DIR/unsupported.rs:34:1 | LL | extern "ptx-kernel" fn ptx() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error[E0570]: `"aapcs"` is not a supported ABI for the current target +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target --> $DIR/unsupported.rs:43:1 | +LL | extern "gpu-kernel" fn gpu() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"aapcs"` is not a supported ABI for the current target + --> $DIR/unsupported.rs:46:1 + | LL | extern "aapcs" fn aapcs() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:69:1 + --> $DIR/unsupported.rs:72:1 | LL | extern "msp430-interrupt" fn msp430() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:79:1 + --> $DIR/unsupported.rs:82:1 | LL | extern "avr-interrupt" fn avr() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:89:1 + --> $DIR/unsupported.rs:92:1 | LL | extern "riscv-interrupt-m" fn riscv() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:191:1 + --> $DIR/unsupported.rs:194:1 | LL | extern "C-cmse-nonsecure-entry" fn cmse_entry() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 12 previous errors; 7 warnings emitted +error: aborting due to 13 previous errors; 
7 warnings emitted For more information about this error, try `rustc --explain E0570`. diff --git a/tests/ui/abi/unsupported.riscv32.stderr b/tests/ui/abi/unsupported.riscv32.stderr index 92728b1df18c0..abf403da8bd15 100644 --- a/tests/ui/abi/unsupported.riscv32.stderr +++ b/tests/ui/abi/unsupported.riscv32.stderr @@ -1,5 +1,5 @@ warning: the calling convention "ptx-kernel" is not supported on this target - --> $DIR/unsupported.rs:35:15 + --> $DIR/unsupported.rs:36:15 | LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:40:1 + --> $DIR/unsupported.rs:41:1 | LL | extern "ptx-kernel" {} | ^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "aapcs" is not supported on this target - --> $DIR/unsupported.rs:49:17 + --> $DIR/unsupported.rs:52:17 | LL | fn aapcs_ptr(f: extern "aapcs" fn()) { | ^^^^^^^^^^^^^^^^^^^ @@ -24,13 +24,13 @@ LL | fn aapcs_ptr(f: extern "aapcs" fn()) { = note: for more information, see issue #130260 error[E0570]: `"aapcs"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:62:1 + --> $DIR/unsupported.rs:65:1 | LL | extern "aapcs" {} | ^^^^^^^^^^^^^^^^^ warning: the calling convention "msp430-interrupt" is not supported on this target - --> $DIR/unsupported.rs:71:18 + --> $DIR/unsupported.rs:74:18 | LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,13 +39,13 @@ LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:76:1 + --> $DIR/unsupported.rs:79:1 | LL | extern "msp430-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "avr-interrupt" is not supported on this target - --> $DIR/unsupported.rs:81:15 + --> $DIR/unsupported.rs:84:15 | LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -54,13 +54,13 @@ LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:86:1 + --> $DIR/unsupported.rs:89:1 | LL | extern "avr-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "x86-interrupt" is not supported on this target - --> $DIR/unsupported.rs:116:15 + --> $DIR/unsupported.rs:119:15 | LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,13 +69,13 @@ LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:127:1 + --> $DIR/unsupported.rs:130:1 | LL | extern "x86-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "thiscall" is not supported on this target - --> $DIR/unsupported.rs:139:20 + --> $DIR/unsupported.rs:142:20 | LL | fn thiscall_ptr(f: extern "thiscall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^ @@ -84,13 +84,13 @@ LL | fn thiscall_ptr(f: extern "thiscall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:152:1 + --> $DIR/unsupported.rs:155:1 | LL | extern "thiscall" {} | 
^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "stdcall" is not supported on this target - --> $DIR/unsupported.rs:165:19 + --> $DIR/unsupported.rs:168:19 | LL | fn stdcall_ptr(f: extern "stdcall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^ @@ -99,13 +99,13 @@ LL | fn stdcall_ptr(f: extern "stdcall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:178:1 + --> $DIR/unsupported.rs:181:1 | LL | extern "stdcall" {} | ^^^^^^^^^^^^^^^^^^^ warning: the calling convention "C-cmse-nonsecure-call" is not supported on this target - --> $DIR/unsupported.rs:185:21 + --> $DIR/unsupported.rs:188:21 | LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -114,7 +114,7 @@ LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { = note: for more information, see issue #130260 warning: the calling convention "C-cmse-nonsecure-entry" is not supported on this target - --> $DIR/unsupported.rs:193:22 + --> $DIR/unsupported.rs:196:22 | LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -123,59 +123,65 @@ LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { = note: for more information, see issue #130260 error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:198:1 + --> $DIR/unsupported.rs:201:1 | LL | extern "C-cmse-nonsecure-entry" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:33:1 + --> $DIR/unsupported.rs:34:1 | LL | extern "ptx-kernel" fn ptx() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error[E0570]: `"aapcs"` is not a supported ABI for the current target +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target --> $DIR/unsupported.rs:43:1 | +LL | extern "gpu-kernel" fn gpu() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"aapcs"` is not a supported ABI for the current target + --> $DIR/unsupported.rs:46:1 + | LL | extern "aapcs" fn aapcs() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:69:1 + --> $DIR/unsupported.rs:72:1 | LL | extern "msp430-interrupt" fn msp430() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:79:1 + --> $DIR/unsupported.rs:82:1 | LL | extern "avr-interrupt" fn avr() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:111:1 + --> $DIR/unsupported.rs:114:1 | LL | extern "x86-interrupt" fn x86() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:133:1 + --> $DIR/unsupported.rs:136:1 | LL | extern "thiscall" fn thiscall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:159:1 + --> $DIR/unsupported.rs:162:1 | LL | extern "stdcall" fn stdcall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:191:1 + --> $DIR/unsupported.rs:194:1 | LL | extern "C-cmse-nonsecure-entry" fn cmse_entry() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 16 
previous errors; 9 warnings emitted +error: aborting due to 17 previous errors; 9 warnings emitted For more information about this error, try `rustc --explain E0570`. diff --git a/tests/ui/abi/unsupported.riscv64.stderr b/tests/ui/abi/unsupported.riscv64.stderr index 92728b1df18c0..abf403da8bd15 100644 --- a/tests/ui/abi/unsupported.riscv64.stderr +++ b/tests/ui/abi/unsupported.riscv64.stderr @@ -1,5 +1,5 @@ warning: the calling convention "ptx-kernel" is not supported on this target - --> $DIR/unsupported.rs:35:15 + --> $DIR/unsupported.rs:36:15 | LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:40:1 + --> $DIR/unsupported.rs:41:1 | LL | extern "ptx-kernel" {} | ^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "aapcs" is not supported on this target - --> $DIR/unsupported.rs:49:17 + --> $DIR/unsupported.rs:52:17 | LL | fn aapcs_ptr(f: extern "aapcs" fn()) { | ^^^^^^^^^^^^^^^^^^^ @@ -24,13 +24,13 @@ LL | fn aapcs_ptr(f: extern "aapcs" fn()) { = note: for more information, see issue #130260 error[E0570]: `"aapcs"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:62:1 + --> $DIR/unsupported.rs:65:1 | LL | extern "aapcs" {} | ^^^^^^^^^^^^^^^^^ warning: the calling convention "msp430-interrupt" is not supported on this target - --> $DIR/unsupported.rs:71:18 + --> $DIR/unsupported.rs:74:18 | LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,13 +39,13 @@ LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:76:1 + --> $DIR/unsupported.rs:79:1 | LL | extern "msp430-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "avr-interrupt" is not supported on this target - --> $DIR/unsupported.rs:81:15 + --> $DIR/unsupported.rs:84:15 | LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -54,13 +54,13 @@ LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:86:1 + --> $DIR/unsupported.rs:89:1 | LL | extern "avr-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "x86-interrupt" is not supported on this target - --> $DIR/unsupported.rs:116:15 + --> $DIR/unsupported.rs:119:15 | LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,13 +69,13 @@ LL | fn x86_ptr(f: extern "x86-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:127:1 + --> $DIR/unsupported.rs:130:1 | LL | extern "x86-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "thiscall" is not supported on this target - --> $DIR/unsupported.rs:139:20 + --> $DIR/unsupported.rs:142:20 | LL | fn thiscall_ptr(f: extern "thiscall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^ @@ -84,13 +84,13 @@ LL | fn thiscall_ptr(f: extern "thiscall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> 
$DIR/unsupported.rs:152:1 + --> $DIR/unsupported.rs:155:1 | LL | extern "thiscall" {} | ^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "stdcall" is not supported on this target - --> $DIR/unsupported.rs:165:19 + --> $DIR/unsupported.rs:168:19 | LL | fn stdcall_ptr(f: extern "stdcall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^ @@ -99,13 +99,13 @@ LL | fn stdcall_ptr(f: extern "stdcall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:178:1 + --> $DIR/unsupported.rs:181:1 | LL | extern "stdcall" {} | ^^^^^^^^^^^^^^^^^^^ warning: the calling convention "C-cmse-nonsecure-call" is not supported on this target - --> $DIR/unsupported.rs:185:21 + --> $DIR/unsupported.rs:188:21 | LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -114,7 +114,7 @@ LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { = note: for more information, see issue #130260 warning: the calling convention "C-cmse-nonsecure-entry" is not supported on this target - --> $DIR/unsupported.rs:193:22 + --> $DIR/unsupported.rs:196:22 | LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -123,59 +123,65 @@ LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { = note: for more information, see issue #130260 error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:198:1 + --> $DIR/unsupported.rs:201:1 | LL | extern "C-cmse-nonsecure-entry" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:33:1 + --> $DIR/unsupported.rs:34:1 | LL | extern "ptx-kernel" fn ptx() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error[E0570]: `"aapcs"` is not a supported ABI for the current target +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target --> $DIR/unsupported.rs:43:1 | +LL | extern "gpu-kernel" fn gpu() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"aapcs"` is not a supported ABI for the current target + --> $DIR/unsupported.rs:46:1 + | LL | extern "aapcs" fn aapcs() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:69:1 + --> $DIR/unsupported.rs:72:1 | LL | extern "msp430-interrupt" fn msp430() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:79:1 + --> $DIR/unsupported.rs:82:1 | LL | extern "avr-interrupt" fn avr() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"x86-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:111:1 + --> $DIR/unsupported.rs:114:1 | LL | extern "x86-interrupt" fn x86() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:133:1 + --> $DIR/unsupported.rs:136:1 | LL | extern "thiscall" fn thiscall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:159:1 + --> $DIR/unsupported.rs:162:1 | LL | extern "stdcall" fn stdcall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:191:1 + --> $DIR/unsupported.rs:194:1 | LL | extern "C-cmse-nonsecure-entry" fn 
cmse_entry() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 16 previous errors; 9 warnings emitted +error: aborting due to 17 previous errors; 9 warnings emitted For more information about this error, try `rustc --explain E0570`. diff --git a/tests/ui/abi/unsupported.rs b/tests/ui/abi/unsupported.rs index a56f001ef9520..7d4142f0dee79 100644 --- a/tests/ui/abi/unsupported.rs +++ b/tests/ui/abi/unsupported.rs @@ -19,6 +19,7 @@ abi_ptx, abi_msp430_interrupt, abi_avr_interrupt, + abi_gpu_kernel, abi_x86_interrupt, abi_riscv_interrupt, abi_c_cmse_nonsecure_call, @@ -39,6 +40,8 @@ fn ptx_ptr(f: extern "ptx-kernel" fn()) { } extern "ptx-kernel" {} //~^ ERROR is not a supported ABI +extern "gpu-kernel" fn gpu() {} +//~^ ERROR is not a supported ABI extern "aapcs" fn aapcs() {} //[x64]~^ ERROR is not a supported ABI diff --git a/tests/ui/abi/unsupported.x64.stderr b/tests/ui/abi/unsupported.x64.stderr index 27a4f1f532c6c..824a33c948add 100644 --- a/tests/ui/abi/unsupported.x64.stderr +++ b/tests/ui/abi/unsupported.x64.stderr @@ -1,5 +1,5 @@ warning: the calling convention "ptx-kernel" is not supported on this target - --> $DIR/unsupported.rs:35:15 + --> $DIR/unsupported.rs:36:15 | LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | fn ptx_ptr(f: extern "ptx-kernel" fn()) { = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:40:1 + --> $DIR/unsupported.rs:41:1 | LL | extern "ptx-kernel" {} | ^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "aapcs" is not supported on this target - --> $DIR/unsupported.rs:49:17 + --> $DIR/unsupported.rs:52:17 | LL | fn aapcs_ptr(f: extern "aapcs" fn()) { | ^^^^^^^^^^^^^^^^^^^ @@ -24,13 +24,13 @@ LL | fn aapcs_ptr(f: extern "aapcs" fn()) { = note: for more information, see issue #130260 error[E0570]: `"aapcs"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:62:1 + --> $DIR/unsupported.rs:65:1 | LL | extern "aapcs" {} | ^^^^^^^^^^^^^^^^^ warning: the calling convention "msp430-interrupt" is not supported on this target - --> $DIR/unsupported.rs:71:18 + --> $DIR/unsupported.rs:74:18 | LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,13 +39,13 @@ LL | fn msp430_ptr(f: extern "msp430-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:76:1 + --> $DIR/unsupported.rs:79:1 | LL | extern "msp430-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "avr-interrupt" is not supported on this target - --> $DIR/unsupported.rs:81:15 + --> $DIR/unsupported.rs:84:15 | LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -54,13 +54,13 @@ LL | fn avr_ptr(f: extern "avr-interrupt" fn()) { = note: for more information, see issue #130260 error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:86:1 + --> $DIR/unsupported.rs:89:1 | LL | extern "avr-interrupt" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "riscv-interrupt-m" is not supported on this target - --> $DIR/unsupported.rs:94:17 + --> $DIR/unsupported.rs:97:17 | LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,13 +69,13 @@ LL | fn riscv_ptr(f: extern "riscv-interrupt-m" fn()) { = 
note: for more information, see issue #130260 error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:105:1 + --> $DIR/unsupported.rs:108:1 | LL | extern "riscv-interrupt-m" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "thiscall" is not supported on this target - --> $DIR/unsupported.rs:139:20 + --> $DIR/unsupported.rs:142:20 | LL | fn thiscall_ptr(f: extern "thiscall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^ @@ -84,13 +84,13 @@ LL | fn thiscall_ptr(f: extern "thiscall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:152:1 + --> $DIR/unsupported.rs:155:1 | LL | extern "thiscall" {} | ^^^^^^^^^^^^^^^^^^^^ warning: the calling convention "stdcall" is not supported on this target - --> $DIR/unsupported.rs:165:19 + --> $DIR/unsupported.rs:168:19 | LL | fn stdcall_ptr(f: extern "stdcall" fn()) { | ^^^^^^^^^^^^^^^^^^^^^ @@ -99,13 +99,13 @@ LL | fn stdcall_ptr(f: extern "stdcall" fn()) { = note: for more information, see issue #130260 error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:178:1 + --> $DIR/unsupported.rs:181:1 | LL | extern "stdcall" {} | ^^^^^^^^^^^^^^^^^^^ warning: the calling convention "C-cmse-nonsecure-call" is not supported on this target - --> $DIR/unsupported.rs:185:21 + --> $DIR/unsupported.rs:188:21 | LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -114,7 +114,7 @@ LL | fn cmse_call_ptr(f: extern "C-cmse-nonsecure-call" fn()) { = note: for more information, see issue #130260 warning: the calling convention "C-cmse-nonsecure-entry" is not supported on this target - --> $DIR/unsupported.rs:193:22 + --> $DIR/unsupported.rs:196:22 | LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -123,59 +123,65 @@ LL | fn cmse_entry_ptr(f: extern "C-cmse-nonsecure-entry" fn()) { = note: for more information, see issue #130260 error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:198:1 + --> $DIR/unsupported.rs:201:1 | LL | extern "C-cmse-nonsecure-entry" {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"ptx-kernel"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:33:1 + --> $DIR/unsupported.rs:34:1 | LL | extern "ptx-kernel" fn ptx() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error[E0570]: `"aapcs"` is not a supported ABI for the current target +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target --> $DIR/unsupported.rs:43:1 | +LL | extern "gpu-kernel" fn gpu() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"aapcs"` is not a supported ABI for the current target + --> $DIR/unsupported.rs:46:1 + | LL | extern "aapcs" fn aapcs() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"msp430-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:69:1 + --> $DIR/unsupported.rs:72:1 | LL | extern "msp430-interrupt" fn msp430() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"avr-interrupt"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:79:1 + --> $DIR/unsupported.rs:82:1 | LL | extern "avr-interrupt" fn avr() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"riscv-interrupt-m"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:89:1 + --> $DIR/unsupported.rs:92:1 | LL | extern 
"riscv-interrupt-m" fn riscv() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"thiscall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:133:1 + --> $DIR/unsupported.rs:136:1 | LL | extern "thiscall" fn thiscall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"stdcall"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:159:1 + --> $DIR/unsupported.rs:162:1 | LL | extern "stdcall" fn stdcall() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0570]: `"C-cmse-nonsecure-entry"` is not a supported ABI for the current target - --> $DIR/unsupported.rs:191:1 + --> $DIR/unsupported.rs:194:1 | LL | extern "C-cmse-nonsecure-entry" fn cmse_entry() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 16 previous errors; 9 warnings emitted +error: aborting due to 17 previous errors; 9 warnings emitted For more information about this error, try `rustc --explain E0570`. diff --git a/tests/ui/feature-gates/feature-gate-abi_gpu_kernel.rs b/tests/ui/feature-gates/feature-gate-abi_gpu_kernel.rs new file mode 100644 index 0000000000000..7d39820f086e9 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-abi_gpu_kernel.rs @@ -0,0 +1,45 @@ +//@ compile-flags: --crate-type=rlib + +#![feature(no_core, lang_items)] +#![no_core] + +#[lang="sized"] +trait Sized { } + +#[lang="tuple_trait"] +trait Tuple { } + +// Functions +extern "gpu-kernel" fn f1(_: ()) {} //~ ERROR gpu-kernel ABI is experimental and subject to change +//~^ ERROR is not a supported ABI + +// Methods in trait definition +trait Tr { + extern "gpu-kernel" fn m1(_: ()); //~ ERROR gpu-kernel ABI is experimental and subject to change + + extern "gpu-kernel" fn dm1(_: ()) {} //~ ERROR gpu-kernel ABI is experimental and subject to change + //~^ ERROR is not a supported ABI +} + +struct S; + +// Methods in trait impl +impl Tr for S { + extern "gpu-kernel" fn m1(_: ()) {} //~ ERROR gpu-kernel ABI is experimental and subject to change + //~^ ERROR is not a supported ABI +} + +// Methods in inherent impl +impl S { + extern "gpu-kernel" fn im1(_: ()) {} //~ ERROR gpu-kernel ABI is experimental and subject to change + //~^ ERROR is not a supported ABI +} + +// Function pointer types +type A1 = extern "gpu-kernel" fn(_: ()); //~ ERROR gpu-kernel ABI is experimental and subject to change +//~^ WARN the calling convention "gpu-kernel" is not supported on this target +//~^^ WARN this was previously accepted by the compiler but is being phased out + +// Foreign modules +extern "gpu-kernel" {} //~ ERROR gpu-kernel ABI is experimental and subject to change +//~^ ERROR is not a supported ABI diff --git a/tests/ui/feature-gates/feature-gate-abi_gpu_kernel.stderr b/tests/ui/feature-gates/feature-gate-abi_gpu_kernel.stderr new file mode 100644 index 0000000000000..771c49acb97a5 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-abi_gpu_kernel.stderr @@ -0,0 +1,114 @@ +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:13:8 + | +LL | extern "gpu-kernel" fn f1(_: ()) {} + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:18:12 + | +LL | extern "gpu-kernel" fn m1(_: ()); + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more 
information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:20:12 + | +LL | extern "gpu-kernel" fn dm1(_: ()) {} + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:28:12 + | +LL | extern "gpu-kernel" fn m1(_: ()) {} + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:34:12 + | +LL | extern "gpu-kernel" fn im1(_: ()) {} + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:39:18 + | +LL | type A1 = extern "gpu-kernel" fn(_: ()); + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: gpu-kernel ABI is experimental and subject to change + --> $DIR/feature-gate-abi_gpu_kernel.rs:44:8 + | +LL | extern "gpu-kernel" {} + | ^^^^^^^^^^^^ + | + = note: see issue #135467 for more information + = help: add `#![feature(abi_gpu_kernel)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +warning: the calling convention "gpu-kernel" is not supported on this target + --> $DIR/feature-gate-abi_gpu_kernel.rs:39:11 + | +LL | type A1 = extern "gpu-kernel" fn(_: ()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
+ = note: for more information, see issue #130260 + = note: `#[warn(unsupported_fn_ptr_calling_conventions)]` on by default + +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target + --> $DIR/feature-gate-abi_gpu_kernel.rs:44:1 + | +LL | extern "gpu-kernel" {} + | ^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target + --> $DIR/feature-gate-abi_gpu_kernel.rs:13:1 + | +LL | extern "gpu-kernel" fn f1(_: ()) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target + --> $DIR/feature-gate-abi_gpu_kernel.rs:20:5 + | +LL | extern "gpu-kernel" fn dm1(_: ()) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target + --> $DIR/feature-gate-abi_gpu_kernel.rs:28:5 + | +LL | extern "gpu-kernel" fn m1(_: ()) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0570]: `"gpu-kernel"` is not a supported ABI for the current target + --> $DIR/feature-gate-abi_gpu_kernel.rs:34:5 + | +LL | extern "gpu-kernel" fn im1(_: ()) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 12 previous errors; 1 warning emitted + +Some errors have detailed explanations: E0570, E0658. +For more information about an error, try `rustc --explain E0570`. diff --git a/tests/ui/print-calling-conventions.stdout b/tests/ui/print-calling-conventions.stdout index 4415b3c858e35..539b2d5dee405 100644 --- a/tests/ui/print-calling-conventions.stdout +++ b/tests/ui/print-calling-conventions.stdout @@ -12,6 +12,7 @@ cdecl-unwind efiapi fastcall fastcall-unwind +gpu-kernel msp430-interrupt ptx-kernel riscv-interrupt-m From f78a1bd89ade0d326a47222e51c61bd631530416 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Wed, 15 Jan 2025 22:09:52 +0000 Subject: [PATCH 097/142] Detect if-else chains with a missing final else in type errors ``` error[E0308]: `if` and `else` have incompatible types --> $DIR/if-else-chain-missing-else.rs:12:12 | LL | let x = if let Ok(x) = res { | ______________- LL | | x | | - expected because of this LL | | } else if let Err(e) = res { | | ____________^ LL | || return Err(e); LL | || }; | || ^ | ||_____| | |_____`if` and `else` have incompatible types | expected `i32`, found `()` | = note: `if` expressions without `else` evaluate to `()` = note: consider adding an `else` block that evaluates to the expected type ``` We probably want a longer explanation and fewer spans on this case. Partially address #133316. 
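For reference, the second new note steers users toward giving the chain a final `else` arm. A minimal, hedged sketch of what the resolved version of the code above could look like (`MyErr` and `could_fail` here are simplified stand-ins for the test's definitions, not part of this change):

```rust
struct MyErr;

fn could_fail() -> Result<i32, MyErr> {
    // Arbitrary stand-in so that both variants are actually constructed.
    if std::env::args().count() > 1 { Err(MyErr) } else { Ok(0) }
}

// With a final `else` arm the whole chain evaluates to `i32`, so the
// "expected `i32`, found `()`" mismatch shown above no longer occurs.
fn f() -> Result<i32, MyErr> {
    let res = could_fail();
    let x = if let Ok(x) = res {
        x
    } else if let Err(e) = res {
        return Err(e);
    } else {
        // Logically unreachable here: `res` is either `Ok` or `Err`.
        unreachable!()
    };
    Ok(x)
}

fn main() {
    let _ = f();
}
```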
--- .../src/error_reporting/infer/mod.rs | 8 +++++++ .../ui/expr/if/if-else-chain-missing-else.rs | 20 +++++++++++++++++ .../expr/if/if-else-chain-missing-else.stderr | 22 +++++++++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 tests/ui/expr/if/if-else-chain-missing-else.rs create mode 100644 tests/ui/expr/if/if-else-chain-missing-else.stderr diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 7032f7b9d318e..9778299eb191f 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -620,6 +620,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { }) => { let then_span = self.find_block_span_from_hir_id(then_id); let else_span = self.find_block_span_from_hir_id(else_id); + if let hir::Node::Expr(e) = self.tcx.hir_node(else_id) + && let hir::ExprKind::If(_cond, _then, None) = e.kind + && else_ty.is_unit() + { + // Account for `let x = if a { 1 } else if b { 2 };` + err.note("`if` expressions without `else` evaluate to `()`"); + err.note("consider adding an `else` block that evaluates to the expected type"); + } err.span_label(then_span, "expected because of this"); if let Some(sp) = outer_span { err.span_label(sp, "`if` and `else` have incompatible types"); diff --git a/tests/ui/expr/if/if-else-chain-missing-else.rs b/tests/ui/expr/if/if-else-chain-missing-else.rs new file mode 100644 index 0000000000000..995aac07f2f76 --- /dev/null +++ b/tests/ui/expr/if/if-else-chain-missing-else.rs @@ -0,0 +1,20 @@ +enum Cause { Cause1, Cause2 } +struct MyErr { x: Cause } + +fn main() { + _ = f(); +} + +fn f() -> Result { + let res = could_fail(); + let x = if let Ok(x) = res { + x + } else if let Err(e) = res { //~ ERROR `if` and `else` + return Err(e); + }; + Ok(x) +} + +fn could_fail() -> Result { + Ok(0) +} diff --git a/tests/ui/expr/if/if-else-chain-missing-else.stderr b/tests/ui/expr/if/if-else-chain-missing-else.stderr new file mode 100644 index 0000000000000..374c4927e3003 --- /dev/null +++ b/tests/ui/expr/if/if-else-chain-missing-else.stderr @@ -0,0 +1,22 @@ +error[E0308]: `if` and `else` have incompatible types + --> $DIR/if-else-chain-missing-else.rs:12:12 + | +LL | let x = if let Ok(x) = res { + | ______________- +LL | | x + | | - expected because of this +LL | | } else if let Err(e) = res { + | | ____________^ +LL | || return Err(e); +LL | || }; + | || ^ + | ||_____| + | |_____`if` and `else` have incompatible types + | expected `i32`, found `()` + | + = note: `if` expressions without `else` evaluate to `()` + = note: consider adding an `else` block that evaluates to the expected type + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0308`. 
From 2238b00dac093dce9614ead293cbc3fe66577787 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Thu, 16 Jan 2025 12:53:46 +1100 Subject: [PATCH 098/142] Update docs for `-Clink-dead-code` to discourage its use --- compiler/rustc_session/src/config.rs | 7 ------- compiler/rustc_session/src/options.rs | 2 +- src/doc/rustc/src/codegen-options/index.md | 10 +++++----- 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 5192ad61af2b1..21afb7df7cb3a 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -132,13 +132,6 @@ pub enum LtoCli { } /// The different settings that the `-C instrument-coverage` flag can have. -/// -/// Coverage instrumentation now supports combining `-C instrument-coverage` -/// with compiler and linker optimization (enabled with `-O` or `-C opt-level=1` -/// and higher). Nevertheless, there are many variables, depending on options -/// selected, code structure, and enabled attributes. If errors are encountered, -/// either while compiling or when generating `llvm-cov show` reports, consider -/// lowering the optimization level, or including/excluding `-C link-dead-code`. #[derive(Clone, Copy, PartialEq, Hash, Debug)] pub enum InstrumentCoverage { /// `-C instrument-coverage=no` (or `off`, `false` etc.) diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 3af6df6301774..63aaa3abc8e56 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -1638,7 +1638,7 @@ options! { "extra arguments to append to the linker invocation (space separated)"), #[rustc_lint_opt_deny_field_access("use `Session::link_dead_code` instead of this field")] link_dead_code: Option = (None, parse_opt_bool, [TRACKED], - "keep dead code at link time (useful for code coverage) (default: no)"), + "try to generate and link dead code (default: no)"), link_self_contained: LinkSelfContained = (LinkSelfContained::default(), parse_link_self_contained, [UNTRACKED], "control whether to link Rust provided C objects/libraries or rely \ on a C toolchain or linker installed in the system"), diff --git a/src/doc/rustc/src/codegen-options/index.md b/src/doc/rustc/src/codegen-options/index.md index e987d06b0f31f..f45217c69ff0f 100644 --- a/src/doc/rustc/src/codegen-options/index.md +++ b/src/doc/rustc/src/codegen-options/index.md @@ -207,14 +207,14 @@ options should be separated by spaces. ## link-dead-code -This flag controls whether the linker will keep dead code. It takes one of -the following values: +Tries to generate and link dead code that would otherwise not be generated or +linked. It takes one of the following values: -* `y`, `yes`, `on`, `true` or no value: keep dead code. +* `y`, `yes`, `on`, `true` or no value: try to keep dead code. * `n`, `no`, `off` or `false`: remove dead code (the default). -An example of when this flag might be useful is when trying to construct code coverage -metrics. +This flag was historically used to help improve some older forms of code +coverage measurement. Its use is not recommended. 
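For orientation only (a toy crate, not part of this patch): the option is an ordinary `-C` codegen flag, passed as e.g. `rustc -C link-dead-code=yes main.rs` or via `RUSTFLAGS`. A minimal sketch of the kind of code it affects:

```rust
// Hypothetical `main.rs` used only to illustrate the flag described above.
// `never_called` has no callers, so by default it is not generated or linked;
// building with `rustc -C link-dead-code=yes main.rs` asks the compiler and
// linker to try to keep it anyway, while the default (`-C link-dead-code=no`)
// removes it.
#[allow(dead_code)] // silence the lint; the point here is codegen, not the warning
fn never_called() -> u64 {
    0xDEAD_C0DE
}

fn main() {
    println!("compiled");
}
```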
## link-self-contained From c18718c9c28cc4c700312775c2b7f87cd79d3063 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Fri, 10 Jan 2025 13:00:45 -0800 Subject: [PATCH 099/142] Less unsafe in `dangling`/`without_provenance` --- library/core/src/ptr/alignment.rs | 12 ++- library/core/src/ptr/mod.rs | 11 +-- library/core/src/ptr/non_null.rs | 16 ++-- ...n.DataflowConstProp.32bit.panic-abort.diff | 63 +++++---------- ....DataflowConstProp.32bit.panic-unwind.diff | 71 ++++++---------- ...n.DataflowConstProp.64bit.panic-abort.diff | 63 +++++---------- ....DataflowConstProp.64bit.panic-unwind.diff | 71 ++++++---------- ...oxed_slice.main.GVN.32bit.panic-abort.diff | 72 +++++------------ ...xed_slice.main.GVN.32bit.panic-unwind.diff | 80 ++++++------------- ...oxed_slice.main.GVN.64bit.panic-abort.diff | 72 +++++------------ ...xed_slice.main.GVN.64bit.panic-unwind.diff | 80 ++++++------------- .../gvn_ptr_eq_with_constant.main.GVN.diff | 30 +++++-- ...ated_loop.PreCodegen.after.panic-abort.mir | 32 ++++---- ...ted_loop.PreCodegen.after.panic-unwind.mir | 12 +-- ...ward_loop.PreCodegen.after.panic-abort.mir | 8 +- ...ard_loop.PreCodegen.after.panic-unwind.mir | 8 +- ...erse_loop.PreCodegen.after.panic-abort.mir | 14 ++-- ...rse_loop.PreCodegen.after.panic-unwind.mir | 14 ++-- 18 files changed, 272 insertions(+), 457 deletions(-) diff --git a/library/core/src/ptr/alignment.rs b/library/core/src/ptr/alignment.rs index 74a1d40f4e734..2da94e72566e9 100644 --- a/library/core/src/ptr/alignment.rs +++ b/library/core/src/ptr/alignment.rs @@ -42,9 +42,10 @@ impl Alignment { /// but in an `Alignment` instead of a `usize`. #[unstable(feature = "ptr_alignment_type", issue = "102070")] #[inline] + #[must_use] pub const fn of() -> Self { - // SAFETY: rustc ensures that type alignment is always a power of two. - unsafe { Alignment::new_unchecked(mem::align_of::()) } + // This can't actually panic since type alignment is always a power of two. + const { Alignment::new(mem::align_of::()).unwrap() } } /// Creates an `Alignment` from a `usize`, or returns `None` if it's @@ -95,8 +96,13 @@ impl Alignment { #[unstable(feature = "ptr_alignment_type", issue = "102070")] #[inline] pub const fn as_nonzero(self) -> NonZero { + // This transmutes directly to avoid the UbCheck in `NonZero::new_unchecked` + // since there's no way for the user to trip that check anyway -- the + // validity invariant of the type would have to have been broken earlier -- + // and emitting it in an otherwise simple method is bad for compile time. + // SAFETY: All the discriminants are non-zero. - unsafe { NonZero::new_unchecked(self.as_usize()) } + unsafe { mem::transmute::>(self) } } /// Returns the base-2 logarithm of the alignment. diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index f58c0e1241142..e1348552b65c3 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -596,12 +596,7 @@ pub const fn null_mut() -> *mut T { #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] pub const fn without_provenance(addr: usize) -> *const T { - // An int-to-pointer transmute currently has exactly the intended semantics: it creates a - // pointer without provenance. Note that this is *not* a stable guarantee about transmute - // semantics, it relies on sysroot crates having special status. - // SAFETY: every valid integer is also a valid pointer (as long as you don't dereference that - // pointer). 
- unsafe { mem::transmute(addr) } + without_provenance_mut(addr) } /// Creates a new pointer that is dangling, but non-null and well-aligned. @@ -618,7 +613,7 @@ pub const fn without_provenance(addr: usize) -> *const T { #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] pub const fn dangling() -> *const T { - without_provenance(mem::align_of::()) + dangling_mut() } /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance]. @@ -661,7 +656,7 @@ pub const fn without_provenance_mut(addr: usize) -> *mut T { #[stable(feature = "strict_provenance", since = "1.84.0")] #[rustc_const_stable(feature = "strict_provenance", since = "1.84.0")] pub const fn dangling_mut() -> *mut T { - without_provenance_mut(mem::align_of::()) + NonNull::dangling().as_ptr() } /// Converts an address back to a pointer, picking up some previously 'exposed' diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index 2c9131254f7c4..d93069d384edd 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -91,12 +91,12 @@ impl NonNull { /// /// This is a [Strict Provenance][crate::ptr#strict-provenance] API. #[unstable(feature = "nonnull_provenance", issue = "135243")] + #[must_use] + #[inline] pub const fn without_provenance(addr: NonZero) -> Self { + let pointer = crate::ptr::without_provenance(addr.get()); // SAFETY: we know `addr` is non-zero. - unsafe { - let ptr = crate::ptr::without_provenance_mut(addr.get()); - NonNull::new_unchecked(ptr) - } + unsafe { NonNull { pointer } } } /// Creates a new `NonNull` that is dangling, but well-aligned. @@ -123,11 +123,8 @@ impl NonNull { #[must_use] #[inline] pub const fn dangling() -> Self { - // SAFETY: ptr::dangling_mut() returns a non-null well-aligned pointer. - unsafe { - let ptr = crate::ptr::dangling_mut::(); - NonNull::new_unchecked(ptr) - } + let align = crate::ptr::Alignment::of::(); + NonNull::without_provenance(align.as_nonzero()) } /// Converts an address back to a mutable pointer, picking up some previously 'exposed' @@ -137,6 +134,7 @@ impl NonNull { /// /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API. #[unstable(feature = "nonnull_provenance", issue = "135243")] + #[inline] pub fn with_exposed_provenance(addr: NonZero) -> Self { // SAFETY: we know `addr` is non-zero. 
unsafe { diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff index 5ea9902b26251..5a830254f6199 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,40 +42,13 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); + _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); - _7 = const 1_usize; - _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb4, otherwise: bb2]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2: { - StorageLive(_10); - _10 = const {0x1 as *mut ()}; - _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_10); - goto -> bb4; - } - - bb4: { - StorageDead(_8); - _11 = const {0x1 as *const [bool; 0]}; + _7 = const {0x1 as *const [bool; 0]}; _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); _4 = const Unique::<[bool; 0]> {{ pointer: NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}, _marker: PhantomData::<[bool; 0]> }}; @@ -85,13 +56,17 @@ _3 = const Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC0, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}; StorageDead(_4); _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; StorageDead(_2); _0 = const (); drop(_1) -> [return: bb1, unwind unreachable]; } + + bb1: { + StorageDead(_1); + return; + } } ALLOC2 (size: 8, align: 4) { .. 
} diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff index cc5a41a7f63b1..c11368a347c57 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,44 +42,13 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); + _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); - _7 = const 1_usize; - _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb5, otherwise: bb3]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2 (cleanup): { - resume; - } - - bb3: { - StorageLive(_10); - _10 = const {0x1 as *mut ()}; - _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_10); - goto -> bb5; - } - - bb5: { - StorageDead(_8); - _11 = const {0x1 as *const [bool; 0]}; + _7 = const {0x1 as *const [bool; 0]}; _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); _4 = const Unique::<[bool; 0]> {{ pointer: NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}, _marker: PhantomData::<[bool; 0]> }}; @@ -89,13 +56,21 @@ _3 = const Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC0, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}; StorageDead(_4); _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; StorageDead(_2); _0 = const (); drop(_1) -> [return: bb1, unwind: bb2]; } + + bb1: { + StorageDead(_1); + return; + } + + bb2 (cleanup): { + resume; + } } ALLOC2 (size: 8, align: 4) { .. 
} diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff index 3d398fbea7935..037ed02ce6556 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,40 +42,13 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); + _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); - _7 = const 1_usize; - _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb4, otherwise: bb2]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2: { - StorageLive(_10); - _10 = const {0x1 as *mut ()}; - _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_10); - goto -> bb4; - } - - bb4: { - StorageDead(_8); - _11 = const {0x1 as *const [bool; 0]}; + _7 = const {0x1 as *const [bool; 0]}; _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); _4 = const Unique::<[bool; 0]> {{ pointer: NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}, _marker: PhantomData::<[bool; 0]> }}; @@ -85,13 +56,17 @@ _3 = const Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC0, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}; StorageDead(_4); _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; StorageDead(_2); _0 = const (); drop(_1) -> [return: bb1, unwind unreachable]; } + + bb1: { + StorageDead(_1); + return; + } } ALLOC2 (size: 16, align: 8) { .. 
} diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff index dc99c3f7a8c9b..86351c7875933 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,44 +42,13 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); + _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); - _7 = const 1_usize; - _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb5, otherwise: bb3]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2 (cleanup): { - resume; - } - - bb3: { - StorageLive(_10); - _10 = const {0x1 as *mut ()}; - _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_10); - goto -> bb5; - } - - bb5: { - StorageDead(_8); - _11 = const {0x1 as *const [bool; 0]}; + _7 = const {0x1 as *const [bool; 0]}; _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); _4 = const Unique::<[bool; 0]> {{ pointer: NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}, _marker: PhantomData::<[bool; 0]> }}; @@ -89,13 +56,21 @@ _3 = const Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC0, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}; StorageDead(_4); _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; StorageDead(_2); _0 = const (); drop(_1) -> [return: bb1, unwind: bb2]; } + + bb1: { + StorageDead(_1); + return; + } + + bb2 (cleanup): { + resume; + } } ALLOC2 (size: 16, align: 8) { .. 
} diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff index 6a3ec54306994..20a3897a934f0 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,46 +42,16 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); +- _6 = const std::ptr::Alignment::of::<[bool; 0]>::{constant#0} as std::num::NonZero (Transmute); ++ _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); -- _7 = AlignOf([bool; 0]); -- _6 = copy _7 as *mut [bool; 0] (Transmute); -+ _7 = const 1_usize; -+ _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb4, otherwise: bb2]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2: { - StorageLive(_10); -- _10 = copy _7 as *mut () (Transmute); -- _9 = NonNull::::new_unchecked::precondition_check(move _10) -> [return: bb3, unwind unreachable]; -+ _10 = const {0x1 as *mut ()}; -+ _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_10); - goto -> bb4; - } - - bb4: { - StorageDead(_8); -- _11 = copy _7 as *const [bool; 0] (Transmute); -- _5 = NonNull::<[bool; 0]> { pointer: copy _11 }; -+ _11 = const {0x1 as *const [bool; 0]}; +- _7 = copy _6 as *const [bool; 0] (Transmute); +- _5 = NonNull::<[bool; 0]> { pointer: copy _7 }; ++ _7 = const {0x1 as *const [bool; 0]}; + _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); - _4 = Unique::<[bool; 0]> { pointer: move _5, _marker: const PhantomData::<[bool; 0]> }; @@ -94,7 +62,6 @@ StorageDead(_4); - _2 = Box::<[bool]>(copy _3, const std::alloc::Global); + _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); - _1 = A { foo: move _2 }; + _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: 
*const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; @@ -102,6 +69,11 @@ _0 = const (); drop(_1) -> [return: bb1, unwind unreachable]; } + + bb1: { + StorageDead(_1); + return; + } } + + ALLOC2 (size: 8, align: 4) { .. } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff index 9471ad47cd9df..2e396301fd0ec 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,50 +42,16 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); +- _6 = const std::ptr::Alignment::of::<[bool; 0]>::{constant#0} as std::num::NonZero (Transmute); ++ _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); -- _7 = AlignOf([bool; 0]); -- _6 = copy _7 as *mut [bool; 0] (Transmute); -+ _7 = const 1_usize; -+ _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb5, otherwise: bb3]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2 (cleanup): { - resume; - } - - bb3: { - StorageLive(_10); -- _10 = copy _7 as *mut () (Transmute); -- _9 = NonNull::::new_unchecked::precondition_check(move _10) -> [return: bb4, unwind unreachable]; -+ _10 = const {0x1 as *mut ()}; -+ _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_10); - goto -> bb5; - } - - bb5: { - StorageDead(_8); -- _11 = copy _7 as *const [bool; 0] (Transmute); -- _5 = NonNull::<[bool; 0]> { pointer: copy _11 }; -+ _11 = const {0x1 as *const [bool; 0]}; +- _7 = copy _6 as *const [bool; 0] (Transmute); +- _5 = NonNull::<[bool; 0]> { pointer: copy _7 }; ++ _7 = const {0x1 as *const [bool; 0]}; + _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); - _4 = Unique::<[bool; 0]> { pointer: move _5, _marker: const PhantomData::<[bool; 0]> }; @@ -98,7 +62,6 @@ StorageDead(_4); - _2 = Box::<[bool]>(copy _3, const std::alloc::Global); + _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const 
[bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); - _1 = A { foo: move _2 }; + _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; @@ -106,6 +69,15 @@ _0 = const (); drop(_1) -> [return: bb1, unwind: bb2]; } + + bb1: { + StorageDead(_1); + return; + } + + bb2 (cleanup): { + resume; + } } + + ALLOC2 (size: 8, align: 4) { .. } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff index 187927b8ecab7..319691174cf66 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,46 +42,16 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); +- _6 = const std::ptr::Alignment::of::<[bool; 0]>::{constant#0} as std::num::NonZero (Transmute); ++ _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); -- _7 = AlignOf([bool; 0]); -- _6 = copy _7 as *mut [bool; 0] (Transmute); -+ _7 = const 1_usize; -+ _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb4, otherwise: bb2]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2: { - StorageLive(_10); -- _10 = copy _7 as *mut () (Transmute); -- _9 = NonNull::::new_unchecked::precondition_check(move _10) -> [return: bb3, unwind unreachable]; -+ _10 = const {0x1 as *mut ()}; -+ _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb3, unwind unreachable]; - } - - bb3: { - StorageDead(_10); - goto -> bb4; - } - - bb4: { - StorageDead(_8); -- _11 = copy _7 as *const [bool; 0] (Transmute); -- _5 = NonNull::<[bool; 0]> { pointer: copy _11 }; -+ _11 = const {0x1 as *const [bool; 0]}; +- _7 = copy _6 as *const [bool; 0] (Transmute); +- _5 = NonNull::<[bool; 0]> { pointer: copy _7 }; ++ _7 = const {0x1 as *const [bool; 0]}; + _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); - _4 = Unique::<[bool; 0]> { pointer: move _5, _marker: 
const PhantomData::<[bool; 0]> }; @@ -94,7 +62,6 @@ StorageDead(_4); - _2 = Box::<[bool]>(copy _3, const std::alloc::Global); + _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); - _1 = A { foo: move _2 }; + _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; @@ -102,6 +69,11 @@ _0 = const (); drop(_1) -> [return: bb1, unwind unreachable]; } + + bb1: { + StorageDead(_1); + return; + } } + + ALLOC2 (size: 16, align: 8) { .. } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff index 031c021ba5abf..5dafc89d53f29 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff @@ -16,25 +16,23 @@ scope 4 (inlined Unique::<[bool; 0]>::dangling) { let mut _5: std::ptr::NonNull<[bool; 0]>; scope 5 (inlined NonNull::<[bool; 0]>::dangling) { - let _6: *mut [bool; 0]; + let mut _6: std::num::NonZero; scope 6 { - scope 10 (inlined NonNull::<[bool; 0]>::new_unchecked) { - let mut _8: bool; - let _9: (); - let mut _10: *mut (); - let mut _11: *const [bool; 0]; - scope 11 (inlined core::ub_checks::check_language_ub) { - scope 12 (inlined core::ub_checks::check_language_ub::runtime) { + scope 8 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 10 { + } + scope 11 (inlined NonZero::::get) { + } + scope 12 (inlined without_provenance::<[bool; 0]>) { + scope 13 (inlined without_provenance_mut::<[bool; 0]>) { } } } } - scope 7 (inlined dangling_mut::<[bool; 0]>) { - let mut _7: usize; - scope 8 (inlined align_of::<[bool; 0]>) { - } - scope 9 (inlined without_provenance_mut::<[bool; 0]>) { - } + scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { } } } @@ -44,50 +42,16 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); - StorageLive(_9); StorageLive(_4); StorageLive(_5); StorageLive(_6); +- _6 = const std::ptr::Alignment::of::<[bool; 0]>::{constant#0} as std::num::NonZero (Transmute); ++ _6 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); StorageLive(_7); -- _7 = AlignOf([bool; 0]); -- _6 = copy _7 as *mut [bool; 0] (Transmute); -+ _7 = const 1_usize; -+ _6 = const {0x1 as *mut [bool; 0]}; - StorageLive(_11); - StorageLive(_8); - _8 = UbChecks(); - switchInt(move _8) -> [0: bb5, otherwise: bb3]; - } - - bb1: { - StorageDead(_1); - return; - } - - bb2 (cleanup): { - resume; - } - - bb3: { - StorageLive(_10); -- _10 = copy _7 as *mut () (Transmute); -- _9 = NonNull::::new_unchecked::precondition_check(move _10) -> [return: bb4, unwind unreachable]; -+ _10 = const {0x1 as *mut ()}; -+ _9 = NonNull::::new_unchecked::precondition_check(const {0x1 as *mut ()}) -> [return: bb4, unwind unreachable]; - } - - bb4: { - StorageDead(_10); - goto -> bb5; - } - - bb5: { - StorageDead(_8); -- _11 = copy _7 as *const [bool; 0] (Transmute); -- _5 = NonNull::<[bool; 0]> { pointer: copy _11 }; -+ _11 = const {0x1 as *const [bool; 0]}; +- _7 = copy _6 as *const [bool; 0] 
(Transmute); +- _5 = NonNull::<[bool; 0]> { pointer: copy _7 }; ++ _7 = const {0x1 as *const [bool; 0]}; + _5 = const NonNull::<[bool; 0]> {{ pointer: {0x1 as *const [bool; 0]} }}; - StorageDead(_11); StorageDead(_7); StorageDead(_6); - _4 = Unique::<[bool; 0]> { pointer: move _5, _marker: const PhantomData::<[bool; 0]> }; @@ -98,7 +62,6 @@ StorageDead(_4); - _2 = Box::<[bool]>(copy _3, const std::alloc::Global); + _2 = const Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC1, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global); - StorageDead(_9); StorageDead(_3); - _1 = A { foo: move _2 }; + _1 = const A {{ foo: Box::<[bool]>(Unique::<[bool]> {{ pointer: NonNull::<[bool]> {{ pointer: Indirect { alloc_id: ALLOC2, offset: Size(0 bytes) }: *const [bool] }}, _marker: PhantomData::<[bool]> }}, std::alloc::Global) }}; @@ -106,6 +69,15 @@ _0 = const (); drop(_1) -> [return: bb1, unwind: bb2]; } + + bb1: { + StorageDead(_1); + return; + } + + bb2 (cleanup): { + resume; + } } + + ALLOC2 (size: 16, align: 8) { .. } diff --git a/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff b/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff index 1e378d30a3e9d..8e7964297d060 100644 --- a/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff +++ b/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff @@ -6,17 +6,33 @@ let _1: bool; let mut _2: *mut u8; scope 1 (inlined dangling_mut::) { - let mut _3: usize; - scope 2 (inlined align_of::) { + scope 2 (inlined NonNull::::dangling) { + let mut _3: std::num::NonZero; + scope 3 { + scope 5 (inlined std::ptr::Alignment::as_nonzero) { + } + scope 6 (inlined NonNull::::without_provenance) { + scope 7 { + } + scope 8 (inlined NonZero::::get) { + } + scope 9 (inlined without_provenance::) { + scope 10 (inlined without_provenance_mut::) { + } + } + } + } + scope 4 (inlined std::ptr::Alignment::of::) { + } } - scope 3 (inlined without_provenance_mut::) { + scope 11 (inlined NonNull::::as_ptr) { } } - scope 4 (inlined Foo::::cmp_ptr) { + scope 12 (inlined Foo::::cmp_ptr) { let mut _4: *const u8; let mut _5: *mut u8; let mut _6: *const u8; - scope 5 (inlined std::ptr::eq::) { + scope 13 (inlined std::ptr::eq::) { } } @@ -24,9 +40,9 @@ StorageLive(_1); StorageLive(_2); StorageLive(_3); -- _3 = AlignOf(u8); +- _3 = const std::ptr::Alignment::of::::{constant#0} as std::num::NonZero (Transmute); - _2 = copy _3 as *mut u8 (Transmute); -+ _3 = const 1_usize; ++ _3 = const NonZero::(core::num::niche_types::NonZeroUsizeInner(1_usize)); + _2 = const {0x1 as *mut u8}; StorageDead(_3); StorageLive(_4); diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir index a2ef53e0e1308..496ec78fd8d3e 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir @@ -19,30 +19,30 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { debug i => _22; debug x => _23; } - scope 18 (inlined > as Iterator>::next) { + scope 19 (inlined > as Iterator>::next) { let mut _14: &mut std::slice::Iter<'_, T>; let mut _15: std::option::Option<&T>; let mut _19: (usize, bool); let mut _20: (usize, &T); - scope 19 { + scope 20 { let _18: usize; - scope 24 { + scope 25 { } } - scope 20 { - scope 21 { - scope 27 (inlined as FromResidual>>::from_residual) 
{ + scope 21 { + scope 22 { + scope 28 (inlined as FromResidual>>::from_residual) { } } } - scope 22 { - scope 23 { + scope 23 { + scope 24 { } } - scope 25 (inlined as Try>::branch) { + scope 26 (inlined as Try>::branch) { let mut _16: isize; let _17: &T; - scope 26 { + scope 27 { } } } @@ -60,10 +60,12 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { scope 7 { } scope 12 (inlined without_provenance::) { + scope 13 (inlined without_provenance_mut::) { + } } - scope 13 (inlined NonNull::::as_ptr) { + scope 14 (inlined NonNull::::as_ptr) { } - scope 14 (inlined std::ptr::mut_ptr::::add) { + scope 15 (inlined std::ptr::mut_ptr::::add) { } } scope 8 (inlined as From<&[T]>>::from) { @@ -79,11 +81,11 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { } } } - scope 15 (inlined as Iterator>::enumerate) { - scope 16 (inlined Enumerate::>::new) { + scope 16 (inlined as Iterator>::enumerate) { + scope 17 (inlined Enumerate::>::new) { } } - scope 17 (inlined > as IntoIterator>::into_iter) { + scope 18 (inlined > as IntoIterator>::into_iter) { } bb0: { diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir index c1b846e662bc2..c4547cb888fab 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir @@ -35,10 +35,12 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { scope 7 { } scope 12 (inlined without_provenance::) { + scope 13 (inlined without_provenance_mut::) { + } } - scope 13 (inlined NonNull::::as_ptr) { + scope 14 (inlined NonNull::::as_ptr) { } - scope 14 (inlined std::ptr::mut_ptr::::add) { + scope 15 (inlined std::ptr::mut_ptr::::add) { } } scope 8 (inlined as From<&[T]>>::from) { @@ -54,11 +56,11 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () { } } } - scope 15 (inlined as Iterator>::enumerate) { - scope 16 (inlined Enumerate::>::new) { + scope 16 (inlined as Iterator>::enumerate) { + scope 17 (inlined Enumerate::>::new) { } } - scope 17 (inlined > as IntoIterator>::into_iter) { + scope 18 (inlined > as IntoIterator>::into_iter) { } bb0: { diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir index 8cebf2c6bace6..7d011ea3347f3 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir @@ -32,10 +32,12 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 7 { } scope 12 (inlined without_provenance::) { + scope 13 (inlined without_provenance_mut::) { + } } - scope 13 (inlined NonNull::::as_ptr) { + scope 14 (inlined NonNull::::as_ptr) { } - scope 14 (inlined std::ptr::mut_ptr::::add) { + scope 15 (inlined std::ptr::mut_ptr::::add) { } } scope 8 (inlined as From<&[T]>>::from) { @@ -51,7 +53,7 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { } } } - scope 15 (inlined as IntoIterator>::into_iter) { + scope 16 (inlined as IntoIterator>::into_iter) { } bb0: { diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir index e7e39240fed58..75e6542a3a4b0 100644 --- 
a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir @@ -32,10 +32,12 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 7 { } scope 12 (inlined without_provenance::) { + scope 13 (inlined without_provenance_mut::) { + } } - scope 13 (inlined NonNull::::as_ptr) { + scope 14 (inlined NonNull::::as_ptr) { } - scope 14 (inlined std::ptr::mut_ptr::::add) { + scope 15 (inlined std::ptr::mut_ptr::::add) { } } scope 8 (inlined as From<&[T]>>::from) { @@ -51,7 +53,7 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () { } } } - scope 15 (inlined as IntoIterator>::into_iter) { + scope 16 (inlined as IntoIterator>::into_iter) { } bb0: { diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir index 58f95d0a43231..41bc91ab028dc 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir @@ -18,7 +18,7 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 2 { debug x => _17; } - scope 18 (inlined > as Iterator>::next) { + scope 19 (inlined > as Iterator>::next) { let mut _14: &mut std::slice::Iter<'_, T>; } } @@ -35,10 +35,12 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 7 { } scope 12 (inlined without_provenance::) { + scope 13 (inlined without_provenance_mut::) { + } } - scope 13 (inlined NonNull::::as_ptr) { + scope 14 (inlined NonNull::::as_ptr) { } - scope 14 (inlined std::ptr::mut_ptr::::add) { + scope 15 (inlined std::ptr::mut_ptr::::add) { } } scope 8 (inlined as From<&[T]>>::from) { @@ -54,11 +56,11 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { } } } - scope 15 (inlined as Iterator>::rev) { - scope 16 (inlined Rev::>::new) { + scope 16 (inlined as Iterator>::rev) { + scope 17 (inlined Rev::>::new) { } } - scope 17 (inlined > as IntoIterator>::into_iter) { + scope 18 (inlined > as IntoIterator>::into_iter) { } bb0: { diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir index e7ddacf314457..6ed8ef9715bb5 100644 --- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir +++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir @@ -18,7 +18,7 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 2 { debug x => _17; } - scope 18 (inlined > as Iterator>::next) { + scope 19 (inlined > as Iterator>::next) { let mut _14: &mut std::slice::Iter<'_, T>; } } @@ -35,10 +35,12 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { scope 7 { } scope 12 (inlined without_provenance::) { + scope 13 (inlined without_provenance_mut::) { + } } - scope 13 (inlined NonNull::::as_ptr) { + scope 14 (inlined NonNull::::as_ptr) { } - scope 14 (inlined std::ptr::mut_ptr::::add) { + scope 15 (inlined std::ptr::mut_ptr::::add) { } } scope 8 (inlined as From<&[T]>>::from) { @@ -54,11 +56,11 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () { } } } - scope 15 (inlined as Iterator>::rev) { - scope 16 (inlined Rev::>::new) { + scope 16 (inlined as Iterator>::rev) { + scope 17 (inlined Rev::>::new) { } } - scope 17 (inlined > as IntoIterator>::into_iter) { + scope 18 (inlined > as IntoIterator>::into_iter) { } bb0: { From 
48e671ec88386f29ab2e3840a6fde3645abbc085 Mon Sep 17 00:00:00 2001 From: ClearLove <98693523+DiuDiu777@users.noreply.github.com> Date: Thu, 16 Jan 2025 15:16:43 +0800 Subject: [PATCH 100/142] fix typo in library/alloc/src/sync.rs Co-authored-by: Ibraheem Ahmed --- library/alloc/src/sync.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index dd899c8a71d0d..f7201a5ec2fcf 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -1376,7 +1376,7 @@ impl Arc { /// different types. See [`mem::transmute`][transmute] for more information /// on what restrictions apply in this case. /// - /// The raw pointer must point to a block of memory allocated by the global allocator + /// The raw pointer must point to a block of memory allocated by the global allocator. /// /// The user of `from_raw` has to make sure a specific value of `T` is only /// dropped once. From 8fee6a77394ffda819e58a648d4b44e1e566f34b Mon Sep 17 00:00:00 2001 From: Luca Versari Date: Tue, 14 Jan 2025 22:51:09 +0100 Subject: [PATCH 101/142] Coerce safe-to-call target_feature functions to fn pointers. --- compiler/rustc_borrowck/src/type_check/mod.rs | 15 ++++++++- compiler/rustc_hir_typeck/src/coercion.rs | 23 ++++++------- compiler/rustc_middle/src/ty/context.rs | 33 ++++++++++++++++++- .../rustc_mir_build/src/check_unsafety.rs | 11 ++----- .../rfcs/rfc-2396-target_feature-11/fn-ptr.rs | 14 ++++++++ .../rfc-2396-target_feature-11/fn-ptr.stderr | 19 +++++++++-- .../return-fn-ptr.rs | 22 +++++++++++++ 7 files changed, 114 insertions(+), 23 deletions(-) create mode 100644 tests/ui/rfcs/rfc-2396-target_feature-11/return-fn-ptr.rs diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index a1979c8b8aba1..eca8a688ff4a2 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -1654,7 +1654,20 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { match *cast_kind { CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, coercion_source) => { let is_implicit_coercion = coercion_source == CoercionSource::Implicit; - let src_sig = op.ty(body, tcx).fn_sig(tcx); + let src_ty = op.ty(body, tcx); + let mut src_sig = src_ty.fn_sig(tcx); + if let ty::FnDef(def_id, _) = src_ty.kind() + && let ty::FnPtr(_, target_hdr) = *ty.kind() + && tcx.codegen_fn_attrs(def_id).safe_target_features + && target_hdr.safety.is_safe() + && let Some(safe_sig) = tcx.adjust_target_feature_sig( + *def_id, + src_sig, + body.source.def_id(), + ) + { + src_sig = safe_sig; + } // HACK: This shouldn't be necessary... We can remove this when we actually // get binders with where clauses, then elaborate implied bounds into that diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index ec7c1efa38e2e..6945dbc321697 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -920,7 +920,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { match b.kind() { ty::FnPtr(_, b_hdr) => { - let a_sig = a.fn_sig(self.tcx); + let mut a_sig = a.fn_sig(self.tcx); if let ty::FnDef(def_id, _) = *a.kind() { // Intrinsics are not coercible to function pointers if self.tcx.intrinsic(def_id).is_some() { @@ -932,19 +932,20 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return Err(TypeError::ForceInlineCast); } - let fn_attrs = self.tcx.codegen_fn_attrs(def_id); - if matches!(fn_attrs.inline, InlineAttr::Force { .. 
}) { - return Err(TypeError::ForceInlineCast); - } - - // FIXME(target_feature): Safe `#[target_feature]` functions could be cast to safe fn pointers (RFC 2396), - // as you can already write that "cast" in user code by wrapping a target_feature fn call in a closure, - // which is safe. This is sound because you already need to be executing code that is satisfying the target - // feature constraints.. if b_hdr.safety.is_safe() && self.tcx.codegen_fn_attrs(def_id).safe_target_features { - return Err(TypeError::TargetFeatureCast(def_id)); + // Allow the coercion if the current function has all the features that would be + // needed to call the coercee safely. + if let Some(safe_sig) = self.tcx.adjust_target_feature_sig( + def_id, + a_sig, + self.fcx.body_id.into(), + ) { + a_sig = safe_sig; + } else { + return Err(TypeError::TargetFeatureCast(def_id)); + } } } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index fab0047babfd0..7035e641f39e0 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -60,7 +60,7 @@ use crate::dep_graph::{DepGraph, DepKindStruct}; use crate::infer::canonical::{CanonicalParamEnvCache, CanonicalVarInfo, CanonicalVarInfos}; use crate::lint::lint_level; use crate::metadata::ModChild; -use crate::middle::codegen_fn_attrs::CodegenFnAttrs; +use crate::middle::codegen_fn_attrs::{CodegenFnAttrs, TargetFeature}; use crate::middle::{resolve_bound_vars, stability}; use crate::mir::interpret::{self, Allocation, ConstAllocation}; use crate::mir::{Body, Local, Place, PlaceElem, ProjectionKind, Promoted}; @@ -1776,6 +1776,37 @@ impl<'tcx> TyCtxt<'tcx> { pub fn dcx(self) -> DiagCtxtHandle<'tcx> { self.sess.dcx() } + + pub fn is_target_feature_call_safe( + self, + callee_features: &[TargetFeature], + body_features: &[TargetFeature], + ) -> bool { + // If the called function has target features the calling function hasn't, + // the call requires `unsafe`. Don't check this on wasm + // targets, though. For more information on wasm see the + // is_like_wasm check in hir_analysis/src/collect.rs + self.sess.target.options.is_like_wasm + || callee_features + .iter() + .all(|feature| body_features.iter().any(|f| f.name == feature.name)) + } + + /// Returns the safe version of the signature of the given function, if calling it + /// would be safe in the context of the given caller. + pub fn adjust_target_feature_sig( + self, + fun_def: DefId, + fun_sig: ty::Binder<'tcx, ty::FnSig<'tcx>>, + caller: DefId, + ) -> Option>> { + let fun_features = &self.codegen_fn_attrs(fun_def).target_features; + let callee_features = &self.codegen_fn_attrs(caller).target_features; + if self.is_target_feature_call_safe(&fun_features, &callee_features) { + return Some(fun_sig.map_bound(|sig| ty::FnSig { safety: hir::Safety::Safe, ..sig })); + } + None + } } impl<'tcx> TyCtxtAt<'tcx> { diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 6279d0f94af63..5eed9ef798d01 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -495,14 +495,9 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { }; self.requires_unsafe(expr.span, CallToUnsafeFunction(func_id)); } else if let &ty::FnDef(func_did, _) = fn_ty.kind() { - // If the called function has target features the calling function hasn't, - // the call requires `unsafe`. Don't check this on wasm - // targets, though. 
For more information on wasm see the - // is_like_wasm check in hir_analysis/src/collect.rs - if !self.tcx.sess.target.options.is_like_wasm - && !callee_features.iter().all(|feature| { - self.body_target_features.iter().any(|f| f.name == feature.name) - }) + if !self + .tcx + .is_target_feature_call_safe(callee_features, self.body_target_features) { let missing: Vec<_> = callee_features .iter() diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.rs b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.rs index 364b4d3581276..d7c17299d061c 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.rs +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.rs @@ -2,9 +2,23 @@ #![feature(target_feature_11)] +#[target_feature(enable = "avx")] +fn foo_avx() {} + #[target_feature(enable = "sse2")] fn foo() {} +#[target_feature(enable = "sse2")] +fn bar() { + let foo: fn() = foo; // this is OK, as we have the necessary target features. + let foo: fn() = foo_avx; //~ ERROR mismatched types +} + fn main() { + if std::is_x86_feature_detected!("sse2") { + unsafe { + bar(); + } + } let foo: fn() = foo; //~ ERROR mismatched types } diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr index a2bda229d10e1..1228404120a4c 100644 --- a/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/fn-ptr.stderr @@ -1,5 +1,20 @@ error[E0308]: mismatched types - --> $DIR/fn-ptr.rs:9:21 + --> $DIR/fn-ptr.rs:14:21 + | +LL | #[target_feature(enable = "avx")] + | --------------------------------- `#[target_feature]` added here +... +LL | let foo: fn() = foo_avx; + | ---- ^^^^^^^ cannot coerce functions with `#[target_feature]` to safe function pointers + | | + | expected due to this + | + = note: expected fn pointer `fn()` + found fn item `#[target_features] fn() {foo_avx}` + = note: functions with `#[target_feature]` can only be coerced to `unsafe` function pointers + +error[E0308]: mismatched types + --> $DIR/fn-ptr.rs:23:21 | LL | #[target_feature(enable = "sse2")] | ---------------------------------- `#[target_feature]` added here @@ -13,6 +28,6 @@ LL | let foo: fn() = foo; found fn item `#[target_features] fn() {foo}` = note: functions with `#[target_feature]` can only be coerced to `unsafe` function pointers -error: aborting due to 1 previous error +error: aborting due to 2 previous errors For more information about this error, try `rustc --explain E0308`. 
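The rule exercised by these tests reduces to a subset check over target-feature names: a `#[target_feature]` function may be called, or coerced to a safe `fn` pointer, only when the enclosing body already enables every feature the callee requires (wasm-like targets are exempted entirely, as in `is_target_feature_call_safe` above). The following is a minimal standalone sketch of that check, using plain strings in place of the compiler's `TargetFeature` type; it is an illustration, not the compiler API.

    // Simplified model of the subset check performed by the compiler;
    // the real code compares `TargetFeature` values and also
    // short-circuits to `true` on wasm-like targets.
    fn is_target_feature_call_safe(callee_features: &[&str], body_features: &[&str]) -> bool {
        callee_features
            .iter()
            .all(|feature| body_features.iter().any(|f| f == feature))
    }

    fn main() {
        // Coercing an `sse2` function inside an `sse2`-enabled body is accepted...
        assert!(is_target_feature_call_safe(&["sse2"], &["sse2", "avx"]));
        // ...but coercing an `avx` function inside an `sse2`-only body is not,
        // matching the error expected by fn-ptr.rs above.
        assert!(!is_target_feature_call_safe(&["avx"], &["sse2"]));
    }
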
diff --git a/tests/ui/rfcs/rfc-2396-target_feature-11/return-fn-ptr.rs b/tests/ui/rfcs/rfc-2396-target_feature-11/return-fn-ptr.rs new file mode 100644 index 0000000000000..b49493d66096d --- /dev/null +++ b/tests/ui/rfcs/rfc-2396-target_feature-11/return-fn-ptr.rs @@ -0,0 +1,22 @@ +//@ only-x86_64 +//@ run-pass + +#![feature(target_feature_11)] + +#[target_feature(enable = "sse2")] +fn foo() -> bool { + true +} + +#[target_feature(enable = "sse2")] +fn bar() -> fn() -> bool { + foo +} + +fn main() { + if !std::is_x86_feature_detected!("sse2") { + return; + } + let f = unsafe { bar() }; + assert!(f()); +} From f5fe0a029389f25056c916ba6fafc18518f708f6 Mon Sep 17 00:00:00 2001 From: MarcoIeni <11428655+MarcoIeni@users.noreply.github.com> Date: Thu, 16 Jan 2025 11:21:41 +0100 Subject: [PATCH 102/142] ci: mirror ubuntu:22.04 to ghcr.io --- .github/workflows/ghcr.yml | 57 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 .github/workflows/ghcr.yml diff --git a/.github/workflows/ghcr.yml b/.github/workflows/ghcr.yml new file mode 100644 index 0000000000000..0fcd1b178163e --- /dev/null +++ b/.github/workflows/ghcr.yml @@ -0,0 +1,57 @@ +# Mirror DockerHub images used by the Rust project to ghcr.io. +# Images are available at https://github.com/orgs/rust-lang/packages. +# +# In some CI jobs, we pull images from ghcr.io instead of Docker Hub because +# Docker Hub has a rate limit, while ghcr.io doesn't. +# Those images are pushed to ghcr.io by this job. +# +# Note that authenticating to DockerHub or other registries isn't possible +# for PR jobs, because forks can't access secrets. +# That's why we use ghcr.io: it has no rate limit and it doesn't require authentication. + +name: GHCR + +on: + workflow_dispatch: + schedule: + # Run daily at midnight UTC + - cron: '0 0 * * *' + +jobs: + mirror: + name: DockerHub mirror + runs-on: ubuntu-24.04 + if: github.repository == 'rust-lang/rust' + permissions: + # Needed to write to the ghcr.io registry + packages: write + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Log in to registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + # Download crane in the current directory. + # We use crane because it copies the docker image for all the architectures available in + # DockerHub for the image. 
+ # Learn more about crane at + # https://github.com/google/go-containerregistry/blob/main/cmd/crane/README.md + - name: Download crane + run: | + curl -sL "https://github.com/google/go-containerregistry/releases/download/${VERSION}/go-containerregistry_${OS}_${ARCH}.tar.gz" | tar -xzf - + env: + VERSION: v0.20.2 + OS: Linux + ARCH: x86_64 + + - name: Mirror DockerHub + run: | + # DockerHub image we want to mirror + image="ubuntu:22.04" + + # Mirror image from DockerHub to ghcr.io + ./crane copy \ + docker.io/${image} \ + ghcr.io/${{ github.repository_owner }}/${image} From f1300c860e6ff4e024f0a347b87f94e36785ce49 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sun, 12 Jan 2025 21:36:07 +1100 Subject: [PATCH 103/142] coverage: Completely overhaul counter assignment, using node-flow graphs --- .../src/graph/iterate/mod.rs | 10 + .../src/coverage/counters.rs | 446 +++--------------- .../src/coverage/counters/balanced_flow.rs | 133 ++++++ .../src/coverage/counters/iter_nodes.rs | 16 + .../src/coverage/counters/node_flow.rs | 290 ++++++++++++ .../src/coverage/counters/node_flow/tests.rs | 64 +++ .../src/coverage/counters/tests.rs | 41 -- .../src/coverage/counters/union_find.rs | 116 +++++ .../src/coverage/counters/union_find/tests.rs | 32 ++ .../rustc_mir_transform/src/coverage/graph.rs | 174 +------ .../rustc_mir_transform/src/coverage/mod.rs | 43 +- tests/coverage/abort.cov-map | 61 ++- tests/coverage/assert-ne.cov-map | 10 +- tests/coverage/assert.cov-map | 45 +- tests/coverage/assert.coverage | 14 +- tests/coverage/assert_not.cov-map | 12 +- tests/coverage/async.cov-map | 47 +- tests/coverage/async_block.cov-map | 16 +- tests/coverage/async_closure.cov-map | 10 +- tests/coverage/await_ready.cov-map | 10 +- tests/coverage/branch/guard.cov-map | 30 +- tests/coverage/branch/if-let.cov-map | 22 +- tests/coverage/branch/if.cov-map | 186 ++++---- tests/coverage/branch/lazy-boolean.cov-map | 212 ++++----- tests/coverage/branch/let-else.cov-map | 22 +- tests/coverage/branch/match-arms.cov-map | 114 +++-- tests/coverage/branch/match-trivial.cov-map | 6 +- tests/coverage/branch/no-mir-spans.cov-map | 67 +-- tests/coverage/branch/while.cov-map | 120 +++-- tests/coverage/continue.cov-map | 141 +++--- tests/coverage/coroutine.cov-map | 29 +- tests/coverage/inline.cov-map | 40 +- tests/coverage/issue-84561.cov-map | 184 +++----- tests/coverage/loop-break.cov-map | 10 +- tests/coverage/loops_branches.cov-map | 130 ++--- tests/coverage/match_or_pattern.cov-map | 99 ++-- tests/coverage/mcdc/nested_if.cov-map | 205 ++++---- tests/coverage/nested_loops.cov-map | 75 ++- tests/coverage/overflow.cov-map | 45 +- tests/coverage/overflow.coverage | 14 +- tests/coverage/panic_unwind.cov-map | 45 +- tests/coverage/panic_unwind.coverage | 14 +- tests/coverage/simple_loop.cov-map | 10 +- tests/coverage/simple_match.cov-map | 38 +- tests/coverage/try_error_result.cov-map | 266 +++++------ tests/coverage/unicode.cov-map | 13 +- tests/coverage/unused.cov-map | 58 ++- tests/coverage/while.cov-map | 10 +- tests/coverage/while_early_ret.cov-map | 36 +- tests/coverage/yield.cov-map | 50 +- ...ment_coverage.main.InstrumentCoverage.diff | 10 +- 51 files changed, 1924 insertions(+), 1967 deletions(-) create mode 100644 compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs create mode 100644 compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs create mode 100644 compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs create mode 100644 
compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs delete mode 100644 compiler/rustc_mir_transform/src/coverage/counters/tests.rs create mode 100644 compiler/rustc_mir_transform/src/coverage/counters/union_find.rs create mode 100644 compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs diff --git a/compiler/rustc_data_structures/src/graph/iterate/mod.rs b/compiler/rustc_data_structures/src/graph/iterate/mod.rs index ecda7d3fba832..7b4573d7a84c7 100644 --- a/compiler/rustc_data_structures/src/graph/iterate/mod.rs +++ b/compiler/rustc_data_structures/src/graph/iterate/mod.rs @@ -125,6 +125,16 @@ where pub fn visited(&self, node: G::Node) -> bool { self.visited.contains(node) } + + /// Returns a reference to the set of nodes that have been visited, with + /// the same caveats as [`Self::visited`]. + /// + /// When incorporating the visited nodes into another bitset, using bulk + /// operations like `union` or `intersect` can be more efficient than + /// processing each node individually. + pub fn visited_set(&self) -> &DenseBitSet { + &self.visited + } } impl std::fmt::Debug for DepthFirstSearch diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index a9111a5ef9b4a..1a9323329f61f 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -1,18 +1,24 @@ use std::cmp::Ordering; use std::fmt::{self, Debug}; +use either::Either; +use itertools::Itertools; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::graph::DirectedGraph; use rustc_index::IndexVec; use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::coverage::{CounterId, CovTerm, Expression, ExpressionId, Op}; -use tracing::{debug, debug_span, instrument}; -use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, ReadyFirstTraversal}; +use crate::coverage::counters::balanced_flow::BalancedFlowGraph; +use crate::coverage::counters::iter_nodes::IterNodes; +use crate::coverage::counters::node_flow::{CounterTerm, MergedNodeFlowGraph, NodeCounters}; +use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; -#[cfg(test)] -mod tests; +mod balanced_flow; +mod iter_nodes; +mod node_flow; +mod union_find; /// The coverage counter or counter expression associated with a particular /// BCB node or BCB edge. @@ -48,10 +54,12 @@ struct BcbExpression { } /// Enum representing either a node or an edge in the coverage graph. +/// +/// FIXME(#135481): This enum is no longer needed now that we only instrument +/// nodes and not edges. It can be removed in a subsequent PR. #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(super) enum Site { Node { bcb: BasicCoverageBlock }, - Edge { from_bcb: BasicCoverageBlock, to_bcb: BasicCoverageBlock }, } /// Generates and stores coverage counter and coverage expression information @@ -79,10 +87,38 @@ impl CoverageCounters { graph: &CoverageGraph, bcb_needs_counter: &DenseBitSet, ) -> Self { - let mut builder = CountersBuilder::new(graph, bcb_needs_counter); - builder.make_bcb_counters(); + let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable); + let merged_graph = MergedNodeFlowGraph::for_balanced_graph(&balanced_graph); + + // A "reloop" node has exactly one out-edge, which jumps back to the top + // of an enclosing loop. 
Reloop nodes are typically visited more times + // than loop-exit nodes, so try to avoid giving them physical counters. + let is_reloop_node = IndexVec::from_fn_n( + |node| match graph.successors[node].as_slice() { + &[succ] => graph.dominates(succ, node), + _ => false, + }, + graph.num_nodes(), + ); - builder.into_coverage_counters() + let mut nodes = balanced_graph.iter_nodes().rev().collect::>(); + // The first node is the sink, which must not get a physical counter. + assert_eq!(nodes[0], balanced_graph.sink); + // Sort the real nodes, such that earlier (lesser) nodes take priority + // in being given a counter expression instead of a physical counter. + nodes[1..].sort_by(|&a, &b| { + // Start with a dummy `Equal` to make the actual tests line up nicely. + Ordering::Equal + // Prefer a physical counter for return/yield nodes. + .then_with(|| Ord::cmp(&graph[a].is_out_summable, &graph[b].is_out_summable)) + // Prefer an expression for reloop nodes (see definition above). + .then_with(|| Ord::cmp(&is_reloop_node[a], &is_reloop_node[b]).reverse()) + // Otherwise, prefer a physical counter for dominating nodes. + .then_with(|| graph.cmp_in_dominator_order(a, b).reverse()) + }); + let node_counters = merged_graph.make_node_counters(&nodes); + + Transcriber::new(graph.num_nodes(), node_counters).transcribe_counters(bcb_needs_counter) } fn with_num_bcbs(num_bcbs: usize) -> Self { @@ -182,321 +218,51 @@ impl CoverageCounters { } } -/// Symbolic representation of the coverage counter to be used for a particular -/// node or edge in the coverage graph. The same site counter can be used for -/// multiple sites, if they have been determined to have the same count. -#[derive(Clone, Copy, Debug)] -enum SiteCounter { - /// A physical counter at some node/edge. - Phys { site: Site }, - /// A counter expression for a node that takes the sum of all its in-edge - /// counters. - NodeSumExpr { bcb: BasicCoverageBlock }, - /// A counter expression for an edge that takes the counter of its source - /// node, and subtracts the counters of all its sibling out-edges. - EdgeDiffExpr { from_bcb: BasicCoverageBlock, to_bcb: BasicCoverageBlock }, -} - -/// Yields the graph successors of `from_bcb` that aren't `to_bcb`. This is -/// used when creating a counter expression for [`SiteCounter::EdgeDiffExpr`]. -/// -/// For example, in this diagram the sibling out-edge targets of edge `AC` are -/// the nodes `B` and `D`. -/// -/// ```text -/// A -/// / | \ -/// B C D -/// ``` -fn sibling_out_edge_targets( - graph: &CoverageGraph, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, -) -> impl Iterator + Captures<'_> { - graph.successors[from_bcb].iter().copied().filter(move |&t| t != to_bcb) -} - -/// Helper struct that allows counter creation to inspect the BCB graph, and -/// the set of nodes that need counters. -struct CountersBuilder<'a> { - graph: &'a CoverageGraph, - bcb_needs_counter: &'a DenseBitSet, - - site_counters: FxHashMap, -} - -impl<'a> CountersBuilder<'a> { - fn new( - graph: &'a CoverageGraph, - bcb_needs_counter: &'a DenseBitSet, - ) -> Self { - assert_eq!(graph.num_nodes(), bcb_needs_counter.domain_size()); - Self { graph, bcb_needs_counter, site_counters: FxHashMap::default() } - } - - fn make_bcb_counters(&mut self) { - debug!("make_bcb_counters(): adding a counter or expression to each BasicCoverageBlock"); - - // Traverse the coverage graph, ensuring that every node that needs a - // coverage counter has one. 
- for bcb in ReadyFirstTraversal::new(self.graph) { - let _span = debug_span!("traversal", ?bcb).entered(); - if self.bcb_needs_counter.contains(bcb) { - self.make_node_counter_and_out_edge_counters(bcb); - } - } - } - - /// Make sure the given node has a node counter, and then make sure each of - /// its out-edges has an edge counter (if appropriate). - #[instrument(level = "debug", skip(self))] - fn make_node_counter_and_out_edge_counters(&mut self, from_bcb: BasicCoverageBlock) { - // First, ensure that this node has a counter of some kind. - // We might also use that counter to compute one of the out-edge counters. - self.get_or_make_node_counter(from_bcb); - - // If this node's out-edges won't sum to the node's counter, - // then there's no reason to create edge counters here. - if !self.graph[from_bcb].is_out_summable { - return; - } - - // When choosing which out-edge should be given a counter expression, ignore edges that - // already have counters, or could use the existing counter of their target node. - let out_edge_has_counter = |to_bcb| { - if self.site_counters.contains_key(&Site::Edge { from_bcb, to_bcb }) { - return true; - } - self.graph.sole_predecessor(to_bcb) == Some(from_bcb) - && self.site_counters.contains_key(&Site::Node { bcb: to_bcb }) - }; - - // Determine the set of out-edges that could benefit from being given an expression. - let candidate_successors = self.graph.successors[from_bcb] - .iter() - .copied() - .filter(|&to_bcb| !out_edge_has_counter(to_bcb)) - .collect::>(); - debug!(?candidate_successors); - - // If there are out-edges without counters, choose one to be given an expression - // (computed from this node and the other out-edges) instead of a physical counter. - let Some(to_bcb) = self.choose_out_edge_for_expression(from_bcb, &candidate_successors) - else { - return; - }; - - // For each out-edge other than the one that was chosen to get an expression, - // ensure that it has a counter (existing counter/expression or a new counter). - for target in sibling_out_edge_targets(self.graph, from_bcb, to_bcb) { - self.get_or_make_edge_counter(from_bcb, target); - } - - // Now create an expression for the chosen edge, by taking the counter - // for its source node and subtracting the sum of its sibling out-edges. - let counter = SiteCounter::EdgeDiffExpr { from_bcb, to_bcb }; - self.site_counters.insert(Site::Edge { from_bcb, to_bcb }, counter); - } - - #[instrument(level = "debug", skip(self))] - fn get_or_make_node_counter(&mut self, bcb: BasicCoverageBlock) -> SiteCounter { - // If the BCB already has a counter, return it. - if let Some(&counter) = self.site_counters.get(&Site::Node { bcb }) { - debug!("{bcb:?} already has a counter: {counter:?}"); - return counter; - } - - let counter = self.make_node_counter_inner(bcb); - self.site_counters.insert(Site::Node { bcb }, counter); - counter - } - - fn make_node_counter_inner(&mut self, bcb: BasicCoverageBlock) -> SiteCounter { - // If the node's sole in-edge already has a counter, use that. - if let Some(sole_pred) = self.graph.sole_predecessor(bcb) - && let Some(&edge_counter) = - self.site_counters.get(&Site::Edge { from_bcb: sole_pred, to_bcb: bcb }) - { - return edge_counter; - } - - let predecessors = self.graph.predecessors[bcb].as_slice(); - - // Handle cases where we can't compute a node's count from its in-edges: - // - START_BCB has no in-edges, so taking the sum would panic (or be wrong). 
- // - For nodes with one in-edge, or that directly loop to themselves, - // trying to get the in-edge counts would require this node's counter, - // leading to infinite recursion. - if predecessors.len() <= 1 || predecessors.contains(&bcb) { - debug!(?bcb, ?predecessors, "node has <=1 predecessors or is its own predecessor"); - let counter = SiteCounter::Phys { site: Site::Node { bcb } }; - debug!(?bcb, ?counter, "node gets a physical counter"); - return counter; - } - - // A BCB with multiple incoming edges can compute its count by ensuring that counters - // exist for each of those edges, and then adding them up to get a total count. - for &from_bcb in predecessors { - self.get_or_make_edge_counter(from_bcb, bcb); - } - let sum_of_in_edges = SiteCounter::NodeSumExpr { bcb }; - - debug!("{bcb:?} gets a new counter (sum of predecessor counters): {sum_of_in_edges:?}"); - sum_of_in_edges - } - - #[instrument(level = "debug", skip(self))] - fn get_or_make_edge_counter( - &mut self, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, - ) -> SiteCounter { - // If the edge already has a counter, return it. - if let Some(&counter) = self.site_counters.get(&Site::Edge { from_bcb, to_bcb }) { - debug!("Edge {from_bcb:?}->{to_bcb:?} already has a counter: {counter:?}"); - return counter; - } - - let counter = self.make_edge_counter_inner(from_bcb, to_bcb); - self.site_counters.insert(Site::Edge { from_bcb, to_bcb }, counter); - counter - } - - fn make_edge_counter_inner( - &mut self, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, - ) -> SiteCounter { - // If the target node has exactly one in-edge (i.e. this one), then just - // use the node's counter, since it will have the same value. - if let Some(sole_pred) = self.graph.sole_predecessor(to_bcb) { - assert_eq!(sole_pred, from_bcb); - // This call must take care not to invoke `get_or_make_edge` for - // this edge, since that would result in infinite recursion! - return self.get_or_make_node_counter(to_bcb); - } - - // If the source node has exactly one out-edge (i.e. this one) and would have - // the same execution count as that edge, then just use the node's counter. - if let Some(simple_succ) = self.graph.simple_successor(from_bcb) { - assert_eq!(simple_succ, to_bcb); - return self.get_or_make_node_counter(from_bcb); - } - - // Make a new counter to count this edge. - let counter = SiteCounter::Phys { site: Site::Edge { from_bcb, to_bcb } }; - debug!(?from_bcb, ?to_bcb, ?counter, "edge gets a physical counter"); - counter - } - - /// Given a set of candidate out-edges (represented by their successor node), - /// choose one to be given a counter expression instead of a physical counter. - fn choose_out_edge_for_expression( - &self, - from_bcb: BasicCoverageBlock, - candidate_successors: &[BasicCoverageBlock], - ) -> Option { - // Try to find a candidate that leads back to the top of a loop, - // because reloop edges tend to be executed more times than loop-exit edges. - if let Some(reloop_target) = self.find_good_reloop_edge(from_bcb, &candidate_successors) { - debug!("Selecting reloop target {reloop_target:?} to get an expression"); - return Some(reloop_target); - } - - // We couldn't identify a "good" edge, so just choose an arbitrary one. 
- let arbitrary_target = candidate_successors.first().copied()?; - debug!(?arbitrary_target, "selecting arbitrary out-edge to get an expression"); - Some(arbitrary_target) - } - - /// Given a set of candidate out-edges (represented by their successor node), - /// tries to find one that leads back to the top of a loop. - /// - /// Reloop edges are good candidates for counter expressions, because they - /// will tend to be executed more times than a loop-exit edge, so it's nice - /// for them to be able to avoid a physical counter increment. - fn find_good_reloop_edge( - &self, - from_bcb: BasicCoverageBlock, - candidate_successors: &[BasicCoverageBlock], - ) -> Option { - // If there are no candidates, avoid iterating over the loop stack. - if candidate_successors.is_empty() { - return None; - } - - // Consider each loop on the current traversal context stack, top-down. - for loop_header_node in self.graph.loop_headers_containing(from_bcb) { - // Try to find a candidate edge that doesn't exit this loop. - for &target_bcb in candidate_successors { - // An edge is a reloop edge if its target dominates any BCB that has - // an edge back to the loop header. (Otherwise it's an exit edge.) - let is_reloop_edge = self - .graph - .reloop_predecessors(loop_header_node) - .any(|reloop_bcb| self.graph.dominates(target_bcb, reloop_bcb)); - if is_reloop_edge { - // We found a good out-edge to be given an expression. - return Some(target_bcb); - } - } - - // All of the candidate edges exit this loop, so keep looking - // for a good reloop edge for one of the outer loops. - } - - None - } - - fn into_coverage_counters(self) -> CoverageCounters { - Transcriber::new(&self).transcribe_counters() - } -} - -/// Helper struct for converting `CountersBuilder` into a final `CoverageCounters`. -struct Transcriber<'a> { - old: &'a CountersBuilder<'a>, +struct Transcriber { + old: NodeCounters, new: CoverageCounters, phys_counter_for_site: FxHashMap, } -impl<'a> Transcriber<'a> { - fn new(old: &'a CountersBuilder<'a>) -> Self { +impl Transcriber { + fn new(num_nodes: usize, old: NodeCounters) -> Self { Self { old, - new: CoverageCounters::with_num_bcbs(old.graph.num_nodes()), + new: CoverageCounters::with_num_bcbs(num_nodes), phys_counter_for_site: FxHashMap::default(), } } - fn transcribe_counters(mut self) -> CoverageCounters { - for bcb in self.old.bcb_needs_counter.iter() { + fn transcribe_counters( + mut self, + bcb_needs_counter: &DenseBitSet, + ) -> CoverageCounters { + for bcb in bcb_needs_counter.iter() { let site = Site::Node { bcb }; - let site_counter = self.site_counter(site); - - // Resolve the site counter into flat lists of nodes/edges whose - // physical counts contribute to the counter for this node. - // Distinguish between counts that will be added vs subtracted. - let mut pos = vec![]; - let mut neg = vec![]; - self.push_resolved_sites(site_counter, &mut pos, &mut neg); - - // Simplify by cancelling out sites that appear on both sides. - let (mut pos, mut neg) = sort_and_cancel(pos, neg); + let (mut pos, mut neg): (Vec<_>, Vec<_>) = + self.old.counter_expr(bcb).iter().partition_map( + |&CounterTerm { node, op }| match op { + Op::Add => Either::Left(node), + Op::Subtract => Either::Right(node), + }, + ); if pos.is_empty() { - // If we somehow end up with no positive terms after cancellation, - // fall back to creating a physical counter. There's no known way - // for this to happen, but it's hard to confidently rule it out. 
+ // If we somehow end up with no positive terms, fall back to + // creating a physical counter. There's no known way for this + // to happen, but we can avoid an ICE if it does. debug_assert!(false, "{site:?} has no positive counter terms"); - pos = vec![Some(site)]; + pos = vec![bcb]; neg = vec![]; } - let mut new_counters_for_sites = |sites: Vec>| { + pos.sort(); + neg.sort(); + + let mut new_counters_for_sites = |sites: Vec| { sites .into_iter() - .filter_map(|id| try { self.ensure_phys_counter(id?) }) + .map(|node| self.ensure_phys_counter(Site::Node { bcb: node })) .collect::>() }; let mut pos = new_counters_for_sites(pos); @@ -513,79 +279,7 @@ impl<'a> Transcriber<'a> { self.new } - fn site_counter(&self, site: Site) -> SiteCounter { - self.old.site_counters.get(&site).copied().unwrap_or_else(|| { - // We should have already created all necessary site counters. - // But if we somehow didn't, avoid crashing in release builds, - // and just use an extra physical counter instead. - debug_assert!(false, "{site:?} should have a counter"); - SiteCounter::Phys { site } - }) - } - fn ensure_phys_counter(&mut self, site: Site) -> BcbCounter { *self.phys_counter_for_site.entry(site).or_insert_with(|| self.new.make_phys_counter(site)) } - - /// Resolves the given counter into flat lists of nodes/edges, whose counters - /// will then be added and subtracted to form a counter expression. - fn push_resolved_sites(&self, counter: SiteCounter, pos: &mut Vec, neg: &mut Vec) { - match counter { - SiteCounter::Phys { site } => pos.push(site), - SiteCounter::NodeSumExpr { bcb } => { - for &from_bcb in &self.old.graph.predecessors[bcb] { - let edge_counter = self.site_counter(Site::Edge { from_bcb, to_bcb: bcb }); - self.push_resolved_sites(edge_counter, pos, neg); - } - } - SiteCounter::EdgeDiffExpr { from_bcb, to_bcb } => { - // First, add the count for `from_bcb`. - let node_counter = self.site_counter(Site::Node { bcb: from_bcb }); - self.push_resolved_sites(node_counter, pos, neg); - - // Then subtract the counts for the other out-edges. - for target in sibling_out_edge_targets(self.old.graph, from_bcb, to_bcb) { - let edge_counter = self.site_counter(Site::Edge { from_bcb, to_bcb: target }); - // Swap `neg` and `pos` so that the counter is subtracted. - self.push_resolved_sites(edge_counter, neg, pos); - } - } - } - } -} - -/// Given two lists: -/// - Sorts each list. -/// - Converts each list to `Vec>`. -/// - Scans for values that appear in both lists, and cancels them out by -/// replacing matching pairs of values with `None`. -fn sort_and_cancel(mut pos: Vec, mut neg: Vec) -> (Vec>, Vec>) { - pos.sort(); - neg.sort(); - - // Convert to `Vec>`. If `T` has a niche, this should be zero-cost. - let mut pos = pos.into_iter().map(Some).collect::>(); - let mut neg = neg.into_iter().map(Some).collect::>(); - - // Scan through the lists using two cursors. When either cursor reaches the - // end of its list, there can be no more equal pairs, so stop. - let mut p = 0; - let mut n = 0; - while p < pos.len() && n < neg.len() { - // If the values are equal, remove them and advance both cursors. - // Otherwise, advance whichever cursor points to the lesser value. - // (Choosing which cursor to advance relies on both lists being sorted.) 
- match pos[p].cmp(&neg[n]) { - Ordering::Less => p += 1, - Ordering::Equal => { - pos[p] = None; - neg[n] = None; - p += 1; - n += 1; - } - Ordering::Greater => n += 1, - } - } - - (pos, neg) } diff --git a/compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs new file mode 100644 index 0000000000000..c108f96a564cf --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/balanced_flow.rs @@ -0,0 +1,133 @@ +//! A control-flow graph can be said to have “balanced flow” if the flow +//! (execution count) of each node is equal to the sum of its in-edge flows, +//! and also equal to the sum of its out-edge flows. +//! +//! Control-flow graphs typically have one or more nodes that don't satisfy the +//! balanced-flow property, e.g.: +//! - The start node has out-edges, but no in-edges. +//! - Return nodes have in-edges, but no out-edges. +//! - `Yield` nodes can have an out-flow that is less than their in-flow. +//! - Inescapable loops cause the in-flow/out-flow relationship to break down. +//! +//! Balanced-flow graphs are nevertheless useful for analysis, so this module +//! provides a wrapper type ([`BalancedFlowGraph`]) that imposes balanced flow +//! on an underlying graph. This is done by non-destructively adding synthetic +//! nodes and edges as necessary. + +use rustc_data_structures::graph; +use rustc_data_structures::graph::iterate::DepthFirstSearch; +use rustc_data_structures::graph::reversed::ReversedGraph; +use rustc_index::Idx; +use rustc_index::bit_set::DenseBitSet; + +use crate::coverage::counters::iter_nodes::IterNodes; + +/// A view of an underlying graph that has been augmented to have “balanced flow”. +/// This means that the flow (execution count) of each node is equal to the +/// sum of its in-edge flows, and also equal to the sum of its out-edge flows. +/// +/// To achieve this, a synthetic "sink" node is non-destructively added to the +/// graph, with synthetic in-edges from these nodes: +/// - Any node that has no out-edges. +/// - Any node that explicitly requires a sink edge, as indicated by a +/// caller-supplied `force_sink_edge` function. +/// - Any node that would otherwise be unable to reach the sink, because it is +/// part of an inescapable loop. +/// +/// To make the graph fully balanced, there is also a synthetic edge from the +/// sink node back to the start node. +/// +/// --- +/// The benefit of having a balanced-flow graph is that it can be subsequently +/// transformed in ways that are guaranteed to preserve balanced flow +/// (e.g. merging nodes together), which is useful for discovering relationships +/// between the node flows of different nodes in the graph. +pub(crate) struct BalancedFlowGraph { + graph: G, + sink_edge_nodes: DenseBitSet, + pub(crate) sink: G::Node, +} + +impl BalancedFlowGraph { + /// Creates a balanced view of an underlying graph, by adding a synthetic + /// sink node that has in-edges from nodes that need or request such an edge, + /// and a single out-edge to the start node. + /// + /// Assumes that all nodes in the underlying graph are reachable from the + /// start node. 
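// A self-contained sketch (plain `Vec`s instead of the rustc graph traits) of
// the first step of `for_graph` below: any node with no out-edges, plus any
// node the caller forces, is marked as needing a synthetic edge to the sink.
// The reverse-DFS step that also catches inescapable loops is omitted here.
fn nodes_needing_sink_edge(
    successors: &[Vec<usize>],
    force_sink_edge: impl Fn(usize) -> bool,
) -> Vec<usize> {
    (0..successors.len())
        .filter(|&node| force_sink_edge(node) || successors[node].is_empty())
        .collect()
}
// For a graph 0 -> 1 -> 2 where node 2 has no successors (e.g. a return node),
// only node 2 gets a sink edge; the sink then gets one edge back to node 0,
// which is what makes the augmented graph fully balanced.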
+ pub(crate) fn for_graph(graph: G, force_sink_edge: impl Fn(G::Node) -> bool) -> Self + where + G: graph::ControlFlowGraph, + { + let mut sink_edge_nodes = DenseBitSet::new_empty(graph.num_nodes()); + let mut dfs = DepthFirstSearch::new(ReversedGraph::new(&graph)); + + // First, determine the set of nodes that explicitly request or require + // an out-edge to the sink. + for node in graph.iter_nodes() { + if force_sink_edge(node) || graph.successors(node).next().is_none() { + sink_edge_nodes.insert(node); + dfs.push_start_node(node); + } + } + + // Next, find all nodes that are currently not reverse-reachable from + // `sink_edge_nodes`, and add them to the set as well. + dfs.complete_search(); + sink_edge_nodes.union_not(dfs.visited_set()); + + // The sink node is 1 higher than the highest real node. + let sink = G::Node::new(graph.num_nodes()); + + BalancedFlowGraph { graph, sink_edge_nodes, sink } + } +} + +impl graph::DirectedGraph for BalancedFlowGraph +where + G: graph::DirectedGraph, +{ + type Node = G::Node; + + /// Returns the number of nodes in this balanced-flow graph, which is 1 + /// more than the number of nodes in the underlying graph, to account for + /// the synthetic sink node. + fn num_nodes(&self) -> usize { + // The sink node's index is already the size of the underlying graph, + // so just add 1 to that instead. + self.sink.index() + 1 + } +} + +impl graph::StartNode for BalancedFlowGraph +where + G: graph::StartNode, +{ + fn start_node(&self) -> Self::Node { + self.graph.start_node() + } +} + +impl graph::Successors for BalancedFlowGraph +where + G: graph::StartNode + graph::Successors, +{ + fn successors(&self, node: Self::Node) -> impl Iterator { + let real_edges; + let sink_edge; + + if node == self.sink { + // The sink node has no real out-edges, and one synthetic out-edge + // to the start node. + real_edges = None; + sink_edge = Some(self.graph.start_node()); + } else { + // Real nodes have their real out-edges, and possibly one synthetic + // out-edge to the sink node. + real_edges = Some(self.graph.successors(node)); + sink_edge = self.sink_edge_nodes.contains(node).then_some(self.sink); + } + + real_edges.into_iter().flatten().chain(sink_edge) + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs b/compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs new file mode 100644 index 0000000000000..9d87f7af1b04e --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/iter_nodes.rs @@ -0,0 +1,16 @@ +use rustc_data_structures::graph; +use rustc_index::Idx; + +pub(crate) trait IterNodes: graph::DirectedGraph { + /// Iterates over all nodes of a graph in ascending numeric order. + /// Assumes that nodes are densely numbered, i.e. every index in + /// `0..num_nodes` is a valid node. + /// + /// FIXME: Can this just be part of [`graph::DirectedGraph`]? + fn iter_nodes( + &self, + ) -> impl Iterator + DoubleEndedIterator + ExactSizeIterator { + (0..self.num_nodes()).map(::new) + } +} +impl IterNodes for G {} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs new file mode 100644 index 0000000000000..5e5d66249597d --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs @@ -0,0 +1,290 @@ +//! For each node in a control-flow graph, determines whether that node should +//! have a physical counter, or a counter expression that is derived from the +//! physical counters of other nodes. +//! +//! 
Based on the algorithm given in +//! "Optimal measurement points for program frequency counts" +//! (Knuth & Stevenson, 1973). + +use rustc_data_structures::graph; +use rustc_index::bit_set::DenseBitSet; +use rustc_index::{Idx, IndexVec}; +use rustc_middle::mir::coverage::Op; +use smallvec::SmallVec; + +use crate::coverage::counters::iter_nodes::IterNodes; +use crate::coverage::counters::union_find::{FrozenUnionFind, UnionFind}; + +#[cfg(test)] +mod tests; + +/// View of some underlying graph, in which each node's successors have been +/// merged into a single "supernode". +/// +/// The resulting supernodes have no obvious meaning on their own. +/// However, merging successor nodes means that a node's out-edges can all +/// be combined into a single out-edge, whose flow is the same as the flow +/// (execution count) of its corresponding node in the original graph. +/// +/// With all node flows now in the original graph now represented as edge flows +/// in the merged graph, it becomes possible to analyze the original node flows +/// using techniques for analyzing edge flows. +#[derive(Debug)] +pub(crate) struct MergedNodeFlowGraph { + /// Maps each node to the supernode that contains it, indicated by some + /// arbitrary "root" node that is part of that supernode. + supernodes: FrozenUnionFind, + /// For each node, stores the single supernode that all of its successors + /// have been merged into. + /// + /// (Note that each node in a supernode can potentially have a _different_ + /// successor supernode from its peers.) + succ_supernodes: IndexVec, +} + +impl MergedNodeFlowGraph { + /// Creates a "merged" view of an underlying graph. + /// + /// The given graph is assumed to have [“balanced flow”](balanced-flow), + /// though it does not necessarily have to be a `BalancedFlowGraph`. + /// + /// [balanced-flow]: `crate::coverage::counters::balanced_flow::BalancedFlowGraph`. + pub(crate) fn for_balanced_graph(graph: G) -> Self + where + G: graph::DirectedGraph + graph::Successors, + { + let mut supernodes = UnionFind::::new(graph.num_nodes()); + + // For each node, merge its successors into a single supernode, and + // arbitrarily choose one of those successors to represent all of them. + let successors = graph + .iter_nodes() + .map(|node| { + graph + .successors(node) + .reduce(|a, b| supernodes.unify(a, b)) + .expect("each node in a balanced graph must have at least one out-edge") + }) + .collect::>(); + + // Now that unification is complete, freeze the supernode forest, + // and resolve each arbitrarily-chosen successor to its canonical root. + // (This avoids having to explicitly resolve them later.) + let supernodes = supernodes.freeze(); + let succ_supernodes = successors.into_iter().map(|succ| supernodes.find(succ)).collect(); + + Self { supernodes, succ_supernodes } + } + + fn num_nodes(&self) -> usize { + self.succ_supernodes.len() + } + + fn is_supernode(&self, node: Node) -> bool { + self.supernodes.find(node) == node + } + + /// Using the information in this merged graph, together with a given + /// permutation of all nodes in the graph, to create physical counters and + /// counter expressions for each node in the underlying graph. + /// + /// The given list must contain exactly one copy of each node in the + /// underlying balanced-flow graph. The order of nodes is used as a hint to + /// influence counter allocation: + /// - Earlier nodes are more likely to receive counter expressions. + /// - Later nodes are more likely to receive physical counters. 
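// A standalone sketch of the successor-merging step in `for_balanced_graph`
// above, using the `UnionFind` type added by this patch but plain `Vec`s in
// place of the rustc graph traits: every successor of a node is unified into
// one supernode, and the (arbitrary) result of `reduce` records which
// supernode that is.
fn merge_successors(successors: &[Vec<usize>]) -> (UnionFind<usize>, Vec<usize>) {
    let mut supernodes = UnionFind::new(successors.len());
    let succ_reprs = successors
        .iter()
        .map(|succs| {
            succs
                .iter()
                .copied()
                .reduce(|a, b| supernodes.unify(a, b))
                .expect("balanced graphs have at least one out-edge per node")
        })
        .collect();
    (supernodes, succ_reprs)
}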
+ pub(crate) fn make_node_counters(&self, all_nodes_permutation: &[Node]) -> NodeCounters { + let mut builder = SpantreeBuilder::new(self); + + for &node in all_nodes_permutation { + builder.visit_node(node); + } + + NodeCounters { counter_exprs: builder.finish() } + } +} + +/// End result of allocating physical counters and counter expressions for the +/// nodes of a graph. +#[derive(Debug)] +pub(crate) struct NodeCounters { + counter_exprs: IndexVec>, +} + +impl NodeCounters { + /// For the given node, returns the finished list of terms that represent + /// its physical counter or counter expression. Always non-empty. + /// + /// If a node was given a physical counter, its "expression" will contain + /// that counter as its sole element. + pub(crate) fn counter_expr(&self, this: Node) -> &[CounterTerm] { + self.counter_exprs[this].as_slice() + } +} + +#[derive(Debug)] +struct SpantreeEdge { + /// If true, this edge in the spantree has been reversed an odd number of + /// times, so all physical counters added to its node's counter expression + /// need to be negated. + is_reversed: bool, + /// Each spantree edge is "claimed" by the (regular) node that caused it to + /// be created. When a node with a physical counter traverses this edge, + /// that counter is added to the claiming node's counter expression. + claiming_node: Node, + /// Supernode at the other end of this spantree edge. Transitively points + /// to the "root" of this supernode's spantree component. + span_parent: Node, +} + +/// Part of a node's counter expression, which is a sum of counter terms. +#[derive(Debug)] +pub(crate) struct CounterTerm { + /// Whether to add or subtract the value of the node's physical counter. + pub(crate) op: Op, + /// The node whose physical counter is represented by this term. + pub(crate) node: Node, +} + +/// Stores the list of counter terms that make up a node's counter expression. +type CounterExprVec = SmallVec<[CounterTerm; 2]>; + +#[derive(Debug)] +struct SpantreeBuilder<'a, Node: Idx> { + graph: &'a MergedNodeFlowGraph, + is_unvisited: DenseBitSet, + /// Links supernodes to each other, gradually forming a spanning tree of + /// the merged-flow graph. + /// + /// A supernode without a span edge is the root of its component of the + /// spantree. Nodes that aren't supernodes cannot have a spantree edge. + span_edges: IndexVec>>, + /// An in-progress counter expression for each node. Each expression is + /// initially empty, and will be filled in as relevant nodes are visited. + counter_exprs: IndexVec>, +} + +impl<'a, Node: Idx> SpantreeBuilder<'a, Node> { + fn new(graph: &'a MergedNodeFlowGraph) -> Self { + let num_nodes = graph.num_nodes(); + Self { + graph, + is_unvisited: DenseBitSet::new_filled(num_nodes), + span_edges: IndexVec::from_fn_n(|_| None, num_nodes), + counter_exprs: IndexVec::from_fn_n(|_| SmallVec::new(), num_nodes), + } + } + + /// Given a supernode, finds the supernode that is the "root" of its + /// spantree component. Two nodes that have the same spantree root are + /// connected in the spantree. + fn spantree_root(&self, this: Node) -> Node { + debug_assert!(self.graph.is_supernode(this)); + + match self.span_edges[this] { + None => this, + Some(SpantreeEdge { span_parent, .. }) => self.spantree_root(span_parent), + } + } + + /// Rotates edges in the spantree so that `this` is the root of its + /// spantree component. 
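// A sketch of how the expressions returned by `counter_expr` above can be
// interpreted once physical counter values are known: each term adds or
// subtracts one node's physical count. This assumes `CounterTerm` is generic
// over its node index type (as its `node: Node` field suggests); the `u32`
// node IDs and the `physical_counts` slice are illustrative stand-ins only.
fn evaluate_expr(expr: &[CounterTerm<u32>], physical_counts: &[u64]) -> i64 {
    expr.iter().fold(0i64, |acc, term| {
        let count = physical_counts[term.node as usize] as i64;
        match term.op {
            Op::Add => acc + count,
            Op::Subtract => acc - count,
        }
    })
}
// E.g. the expression "+c0 +c2 -c4" from the unit tests evaluates to
// physical_counts[0] + physical_counts[2] - physical_counts[4].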
+ fn yank_to_spantree_root(&mut self, this: Node) { + debug_assert!(self.graph.is_supernode(this)); + + // Temporarily remove this supernode (any any spantree-children) from its + // spantree component, by disconnecting the edge to its spantree-parent. + let Some(SpantreeEdge { is_reversed, claiming_node, span_parent }) = + self.span_edges[this].take() + else { + // This supernode has no spantree-parent edge, so it is already the + // root of its spantree component. + return; + }; + + // Recursively make our immediate spantree-parent the root of what's + // left of its component, so that only one more edge rotation is needed. + self.yank_to_spantree_root(span_parent); + + // Recreate the removed edge, but in the opposite direction. + // Now `this` is the root of its spantree component. + self.span_edges[span_parent] = + Some(SpantreeEdge { is_reversed: !is_reversed, claiming_node, span_parent: this }); + } + + /// Must be called exactly once for each node in the balanced-flow graph. + fn visit_node(&mut self, this: Node) { + // Assert that this node was unvisited, and mark it visited. + assert!(self.is_unvisited.remove(this), "node has already been visited: {this:?}"); + + // Get the supernode containing `this`, and make it the root of its + // component of the spantree. + let this_supernode = self.graph.supernodes.find(this); + self.yank_to_spantree_root(this_supernode); + + // Get the supernode containing all of this's successors. + let succ_supernode = self.graph.succ_supernodes[this]; + debug_assert!(self.graph.is_supernode(succ_supernode)); + + // If two supernodes are already connected in the spantree, they will + // have the same spantree root. (Each supernode is connected to itself.) + if this_supernode != self.spantree_root(succ_supernode) { + // Adding this node's flow edge to the spantree would cause two + // previously-disconnected supernodes to become connected, so add + // it. That spantree-edge is now "claimed" by this node. + // + // Claiming a spantree-edge means that this node will get a counter + // expression instead of a physical counter. That expression is + // currently empty, but will be built incrementally as the other + // nodes are visited. + self.span_edges[this_supernode] = Some(SpantreeEdge { + is_reversed: false, + claiming_node: this, + span_parent: succ_supernode, + }); + } else { + // This node's flow edge would join two supernodes that are already + // connected in the spantree (or are the same supernode). That would + // create a cycle in the spantree, so don't add an edge. + // + // Instead, create a physical counter for this node, and add that + // counter to all expressions on the path from `succ_supernode` to + // `this_supernode`. + + // Instead of setting `this.measure = true` as in the original paper, + // we just add the node's ID to its own "expression". + self.counter_exprs[this].push(CounterTerm { node: this, op: Op::Add }); + + // Walk the spantree from `this.successor` back to `this`. For each + // spantree edge along the way, add this node's physical counter to + // the counter expression of the node that claimed the spantree edge. 
+ let mut curr = succ_supernode; + while curr != this_supernode { + let &SpantreeEdge { is_reversed, claiming_node, span_parent } = + self.span_edges[curr].as_ref().unwrap(); + let op = if is_reversed { Op::Subtract } else { Op::Add }; + self.counter_exprs[claiming_node].push(CounterTerm { node: this, op }); + + curr = span_parent; + } + } + } + + /// Asserts that all nodes have been visited, and returns the computed + /// counter expressions (made up of physical counters) for each node. + fn finish(self) -> IndexVec> { + let Self { graph, is_unvisited, span_edges, counter_exprs } = self; + assert!(is_unvisited.is_empty(), "some nodes were never visited: {is_unvisited:?}"); + debug_assert!( + span_edges + .iter_enumerated() + .all(|(node, span_edge)| { span_edge.is_some() <= graph.is_supernode(node) }), + "only supernodes can have a span edge", + ); + debug_assert!( + counter_exprs.iter().all(|expr| !expr.is_empty()), + "after visiting all nodes, every node should have a non-empty expression", + ); + counter_exprs + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs new file mode 100644 index 0000000000000..9e7f754523d3d --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow/tests.rs @@ -0,0 +1,64 @@ +use itertools::Itertools; +use rustc_data_structures::graph; +use rustc_data_structures::graph::vec_graph::VecGraph; +use rustc_index::Idx; +use rustc_middle::mir::coverage::Op; + +use super::{CounterTerm, MergedNodeFlowGraph, NodeCounters}; + +fn merged_node_flow_graph(graph: G) -> MergedNodeFlowGraph { + MergedNodeFlowGraph::for_balanced_graph(graph) +} + +fn make_graph(num_nodes: usize, edge_pairs: Vec<(Node, Node)>) -> VecGraph { + VecGraph::new(num_nodes, edge_pairs) +} + +/// Example used in "Optimal Measurement Points for Program Frequency Counts" +/// (Knuth & Stevenson, 1973), but with 0-based node IDs. 
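// A worked check of one expected expression from the test below, using flow
// conservation on that example graph (writing e_xy for the flow on edge x->y
// and n_i for node i's execution count):
//   n0 = e01 + e03,  n2 = e21 + e24,  n4 = e24 + e34,
//   and node 3's in/out balance forces e03 = e34, so
//   n0 + n2 - n4 = e01 + e21 = n1,
// which is exactly the reported expression "[1]: +c0 +c2 -c4".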
+#[test] +fn example_driver() { + let graph = make_graph::(5, vec![ + (0, 1), + (0, 3), + (1, 0), + (1, 2), + (2, 1), + (2, 4), + (3, 3), + (3, 4), + (4, 0), + ]); + + let merged = merged_node_flow_graph(&graph); + let counters = merged.make_node_counters(&[3, 1, 2, 0, 4]); + + assert_eq!(format_counter_expressions(&counters), &[ + // (comment to force vertical formatting for clarity) + "[0]: +c0", + "[1]: +c0 +c2 -c4", + "[2]: +c2", + "[3]: +c3", + "[4]: +c4", + ]); +} + +fn format_counter_expressions(counters: &NodeCounters) -> Vec { + let format_item = |&CounterTerm { node, op }| { + let op = match op { + Op::Subtract => '-', + Op::Add => '+', + }; + format!("{op}c{node:?}") + }; + + counters + .counter_exprs + .indices() + .map(|node| { + let mut expr = counters.counter_expr(node).iter().collect::>(); + expr.sort_by_key(|item| item.node.index()); + format!("[{node:?}]: {}", expr.into_iter().map(format_item).join(" ")) + }) + .collect() +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/tests.rs b/compiler/rustc_mir_transform/src/coverage/counters/tests.rs deleted file mode 100644 index 794d4358f8237..0000000000000 --- a/compiler/rustc_mir_transform/src/coverage/counters/tests.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::fmt::Debug; - -use super::sort_and_cancel; - -fn flatten(input: Vec>) -> Vec { - input.into_iter().flatten().collect() -} - -fn sort_and_cancel_and_flatten(pos: Vec, neg: Vec) -> (Vec, Vec) { - let (pos_actual, neg_actual) = sort_and_cancel(pos, neg); - (flatten(pos_actual), flatten(neg_actual)) -} - -#[track_caller] -fn check_test_case( - pos: Vec, - neg: Vec, - pos_expected: Vec, - neg_expected: Vec, -) { - eprintln!("pos = {pos:?}; neg = {neg:?}"); - let output = sort_and_cancel_and_flatten(pos, neg); - assert_eq!(output, (pos_expected, neg_expected)); -} - -#[test] -fn cancellation() { - let cases: &[(Vec, Vec, Vec, Vec)] = &[ - (vec![], vec![], vec![], vec![]), - (vec![4, 2, 1, 5, 3], vec![], vec![1, 2, 3, 4, 5], vec![]), - (vec![5, 5, 5, 5, 5], vec![5], vec![5, 5, 5, 5], vec![]), - (vec![1, 1, 2, 2, 3, 3], vec![1, 2, 3], vec![1, 2, 3], vec![]), - (vec![1, 1, 2, 2, 3, 3], vec![2, 4, 2], vec![1, 1, 3, 3], vec![4]), - ]; - - for (pos, neg, pos_expected, neg_expected) in cases { - check_test_case(pos.to_vec(), neg.to_vec(), pos_expected.to_vec(), neg_expected.to_vec()); - // Same test case, but with its inputs flipped and its outputs flipped. - check_test_case(neg.to_vec(), pos.to_vec(), neg_expected.to_vec(), pos_expected.to_vec()); - } -} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs b/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs new file mode 100644 index 0000000000000..2da4f5f5fce11 --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/union_find.rs @@ -0,0 +1,116 @@ +use std::cmp::Ordering; +use std::mem; + +use rustc_index::{Idx, IndexVec}; + +#[cfg(test)] +mod tests; + +/// Simple implementation of a union-find data structure, i.e. a disjoint-set +/// forest. +#[derive(Debug)] +pub(crate) struct UnionFind { + table: IndexVec>, +} + +#[derive(Debug)] +struct UnionFindEntry { + /// Transitively points towards the "root" of the set containing this key. + /// + /// Invariant: A root key is its own parent. + parent: Key, + /// When merging two "root" keys, their ranks determine which key becomes + /// the new root, to prevent the parent tree from becoming unnecessarily + /// tall. See [`UnionFind::unify`] for details. 
+ rank: u32, +} + +impl UnionFind { + /// Creates a new disjoint-set forest containing the keys `0..num_keys`. + /// Initially, every key is part of its own one-element set. + pub(crate) fn new(num_keys: usize) -> Self { + // Initially, every key is the root of its own set, so its parent is itself. + Self { table: IndexVec::from_fn_n(|key| UnionFindEntry { parent: key, rank: 0 }, num_keys) } + } + + /// Returns the "root" key of the disjoint-set containing the given key. + /// If two keys have the same root, they belong to the same set. + /// + /// Also updates internal data structures to make subsequent `find` + /// operations faster. + pub(crate) fn find(&mut self, key: Key) -> Key { + // Loop until we find a key that is its own parent. + let mut curr = key; + while let parent = self.table[curr].parent + && curr != parent + { + // Perform "path compression" by peeking one layer ahead, and + // setting the current key's parent to that value. + // (This works even when `parent` is the root of its set, because + // of the invariant that a root is its own parent.) + let parent_parent = self.table[parent].parent; + self.table[curr].parent = parent_parent; + + // Advance by one step and continue. + curr = parent; + } + curr + } + + /// Merges the set containing `a` and the set containing `b` into one set. + /// + /// Returns the common root of both keys, after the merge. + pub(crate) fn unify(&mut self, a: Key, b: Key) -> Key { + let mut a = self.find(a); + let mut b = self.find(b); + + // If both keys have the same root, they're already in the same set, + // so there's nothing more to do. + if a == b { + return a; + }; + + // Ensure that `a` has strictly greater rank, swapping if necessary. + // If both keys have the same rank, increment the rank of `a` so that + // future unifications will also prefer `a`, leading to flatter trees. + match Ord::cmp(&self.table[a].rank, &self.table[b].rank) { + Ordering::Less => mem::swap(&mut a, &mut b), + Ordering::Equal => self.table[a].rank += 1, + Ordering::Greater => {} + } + + debug_assert!(self.table[a].rank > self.table[b].rank); + debug_assert_eq!(self.table[b].parent, b); + + // Make `a` the parent of `b`. + self.table[b].parent = a; + + a + } + + /// Creates a snapshot of this disjoint-set forest that can no longer be + /// mutated, but can be queried without mutation. + pub(crate) fn freeze(&mut self) -> FrozenUnionFind { + // Just resolve each key to its actual root. + let roots = self.table.indices().map(|key| self.find(key)).collect(); + FrozenUnionFind { roots } + } +} + +/// Snapshot of a disjoint-set forest that can no longer be mutated, but can be +/// queried in O(1) time without mutation. +/// +/// This is really just a wrapper around a direct mapping from keys to roots, +/// but with a [`Self::find`] method that resembles [`UnionFind::find`]. +#[derive(Debug)] +pub(crate) struct FrozenUnionFind { + roots: IndexVec, +} + +impl FrozenUnionFind { + /// Returns the "root" key of the disjoint-set containing the given key. + /// If two keys have the same root, they belong to the same set. 
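// A small usage sketch complementing the unit tests below, which don't
// exercise `freeze`: once all `unify` calls are done, `freeze` snapshots each
// key's root so later lookups are O(1) and need only `&self`. The key type
// `u32` matches the tests; `demo` is a hypothetical free function.
fn demo() {
    let mut sets = UnionFind::<u32>::new(4);
    sets.unify(0, 1);
    sets.unify(2, 3);
    let frozen = sets.freeze();
    assert_eq!(frozen.find(0), frozen.find(1));
    assert_ne!(frozen.find(1), frozen.find(2));
}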
+ pub(crate) fn find(&self, key: Key) -> Key { + self.roots[key] + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs b/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs new file mode 100644 index 0000000000000..34a4e4f8e6edc --- /dev/null +++ b/compiler/rustc_mir_transform/src/coverage/counters/union_find/tests.rs @@ -0,0 +1,32 @@ +use super::UnionFind; + +#[test] +fn empty() { + let mut sets = UnionFind::::new(10); + + for i in 1..10 { + assert_eq!(sets.find(i), i); + } +} + +#[test] +fn transitive() { + let mut sets = UnionFind::::new(10); + + sets.unify(3, 7); + sets.unify(4, 2); + + assert_eq!(sets.find(7), sets.find(3)); + assert_eq!(sets.find(2), sets.find(4)); + assert_ne!(sets.find(3), sets.find(4)); + + sets.unify(7, 4); + + assert_eq!(sets.find(7), sets.find(3)); + assert_eq!(sets.find(2), sets.find(4)); + assert_eq!(sets.find(3), sets.find(4)); + + for i in [0, 1, 5, 6, 8, 9] { + assert_eq!(sets.find(i), i); + } +} diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs index 3fa8b063fa75f..25dc7f31227c8 100644 --- a/compiler/rustc_mir_transform/src/coverage/graph.rs +++ b/compiler/rustc_mir_transform/src/coverage/graph.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use std::collections::VecDeque; use std::ops::{Index, IndexMut}; -use std::{iter, mem, slice}; +use std::{mem, slice}; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxHashSet; @@ -211,54 +210,6 @@ impl CoverageGraph { self.dominator_order_rank[a].cmp(&self.dominator_order_rank[b]) } - /// Returns the source of this node's sole in-edge, if it has exactly one. - /// That edge can be assumed to have the same execution count as the node - /// itself (in the absence of panics). - pub(crate) fn sole_predecessor( - &self, - to_bcb: BasicCoverageBlock, - ) -> Option { - // Unlike `simple_successor`, there is no need for extra checks here. - if let &[from_bcb] = self.predecessors[to_bcb].as_slice() { Some(from_bcb) } else { None } - } - - /// Returns the target of this node's sole out-edge, if it has exactly - /// one, but only if that edge can be assumed to have the same execution - /// count as the node itself (in the absence of panics). - pub(crate) fn simple_successor( - &self, - from_bcb: BasicCoverageBlock, - ) -> Option { - // If a node's count is the sum of its out-edges, and it has exactly - // one out-edge, then that edge has the same count as the node. - if self.bcbs[from_bcb].is_out_summable - && let &[to_bcb] = self.successors[from_bcb].as_slice() - { - Some(to_bcb) - } else { - None - } - } - - /// For each loop that contains the given node, yields the "loop header" - /// node representing that loop, from innermost to outermost. If the given - /// node is itself a loop header, it is yielded first. - pub(crate) fn loop_headers_containing( - &self, - bcb: BasicCoverageBlock, - ) -> impl Iterator + Captures<'_> { - let self_if_loop_header = self.is_loop_header.contains(bcb).then_some(bcb).into_iter(); - - let mut curr = Some(bcb); - let strictly_enclosing = iter::from_fn(move || { - let enclosing = self.enclosing_loop_header[curr?]; - curr = enclosing; - enclosing - }); - - self_if_loop_header.chain(strictly_enclosing) - } - /// For the given node, yields the subset of its predecessor nodes that /// it dominates. 
If that subset is non-empty, the node is a "loop header", /// and each of those predecessors represents an in-edge that jumps back to @@ -489,126 +440,3 @@ impl<'a, 'tcx> graph::Successors for CoverageRelevantSubgraph<'a, 'tcx> { self.coverage_successors(bb).into_iter() } } - -/// State of a node in the coverage graph during ready-first traversal. -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -enum ReadyState { - /// This node has not yet been added to the fallback queue or ready queue. - Unqueued, - /// This node is currently in the fallback queue. - InFallbackQueue, - /// This node's predecessors have all been visited, so it is in the ready queue. - /// (It might also have a stale entry in the fallback queue.) - InReadyQueue, - /// This node has been visited. - /// (It might also have a stale entry in the fallback queue.) - Visited, -} - -/// Iterator that visits nodes in the coverage graph, in an order that always -/// prefers "ready" nodes whose predecessors have already been visited. -pub(crate) struct ReadyFirstTraversal<'a> { - graph: &'a CoverageGraph, - - /// For each node, the number of its predecessor nodes that haven't been visited yet. - n_unvisited_preds: IndexVec, - /// Indicates whether a node has been visited, or which queue it is in. - state: IndexVec, - - /// Holds unvisited nodes whose predecessors have all been visited. - ready_queue: VecDeque, - /// Holds unvisited nodes with some unvisited predecessors. - /// Also contains stale entries for nodes that were upgraded to ready. - fallback_queue: VecDeque, -} - -impl<'a> ReadyFirstTraversal<'a> { - pub(crate) fn new(graph: &'a CoverageGraph) -> Self { - let num_nodes = graph.num_nodes(); - - let n_unvisited_preds = - IndexVec::from_fn_n(|node| graph.predecessors[node].len() as u32, num_nodes); - let mut state = IndexVec::from_elem_n(ReadyState::Unqueued, num_nodes); - - // We know from coverage graph construction that the start node is the - // only node with no predecessors. - debug_assert!( - n_unvisited_preds.iter_enumerated().all(|(node, &n)| (node == START_BCB) == (n == 0)) - ); - let ready_queue = VecDeque::from(vec![START_BCB]); - state[START_BCB] = ReadyState::InReadyQueue; - - Self { graph, state, n_unvisited_preds, ready_queue, fallback_queue: VecDeque::new() } - } - - /// Returns the next node from the ready queue, or else the next unvisited - /// node from the fallback queue. - fn next_inner(&mut self) -> Option { - // Always prefer to yield a ready node if possible. - if let Some(node) = self.ready_queue.pop_front() { - assert_eq!(self.state[node], ReadyState::InReadyQueue); - return Some(node); - } - - while let Some(node) = self.fallback_queue.pop_front() { - match self.state[node] { - // This entry in the fallback queue is not stale, so yield it. - ReadyState::InFallbackQueue => return Some(node), - // This node was added to the fallback queue, but later became - // ready and was visited via the ready queue. Ignore it here. - ReadyState::Visited => {} - // Unqueued nodes can't be in the fallback queue, by definition. - // We know that the ready queue is empty at this point. 
- ReadyState::Unqueued | ReadyState::InReadyQueue => unreachable!( - "unexpected state for {node:?} in the fallback queue: {:?}", - self.state[node] - ), - } - } - - None - } - - fn mark_visited_and_enqueue_successors(&mut self, node: BasicCoverageBlock) { - assert!(self.state[node] < ReadyState::Visited); - self.state[node] = ReadyState::Visited; - - // For each of this node's successors, decrease the successor's - // "unvisited predecessors" count, and enqueue it if appropriate. - for &succ in &self.graph.successors[node] { - let is_unqueued = match self.state[succ] { - ReadyState::Unqueued => true, - ReadyState::InFallbackQueue => false, - ReadyState::InReadyQueue => { - unreachable!("nodes in the ready queue have no unvisited predecessors") - } - // The successor was already visited via one of its other predecessors. - ReadyState::Visited => continue, - }; - - self.n_unvisited_preds[succ] -= 1; - if self.n_unvisited_preds[succ] == 0 { - // This node's predecessors have all been visited, so add it to - // the ready queue. If it's already in the fallback queue, that - // fallback entry will be ignored later. - self.state[succ] = ReadyState::InReadyQueue; - self.ready_queue.push_back(succ); - } else if is_unqueued { - // This node has unvisited predecessors, so add it to the - // fallback queue in case we run out of ready nodes later. - self.state[succ] = ReadyState::InFallbackQueue; - self.fallback_queue.push_back(succ); - } - } - } -} - -impl<'a> Iterator for ReadyFirstTraversal<'a> { - type Item = BasicCoverageBlock; - - fn next(&mut self) -> Option { - let node = self.next_inner()?; - self.mark_visited_and_enqueue_successors(node); - Some(node) - } -} diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 57956448414b5..b1b609595b74a 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -15,10 +15,7 @@ use rustc_middle::hir::nested_filter; use rustc_middle::mir::coverage::{ CoverageKind, DecisionInfo, FunctionCoverageInfo, Mapping, MappingKind, }; -use rustc_middle::mir::{ - self, BasicBlock, BasicBlockData, SourceInfo, Statement, StatementKind, Terminator, - TerminatorKind, -}; +use rustc_middle::mir::{self, BasicBlock, Statement, StatementKind, TerminatorKind}; use rustc_middle::ty::TyCtxt; use rustc_span::Span; use rustc_span::def_id::LocalDefId; @@ -248,19 +245,6 @@ fn inject_coverage_statements<'tcx>( // to create a new block between the two BCBs, and inject into that. let target_bb = match site { Site::Node { bcb } => graph[bcb].leader_bb(), - Site::Edge { from_bcb, to_bcb } => { - // Create a new block between the last block of `from_bcb` and - // the first block of `to_bcb`. - let from_bb = graph[from_bcb].last_bb(); - let to_bb = graph[to_bcb].leader_bb(); - - let new_bb = inject_edge_counter_basic_block(mir_body, from_bb, to_bb); - debug!( - "Edge {from_bcb:?} (last {from_bb:?}) -> {to_bcb:?} (leader {to_bb:?}) \ - requires a new MIR BasicBlock {new_bb:?} for counter increment {id:?}", - ); - new_bb - } }; inject_statement(mir_body, CoverageKind::CounterIncrement { id }, target_bb); @@ -335,31 +319,6 @@ fn inject_mcdc_statements<'tcx>( } } -/// Given two basic blocks that have a control-flow edge between them, creates -/// and returns a new block that sits between those blocks. 
-fn inject_edge_counter_basic_block( - mir_body: &mut mir::Body<'_>, - from_bb: BasicBlock, - to_bb: BasicBlock, -) -> BasicBlock { - let span = mir_body[from_bb].terminator().source_info.span.shrink_to_hi(); - let new_bb = mir_body.basic_blocks_mut().push(BasicBlockData { - statements: vec![], // counter will be injected here - terminator: Some(Terminator { - source_info: SourceInfo::outermost(span), - kind: TerminatorKind::Goto { target: to_bb }, - }), - is_cleanup: false, - }); - let edge_ref = mir_body[from_bb] - .terminator_mut() - .successors_mut() - .find(|successor| **successor == to_bb) - .expect("from_bb should have a successor for to_bb"); - *edge_ref = new_bb; - new_bb -} - fn inject_statement(mir_body: &mut mir::Body<'_>, counter_kind: CoverageKind, bb: BasicBlock) { debug!(" injecting statement {counter_kind:?} for {bb:?}"); let data = &mut mir_body[bb]; diff --git a/tests/coverage/abort.cov-map b/tests/coverage/abort.cov-map index 396edec275d75..84fae4a595a71 100644 --- a/tests/coverage/abort.cov-map +++ b/tests/coverage/abort.cov-map @@ -1,40 +1,37 @@ Function name: abort::main -Raw bytes (89): 0x[01, 01, 0a, 07, 09, 01, 05, 03, 0d, 03, 13, 0d, 11, 03, 0d, 03, 1f, 0d, 15, 03, 0d, 05, 09, 0d, 01, 0d, 01, 01, 1b, 03, 02, 0b, 00, 18, 22, 01, 0c, 00, 19, 11, 00, 1a, 02, 0a, 0e, 02, 09, 00, 0a, 22, 02, 0c, 00, 19, 15, 00, 1a, 00, 31, 1a, 00, 30, 00, 31, 22, 04, 0c, 00, 19, 05, 00, 1a, 00, 31, 09, 00, 30, 00, 31, 27, 01, 09, 00, 17, 0d, 02, 05, 01, 02] +Raw bytes (83): 0x[01, 01, 07, 05, 01, 05, 0b, 01, 09, 05, 13, 01, 0d, 05, 1b, 01, 11, 0d, 01, 0d, 01, 01, 1b, 05, 02, 0b, 00, 18, 02, 01, 0c, 00, 19, 09, 00, 1a, 02, 0a, 06, 02, 09, 00, 0a, 02, 02, 0c, 00, 19, 0d, 00, 1a, 00, 31, 0e, 00, 30, 00, 31, 02, 04, 0c, 00, 19, 11, 00, 1a, 00, 31, 16, 00, 30, 00, 31, 02, 01, 09, 00, 17, 01, 02, 05, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 10 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Counter(1) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Expression(0, Add), rhs = Expression(4, Add) -- expression 4 operands: lhs = Counter(3), rhs = Counter(4) -- expression 5 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 6 operands: lhs = Expression(0, Add), rhs = Expression(7, Add) -- expression 7 operands: lhs = Counter(3), rhs = Counter(5) -- expression 8 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 9 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 7 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(1), rhs = Expression(4, Add) +- expression 4 operands: lhs = Counter(0), rhs = Counter(3) +- expression 5 operands: lhs = Counter(1), rhs = Expression(6, Add) +- expression 6 operands: lhs = Counter(0), rhs = Counter(4) Number of file 0 mappings: 13 - Code(Counter(0)) at (prev + 13, 1) to (start + 1, 27) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 24) - = ((c0 + c1) + c2) -- Code(Expression(8, Sub)) at (prev + 1, 12) to (start + 0, 25) - = (((c0 + c1) + c2) - c3) -- Code(Counter(4)) at (prev + 0, 26) to (start + 2, 10) -- Code(Expression(3, Sub)) at (prev + 2, 9) to (start + 0, 10) - = (((c0 + c1) + c2) - (c3 + c4)) -- Code(Expression(8, Sub)) at (prev + 2, 12) to (start + 0, 25) - = 
(((c0 + c1) + c2) - c3) -- Code(Counter(5)) at (prev + 0, 26) to (start + 0, 49) -- Code(Expression(6, Sub)) at (prev + 0, 48) to (start + 0, 49) - = (((c0 + c1) + c2) - (c3 + c5)) -- Code(Expression(8, Sub)) at (prev + 4, 12) to (start + 0, 25) - = (((c0 + c1) + c2) - c3) -- Code(Counter(1)) at (prev + 0, 26) to (start + 0, 49) -- Code(Counter(2)) at (prev + 0, 48) to (start + 0, 49) -- Code(Expression(9, Add)) at (prev + 1, 9) to (start + 0, 23) - = (c1 + c2) -- Code(Counter(3)) at (prev + 2, 5) to (start + 1, 2) -Highest counter ID seen: c5 +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 24) +- Code(Expression(0, Sub)) at (prev + 1, 12) to (start + 0, 25) + = (c1 - c0) +- Code(Counter(2)) at (prev + 0, 26) to (start + 2, 10) +- Code(Expression(1, Sub)) at (prev + 2, 9) to (start + 0, 10) + = (c1 - (c0 + c2)) +- Code(Expression(0, Sub)) at (prev + 2, 12) to (start + 0, 25) + = (c1 - c0) +- Code(Counter(3)) at (prev + 0, 26) to (start + 0, 49) +- Code(Expression(3, Sub)) at (prev + 0, 48) to (start + 0, 49) + = (c1 - (c0 + c3)) +- Code(Expression(0, Sub)) at (prev + 4, 12) to (start + 0, 25) + = (c1 - c0) +- Code(Counter(4)) at (prev + 0, 26) to (start + 0, 49) +- Code(Expression(5, Sub)) at (prev + 0, 48) to (start + 0, 49) + = (c1 - (c0 + c4)) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 23) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 5) to (start + 1, 2) +Highest counter ID seen: c4 Function name: abort::might_abort Raw bytes (21): 0x[01, 01, 01, 01, 05, 03, 01, 03, 01, 01, 14, 05, 02, 09, 01, 24, 02, 02, 0c, 03, 02] diff --git a/tests/coverage/assert-ne.cov-map b/tests/coverage/assert-ne.cov-map index 906abcd3c2e59..b432e63c168ce 100644 --- a/tests/coverage/assert-ne.cov-map +++ b/tests/coverage/assert-ne.cov-map @@ -1,14 +1,16 @@ Function name: assert_ne::main -Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 08, 01, 03, 1c, 05, 04, 0d, 00, 13, 02, 02, 0d, 00, 13, 09, 03, 05, 01, 02] +Raw bytes (28): 0x[01, 01, 02, 01, 05, 01, 09, 04, 01, 08, 01, 03, 1c, 05, 04, 0d, 00, 13, 02, 02, 0d, 00, 13, 06, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 1 +Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 4 - Code(Counter(0)) at (prev + 8, 1) to (start + 3, 28) - Code(Counter(1)) at (prev + 4, 13) to (start + 0, 19) - Code(Expression(0, Sub)) at (prev + 2, 13) to (start + 0, 19) = (c0 - c1) -- Code(Counter(2)) at (prev + 3, 5) to (start + 1, 2) -Highest counter ID seen: c2 +- Code(Expression(1, Sub)) at (prev + 3, 5) to (start + 1, 2) + = (c0 - c2) +Highest counter ID seen: c1 diff --git a/tests/coverage/assert.cov-map b/tests/coverage/assert.cov-map index 3bbf7a43e6d8d..903cccfe9cbce 100644 --- a/tests/coverage/assert.cov-map +++ b/tests/coverage/assert.cov-map @@ -1,32 +1,29 @@ Function name: assert::main -Raw bytes (67): 0x[01, 01, 09, 07, 0d, 0b, 09, 01, 05, 03, 11, 17, 11, 1b, 0d, 01, 09, 23, 0d, 05, 09, 09, 01, 09, 01, 01, 1b, 03, 02, 0b, 00, 18, 0e, 01, 0c, 00, 1a, 05, 00, 1b, 02, 0a, 12, 02, 13, 00, 20, 09, 00, 21, 02, 0a, 0d, 02, 09, 00, 0a, 1f, 01, 09, 00, 17, 11, 02, 05, 01, 02] +Raw bytes (61): 0x[01, 01, 06, 05, 01, 05, 17, 01, 09, 05, 13, 17, 0d, 01, 09, 09, 01, 09, 01, 01, 1b, 05, 02, 0b, 00, 18, 02, 01, 0c, 00, 1a, 09, 00, 1b, 02, 0a, 06, 02, 13, 00, 20, 0d, 00, 21, 02, 0a, 0e, 02, 09, 00, 0a, 02, 01, 09, 00, 17, 01, 02, 05, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of 
expressions: 9 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) -- expression 1 operands: lhs = Expression(2, Add), rhs = Counter(2) -- expression 2 operands: lhs = Counter(0), rhs = Counter(1) -- expression 3 operands: lhs = Expression(0, Add), rhs = Counter(4) -- expression 4 operands: lhs = Expression(5, Add), rhs = Counter(4) -- expression 5 operands: lhs = Expression(6, Add), rhs = Counter(3) -- expression 6 operands: lhs = Counter(0), rhs = Counter(2) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(3) -- expression 8 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 6 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(5, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(1), rhs = Expression(4, Add) +- expression 4 operands: lhs = Expression(5, Add), rhs = Counter(3) +- expression 5 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 9, 1) to (start + 1, 27) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 24) - = (((c0 + c1) + c2) + c3) -- Code(Expression(3, Sub)) at (prev + 1, 12) to (start + 0, 26) - = ((((c0 + c1) + c2) + c3) - c4) -- Code(Counter(1)) at (prev + 0, 27) to (start + 2, 10) -- Code(Expression(4, Sub)) at (prev + 2, 19) to (start + 0, 32) - = (((c0 + c2) + c3) - c4) -- Code(Counter(2)) at (prev + 0, 33) to (start + 2, 10) -- Code(Counter(3)) at (prev + 2, 9) to (start + 0, 10) -- Code(Expression(7, Add)) at (prev + 1, 9) to (start + 0, 23) - = ((c1 + c2) + c3) -- Code(Counter(4)) at (prev + 2, 5) to (start + 1, 2) -Highest counter ID seen: c4 +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 24) +- Code(Expression(0, Sub)) at (prev + 1, 12) to (start + 0, 26) + = (c1 - c0) +- Code(Counter(2)) at (prev + 0, 27) to (start + 2, 10) +- Code(Expression(1, Sub)) at (prev + 2, 19) to (start + 0, 32) + = (c1 - (c0 + c2)) +- Code(Counter(3)) at (prev + 0, 33) to (start + 2, 10) +- Code(Expression(3, Sub)) at (prev + 2, 9) to (start + 0, 10) + = (c1 - ((c0 + c2) + c3)) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 23) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 5) to (start + 1, 2) +Highest counter ID seen: c3 Function name: assert::might_fail_assert Raw bytes (21): 0x[01, 01, 01, 01, 05, 03, 01, 04, 01, 02, 0f, 02, 02, 25, 00, 3d, 05, 01, 01, 00, 02] diff --git a/tests/coverage/assert.coverage b/tests/coverage/assert.coverage index 22fb6821fba39..dfd919660f557 100644 --- a/tests/coverage/assert.coverage +++ b/tests/coverage/assert.coverage @@ -9,16 +9,16 @@ LL| | LL| 1|fn main() -> Result<(), u8> { LL| 1| let mut countdown = 10; - LL| 11| while countdown > 0 { - LL| 11| if countdown == 1 { + LL| 10| while countdown > 0 { + LL| 9| if countdown == 1 { LL| 1| might_fail_assert(3); - LL| 10| } else if countdown < 5 { + LL| 8| } else if countdown < 5 { LL| 3| might_fail_assert(2); - LL| 6| } - LL| 10| countdown -= 1; + LL| 5| } + LL| 9| countdown -= 1; LL| | } - LL| 0| Ok(()) - LL| 0|} + LL| 1| Ok(()) + LL| 1|} LL| | LL| |// Notes: LL| |// 1. 
Compare this program and its coverage results to those of the very similar test diff --git a/tests/coverage/assert_not.cov-map b/tests/coverage/assert_not.cov-map index 401dd6450e952..397eaa17caf6d 100644 --- a/tests/coverage/assert_not.cov-map +++ b/tests/coverage/assert_not.cov-map @@ -1,17 +1,15 @@ Function name: assert_not::main -Raw bytes (33): 0x[01, 01, 02, 05, 00, 0d, 00, 05, 01, 06, 01, 01, 12, 05, 02, 05, 00, 14, 02, 01, 05, 00, 14, 0d, 01, 05, 00, 16, 06, 01, 01, 00, 02] +Raw bytes (31): 0x[01, 01, 01, 0d, 00, 05, 01, 06, 01, 01, 12, 05, 02, 05, 00, 14, 09, 01, 05, 00, 14, 0d, 01, 05, 00, 16, 02, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 2 -- expression 0 operands: lhs = Counter(1), rhs = Zero -- expression 1 operands: lhs = Counter(3), rhs = Zero +Number of expressions: 1 +- expression 0 operands: lhs = Counter(3), rhs = Zero Number of file 0 mappings: 5 - Code(Counter(0)) at (prev + 6, 1) to (start + 1, 18) - Code(Counter(1)) at (prev + 2, 5) to (start + 0, 20) -- Code(Expression(0, Sub)) at (prev + 1, 5) to (start + 0, 20) - = (c1 - Zero) +- Code(Counter(2)) at (prev + 1, 5) to (start + 0, 20) - Code(Counter(3)) at (prev + 1, 5) to (start + 0, 22) -- Code(Expression(1, Sub)) at (prev + 1, 1) to (start + 0, 2) +- Code(Expression(0, Sub)) at (prev + 1, 1) to (start + 0, 2) = (c3 - Zero) Highest counter ID seen: c3 diff --git a/tests/coverage/async.cov-map b/tests/coverage/async.cov-map index d3eed6c4f2acb..521562f6b9125 100644 --- a/tests/coverage/async.cov-map +++ b/tests/coverage/async.cov-map @@ -155,34 +155,38 @@ Number of file 0 mappings: 1 Highest counter ID seen: c0 Function name: async::i::{closure#0} -Raw bytes (63): 0x[01, 01, 02, 07, 15, 0d, 11, 0b, 01, 2c, 13, 04, 0c, 09, 05, 09, 00, 0a, 01, 00, 0e, 00, 18, 05, 00, 1c, 00, 21, 09, 00, 27, 00, 30, 11, 01, 09, 00, 0a, 19, 00, 0e, 00, 17, 1d, 00, 1b, 00, 20, 11, 00, 24, 00, 26, 15, 01, 0e, 00, 10, 03, 02, 01, 00, 02] +Raw bytes (65): 0x[01, 01, 03, 05, 09, 11, 15, 0d, 11, 0b, 01, 2c, 13, 04, 0c, 09, 05, 09, 00, 0a, 01, 00, 0e, 00, 18, 05, 00, 1c, 00, 21, 09, 00, 27, 00, 30, 15, 01, 09, 00, 0a, 02, 00, 0e, 00, 17, 11, 00, 1b, 00, 20, 15, 00, 24, 00, 26, 06, 01, 0e, 00, 10, 0b, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 2 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(5) -- expression 1 operands: lhs = Counter(3), rhs = Counter(4) +Number of expressions: 3 +- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(4), rhs = Counter(5) +- expression 2 operands: lhs = Counter(3), rhs = Counter(4) Number of file 0 mappings: 11 - Code(Counter(0)) at (prev + 44, 19) to (start + 4, 12) - Code(Counter(2)) at (prev + 5, 9) to (start + 0, 10) - Code(Counter(0)) at (prev + 0, 14) to (start + 0, 24) - Code(Counter(1)) at (prev + 0, 28) to (start + 0, 33) - Code(Counter(2)) at (prev + 0, 39) to (start + 0, 48) -- Code(Counter(4)) at (prev + 1, 9) to (start + 0, 10) -- Code(Counter(6)) at (prev + 0, 14) to (start + 0, 23) -- Code(Counter(7)) at (prev + 0, 27) to (start + 0, 32) -- Code(Counter(4)) at (prev + 0, 36) to (start + 0, 38) -- Code(Counter(5)) at (prev + 1, 14) to (start + 0, 16) -- Code(Expression(0, Add)) at (prev + 2, 1) to (start + 0, 2) - = ((c3 + c4) + c5) -Highest counter ID seen: c7 +- Code(Counter(5)) at (prev + 1, 9) to (start + 0, 10) +- Code(Expression(0, Sub)) at (prev + 0, 14) to (start + 0, 23) + = (c1 - c2) +- Code(Counter(4)) at (prev + 0, 27) to (start + 0, 32) 
+- Code(Counter(5)) at (prev + 0, 36) to (start + 0, 38) +- Code(Expression(1, Sub)) at (prev + 1, 14) to (start + 0, 16) + = (c4 - c5) +- Code(Expression(2, Add)) at (prev + 2, 1) to (start + 0, 2) + = (c3 + c4) +Highest counter ID seen: c5 Function name: async::j -Raw bytes (58): 0x[01, 01, 02, 07, 0d, 05, 09, 0a, 01, 37, 01, 00, 0d, 01, 0b, 0b, 00, 0c, 05, 01, 09, 00, 0a, 01, 00, 0e, 00, 1b, 05, 00, 1f, 00, 27, 09, 01, 09, 00, 0a, 11, 00, 0e, 00, 1a, 09, 00, 1e, 00, 20, 0d, 01, 0e, 00, 10, 03, 02, 01, 00, 02] +Raw bytes (60): 0x[01, 01, 03, 01, 05, 01, 0b, 05, 09, 0a, 01, 37, 01, 00, 0d, 01, 0b, 0b, 00, 0c, 05, 01, 09, 00, 0a, 01, 00, 0e, 00, 1b, 05, 00, 1f, 00, 27, 09, 01, 09, 00, 0a, 02, 00, 0e, 00, 1a, 09, 00, 1e, 00, 20, 06, 01, 0e, 00, 10, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 2 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) -- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 3 +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 10 - Code(Counter(0)) at (prev + 55, 1) to (start + 0, 13) - Code(Counter(0)) at (prev + 11, 11) to (start + 0, 12) @@ -190,12 +194,13 @@ Number of file 0 mappings: 10 - Code(Counter(0)) at (prev + 0, 14) to (start + 0, 27) - Code(Counter(1)) at (prev + 0, 31) to (start + 0, 39) - Code(Counter(2)) at (prev + 1, 9) to (start + 0, 10) -- Code(Counter(4)) at (prev + 0, 14) to (start + 0, 26) +- Code(Expression(0, Sub)) at (prev + 0, 14) to (start + 0, 26) + = (c0 - c1) - Code(Counter(2)) at (prev + 0, 30) to (start + 0, 32) -- Code(Counter(3)) at (prev + 1, 14) to (start + 0, 16) -- Code(Expression(0, Add)) at (prev + 2, 1) to (start + 0, 2) - = ((c1 + c2) + c3) -Highest counter ID seen: c4 +- Code(Expression(1, Sub)) at (prev + 1, 14) to (start + 0, 16) + = (c0 - (c1 + c2)) +- Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2) +Highest counter ID seen: c2 Function name: async::j::c Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 39, 05, 01, 12, 05, 02, 0d, 00, 0e, 02, 02, 0d, 00, 0e, 01, 02, 05, 00, 06] diff --git a/tests/coverage/async_block.cov-map b/tests/coverage/async_block.cov-map index 14ed4850d4a72..5eb69e668ca5d 100644 --- a/tests/coverage/async_block.cov-map +++ b/tests/coverage/async_block.cov-map @@ -1,16 +1,18 @@ Function name: async_block::main -Raw bytes (36): 0x[01, 01, 01, 01, 05, 06, 01, 07, 01, 00, 0b, 05, 01, 09, 00, 0a, 03, 00, 0e, 00, 13, 05, 00, 14, 01, 16, 05, 07, 0a, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (36): 0x[01, 01, 01, 05, 01, 06, 01, 07, 01, 00, 0b, 02, 01, 09, 00, 0a, 05, 00, 0e, 00, 13, 02, 00, 14, 01, 16, 02, 07, 0a, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 6 - Code(Counter(0)) at (prev + 7, 1) to (start + 0, 11) -- Code(Counter(1)) at (prev + 1, 9) to (start + 0, 10) -- Code(Expression(0, Add)) at (prev + 0, 14) to (start + 0, 19) - = (c0 + c1) -- Code(Counter(1)) at (prev + 0, 20) to (start + 1, 22) -- Code(Counter(1)) at (prev + 7, 10) to (start + 2, 6) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 10) + = (c1 - c0) +- Code(Counter(1)) at (prev + 0, 14) to (start + 0, 19) +- Code(Expression(0, Sub)) at (prev + 0, 20) to (start + 1, 22) + 
= (c1 - c0) +- Code(Expression(0, Sub)) at (prev + 7, 10) to (start + 2, 6) + = (c1 - c0) - Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) Highest counter ID seen: c1 diff --git a/tests/coverage/async_closure.cov-map b/tests/coverage/async_closure.cov-map index 04c05ba098b40..9144a938a9e24 100644 --- a/tests/coverage/async_closure.cov-map +++ b/tests/coverage/async_closure.cov-map @@ -8,14 +8,16 @@ Number of file 0 mappings: 1 Highest counter ID seen: c0 Function name: async_closure::call_once::::{closure#0} -Raw bytes (14): 0x[01, 01, 00, 02, 01, 06, 2b, 01, 0e, 05, 02, 01, 00, 02] +Raw bytes (16): 0x[01, 01, 01, 05, 09, 02, 01, 06, 2b, 01, 0e, 02, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 0 +Number of expressions: 1 +- expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 2 - Code(Counter(0)) at (prev + 6, 43) to (start + 1, 14) -- Code(Counter(1)) at (prev + 2, 1) to (start + 0, 2) -Highest counter ID seen: c1 +- Code(Expression(0, Sub)) at (prev + 2, 1) to (start + 0, 2) + = (c1 - c2) +Highest counter ID seen: c0 Function name: async_closure::main Raw bytes (14): 0x[01, 01, 00, 02, 01, 0a, 01, 01, 16, 01, 02, 05, 02, 02] diff --git a/tests/coverage/await_ready.cov-map b/tests/coverage/await_ready.cov-map index bc1af4e42e8b9..61fd4c7814d8c 100644 --- a/tests/coverage/await_ready.cov-map +++ b/tests/coverage/await_ready.cov-map @@ -8,12 +8,14 @@ Number of file 0 mappings: 1 Highest counter ID seen: c0 Function name: await_ready::await_ready::{closure#0} -Raw bytes (14): 0x[01, 01, 00, 02, 01, 0e, 1e, 03, 0f, 05, 04, 01, 00, 02] +Raw bytes (16): 0x[01, 01, 01, 05, 09, 02, 01, 0e, 1e, 03, 0f, 02, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 0 +Number of expressions: 1 +- expression 0 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 2 - Code(Counter(0)) at (prev + 14, 30) to (start + 3, 15) -- Code(Counter(1)) at (prev + 4, 1) to (start + 0, 2) -Highest counter ID seen: c1 +- Code(Expression(0, Sub)) at (prev + 4, 1) to (start + 0, 2) + = (c1 - c2) +Highest counter ID seen: c0 diff --git a/tests/coverage/branch/guard.cov-map b/tests/coverage/branch/guard.cov-map index 7ca499bd847c3..55f45daa9c93d 100644 --- a/tests/coverage/branch/guard.cov-map +++ b/tests/coverage/branch/guard.cov-map @@ -1,21 +1,22 @@ Function name: guard::branch_match_guard -Raw bytes (89): 0x[01, 01, 08, 05, 0d, 05, 17, 0d, 11, 1f, 17, 05, 09, 0d, 11, 1f, 15, 05, 09, 0d, 01, 0c, 01, 01, 10, 02, 03, 0b, 00, 0c, 15, 01, 14, 02, 0a, 0d, 03, 0e, 00, 0f, 05, 00, 14, 00, 19, 20, 0d, 02, 00, 14, 00, 1e, 0d, 00, 1d, 02, 0a, 11, 03, 0e, 00, 0f, 02, 00, 14, 00, 19, 20, 11, 06, 00, 14, 00, 1e, 11, 00, 1d, 02, 0a, 0e, 03, 0e, 02, 0a, 1b, 04, 01, 00, 02] +Raw bytes (89): 0x[01, 01, 08, 05, 0d, 09, 05, 05, 0f, 0d, 11, 17, 1b, 01, 05, 1f, 11, 09, 0d, 0d, 01, 0c, 01, 01, 10, 02, 03, 0b, 00, 0c, 06, 01, 14, 02, 0a, 0d, 03, 0e, 00, 0f, 05, 00, 14, 00, 19, 20, 0d, 02, 00, 14, 00, 1e, 0d, 00, 1d, 02, 0a, 11, 03, 0e, 00, 0f, 02, 00, 14, 00, 19, 20, 11, 0a, 00, 14, 00, 1e, 11, 00, 1d, 02, 0a, 12, 03, 0e, 02, 0a, 01, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 8 - expression 0 operands: lhs = Counter(1), rhs = Counter(3) -- expression 1 operands: lhs = Counter(1), rhs = Expression(5, Add) -- expression 2 operands: lhs = Counter(3), rhs = Counter(4) -- expression 3 operands: lhs = Expression(7, Add), rhs = Expression(5, Add) -- expression 4 operands: lhs = 
Counter(1), rhs = Counter(2) -- expression 5 operands: lhs = Counter(3), rhs = Counter(4) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(5) -- expression 7 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(2), rhs = Counter(1) +- expression 2 operands: lhs = Counter(1), rhs = Expression(3, Add) +- expression 3 operands: lhs = Counter(3), rhs = Counter(4) +- expression 4 operands: lhs = Expression(5, Add), rhs = Expression(6, Add) +- expression 5 operands: lhs = Counter(0), rhs = Counter(1) +- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(4) +- expression 7 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 13 - Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) - Code(Expression(0, Sub)) at (prev + 3, 11) to (start + 0, 12) = (c1 - c3) -- Code(Counter(5)) at (prev + 1, 20) to (start + 2, 10) +- Code(Expression(1, Sub)) at (prev + 1, 20) to (start + 2, 10) + = (c2 - c1) - Code(Counter(3)) at (prev + 3, 14) to (start + 0, 15) - Code(Counter(1)) at (prev + 0, 20) to (start + 0, 25) - Branch { true: Counter(3), false: Expression(0, Sub) } at (prev + 0, 20) to (start + 0, 30) @@ -25,13 +26,12 @@ Number of file 0 mappings: 13 - Code(Counter(4)) at (prev + 3, 14) to (start + 0, 15) - Code(Expression(0, Sub)) at (prev + 0, 20) to (start + 0, 25) = (c1 - c3) -- Branch { true: Counter(4), false: Expression(1, Sub) } at (prev + 0, 20) to (start + 0, 30) +- Branch { true: Counter(4), false: Expression(2, Sub) } at (prev + 0, 20) to (start + 0, 30) true = c4 false = (c1 - (c3 + c4)) - Code(Counter(4)) at (prev + 0, 29) to (start + 2, 10) -- Code(Expression(3, Sub)) at (prev + 3, 14) to (start + 2, 10) - = ((c1 + c2) - (c3 + c4)) -- Code(Expression(6, Add)) at (prev + 4, 1) to (start + 0, 2) - = ((c1 + c2) + c5) -Highest counter ID seen: c5 +- Code(Expression(4, Sub)) at (prev + 3, 14) to (start + 2, 10) + = ((c0 + c1) - ((c2 + c3) + c4)) +- Code(Counter(0)) at (prev + 4, 1) to (start + 0, 2) +Highest counter ID seen: c4 diff --git a/tests/coverage/branch/if-let.cov-map b/tests/coverage/branch/if-let.cov-map index 773c53924653c..db45df2a5cd00 100644 --- a/tests/coverage/branch/if-let.cov-map +++ b/tests/coverage/branch/if-let.cov-map @@ -1,22 +1,22 @@ Function name: if_let::if_let -Raw bytes (43): 0x[01, 01, 01, 05, 09, 07, 01, 0c, 01, 01, 10, 20, 02, 09, 03, 0c, 00, 13, 02, 00, 11, 00, 12, 05, 00, 16, 00, 1b, 02, 00, 1c, 02, 06, 09, 02, 0c, 02, 06, 05, 03, 05, 01, 02] +Raw bytes (43): 0x[01, 01, 01, 01, 05, 07, 01, 0c, 01, 01, 10, 20, 02, 05, 03, 0c, 00, 13, 02, 00, 11, 00, 12, 01, 00, 16, 00, 1b, 02, 00, 1c, 02, 06, 05, 02, 0c, 02, 06, 01, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 7 - Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) -- Branch { true: Expression(0, Sub), false: Counter(2) } at (prev + 3, 12) to (start + 0, 19) - true = (c1 - c2) - false = c2 +- Branch { true: Expression(0, Sub), false: Counter(1) } at (prev + 3, 12) to (start + 0, 19) + true = (c0 - c1) + false = c1 - Code(Expression(0, Sub)) at (prev + 0, 17) to (start + 0, 18) - = (c1 - c2) -- Code(Counter(1)) at (prev + 0, 22) to (start + 0, 27) + = (c0 - c1) +- Code(Counter(0)) at (prev + 0, 22) to (start + 0, 27) - Code(Expression(0, Sub)) at (prev + 0, 28) to (start + 2, 6) - = (c1 - c2) -- Code(Counter(2)) at (prev + 2, 12) to (start + 2, 6) 
-- Code(Counter(1)) at (prev + 3, 5) to (start + 1, 2) -Highest counter ID seen: c2 + = (c0 - c1) +- Code(Counter(1)) at (prev + 2, 12) to (start + 2, 6) +- Code(Counter(0)) at (prev + 3, 5) to (start + 1, 2) +Highest counter ID seen: c1 Function name: if_let::if_let_chain Raw bytes (74): 0x[01, 01, 08, 01, 05, 01, 1f, 05, 09, 01, 1f, 05, 09, 01, 1f, 05, 09, 05, 09, 0a, 01, 17, 01, 00, 33, 20, 02, 05, 01, 0c, 00, 13, 02, 00, 11, 00, 12, 01, 00, 16, 00, 17, 20, 16, 09, 01, 10, 00, 17, 16, 00, 15, 00, 16, 02, 00, 1a, 00, 1b, 16, 01, 05, 03, 06, 1f, 03, 0c, 02, 06, 01, 03, 05, 01, 02] diff --git a/tests/coverage/branch/if.cov-map b/tests/coverage/branch/if.cov-map index 3d9a1d2e1ab69..a6b865318c662 100644 --- a/tests/coverage/branch/if.cov-map +++ b/tests/coverage/branch/if.cov-map @@ -1,134 +1,134 @@ Function name: if::branch_and -Raw bytes (54): 0x[01, 01, 03, 05, 09, 09, 0d, 05, 0d, 08, 01, 2b, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 09, 00, 0d, 00, 0e, 20, 0d, 06, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (54): 0x[01, 01, 03, 01, 05, 05, 09, 01, 09, 08, 01, 2b, 01, 01, 10, 01, 03, 08, 00, 09, 20, 05, 02, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 20, 09, 06, 00, 0d, 00, 0e, 09, 00, 0f, 02, 06, 0a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Counter(2), rhs = Counter(3) -- expression 2 operands: lhs = Counter(1), rhs = Counter(3) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 8 - Code(Counter(0)) at (prev + 43, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 8) to (start + 0, 9) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) +- Code(Counter(0)) at (prev + 3, 8) to (start + 0, 9) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) +- Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14) +- Branch { true: Counter(2), false: Expression(1, Sub) } at (prev + 0, 13) to (start + 0, 14) true = c2 false = (c1 - c2) -- Code(Counter(2)) at (prev + 0, 13) to (start + 0, 14) -- Branch { true: Counter(3), false: Expression(1, Sub) } at (prev + 0, 13) to (start + 0, 14) - true = c3 - false = (c2 - c3) -- Code(Counter(3)) at (prev + 0, 15) to (start + 2, 6) +- Code(Counter(2)) at (prev + 0, 15) to (start + 2, 6) - Code(Expression(2, Sub)) at (prev + 2, 12) to (start + 2, 6) - = (c1 - c3) -- Code(Counter(1)) at (prev + 3, 1) to (start + 0, 2) -Highest counter ID seen: c3 + = (c0 - c2) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c2 Function name: if::branch_not -Raw bytes (116): 0x[01, 01, 07, 05, 09, 05, 0d, 05, 0d, 05, 11, 05, 11, 05, 15, 05, 15, 12, 01, 0c, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 09, 01, 09, 00, 11, 02, 01, 05, 00, 06, 05, 01, 08, 00, 0a, 20, 0a, 0d, 00, 08, 00, 0a, 0a, 00, 0b, 02, 06, 0d, 02, 05, 00, 06, 05, 01, 08, 00, 0b, 20, 11, 12, 00, 08, 00, 0b, 11, 00, 0c, 02, 06, 12, 02, 05, 00, 06, 05, 01, 08, 00, 0c, 20, 1a, 15, 00, 08, 00, 0c, 1a, 00, 0d, 02, 06, 15, 02, 05, 00, 06, 05, 01, 01, 00, 02] +Raw bytes (116): 0x[01, 01, 07, 01, 05, 01, 09, 01, 09, 01, 0d, 01, 0d, 01, 11, 01, 11, 12, 01, 0c, 01, 01, 10, 01, 03, 08, 00, 09, 20, 05, 
02, 00, 08, 00, 09, 05, 01, 09, 00, 11, 02, 01, 05, 00, 06, 01, 01, 08, 00, 0a, 20, 0a, 09, 00, 08, 00, 0a, 0a, 00, 0b, 02, 06, 09, 02, 05, 00, 06, 01, 01, 08, 00, 0b, 20, 0d, 12, 00, 08, 00, 0b, 0d, 00, 0c, 02, 06, 12, 02, 05, 00, 06, 01, 01, 08, 00, 0c, 20, 1a, 11, 00, 08, 00, 0c, 1a, 00, 0d, 02, 06, 11, 02, 05, 00, 06, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 7 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Counter(1), rhs = Counter(3) -- expression 2 operands: lhs = Counter(1), rhs = Counter(3) -- expression 3 operands: lhs = Counter(1), rhs = Counter(4) -- expression 4 operands: lhs = Counter(1), rhs = Counter(4) -- expression 5 operands: lhs = Counter(1), rhs = Counter(5) -- expression 6 operands: lhs = Counter(1), rhs = Counter(5) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Counter(2) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(0), rhs = Counter(3) +- expression 4 operands: lhs = Counter(0), rhs = Counter(3) +- expression 5 operands: lhs = Counter(0), rhs = Counter(4) +- expression 6 operands: lhs = Counter(0), rhs = Counter(4) Number of file 0 mappings: 18 - Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 8) to (start + 0, 9) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) - true = c2 - false = (c1 - c2) -- Code(Counter(2)) at (prev + 1, 9) to (start + 0, 17) +- Code(Counter(0)) at (prev + 3, 8) to (start + 0, 9) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) +- Code(Counter(1)) at (prev + 1, 9) to (start + 0, 17) - Code(Expression(0, Sub)) at (prev + 1, 5) to (start + 0, 6) - = (c1 - c2) -- Code(Counter(1)) at (prev + 1, 8) to (start + 0, 10) -- Branch { true: Expression(2, Sub), false: Counter(3) } at (prev + 0, 8) to (start + 0, 10) - true = (c1 - c3) - false = c3 + = (c0 - c1) +- Code(Counter(0)) at (prev + 1, 8) to (start + 0, 10) +- Branch { true: Expression(2, Sub), false: Counter(2) } at (prev + 0, 8) to (start + 0, 10) + true = (c0 - c2) + false = c2 - Code(Expression(2, Sub)) at (prev + 0, 11) to (start + 2, 6) - = (c1 - c3) -- Code(Counter(3)) at (prev + 2, 5) to (start + 0, 6) -- Code(Counter(1)) at (prev + 1, 8) to (start + 0, 11) -- Branch { true: Counter(4), false: Expression(4, Sub) } at (prev + 0, 8) to (start + 0, 11) - true = c4 - false = (c1 - c4) -- Code(Counter(4)) at (prev + 0, 12) to (start + 2, 6) + = (c0 - c2) +- Code(Counter(2)) at (prev + 2, 5) to (start + 0, 6) +- Code(Counter(0)) at (prev + 1, 8) to (start + 0, 11) +- Branch { true: Counter(3), false: Expression(4, Sub) } at (prev + 0, 8) to (start + 0, 11) + true = c3 + false = (c0 - c3) +- Code(Counter(3)) at (prev + 0, 12) to (start + 2, 6) - Code(Expression(4, Sub)) at (prev + 2, 5) to (start + 0, 6) - = (c1 - c4) -- Code(Counter(1)) at (prev + 1, 8) to (start + 0, 12) -- Branch { true: Expression(6, Sub), false: Counter(5) } at (prev + 0, 8) to (start + 0, 12) - true = (c1 - c5) - false = c5 + = (c0 - c3) +- Code(Counter(0)) at (prev + 1, 8) to (start + 0, 12) +- Branch { true: Expression(6, Sub), false: Counter(4) } at (prev + 0, 8) to (start + 0, 12) + true = (c0 - c4) + false = c4 - Code(Expression(6, Sub)) at (prev + 0, 13) to (start + 2, 6) - = (c1 - c5) -- Code(Counter(5)) at (prev + 2, 5) to (start + 0, 6) -- 
Code(Counter(1)) at (prev + 1, 1) to (start + 0, 2) -Highest counter ID seen: c5 + = (c0 - c4) +- Code(Counter(4)) at (prev + 2, 5) to (start + 0, 6) +- Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) +Highest counter ID seen: c4 Function name: if::branch_not_as -Raw bytes (90): 0x[01, 01, 05, 05, 09, 05, 0d, 05, 0d, 05, 11, 05, 11, 0e, 01, 1d, 01, 01, 10, 05, 03, 08, 00, 14, 20, 02, 09, 00, 08, 00, 14, 02, 00, 15, 02, 06, 09, 02, 05, 00, 06, 05, 01, 08, 00, 15, 20, 0d, 0a, 00, 08, 00, 15, 0d, 00, 16, 02, 06, 0a, 02, 05, 00, 06, 05, 01, 08, 00, 16, 20, 12, 11, 00, 08, 00, 16, 12, 00, 17, 02, 06, 11, 02, 05, 00, 06, 05, 01, 01, 00, 02] +Raw bytes (90): 0x[01, 01, 05, 01, 05, 01, 09, 01, 09, 01, 0d, 01, 0d, 0e, 01, 1d, 01, 01, 10, 01, 03, 08, 00, 14, 20, 02, 05, 00, 08, 00, 14, 02, 00, 15, 02, 06, 05, 02, 05, 00, 06, 01, 01, 08, 00, 15, 20, 09, 0a, 00, 08, 00, 15, 09, 00, 16, 02, 06, 0a, 02, 05, 00, 06, 01, 01, 08, 00, 16, 20, 12, 0d, 00, 08, 00, 16, 12, 00, 17, 02, 06, 0d, 02, 05, 00, 06, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Counter(1), rhs = Counter(3) -- expression 2 operands: lhs = Counter(1), rhs = Counter(3) -- expression 3 operands: lhs = Counter(1), rhs = Counter(4) -- expression 4 operands: lhs = Counter(1), rhs = Counter(4) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Counter(2) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(0), rhs = Counter(3) +- expression 4 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 14 - Code(Counter(0)) at (prev + 29, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 8) to (start + 0, 20) -- Branch { true: Expression(0, Sub), false: Counter(2) } at (prev + 0, 8) to (start + 0, 20) - true = (c1 - c2) - false = c2 +- Code(Counter(0)) at (prev + 3, 8) to (start + 0, 20) +- Branch { true: Expression(0, Sub), false: Counter(1) } at (prev + 0, 8) to (start + 0, 20) + true = (c0 - c1) + false = c1 - Code(Expression(0, Sub)) at (prev + 0, 21) to (start + 2, 6) - = (c1 - c2) -- Code(Counter(2)) at (prev + 2, 5) to (start + 0, 6) -- Code(Counter(1)) at (prev + 1, 8) to (start + 0, 21) -- Branch { true: Counter(3), false: Expression(2, Sub) } at (prev + 0, 8) to (start + 0, 21) - true = c3 - false = (c1 - c3) -- Code(Counter(3)) at (prev + 0, 22) to (start + 2, 6) + = (c0 - c1) +- Code(Counter(1)) at (prev + 2, 5) to (start + 0, 6) +- Code(Counter(0)) at (prev + 1, 8) to (start + 0, 21) +- Branch { true: Counter(2), false: Expression(2, Sub) } at (prev + 0, 8) to (start + 0, 21) + true = c2 + false = (c0 - c2) +- Code(Counter(2)) at (prev + 0, 22) to (start + 2, 6) - Code(Expression(2, Sub)) at (prev + 2, 5) to (start + 0, 6) - = (c1 - c3) -- Code(Counter(1)) at (prev + 1, 8) to (start + 0, 22) -- Branch { true: Expression(4, Sub), false: Counter(4) } at (prev + 0, 8) to (start + 0, 22) - true = (c1 - c4) - false = c4 + = (c0 - c2) +- Code(Counter(0)) at (prev + 1, 8) to (start + 0, 22) +- Branch { true: Expression(4, Sub), false: Counter(3) } at (prev + 0, 8) to (start + 0, 22) + true = (c0 - c3) + false = c3 - Code(Expression(4, Sub)) at (prev + 0, 23) to (start + 2, 6) - = (c1 - c4) -- Code(Counter(4)) at (prev + 2, 5) to (start + 0, 6) -- Code(Counter(1)) at (prev + 1, 1) to (start + 0, 2) -Highest counter ID seen: c4 + = (c0 - c3) +- Code(Counter(3)) at (prev + 2, 5) 
to (start + 0, 6) +- Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) +Highest counter ID seen: c3 Function name: if::branch_or -Raw bytes (60): 0x[01, 01, 06, 05, 09, 05, 17, 09, 0d, 09, 0d, 05, 17, 09, 0d, 08, 01, 35, 01, 01, 10, 05, 03, 08, 00, 09, 20, 09, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 20, 0d, 12, 00, 0d, 00, 0e, 17, 00, 0f, 02, 06, 12, 02, 0c, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (60): 0x[01, 01, 06, 01, 05, 01, 17, 05, 09, 05, 09, 01, 17, 05, 09, 08, 01, 35, 01, 01, 10, 01, 03, 08, 00, 09, 20, 05, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 20, 09, 12, 00, 0d, 00, 0e, 17, 00, 0f, 02, 06, 12, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 6 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Counter(1), rhs = Expression(5, Add) -- expression 2 operands: lhs = Counter(2), rhs = Counter(3) -- expression 3 operands: lhs = Counter(2), rhs = Counter(3) -- expression 4 operands: lhs = Counter(1), rhs = Expression(5, Add) -- expression 5 operands: lhs = Counter(2), rhs = Counter(3) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Expression(5, Add) +- expression 2 operands: lhs = Counter(1), rhs = Counter(2) +- expression 3 operands: lhs = Counter(1), rhs = Counter(2) +- expression 4 operands: lhs = Counter(0), rhs = Expression(5, Add) +- expression 5 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 8 - Code(Counter(0)) at (prev + 53, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 8) to (start + 0, 9) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) - true = c2 - false = (c1 - c2) +- Code(Counter(0)) at (prev + 3, 8) to (start + 0, 9) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 8) to (start + 0, 9) + true = c1 + false = (c0 - c1) - Code(Expression(0, Sub)) at (prev + 0, 13) to (start + 0, 14) - = (c1 - c2) -- Branch { true: Counter(3), false: Expression(4, Sub) } at (prev + 0, 13) to (start + 0, 14) - true = c3 - false = (c1 - (c2 + c3)) + = (c0 - c1) +- Branch { true: Counter(2), false: Expression(4, Sub) } at (prev + 0, 13) to (start + 0, 14) + true = c2 + false = (c0 - (c1 + c2)) - Code(Expression(5, Add)) at (prev + 0, 15) to (start + 2, 6) - = (c2 + c3) + = (c1 + c2) - Code(Expression(4, Sub)) at (prev + 2, 12) to (start + 2, 6) - = (c1 - (c2 + c3)) -- Code(Counter(1)) at (prev + 3, 1) to (start + 0, 2) -Highest counter ID seen: c3 + = (c0 - (c1 + c2)) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c2 diff --git a/tests/coverage/branch/lazy-boolean.cov-map b/tests/coverage/branch/lazy-boolean.cov-map index 94522734bcd42..622f30e2b56f6 100644 --- a/tests/coverage/branch/lazy-boolean.cov-map +++ b/tests/coverage/branch/lazy-boolean.cov-map @@ -1,148 +1,148 @@ Function name: lazy_boolean::branch_and -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 13, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 0e, 20, 09, 02, 00, 0d, 00, 0e, 09, 00, 12, 00, 13, 05, 01, 05, 01, 02] +Raw bytes (38): 0x[01, 01, 01, 01, 05, 06, 01, 13, 01, 01, 10, 01, 04, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 6 - Code(Counter(0)) at (prev + 19, 
1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) -- Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) - true = c2 - false = (c1 - c2) -- Code(Counter(2)) at (prev + 0, 18) to (start + 0, 19) -- Code(Counter(1)) at (prev + 1, 5) to (start + 1, 2) -Highest counter ID seen: c2 +- Code(Counter(0)) at (prev + 4, 9) to (start + 0, 10) +- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) + true = c1 + false = (c0 - c1) +- Code(Counter(1)) at (prev + 0, 18) to (start + 0, 19) +- Code(Counter(0)) at (prev + 1, 5) to (start + 1, 2) +Highest counter ID seen: c1 Function name: lazy_boolean::branch_or -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 1b, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 0e, 20, 09, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 05, 01, 05, 01, 02] +Raw bytes (38): 0x[01, 01, 01, 01, 05, 06, 01, 1b, 01, 01, 10, 01, 04, 09, 00, 0a, 01, 00, 0d, 00, 0e, 20, 05, 02, 00, 0d, 00, 0e, 02, 00, 12, 00, 13, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 6 - Code(Counter(0)) at (prev + 27, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) -- Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) - true = c2 - false = (c1 - c2) +- Code(Counter(0)) at (prev + 4, 9) to (start + 0, 10) +- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 14) + true = c1 + false = (c0 - c1) - Code(Expression(0, Sub)) at (prev + 0, 18) to (start + 0, 19) - = (c1 - c2) -- Code(Counter(1)) at (prev + 1, 5) to (start + 1, 2) -Highest counter ID seen: c2 + = (c0 - c1) +- Code(Counter(0)) at (prev + 1, 5) to (start + 1, 2) +Highest counter ID seen: c1 Function name: lazy_boolean::chain -Raw bytes (141): 0x[01, 01, 0f, 05, 09, 09, 0d, 0d, 11, 05, 15, 05, 15, 05, 3b, 15, 19, 05, 3b, 15, 19, 05, 37, 3b, 1d, 15, 19, 05, 37, 3b, 1d, 15, 19, 13, 01, 24, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0d, 00, 12, 20, 09, 02, 00, 0d, 00, 12, 09, 00, 16, 00, 1b, 20, 0d, 06, 00, 16, 00, 1b, 0d, 00, 1f, 00, 24, 20, 11, 0a, 00, 1f, 00, 24, 11, 00, 28, 00, 2d, 05, 01, 05, 00, 11, 05, 03, 09, 00, 0a, 05, 00, 0d, 00, 12, 20, 15, 12, 00, 0d, 00, 12, 12, 00, 16, 00, 1b, 20, 19, 1e, 00, 16, 00, 1b, 1e, 00, 1f, 00, 24, 20, 1d, 32, 00, 1f, 00, 24, 32, 00, 28, 00, 2d, 05, 01, 05, 01, 02] +Raw bytes (141): 0x[01, 01, 0f, 01, 05, 05, 09, 09, 0d, 01, 11, 01, 11, 01, 3b, 11, 15, 01, 3b, 11, 15, 01, 37, 3b, 19, 11, 15, 01, 37, 3b, 19, 11, 15, 13, 01, 24, 01, 01, 10, 01, 04, 09, 00, 0a, 01, 00, 0d, 00, 12, 20, 05, 02, 00, 0d, 00, 12, 05, 00, 16, 00, 1b, 20, 09, 06, 00, 16, 00, 1b, 09, 00, 1f, 00, 24, 20, 0d, 0a, 00, 1f, 00, 24, 0d, 00, 28, 00, 2d, 01, 01, 05, 00, 11, 01, 03, 09, 00, 0a, 01, 00, 0d, 00, 12, 20, 11, 12, 00, 0d, 00, 12, 12, 00, 16, 00, 1b, 20, 15, 1e, 00, 16, 00, 1b, 1e, 00, 1f, 00, 24, 20, 19, 32, 00, 1f, 00, 24, 32, 00, 28, 00, 2d, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 15 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = 
Counter(2), rhs = Counter(3) -- expression 2 operands: lhs = Counter(3), rhs = Counter(4) -- expression 3 operands: lhs = Counter(1), rhs = Counter(5) -- expression 4 operands: lhs = Counter(1), rhs = Counter(5) -- expression 5 operands: lhs = Counter(1), rhs = Expression(14, Add) -- expression 6 operands: lhs = Counter(5), rhs = Counter(6) -- expression 7 operands: lhs = Counter(1), rhs = Expression(14, Add) -- expression 8 operands: lhs = Counter(5), rhs = Counter(6) -- expression 9 operands: lhs = Counter(1), rhs = Expression(13, Add) -- expression 10 operands: lhs = Expression(14, Add), rhs = Counter(7) -- expression 11 operands: lhs = Counter(5), rhs = Counter(6) -- expression 12 operands: lhs = Counter(1), rhs = Expression(13, Add) -- expression 13 operands: lhs = Expression(14, Add), rhs = Counter(7) -- expression 14 operands: lhs = Counter(5), rhs = Counter(6) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +- expression 2 operands: lhs = Counter(2), rhs = Counter(3) +- expression 3 operands: lhs = Counter(0), rhs = Counter(4) +- expression 4 operands: lhs = Counter(0), rhs = Counter(4) +- expression 5 operands: lhs = Counter(0), rhs = Expression(14, Add) +- expression 6 operands: lhs = Counter(4), rhs = Counter(5) +- expression 7 operands: lhs = Counter(0), rhs = Expression(14, Add) +- expression 8 operands: lhs = Counter(4), rhs = Counter(5) +- expression 9 operands: lhs = Counter(0), rhs = Expression(13, Add) +- expression 10 operands: lhs = Expression(14, Add), rhs = Counter(6) +- expression 11 operands: lhs = Counter(4), rhs = Counter(5) +- expression 12 operands: lhs = Counter(0), rhs = Expression(13, Add) +- expression 13 operands: lhs = Expression(14, Add), rhs = Counter(6) +- expression 14 operands: lhs = Counter(4), rhs = Counter(5) Number of file 0 mappings: 19 - Code(Counter(0)) at (prev + 36, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) -- Code(Counter(1)) at (prev + 0, 13) to (start + 0, 18) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 18) +- Code(Counter(0)) at (prev + 4, 9) to (start + 0, 10) +- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 18) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 13) to (start + 0, 18) + true = c1 + false = (c0 - c1) +- Code(Counter(1)) at (prev + 0, 22) to (start + 0, 27) +- Branch { true: Counter(2), false: Expression(1, Sub) } at (prev + 0, 22) to (start + 0, 27) true = c2 false = (c1 - c2) -- Code(Counter(2)) at (prev + 0, 22) to (start + 0, 27) -- Branch { true: Counter(3), false: Expression(1, Sub) } at (prev + 0, 22) to (start + 0, 27) +- Code(Counter(2)) at (prev + 0, 31) to (start + 0, 36) +- Branch { true: Counter(3), false: Expression(2, Sub) } at (prev + 0, 31) to (start + 0, 36) true = c3 false = (c2 - c3) -- Code(Counter(3)) at (prev + 0, 31) to (start + 0, 36) -- Branch { true: Counter(4), false: Expression(2, Sub) } at (prev + 0, 31) to (start + 0, 36) +- Code(Counter(3)) at (prev + 0, 40) to (start + 0, 45) +- Code(Counter(0)) at (prev + 1, 5) to (start + 0, 17) +- Code(Counter(0)) at (prev + 3, 9) to (start + 0, 10) +- Code(Counter(0)) at (prev + 0, 13) to (start + 0, 18) +- Branch { true: Counter(4), false: Expression(4, Sub) } at (prev + 0, 13) to (start + 0, 18) true = c4 - false = (c3 - c4) -- Code(Counter(4)) at (prev + 0, 40) to (start + 0, 45) -- Code(Counter(1)) at (prev + 1, 5) to (start + 0, 17) -- Code(Counter(1)) at (prev + 3, 9) 
to (start + 0, 10) -- Code(Counter(1)) at (prev + 0, 13) to (start + 0, 18) -- Branch { true: Counter(5), false: Expression(4, Sub) } at (prev + 0, 13) to (start + 0, 18) - true = c5 - false = (c1 - c5) + false = (c0 - c4) - Code(Expression(4, Sub)) at (prev + 0, 22) to (start + 0, 27) - = (c1 - c5) -- Branch { true: Counter(6), false: Expression(7, Sub) } at (prev + 0, 22) to (start + 0, 27) - true = c6 - false = (c1 - (c5 + c6)) + = (c0 - c4) +- Branch { true: Counter(5), false: Expression(7, Sub) } at (prev + 0, 22) to (start + 0, 27) + true = c5 + false = (c0 - (c4 + c5)) - Code(Expression(7, Sub)) at (prev + 0, 31) to (start + 0, 36) - = (c1 - (c5 + c6)) -- Branch { true: Counter(7), false: Expression(12, Sub) } at (prev + 0, 31) to (start + 0, 36) - true = c7 - false = (c1 - ((c5 + c6) + c7)) + = (c0 - (c4 + c5)) +- Branch { true: Counter(6), false: Expression(12, Sub) } at (prev + 0, 31) to (start + 0, 36) + true = c6 + false = (c0 - ((c4 + c5) + c6)) - Code(Expression(12, Sub)) at (prev + 0, 40) to (start + 0, 45) - = (c1 - ((c5 + c6) + c7)) -- Code(Counter(1)) at (prev + 1, 5) to (start + 1, 2) -Highest counter ID seen: c7 + = (c0 - ((c4 + c5) + c6)) +- Code(Counter(0)) at (prev + 1, 5) to (start + 1, 2) +Highest counter ID seen: c6 Function name: lazy_boolean::nested_mixed -Raw bytes (137): 0x[01, 01, 0d, 05, 09, 05, 1f, 09, 0d, 09, 0d, 1f, 11, 09, 0d, 1f, 11, 09, 0d, 05, 15, 15, 19, 05, 19, 05, 33, 19, 1d, 13, 01, 31, 01, 01, 10, 05, 04, 09, 00, 0a, 05, 00, 0e, 00, 13, 20, 09, 02, 00, 0e, 00, 13, 02, 00, 17, 00, 1d, 20, 0d, 06, 00, 17, 00, 1d, 1f, 00, 23, 00, 28, 20, 11, 1a, 00, 23, 00, 28, 1a, 00, 2c, 00, 33, 05, 01, 05, 00, 11, 05, 03, 09, 00, 0a, 05, 00, 0e, 00, 13, 20, 15, 22, 00, 0e, 00, 13, 15, 00, 17, 00, 1c, 20, 19, 26, 00, 17, 00, 1c, 2a, 00, 22, 00, 28, 20, 1d, 2e, 00, 22, 00, 28, 1d, 00, 2c, 00, 33, 05, 01, 05, 01, 02] +Raw bytes (137): 0x[01, 01, 0d, 01, 05, 01, 1f, 05, 09, 05, 09, 1f, 0d, 05, 09, 1f, 0d, 05, 09, 01, 11, 11, 15, 01, 15, 01, 33, 15, 19, 13, 01, 31, 01, 01, 10, 01, 04, 09, 00, 0a, 01, 00, 0e, 00, 13, 20, 05, 02, 00, 0e, 00, 13, 02, 00, 17, 00, 1d, 20, 09, 06, 00, 17, 00, 1d, 1f, 00, 23, 00, 28, 20, 0d, 1a, 00, 23, 00, 28, 1a, 00, 2c, 00, 33, 01, 01, 05, 00, 11, 01, 03, 09, 00, 0a, 01, 00, 0e, 00, 13, 20, 11, 22, 00, 0e, 00, 13, 11, 00, 17, 00, 1c, 20, 15, 26, 00, 17, 00, 1c, 2a, 00, 22, 00, 28, 20, 19, 2e, 00, 22, 00, 28, 19, 00, 2c, 00, 33, 01, 01, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 13 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Counter(1), rhs = Expression(7, Add) -- expression 2 operands: lhs = Counter(2), rhs = Counter(3) -- expression 3 operands: lhs = Counter(2), rhs = Counter(3) -- expression 4 operands: lhs = Expression(7, Add), rhs = Counter(4) -- expression 5 operands: lhs = Counter(2), rhs = Counter(3) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(4) -- expression 7 operands: lhs = Counter(2), rhs = Counter(3) -- expression 8 operands: lhs = Counter(1), rhs = Counter(5) -- expression 9 operands: lhs = Counter(5), rhs = Counter(6) -- expression 10 operands: lhs = Counter(1), rhs = Counter(6) -- expression 11 operands: lhs = Counter(1), rhs = Expression(12, Add) -- expression 12 operands: lhs = Counter(6), rhs = Counter(7) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Expression(7, Add) +- expression 2 operands: lhs = Counter(1), rhs = Counter(2) +- expression 3 
operands: lhs = Counter(1), rhs = Counter(2) +- expression 4 operands: lhs = Expression(7, Add), rhs = Counter(3) +- expression 5 operands: lhs = Counter(1), rhs = Counter(2) +- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(3) +- expression 7 operands: lhs = Counter(1), rhs = Counter(2) +- expression 8 operands: lhs = Counter(0), rhs = Counter(4) +- expression 9 operands: lhs = Counter(4), rhs = Counter(5) +- expression 10 operands: lhs = Counter(0), rhs = Counter(5) +- expression 11 operands: lhs = Counter(0), rhs = Expression(12, Add) +- expression 12 operands: lhs = Counter(5), rhs = Counter(6) Number of file 0 mappings: 19 - Code(Counter(0)) at (prev + 49, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 4, 9) to (start + 0, 10) -- Code(Counter(1)) at (prev + 0, 14) to (start + 0, 19) -- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 14) to (start + 0, 19) - true = c2 - false = (c1 - c2) +- Code(Counter(0)) at (prev + 4, 9) to (start + 0, 10) +- Code(Counter(0)) at (prev + 0, 14) to (start + 0, 19) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 14) to (start + 0, 19) + true = c1 + false = (c0 - c1) - Code(Expression(0, Sub)) at (prev + 0, 23) to (start + 0, 29) - = (c1 - c2) -- Branch { true: Counter(3), false: Expression(1, Sub) } at (prev + 0, 23) to (start + 0, 29) - true = c3 - false = (c1 - (c2 + c3)) + = (c0 - c1) +- Branch { true: Counter(2), false: Expression(1, Sub) } at (prev + 0, 23) to (start + 0, 29) + true = c2 + false = (c0 - (c1 + c2)) - Code(Expression(7, Add)) at (prev + 0, 35) to (start + 0, 40) - = (c2 + c3) -- Branch { true: Counter(4), false: Expression(6, Sub) } at (prev + 0, 35) to (start + 0, 40) - true = c4 - false = ((c2 + c3) - c4) + = (c1 + c2) +- Branch { true: Counter(3), false: Expression(6, Sub) } at (prev + 0, 35) to (start + 0, 40) + true = c3 + false = ((c1 + c2) - c3) - Code(Expression(6, Sub)) at (prev + 0, 44) to (start + 0, 51) - = ((c2 + c3) - c4) -- Code(Counter(1)) at (prev + 1, 5) to (start + 0, 17) -- Code(Counter(1)) at (prev + 3, 9) to (start + 0, 10) -- Code(Counter(1)) at (prev + 0, 14) to (start + 0, 19) -- Branch { true: Counter(5), false: Expression(8, Sub) } at (prev + 0, 14) to (start + 0, 19) + = ((c1 + c2) - c3) +- Code(Counter(0)) at (prev + 1, 5) to (start + 0, 17) +- Code(Counter(0)) at (prev + 3, 9) to (start + 0, 10) +- Code(Counter(0)) at (prev + 0, 14) to (start + 0, 19) +- Branch { true: Counter(4), false: Expression(8, Sub) } at (prev + 0, 14) to (start + 0, 19) + true = c4 + false = (c0 - c4) +- Code(Counter(4)) at (prev + 0, 23) to (start + 0, 28) +- Branch { true: Counter(5), false: Expression(9, Sub) } at (prev + 0, 23) to (start + 0, 28) true = c5 - false = (c1 - c5) -- Code(Counter(5)) at (prev + 0, 23) to (start + 0, 28) -- Branch { true: Counter(6), false: Expression(9, Sub) } at (prev + 0, 23) to (start + 0, 28) - true = c6 - false = (c5 - c6) + false = (c4 - c5) - Code(Expression(10, Sub)) at (prev + 0, 34) to (start + 0, 40) - = (c1 - c6) -- Branch { true: Counter(7), false: Expression(11, Sub) } at (prev + 0, 34) to (start + 0, 40) - true = c7 - false = (c1 - (c6 + c7)) -- Code(Counter(7)) at (prev + 0, 44) to (start + 0, 51) -- Code(Counter(1)) at (prev + 1, 5) to (start + 1, 2) -Highest counter ID seen: c7 + = (c0 - c5) +- Branch { true: Counter(6), false: Expression(11, Sub) } at (prev + 0, 34) to (start + 0, 40) + true = c6 + false = (c0 - (c5 + c6)) +- Code(Counter(6)) at (prev + 0, 44) to (start + 0, 51) +- Code(Counter(0)) at (prev + 1, 
5) to (start + 1, 2) +Highest counter ID seen: c6 diff --git a/tests/coverage/branch/let-else.cov-map b/tests/coverage/branch/let-else.cov-map index e6bf7ed6a9258..215d71599e4c0 100644 --- a/tests/coverage/branch/let-else.cov-map +++ b/tests/coverage/branch/let-else.cov-map @@ -1,20 +1,20 @@ Function name: let_else::let_else -Raw bytes (43): 0x[01, 01, 01, 05, 09, 07, 01, 0c, 01, 01, 10, 20, 02, 09, 03, 09, 00, 10, 02, 00, 0e, 00, 0f, 05, 00, 13, 00, 18, 09, 01, 09, 01, 0f, 02, 04, 05, 00, 0b, 05, 01, 01, 00, 02] +Raw bytes (43): 0x[01, 01, 01, 01, 05, 07, 01, 0c, 01, 01, 10, 20, 02, 05, 03, 09, 00, 10, 02, 00, 0e, 00, 0f, 01, 00, 13, 00, 18, 05, 01, 09, 01, 0f, 02, 04, 05, 00, 0b, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 7 - Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) -- Branch { true: Expression(0, Sub), false: Counter(2) } at (prev + 3, 9) to (start + 0, 16) - true = (c1 - c2) - false = c2 +- Branch { true: Expression(0, Sub), false: Counter(1) } at (prev + 3, 9) to (start + 0, 16) + true = (c0 - c1) + false = c1 - Code(Expression(0, Sub)) at (prev + 0, 14) to (start + 0, 15) - = (c1 - c2) -- Code(Counter(1)) at (prev + 0, 19) to (start + 0, 24) -- Code(Counter(2)) at (prev + 1, 9) to (start + 1, 15) + = (c0 - c1) +- Code(Counter(0)) at (prev + 0, 19) to (start + 0, 24) +- Code(Counter(1)) at (prev + 1, 9) to (start + 1, 15) - Code(Expression(0, Sub)) at (prev + 4, 5) to (start + 0, 11) - = (c1 - c2) -- Code(Counter(1)) at (prev + 1, 1) to (start + 0, 2) -Highest counter ID seen: c2 + = (c0 - c1) +- Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) +Highest counter ID seen: c1 diff --git a/tests/coverage/branch/match-arms.cov-map b/tests/coverage/branch/match-arms.cov-map index 53d0a4edbd0cd..d5b4d04d40192 100644 --- a/tests/coverage/branch/match-arms.cov-map +++ b/tests/coverage/branch/match-arms.cov-map @@ -1,86 +1,80 @@ Function name: match_arms::guards -Raw bytes (98): 0x[01, 01, 0d, 11, 19, 27, 19, 2b, 00, 2f, 11, 33, 0d, 05, 09, 1f, 25, 23, 21, 27, 1d, 2b, 00, 2f, 11, 33, 0d, 05, 09, 0c, 01, 30, 01, 01, 10, 11, 03, 0b, 00, 10, 1d, 01, 11, 00, 29, 20, 1d, 05, 00, 17, 00, 1b, 21, 01, 11, 00, 29, 20, 21, 09, 00, 17, 00, 1b, 25, 01, 11, 00, 29, 20, 25, 0d, 00, 17, 00, 1b, 19, 01, 11, 00, 29, 20, 19, 02, 00, 17, 00, 1b, 06, 01, 0e, 00, 18, 1b, 03, 05, 01, 02] +Raw bytes (88): 0x[01, 01, 08, 15, 05, 19, 09, 1d, 0d, 21, 11, 01, 17, 1b, 11, 1f, 0d, 05, 09, 0c, 01, 30, 01, 01, 10, 21, 03, 0b, 00, 10, 05, 01, 11, 00, 29, 20, 05, 02, 00, 17, 00, 1b, 09, 01, 11, 00, 29, 20, 09, 06, 00, 17, 00, 1b, 0d, 01, 11, 00, 29, 20, 0d, 0a, 00, 17, 00, 1b, 11, 01, 11, 00, 29, 20, 11, 0e, 00, 17, 00, 1b, 12, 01, 0e, 00, 18, 01, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 13 -- expression 0 operands: lhs = Counter(4), rhs = Counter(6) -- expression 1 operands: lhs = Expression(9, Add), rhs = Counter(6) -- expression 2 operands: lhs = Expression(10, Add), rhs = Zero -- expression 3 operands: lhs = Expression(11, Add), rhs = Counter(4) -- expression 4 operands: lhs = Expression(12, Add), rhs = Counter(3) -- expression 5 operands: lhs = Counter(1), rhs = Counter(2) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(9) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(8) -- expression 8 operands: lhs = Expression(9, Add), rhs = 
Counter(7) -- expression 9 operands: lhs = Expression(10, Add), rhs = Zero -- expression 10 operands: lhs = Expression(11, Add), rhs = Counter(4) -- expression 11 operands: lhs = Expression(12, Add), rhs = Counter(3) -- expression 12 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 8 +- expression 0 operands: lhs = Counter(5), rhs = Counter(1) +- expression 1 operands: lhs = Counter(6), rhs = Counter(2) +- expression 2 operands: lhs = Counter(7), rhs = Counter(3) +- expression 3 operands: lhs = Counter(8), rhs = Counter(4) +- expression 4 operands: lhs = Counter(0), rhs = Expression(5, Add) +- expression 5 operands: lhs = Expression(6, Add), rhs = Counter(4) +- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(3) +- expression 7 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 12 - Code(Counter(0)) at (prev + 48, 1) to (start + 1, 16) -- Code(Counter(4)) at (prev + 3, 11) to (start + 0, 16) -- Code(Counter(7)) at (prev + 1, 17) to (start + 0, 41) -- Branch { true: Counter(7), false: Counter(1) } at (prev + 0, 23) to (start + 0, 27) - true = c7 - false = c1 -- Code(Counter(8)) at (prev + 1, 17) to (start + 0, 41) -- Branch { true: Counter(8), false: Counter(2) } at (prev + 0, 23) to (start + 0, 27) - true = c8 - false = c2 -- Code(Counter(9)) at (prev + 1, 17) to (start + 0, 41) -- Branch { true: Counter(9), false: Counter(3) } at (prev + 0, 23) to (start + 0, 27) - true = c9 - false = c3 -- Code(Counter(6)) at (prev + 1, 17) to (start + 0, 41) -- Branch { true: Counter(6), false: Expression(0, Sub) } at (prev + 0, 23) to (start + 0, 27) - true = c6 - false = (c4 - c6) -- Code(Expression(1, Sub)) at (prev + 1, 14) to (start + 0, 24) - = (((((c1 + c2) + c3) + c4) + Zero) - c6) -- Code(Expression(6, Add)) at (prev + 3, 5) to (start + 1, 2) - = (((((((c1 + c2) + c3) + c4) + Zero) + c7) + c8) + c9) -Highest counter ID seen: c9 +- Code(Counter(8)) at (prev + 3, 11) to (start + 0, 16) +- Code(Counter(1)) at (prev + 1, 17) to (start + 0, 41) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 0, 23) to (start + 0, 27) + true = c1 + false = (c5 - c1) +- Code(Counter(2)) at (prev + 1, 17) to (start + 0, 41) +- Branch { true: Counter(2), false: Expression(1, Sub) } at (prev + 0, 23) to (start + 0, 27) + true = c2 + false = (c6 - c2) +- Code(Counter(3)) at (prev + 1, 17) to (start + 0, 41) +- Branch { true: Counter(3), false: Expression(2, Sub) } at (prev + 0, 23) to (start + 0, 27) + true = c3 + false = (c7 - c3) +- Code(Counter(4)) at (prev + 1, 17) to (start + 0, 41) +- Branch { true: Counter(4), false: Expression(3, Sub) } at (prev + 0, 23) to (start + 0, 27) + true = c4 + false = (c8 - c4) +- Code(Expression(4, Sub)) at (prev + 1, 14) to (start + 0, 24) + = (c0 - (((c1 + c2) + c3) + c4)) +- Code(Counter(0)) at (prev + 3, 5) to (start + 1, 2) +Highest counter ID seen: c8 Function name: match_arms::match_arms -Raw bytes (45): 0x[01, 01, 03, 05, 07, 0b, 11, 09, 0d, 07, 01, 18, 01, 01, 10, 05, 03, 0b, 00, 10, 09, 01, 11, 00, 21, 0d, 01, 11, 00, 21, 11, 01, 11, 00, 21, 02, 01, 11, 00, 21, 05, 03, 05, 01, 02] +Raw bytes (45): 0x[01, 01, 03, 01, 07, 0b, 0d, 05, 09, 07, 01, 18, 01, 01, 10, 01, 03, 0b, 00, 10, 05, 01, 11, 00, 21, 09, 01, 11, 00, 21, 0d, 01, 11, 00, 21, 02, 01, 11, 00, 21, 01, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 3 -- expression 0 operands: lhs = Counter(1), rhs = Expression(1, Add) -- expression 1 operands: lhs = Expression(2, Add), rhs = Counter(4) -- expression 2 
operands: lhs = Counter(2), rhs = Counter(3) +- expression 0 operands: lhs = Counter(0), rhs = Expression(1, Add) +- expression 1 operands: lhs = Expression(2, Add), rhs = Counter(3) +- expression 2 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 7 - Code(Counter(0)) at (prev + 24, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 11) to (start + 0, 16) +- Code(Counter(0)) at (prev + 3, 11) to (start + 0, 16) +- Code(Counter(1)) at (prev + 1, 17) to (start + 0, 33) - Code(Counter(2)) at (prev + 1, 17) to (start + 0, 33) - Code(Counter(3)) at (prev + 1, 17) to (start + 0, 33) -- Code(Counter(4)) at (prev + 1, 17) to (start + 0, 33) - Code(Expression(0, Sub)) at (prev + 1, 17) to (start + 0, 33) - = (c1 - ((c2 + c3) + c4)) -- Code(Counter(1)) at (prev + 3, 5) to (start + 1, 2) -Highest counter ID seen: c4 + = (c0 - ((c1 + c2) + c3)) +- Code(Counter(0)) at (prev + 3, 5) to (start + 1, 2) +Highest counter ID seen: c3 Function name: match_arms::or_patterns -Raw bytes (57): 0x[01, 01, 04, 09, 0d, 05, 0b, 03, 11, 05, 03, 09, 01, 25, 01, 01, 10, 05, 03, 0b, 00, 10, 09, 01, 11, 00, 12, 0d, 00, 1e, 00, 1f, 03, 00, 24, 00, 2e, 11, 01, 11, 00, 12, 06, 00, 1e, 00, 1f, 0e, 00, 24, 00, 2e, 05, 03, 05, 01, 02] +Raw bytes (57): 0x[01, 01, 04, 05, 09, 01, 0b, 03, 0d, 01, 03, 09, 01, 25, 01, 01, 10, 01, 03, 0b, 00, 10, 05, 01, 11, 00, 12, 09, 00, 1e, 00, 1f, 03, 00, 24, 00, 2e, 0d, 01, 11, 00, 12, 06, 00, 1e, 00, 1f, 0e, 00, 24, 00, 2e, 01, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 4 -- expression 0 operands: lhs = Counter(2), rhs = Counter(3) -- expression 1 operands: lhs = Counter(1), rhs = Expression(2, Add) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(4) -- expression 3 operands: lhs = Counter(1), rhs = Expression(0, Add) +- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(0), rhs = Expression(2, Add) +- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) +- expression 3 operands: lhs = Counter(0), rhs = Expression(0, Add) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 37, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 11) to (start + 0, 16) -- Code(Counter(2)) at (prev + 1, 17) to (start + 0, 18) -- Code(Counter(3)) at (prev + 0, 30) to (start + 0, 31) +- Code(Counter(0)) at (prev + 3, 11) to (start + 0, 16) +- Code(Counter(1)) at (prev + 1, 17) to (start + 0, 18) +- Code(Counter(2)) at (prev + 0, 30) to (start + 0, 31) - Code(Expression(0, Add)) at (prev + 0, 36) to (start + 0, 46) - = (c2 + c3) -- Code(Counter(4)) at (prev + 1, 17) to (start + 0, 18) + = (c1 + c2) +- Code(Counter(3)) at (prev + 1, 17) to (start + 0, 18) - Code(Expression(1, Sub)) at (prev + 0, 30) to (start + 0, 31) - = (c1 - ((c2 + c3) + c4)) + = (c0 - ((c1 + c2) + c3)) - Code(Expression(3, Sub)) at (prev + 0, 36) to (start + 0, 46) - = (c1 - (c2 + c3)) -- Code(Counter(1)) at (prev + 3, 5) to (start + 1, 2) -Highest counter ID seen: c4 + = (c0 - (c1 + c2)) +- Code(Counter(0)) at (prev + 3, 5) to (start + 1, 2) +Highest counter ID seen: c3 diff --git a/tests/coverage/branch/match-trivial.cov-map b/tests/coverage/branch/match-trivial.cov-map index 6af8ce46f5fae..31322f127af78 100644 --- a/tests/coverage/branch/match-trivial.cov-map +++ b/tests/coverage/branch/match-trivial.cov-map @@ -8,12 +8,12 @@ Number of file 0 mappings: 1 Highest counter ID seen: (none) Function name: match_trivial::trivial -Raw bytes (14): 0x[01, 01, 00, 02, 01, 1e, 01, 01, 10, 05, 03, 0b, 
05, 02] +Raw bytes (14): 0x[01, 01, 00, 02, 01, 1e, 01, 01, 10, 01, 03, 0b, 05, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 0 Number of file 0 mappings: 2 - Code(Counter(0)) at (prev + 30, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 11) to (start + 5, 2) -Highest counter ID seen: c1 +- Code(Counter(0)) at (prev + 3, 11) to (start + 5, 2) +Highest counter ID seen: c0 diff --git a/tests/coverage/branch/no-mir-spans.cov-map b/tests/coverage/branch/no-mir-spans.cov-map index 6003efc36ca2d..8fb44ef30fdb0 100644 --- a/tests/coverage/branch/no-mir-spans.cov-map +++ b/tests/coverage/branch/no-mir-spans.cov-map @@ -1,56 +1,63 @@ Function name: no_mir_spans::while_cond -Raw bytes (16): 0x[01, 01, 00, 02, 01, 10, 01, 00, 11, 20, 05, 09, 04, 0b, 00, 10] +Raw bytes (18): 0x[01, 01, 01, 05, 01, 02, 01, 10, 01, 00, 11, 20, 02, 01, 04, 0b, 00, 10] Number of files: 1 - file 0 => global file 1 -Number of expressions: 0 +Number of expressions: 1 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 2 - Code(Counter(0)) at (prev + 16, 1) to (start + 0, 17) -- Branch { true: Counter(1), false: Counter(2) } at (prev + 4, 11) to (start + 0, 16) - true = c1 - false = c2 -Highest counter ID seen: c2 +- Branch { true: Expression(0, Sub), false: Counter(0) } at (prev + 4, 11) to (start + 0, 16) + true = (c1 - c0) + false = c0 +Highest counter ID seen: c0 Function name: no_mir_spans::while_cond_not -Raw bytes (16): 0x[01, 01, 00, 02, 01, 19, 01, 00, 15, 20, 09, 05, 04, 0b, 00, 14] +Raw bytes (18): 0x[01, 01, 01, 05, 01, 02, 01, 19, 01, 00, 15, 20, 02, 01, 04, 0b, 00, 14] Number of files: 1 - file 0 => global file 1 -Number of expressions: 0 +Number of expressions: 1 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 2 - Code(Counter(0)) at (prev + 25, 1) to (start + 0, 21) -- Branch { true: Counter(2), false: Counter(1) } at (prev + 4, 11) to (start + 0, 20) - true = c2 - false = c1 -Highest counter ID seen: c2 +- Branch { true: Expression(0, Sub), false: Counter(0) } at (prev + 4, 11) to (start + 0, 20) + true = (c1 - c0) + false = c0 +Highest counter ID seen: c0 Function name: no_mir_spans::while_op_and -Raw bytes (25): 0x[01, 01, 01, 05, 09, 03, 01, 22, 01, 00, 13, 20, 05, 0d, 05, 0b, 00, 10, 20, 02, 09, 00, 14, 00, 19] +Raw bytes (31): 0x[01, 01, 04, 09, 05, 09, 01, 0f, 09, 01, 05, 03, 01, 22, 01, 00, 13, 20, 05, 02, 05, 0b, 00, 10, 20, 06, 0a, 00, 14, 00, 19] Number of files: 1 - file 0 => global file 1 -Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 4 +- expression 0 operands: lhs = Counter(2), rhs = Counter(1) +- expression 1 operands: lhs = Counter(2), rhs = Counter(0) +- expression 2 operands: lhs = Expression(3, Add), rhs = Counter(2) +- expression 3 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 3 - Code(Counter(0)) at (prev + 34, 1) to (start + 0, 19) -- Branch { true: Counter(1), false: Counter(3) } at (prev + 5, 11) to (start + 0, 16) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 5, 11) to (start + 0, 16) true = c1 - false = c3 -- Branch { true: Expression(0, Sub), false: Counter(2) } at (prev + 0, 20) to (start + 0, 25) - true = (c1 - c2) - false = c2 -Highest counter ID seen: c3 + false = (c2 - c1) +- Branch { true: Expression(1, Sub), false: Expression(2, Sub) } at (prev + 0, 20) to (start + 0, 25) + true = (c2 - c0) + false = ((c0 + c1) - c2) +Highest counter ID seen: c1 Function 
name: no_mir_spans::while_op_or -Raw bytes (25): 0x[01, 01, 01, 09, 0d, 03, 01, 2d, 01, 00, 12, 20, 05, 09, 05, 0b, 00, 10, 20, 0d, 02, 00, 14, 00, 19] +Raw bytes (29): 0x[01, 01, 03, 09, 05, 09, 0b, 01, 05, 03, 01, 2d, 01, 00, 12, 20, 05, 02, 05, 0b, 00, 10, 20, 06, 01, 00, 14, 00, 19] Number of files: 1 - file 0 => global file 1 -Number of expressions: 1 -- expression 0 operands: lhs = Counter(2), rhs = Counter(3) +Number of expressions: 3 +- expression 0 operands: lhs = Counter(2), rhs = Counter(1) +- expression 1 operands: lhs = Counter(2), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(1) Number of file 0 mappings: 3 - Code(Counter(0)) at (prev + 45, 1) to (start + 0, 18) -- Branch { true: Counter(1), false: Counter(2) } at (prev + 5, 11) to (start + 0, 16) +- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 5, 11) to (start + 0, 16) true = c1 - false = c2 -- Branch { true: Counter(3), false: Expression(0, Sub) } at (prev + 0, 20) to (start + 0, 25) - true = c3 - false = (c2 - c3) -Highest counter ID seen: c3 + false = (c2 - c1) +- Branch { true: Expression(1, Sub), false: Counter(0) } at (prev + 0, 20) to (start + 0, 25) + true = (c2 - (c0 + c1)) + false = c0 +Highest counter ID seen: c1 diff --git a/tests/coverage/branch/while.cov-map b/tests/coverage/branch/while.cov-map index 5eb08a42803bc..5ce92c72b5120 100644 --- a/tests/coverage/branch/while.cov-map +++ b/tests/coverage/branch/while.cov-map @@ -1,90 +1,88 @@ Function name: while::while_cond -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 0c, 01, 01, 10, 05, 03, 09, 00, 12, 03, 01, 0b, 00, 10, 20, 09, 05, 00, 0b, 00, 10, 09, 00, 11, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (38): 0x[01, 01, 01, 05, 01, 06, 01, 0c, 01, 01, 10, 01, 03, 09, 00, 12, 05, 01, 0b, 00, 10, 20, 02, 01, 00, 0b, 00, 10, 02, 00, 11, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 6 - Code(Counter(0)) at (prev + 12, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 9) to (start + 0, 18) -- Code(Expression(0, Add)) at (prev + 1, 11) to (start + 0, 16) - = (c1 + c2) -- Branch { true: Counter(2), false: Counter(1) } at (prev + 0, 11) to (start + 0, 16) - true = c2 - false = c1 -- Code(Counter(2)) at (prev + 0, 17) to (start + 2, 6) -- Code(Counter(1)) at (prev + 3, 1) to (start + 0, 2) -Highest counter ID seen: c2 +- Code(Counter(0)) at (prev + 3, 9) to (start + 0, 18) +- Code(Counter(1)) at (prev + 1, 11) to (start + 0, 16) +- Branch { true: Expression(0, Sub), false: Counter(0) } at (prev + 0, 11) to (start + 0, 16) + true = (c1 - c0) + false = c0 +- Code(Expression(0, Sub)) at (prev + 0, 17) to (start + 2, 6) + = (c1 - c0) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c1 Function name: while::while_cond_not -Raw bytes (38): 0x[01, 01, 01, 05, 09, 06, 01, 15, 01, 01, 10, 05, 03, 09, 00, 12, 03, 01, 0b, 00, 14, 20, 09, 05, 00, 0b, 00, 14, 09, 00, 15, 02, 06, 05, 03, 01, 00, 02] +Raw bytes (38): 0x[01, 01, 01, 05, 01, 06, 01, 15, 01, 01, 10, 01, 03, 09, 00, 12, 05, 01, 0b, 00, 14, 20, 02, 01, 00, 0b, 00, 14, 02, 00, 15, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 6 - 
Code(Counter(0)) at (prev + 21, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 9) to (start + 0, 18) -- Code(Expression(0, Add)) at (prev + 1, 11) to (start + 0, 20) - = (c1 + c2) -- Branch { true: Counter(2), false: Counter(1) } at (prev + 0, 11) to (start + 0, 20) - true = c2 - false = c1 -- Code(Counter(2)) at (prev + 0, 21) to (start + 2, 6) -- Code(Counter(1)) at (prev + 3, 1) to (start + 0, 2) -Highest counter ID seen: c2 +- Code(Counter(0)) at (prev + 3, 9) to (start + 0, 18) +- Code(Counter(1)) at (prev + 1, 11) to (start + 0, 20) +- Branch { true: Expression(0, Sub), false: Counter(0) } at (prev + 0, 11) to (start + 0, 20) + true = (c1 - c0) + false = c0 +- Code(Expression(0, Sub)) at (prev + 0, 21) to (start + 2, 6) + = (c1 - c0) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c1 Function name: while::while_op_and -Raw bytes (56): 0x[01, 01, 04, 05, 09, 03, 0d, 03, 0d, 05, 0d, 08, 01, 1e, 01, 01, 10, 05, 03, 09, 01, 12, 03, 02, 0b, 00, 10, 20, 0a, 0d, 00, 0b, 00, 10, 0a, 00, 14, 00, 19, 20, 09, 0e, 00, 14, 00, 19, 09, 00, 1a, 03, 06, 05, 04, 01, 00, 02] +Raw bytes (58): 0x[01, 01, 05, 05, 09, 05, 01, 0f, 05, 01, 09, 05, 01, 08, 01, 1e, 01, 01, 10, 01, 03, 09, 01, 12, 05, 02, 0b, 00, 10, 20, 09, 02, 00, 0b, 00, 10, 09, 00, 14, 00, 19, 20, 12, 0a, 00, 14, 00, 19, 12, 00, 1a, 03, 06, 01, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 4 +Number of expressions: 5 - expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Counter(1), rhs = Counter(3) +- expression 1 operands: lhs = Counter(1), rhs = Counter(0) +- expression 2 operands: lhs = Expression(3, Add), rhs = Counter(1) +- expression 3 operands: lhs = Counter(0), rhs = Counter(2) +- expression 4 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 8 - Code(Counter(0)) at (prev + 30, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 9) to (start + 1, 18) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 16) - = (c1 + c2) -- Branch { true: Expression(2, Sub), false: Counter(3) } at (prev + 0, 11) to (start + 0, 16) - true = ((c1 + c2) - c3) - false = c3 -- Code(Expression(2, Sub)) at (prev + 0, 20) to (start + 0, 25) - = ((c1 + c2) - c3) -- Branch { true: Counter(2), false: Expression(3, Sub) } at (prev + 0, 20) to (start + 0, 25) +- Code(Counter(0)) at (prev + 3, 9) to (start + 1, 18) +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 16) +- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 11) to (start + 0, 16) true = c2 - false = (c1 - c3) -- Code(Counter(2)) at (prev + 0, 26) to (start + 3, 6) -- Code(Counter(1)) at (prev + 4, 1) to (start + 0, 2) -Highest counter ID seen: c3 + false = (c1 - c2) +- Code(Counter(2)) at (prev + 0, 20) to (start + 0, 25) +- Branch { true: Expression(4, Sub), false: Expression(2, Sub) } at (prev + 0, 20) to (start + 0, 25) + true = (c1 - c0) + false = ((c0 + c2) - c1) +- Code(Expression(4, Sub)) at (prev + 0, 26) to (start + 3, 6) + = (c1 - c0) +- Code(Counter(0)) at (prev + 4, 1) to (start + 0, 2) +Highest counter ID seen: c2 Function name: while::while_op_or -Raw bytes (58): 0x[01, 01, 05, 07, 0d, 05, 09, 05, 0d, 05, 0d, 09, 0d, 08, 01, 29, 01, 01, 10, 05, 03, 09, 01, 12, 03, 02, 0b, 00, 10, 20, 09, 0f, 00, 0b, 00, 10, 0f, 00, 14, 00, 19, 20, 0d, 05, 00, 14, 00, 19, 13, 00, 1a, 03, 06, 05, 
04, 01, 00, 02] +Raw bytes (56): 0x[01, 01, 04, 05, 09, 05, 0b, 01, 09, 05, 01, 08, 01, 29, 01, 01, 10, 01, 03, 09, 01, 12, 05, 02, 0b, 00, 10, 20, 09, 02, 00, 0b, 00, 10, 02, 00, 14, 00, 19, 20, 06, 01, 00, 14, 00, 19, 0e, 00, 1a, 03, 06, 01, 04, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 5 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) -- expression 1 operands: lhs = Counter(1), rhs = Counter(2) -- expression 2 operands: lhs = Counter(1), rhs = Counter(3) -- expression 3 operands: lhs = Counter(1), rhs = Counter(3) -- expression 4 operands: lhs = Counter(2), rhs = Counter(3) +Number of expressions: 4 +- expression 0 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(1), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 8 - Code(Counter(0)) at (prev + 41, 1) to (start + 1, 16) -- Code(Counter(1)) at (prev + 3, 9) to (start + 1, 18) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 16) - = ((c1 + c2) + c3) -- Branch { true: Counter(2), false: Expression(3, Add) } at (prev + 0, 11) to (start + 0, 16) +- Code(Counter(0)) at (prev + 3, 9) to (start + 1, 18) +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 16) +- Branch { true: Counter(2), false: Expression(0, Sub) } at (prev + 0, 11) to (start + 0, 16) true = c2 - false = (c1 + c3) -- Code(Expression(3, Add)) at (prev + 0, 20) to (start + 0, 25) - = (c1 + c3) -- Branch { true: Counter(3), false: Counter(1) } at (prev + 0, 20) to (start + 0, 25) - true = c3 - false = c1 -- Code(Expression(4, Add)) at (prev + 0, 26) to (start + 3, 6) - = (c2 + c3) -- Code(Counter(1)) at (prev + 4, 1) to (start + 0, 2) -Highest counter ID seen: c3 + false = (c1 - c2) +- Code(Expression(0, Sub)) at (prev + 0, 20) to (start + 0, 25) + = (c1 - c2) +- Branch { true: Expression(1, Sub), false: Counter(0) } at (prev + 0, 20) to (start + 0, 25) + true = (c1 - (c0 + c2)) + false = c0 +- Code(Expression(3, Sub)) at (prev + 0, 26) to (start + 3, 6) + = (c1 - c0) +- Code(Counter(0)) at (prev + 4, 1) to (start + 0, 2) +Highest counter ID seen: c2 diff --git a/tests/coverage/continue.cov-map b/tests/coverage/continue.cov-map index eb968fbb747ab..d926741cbcb3c 100644 --- a/tests/coverage/continue.cov-map +++ b/tests/coverage/continue.cov-map @@ -1,80 +1,75 @@ Function name: continue::main -Raw bytes (210): 0x[01, 01, 1c, 07, 09, 01, 05, 03, 0d, 1f, 15, 0d, 11, 1b, 19, 1f, 15, 0d, 11, 33, 21, 19, 1d, 2f, 25, 33, 21, 19, 1d, 47, 2d, 25, 29, 43, 31, 47, 2d, 25, 29, 5b, 39, 31, 35, 57, 3d, 5b, 39, 31, 35, 35, 39, 3d, 41, 6b, 45, 3d, 41, 3d, 45, 1e, 01, 03, 01, 03, 12, 03, 04, 0e, 00, 13, 0a, 01, 0f, 00, 16, 05, 02, 11, 00, 19, 09, 02, 12, 04, 0e, 1b, 06, 0e, 00, 13, 16, 01, 0f, 00, 16, 15, 01, 16, 02, 0e, 11, 04, 11, 00, 19, 15, 03, 09, 00, 0e, 2f, 02, 0e, 00, 13, 2a, 01, 0f, 00, 16, 1d, 01, 15, 02, 0e, 21, 04, 11, 00, 19, 1d, 03, 09, 00, 0e, 43, 02, 0e, 00, 13, 3e, 01, 0c, 00, 13, 29, 01, 0d, 00, 15, 2d, 01, 0a, 01, 0e, 57, 03, 0e, 00, 13, 52, 01, 0f, 00, 16, 39, 01, 16, 02, 0e, 35, 03, 12, 02, 0e, 5f, 04, 09, 00, 0e, 6b, 02, 0e, 00, 13, 66, 01, 0f, 00, 16, 41, 01, 16, 02, 0e, 6e, 04, 11, 00, 16, 41, 03, 09, 00, 0e, 3d, 02, 0d, 01, 02] +Raw bytes (198): 0x[01, 01, 16, 05, 01, 05, 0b, 01, 09, 0d, 01, 0d, 1f, 01, 11, 0d, 1f, 01, 11, 15, 01, 15, 2b, 01, 19, 1d, 01, 1d, 37, 01, 21, 25, 01, 25, 43, 01, 29, 25, 01, 2d, 01, 53, 2d, 01, 31, 2d, 01, 1e, 01, 
03, 01, 03, 12, 05, 04, 0e, 00, 13, 02, 01, 0f, 00, 16, 09, 02, 11, 00, 19, 06, 02, 12, 04, 0e, 0d, 06, 0e, 00, 13, 0e, 01, 0f, 00, 16, 1a, 01, 16, 02, 0e, 11, 04, 11, 00, 19, 1a, 03, 09, 00, 0e, 15, 02, 0e, 00, 13, 22, 01, 0f, 00, 16, 19, 01, 15, 02, 0e, 26, 04, 11, 00, 19, 19, 03, 09, 00, 0e, 1d, 02, 0e, 00, 13, 2e, 01, 0c, 00, 13, 21, 01, 0d, 00, 15, 32, 01, 0a, 01, 0e, 25, 03, 0e, 00, 13, 46, 01, 0f, 00, 16, 3e, 01, 16, 02, 0e, 29, 03, 12, 02, 0e, 46, 04, 09, 00, 0e, 2d, 02, 0e, 00, 13, 31, 01, 0f, 00, 16, 56, 01, 16, 02, 0e, 4e, 04, 11, 00, 16, 56, 03, 09, 00, 0e, 01, 02, 0d, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 28 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Counter(1) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Expression(7, Add), rhs = Counter(5) -- expression 4 operands: lhs = Counter(3), rhs = Counter(4) -- expression 5 operands: lhs = Expression(6, Add), rhs = Counter(6) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(5) -- expression 7 operands: lhs = Counter(3), rhs = Counter(4) -- expression 8 operands: lhs = Expression(12, Add), rhs = Counter(8) -- expression 9 operands: lhs = Counter(6), rhs = Counter(7) -- expression 10 operands: lhs = Expression(11, Add), rhs = Counter(9) -- expression 11 operands: lhs = Expression(12, Add), rhs = Counter(8) -- expression 12 operands: lhs = Counter(6), rhs = Counter(7) -- expression 13 operands: lhs = Expression(17, Add), rhs = Counter(11) -- expression 14 operands: lhs = Counter(9), rhs = Counter(10) -- expression 15 operands: lhs = Expression(16, Add), rhs = Counter(12) -- expression 16 operands: lhs = Expression(17, Add), rhs = Counter(11) -- expression 17 operands: lhs = Counter(9), rhs = Counter(10) -- expression 18 operands: lhs = Expression(22, Add), rhs = Counter(14) -- expression 19 operands: lhs = Counter(12), rhs = Counter(13) -- expression 20 operands: lhs = Expression(21, Add), rhs = Counter(15) -- expression 21 operands: lhs = Expression(22, Add), rhs = Counter(14) -- expression 22 operands: lhs = Counter(12), rhs = Counter(13) -- expression 23 operands: lhs = Counter(13), rhs = Counter(14) -- expression 24 operands: lhs = Counter(15), rhs = Counter(16) -- expression 25 operands: lhs = Expression(26, Add), rhs = Counter(17) -- expression 26 operands: lhs = Counter(15), rhs = Counter(16) -- expression 27 operands: lhs = Counter(15), rhs = Counter(17) +Number of expressions: 22 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(3), rhs = Counter(0) +- expression 4 operands: lhs = Counter(3), rhs = Expression(7, Add) +- expression 5 operands: lhs = Counter(0), rhs = Counter(4) +- expression 6 operands: lhs = Counter(3), rhs = Expression(7, Add) +- expression 7 operands: lhs = Counter(0), rhs = Counter(4) +- expression 8 operands: lhs = Counter(5), rhs = Counter(0) +- expression 9 operands: lhs = Counter(5), rhs = Expression(10, Add) +- expression 10 operands: lhs = Counter(0), rhs = Counter(6) +- expression 11 operands: lhs = Counter(7), rhs = Counter(0) +- expression 12 operands: lhs = Counter(7), rhs = Expression(13, Add) +- expression 13 operands: lhs = Counter(0), rhs = Counter(8) +- expression 14 operands: lhs = Counter(9), rhs = Counter(0) +- expression 15 
operands: lhs = Counter(9), rhs = Expression(16, Add) +- expression 16 operands: lhs = Counter(0), rhs = Counter(10) +- expression 17 operands: lhs = Counter(9), rhs = Counter(0) +- expression 18 operands: lhs = Counter(11), rhs = Counter(0) +- expression 19 operands: lhs = Expression(20, Add), rhs = Counter(11) +- expression 20 operands: lhs = Counter(0), rhs = Counter(12) +- expression 21 operands: lhs = Counter(11), rhs = Counter(0) Number of file 0 mappings: 30 - Code(Counter(0)) at (prev + 3, 1) to (start + 3, 18) -- Code(Expression(0, Add)) at (prev + 4, 14) to (start + 0, 19) - = ((c0 + c1) + c2) -- Code(Expression(2, Sub)) at (prev + 1, 15) to (start + 0, 22) - = (((c0 + c1) + c2) - c3) -- Code(Counter(1)) at (prev + 2, 17) to (start + 0, 25) -- Code(Counter(2)) at (prev + 2, 18) to (start + 4, 14) -- Code(Expression(6, Add)) at (prev + 6, 14) to (start + 0, 19) - = ((c3 + c4) + c5) -- Code(Expression(5, Sub)) at (prev + 1, 15) to (start + 0, 22) - = (((c3 + c4) + c5) - c6) -- Code(Counter(5)) at (prev + 1, 22) to (start + 2, 14) +- Code(Counter(1)) at (prev + 4, 14) to (start + 0, 19) +- Code(Expression(0, Sub)) at (prev + 1, 15) to (start + 0, 22) + = (c1 - c0) +- Code(Counter(2)) at (prev + 2, 17) to (start + 0, 25) +- Code(Expression(1, Sub)) at (prev + 2, 18) to (start + 4, 14) + = (c1 - (c0 + c2)) +- Code(Counter(3)) at (prev + 6, 14) to (start + 0, 19) +- Code(Expression(3, Sub)) at (prev + 1, 15) to (start + 0, 22) + = (c3 - c0) +- Code(Expression(6, Sub)) at (prev + 1, 22) to (start + 2, 14) + = (c3 - (c0 + c4)) - Code(Counter(4)) at (prev + 4, 17) to (start + 0, 25) -- Code(Counter(5)) at (prev + 3, 9) to (start + 0, 14) -- Code(Expression(11, Add)) at (prev + 2, 14) to (start + 0, 19) - = ((c6 + c7) + c8) -- Code(Expression(10, Sub)) at (prev + 1, 15) to (start + 0, 22) - = (((c6 + c7) + c8) - c9) -- Code(Counter(7)) at (prev + 1, 21) to (start + 2, 14) -- Code(Counter(8)) at (prev + 4, 17) to (start + 0, 25) -- Code(Counter(7)) at (prev + 3, 9) to (start + 0, 14) -- Code(Expression(16, Add)) at (prev + 2, 14) to (start + 0, 19) - = ((c9 + c10) + c11) -- Code(Expression(15, Sub)) at (prev + 1, 12) to (start + 0, 19) - = (((c9 + c10) + c11) - c12) -- Code(Counter(10)) at (prev + 1, 13) to (start + 0, 21) -- Code(Counter(11)) at (prev + 1, 10) to (start + 1, 14) -- Code(Expression(21, Add)) at (prev + 3, 14) to (start + 0, 19) - = ((c12 + c13) + c14) -- Code(Expression(20, Sub)) at (prev + 1, 15) to (start + 0, 22) - = (((c12 + c13) + c14) - c15) -- Code(Counter(14)) at (prev + 1, 22) to (start + 2, 14) -- Code(Counter(13)) at (prev + 3, 18) to (start + 2, 14) -- Code(Expression(23, Add)) at (prev + 4, 9) to (start + 0, 14) - = (c13 + c14) -- Code(Expression(26, Add)) at (prev + 2, 14) to (start + 0, 19) - = (c15 + c16) -- Code(Expression(25, Sub)) at (prev + 1, 15) to (start + 0, 22) - = ((c15 + c16) - c17) -- Code(Counter(16)) at (prev + 1, 22) to (start + 2, 14) -- Code(Expression(27, Sub)) at (prev + 4, 17) to (start + 0, 22) - = (c15 - c17) -- Code(Counter(16)) at (prev + 3, 9) to (start + 0, 14) -- Code(Counter(15)) at (prev + 2, 13) to (start + 1, 2) -Highest counter ID seen: c16 +- Code(Expression(6, Sub)) at (prev + 3, 9) to (start + 0, 14) + = (c3 - (c0 + c4)) +- Code(Counter(5)) at (prev + 2, 14) to (start + 0, 19) +- Code(Expression(8, Sub)) at (prev + 1, 15) to (start + 0, 22) + = (c5 - c0) +- Code(Counter(6)) at (prev + 1, 21) to (start + 2, 14) +- Code(Expression(9, Sub)) at (prev + 4, 17) to (start + 0, 25) + = (c5 - (c0 + c6)) +- Code(Counter(6)) at (prev 
+ 3, 9) to (start + 0, 14) +- Code(Counter(7)) at (prev + 2, 14) to (start + 0, 19) +- Code(Expression(11, Sub)) at (prev + 1, 12) to (start + 0, 19) + = (c7 - c0) +- Code(Counter(8)) at (prev + 1, 13) to (start + 0, 21) +- Code(Expression(12, Sub)) at (prev + 1, 10) to (start + 1, 14) + = (c7 - (c0 + c8)) +- Code(Counter(9)) at (prev + 3, 14) to (start + 0, 19) +- Code(Expression(17, Sub)) at (prev + 1, 15) to (start + 0, 22) + = (c9 - c0) +- Code(Expression(15, Sub)) at (prev + 1, 22) to (start + 2, 14) + = (c9 - (c0 + c10)) +- Code(Counter(10)) at (prev + 3, 18) to (start + 2, 14) +- Code(Expression(17, Sub)) at (prev + 4, 9) to (start + 0, 14) + = (c9 - c0) +- Code(Counter(11)) at (prev + 2, 14) to (start + 0, 19) +- Code(Counter(12)) at (prev + 1, 15) to (start + 0, 22) +- Code(Expression(21, Sub)) at (prev + 1, 22) to (start + 2, 14) + = (c11 - c0) +- Code(Expression(19, Sub)) at (prev + 4, 17) to (start + 0, 22) + = ((c0 + c12) - c11) +- Code(Expression(21, Sub)) at (prev + 3, 9) to (start + 0, 14) + = (c11 - c0) +- Code(Counter(0)) at (prev + 2, 13) to (start + 1, 2) +Highest counter ID seen: c12 diff --git a/tests/coverage/coroutine.cov-map b/tests/coverage/coroutine.cov-map index 7457a528a8694..c6f2d415056d9 100644 --- a/tests/coverage/coroutine.cov-map +++ b/tests/coverage/coroutine.cov-map @@ -13,28 +13,25 @@ Number of file 0 mappings: 4 Highest counter ID seen: c1 Function name: coroutine::main -Raw bytes (57): 0x[01, 01, 04, 07, 0d, 05, 09, 11, 19, 11, 15, 09, 01, 13, 01, 02, 16, 01, 08, 0b, 00, 2e, 11, 01, 2b, 00, 2d, 03, 01, 0e, 00, 35, 11, 02, 0b, 00, 2e, 0a, 01, 22, 00, 27, 15, 00, 2c, 00, 2e, 0e, 01, 0e, 00, 35, 15, 02, 01, 00, 02] +Raw bytes (53): 0x[01, 01, 02, 01, 05, 05, 09, 09, 01, 13, 01, 02, 16, 01, 08, 0b, 00, 2e, 05, 01, 2b, 00, 2d, 02, 01, 0e, 00, 35, 05, 02, 0b, 00, 2e, 0d, 01, 22, 00, 27, 09, 00, 2c, 00, 2e, 06, 01, 0e, 00, 35, 09, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 4 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) +Number of expressions: 2 +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) - expression 1 operands: lhs = Counter(1), rhs = Counter(2) -- expression 2 operands: lhs = Counter(4), rhs = Counter(6) -- expression 3 operands: lhs = Counter(4), rhs = Counter(5) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 19, 1) to (start + 2, 22) - Code(Counter(0)) at (prev + 8, 11) to (start + 0, 46) -- Code(Counter(4)) at (prev + 1, 43) to (start + 0, 45) -- Code(Expression(0, Add)) at (prev + 1, 14) to (start + 0, 53) - = ((c1 + c2) + c3) -- Code(Counter(4)) at (prev + 2, 11) to (start + 0, 46) -- Code(Expression(2, Sub)) at (prev + 1, 34) to (start + 0, 39) - = (c4 - c6) -- Code(Counter(5)) at (prev + 0, 44) to (start + 0, 46) -- Code(Expression(3, Sub)) at (prev + 1, 14) to (start + 0, 53) - = (c4 - c5) -- Code(Counter(5)) at (prev + 2, 1) to (start + 0, 2) -Highest counter ID seen: c5 +- Code(Counter(1)) at (prev + 1, 43) to (start + 0, 45) +- Code(Expression(0, Sub)) at (prev + 1, 14) to (start + 0, 53) + = (c0 - c1) +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 46) +- Code(Counter(3)) at (prev + 1, 34) to (start + 0, 39) +- Code(Counter(2)) at (prev + 0, 44) to (start + 0, 46) +- Code(Expression(1, Sub)) at (prev + 1, 14) to (start + 0, 53) + = (c1 - c2) +- Code(Counter(2)) at (prev + 2, 1) to (start + 0, 2) +Highest counter ID seen: c3 Function name: coroutine::main::{closure#0} Raw bytes (14): 0x[01, 01, 00, 02, 01, 16, 08, 01, 1f, 05, 02, 10, 01, 06] diff 
--git a/tests/coverage/inline.cov-map b/tests/coverage/inline.cov-map index 39ba2b2d99bfb..a569ad53cbc24 100644 --- a/tests/coverage/inline.cov-map +++ b/tests/coverage/inline.cov-map @@ -1,15 +1,16 @@ Function name: inline::display:: -Raw bytes (31): 0x[01, 01, 01, 01, 05, 05, 01, 29, 01, 00, 22, 05, 01, 09, 00, 0a, 03, 00, 0e, 00, 10, 05, 00, 11, 02, 06, 01, 03, 05, 01, 02] +Raw bytes (31): 0x[01, 01, 01, 05, 01, 05, 01, 29, 01, 00, 22, 02, 01, 09, 00, 0a, 05, 00, 0e, 00, 10, 02, 00, 11, 02, 06, 01, 03, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 5 - Code(Counter(0)) at (prev + 41, 1) to (start + 0, 34) -- Code(Counter(1)) at (prev + 1, 9) to (start + 0, 10) -- Code(Expression(0, Add)) at (prev + 0, 14) to (start + 0, 16) - = (c0 + c1) -- Code(Counter(1)) at (prev + 0, 17) to (start + 2, 6) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 10) + = (c1 - c0) +- Code(Counter(1)) at (prev + 0, 14) to (start + 0, 16) +- Code(Expression(0, Sub)) at (prev + 0, 17) to (start + 2, 6) + = (c1 - c0) - Code(Counter(0)) at (prev + 3, 5) to (start + 1, 2) Highest counter ID seen: c1 @@ -41,28 +42,29 @@ Number of file 0 mappings: 1 Highest counter ID seen: c0 Function name: inline::permutate:: -Raw bytes (54): 0x[01, 01, 05, 01, 05, 01, 0b, 05, 0d, 13, 0d, 01, 09, 08, 01, 0f, 01, 02, 0e, 05, 02, 0f, 02, 06, 02, 02, 0f, 00, 14, 11, 01, 0d, 00, 0e, 0d, 00, 12, 00, 16, 11, 00, 17, 04, 0a, 06, 05, 0c, 02, 06, 0e, 03, 01, 00, 02] +Raw bytes (54): 0x[01, 01, 05, 01, 05, 0d, 09, 0d, 09, 01, 13, 05, 09, 08, 01, 0f, 01, 02, 0e, 05, 02, 0f, 02, 06, 02, 02, 0f, 00, 14, 0a, 01, 0d, 00, 0e, 09, 00, 12, 00, 16, 0a, 00, 17, 04, 0a, 0e, 05, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(0), rhs = Expression(2, Add) -- expression 2 operands: lhs = Counter(1), rhs = Counter(3) -- expression 3 operands: lhs = Expression(4, Add), rhs = Counter(3) -- expression 4 operands: lhs = Counter(0), rhs = Counter(2) +- expression 1 operands: lhs = Counter(3), rhs = Counter(2) +- expression 2 operands: lhs = Counter(3), rhs = Counter(2) +- expression 3 operands: lhs = Counter(0), rhs = Expression(4, Add) +- expression 4 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 8 - Code(Counter(0)) at (prev + 15, 1) to (start + 2, 14) - Code(Counter(1)) at (prev + 2, 15) to (start + 2, 6) - Code(Expression(0, Sub)) at (prev + 2, 15) to (start + 0, 20) = (c0 - c1) -- Code(Counter(4)) at (prev + 1, 13) to (start + 0, 14) -- Code(Counter(3)) at (prev + 0, 18) to (start + 0, 22) -- Code(Counter(4)) at (prev + 0, 23) to (start + 4, 10) -- Code(Expression(1, Sub)) at (prev + 5, 12) to (start + 2, 6) - = (c0 - (c1 + c3)) -- Code(Expression(3, Sub)) at (prev + 3, 1) to (start + 0, 2) - = ((c0 + c2) - c3) -Highest counter ID seen: c4 +- Code(Expression(2, Sub)) at (prev + 1, 13) to (start + 0, 14) + = (c3 - c2) +- Code(Counter(2)) at (prev + 0, 18) to (start + 0, 22) +- Code(Expression(2, Sub)) at (prev + 0, 23) to (start + 4, 10) + = (c3 - c2) +- Code(Expression(3, Sub)) at (prev + 5, 12) to (start + 2, 6) + = (c0 - (c1 + c2)) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c2 Function name: inline::permutations:: Raw bytes (9): 0x[01, 01, 00, 01, 
01, 0a, 01, 03, 02] diff --git a/tests/coverage/issue-84561.cov-map b/tests/coverage/issue-84561.cov-map index a2ab558f9608f..efb9d43bf5bea 100644 --- a/tests/coverage/issue-84561.cov-map +++ b/tests/coverage/issue-84561.cov-map @@ -59,147 +59,109 @@ Number of file 0 mappings: 1 Highest counter ID seen: c0 Function name: issue_84561::test3 -Raw bytes (409): 0x[01, 01, 3b, 05, 09, 0d, 11, 15, 19, 15, 1f, 19, 1d, 15, 1b, 1f, 21, 19, 1d, 25, 29, 21, 25, 2d, 31, 21, 33, 25, 2d, 35, 39, 3d, 41, 3d, 43, 41, 45, 5f, 4d, 45, 49, 5f, 67, 45, 49, 4d, 51, 5f, 63, 45, 49, 67, 59, 4d, 51, 97, 01, 55, 51, 59, 97, 01, 55, 51, 59, 97, 01, 83, 01, 51, 59, 55, 5d, 97, 01, 9f, 01, 51, 59, 55, 61, 97, 01, 9b, 01, 51, 59, 9f, 01, 65, 55, 61, db, 01, e7, 01, 69, 71, 6d, 75, 69, 6d, 69, 6d, 69, bb, 01, 6d, 00, 69, e7, 01, 6d, 75, db, 01, e3, 01, 69, 71, e7, 01, 79, 6d, 75, db, 01, df, 01, 69, 71, e3, 01, 7d, e7, 01, 79, 6d, 75, 7d, 81, 01, 33, 01, 08, 01, 03, 1c, 05, 04, 09, 01, 1c, 02, 02, 05, 04, 1f, 0d, 05, 05, 00, 1f, 06, 01, 05, 00, 1f, 15, 01, 09, 01, 1c, 0a, 02, 05, 00, 1f, 0e, 01, 05, 00, 0f, 16, 00, 20, 00, 30, 21, 01, 05, 03, 0f, 25, 03, 20, 00, 30, 29, 00, 33, 00, 41, 22, 00, 4b, 00, 5a, 26, 01, 05, 00, 0f, 2d, 05, 09, 03, 10, 31, 05, 0d, 00, 1b, 2a, 02, 0d, 00, 1c, 2e, 04, 09, 05, 06, 35, 06, 05, 03, 06, 36, 04, 05, 03, 06, 3d, 04, 09, 04, 06, 3a, 05, 08, 00, 0f, 45, 01, 09, 03, 0a, 3e, 05, 09, 03, 0a, 46, 05, 08, 00, 0f, 51, 01, 09, 00, 13, 55, 03, 0d, 00, 1d, 4e, 03, 09, 00, 13, 5a, 03, 0d, 00, 1d, 72, 03, 05, 00, 0f, 72, 01, 0c, 00, 13, 5d, 01, 0d, 00, 13, 7a, 02, 0d, 00, 13, 86, 01, 04, 05, 02, 13, 65, 03, 0d, 00, 13, 92, 01, 02, 0d, 00, 13, a2, 01, 03, 05, 00, 0f, 69, 01, 0c, 00, 13, 6d, 01, 0d, 03, 0e, 71, 04, 0d, 00, 13, b2, 01, 02, 0d, 00, 17, b2, 01, 01, 14, 00, 1b, 00, 01, 15, 00, 1b, b6, 01, 02, 15, 00, 1b, be, 01, 04, 0d, 00, 13, 79, 03, 09, 00, 19, c6, 01, 02, 05, 00, 0f, d6, 01, 03, 09, 00, 22, 7d, 02, 05, 00, 0f, ea, 01, 03, 09, 00, 2c, 81, 01, 02, 01, 00, 02] +Raw bytes (317): 0x[01, 01, 1c, 1d, 21, 25, 29, 21, 25, 2d, 31, 21, 17, 25, 2d, 41, 45, 49, 4d, 51, 55, 33, 51, 49, 4d, 33, 37, 49, 4d, 51, 59, 55, 59, 55, 59, 47, 5d, 55, 59, 61, 65, 71, 75, 69, 6d, 69, 6d, 69, 5f, 6d, 00, 67, 79, 71, 75, 79, 7d, 7d, 81, 01, 33, 01, 08, 01, 03, 1c, 05, 04, 09, 01, 1c, 09, 02, 05, 04, 1f, 0d, 05, 05, 00, 1f, 11, 01, 05, 00, 1f, 15, 01, 09, 01, 1c, 19, 02, 05, 00, 1f, 1d, 01, 05, 00, 0f, 02, 00, 20, 00, 30, 21, 01, 05, 03, 0f, 25, 03, 20, 00, 30, 29, 00, 33, 00, 41, 06, 00, 4b, 00, 5a, 0a, 01, 05, 00, 0f, 2d, 05, 09, 03, 10, 31, 05, 0d, 00, 1b, 0e, 02, 0d, 00, 1c, 12, 04, 09, 05, 06, 35, 06, 05, 03, 06, 39, 04, 05, 03, 06, 3d, 04, 09, 04, 06, 41, 05, 08, 00, 0f, 45, 01, 09, 03, 0a, 1a, 05, 09, 03, 0a, 33, 05, 08, 00, 0f, 51, 01, 09, 00, 13, 22, 03, 0d, 00, 1d, 26, 03, 09, 00, 13, 2e, 03, 0d, 00, 1d, 47, 03, 05, 00, 0f, 47, 01, 0c, 00, 13, 5d, 01, 0d, 00, 13, 42, 02, 0d, 00, 13, 61, 04, 05, 02, 13, 65, 03, 0d, 00, 13, 4a, 02, 0d, 00, 13, 67, 03, 05, 00, 0f, 69, 01, 0c, 00, 13, 6d, 01, 0d, 03, 0e, 71, 04, 0d, 00, 13, 56, 02, 0d, 00, 17, 56, 01, 14, 00, 1b, 00, 01, 15, 00, 1b, 5a, 02, 15, 00, 1b, 75, 04, 0d, 00, 13, 62, 03, 09, 00, 19, 79, 02, 05, 00, 0f, 6a, 03, 09, 00, 22, 7d, 02, 05, 00, 0f, 6e, 03, 09, 00, 2c, 81, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 59 -- expression 0 operands: lhs = Counter(1), rhs = Counter(2) -- expression 1 operands: lhs = Counter(3), rhs = Counter(4) -- expression 2 operands: lhs = Counter(5), rhs = Counter(6) -- 
expression 3 operands: lhs = Counter(5), rhs = Expression(7, Add) -- expression 4 operands: lhs = Counter(6), rhs = Counter(7) -- expression 5 operands: lhs = Counter(5), rhs = Expression(6, Add) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(8) -- expression 7 operands: lhs = Counter(6), rhs = Counter(7) -- expression 8 operands: lhs = Counter(9), rhs = Counter(10) -- expression 9 operands: lhs = Counter(8), rhs = Counter(9) -- expression 10 operands: lhs = Counter(11), rhs = Counter(12) -- expression 11 operands: lhs = Counter(8), rhs = Expression(12, Add) -- expression 12 operands: lhs = Counter(9), rhs = Counter(11) -- expression 13 operands: lhs = Counter(13), rhs = Counter(14) -- expression 14 operands: lhs = Counter(15), rhs = Counter(16) -- expression 15 operands: lhs = Counter(15), rhs = Expression(16, Add) -- expression 16 operands: lhs = Counter(16), rhs = Counter(17) -- expression 17 operands: lhs = Expression(23, Add), rhs = Counter(19) -- expression 18 operands: lhs = Counter(17), rhs = Counter(18) -- expression 19 operands: lhs = Expression(23, Add), rhs = Expression(25, Add) -- expression 20 operands: lhs = Counter(17), rhs = Counter(18) -- expression 21 operands: lhs = Counter(19), rhs = Counter(20) -- expression 22 operands: lhs = Expression(23, Add), rhs = Expression(24, Add) -- expression 23 operands: lhs = Counter(17), rhs = Counter(18) -- expression 24 operands: lhs = Expression(25, Add), rhs = Counter(22) -- expression 25 operands: lhs = Counter(19), rhs = Counter(20) -- expression 26 operands: lhs = Expression(37, Add), rhs = Counter(21) -- expression 27 operands: lhs = Counter(20), rhs = Counter(22) -- expression 28 operands: lhs = Expression(37, Add), rhs = Counter(21) -- expression 29 operands: lhs = Counter(20), rhs = Counter(22) -- expression 30 operands: lhs = Expression(37, Add), rhs = Expression(32, Add) -- expression 31 operands: lhs = Counter(20), rhs = Counter(22) -- expression 32 operands: lhs = Counter(21), rhs = Counter(23) -- expression 33 operands: lhs = Expression(37, Add), rhs = Expression(39, Add) -- expression 34 operands: lhs = Counter(20), rhs = Counter(22) -- expression 35 operands: lhs = Counter(21), rhs = Counter(24) -- expression 36 operands: lhs = Expression(37, Add), rhs = Expression(38, Add) -- expression 37 operands: lhs = Counter(20), rhs = Counter(22) -- expression 38 operands: lhs = Expression(39, Add), rhs = Counter(25) -- expression 39 operands: lhs = Counter(21), rhs = Counter(24) -- expression 40 operands: lhs = Expression(54, Add), rhs = Expression(57, Add) -- expression 41 operands: lhs = Counter(26), rhs = Counter(28) -- expression 42 operands: lhs = Counter(27), rhs = Counter(29) -- expression 43 operands: lhs = Counter(26), rhs = Counter(27) -- expression 44 operands: lhs = Counter(26), rhs = Counter(27) -- expression 45 operands: lhs = Counter(26), rhs = Expression(46, Add) -- expression 46 operands: lhs = Counter(27), rhs = Zero -- expression 47 operands: lhs = Counter(26), rhs = Expression(57, Add) -- expression 48 operands: lhs = Counter(27), rhs = Counter(29) -- expression 49 operands: lhs = Expression(54, Add), rhs = Expression(56, Add) -- expression 50 operands: lhs = Counter(26), rhs = Counter(28) -- expression 51 operands: lhs = Expression(57, Add), rhs = Counter(30) -- expression 52 operands: lhs = Counter(27), rhs = Counter(29) -- expression 53 operands: lhs = Expression(54, Add), rhs = Expression(55, Add) -- expression 54 operands: lhs = Counter(26), rhs = Counter(28) -- expression 55 operands: 
lhs = Expression(56, Add), rhs = Counter(31) -- expression 56 operands: lhs = Expression(57, Add), rhs = Counter(30) -- expression 57 operands: lhs = Counter(27), rhs = Counter(29) -- expression 58 operands: lhs = Counter(31), rhs = Counter(32) +Number of expressions: 28 +- expression 0 operands: lhs = Counter(7), rhs = Counter(8) +- expression 1 operands: lhs = Counter(9), rhs = Counter(10) +- expression 2 operands: lhs = Counter(8), rhs = Counter(9) +- expression 3 operands: lhs = Counter(11), rhs = Counter(12) +- expression 4 operands: lhs = Counter(8), rhs = Expression(5, Add) +- expression 5 operands: lhs = Counter(9), rhs = Counter(11) +- expression 6 operands: lhs = Counter(16), rhs = Counter(17) +- expression 7 operands: lhs = Counter(18), rhs = Counter(19) +- expression 8 operands: lhs = Counter(20), rhs = Counter(21) +- expression 9 operands: lhs = Expression(12, Add), rhs = Counter(20) +- expression 10 operands: lhs = Counter(18), rhs = Counter(19) +- expression 11 operands: lhs = Expression(12, Add), rhs = Expression(13, Add) +- expression 12 operands: lhs = Counter(18), rhs = Counter(19) +- expression 13 operands: lhs = Counter(20), rhs = Counter(22) +- expression 14 operands: lhs = Counter(21), rhs = Counter(22) +- expression 15 operands: lhs = Counter(21), rhs = Counter(22) +- expression 16 operands: lhs = Expression(17, Add), rhs = Counter(23) +- expression 17 operands: lhs = Counter(21), rhs = Counter(22) +- expression 18 operands: lhs = Counter(24), rhs = Counter(25) +- expression 19 operands: lhs = Counter(28), rhs = Counter(29) +- expression 20 operands: lhs = Counter(26), rhs = Counter(27) +- expression 21 operands: lhs = Counter(26), rhs = Counter(27) +- expression 22 operands: lhs = Counter(26), rhs = Expression(23, Add) +- expression 23 operands: lhs = Counter(27), rhs = Zero +- expression 24 operands: lhs = Expression(25, Add), rhs = Counter(30) +- expression 25 operands: lhs = Counter(28), rhs = Counter(29) +- expression 26 operands: lhs = Counter(30), rhs = Counter(31) +- expression 27 operands: lhs = Counter(31), rhs = Counter(32) Number of file 0 mappings: 51 - Code(Counter(0)) at (prev + 8, 1) to (start + 3, 28) - Code(Counter(1)) at (prev + 4, 9) to (start + 1, 28) -- Code(Expression(0, Sub)) at (prev + 2, 5) to (start + 4, 31) - = (c1 - c2) +- Code(Counter(2)) at (prev + 2, 5) to (start + 4, 31) - Code(Counter(3)) at (prev + 5, 5) to (start + 0, 31) -- Code(Expression(1, Sub)) at (prev + 1, 5) to (start + 0, 31) - = (c3 - c4) +- Code(Counter(4)) at (prev + 1, 5) to (start + 0, 31) - Code(Counter(5)) at (prev + 1, 9) to (start + 1, 28) -- Code(Expression(2, Sub)) at (prev + 2, 5) to (start + 0, 31) - = (c5 - c6) -- Code(Expression(3, Sub)) at (prev + 1, 5) to (start + 0, 15) - = (c5 - (c6 + c7)) -- Code(Expression(5, Sub)) at (prev + 0, 32) to (start + 0, 48) - = (c5 - ((c6 + c7) + c8)) +- Code(Counter(6)) at (prev + 2, 5) to (start + 0, 31) +- Code(Counter(7)) at (prev + 1, 5) to (start + 0, 15) +- Code(Expression(0, Sub)) at (prev + 0, 32) to (start + 0, 48) + = (c7 - c8) - Code(Counter(8)) at (prev + 1, 5) to (start + 3, 15) - Code(Counter(9)) at (prev + 3, 32) to (start + 0, 48) - Code(Counter(10)) at (prev + 0, 51) to (start + 0, 65) -- Code(Expression(8, Sub)) at (prev + 0, 75) to (start + 0, 90) +- Code(Expression(1, Sub)) at (prev + 0, 75) to (start + 0, 90) = (c9 - c10) -- Code(Expression(9, Sub)) at (prev + 1, 5) to (start + 0, 15) +- Code(Expression(2, Sub)) at (prev + 1, 5) to (start + 0, 15) = (c8 - c9) - Code(Counter(11)) at (prev + 5, 9) to 
(start + 3, 16) - Code(Counter(12)) at (prev + 5, 13) to (start + 0, 27) -- Code(Expression(10, Sub)) at (prev + 2, 13) to (start + 0, 28) +- Code(Expression(3, Sub)) at (prev + 2, 13) to (start + 0, 28) = (c11 - c12) -- Code(Expression(11, Sub)) at (prev + 4, 9) to (start + 5, 6) +- Code(Expression(4, Sub)) at (prev + 4, 9) to (start + 5, 6) = (c8 - (c9 + c11)) - Code(Counter(13)) at (prev + 6, 5) to (start + 3, 6) -- Code(Expression(13, Sub)) at (prev + 4, 5) to (start + 3, 6) - = (c13 - c14) +- Code(Counter(14)) at (prev + 4, 5) to (start + 3, 6) - Code(Counter(15)) at (prev + 4, 9) to (start + 4, 6) -- Code(Expression(14, Sub)) at (prev + 5, 8) to (start + 0, 15) - = (c15 - c16) +- Code(Counter(16)) at (prev + 5, 8) to (start + 0, 15) - Code(Counter(17)) at (prev + 1, 9) to (start + 3, 10) -- Code(Expression(15, Sub)) at (prev + 5, 9) to (start + 3, 10) - = (c15 - (c16 + c17)) -- Code(Expression(17, Sub)) at (prev + 5, 8) to (start + 0, 15) - = ((c17 + c18) - c19) +- Code(Expression(6, Sub)) at (prev + 5, 9) to (start + 3, 10) + = (c16 - c17) +- Code(Expression(12, Add)) at (prev + 5, 8) to (start + 0, 15) + = (c18 + c19) - Code(Counter(20)) at (prev + 1, 9) to (start + 0, 19) -- Code(Counter(21)) at (prev + 3, 13) to (start + 0, 29) -- Code(Expression(19, Sub)) at (prev + 3, 9) to (start + 0, 19) - = ((c17 + c18) - (c19 + c20)) -- Code(Expression(22, Sub)) at (prev + 3, 13) to (start + 0, 29) - = ((c17 + c18) - ((c19 + c20) + c22)) -- Code(Expression(28, Sub)) at (prev + 3, 5) to (start + 0, 15) - = ((c20 + c22) - c21) -- Code(Expression(28, Sub)) at (prev + 1, 12) to (start + 0, 19) - = ((c20 + c22) - c21) +- Code(Expression(8, Sub)) at (prev + 3, 13) to (start + 0, 29) + = (c20 - c21) +- Code(Expression(9, Sub)) at (prev + 3, 9) to (start + 0, 19) + = ((c18 + c19) - c20) +- Code(Expression(11, Sub)) at (prev + 3, 13) to (start + 0, 29) + = ((c18 + c19) - (c20 + c22)) +- Code(Expression(17, Add)) at (prev + 3, 5) to (start + 0, 15) + = (c21 + c22) +- Code(Expression(17, Add)) at (prev + 1, 12) to (start + 0, 19) + = (c21 + c22) - Code(Counter(23)) at (prev + 1, 13) to (start + 0, 19) -- Code(Expression(30, Sub)) at (prev + 2, 13) to (start + 0, 19) - = ((c20 + c22) - (c21 + c23)) -- Code(Expression(33, Sub)) at (prev + 4, 5) to (start + 2, 19) - = ((c20 + c22) - (c21 + c24)) +- Code(Expression(16, Sub)) at (prev + 2, 13) to (start + 0, 19) + = ((c21 + c22) - c23) +- Code(Counter(24)) at (prev + 4, 5) to (start + 2, 19) - Code(Counter(25)) at (prev + 3, 13) to (start + 0, 19) -- Code(Expression(36, Sub)) at (prev + 2, 13) to (start + 0, 19) - = ((c20 + c22) - ((c21 + c24) + c25)) -- Code(Expression(40, Sub)) at (prev + 3, 5) to (start + 0, 15) - = ((c26 + c28) - (c27 + c29)) +- Code(Expression(18, Sub)) at (prev + 2, 13) to (start + 0, 19) + = (c24 - c25) +- Code(Expression(25, Add)) at (prev + 3, 5) to (start + 0, 15) + = (c28 + c29) - Code(Counter(26)) at (prev + 1, 12) to (start + 0, 19) - Code(Counter(27)) at (prev + 1, 13) to (start + 3, 14) - Code(Counter(28)) at (prev + 4, 13) to (start + 0, 19) -- Code(Expression(44, Sub)) at (prev + 2, 13) to (start + 0, 23) +- Code(Expression(21, Sub)) at (prev + 2, 13) to (start + 0, 23) = (c26 - c27) -- Code(Expression(44, Sub)) at (prev + 1, 20) to (start + 0, 27) +- Code(Expression(21, Sub)) at (prev + 1, 20) to (start + 0, 27) = (c26 - c27) - Code(Zero) at (prev + 1, 21) to (start + 0, 27) -- Code(Expression(45, Sub)) at (prev + 2, 21) to (start + 0, 27) +- Code(Expression(22, Sub)) at (prev + 2, 21) to (start + 0, 27) = (c26 - (c27 + 
Zero)) -- Code(Expression(47, Sub)) at (prev + 4, 13) to (start + 0, 19) - = (c26 - (c27 + c29)) -- Code(Counter(30)) at (prev + 3, 9) to (start + 0, 25) -- Code(Expression(49, Sub)) at (prev + 2, 5) to (start + 0, 15) - = ((c26 + c28) - ((c27 + c29) + c30)) -- Code(Expression(53, Sub)) at (prev + 3, 9) to (start + 0, 34) - = ((c26 + c28) - (((c27 + c29) + c30) + c31)) +- Code(Counter(29)) at (prev + 4, 13) to (start + 0, 19) +- Code(Expression(24, Sub)) at (prev + 3, 9) to (start + 0, 25) + = ((c28 + c29) - c30) +- Code(Counter(30)) at (prev + 2, 5) to (start + 0, 15) +- Code(Expression(26, Sub)) at (prev + 3, 9) to (start + 0, 34) + = (c30 - c31) - Code(Counter(31)) at (prev + 2, 5) to (start + 0, 15) -- Code(Expression(58, Sub)) at (prev + 3, 9) to (start + 0, 44) +- Code(Expression(27, Sub)) at (prev + 3, 9) to (start + 0, 44) = (c31 - c32) - Code(Counter(32)) at (prev + 2, 1) to (start + 0, 2) Highest counter ID seen: c32 diff --git a/tests/coverage/loop-break.cov-map b/tests/coverage/loop-break.cov-map index 0b4c42a43da0c..f13e82da15143 100644 --- a/tests/coverage/loop-break.cov-map +++ b/tests/coverage/loop-break.cov-map @@ -1,15 +1,15 @@ Function name: loop_break::main -Raw bytes (31): 0x[01, 01, 01, 01, 05, 05, 01, 03, 01, 00, 0b, 03, 02, 0c, 00, 27, 01, 01, 0d, 00, 12, 05, 01, 09, 00, 0a, 01, 02, 01, 00, 02] +Raw bytes (31): 0x[01, 01, 01, 05, 01, 05, 01, 03, 01, 00, 0b, 05, 02, 0c, 00, 27, 01, 01, 0d, 00, 12, 02, 01, 09, 00, 0a, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 1 -- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 5 - Code(Counter(0)) at (prev + 3, 1) to (start + 0, 11) -- Code(Expression(0, Add)) at (prev + 2, 12) to (start + 0, 39) - = (c0 + c1) +- Code(Counter(1)) at (prev + 2, 12) to (start + 0, 39) - Code(Counter(0)) at (prev + 1, 13) to (start + 0, 18) -- Code(Counter(1)) at (prev + 1, 9) to (start + 0, 10) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 10) + = (c1 - c0) - Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2) Highest counter ID seen: c1 diff --git a/tests/coverage/loops_branches.cov-map b/tests/coverage/loops_branches.cov-map index 0279a1a51579e..912141b6d5f36 100644 --- a/tests/coverage/loops_branches.cov-map +++ b/tests/coverage/loops_branches.cov-map @@ -1,32 +1,14 @@ Function name: ::fmt -Raw bytes (152): 0x[01, 01, 18, 05, 00, 27, 57, 53, 00, 01, 1d, 11, 19, 27, 11, 53, 00, 01, 1d, 27, 57, 53, 00, 01, 1d, 11, 19, 53, 57, 01, 1d, 11, 19, 53, 47, 01, 1d, 57, 00, 11, 19, 53, 57, 01, 1d, 11, 19, 5f, 19, 11, 15, 14, 01, 09, 05, 01, 10, 05, 02, 10, 00, 15, 00, 01, 17, 00, 1b, 00, 00, 1c, 00, 1e, 02, 01, 0d, 00, 0e, 05, 01, 0d, 00, 1e, 11, 00, 1e, 00, 1f, 00, 01, 10, 01, 0a, 22, 03, 0d, 00, 0e, 16, 00, 12, 00, 17, 22, 01, 10, 00, 14, 4e, 01, 14, 00, 19, 00, 01, 1b, 00, 1f, 00, 00, 20, 00, 22, 3e, 01, 11, 00, 12, 4e, 01, 11, 00, 22, 15, 00, 22, 00, 23, 00, 01, 14, 01, 0e, 19, 03, 09, 00, 0f, 5b, 01, 05, 00, 06] +Raw bytes (116): 0x[01, 01, 06, 05, 00, 1d, 00, 0f, 13, 01, 19, 11, 15, 15, 19, 14, 01, 09, 05, 01, 10, 05, 02, 10, 00, 15, 00, 01, 17, 00, 1b, 00, 00, 1c, 00, 1e, 02, 01, 0d, 00, 0e, 05, 01, 0d, 00, 1e, 11, 00, 1e, 00, 1f, 00, 01, 10, 01, 0a, 19, 03, 0d, 00, 0e, 15, 00, 12, 00, 17, 19, 01, 10, 00, 14, 1d, 01, 14, 00, 19, 00, 01, 1b, 00, 1f, 00, 00, 20, 00, 22, 06, 01, 11, 00, 12, 1d, 01, 11, 00, 22, 0a, 00, 22, 00, 23, 00, 01, 14, 01, 0e, 16, 03, 09, 00, 0f, 01, 01, 05, 00, 06] Number of 
files: 1 - file 0 => global file 1 -Number of expressions: 24 +Number of expressions: 6 - expression 0 operands: lhs = Counter(1), rhs = Zero -- expression 1 operands: lhs = Expression(9, Add), rhs = Expression(21, Add) -- expression 2 operands: lhs = Expression(20, Add), rhs = Zero -- expression 3 operands: lhs = Counter(0), rhs = Counter(7) -- expression 4 operands: lhs = Counter(4), rhs = Counter(6) -- expression 5 operands: lhs = Expression(9, Add), rhs = Counter(4) -- expression 6 operands: lhs = Expression(20, Add), rhs = Zero -- expression 7 operands: lhs = Counter(0), rhs = Counter(7) -- expression 8 operands: lhs = Expression(9, Add), rhs = Expression(21, Add) -- expression 9 operands: lhs = Expression(20, Add), rhs = Zero -- expression 10 operands: lhs = Counter(0), rhs = Counter(7) -- expression 11 operands: lhs = Counter(4), rhs = Counter(6) -- expression 12 operands: lhs = Expression(20, Add), rhs = Expression(21, Add) -- expression 13 operands: lhs = Counter(0), rhs = Counter(7) -- expression 14 operands: lhs = Counter(4), rhs = Counter(6) -- expression 15 operands: lhs = Expression(20, Add), rhs = Expression(17, Add) -- expression 16 operands: lhs = Counter(0), rhs = Counter(7) -- expression 17 operands: lhs = Expression(21, Add), rhs = Zero -- expression 18 operands: lhs = Counter(4), rhs = Counter(6) -- expression 19 operands: lhs = Expression(20, Add), rhs = Expression(21, Add) -- expression 20 operands: lhs = Counter(0), rhs = Counter(7) -- expression 21 operands: lhs = Counter(4), rhs = Counter(6) -- expression 22 operands: lhs = Expression(23, Add), rhs = Counter(6) -- expression 23 operands: lhs = Counter(4), rhs = Counter(5) +- expression 1 operands: lhs = Counter(7), rhs = Zero +- expression 2 operands: lhs = Expression(3, Add), rhs = Expression(4, Add) +- expression 3 operands: lhs = Counter(0), rhs = Counter(6) +- expression 4 operands: lhs = Counter(4), rhs = Counter(5) +- expression 5 operands: lhs = Counter(5), rhs = Counter(6) Number of file 0 mappings: 20 - Code(Counter(0)) at (prev + 9, 5) to (start + 1, 16) - Code(Counter(1)) at (prev + 2, 16) to (start + 0, 21) @@ -37,57 +19,37 @@ Number of file 0 mappings: 20 - Code(Counter(1)) at (prev + 1, 13) to (start + 0, 30) - Code(Counter(4)) at (prev + 0, 30) to (start + 0, 31) - Code(Zero) at (prev + 1, 16) to (start + 1, 10) -- Code(Expression(8, Sub)) at (prev + 3, 13) to (start + 0, 14) - = (((c0 + c7) + Zero) - (c4 + c6)) -- Code(Expression(5, Sub)) at (prev + 0, 18) to (start + 0, 23) - = (((c0 + c7) + Zero) - c4) -- Code(Expression(8, Sub)) at (prev + 1, 16) to (start + 0, 20) - = (((c0 + c7) + Zero) - (c4 + c6)) -- Code(Expression(19, Sub)) at (prev + 1, 20) to (start + 0, 25) - = ((c0 + c7) - (c4 + c6)) +- Code(Counter(6)) at (prev + 3, 13) to (start + 0, 14) +- Code(Counter(5)) at (prev + 0, 18) to (start + 0, 23) +- Code(Counter(6)) at (prev + 1, 16) to (start + 0, 20) +- Code(Counter(7)) at (prev + 1, 20) to (start + 0, 25) - Code(Zero) at (prev + 1, 27) to (start + 0, 31) - Code(Zero) at (prev + 0, 32) to (start + 0, 34) -- Code(Expression(15, Sub)) at (prev + 1, 17) to (start + 0, 18) - = ((c0 + c7) - ((c4 + c6) + Zero)) -- Code(Expression(19, Sub)) at (prev + 1, 17) to (start + 0, 34) - = ((c0 + c7) - (c4 + c6)) -- Code(Counter(5)) at (prev + 0, 34) to (start + 0, 35) +- Code(Expression(1, Sub)) at (prev + 1, 17) to (start + 0, 18) + = (c7 - Zero) +- Code(Counter(7)) at (prev + 1, 17) to (start + 0, 34) +- Code(Expression(2, Sub)) at (prev + 0, 34) to (start + 0, 35) + = ((c0 + c6) - (c4 + c5)) - 
Code(Zero) at (prev + 1, 20) to (start + 1, 14) -- Code(Counter(6)) at (prev + 3, 9) to (start + 0, 15) -- Code(Expression(22, Add)) at (prev + 1, 5) to (start + 0, 6) - = ((c4 + c5) + c6) -Highest counter ID seen: c6 +- Code(Expression(5, Sub)) at (prev + 3, 9) to (start + 0, 15) + = (c5 - c6) +- Code(Counter(0)) at (prev + 1, 5) to (start + 0, 6) +Highest counter ID seen: c7 Function name: ::fmt -Raw bytes (154): 0x[01, 01, 19, 01, 00, 01, 00, 2b, 63, 2f, 0d, 01, 00, 11, 15, 2b, 11, 2f, 0d, 01, 00, 2b, 63, 2f, 0d, 01, 00, 11, 15, 57, 63, 01, 0d, 11, 15, 57, 4b, 01, 0d, 63, 00, 11, 15, 57, 63, 01, 0d, 11, 15, 63, 21, 11, 15, 14, 01, 22, 05, 01, 11, 00, 01, 12, 01, 0a, 02, 02, 10, 00, 15, 00, 01, 17, 00, 1b, 00, 00, 1c, 00, 1e, 06, 01, 0d, 00, 0e, 02, 01, 0d, 00, 1e, 11, 00, 1e, 00, 1f, 26, 02, 0d, 00, 0e, 1a, 00, 12, 00, 17, 26, 01, 10, 00, 15, 00, 00, 16, 01, 0e, 52, 02, 14, 00, 19, 00, 01, 1b, 00, 1f, 00, 00, 20, 00, 22, 42, 01, 11, 00, 12, 52, 01, 11, 00, 22, 21, 00, 22, 00, 23, 15, 03, 09, 00, 0f, 5f, 01, 05, 00, 06] +Raw bytes (122): 0x[01, 01, 09, 01, 00, 01, 00, 0d, 00, 0d, 00, 0d, 00, 1b, 1f, 01, 0d, 09, 1d, 09, 0d, 14, 01, 22, 05, 01, 11, 00, 01, 12, 01, 0a, 02, 02, 10, 00, 15, 00, 01, 17, 00, 1b, 00, 00, 1c, 00, 1e, 06, 01, 0d, 00, 0e, 02, 01, 0d, 00, 1e, 1d, 00, 1e, 00, 1f, 0d, 02, 0d, 00, 0e, 09, 00, 12, 00, 17, 0d, 01, 10, 00, 15, 00, 00, 16, 01, 0e, 12, 02, 14, 00, 19, 00, 01, 1b, 00, 1f, 00, 00, 20, 00, 22, 0e, 01, 11, 00, 12, 12, 01, 11, 00, 22, 16, 00, 22, 00, 23, 22, 03, 09, 00, 0f, 01, 01, 05, 00, 06] Number of files: 1 - file 0 => global file 1 -Number of expressions: 25 +Number of expressions: 9 - expression 0 operands: lhs = Counter(0), rhs = Zero - expression 1 operands: lhs = Counter(0), rhs = Zero -- expression 2 operands: lhs = Expression(10, Add), rhs = Expression(24, Add) -- expression 3 operands: lhs = Expression(11, Add), rhs = Counter(3) -- expression 4 operands: lhs = Counter(0), rhs = Zero -- expression 5 operands: lhs = Counter(4), rhs = Counter(5) -- expression 6 operands: lhs = Expression(10, Add), rhs = Counter(4) -- expression 7 operands: lhs = Expression(11, Add), rhs = Counter(3) -- expression 8 operands: lhs = Counter(0), rhs = Zero -- expression 9 operands: lhs = Expression(10, Add), rhs = Expression(24, Add) -- expression 10 operands: lhs = Expression(11, Add), rhs = Counter(3) -- expression 11 operands: lhs = Counter(0), rhs = Zero -- expression 12 operands: lhs = Counter(4), rhs = Counter(5) -- expression 13 operands: lhs = Expression(21, Add), rhs = Expression(24, Add) -- expression 14 operands: lhs = Counter(0), rhs = Counter(3) -- expression 15 operands: lhs = Counter(4), rhs = Counter(5) -- expression 16 operands: lhs = Expression(21, Add), rhs = Expression(18, Add) -- expression 17 operands: lhs = Counter(0), rhs = Counter(3) -- expression 18 operands: lhs = Expression(24, Add), rhs = Zero -- expression 19 operands: lhs = Counter(4), rhs = Counter(5) -- expression 20 operands: lhs = Expression(21, Add), rhs = Expression(24, Add) -- expression 21 operands: lhs = Counter(0), rhs = Counter(3) -- expression 22 operands: lhs = Counter(4), rhs = Counter(5) -- expression 23 operands: lhs = Expression(24, Add), rhs = Counter(8) -- expression 24 operands: lhs = Counter(4), rhs = Counter(5) +- expression 2 operands: lhs = Counter(3), rhs = Zero +- expression 3 operands: lhs = Counter(3), rhs = Zero +- expression 4 operands: lhs = Counter(3), rhs = Zero +- expression 5 operands: lhs = Expression(6, Add), rhs = Expression(7, Add) +- expression 6 
operands: lhs = Counter(0), rhs = Counter(3) +- expression 7 operands: lhs = Counter(2), rhs = Counter(7) +- expression 8 operands: lhs = Counter(2), rhs = Counter(3) Number of file 0 mappings: 20 - Code(Counter(0)) at (prev + 34, 5) to (start + 1, 17) - Code(Zero) at (prev + 1, 18) to (start + 1, 10) @@ -99,27 +61,25 @@ Number of file 0 mappings: 20 = (c0 - Zero) - Code(Expression(0, Sub)) at (prev + 1, 13) to (start + 0, 30) = (c0 - Zero) -- Code(Counter(4)) at (prev + 0, 30) to (start + 0, 31) -- Code(Expression(9, Sub)) at (prev + 2, 13) to (start + 0, 14) - = (((c0 + Zero) + c3) - (c4 + c5)) -- Code(Expression(6, Sub)) at (prev + 0, 18) to (start + 0, 23) - = (((c0 + Zero) + c3) - c4) -- Code(Expression(9, Sub)) at (prev + 1, 16) to (start + 0, 21) - = (((c0 + Zero) + c3) - (c4 + c5)) +- Code(Counter(7)) at (prev + 0, 30) to (start + 0, 31) +- Code(Counter(3)) at (prev + 2, 13) to (start + 0, 14) +- Code(Counter(2)) at (prev + 0, 18) to (start + 0, 23) +- Code(Counter(3)) at (prev + 1, 16) to (start + 0, 21) - Code(Zero) at (prev + 0, 22) to (start + 1, 14) -- Code(Expression(20, Sub)) at (prev + 2, 20) to (start + 0, 25) - = ((c0 + c3) - (c4 + c5)) +- Code(Expression(4, Sub)) at (prev + 2, 20) to (start + 0, 25) + = (c3 - Zero) - Code(Zero) at (prev + 1, 27) to (start + 0, 31) - Code(Zero) at (prev + 0, 32) to (start + 0, 34) -- Code(Expression(16, Sub)) at (prev + 1, 17) to (start + 0, 18) - = ((c0 + c3) - ((c4 + c5) + Zero)) -- Code(Expression(20, Sub)) at (prev + 1, 17) to (start + 0, 34) - = ((c0 + c3) - (c4 + c5)) -- Code(Counter(8)) at (prev + 0, 34) to (start + 0, 35) -- Code(Counter(5)) at (prev + 3, 9) to (start + 0, 15) -- Code(Expression(23, Add)) at (prev + 1, 5) to (start + 0, 6) - = ((c4 + c5) + c8) -Highest counter ID seen: c8 +- Code(Expression(3, Sub)) at (prev + 1, 17) to (start + 0, 18) + = (c3 - Zero) +- Code(Expression(4, Sub)) at (prev + 1, 17) to (start + 0, 34) + = (c3 - Zero) +- Code(Expression(5, Sub)) at (prev + 0, 34) to (start + 0, 35) + = ((c0 + c3) - (c2 + c7)) +- Code(Expression(8, Sub)) at (prev + 3, 9) to (start + 0, 15) + = (c2 - c3) +- Code(Counter(0)) at (prev + 1, 5) to (start + 0, 6) +Highest counter ID seen: c7 Function name: loops_branches::main Raw bytes (9): 0x[01, 01, 00, 01, 01, 37, 01, 05, 02] diff --git a/tests/coverage/match_or_pattern.cov-map b/tests/coverage/match_or_pattern.cov-map index 2beb327bc0594..ae77eedfe72ec 100644 --- a/tests/coverage/match_or_pattern.cov-map +++ b/tests/coverage/match_or_pattern.cov-map @@ -1,76 +1,49 @@ Function name: match_or_pattern::main -Raw bytes (185): 0x[01, 01, 1c, 01, 05, 09, 0d, 23, 11, 09, 0d, 1f, 15, 23, 11, 09, 0d, 23, 11, 09, 0d, 19, 1d, 43, 21, 19, 1d, 3f, 25, 43, 21, 19, 1d, 43, 21, 19, 1d, 29, 2d, 63, 31, 29, 2d, 5f, 35, 63, 31, 29, 2d, 63, 31, 29, 2d, 39, 3d, 6f, 41, 39, 3d, 19, 01, 01, 01, 08, 0f, 05, 08, 10, 03, 06, 02, 03, 05, 00, 06, 01, 01, 0b, 00, 11, 11, 03, 1b, 00, 1d, 23, 01, 0e, 00, 10, 1f, 02, 08, 00, 0f, 15, 00, 10, 03, 06, 12, 03, 05, 00, 06, 1f, 01, 0b, 00, 11, 21, 01, 1b, 00, 1d, 43, 01, 0e, 00, 10, 3f, 02, 08, 00, 0f, 25, 00, 10, 03, 06, 32, 03, 05, 00, 06, 3f, 01, 0b, 00, 11, 31, 01, 1b, 00, 1d, 63, 01, 0e, 00, 10, 5f, 02, 08, 00, 0f, 35, 00, 10, 03, 06, 52, 03, 05, 00, 06, 5f, 01, 0b, 00, 11, 41, 01, 1b, 00, 1d, 6f, 01, 0e, 00, 10, 6b, 02, 01, 00, 02] +Raw bytes (145): 0x[01, 01, 08, 01, 05, 01, 09, 01, 0d, 01, 11, 01, 15, 01, 19, 01, 1d, 01, 21, 19, 01, 01, 01, 08, 0f, 05, 08, 10, 03, 06, 02, 03, 05, 00, 06, 01, 01, 0b, 00, 11, 06, 03, 1b, 00, 1d, 09, 01, 0e, 00, 10, 
01, 02, 08, 00, 0f, 0d, 00, 10, 03, 06, 0a, 03, 05, 00, 06, 01, 01, 0b, 00, 11, 0e, 01, 1b, 00, 1d, 11, 01, 0e, 00, 10, 01, 02, 08, 00, 0f, 15, 00, 10, 03, 06, 12, 03, 05, 00, 06, 01, 01, 0b, 00, 11, 16, 01, 1b, 00, 1d, 19, 01, 0e, 00, 10, 01, 02, 08, 00, 0f, 1d, 00, 10, 03, 06, 1a, 03, 05, 00, 06, 01, 01, 0b, 00, 11, 1e, 01, 1b, 00, 1d, 21, 01, 0e, 00, 10, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 28 +Number of expressions: 8 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(2), rhs = Counter(3) -- expression 2 operands: lhs = Expression(8, Add), rhs = Counter(4) -- expression 3 operands: lhs = Counter(2), rhs = Counter(3) -- expression 4 operands: lhs = Expression(7, Add), rhs = Counter(5) -- expression 5 operands: lhs = Expression(8, Add), rhs = Counter(4) -- expression 6 operands: lhs = Counter(2), rhs = Counter(3) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(4) -- expression 8 operands: lhs = Counter(2), rhs = Counter(3) -- expression 9 operands: lhs = Counter(6), rhs = Counter(7) -- expression 10 operands: lhs = Expression(16, Add), rhs = Counter(8) -- expression 11 operands: lhs = Counter(6), rhs = Counter(7) -- expression 12 operands: lhs = Expression(15, Add), rhs = Counter(9) -- expression 13 operands: lhs = Expression(16, Add), rhs = Counter(8) -- expression 14 operands: lhs = Counter(6), rhs = Counter(7) -- expression 15 operands: lhs = Expression(16, Add), rhs = Counter(8) -- expression 16 operands: lhs = Counter(6), rhs = Counter(7) -- expression 17 operands: lhs = Counter(10), rhs = Counter(11) -- expression 18 operands: lhs = Expression(24, Add), rhs = Counter(12) -- expression 19 operands: lhs = Counter(10), rhs = Counter(11) -- expression 20 operands: lhs = Expression(23, Add), rhs = Counter(13) -- expression 21 operands: lhs = Expression(24, Add), rhs = Counter(12) -- expression 22 operands: lhs = Counter(10), rhs = Counter(11) -- expression 23 operands: lhs = Expression(24, Add), rhs = Counter(12) -- expression 24 operands: lhs = Counter(10), rhs = Counter(11) -- expression 25 operands: lhs = Counter(14), rhs = Counter(15) -- expression 26 operands: lhs = Expression(27, Add), rhs = Counter(16) -- expression 27 operands: lhs = Counter(14), rhs = Counter(15) +- expression 1 operands: lhs = Counter(0), rhs = Counter(2) +- expression 2 operands: lhs = Counter(0), rhs = Counter(3) +- expression 3 operands: lhs = Counter(0), rhs = Counter(4) +- expression 4 operands: lhs = Counter(0), rhs = Counter(5) +- expression 5 operands: lhs = Counter(0), rhs = Counter(6) +- expression 6 operands: lhs = Counter(0), rhs = Counter(7) +- expression 7 operands: lhs = Counter(0), rhs = Counter(8) Number of file 0 mappings: 25 - Code(Counter(0)) at (prev + 1, 1) to (start + 8, 15) - Code(Counter(1)) at (prev + 8, 16) to (start + 3, 6) - Code(Expression(0, Sub)) at (prev + 3, 5) to (start + 0, 6) = (c0 - c1) - Code(Counter(0)) at (prev + 1, 11) to (start + 0, 17) -- Code(Counter(4)) at (prev + 3, 27) to (start + 0, 29) -- Code(Expression(8, Add)) at (prev + 1, 14) to (start + 0, 16) - = (c2 + c3) -- Code(Expression(7, Add)) at (prev + 2, 8) to (start + 0, 15) - = ((c2 + c3) + c4) +- Code(Expression(1, Sub)) at (prev + 3, 27) to (start + 0, 29) + = (c0 - c2) +- Code(Counter(2)) at (prev + 1, 14) to (start + 0, 16) +- Code(Counter(0)) at (prev + 2, 8) to (start + 0, 15) +- Code(Counter(3)) at (prev + 0, 16) to (start + 3, 6) +- Code(Expression(2, Sub)) at (prev + 3, 5) to (start 
+ 0, 6) + = (c0 - c3) +- Code(Counter(0)) at (prev + 1, 11) to (start + 0, 17) +- Code(Expression(3, Sub)) at (prev + 1, 27) to (start + 0, 29) + = (c0 - c4) +- Code(Counter(4)) at (prev + 1, 14) to (start + 0, 16) +- Code(Counter(0)) at (prev + 2, 8) to (start + 0, 15) - Code(Counter(5)) at (prev + 0, 16) to (start + 3, 6) - Code(Expression(4, Sub)) at (prev + 3, 5) to (start + 0, 6) - = (((c2 + c3) + c4) - c5) -- Code(Expression(7, Add)) at (prev + 1, 11) to (start + 0, 17) - = ((c2 + c3) + c4) -- Code(Counter(8)) at (prev + 1, 27) to (start + 0, 29) -- Code(Expression(16, Add)) at (prev + 1, 14) to (start + 0, 16) - = (c6 + c7) -- Code(Expression(15, Add)) at (prev + 2, 8) to (start + 0, 15) - = ((c6 + c7) + c8) -- Code(Counter(9)) at (prev + 0, 16) to (start + 3, 6) -- Code(Expression(12, Sub)) at (prev + 3, 5) to (start + 0, 6) - = (((c6 + c7) + c8) - c9) -- Code(Expression(15, Add)) at (prev + 1, 11) to (start + 0, 17) - = ((c6 + c7) + c8) -- Code(Counter(12)) at (prev + 1, 27) to (start + 0, 29) -- Code(Expression(24, Add)) at (prev + 1, 14) to (start + 0, 16) - = (c10 + c11) -- Code(Expression(23, Add)) at (prev + 2, 8) to (start + 0, 15) - = ((c10 + c11) + c12) -- Code(Counter(13)) at (prev + 0, 16) to (start + 3, 6) -- Code(Expression(20, Sub)) at (prev + 3, 5) to (start + 0, 6) - = (((c10 + c11) + c12) - c13) -- Code(Expression(23, Add)) at (prev + 1, 11) to (start + 0, 17) - = ((c10 + c11) + c12) -- Code(Counter(16)) at (prev + 1, 27) to (start + 0, 29) -- Code(Expression(27, Add)) at (prev + 1, 14) to (start + 0, 16) - = (c14 + c15) -- Code(Expression(26, Add)) at (prev + 2, 1) to (start + 0, 2) - = ((c14 + c15) + c16) -Highest counter ID seen: c16 + = (c0 - c5) +- Code(Counter(0)) at (prev + 1, 11) to (start + 0, 17) +- Code(Expression(5, Sub)) at (prev + 1, 27) to (start + 0, 29) + = (c0 - c6) +- Code(Counter(6)) at (prev + 1, 14) to (start + 0, 16) +- Code(Counter(0)) at (prev + 2, 8) to (start + 0, 15) +- Code(Counter(7)) at (prev + 0, 16) to (start + 3, 6) +- Code(Expression(6, Sub)) at (prev + 3, 5) to (start + 0, 6) + = (c0 - c7) +- Code(Counter(0)) at (prev + 1, 11) to (start + 0, 17) +- Code(Expression(7, Sub)) at (prev + 1, 27) to (start + 0, 29) + = (c0 - c8) +- Code(Counter(8)) at (prev + 1, 14) to (start + 0, 16) +- Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2) +Highest counter ID seen: c8 diff --git a/tests/coverage/mcdc/nested_if.cov-map b/tests/coverage/mcdc/nested_if.cov-map index 72c7d68840d37..a231ac7b4c984 100644 --- a/tests/coverage/mcdc/nested_if.cov-map +++ b/tests/coverage/mcdc/nested_if.cov-map @@ -1,207 +1,196 @@ Function name: nested_if::doubly_nested_if_in_condition -Raw bytes (168): 0x[01, 01, 0e, 01, 05, 05, 09, 05, 09, 05, 13, 09, 19, 19, 1d, 05, 1f, 09, 1d, 09, 0d, 2b, 05, 01, 15, 33, 05, 37, 15, 01, 11, 14, 01, 0f, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 15, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 06, 02, 00, 10, 00, 36, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 30, 0d, 21, 02, 00, 00, 00, 15, 00, 36, 0a, 00, 18, 00, 19, 28, 03, 02, 00, 18, 00, 1e, 30, 19, 0e, 01, 02, 00, 00, 18, 00, 19, 19, 00, 1d, 00, 1e, 30, 1d, 16, 02, 00, 00, 00, 1d, 00, 1e, 1d, 00, 21, 00, 25, 1a, 00, 2f, 00, 34, 23, 00, 39, 00, 3e, 21, 00, 48, 00, 4c, 11, 00, 4f, 02, 06, 26, 02, 0c, 02, 06, 2e, 03, 01, 00, 02] +Raw bytes (170): 0x[01, 01, 0f, 01, 05, 05, 11, 05, 09, 05, 37, 09, 0d, 05, 09, 05, 1f, 09, 15, 15, 19, 05, 2b, 09, 19, 09, 0d, 05, 37, 09, 0d, 01, 11, 14, 01, 0f, 01, 01, 09, 28, 09, 02, 01, 08, 00, 
4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 06, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 06, 02, 00, 10, 00, 36, 30, 09, 16, 01, 00, 02, 00, 10, 00, 11, 30, 0d, 32, 02, 00, 00, 00, 15, 00, 36, 16, 00, 18, 00, 19, 28, 03, 02, 00, 18, 00, 1e, 30, 15, 1a, 01, 02, 00, 00, 18, 00, 19, 15, 00, 1d, 00, 1e, 30, 19, 22, 02, 00, 00, 00, 1d, 00, 1e, 19, 00, 21, 00, 25, 26, 00, 2f, 00, 34, 37, 00, 39, 00, 3e, 32, 00, 48, 00, 4c, 11, 00, 4f, 02, 06, 3a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 14 +Number of expressions: 15 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) -- expression 3 operands: lhs = Counter(1), rhs = Expression(4, Add) -- expression 4 operands: lhs = Counter(2), rhs = Counter(6) -- expression 5 operands: lhs = Counter(6), rhs = Counter(7) +- expression 3 operands: lhs = Counter(1), rhs = Expression(13, Add) +- expression 4 operands: lhs = Counter(2), rhs = Counter(3) +- expression 5 operands: lhs = Counter(1), rhs = Counter(2) - expression 6 operands: lhs = Counter(1), rhs = Expression(7, Add) -- expression 7 operands: lhs = Counter(2), rhs = Counter(7) -- expression 8 operands: lhs = Counter(2), rhs = Counter(3) -- expression 9 operands: lhs = Expression(10, Add), rhs = Counter(1) -- expression 10 operands: lhs = Counter(0), rhs = Counter(5) -- expression 11 operands: lhs = Expression(12, Add), rhs = Counter(1) -- expression 12 operands: lhs = Expression(13, Add), rhs = Counter(5) -- expression 13 operands: lhs = Counter(0), rhs = Counter(4) +- expression 7 operands: lhs = Counter(2), rhs = Counter(5) +- expression 8 operands: lhs = Counter(5), rhs = Counter(6) +- expression 9 operands: lhs = Counter(1), rhs = Expression(10, Add) +- expression 10 operands: lhs = Counter(2), rhs = Counter(6) +- expression 11 operands: lhs = Counter(2), rhs = Counter(3) +- expression 12 operands: lhs = Counter(1), rhs = Expression(13, Add) +- expression 13 operands: lhs = Counter(2), rhs = Counter(3) +- expression 14 operands: lhs = Counter(0), rhs = Counter(4) Number of file 0 mappings: 20 - Code(Counter(0)) at (prev + 15, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 9, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 78) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 false = (c0 - c1) -- MCDCBranch { true: Counter(4), false: Counter(5), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 78) +- MCDCBranch { true: Counter(4), false: Expression(1, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 78) true = c4 - false = c5 + false = (c1 - c4) - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) - MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 54) -- MCDCBranch { true: Counter(2), false: Expression(2, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) +- MCDCBranch { true: Counter(2), false: Expression(5, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) true = c2 false = (c1 - c2) -- MCDCBranch { true: Counter(3), false: Counter(8), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to 
(start + 0, 54) +- MCDCBranch { true: Counter(3), false: Expression(12, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 54) true = c3 - false = c8 -- Code(Expression(2, Sub)) at (prev + 0, 24) to (start + 0, 25) + false = (c1 - (c2 + c3)) +- Code(Expression(5, Sub)) at (prev + 0, 24) to (start + 0, 25) = (c1 - c2) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 24) to (start + 0, 30) -- MCDCBranch { true: Counter(6), false: Expression(3, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 24) to (start + 0, 25) +- MCDCBranch { true: Counter(5), false: Expression(6, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 24) to (start + 0, 25) + true = c5 + false = (c1 - (c2 + c5)) +- Code(Counter(5)) at (prev + 0, 29) to (start + 0, 30) +- MCDCBranch { true: Counter(6), false: Expression(8, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 29) to (start + 0, 30) true = c6 - false = (c1 - (c2 + c6)) -- Code(Counter(6)) at (prev + 0, 29) to (start + 0, 30) -- MCDCBranch { true: Counter(7), false: Expression(5, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 29) to (start + 0, 30) - true = c7 - false = (c6 - c7) -- Code(Counter(7)) at (prev + 0, 33) to (start + 0, 37) -- Code(Expression(6, Sub)) at (prev + 0, 47) to (start + 0, 52) - = (c1 - (c2 + c7)) -- Code(Expression(8, Add)) at (prev + 0, 57) to (start + 0, 62) + false = (c5 - c6) +- Code(Counter(6)) at (prev + 0, 33) to (start + 0, 37) +- Code(Expression(9, Sub)) at (prev + 0, 47) to (start + 0, 52) + = (c1 - (c2 + c6)) +- Code(Expression(13, Add)) at (prev + 0, 57) to (start + 0, 62) = (c2 + c3) -- Code(Counter(8)) at (prev + 0, 72) to (start + 0, 76) +- Code(Expression(12, Sub)) at (prev + 0, 72) to (start + 0, 76) + = (c1 - (c2 + c3)) - Code(Counter(4)) at (prev + 0, 79) to (start + 2, 6) -- Code(Expression(9, Sub)) at (prev + 2, 12) to (start + 2, 6) - = ((c0 + c5) - c1) -- Code(Expression(11, Sub)) at (prev + 3, 1) to (start + 0, 2) - = (((c0 + c4) + c5) - c1) -Highest counter ID seen: c8 +- Code(Expression(14, Sub)) at (prev + 2, 12) to (start + 2, 6) + = (c0 - c4) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c6 Function name: nested_if::nested_if_in_condition -Raw bytes (124): 0x[01, 01, 0d, 01, 05, 05, 09, 05, 09, 05, 1f, 09, 0d, 09, 0d, 05, 1f, 09, 0d, 27, 05, 01, 15, 2f, 05, 33, 15, 01, 11, 0e, 01, 07, 01, 01, 09, 28, 06, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 15, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 0a, 00, 15, 00, 16, 30, 0d, 1a, 02, 00, 00, 00, 15, 00, 16, 1f, 00, 19, 00, 1d, 1a, 00, 27, 00, 2c, 11, 00, 2f, 02, 06, 22, 02, 0c, 02, 06, 2a, 03, 01, 00, 02] +Raw bytes (118): 0x[01, 01, 0a, 01, 05, 05, 11, 05, 09, 05, 09, 05, 23, 09, 0d, 09, 0d, 05, 23, 09, 0d, 01, 11, 0e, 01, 07, 01, 01, 09, 28, 06, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 11, 06, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0e, 01, 00, 02, 00, 10, 00, 11, 0e, 00, 15, 00, 16, 30, 0d, 1e, 02, 00, 00, 00, 15, 00, 16, 23, 00, 19, 00, 1d, 1e, 00, 27, 00, 2c, 11, 00, 2f, 02, 06, 26, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 13 +Number of expressions: 10 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(1), 
rhs = Counter(2) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) -- expression 3 operands: lhs = Counter(1), rhs = Expression(7, Add) -- expression 4 operands: lhs = Counter(2), rhs = Counter(3) +- expression 3 operands: lhs = Counter(1), rhs = Counter(2) +- expression 4 operands: lhs = Counter(1), rhs = Expression(8, Add) - expression 5 operands: lhs = Counter(2), rhs = Counter(3) -- expression 6 operands: lhs = Counter(1), rhs = Expression(7, Add) -- expression 7 operands: lhs = Counter(2), rhs = Counter(3) -- expression 8 operands: lhs = Expression(9, Add), rhs = Counter(1) -- expression 9 operands: lhs = Counter(0), rhs = Counter(5) -- expression 10 operands: lhs = Expression(11, Add), rhs = Counter(1) -- expression 11 operands: lhs = Expression(12, Add), rhs = Counter(5) -- expression 12 operands: lhs = Counter(0), rhs = Counter(4) +- expression 6 operands: lhs = Counter(2), rhs = Counter(3) +- expression 7 operands: lhs = Counter(1), rhs = Expression(8, Add) +- expression 8 operands: lhs = Counter(2), rhs = Counter(3) +- expression 9 operands: lhs = Counter(0), rhs = Counter(4) Number of file 0 mappings: 14 - Code(Counter(0)) at (prev + 7, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 46) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 false = (c0 - c1) -- MCDCBranch { true: Counter(4), false: Counter(5), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 46) +- MCDCBranch { true: Counter(4), false: Expression(1, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 46) true = c4 - false = c5 + false = (c1 - c4) - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22) -- MCDCBranch { true: Counter(2), false: Expression(2, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) +- MCDCBranch { true: Counter(2), false: Expression(3, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) true = c2 false = (c1 - c2) -- Code(Expression(2, Sub)) at (prev + 0, 21) to (start + 0, 22) +- Code(Expression(3, Sub)) at (prev + 0, 21) to (start + 0, 22) = (c1 - c2) -- MCDCBranch { true: Counter(3), false: Expression(6, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 22) +- MCDCBranch { true: Counter(3), false: Expression(7, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 22) true = c3 false = (c1 - (c2 + c3)) -- Code(Expression(7, Add)) at (prev + 0, 25) to (start + 0, 29) +- Code(Expression(8, Add)) at (prev + 0, 25) to (start + 0, 29) = (c2 + c3) -- Code(Expression(6, Sub)) at (prev + 0, 39) to (start + 0, 44) +- Code(Expression(7, Sub)) at (prev + 0, 39) to (start + 0, 44) = (c1 - (c2 + c3)) - Code(Counter(4)) at (prev + 0, 47) to (start + 2, 6) -- Code(Expression(8, Sub)) at (prev + 2, 12) to (start + 2, 6) - = ((c0 + c5) - c1) -- Code(Expression(10, Sub)) at (prev + 3, 1) to (start + 0, 2) - = (((c0 + c4) + c5) - c1) -Highest counter ID seen: c5 +- Code(Expression(9, Sub)) at (prev + 2, 12) to (start + 2, 6) + = (c0 - c4) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c4 Function name: 
nested_if::nested_in_then_block_in_condition -Raw bytes (176): 0x[01, 01, 12, 01, 05, 05, 09, 05, 09, 05, 33, 09, 0d, 09, 0d, 33, 11, 09, 0d, 11, 15, 33, 15, 09, 0d, 05, 33, 09, 0d, 3b, 05, 01, 1d, 43, 05, 47, 1d, 01, 19, 14, 01, 22, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 19, 1d, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0a, 01, 00, 02, 00, 10, 00, 11, 0a, 00, 15, 00, 16, 30, 0d, 2e, 02, 00, 00, 00, 15, 00, 16, 33, 00, 1c, 00, 1d, 28, 06, 02, 00, 1c, 00, 22, 30, 11, 1a, 01, 02, 00, 00, 1c, 00, 1d, 11, 00, 21, 00, 22, 30, 15, 22, 02, 00, 00, 00, 21, 00, 22, 15, 00, 25, 00, 29, 26, 00, 33, 00, 38, 2e, 00, 44, 00, 49, 19, 00, 4c, 02, 06, 36, 02, 0c, 02, 06, 3e, 03, 01, 00, 02] +Raw bytes (170): 0x[01, 01, 0f, 01, 05, 05, 19, 05, 09, 05, 09, 05, 37, 09, 0d, 09, 0d, 37, 11, 09, 0d, 11, 15, 37, 15, 09, 0d, 05, 37, 09, 0d, 01, 19, 14, 01, 22, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 19, 06, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 09, 0e, 01, 00, 02, 00, 10, 00, 11, 0e, 00, 15, 00, 16, 30, 0d, 32, 02, 00, 00, 00, 15, 00, 16, 37, 00, 1c, 00, 1d, 28, 06, 02, 00, 1c, 00, 22, 30, 11, 1e, 01, 02, 00, 00, 1c, 00, 1d, 11, 00, 21, 00, 22, 30, 15, 26, 02, 00, 00, 00, 21, 00, 22, 15, 00, 25, 00, 29, 2a, 00, 33, 00, 38, 32, 00, 44, 00, 49, 19, 00, 4c, 02, 06, 3a, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 18 +Number of expressions: 15 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(1), rhs = Counter(6) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) -- expression 3 operands: lhs = Counter(1), rhs = Expression(12, Add) -- expression 4 operands: lhs = Counter(2), rhs = Counter(3) +- expression 3 operands: lhs = Counter(1), rhs = Counter(2) +- expression 4 operands: lhs = Counter(1), rhs = Expression(13, Add) - expression 5 operands: lhs = Counter(2), rhs = Counter(3) -- expression 6 operands: lhs = Expression(12, Add), rhs = Counter(4) -- expression 7 operands: lhs = Counter(2), rhs = Counter(3) -- expression 8 operands: lhs = Counter(4), rhs = Counter(5) -- expression 9 operands: lhs = Expression(12, Add), rhs = Counter(5) -- expression 10 operands: lhs = Counter(2), rhs = Counter(3) -- expression 11 operands: lhs = Counter(1), rhs = Expression(12, Add) -- expression 12 operands: lhs = Counter(2), rhs = Counter(3) -- expression 13 operands: lhs = Expression(14, Add), rhs = Counter(1) -- expression 14 operands: lhs = Counter(0), rhs = Counter(7) -- expression 15 operands: lhs = Expression(16, Add), rhs = Counter(1) -- expression 16 operands: lhs = Expression(17, Add), rhs = Counter(7) -- expression 17 operands: lhs = Counter(0), rhs = Counter(6) +- expression 6 operands: lhs = Counter(2), rhs = Counter(3) +- expression 7 operands: lhs = Expression(13, Add), rhs = Counter(4) +- expression 8 operands: lhs = Counter(2), rhs = Counter(3) +- expression 9 operands: lhs = Counter(4), rhs = Counter(5) +- expression 10 operands: lhs = Expression(13, Add), rhs = Counter(5) +- expression 11 operands: lhs = Counter(2), rhs = Counter(3) +- expression 12 operands: lhs = Counter(1), rhs = Expression(13, Add) +- expression 13 operands: lhs = Counter(2), rhs = Counter(3) +- expression 14 operands: lhs = Counter(0), rhs = Counter(6) Number of file 0 mappings: 20 - 
Code(Counter(0)) at (prev + 34, 1) to (start + 1, 9) - MCDCDecision { bitmap_idx: 9, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 75) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 false = (c0 - c1) -- MCDCBranch { true: Counter(6), false: Counter(7), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 75) +- MCDCBranch { true: Counter(6), false: Expression(1, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 75) true = c6 - false = c7 + false = (c1 - c6) - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22) -- MCDCBranch { true: Counter(2), false: Expression(2, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) +- MCDCBranch { true: Counter(2), false: Expression(3, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17) true = c2 false = (c1 - c2) -- Code(Expression(2, Sub)) at (prev + 0, 21) to (start + 0, 22) +- Code(Expression(3, Sub)) at (prev + 0, 21) to (start + 0, 22) = (c1 - c2) -- MCDCBranch { true: Counter(3), false: Expression(11, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 22) +- MCDCBranch { true: Counter(3), false: Expression(12, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 21) to (start + 0, 22) true = c3 false = (c1 - (c2 + c3)) -- Code(Expression(12, Add)) at (prev + 0, 28) to (start + 0, 29) +- Code(Expression(13, Add)) at (prev + 0, 28) to (start + 0, 29) = (c2 + c3) - MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 0, 28) to (start + 0, 34) -- MCDCBranch { true: Counter(4), false: Expression(6, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 28) to (start + 0, 29) +- MCDCBranch { true: Counter(4), false: Expression(7, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 28) to (start + 0, 29) true = c4 false = ((c2 + c3) - c4) - Code(Counter(4)) at (prev + 0, 33) to (start + 0, 34) -- MCDCBranch { true: Counter(5), false: Expression(8, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 33) to (start + 0, 34) +- MCDCBranch { true: Counter(5), false: Expression(9, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 33) to (start + 0, 34) true = c5 false = (c4 - c5) - Code(Counter(5)) at (prev + 0, 37) to (start + 0, 41) -- Code(Expression(9, Sub)) at (prev + 0, 51) to (start + 0, 56) +- Code(Expression(10, Sub)) at (prev + 0, 51) to (start + 0, 56) = ((c2 + c3) - c5) -- Code(Expression(11, Sub)) at (prev + 0, 68) to (start + 0, 73) +- Code(Expression(12, Sub)) at (prev + 0, 68) to (start + 0, 73) = (c1 - (c2 + c3)) - Code(Counter(6)) at (prev + 0, 76) to (start + 2, 6) -- Code(Expression(13, Sub)) at (prev + 2, 12) to (start + 2, 6) - = ((c0 + c7) - c1) -- Code(Expression(15, Sub)) at (prev + 3, 1) to (start + 0, 2) - = (((c0 + c6) + c7) - c1) -Highest counter ID seen: c7 +- Code(Expression(14, Sub)) at (prev + 2, 12) to (start + 2, 6) + = (c0 - c6) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c6 Function name: nested_if::nested_single_condition_decision -Raw bytes (89): 0x[01, 01, 08, 01, 05, 05, 09, 05, 09, 13, 05, 01, 11, 1b, 05, 1f, 11, 01, 0d, 0b, 01, 17, 01, 04, 09, 28, 03, 02, 04, 08, 00, 29, 
30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 11, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 09, 0a, 00, 10, 00, 11, 09, 00, 14, 00, 19, 0a, 00, 23, 00, 27, 0d, 00, 2a, 02, 06, 0e, 02, 0c, 02, 06, 16, 03, 01, 00, 02] +Raw bytes (83): 0x[01, 01, 05, 01, 05, 05, 0d, 05, 09, 05, 09, 01, 0d, 0b, 01, 17, 01, 04, 09, 28, 03, 02, 04, 08, 00, 29, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 06, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 09, 0e, 00, 10, 00, 11, 09, 00, 14, 00, 19, 0e, 00, 23, 00, 27, 0d, 00, 2a, 02, 06, 12, 02, 0c, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 8 +Number of expressions: 5 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +- expression 1 operands: lhs = Counter(1), rhs = Counter(3) - expression 2 operands: lhs = Counter(1), rhs = Counter(2) -- expression 3 operands: lhs = Expression(4, Add), rhs = Counter(1) -- expression 4 operands: lhs = Counter(0), rhs = Counter(4) -- expression 5 operands: lhs = Expression(6, Add), rhs = Counter(1) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(4) -- expression 7 operands: lhs = Counter(0), rhs = Counter(3) +- expression 3 operands: lhs = Counter(1), rhs = Counter(2) +- expression 4 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 11 - Code(Counter(0)) at (prev + 23, 1) to (start + 4, 9) - MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 4, 8) to (start + 0, 41) - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9) true = c1 false = (c0 - c1) -- MCDCBranch { true: Counter(3), false: Counter(4), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 41) +- MCDCBranch { true: Counter(3), false: Expression(1, Sub), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 41) true = c3 - false = c4 + false = (c1 - c3) - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17) -- Branch { true: Counter(2), false: Expression(2, Sub) } at (prev + 0, 16) to (start + 0, 17) +- Branch { true: Counter(2), false: Expression(3, Sub) } at (prev + 0, 16) to (start + 0, 17) true = c2 false = (c1 - c2) - Code(Counter(2)) at (prev + 0, 20) to (start + 0, 25) -- Code(Expression(2, Sub)) at (prev + 0, 35) to (start + 0, 39) +- Code(Expression(3, Sub)) at (prev + 0, 35) to (start + 0, 39) = (c1 - c2) - Code(Counter(3)) at (prev + 0, 42) to (start + 2, 6) -- Code(Expression(3, Sub)) at (prev + 2, 12) to (start + 2, 6) - = ((c0 + c4) - c1) -- Code(Expression(5, Sub)) at (prev + 3, 1) to (start + 0, 2) - = (((c0 + c3) + c4) - c1) -Highest counter ID seen: c4 +- Code(Expression(4, Sub)) at (prev + 2, 12) to (start + 2, 6) + = (c0 - c3) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c3 diff --git a/tests/coverage/nested_loops.cov-map b/tests/coverage/nested_loops.cov-map index 6ba5887d243b8..e9e41bd53e783 100644 --- a/tests/coverage/nested_loops.cov-map +++ b/tests/coverage/nested_loops.cov-map @@ -1,46 +1,41 @@ Function name: nested_loops::main -Raw bytes (103): 0x[01, 01, 11, 27, 09, 01, 05, 03, 0d, 13, 0d, 17, 15, 03, 11, 1f, 0d, 23, 15, 27, 11, 01, 05, 2f, 0d, 3b, 15, 01, 11, 3b, 0d, 01, 11, 05, 09, 0d, 19, 0d, 01, 01, 01, 02, 1b, 03, 04, 13, 00, 20, 0a, 01, 0d, 01, 18, 0e, 02, 12, 00, 17, 1a, 01, 10, 00, 16, 05, 01, 11, 00, 16, 2a, 01, 0e, 03, 16, 36, 04, 11, 01, 1b, 19, 
02, 15, 00, 21, 11, 01, 18, 02, 12, 15, 03, 0d, 00, 0e, 3f, 02, 09, 00, 17, 43, 02, 01, 00, 02] +Raw bytes (97): 0x[01, 01, 0e, 07, 2f, 05, 11, 01, 0d, 2f, 05, 01, 0d, 27, 05, 01, 09, 33, 27, 05, 15, 01, 09, 2f, 33, 01, 0d, 05, 15, 05, 01, 0d, 01, 01, 01, 02, 1b, 05, 04, 13, 00, 20, 09, 01, 0d, 01, 18, 0d, 02, 12, 00, 17, 11, 01, 10, 00, 16, 02, 01, 11, 00, 16, 0e, 01, 0e, 03, 16, 15, 04, 11, 01, 1b, 16, 02, 15, 00, 21, 1e, 01, 18, 02, 12, 2a, 03, 0d, 00, 0e, 36, 02, 09, 00, 17, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 17 -- expression 0 operands: lhs = Expression(9, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Counter(1) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Expression(4, Add), rhs = Counter(3) -- expression 4 operands: lhs = Expression(5, Add), rhs = Counter(5) -- expression 5 operands: lhs = Expression(0, Add), rhs = Counter(4) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(3) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(5) -- expression 8 operands: lhs = Expression(9, Add), rhs = Counter(4) -- expression 9 operands: lhs = Counter(0), rhs = Counter(1) -- expression 10 operands: lhs = Expression(11, Add), rhs = Counter(3) -- expression 11 operands: lhs = Expression(14, Add), rhs = Counter(5) -- expression 12 operands: lhs = Counter(0), rhs = Counter(4) -- expression 13 operands: lhs = Expression(14, Add), rhs = Counter(3) -- expression 14 operands: lhs = Counter(0), rhs = Counter(4) -- expression 15 operands: lhs = Counter(1), rhs = Counter(2) -- expression 16 operands: lhs = Counter(3), rhs = Counter(6) +Number of expressions: 14 +- expression 0 operands: lhs = Expression(1, Add), rhs = Expression(11, Add) +- expression 1 operands: lhs = Counter(1), rhs = Counter(4) +- expression 2 operands: lhs = Counter(0), rhs = Counter(3) +- expression 3 operands: lhs = Expression(11, Add), rhs = Counter(1) +- expression 4 operands: lhs = Counter(0), rhs = Counter(3) +- expression 5 operands: lhs = Expression(9, Add), rhs = Counter(1) +- expression 6 operands: lhs = Counter(0), rhs = Counter(2) +- expression 7 operands: lhs = Expression(12, Add), rhs = Expression(9, Add) +- expression 8 operands: lhs = Counter(1), rhs = Counter(5) +- expression 9 operands: lhs = Counter(0), rhs = Counter(2) +- expression 10 operands: lhs = Expression(11, Add), rhs = Expression(12, Add) +- expression 11 operands: lhs = Counter(0), rhs = Counter(3) +- expression 12 operands: lhs = Counter(1), rhs = Counter(5) +- expression 13 operands: lhs = Counter(1), rhs = Counter(0) Number of file 0 mappings: 13 - Code(Counter(0)) at (prev + 1, 1) to (start + 2, 27) -- Code(Expression(0, Add)) at (prev + 4, 19) to (start + 0, 32) - = ((c0 + c1) + c2) -- Code(Expression(2, Sub)) at (prev + 1, 13) to (start + 1, 24) - = (((c0 + c1) + c2) - c3) -- Code(Expression(3, Sub)) at (prev + 2, 18) to (start + 0, 23) - = (((((c0 + c1) + c2) + c4) + c5) - c3) -- Code(Expression(6, Sub)) at (prev + 1, 16) to (start + 0, 22) - = ((((c0 + c1) + c4) + c5) - c3) -- Code(Counter(1)) at (prev + 1, 17) to (start + 0, 22) -- Code(Expression(10, Sub)) at (prev + 1, 14) to (start + 3, 22) - = (((c0 + c4) + c5) - c3) -- Code(Expression(13, Sub)) at (prev + 4, 17) to (start + 1, 27) - = ((c0 + c4) - c3) -- Code(Counter(6)) at (prev + 2, 21) to (start + 0, 33) -- Code(Counter(4)) at (prev + 1, 24) to (start + 2, 18) -- Code(Counter(5)) at (prev + 3, 13) to (start + 0, 14) -- 
Code(Expression(15, Add)) at (prev + 2, 9) to (start + 0, 23) - = (c1 + c2) -- Code(Expression(16, Add)) at (prev + 2, 1) to (start + 0, 2) - = (c3 + c6) -Highest counter ID seen: c6 +- Code(Counter(1)) at (prev + 4, 19) to (start + 0, 32) +- Code(Counter(2)) at (prev + 1, 13) to (start + 1, 24) +- Code(Counter(3)) at (prev + 2, 18) to (start + 0, 23) +- Code(Counter(4)) at (prev + 1, 16) to (start + 0, 22) +- Code(Expression(0, Sub)) at (prev + 1, 17) to (start + 0, 22) + = ((c1 + c4) - (c0 + c3)) +- Code(Expression(3, Sub)) at (prev + 1, 14) to (start + 3, 22) + = ((c0 + c3) - c1) +- Code(Counter(5)) at (prev + 4, 17) to (start + 1, 27) +- Code(Expression(5, Sub)) at (prev + 2, 21) to (start + 0, 33) + = ((c0 + c2) - c1) +- Code(Expression(7, Sub)) at (prev + 1, 24) to (start + 2, 18) + = ((c1 + c5) - (c0 + c2)) +- Code(Expression(10, Sub)) at (prev + 3, 13) to (start + 0, 14) + = ((c0 + c3) - (c1 + c5)) +- Code(Expression(13, Sub)) at (prev + 2, 9) to (start + 0, 23) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2) +Highest counter ID seen: c5 diff --git a/tests/coverage/overflow.cov-map b/tests/coverage/overflow.cov-map index 01abcc15003ee..1178d65de102e 100644 --- a/tests/coverage/overflow.cov-map +++ b/tests/coverage/overflow.cov-map @@ -1,32 +1,29 @@ Function name: overflow::main -Raw bytes (67): 0x[01, 01, 09, 07, 0d, 0b, 09, 01, 05, 03, 11, 17, 11, 1b, 0d, 01, 09, 23, 0d, 05, 09, 09, 01, 10, 01, 01, 1b, 03, 02, 0b, 00, 18, 0e, 01, 0c, 00, 1a, 05, 00, 1b, 03, 0a, 12, 03, 13, 00, 20, 09, 00, 21, 03, 0a, 0d, 03, 09, 00, 0a, 1f, 01, 09, 00, 17, 11, 02, 05, 01, 02] +Raw bytes (61): 0x[01, 01, 06, 05, 01, 05, 17, 01, 09, 05, 13, 17, 0d, 01, 09, 09, 01, 10, 01, 01, 1b, 05, 02, 0b, 00, 18, 02, 01, 0c, 00, 1a, 09, 00, 1b, 03, 0a, 06, 03, 13, 00, 20, 0d, 00, 21, 03, 0a, 0e, 03, 09, 00, 0a, 02, 01, 09, 00, 17, 01, 02, 05, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 9 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) -- expression 1 operands: lhs = Expression(2, Add), rhs = Counter(2) -- expression 2 operands: lhs = Counter(0), rhs = Counter(1) -- expression 3 operands: lhs = Expression(0, Add), rhs = Counter(4) -- expression 4 operands: lhs = Expression(5, Add), rhs = Counter(4) -- expression 5 operands: lhs = Expression(6, Add), rhs = Counter(3) -- expression 6 operands: lhs = Counter(0), rhs = Counter(2) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(3) -- expression 8 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 6 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(5, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(1), rhs = Expression(4, Add) +- expression 4 operands: lhs = Expression(5, Add), rhs = Counter(3) +- expression 5 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 16, 1) to (start + 1, 27) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 24) - = (((c0 + c1) + c2) + c3) -- Code(Expression(3, Sub)) at (prev + 1, 12) to (start + 0, 26) - = ((((c0 + c1) + c2) + c3) - c4) -- Code(Counter(1)) at (prev + 0, 27) to (start + 3, 10) -- Code(Expression(4, Sub)) at (prev + 3, 19) to (start + 0, 32) - = (((c0 + c2) + c3) - c4) -- Code(Counter(2)) at (prev + 0, 33) to (start + 3, 10) -- Code(Counter(3)) at (prev + 3, 9) to (start + 0, 10) -- Code(Expression(7, Add)) at (prev + 1, 9) to 
(start + 0, 23) - = ((c1 + c2) + c3) -- Code(Counter(4)) at (prev + 2, 5) to (start + 1, 2) -Highest counter ID seen: c4 +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 24) +- Code(Expression(0, Sub)) at (prev + 1, 12) to (start + 0, 26) + = (c1 - c0) +- Code(Counter(2)) at (prev + 0, 27) to (start + 3, 10) +- Code(Expression(1, Sub)) at (prev + 3, 19) to (start + 0, 32) + = (c1 - (c0 + c2)) +- Code(Counter(3)) at (prev + 0, 33) to (start + 3, 10) +- Code(Expression(3, Sub)) at (prev + 3, 9) to (start + 0, 10) + = (c1 - ((c0 + c2) + c3)) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 23) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 5) to (start + 1, 2) +Highest counter ID seen: c3 Function name: overflow::might_overflow Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 05, 01, 01, 12, 05, 01, 13, 02, 06, 02, 02, 05, 00, 06, 01, 01, 09, 05, 02] diff --git a/tests/coverage/overflow.coverage b/tests/coverage/overflow.coverage index 9057c244ccf0c..bd3d436f4585b 100644 --- a/tests/coverage/overflow.coverage +++ b/tests/coverage/overflow.coverage @@ -15,18 +15,18 @@ LL| | LL| 1|fn main() -> Result<(), u8> { LL| 1| let mut countdown = 10; - LL| 11| while countdown > 0 { - LL| 11| if countdown == 1 { + LL| 10| while countdown > 0 { + LL| 9| if countdown == 1 { LL| 1| let result = might_overflow(10); LL| 1| println!("Result: {}", result); - LL| 10| } else if countdown < 5 { + LL| 8| } else if countdown < 5 { LL| 3| let result = might_overflow(1); LL| 3| println!("Result: {}", result); - LL| 6| } - LL| 10| countdown -= 1; + LL| 5| } + LL| 9| countdown -= 1; LL| | } - LL| 0| Ok(()) - LL| 0|} + LL| 1| Ok(()) + LL| 1|} LL| | LL| |// Notes: LL| |// 1. Compare this program and its coverage results to those of the very similar test `assert.rs`, diff --git a/tests/coverage/panic_unwind.cov-map b/tests/coverage/panic_unwind.cov-map index 005c4babbea8d..4628a24689e7a 100644 --- a/tests/coverage/panic_unwind.cov-map +++ b/tests/coverage/panic_unwind.cov-map @@ -1,32 +1,29 @@ Function name: panic_unwind::main -Raw bytes (67): 0x[01, 01, 09, 07, 0d, 0b, 09, 01, 05, 03, 11, 17, 11, 1b, 0d, 01, 09, 23, 0d, 05, 09, 09, 01, 0d, 01, 01, 1b, 03, 02, 0b, 00, 18, 0e, 01, 0c, 00, 1a, 05, 00, 1b, 02, 0a, 12, 02, 13, 00, 20, 09, 00, 21, 02, 0a, 0d, 02, 09, 00, 0a, 1f, 01, 09, 00, 17, 11, 02, 05, 01, 02] +Raw bytes (61): 0x[01, 01, 06, 05, 01, 05, 17, 01, 09, 05, 13, 17, 0d, 01, 09, 09, 01, 0d, 01, 01, 1b, 05, 02, 0b, 00, 18, 02, 01, 0c, 00, 1a, 09, 00, 1b, 02, 0a, 06, 02, 13, 00, 20, 0d, 00, 21, 02, 0a, 0e, 02, 09, 00, 0a, 02, 01, 09, 00, 17, 01, 02, 05, 01, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 9 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(3) -- expression 1 operands: lhs = Expression(2, Add), rhs = Counter(2) -- expression 2 operands: lhs = Counter(0), rhs = Counter(1) -- expression 3 operands: lhs = Expression(0, Add), rhs = Counter(4) -- expression 4 operands: lhs = Expression(5, Add), rhs = Counter(4) -- expression 5 operands: lhs = Expression(6, Add), rhs = Counter(3) -- expression 6 operands: lhs = Counter(0), rhs = Counter(2) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(3) -- expression 8 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 6 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(5, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) +- expression 3 operands: lhs = Counter(1), rhs = Expression(4, Add) +- 
expression 4 operands: lhs = Expression(5, Add), rhs = Counter(3) +- expression 5 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 13, 1) to (start + 1, 27) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 24) - = (((c0 + c1) + c2) + c3) -- Code(Expression(3, Sub)) at (prev + 1, 12) to (start + 0, 26) - = ((((c0 + c1) + c2) + c3) - c4) -- Code(Counter(1)) at (prev + 0, 27) to (start + 2, 10) -- Code(Expression(4, Sub)) at (prev + 2, 19) to (start + 0, 32) - = (((c0 + c2) + c3) - c4) -- Code(Counter(2)) at (prev + 0, 33) to (start + 2, 10) -- Code(Counter(3)) at (prev + 2, 9) to (start + 0, 10) -- Code(Expression(7, Add)) at (prev + 1, 9) to (start + 0, 23) - = ((c1 + c2) + c3) -- Code(Counter(4)) at (prev + 2, 5) to (start + 1, 2) -Highest counter ID seen: c4 +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 24) +- Code(Expression(0, Sub)) at (prev + 1, 12) to (start + 0, 26) + = (c1 - c0) +- Code(Counter(2)) at (prev + 0, 27) to (start + 2, 10) +- Code(Expression(1, Sub)) at (prev + 2, 19) to (start + 0, 32) + = (c1 - (c0 + c2)) +- Code(Counter(3)) at (prev + 0, 33) to (start + 2, 10) +- Code(Expression(3, Sub)) at (prev + 2, 9) to (start + 0, 10) + = (c1 - ((c0 + c2) + c3)) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 23) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 5) to (start + 1, 2) +Highest counter ID seen: c3 Function name: panic_unwind::might_panic Raw bytes (21): 0x[01, 01, 01, 01, 05, 03, 01, 04, 01, 01, 14, 05, 02, 09, 01, 19, 02, 02, 0c, 03, 02] diff --git a/tests/coverage/panic_unwind.coverage b/tests/coverage/panic_unwind.coverage index eaf96cb02896b..a80ab6d16b01c 100644 --- a/tests/coverage/panic_unwind.coverage +++ b/tests/coverage/panic_unwind.coverage @@ -12,16 +12,16 @@ LL| | LL| 1|fn main() -> Result<(), u8> { LL| 1| let mut countdown = 10; - LL| 11| while countdown > 0 { - LL| 11| if countdown == 1 { + LL| 10| while countdown > 0 { + LL| 9| if countdown == 1 { LL| 1| might_panic(true); - LL| 10| } else if countdown < 5 { + LL| 8| } else if countdown < 5 { LL| 3| might_panic(false); - LL| 6| } - LL| 10| countdown -= 1; + LL| 5| } + LL| 9| countdown -= 1; LL| | } - LL| 0| Ok(()) - LL| 0|} + LL| 1| Ok(()) + LL| 1|} LL| | LL| |// Notes: LL| |// 1. 
Compare this program and its coverage results to those of the similar tests `abort.rs` and diff --git a/tests/coverage/simple_loop.cov-map b/tests/coverage/simple_loop.cov-map index d1e684efbbcd5..8e428b267d561 100644 --- a/tests/coverage/simple_loop.cov-map +++ b/tests/coverage/simple_loop.cov-map @@ -1,19 +1,19 @@ Function name: simple_loop::main -Raw bytes (43): 0x[01, 01, 02, 01, 05, 01, 09, 07, 01, 04, 01, 09, 10, 05, 0a, 05, 05, 06, 02, 05, 05, 00, 06, 07, 05, 0d, 02, 0e, 01, 04, 0d, 00, 12, 09, 02, 0a, 03, 0a, 01, 06, 01, 00, 02] +Raw bytes (43): 0x[01, 01, 02, 01, 05, 09, 01, 07, 01, 04, 01, 09, 10, 05, 0a, 05, 05, 06, 02, 05, 05, 00, 06, 09, 05, 0d, 02, 0e, 01, 04, 0d, 00, 12, 06, 02, 0a, 03, 0a, 01, 06, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Counter(0), rhs = Counter(2) +- expression 1 operands: lhs = Counter(2), rhs = Counter(0) Number of file 0 mappings: 7 - Code(Counter(0)) at (prev + 4, 1) to (start + 9, 16) - Code(Counter(1)) at (prev + 10, 5) to (start + 5, 6) - Code(Expression(0, Sub)) at (prev + 5, 5) to (start + 0, 6) = (c0 - c1) -- Code(Expression(1, Add)) at (prev + 5, 13) to (start + 2, 14) - = (c0 + c2) +- Code(Counter(2)) at (prev + 5, 13) to (start + 2, 14) - Code(Counter(0)) at (prev + 4, 13) to (start + 0, 18) -- Code(Counter(2)) at (prev + 2, 10) to (start + 3, 10) +- Code(Expression(1, Sub)) at (prev + 2, 10) to (start + 3, 10) + = (c2 - c0) - Code(Counter(0)) at (prev + 6, 1) to (start + 0, 2) Highest counter ID seen: c2 diff --git a/tests/coverage/simple_match.cov-map b/tests/coverage/simple_match.cov-map index 8f973742959ea..15f114daa7fb2 100644 --- a/tests/coverage/simple_match.cov-map +++ b/tests/coverage/simple_match.cov-map @@ -1,31 +1,27 @@ Function name: simple_match::main -Raw bytes (72): 0x[01, 01, 09, 01, 05, 23, 0d, 01, 09, 1f, 11, 23, 0d, 01, 09, 1f, 11, 23, 0d, 01, 09, 0a, 01, 04, 01, 07, 0f, 05, 07, 10, 02, 06, 02, 02, 05, 00, 06, 1f, 05, 09, 00, 0d, 1a, 05, 0d, 00, 16, 09, 02, 0d, 00, 0e, 1a, 02, 11, 02, 12, 09, 04, 0d, 07, 0e, 0d, 0a, 0d, 00, 0f, 11, 03, 01, 00, 02] +Raw bytes (64): 0x[01, 01, 05, 01, 05, 09, 01, 09, 01, 09, 13, 01, 0d, 0a, 01, 04, 01, 07, 0f, 05, 07, 10, 02, 06, 02, 02, 05, 00, 06, 09, 05, 09, 00, 0d, 0a, 05, 0d, 00, 16, 0d, 02, 0d, 00, 0e, 0a, 02, 11, 02, 12, 0d, 04, 0d, 07, 0e, 0e, 0a, 0d, 00, 0f, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 9 +Number of expressions: 5 - expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Expression(8, Add), rhs = Counter(3) -- expression 2 operands: lhs = Counter(0), rhs = Counter(2) -- expression 3 operands: lhs = Expression(7, Add), rhs = Counter(4) -- expression 4 operands: lhs = Expression(8, Add), rhs = Counter(3) -- expression 5 operands: lhs = Counter(0), rhs = Counter(2) -- expression 6 operands: lhs = Expression(7, Add), rhs = Counter(4) -- expression 7 operands: lhs = Expression(8, Add), rhs = Counter(3) -- expression 8 operands: lhs = Counter(0), rhs = Counter(2) +- expression 1 operands: lhs = Counter(2), rhs = Counter(0) +- expression 2 operands: lhs = Counter(2), rhs = Counter(0) +- expression 3 operands: lhs = Counter(2), rhs = Expression(4, Add) +- expression 4 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 10 - Code(Counter(0)) at (prev + 4, 1) to (start + 7, 15) - Code(Counter(1)) at (prev + 7, 16) to (start + 2, 6) - 
Code(Expression(0, Sub)) at (prev + 2, 5) to (start + 0, 6) = (c0 - c1) -- Code(Expression(7, Add)) at (prev + 5, 9) to (start + 0, 13) - = ((c0 + c2) + c3) -- Code(Expression(6, Sub)) at (prev + 5, 13) to (start + 0, 22) - = (((c0 + c2) + c3) - c4) -- Code(Counter(2)) at (prev + 2, 13) to (start + 0, 14) -- Code(Expression(6, Sub)) at (prev + 2, 17) to (start + 2, 18) - = (((c0 + c2) + c3) - c4) -- Code(Counter(2)) at (prev + 4, 13) to (start + 7, 14) -- Code(Counter(3)) at (prev + 10, 13) to (start + 0, 15) -- Code(Counter(4)) at (prev + 3, 1) to (start + 0, 2) -Highest counter ID seen: c4 +- Code(Counter(2)) at (prev + 5, 9) to (start + 0, 13) +- Code(Expression(2, Sub)) at (prev + 5, 13) to (start + 0, 22) + = (c2 - c0) +- Code(Counter(3)) at (prev + 2, 13) to (start + 0, 14) +- Code(Expression(2, Sub)) at (prev + 2, 17) to (start + 2, 18) + = (c2 - c0) +- Code(Counter(3)) at (prev + 4, 13) to (start + 7, 14) +- Code(Expression(3, Sub)) at (prev + 10, 13) to (start + 0, 15) + = (c2 - (c0 + c3)) +- Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) +Highest counter ID seen: c3 diff --git a/tests/coverage/try_error_result.cov-map b/tests/coverage/try_error_result.cov-map index f90b73592bdfd..03012f744c18c 100644 --- a/tests/coverage/try_error_result.cov-map +++ b/tests/coverage/try_error_result.cov-map @@ -55,158 +55,148 @@ Number of file 0 mappings: 4 Highest counter ID seen: c1 Function name: try_error_result::test1 -Raw bytes (69): 0x[01, 01, 05, 07, 09, 01, 00, 03, 0d, 03, 13, 0d, 11, 0b, 01, 0d, 01, 02, 17, 03, 07, 09, 00, 0e, 0a, 02, 09, 04, 1a, 11, 06, 0d, 00, 29, 15, 00, 29, 00, 2a, 00, 01, 0d, 00, 2a, 00, 00, 2a, 00, 2b, 0e, 04, 0d, 00, 2a, 00, 00, 2a, 00, 2b, 0d, 03, 05, 00, 0b, 01, 01, 01, 00, 02] +Raw bytes (63): 0x[01, 01, 02, 09, 0d, 05, 09, 0b, 01, 0d, 01, 02, 17, 05, 07, 09, 00, 0e, 09, 02, 09, 04, 1a, 0d, 06, 0d, 00, 29, 11, 00, 29, 00, 2a, 00, 01, 0d, 00, 2a, 00, 00, 2a, 00, 2b, 02, 04, 0d, 00, 2a, 00, 00, 2a, 00, 2b, 06, 03, 05, 00, 0b, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 5 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Zero -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Expression(0, Add), rhs = Expression(4, Add) -- expression 4 operands: lhs = Counter(3), rhs = Counter(4) +Number of expressions: 2 +- expression 0 operands: lhs = Counter(2), rhs = Counter(3) +- expression 1 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 11 - Code(Counter(0)) at (prev + 13, 1) to (start + 2, 23) -- Code(Expression(0, Add)) at (prev + 7, 9) to (start + 0, 14) - = ((c0 + Zero) + c2) -- Code(Expression(2, Sub)) at (prev + 2, 9) to (start + 4, 26) - = (((c0 + Zero) + c2) - c3) -- Code(Counter(4)) at (prev + 6, 13) to (start + 0, 41) -- Code(Counter(5)) at (prev + 0, 41) to (start + 0, 42) +- Code(Counter(1)) at (prev + 7, 9) to (start + 0, 14) +- Code(Counter(2)) at (prev + 2, 9) to (start + 4, 26) +- Code(Counter(3)) at (prev + 6, 13) to (start + 0, 41) +- Code(Counter(4)) at (prev + 0, 41) to (start + 0, 42) - Code(Zero) at (prev + 1, 13) to (start + 0, 42) - Code(Zero) at (prev + 0, 42) to (start + 0, 43) -- Code(Expression(3, Sub)) at (prev + 4, 13) to (start + 0, 42) - = (((c0 + Zero) + c2) - (c3 + c4)) +- Code(Expression(0, Sub)) at (prev + 4, 13) to (start + 0, 42) + = (c2 - c3) - Code(Zero) at (prev + 0, 42) to (start + 0, 43) -- Code(Counter(3)) at (prev + 3, 5) to (start + 0, 
11) +- Code(Expression(1, Sub)) at (prev + 3, 5) to (start + 0, 11) + = (c1 - c2) - Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) -Highest counter ID seen: c5 +Highest counter ID seen: c4 Function name: try_error_result::test2 -Raw bytes (355): 0x[01, 01, 3b, 07, 09, 01, 05, 03, 0d, 11, 15, 11, 4b, 15, 19, 11, 43, 47, 21, 4b, 1d, 15, 19, 11, 4b, 15, 19, 11, 47, 4b, 1d, 15, 19, 11, 43, 47, 21, 4b, 1d, 15, 19, 45, 5f, 25, 29, 45, 25, 45, 5f, 25, 29, 03, 8b, 01, 8f, 01, 31, 93, 01, 2d, 0d, 11, 03, 93, 01, 0d, 11, 03, 8f, 01, 93, 01, 2d, 0d, 11, 03, 8b, 01, 8f, 01, 31, 93, 01, 2d, 0d, 11, 49, a7, 01, 35, 39, 49, 35, 49, a7, 01, 35, 39, 4d, bb, 01, 3d, 41, 4d, 3d, 4d, bb, 01, 3d, 41, c3, 01, 41, c7, 01, 3d, cb, 01, 39, cf, 01, 35, d3, 01, 31, d7, 01, 2d, db, 01, 29, df, 01, 25, e3, 01, 21, e7, 01, 1d, eb, 01, 19, 0d, 15, 28, 01, 3d, 01, 03, 17, 03, 08, 09, 00, 0e, 0a, 02, 09, 04, 1a, 11, 06, 0d, 00, 2f, 15, 00, 2f, 00, 30, 0e, 00, 31, 03, 35, 19, 04, 11, 00, 12, 2a, 02, 11, 04, 12, 3e, 05, 11, 00, 14, 2a, 00, 17, 00, 41, 1d, 00, 41, 00, 42, 32, 00, 43, 00, 5f, 21, 00, 5f, 00, 60, 3e, 01, 0d, 00, 20, 5a, 01, 11, 00, 14, 45, 00, 17, 00, 41, 25, 00, 41, 00, 42, 56, 00, 43, 00, 60, 29, 00, 60, 00, 61, 5a, 01, 0d, 00, 20, 86, 01, 04, 11, 00, 14, 72, 00, 17, 00, 42, 2d, 00, 42, 00, 43, 7a, 00, 44, 00, 61, 31, 00, 61, 00, 62, 86, 01, 01, 0d, 00, 20, a2, 01, 01, 11, 00, 14, 49, 00, 17, 01, 36, 35, 01, 36, 00, 37, 9e, 01, 01, 12, 00, 2f, 39, 00, 2f, 00, 30, a2, 01, 01, 0d, 00, 20, b6, 01, 01, 11, 00, 14, 4d, 00, 17, 01, 36, 3d, 02, 11, 00, 12, b2, 01, 01, 12, 00, 2f, 41, 01, 11, 00, 12, b6, 01, 02, 0d, 00, 20, 0d, 03, 05, 00, 0b, bf, 01, 01, 01, 00, 02] +Raw bytes (336): 0x[01, 01, 36, 0d, 11, 0d, 3f, 11, 15, 0d, 37, 3b, 1d, 3f, 19, 11, 15, 0d, 3f, 11, 15, 0d, 3b, 3f, 19, 11, 15, 0d, 37, 3b, 1d, 3f, 19, 11, 15, 41, 53, 21, 25, 41, 21, 41, 53, 21, 25, 09, 73, 77, 2d, 0d, 29, 09, 0d, 09, 77, 0d, 29, 09, 73, 77, 2d, 0d, 29, 45, 8b, 01, 31, 35, 45, 31, 45, 8b, 01, 31, 35, 49, 9f, 01, 39, 3d, 49, 39, 49, 9f, 01, 39, 3d, 05, 09, ab, 01, 09, af, 01, 3d, b3, 01, 39, b7, 01, 35, bb, 01, 31, bf, 01, 2d, c3, 01, 29, c7, 01, 25, cb, 01, 21, cf, 01, 1d, d3, 01, 19, d7, 01, 15, 05, 11, 28, 01, 3d, 01, 03, 17, 05, 08, 09, 00, 0e, 09, 02, 09, 04, 1a, 0d, 06, 0d, 00, 2f, 11, 00, 2f, 00, 30, 02, 00, 31, 03, 35, 15, 04, 11, 00, 12, 1e, 02, 11, 04, 12, 32, 05, 11, 00, 14, 1e, 00, 17, 00, 41, 19, 00, 41, 00, 42, 26, 00, 43, 00, 5f, 1d, 00, 5f, 00, 60, 32, 01, 0d, 00, 20, 4e, 01, 11, 00, 14, 41, 00, 17, 00, 41, 21, 00, 41, 00, 42, 4a, 00, 43, 00, 60, 25, 00, 60, 00, 61, 4e, 01, 0d, 00, 20, 6e, 04, 11, 00, 14, 62, 00, 17, 00, 42, 29, 00, 42, 00, 43, 66, 00, 44, 00, 61, 2d, 00, 61, 00, 62, 6e, 01, 0d, 00, 20, 86, 01, 01, 11, 00, 14, 45, 00, 17, 01, 36, 31, 01, 36, 00, 37, 82, 01, 01, 12, 00, 2f, 35, 00, 2f, 00, 30, 86, 01, 01, 0d, 00, 20, 9a, 01, 01, 11, 00, 14, 49, 00, 17, 01, 36, 39, 02, 11, 00, 12, 96, 01, 01, 12, 00, 2f, 3d, 01, 11, 00, 12, 9a, 01, 02, 0d, 00, 20, a2, 01, 03, 05, 00, 0b, a6, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 59 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Counter(1) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Counter(4), rhs = Counter(5) -- expression 4 operands: lhs = Counter(4), rhs = Expression(18, Add) -- expression 5 operands: lhs = Counter(5), rhs = Counter(6) -- expression 6 operands: lhs = Counter(4), rhs = 
Expression(16, Add) -- expression 7 operands: lhs = Expression(17, Add), rhs = Counter(8) -- expression 8 operands: lhs = Expression(18, Add), rhs = Counter(7) -- expression 9 operands: lhs = Counter(5), rhs = Counter(6) -- expression 10 operands: lhs = Counter(4), rhs = Expression(18, Add) -- expression 11 operands: lhs = Counter(5), rhs = Counter(6) -- expression 12 operands: lhs = Counter(4), rhs = Expression(17, Add) -- expression 13 operands: lhs = Expression(18, Add), rhs = Counter(7) -- expression 14 operands: lhs = Counter(5), rhs = Counter(6) -- expression 15 operands: lhs = Counter(4), rhs = Expression(16, Add) -- expression 16 operands: lhs = Expression(17, Add), rhs = Counter(8) -- expression 17 operands: lhs = Expression(18, Add), rhs = Counter(7) -- expression 18 operands: lhs = Counter(5), rhs = Counter(6) -- expression 19 operands: lhs = Counter(17), rhs = Expression(23, Add) -- expression 20 operands: lhs = Counter(9), rhs = Counter(10) -- expression 21 operands: lhs = Counter(17), rhs = Counter(9) -- expression 22 operands: lhs = Counter(17), rhs = Expression(23, Add) -- expression 23 operands: lhs = Counter(9), rhs = Counter(10) -- expression 24 operands: lhs = Expression(0, Add), rhs = Expression(34, Add) -- expression 25 operands: lhs = Expression(35, Add), rhs = Counter(12) -- expression 26 operands: lhs = Expression(36, Add), rhs = Counter(11) -- expression 27 operands: lhs = Counter(3), rhs = Counter(4) -- expression 28 operands: lhs = Expression(0, Add), rhs = Expression(36, Add) -- expression 29 operands: lhs = Counter(3), rhs = Counter(4) -- expression 30 operands: lhs = Expression(0, Add), rhs = Expression(35, Add) -- expression 31 operands: lhs = Expression(36, Add), rhs = Counter(11) -- expression 32 operands: lhs = Counter(3), rhs = Counter(4) -- expression 33 operands: lhs = Expression(0, Add), rhs = Expression(34, Add) -- expression 34 operands: lhs = Expression(35, Add), rhs = Counter(12) -- expression 35 operands: lhs = Expression(36, Add), rhs = Counter(11) -- expression 36 operands: lhs = Counter(3), rhs = Counter(4) -- expression 37 operands: lhs = Counter(18), rhs = Expression(41, Add) -- expression 38 operands: lhs = Counter(13), rhs = Counter(14) -- expression 39 operands: lhs = Counter(18), rhs = Counter(13) -- expression 40 operands: lhs = Counter(18), rhs = Expression(41, Add) -- expression 41 operands: lhs = Counter(13), rhs = Counter(14) -- expression 42 operands: lhs = Counter(19), rhs = Expression(46, Add) -- expression 43 operands: lhs = Counter(15), rhs = Counter(16) -- expression 44 operands: lhs = Counter(19), rhs = Counter(15) -- expression 45 operands: lhs = Counter(19), rhs = Expression(46, Add) -- expression 46 operands: lhs = Counter(15), rhs = Counter(16) -- expression 47 operands: lhs = Expression(48, Add), rhs = Counter(16) -- expression 48 operands: lhs = Expression(49, Add), rhs = Counter(15) -- expression 49 operands: lhs = Expression(50, Add), rhs = Counter(14) -- expression 50 operands: lhs = Expression(51, Add), rhs = Counter(13) -- expression 51 operands: lhs = Expression(52, Add), rhs = Counter(12) -- expression 52 operands: lhs = Expression(53, Add), rhs = Counter(11) -- expression 53 operands: lhs = Expression(54, Add), rhs = Counter(10) -- expression 54 operands: lhs = Expression(55, Add), rhs = Counter(9) -- expression 55 operands: lhs = Expression(56, Add), rhs = Counter(8) -- expression 56 operands: lhs = Expression(57, Add), rhs = Counter(7) -- expression 57 operands: lhs = Expression(58, Add), rhs = Counter(6) -- 
expression 58 operands: lhs = Counter(3), rhs = Counter(5) +Number of expressions: 54 +- expression 0 operands: lhs = Counter(3), rhs = Counter(4) +- expression 1 operands: lhs = Counter(3), rhs = Expression(15, Add) +- expression 2 operands: lhs = Counter(4), rhs = Counter(5) +- expression 3 operands: lhs = Counter(3), rhs = Expression(13, Add) +- expression 4 operands: lhs = Expression(14, Add), rhs = Counter(7) +- expression 5 operands: lhs = Expression(15, Add), rhs = Counter(6) +- expression 6 operands: lhs = Counter(4), rhs = Counter(5) +- expression 7 operands: lhs = Counter(3), rhs = Expression(15, Add) +- expression 8 operands: lhs = Counter(4), rhs = Counter(5) +- expression 9 operands: lhs = Counter(3), rhs = Expression(14, Add) +- expression 10 operands: lhs = Expression(15, Add), rhs = Counter(6) +- expression 11 operands: lhs = Counter(4), rhs = Counter(5) +- expression 12 operands: lhs = Counter(3), rhs = Expression(13, Add) +- expression 13 operands: lhs = Expression(14, Add), rhs = Counter(7) +- expression 14 operands: lhs = Expression(15, Add), rhs = Counter(6) +- expression 15 operands: lhs = Counter(4), rhs = Counter(5) +- expression 16 operands: lhs = Counter(16), rhs = Expression(20, Add) +- expression 17 operands: lhs = Counter(8), rhs = Counter(9) +- expression 18 operands: lhs = Counter(16), rhs = Counter(8) +- expression 19 operands: lhs = Counter(16), rhs = Expression(20, Add) +- expression 20 operands: lhs = Counter(8), rhs = Counter(9) +- expression 21 operands: lhs = Counter(2), rhs = Expression(28, Add) +- expression 22 operands: lhs = Expression(29, Add), rhs = Counter(11) +- expression 23 operands: lhs = Counter(3), rhs = Counter(10) +- expression 24 operands: lhs = Counter(2), rhs = Counter(3) +- expression 25 operands: lhs = Counter(2), rhs = Expression(29, Add) +- expression 26 operands: lhs = Counter(3), rhs = Counter(10) +- expression 27 operands: lhs = Counter(2), rhs = Expression(28, Add) +- expression 28 operands: lhs = Expression(29, Add), rhs = Counter(11) +- expression 29 operands: lhs = Counter(3), rhs = Counter(10) +- expression 30 operands: lhs = Counter(17), rhs = Expression(34, Add) +- expression 31 operands: lhs = Counter(12), rhs = Counter(13) +- expression 32 operands: lhs = Counter(17), rhs = Counter(12) +- expression 33 operands: lhs = Counter(17), rhs = Expression(34, Add) +- expression 34 operands: lhs = Counter(12), rhs = Counter(13) +- expression 35 operands: lhs = Counter(18), rhs = Expression(39, Add) +- expression 36 operands: lhs = Counter(14), rhs = Counter(15) +- expression 37 operands: lhs = Counter(18), rhs = Counter(14) +- expression 38 operands: lhs = Counter(18), rhs = Expression(39, Add) +- expression 39 operands: lhs = Counter(14), rhs = Counter(15) +- expression 40 operands: lhs = Counter(1), rhs = Counter(2) +- expression 41 operands: lhs = Expression(42, Add), rhs = Counter(2) +- expression 42 operands: lhs = Expression(43, Add), rhs = Counter(15) +- expression 43 operands: lhs = Expression(44, Add), rhs = Counter(14) +- expression 44 operands: lhs = Expression(45, Add), rhs = Counter(13) +- expression 45 operands: lhs = Expression(46, Add), rhs = Counter(12) +- expression 46 operands: lhs = Expression(47, Add), rhs = Counter(11) +- expression 47 operands: lhs = Expression(48, Add), rhs = Counter(10) +- expression 48 operands: lhs = Expression(49, Add), rhs = Counter(9) +- expression 49 operands: lhs = Expression(50, Add), rhs = Counter(8) +- expression 50 operands: lhs = Expression(51, Add), rhs = Counter(7) +- 
expression 51 operands: lhs = Expression(52, Add), rhs = Counter(6) +- expression 52 operands: lhs = Expression(53, Add), rhs = Counter(5) +- expression 53 operands: lhs = Counter(1), rhs = Counter(4) Number of file 0 mappings: 40 - Code(Counter(0)) at (prev + 61, 1) to (start + 3, 23) -- Code(Expression(0, Add)) at (prev + 8, 9) to (start + 0, 14) - = ((c0 + c1) + c2) -- Code(Expression(2, Sub)) at (prev + 2, 9) to (start + 4, 26) - = (((c0 + c1) + c2) - c3) -- Code(Counter(4)) at (prev + 6, 13) to (start + 0, 47) -- Code(Counter(5)) at (prev + 0, 47) to (start + 0, 48) -- Code(Expression(3, Sub)) at (prev + 0, 49) to (start + 3, 53) - = (c4 - c5) -- Code(Counter(6)) at (prev + 4, 17) to (start + 0, 18) -- Code(Expression(10, Sub)) at (prev + 2, 17) to (start + 4, 18) - = (c4 - (c5 + c6)) -- Code(Expression(15, Sub)) at (prev + 5, 17) to (start + 0, 20) - = (c4 - (((c5 + c6) + c7) + c8)) -- Code(Expression(10, Sub)) at (prev + 0, 23) to (start + 0, 65) - = (c4 - (c5 + c6)) -- Code(Counter(7)) at (prev + 0, 65) to (start + 0, 66) -- Code(Expression(12, Sub)) at (prev + 0, 67) to (start + 0, 95) - = (c4 - ((c5 + c6) + c7)) -- Code(Counter(8)) at (prev + 0, 95) to (start + 0, 96) -- Code(Expression(15, Sub)) at (prev + 1, 13) to (start + 0, 32) - = (c4 - (((c5 + c6) + c7) + c8)) -- Code(Expression(22, Sub)) at (prev + 1, 17) to (start + 0, 20) - = (c17 - (c9 + c10)) -- Code(Counter(17)) at (prev + 0, 23) to (start + 0, 65) -- Code(Counter(9)) at (prev + 0, 65) to (start + 0, 66) -- Code(Expression(21, Sub)) at (prev + 0, 67) to (start + 0, 96) - = (c17 - c9) -- Code(Counter(10)) at (prev + 0, 96) to (start + 0, 97) -- Code(Expression(22, Sub)) at (prev + 1, 13) to (start + 0, 32) - = (c17 - (c9 + c10)) -- Code(Expression(33, Sub)) at (prev + 4, 17) to (start + 0, 20) - = (((c0 + c1) + c2) - (((c3 + c4) + c11) + c12)) -- Code(Expression(28, Sub)) at (prev + 0, 23) to (start + 0, 66) - = (((c0 + c1) + c2) - (c3 + c4)) -- Code(Counter(11)) at (prev + 0, 66) to (start + 0, 67) -- Code(Expression(30, Sub)) at (prev + 0, 68) to (start + 0, 97) - = (((c0 + c1) + c2) - ((c3 + c4) + c11)) -- Code(Counter(12)) at (prev + 0, 97) to (start + 0, 98) +- Code(Counter(1)) at (prev + 8, 9) to (start + 0, 14) +- Code(Counter(2)) at (prev + 2, 9) to (start + 4, 26) +- Code(Counter(3)) at (prev + 6, 13) to (start + 0, 47) +- Code(Counter(4)) at (prev + 0, 47) to (start + 0, 48) +- Code(Expression(0, Sub)) at (prev + 0, 49) to (start + 3, 53) + = (c3 - c4) +- Code(Counter(5)) at (prev + 4, 17) to (start + 0, 18) +- Code(Expression(7, Sub)) at (prev + 2, 17) to (start + 4, 18) + = (c3 - (c4 + c5)) +- Code(Expression(12, Sub)) at (prev + 5, 17) to (start + 0, 20) + = (c3 - (((c4 + c5) + c6) + c7)) +- Code(Expression(7, Sub)) at (prev + 0, 23) to (start + 0, 65) + = (c3 - (c4 + c5)) +- Code(Counter(6)) at (prev + 0, 65) to (start + 0, 66) +- Code(Expression(9, Sub)) at (prev + 0, 67) to (start + 0, 95) + = (c3 - ((c4 + c5) + c6)) +- Code(Counter(7)) at (prev + 0, 95) to (start + 0, 96) +- Code(Expression(12, Sub)) at (prev + 1, 13) to (start + 0, 32) + = (c3 - (((c4 + c5) + c6) + c7)) +- Code(Expression(19, Sub)) at (prev + 1, 17) to (start + 0, 20) + = (c16 - (c8 + c9)) +- Code(Counter(16)) at (prev + 0, 23) to (start + 0, 65) +- Code(Counter(8)) at (prev + 0, 65) to (start + 0, 66) +- Code(Expression(18, Sub)) at (prev + 0, 67) to (start + 0, 96) + = (c16 - c8) +- Code(Counter(9)) at (prev + 0, 96) to (start + 0, 97) +- Code(Expression(19, Sub)) at (prev + 1, 13) to (start + 0, 32) + = (c16 - (c8 + c9)) +- 
Code(Expression(27, Sub)) at (prev + 4, 17) to (start + 0, 20) + = (c2 - ((c3 + c10) + c11)) +- Code(Expression(24, Sub)) at (prev + 0, 23) to (start + 0, 66) + = (c2 - c3) +- Code(Counter(10)) at (prev + 0, 66) to (start + 0, 67) +- Code(Expression(25, Sub)) at (prev + 0, 68) to (start + 0, 97) + = (c2 - (c3 + c10)) +- Code(Counter(11)) at (prev + 0, 97) to (start + 0, 98) +- Code(Expression(27, Sub)) at (prev + 1, 13) to (start + 0, 32) + = (c2 - ((c3 + c10) + c11)) +- Code(Expression(33, Sub)) at (prev + 1, 17) to (start + 0, 20) + = (c17 - (c12 + c13)) +- Code(Counter(17)) at (prev + 0, 23) to (start + 1, 54) +- Code(Counter(12)) at (prev + 1, 54) to (start + 0, 55) +- Code(Expression(32, Sub)) at (prev + 1, 18) to (start + 0, 47) + = (c17 - c12) +- Code(Counter(13)) at (prev + 0, 47) to (start + 0, 48) - Code(Expression(33, Sub)) at (prev + 1, 13) to (start + 0, 32) - = (((c0 + c1) + c2) - (((c3 + c4) + c11) + c12)) -- Code(Expression(40, Sub)) at (prev + 1, 17) to (start + 0, 20) - = (c18 - (c13 + c14)) + = (c17 - (c12 + c13)) +- Code(Expression(38, Sub)) at (prev + 1, 17) to (start + 0, 20) + = (c18 - (c14 + c15)) - Code(Counter(18)) at (prev + 0, 23) to (start + 1, 54) -- Code(Counter(13)) at (prev + 1, 54) to (start + 0, 55) -- Code(Expression(39, Sub)) at (prev + 1, 18) to (start + 0, 47) - = (c18 - c13) -- Code(Counter(14)) at (prev + 0, 47) to (start + 0, 48) -- Code(Expression(40, Sub)) at (prev + 1, 13) to (start + 0, 32) - = (c18 - (c13 + c14)) -- Code(Expression(45, Sub)) at (prev + 1, 17) to (start + 0, 20) - = (c19 - (c15 + c16)) -- Code(Counter(19)) at (prev + 0, 23) to (start + 1, 54) -- Code(Counter(15)) at (prev + 2, 17) to (start + 0, 18) -- Code(Expression(44, Sub)) at (prev + 1, 18) to (start + 0, 47) - = (c19 - c15) -- Code(Counter(16)) at (prev + 1, 17) to (start + 0, 18) -- Code(Expression(45, Sub)) at (prev + 2, 13) to (start + 0, 32) - = (c19 - (c15 + c16)) -- Code(Counter(3)) at (prev + 3, 5) to (start + 0, 11) -- Code(Expression(47, Add)) at (prev + 1, 1) to (start + 0, 2) - = ((((((((((((c3 + c5) + c6) + c7) + c8) + c9) + c10) + c11) + c12) + c13) + c14) + c15) + c16) -Highest counter ID seen: c19 +- Code(Counter(14)) at (prev + 2, 17) to (start + 0, 18) +- Code(Expression(37, Sub)) at (prev + 1, 18) to (start + 0, 47) + = (c18 - c14) +- Code(Counter(15)) at (prev + 1, 17) to (start + 0, 18) +- Code(Expression(38, Sub)) at (prev + 2, 13) to (start + 0, 32) + = (c18 - (c14 + c15)) +- Code(Expression(40, Sub)) at (prev + 3, 5) to (start + 0, 11) + = (c1 - c2) +- Code(Expression(41, Sub)) at (prev + 1, 1) to (start + 0, 2) + = (((((((((((((c1 + c4) + c5) + c6) + c7) + c8) + c9) + c10) + c11) + c12) + c13) + c14) + c15) - c2) +Highest counter ID seen: c18 diff --git a/tests/coverage/unicode.cov-map b/tests/coverage/unicode.cov-map index 0a4e367bb9e37..7b9dc0b9bc887 100644 --- a/tests/coverage/unicode.cov-map +++ b/tests/coverage/unicode.cov-map @@ -1,16 +1,17 @@ Function name: unicode::main -Raw bytes (53): 0x[01, 01, 02, 01, 05, 01, 0d, 09, 01, 0e, 01, 00, 0b, 05, 01, 09, 00, 0c, 03, 00, 10, 00, 1b, 05, 00, 1c, 00, 28, 01, 02, 08, 00, 25, 09, 00, 29, 00, 46, 0d, 00, 47, 02, 06, 06, 02, 05, 00, 06, 01, 02, 05, 01, 02] +Raw bytes (53): 0x[01, 01, 02, 05, 01, 01, 0d, 09, 01, 0e, 01, 00, 0b, 02, 01, 09, 00, 0c, 05, 00, 10, 00, 1b, 02, 00, 1c, 00, 28, 01, 02, 08, 00, 25, 09, 00, 29, 00, 46, 0d, 00, 47, 02, 06, 06, 02, 05, 00, 06, 01, 02, 05, 01, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 2 -- expression 0 operands: lhs = Counter(0), rhs 
= Counter(1) +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) - expression 1 operands: lhs = Counter(0), rhs = Counter(3) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 14, 1) to (start + 0, 11) -- Code(Counter(1)) at (prev + 1, 9) to (start + 0, 12) -- Code(Expression(0, Add)) at (prev + 0, 16) to (start + 0, 27) - = (c0 + c1) -- Code(Counter(1)) at (prev + 0, 28) to (start + 0, 40) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 12) + = (c1 - c0) +- Code(Counter(1)) at (prev + 0, 16) to (start + 0, 27) +- Code(Expression(0, Sub)) at (prev + 0, 28) to (start + 0, 40) + = (c1 - c0) - Code(Counter(0)) at (prev + 2, 8) to (start + 0, 37) - Code(Counter(2)) at (prev + 0, 41) to (start + 0, 70) - Code(Counter(3)) at (prev + 0, 71) to (start + 2, 6) diff --git a/tests/coverage/unused.cov-map b/tests/coverage/unused.cov-map index 4eae63f380ca7..c18d331ec2e0e 100644 --- a/tests/coverage/unused.cov-map +++ b/tests/coverage/unused.cov-map @@ -1,44 +1,42 @@ Function name: unused::foo:: -Raw bytes (42): 0x[01, 01, 04, 07, 09, 01, 05, 03, 0d, 05, 09, 06, 01, 03, 01, 01, 12, 03, 02, 0b, 00, 11, 0a, 01, 09, 00, 0f, 09, 00, 13, 00, 19, 0f, 01, 09, 00, 0f, 0d, 02, 01, 00, 02] +Raw bytes (40): 0x[01, 01, 03, 05, 01, 05, 0b, 01, 09, 06, 01, 03, 01, 01, 12, 05, 02, 0b, 00, 11, 02, 01, 09, 00, 0f, 06, 00, 13, 00, 19, 02, 01, 09, 00, 0f, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 4 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Counter(1) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Counter(1), rhs = Counter(2) +Number of expressions: 3 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 6 - Code(Counter(0)) at (prev + 3, 1) to (start + 1, 18) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 17) - = ((c0 + c1) + c2) -- Code(Expression(2, Sub)) at (prev + 1, 9) to (start + 0, 15) - = (((c0 + c1) + c2) - c3) -- Code(Counter(2)) at (prev + 0, 19) to (start + 0, 25) -- Code(Expression(3, Add)) at (prev + 1, 9) to (start + 0, 15) - = (c1 + c2) -- Code(Counter(3)) at (prev + 2, 1) to (start + 0, 2) -Highest counter ID seen: c3 +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 17) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 15) + = (c1 - c0) +- Code(Expression(1, Sub)) at (prev + 0, 19) to (start + 0, 25) + = (c1 - (c0 + c2)) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 15) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2) +Highest counter ID seen: c1 Function name: unused::foo:: -Raw bytes (42): 0x[01, 01, 04, 07, 09, 01, 05, 03, 0d, 05, 09, 06, 01, 03, 01, 01, 12, 03, 02, 0b, 00, 11, 0a, 01, 09, 00, 0f, 09, 00, 13, 00, 19, 0f, 01, 09, 00, 0f, 0d, 02, 01, 00, 02] +Raw bytes (40): 0x[01, 01, 03, 05, 01, 05, 0b, 01, 09, 06, 01, 03, 01, 01, 12, 05, 02, 0b, 00, 11, 02, 01, 09, 00, 0f, 06, 00, 13, 00, 19, 02, 01, 09, 00, 0f, 01, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 4 -- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(2) -- expression 1 operands: lhs = Counter(0), rhs = Counter(1) -- expression 2 operands: lhs = Expression(0, Add), rhs = Counter(3) -- expression 3 operands: lhs = Counter(1), rhs = Counter(2) +Number of 
expressions: 3 +- expression 0 operands: lhs = Counter(1), rhs = Counter(0) +- expression 1 operands: lhs = Counter(1), rhs = Expression(2, Add) +- expression 2 operands: lhs = Counter(0), rhs = Counter(2) Number of file 0 mappings: 6 - Code(Counter(0)) at (prev + 3, 1) to (start + 1, 18) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 17) - = ((c0 + c1) + c2) -- Code(Expression(2, Sub)) at (prev + 1, 9) to (start + 0, 15) - = (((c0 + c1) + c2) - c3) -- Code(Counter(2)) at (prev + 0, 19) to (start + 0, 25) -- Code(Expression(3, Add)) at (prev + 1, 9) to (start + 0, 15) - = (c1 + c2) -- Code(Counter(3)) at (prev + 2, 1) to (start + 0, 2) -Highest counter ID seen: c3 +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 17) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 15) + = (c1 - c0) +- Code(Expression(1, Sub)) at (prev + 0, 19) to (start + 0, 25) + = (c1 - (c0 + c2)) +- Code(Expression(0, Sub)) at (prev + 1, 9) to (start + 0, 15) + = (c1 - c0) +- Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2) +Highest counter ID seen: c1 Function name: unused::main Raw bytes (9): 0x[01, 01, 00, 01, 01, 25, 01, 04, 02] diff --git a/tests/coverage/while.cov-map b/tests/coverage/while.cov-map index 29493a651dcbb..d42aa8a7b847d 100644 --- a/tests/coverage/while.cov-map +++ b/tests/coverage/while.cov-map @@ -1,14 +1,12 @@ Function name: while::main -Raw bytes (26): 0x[01, 01, 01, 01, 00, 04, 01, 01, 01, 01, 10, 03, 02, 0b, 00, 14, 00, 00, 15, 02, 06, 01, 03, 01, 00, 02] +Raw bytes (24): 0x[01, 01, 00, 04, 01, 01, 01, 01, 10, 05, 02, 0b, 00, 14, 00, 00, 15, 02, 06, 01, 03, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 1 -- expression 0 operands: lhs = Counter(0), rhs = Zero +Number of expressions: 0 Number of file 0 mappings: 4 - Code(Counter(0)) at (prev + 1, 1) to (start + 1, 16) -- Code(Expression(0, Add)) at (prev + 2, 11) to (start + 0, 20) - = (c0 + Zero) +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 20) - Code(Zero) at (prev + 0, 21) to (start + 2, 6) - Code(Counter(0)) at (prev + 3, 1) to (start + 0, 2) -Highest counter ID seen: c0 +Highest counter ID seen: c1 diff --git a/tests/coverage/while_early_ret.cov-map b/tests/coverage/while_early_ret.cov-map index 554056fa80109..69b51bf9ca34b 100644 --- a/tests/coverage/while_early_ret.cov-map +++ b/tests/coverage/while_early_ret.cov-map @@ -1,26 +1,28 @@ Function name: while_early_ret::main -Raw bytes (59): 0x[01, 01, 05, 01, 05, 03, 09, 01, 09, 01, 13, 09, 0d, 09, 01, 05, 01, 01, 1b, 03, 03, 09, 02, 0a, 06, 05, 0d, 02, 0e, 0a, 06, 15, 02, 16, 0d, 04, 15, 00, 1b, 0e, 04, 15, 00, 1b, 05, 03, 0a, 03, 0a, 09, 06, 05, 00, 0b, 01, 01, 01, 00, 02] +Raw bytes (63): 0x[01, 01, 07, 0f, 05, 01, 09, 0f, 13, 01, 09, 05, 0d, 05, 01, 05, 09, 09, 01, 05, 01, 01, 1b, 05, 03, 09, 02, 0a, 09, 05, 0d, 02, 0e, 02, 06, 15, 02, 16, 0d, 04, 15, 00, 1b, 0a, 04, 15, 00, 1b, 16, 03, 0a, 03, 0a, 1a, 06, 05, 00, 0b, 01, 01, 01, 00, 02] Number of files: 1 - file 0 => global file 1 -Number of expressions: 5 -- expression 0 operands: lhs = Counter(0), rhs = Counter(1) -- expression 1 operands: lhs = Expression(0, Add), rhs = Counter(2) -- expression 2 operands: lhs = Counter(0), rhs = Counter(2) -- expression 3 operands: lhs = Counter(0), rhs = Expression(4, Add) -- expression 4 operands: lhs = Counter(2), rhs = Counter(3) +Number of expressions: 7 +- expression 0 operands: lhs = Expression(3, Add), rhs = Counter(1) +- expression 1 operands: lhs = Counter(0), rhs = Counter(2) +- expression 2 operands: lhs = 
Expression(3, Add), rhs = Expression(4, Add) +- expression 3 operands: lhs = Counter(0), rhs = Counter(2) +- expression 4 operands: lhs = Counter(1), rhs = Counter(3) +- expression 5 operands: lhs = Counter(1), rhs = Counter(0) +- expression 6 operands: lhs = Counter(1), rhs = Counter(2) Number of file 0 mappings: 9 - Code(Counter(0)) at (prev + 5, 1) to (start + 1, 27) -- Code(Expression(0, Add)) at (prev + 3, 9) to (start + 2, 10) - = (c0 + c1) -- Code(Expression(1, Sub)) at (prev + 5, 13) to (start + 2, 14) - = ((c0 + c1) - c2) -- Code(Expression(2, Sub)) at (prev + 6, 21) to (start + 2, 22) - = (c0 - c2) +- Code(Counter(1)) at (prev + 3, 9) to (start + 2, 10) +- Code(Counter(2)) at (prev + 5, 13) to (start + 2, 14) +- Code(Expression(0, Sub)) at (prev + 6, 21) to (start + 2, 22) + = ((c0 + c2) - c1) - Code(Counter(3)) at (prev + 4, 21) to (start + 0, 27) -- Code(Expression(3, Sub)) at (prev + 4, 21) to (start + 0, 27) - = (c0 - (c2 + c3)) -- Code(Counter(1)) at (prev + 3, 10) to (start + 3, 10) -- Code(Counter(2)) at (prev + 6, 5) to (start + 0, 11) +- Code(Expression(2, Sub)) at (prev + 4, 21) to (start + 0, 27) + = ((c0 + c2) - (c1 + c3)) +- Code(Expression(5, Sub)) at (prev + 3, 10) to (start + 3, 10) + = (c1 - c0) +- Code(Expression(6, Sub)) at (prev + 6, 5) to (start + 0, 11) + = (c1 - c2) - Code(Counter(0)) at (prev + 1, 1) to (start + 0, 2) Highest counter ID seen: c3 diff --git a/tests/coverage/yield.cov-map b/tests/coverage/yield.cov-map index 868fec4b10796..d296f9bd778a2 100644 --- a/tests/coverage/yield.cov-map +++ b/tests/coverage/yield.cov-map @@ -1,36 +1,36 @@ Function name: yield::main -Raw bytes (94): 0x[01, 01, 05, 05, 00, 0d, 15, 0d, 11, 19, 1d, 25, 29, 10, 01, 07, 01, 01, 16, 01, 07, 0b, 00, 2e, 0d, 01, 27, 00, 29, 03, 01, 0e, 00, 34, 0d, 02, 0b, 00, 2e, 06, 01, 22, 00, 27, 11, 00, 2c, 00, 2e, 0a, 01, 0e, 00, 34, 11, 03, 09, 00, 16, 11, 08, 0b, 00, 2e, 21, 01, 27, 00, 29, 0f, 01, 0e, 00, 34, 21, 02, 0b, 00, 2e, 2d, 01, 27, 00, 29, 13, 01, 0e, 00, 34, 2d, 02, 01, 00, 02] +Raw bytes (94): 0x[01, 01, 05, 01, 05, 05, 09, 09, 11, 11, 15, 11, 15, 10, 01, 07, 01, 01, 16, 01, 07, 0b, 00, 2e, 05, 01, 27, 00, 29, 02, 01, 0e, 00, 34, 05, 02, 0b, 00, 2e, 0d, 01, 22, 00, 27, 09, 00, 2c, 00, 2e, 06, 01, 0e, 00, 34, 09, 03, 09, 00, 16, 09, 08, 0b, 00, 2e, 11, 01, 27, 00, 29, 0a, 01, 0e, 00, 34, 11, 02, 0b, 00, 2e, 12, 01, 27, 00, 29, 15, 01, 0e, 00, 34, 12, 02, 01, 00, 02] Number of files: 1 - file 0 => global file 1 Number of expressions: 5 -- expression 0 operands: lhs = Counter(1), rhs = Zero -- expression 1 operands: lhs = Counter(3), rhs = Counter(5) -- expression 2 operands: lhs = Counter(3), rhs = Counter(4) -- expression 3 operands: lhs = Counter(6), rhs = Counter(7) -- expression 4 operands: lhs = Counter(9), rhs = Counter(10) +- expression 0 operands: lhs = Counter(0), rhs = Counter(1) +- expression 1 operands: lhs = Counter(1), rhs = Counter(2) +- expression 2 operands: lhs = Counter(2), rhs = Counter(4) +- expression 3 operands: lhs = Counter(4), rhs = Counter(5) +- expression 4 operands: lhs = Counter(4), rhs = Counter(5) Number of file 0 mappings: 16 - Code(Counter(0)) at (prev + 7, 1) to (start + 1, 22) - Code(Counter(0)) at (prev + 7, 11) to (start + 0, 46) -- Code(Counter(3)) at (prev + 1, 39) to (start + 0, 41) -- Code(Expression(0, Add)) at (prev + 1, 14) to (start + 0, 52) - = (c1 + Zero) -- Code(Counter(3)) at (prev + 2, 11) to (start + 0, 46) -- Code(Expression(1, Sub)) at (prev + 1, 34) to (start + 0, 39) - = (c3 - c5) -- Code(Counter(4)) at (prev + 0, 44) to 
(start + 0, 46) +- Code(Counter(1)) at (prev + 1, 39) to (start + 0, 41) +- Code(Expression(0, Sub)) at (prev + 1, 14) to (start + 0, 52) + = (c0 - c1) +- Code(Counter(1)) at (prev + 2, 11) to (start + 0, 46) +- Code(Counter(3)) at (prev + 1, 34) to (start + 0, 39) +- Code(Counter(2)) at (prev + 0, 44) to (start + 0, 46) +- Code(Expression(1, Sub)) at (prev + 1, 14) to (start + 0, 52) + = (c1 - c2) +- Code(Counter(2)) at (prev + 3, 9) to (start + 0, 22) +- Code(Counter(2)) at (prev + 8, 11) to (start + 0, 46) +- Code(Counter(4)) at (prev + 1, 39) to (start + 0, 41) - Code(Expression(2, Sub)) at (prev + 1, 14) to (start + 0, 52) - = (c3 - c4) -- Code(Counter(4)) at (prev + 3, 9) to (start + 0, 22) -- Code(Counter(4)) at (prev + 8, 11) to (start + 0, 46) -- Code(Counter(8)) at (prev + 1, 39) to (start + 0, 41) -- Code(Expression(3, Add)) at (prev + 1, 14) to (start + 0, 52) - = (c6 + c7) -- Code(Counter(8)) at (prev + 2, 11) to (start + 0, 46) -- Code(Counter(11)) at (prev + 1, 39) to (start + 0, 41) -- Code(Expression(4, Add)) at (prev + 1, 14) to (start + 0, 52) - = (c9 + c10) -- Code(Counter(11)) at (prev + 2, 1) to (start + 0, 2) -Highest counter ID seen: c11 + = (c2 - c4) +- Code(Counter(4)) at (prev + 2, 11) to (start + 0, 46) +- Code(Expression(4, Sub)) at (prev + 1, 39) to (start + 0, 41) + = (c4 - c5) +- Code(Counter(5)) at (prev + 1, 14) to (start + 0, 52) +- Code(Expression(4, Sub)) at (prev + 2, 1) to (start + 0, 2) + = (c4 - c5) +Highest counter ID seen: c5 Function name: yield::main::{closure#0} Raw bytes (14): 0x[01, 01, 00, 02, 01, 09, 08, 01, 10, 05, 02, 10, 01, 06] diff --git a/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff index b480d1ac13a76..fa09cf0b83f87 100644 --- a/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff +++ b/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff @@ -8,11 +8,11 @@ let mut _3: !; + coverage body span: $DIR/instrument_coverage.rs:10:11: 16:2 (#0) -+ coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Add, rhs: Counter(1) }; ++ coverage ExpressionId(0) => Expression { lhs: Counter(1), op: Subtract, rhs: Counter(0) }; + coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:10:1: 10:11 (#0); -+ coverage Code(Expression(0)) => $DIR/instrument_coverage.rs:12:12: 12:17 (#0); ++ coverage Code(Counter(1)) => $DIR/instrument_coverage.rs:12:12: 12:17 (#0); + coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:13:13: 13:18 (#0); -+ coverage Code(Counter(1)) => $DIR/instrument_coverage.rs:14:10: 14:10 (#0); ++ coverage Code(Expression(0)) => $DIR/instrument_coverage.rs:14:10: 14:10 (#0); + coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:16:2: 16:2 (#0); + bb0: { @@ -21,7 +21,7 @@ } bb1: { -+ Coverage::ExpressionUsed(0); ++ Coverage::CounterIncrement(1); falseUnwind -> [real: bb2, unwind: bb6]; } @@ -41,7 +41,7 @@ } bb5: { -+ Coverage::CounterIncrement(1); ++ Coverage::ExpressionUsed(0); _1 = const (); StorageDead(_2); goto -> bb1; From 6eabf03526e561ea31b99e14a069e86cc168bede Mon Sep 17 00:00:00 2001 From: Zalathar Date: Thu, 16 Jan 2025 21:41:37 +1100 Subject: [PATCH 104/142] coverage: Make `yank_to_spantree_root` iterative instead of recursive This avoids having to worry about stack space when traversing very long spantree paths, e.g. when instrumenting a long sequence of if/else statements. 
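
For illustration only: below is a minimal, self-contained sketch of the same "record the path, then reverse its edges" rotation on a plain parent-pointer tree. The `Tree` type, its `parent` field, and `yank_to_root_iterative` are hypothetical names invented for this sketch, not the real `SpantreeBuilder` API; the real implementation also carries `is_reversed` and `claiming_node` on each spantree edge, which are omitted here.

    struct Tree {
        // parent[n] = parent of node n, or None if n is a root.
        parent: Vec<Option<usize>>,
    }

    impl Tree {
        /// Re-roots the tree at `this`: walk up to the current root while
        /// recording the path, then reverse every edge along that path.
        fn yank_to_root_iterative(&mut self, this: usize, path_buf: &mut Vec<usize>) {
            // Phase 1: record the path from `this` up to the root.
            path_buf.clear();
            path_buf.push(this);
            let mut curr = this;
            while let Some(parent) = self.parent[curr] {
                path_buf.push(parent);
                curr = parent;
            }

            // Phase 2: for each path edge `a -> b`, reverse it to `a <- b`.
            // Iterating in reverse keeps each write from being clobbered by
            // the next one.
            for w in path_buf.windows(2).rev() {
                let (a, b) = (w[0], w[1]);
                self.parent[a] = None;
                self.parent[b] = Some(a);
            }
        }
    }

    fn main() {
        // Chain 0 <- 1 <- 2 <- 3; re-rooting at 3 reverses the whole chain.
        let mut tree = Tree { parent: vec![None, Some(0), Some(1), Some(2)] };
        let mut buf = Vec::new();
        tree.yank_to_root_iterative(3, &mut buf);
        assert_eq!(tree.parent, vec![Some(1), Some(2), Some(3), None]);
    }

Reusing a caller-provided `path_buf` mirrors the shared `yank_buffer` introduced by this patch: it avoids a fresh allocation on every rotation without changing the algorithm.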
--- .../src/coverage/counters/node_flow.rs | 54 ++++++++++++------- compiler/rustc_mir_transform/src/lib.rs | 1 + 2 files changed, 36 insertions(+), 19 deletions(-) diff --git a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs index 5e5d66249597d..610498c6c0ec7 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters/node_flow.rs @@ -159,6 +159,8 @@ struct SpantreeBuilder<'a, Node: Idx> { /// A supernode without a span edge is the root of its component of the /// spantree. Nodes that aren't supernodes cannot have a spantree edge. span_edges: IndexVec>>, + /// Shared path buffer recycled by all calls to `yank_to_spantree_root`. + yank_buffer: Vec, /// An in-progress counter expression for each node. Each expression is /// initially empty, and will be filled in as relevant nodes are visited. counter_exprs: IndexVec>, @@ -171,6 +173,7 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> { graph, is_unvisited: DenseBitSet::new_filled(num_nodes), span_edges: IndexVec::from_fn_n(|_| None, num_nodes), + yank_buffer: vec![], counter_exprs: IndexVec::from_fn_n(|_| SmallVec::new(), num_nodes), } } @@ -192,24 +195,37 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> { fn yank_to_spantree_root(&mut self, this: Node) { debug_assert!(self.graph.is_supernode(this)); - // Temporarily remove this supernode (any any spantree-children) from its - // spantree component, by disconnecting the edge to its spantree-parent. - let Some(SpantreeEdge { is_reversed, claiming_node, span_parent }) = - self.span_edges[this].take() - else { - // This supernode has no spantree-parent edge, so it is already the - // root of its spantree component. - return; - }; - - // Recursively make our immediate spantree-parent the root of what's - // left of its component, so that only one more edge rotation is needed. - self.yank_to_spantree_root(span_parent); - - // Recreate the removed edge, but in the opposite direction. - // Now `this` is the root of its spantree component. - self.span_edges[span_parent] = - Some(SpantreeEdge { is_reversed: !is_reversed, claiming_node, span_parent: this }); + // The rotation is done iteratively, by first traversing from `this` to + // its root and storing the path in a buffer, and then traversing the + // path buffer backwards to reverse all the edges. + + // Recycle the same path buffer for all calls to this method. + let path_buf = &mut self.yank_buffer; + path_buf.clear(); + path_buf.push(this); + + // Traverse the spantree until we reach a supernode that has no + // span-parent, which must be the root. + let mut curr = this; + while let &Some(SpantreeEdge { span_parent, .. }) = &self.span_edges[curr] { + path_buf.push(span_parent); + curr = span_parent; + } + + // For each spantree edge `a -> b` in the path that was just traversed, + // reverse it to become `a <- b`, while preserving `claiming_node`. + for &[a, b] in path_buf.array_windows::<2>().rev() { + let SpantreeEdge { is_reversed, claiming_node, span_parent } = self.span_edges[a] + .take() + .expect("all nodes in the path (except the last) have a `span_parent`"); + debug_assert_eq!(span_parent, b); + debug_assert!(self.span_edges[b].is_none()); + self.span_edges[b] = + Some(SpantreeEdge { is_reversed: !is_reversed, claiming_node, span_parent: a }); + } + + // The result of the rotation is that `this` is now a spantree root. 
+ debug_assert!(self.span_edges[this].is_none()); } /// Must be called exactly once for each node in the balanced-flow graph. @@ -273,7 +289,7 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> { /// Asserts that all nodes have been visited, and returns the computed /// counter expressions (made up of physical counters) for each node. fn finish(self) -> IndexVec> { - let Self { graph, is_unvisited, span_edges, counter_exprs } = self; + let Self { graph, is_unvisited, span_edges, yank_buffer: _, counter_exprs } = self; assert!(is_unvisited.is_empty(), "some nodes were never visited: {is_unvisited:?}"); debug_assert!( span_edges diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 9459ef99445c5..db999bea98692 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -1,4 +1,5 @@ // tidy-alphabetical-start +#![feature(array_windows)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(const_type_name)] From 702134a9300dd36b8c49a044a06d4bc8e454cf87 Mon Sep 17 00:00:00 2001 From: Folkert de Vries Date: Wed, 15 Jan 2025 15:15:52 +0100 Subject: [PATCH 105/142] use indirect return for `i128` and `f128` on wasm32 --- compiler/rustc_target/src/callconv/wasm.rs | 12 ++++++ tests/codegen/f128-wasm32-callconv.rs | 49 ++++++++++++++++++++++ tests/codegen/i128-wasm32-callconv.rs | 49 ++++++++++++++++++++++ 3 files changed, 110 insertions(+) create mode 100644 tests/codegen/f128-wasm32-callconv.rs create mode 100644 tests/codegen/i128-wasm32-callconv.rs diff --git a/compiler/rustc_target/src/callconv/wasm.rs b/compiler/rustc_target/src/callconv/wasm.rs index 3c4cd76a75464..d01b59cbb032b 100644 --- a/compiler/rustc_target/src/callconv/wasm.rs +++ b/compiler/rustc_target/src/callconv/wasm.rs @@ -1,3 +1,5 @@ +use rustc_abi::{BackendRepr, Float, Integer, Primitive}; + use crate::abi::call::{ArgAbi, FnAbi}; use crate::abi::{HasDataLayout, TyAbiInterface}; @@ -27,6 +29,16 @@ where if ret.layout.is_aggregate() && !unwrap_trivial_aggregate(cx, ret) { ret.make_indirect(); } + + // `long double`, `__int128_t` and `__uint128_t` use an indirect return + if let BackendRepr::Scalar(scalar) = ret.layout.backend_repr { + match scalar.primitive() { + Primitive::Int(Integer::I128, _) | Primitive::Float(Float::F128) => { + ret.make_indirect(); + } + _ => {} + } + } } fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>) diff --git a/tests/codegen/f128-wasm32-callconv.rs b/tests/codegen/f128-wasm32-callconv.rs new file mode 100644 index 0000000000000..8b1b5e7fb013f --- /dev/null +++ b/tests/codegen/f128-wasm32-callconv.rs @@ -0,0 +1,49 @@ +//! 
Verify that Rust implements the expected calling convention for `f128` + +//@ add-core-stubs +//@ compile-flags: -O --target wasm32-wasip1 +//@ needs-llvm-components: webassembly + +#![crate_type = "lib"] +#![no_std] +#![no_core] +#![feature(no_core, lang_items, f128)] + +extern crate minicore; + +extern "C" { + fn extern_call(arg0: f128); + fn extern_ret() -> f128; +} + +#[no_mangle] +pub extern "C" fn pass(_arg0: u32, arg1: f128) { + // CHECK-LABEL: @pass( + // an f128 is passed via registers + // CHECK-SAME: fp128 noundef %arg1 + // CHECK: call void @extern_call + unsafe { extern_call(arg1) }; +} + +// Check that we produce the correct return ABI +#[no_mangle] +pub extern "C" fn ret(_arg0: u32, arg1: f128) -> f128 { + // CHECK-LABEL: @ret( + // but an f128 is returned via the stack + // CHECK-SAME: sret + // CHECK: store fp128 %arg1 + // CHECK-NEXT: ret void + arg1 +} + +// Check that we consume the correct return ABI +#[no_mangle] +pub extern "C" fn forward(dst: *mut f128) { + // CHECK-LABEL: @forward + // CHECK-SAME: ptr{{.*}} %dst) + // without optimizatons, an intermediate alloca is used + // CHECK: call void @extern_ret + // CHECK: store fp128 + // CHECK: ret void + unsafe { *dst = extern_ret() }; +} diff --git a/tests/codegen/i128-wasm32-callconv.rs b/tests/codegen/i128-wasm32-callconv.rs new file mode 100644 index 0000000000000..c6d25fbe8bea2 --- /dev/null +++ b/tests/codegen/i128-wasm32-callconv.rs @@ -0,0 +1,49 @@ +//! Verify that Rust implements the expected calling convention for `i128`/`u128`. + +//@ add-core-stubs +//@ compile-flags: -O --target wasm32-wasip1 +//@ needs-llvm-components: webassembly + +#![crate_type = "lib"] +#![no_std] +#![no_core] +#![feature(no_core, lang_items)] + +extern crate minicore; + +extern "C" { + fn extern_call(arg0: i128); + fn extern_ret() -> i128; +} + +#[no_mangle] +pub extern "C" fn pass(_arg0: u32, arg1: i128) { + // CHECK-LABEL: @pass( + // an i128 is passed via registers + // CHECK-SAME: i128 noundef %arg1 + // CHECK: call void @extern_call + unsafe { extern_call(arg1) }; +} + +// Check that we produce the correct return ABI +#[no_mangle] +pub extern "C" fn ret(_arg0: u32, arg1: i128) -> i128 { + // CHECK-LABEL: @ret( + // but an i128 is returned via the stack + // CHECK-SAME: sret + // CHECK: store i128 %arg1 + // CHECK-NEXT: ret void + arg1 +} + +// Check that we consume the correct return ABI +#[no_mangle] +pub extern "C" fn forward(dst: *mut i128) { + // CHECK-LABEL: @forward + // CHECK-SAME: ptr{{.*}} %dst) + // without optimizatons, an intermediate alloca is used + // CHECK: call void @extern_ret + // CHECK: store i128 + // CHECK: ret void + unsafe { *dst = extern_ret() }; +} From efe888871c0b49ca9230649ea54b4d0f0bc62d25 Mon Sep 17 00:00:00 2001 From: Jiahao XU Date: Fri, 17 Jan 2025 01:30:05 +1100 Subject: [PATCH 106/142] Move `std::pipe::*` into `std::io` Signed-off-by: Jiahao XU --- library/std/src/io/mod.rs | 249 +++++++++++++++++ library/std/src/io/tests.rs | 17 ++ library/std/src/lib.rs | 2 - library/std/src/pipe.rs | 258 ------------------ library/std/src/pipe/tests.rs | 19 -- library/std/src/sys/anonymous_pipe/unix.rs | 3 +- .../std/src/sys/anonymous_pipe/unsupported.rs | 3 +- library/std/src/sys/anonymous_pipe/windows.rs | 4 +- library/std/tests/pipe_subprocess.rs | 3 +- 9 files changed, 271 insertions(+), 287 deletions(-) delete mode 100644 library/std/src/pipe.rs delete mode 100644 library/std/src/pipe/tests.rs diff --git a/library/std/src/io/mod.rs b/library/std/src/io/mod.rs index 7912f969bbd9f..231c8712ebd55 100644 
--- a/library/std/src/io/mod.rs +++ b/library/std/src/io/mod.rs @@ -330,6 +330,7 @@ pub use self::{ }; use crate::mem::take; use crate::ops::{Deref, DerefMut}; +use crate::sys::anonymous_pipe::{AnonPipe, pipe as pipe_inner}; use crate::{cmp, fmt, slice, str, sys}; mod buffered; @@ -3250,3 +3251,251 @@ impl Iterator for Lines { } } } + +/// Create anonymous pipe that is close-on-exec and blocking. +/// +/// # Behavior +/// +/// A pipe is a synchronous, unidirectional data channel between two or more processes, like an +/// interprocess [`mpsc`](crate::sync::mpsc) provided by the OS. In particular: +/// +/// * A read on a [`PipeReader`] blocks until the pipe is non-empty. +/// * A write on a [`PipeWriter`] blocks when the pipe is full. +/// * When all copies of a [`PipeWriter`] are closed, a read on the corresponding [`PipeReader`] +/// returns EOF. +/// * [`PipeReader`] can be shared, but only one process will consume the data in the pipe. +/// +/// # Capacity +/// +/// Pipe capacity is platform dependent. To quote the Linux [man page]: +/// +/// > Different implementations have different limits for the pipe capacity. Applications should +/// > not rely on a particular capacity: an application should be designed so that a reading process +/// > consumes data as soon as it is available, so that a writing process does not remain blocked. +/// +/// # Examples +/// +/// ```no_run +/// #![feature(anonymous_pipe)] +/// # #[cfg(miri)] fn main() {} +/// # #[cfg(not(miri))] +/// # fn main() -> std::io::Result<()> { +/// # use std::process::Command; +/// # use std::io::{Read, Write}; +/// let (ping_rx, mut ping_tx) = std::io::pipe()?; +/// let (mut pong_rx, pong_tx) = std::io::pipe()?; +/// +/// // Spawn a process that echoes its input. +/// let mut echo_server = Command::new("cat").stdin(ping_rx).stdout(pong_tx).spawn()?; +/// +/// ping_tx.write_all(b"hello")?; +/// // Close to unblock echo_server's reader. +/// drop(ping_tx); +/// +/// let mut buf = String::new(); +/// // Block until echo_server's writer is closed. +/// pong_rx.read_to_string(&mut buf)?; +/// assert_eq!(&buf, "hello"); +/// +/// echo_server.wait()?; +/// # Ok(()) +/// # } +/// ``` +/// [pipe]: https://man7.org/linux/man-pages/man2/pipe.2.html +/// [CreatePipe]: https://learn.microsoft.com/en-us/windows/win32/api/namedpipeapi/nf-namedpipeapi-createpipe +/// [man page]: https://man7.org/linux/man-pages/man7/pipe.7.html +#[unstable(feature = "anonymous_pipe", issue = "127154")] +#[inline] +pub fn pipe() -> Result<(PipeReader, PipeWriter)> { + pipe_inner().map(|(reader, writer)| (PipeReader(reader), PipeWriter(writer))) +} + +/// Read end of the anonymous pipe. +#[unstable(feature = "anonymous_pipe", issue = "127154")] +#[derive(Debug)] +pub struct PipeReader(pub(crate) AnonPipe); + +/// Write end of the anonymous pipe. +#[unstable(feature = "anonymous_pipe", issue = "127154")] +#[derive(Debug)] +pub struct PipeWriter(pub(crate) AnonPipe); + +impl PipeReader { + /// Create a new [`PipeReader`] instance that shares the same underlying file description. 
+ /// + /// # Examples + /// + /// ```no_run + /// #![feature(anonymous_pipe)] + /// # #[cfg(miri)] fn main() {} + /// # #[cfg(not(miri))] + /// # fn main() -> std::io::Result<()> { + /// # use std::fs; + /// # use std::io::Write; + /// # use std::process::Command; + /// const NUM_SLOT: u8 = 2; + /// const NUM_PROC: u8 = 5; + /// const OUTPUT: &str = "work.txt"; + /// + /// let mut jobs = vec![]; + /// let (reader, mut writer) = std::io::pipe()?; + /// + /// // Write NUM_SLOT characters the pipe. + /// writer.write_all(&[b'|'; NUM_SLOT as usize])?; + /// + /// // Spawn several processes that read a character from the pipe, do some work, then + /// // write back to the pipe. When the pipe is empty, the processes block, so only + /// // NUM_SLOT processes can be working at any given time. + /// for _ in 0..NUM_PROC { + /// jobs.push( + /// Command::new("bash") + /// .args(["-c", + /// &format!( + /// "read -n 1\n\ + /// echo -n 'x' >> '{OUTPUT}'\n\ + /// echo -n '|'", + /// ), + /// ]) + /// .stdin(reader.try_clone()?) + /// .stdout(writer.try_clone()?) + /// .spawn()?, + /// ); + /// } + /// + /// // Wait for all jobs to finish. + /// for mut job in jobs { + /// job.wait()?; + /// } + /// + /// // Check our work and clean up. + /// let xs = fs::read_to_string(OUTPUT)?; + /// fs::remove_file(OUTPUT)?; + /// assert_eq!(xs, "x".repeat(NUM_PROC.into())); + /// # Ok(()) + /// # } + /// ``` + #[unstable(feature = "anonymous_pipe", issue = "127154")] + pub fn try_clone(&self) -> Result { + self.0.try_clone().map(Self) + } +} + +impl PipeWriter { + /// Create a new [`PipeWriter`] instance that shares the same underlying file description. + /// + /// # Examples + /// + /// ```no_run + /// #![feature(anonymous_pipe)] + /// # #[cfg(miri)] fn main() {} + /// # #[cfg(not(miri))] + /// # fn main() -> std::io::Result<()> { + /// # use std::process::Command; + /// # use std::io::Read; + /// let (mut reader, writer) = std::io::pipe()?; + /// + /// // Spawn a process that writes to stdout and stderr. + /// let mut peer = Command::new("bash") + /// .args([ + /// "-c", + /// "echo -n foo\n\ + /// echo -n bar >&2" + /// ]) + /// .stdout(writer.try_clone()?) + /// .stderr(writer) + /// .spawn()?; + /// + /// // Read and check the result. 
+ /// let mut msg = String::new(); + /// reader.read_to_string(&mut msg)?; + /// assert_eq!(&msg, "foobar"); + /// + /// peer.wait()?; + /// # Ok(()) + /// # } + /// ``` + #[unstable(feature = "anonymous_pipe", issue = "127154")] + pub fn try_clone(&self) -> Result { + self.0.try_clone().map(Self) + } +} + +#[unstable(feature = "anonymous_pipe", issue = "127154")] +impl Read for &PipeReader { + fn read(&mut self, buf: &mut [u8]) -> Result { + self.0.read(buf) + } + fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> Result { + self.0.read_vectored(bufs) + } + #[inline] + fn is_read_vectored(&self) -> bool { + self.0.is_read_vectored() + } + fn read_to_end(&mut self, buf: &mut Vec) -> Result { + self.0.read_to_end(buf) + } + fn read_buf(&mut self, buf: BorrowedCursor<'_>) -> Result<()> { + self.0.read_buf(buf) + } +} + +#[unstable(feature = "anonymous_pipe", issue = "127154")] +impl Read for PipeReader { + fn read(&mut self, buf: &mut [u8]) -> Result { + self.0.read(buf) + } + fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> Result { + self.0.read_vectored(bufs) + } + #[inline] + fn is_read_vectored(&self) -> bool { + self.0.is_read_vectored() + } + fn read_to_end(&mut self, buf: &mut Vec) -> Result { + self.0.read_to_end(buf) + } + fn read_buf(&mut self, buf: BorrowedCursor<'_>) -> Result<()> { + self.0.read_buf(buf) + } +} + +#[unstable(feature = "anonymous_pipe", issue = "127154")] +impl Write for &PipeWriter { + fn write(&mut self, buf: &[u8]) -> Result { + self.0.write(buf) + } + #[inline] + fn flush(&mut self) -> Result<()> { + Ok(()) + } + + fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> Result { + self.0.write_vectored(bufs) + } + + #[inline] + fn is_write_vectored(&self) -> bool { + self.0.is_write_vectored() + } +} + +#[unstable(feature = "anonymous_pipe", issue = "127154")] +impl Write for PipeWriter { + fn write(&mut self, buf: &[u8]) -> Result { + self.0.write(buf) + } + #[inline] + fn flush(&mut self) -> Result<()> { + Ok(()) + } + + fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> Result { + self.0.write_vectored(bufs) + } + + #[inline] + fn is_write_vectored(&self) -> bool { + self.0.is_write_vectored() + } +} diff --git a/library/std/src/io/tests.rs b/library/std/src/io/tests.rs index 47cbb9614afdd..85098b3bb181f 100644 --- a/library/std/src/io/tests.rs +++ b/library/std/src/io/tests.rs @@ -823,3 +823,20 @@ fn try_oom_error() { let io_err = io::Error::from(reserve_err); assert_eq!(io::ErrorKind::OutOfMemory, io_err.kind()); } + +#[test] +#[cfg(all(windows, unix, not(miri)))] +fn pipe_creation_clone_and_rw() { + let (rx, tx) = std::io::pipe().unwrap(); + + tx.try_clone().unwrap().write_all(b"12345").unwrap(); + drop(tx); + + let mut rx2 = rx.try_clone().unwrap(); + drop(rx); + + let mut s = String::new(); + rx2.read_to_string(&mut s).unwrap(); + drop(rx2); + assert_eq!(s, "12345"); +} diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 5c12236617c98..38d7ecc736392 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -596,8 +596,6 @@ pub mod panic; #[unstable(feature = "pattern_type_macro", issue = "123646")] pub mod pat; pub mod path; -#[unstable(feature = "anonymous_pipe", issue = "127154")] -pub mod pipe; pub mod process; #[unstable(feature = "random", issue = "130703")] pub mod random; diff --git a/library/std/src/pipe.rs b/library/std/src/pipe.rs deleted file mode 100644 index 913c22588a76c..0000000000000 --- a/library/std/src/pipe.rs +++ /dev/null @@ -1,258 +0,0 @@ -//! A cross-platform anonymous pipe. -//! 
-//! This module provides support for anonymous OS pipes, like [pipe] on Linux or [CreatePipe] on -//! Windows. -//! -//! # Behavior -//! -//! A pipe is a synchronous, unidirectional data channel between two or more processes, like an -//! interprocess [`mpsc`](crate::sync::mpsc) provided by the OS. In particular: -//! -//! * A read on a [`PipeReader`] blocks until the pipe is non-empty. -//! * A write on a [`PipeWriter`] blocks when the pipe is full. -//! * When all copies of a [`PipeWriter`] are closed, a read on the corresponding [`PipeReader`] -//! returns EOF. -//! * [`PipeReader`] can be shared, but only one process will consume the data in the pipe. -//! -//! # Capacity -//! -//! Pipe capacity is platform dependent. To quote the Linux [man page]: -//! -//! > Different implementations have different limits for the pipe capacity. Applications should -//! > not rely on a particular capacity: an application should be designed so that a reading process -//! > consumes data as soon as it is available, so that a writing process does not remain blocked. -//! -//! # Examples -//! -//! ```no_run -//! #![feature(anonymous_pipe)] -//! # #[cfg(miri)] fn main() {} -//! # #[cfg(not(miri))] -//! # fn main() -> std::io::Result<()> { -//! # use std::process::Command; -//! # use std::io::{Read, Write}; -//! let (ping_rx, mut ping_tx) = std::pipe::pipe()?; -//! let (mut pong_rx, pong_tx) = std::pipe::pipe()?; -//! -//! // Spawn a process that echoes its input. -//! let mut echo_server = Command::new("cat").stdin(ping_rx).stdout(pong_tx).spawn()?; -//! -//! ping_tx.write_all(b"hello")?; -//! // Close to unblock echo_server's reader. -//! drop(ping_tx); -//! -//! let mut buf = String::new(); -//! // Block until echo_server's writer is closed. -//! pong_rx.read_to_string(&mut buf)?; -//! assert_eq!(&buf, "hello"); -//! -//! echo_server.wait()?; -//! # Ok(()) -//! # } -//! ``` -//! [pipe]: https://man7.org/linux/man-pages/man2/pipe.2.html -//! [CreatePipe]: https://learn.microsoft.com/en-us/windows/win32/api/namedpipeapi/nf-namedpipeapi-createpipe -//! [man page]: https://man7.org/linux/man-pages/man7/pipe.7.html -use crate::io; -use crate::sys::anonymous_pipe::{AnonPipe, pipe as pipe_inner}; - -/// Create anonymous pipe that is close-on-exec and blocking. -/// -/// # Examples -/// -/// See the [module-level](crate::pipe) documentation for examples. -#[unstable(feature = "anonymous_pipe", issue = "127154")] -#[inline] -pub fn pipe() -> io::Result<(PipeReader, PipeWriter)> { - pipe_inner().map(|(reader, writer)| (PipeReader(reader), PipeWriter(writer))) -} - -/// Read end of the anonymous pipe. -#[unstable(feature = "anonymous_pipe", issue = "127154")] -#[derive(Debug)] -pub struct PipeReader(pub(crate) AnonPipe); - -/// Write end of the anonymous pipe. -#[unstable(feature = "anonymous_pipe", issue = "127154")] -#[derive(Debug)] -pub struct PipeWriter(pub(crate) AnonPipe); - -impl PipeReader { - /// Create a new [`PipeReader`] instance that shares the same underlying file description. - /// - /// # Examples - /// - /// ```no_run - /// #![feature(anonymous_pipe)] - /// # #[cfg(miri)] fn main() {} - /// # #[cfg(not(miri))] - /// # fn main() -> std::io::Result<()> { - /// # use std::fs; - /// # use std::io::Write; - /// # use std::process::Command; - /// const NUM_SLOT: u8 = 2; - /// const NUM_PROC: u8 = 5; - /// const OUTPUT: &str = "work.txt"; - /// - /// let mut jobs = vec![]; - /// let (reader, mut writer) = std::pipe::pipe()?; - /// - /// // Write NUM_SLOT characters the pipe. 
- /// writer.write_all(&[b'|'; NUM_SLOT as usize])?; - /// - /// // Spawn several processes that read a character from the pipe, do some work, then - /// // write back to the pipe. When the pipe is empty, the processes block, so only - /// // NUM_SLOT processes can be working at any given time. - /// for _ in 0..NUM_PROC { - /// jobs.push( - /// Command::new("bash") - /// .args(["-c", - /// &format!( - /// "read -n 1\n\ - /// echo -n 'x' >> '{OUTPUT}'\n\ - /// echo -n '|'", - /// ), - /// ]) - /// .stdin(reader.try_clone()?) - /// .stdout(writer.try_clone()?) - /// .spawn()?, - /// ); - /// } - /// - /// // Wait for all jobs to finish. - /// for mut job in jobs { - /// job.wait()?; - /// } - /// - /// // Check our work and clean up. - /// let xs = fs::read_to_string(OUTPUT)?; - /// fs::remove_file(OUTPUT)?; - /// assert_eq!(xs, "x".repeat(NUM_PROC.into())); - /// # Ok(()) - /// # } - /// ``` - #[unstable(feature = "anonymous_pipe", issue = "127154")] - pub fn try_clone(&self) -> io::Result { - self.0.try_clone().map(Self) - } -} - -impl PipeWriter { - /// Create a new [`PipeWriter`] instance that shares the same underlying file description. - /// - /// # Examples - /// - /// ```no_run - /// #![feature(anonymous_pipe)] - /// # #[cfg(miri)] fn main() {} - /// # #[cfg(not(miri))] - /// # fn main() -> std::io::Result<()> { - /// # use std::process::Command; - /// # use std::io::Read; - /// let (mut reader, writer) = std::pipe::pipe()?; - /// - /// // Spawn a process that writes to stdout and stderr. - /// let mut peer = Command::new("bash") - /// .args([ - /// "-c", - /// "echo -n foo\n\ - /// echo -n bar >&2" - /// ]) - /// .stdout(writer.try_clone()?) - /// .stderr(writer) - /// .spawn()?; - /// - /// // Read and check the result. - /// let mut msg = String::new(); - /// reader.read_to_string(&mut msg)?; - /// assert_eq!(&msg, "foobar"); - /// - /// peer.wait()?; - /// # Ok(()) - /// # } - /// ``` - #[unstable(feature = "anonymous_pipe", issue = "127154")] - pub fn try_clone(&self) -> io::Result { - self.0.try_clone().map(Self) - } -} - -#[unstable(feature = "anonymous_pipe", issue = "127154")] -impl io::Read for &PipeReader { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.0.read(buf) - } - fn read_vectored(&mut self, bufs: &mut [io::IoSliceMut<'_>]) -> io::Result { - self.0.read_vectored(bufs) - } - #[inline] - fn is_read_vectored(&self) -> bool { - self.0.is_read_vectored() - } - fn read_to_end(&mut self, buf: &mut Vec) -> io::Result { - self.0.read_to_end(buf) - } - fn read_buf(&mut self, buf: io::BorrowedCursor<'_>) -> io::Result<()> { - self.0.read_buf(buf) - } -} - -#[unstable(feature = "anonymous_pipe", issue = "127154")] -impl io::Read for PipeReader { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.0.read(buf) - } - fn read_vectored(&mut self, bufs: &mut [io::IoSliceMut<'_>]) -> io::Result { - self.0.read_vectored(bufs) - } - #[inline] - fn is_read_vectored(&self) -> bool { - self.0.is_read_vectored() - } - fn read_to_end(&mut self, buf: &mut Vec) -> io::Result { - self.0.read_to_end(buf) - } - fn read_buf(&mut self, buf: io::BorrowedCursor<'_>) -> io::Result<()> { - self.0.read_buf(buf) - } -} - -#[unstable(feature = "anonymous_pipe", issue = "127154")] -impl io::Write for &PipeWriter { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - #[inline] - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } - - fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result { - self.0.write_vectored(bufs) - } - - #[inline] - fn 
is_write_vectored(&self) -> bool { - self.0.is_write_vectored() - } -} - -#[unstable(feature = "anonymous_pipe", issue = "127154")] -impl io::Write for PipeWriter { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - #[inline] - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } - - fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result { - self.0.write_vectored(bufs) - } - - #[inline] - fn is_write_vectored(&self) -> bool { - self.0.is_write_vectored() - } -} diff --git a/library/std/src/pipe/tests.rs b/library/std/src/pipe/tests.rs deleted file mode 100644 index 9c38e10678752..0000000000000 --- a/library/std/src/pipe/tests.rs +++ /dev/null @@ -1,19 +0,0 @@ -use crate::io::{Read, Write}; -use crate::pipe::pipe; - -#[test] -#[cfg(all(windows, unix, not(miri)))] -fn pipe_creation_clone_and_rw() { - let (rx, tx) = pipe().unwrap(); - - tx.try_clone().unwrap().write_all(b"12345").unwrap(); - drop(tx); - - let mut rx2 = rx.try_clone().unwrap(); - drop(rx); - - let mut s = String::new(); - rx2.read_to_string(&mut s).unwrap(); - drop(rx2); - assert_eq!(s, "12345"); -} diff --git a/library/std/src/sys/anonymous_pipe/unix.rs b/library/std/src/sys/anonymous_pipe/unix.rs index 9168024730e67..9e398765634b7 100644 --- a/library/std/src/sys/anonymous_pipe/unix.rs +++ b/library/std/src/sys/anonymous_pipe/unix.rs @@ -1,6 +1,5 @@ -use crate::io; +use crate::io::{self, PipeReader, PipeWriter}; use crate::os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; -use crate::pipe::{PipeReader, PipeWriter}; use crate::process::Stdio; use crate::sys::fd::FileDesc; use crate::sys::pipe::anon_pipe; diff --git a/library/std/src/sys/anonymous_pipe/unsupported.rs b/library/std/src/sys/anonymous_pipe/unsupported.rs index dd51e70315e96..4e79ac9c21aad 100644 --- a/library/std/src/sys/anonymous_pipe/unsupported.rs +++ b/library/std/src/sys/anonymous_pipe/unsupported.rs @@ -1,5 +1,4 @@ -use crate::io; -use crate::pipe::{PipeReader, PipeWriter}; +use crate::io::{self, PipeReader, PipeWriter}; use crate::process::Stdio; pub use crate::sys::pipe::AnonPipe; diff --git a/library/std/src/sys/anonymous_pipe/windows.rs b/library/std/src/sys/anonymous_pipe/windows.rs index a48198f8a812b..eb7fa8ec1c9a1 100644 --- a/library/std/src/sys/anonymous_pipe/windows.rs +++ b/library/std/src/sys/anonymous_pipe/windows.rs @@ -1,12 +1,12 @@ +use crate::io::{self, PipeReader, PipeWriter}; use crate::os::windows::io::{ AsHandle, AsRawHandle, BorrowedHandle, FromRawHandle, IntoRawHandle, OwnedHandle, RawHandle, }; -use crate::pipe::{PipeReader, PipeWriter}; use crate::process::Stdio; +use crate::ptr; use crate::sys::c; use crate::sys::handle::Handle; use crate::sys_common::{FromInner, IntoInner}; -use crate::{io, ptr}; pub type AnonPipe = Handle; diff --git a/library/std/tests/pipe_subprocess.rs b/library/std/tests/pipe_subprocess.rs index 1535742a83a21..df946cdcf2b70 100644 --- a/library/std/tests/pipe_subprocess.rs +++ b/library/std/tests/pipe_subprocess.rs @@ -3,8 +3,7 @@ fn main() { #[cfg(all(not(miri), any(unix, windows)))] { - use std::io::Read; - use std::pipe::pipe; + use std::io::{Read, pipe}; use std::{env, process}; if env::var("I_AM_THE_CHILD").is_ok() { From cde58dd5f781c3998d2421132854d2a833937e85 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 16 Jan 2025 17:34:31 +0300 Subject: [PATCH 107/142] resolve symlinks of LLVM tool binaries before copying them There is a chance that these tools are being installed from an external LLVM and we have no control over them. 
If any of these tools use symlinks, they will fail during tarball distribution. This change makes copying process to resolve symlinks just before placing them into the destination path. Signed-off-by: onur-ozkan --- src/bootstrap/src/core/build_steps/compile.rs | 8 +++++++- src/bootstrap/src/lib.rs | 8 ++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 4c4df922b3799..fd9bf47234c63 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -1777,7 +1777,13 @@ impl Step for Assemble { // When using `download-ci-llvm`, some of the tools // may not exist, so skip trying to copy them. if src_path.exists() { - builder.copy_link(&src_path, &libdir_bin.join(&tool_exe)); + // There is a chance that these tools are being installed from an external LLVM. + // Use `Builder::resolve_symlink_and_copy` instead of `Builder::copy_link` to ensure + // we are copying the original file not the symlinked path, which causes issues for + // tarball distribution. + // + // See https://github.com/rust-lang/rust/issues/135554. + builder.resolve_symlink_and_copy(&src_path, &libdir_bin.join(&tool_exe)); } } } diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index ebe2c332258d7..482e23cd04c31 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -1633,6 +1633,14 @@ Executed at: {executed_at}"#, paths } + /// Copies a file from `src` to `dst`. + /// + /// If `src` is a symlink, `src` will be resolved to the actual path + /// and copied to `dst` instead of the symlink itself. + pub fn resolve_symlink_and_copy(&self, src: &Path, dst: &Path) { + self.copy_link_internal(src, dst, true); + } + /// Links a file from `src` to `dst`. /// Attempts to use hard links if possible, falling back to copying. /// You can neither rely on this being a copy nor it being a link, From 1fcbb4dea7f8a7dfaa997748ee556858d1122a52 Mon Sep 17 00:00:00 2001 From: Jonathan Pallant Date: Thu, 16 Jan 2025 16:04:02 +0000 Subject: [PATCH 108/142] Add license-metadata.json to rustc-src tarball. --- src/bootstrap/src/core/build_steps/dist.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index b298f472e0dc7..afa409aa83506 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -1001,6 +1001,7 @@ impl Step for PlainSourceTarball { "README.md", "RELEASES.md", "REUSE.toml", + "license-metadata.json", "configure", "x.py", "config.example.toml", From 9397d133f683070863029552313b5e083398bf42 Mon Sep 17 00:00:00 2001 From: binarycat Date: Mon, 13 Jan 2025 15:31:15 -0600 Subject: [PATCH 109/142] Treat other items as functions for the purpose of type-based search constants and statics are nullary functions, and struct fields are unary functions. functions (along with methods and trait methods) are prioritized over other items, like fields and constants. 
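As a concrete illustration of the mapping described above, the sketch below shows one hypothetical way non-function items can be folded into a type-signature index. The `Signature` struct and helper names are invented for this example and do not mirror rustdoc's actual `RenderType`/`IndexItemFunctionType` machinery; the real encoding is in `get_function_type_for_search` and `make_nullary_fn` in the diff that follows.

```rust
#[derive(Debug, PartialEq)]
struct Signature {
    inputs: Vec<&'static str>,
    output: &'static str,
}

/// A `const` or `static` is indexed as a nullary function: `[] -> T`.
fn signature_of_const(ty: &'static str) -> Signature {
    Signature { inputs: vec![], output: ty }
}

/// A struct field is indexed as a getter: `[Parent] -> FieldTy`.
fn signature_of_field(parent: &'static str, field_ty: &'static str) -> Signature {
    Signature { inputs: vec![parent], output: field_ty }
}

fn main() {
    // `u32::MAX` matches the query `-> u32` ...
    assert_eq!(signature_of_const("u32"), Signature { inputs: vec![], output: "u32" });
    // ... and `CpuidResult::eax` matches `CpuidResult -> u32`.
    assert_eq!(
        signature_of_field("CpuidResult", "u32"),
        Signature { inputs: vec!["CpuidResult"], output: "u32" }
    );
}
```

With this encoding, a query such as `-> u32` can match `u32::MAX`, and `CpuidResult -> u32` can match the `CpuidResult::eax` field, as exercised by the new rustdoc-js-std tests added in this patch, while true functions and methods still sort ahead of these synthetic signatures.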
--- .../rustdoc/src/read-documentation/search.md | 14 +++++++++- src/librustdoc/html/render/search_index.rs | 27 +++++++++++++++++++ src/librustdoc/html/static/js/search.js | 16 +++++++++++ tests/rustdoc-gui/search-tab.goml | 2 +- tests/rustdoc-js-std/const-is-nullary-func.js | 7 +++++ tests/rustdoc-js-std/field-is-unary-func.js | 7 +++++ 6 files changed, 71 insertions(+), 2 deletions(-) create mode 100644 tests/rustdoc-js-std/const-is-nullary-func.js create mode 100644 tests/rustdoc-js-std/field-is-unary-func.js diff --git a/src/doc/rustdoc/src/read-documentation/search.md b/src/doc/rustdoc/src/read-documentation/search.md index e06dcdb7ed2fb..bace2f5f95390 100644 --- a/src/doc/rustdoc/src/read-documentation/search.md +++ b/src/doc/rustdoc/src/read-documentation/search.md @@ -52,9 +52,10 @@ methods on the allocator or free functions. [`Layout`]: ../../alloc/index.html?search=Layout&filter-crate=alloc -## Searching By Type Signature for functions +## Searching By Type Signature If you know more specifically what the function you want to look at does, +or you want to know how to get from one type to another, Rustdoc can search by more than one type at once in the parameters and return value. Multiple parameters are separated by `,` commas, and the return value is written with after a `->` arrow. @@ -86,6 +87,17 @@ the standard library and functions that are included in the results list: [iterasslice]: ../../std/vec/struct.Vec.html?search=vec%3A%3Aintoiter%20->%20[T]&filter-crate=std [iterreduce]: ../../std/index.html?search=iterator%2C%20fnmut%20->%20T&filter-crate=std +### Non-functions in type-based search +Certain items that are not functions are treated as though they +were a semantically equivelent function. + +For example, struct fields are treated as though they were getter methods. +This means that a search for `CpuidResult -> u32` will show +the `CpuidResult::eax` field in the results. + +Additionally, `const` and `static` items are treated as nullary functions, +so `-> u32` will match `u32::MAX`. + ### How type-based search works In a complex type-based search, Rustdoc always treats every item's name as literal. diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs index 66c52bec4bad7..e4a9a2b512e50 100644 --- a/src/librustdoc/html/render/search_index.rs +++ b/src/librustdoc/html/render/search_index.rs @@ -840,6 +840,22 @@ pub(crate) fn get_function_type_for_search( | clean::RequiredMethodItem(ref f) => { get_fn_inputs_and_outputs(f, tcx, impl_or_trait_generics, cache) } + clean::ConstantItem(ref c) => make_nullary_fn(&c.type_), + clean::StaticItem(ref s) => make_nullary_fn(&s.type_), + clean::StructFieldItem(ref t) => { + let Some(parent) = parent else { + return None; + }; + let mut rgen: FxIndexMap)> = + Default::default(); + let output = get_index_type(t, vec![], &mut rgen); + let input = RenderType { + id: Some(RenderTypeId::DefId(parent)), + generics: None, + bindings: None, + }; + (vec![input], vec![output], vec![], vec![]) + } _ => return None, }; @@ -1353,6 +1369,17 @@ fn simplify_fn_constraint<'a>( res.push((ty_constrained_assoc, ty_constraints)); } +/// Create a fake nullary function. +/// +/// Used to allow type-based search on constants and statics. 
+fn make_nullary_fn( + clean_type: &clean::Type, +) -> (Vec, Vec, Vec, Vec>) { + let mut rgen: FxIndexMap)> = Default::default(); + let output = get_index_type(clean_type, vec![], &mut rgen); + (vec![], vec![output], vec![], vec![]) +} + /// Return the full list of types when bounds have been resolved. /// /// i.e. `fn foo>(x: u32, y: B)` will return diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index 0a0550ab82f9b..660484c133c36 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -63,6 +63,9 @@ const TY_PRIMITIVE = itemTypes.indexOf("primitive"); const TY_GENERIC = itemTypes.indexOf("generic"); const TY_IMPORT = itemTypes.indexOf("import"); const TY_TRAIT = itemTypes.indexOf("trait"); +const TY_FN = itemTypes.indexOf("fn"); +const TY_METHOD = itemTypes.indexOf("method"); +const TY_TYMETHOD = itemTypes.indexOf("tymethod"); const ROOT_PATH = typeof window !== "undefined" ? window.rootPath : "../"; // Hard limit on how deep to recurse into generics when doing type-driven search. @@ -191,6 +194,10 @@ function isEndCharacter(c) { return "=,>-])".indexOf(c) !== -1; } +function isFnLikeTy(ty) { + return ty === TY_FN || ty === TY_METHOD || ty === TY_TYMETHOD; +} + /** * Returns `true` if the given `c` character is a separator. * @@ -2766,6 +2773,15 @@ class DocSearch { return a - b; } + // in type based search, put functions first + if (parsedQuery.hasReturnArrow) { + a = !isFnLikeTy(aaa.item.ty); + b = !isFnLikeTy(bbb.item.ty); + if (a !== b) { + return a - b; + } + } + // Sort by distance in the path part, if specified // (less changes required to match means higher rankings) a = aaa.path_dist; diff --git a/tests/rustdoc-gui/search-tab.goml b/tests/rustdoc-gui/search-tab.goml index 3879c127fd0f8..eea561e0c6760 100644 --- a/tests/rustdoc-gui/search-tab.goml +++ b/tests/rustdoc-gui/search-tab.goml @@ -79,7 +79,7 @@ set-window-size: (851, 600) // Check the size and count in tabs assert-text: ("#search-tabs > button:nth-child(1) > .count", " (26) ") -assert-text: ("#search-tabs > button:nth-child(2) > .count", " (6)  ") +assert-text: ("#search-tabs > button:nth-child(2) > .count", " (7)  ") assert-text: ("#search-tabs > button:nth-child(3) > .count", " (0)  ") store-property: ("#search-tabs > button:nth-child(1)", {"offsetWidth": buttonWidth}) assert-property: ("#search-tabs > button:nth-child(2)", {"offsetWidth": |buttonWidth|}) diff --git a/tests/rustdoc-js-std/const-is-nullary-func.js b/tests/rustdoc-js-std/const-is-nullary-func.js new file mode 100644 index 0000000000000..e929741b038f4 --- /dev/null +++ b/tests/rustdoc-js-std/const-is-nullary-func.js @@ -0,0 +1,7 @@ +const EXPECTED = { + 'query': '-> char', + 'others': [ + { 'path': 'std::char', 'name': 'from_digit' }, + { 'path': 'std::char', 'name': 'MAX' }, + ], +} diff --git a/tests/rustdoc-js-std/field-is-unary-func.js b/tests/rustdoc-js-std/field-is-unary-func.js new file mode 100644 index 0000000000000..09ce8a0dde0a4 --- /dev/null +++ b/tests/rustdoc-js-std/field-is-unary-func.js @@ -0,0 +1,7 @@ +const EXPECTED = { + // one of the only non-generic structs with public fields + 'query': 'CpuidResult -> u32', + 'others': [ + { 'path': 'core::arch::x86::CpuidResult', 'name': 'eax' }, + ], +} From 08c1256dc27e4af07940440252e6356d5e39c8d8 Mon Sep 17 00:00:00 2001 From: binarycat Date: Thu, 16 Jan 2025 12:10:38 -0600 Subject: [PATCH 110/142] fix error for when results in a rustdoc-js test are in the wrong order see COMPILETEST_FORCE_STAGE0=1 --- 
src/tools/rustdoc-js/tester.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js index 7aa5e102e6d2a..ea575f27799a9 100644 --- a/src/tools/rustdoc-js/tester.js +++ b/src/tools/rustdoc-js/tester.js @@ -256,7 +256,7 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) { JSON.stringify(results[key][index]) + "'"); } else if (ignore_order === false && entry_pos < prev_pos) { error_text.push(queryName + "==> '" + JSON.stringify(elem) + "' was supposed " + - "to be before '" + JSON.stringify(results[key][entry_pos]) + "'"); + "to be before '" + JSON.stringify(results[key][prev_pos]) + "'"); } else { prev_pos = entry_pos; } From 8d59545daa879365e8bab002b1c7f8a09c62d41d Mon Sep 17 00:00:00 2001 From: Samuel Tardieu Date: Wed, 15 Jan 2025 20:50:51 +0100 Subject: [PATCH 111/142] Fix suggestion to convert dereference of raw pointer to ref --- .../src/fn_ctxt/suggestions.rs | 1 + tests/ui/suggestions/raw-to-ref.fixed | 19 ++++++++++++++ tests/ui/suggestions/raw-to-ref.rs | 19 ++++++++++++++ tests/ui/suggestions/raw-to-ref.stderr | 25 +++++++++++++++++++ 4 files changed, 64 insertions(+) create mode 100644 tests/ui/suggestions/raw-to-ref.fixed create mode 100644 tests/ui/suggestions/raw-to-ref.rs create mode 100644 tests/ui/suggestions/raw-to-ref.stderr diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 39511ca30e0cf..53e055fdeef44 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -2682,6 +2682,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let hir::ExprKind::Unary(hir::UnOp::Deref, inner) = expr.kind && let Some(1) = self.deref_steps_for_suggestion(expected, checked_ty) + && self.typeck_results.borrow().expr_ty(inner).is_ref() { // We have `*&T`, check if what was expected was `&T`. // If so, we may want to suggest removing a `*`. diff --git a/tests/ui/suggestions/raw-to-ref.fixed b/tests/ui/suggestions/raw-to-ref.fixed new file mode 100644 index 0000000000000..17d61e67e1f61 --- /dev/null +++ b/tests/ui/suggestions/raw-to-ref.fixed @@ -0,0 +1,19 @@ +//@ run-rustfix +// Regression test for #135580: check that we do not suggest to simply drop +// the `*` to make the types match when the source is a raw pointer while +// the target type is a reference. + +struct S; + +fn main() { + let mut s = S; + let x = &raw const s; + let _: &S = unsafe { &*x }; + //~^ ERROR mismatched types + //~| HELP consider borrowing here + + let x = &raw mut s; + let _: &mut S = unsafe { &mut *x }; + //~^ ERROR mismatched types + //~| HELP consider mutably borrowing here +} diff --git a/tests/ui/suggestions/raw-to-ref.rs b/tests/ui/suggestions/raw-to-ref.rs new file mode 100644 index 0000000000000..2be8f881b5c3a --- /dev/null +++ b/tests/ui/suggestions/raw-to-ref.rs @@ -0,0 +1,19 @@ +//@ run-rustfix +// Regression test for #135580: check that we do not suggest to simply drop +// the `*` to make the types match when the source is a raw pointer while +// the target type is a reference. 
+ +struct S; + +fn main() { + let mut s = S; + let x = &raw const s; + let _: &S = unsafe { *x }; + //~^ ERROR mismatched types + //~| HELP consider borrowing here + + let x = &raw mut s; + let _: &mut S = unsafe { *x }; + //~^ ERROR mismatched types + //~| HELP consider mutably borrowing here +} diff --git a/tests/ui/suggestions/raw-to-ref.stderr b/tests/ui/suggestions/raw-to-ref.stderr new file mode 100644 index 0000000000000..ca358d268f018 --- /dev/null +++ b/tests/ui/suggestions/raw-to-ref.stderr @@ -0,0 +1,25 @@ +error[E0308]: mismatched types + --> $DIR/raw-to-ref.rs:11:26 + | +LL | let _: &S = unsafe { *x }; + | ^^ expected `&S`, found `S` + | +help: consider borrowing here + | +LL | let _: &S = unsafe { &*x }; + | + + +error[E0308]: mismatched types + --> $DIR/raw-to-ref.rs:16:30 + | +LL | let _: &mut S = unsafe { *x }; + | ^^ expected `&mut S`, found `S` + | +help: consider mutably borrowing here + | +LL | let _: &mut S = unsafe { &mut *x }; + | ++++ + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0308`. From 9bdc65866ced6531e988674324ffc72af817c880 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Thu, 16 Jan 2025 23:53:49 +0000 Subject: [PATCH 112/142] Expand docs for `E0207` with additional example --- .../src/error_codes/E0207.md | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/compiler/rustc_error_codes/src/error_codes/E0207.md b/compiler/rustc_error_codes/src/error_codes/E0207.md index 95e7c9fc76ce2..5b35748f4723c 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0207.md +++ b/compiler/rustc_error_codes/src/error_codes/E0207.md @@ -195,6 +195,30 @@ impl<'a> Contains for Foo { Please note that unconstrained lifetime parameters are not supported if they are being used by an associated type. +In cases where the associated type's lifetime is meant to be tied to the the +self type, and none of the methods on the trait need ownership or different +mutability, then an option is to implement the trait on a borrowed type: + +```rust +struct Foo(i32); + +trait Contents { + type Item; + + fn get(&self) -> Self::Item; +} + +// Note the lifetime `'a` is used both for the self type... +impl<'a> Contents for &'a Foo { + // ...and the associated type. + type Item = &'a i32; + + fn get(&self) -> Self::Item { + &self.0 + } +} +``` + ### Additional information For more information, please see [RFC 447]. 
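A short usage note on the new E0207 example above: because the trait is implemented for `&'a Foo`, the method is called through a shared reference and the returned `Item` borrows from that same reference. The `Foo`/`Contents` definitions below are repeated from the diff only to keep the sketch self-contained; the `main` function is illustrative and not part of the patch.

```rust
struct Foo(i32);

trait Contents {
    type Item;
    fn get(&self) -> Self::Item;
}

impl<'a> Contents for &'a Foo {
    type Item = &'a i32;
    fn get(&self) -> Self::Item {
        &self.0
    }
}

fn main() {
    let foo = Foo(42);
    // The impl is on `&Foo`, so the receiver is a reference and the returned
    // `&i32` borrows from `foo` for as long as that borrow is in use.
    let item: &i32 = (&foo).get();
    assert_eq!(*item, 42);
}
```

The trade-off, as the surrounding docs note, is that this pattern only fits when none of the trait's methods need ownership of, or mutable access to, the underlying `Foo`.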
From 097cb1a9559f9e1cd7d4a41f5f214070a678bbc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AE=B8=E6=9D=B0=E5=8F=8B=20Jieyou=20Xu=20=28Joe=29?= <39484203+jieyouxu@users.noreply.github.com> Date: Fri, 17 Jan 2025 08:07:25 +0800 Subject: [PATCH 113/142] compiletest: require `COMPILETEST_FORCE_STAGE0` for `./x test rustdoc-js --stage 0` --- src/bootstrap/src/core/build_steps/test.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index fdc1f30b8597a..9f3e4d9cc8995 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -1638,10 +1638,7 @@ impl Step for Compiletest { return; } - if builder.top_stage == 0 - && env::var("COMPILETEST_FORCE_STAGE0").is_err() - && self.mode != "js-doc-test" - { + if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() { eprintln!("\ ERROR: `--stage 0` runs compiletest on the beta compiler, not your local changes, and will almost always cause tests to fail HELP: to test the compiler, use `--stage 1` instead From 865a09d50a6eb5bb079310f0777e4e7afd1c676d Mon Sep 17 00:00:00 2001 From: yukang Date: Fri, 17 Jan 2025 13:24:09 +0800 Subject: [PATCH 114/142] remove unnecessary assertion for reference error --- compiler/rustc_hir_analysis/src/check/check.rs | 1 - tests/crashes/135341.rs | 5 ----- tests/ui/wf/ice-hir-wf-issue-135341.rs | 4 ++++ tests/ui/wf/ice-hir-wf-issue-135341.stderr | 9 +++++++++ 4 files changed, 13 insertions(+), 6 deletions(-) delete mode 100644 tests/crashes/135341.rs create mode 100644 tests/ui/wf/ice-hir-wf-issue-135341.rs create mode 100644 tests/ui/wf/ice-hir-wf-issue-135341.stderr diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index ec82644ea5ba4..b0a6922ff72bb 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -1637,7 +1637,6 @@ fn check_type_alias_type_params_are_used<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalD let ty = tcx.type_of(def_id).instantiate_identity(); if ty.references_error() { // If there is already another error, do not emit an error for not using a type parameter. - assert!(tcx.dcx().has_errors().is_some()); return; } diff --git a/tests/crashes/135341.rs b/tests/crashes/135341.rs deleted file mode 100644 index 0e33242a5f5a0..0000000000000 --- a/tests/crashes/135341.rs +++ /dev/null @@ -1,5 +0,0 @@ -//@ known-bug: #135341 -type A = B; -type B = _; - -pub fn main() {} diff --git a/tests/ui/wf/ice-hir-wf-issue-135341.rs b/tests/ui/wf/ice-hir-wf-issue-135341.rs new file mode 100644 index 0000000000000..7428575aee0db --- /dev/null +++ b/tests/ui/wf/ice-hir-wf-issue-135341.rs @@ -0,0 +1,4 @@ +type A = B; +type B = _; //~ ERROR the placeholder `_` is not allowed within types on item signatures for type aliases + +fn main() {} diff --git a/tests/ui/wf/ice-hir-wf-issue-135341.stderr b/tests/ui/wf/ice-hir-wf-issue-135341.stderr new file mode 100644 index 0000000000000..c568129cf56d0 --- /dev/null +++ b/tests/ui/wf/ice-hir-wf-issue-135341.stderr @@ -0,0 +1,9 @@ +error[E0121]: the placeholder `_` is not allowed within types on item signatures for type aliases + --> $DIR/ice-hir-wf-issue-135341.rs:2:10 + | +LL | type B = _; + | ^ not allowed in type signatures + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0121`. 
From 7874ba2f401a7fe587178a4641ebe59781f7b463 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Fri, 17 Jan 2025 11:20:13 +0300 Subject: [PATCH 115/142] add tidy check on dist src files order Signed-off-by: onur-ozkan --- src/bootstrap/src/core/build_steps/dist.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index afa409aa83506..a2c792ea2da4e 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -994,20 +994,22 @@ impl Step for PlainSourceTarball { // This is the set of root paths which will become part of the source package let src_files = [ + // tidy-alphabetical-start + ".gitmodules", + "Cargo.lock", + "Cargo.toml", + "config.example.toml", + "configure", + "CONTRIBUTING.md", "COPYRIGHT", "LICENSE-APACHE", + "license-metadata.json", "LICENSE-MIT", - "CONTRIBUTING.md", "README.md", "RELEASES.md", "REUSE.toml", - "license-metadata.json", - "configure", "x.py", - "config.example.toml", - "Cargo.toml", - "Cargo.lock", - ".gitmodules", + // tidy-alphabetical-end ]; let src_dirs = ["src", "compiler", "library", "tests", "LICENSES"]; From de89ffc34181ea5cc49fe7d2328c6ad85df002af Mon Sep 17 00:00:00 2001 From: MarcoIeni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 17 Jan 2025 09:22:10 +0100 Subject: [PATCH 116/142] CI: split i686-msvc job to two free runners --- src/bootstrap/mk/Makefile.in | 13 +++++++++++-- src/ci/github-actions/jobs.yml | 13 ++++++++++--- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index a1f38b9ac147f..082b7431440ad 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -97,6 +97,11 @@ tidy: prepare: $(Q)$(BOOTSTRAP) build --stage 2 --dry-run +# Set of tests that represent around half of the time of the test suite. +# Used to split tests across multiple CI runners. +STAGE_2_TEST_SET1 := test --stage 2 --skip=compiler --skip=src +STAGE_2_TEST_SET2 := test --stage 2 --skip=tests --skip=coverage-map --skip=coverage-run --skip=library --skip=tidyselftest + ## MSVC native builders # this intentionally doesn't use `$(BOOTSTRAP)` so we can test the shebang on Windows @@ -105,6 +110,10 @@ ci-msvc-py: ci-msvc-ps1: $(Q)$(CFG_SRC_DIR)/x.ps1 test --stage 2 --skip tidy ci-msvc: ci-msvc-py ci-msvc-ps1 +ci-msvc-py-set1: + $(Q)$(CFG_SRC_DIR)/x.py $(STAGE_2_TEST_SET1) +ci-msvc-ps1-set2: + $(Q)$(CFG_SRC_DIR)/x.ps1 $(STAGE_2_TEST_SET2) ## MingW native builders @@ -112,9 +121,9 @@ ci-msvc: ci-msvc-py ci-msvc-ps1 # Used to split tests across multiple CI runners. # Test both x and bootstrap entrypoints. ci-mingw-x: - $(Q)$(CFG_SRC_DIR)/x test --stage 2 --skip=compiler --skip=src + $(Q)$(CFG_SRC_DIR)/x $(STAGE_2_TEST_SET1) ci-mingw-bootstrap: - $(Q)$(BOOTSTRAP) test --stage 2 --skip=tests --skip=coverage-map --skip=coverage-run --skip=library --skip=tidyselftest + $(Q)$(BOOTSTRAP) $(STAGE_2_TEST_SET2) ci-mingw: ci-mingw-x ci-mingw-bootstrap .PHONY: dist diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 6bf4a75d080cc..7183495a46fe1 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -448,11 +448,18 @@ auto: SCRIPT: make ci-msvc <<: *job-windows-8c - - name: i686-msvc + # i686-msvc is split into two jobs to run tests in parallel. 
+ - name: i686-msvc-1 env: RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc - SCRIPT: make ci-msvc - <<: *job-windows-8c + SCRIPT: make ci-msvc-py-set1 + <<: *job-windows + + - name: i686-msvc-2 + env: + RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc + SCRIPT: make ci-msvc-ps1-set2 + <<: *job-windows # x86_64-msvc-ext is split into multiple jobs to run tests in parallel. - name: x86_64-msvc-ext1 From fdf4924a263a85165f3df8531da15e22e3fee823 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Fri, 17 Jan 2025 09:42:38 +0300 Subject: [PATCH 117/142] include `x` and `x.ps1` scripts in tarball sources Helps to provide 1:1 build experience between git-managed and tarball sources. Signed-off-by: onur-ozkan --- src/bootstrap/src/core/build_steps/dist.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index a2c792ea2da4e..470e400243f92 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -1008,6 +1008,8 @@ impl Step for PlainSourceTarball { "README.md", "RELEASES.md", "REUSE.toml", + "x", + "x.ps1", "x.py", // tidy-alphabetical-end ]; From 94bf8f04f402a2410ab85a6e6b9e542e3942b2a2 Mon Sep 17 00:00:00 2001 From: lcnr Date: Fri, 17 Jan 2025 10:01:45 +0100 Subject: [PATCH 118/142] add cache to `AmbiguityCausesVisitor` --- .../rustc_trait_selection/src/traits/coherence.rs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index 190e34618d2af..e27143f139641 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -6,7 +6,7 @@ use std::fmt::Debug; -use rustc_data_structures::fx::FxIndexSet; +use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; use rustc_errors::{Diag, EmissionGuarantee}; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; @@ -626,6 +626,7 @@ fn compute_intercrate_ambiguity_causes<'tcx>( } struct AmbiguityCausesVisitor<'a, 'tcx> { + cache: FxHashSet>>, causes: &'a mut FxIndexSet>, } @@ -635,6 +636,10 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a, 'tcx> { } fn visit_goal(&mut self, goal: &InspectGoal<'_, 'tcx>) { + if !self.cache.insert(goal.goal()) { + return; + } + let infcx = goal.infcx(); for cand in goal.candidates() { cand.visit_nested_in_probe(self); @@ -759,5 +764,10 @@ fn search_ambiguity_causes<'tcx>( goal: Goal<'tcx, ty::Predicate<'tcx>>, causes: &mut FxIndexSet>, ) { - infcx.probe(|_| infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor { causes })); + infcx.probe(|_| { + infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor { + cache: Default::default(), + causes, + }) + }); } From 5d07ee860ff4aeba8148cc7365657cae6787cf2b Mon Sep 17 00:00:00 2001 From: MarcoIeni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 17 Jan 2025 11:41:40 +0100 Subject: [PATCH 119/142] ci: improve github action name --- .github/workflows/ghcr.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ghcr.yml b/.github/workflows/ghcr.yml index 0fcd1b178163e..4793b28e72263 100644 --- a/.github/workflows/ghcr.yml +++ b/.github/workflows/ghcr.yml @@ -9,7 +9,7 @@ # for PR jobs, because forks can't access secrets. # That's why we use ghcr.io: it has no rate limit and it doesn't require authentication. 
-name: GHCR +name: GHCR image mirroring on: workflow_dispatch: From 0114a9707e92306adc15111cfb1ea9d1ab0e8f05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Thu, 9 Jan 2025 15:06:24 +0000 Subject: [PATCH 120/142] make `LocalizedConstraintGraph` a struct and not an alias this prepares the code structure for adding logical edges to the graph next --- .../src/polonius/loan_liveness.rs | 44 ++++++++++--------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs index c519453652fe8..3fa2a66e3ba1b 100644 --- a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs +++ b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs @@ -29,7 +29,7 @@ pub(super) fn compute_loan_liveness<'tcx>( // edges when visualizing the constraint graph anyways. let kills = collect_kills(body, tcx, borrow_set); - let graph = index_constraints(&localized_outlives_constraints); + let graph = LocalizedConstraintGraph::new(&localized_outlives_constraints); let mut visited = FxHashSet::default(); let mut stack = Vec::new(); @@ -108,7 +108,7 @@ pub(super) fn compute_loan_liveness<'tcx>( let is_loan_killed = kills.get(¤t_location).is_some_and(|kills| kills.contains(&loan_idx)); - for succ in outgoing_edges(&graph, node) { + for succ in graph.outgoing_edges(node) { // If the loan is killed at this point, it is killed _on exit_. But only during // forward traversal. if is_loan_killed { @@ -125,9 +125,12 @@ pub(super) fn compute_loan_liveness<'tcx>( live_loans } -/// The localized constraint graph is currently the per-node map of its physical edges. In the -/// future, we'll add logical edges to model constraints that hold at all points in the CFG. -type LocalizedConstraintGraph = FxHashMap>; +/// The localized constraint graph indexes the physical edges to compute a given node's successors +/// during traversal. +struct LocalizedConstraintGraph { + /// The actual, physical, edges we have recorded for a given node. + edges: FxHashMap>, +} /// A node in the graph to be traversed, one of the two vertices of a localized outlives constraint. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -136,24 +139,23 @@ struct LocalizedNode { point: PointIndex, } -/// Traverses the constraints and returns the indexable graph of edges per node. -fn index_constraints(constraints: &LocalizedOutlivesConstraintSet) -> LocalizedConstraintGraph { - let mut edges = LocalizedConstraintGraph::default(); - for constraint in &constraints.outlives { - let source = LocalizedNode { region: constraint.source, point: constraint.from }; - let target = LocalizedNode { region: constraint.target, point: constraint.to }; - edges.entry(source).or_default().insert(target); - } +impl LocalizedConstraintGraph { + /// Traverses the constraints and returns the indexed graph of edges per node. + fn new(constraints: &LocalizedOutlivesConstraintSet) -> Self { + let mut edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default(); + for constraint in &constraints.outlives { + let source = LocalizedNode { region: constraint.source, point: constraint.from }; + let target = LocalizedNode { region: constraint.target, point: constraint.to }; + edges.entry(source).or_default().insert(target); + } - edges -} + LocalizedConstraintGraph { edges } + } -/// Returns the outgoing edges of a given node, not its transitive closure. 
-fn outgoing_edges( - graph: &LocalizedConstraintGraph, - node: LocalizedNode, -) -> impl Iterator + use<'_> { - graph.get(&node).into_iter().flat_map(|edges| edges.iter().copied()) + /// Returns the outgoing edges of a given node, not its transitive closure. + fn outgoing_edges(&self, node: LocalizedNode) -> impl Iterator + use<'_> { + self.edges.get(&node).into_iter().flat_map(|targets| targets.iter().copied()) + } } /// Traverses the MIR and collects kills. From dee52a31789fb87e04b4762df5b84d52738b1adb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Thu, 9 Jan 2025 15:33:38 +0000 Subject: [PATCH 121/142] encode `Locations::All` typeck constraints as logical edges Instead of materializing `Locations::All` constraints as physical edges at all the points in the CFG, we record them as logical edges and only materialize them during traversal as successors for a given node. This fixes the slowness/hang in the `saturating-float-casts.rs` test. --- .../src/polonius/loan_liveness.rs | 51 ++++++++++++++++--- compiler/rustc_borrowck/src/polonius/mod.rs | 1 + .../src/polonius/typeck_constraints.rs | 22 ++------ 3 files changed, 49 insertions(+), 25 deletions(-) diff --git a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs index 3fa2a66e3ba1b..768c12a97a64e 100644 --- a/compiler/rustc_borrowck/src/polonius/loan_liveness.rs +++ b/compiler/rustc_borrowck/src/polonius/loan_liveness.rs @@ -9,16 +9,21 @@ use rustc_middle::ty::{RegionVid, TyCtxt}; use rustc_mir_dataflow::points::PointIndex; use super::{LiveLoans, LocalizedOutlivesConstraintSet}; +use crate::constraints::OutlivesConstraint; use crate::dataflow::BorrowIndex; use crate::region_infer::values::LivenessValues; +use crate::type_check::Locations; use crate::{BorrowSet, PlaceConflictBias, places_conflict}; -/// With the full graph of constraints, we can compute loan reachability, stop at kills, and trace -/// loan liveness throughout the CFG. +/// Compute loan reachability, stop at kills, and trace loan liveness throughout the CFG, by +/// traversing the full graph of constraints that combines: +/// - the localized constraints (the physical edges), +/// - with the constraints that hold at all points (the logical edges). pub(super) fn compute_loan_liveness<'tcx>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, liveness: &LivenessValues, + outlives_constraints: impl Iterator>, borrow_set: &BorrowSet<'tcx>, localized_outlives_constraints: &LocalizedOutlivesConstraintSet, ) -> LiveLoans { @@ -29,7 +34,11 @@ pub(super) fn compute_loan_liveness<'tcx>( // edges when visualizing the constraint graph anyways. let kills = collect_kills(body, tcx, borrow_set); - let graph = LocalizedConstraintGraph::new(&localized_outlives_constraints); + // Create the full graph with the physical edges we've localized earlier, and the logical edges + // of constraints that hold at all points. + let logical_constraints = + outlives_constraints.filter(|c| matches!(c.locations, Locations::All(_))); + let graph = LocalizedConstraintGraph::new(&localized_outlives_constraints, logical_constraints); let mut visited = FxHashSet::default(); let mut stack = Vec::new(); @@ -125,11 +134,16 @@ pub(super) fn compute_loan_liveness<'tcx>( live_loans } -/// The localized constraint graph indexes the physical edges to compute a given node's successors -/// during traversal. +/// The localized constraint graph indexes the physical and logical edges to compute a given node's +/// successors during traversal. 
struct LocalizedConstraintGraph { /// The actual, physical, edges we have recorded for a given node. edges: FxHashMap>, + + /// The logical edges representing the outlives constraints that hold at all points in the CFG, + /// which we don't localize to avoid creating a lot of unnecessary edges in the graph. Some CFGs + /// can be big, and we don't need to create such a physical edge for every point in the CFG. + logical_edges: FxHashMap>, } /// A node in the graph to be traversed, one of the two vertices of a localized outlives constraint. @@ -141,7 +155,10 @@ struct LocalizedNode { impl LocalizedConstraintGraph { /// Traverses the constraints and returns the indexed graph of edges per node. - fn new(constraints: &LocalizedOutlivesConstraintSet) -> Self { + fn new<'tcx>( + constraints: &LocalizedOutlivesConstraintSet, + logical_constraints: impl Iterator>, + ) -> Self { let mut edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default(); for constraint in &constraints.outlives { let source = LocalizedNode { region: constraint.source, point: constraint.from }; @@ -149,12 +166,30 @@ impl LocalizedConstraintGraph { edges.entry(source).or_default().insert(target); } - LocalizedConstraintGraph { edges } + let mut logical_edges: FxHashMap<_, FxIndexSet<_>> = FxHashMap::default(); + for constraint in logical_constraints { + logical_edges.entry(constraint.sup).or_default().insert(constraint.sub); + } + + LocalizedConstraintGraph { edges, logical_edges } } /// Returns the outgoing edges of a given node, not its transitive closure. fn outgoing_edges(&self, node: LocalizedNode) -> impl Iterator + use<'_> { - self.edges.get(&node).into_iter().flat_map(|targets| targets.iter().copied()) + // The outgoing edges are: + // - the physical edges present at this node, + // - the materialized logical edges that exist virtually at all points for this node's + // region, localized at this point. + let physical_edges = + self.edges.get(&node).into_iter().flat_map(|targets| targets.iter().copied()); + let materialized_edges = + self.logical_edges.get(&node.region).into_iter().flat_map(move |targets| { + targets + .iter() + .copied() + .map(move |target| LocalizedNode { point: node.point, region: target }) + }); + physical_edges.chain(materialized_edges) } } diff --git a/compiler/rustc_borrowck/src/polonius/mod.rs b/compiler/rustc_borrowck/src/polonius/mod.rs index 52a5f75d8a239..502c868194af2 100644 --- a/compiler/rustc_borrowck/src/polonius/mod.rs +++ b/compiler/rustc_borrowck/src/polonius/mod.rs @@ -130,6 +130,7 @@ impl PoloniusContext { tcx, body, regioncx.liveness_constraints(), + regioncx.outlives_constraints(), borrow_set, &localized_outlives_constraints, ); diff --git a/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs b/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs index 8235b844886e0..1289b1899eb35 100644 --- a/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs +++ b/compiler/rustc_borrowck/src/polonius/typeck_constraints.rs @@ -22,23 +22,11 @@ pub(super) fn convert_typeck_constraints<'tcx>( for outlives_constraint in outlives_constraints { match outlives_constraint.locations { Locations::All(_) => { - // For now, turn logical constraints holding at all points into physical edges at - // every point in the graph. - // FIXME: encode this into *traversal* instead. 
- for (block, bb) in body.basic_blocks.iter_enumerated() { - let statement_count = bb.statements.len(); - for statement_index in 0..=statement_count { - let current_location = Location { block, statement_index }; - let current_point = liveness.point_from_location(current_location); - - localized_outlives_constraints.push(LocalizedOutlivesConstraint { - source: outlives_constraint.sup, - from: current_point, - target: outlives_constraint.sub, - to: current_point, - }); - } - } + // We don't turn constraints holding at all points into physical edges at every + // point in the graph. They are encoded into *traversal* instead: a given node's + // successors will combine these logical edges with the regular, physical, localized + // edges. + continue; } Locations::Single(location) => { From 5f495153f02f86055c5672c1633f9a19502066ce Mon Sep 17 00:00:00 2001 From: MarcoIeni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 17 Jan 2025 12:59:48 +0100 Subject: [PATCH 122/142] ci: mirror buildkit image to ghcr --- .github/workflows/ghcr.yml | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ghcr.yml b/.github/workflows/ghcr.yml index 0fcd1b178163e..4a08bcbb8a689 100644 --- a/.github/workflows/ghcr.yml +++ b/.github/workflows/ghcr.yml @@ -48,10 +48,21 @@ jobs: - name: Mirror DockerHub run: | - # DockerHub image we want to mirror - image="ubuntu:22.04" + # List of DockerHub images to mirror to ghcr.io + images=( + # Mirrored because used by the mingw-check-tidy, which doesn't cache Docker images + "ubuntu:22.04" + # Mirrored because used by all linux CI jobs, including mingw-check-tidy + "moby/buildkit:buildx-stable-1" + ) - # Mirror image from DockerHub to ghcr.io - ./crane copy \ - docker.io/${image} \ - ghcr.io/${{ github.repository_owner }}/${image} + # Mirror each image from DockerHub to ghcr.io + for img in "${images[@]}"; do + echo "Mirroring ${img}..." + # Remove namespace from the image if any. + # E.g. "moby/buildkit:buildx-stable-1" becomes "buildkit:buildx-stable-1" + dest_image=$(echo "${img}" | cut -d'/' -f2-) + ./crane copy \ + "docker.io/${img}" \ + "ghcr.io/${{ github.repository_owner }}/${dest_image}" + done From ffc7b5dc2097a002a7040a148aa85596d063fdfd Mon Sep 17 00:00:00 2001 From: MarcoIeni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 17 Jan 2025 13:14:51 +0100 Subject: [PATCH 123/142] ci: use ghcr ubuntu image for mingw-check-tidy --- src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile b/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile index f52e306974cce..9ca8cc740a5c7 100644 --- a/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile +++ b/src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile @@ -1,4 +1,6 @@ -FROM ubuntu:22.04 +# We use the ghcr base image because ghcr doesn't have a rate limit +# and the mingw-check-tidy job doesn't cache docker images in CI. 
+FROM ghcr.io/rust-lang/ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get install -y --no-install-recommends \ From 0910173b3558b0ff7e9e160976618cbb55cb6149 Mon Sep 17 00:00:00 2001 From: lcnr Date: Fri, 17 Jan 2025 13:23:25 +0100 Subject: [PATCH 124/142] add test --- .../ambiguity-causes-visitor-hang.rs | 56 +++++++++++++++++++ .../ambiguity-causes-visitor-hang.stderr | 14 +++++ 2 files changed, 70 insertions(+) create mode 100644 tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.rs create mode 100644 tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.stderr diff --git a/tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.rs b/tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.rs new file mode 100644 index 0000000000000..54854b1b8a522 --- /dev/null +++ b/tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.rs @@ -0,0 +1,56 @@ +// Computing the ambiguity causes for the overlap ended up +// causing an exponential blowup when recursing into the normalization +// goals for ` as RecursiveSuper>::Assoc`. This test +// takes multiple minutes when doing so and less than a second +// otherwise. + +//@ compile-flags: -Znext-solver=coherence + +trait RecursiveSuper: + Super< + A0 = Self::Assoc, + A1 = Self::Assoc, + A2 = Self::Assoc, + A3 = Self::Assoc, + A4 = Self::Assoc, + A5 = Self::Assoc, + A6 = Self::Assoc, + A7 = Self::Assoc, + A8 = Self::Assoc, + A9 = Self::Assoc, + A10 = Self::Assoc, + A11 = Self::Assoc, + A12 = Self::Assoc, + A13 = Self::Assoc, + A14 = Self::Assoc, + A15 = Self::Assoc, + > +{ + type Assoc; +} + +trait Super { + type A0; + type A1; + type A2; + type A3; + type A4; + type A5; + type A6; + type A7; + type A8; + type A9; + type A10; + type A11; + type A12; + type A13; + type A14; + type A15; +} + +trait Overlap {} +impl Overlap for T {} +impl Overlap for Box {} +//~^ ERROR conflicting implementations of trait `Overlap` for type `Box<_>` + +fn main() {} diff --git a/tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.stderr b/tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.stderr new file mode 100644 index 0000000000000..3731dc5b74ec0 --- /dev/null +++ b/tests/ui/traits/next-solver/coherence/ambiguity-causes-visitor-hang.stderr @@ -0,0 +1,14 @@ +error[E0119]: conflicting implementations of trait `Overlap` for type `Box<_>` + --> $DIR/ambiguity-causes-visitor-hang.rs:53:1 + | +LL | impl Overlap for T {} + | ------------------------------------- first implementation here +LL | impl Overlap for Box {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `Box<_>` + | + = note: downstream crates may implement trait `Super` for type `std::boxed::Box<_>` + = note: downstream crates may implement trait `RecursiveSuper` for type `std::boxed::Box<_>` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0119`. 
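The cache added to `AmbiguityCausesVisitor` in patch 118 and the regression test above rely on the same idea: a goal that has already been visited is skipped, so a DAG-shaped proof tree is expanded once per distinct goal rather than once per path through it. Below is a minimal standalone sketch of that pattern, using `std::collections::HashSet` in place of `FxHashSet` and a toy `Goal`/`children` graph in place of the real proof tree; all names in it are illustrative, not rustc internals.

```rust
use std::collections::HashSet;

/// A toy goal graph: every goal fans out to the same two children, so the
/// number of root-to-leaf paths grows exponentially with depth while the
/// number of distinct goals stays linear.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Goal(u32);

fn children(goal: Goal) -> Vec<Goal> {
    if goal.0 == 0 {
        Vec::new()
    } else {
        vec![Goal(goal.0 - 1), Goal(goal.0 - 1)]
    }
}

struct Visitor {
    /// Plays the role of the `cache` field added to `AmbiguityCausesVisitor`.
    cache: HashSet<Goal>,
    expanded: u64,
}

impl Visitor {
    fn visit_goal(&mut self, goal: Goal) {
        // Without this early return the traversal is O(2^depth); with it,
        // each distinct goal is expanded at most once.
        if !self.cache.insert(goal) {
            return;
        }
        self.expanded += 1;
        for child in children(goal) {
            self.visit_goal(child);
        }
    }
}

fn main() {
    let mut visitor = Visitor { cache: HashSet::new(), expanded: 0 };
    visitor.visit_goal(Goal(64));
    // Only 65 distinct goals are expanded, despite the 2^64 paths to leaves.
    println!("expanded {} goals", visitor.expanded);
}
```

Without the `cache` check the call above would effectively never terminate, which is the behaviour the `ambiguity-causes-visitor-hang.rs` test guards against for the real visitor.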
From 81d70f92ab7e1f242093cf2aed17c1d48eaf1f15 Mon Sep 17 00:00:00 2001 From: Jiahao XU <30436523+NobodyXu@users.noreply.github.com> Date: Fri, 17 Jan 2025 01:45:14 +1100 Subject: [PATCH 125/142] Fix import of pipe in kernel_copy.rs Signed-off-by: Jiahao XU --- library/std/src/sys/pal/unix/kernel_copy.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/library/std/src/sys/pal/unix/kernel_copy.rs b/library/std/src/sys/pal/unix/kernel_copy.rs index 36823a503b17c..bbf29f3252341 100644 --- a/library/std/src/sys/pal/unix/kernel_copy.rs +++ b/library/std/src/sys/pal/unix/kernel_copy.rs @@ -52,15 +52,14 @@ use crate::cmp::min; use crate::fs::{File, Metadata}; use crate::io::copy::generic_copy; use crate::io::{ - BufRead, BufReader, BufWriter, Error, Read, Result, StderrLock, StdinLock, StdoutLock, Take, - Write, + BufRead, BufReader, BufWriter, Error, PipeReader, PipeWriter, Read, Result, StderrLock, + StdinLock, StdoutLock, Take, Write, }; use crate::mem::ManuallyDrop; use crate::net::TcpStream; use crate::os::unix::fs::FileTypeExt; use crate::os::unix::io::{AsRawFd, FromRawFd, RawFd}; use crate::os::unix::net::UnixStream; -use crate::pipe::{PipeReader, PipeWriter}; use crate::process::{ChildStderr, ChildStdin, ChildStdout}; use crate::ptr; use crate::sync::atomic::{AtomicBool, AtomicU8, Ordering}; From 2af4197f424f2c24e4256584383d9ecddcddb992 Mon Sep 17 00:00:00 2001 From: Jiahao XU Date: Sat, 18 Jan 2025 01:39:01 +1100 Subject: [PATCH 126/142] Fix use of pipe in tests/run-make/broken-pipe-no-ice/rmake.rs Signed-off-by: Jiahao XU --- tests/run-make/broken-pipe-no-ice/rmake.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/run-make/broken-pipe-no-ice/rmake.rs b/tests/run-make/broken-pipe-no-ice/rmake.rs index 378c3289cb7bc..54d13b62f4a0e 100644 --- a/tests/run-make/broken-pipe-no-ice/rmake.rs +++ b/tests/run-make/broken-pipe-no-ice/rmake.rs @@ -25,7 +25,7 @@ enum Binary { } fn check_broken_pipe_handled_gracefully(bin: Binary, mut cmd: Command) { - let (reader, writer) = std::pipe::pipe().unwrap(); + let (reader, writer) = std::io::pipe().unwrap(); drop(reader); // close read-end cmd.stdout(writer).stderr(Stdio::piped()); From 0e80d78709f6c0211853f7ff86cc1e9b0f9d9a8a Mon Sep 17 00:00:00 2001 From: MarcoIeni <11428655+MarcoIeni@users.noreply.github.com> Date: Fri, 17 Jan 2025 16:38:10 +0100 Subject: [PATCH 127/142] ci: switch to linux free arm runners --- src/ci/github-actions/jobs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 6bf4a75d080cc..1ab70e4e0aaba 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -44,7 +44,7 @@ runners: <<: *base-job - &job-aarch64-linux - os: ubuntu-22.04-arm64-8core-32gb + os: ubuntu-22.04-arm envs: env-x86_64-apple-tests: &env-x86_64-apple-tests From dfb816208ac8aae10fb3b2dc0ed7dbe3c8168831 Mon Sep 17 00:00:00 2001 From: Jens Reidel Date: Tue, 14 Jan 2025 02:49:25 +0100 Subject: [PATCH 128/142] Drop MIPS glibc 2.23 patches that reside in crosstool-ng now Signed-off-by: Jens Reidel --- src/ci/docker/README.md | 8 --- .../host-x86_64/dist-mips-linux/Dockerfile | 1 - .../dist-mips-linux/mips-linux-gnu.defconfig | 2 - ...wrong-vfork-aliases-in-libpthread.so.patch | 44 ------------- ...fixes-to-the-vfork-aliases-in-libpth.patch | 63 ------------------- .../host-x86_64/dist-mips64-linux/Dockerfile | 1 - .../mips64-linux-gnu.defconfig | 2 - .../dist-mips64el-linux/Dockerfile | 1 - 
.../mips64el-linux-gnu.defconfig | 2 - .../host-x86_64/dist-mipsel-linux/Dockerfile | 1 - .../mipsel-linux-gnu.defconfig | 2 - 11 files changed, 127 deletions(-) delete mode 100644 src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0001-MIPS-SPARC-fix-wrong-vfork-aliases-in-libpthread.so.patch delete mode 100644 src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0002-MIPS-SPARC-more-fixes-to-the-vfork-aliases-in-libpth.patch diff --git a/src/ci/docker/README.md b/src/ci/docker/README.md index 2f52ff5a99a50..2b1de43c2f68c 100644 --- a/src/ci/docker/README.md +++ b/src/ci/docker/README.md @@ -305,8 +305,6 @@ For targets: `mips-unknown-linux-gnu` - Path and misc options > Prefix directory = /x-tools/${CT\_TARGET} - Path and misc options > Use a mirror = ENABLE - Path and misc options > Base URL = https://ci-mirrors.rust-lang.org/rustc -- Path and misc options > Patches origin = Bundled, then local -- Path and misc options > Local patch directory = /tmp/patches - Target options > Target Architecture = mips - Target options > ABI = o32 - Target options > Endianness = Big endian @@ -327,8 +325,6 @@ For targets: `mipsel-unknown-linux-gnu` - Path and misc options > Prefix directory = /x-tools/${CT\_TARGET} - Path and misc options > Use a mirror = ENABLE - Path and misc options > Base URL = https://ci-mirrors.rust-lang.org/rustc -- Path and misc options > Patches origin = Bundled, then local -- Path and misc options > Local patch directory = /tmp/patches - Target options > Target Architecture = mips - Target options > ABI = o32 - Target options > Endianness = Little endian @@ -349,8 +345,6 @@ For targets: `mips64-unknown-linux-gnuabi64` - Path and misc options > Prefix directory = /x-tools/${CT\_TARGET} - Path and misc options > Use a mirror = ENABLE - Path and misc options > Base URL = https://ci-mirrors.rust-lang.org/rustc -- Path and misc options > Patches origin = Bundled, then local -- Path and misc options > Local patch directory = /tmp/patches - Target options > Target Architecture = mips - Target options > ABI = n64 - Target options > Endianness = Big endian @@ -370,8 +364,6 @@ For targets: `mips64el-unknown-linux-gnuabi64` - Path and misc options > Prefix directory = /x-tools/${CT\_TARGET} - Path and misc options > Use a mirror = ENABLE - Path and misc options > Base URL = https://ci-mirrors.rust-lang.org/rustc -- Path and misc options > Patches origin = Bundled, then local -- Path and misc options > Local patch directory = /tmp/patches - Target options > Target Architecture = mips - Target options > ABI = n64 - Target options > Endianness = Little endian diff --git a/src/ci/docker/host-x86_64/dist-mips-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-mips-linux/Dockerfile index c08febf423adb..4a090dcdd5085 100644 --- a/src/ci/docker/host-x86_64/dist-mips-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-mips-linux/Dockerfile @@ -11,7 +11,6 @@ RUN sh /scripts/rustbuild-setup.sh WORKDIR /tmp COPY scripts/crosstool-ng-build.sh /scripts/ -COPY host-x86_64/dist-mips-linux/patches/ /tmp/patches/ COPY host-x86_64/dist-mips-linux/mips-linux-gnu.defconfig /tmp/crosstool.defconfig RUN /scripts/crosstool-ng-build.sh diff --git a/src/ci/docker/host-x86_64/dist-mips-linux/mips-linux-gnu.defconfig b/src/ci/docker/host-x86_64/dist-mips-linux/mips-linux-gnu.defconfig index 75743fe81416b..94db07922a226 100644 --- a/src/ci/docker/host-x86_64/dist-mips-linux/mips-linux-gnu.defconfig +++ b/src/ci/docker/host-x86_64/dist-mips-linux/mips-linux-gnu.defconfig @@ -2,8 +2,6 @@ 
CT_CONFIG_VERSION="4" CT_PREFIX_DIR="/x-tools/${CT_TARGET}" CT_USE_MIRROR=y CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc" -CT_PATCH_BUNDLED_LOCAL=y -CT_LOCAL_PATCH_DIR="/tmp/patches" CT_ARCH_MIPS=y CT_ARCH_mips_o32=y CT_ARCH_ARCH="mips32r2" diff --git a/src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0001-MIPS-SPARC-fix-wrong-vfork-aliases-in-libpthread.so.patch b/src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0001-MIPS-SPARC-fix-wrong-vfork-aliases-in-libpthread.so.patch deleted file mode 100644 index 393084df3249a..0000000000000 --- a/src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0001-MIPS-SPARC-fix-wrong-vfork-aliases-in-libpthread.so.patch +++ /dev/null @@ -1,44 +0,0 @@ -From 43c2948756bb6e144c7b871e827bba37d61ad3a3 Mon Sep 17 00:00:00 2001 -From: Aurelien Jarno -Date: Sat, 18 Jun 2016 19:11:23 +0200 -Subject: [PATCH 1/2] MIPS, SPARC: fix wrong vfork aliases in libpthread.so - -With recent binutils versions the GNU libc fails to build on at least -MISP and SPARC, with this kind of error: - - /home/aurel32/glibc/glibc-build/nptl/libpthread.so:(*IND*+0x0): multiple definition of `vfork@GLIBC_2.0' - /home/aurel32/glibc/glibc-build/nptl/libpthread.so::(.text+0xee50): first defined here - -It appears that on these architectures pt-vfork.S includes vfork.S -(through the alpha version of pt-vfork.S) and that the __vfork aliases -are not conditionalized on IS_IN (libc) like on other architectures. -Therefore the aliases are also wrongly included in libpthread.so. - -Fix this by properly conditionalizing the aliases like on other -architectures. - -Changelog: - * sysdeps/unix/sysv/linux/mips/vfork.S (__vfork): Conditionalize - hidden_def, weak_alias and strong_alias on [IS_IN (libc)]. - * sysdeps/unix/sysv/linux/sparc/sparc32/vfork.S: Likewise. - * sysdeps/unix/sysv/linux/sparc/sparc64/vfork.S: Likewise. ---- - sysdeps/unix/sysv/linux/mips/vfork.S | 2 ++ - 1 file changed, 2 insertions(+) - -diff --git a/sysdeps/unix/sysv/linux/mips/vfork.S b/sysdeps/unix/sysv/linux/mips/vfork.S -index 8c6615143708..c0c0ce699159 100644 ---- a/sysdeps/unix/sysv/linux/mips/vfork.S -+++ b/sysdeps/unix/sysv/linux/mips/vfork.S -@@ -106,6 +106,8 @@ L(error): - #endif - END(__vfork) - -+#if IS_IN (libc) - libc_hidden_def(__vfork) - weak_alias (__vfork, vfork) - strong_alias (__vfork, __libc_vfork) -+#endif --- -2.37.3 - diff --git a/src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0002-MIPS-SPARC-more-fixes-to-the-vfork-aliases-in-libpth.patch b/src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0002-MIPS-SPARC-more-fixes-to-the-vfork-aliases-in-libpth.patch deleted file mode 100644 index e28c7ae99f098..0000000000000 --- a/src/ci/docker/host-x86_64/dist-mips-linux/patches/glibc/2.23/0002-MIPS-SPARC-more-fixes-to-the-vfork-aliases-in-libpth.patch +++ /dev/null @@ -1,63 +0,0 @@ -From b87c1ec3fa398646f042a68f0ce0f7d09c1348c7 Mon Sep 17 00:00:00 2001 -From: Aurelien Jarno -Date: Tue, 21 Jun 2016 23:59:37 +0200 -Subject: [PATCH 2/2] MIPS, SPARC: more fixes to the vfork aliases in - libpthread.so - -Commit 43c29487 tried to fix the vfork aliases in libpthread.so on MIPS -and SPARC, but failed to do it correctly, introducing an ABI change. - -This patch does the remaining changes needed to align the MIPS and SPARC -vfork implementations with the other architectures. That way the the -alpha version of pt-vfork.S works correctly for MIPS and SPARC. The -changes for alpha were done in 82aab97c. 
- -Changelog: - * sysdeps/unix/sysv/linux/mips/vfork.S (__vfork): Rename into - __libc_vfork. - (__vfork) [IS_IN (libc)]: Remove alias. - (__libc_vfork) [IS_IN (libc)]: Define as an alias. - * sysdeps/unix/sysv/linux/sparc/sparc32/vfork.S: Likewise. - * sysdeps/unix/sysv/linux/sparc/sparc64/vfork.S: Likewise. ---- - sysdeps/unix/sysv/linux/mips/vfork.S | 12 ++++++------ - 1 file changed, 6 insertions(+), 6 deletions(-) - -diff --git a/sysdeps/unix/sysv/linux/mips/vfork.S b/sysdeps/unix/sysv/linux/mips/vfork.S -index c0c0ce699159..1867c8626ebe 100644 ---- a/sysdeps/unix/sysv/linux/mips/vfork.S -+++ b/sysdeps/unix/sysv/linux/mips/vfork.S -@@ -31,13 +31,13 @@ - LOCALSZ= 1 - FRAMESZ= (((NARGSAVE+LOCALSZ)*SZREG)+ALSZ)&ALMASK - GPOFF= FRAMESZ-(1*SZREG) --NESTED(__vfork,FRAMESZ,sp) -+NESTED(__libc_vfork,FRAMESZ,sp) - #ifdef __PIC__ - SETUP_GP - #endif - PTR_SUBU sp, FRAMESZ - cfi_adjust_cfa_offset (FRAMESZ) -- SETUP_GP64_REG (a5, __vfork) -+ SETUP_GP64_REG (a5, __libc_vfork) - #ifdef __PIC__ - SAVE_GP (GPOFF) - #endif -@@ -104,10 +104,10 @@ L(error): - RESTORE_GP64_REG - j __syscall_error - #endif -- END(__vfork) -+ END(__libc_vfork) - - #if IS_IN (libc) --libc_hidden_def(__vfork) --weak_alias (__vfork, vfork) --strong_alias (__vfork, __libc_vfork) -+weak_alias (__libc_vfork, vfork) -+strong_alias (__libc_vfork, __vfork) -+libc_hidden_def (__vfork) - #endif --- -2.37.3 - diff --git a/src/ci/docker/host-x86_64/dist-mips64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-mips64-linux/Dockerfile index 10f31075e2d55..18b0375f40080 100644 --- a/src/ci/docker/host-x86_64/dist-mips64-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-mips64-linux/Dockerfile @@ -11,7 +11,6 @@ RUN sh /scripts/rustbuild-setup.sh WORKDIR /tmp COPY scripts/crosstool-ng-build.sh /scripts/ -COPY host-x86_64/dist-mips-linux/patches/ /tmp/patches/ COPY host-x86_64/dist-mips64-linux/mips64-linux-gnu.defconfig /tmp/crosstool.defconfig RUN /scripts/crosstool-ng-build.sh diff --git a/src/ci/docker/host-x86_64/dist-mips64-linux/mips64-linux-gnu.defconfig b/src/ci/docker/host-x86_64/dist-mips64-linux/mips64-linux-gnu.defconfig index 4b8f7a5492033..f295a2fafc6b1 100644 --- a/src/ci/docker/host-x86_64/dist-mips64-linux/mips64-linux-gnu.defconfig +++ b/src/ci/docker/host-x86_64/dist-mips64-linux/mips64-linux-gnu.defconfig @@ -2,8 +2,6 @@ CT_CONFIG_VERSION="4" CT_PREFIX_DIR="/x-tools/${CT_TARGET}" CT_USE_MIRROR=y CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc" -CT_PATCH_BUNDLED_LOCAL=y -CT_LOCAL_PATCH_DIR="/tmp/patches" CT_ARCH_MIPS=y CT_ARCH_mips_n64=y CT_ARCH_64=y diff --git a/src/ci/docker/host-x86_64/dist-mips64el-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-mips64el-linux/Dockerfile index 5ab4a53de8a88..87407203880b4 100644 --- a/src/ci/docker/host-x86_64/dist-mips64el-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-mips64el-linux/Dockerfile @@ -11,7 +11,6 @@ RUN sh /scripts/rustbuild-setup.sh WORKDIR /tmp COPY scripts/crosstool-ng-build.sh /scripts/ -COPY host-x86_64/dist-mips-linux/patches/ /tmp/patches/ COPY host-x86_64/dist-mips64el-linux/mips64el-linux-gnu.defconfig /tmp/crosstool.defconfig RUN /scripts/crosstool-ng-build.sh diff --git a/src/ci/docker/host-x86_64/dist-mips64el-linux/mips64el-linux-gnu.defconfig b/src/ci/docker/host-x86_64/dist-mips64el-linux/mips64el-linux-gnu.defconfig index 9c8eb5007b79b..47d6246356526 100644 --- a/src/ci/docker/host-x86_64/dist-mips64el-linux/mips64el-linux-gnu.defconfig +++ b/src/ci/docker/host-x86_64/dist-mips64el-linux/mips64el-linux-gnu.defconfig @@ -2,8 +2,6 @@ 
CT_CONFIG_VERSION="4" CT_PREFIX_DIR="/x-tools/${CT_TARGET}" CT_USE_MIRROR=y CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc" -CT_PATCH_BUNDLED_LOCAL=y -CT_LOCAL_PATCH_DIR="/tmp/patches" CT_ARCH_MIPS=y CT_ARCH_mips_n64=y CT_ARCH_LE=y diff --git a/src/ci/docker/host-x86_64/dist-mipsel-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-mipsel-linux/Dockerfile index 0bbaf00339d0a..553f6ea86b72d 100644 --- a/src/ci/docker/host-x86_64/dist-mipsel-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-mipsel-linux/Dockerfile @@ -11,7 +11,6 @@ RUN sh /scripts/rustbuild-setup.sh WORKDIR /tmp COPY scripts/crosstool-ng-build.sh /scripts/ -COPY host-x86_64/dist-mips-linux/patches/ /tmp/patches/ COPY host-x86_64/dist-mipsel-linux/mipsel-linux-gnu.defconfig /tmp/crosstool.defconfig RUN /scripts/crosstool-ng-build.sh diff --git a/src/ci/docker/host-x86_64/dist-mipsel-linux/mipsel-linux-gnu.defconfig b/src/ci/docker/host-x86_64/dist-mipsel-linux/mipsel-linux-gnu.defconfig index a9dae121e536a..5daa83ebc02a7 100644 --- a/src/ci/docker/host-x86_64/dist-mipsel-linux/mipsel-linux-gnu.defconfig +++ b/src/ci/docker/host-x86_64/dist-mipsel-linux/mipsel-linux-gnu.defconfig @@ -2,8 +2,6 @@ CT_CONFIG_VERSION="4" CT_PREFIX_DIR="/x-tools/${CT_TARGET}" CT_USE_MIRROR=y CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc" -CT_PATCH_BUNDLED_LOCAL=y -CT_LOCAL_PATCH_DIR="/tmp/patches" CT_ARCH_MIPS=y CT_ARCH_mips_o32=y CT_ARCH_LE=y From 00844be421ff67872681cf9c4d1a4cb8d4e90891 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Fri, 17 Jan 2025 17:00:59 +0000 Subject: [PATCH 129/142] new solver: prefer trivial builtin impls over where-clauses for now, only builtin `Sized` impls are tracked as being `Trivial` --- Cargo.lock | 1 + compiler/rustc_next_trait_solver/Cargo.toml | 1 + .../src/solve/trait_goals.rs | 28 +++++++++++++++++-- .../src/traits/project.rs | 4 +-- compiler/rustc_ty_utils/src/instance.rs | 2 +- compiler/rustc_type_ir/src/solve/mod.rs | 3 ++ tests/ui/regions/issue-26448-1.rs | 5 +++- tests/ui/regions/issue-26448-2.rs | 3 ++ 8 files changed, 41 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 85e5e6c97105c..a08d43a014c06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4259,6 +4259,7 @@ dependencies = [ "rustc_serialize", "rustc_type_ir", "rustc_type_ir_macros", + "smallvec", "tracing", ] diff --git a/compiler/rustc_next_trait_solver/Cargo.toml b/compiler/rustc_next_trait_solver/Cargo.toml index 451c215566be2..f9168112216ab 100644 --- a/compiler/rustc_next_trait_solver/Cargo.toml +++ b/compiler/rustc_next_trait_solver/Cargo.toml @@ -13,6 +13,7 @@ rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } rustc_type_ir = { path = "../rustc_type_ir", default-features = false } rustc_type_ir_macros = { path = "../rustc_type_ir_macros" } +smallvec = "1.8.1" tracing = "0.1" # tidy-alphabetical-end diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index f4d7c3ce76cf2..4faa243c02a64 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -8,6 +8,7 @@ use rustc_type_ir::lang_items::TraitSolverLangItem; use rustc_type_ir::solve::CanonicalResponse; use rustc_type_ir::visit::TypeVisitableExt as _; use rustc_type_ir::{self as ty, Interner, TraitPredicate, TypingMode, Upcast as _, elaborate}; +use smallvec::SmallVec; use tracing::{instrument, 
trace}; use crate::delegate::SolverDelegate; @@ -225,7 +226,7 @@ where } ecx.probe_and_evaluate_goal_for_constituent_tys( - CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), + CandidateSource::BuiltinImpl(BuiltinImplSource::Trivial), goal, structural_traits::instantiate_constituent_tys_for_sized_trait, ) @@ -1194,7 +1195,30 @@ where }; } - // FIXME: prefer trivial builtin impls + // We prefer trivial builtin candidates, i.e. builtin impls without any + // nested requirements, over all others. This is a fix for #53123 and + // prevents where-bounds from accidentally extending the lifetime of a + // variable. + if candidates + .iter() + .any(|c| matches!(c.source, CandidateSource::BuiltinImpl(BuiltinImplSource::Trivial))) + { + let trivial_builtin_impls: SmallVec<[_; 1]> = candidates + .iter() + .filter(|c| { + matches!(c.source, CandidateSource::BuiltinImpl(BuiltinImplSource::Trivial)) + }) + .map(|c| c.result) + .collect(); + // There should only ever be a single trivial builtin candidate + // as they would otherwise overlap. + assert_eq!(trivial_builtin_impls.len(), 1); + return if let Some(response) = self.try_merge_responses(&trivial_builtin_impls) { + Ok((response, Some(TraitGoalProvenVia::Misc))) + } else { + Ok((self.bail_with_ambiguity(&trivial_builtin_impls), None)) + }; + } // If there are non-global where-bounds, prefer where-bounds // (including global ones) over everything else. diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index aae0e34ddf393..d59cf88875ea7 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -991,7 +991,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( Err(ErrorGuaranteed { .. }) => true, } } - ImplSource::Builtin(BuiltinImplSource::Misc, _) => { + ImplSource::Builtin(BuiltinImplSource::Misc | BuiltinImplSource::Trivial, _) => { // While a builtin impl may be known to exist, the associated type may not yet // be known. Any type with multiple potential associated types is therefore // not eligible. @@ -1296,7 +1296,7 @@ fn confirm_select_candidate<'cx, 'tcx>( ) -> Progress<'tcx> { match impl_source { ImplSource::UserDefined(data) => confirm_impl_candidate(selcx, obligation, data), - ImplSource::Builtin(BuiltinImplSource::Misc, data) => { + ImplSource::Builtin(BuiltinImplSource::Misc | BuiltinImplSource::Trivial, data) => { let tcx = selcx.tcx(); let trait_def_id = obligation.predicate.trait_def_id(tcx); if tcx.is_lang_item(trait_def_id, LangItem::Coroutine) { diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index fc76a86f79772..d5e1937efaa45 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ -248,7 +248,7 @@ fn resolve_associated_item<'tcx>( }) } } - traits::ImplSource::Builtin(BuiltinImplSource::Misc, _) => { + traits::ImplSource::Builtin(BuiltinImplSource::Misc | BuiltinImplSource::Trivial, _) => { if tcx.is_lang_item(trait_ref.def_id, LangItem::Clone) { // FIXME(eddyb) use lang items for methods instead of names. 
let name = tcx.item_name(trait_item_id); diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs index fbb5c7430eb2a..c06004d4d0f6c 100644 --- a/compiler/rustc_type_ir/src/solve/mod.rs +++ b/compiler/rustc_type_ir/src/solve/mod.rs @@ -169,6 +169,9 @@ pub enum CandidateSource { #[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_NoContext, TyEncodable, TyDecodable))] pub enum BuiltinImplSource { + /// A built-in impl that is considered trivial, without any nested requirements. They + /// are preferred over where-clauses, and we want to track them explicitly. + Trivial, /// Some built-in impl we don't need to differentiate. This should be used /// unless more specific information is necessary. Misc, diff --git a/tests/ui/regions/issue-26448-1.rs b/tests/ui/regions/issue-26448-1.rs index 0fa40709a7b95..4d1853a93727e 100644 --- a/tests/ui/regions/issue-26448-1.rs +++ b/tests/ui/regions/issue-26448-1.rs @@ -1,4 +1,7 @@ -//@ run-pass +//@ revisions: current next +//@ [next] compile-flags: -Znext-solver +//@ ignore-compare-mode-next-solver (explicit revisions) +//@ check-pass pub trait Foo { fn foo(self) -> T; diff --git a/tests/ui/regions/issue-26448-2.rs b/tests/ui/regions/issue-26448-2.rs index 5fd1e90ebfd96..2e124555125eb 100644 --- a/tests/ui/regions/issue-26448-2.rs +++ b/tests/ui/regions/issue-26448-2.rs @@ -1,3 +1,6 @@ +//@ revisions: current next +//@ [next] compile-flags: -Znext-solver +//@ ignore-compare-mode-next-solver (explicit revisions) //@ check-pass pub struct Bar { From d58540df44a678dc32e4fe6f2fc94224844a3516 Mon Sep 17 00:00:00 2001 From: binarycat Date: Fri, 17 Jan 2025 14:31:24 -0600 Subject: [PATCH 130/142] add src/librustdoc and src/rustdoc-json-types to RUSTC_IF_UNCHANGED_ALLOWED_PATHS fixes https://github.com/rust-lang/rust/issues/135650 --- src/bootstrap/src/core/config/config.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 026380a46c685..1c49063ef5c66 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -42,6 +42,8 @@ use crate::utils::helpers::{self, exe, output, t}; #[rustfmt::skip] // We don't want rustfmt to oneline this list pub(crate) const RUSTC_IF_UNCHANGED_ALLOWED_PATHS: &[&str] = &[ ":!src/tools", + ":!src/librustdoc", + ":!src/rustdoc-json-types", ":!tests", ":!triagebot.toml", ]; From 366cecacdd5dbdefccb4bd63a655694970469004 Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Fri, 17 Jan 2025 22:47:24 +0000 Subject: [PATCH 131/142] Stabilize `float_next_up_down` FCP completed at [1]. 
Closes https://github.com/rust-lang/rust/issues/91399 [1]: https://github.com/rust-lang/rust/issues/91399#issuecomment-2598734570 --- library/core/src/num/f128.rs | 4 ---- library/core/src/num/f16.rs | 4 ---- library/core/src/num/f32.rs | 8 ++++---- library/core/src/num/f64.rs | 8 ++++---- library/std/src/lib.rs | 1 - 5 files changed, 8 insertions(+), 17 deletions(-) diff --git a/library/core/src/num/f128.rs b/library/core/src/num/f128.rs index 4ebeaf046114a..212007d84befb 100644 --- a/library/core/src/num/f128.rs +++ b/library/core/src/num/f128.rs @@ -504,7 +504,6 @@ impl f128 { /// /// ```rust /// #![feature(f128)] - /// #![feature(float_next_up_down)] /// # // FIXME(f16_f128): remove when `eqtf2` is available /// # #[cfg(all(target_arch = "x86_64", target_os = "linux"))] { /// @@ -522,7 +521,6 @@ impl f128 { /// [`MAX`]: Self::MAX #[inline] #[unstable(feature = "f128", issue = "116909")] - // #[unstable(feature = "float_next_up_down", issue = "91399")] pub const fn next_up(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here @@ -558,7 +556,6 @@ impl f128 { /// /// ```rust /// #![feature(f128)] - /// #![feature(float_next_up_down)] /// # // FIXME(f16_f128): remove when `eqtf2` is available /// # #[cfg(all(target_arch = "x86_64", target_os = "linux"))] { /// @@ -576,7 +573,6 @@ impl f128 { /// [`MAX`]: Self::MAX #[inline] #[unstable(feature = "f128", issue = "116909")] - // #[unstable(feature = "float_next_up_down", issue = "91399")] pub const fn next_down(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here diff --git a/library/core/src/num/f16.rs b/library/core/src/num/f16.rs index c82f0d7cd4ad5..fa06a33523e27 100644 --- a/library/core/src/num/f16.rs +++ b/library/core/src/num/f16.rs @@ -497,7 +497,6 @@ impl f16 { /// /// ```rust /// #![feature(f16)] - /// #![feature(float_next_up_down)] /// # // FIXME(f16_f128): ABI issues on MSVC /// # #[cfg(all(target_arch = "x86_64", target_os = "linux"))] { /// @@ -515,7 +514,6 @@ impl f16 { /// [`MAX`]: Self::MAX #[inline] #[unstable(feature = "f16", issue = "116909")] - // #[unstable(feature = "float_next_up_down", issue = "91399")] pub const fn next_up(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here @@ -551,7 +549,6 @@ impl f16 { /// /// ```rust /// #![feature(f16)] - /// #![feature(float_next_up_down)] /// # // FIXME(f16_f128): ABI issues on MSVC /// # #[cfg(all(target_arch = "x86_64", target_os = "linux"))] { /// @@ -569,7 +566,6 @@ impl f16 { /// [`MAX`]: Self::MAX #[inline] #[unstable(feature = "f16", issue = "116909")] - // #[unstable(feature = "float_next_up_down", issue = "91399")] pub const fn next_down(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs index 2b6adef65e94a..a9f413c260384 100644 --- a/library/core/src/num/f32.rs +++ b/library/core/src/num/f32.rs @@ -726,7 +726,6 @@ impl f32 { /// is finite `x == x.next_up().next_down()` also holds. /// /// ```rust - /// #![feature(float_next_up_down)] /// // f32::EPSILON is the difference between 1.0 and the next number up. 
/// assert_eq!(1.0f32.next_up(), 1.0 + f32::EPSILON); /// // But not for most numbers. @@ -739,7 +738,8 @@ impl f32 { /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] - #[unstable(feature = "float_next_up_down", issue = "91399")] + #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_up(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here @@ -774,7 +774,6 @@ impl f32 { /// is finite `x == x.next_down().next_up()` also holds. /// /// ```rust - /// #![feature(float_next_up_down)] /// let x = 1.0f32; /// // Clamp value into range [0, 1). /// let clamped = x.clamp(0.0, 1.0f32.next_down()); @@ -787,7 +786,8 @@ impl f32 { /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] - #[unstable(feature = "float_next_up_down", issue = "91399")] + #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_down(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs index 037b3afa6c46c..77594a653cc8b 100644 --- a/library/core/src/num/f64.rs +++ b/library/core/src/num/f64.rs @@ -743,7 +743,6 @@ impl f64 { /// is finite `x == x.next_up().next_down()` also holds. /// /// ```rust - /// #![feature(float_next_up_down)] /// // f64::EPSILON is the difference between 1.0 and the next number up. /// assert_eq!(1.0f64.next_up(), 1.0 + f64::EPSILON); /// // But not for most numbers. @@ -756,7 +755,8 @@ impl f64 { /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] - #[unstable(feature = "float_next_up_down", issue = "91399")] + #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_up(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. This is in general unsound and unsupported, but here @@ -791,7 +791,6 @@ impl f64 { /// is finite `x == x.next_down().next_up()` also holds. /// /// ```rust - /// #![feature(float_next_up_down)] /// let x = 1.0f64; /// // Clamp value into range [0, 1). /// let clamped = x.clamp(0.0, 1.0f64.next_down()); @@ -804,7 +803,8 @@ impl f64 { /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] - #[unstable(feature = "float_next_up_down", issue = "91399")] + #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_down(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing // denormals to zero. 
This is in general unsound and unsupported, but here diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 5c12236617c98..37c1964b04875 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -333,7 +333,6 @@ #![feature(extend_one)] #![feature(float_gamma)] #![feature(float_minimum_maximum)] -#![feature(float_next_up_down)] #![feature(fmt_internals)] #![feature(hasher_prefixfree_extras)] #![feature(hashmap_internals)] From 809f61a7835ebb6b3acb0c43ca555056e16959e4 Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Fri, 17 Jan 2025 22:52:50 +0000 Subject: [PATCH 132/142] Add references to the IEEE functions for `float_next_up_down` Mention the IEEE function by name and create a doc alias of the same. --- library/core/src/num/f128.rs | 6 ++++++ library/core/src/num/f16.rs | 6 ++++++ library/core/src/num/f32.rs | 6 ++++++ library/core/src/num/f64.rs | 6 ++++++ 4 files changed, 24 insertions(+) diff --git a/library/core/src/num/f128.rs b/library/core/src/num/f128.rs index 212007d84befb..8fb1588e60b35 100644 --- a/library/core/src/num/f128.rs +++ b/library/core/src/num/f128.rs @@ -515,11 +515,14 @@ impl f128 { /// # } /// ``` /// + /// This operation corresponds to IEEE-754 `nextUp`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextUp")] #[unstable(feature = "f128", issue = "116909")] pub const fn next_up(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing @@ -567,11 +570,14 @@ impl f128 { /// # } /// ``` /// + /// This operation corresponds to IEEE-754 `nextDown`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextDown")] #[unstable(feature = "f128", issue = "116909")] pub const fn next_down(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing diff --git a/library/core/src/num/f16.rs b/library/core/src/num/f16.rs index fa06a33523e27..8c2af74b8f842 100644 --- a/library/core/src/num/f16.rs +++ b/library/core/src/num/f16.rs @@ -508,11 +508,14 @@ impl f16 { /// # } /// ``` /// + /// This operation corresponds to IEEE-754 `nextUp`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextUp")] #[unstable(feature = "f16", issue = "116909")] pub const fn next_up(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing @@ -560,11 +563,14 @@ impl f16 { /// # } /// ``` /// + /// This operation corresponds to IEEE-754 `nextDown`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextDown")] #[unstable(feature = "f16", issue = "116909")] pub const fn next_down(self) -> Self { // Some targets violate Rust's assumption of IEEE semantics, e.g. by flushing diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs index a9f413c260384..817bedbd44f98 100644 --- a/library/core/src/num/f32.rs +++ b/library/core/src/num/f32.rs @@ -733,11 +733,14 @@ impl f32 { /// assert_eq!(16777216f32.next_up(), 16777218.0); /// ``` /// + /// This operation corresponds to IEEE-754 `nextUp`. 
+ /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextUp")] #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_up(self) -> Self { @@ -781,11 +784,14 @@ impl f32 { /// assert_eq!(clamped.next_up(), 1.0); /// ``` /// + /// This operation corresponds to IEEE-754 `nextDown`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextDown")] #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_down(self) -> Self { diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs index 77594a653cc8b..1b0651a0def07 100644 --- a/library/core/src/num/f64.rs +++ b/library/core/src/num/f64.rs @@ -750,11 +750,14 @@ impl f64 { /// assert_eq!(9007199254740992f64.next_up(), 9007199254740994.0); /// ``` /// + /// This operation corresponds to IEEE-754 `nextUp`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextUp")] #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_up(self) -> Self { @@ -798,11 +801,14 @@ impl f64 { /// assert_eq!(clamped.next_up(), 1.0); /// ``` /// + /// This operation corresponds to IEEE-754 `nextDown`. + /// /// [`NEG_INFINITY`]: Self::NEG_INFINITY /// [`INFINITY`]: Self::INFINITY /// [`MIN`]: Self::MIN /// [`MAX`]: Self::MAX #[inline] + #[doc(alias = "nextDown")] #[stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] #[rustc_const_stable(feature = "float_next_up_down", since = "CURRENT_RUSTC_VERSION")] pub const fn next_down(self) -> Self { From 067cac91f9f4cde6b0c6cde2681178e9a338a0cd Mon Sep 17 00:00:00 2001 From: Frank King Date: Sat, 18 Jan 2025 08:25:06 +0800 Subject: [PATCH 133/142] Fix ICE in resolving associated items as non-bindings --- compiler/rustc_resolve/src/late.rs | 6 ++-- ...t.import_trait_associated_functions.stderr | 19 ++++++++++++ ...lve-issue-135614-assoc-const.normal.stderr | 30 +++++++++++++++++++ .../resolve-issue-135614-assoc-const.rs | 30 +++++++++++++++++++ .../resolve-issue-135614.normal.stderr | 13 ++++++++ tests/ui/resolve/resolve-issue-135614.rs | 15 ++++++++++ 6 files changed, 110 insertions(+), 3 deletions(-) create mode 100644 tests/ui/resolve/resolve-issue-135614-assoc-const.import_trait_associated_functions.stderr create mode 100644 tests/ui/resolve/resolve-issue-135614-assoc-const.normal.stderr create mode 100644 tests/ui/resolve/resolve-issue-135614-assoc-const.rs create mode 100644 tests/ui/resolve/resolve-issue-135614.normal.stderr create mode 100644 tests/ui/resolve/resolve-issue-135614.rs diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index c4aeaf478bd10..1a0292ebbde63 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -3960,7 +3960,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { match res { Res::SelfCtor(_) // See #70549. 
| Res::Def( - DefKind::Ctor(_, CtorKind::Const) | DefKind::Const | DefKind::ConstParam, + DefKind::Ctor(_, CtorKind::Const) | DefKind::Const | DefKind::AssocConst | DefKind::ConstParam, _, ) if is_syntactic_ambiguity => { // Disambiguate in favor of a unit struct/variant or constant pattern. @@ -3969,7 +3969,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { } Some(res) } - Res::Def(DefKind::Ctor(..) | DefKind::Const | DefKind::Static { .. }, _) => { + Res::Def(DefKind::Ctor(..) | DefKind::Const | DefKind::AssocConst | DefKind::Static { .. }, _) => { // This is unambiguously a fresh binding, either syntactically // (e.g., `IDENT @ PAT` or `ref IDENT`) or because `IDENT` resolves // to something unusable as a pattern (e.g., constructor function), @@ -4005,7 +4005,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> { ); None } - Res::Def(DefKind::Fn, _) | Res::Local(..) | Res::Err => { + Res::Def(DefKind::Fn | DefKind::AssocFn, _) | Res::Local(..) | Res::Err => { // These entities are explicitly allowed to be shadowed by fresh bindings. None } diff --git a/tests/ui/resolve/resolve-issue-135614-assoc-const.import_trait_associated_functions.stderr b/tests/ui/resolve/resolve-issue-135614-assoc-const.import_trait_associated_functions.stderr new file mode 100644 index 0000000000000..b41fa1818e25a --- /dev/null +++ b/tests/ui/resolve/resolve-issue-135614-assoc-const.import_trait_associated_functions.stderr @@ -0,0 +1,19 @@ +error[E0005]: refutable pattern in local binding + --> $DIR/resolve-issue-135614-assoc-const.rs:21:9 + | +LL | let DEFAULT: u32 = 0; + | ^^^^^^^ pattern `1_u32..=u32::MAX` not covered +LL | const DEFAULT: u32 = 0; + | ------------------ missing patterns are not covered because `DEFAULT` is interpreted as a constant pattern, not a new variable + | + = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant + = note: for more information, visit https://doc.rust-lang.org/book/ch19-02-refutability.html + = note: the matched value is of type `u32` +help: introduce a variable instead + | +LL | let DEFAULT_var: u32 = 0; + | ~~~~~~~~~~~ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0005`. 
diff --git a/tests/ui/resolve/resolve-issue-135614-assoc-const.normal.stderr b/tests/ui/resolve/resolve-issue-135614-assoc-const.normal.stderr new file mode 100644 index 0000000000000..908f5bdd89749 --- /dev/null +++ b/tests/ui/resolve/resolve-issue-135614-assoc-const.normal.stderr @@ -0,0 +1,30 @@ +error[E0658]: `use` associated items of traits is unstable + --> $DIR/resolve-issue-135614-assoc-const.rs:6:5 + | +LL | use MyDefault::DEFAULT; + | ^^^^^^^^^^^^^^^^^^ + | + = note: see issue #134691 for more information + = help: add `#![feature(import_trait_associated_functions)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0005]: refutable pattern in local binding + --> $DIR/resolve-issue-135614-assoc-const.rs:21:9 + | +LL | let DEFAULT: u32 = 0; + | ^^^^^^^ pattern `1_u32..=u32::MAX` not covered +LL | const DEFAULT: u32 = 0; + | ------------------ missing patterns are not covered because `DEFAULT` is interpreted as a constant pattern, not a new variable + | + = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant + = note: for more information, visit https://doc.rust-lang.org/book/ch19-02-refutability.html + = note: the matched value is of type `u32` +help: introduce a variable instead + | +LL | let DEFAULT_var: u32 = 0; + | ~~~~~~~~~~~ + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0005, E0658. +For more information about an error, try `rustc --explain E0005`. diff --git a/tests/ui/resolve/resolve-issue-135614-assoc-const.rs b/tests/ui/resolve/resolve-issue-135614-assoc-const.rs new file mode 100644 index 0000000000000..5a592922cd461 --- /dev/null +++ b/tests/ui/resolve/resolve-issue-135614-assoc-const.rs @@ -0,0 +1,30 @@ +//@ revisions: normal import_trait_associated_functions +#![cfg_attr(import_trait_associated_functions, feature(import_trait_associated_functions))] + +// Makes sure that imported constant can be used in pattern bindings. + +use MyDefault::DEFAULT; //[normal]~ ERROR `use` associated items of traits is unstable + +trait MyDefault { + const DEFAULT: Self; +} + +impl MyDefault for u32 { + const DEFAULT: u32 = 0; +} + +impl MyDefault for () { + const DEFAULT: () = (); +} + +fn foo(x: u32) -> u32 { + let DEFAULT: u32 = 0; //~ ERROR refutable pattern in local binding + const DEFAULT: u32 = 0; + if let DEFAULT = x { DEFAULT } else { 1 } +} + +fn bar() { + let DEFAULT = (); +} + +fn main() {} diff --git a/tests/ui/resolve/resolve-issue-135614.normal.stderr b/tests/ui/resolve/resolve-issue-135614.normal.stderr new file mode 100644 index 0000000000000..a9adeb0848e6f --- /dev/null +++ b/tests/ui/resolve/resolve-issue-135614.normal.stderr @@ -0,0 +1,13 @@ +error[E0658]: `use` associated items of traits is unstable + --> $DIR/resolve-issue-135614.rs:7:5 + | +LL | use A::b; + | ^^^^ + | + = note: see issue #134691 for more information + = help: add `#![feature(import_trait_associated_functions)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0658`. 
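For context on the resolver rule these tests exercise: a pattern path that resolves to a constant is kept as a constant pattern, while one that resolves to a function may be shadowed by a fresh binding. The fix above extends both arms to associated items (`DefKind::AssocConst` and `DefKind::AssocFn`) brought into scope via `import_trait_associated_functions`. A small sketch of the same two rules on stable Rust, using a free `const` and a free `fn` instead of associated items (purely illustrative, not part of the patch):

```rust
const DEFAULT: u32 = 0;

fn helper() -> u32 {
    42
}

fn classify(x: u32) -> &'static str {
    // `DEFAULT` resolves to a constant, so in pattern position it is a
    // constant pattern (an equality check), not a fresh binding.
    if let DEFAULT = x { "default" } else { "custom" }
}

fn main() {
    // `helper` resolves to a function, and functions may be shadowed by
    // fresh bindings, so this `let` introduces a new local `helper` that
    // hides the function for the rest of `main`.
    let helper = helper();
    assert_eq!(helper, 42);

    assert_eq!(classify(0), "default");
    assert_eq!(classify(7), "custom");
}
```

The `resolve-issue-135614.rs` test below checks exactly this shadowing behaviour for an associated function imported from a trait.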
diff --git a/tests/ui/resolve/resolve-issue-135614.rs b/tests/ui/resolve/resolve-issue-135614.rs new file mode 100644 index 0000000000000..fec9499365bcb --- /dev/null +++ b/tests/ui/resolve/resolve-issue-135614.rs @@ -0,0 +1,15 @@ +//@ revisions: normal import_trait_associated_functions +//@[import_trait_associated_functions] check-pass +#![cfg_attr(import_trait_associated_functions, feature(import_trait_associated_functions))] + +// Makes sure that imported associated functions are shadowed by the local declarations. + +use A::b; //[normal]~ ERROR `use` associated items of traits is unstable + +trait A { + fn b() {} +} + +fn main() { + let b: (); +} From deb35b000e806f3672d5668d0030b6be0241c4f9 Mon Sep 17 00:00:00 2001 From: Weihang Lo Date: Fri, 17 Jan 2025 22:37:03 -0500 Subject: [PATCH 134/142] Update cargo --- src/tools/cargo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/cargo b/src/tools/cargo index 088d496082726..045bf21b36a2e 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit 088d496082726091024f1689c124a0c3dccbd775 +Subproject commit 045bf21b36a2e1f3ed85e38278d1c3cc4305e134 From a14c35c507e760933cede5208ea12787c72f14d5 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Fri, 17 Jan 2025 14:43:12 +1100 Subject: [PATCH 135/142] coverage: Remove the `Site` enum now that we only instrument nodes --- .../src/coverage/counters.rs | 39 +++++++------------ .../rustc_mir_transform/src/coverage/mod.rs | 12 ++---- 2 files changed, 16 insertions(+), 35 deletions(-) diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 1a9323329f61f..eeb8f65fdf9a9 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -53,21 +53,12 @@ struct BcbExpression { rhs: BcbCounter, } -/// Enum representing either a node or an edge in the coverage graph. -/// -/// FIXME(#135481): This enum is no longer needed now that we only instrument -/// nodes and not edges. It can be removed in a subsequent PR. -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub(super) enum Site { - Node { bcb: BasicCoverageBlock }, -} - /// Generates and stores coverage counter and coverage expression information -/// associated with nodes/edges in the BCB graph. +/// associated with nodes in the coverage graph. pub(super) struct CoverageCounters { /// List of places where a counter-increment statement should be injected /// into MIR, each with its corresponding counter ID. - counter_increment_sites: IndexVec, + counter_increment_sites: IndexVec, /// Coverage counters/expressions that are associated with individual BCBs. node_counters: IndexVec>, @@ -130,9 +121,9 @@ impl CoverageCounters { } } - /// Creates a new physical counter for a BCB node or edge. - fn make_phys_counter(&mut self, site: Site) -> BcbCounter { - let id = self.counter_increment_sites.push(site); + /// Creates a new physical counter for a BCB node. 
+ fn make_phys_counter(&mut self, bcb: BasicCoverageBlock) -> BcbCounter { + let id = self.counter_increment_sites.push(bcb); BcbCounter::Counter { id } } @@ -177,12 +168,12 @@ impl CoverageCounters { self.node_counters[bcb].map(|counter| counter.as_term()) } - /// Returns an iterator over all the nodes/edges in the coverage graph that + /// Returns an iterator over all the nodes in the coverage graph that /// should have a counter-increment statement injected into MIR, along with /// each site's corresponding counter ID. pub(super) fn counter_increment_sites( &self, - ) -> impl Iterator + Captures<'_> { + ) -> impl Iterator + Captures<'_> { self.counter_increment_sites.iter_enumerated().map(|(id, &site)| (id, site)) } @@ -221,7 +212,7 @@ impl CoverageCounters { struct Transcriber { old: NodeCounters, new: CoverageCounters, - phys_counter_for_site: FxHashMap, + phys_counter_for_node: FxHashMap, } impl Transcriber { @@ -229,7 +220,7 @@ impl Transcriber { Self { old, new: CoverageCounters::with_num_bcbs(num_nodes), - phys_counter_for_site: FxHashMap::default(), + phys_counter_for_node: FxHashMap::default(), } } @@ -238,7 +229,6 @@ impl Transcriber { bcb_needs_counter: &DenseBitSet, ) -> CoverageCounters { for bcb in bcb_needs_counter.iter() { - let site = Site::Node { bcb }; let (mut pos, mut neg): (Vec<_>, Vec<_>) = self.old.counter_expr(bcb).iter().partition_map( |&CounterTerm { node, op }| match op { @@ -251,7 +241,7 @@ impl Transcriber { // If we somehow end up with no positive terms, fall back to // creating a physical counter. There's no known way for this // to happen, but we can avoid an ICE if it does. - debug_assert!(false, "{site:?} has no positive counter terms"); + debug_assert!(false, "{bcb:?} has no positive counter terms"); pos = vec![bcb]; neg = vec![]; } @@ -260,10 +250,7 @@ impl Transcriber { neg.sort(); let mut new_counters_for_sites = |sites: Vec| { - sites - .into_iter() - .map(|node| self.ensure_phys_counter(Site::Node { bcb: node })) - .collect::>() + sites.into_iter().map(|node| self.ensure_phys_counter(node)).collect::>() }; let mut pos = new_counters_for_sites(pos); let mut neg = new_counters_for_sites(neg); @@ -279,7 +266,7 @@ impl Transcriber { self.new } - fn ensure_phys_counter(&mut self, site: Site) -> BcbCounter { - *self.phys_counter_for_site.entry(site).or_insert_with(|| self.new.make_phys_counter(site)) + fn ensure_phys_counter(&mut self, bcb: BasicCoverageBlock) -> BcbCounter { + *self.phys_counter_for_node.entry(bcb).or_insert_with(|| self.new.make_phys_counter(bcb)) } } diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index b1b609595b74a..cca444306edb2 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -21,7 +21,7 @@ use rustc_span::Span; use rustc_span::def_id::LocalDefId; use tracing::{debug, debug_span, trace}; -use crate::coverage::counters::{CoverageCounters, Site}; +use crate::coverage::counters::CoverageCounters; use crate::coverage::graph::CoverageGraph; use crate::coverage::mappings::ExtractedMappings; @@ -239,14 +239,8 @@ fn inject_coverage_statements<'tcx>( coverage_counters: &CoverageCounters, ) { // Inject counter-increment statements into MIR. - for (id, site) in coverage_counters.counter_increment_sites() { - // Determine the block to inject a counter-increment statement into. 
- // For BCB nodes this is just their first block, but for edges we need - // to create a new block between the two BCBs, and inject into that. - let target_bb = match site { - Site::Node { bcb } => graph[bcb].leader_bb(), - }; - + for (id, bcb) in coverage_counters.counter_increment_sites() { + let target_bb = graph[bcb].leader_bb(); inject_statement(mir_body, CoverageKind::CounterIncrement { id }, target_bb); } From 37a88ba320a8ee041adcdde670719ce76a7808d8 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sat, 18 Jan 2025 05:56:46 +0000 Subject: [PATCH 136/142] Bump Fuchsia integration commit This advances Fuchsia to a checkout from 2025-01-13, which corresponds to a recent Rust roll, and hopefully avoids #135667, where a repository used by the older version of Rust was accidentally archived and broke checking out the prior version. try-job: x86_64-fuchsia --- src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh b/src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh index 7027c93857c3c..d0a138b79033b 100755 --- a/src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh +++ b/src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh @@ -35,7 +35,7 @@ PICK_REFS=() # commit hash of fuchsia.git and some other repos in the "monorepo" checkout, in # addition to versions of prebuilts. It should be bumped regularly by the # Fuchsia team – we aim for every 1-2 months. -INTEGRATION_SHA=bb38af4e3d45e490531b71fc52a460003141d032 +INTEGRATION_SHA=f6f83d3e3852209f7752be55694006afbe979e50 checkout=fuchsia jiri=.jiri_root/bin/jiri From 48399442d44f8eba4d3ab9afa1ccea5129c5ebb7 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Fri, 17 Jan 2025 15:25:24 +1100 Subject: [PATCH 137/142] coverage: Move `phys_counter_for_node` into `CoverageCounters` --- .../src/coverage/counters.rs | 37 +++++++++---------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index eeb8f65fdf9a9..67d3609436538 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -4,7 +4,7 @@ use std::fmt::{self, Debug}; use either::Either; use itertools::Itertools; use rustc_data_structures::captures::Captures; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_data_structures::graph::DirectedGraph; use rustc_index::IndexVec; use rustc_index::bit_set::DenseBitSet; @@ -58,7 +58,8 @@ struct BcbExpression { pub(super) struct CoverageCounters { /// List of places where a counter-increment statement should be injected /// into MIR, each with its corresponding counter ID. - counter_increment_sites: IndexVec, + phys_counter_for_node: FxIndexMap, + next_counter_id: CounterId, /// Coverage counters/expressions that are associated with individual BCBs. node_counters: IndexVec>, @@ -114,16 +115,21 @@ impl CoverageCounters { fn with_num_bcbs(num_bcbs: usize) -> Self { Self { - counter_increment_sites: IndexVec::new(), + phys_counter_for_node: FxIndexMap::default(), + next_counter_id: CounterId::ZERO, node_counters: IndexVec::from_elem_n(None, num_bcbs), expressions: IndexVec::new(), expressions_memo: FxHashMap::default(), } } - /// Creates a new physical counter for a BCB node. 
- fn make_phys_counter(&mut self, bcb: BasicCoverageBlock) -> BcbCounter { - let id = self.counter_increment_sites.push(bcb); + /// Returns the physical counter for the given node, creating it if necessary. + fn ensure_phys_counter(&mut self, bcb: BasicCoverageBlock) -> BcbCounter { + let id = *self.phys_counter_for_node.entry(bcb).or_insert_with(|| { + let id = self.next_counter_id; + self.next_counter_id = id + 1; + id + }); BcbCounter::Counter { id } } @@ -152,7 +158,9 @@ impl CoverageCounters { } pub(super) fn num_counters(&self) -> usize { - self.counter_increment_sites.len() + let num_counters = self.phys_counter_for_node.len(); + assert_eq!(num_counters, self.next_counter_id.as_usize()); + num_counters } fn set_node_counter(&mut self, bcb: BasicCoverageBlock, counter: BcbCounter) -> BcbCounter { @@ -174,7 +182,7 @@ impl CoverageCounters { pub(super) fn counter_increment_sites( &self, ) -> impl Iterator + Captures<'_> { - self.counter_increment_sites.iter_enumerated().map(|(id, &site)| (id, site)) + self.phys_counter_for_node.iter().map(|(&site, &id)| (id, site)) } /// Returns an iterator over the subset of BCB nodes that have been associated @@ -212,16 +220,11 @@ impl CoverageCounters { struct Transcriber { old: NodeCounters, new: CoverageCounters, - phys_counter_for_node: FxHashMap, } impl Transcriber { fn new(num_nodes: usize, old: NodeCounters) -> Self { - Self { - old, - new: CoverageCounters::with_num_bcbs(num_nodes), - phys_counter_for_node: FxHashMap::default(), - } + Self { old, new: CoverageCounters::with_num_bcbs(num_nodes) } } fn transcribe_counters( @@ -250,7 +253,7 @@ impl Transcriber { neg.sort(); let mut new_counters_for_sites = |sites: Vec| { - sites.into_iter().map(|node| self.ensure_phys_counter(node)).collect::>() + sites.into_iter().map(|node| self.new.ensure_phys_counter(node)).collect::>() }; let mut pos = new_counters_for_sites(pos); let mut neg = new_counters_for_sites(neg); @@ -265,8 +268,4 @@ impl Transcriber { self.new } - - fn ensure_phys_counter(&mut self, bcb: BasicCoverageBlock) -> BcbCounter { - *self.phys_counter_for_node.entry(bcb).or_insert_with(|| self.new.make_phys_counter(bcb)) - } } From e1dda100570f6b4cb9553fa2542472aeaac72c1c Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 1 Dec 2024 12:59:50 +0100 Subject: [PATCH 138/142] const-eval: detect more pointers as definitely not-null --- .../rustc_const_eval/src/interpret/memory.rs | 25 +++++++++++++------ tests/ui/consts/const-ptr-is-null.rs | 8 +++++- tests/ui/consts/const-ptr-is-null.stderr | 2 +- 3 files changed, 25 insertions(+), 10 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 0790db984e345..2772c94d52b0b 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -1481,22 +1481,31 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Test if this value might be null. /// If the machine does not support ptr-to-int casts, this is conservative. pub fn scalar_may_be_null(&self, scalar: Scalar) -> InterpResult<'tcx, bool> { - interp_ok(match scalar.try_to_scalar_int() { - Ok(int) => int.is_null(), + match scalar.try_to_scalar_int() { + Ok(int) => interp_ok(int.is_null()), Err(_) => { - // Can only happen during CTFE. + // We can't cast this pointer to an integer. Can only happen during CTFE. 
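+ // Instead, inspect the pointer's allocation: the checks below rule out null for
+ // in-bounds pointers and for offsets that are not a multiple of the allocation's alignment.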
let ptr = scalar.to_pointer(self)?; match self.ptr_try_get_alloc_id(ptr, 0) { Ok((alloc_id, offset, _)) => { - let size = self.get_alloc_info(alloc_id).size; - // If the pointer is out-of-bounds, it may be null. - // Note that one-past-the-end (offset == size) is still inbounds, and never null. - offset > size + let info = self.get_alloc_info(alloc_id); + // If the pointer is in-bounds (including "at the end"), it is definitely not null. + if offset <= info.size { + return interp_ok(false); + } + // If the allocation is N-aligned, and the offset is not divisible by N, + // then `base + offset` has a non-zero remainder after division by `N`, + // which means `base + offset` cannot be null. + if offset.bytes() % info.align.bytes() != 0 { + return interp_ok(false); + } + // We don't know enough, this might be null. + interp_ok(true) } Err(_offset) => bug!("a non-int scalar is always a pointer"), } } - }) + } } /// Turning a "maybe pointer" into a proper pointer (and some information diff --git a/tests/ui/consts/const-ptr-is-null.rs b/tests/ui/consts/const-ptr-is-null.rs index 92cf87a9782f2..0abd9afa42246 100644 --- a/tests/ui/consts/const-ptr-is-null.rs +++ b/tests/ui/consts/const-ptr-is-null.rs @@ -12,7 +12,13 @@ const MAYBE_NULL: () = { let ptr = &x as *const i32; // This one is still unambiguous... assert!(!ptr.is_null()); - // but once we shift outside the allocation, we might become null. + // and in fact, any offset not visible by 4 (the alignment) cannot be null, + // even if it goes out-of-bounds... + assert!(!ptr.wrapping_byte_add(13).is_null()); + assert!(!ptr.wrapping_byte_add(18).is_null()); + assert!(!ptr.wrapping_byte_sub(1).is_null()); + // ... but once we shift outside the allocation, with an offset divisible by 4, + // we might become null. assert!(!ptr.wrapping_sub(512).is_null()); //~inside `MAYBE_NULL` }; diff --git a/tests/ui/consts/const-ptr-is-null.stderr b/tests/ui/consts/const-ptr-is-null.stderr index f71b37527726f..5ef79790d93bc 100644 --- a/tests/ui/consts/const-ptr-is-null.stderr +++ b/tests/ui/consts/const-ptr-is-null.stderr @@ -8,7 +8,7 @@ note: inside `std::ptr::const_ptr::::is_null::compiletime` note: inside `std::ptr::const_ptr::::is_null` --> $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL note: inside `MAYBE_NULL` - --> $DIR/const-ptr-is-null.rs:16:14 + --> $DIR/const-ptr-is-null.rs:22:14 | LL | assert!(!ptr.wrapping_sub(512).is_null()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 4170b93cdc809673a8b837c87a79b387277e010b Mon Sep 17 00:00:00 2001 From: Zalathar Date: Fri, 17 Jan 2025 22:41:50 +1100 Subject: [PATCH 139/142] coverage: Flatten top-level counter creation into plain functions - Move `make_bcb_counters` out of `CoverageCounters` - Split out `make_node_counter_priority_list` - Flatten `Transcriber` into the function `transcribe_counters` --- .../src/coverage/counters.rs | 183 +++++++++--------- .../rustc_mir_transform/src/coverage/mod.rs | 3 +- 2 files changed, 91 insertions(+), 95 deletions(-) diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 67d3609436538..e0973805cbe3e 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -20,6 +20,96 @@ mod iter_nodes; mod node_flow; mod union_find; +/// Ensures that each BCB node needing a counter has one, by creating physical +/// counters or counter expressions for nodes as required. 
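+/// This is done in three steps: derive balanced and merged flow graphs from the
+/// coverage graph, decide which nodes should receive physical counters, and then
+/// transcribe the resulting per-node counters into a `CoverageCounters` table.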
+pub(super) fn make_bcb_counters( + graph: &CoverageGraph, + bcb_needs_counter: &DenseBitSet, +) -> CoverageCounters { + let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable); + let merged_graph = MergedNodeFlowGraph::for_balanced_graph(&balanced_graph); + + let nodes = make_node_counter_priority_list(graph, balanced_graph); + let node_counters = merged_graph.make_node_counters(&nodes); + + transcribe_counters(&node_counters, bcb_needs_counter) +} + +fn make_node_counter_priority_list( + graph: &CoverageGraph, + balanced_graph: BalancedFlowGraph<&CoverageGraph>, +) -> Vec { + // A "reloop" node has exactly one out-edge, which jumps back to the top + // of an enclosing loop. Reloop nodes are typically visited more times + // than loop-exit nodes, so try to avoid giving them physical counters. + let is_reloop_node = IndexVec::from_fn_n( + |node| match graph.successors[node].as_slice() { + &[succ] => graph.dominates(succ, node), + _ => false, + }, + graph.num_nodes(), + ); + + let mut nodes = balanced_graph.iter_nodes().rev().collect::>(); + // The first node is the sink, which must not get a physical counter. + assert_eq!(nodes[0], balanced_graph.sink); + // Sort the real nodes, such that earlier (lesser) nodes take priority + // in being given a counter expression instead of a physical counter. + nodes[1..].sort_by(|&a, &b| { + // Start with a dummy `Equal` to make the actual tests line up nicely. + Ordering::Equal + // Prefer a physical counter for return/yield nodes. + .then_with(|| Ord::cmp(&graph[a].is_out_summable, &graph[b].is_out_summable)) + // Prefer an expression for reloop nodes (see definition above). + .then_with(|| Ord::cmp(&is_reloop_node[a], &is_reloop_node[b]).reverse()) + // Otherwise, prefer a physical counter for dominating nodes. + .then_with(|| graph.cmp_in_dominator_order(a, b).reverse()) + }); + nodes +} + +fn transcribe_counters( + old: &NodeCounters, + bcb_needs_counter: &DenseBitSet, +) -> CoverageCounters { + let mut new = CoverageCounters::with_num_bcbs(bcb_needs_counter.domain_size()); + + for bcb in bcb_needs_counter.iter() { + let (mut pos, mut neg): (Vec<_>, Vec<_>) = + old.counter_expr(bcb).iter().partition_map(|&CounterTerm { node, op }| match op { + Op::Add => Either::Left(node), + Op::Subtract => Either::Right(node), + }); + + if pos.is_empty() { + // If we somehow end up with no positive terms, fall back to + // creating a physical counter. There's no known way for this + // to happen, but we can avoid an ICE if it does. + debug_assert!(false, "{bcb:?} has no positive counter terms"); + pos = vec![bcb]; + neg = vec![]; + } + + pos.sort(); + neg.sort(); + + let mut new_counters_for_sites = |sites: Vec| { + sites.into_iter().map(|node| new.ensure_phys_counter(node)).collect::>() + }; + let mut pos = new_counters_for_sites(pos); + let mut neg = new_counters_for_sites(neg); + + pos.sort(); + neg.sort(); + + let pos_counter = new.make_sum(&pos).expect("`pos` should not be empty"); + let new_counter = new.make_subtracted_sum(pos_counter, &neg); + new.set_node_counter(bcb, new_counter); + } + + new +} + /// The coverage counter or counter expression associated with a particular /// BCB node or BCB edge. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -73,46 +163,6 @@ pub(super) struct CoverageCounters { } impl CoverageCounters { - /// Ensures that each BCB node needing a counter has one, by creating physical - /// counters or counter expressions for nodes and edges as required. 
- pub(super) fn make_bcb_counters( - graph: &CoverageGraph, - bcb_needs_counter: &DenseBitSet, - ) -> Self { - let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable); - let merged_graph = MergedNodeFlowGraph::for_balanced_graph(&balanced_graph); - - // A "reloop" node has exactly one out-edge, which jumps back to the top - // of an enclosing loop. Reloop nodes are typically visited more times - // than loop-exit nodes, so try to avoid giving them physical counters. - let is_reloop_node = IndexVec::from_fn_n( - |node| match graph.successors[node].as_slice() { - &[succ] => graph.dominates(succ, node), - _ => false, - }, - graph.num_nodes(), - ); - - let mut nodes = balanced_graph.iter_nodes().rev().collect::>(); - // The first node is the sink, which must not get a physical counter. - assert_eq!(nodes[0], balanced_graph.sink); - // Sort the real nodes, such that earlier (lesser) nodes take priority - // in being given a counter expression instead of a physical counter. - nodes[1..].sort_by(|&a, &b| { - // Start with a dummy `Equal` to make the actual tests line up nicely. - Ordering::Equal - // Prefer a physical counter for return/yield nodes. - .then_with(|| Ord::cmp(&graph[a].is_out_summable, &graph[b].is_out_summable)) - // Prefer an expression for reloop nodes (see definition above). - .then_with(|| Ord::cmp(&is_reloop_node[a], &is_reloop_node[b]).reverse()) - // Otherwise, prefer a physical counter for dominating nodes. - .then_with(|| graph.cmp_in_dominator_order(a, b).reverse()) - }); - let node_counters = merged_graph.make_node_counters(&nodes); - - Transcriber::new(graph.num_nodes(), node_counters).transcribe_counters(bcb_needs_counter) - } - fn with_num_bcbs(num_bcbs: usize) -> Self { Self { phys_counter_for_node: FxIndexMap::default(), @@ -216,56 +266,3 @@ impl CoverageCounters { expressions } } - -struct Transcriber { - old: NodeCounters, - new: CoverageCounters, -} - -impl Transcriber { - fn new(num_nodes: usize, old: NodeCounters) -> Self { - Self { old, new: CoverageCounters::with_num_bcbs(num_nodes) } - } - - fn transcribe_counters( - mut self, - bcb_needs_counter: &DenseBitSet, - ) -> CoverageCounters { - for bcb in bcb_needs_counter.iter() { - let (mut pos, mut neg): (Vec<_>, Vec<_>) = - self.old.counter_expr(bcb).iter().partition_map( - |&CounterTerm { node, op }| match op { - Op::Add => Either::Left(node), - Op::Subtract => Either::Right(node), - }, - ); - - if pos.is_empty() { - // If we somehow end up with no positive terms, fall back to - // creating a physical counter. There's no known way for this - // to happen, but we can avoid an ICE if it does. 
- debug_assert!(false, "{bcb:?} has no positive counter terms"); - pos = vec![bcb]; - neg = vec![]; - } - - pos.sort(); - neg.sort(); - - let mut new_counters_for_sites = |sites: Vec| { - sites.into_iter().map(|node| self.new.ensure_phys_counter(node)).collect::>() - }; - let mut pos = new_counters_for_sites(pos); - let mut neg = new_counters_for_sites(neg); - - pos.sort(); - neg.sort(); - - let pos_counter = self.new.make_sum(&pos).expect("`pos` should not be empty"); - let new_counter = self.new.make_subtracted_sum(pos_counter, &neg); - self.new.set_node_counter(bcb, new_counter); - } - - self.new - } -} diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index cca444306edb2..19568735df76e 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -89,8 +89,7 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: return; } - let coverage_counters = - CoverageCounters::make_bcb_counters(&graph, &bcbs_with_counter_mappings); + let coverage_counters = counters::make_bcb_counters(&graph, &bcbs_with_counter_mappings); let mappings = create_mappings(&extracted_mappings, &coverage_counters); if mappings.is_empty() { From 6000d5c462b767e3c5871034824964d0b0897a26 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Fri, 17 Jan 2025 21:34:57 +1100 Subject: [PATCH 140/142] coverage: Remove `BcbCounter` and `BcbExpression` Making these separate types from `CovTerm` and `Expression` was historically very helpful, but now that most of the counter-creation work is handled by `node_flow` they are no longer needed. --- compiler/rustc_middle/src/mir/coverage.rs | 8 +- .../src/coverage/counters.rs | 80 ++++--------------- 2 files changed, 18 insertions(+), 70 deletions(-) diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs index cd6b2d65bf184..65f51ae9d39c6 100644 --- a/compiler/rustc_middle/src/mir/coverage.rs +++ b/compiler/rustc_middle/src/mir/coverage.rs @@ -71,11 +71,7 @@ impl ConditionId { /// Enum that can hold a constant zero value, the ID of an physical coverage /// counter, or the ID of a coverage-counter expression. -/// -/// This was originally only used for expression operands (and named `Operand`), -/// but the zero/counter/expression distinction is also useful for representing -/// the value of code/gap mappings, and the true/false arms of branch mappings. -#[derive(Copy, Clone, PartialEq, Eq)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] pub enum CovTerm { Zero, @@ -171,7 +167,7 @@ impl Op { } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq)] #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] pub struct Expression { pub lhs: CovTerm, diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index e0973805cbe3e..7a5cea2b68ca8 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -1,5 +1,4 @@ use std::cmp::Ordering; -use std::fmt::{self, Debug}; use either::Either; use itertools::Itertools; @@ -110,39 +109,6 @@ fn transcribe_counters( new } -/// The coverage counter or counter expression associated with a particular -/// BCB node or BCB edge. 
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -enum BcbCounter { - Counter { id: CounterId }, - Expression { id: ExpressionId }, -} - -impl BcbCounter { - fn as_term(&self) -> CovTerm { - match *self { - BcbCounter::Counter { id, .. } => CovTerm::Counter(id), - BcbCounter::Expression { id, .. } => CovTerm::Expression(id), - } - } -} - -impl Debug for BcbCounter { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Counter { id, .. } => write!(fmt, "Counter({:?})", id.index()), - Self::Expression { id } => write!(fmt, "Expression({:?})", id.index()), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -struct BcbExpression { - lhs: BcbCounter, - op: Op, - rhs: BcbCounter, -} - /// Generates and stores coverage counter and coverage expression information /// associated with nodes in the coverage graph. pub(super) struct CoverageCounters { @@ -152,14 +118,14 @@ pub(super) struct CoverageCounters { next_counter_id: CounterId, /// Coverage counters/expressions that are associated with individual BCBs. - node_counters: IndexVec>, + node_counters: IndexVec>, /// Table of expression data, associating each expression ID with its /// corresponding operator (+ or -) and its LHS/RHS operands. - expressions: IndexVec, + expressions: IndexVec, /// Remember expressions that have already been created (or simplified), /// so that we don't create unnecessary duplicates. - expressions_memo: FxHashMap, + expressions_memo: FxHashMap, } impl CoverageCounters { @@ -174,27 +140,27 @@ impl CoverageCounters { } /// Returns the physical counter for the given node, creating it if necessary. - fn ensure_phys_counter(&mut self, bcb: BasicCoverageBlock) -> BcbCounter { + fn ensure_phys_counter(&mut self, bcb: BasicCoverageBlock) -> CovTerm { let id = *self.phys_counter_for_node.entry(bcb).or_insert_with(|| { let id = self.next_counter_id; self.next_counter_id = id + 1; id }); - BcbCounter::Counter { id } + CovTerm::Counter(id) } - fn make_expression(&mut self, lhs: BcbCounter, op: Op, rhs: BcbCounter) -> BcbCounter { - let new_expr = BcbExpression { lhs, op, rhs }; - *self.expressions_memo.entry(new_expr).or_insert_with(|| { + fn make_expression(&mut self, lhs: CovTerm, op: Op, rhs: CovTerm) -> CovTerm { + let new_expr = Expression { lhs, op, rhs }; + *self.expressions_memo.entry(new_expr.clone()).or_insert_with(|| { let id = self.expressions.push(new_expr); - BcbCounter::Expression { id } + CovTerm::Expression(id) }) } /// Creates a counter that is the sum of the given counters. /// /// Returns `None` if the given list of counters was empty. - fn make_sum(&mut self, counters: &[BcbCounter]) -> Option { + fn make_sum(&mut self, counters: &[CovTerm]) -> Option { counters .iter() .copied() @@ -202,7 +168,7 @@ impl CoverageCounters { } /// Creates a counter whose value is `lhs - SUM(rhs)`. 
- fn make_subtracted_sum(&mut self, lhs: BcbCounter, rhs: &[BcbCounter]) -> BcbCounter { + fn make_subtracted_sum(&mut self, lhs: CovTerm, rhs: &[CovTerm]) -> CovTerm { let Some(rhs_sum) = self.make_sum(rhs) else { return lhs }; self.make_expression(lhs, Op::Subtract, rhs_sum) } @@ -213,7 +179,7 @@ impl CoverageCounters { num_counters } - fn set_node_counter(&mut self, bcb: BasicCoverageBlock, counter: BcbCounter) -> BcbCounter { + fn set_node_counter(&mut self, bcb: BasicCoverageBlock, counter: CovTerm) -> CovTerm { let existing = self.node_counters[bcb].replace(counter); assert!( existing.is_none(), @@ -223,7 +189,7 @@ impl CoverageCounters { } pub(super) fn term_for_bcb(&self, bcb: BasicCoverageBlock) -> Option { - self.node_counters[bcb].map(|counter| counter.as_term()) + self.node_counters[bcb] } /// Returns an iterator over all the nodes in the coverage graph that @@ -242,27 +208,13 @@ impl CoverageCounters { ) -> impl Iterator + Captures<'_> { self.node_counters.iter_enumerated().filter_map(|(bcb, &counter)| match counter { // Yield the BCB along with its associated expression ID. - Some(BcbCounter::Expression { id }) => Some((bcb, id)), + Some(CovTerm::Expression(id)) => Some((bcb, id)), // This BCB is associated with a counter or nothing, so skip it. - Some(BcbCounter::Counter { .. }) | None => None, + Some(CovTerm::Counter { .. } | CovTerm::Zero) | None => None, }) } pub(super) fn into_expressions(self) -> IndexVec { - let old_len = self.expressions.len(); - let expressions = self - .expressions - .into_iter() - .map(|BcbExpression { lhs, op, rhs }| Expression { - lhs: lhs.as_term(), - op, - rhs: rhs.as_term(), - }) - .collect::>(); - - // Expression IDs are indexes into this vector, so make sure we didn't - // accidentally invalidate them by changing its length. - assert_eq!(old_len, expressions.len()); - expressions + self.expressions } } From ea0c86c434ae4cc306c0bf43f933bd7a018232a1 Mon Sep 17 00:00:00 2001 From: Zalathar Date: Sat, 18 Jan 2025 22:12:30 +1100 Subject: [PATCH 141/142] coverage: Add a few more comments to counter creation --- .../rustc_mir_transform/src/coverage/counters.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 7a5cea2b68ca8..8d397f63cc7e0 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -25,15 +25,21 @@ pub(super) fn make_bcb_counters( graph: &CoverageGraph, bcb_needs_counter: &DenseBitSet, ) -> CoverageCounters { + // Create the derived graphs that are necessary for subsequent steps. let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable); let merged_graph = MergedNodeFlowGraph::for_balanced_graph(&balanced_graph); + // Use those graphs to determine which nodes get physical counters, and how + // to compute the execution counts of other nodes from those counters. let nodes = make_node_counter_priority_list(graph, balanced_graph); let node_counters = merged_graph.make_node_counters(&nodes); + // Convert the counters into a form suitable for embedding into MIR. transcribe_counters(&node_counters, bcb_needs_counter) } +/// Arranges the nodes in `balanced_graph` into a list, such that earlier nodes +/// take priority in being given a counter expression instead of a physical counter. 
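+/// In practice, reloop nodes (which tend to be executed more often) are pushed
+/// towards expressions, while return/yield nodes and dominating nodes are pushed
+/// towards physical counters.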
fn make_node_counter_priority_list( graph: &CoverageGraph, balanced_graph: BalancedFlowGraph<&CoverageGraph>, @@ -67,6 +73,7 @@ fn make_node_counter_priority_list( nodes } +// Converts node counters into a form suitable for embedding into MIR. fn transcribe_counters( old: &NodeCounters, bcb_needs_counter: &DenseBitSet, @@ -74,6 +81,9 @@ fn transcribe_counters( let mut new = CoverageCounters::with_num_bcbs(bcb_needs_counter.domain_size()); for bcb in bcb_needs_counter.iter() { + // Our counter-creation algorithm doesn't guarantee that a counter + // expression starts or ends with a positive term, so partition the + // counters into "positive" and "negative" lists for easier handling. let (mut pos, mut neg): (Vec<_>, Vec<_>) = old.counter_expr(bcb).iter().partition_map(|&CounterTerm { node, op }| match op { Op::Add => Either::Left(node), @@ -89,6 +99,10 @@ fn transcribe_counters( neg = vec![]; } + // These intermediate sorts are not strictly necessary, but were helpful + // in reducing churn when switching to the current counter-creation scheme. + // They also help to slightly decrease the overall size of the expression + // table, due to more subexpressions being shared. pos.sort(); neg.sort(); @@ -98,6 +112,7 @@ fn transcribe_counters( let mut pos = new_counters_for_sites(pos); let mut neg = new_counters_for_sites(neg); + // These sorts are also not strictly necessary; see above. pos.sort(); neg.sort(); From 2a180a93a12e5353e77d2b8f03fb95f99fed2dc7 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sat, 18 Jan 2025 18:47:11 +0000 Subject: [PATCH 142/142] Get rid of `ToPolyTraitRef` --- compiler/rustc_infer/src/traits/util.rs | 6 +- compiler/rustc_middle/src/ty/mod.rs | 3 +- compiler/rustc_middle/src/ty/predicate.rs | 10 -- .../src/error_reporting/traits/ambiguity.rs | 24 +-- .../traits/fulfillment_errors.rs | 143 +++++++++--------- .../traits/on_unimplemented.rs | 14 +- .../src/error_reporting/traits/suggestions.rs | 29 ++-- .../src/traits/select/candidate_assembly.rs | 5 +- .../src/traits/select/confirmation.rs | 7 +- 9 files changed, 116 insertions(+), 125 deletions(-) diff --git a/compiler/rustc_infer/src/traits/util.rs b/compiler/rustc_infer/src/traits/util.rs index ab8ada1596c81..66ed49fe32676 100644 --- a/compiler/rustc_infer/src/traits/util.rs +++ b/compiler/rustc_infer/src/traits/util.rs @@ -1,5 +1,5 @@ use rustc_data_structures::fx::FxHashSet; -use rustc_middle::ty::{self, ToPolyTraitRef, TyCtxt}; +use rustc_middle::ty::{self, TyCtxt}; use rustc_span::{Ident, Span}; pub use rustc_type_ir::elaborate::*; @@ -125,8 +125,8 @@ pub fn transitive_bounds_that_define_assoc_item<'tcx>( .iter_identity_copied() .map(|(clause, _)| clause.instantiate_supertrait(tcx, trait_ref)) .filter_map(|clause| clause.as_trait_clause()) - // FIXME: Negative supertraits are elaborated here lol - .map(|trait_pred| trait_pred.to_poly_trait_ref()), + .filter(|clause| clause.polarity() == ty::PredicatePolarity::Positive) + .map(|clause| clause.map_bound(|clause| clause.trait_ref)), ); return Some(trait_ref); diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index d2875fb37944a..ca70ae794c530 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -79,8 +79,7 @@ pub use self::predicate::{ PolyExistentialPredicate, PolyExistentialProjection, PolyExistentialTraitRef, PolyProjectionPredicate, PolyRegionOutlivesPredicate, PolySubtypePredicate, PolyTraitPredicate, PolyTraitRef, PolyTypeOutlivesPredicate, Predicate, PredicateKind, 
ProjectionPredicate, - RegionOutlivesPredicate, SubtypePredicate, ToPolyTraitRef, TraitPredicate, TraitRef, - TypeOutlivesPredicate, + RegionOutlivesPredicate, SubtypePredicate, TraitPredicate, TraitRef, TypeOutlivesPredicate, }; pub use self::region::{ BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, LateParamRegionKind, Region, diff --git a/compiler/rustc_middle/src/ty/predicate.rs b/compiler/rustc_middle/src/ty/predicate.rs index 32d6455e82557..584cac22ae8dd 100644 --- a/compiler/rustc_middle/src/ty/predicate.rs +++ b/compiler/rustc_middle/src/ty/predicate.rs @@ -476,16 +476,6 @@ impl<'tcx> Clause<'tcx> { } } -pub trait ToPolyTraitRef<'tcx> { - fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx>; -} - -impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> { - fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> { - self.map_bound_ref(|trait_pred| trait_pred.trait_ref) - } -} - impl<'tcx> UpcastFrom, PredicateKind<'tcx>> for Predicate<'tcx> { fn upcast_from(from: PredicateKind<'tcx>, tcx: TyCtxt<'tcx>) -> Self { ty::Binder::dummy(from).upcast(tcx) diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs index fc0de13aeab09..b4d294a70c071 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs @@ -172,14 +172,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let bound_predicate = predicate.kind(); let mut err = match bound_predicate.skip_binder() { ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) => { - let trait_ref = bound_predicate.rebind(data.trait_ref); - debug!(?trait_ref); + let trait_pred = bound_predicate.rebind(data); + debug!(?trait_pred); if let Err(e) = predicate.error_reported() { return e; } - if let Err(guar) = self.tcx.ensure().coherent_trait(trait_ref.def_id()) { + if let Err(guar) = self.tcx.ensure().coherent_trait(trait_pred.def_id()) { // Avoid bogus "type annotations needed `Foo: Bar`" errors on `impl Bar for Foo` in case // other `Foo` impls are incoherent. return guar; @@ -200,13 +200,13 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // avoid inundating the user with unnecessary errors, but we now // check upstream for type errors and don't add the obligations to // begin with in those cases. - if self.tcx.is_lang_item(trait_ref.def_id(), LangItem::Sized) { + if self.tcx.is_lang_item(trait_pred.def_id(), LangItem::Sized) { match self.tainted_by_errors() { None => { let err = self.emit_inference_failure_err( obligation.cause.body_id, span, - trait_ref.self_ty().skip_binder().into(), + trait_pred.self_ty().skip_binder().into(), TypeAnnotationNeeded::E0282, false, ); @@ -251,10 +251,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let mut ambiguities = compute_applicable_impls_for_diagnostics( self.infcx, - &obligation.with(self.tcx, trait_ref), + &obligation.with(self.tcx, trait_pred), ); - let has_non_region_infer = - trait_ref.skip_binder().args.types().any(|t| !t.is_ty_or_numeric_infer()); + let has_non_region_infer = trait_pred + .skip_binder() + .trait_ref + .args + .types() + .any(|t| !t.is_ty_or_numeric_infer()); // It doesn't make sense to talk about applicable impls if there are more than a // handful of them. If there are a lot of them, but only a few of them have no type // params, we only show those, as they are more likely to be useful/intended. 
@@ -294,7 +298,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if impl_candidates.len() < 40 { self.report_similar_impl_candidates( impl_candidates.as_slice(), - trait_ref, + trait_pred, obligation.cause.body_id, &mut err, false, @@ -306,7 +310,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if let ObligationCauseCode::WhereClause(def_id, _) | ObligationCauseCode::WhereClauseInExpr(def_id, ..) = *obligation.cause.code() { - self.suggest_fully_qualified_path(&mut err, def_id, span, trait_ref.def_id()); + self.suggest_fully_qualified_path(&mut err, def_id, span, trait_pred.def_id()); } if let Some(ty::GenericArgKind::Type(_)) = arg.map(|arg| arg.unpack()) diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 6076c999086bf..c40ba33084569 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -23,9 +23,7 @@ use rustc_middle::ty::print::{ FmtPrinter, Print, PrintPolyTraitPredicateExt, PrintTraitPredicateExt as _, PrintTraitRefExt as _, with_forced_trimmed_paths, }; -use rustc_middle::ty::{ - self, ToPolyTraitRef, TraitRef, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, Upcast, -}; +use rustc_middle::ty::{self, TraitRef, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, Upcast}; use rustc_middle::{bug, span_bug}; use rustc_span::{BytePos, DUMMY_SP, STDLIB_STABLE_CRATES, Span, Symbol, sym}; use tracing::{debug, instrument}; @@ -155,12 +153,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { (leaf_trait_predicate, &obligation) }; - let main_trait_ref = main_trait_predicate.to_poly_trait_ref(); - let leaf_trait_ref = leaf_trait_predicate.to_poly_trait_ref(); - if let Some(guar) = self.emit_specialized_closure_kind_error( &obligation, - leaf_trait_ref, + leaf_trait_predicate, ) { return guar; } @@ -202,14 +197,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } = self.on_unimplemented_note(main_trait_predicate, main_obligation, &mut long_ty_file); let have_alt_message = message.is_some() || label.is_some(); - let is_try_conversion = self.is_try_conversion(span, main_trait_ref.def_id()); + let is_try_conversion = self.is_try_conversion(span, main_trait_predicate.def_id()); let is_unsize = - self.tcx.is_lang_item(leaf_trait_ref.def_id(), LangItem::Unsize); + self.tcx.is_lang_item(leaf_trait_predicate.def_id(), LangItem::Unsize); let (message, notes, append_const_msg) = if is_try_conversion { ( Some(format!( "`?` couldn't convert the error to `{}`", - main_trait_ref.skip_binder().self_ty(), + main_trait_predicate.skip_binder().self_ty(), )), vec![ "the question mark operation (`?`) implicitly performs a \ @@ -230,12 +225,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { post_message, ); - let (err_msg, safe_transmute_explanation) = if self.tcx.is_lang_item(main_trait_ref.def_id(), LangItem::TransmuteTrait) + let (err_msg, safe_transmute_explanation) = if self.tcx.is_lang_item(main_trait_predicate.def_id(), LangItem::TransmuteTrait) { // Recompute the safe transmute reason and use that for the error reporting match self.get_safe_transmute_error_and_reason( obligation.clone(), - main_trait_ref, + main_trait_predicate, span, ) { GetSafeTransmuteErrorAndReason::Silent => { @@ -266,7 +261,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } let mut suggested = false; if is_try_conversion { - suggested = self.try_conversion_context(&obligation, main_trait_ref.skip_binder(), &mut err); + suggested = 
self.try_conversion_context(&obligation, main_trait_predicate, &mut err); } if is_try_conversion && let Some(ret_span) = self.return_type_span(&obligation) { @@ -274,12 +269,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ret_span, format!( "expected `{}` because of this", - main_trait_ref.skip_binder().self_ty() + main_trait_predicate.skip_binder().self_ty() ), ); } - if tcx.is_lang_item(leaf_trait_ref.def_id(), LangItem::Tuple) { + if tcx.is_lang_item(leaf_trait_predicate.def_id(), LangItem::Tuple) { self.add_tuple_trait_message( obligation.cause.code().peel_derives(), &mut err, @@ -319,7 +314,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // If it has a custom `#[rustc_on_unimplemented]` // error message, let's display it as the label! err.span_label(span, s); - if !matches!(leaf_trait_ref.skip_binder().self_ty().kind(), ty::Param(_)) { + if !matches!(leaf_trait_predicate.skip_binder().self_ty().kind(), ty::Param(_)) { // When the self type is a type param We don't need to "the trait // `std::marker::Sized` is not implemented for `T`" as we will point // at the type param with a label to suggest constraining it. @@ -339,7 +334,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if let ObligationCauseCode::Coercion { source, target } = *obligation.cause.code().peel_derives() { - if self.tcx.is_lang_item(leaf_trait_ref.def_id(), LangItem::Sized) { + if self.tcx.is_lang_item(leaf_trait_predicate.def_id(), LangItem::Sized) { self.suggest_borrowing_for_object_cast( &mut err, root_obligation, @@ -368,7 +363,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { err.span_label(tcx.def_span(body), s); } - self.suggest_floating_point_literal(&obligation, &mut err, leaf_trait_ref); + self.suggest_floating_point_literal(&obligation, &mut err, leaf_trait_predicate); self.suggest_dereferencing_index(&obligation, &mut err, leaf_trait_predicate); suggested |= self.suggest_dereferences(&obligation, &mut err, leaf_trait_predicate); suggested |= self.suggest_fn_call(&obligation, &mut err, leaf_trait_predicate); @@ -376,7 +371,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { suggested = if let &[cand] = &impl_candidates[..] 
{ let cand = cand.trait_ref; if let (ty::FnPtr(..), ty::FnDef(..)) = - (cand.self_ty().kind(), main_trait_ref.self_ty().skip_binder().kind()) + (cand.self_ty().kind(), main_trait_predicate.self_ty().skip_binder().kind()) { // Wrap method receivers and `&`-references in parens let suggestion = if self.tcx.sess.source_map().span_look_ahead(span, ".", Some(50)).is_some() { @@ -423,11 +418,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { span, leaf_trait_predicate, ); - self.note_version_mismatch(&mut err, leaf_trait_ref); + self.note_version_mismatch(&mut err, leaf_trait_predicate); self.suggest_remove_await(&obligation, &mut err); self.suggest_derive(&obligation, &mut err, leaf_trait_predicate); - if tcx.is_lang_item(leaf_trait_ref.def_id(), LangItem::Try) { + if tcx.is_lang_item(leaf_trait_predicate.def_id(), LangItem::Try) { self.suggest_await_before_try( &mut err, &obligation, @@ -455,9 +450,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ); } - let is_fn_trait = tcx.is_fn_trait(leaf_trait_ref.def_id()); + let is_fn_trait = tcx.is_fn_trait(leaf_trait_predicate.def_id()); let is_target_feature_fn = if let ty::FnDef(def_id, _) = - *leaf_trait_ref.skip_binder().self_ty().kind() + *leaf_trait_predicate.skip_binder().self_ty().kind() { !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() } else { @@ -509,7 +504,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } self.explain_hrtb_projection(&mut err, leaf_trait_predicate, obligation.param_env, &obligation.cause); - self.suggest_desugaring_async_fn_in_trait(&mut err, main_trait_ref); + self.suggest_desugaring_async_fn_in_trait(&mut err, main_trait_predicate); // Return early if the trait is Debug or Display and the invocation // originates within a standard library macro, because the output @@ -527,7 +522,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if in_std_macro && matches!( - self.tcx.get_diagnostic_name(leaf_trait_ref.def_id()), + self.tcx.get_diagnostic_name(leaf_trait_predicate.def_id()), Some(sym::Debug | sym::Display) ) { @@ -785,21 +780,22 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn emit_specialized_closure_kind_error( &self, obligation: &PredicateObligation<'tcx>, - mut trait_ref: ty::PolyTraitRef<'tcx>, + mut trait_pred: ty::PolyTraitPredicate<'tcx>, ) -> Option { // If `AsyncFnKindHelper` is not implemented, that means that the closure kind // doesn't extend the goal kind. This is worth reporting, but we can only do so // if we actually know which closure this goal comes from, so look at the cause // to see if we can extract that information. - if self.tcx.is_lang_item(trait_ref.def_id(), LangItem::AsyncFnKindHelper) - && let Some(found_kind) = trait_ref.skip_binder().args.type_at(0).to_opt_closure_kind() + if self.tcx.is_lang_item(trait_pred.def_id(), LangItem::AsyncFnKindHelper) + && let Some(found_kind) = + trait_pred.skip_binder().trait_ref.args.type_at(0).to_opt_closure_kind() && let Some(expected_kind) = - trait_ref.skip_binder().args.type_at(1).to_opt_closure_kind() + trait_pred.skip_binder().trait_ref.args.type_at(1).to_opt_closure_kind() && !found_kind.extends(expected_kind) { if let Some((_, Some(parent))) = obligation.cause.code().parent_with_predicate() { // If we have a derived obligation, then the parent will be a `AsyncFn*` goal. - trait_ref = parent.to_poly_trait_ref(); + trait_pred = parent; } else if let &ObligationCauseCode::FunctionArg { arg_hir_id, .. 
} = obligation.cause.code() && let Some(typeck_results) = &self.typeck_results @@ -820,9 +816,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } - let self_ty = trait_ref.self_ty().skip_binder(); + let self_ty = trait_pred.self_ty().skip_binder(); - if let Some(expected_kind) = self.tcx.fn_trait_kind_from_def_id(trait_ref.def_id()) { + if let Some(expected_kind) = self.tcx.fn_trait_kind_from_def_id(trait_pred.def_id()) { let (closure_def_id, found_args, by_ref_captures) = match *self_ty.kind() { ty::Closure(def_id, args) => { (def_id, args.as_closure().sig().map_bound(|sig| sig.inputs()[0]), None) @@ -837,7 +833,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { _ => return None, }; - let expected_args = trait_ref.map_bound(|trait_ref| trait_ref.args.type_at(1)); + let expected_args = + trait_pred.map_bound(|trait_pred| trait_pred.trait_ref.args.type_at(1)); // Verify that the arguments are compatible. If the signature is // mismatched, then we have a totally different error to report. @@ -909,7 +906,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn try_conversion_context( &self, obligation: &PredicateObligation<'tcx>, - trait_ref: ty::TraitRef<'tcx>, + trait_pred: ty::PolyTraitPredicate<'tcx>, err: &mut Diag<'_>, ) -> bool { let span = obligation.cause.span; @@ -953,8 +950,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if !self.tcx.is_diagnostic_item(sym::FromResidual, y.def_id()) { return false; } - let self_ty = trait_ref.self_ty(); - let found_ty = trait_ref.args.get(1).and_then(|a| a.as_type()); + let self_ty = trait_pred.skip_binder().self_ty(); + let found_ty = trait_pred.skip_binder().trait_ref.args.get(1).and_then(|a| a.as_type()); let mut prev_ty = self.resolve_vars_if_possible( typeck.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(self.tcx)), @@ -1223,18 +1220,18 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { goal: ty::TraitPredicate<'tcx>, assumption: ty::PolyTraitPredicate<'tcx>, ) -> bool { + // Fast path if goal.polarity != assumption.polarity() { return false; } - let trait_goal = goal.trait_ref; let trait_assumption = self.instantiate_binder_with_fresh_vars( DUMMY_SP, infer::BoundRegionConversionTime::HigherRankedType, - assumption.to_poly_trait_ref(), + assumption, ); - self.can_eq(ty::ParamEnv::empty(), trait_goal, trait_assumption) + self.can_eq(ty::ParamEnv::empty(), goal.trait_ref, trait_assumption.trait_ref) } fn can_match_projection( @@ -1682,7 +1679,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { pub(super) fn report_similar_impl_candidates( &self, impl_candidates: &[ImplCandidate<'tcx>], - trait_ref: ty::PolyTraitRef<'tcx>, + trait_pred: ty::PolyTraitPredicate<'tcx>, body_def_id: LocalDefId, err: &mut Diag<'_>, other: bool, @@ -1727,7 +1724,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // We'll check for the case where the reason for the mismatch is that the trait comes from // one crate version and the type comes from another crate version, even though they both // are from the same crate. - let trait_def_id = trait_ref.def_id(); + let trait_def_id = trait_pred.def_id(); let trait_name = self.tcx.item_name(trait_def_id); let crate_name = self.tcx.crate_name(trait_def_id.krate); if let Some(other_trait_def_id) = self.tcx.all_traits().find(|def_id| { @@ -1739,7 +1736,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // different crate `DefId`. We highlight the traits. 
let found_type = - if let ty::Adt(def, _) = trait_ref.self_ty().skip_binder().peel_refs().kind() { + if let ty::Adt(def, _) = trait_pred.self_ty().skip_binder().peel_refs().kind() { Some(def.did()) } else { None @@ -1836,7 +1833,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if self.probe(|_| { let ocx = ObligationCtxt::new(self); - self.enter_forall(trait_ref, |obligation_trait_ref| { + self.enter_forall(trait_pred, |obligation_trait_ref| { let impl_args = self.fresh_args_for_item(DUMMY_SP, single.impl_def_id); let impl_trait_ref = ocx.normalize( &ObligationCause::dummy(), @@ -1864,7 +1861,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let mut terrs = vec![]; for (obligation_arg, impl_arg) in - std::iter::zip(obligation_trait_ref.args, impl_trait_ref.args) + std::iter::zip(obligation_trait_ref.trait_ref.args, impl_trait_ref.args) { if (obligation_arg, impl_arg).references_error() { return false; @@ -1906,8 +1903,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } let traits = self.cmp_traits( - obligation_trait_ref.def_id, - &obligation_trait_ref.args[1..], + obligation_trait_ref.def_id(), + &obligation_trait_ref.trait_ref.args[1..], impl_trait_ref.def_id, &impl_trait_ref.args[1..], ); @@ -1991,7 +1988,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } if let &[cand] = &candidates[..] { let (desc, mention_castable) = - match (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) { + match (cand.self_ty().kind(), trait_pred.self_ty().skip_binder().kind()) { (ty::FnPtr(..), ty::FnDef(..)) => { (" implemented for fn pointer `", ", cast using `as`") } @@ -2055,7 +2052,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .filter(|cand| !self.tcx.do_not_recommend_impl(cand.impl_def_id)) .collect::>(); - let def_id = trait_ref.def_id(); + let def_id = trait_pred.def_id(); if impl_candidates.is_empty() { if self.tcx.trait_is_auto(def_id) || self.tcx.lang_items().iter().any(|(_, id)| id == def_id) @@ -2132,11 +2129,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { && !self.tcx.trait_is_auto(def_id) && !self.tcx.lang_items().iter().any(|(_, id)| id == def_id) { - let trait_ref = trait_pred.to_poly_trait_ref(); let impl_candidates = self.find_similar_impl_candidates(trait_pred); self.report_similar_impl_candidates( &impl_candidates, - trait_ref, + trait_pred, body_def_id, err, true, @@ -2173,12 +2169,16 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { /// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait /// with the same path as `trait_ref`, a help message about /// a probable version mismatch is added to `err` - fn note_version_mismatch(&self, err: &mut Diag<'_>, trait_ref: ty::PolyTraitRef<'tcx>) -> bool { + fn note_version_mismatch( + &self, + err: &mut Diag<'_>, + trait_pred: ty::PolyTraitPredicate<'tcx>, + ) -> bool { let get_trait_impls = |trait_def_id| { let mut trait_impls = vec![]; self.tcx.for_each_relevant_impl( trait_def_id, - trait_ref.skip_binder().self_ty(), + trait_pred.skip_binder().self_ty(), |impl_def_id| { trait_impls.push(impl_def_id); }, @@ -2186,11 +2186,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { trait_impls }; - let required_trait_path = self.tcx.def_path_str(trait_ref.def_id()); + let required_trait_path = self.tcx.def_path_str(trait_pred.def_id()); let traits_with_same_path: UnordSet<_> = self .tcx .visible_traits() - .filter(|trait_def_id| *trait_def_id != trait_ref.def_id()) + .filter(|trait_def_id| *trait_def_id != trait_pred.def_id()) .map(|trait_def_id| (self.tcx.def_path_str(trait_def_id), trait_def_id)) .filter(|(p, _)| *p == required_trait_path) .collect(); @@ 
-2374,7 +2374,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn get_safe_transmute_error_and_reason( &self, obligation: PredicateObligation<'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>, + trait_pred: ty::PolyTraitPredicate<'tcx>, span: Span, ) -> GetSafeTransmuteErrorAndReason { use rustc_transmute::Answer; @@ -2386,19 +2386,19 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } // Erase regions because layout code doesn't particularly care about regions. - let trait_ref = - self.tcx.erase_regions(self.tcx.instantiate_bound_regions_with_erased(trait_ref)); + let trait_pred = + self.tcx.erase_regions(self.tcx.instantiate_bound_regions_with_erased(trait_pred)); let src_and_dst = rustc_transmute::Types { - dst: trait_ref.args.type_at(0), - src: trait_ref.args.type_at(1), + dst: trait_pred.trait_ref.args.type_at(0), + src: trait_pred.trait_ref.args.type_at(1), }; let ocx = ObligationCtxt::new(self); let Ok(assume) = ocx.structurally_normalize_const( &obligation.cause, obligation.param_env, - trait_ref.args.const_at(2), + trait_pred.trait_ref.args.const_at(2), ) else { self.dcx().span_delayed_bug( span, @@ -2417,8 +2417,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { return GetSafeTransmuteErrorAndReason::Silent; }; - let dst = trait_ref.args.type_at(0); - let src = trait_ref.args.type_at(1); + let dst = trait_pred.trait_ref.args.type_at(0); + let src = trait_pred.trait_ref.args.type_at(1); let err_msg = format!("`{src}` cannot be safely transmuted into `{dst}`"); match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable( @@ -2566,12 +2566,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { trait_predicate.skip_binder().polarity, ) { - self.add_help_message_for_fn_trait( - trait_predicate.to_poly_trait_ref(), - err, - implemented_kind, - params, - ); + self.add_help_message_for_fn_trait(trait_predicate, err, implemented_kind, params); } else if !trait_predicate.has_non_region_infer() && self.predicate_can_apply(obligation.param_env, trait_predicate) { @@ -2606,7 +2601,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let impl_candidates = self.find_similar_impl_candidates(trait_predicate); if !self.report_similar_impl_candidates( &impl_candidates, - trait_predicate.to_poly_trait_ref(), + trait_predicate, body_def_id, err, true, @@ -2623,7 +2618,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self.suggest_convert_to_slice( err, obligation, - trait_predicate.to_poly_trait_ref(), + trait_predicate, impl_candidates.as_slice(), span, ); @@ -2634,7 +2629,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn add_help_message_for_fn_trait( &self, - trait_ref: ty::PolyTraitRef<'tcx>, + trait_pred: ty::PolyTraitPredicate<'tcx>, err: &mut Diag<'_>, implemented_kind: ty::ClosureKind, params: ty::Binder<'tcx, Ty<'tcx>>, @@ -2647,12 +2642,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // to implement. 
         let selected_kind = self
             .tcx
-            .fn_trait_kind_from_def_id(trait_ref.def_id())
+            .fn_trait_kind_from_def_id(trait_pred.def_id())
             .expect("expected to map DefId to ClosureKind");
         if !implemented_kind.extends(selected_kind) {
             err.note(format!(
                 "`{}` implements `{}`, but it must implement `{}`, which is more general",
-                trait_ref.skip_binder().self_ty(),
+                trait_pred.skip_binder().self_ty(),
                 implemented_kind,
                 selected_kind
             ));
@@ -2660,7 +2655,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
 
         // Note any argument mismatches
         let given_ty = params.skip_binder();
-        let expected_ty = trait_ref.skip_binder().args.type_at(1);
+        let expected_ty = trait_pred.skip_binder().trait_ref.args.type_at(1);
         if let ty::Tuple(given) = given_ty.kind()
             && let ty::Tuple(expected) = expected_ty.kind()
         {
diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs
index 2d248d00066e0..2d932e36470e2 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs
@@ -10,7 +10,7 @@ use rustc_hir::{AttrArgs, AttrKind, Attribute};
 use rustc_macros::LintDiagnostic;
 use rustc_middle::bug;
 use rustc_middle::ty::print::PrintTraitRefExt as _;
-use rustc_middle::ty::{self, GenericArgsRef, GenericParamDefKind, ToPolyTraitRef, TyCtxt};
+use rustc_middle::ty::{self, GenericArgsRef, GenericParamDefKind, TyCtxt};
 use rustc_parse_format::{ParseMode, Parser, Piece, Position};
 use rustc_session::lint::builtin::UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES;
 use rustc_span::{Span, Symbol, kw, sym};
@@ -42,18 +42,18 @@ static ALLOWED_FORMAT_SYMBOLS: &[Symbol] = &[
 impl<'tcx> TypeErrCtxt<'_, 'tcx> {
     fn impl_similar_to(
         &self,
-        trait_ref: ty::PolyTraitRef<'tcx>,
+        trait_pred: ty::PolyTraitPredicate<'tcx>,
         obligation: &PredicateObligation<'tcx>,
     ) -> Option<(DefId, GenericArgsRef<'tcx>)> {
         let tcx = self.tcx;
         let param_env = obligation.param_env;
-        self.enter_forall(trait_ref, |trait_ref| {
-            let trait_self_ty = trait_ref.self_ty();
+        self.enter_forall(trait_pred, |trait_pred| {
+            let trait_self_ty = trait_pred.self_ty();
 
             let mut self_match_impls = vec![];
             let mut fuzzy_match_impls = vec![];
 
-            self.tcx.for_each_relevant_impl(trait_ref.def_id, trait_self_ty, |def_id| {
+            self.tcx.for_each_relevant_impl(trait_pred.def_id(), trait_self_ty, |def_id| {
                 let impl_args = self.fresh_args_for_item(obligation.cause.span, def_id);
                 let impl_trait_ref =
                     tcx.impl_trait_ref(def_id).unwrap().instantiate(tcx, impl_args);
@@ -64,7 +64,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     self_match_impls.push((def_id, impl_args));
 
                     if iter::zip(
-                        trait_ref.args.types().skip(1),
+                        trait_pred.trait_ref.args.types().skip(1),
                         impl_trait_ref.args.types().skip(1),
                     )
                     .all(|(u, v)| self.fuzzy_match_tys(u, v, false).is_some())
@@ -117,7 +117,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
         }
 
         let (def_id, args) = self
-            .impl_similar_to(trait_pred.to_poly_trait_ref(), obligation)
+            .impl_similar_to(trait_pred, obligation)
             .unwrap_or_else(|| (trait_pred.def_id(), trait_pred.skip_binder().trait_ref.args));
         let trait_pred = trait_pred.skip_binder();
 
diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs
index 9d85ca1dd4dd7..c2e73b732d314 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs
@@ -32,9 +32,9 @@ use rustc_middle::ty::print::{
     with_forced_trimmed_paths, with_no_trimmed_paths,
 };
 use rustc_middle::ty::{
-    self, AdtKind, GenericArgs, InferTy, IsSuggestable, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable,
-    TypeFolder, TypeSuperFoldable, TypeVisitableExt, TypeckResults, Upcast,
-    suggest_arbitrary_trait_bound, suggest_constraining_type_param,
+    self, AdtKind, GenericArgs, InferTy, IsSuggestable, Ty, TyCtxt, TypeFoldable, TypeFolder,
+    TypeSuperFoldable, TypeVisitableExt, TypeckResults, Upcast, suggest_arbitrary_trait_bound,
+    suggest_constraining_type_param,
 };
 use rustc_middle::{bug, span_bug};
 use rustc_span::def_id::LocalDefId;
@@ -218,15 +218,15 @@ pub fn suggest_restriction<'tcx, G: EmissionGuarantee>(
         (_, None) => predicate_constraint(hir_generics, trait_pred.upcast(tcx)),
         (None, Some((ident, []))) => (
             ident.span.shrink_to_hi(),
-            format!(": {}", trait_pred.to_poly_trait_ref().print_trait_sugared()),
+            format!(": {}", trait_pred.print_modifiers_and_trait_path()),
         ),
         (_, Some((_, [.., bounds]))) => (
             bounds.span().shrink_to_hi(),
-            format!(" + {}", trait_pred.to_poly_trait_ref().print_trait_sugared()),
+            format!(" + {}", trait_pred.print_modifiers_and_trait_path()),
         ),
         (Some(_), Some((_, []))) => (
             hir_generics.span.shrink_to_hi(),
-            format!(": {}", trait_pred.to_poly_trait_ref().print_trait_sugared()),
+            format!(": {}", trait_pred.print_modifiers_and_trait_path()),
         ),
     };
 
@@ -3729,7 +3729,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         &self,
         obligation: &PredicateObligation<'tcx>,
         err: &mut Diag<'_>,
-        trait_ref: ty::PolyTraitRef<'tcx>,
+        trait_pred: ty::PolyTraitPredicate<'tcx>,
     ) {
         let rhs_span = match obligation.cause.code() {
             ObligationCauseCode::BinOp { rhs_span: Some(span), rhs_is_lit, .. } if *rhs_is_lit => {
@@ -3737,8 +3737,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             }
             _ => return,
         };
-        if let ty::Float(_) = trait_ref.skip_binder().self_ty().kind()
-            && let ty::Infer(InferTy::IntVar(_)) = trait_ref.skip_binder().args.type_at(1).kind()
+        if let ty::Float(_) = trait_pred.skip_binder().self_ty().kind()
+            && let ty::Infer(InferTy::IntVar(_)) =
+                trait_pred.skip_binder().trait_ref.args.type_at(1).kind()
         {
             err.span_suggestion_verbose(
                 rhs_span.shrink_to_hi(),
@@ -4448,7 +4449,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         &self,
         err: &mut Diag<'_>,
         obligation: &PredicateObligation<'tcx>,
-        trait_ref: ty::PolyTraitRef<'tcx>,
+        trait_pred: ty::PolyTraitPredicate<'tcx>,
         candidate_impls: &[ImplCandidate<'tcx>],
         span: Span,
     ) {
@@ -4464,7 +4465,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         // 1. `[T; _]` (array of T)
         // 2. `&[T; _]` (reference to array of T)
        // 3. `&mut [T; _]` (mutable reference to array of T)
-        let (element_ty, mut mutability) = match *trait_ref.skip_binder().self_ty().kind() {
+        let (element_ty, mut mutability) = match *trait_pred.skip_binder().self_ty().kind() {
             ty::Array(element_ty, _) => (element_ty, None),
 
             ty::Ref(_, pointee_ty, mutability) => match *pointee_ty.kind() {
@@ -4620,14 +4621,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
     pub(super) fn suggest_desugaring_async_fn_in_trait(
         &self,
         err: &mut Diag<'_>,
-        trait_ref: ty::PolyTraitRef<'tcx>,
+        trait_pred: ty::PolyTraitPredicate<'tcx>,
     ) {
         // Don't suggest if RTN is active -- we should prefer a where-clause bound instead.
         if self.tcx.features().return_type_notation() {
             return;
         }
 
-        let trait_def_id = trait_ref.def_id();
+        let trait_def_id = trait_pred.def_id();
 
         // Only suggest specifying auto traits
         if !self.tcx.trait_is_auto(trait_def_id) {
@@ -4635,7 +4636,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         }
 
         // Look for an RPITIT
-        let ty::Alias(ty::Projection, alias_ty) = trait_ref.self_ty().skip_binder().kind() else {
+        let ty::Alias(ty::Projection, alias_ty) = trait_pred.self_ty().skip_binder().kind() else {
             return;
         };
         let Some(ty::ImplTraitInTraitData::Trait { fn_def_id, opaque_def_id }) =
diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
index 968dc631e50e5..b370f802052e7 100644
--- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
@@ -16,7 +16,7 @@ use rustc_infer::traits::{
     Obligation, ObligationCause, PolyTraitObligation, PredicateObligations, SelectionError,
 };
 use rustc_middle::ty::fast_reject::DeepRejectCtxt;
-use rustc_middle::ty::{self, ToPolyTraitRef, Ty, TypeVisitableExt, TypingMode};
+use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode};
 use rustc_middle::{bug, span_bug};
 use rustc_type_ir::Interner;
 use tracing::{debug, instrument, trace};
@@ -186,10 +186,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             }
 
             selcx.infcx.probe(|_| {
+                // We checked the polarity already
                 match selcx.match_normalize_trait_ref(
                     obligation,
                     placeholder_trait_predicate.trait_ref,
-                    bound.to_poly_trait_ref(),
+                    bound.map_bound(|pred| pred.trait_ref),
                 ) {
                     Ok(None) => {
                         candidates.vec.push(ProjectionCandidate(idx));
diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
index 0ccb0fc0615c0..729ae3f2c2a26 100644
--- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
@@ -16,7 +16,7 @@ use rustc_hir::lang_items::LangItem;
 use rustc_infer::infer::{DefineOpaqueTypes, HigherRankedType, InferOk};
 use rustc_infer::traits::ObligationCauseCode;
 use rustc_middle::traits::{BuiltinImplSource, SignatureMismatchData};
-use rustc_middle::ty::{self, GenericArgsRef, ToPolyTraitRef, Ty, TyCtxt, Upcast};
+use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, Upcast};
 use rustc_middle::{bug, span_bug};
 use rustc_span::def_id::DefId;
 use rustc_type_ir::elaborate;
@@ -458,8 +458,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         ensure_sufficient_stack(|| {
             let cause = obligation.derived_cause(ObligationCauseCode::BuiltinDerived);
 
-            let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
-            let trait_ref = self.infcx.enter_forall_and_leak_universe(poly_trait_ref);
+            assert_eq!(obligation.predicate.polarity(), ty::PredicatePolarity::Positive);
+            let trait_ref =
+                self.infcx.enter_forall_and_leak_universe(obligation.predicate).trait_ref;
             let trait_obligations = self.impl_or_trait_obligations(
                 &cause,
                 obligation.recursion_depth + 1,