//! Implementation of rustbuild, the Rust build system.
//!
//! This module, and its descendants, are the implementation of the Rust build
//! system. Most of this build system is backed by Cargo, but the outer layer
//! here orchestrates calling Cargo, sequencing Cargo builds, building
//! artifacts like LLVM, and so on. The goals of rustbuild are:
//!
//! * To be an easily understandable, easily extensible, and maintainable build
//!   system.
//! * To leverage standard tools in the Rust ecosystem to build the compiler,
//!   aka crates.io and Cargo.
//! * To provide a standard interface to build across all platforms, including MSVC.
//!
//! ## Further information
//!
//! More documentation can be found in each respective module below, and you can
//! also check out the `src/bootstrap/README.md` file for more information.
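//!
//! ## Entry point (informal sketch)
//!
//! For orientation only: the bootstrap binary (see `src/bootstrap/bin/main.rs`)
//! drives this module roughly as follows; the exact argument handling lives in
//! the `config`/`flags` modules and may differ in detail.
//!
//! ```ignore (illustrative sketch, not a doctest)
//! let config = Config::parse(&args); // parse config.toml + CLI flags
//! Build::new(config).build();        // plan and execute the requested steps
//! ```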

use std::cell::{Cell, RefCell};
use std::collections::{HashMap, HashSet};
use std::env;
use std::fmt::Display;
use std::fs::{self, File};
use std::io;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::str;

use build_helper::ci::{gha, CiEnv};
use build_helper::detail_exit_macro;
use channel::GitInfo;
use config::{DryRun, Target};
use filetime::FileTime;
use once_cell::sync::OnceCell;

use crate::builder::Kind;
use crate::config::{LlvmLibunwind, TargetSelection};
use crate::util::{
    exe, libdir, mtime, output, run, run_suppressed, symlink_dir, try_run_suppressed,
};

mod bolt;
mod builder;
mod cache;
mod cc_detect;
mod channel;
mod check;
mod clean;
mod compile;
mod config;
mod dist;
mod doc;
mod download;
mod flags;
mod format;
mod install;
mod llvm;
mod metadata;
mod render_tests;
mod run;
mod sanity;
mod setup;
mod suggest;
mod synthetic_targets;
mod tarball;
mod test;
mod tool;
mod toolstate;
pub mod util;

#[cfg(feature = "build-metrics")]
mod metrics;

#[cfg(windows)]
mod job;

#[cfg(all(unix, not(target_os = "haiku")))]
mod job {
    pub unsafe fn setup(build: &mut crate::Build) {
        if build.config.low_priority {
            libc::setpriority(libc::PRIO_PGRP as _, 0, 10);
        }
    }
}

#[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))]
mod job {
    pub unsafe fn setup(_build: &mut crate::Build) {}
}

pub use crate::builder::PathSet;
use crate::cache::{Interned, INTERNER};
pub use crate::config::Config;
pub use crate::flags::Subcommand;
use termcolor::{ColorChoice, StandardStream, WriteColor};

const LLVM_TOOLS: &[&str] = &[
    "llvm-cov", // used to generate coverage report
    "llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility
    "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
    "llvm-objdump", // used to disassemble programs
    "llvm-profdata", // used to inspect and merge files generated by profiles
    "llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide
    "llvm-size", // used to print the size of the linker sections of a program
    "llvm-strip", // used to discard symbols from binary files to reduce their size
    "llvm-ar", // used for creating and modifying archive files
    "llvm-as", // used to convert LLVM assembly to LLVM bitcode
    "llvm-dis", // used to disassemble LLVM bitcode
    "llc", // used to compile LLVM bitcode
    "opt", // used to optimize LLVM bitcode
];

/// LLD file names for all flavors.
const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];

pub const VERSION: usize = 2;

/// Extra `--check-cfg` entries to add when building.
///
/// Each entry is `(mode restriction, config name, config values (if any))`.
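///
/// (These entries are consumed when bootstrap assembles `cargo` invocations --
/// see `builder.rs` -- and are forwarded to rustc's `--check-cfg` machinery;
/// the exact flag syntax is an implementation detail of that code.)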
const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)] = &[
    (None, "bootstrap", None),
    (Some(Mode::Rustc), "parallel_compiler", None),
    (Some(Mode::ToolRustc), "parallel_compiler", None),
    (Some(Mode::Codegen), "parallel_compiler", None),
    (Some(Mode::Std), "stdarch_intel_sde", None),
    (Some(Mode::Std), "no_fp_fmt_parse", None),
    (Some(Mode::Std), "no_global_oom_handling", None),
    (Some(Mode::Std), "no_rc", None),
    (Some(Mode::Std), "no_sync", None),
    (Some(Mode::Std), "freebsd12", None),
    (Some(Mode::Std), "freebsd13", None),
    (Some(Mode::Std), "backtrace_in_libstd", None),
    /* Extra values not defined in the built-in targets yet, but used in std */
    (Some(Mode::Std), "target_env", Some(&["libnx"])),
    // (Some(Mode::Std), "target_os", Some(&[])),
    (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa"])),
    /* Extra names used by dependencies */
    // FIXME: Used by serde_json, but we should not be triggering on external dependencies.
    (Some(Mode::Rustc), "no_btreemap_remove_entry", None),
    (Some(Mode::ToolRustc), "no_btreemap_remove_entry", None),
    // FIXME: Used by crossbeam-utils, but we should not be triggering on external dependencies.
    (Some(Mode::Rustc), "crossbeam_loom", None),
    (Some(Mode::ToolRustc), "crossbeam_loom", None),
    // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies.
    (Some(Mode::Rustc), "span_locations", None),
    (Some(Mode::ToolRustc), "span_locations", None),
    // FIXME: Used by rustix, but we should not be triggering on external dependencies.
    (Some(Mode::Rustc), "rustix_use_libc", None),
    (Some(Mode::ToolRustc), "rustix_use_libc", None),
    // FIXME: Used by filetime, but we should not be triggering on external dependencies.
    (Some(Mode::Rustc), "emulate_second_only_system", None),
    (Some(Mode::ToolRustc), "emulate_second_only_system", None),
    // Needed to avoid the need to copy windows.lib into the sysroot.
    (Some(Mode::Rustc), "windows_raw_dylib", None),
    (Some(Mode::ToolRustc), "windows_raw_dylib", None),
];

/// A structure representing a Rust compiler.
///
/// Each compiler has a `stage` that it is associated with and a `host` that
/// corresponds to the platform the compiler runs on. This structure is used as
/// a parameter to many methods below.
#[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)]
pub struct Compiler {
    stage: u32,
    host: TargetSelection,
}

#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum DocTests {
    /// Run normal tests and doc tests (default).
    Yes,
    /// Do not run any doc tests.
    No,
    /// Only run doc tests.
    Only,
}

pub enum GitRepo {
    Rustc,
    Llvm,
}

/// Global configuration for the build system.
///
/// This structure transitively contains all configuration for the build system.
/// All filesystem-encoded configuration is in `config`, all flags are in
/// `flags`, and then parsed or probed information is listed in the keys below.
///
/// This structure is a parameter of almost all methods in the build system,
/// although most functions are implemented as free functions rather than
/// methods specifically on this structure itself (to make it easier to
/// organize).
#[cfg_attr(not(feature = "build-metrics"), derive(Clone))]
pub struct Build {
    /// User-specified configuration from `config.toml`.
    config: Config,

    // Version information
    version: String,

    // Properties derived from the above configuration
    src: PathBuf,
    out: PathBuf,
    bootstrap_out: PathBuf,
    cargo_info: channel::GitInfo,
    rust_analyzer_info: channel::GitInfo,
    clippy_info: channel::GitInfo,
    miri_info: channel::GitInfo,
    rustfmt_info: channel::GitInfo,
    in_tree_llvm_info: channel::GitInfo,
    local_rebuild: bool,
    fail_fast: bool,
    doc_tests: DocTests,
    verbosity: usize,

    // Targets for which to build
    build: TargetSelection,
    hosts: Vec<TargetSelection>,
    targets: Vec<TargetSelection>,

    initial_rustc: PathBuf,
    initial_cargo: PathBuf,
    initial_lld: PathBuf,
    initial_libdir: PathBuf,
    initial_sysroot: PathBuf,

    // Runtime state filled in later on
    // C/C++ compilers and archiver for all targets
    cc: RefCell<HashMap<TargetSelection, cc::Tool>>,
    cxx: RefCell<HashMap<TargetSelection, cc::Tool>>,
    ar: RefCell<HashMap<TargetSelection, PathBuf>>,
    ranlib: RefCell<HashMap<TargetSelection, PathBuf>>,

    // Miscellaneous
    // allow bidirectional lookups: both name -> path and path -> name
    crates: HashMap<Interned<String>, Crate>,
    crate_paths: HashMap<PathBuf, Interned<String>>,
    is_sudo: bool,
    ci_env: CiEnv,
    delayed_failures: RefCell<Vec<String>>,
    prerelease_version: Cell<Option<u32>>,

    #[cfg(feature = "build-metrics")]
    metrics: metrics::BuildMetrics,
}

#[derive(Debug, Clone)]
struct Crate {
    name: Interned<String>,
    deps: HashSet<Interned<String>>,
    path: PathBuf,
    has_lib: bool,
}

impl Crate {
    fn local_path(&self, build: &Build) -> PathBuf {
        self.path.strip_prefix(&build.config.src).unwrap().into()
    }
}

/// When building Rust various objects are handled differently.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum DependencyType {
    /// Libraries originating from proc-macros.
    Host,
    /// Typical Rust libraries.
    Target,
    /// Non Rust libraries and objects shipped to ease usage of certain targets.
    TargetSelfContained,
}

/// The various "modes" of invoking Cargo.
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mode generating output in a different directory.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Mode {
    /// Build the standard library, placing output in the "stageN-std" directory.
    Std,

    /// Build librustc and compiler libraries, placing output in the "stageN-rustc" directory.
    Rustc,

    /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory.
    Codegen,

    /// Build a tool, placing output in the "stage0-bootstrap-tools"
    /// directory. This is for miscellaneous sets of tools that are built
    /// using the bootstrap stage0 compiler in its entirety (target libraries
    /// and all). Typically these tools compile with stable Rust.
    ToolBootstrap,

    /// Build a tool which uses the locally built std, placing output in the
    /// "stageN-tools" directory. Its usage is quite rare, mainly used by
    /// compiletest which needs libtest.
    ToolStd,

    /// Build a tool which uses the locally built rustc and the target std,
    /// placing the output in the "stageN-tools" directory. This is used for
    /// anything that needs a fully functional rustc, such as rustdoc, clippy,
    /// cargo, rls, rustfmt, miri, etc.
    ToolRustc,
}

impl Mode {
    pub fn is_tool(&self) -> bool {
        matches!(self, Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd)
    }

    pub fn must_support_dlopen(&self) -> bool {
        matches!(self, Mode::Std | Mode::Codegen)
    }
}

pub enum CLang {
    C,
    Cxx,
}

macro_rules! forward {
    ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => {
        impl Build {
            $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? {
                self.config.$fn( $($param),* )
            } )+
        }
    }
}
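
// For orientation: each entry in the `forward!` invocation below expands (per the
// macro above) to a thin inherent method on `Build` that simply delegates to the
// method of the same name on `self.config`, e.g. roughly
// `fn is_verbose(&self) -> bool { self.config.is_verbose() }`.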

forward! {
    verbose(msg: &str),
    is_verbose() -> bool,
    create(path: &Path, s: &str),
    remove(f: &Path),
    tempdir() -> PathBuf,
    try_run(cmd: &mut Command) -> Result<(), ()>,
    llvm_link_shared() -> bool,
    download_rustc() -> bool,
    initial_rustfmt() -> Option<PathBuf>,
}

impl Build {
    /// Creates a new set of build configuration from the `flags` on the command
    /// line and the filesystem `config`.
    ///
    /// By default all build output will be placed in the current directory.
    pub fn new(mut config: Config) -> Build {
        let src = config.src.clone();
        let out = config.out.clone();

        #[cfg(unix)]
        // keep this consistent with the equivalent check in x.py:
        // https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/bootstrap.py#L796-L797
        let is_sudo = match env::var_os("SUDO_USER") {
            Some(_sudo_user) => {
                let uid = unsafe { libc::getuid() };
                uid == 0
            }
            None => false,
        };
        #[cfg(not(unix))]
        let is_sudo = false;

        let omit_git_hash = config.omit_git_hash;
        let rust_info = channel::GitInfo::new(omit_git_hash, &src);
        let cargo_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/cargo"));
        let rust_analyzer_info =
            channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rust-analyzer"));
        let clippy_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/clippy"));
        let miri_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/miri"));
        let rustfmt_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rustfmt"));

        // we always try to use git for LLVM builds
        let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));

        let initial_target_libdir_str = if config.dry_run() {
            "/dummy/lib/path/to/lib/".to_string()
        } else {
            output(
                Command::new(&config.initial_rustc)
                    .arg("--target")
                    .arg(config.build.rustc_target_arg())
                    .arg("--print")
                    .arg("target-libdir"),
            )
        };
        let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap();
        let initial_lld = initial_target_dir.join("bin").join("rust-lld");

        let initial_sysroot = if config.dry_run() {
            "/dummy".to_string()
        } else {
            output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot"))
        }
        .trim()
        .to_string();

        let initial_libdir = initial_target_dir
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .strip_prefix(&initial_sysroot)
            .unwrap()
            .to_path_buf();

        let version = std::fs::read_to_string(src.join("src").join("version"))
            .expect("failed to read src/version");
        let version = version.trim();

        let bootstrap_out = std::env::current_exe()
            .expect("could not determine path to running process")
            .parent()
            .unwrap()
            .to_path_buf();
        if !bootstrap_out.join(exe("rustc", config.build)).exists() && !cfg!(test) {
            // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
            panic!(
                "`rustc` not found in {}, run `cargo build --bins` before `cargo run`",
                bootstrap_out.display()
            )
        }

        if rust_info.is_from_tarball() && config.description.is_none() {
            config.description = Some("built from a source tarball".to_owned());
        }

        let mut build = Build {
            initial_rustc: config.initial_rustc.clone(),
            initial_cargo: config.initial_cargo.clone(),
            initial_lld,
            initial_libdir,
            initial_sysroot: initial_sysroot.into(),
            local_rebuild: config.local_rebuild,
            fail_fast: config.cmd.fail_fast(),
            doc_tests: config.cmd.doc_tests(),
            verbosity: config.verbose,

            build: config.build,
            hosts: config.hosts.clone(),
            targets: config.targets.clone(),

            config,
            version: version.to_string(),
            src,
            out,
            bootstrap_out,

            cargo_info,
            rust_analyzer_info,
            clippy_info,
            miri_info,
            rustfmt_info,
            in_tree_llvm_info,
            cc: RefCell::new(HashMap::new()),
            cxx: RefCell::new(HashMap::new()),
            ar: RefCell::new(HashMap::new()),
            ranlib: RefCell::new(HashMap::new()),
            crates: HashMap::new(),
            crate_paths: HashMap::new(),
            is_sudo,
            ci_env: CiEnv::current(),
            delayed_failures: RefCell::new(Vec::new()),
            prerelease_version: Cell::new(None),

            #[cfg(feature = "build-metrics")]
            metrics: metrics::BuildMetrics::init(),
        };

        // If local-rust is the same major.minor as the current version, then force a
        // local-rebuild
        let local_version_verbose =
            output(Command::new(&build.initial_rustc).arg("--version").arg("--verbose"));
        let local_release = local_version_verbose
            .lines()
            .filter_map(|x| x.strip_prefix("release:"))
            .next()
            .unwrap()
            .trim();
        if local_release.split('.').take(2).eq(version.split('.').take(2)) {
            build.verbose(&format!("auto-detected local-rebuild {}", local_release));
            build.local_rebuild = true;
        }

        build.verbose("finding compilers");
        cc_detect::find(&build);
        // When running `setup`, the profile is about to change, so any requirements we have now may
        // be different on the next invocation. Don't check for them until the next time x.py is
        // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
        //
        // Similarly, for `setup` we don't actually need submodules or cargo metadata.
        if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
            build.verbose("running sanity check");
            sanity::check(&mut build);

            // Make sure we update these before gathering metadata so we don't get an error about missing
            // Cargo.toml files.
            let rust_submodules = ["src/tools/cargo", "library/backtrace", "library/stdarch"];
            for s in rust_submodules {
                build.update_submodule(Path::new(s));
            }
            // Now, update all existing submodules.
            build.update_existing_submodules();

            build.verbose("learning about cargo");
            metadata::build(&mut build);
        }

        // Make a symbolic link so we can use a consistent directory in the documentation.
        let build_triple = build.out.join(&build.build.triple);
        t!(fs::create_dir_all(&build_triple));
        let host = build.out.join("host");
        if host.is_symlink() {
            // Left over from a previous build; overwrite it.
            // This matters if `build.build` has changed between invocations.
            #[cfg(windows)]
            t!(fs::remove_dir(&host));
            #[cfg(not(windows))]
            t!(fs::remove_file(&host));
        }
        t!(
            symlink_dir(&build.config, &build_triple, &host),
            format!("symlink_dir({} => {}) failed", host.display(), build_triple.display())
        );

        build
    }

    // modified from `check_submodule` and `update_submodule` in bootstrap.py
    /// Given a path to the directory of a submodule, update it.
    ///
    /// `relative_path` should be relative to the root of the git repository, not an absolute path.
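    ///
    /// (For example, `Build::new` calls something like
    /// `build.update_submodule(Path::new("src/tools/cargo"))` for a small
    /// hard-coded list of submodules; the example is illustrative.)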
    pub(crate) fn update_submodule(&self, relative_path: &Path) {
        fn dir_is_empty(dir: &Path) -> bool {
            t!(std::fs::read_dir(dir)).next().is_none()
        }

        if !self.config.submodules(&self.rust_info()) {
            return;
        }

        let absolute_path = self.config.src.join(relative_path);

        // NOTE: The check for the empty directory is here because when running x.py the first time,
        // the submodule won't be checked out. Check it out now so we can build it.
        if !channel::GitInfo::new(false, &absolute_path).is_managed_git_subrepository()
            && !dir_is_empty(&absolute_path)
        {
            return;
        }

        // check_submodule
        let checked_out_hash =
            output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path));
        // update_submodules
        let recorded = output(
            Command::new("git")
                .args(&["ls-tree", "HEAD"])
                .arg(relative_path)
                .current_dir(&self.config.src),
        );
        let actual_hash = recorded
            .split_whitespace()
            .nth(2)
            .unwrap_or_else(|| panic!("unexpected output `{}`", recorded));

        // update_submodule
        if actual_hash == checked_out_hash.trim_end() {
            // already checked out
            return;
        }

        println!("Updating submodule {}", relative_path.display());
        self.run(
            Command::new("git")
                .args(&["submodule", "-q", "sync"])
                .arg(relative_path)
                .current_dir(&self.config.src),
        );

        // Try passing `--progress` to start, then run git again without if that fails.
        let update = |progress: bool| {
            // Git is buggy and will try to fetch submodules from the tracking branch for *this* repository,
            // even though that has no relation to the upstream for the submodule.
            let current_branch = {
                let output = self
                    .config
                    .git()
                    .args(["symbolic-ref", "--short", "HEAD"])
                    .stderr(Stdio::inherit())
                    .output();
                let output = t!(output);
                if output.status.success() {
                    Some(String::from_utf8(output.stdout).unwrap().trim().to_owned())
                } else {
                    None
                }
            };

            let mut git = self.config.git();
            if let Some(branch) = current_branch {
                git.arg("-c").arg(format!("branch.{branch}.remote=origin"));
            }
            git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
            if progress {
                git.arg("--progress");
            }
            git.arg(relative_path);
            git
        };
        // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails.
        if !update(true).status().map_or(false, |status| status.success()) {
            self.run(&mut update(false));
        }

        // Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error).
        let has_local_modifications = self
            .try_run(
                Command::new("git")
                    .args(&["diff-index", "--quiet", "HEAD"])
                    .current_dir(&absolute_path),
            )
            .is_err();
        if has_local_modifications {
            self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path));
        }

        self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path));
        self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path));

        if has_local_modifications {
            self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path));
        }
    }

    /// If any submodule has been initialized already, sync it unconditionally.
    /// This avoids contributors checking in a submodule change by accident.
    pub fn update_existing_submodules(&self) {
        // Avoid running git when there isn't a git checkout.
        if !self.config.submodules(&self.rust_info()) {
            return;
        }
        let output = output(
            self.config
                .git()
                .args(&["config", "--file"])
                .arg(&self.config.src.join(".gitmodules"))
                .args(&["--get-regexp", "path"]),
        );
        for line in output.lines() {
            // Look for `submodule.$name.path = $path`
            // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
            let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
            // Don't update the submodule unless it's already been cloned.
            if channel::GitInfo::new(false, submodule).is_managed_git_subrepository() {
                self.update_submodule(submodule);
            }
        }
    }

    /// Executes the entire build, as configured by the flags and configuration.
    pub fn build(&mut self) {
        unsafe {
            job::setup(self);
        }

        // Download rustfmt early so that it can be used in rust-analyzer configs.
        let _ = &builder::Builder::new(&self).initial_rustfmt();

        // hardcoded subcommands
        match &self.config.cmd {
            Subcommand::Format { check } => {
                return format::format(&builder::Builder::new(&self), *check, &self.config.paths);
            }
            Subcommand::Suggest { run } => {
                return suggest::suggest(&builder::Builder::new(&self), *run);
            }
            _ => (),
        }

        {
            let builder = builder::Builder::new(&self);
            if let Some(path) = builder.paths.get(0) {
                if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") {
                    return;
                }
            }
        }

        if !self.config.dry_run() {
            {
                self.config.dry_run = DryRun::SelfCheck;
                let builder = builder::Builder::new(&self);
                builder.execute_cli();
            }
            self.config.dry_run = DryRun::Disabled;
            let builder = builder::Builder::new(&self);
            builder.execute_cli();
        } else {
            let builder = builder::Builder::new(&self);
            builder.execute_cli();
        }

        // Check for postponed failures from `test --no-fail-fast`.
        let failures = self.delayed_failures.borrow();
        if failures.len() > 0 {
            eprintln!("\n{} command(s) did not execute successfully:\n", failures.len());
            for failure in failures.iter() {
                eprintln!("  - {}\n", failure);
            }
            detail_exit_macro!(1);
        }

        #[cfg(feature = "build-metrics")]
        self.metrics.persist(self);
    }

    /// Clear out `dir` if `input` is newer.
    ///
    /// After this executes, it will also ensure that `dir` exists.
    fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool {
        let stamp = dir.join(".stamp");
        let mut cleared = false;
        if mtime(&stamp) < mtime(input) {
            self.verbose(&format!("Dirty - {}", dir.display()));
            let _ = fs::remove_dir_all(dir);
            cleared = true;
        } else if stamp.exists() {
            return cleared;
        }
        t!(fs::create_dir_all(dir));
        t!(File::create(stamp));
        cleared
    }

    fn rust_info(&self) -> &GitInfo {
        &self.config.rust_info
    }

    /// Gets the space-separated set of activated features for the standard
    /// library.
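    ///
    /// (With a default configuration this typically looks something like
    /// `" panic-unwind backtrace"`; the exact set depends on `config.toml`.)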
    fn std_features(&self, target: TargetSelection) -> String {
        let mut features = " panic-unwind".to_string();

        match self.config.llvm_libunwind(target) {
            LlvmLibunwind::InTree => features.push_str(" llvm-libunwind"),
            LlvmLibunwind::System => features.push_str(" system-llvm-libunwind"),
            LlvmLibunwind::No => {}
        }
        if self.config.backtrace {
            features.push_str(" backtrace");
        }
        if self.config.profiler_enabled(target) {
            features.push_str(" profiler");
        }
        features
    }

    /// Gets the space-separated set of activated features for the compiler.
    fn rustc_features(&self, kind: Kind) -> String {
        let mut features = vec![];
        if self.config.jemalloc {
            features.push("jemalloc");
        }
        if self.config.llvm_enabled() || kind == Kind::Check {
            features.push("llvm");
        }
        // keep in sync with `bootstrap/compile.rs:rustc_cargo_env`
        if self.config.rustc_parallel {
            features.push("rustc_use_parallel_compiler");
        }

        // If debug logging is on, then we want the default for tracing:
        // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26
        // which is everything (including debug/trace/etc.).
        // If it's unset and debug_assertions is on, then debug_logging will also be on,
        // and tracing *ignores* this feature when debug_assertions is on anyway.
        if !self.config.rust_debug_logging {
            features.push("max_level_info");
        }

        features.join(" ")
    }

    /// Component directory that Cargo will produce output into (e.g.
    /// release/debug)
    fn cargo_dir(&self) -> &'static str {
        if self.config.rust_optimize.is_release() { "release" } else { "debug" }
    }

    fn tools_dir(&self, compiler: Compiler) -> PathBuf {
        let out = self
            .out
            .join(&*compiler.host.triple)
            .join(format!("stage{}-tools-bin", compiler.stage));
        t!(fs::create_dir_all(&out));
        out
    }

    /// Returns the root directory for all output generated in a particular
    /// stage when running with a particular host compiler.
    ///
    /// The mode indicates what the root directory is for.
    fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf {
        let suffix = match mode {
            Mode::Std => "-std",
            Mode::Rustc => "-rustc",
            Mode::Codegen => "-codegen",
            Mode::ToolBootstrap => "-bootstrap-tools",
            Mode::ToolStd | Mode::ToolRustc => "-tools",
        };
        self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix))
    }

    /// Returns the root output directory for all Cargo output in a given stage,
    /// running a particular compiler, whether or not we're building the
    /// standard library, and targeting the specified architecture.
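    ///
    /// (For example, this might resolve to something like
    /// `build/x86_64-unknown-linux-gnu/stage1-std/x86_64-unknown-linux-gnu/release`;
    /// the exact path is illustrative.)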
|
2020-07-17 10:08:04 -04:00
|
|
|
fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir())
|
2016-07-05 21:58:20 -07:00
|
|
|
}
|
|
|
|
|
|
2023-04-12 21:17:08 -05:00
|
|
|
/// Directory where the extracted `rustc-dev` component is stored.
|
|
|
|
|
fn ci_rustc_dir(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("ci-rustc")
|
|
|
|
|
}
|
|
|
|
|
|
2016-07-05 21:58:20 -07:00
|
|
|
/// Root output directory for LLVM compiled for `target`
|
|
|
|
|
///
|
|
|
|
|
/// Note that if LLVM is configured externally then the directory returned
|
|
|
|
|
/// will likely be empty.
|
2020-07-17 10:08:04 -04:00
|
|
|
fn llvm_out(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("llvm")
|
2016-07-05 21:58:20 -07:00
|
|
|
}
|
|
|
|
|
|
2020-07-17 10:08:04 -04:00
|
|
|
fn lld_out(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("lld")
|
rust: Import LLD for linking wasm objects
This commit imports the LLD project from LLVM to serve as the default linker for
the `wasm32-unknown-unknown` target. The `binaryen` submoule is consequently
removed along with "binaryen linker" support in rustc.
Moving to LLD brings with it a number of benefits for wasm code:
* LLD is itself an actual linker, so there's no need to compile all wasm code
with LTO any more. As a result builds should be *much* speedier as LTO is no
longer forcibly enabled for all builds of the wasm target.
* LLD is quickly becoming an "official solution" for linking wasm code together.
This, I believe at least, is intended to be the main supported linker for
native code and wasm moving forward. Picking up support early on should help
ensure that we can help LLD identify bugs and otherwise prove that it works
great for all our use cases!
* Improvements to the wasm toolchain are currently primarily focused around LLVM
and LLD (from what I can tell at least), so it's in general much better to be
on this bandwagon for bugfixes and new features.
* Historical "hacks" like `wasm-gc` will soon no longer be necessary, LLD
will [natively implement][gc] `--gc-sections` (better than `wasm-gc`!) which
means a postprocessor is no longer needed to show off Rust's "small wasm
binary size".
LLD is added in a pretty standard way to rustc right now. A new rustbuild target
was defined for building LLD, and this is executed when a compiler's sysroot is
being assembled. LLD is compiled against the LLVM that we've got in tree, which
means we're currently on the `release_60` branch, but this may get upgraded in
the near future!
LLD is placed into rustc's sysroot in a `bin` directory. This is similar to
where `gcc.exe` can be found on Windows. This directory is automatically added
to `PATH` whenever rustc executes the linker, allowing us to define a `WasmLd`
linker which implements the interface that `wasm-ld`, LLD's frontend, expects.
Like Emscripten the LLD target is currently only enabled for Tier 1 platforms,
notably OSX/Windows/Linux, and will need to be installed manually for compiling
to wasm on other platforms. LLD is by default turned off in rustbuild, and
requires a `config.toml` option to be enabled to turn it on.
Finally the unstable `#![wasm_import_memory]` attribute was also removed as LLD
has a native option for controlling this.
[gc]: https://reviews.llvm.org/D42511
2017-08-26 18:30:12 -07:00
|
|
|
}
|
|
|
|
|
|
2016-10-21 13:18:09 -07:00
|
|
|
/// Output directory for all documentation for a target
|
2020-07-17 10:08:04 -04:00
|
|
|
fn doc_out(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("doc")
|
2016-10-21 13:18:09 -07:00
|
|
|
}
|
|
|
|
|
|
2022-09-14 13:49:05 +02:00
|
|
|
/// Output directory for all JSON-formatted documentation for a target
|
|
|
|
|
fn json_doc_out(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("json-doc")
|
|
|
|
|
}
|
|
|
|
|
|
2021-02-21 14:21:04 +01:00
|
|
|
fn test_out(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("test")
|
|
|
|
|
}
|
|
|
|
|
|
2018-03-20 02:06:38 +00:00
|
|
|
/// Output directory for all documentation for a target
|
2020-07-17 10:08:04 -04:00
|
|
|
fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf {
|
|
|
|
|
self.out.join(&*target.triple).join("compiler-doc")
|
2018-03-20 02:06:38 +00:00
|
|
|
}
|
|
|
|
|
|
2017-06-12 21:35:47 +02:00
|
|
|
/// Output directory for some generated md crate documentation for a target (temporary)
|
2020-07-17 10:08:04 -04:00
|
|
|
fn md_doc_out(&self, target: TargetSelection) -> Interned<PathBuf> {
|
|
|
|
|
INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc"))
|
2017-06-12 21:35:47 +02:00
|
|
|
}
|
|
|
|
|
|
2019-02-08 14:53:55 +01:00
|
|
|
    /// Returns `true` if no custom `llvm-config` is set for the specified target.
    ///
    /// If no custom `llvm-config` was specified then Rust's llvm will be used.
    fn is_rust_llvm(&self, target: TargetSelection) -> bool {
        match self.config.target_config.get(&target) {
            Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched,
            Some(Target { llvm_config, .. }) => {
                // If the user set llvm-config we assume Rust is not patched,
                // but first check to see if it was configured by llvm-from-ci.
                (self.config.llvm_from_ci && target == self.config.build) || llvm_config.is_none()
            }
            None => true,
        }
    }

    /// Returns the path to `FileCheck` binary for the specified target
    fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf {
        let target_config = self.config.target_config.get(&target);
        if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) {
            s.to_path_buf()
        } else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
            let llvm_bindir = output(Command::new(s).arg("--bindir"));
            let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target));
            if filecheck.exists() {
                filecheck
            } else {
                // On Fedora the system LLVM installs FileCheck in the
                // llvm subdirectory of the libdir.
                let llvm_libdir = output(Command::new(s).arg("--libdir"));
                let lib_filecheck =
                    Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target));
                if lib_filecheck.exists() {
                    lib_filecheck
                } else {
                    // Return the most normal file name, even though
                    // it doesn't exist, so that any error message
                    // refers to that.
                    filecheck
                }
            }
        } else {
            let base = self.llvm_out(target).join("build");
            let base = if !self.ninja() && target.contains("msvc") {
                if self.config.llvm_optimize {
                    if self.config.llvm_release_debuginfo {
                        base.join("RelWithDebInfo")
                    } else {
                        base.join("Release")
                    }
                } else {
                    base.join("Debug")
                }
            } else {
                base
            };
            base.join("bin").join(exe("FileCheck", target))
        }
    }

    /// Directory for libraries built from C/C++ code and shared between stages.
    fn native_dir(&self, target: TargetSelection) -> PathBuf {
        self.out.join(&*target.triple).join("native")
    }

    /// Root output directory for rust_test_helpers library compiled for
    /// `target`
    fn test_helpers_out(&self, target: TargetSelection) -> PathBuf {
        self.native_dir(target).join("rust-test-helpers")
    }

    /// Adds the `RUST_TEST_THREADS` env var if necessary
    fn add_rust_test_threads(&self, cmd: &mut Command) {
        if env::var_os("RUST_TEST_THREADS").is_none() {
            cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
        }
    }

    /// Returns the libdir of the snapshot compiler.
    fn rustc_snapshot_libdir(&self) -> PathBuf {
        self.rustc_snapshot_sysroot().join(libdir(self.config.build))
    }

    /// Returns the sysroot of the snapshot compiler.
    fn rustc_snapshot_sysroot(&self) -> &Path {
        static SYSROOT_CACHE: OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
        SYSROOT_CACHE.get_or_init(|| {
            let mut rustc = Command::new(&self.initial_rustc);
            rustc.args(&["--print", "sysroot"]);
            output(&mut rustc).trim().into()
        })
    }

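    // Note (illustrative, path is hypothetical): the sysroot lookup shells out to the
    // stage0 `rustc --print sysroot` once and caches the result for the rest of the
    // build; for a rustup-provided stage0 compiler it might print something like
    // `~/.rustup/toolchains/beta-x86_64-unknown-linux-gnu`, but the exact path is
    // entirely environment-dependent.
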
    /// Runs a command, printing out nice contextual information if it fails.
    fn run(&self, cmd: &mut Command) {
        if self.config.dry_run() {
            return;
        }
        self.verbose(&format!("running: {:?}", cmd));
        run(cmd, self.is_verbose())
    }

    /// Runs a command with its output suppressed, printing out nice contextual
    /// information if it fails.
    fn run_quiet(&self, cmd: &mut Command) {
        if self.config.dry_run() {
            return;
        }
        self.verbose(&format!("running: {:?}", cmd));
        run_suppressed(cmd)
    }

    /// Runs a command with its output suppressed, printing out nice contextual
    /// information if it fails.
    ///
    /// Exits if the command failed to execute at all, otherwise returns its
    /// `status.success()`.
    fn try_run_quiet(&self, cmd: &mut Command) -> bool {
        if self.config.dry_run() {
            return true;
        }
        self.verbose(&format!("running: {:?}", cmd));
        try_run_suppressed(cmd)
    }

    pub fn is_verbose_than(&self, level: usize) -> bool {
        self.verbosity > level
    }

    /// Prints a message if this build is configured in more verbose mode than `level`.
    fn verbose_than(&self, level: usize, msg: &str) {
        if self.is_verbose_than(level) {
            println!("{}", msg);
        }
    }

    fn info(&self, msg: &str) {
        match self.config.dry_run {
            DryRun::SelfCheck => return,
            DryRun::Disabled | DryRun::UserSelected => {
                println!("{}", msg);
            }
        }
    }

    fn msg_check(
        &self,
        what: impl Display,
        target: impl Into<Option<TargetSelection>>,
    ) -> Option<gha::Group> {
        self.msg(Kind::Check, self.config.stage, what, self.config.build, target)
    }

    fn msg_doc(
        &self,
        compiler: Compiler,
        what: impl Display,
        target: impl Into<Option<TargetSelection>> + Copy,
    ) -> Option<gha::Group> {
        self.msg(Kind::Doc, compiler.stage, what, compiler.host, target.into())
    }

    fn msg_build(
        &self,
        compiler: Compiler,
        what: impl Display,
        target: impl Into<Option<TargetSelection>>,
    ) -> Option<gha::Group> {
        self.msg(Kind::Build, compiler.stage, what, compiler.host, target)
    }

    /// Return a `Group` guard for a [`Step`] that is built for each `--stage`.
    ///
    /// [`Step`]: crate::builder::Step
    fn msg(
        &self,
        action: impl Into<Kind>,
        stage: u32,
        what: impl Display,
        host: impl Into<Option<TargetSelection>>,
        target: impl Into<Option<TargetSelection>>,
    ) -> Option<gha::Group> {
        let action = action.into().description();
        let msg = |fmt| format!("{action} stage{stage} {what}{fmt}");
        let msg = if let Some(target) = target.into() {
            let host = host.into().unwrap();
            if host == target {
                msg(format_args!(" ({target})"))
            } else {
                msg(format_args!(" ({host} -> {target})"))
            }
        } else {
            msg(format_args!(""))
        };
        self.group(&msg)
    }

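    // Illustrative only (names and triples are hypothetical, not from a real build):
    // `msg(Kind::Build, 1, "library artifacts", host, target)` yields a group titled
    // roughly "Building stage1 library artifacts (x86_64-unknown-linux-gnu)" when
    // host == target, or "... (HOST -> TARGET)" when cross-compiling.
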
    /// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`.
    ///
    /// [`Step`]: crate::builder::Step
    fn msg_unstaged(
        &self,
        action: impl Into<Kind>,
        what: impl Display,
        target: TargetSelection,
    ) -> Option<gha::Group> {
        let action = action.into().description();
        let msg = format!("{action} {what} for {target}");
        self.group(&msg)
    }

    fn msg_sysroot_tool(
        &self,
        action: impl Into<Kind>,
        stage: u32,
        what: impl Display,
        host: TargetSelection,
        target: TargetSelection,
    ) -> Option<gha::Group> {
        let action = action.into().description();
        let msg = |fmt| format!("{action} {what} {fmt}");
        let msg = if host == target {
            msg(format_args!("(stage{stage} -> stage{}, {target})", stage + 1))
        } else {
            msg(format_args!("(stage{stage}:{host} -> stage{}:{target})", stage + 1))
        };
        self.group(&msg)
    }

    fn group(&self, msg: &str) -> Option<gha::Group> {
        match self.config.dry_run {
            DryRun::SelfCheck => None,
            DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)),
        }
    }

    /// Returns the number of parallel jobs that have been configured for this
    /// build.
    fn jobs(&self) -> u32 {
        self.config.jobs.unwrap_or_else(|| {
            std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
        })
    }

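    // For example (numbers hypothetical): with `jobs = 8` configured this returns 8;
    // with no explicit setting it falls back to the number of available hardware
    // threads, or 1 if that count cannot be determined.
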
    fn debuginfo_map_to(&self, which: GitRepo) -> Option<String> {
        if !self.config.rust_remap_debuginfo {
            return None;
        }

        match which {
            GitRepo::Rustc => {
                let sha = self.rust_sha().unwrap_or(&self.version);
                Some(format!("/rustc/{}", sha))
            }
            GitRepo::Llvm => Some(String::from("/rustc/llvm")),
        }
    }

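    // Illustrative only: when debuginfo remapping is enabled, rustc sources map to a
    // path like "/rustc/<commit-sha>" (or "/rustc/<version>" when no sha is known)
    // and LLVM sources map to "/rustc/llvm"; with remapping disabled this is `None`.
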
    /// Returns the path to the C compiler for the target specified.
    fn cc(&self, target: TargetSelection) -> PathBuf {
        if self.config.dry_run() {
            return PathBuf::new();
        }
        self.cc.borrow()[&target].path().into()
    }

    /// Returns a list of flags to pass to the C compiler for the target
    /// specified.
    fn cflags(&self, target: TargetSelection, which: GitRepo, c: CLang) -> Vec<String> {
        if self.config.dry_run() {
            return Vec::new();
        }
        let base = match c {
            CLang::C => self.cc.borrow()[&target].clone(),
            CLang::Cxx => self.cxx.borrow()[&target].clone(),
        };

        // Filter out -O and /O (the optimization flags) that we picked up from
        // cc-rs because the build scripts will determine that for themselves.
        let mut base = base
            .args()
            .iter()
            .map(|s| s.to_string_lossy().into_owned())
            .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
            .collect::<Vec<String>>();

        // If we're compiling on macOS then we add a few unconditional flags
        // indicating that we want libc++ (more filled out than libstdc++) and
        // we want to compile for 10.7. This way we can ensure that
        // LLVM/etc are all properly compiled.
        if target.contains("apple-darwin") {
            base.push("-stdlib=libc++".into());
        }

        // Work around an apparently bad MinGW / GCC optimization,
        // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
        // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
        if &*target.triple == "i686-pc-windows-gnu" {
            base.push("-fno-omit-frame-pointer".into());
        }

        if let Some(map_to) = self.debuginfo_map_to(which) {
            let map = format!("{}={}", self.src.display(), map_to);
            let cc = self.cc(target);
            if cc.ends_with("clang") || cc.ends_with("gcc") {
                base.push(format!("-fdebug-prefix-map={}", map));
            } else if cc.ends_with("clang-cl.exe") {
                base.push("-Xclang".into());
                base.push(format!("-fdebug-prefix-map={}", map));
            }
        }
        base
    }

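    // Illustrative sketch of the result (flags are hypothetical): for an
    // `x86_64-apple-darwin` target this is roughly the cc-rs flag set minus any
    // `-O`/`/O` options, plus "-stdlib=libc++", and, when debuginfo remapping is
    // enabled with a clang/gcc driver, a "-fdebug-prefix-map=<src>=/rustc/<sha>" entry.
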
    /// Returns the path to the `ar` archive utility for the target specified.
    fn ar(&self, target: TargetSelection) -> Option<PathBuf> {
        if self.config.dry_run() {
            return None;
        }
        self.ar.borrow().get(&target).cloned()
    }

    /// Returns the path to the `ranlib` utility for the target specified.
    fn ranlib(&self, target: TargetSelection) -> Option<PathBuf> {
        if self.config.dry_run() {
            return None;
        }
        self.ranlib.borrow().get(&target).cloned()
    }

    /// Returns the path to the C++ compiler for the target specified.
    fn cxx(&self, target: TargetSelection) -> Result<PathBuf, String> {
        if self.config.dry_run() {
            return Ok(PathBuf::new());
        }
        match self.cxx.borrow().get(&target) {
            Some(p) => Ok(p.path().into()),
            None => {
                Err(format!("target `{}` is not configured as a host, only as a target", target))
            }
        }
    }

    /// Returns the path to the linker for the given target if it needs to be overridden.
    fn linker(&self, target: TargetSelection) -> Option<PathBuf> {
        if self.config.dry_run() {
            return Some(PathBuf::new());
        }
        if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.clone())
        {
            Some(linker)
        } else if target.contains("vxworks") {
            // Need to use the C++ compiler as the linker to resolve the exception
            // functions that only exist in the C++ libraries.
            Some(self.cxx.borrow()[&target].path().into())
        } else if target != self.config.build
            && util::use_host_linker(target)
            && !target.contains("msvc")
        {
            Some(self.cc(target))
        } else if self.config.use_lld && !self.is_fuse_ld_lld(target) && self.build == target {
            Some(self.initial_lld.clone())
        } else {
            None
        }
    }

    // LLD is used through `-fuse-ld=lld` rather than directly.
    // Only MSVC targets use LLD directly at the moment.
    fn is_fuse_ld_lld(&self, target: TargetSelection) -> bool {
        self.config.use_lld && !target.contains("msvc")
    }

    fn lld_flags(&self, target: TargetSelection) -> impl Iterator<Item = String> {
        let mut options = [None, None];

        if self.config.use_lld {
            if self.is_fuse_ld_lld(target) {
                options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string());
            }

            let no_threads = util::lld_flag_no_threads(target.contains("windows"));
            options[1] = Some(format!("-Clink-arg=-Wl,{}", no_threads));
        }

        IntoIterator::into_iter(options).flatten()
    }

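    // Illustrative only: with `use_lld` enabled on a non-MSVC target this yields
    // something like ["-Clink-arg=-fuse-ld=lld", "-Clink-arg=-Wl,<no-threads flag>"],
    // where the exact no-threads spelling comes from `util::lld_flag_no_threads` and
    // differs between Windows and other platforms; with `use_lld` off it is empty.
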
    /// Returns if this target should statically link the C runtime, if specified
    fn crt_static(&self, target: TargetSelection) -> Option<bool> {
        if target.contains("pc-windows-msvc") {
            Some(true)
        } else {
            self.config.target_config.get(&target).and_then(|t| t.crt_static)
        }
    }

    /// Returns the "musl root" for this `target`, if defined
    fn musl_root(&self, target: TargetSelection) -> Option<&Path> {
        self.config
            .target_config
            .get(&target)
            .and_then(|t| t.musl_root.as_ref())
            .or_else(|| self.config.musl_root.as_ref())
            .map(|p| &**p)
    }

    /// Returns the "musl libdir" for this `target`.
    fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> {
        let t = self.config.target_config.get(&target)?;
        if let libdir @ Some(_) = &t.musl_libdir {
            return libdir.clone();
        }
        self.musl_root(target).map(|root| root.join("lib"))
    }

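    // Illustrative only (path is hypothetical): if `musl-libdir` is not configured but
    // `musl-root = "/usr/local/musl"` is, this falls back to "/usr/local/musl/lib".
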
    /// Returns the sysroot for the wasi target, if defined
    fn wasi_root(&self, target: TargetSelection) -> Option<&Path> {
        self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p)
    }

    /// Returns `true` if this is a no-std `target`, if defined
    fn no_std(&self, target: TargetSelection) -> Option<bool> {
        self.config.target_config.get(&target).map(|t| t.no_std)
    }

    /// Returns `true` if the target will be tested using the `remote-test-client`
    /// and `remote-test-server` binaries.
    fn remote_tested(&self, target: TargetSelection) -> bool {
        self.qemu_rootfs(target).is_some()
            || target.contains("android")
            || env::var_os("TEST_DEVICE_ADDR").is_some()
    }

    /// Returns the root of the "rootfs" image that this target will be using,
    /// if one was configured.
    ///
    /// If `Some` is returned then that means that tests for this target are
    /// emulated with QEMU and binaries will need to be shipped to the emulator.
    fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> {
        self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p)
    }

    /// Path to the python interpreter to use
    fn python(&self) -> &Path {
        if self.config.build.ends_with("apple-darwin") {
            // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
            // LLDB plugin's compiled module which only works with the system python
            // (namely not Homebrew-installed python)
            Path::new("/usr/bin/python3")
        } else {
            self.config
                .python
                .as_ref()
                .expect("python is required for running LLDB or rustdoc tests")
        }
    }

    /// Temporary directory that extended error information is emitted to.
    fn extended_error_dir(&self) -> PathBuf {
        self.out.join("tmp/extended-error-metadata")
    }

    /// Tests whether the `compiler` compiling for `target` should be forced to
    /// use a stage1 compiler instead.
    ///
    /// Currently, by default, the build system does not perform a "full
    /// bootstrap" where we compile the compiler three times.
    /// Instead, we compile the compiler two times. The final stage (stage2)
    /// just copies the libraries from the previous stage, which is what this
    /// method detects.
    ///
    /// Here we return `true` if:
    ///
    /// * The build isn't performing a full bootstrap
    /// * The `compiler` is in the final stage, 2
    /// * We're not cross-compiling, so the artifacts are already available in
    ///   stage1
    ///
    /// When all of these conditions are met the build will lift artifacts from
    /// the previous stage forward.
    fn force_use_stage1(&self, stage: u32, target: TargetSelection) -> bool {
        !self.config.full_bootstrap
            && !self.config.download_rustc()
            && stage >= 2
            && (self.hosts.iter().any(|h| *h == target) || target == self.build)
    }

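    // Illustrative only: in a default (non-full-bootstrap, non-download-rustc) build
    // that is not cross-compiling, a stage 2 request returns `true` here, so the
    // stage1 artifacts are reused instead of compiling the compiler a third time.
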
    /// Checks whether the `compiler` compiling for `target` should be forced to
    /// use a stage2 compiler instead.
    ///
    /// When we download the pre-compiled version of rustc and the compiler stage is >= 2,
    /// it should be forced to use a stage2 compiler.
    fn force_use_stage2(&self, stage: u32) -> bool {
        self.config.download_rustc() && stage >= 2
    }

    /// Given `num` in the form "a.b.c" return a "release string" which
    /// describes the release version number.
    ///
    /// For example on nightly this returns "a.b.c-nightly", on beta it returns
    /// "a.b.c-beta.1" and on stable it just returns "a.b.c".
    fn release(&self, num: &str) -> String {
        match &self.config.channel[..] {
            "stable" => num.to_string(),
            "beta" => {
                if !self.config.omit_git_hash {
                    format!("{}-beta.{}", num, self.beta_prerelease_version())
                } else {
                    format!("{}-beta", num)
                }
            }
            "nightly" => format!("{}-nightly", num),
            _ => format!("{}-dev", num),
        }
    }

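    // Illustrative only (version number is hypothetical): `release("1.72.0")` returns
    // "1.72.0" on stable, "1.72.0-beta.N" on beta when git information is available
    // (or "1.72.0-beta" when it is omitted), "1.72.0-nightly" on nightly, and
    // "1.72.0-dev" on any other channel.
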
    fn beta_prerelease_version(&self) -> u32 {
        fn extract_beta_rev_from_file<P: AsRef<Path>>(version_file: P) -> Option<String> {
            let version = fs::read_to_string(version_file).ok()?;

            extract_beta_rev(&version)
        }

        if let Some(s) = self.prerelease_version.get() {
            return s;
        }

        // First check if there is a version file available.
        // If available, we read the beta revision from that file.
        // This only happens when building from a source tarball when Git should not be used.
        let count = extract_beta_rev_from_file(self.src.join("version")).unwrap_or_else(|| {
            // Figure out how many merge commits happened since we branched off master.
            // That's our beta number!
            // (Note that we use a `..` range, not the `...` symmetric difference.)
            output(self.config.git().arg("rev-list").arg("--count").arg("--merges").arg(format!(
                "refs/remotes/origin/{}..HEAD",
                self.config.stage0_metadata.config.nightly_branch
            )))
        });
        let n = count.trim().parse().unwrap();
        self.prerelease_version.set(Some(n));
        n
    }

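    // Illustrative only: the git fallback above amounts to running something like
    // `git rev-list --count --merges refs/remotes/origin/<nightly-branch>..HEAD`,
    // i.e. counting merge commits since the beta branch diverged from master.
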
    /// Returns the value of `release` above for Rust itself.
    fn rust_release(&self) -> String {
        self.release(&self.version)
    }

    /// Returns the "package version" for a component given the `num` release
    /// number.
    ///
    /// The package version is typically what shows up in the names of tarballs.
    /// For channels like beta/nightly it's just the channel name, otherwise
    /// it's the `num` provided.
    fn package_vers(&self, num: &str) -> String {
        match &self.config.channel[..] {
            "stable" => num.to_string(),
            "beta" => "beta".to_string(),
            "nightly" => "nightly".to_string(),
            _ => format!("{}-dev", num),
        }
    }

    /// Returns the value of `package_vers` above for Rust itself.
    fn rust_package_vers(&self) -> String {
        self.package_vers(&self.version)
    }

    /// Returns the `version` string associated with this compiler for Rust
    /// itself.
    ///
    /// Note that this is a descriptive string which includes the commit date,
    /// sha, version, etc.
    fn rust_version(&self) -> String {
        let mut version = self.rust_info().version(self, &self.version);
        if let Some(ref s) = self.config.description {
            version.push_str(" (");
            version.push_str(s);
            version.push(')');
        }
        version
    }

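    // Illustrative only (all values hypothetical): this produces a descriptive string
    // such as "1.72.0-nightly (abcdef012 2023-06-15)", with the configured
    // `description`, if any, appended in a trailing parenthesized segment.
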
    /// Returns the full commit hash.
    fn rust_sha(&self) -> Option<&str> {
        self.rust_info().sha()
    }

    /// Returns the `a.b.c` version that the given package is at.
    fn release_num(&self, package: &str) -> String {
        let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package));
        let toml = t!(fs::read_to_string(&toml_file_name));
        for line in toml.lines() {
            if let Some(stripped) =
                line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\""))
            {
                return stripped.to_owned();
            }
        }

        panic!("failed to find version in {}'s Cargo.toml", package)
    }

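    // Illustrative only (numbers hypothetical): `release_num("cargo")` scans
    // `src/tools/cargo/Cargo.toml` for a line like `version = "0.74.0"` and returns
    // "0.74.0"; it panics if no such line is found.
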
    /// Returns `true` if unstable features should be enabled for the compiler
    /// we're building.
    fn unstable_features(&self) -> bool {
        match &self.config.channel[..] {
            "stable" | "beta" => false,
            "nightly" | _ => true,
        }
    }

    /// Returns a Vec of all the dependencies of the given root crate,
    /// including transitive dependencies and the root itself. Only includes
    /// "local" crates (those in the local source tree, not from a registry).
    fn in_tree_crates(&self, root: &str, target: Option<TargetSelection>) -> Vec<&Crate> {
        let mut ret = Vec::new();
        let mut list = vec![INTERNER.intern_str(root)];
        let mut visited = HashSet::new();
        while let Some(krate) = list.pop() {
            let krate = self
                .crates
                .get(&krate)
                .unwrap_or_else(|| panic!("metadata missing for {krate}: {:?}", self.crates));
            ret.push(krate);
            for dep in &krate.deps {
                if !self.crates.contains_key(dep) {
                    // Ignore non-workspace members.
                    continue;
                }
                // Don't include optional deps if their features are not
                // enabled. Ideally this would be computed from `cargo
                // metadata --features …`, but that is somewhat slow. In
                // the future, we may want to consider just filtering all
                // build and dev dependencies in metadata::build.
                if visited.insert(dep)
                    && (dep != "profiler_builtins"
                        || target
                            .map(|t| self.config.profiler_enabled(t))
                            .unwrap_or_else(|| self.config.any_profiler_enabled()))
                    && (dep != "rustc_codegen_llvm" || self.config.llvm_enabled())
                {
                    list.push(*dep);
                }
            }
        }
        ret
    }
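
    // Illustrative sketch (an assumption, not part of the original source): a Step
    // that wants to operate on every local crate reachable from a workspace root
    // could call `in_tree_crates` and iterate the result. The root name
    // "rustc-main" below is only an example.
    //
    //     let compiler_crates = build.in_tree_crates("rustc-main", Some(target));
    //     for krate in compiler_crates {
    //         println!("would check {}", krate.name);
    //     }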

    fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, DependencyType)> {
        if self.config.dry_run() {
            return Vec::new();
        }

        if !stamp.exists() {
            eprintln!(
                "Error: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?",
                stamp.display()
            );
            crate::detail_exit_macro!(1);
        }

        let mut paths = Vec::new();
        let contents = t!(fs::read(stamp), &stamp);
        // This is the method we use for extracting paths from the stamp file passed to us. See
        // run_cargo for more information (in compile.rs).
        for part in contents.split(|b| *b == 0) {
            if part.is_empty() {
                continue;
            }
            let dependency_type = match part[0] as char {
                'h' => DependencyType::Host,
                's' => DependencyType::TargetSelfContained,
                't' => DependencyType::Target,
                _ => unreachable!(),
            };
            let path = PathBuf::from(t!(str::from_utf8(&part[1..])));
            paths.push((path, dependency_type));
        }
        paths
    }
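
    // Sketch of the on-disk layout read above (inferred from the parsing loop; the
    // authoritative writer is `run_cargo` in compile.rs): each entry is a one-byte
    // dependency-type tag ('h', 's', or 't') followed by a UTF-8 path, and entries
    // are NUL-separated, e.g. `t/path/to/libstd.so\0h/path/to/host-tool\0`.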

    /// Copies a file from `src` to `dst`
    pub fn copy(&self, src: &Path, dst: &Path) {
        self.copy_internal(src, dst, false);
    }

    fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
        if self.config.dry_run() {
            return;
        }
        self.verbose_than(1, &format!("Copy {:?} to {:?}", src, dst));
        if src == dst {
            return;
        }
        let _ = fs::remove_file(&dst);
        let metadata = t!(src.symlink_metadata());
        let mut src = src.to_path_buf();
        if metadata.file_type().is_symlink() {
            if dereference_symlinks {
                src = t!(fs::canonicalize(src));
            } else {
                let link = t!(fs::read_link(src));
                t!(self.symlink_file(link, dst));
                return;
            }
        }
        if let Ok(()) = fs::hard_link(&src, dst) {
            // Attempt to "easy copy" by creating a hard link
            // (symlinks don't work on windows), but if that fails
            // just fall back to a slow `copy` operation.
        } else {
            if let Err(e) = fs::copy(&src, dst) {
                panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e)
            }
            t!(fs::set_permissions(dst, metadata.permissions()));
            let atime = FileTime::from_last_access_time(&metadata);
            let mtime = FileTime::from_last_modification_time(&metadata);
            t!(filetime::set_file_times(dst, atime, mtime));
        }
    }

    /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
    /// when this function is called.
    pub fn cp_r(&self, src: &Path, dst: &Path) {
        if self.config.dry_run() {
            return;
        }
        for f in self.read_dir(src) {
            let path = f.path();
            let name = path.file_name().unwrap();
            let dst = dst.join(name);
            if t!(f.file_type()).is_dir() {
                t!(fs::create_dir_all(&dst));
                self.cp_r(&path, &dst);
            } else {
                let _ = fs::remove_file(&dst);
                self.copy(&path, &dst);
            }
        }
    }

    /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
    /// when this function is called. Unwanted files or directories can be skipped
    /// by returning `false` from the filter function.
    pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) {
        // Immediately recurse with an empty relative path
        self.recurse_(src, dst, Path::new(""), filter)
    }
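
    // Illustrative sketch (not from the original source): the closure sees paths
    // relative to `src` and returns whether to copy them, so skipping a single
    // unwanted file looks roughly like this (the directories are assumptions):
    //
    //     build.cp_filtered(&src_dir, &dst_dir, &|path| !path.ends_with("Cargo.lock"));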

    // Inner function does the actual work
    fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) {
        for f in self.read_dir(src) {
            let path = f.path();
            let name = path.file_name().unwrap();
            let dst = dst.join(name);
            let relative = relative.join(name);
            // Only copy file or directory if the filter function returns true
            if filter(&relative) {
                if t!(f.file_type()).is_dir() {
                    let _ = fs::remove_dir_all(&dst);
                    self.create_dir(&dst);
                    self.recurse_(&path, &dst, &relative, filter);
                } else {
                    let _ = fs::remove_file(&dst);
                    self.copy(&path, &dst);
                }
            }
        }
    }

    fn copy_to_folder(&self, src: &Path, dest_folder: &Path) {
        let file_name = src.file_name().unwrap();
        let dest = dest_folder.join(file_name);
        self.copy(src, &dest);
    }

    fn install(&self, src: &Path, dstdir: &Path, perms: u32) {
        if self.config.dry_run() {
            return;
        }
        let dst = dstdir.join(src.file_name().unwrap());
        self.verbose_than(1, &format!("Install {:?} to {:?}", src, dst));
        t!(fs::create_dir_all(dstdir));
        if !src.exists() {
            panic!("Error: File \"{}\" not found!", src.display());
        }
        self.copy_internal(src, &dst, true);
        chmod(&dst, perms);
    }

    fn read(&self, path: &Path) -> String {
        if self.config.dry_run() {
            return String::new();
        }
        t!(fs::read_to_string(path))
    }

    fn create_dir(&self, dir: &Path) {
        if self.config.dry_run() {
            return;
        }
        t!(fs::create_dir_all(dir))
    }

    fn remove_dir(&self, dir: &Path) {
        if self.config.dry_run() {
            return;
        }
        t!(fs::remove_dir_all(dir))
    }

    fn read_dir(&self, dir: &Path) -> impl Iterator<Item = fs::DirEntry> {
        let iter = match fs::read_dir(dir) {
            Ok(v) => v,
            Err(_) if self.config.dry_run() => return vec![].into_iter(),
            Err(err) => panic!("could not read dir {:?}: {:?}", dir, err),
        };
        iter.map(|e| t!(e)).collect::<Vec<_>>().into_iter()
    }

    fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(&self, src: P, link: Q) -> io::Result<()> {
        #[cfg(unix)]
        use std::os::unix::fs::symlink as symlink_file;
        #[cfg(windows)]
        use std::os::windows::fs::symlink_file;
        if !self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) }
    }

    /// Returns if config.ninja is enabled, and checks for ninja existence,
    /// exiting with a nicer error message if not.
    fn ninja(&self) -> bool {
        let mut cmd_finder = crate::sanity::Finder::new();

        if self.config.ninja_in_file {
            // Some Linux distros rename `ninja` to `ninja-build`.
            // CMake can work with either binary name.
            if cmd_finder.maybe_have("ninja-build").is_none()
                && cmd_finder.maybe_have("ninja").is_none()
            {
                eprintln!(
                    "
Couldn't find required command: ninja (or ninja-build)

You should install ninja as described at
<https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages>,
or set `ninja = false` in the `[llvm]` section of `config.toml`.
Alternatively, set `download-ci-llvm = true` in that `[llvm]` section
to download LLVM rather than building it.
"
                );
                detail_exit_macro!(1);
            }
        }

        // If ninja isn't enabled but we're building for MSVC then we try
        // doubly hard to enable it. It was realized in #43767 that the msbuild
        // CMake generator for MSVC doesn't respect configuration options like
        // disabling LLVM assertions, which can often be quite important!
        //
        // In these cases we automatically enable Ninja if we find it in the
        // environment.
        if !self.config.ninja_in_file && self.config.build.contains("msvc") {
            if cmd_finder.maybe_have("ninja").is_some() {
                return true;
            }
        }

        self.config.ninja_in_file
    }
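
    // Sketch of the config.toml knobs referenced by the message above (assumed
    // layout, mirroring the hints printed there):
    //
    //     [llvm]
    //     ninja = false              # build LLVM without ninja
    //     # download-ci-llvm = true  # or skip building LLVM locally altogether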

    pub fn colored_stdout<R, F: FnOnce(&mut dyn WriteColor) -> R>(&self, f: F) -> R {
        self.colored_stream_inner(StandardStream::stdout, self.config.stdout_is_tty, f)
    }

    pub fn colored_stderr<R, F: FnOnce(&mut dyn WriteColor) -> R>(&self, f: F) -> R {
        self.colored_stream_inner(StandardStream::stderr, self.config.stderr_is_tty, f)
    }

    fn colored_stream_inner<R, F, C>(&self, constructor: C, is_tty: bool, f: F) -> R
    where
        C: Fn(ColorChoice) -> StandardStream,
        F: FnOnce(&mut dyn WriteColor) -> R,
    {
        let choice = match self.config.color {
            flags::Color::Always => ColorChoice::Always,
            flags::Color::Never => ColorChoice::Never,
            flags::Color::Auto if !is_tty => ColorChoice::Never,
            flags::Color::Auto => ColorChoice::Auto,
        };
        let mut stream = constructor(choice);
        let result = f(&mut stream);
        stream.reset().unwrap();
        result
    }
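
    // Illustrative sketch (an assumption, not part of the original file): callers
    // hand `colored_stdout` a closure and write through the `WriteColor` handle,
    // which respects the configured `--color` choice and the TTY detection above.
    //
    //     use std::io::Write;
    //     use termcolor::{Color, ColorSpec};
    //     build.colored_stdout(|out| {
    //         let _ = out.set_color(ColorSpec::new().set_fg(Some(Color::Green)));
    //         let _ = writeln!(out, "finished");
    //     });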
}

/// Extract the beta revision from the full version string.
///
/// The full version string looks like "a.b.c-beta.y". And we need to extract
/// the "y" part from the string.
pub fn extract_beta_rev(version: &str) -> Option<String> {
    let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
    let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string()));

    count
}
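
// Worked example (illustrative, assumed input shape): a full version string such
// as "1.71.0-beta.3 (abcdef123 2023-05-20)" yields Some("3"), while a stable
// string like "1.71.0 (abcdef123 2023-05-20)" has no "-beta." segment and yields
// None.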

#[cfg(unix)]
fn chmod(path: &Path, perms: u32) {
    use std::os::unix::fs::*;
    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
}
#[cfg(windows)]
fn chmod(_path: &Path, _perms: u32) {}

impl Compiler {
    pub fn with_stage(mut self, stage: u32) -> Compiler {
        self.stage = stage;
        self
    }

    /// Returns `true` if this is a snapshot compiler for `build`'s configuration
    pub fn is_snapshot(&self, build: &Build) -> bool {
        self.stage == 0 && self.host == build.build
    }

    /// Returns if this compiler should be treated as a final stage one in the
    /// current build session.
    /// This takes into account whether we're performing a full bootstrap or
    /// not; don't directly compare the stage with `2`!
    pub fn is_final_stage(&self, build: &Build) -> bool {
        let final_stage = if build.config.full_bootstrap { 2 } else { 1 };
        self.stage >= final_stage
    }
}

fn envify(s: &str) -> String {
    s.chars()
        .map(|c| match c {
            '-' => '_',
            c => c,
        })
        .flat_map(|c| c.to_uppercase())
        .collect()
}
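
// Worked example (illustrative): envify("rust-analyzer") maps '-' to '_' and
// uppercases the remaining characters, producing "RUST_ANALYZER".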