bootstrap/core/build_steps/perf.rs

use std::env::consts::EXE_EXTENSION;
use std::fmt::{Display, Formatter};

use crate::core::build_steps::compile::{Std, Sysroot};
use crate::core::build_steps::tool::{RustcPerf, Rustdoc};
use crate::core::builder::Builder;
use crate::core::config::DebuginfoLevel;
use crate::utils::exec::{BootstrapCommand, command};

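/// Arguments for the `perf` bootstrap subcommand (`./x perf ...`).
///
/// A typical benchmark-and-compare session might look like this (the
/// `before`/`after` identifiers are only examples):
///
/// ```text
/// ./x perf benchmark before
/// # ...apply your changes; the compiler is rebuilt on the next invocation...
/// ./x perf benchmark after
/// ./x perf compare before after
/// ```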
#[derive(Debug, Clone, clap::Parser)]
pub struct PerfArgs {
    #[clap(subcommand)]
    cmd: PerfCommand,
}

#[derive(Debug, Clone, clap::Parser)]
enum PerfCommand {
    /// Run `profile_local eprintln`.
    /// This executes the compiler on the given benchmarks and stores its stderr output.
    Eprintln {
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Run `profile_local samply`.
    /// This executes the compiler on the given benchmarks and profiles it with `samply`.
    /// You need to install `samply`, e.g. using `cargo install samply`.
    Samply {
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Run `profile_local cachegrind`.
    /// This executes the compiler on the given benchmarks under `Cachegrind`.
    Cachegrind {
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Run compile benchmarks with a locally built compiler.
    Benchmark {
        /// Identifier to associate the benchmark results with.
        #[clap(name = "benchmark-id")]
        id: String,

        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Compare the results of two previously executed benchmark runs.
    Compare {
        /// The name of the base artifact to be compared.
        base: String,

        /// The name of the modified artifact to be compared.
        modified: String,
    },
}

impl PerfCommand {
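    /// Returns the options shared by the profiling and benchmarking
    /// subcommands, or `None` for `compare`, which takes no such options.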
    fn shared_opts(&self) -> Option<&SharedOpts> {
        match self {
            PerfCommand::Eprintln { opts, .. }
            | PerfCommand::Samply { opts, .. }
            | PerfCommand::Cachegrind { opts, .. }
            | PerfCommand::Benchmark { opts, .. } => Some(opts),
            PerfCommand::Compare { .. } => None,
        }
    }
}

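/// Options shared by all subcommands except `compare`. Clap splits each
/// comma-separated value into a `Vec` here (`value_delimiter = ','`);
/// `apply_shared_opts` below re-joins the entries with commas when forwarding
/// them to the `rustc-perf` collector.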
#[derive(Debug, Clone, clap::Parser)]
struct SharedOpts {
    /// Select the benchmarks that you want to run (separated by commas).
    /// If unspecified, all benchmarks will be executed.
    #[clap(long, global = true, value_delimiter = ',')]
    include: Vec<String>,

    /// Skip benchmarks whose name matches a prefix in this comma-separated list.
    #[clap(long, global = true, value_delimiter = ',')]
    exclude: Vec<String>,

    /// Select the scenarios that should be benchmarked.
    #[clap(
        long,
        global = true,
        value_delimiter = ',',
        default_value = "Full,IncrFull,IncrUnchanged,IncrPatched"
    )]
    scenarios: Vec<Scenario>,

    /// Select the profiles that should be benchmarked.
    #[clap(long, global = true, value_delimiter = ',', default_value = "Check,Debug,Opt")]
    profiles: Vec<Profile>,
}

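/// Compilation profiles understood by the `rustc-perf` collector.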
#[derive(Clone, Copy, Debug, PartialEq, clap::ValueEnum)]
#[value(rename_all = "PascalCase")]
pub enum Profile {
    Check,
    Debug,
    Doc,
    Opt,
    Clippy,
}

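// The `Display` impls here and for `Scenario` below must produce exactly the
// names that the `rustc-perf` collector accepts for its `--profiles` and
// `--scenarios` flags; `apply_shared_opts` relies on this.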
impl Display for Profile {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Profile::Check => "Check",
            Profile::Debug => "Debug",
            Profile::Doc => "Doc",
            Profile::Opt => "Opt",
            Profile::Clippy => "Clippy",
        };
        f.write_str(name)
    }
}

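/// Compilation scenarios (a full build plus several incremental variants)
/// understood by the `rustc-perf` collector.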
#[derive(Clone, Copy, Debug, clap::ValueEnum)]
#[value(rename_all = "PascalCase")]
pub enum Scenario {
    Full,
    IncrFull,
    IncrUnchanged,
    IncrPatched,
}

impl Display for Scenario {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Scenario::Full => "Full",
            Scenario::IncrFull => "IncrFull",
            Scenario::IncrUnchanged => "IncrUnchanged",
            Scenario::IncrPatched => "IncrPatched",
        };
        f.write_str(name)
    }
}

/// Performs profiling or benchmarking with `rustc-perf` on a locally built
/// version of the compiler.
pub fn perf(builder: &Builder<'_>, args: &PerfArgs) {
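    // Build the `rustc-perf` collector binary from the in-tree
    // `src/tools/rustc-perf` sources, using the stage0 compiler for the host.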
    let collector = builder.ensure(RustcPerf {
        compiler: builder.compiler(0, builder.config.build),
        target: builder.config.build,
    });

    let is_profiling = match &args.cmd {
        PerfCommand::Eprintln { .. }
        | PerfCommand::Samply { .. }
        | PerfCommand::Cachegrind { .. } => true,
        PerfCommand::Benchmark { .. } | PerfCommand::Compare { .. } => false,
    };
    if is_profiling && builder.build.config.rust_debuginfo_level_rustc == DebuginfoLevel::None {
        builder.info(r#"WARNING: You are compiling rustc without debuginfo; this will make profiling less useful.
Consider setting `rust.debuginfo-level = 1` in `bootstrap.toml`."#);
    }

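    // Build the compiler (and its standard library) that will actually be
    // profiled or benchmarked, at the stage requested on the command line.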
    let compiler = builder.compiler(builder.top_stage, builder.config.build);
    builder.ensure(Std::new(compiler, builder.config.build));

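    // The `Doc` profile invokes rustdoc, so make sure it has been built too.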
    if let Some(opts) = args.cmd.shared_opts() {
        if opts.profiles.contains(&Profile::Doc) {
            builder.ensure(Rustdoc { compiler });
        }
    }

    let sysroot = builder.ensure(Sysroot::new(compiler));
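    // Path to the `rustc` binary inside the freshly built sysroot.
    // `EXE_EXTENSION` is `"exe"` on Windows and empty elsewhere.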
    let mut rustc = sysroot.clone();
    rustc.push("bin");
    rustc.push("rustc");
    rustc.set_extension(EXE_EXTENSION);

    let rustc_perf_dir = builder.build.tempdir().join("rustc-perf");
    let results_dir = rustc_perf_dir.join("results");
    builder.create_dir(&results_dir);

    let mut cmd = command(collector.tool_path);

    // Set the working directory to `src/tools/rustc-perf` so that the collector
    // can find the directory with the compile-time benchmarks.
    cmd.current_dir(builder.src.join("src/tools/rustc-perf"));

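    // A single database file is shared by `bench_local` (which writes results
    // into it) and `bench_cmp` (which reads them back for comparison).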
    let db_path = results_dir.join("results.db");

    match &args.cmd {
        PerfCommand::Eprintln { opts }
        | PerfCommand::Samply { opts }
        | PerfCommand::Cachegrind { opts } => {
            cmd.arg("profile_local");
            cmd.arg(match &args.cmd {
                PerfCommand::Eprintln { .. } => "eprintln",
                PerfCommand::Samply { .. } => "samply",
                PerfCommand::Cachegrind { .. } => "cachegrind",
                _ => unreachable!(),
            });

            cmd.arg("--out-dir").arg(&results_dir);
            cmd.arg(rustc);

            apply_shared_opts(&mut cmd, opts);
            cmd.run(builder);

            println!("You can find the results at `{}`", &results_dir.display());
        }
        PerfCommand::Benchmark { id, opts } => {
            cmd.arg("bench_local");
            cmd.arg("--db").arg(&db_path);
            cmd.arg("--id").arg(id);
            cmd.arg(rustc);

            apply_shared_opts(&mut cmd, opts);
            cmd.run(builder);
        }
        PerfCommand::Compare { base, modified } => {
            cmd.arg("bench_cmp");
            cmd.arg("--db").arg(&db_path);
            cmd.arg(base).arg(modified);

            cmd.run(builder);
        }
    }
}

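/// Forwards the shared `--include`/`--exclude`/`--profiles`/`--scenarios`
/// options to the collector invocation, re-joining each parsed list with
/// commas, since the collector expects comma-separated values.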
fn apply_shared_opts(cmd: &mut BootstrapCommand, opts: &SharedOpts) {
    if !opts.include.is_empty() {
        cmd.arg("--include").arg(opts.include.join(","));
    }
    if !opts.exclude.is_empty() {
        cmd.arg("--exclude").arg(opts.exclude.join(","));
    }
    if !opts.profiles.is_empty() {
        cmd.arg("--profiles")
            .arg(opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
    }
    if !opts.scenarios.is_empty() {
        cmd.arg("--scenarios")
            .arg(opts.scenarios.iter().map(|s| s.to_string()).collect::<Vec<_>>().join(","));
    }
}