
Commit e9e27ab

Auto merge of #128868 - s7tya:port-rustc-perf-cmp-command, r=Kobzol

Port rustc perf cmp command

I've integrated `bench_cmp` and `bench_local` into the bootstrap. r? `@Kobzol`
2 parents db5704f + ce1d7d1 commit e9e27ab

2 files changed: +82 -36 lines changed

Diff for: src/tools/rustc-perf

Submodule rustc-perf updated 65 files

Diff for: src/tools/rustc-perf-wrapper/src/main.rs

+81 -35 lines changed
@@ -1,3 +1,4 @@
+use std::fs::create_dir_all;
 use std::path::PathBuf;
 use std::process::Command;
 
@@ -17,9 +18,6 @@ pub struct Args {
     #[clap(subcommand)]
     cmd: PerfCommand,
 
-    #[clap(flatten)]
-    opts: SharedOpts,
-
     #[clap(flatten)]
     ctx: BuildContext,
 }
@@ -28,22 +26,37 @@ pub struct Args {
 enum PerfCommand {
     /// Run `profile_local eprintln`.
     /// This executes the compiler on the given benchmarks and stores its stderr output.
-    Eprintln,
+    Eprintln {
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
     /// Run `profile_local samply`
     /// This executes the compiler on the given benchmarks and profiles it with `samply`.
     /// You need to install `samply`, e.g. using `cargo install samply`.
-    Samply,
+    Samply {
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
     /// Run `profile_local cachegrind`.
     /// This executes the compiler on the given benchmarks under `Cachegrind`.
-    Cachegrind,
-}
-
-impl PerfCommand {
-    fn is_profiling(&self) -> bool {
-        match self {
-            PerfCommand::Eprintln | PerfCommand::Samply | PerfCommand::Cachegrind => true,
-        }
-    }
+    Cachegrind {
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
+    Benchmark {
+        /// Identifier to associate benchmark results with
+        id: String,
+
+        #[clap(flatten)]
+        opts: SharedOpts,
+    },
+    Compare {
+        /// The name of the base artifact to be compared.
+        base: String,
+
+        /// The name of the modified artifact to be compared.
+        modified: String,
+    },
 }
 
 #[derive(Debug, clap::Parser)]
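Note (not part of the diff): each profiling variant now carries a flattened copy of `SharedOpts` instead of a single top-level `opts` field on `Args`, so the shared flags are only accepted by the subcommands that actually use them. A minimal, self-contained sketch of that clap pattern, assuming clap with the `derive` feature; `Cli`, `Cmd`, and `Shared` are illustrative names, not the wrapper's real types:

// Minimal sketch of `#[clap(flatten)]` inside subcommand variants; names are made up.
use clap::Parser;

#[derive(Debug, clap::Parser)]
struct Cli {
    #[clap(subcommand)]
    cmd: Cmd,
}

#[derive(Debug, clap::Parser)]
enum Cmd {
    /// Shared options live inside the variant, mirroring the diff above.
    Eprintln {
        #[clap(flatten)]
        opts: Shared,
    },
    /// A variant with only positional arguments, like `Compare` in the diff.
    Compare { base: String, modified: String },
}

#[derive(Debug, clap::Parser)]
struct Shared {
    /// Comma-separated list, split by clap into a Vec<String>.
    #[clap(long, global = true, value_delimiter = ',')]
    include: Vec<String>,
}

fn main() {
    // e.g. `cli eprintln --include regex,syn` or `cli compare base-id new-id`
    let cli = Cli::parse();
    println!("{cli:?}");
}

With this layout, `cli eprintln --include regex,syn` fills `opts.include`, while `cli compare <base> <modified>` exposes no shared options at all.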
@@ -52,6 +65,11 @@ struct SharedOpts {
     /// If unspecified, all benchmarks will be executed.
     #[clap(long, global = true, value_delimiter = ',')]
     include: Vec<String>,
+
+    /// Select the benchmarks matching a prefix in this comma-separated list that you don't want to run.
+    #[clap(long, global = true, value_delimiter = ',')]
+    exclude: Vec<String>,
+
     /// Select the scenarios that should be benchmarked.
     #[clap(
         long,
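Note (not part of the diff): the new `--exclude` flag mirrors the existing `--include`. clap's `value_delimiter = ','` splits the user's comma-separated list into a `Vec<String>`, and the `apply_shared_opts` helper further down re-joins it with commas before forwarding it to the collector. A tiny std-only illustration of that round trip (the benchmark names are made up):

fn main() {
    // What clap produces for `--exclude regex,syn` with `value_delimiter = ','`.
    let exclude: Vec<String> = "regex,syn".split(',').map(str::to_string).collect();
    assert_eq!(exclude, ["regex", "syn"]);

    // What the wrapper passes back to the collector after `--exclude`.
    assert_eq!(exclude.join(","), "regex,syn");
}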
@@ -87,35 +105,67 @@ fn main() {
 
 fn run(args: Args) {
     let mut cmd = Command::new(args.ctx.collector);
+    let db_path = args.ctx.results_dir.join("results.db");
+
     match &args.cmd {
-        PerfCommand::Eprintln => {
-            cmd.arg("profile_local").arg("eprintln");
+        PerfCommand::Eprintln { opts }
+        | PerfCommand::Samply { opts }
+        | PerfCommand::Cachegrind { opts } => {
+            cmd.arg("profile_local");
+            cmd.arg(match &args.cmd {
+                PerfCommand::Eprintln { .. } => "eprintln",
+                PerfCommand::Samply { .. } => "samply",
+                PerfCommand::Cachegrind { .. } => "cachegrind",
+                _ => unreachable!(),
+            });
+
+            cmd.arg("--out-dir").arg(&args.ctx.results_dir);
+
+            apply_shared_opts(&mut cmd, opts);
+            execute_benchmark(&mut cmd, &args.ctx.compiler);
+
+            println!("You can find the results at `{}`", args.ctx.results_dir.display());
         }
-        PerfCommand::Samply => {
-            cmd.arg("profile_local").arg("samply");
+        PerfCommand::Benchmark { id, opts } => {
+            cmd.arg("bench_local");
+            cmd.arg("--db").arg(&db_path);
+            cmd.arg("--id").arg(id);
+
+            apply_shared_opts(&mut cmd, opts);
+            create_dir_all(&args.ctx.results_dir).unwrap();
+            execute_benchmark(&mut cmd, &args.ctx.compiler);
         }
-        PerfCommand::Cachegrind => {
-            cmd.arg("profile_local").arg("cachegrind");
+        PerfCommand::Compare { base, modified } => {
+            cmd.arg("bench_cmp");
+            cmd.arg("--db").arg(&db_path);
+            cmd.arg(base).arg(modified);
+
+            create_dir_all(&args.ctx.results_dir).unwrap();
+            cmd.status().expect("error while running rustc-perf bench_cmp");
         }
     }
-    if args.cmd.is_profiling() {
-        cmd.arg("--out-dir").arg(&args.ctx.results_dir);
-    }
+}
 
-    if !args.opts.include.is_empty() {
-        cmd.arg("--include").arg(args.opts.include.join(","));
+fn apply_shared_opts(cmd: &mut Command, opts: &SharedOpts) {
+    if !opts.include.is_empty() {
+        cmd.arg("--include").arg(opts.include.join(","));
     }
-    if !args.opts.profiles.is_empty() {
+    if !opts.exclude.is_empty() {
+        cmd.arg("--exclude").arg(opts.exclude.join(","));
+    }
+    if !opts.profiles.is_empty() {
         cmd.arg("--profiles")
-            .arg(args.opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
+            .arg(opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
     }
-    if !args.opts.scenarios.is_empty() {
+    if !opts.scenarios.is_empty() {
         cmd.arg("--scenarios")
-            .arg(args.opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
+            .arg(opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
     }
-    cmd.arg(&args.ctx.compiler);
+}
 
-    println!("Running `rustc-perf` using `{}`", args.ctx.compiler.display());
+fn execute_benchmark(cmd: &mut Command, compiler: &PathBuf) {
+    cmd.arg(compiler);
+    println!("Running `rustc-perf` using `{}`", compiler.display());
 
     const MANIFEST_DIR: &str = env!("CARGO_MANIFEST_DIR");
 
@@ -125,8 +175,4 @@ fn run(args: Args) {
     // with compile-time benchmarks.
     let cmd = cmd.current_dir(rustc_perf_dir);
    cmd.status().expect("error while running rustc-perf collector");
-
-    if args.cmd.is_profiling() {
-        println!("You can find the results at `{}`", args.ctx.results_dir.display());
-    }
 }
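Note (not part of the diff): together, `Benchmark` and `Compare` support a benchmark-then-compare workflow against the shared `results.db`: run `bench_local` once per artifact under a distinct id, then run `bench_cmp` on the two ids. A rough sketch of the collector commands the wrapper builds; the `collector` path, compiler paths, and ids are placeholders, and the sketch only prints the commands instead of spawning them (the real wrapper also creates the results directory and runs the collector from the rustc-perf checkout):

use std::ffi::OsStr;
use std::path::Path;
use std::process::Command;

// Print a Command as a rough shell-like string instead of running it.
fn show(cmd: &Command) {
    let args: Vec<_> = cmd.get_args().map(OsStr::to_string_lossy).collect();
    println!("{} {}", cmd.get_program().to_string_lossy(), args.join(" "));
}

fn main() {
    let results_dir = Path::new("results");
    let db = results_dir.join("results.db");

    // `Benchmark { id, .. }` -> `bench_local`: store results for one artifact under an id.
    for (id, compiler) in [("before", "path/to/rustc-before"), ("after", "path/to/rustc-after")] {
        let mut cmd = Command::new("collector");
        cmd.arg("bench_local").arg("--db").arg(&db).arg("--id").arg(id).arg(compiler);
        show(&cmd); // the real wrapper spawns this via `.status()`
    }

    // `Compare { base, modified }` -> `bench_cmp`: compare the two stored artifacts.
    let mut cmd = Command::new("collector");
    cmd.arg("bench_cmp").arg("--db").arg(&db).arg("before").arg("after");
    show(&cmd);
}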
