Skip to content

Commit

Permalink
Merge pull request rust-lang#1077 from Mark-Simulacrum/new-measureme
Browse files Browse the repository at this point in the history
Update to new measureme
  • Loading branch information
Mark-Simulacrum authored Oct 23, 2021
2 parents 3ca4c1e + 3fa81d1 commit d2570f3
Show file tree
Hide file tree
Showing 5 changed files with 99 additions and 30 deletions.
52 changes: 46 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion site/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ async-trait = "0.1"
database = { path = "../database" }
bytes = "1.0"
url = "2"
analyzeme = { git = "https://github.com/rust-lang/measureme" }
analyzeme = { git = "https://github.com/rust-lang/measureme", branch = "stable" }
tar = "0.4"
inferno = { version="0.10", default-features = false }
mime = "0.3"
Expand Down
36 changes: 26 additions & 10 deletions site/src/self_profile/codegen_schedule.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,17 +19,20 @@ fn is_interesting(name: &str) -> bool {
}

fn by_thread(self_profile_data: Vec<u8>) -> anyhow::Result<(u64, HashMap<u32, Vec<Event>>)> {
let data = ProfilingData::from_paged_buffer(self_profile_data)
let data = ProfilingData::from_paged_buffer(self_profile_data, None)
.map_err(|e| anyhow::format_err!("{:?}", e))?;

let mut start = None;
for event in data.iter().filter(|e| !e.timestamp.is_instant()) {
let full_event = event.to_event();
for event in data
.iter()
.filter(|e| e.timestamp().map_or(false, |t| !t.is_instant()))
{
let full_event = data.to_full_event(&event);
if is_interesting(&full_event.label) {
if start.is_some() {
start = std::cmp::min(start, Some(event.timestamp.start()));
start = std::cmp::min(start, Some(event.timestamp().unwrap().start()));
} else {
start = Some(event.timestamp.start());
start = Some(event.timestamp().unwrap().start());
}
}
}
Expand All @@ -39,19 +42,32 @@ fn by_thread(self_profile_data: Vec<u8>) -> anyhow::Result<(u64, HashMap<u32, Ve

let mut end = start;
let mut by_thread = HashMap::new();
for event in data.iter().filter(|e| !e.timestamp.is_instant()) {
let full_event = event.to_event();
for event in data
.iter()
.filter(|e| e.timestamp().map_or(false, |t| !t.is_instant()))
{
let full_event = data.to_full_event(&event);

if is_interesting(&full_event.label) {
by_thread
.entry(event.thread_id)
.or_insert_with(Vec::new)
.push(Event {
name: full_event.label.into(),
start: event.timestamp.start().duration_since(start).unwrap(),
end: event.timestamp.end().duration_since(start).unwrap(),
start: event
.timestamp()
.unwrap()
.start()
.duration_since(start)
.unwrap(),
end: event
.timestamp()
.unwrap()
.end()
.duration_since(start)
.unwrap(),
});
end = std::cmp::max(end, event.timestamp.end());
end = std::cmp::max(end, event.timestamp().unwrap().end());
}
}

Expand Down
37 changes: 25 additions & 12 deletions site/src/self_profile/crox.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,14 +62,19 @@ fn generate_thread_to_collapsed_thread_mapping(
// collect start and end times for all threads
let mut thread_start_and_end: HashMap<u32, (SystemTime, SystemTime)> = HashMap::default();
for event in data.iter() {
let timestamp = if let Some(t) = event.timestamp() {
t
} else {
continue;
};
thread_start_and_end
.entry(event.thread_id)
.and_modify(|(thread_start, thread_end)| {
let (event_min, event_max) = timestamp_to_min_max(event.timestamp);
let (event_min, event_max) = timestamp_to_min_max(timestamp);
*thread_start = cmp::min(*thread_start, event_min);
*thread_end = cmp::max(*thread_end, event_max);
})
.or_insert_with(|| timestamp_to_min_max(event.timestamp));
.or_insert_with(|| timestamp_to_min_max(timestamp));
}
// collect the threads in order of the end time
let mut end_and_thread = thread_start_and_end
Expand Down Expand Up @@ -130,29 +135,37 @@ pub fn generate(self_profile_data: Vec<u8>, opt: Opt) -> anyhow::Result<Vec<u8>>

let mut seq = serializer.serialize_seq(None)?;

let data = ProfilingData::from_paged_buffer(self_profile_data)
let data = ProfilingData::from_paged_buffer(self_profile_data, None)
.map_err(|e| anyhow::format_err!("{:?}", e))?;

let thread_to_collapsed_thread =
generate_thread_to_collapsed_thread_mapping(opt.collapse_threads, &data);

// Chrome does not seem to like how many QueryCacheHit events we generate
// only handle Interval events for now
for event in data.iter().filter(|e| !e.timestamp.is_instant()) {
for event in data
.iter()
.filter(|e| e.timestamp().map_or(false, |t| !t.is_instant()))
{
let duration = event.duration().unwrap();
if let Some(minimum_duration) = opt.minimum_duration {
if duration.as_micros() < minimum_duration {
continue;
}
}
let full_event = event.to_event();
let full_event = data.to_full_event(&event);
let crox_event = Event {
name: full_event.label.clone().into_owned(),
category: full_event.event_kind.clone().into_owned(),
event_type: EventType::Complete,
timestamp: event.timestamp.start().duration_since(UNIX_EPOCH).unwrap(),
timestamp: event
.timestamp()
.unwrap()
.start()
.duration_since(UNIX_EPOCH)
.unwrap(),
duration,
process_id: data.metadata.process_id,
process_id: data.metadata().process_id,
thread_id: *thread_to_collapsed_thread
.get(&event.thread_id)
.unwrap_or(&event.thread_id),
Expand All @@ -162,12 +175,12 @@ pub fn generate(self_profile_data: Vec<u8>, opt: Opt) -> anyhow::Result<Vec<u8>>
}
// add crate name for the process_id
let index_of_crate_name = data
.metadata
.metadata()
.cmd
.find(" --crate-name ")
.map(|index| index + 14);
if let Some(index) = index_of_crate_name {
let (_, last) = data.metadata.cmd.split_at(index);
let (_, last) = data.metadata().cmd.split_at(index);
let (crate_name, _) = last.split_at(last.find(" ").unwrap_or(last.len()));

let process_name = json!({
Expand All @@ -176,7 +189,7 @@ pub fn generate(self_profile_data: Vec<u8>, opt: Opt) -> anyhow::Result<Vec<u8>>
"ts" : 0,
"tid" : 0,
"cat" : "",
"pid" : data.metadata.process_id,
"pid" : data.metadata().process_id,
"args": {
"name" : crate_name
}
Expand All @@ -190,9 +203,9 @@ pub fn generate(self_profile_data: Vec<u8>, opt: Opt) -> anyhow::Result<Vec<u8>>
"ts" : 0,
"tid" : 0,
"cat" : "",
"pid" : data.metadata.process_id,
"pid" : data.metadata().process_id,
"args": {
"sort_index" : data.metadata.start_time.duration_since(UNIX_EPOCH).unwrap().as_micros() as u64
"sort_index" : data.metadata().start_time.duration_since(UNIX_EPOCH).unwrap().as_micros() as u64
}
});
seq.serialize_element(&process_name)?;
Expand Down
2 changes: 1 addition & 1 deletion site/src/self_profile/flamegraph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use inferno::flamegraph::{from_lines, Options as FlamegraphOptions};
pub struct Opt {}

pub fn generate(title: &str, self_profile_data: Vec<u8>, _: Opt) -> anyhow::Result<Vec<u8>> {
let profiling_data = ProfilingData::from_paged_buffer(self_profile_data)
let profiling_data = ProfilingData::from_paged_buffer(self_profile_data, None)
.map_err(|e| anyhow::format_err!("{:?}", e))?;

let recorded_stacks = collapse_stacks(&profiling_data)
Expand Down

0 comments on commit d2570f3

Please sign in to comment.