diff --git a/ui/src/frontend/base_slice_track.ts b/ui/src/frontend/base_slice_track.ts
index 0ca6c01b54..45cbc9ec09 100644
--- a/ui/src/frontend/base_slice_track.ts
+++ b/ui/src/frontend/base_slice_track.ts
@@ -35,6 +35,7 @@ import {AsyncDisposableStack} from '../base/disposable_stack';
 import {TrackMouseEvent, TrackRenderContext} from '../public/track';
 import {Point2D, VerticalBounds} from '../base/geom';
 import {Trace} from '../public/trace';
+import {Ds} from '../trace_processor/dataset';
 
 // The common class that underpins all tracks drawing slices.
 
@@ -972,6 +973,17 @@ export abstract class BaseSliceTrack<
     });
     return {ts: Time.fromRaw(row.ts), dur: Duration.fromRaw(row.dur)};
   }
+
+  getDataset(): Ds.Dataset | undefined {
+    return {
+      src: this.getSqlSource(),
+      schema: {
+        id: NUM,
+        ts: LONG,
+        dur: LONG,
+      },
+    };
+  }
 }
 
 // This is the argument passed to onSliceOver(args).
diff --git a/ui/src/frontend/named_slice_track.ts b/ui/src/frontend/named_slice_track.ts
index ed9b5f0f34..edb7ec900e 100644
--- a/ui/src/frontend/named_slice_track.ts
+++ b/ui/src/frontend/named_slice_track.ts
@@ -16,7 +16,7 @@ import {getColorForSlice} from '../public/lib/colorizer';
 import {TrackEventDetailsPanel} from '../public/details_panel';
 import {TrackEventSelection} from '../public/selection';
 import {Slice} from '../public/track';
-import {STR_NULL} from '../trace_processor/query_result';
+import {LONG, NUM, STR, STR_NULL} from '../trace_processor/query_result';
 import {
   BASE_ROW,
   BaseSliceTrack,
@@ -30,6 +30,7 @@ import {NewTrackArgs} from './track';
 import {renderDuration} from './widgets/duration';
 import {TraceImpl} from '../core/trace_impl';
 import {assertIsInstance} from '../base/logging';
+import {Ds} from '../trace_processor/dataset';
 
 export const NAMED_ROW = {
   // Base columns (tsq, ts, dur, id, depth).
@@ -80,4 +81,16 @@ export abstract class NamedSliceTrack<
     // because this class is exposed to plugins (which see only Trace).
     return new ThreadSliceDetailsPanel(assertIsInstance(this.trace, TraceImpl));
   }
+
+  override getDataset(): Ds.Dataset | undefined {
+    return {
+      src: this.getSqlSource(),
+      schema: {
+        id: NUM,
+        name: STR,
+        ts: LONG,
+        dur: LONG,
+      },
+    };
+  }
 }
diff --git a/ui/src/plugins/dev.perfetto.AsyncSlices/async_slice_track.ts b/ui/src/plugins/dev.perfetto.AsyncSlices/async_slice_track.ts
index 1bb31a5838..4c8898ba0c 100644
--- a/ui/src/plugins/dev.perfetto.AsyncSlices/async_slice_track.ts
+++ b/ui/src/plugins/dev.perfetto.AsyncSlices/async_slice_track.ts
@@ -14,12 +14,19 @@
 
 import {BigintMath as BIMath} from '../../base/bigint_math';
 import {clamp} from '../../base/math_utils';
+import {Ds} from '../../trace_processor/dataset';
 import {NAMED_ROW, NamedSliceTrack} from '../../frontend/named_slice_track';
 import {SLICE_LAYOUT_FIT_CONTENT_DEFAULTS} from '../../frontend/slice_layout';
 import {NewTrackArgs} from '../../frontend/track';
 import {TrackEventDetails} from '../../public/selection';
 import {Slice} from '../../public/track';
-import {LONG_NULL} from '../../trace_processor/query_result';
+import {
+  LONG,
+  LONG_NULL,
+  NUM,
+  NUM_NULL,
+  STR,
+} from '../../trace_processor/query_result';
 
 export const THREAD_SLICE_ROW = {
   // Base columns (tsq, ts, dur, id, depth).
@@ -104,4 +111,21 @@ export class AsyncSliceTrack extends NamedSliceTrack {
       tableName: 'slice',
     };
   }
+
+  override getDataset(): Ds.Dataset {
+    return {
+      src: `slice`,
+      filter: {
+        col: 'track_id',
+        in: this.trackIds,
+      },
+      schema: {
+        id: NUM,
+        name: STR,
+        ts: LONG,
+        dur: LONG,
+        parent_id: NUM_NULL,
+      },
+    };
+  }
 }
diff --git a/ui/src/plugins/dev.perfetto.AsyncSlices/slice_selection_aggregator.ts b/ui/src/plugins/dev.perfetto.AsyncSlices/slice_selection_aggregator.ts
index 55f7c9508c..0364152e0a 100644
--- a/ui/src/plugins/dev.perfetto.AsyncSlices/slice_selection_aggregator.ts
+++ b/ui/src/plugins/dev.perfetto.AsyncSlices/slice_selection_aggregator.ts
@@ -16,16 +16,27 @@ import {ColumnDef, Sorting} from '../../public/aggregation';
 import {AreaSelection} from '../../public/selection';
 import {Engine} from '../../trace_processor/engine';
 import {AreaSelectionAggregator} from '../../public/selection';
-import {SLICE_TRACK_KIND} from '../../public/track_kinds';
+import {Ds} from '../../trace_processor/dataset';
+import {LONG, NUM, STR} from '../../trace_processor/query_result';
 
 export class SliceSelectionAggregator implements AreaSelectionAggregator {
   readonly id = 'slice_aggregation';
 
   async createAggregateView(engine: Engine, area: AreaSelection) {
-    const selectedTrackKeys = getSelectedTrackSqlIds(area);
-
-    if (selectedTrackKeys.length === 0) return false;
-
+    const desiredSchema = {
+      id: NUM,
+      name: STR,
+      ts: LONG,
+      dur: LONG,
+    };
+    const validDatasets = area.tracks
+      .map((t) => t.track.getDataset?.())
+      .filter((d) => d !== undefined)
+      .filter((d) => Ds.doesImplement(d, desiredSchema));
+    if (validDatasets.length === 0) {
+      return false;
+    }
+    const optimizedDataset = Ds.optimize({union: validDatasets});
     await engine.query(`
       create or replace perfetto table ${this.id} as
       select
@@ -33,12 +44,13 @@ export class SliceSelectionAggregator implements AreaSelectionAggregator {
         sum(dur) AS total_dur,
         sum(dur)/count() as avg_dur,
         count() as occurrences
-        from slices
-      where track_id in (${selectedTrackKeys})
-        and ts + dur > ${area.start}
+      from (${Ds.query(optimizedDataset)})
+      where
+        ts + dur > ${area.start}
         and ts < ${area.end}
       group by name
     `);
+
     return true;
   }
 
@@ -83,14 +95,3 @@ export class SliceSelectionAggregator implements AreaSelectionAggregator {
     ];
   }
 }
-
-function getSelectedTrackSqlIds(area: AreaSelection): number[] {
-  const selectedTrackKeys: number[] = [];
-  for (const trackInfo of area.tracks) {
-    if (trackInfo?.tags?.kind === SLICE_TRACK_KIND) {
-      trackInfo.tags.trackIds &&
-        selectedTrackKeys.push(...trackInfo.tags.trackIds);
-    }
-  }
-  return selectedTrackKeys;
-}
diff --git a/ui/src/plugins/dev.perfetto.Frames/actual_frames_track.ts b/ui/src/plugins/dev.perfetto.Frames/actual_frames_track.ts
index d75dd77e10..2c19110a76 100644
--- a/ui/src/plugins/dev.perfetto.Frames/actual_frames_track.ts
+++ b/ui/src/plugins/dev.perfetto.Frames/actual_frames_track.ts
@@ -102,6 +102,15 @@ export class ActualFramesTrack extends NamedSliceTrack {
       tableName: 'slice',
     };
   }
+
+  // Override dataset from base class NamedSliceTrack as we don't want these
+  // tracks to participate in generic area selection aggregation (frames tracks
+  // have their own dedicated aggregation panel).
+  // TODO(stevegolton): In future CLs this will be handled with aggregation keys
+  // instead, as this track will have to expose a dataset anyway.
+  override getDataset() {
+    return undefined;
+  }
 }
 
 function getColorSchemeForJank(
diff --git a/ui/src/plugins/dev.perfetto.Ftrace/ftrace_track.ts b/ui/src/plugins/dev.perfetto.Ftrace/ftrace_track.ts
index 78c59c8d44..d056a8db22 100644
--- a/ui/src/plugins/dev.perfetto.Ftrace/ftrace_track.ts
+++ b/ui/src/plugins/dev.perfetto.Ftrace/ftrace_track.ts
@@ -20,10 +20,11 @@ import {checkerboardExcept} from '../../frontend/checkerboard';
 import {TrackData} from '../../common/track_data';
 import {Engine} from '../../trace_processor/engine';
 import {Track} from '../../public/track';
-import {LONG, STR} from '../../trace_processor/query_result';
+import {LONG, NUM, STR} from '../../trace_processor/query_result';
 import {FtraceFilter} from './common';
 import {Monitor} from '../../base/monitor';
 import {TrackRenderContext} from '../../public/track';
+import {Ds} from '../../trace_processor/dataset';
 
 const MARGIN = 2;
 const RECT_HEIGHT = 18;
@@ -56,6 +57,25 @@ export class FtraceRawTrack implements Track {
     this.monitor = new Monitor([() => store.state]);
   }
 
+  getDataset(): Ds.Dataset {
+    return {
+      // 'ftrace_event' doesn't have a dur column, but injecting dur=0 (all
+      // ftrace events are effectively 'instant') allows us to participate in
+      // generic slice aggregations. cpu is also selected so the filter below
+      // has a column to apply to.
+      src: 'select id, ts, 0 as dur, name, cpu from ftrace_event',
+      schema: {
+        id: NUM,
+        name: STR,
+        ts: LONG,
+        dur: LONG,
+      },
+      filter: {
+        col: 'cpu',
+        eq: this.cpu,
+      },
+    };
+  }
+
   async onUpdate({
     visibleWindow,
     resolution,
diff --git a/ui/src/public/track.ts b/ui/src/public/track.ts
index 94ac9e7d17..93d493e9d4 100644
--- a/ui/src/public/track.ts
+++ b/ui/src/public/track.ts
@@ -20,6 +20,7 @@ import {HighPrecisionTimeSpan} from '../base/high_precision_time_span';
 import {ColorScheme} from './color_scheme';
 import {TrackEventDetailsPanel} from './details_panel';
 import {TrackEventDetails, TrackEventSelection} from './selection';
+import {Ds} from '../trace_processor/dataset';
 
 export interface TrackManager {
   /**
@@ -174,6 +175,12 @@ export interface Track {
   onMouseClick?(event: TrackMouseEvent): boolean;
   onMouseOut?(): void;
 
+  /**
+   * Optional: Returns a dataset that represents the events displayed on this
+   * track.
+   */
+  getDataset?(): Ds.Dataset | undefined;
+
   /**
    * Optional: Get details of a track event given by eventId on this track.
    */
diff --git a/ui/src/trace_processor/dataset.ts b/ui/src/trace_processor/dataset.ts
new file mode 100644
index 0000000000..686357073a
--- /dev/null
+++ b/ui/src/trace_processor/dataset.ts
@@ -0,0 +1,258 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {assertUnreachable} from '../base/logging';
+import {getOrCreate} from '../base/utils';
+import {ColumnType, SqlValue} from './query_result';
+
+export namespace Ds {
+  export type Dataset = UnionDataset | SourceDataset;
+  export type Schema = Record<string, ColumnType>;
+
+  /**
+   * Defines a dataset with a source SQL select statement or table name, a
+   * schema describing the columns, and an optional filter.
+   */
+  export interface SourceDataset {
+    readonly src: string;
+    readonly schema: Schema;
+    readonly filter?: EqFilter | InFilter;
+  }
+
+  /**
+   * A dataset that represents the union of multiple datasets.
+   */
+  export interface UnionDataset {
+    readonly union: ReadonlyArray<Dataset>;
+  }
+
+  /**
+   * Generic filter type.
+   */
+  export type Filter = EqFilter | InFilter;
+
+  /**
+   * A filter used to express that a column must equal a value.
+   */
+  export interface EqFilter {
+    readonly col: string;
+    readonly eq: SqlValue;
+  }
+
+  /**
+   * A filter used to express that a column must be one of a set of values.
+   */
+  export interface InFilter {
+    readonly col: string;
+    readonly in: ReadonlyArray<SqlValue>;
+  }
+
+  /**
+   * Returns true if the dataset implements a given schema.
+   *
+   * Note: `implements` is a reserved keyword in TS, so we can't call this
+   * function `implements`.
+   *
+   * @param dataset - The dataset to test.
+   * @param testSchema - The schema to test against.
+   */
+  export function doesImplement(dataset: Dataset, testSchema: Schema): boolean {
+    const datasetSchema = schema(dataset);
+    return Object.entries(testSchema).every(([name, kind]) => {
+      return name in datasetSchema && datasetSchema[name] === kind;
+    });
+  }
+
+  /**
+   * This function optimizes a dataset into the smallest possible expression,
+   * for example by combining members of a union that have the same src and
+   * similar filters into a single source dataset.
+   *
+   * For example, the following union dataset...
+   *
+   * ```
+   * {
+   *   union: [
+   *     {
+   *       src: 'foo',
+   *       schema: {
+   *         'a': NUM,
+   *         'b': NUM,
+   *       },
+   *       filter: {col: 'a', eq: 1},
+   *     },
+   *     {
+   *       src: 'foo',
+   *       schema: {
+   *         'a': NUM,
+   *         'b': NUM,
+   *       },
+   *       filter: {col: 'a', eq: 2},
+   *     },
+   *   ]
+   * }
+   * ```
+   *
+   * ...will be combined into a single set...
+   *
+   * ```
+   * {
+   *   src: 'foo',
+   *   schema: {
+   *     'a': NUM,
+   *     'b': NUM,
+   *   },
+   *   filter: {col: 'a', in: [1, 2]},
+   * },
+   * ```
+   *
+   * @param dataset - The dataset to optimize.
+   */
+  export function optimize(dataset: Dataset): Dataset {
+    if ('src' in dataset) {
+      // No optimization possible for individual datasets
+      return dataset;
+    } else if ('union' in dataset) {
+      // Recursively optimize each dataset of this union
+      const optimizedUnion = dataset.union.map(optimize);
+
+      // Find all source datasets and combine them based on src
+      const combinedSrcSets = new Map<string, SourceDataset[]>();
+      const otherDatasets: Dataset[] = [];
+      for (const e of optimizedUnion) {
+        if ('src' in e) {
+          const set = getOrCreate(combinedSrcSets, e.src, () => []);
+          set.push(e);
+        } else {
+          otherDatasets.push(e);
+        }
+      }
+
+      const mergedSrcSets = Array.from(combinedSrcSets.values()).map(
+        (srcGroup) => {
+          if (srcGroup.length === 1) return srcGroup[0];
+
+          // Combine schema across all members in the union
+          const combinedSchema = srcGroup.reduce((acc, e) => {
+            Object.assign(acc, e.schema);
+            return acc;
+          }, {} as Schema);
+
+          // Merge filters for the same src
+          const inFilters: InFilter[] = [];
+          for (const {filter} of srcGroup) {
+            if (filter) {
+              if ('eq' in filter) {
+                inFilters.push({col: filter.col, in: [filter.eq]});
+              } else {
+                inFilters.push(filter);
+              }
+            }
+          }
+
+          const mergedFilter = mergeFilters(inFilters);
+          return {
+            src: srcGroup[0].src,
+            schema: combinedSchema,
+            filter: mergedFilter,
+          };
+        },
+      );
+
+      const finalUnion = [...mergedSrcSets, ...otherDatasets];
+
+      if (finalUnion.length === 1) {
+        return finalUnion[0];
+      } else {
+        return {union: finalUnion};
+      }
+    } else {
+      assertUnreachable(dataset);
+    }
+  }
+
+  function mergeFilters(filters: InFilter[]): InFilter | undefined {
+    if (filters.length === 0) return undefined;
+    const col = filters[0].col;
+    const values = new Set(filters.flatMap((filter) => filter.in));
+    return {col, in: Array.from(values)};
+  }
+
+  /**
+   * Get the schema of a dataset.
+   *
+   * @param dataset - The dataset to get the schema of.
+   */
+  export function schema(dataset: Dataset): Schema {
+    if ('src' in dataset) {
+      return dataset.schema;
+    } else if ('union' in dataset) {
+      // Find the minimal set of columns that are supported by all datasets of
+      // the union
+      let sch: Record<string, ColumnType> | undefined = undefined;
+      dataset.union.forEach((e) => {
+        const eSchema = schema(e);
+        if (sch === undefined) {
+          // First time just use this one
+          sch = eSchema;
+        } else {
+          const newSch: Record<string, ColumnType> = {};
+          for (const [key, kind] of Object.entries(sch)) {
+            if (key in eSchema && eSchema[key] === kind) {
+              newSch[key] = kind;
+            }
+          }
+          sch = newSch;
+        }
+      });
+      return sch ?? {};
+    } else {
+      assertUnreachable(dataset);
+    }
+  }
+
+  /**
+   * Produce a query for this dataset.
+   *
+   * @param dataset - The dataset to get the query for.
+   * @param sch - The schema to use for extracting columns - if undefined, the
+   * most specific possible schema is evaluated from the dataset first and used
+   * instead.
+   */
+  export function query(dataset: Dataset, sch?: Schema): string {
+    function filterToQuery(filter: Filter) {
+      if ('eq' in filter) {
+        return `where ${filter.col} = ${filter.eq}`;
+      } else if ('in' in filter) {
+        return `where ${filter.col} in (${filter.in.join(',')})`;
+      } else {
+        assertUnreachable(filter);
+      }
+    }
+
+    sch = sch ?? schema(dataset);
+    if ('src' in dataset) {
+      const whereClause = dataset.filter ?
+        filterToQuery(dataset.filter) : '';
+      const cols = Object.keys(sch);
+      return `select ${cols.join(', ')} from (${dataset.src}) ${whereClause}`.trim();
+    } else if ('union' in dataset) {
+      return dataset.union
+        .map((dataset) => query(dataset, sch))
+        .join(' union all ');
+    } else {
+      assertUnreachable(dataset);
+    }
+  }
+}
diff --git a/ui/src/trace_processor/dataset_unittest.ts b/ui/src/trace_processor/dataset_unittest.ts
new file mode 100644
index 0000000000..e354b54e85
--- /dev/null
+++ b/ui/src/trace_processor/dataset_unittest.ts
@@ -0,0 +1,242 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import {Ds} from './dataset';
+import {LONG, NUM, STR} from './query_result';
+
+test('get query for simple dataset', () => {
+  const dataset: Ds.Dataset = {
+    src: 'slice',
+    schema: {id: NUM},
+  };
+
+  expect(Ds.query(dataset)).toEqual('select id from (slice)');
+});
+
+test("get query for simple dataset with 'eq' filter", () => {
+  const dataset: Ds.Dataset = {
+    src: 'slice',
+    schema: {id: NUM},
+    filter: {
+      col: 'id',
+      eq: 123,
+    },
+  };
+
+  expect(Ds.query(dataset)).toEqual('select id from (slice) where id = 123');
+});
+
+test("get query for simple dataset with an 'in' filter", () => {
+  const dataset: Ds.Dataset = {
+    src: 'slice',
+    schema: {id: NUM},
+    filter: {
+      col: 'id',
+      in: [123, 456],
+    },
+  };
+
+  expect(Ds.query(dataset)).toEqual(
+    'select id from (slice) where id in (123,456)',
+  );
+});
+
+test('get query for union dataset', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {id: NUM},
+        filter: {
+          col: 'id',
+          eq: 123,
+        },
+      },
+      {
+        src: 'slice',
+        schema: {id: NUM},
+        filter: {
+          col: 'id',
+          eq: 456,
+        },
+      },
+    ],
+  };
+
+  expect(Ds.query(dataset)).toEqual(
+    'select id from (slice) where id = 123 union all select id from (slice) where id = 456',
+  );
+});
+
+test('doesImplement', () => {
+  const dataset = {
+    src: 'slice',
+    schema: {id: NUM, ts: LONG},
+  };
+
+  expect(Ds.doesImplement(dataset, {id: NUM})).toBe(true);
+  expect(Ds.doesImplement(dataset, {id: NUM, ts: LONG})).toBe(true);
+  expect(Ds.doesImplement(dataset, {id: NUM, ts: LONG, name: STR})).toBe(false);
+  expect(Ds.doesImplement(dataset, {id: LONG})).toBe(false);
+});
+
+test('find the schema of a simple dataset', () => {
+  const dataset: Ds.Dataset = {
+    src: 'slice',
+    schema: {id: NUM, ts: LONG},
+  };
+
+  expect(Ds.schema(dataset)).toMatchObject({id: NUM, ts: LONG});
+});
+
+test('find the schema of a union where source sets differ in their names', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {foo: NUM},
+      },
+      {
+        src: 'slice',
+        schema: {bar: NUM},
+      },
+    ],
+  };
+
+  expect(Ds.schema(dataset)).toMatchObject({});
+});
+
+test('find the schema of a union with differing source sets', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {foo: NUM},
+      },
+      {
+        src: 'slice',
+        schema: {foo: LONG},
+      },
+    ],
+  };
+
+  expect(Ds.schema(dataset)).toMatchObject({});
+});
+
+test('find the schema of a union with one column in common', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {foo: NUM, bar: NUM},
+      },
+      {
+        src: 'slice',
+        schema: {foo: NUM, baz: NUM},
+      },
+    ],
+  };
+
+  expect(Ds.schema(dataset)).toMatchObject({foo: NUM});
+});
+
+test('optimize a union dataset', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {},
+        filter: {
+          col: 'track_id',
+          eq: 123,
+        },
+      },
+      {
+        src: 'slice',
+        schema: {},
+        filter: {
+          col: 'track_id',
+          eq: 456,
+        },
+      },
+    ],
+  };
+
+  expect(Ds.optimize(dataset)).toEqual({
+    src: 'slice',
+    schema: {},
+    filter: {
+      col: 'track_id',
+      in: [123, 456],
+    },
+  });
+});
+
+test('optimize a union dataset with different types of filters', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {},
+        filter: {
+          col: 'track_id',
+          eq: 123,
+        },
+      },
+      {
+        src: 'slice',
+        schema: {},
+        filter: {
+          col: 'track_id',
+          in: [456, 789],
+        },
+      },
+    ],
+  };
+
+  expect(Ds.optimize(dataset)).toEqual({
+    src: 'slice',
+    schema: {},
+    filter: {
+      col: 'track_id',
+      in: [123, 456, 789],
+    },
+  });
+});
+
+test('optimize a union dataset with different schemas', () => {
+  const dataset: Ds.Dataset = {
+    union: [
+      {
+        src: 'slice',
+        schema: {foo: NUM},
+      },
+      {
+        src: 'slice',
+        schema: {bar: NUM},
+      },
+    ],
+  };
+
+  expect(Ds.optimize(dataset)).toEqual({
+    src: 'slice',
+    // The resultant schema is the combination of the union's members' schemas:
+    // as we know the source is the same, we know we can get all of the 'seen'
+    // columns from the source.
+    schema: {
+      foo: NUM,
+      bar: NUM,
+    },
+  });
+});
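
A minimal usage sketch of the `Ds` helpers introduced by this patch (illustrative only, not part of the diff): two hypothetical per-track datasets over `slice` are checked against the schema an aggregation needs, combined into a union, collapsed by `Ds.optimize` into a single filtered select, and rendered to SQL with `Ds.query`. The track filters and the relative import paths are assumptions made for the example.

```ts
// Assumes this snippet sits next to dataset.ts (like the unit test above) so
// the relative imports resolve.
import {Ds} from './dataset';
import {LONG, NUM, STR} from './query_result';

// Two per-track datasets over the same underlying table, shaped like the
// values a track's getDataset() could return.
const trackA: Ds.Dataset = {
  src: 'slice',
  schema: {id: NUM, name: STR, ts: LONG, dur: LONG},
  filter: {col: 'track_id', eq: 1},
};
const trackB: Ds.Dataset = {
  src: 'slice',
  schema: {id: NUM, name: STR, ts: LONG, dur: LONG},
  filter: {col: 'track_id', in: [2, 3]},
};

// Keep only datasets that expose the columns the aggregation needs, then
// collapse the union into a single filtered select over 'slice'.
const desired: Ds.Schema = {id: NUM, name: STR, ts: LONG, dur: LONG};
const usable = [trackA, trackB].filter((d) => Ds.doesImplement(d, desired));
const optimized = Ds.optimize({union: usable});

// Logs: select id, name, ts, dur from (slice) where track_id in (1,2,3)
console.log(Ds.query(optimized));
```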