From 6bdcdf2c33f6d4cabf6d65adca6f18dd243cae0f Mon Sep 17 00:00:00 2001 From: Yini Xu <34967660+YiniXu9506@users.noreply.github.com> Date: Wed, 22 Dec 2021 15:01:47 +0800 Subject: [PATCH] chore: support multiple profiling types (#1095) --- pkg/apiserver/profiling/model.go | 102 +++++--- pkg/apiserver/profiling/pprof.go | 43 +++- pkg/apiserver/profiling/profile.go | 21 +- pkg/apiserver/profiling/router.go | 10 + pkg/apiserver/profiling/service.go | 44 +++- .../apps/InstanceProfiling/pages/Detail.tsx | 240 ++++++++++-------- ui/lib/apps/InstanceProfiling/pages/List.tsx | 40 +++ .../InstanceProfiling/translations/en.yaml | 10 +- .../InstanceProfiling/translations/zh.yaml | 10 +- ui/lib/components/ActionsButton/index.tsx | 63 +++++ ui/lib/components/index.ts | 2 + 11 files changed, 414 insertions(+), 171 deletions(-) create mode 100644 ui/lib/components/ActionsButton/index.tsx diff --git a/pkg/apiserver/profiling/model.go b/pkg/apiserver/profiling/model.go index 48c2acfbdb..f860bae434 100644 --- a/pkg/apiserver/profiling/model.go +++ b/pkg/apiserver/profiling/model.go @@ -4,9 +4,13 @@ package profiling import ( "context" + "database/sql/driver" + "encoding/json" "fmt" "time" + "github.com/joomcode/errorx" + "github.com/pingcap/tidb-dashboard/pkg/apiserver/model" "github.com/pingcap/tidb-dashboard/pkg/dbstore" ) @@ -19,22 +23,55 @@ const ( TaskStateError TaskState = iota TaskStateRunning TaskStateFinish - TaskPartialFinish + TaskStatePartialFinish // Only valid for task group + TaskStateSkipped ) type TaskRawDataType string -const RawDataTypeProtobuf TaskRawDataType = "protobuf" +const ( + RawDataTypeProtobuf TaskRawDataType = "protobuf" + RawDataTypeText TaskRawDataType = "text" +) + +type ( + TaskProfilingType string + TaskProfilingTypeList []TaskProfilingType +) + +func (r *TaskProfilingTypeList) Scan(src interface{}) error { + return json.Unmarshal([]byte(src.(string)), r) +} + +func (r TaskProfilingTypeList) Value() (driver.Value, error) { + val, err := json.Marshal(r) + return string(val), err +} + +const ( + ProfilingTypeCPU TaskProfilingType = "cpu" + ProfilingTypeHeap TaskProfilingType = "heap" + ProfilingTypeGoroutine TaskProfilingType = "goroutine" + ProfilingTypeMutex TaskProfilingType = "mutex" +) + +var profilingTypeMap = map[TaskProfilingType]struct{}{ + ProfilingTypeCPU: {}, + ProfilingTypeHeap: {}, + ProfilingTypeGoroutine: {}, + ProfilingTypeMutex: {}, +} type TaskModel struct { - ID uint `json:"id" gorm:"primary_key"` - TaskGroupID uint `json:"task_group_id" gorm:"index"` - State TaskState `json:"state" gorm:"index"` - Target model.RequestTargetNode `json:"target" gorm:"embedded;embedded_prefix:target_"` - FilePath string `json:"-" gorm:"type:text"` - Error string `json:"error" gorm:"type:text"` - StartedAt int64 `json:"started_at"` // The start running time, reset when retry. Used to estimate approximate profiling progress. - RawDataType TaskRawDataType `json:"raw_data_type"` + ID uint `json:"id" gorm:"primary_key"` + TaskGroupID uint `json:"task_group_id" gorm:"index"` + State TaskState `json:"state" gorm:"index"` + Target model.RequestTargetNode `json:"target" gorm:"embedded;embedded_prefix:target_"` + FilePath string `json:"-" gorm:"type:text"` + Error string `json:"error" gorm:"type:text"` + StartedAt int64 `json:"started_at"` // The start running time, reset when retry. Used to estimate approximate profiling progress. 
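+	// RawDataType records the format the stored profile result uses (protobuf or text).
+	// ProfilingType records which profiler produced the result (cpu, heap, goroutine or mutex).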
+ RawDataType TaskRawDataType `json:"raw_data_type" gorm:"raw_data_type"` + ProfilingType TaskProfilingType `json:"profiling_type"` } func (TaskModel) TableName() string { @@ -42,11 +79,12 @@ func (TaskModel) TableName() string { } type TaskGroupModel struct { - ID uint `json:"id" gorm:"primary_key"` - State TaskState `json:"state" gorm:"index"` - ProfileDurationSecs uint `json:"profile_duration_secs"` - TargetStats model.RequestTargetStatistics `json:"target_stats" gorm:"embedded;embedded_prefix:target_stats_"` - StartedAt int64 `json:"started_at"` + ID uint `json:"id" gorm:"primary_key"` + State TaskState `json:"state" gorm:"index"` + ProfileDurationSecs uint `json:"profile_duration_secs"` + TargetStats model.RequestTargetStatistics `json:"target_stats" gorm:"embedded;embedded_prefix:target_stats_"` + StartedAt int64 `json:"started_at"` + RequstedProfilingTypes TaskProfilingTypeList `json:"requsted_profiling_types"` } func (TaskGroupModel) TableName() string { @@ -67,14 +105,15 @@ type Task struct { } // NewTask creates a new profiling task. -func NewTask(ctx context.Context, taskGroup *TaskGroup, target model.RequestTargetNode, fts *fetchers) *Task { +func NewTask(ctx context.Context, taskGroup *TaskGroup, target model.RequestTargetNode, fts *fetchers, profilingType TaskProfilingType) *Task { ctx, cancel := context.WithCancel(ctx) return &Task{ TaskModel: &TaskModel{ - TaskGroupID: taskGroup.ID, - State: TaskStateRunning, - Target: target, - StartedAt: time.Now().Unix(), + TaskGroupID: taskGroup.ID, + State: TaskStateRunning, + Target: target, + StartedAt: time.Now().Unix(), + ProfilingType: profilingType, }, ctx: ctx, cancel: cancel, @@ -84,11 +123,15 @@ func NewTask(ctx context.Context, taskGroup *TaskGroup, target model.RequestTarg } func (t *Task) run() { - fileNameWithoutExt := fmt.Sprintf("profiling_%d_%d_%s", t.TaskGroupID, t.ID, t.Target.FileName()) - protoFilePath, rawDataType, err := profileAndWritePprof(t.ctx, t.fetchers, &t.Target, fileNameWithoutExt, t.taskGroup.ProfileDurationSecs) + fileNameWithoutExt := fmt.Sprintf("profiling_%d_%d_%s_%s", t.TaskGroupID, t.ID, t.ProfilingType, t.Target.FileName()) + protoFilePath, rawDataType, err := profileAndWritePprof(t.ctx, t.fetchers, &t.Target, fileNameWithoutExt, t.taskGroup.ProfileDurationSecs, t.ProfilingType) if err != nil { - t.Error = err.Error() - t.State = TaskStateError + if errorx.IsOfType(err, ErrUnsupportedProfilingType) { + t.State = TaskStateSkipped + } else { + t.Error = err.Error() + t.State = TaskStateError + } t.taskGroup.db.Save(t.TaskModel) return } @@ -109,13 +152,14 @@ type TaskGroup struct { } // NewTaskGroup create a new profiling task group. 
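+// The requested profiling types are persisted on the group model; startGroup creates one task per target and requested type.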
-func NewTaskGroup(db *dbstore.DB, profileDurationSecs uint, stats model.RequestTargetStatistics) *TaskGroup {
+func NewTaskGroup(db *dbstore.DB, profileDurationSecs uint, stats model.RequestTargetStatistics, requestedProfilingTypes TaskProfilingTypeList) *TaskGroup {
 	return &TaskGroup{
 		TaskGroupModel: &TaskGroupModel{
-			State:               TaskStateRunning,
-			ProfileDurationSecs: profileDurationSecs,
-			TargetStats:         stats,
-			StartedAt:           time.Now().Unix(),
+			State:                  TaskStateRunning,
+			ProfileDurationSecs:    profileDurationSecs,
+			TargetStats:            stats,
+			StartedAt:              time.Now().Unix(),
+			RequstedProfilingTypes: requestedProfilingTypes,
 		},
 		db: db,
 	}
diff --git a/pkg/apiserver/profiling/pprof.go b/pkg/apiserver/profiling/pprof.go
index 9690d6d260..df061cfcb5 100644
--- a/pkg/apiserver/profiling/pprof.go
+++ b/pkg/apiserver/profiling/pprof.go
@@ -14,15 +14,16 @@ type pprofOptions struct {
 	duration           uint
 	fileNameWithoutExt string
 
-	target  *model.RequestTargetNode
-	fetcher *profileFetcher
+	target        *model.RequestTargetNode
+	fetcher       *profileFetcher
+	profilingType TaskProfilingType
 }
 
 func fetchPprof(op *pprofOptions) (string, TaskRawDataType, error) {
 	fetcher := &fetcher{profileFetcher: op.fetcher, target: op.target}
-	tmpPath, rawDataType, err := fetcher.FetchAndWriteToFile(op.duration, op.fileNameWithoutExt)
+	tmpPath, rawDataType, err := fetcher.FetchAndWriteToFile(op.duration, op.fileNameWithoutExt, op.profilingType)
 	if err != nil {
-		return "", "", fmt.Errorf("failed to fetch annd write to temp file: %v", err)
+		return "", "", fmt.Errorf("failed to fetch and write to temp file: %v", err)
 	}
 
 	return tmpPath, rawDataType, nil
@@ -33,8 +34,31 @@ type fetcher struct {
 	profileFetcher *profileFetcher
 }
 
-func (f *fetcher) FetchAndWriteToFile(duration uint, fileNameWithoutExt string) (string, TaskRawDataType, error) {
-	tmpfile, err := ioutil.TempFile("", fileNameWithoutExt+"*.proto")
+func (f *fetcher) FetchAndWriteToFile(duration uint, fileNameWithoutExt string, profilingType TaskProfilingType) (string, TaskRawDataType, error) {
+	var profilingRawDataType TaskRawDataType
+	var fileExtension string
+	secs := strconv.Itoa(int(duration))
+	var url string
+	switch profilingType {
+	case ProfilingTypeCPU:
+		url = "/debug/pprof/profile?seconds=" + secs
+		profilingRawDataType = RawDataTypeProtobuf
+		fileExtension = "*.proto"
+	case ProfilingTypeHeap:
+		url = "/debug/pprof/heap"
+		profilingRawDataType = RawDataTypeProtobuf
+		fileExtension = "*.proto"
+	case ProfilingTypeGoroutine:
+		url = "/debug/pprof/goroutine?debug=1"
+		profilingRawDataType = RawDataTypeText
+		fileExtension = "*.txt"
+	case ProfilingTypeMutex:
+		url = "/debug/pprof/mutex?debug=1"
+		profilingRawDataType = RawDataTypeText
+		fileExtension = "*.txt"
+	}
+
+	tmpfile, err := ioutil.TempFile("", fileNameWithoutExt+fileExtension)
 	if err != nil {
 		return "", "", fmt.Errorf("failed to create tmpfile to write profile: %v", err)
 	}
@@ -43,12 +67,9 @@ func (f *fetcher) FetchAndWriteToFile(duration uint, fileNameWithoutExt string)
 		_ = tmpfile.Close()
 	}()
 
-	secs := strconv.Itoa(int(duration))
-	url := "/debug/pprof/profile?seconds=" + secs
-
 	resp, err := (*f.profileFetcher).fetch(&fetchOptions{ip: f.target.IP, port: f.target.Port, path: url})
 	if err != nil {
-		return "", "", fmt.Errorf("failed to fetch profile with proto format: %v", err)
+		return "", "", fmt.Errorf("failed to fetch profile with %v format: %v", fileExtension, err)
 	}
 
 	_, err = tmpfile.Write(resp)
@@ -56,5 +77,5 @@ func (f *fetcher) FetchAndWriteToFile(duration uint, fileNameWithoutExt string)
 		return "",
"", fmt.Errorf("failed to write profile: %v", err) } - return tmpfile.Name(), RawDataTypeProtobuf, nil + return tmpfile.Name(), profilingRawDataType, nil } diff --git a/pkg/apiserver/profiling/profile.go b/pkg/apiserver/profiling/profile.go index 14de30e4ea..c933f8e957 100644 --- a/pkg/apiserver/profiling/profile.go +++ b/pkg/apiserver/profiling/profile.go @@ -4,22 +4,29 @@ package profiling import ( "context" - "fmt" "github.com/pingcap/tidb-dashboard/pkg/apiserver/model" ) -func profileAndWritePprof(ctx context.Context, fts *fetchers, target *model.RequestTargetNode, fileNameWithoutExt string, profileDurationSecs uint) (string, TaskRawDataType, error) { +func profileAndWritePprof(ctx context.Context, fts *fetchers, target *model.RequestTargetNode, fileNameWithoutExt string, profileDurationSecs uint, profilingType TaskProfilingType) (string, TaskRawDataType, error) { switch target.Kind { case model.NodeKindTiKV: - return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.tikv}) + // TiKV only supports CPU Profiling + if profilingType != ProfilingTypeCPU { + return "", "", ErrUnsupportedProfilingType.NewWithNoMessage() + } + return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.tikv, profilingType: profilingType}) case model.NodeKindTiFlash: - return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.tiflash}) + // TiFlash only supports CPU Profiling + if profilingType != ProfilingTypeCPU { + return "", "", ErrUnsupportedProfilingType.NewWithNoMessage() + } + return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.tiflash, profilingType: profilingType}) case model.NodeKindTiDB: - return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.tidb}) + return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.tidb, profilingType: profilingType}) case model.NodeKindPD: - return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.pd}) + return fetchPprof(&pprofOptions{duration: profileDurationSecs, fileNameWithoutExt: fileNameWithoutExt, target: target, fetcher: &fts.pd, profilingType: profilingType}) default: - return "", "", fmt.Errorf("unsupported target %s", target) + return "", "", ErrUnsupportedProfilingTarget.New(target.String()) } } diff --git a/pkg/apiserver/profiling/router.go b/pkg/apiserver/profiling/router.go index 3f6c6752fa..a93fb240f9 100644 --- a/pkg/apiserver/profiling/router.go +++ b/pkg/apiserver/profiling/router.go @@ -274,6 +274,7 @@ type ViewOutputType string const ( ViewOutputTypeProtobuf ViewOutputType = "protobuf" ViewOutputTypeGraph ViewOutputType = "graph" + ViewOutputTypeText ViewOutputType = "text" ) // @ID viewProfilingSingle @@ -332,6 +333,15 @@ func (s *Service) viewSingle(c *gin.Context) { _ = c.Error(rest.ErrBadRequest.New("Cannot output protobuf as %s", outputType)) return } + } else if task.RawDataType == RawDataTypeText { + switch outputType { + case string(ViewOutputTypeText): + contentType = "text/plain" + default: + // Will not handle converting text to other formats + _ = c.Error(rest.ErrBadRequest.New("Cannot output text as %s", 
outputType))
+			return
+		}
+	}
 	c.Data(http.StatusOK, contentType, content)
 }
diff --git a/pkg/apiserver/profiling/service.go b/pkg/apiserver/profiling/service.go
index 252819b59e..9f1f08b9d5 100644
--- a/pkg/apiserver/profiling/service.go
+++ b/pkg/apiserver/profiling/service.go
@@ -25,14 +25,17 @@ const (
 )
 
 var (
-	ErrNS             = errorx.NewNamespace("error.profiling")
-	ErrIgnoredRequest = ErrNS.NewType("ignored_request")
-	ErrTimeout        = ErrNS.NewType("timeout")
+	ErrNS                         = errorx.NewNamespace("error.api.profiling")
+	ErrIgnoredRequest             = ErrNS.NewType("ignored_request")
+	ErrTimeout                    = ErrNS.NewType("timeout")
+	ErrUnsupportedProfilingType   = ErrNS.NewType("unsupported_profiling_type")
+	ErrUnsupportedProfilingTarget = ErrNS.NewType("unsupported_profiling_target")
 )
 
 type StartRequest struct {
-	Targets      []model.RequestTargetNode `json:"targets"`
-	DurationSecs uint                      `json:"duration_secs"`
+	Targets                []model.RequestTargetNode `json:"targets"`
+	DurationSecs           uint                      `json:"duration_secs"`
+	RequstedProfilingTypes TaskProfilingTypeList     `json:"requsted_profiling_types"`
 }
 
 type StartRequestSession struct {
@@ -150,7 +153,7 @@ func (s *Service) exclusiveExecute(ctx context.Context, req *StartRequest) (*Tas
 }
 
 func (s *Service) startGroup(ctx context.Context, req *StartRequest) (*TaskGroup, error) {
-	taskGroup := NewTaskGroup(s.params.LocalStore, req.DurationSecs, model.NewRequestTargetStatisticsFromArray(&req.Targets))
+	taskGroup := NewTaskGroup(s.params.LocalStore, req.DurationSecs, model.NewRequestTargetStatisticsFromArray(&req.Targets), req.RequstedProfilingTypes)
 	if err := s.params.LocalStore.Create(taskGroup.TaskGroupModel).Error; err != nil {
 		log.Warn("failed to start task group", zap.Error(err))
 		return nil, err
@@ -158,10 +161,19 @@ func (s *Service) startGroup(ctx context.Context, req *StartRequest) (*TaskGroup
 
 	tasks := make([]*Task, 0, len(req.Targets))
 	for _, target := range req.Targets {
-		t := NewTask(ctx, taskGroup, target, s.fetchers)
-		s.params.LocalStore.Create(t.TaskModel)
-		s.tasks.Store(t.ID, t)
-		tasks = append(tasks, t)
+		profileTypeList := req.RequstedProfilingTypes
+		for _, profilingType := range profileTypeList {
+			// Reject profiling types that are not in profilingTypeMap, the set of supported types.
+ _, valid := profilingTypeMap[profilingType] + if !valid { + return nil, ErrUnsupportedProfilingType.NewWithNoMessage() + } + + t := NewTask(ctx, taskGroup, target, s.fetchers, profilingType) + s.params.LocalStore.Create(t.TaskModel) + s.tasks.Store(t.ID, t) + tasks = append(tasks, t) + } } s.wg.Add(1) @@ -177,16 +189,20 @@ func (s *Service) startGroup(ctx context.Context, req *StartRequest) (*TaskGroup }(i) } wg.Wait() + errorTasks := 0 finishedTasks := 0 for _, task := range tasks { - if task.State == TaskStateFinish { + if task.State == TaskStateError { + errorTasks++ + } else if task.State == TaskStateFinish { finishedTasks++ } } - if finishedTasks == 0 { + if errorTasks > 0 { taskGroup.State = TaskStateError - } else if finishedTasks < len(tasks) { - taskGroup.State = TaskPartialFinish + if finishedTasks > 0 { + taskGroup.State = TaskStatePartialFinish + } } else { taskGroup.State = TaskStateFinish } diff --git a/ui/lib/apps/InstanceProfiling/pages/Detail.tsx b/ui/lib/apps/InstanceProfiling/pages/Detail.tsx index a552528a7c..58298c4121 100644 --- a/ui/lib/apps/InstanceProfiling/pages/Detail.tsx +++ b/ui/lib/apps/InstanceProfiling/pages/Detail.tsx @@ -1,34 +1,49 @@ -import { Badge, Button, Progress, Menu, Dropdown } from 'antd' +import { Badge, Button, Progress, Tooltip } from 'antd' import React, { useCallback, useMemo } from 'react' import { useTranslation } from 'react-i18next' +import { usePersistFn } from 'ahooks' import { Link } from 'react-router-dom' import { ArrowLeftOutlined } from '@ant-design/icons' -import { usePersistFn } from 'ahooks' -import client from '@lib/client' -import { CardTable, DateTime, Head, Descriptions } from '@lib/components' +import client, { ProfilingTaskModel } from '@lib/client' +import { + CardTable, + DateTime, + Head, + Descriptions, + ActionsButton, +} from '@lib/components' import { useClientRequestWithPolling } from '@lib/utils/useClientRequest' +import publicPathPrefix from '@lib/utils/publicPathPrefix' import { InstanceKindName } from '@lib/utils/instanceTable' import useQueryParams from '@lib/utils/useQueryParams' -import { ScrollablePane } from 'office-ui-fabric-react' +import { IGroup } from 'office-ui-fabric-react/lib/DetailsList' +import { ScrollablePane } from 'office-ui-fabric-react/lib/ScrollablePane' -enum ViewOption { +enum ViewOptions { FlameGraph = 'flamegraph', Graph = 'graph', Download = 'download', + Text = 'text', } enum taskState { Error, Running, Success, + Skipped = 4, } enum RawDataType { Protobuf = 'protobuf', + Text = 'text', } -function mapData(data, t) { +interface IRow { + kind: string +} + +function mapData(data) { if (!data) { return data } @@ -50,23 +65,26 @@ function mapData(data, t) { // set profiling output options for previous generated SVG files and protobuf files. 
if (task.raw_data_type === RawDataType.Protobuf) { task.view_options = [ - ViewOption.FlameGraph, - ViewOption.Graph, - ViewOption.Download, + ViewOptions.FlameGraph, + ViewOptions.Graph, + ViewOptions.Download, ] + } else if (task.raw_data_type === RawDataType.Text) { + task.view_options = [ViewOptions.Text] } else if (task.raw_data_type === '') { switch (task.target.kind) { case 'tidb': case 'pd': - task.view_options = [ViewOption.Graph] + task.view_options = [ViewOptions.Graph] break case 'tikv': case 'tiflash': - task.view_options = [ViewOption.FlameGraph] + task.view_options = [ViewOptions.FlameGraph] break } } }) + return data } @@ -87,14 +105,63 @@ async function getActionToken( return token } -function ViewResultButton({ rec, t }) { - const openResult = usePersistFn(async (openAs: string) => { +interface IRecord extends ProfilingTaskModel { + view_options: ViewOptions[] +} + +export default function Page() { + const { t } = useTranslation() + const { id } = useQueryParams() + + const { + data: respData, + isLoading, + error, + } = useClientRequestWithPolling( + (reqConfig) => client.getInstance().getProfilingGroupDetail(id, reqConfig), + { + shouldPoll: (data) => !isFinished(data), + } + ) + + const data = useMemo(() => mapData(respData), [respData]) + + const profileDuration = + respData?.task_group_status?.profile_duration_secs || 0 + + const [tableData, groupData] = useMemo(() => { + const newRows: IRow[] = [] + const newGroups: IGroup[] = [] + let startIndex = 0 + const tasks = data?.tasks_status ?? [] + for (const instanceKind of ['pd', 'tidb', 'tikv', 'tiflash']) { + tasks.forEach((task) => { + if (task.target.kind === instanceKind) { + newRows.push({ + ...task, + kind: InstanceKindName[instanceKind], + }) + } + }) + + newGroups.push({ + key: InstanceKindName[instanceKind], + name: InstanceKindName[instanceKind], + startIndex: startIndex, + count: newRows.length - startIndex, + }) + startIndex = newRows.length + } + return [newRows, newGroups] + }, [data]) + + const openResult = usePersistFn(async (openAs: string, rec: IRecord) => { const isProtobuf = rec.raw_data_type === RawDataType.Protobuf let token: string | undefined let profileURL: string switch (openAs) { - case ViewOption.Download: + case ViewOptions.Download: token = await getActionToken(rec.id, 'single_download') if (!token) { return @@ -102,16 +169,15 @@ function ViewResultButton({ rec, t }) { window.location.href = `${client.getBasePath()}/profiling/single/download?token=${token}` break - case ViewOption.FlameGraph: + case ViewOptions.FlameGraph: token = await getActionToken(rec.id, 'single_view') if (!token) { return } - profileURL = `${client.getBasePath()}/profiling/single/view?token=${token}` if (isProtobuf) { - const titleOnTab = rec.target.kind + '_' + rec.target.display_name - profileURL = `/dashboard/speedscope#profileURL=${encodeURIComponent( + const titleOnTab = rec.target?.kind + '_' + rec.target?.display_name + profileURL = `${publicPathPrefix}/speedscope#profileURL=${encodeURIComponent( // protobuf can be rendered to flamegraph by speedscope profileURL + `&output_type=protobuf` )}&title=${titleOnTab}` @@ -119,91 +185,19 @@ function ViewResultButton({ rec, t }) { window.open(`${profileURL}`, '_blank') break - case ViewOption.Graph: + case ViewOptions.Graph: + case ViewOptions.Text: token = await getActionToken(rec.id, 'single_view') if (!token) { return } - - profileURL = - profileURL = `${client.getBasePath()}/profiling/single/view?token=${token}&output_type=${ - ViewOption.Graph - }` + profileURL = 
`${client.getBasePath()}/profiling/single/view?token=${token}&output_type=${openAs}` window.open(`${profileURL}`, '_blank') + break } }) - const menu = () => { - return ( -
- ) - } - - const DropdownButton = () => { - return ( -