diff --git a/.travis.yml b/.travis.yml index 87e120ab..25791ba0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,7 @@ sudo: required dist: trusty language: go go: -- 1.9 +- "1.10" python: - 3.6 @@ -22,8 +22,7 @@ env: - PATH=/tmp/fission-workflow-ci/bin:${PATH} - BIN_DIR=/tmp/fission-workflow-ci/bin - FISSION_VERSION=0.10.0 - - HELM_VERSION=2.8.2 - - KUBECTL_VERSION=1.9.6 + - HELM_VERSION=2.11.0 services: - docker @@ -45,6 +44,7 @@ before_script: # Static code analysis - hack/verify-gofmt.sh - hack/verify-govet.sh +- hack/verify-misc.sh - helm lint charts/fission-workflows # Build - glide install -v diff --git a/build/Dockerfile b/build/Dockerfile index fc530a23..55eacd42 100644 --- a/build/Dockerfile +++ b/build/Dockerfile @@ -1,5 +1,5 @@ # To run (from repo root): docker build -t fission -f ./build/Dockerfile . -ARG GOLANG_VERSION=1.10.0 +ARG GOLANG_VERSION=1.11.0 FROM golang:$GOLANG_VERSION AS builder ARG NOBUILD diff --git a/build/build.sh b/build/build.sh index 74678cc6..58a00d2b 100755 --- a/build/build.sh +++ b/build/build.sh @@ -75,7 +75,7 @@ echo "------------------------------" # Build client CGO_ENABLED=0 GOOS=${goos} GOARCH=${goarch} go build \ -gcflags=-trimpath=${GOPATH} -asmflags=-trimpath=${GOPATH}\ - -ldflags '-X "${versionPath}.BuildDate=${date}"'\ + -ldflags '-X "${versionPath}.buildDate=${date}"'\ -o ${output_cli}\ github.com/fission/fission-workflows/cmd/fission-workflows/ echo "$(pwd)/${output_cli}" @@ -83,7 +83,7 @@ echo "$(pwd)/${output_cli}" # Build bundle CGO_ENABLED=0 GOOS=${goos} GOARCH=${goarch} go build\ -gcflags=-trimpath=${GOPATH} -asmflags=-trimpath=${GOPATH}\ - -ldflags '-X "${versionPath}.BuildDate=${date}"'\ + -ldflags '-X "${versionPath}.buildDate=${date}"'\ -o ${output_bundle}\ github.com/fission/fission-workflows/cmd/fission-workflows-bundle/ echo "$(pwd)/${output_bundle}" \ No newline at end of file diff --git a/build/docker.sh b/build/docker.sh index 4a4bd2b2..68392ac1 100755 --- a/build/docker.sh +++ b/build/docker.sh @@ -14,6 +14,7 @@ NOBUILD=${3:-false} bundleImage=${IMAGE_REPO}/fission-workflows-bundle pushd ${BUILD_ROOT}/.. if $NOBUILD ; then + echo "Using pre-build binaries..." if [ ! -f ./fission-workflows-bundle ]; then echo "Executable './fission-workflows-bundle' not found!" exit 1; @@ -24,6 +25,7 @@ if $NOBUILD ; then exit 1; fi fi + echo "Building bundle..." 
docker build --tag="${bundleImage}:${IMAGE_TAG}" -f ${BUILD_ROOT}/Dockerfile \ --no-cache \ diff --git a/charts/fission-workflows/templates/deployment.yaml b/charts/fission-workflows/templates/deployment.yaml index 6cc68187..d62ef677 100644 --- a/charts/fission-workflows/templates/deployment.yaml +++ b/charts/fission-workflows/templates/deployment.yaml @@ -111,6 +111,7 @@ spec: "--debug", {{- end }} ] + imagePullPolicy: {{ .Values.pullPolicy }} env: - name: ES_NATS_URL value: "nats://{{ .Values.nats.authToken }}@{{ .Values.nats.location }}.{{ .Values.fnenv.fission.ns }}:{{ .Values.nats.port }}" @@ -123,4 +124,8 @@ spec: builder: image: "{{ .Values.buildEnvImage }}:{{.Values.tag}}" command: "defaultBuild" + container: + imagePullPolicy: {{ .Values.pullPolicy }} + + allowedFunctionsPerContainer: infinite diff --git a/cmd/fission-workflows-bundle/bundle/bundle.go b/cmd/fission-workflows-bundle/bundle/bundle.go index 6dab81b6..ca37e9c8 100644 --- a/cmd/fission-workflows-bundle/bundle/bundle.go +++ b/cmd/fission-workflows-bundle/bundle/bundle.go @@ -14,6 +14,7 @@ import ( "github.com/fission/fission-workflows/pkg/api/aggregates" "github.com/fission/fission-workflows/pkg/api/store" "github.com/fission/fission-workflows/pkg/apiserver" + fissionproxy "github.com/fission/fission-workflows/pkg/apiserver/fission" "github.com/fission/fission-workflows/pkg/controller" "github.com/fission/fission-workflows/pkg/controller/expr" wfictr "github.com/fission/fission-workflows/pkg/controller/invocation" @@ -187,8 +188,8 @@ func Run(ctx context.Context, opts *Options) error { } // Caches - wfiCache := getInvocationStore(app, esPub, eventStore) - wfCache := getWorkflowStore(app, esPub, eventStore) + invocationStore := getInvocationStore(app, esPub, eventStore) + workflowStore := getWorkflowStore(app, esPub, eventStore) // // Function Runtimes @@ -199,7 +200,7 @@ func Run(ctx context.Context, opts *Options) error { if opts.InternalRuntime || opts.Fission != nil { log.Infof("Using Task Runtime: Workflow") - reflectiveRuntime := workflows.NewRuntime(invocationAPI, wfiCache()) + reflectiveRuntime := workflows.NewRuntime(invocationAPI, invocationStore) runtimes[workflows.Name] = reflectiveRuntime } else { log.Info("No function runtimes specified.") @@ -228,12 +229,12 @@ func Run(ctx context.Context, opts *Options) error { var ctrls []controller.Controller if opts.WorkflowController { log.Info("Using controller: workflow") - ctrls = append(ctrls, setupWorkflowController(wfCache(), es, resolvers)) + ctrls = append(ctrls, setupWorkflowController(workflowStore, es, resolvers)) } if opts.InvocationController { log.Info("Using controller: invocation") - ctrls = append(ctrls, setupInvocationController(wfiCache(), wfCache(), es, runtimes, resolvers)) + ctrls = append(ctrls, setupInvocationController(invocationStore, workflowStore, es, runtimes, resolvers)) } ctrl := controller.NewMetaController(ctrls...) 
@@ -255,7 +256,7 @@ func Run(ctx context.Context, opts *Options) error { // if opts.Fission != nil { proxyMux := http.NewServeMux() - runFissionEnvironmentProxy(proxyMux, es, wfiCache(), wfCache(), resolvers) + runFissionEnvironmentProxy(proxyMux, es, invocationStore, workflowStore, resolvers) fissionProxySrv := &http.Server{Addr: fissionProxyAddress} fissionProxySrv.Handler = handlers.LoggingHandler(os.Stdout, proxyMux) @@ -286,11 +287,11 @@ func Run(ctx context.Context, opts *Options) error { } if opts.WorkflowAPI { - serveWorkflowAPI(grpcServer, es, resolvers, wfCache()) + serveWorkflowAPI(grpcServer, es, resolvers, workflowStore) } if opts.InvocationAPI { - serveInvocationAPI(grpcServer, es, wfiCache()) + serveInvocationAPI(grpcServer, es, invocationStore) } if opts.AdminAPI || opts.WorkflowAPI || opts.InvocationAPI { @@ -363,25 +364,14 @@ func Run(ctx context.Context, opts *Options) error { return nil } -func getWorkflowStore(app *App, eventPub pubsub.Publisher, backend fes.Backend) func() *store.Workflows { - var workflows *store.Workflows - return func() *store.Workflows { - if workflows == nil { - workflows = store.NewWorkflowsStore(setupWorkflowCache(app, eventPub, backend)) - } - return workflows - } +func getWorkflowStore(app *App, eventPub pubsub.Publisher, backend fes.Backend) *store.Workflows { + c := setupWorkflowCache(app, eventPub, backend) + return store.NewWorkflowsStore(c) } -func getInvocationStore(app *App, eventPub pubsub.Publisher, backend fes.Backend) func() *store.Invocations { - var invocations *store.Invocations - return func() *store.Invocations { - if invocations == nil { - invocations = store.NewInvocationStore(setupWorkflowInvocationCache(app, eventPub, backend)) - } - return invocations - - } +func getInvocationStore(app *App, eventPub pubsub.Publisher, backend fes.Backend) *store.Invocations { + c := setupWorkflowInvocationCache(app, eventPub, backend) + return store.NewInvocationStore(c) } func setupInternalFunctionRuntime() *native.FunctionEnv { @@ -523,7 +513,7 @@ func runFissionEnvironmentProxy(proxyMux *http.ServeMux, es fes.Backend, invocat wfServer := apiserver.NewWorkflow(workflowAPI, workflows) wfiAPI := api.NewInvocationAPI(es) wfiServer := apiserver.NewInvocation(wfiAPI, invocations) - fissionProxyServer := fission.NewFissionProxyServer(wfiServer, wfServer) + fissionProxyServer := fissionproxy.NewEnvironmentProxyServer(wfiServer, wfServer) fissionProxyServer.RegisterServer(proxyMux) } diff --git a/cmd/fission-workflows/invocation.go b/cmd/fission-workflows/invocation.go index bdc64c4b..c868b18a 100644 --- a/cmd/fission-workflows/invocation.go +++ b/cmd/fission-workflows/invocation.go @@ -11,6 +11,7 @@ import ( "github.com/fission/fission-workflows/pkg/apiserver/httpclient" "github.com/fission/fission-workflows/pkg/parse/yaml" "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/golang/protobuf/ptypes" "github.com/urfave/cli" ) @@ -104,7 +105,7 @@ var cmdInvocation = cli.Command{ wfID := ctx.Args().Get(0) spec := &types.WorkflowInvocationSpec{ WorkflowId: wfID, - Inputs: map[string]*types.TypedValue{}, + Inputs: map[string]*typedvalues.TypedValue{}, } if ctx.Bool("sync") { resp, err := client.Invocation.InvokeSync(ctx, spec) diff --git a/compiling.md b/compiling.md index 5e1fcdb8..001f0d83 100644 --- a/compiling.md +++ b/compiling.md @@ -19,7 +19,7 @@ environment meets all prerequisite requirements, and checkout the repo from 
gith # Install dependencies glide install -v -# Build the artifacts: wfcli, fission-workflows-bundle +# Build the artifacts: client (fission-workflows) and server (fission-workflows-bundle) build/build-linux.sh # Build the docker images (the NOBUILD parameter indicates that Docker should use the artifacts (wfci, diff --git a/glide.lock b/glide.lock index 275e49d3..04073ccd 100644 --- a/glide.lock +++ b/glide.lock @@ -1,5 +1,5 @@ -hash: 8625631121e3da0aa6118e13145d87b9efb1659cb820d24b2c2f781af212556a -updated: 2018-09-20T14:16:47.976902+02:00 +hash: 49cadf0f85ef216b6c1a32a1a827aba613f185dd86cbb6ffeaf1c836d2f09bee +updated: 2018-10-01T11:47:42.068117+02:00 imports: - name: cloud.google.com/go version: 3b1ae45394a234c385be014e9a488f2bb6eef821 @@ -61,17 +61,11 @@ imports: version: c0656edd0d9eab7c66d1eb0c568f9039345796f7 subpackages: - gogoproto - - jsonpb - proto - protoc-gen-gogo/descriptor - sortkeys - - types - name: github.com/golang/glog version: 44145f04b68cf362d9c4df2182967c2275eaefed -- name: github.com/golang/groupcache - version: 24b0969c4cb722950103eed87108c8d291a8df00 - subpackages: - - lru - name: github.com/golang/protobuf version: aa810b61a9c79d51363740d207bb46cf8e620ed5 subpackages: @@ -138,7 +132,7 @@ imports: - name: github.com/howeyc/gopass version: bf9dde6d0d2c004a008c27aaee91170c786f6db8 - name: github.com/imdario/mergo - version: 33882c6bfe701aca0ff1472aa8b4ebd6135a560d + version: 9f23e2d6bd2a77f959b2bf6acdbefd708a83a4a4 - name: github.com/json-iterator/go version: f2b4162afba35581b6d4a50d3b8f34e33c144682 - name: github.com/matttproud/golang_protobuf_extensions @@ -172,6 +166,10 @@ imports: - log - name: github.com/pkg/errors version: 645ef00459ed84a119197bfb8d8205042c6df63d +- name: github.com/pmezard/go-difflib + version: d8ed2627bdf02c080bf22230dbb337003b7aba2d + subpackages: + - difflib - name: github.com/prometheus/client_golang version: c5b7fccd204277076155f10851dad72b76a49317 subpackages: @@ -209,6 +207,13 @@ imports: version: c155da19408a8799da419ed3eeb0cb5db0ad5dbc - name: github.com/spf13/pflag version: 583c0c0531f06d5278b7d917446061adc344b5cd +- name: github.com/stretchr/objx + version: cbeaeb16a013161a98496fad62933b1d21786672 +- name: github.com/stretchr/testify + version: f35b8ab0b5a2cef36673838d662e249dd9c94686 + subpackages: + - assert + - mock - name: github.com/uber/jaeger-client-go version: b043381d944715b469fd6b37addfd30145ca1758 subpackages: @@ -561,13 +566,5 @@ testImports: - docker/types/registry - docker/types/strslice - docker/types/versions -- name: github.com/pmezard/go-difflib - version: d8ed2627bdf02c080bf22230dbb337003b7aba2d - subpackages: - - difflib -- name: github.com/stretchr/testify - version: 69483b4bd14f5845b5a1e55bca19e954e827f1d0 - subpackages: - - assert - name: gopkg.in/ory-am/dockertest.v3 version: 9bca068bf5e4af2484b9c2e8cfeb3d098d5327d7 diff --git a/glide.yaml b/glide.yaml index 41208fa3..974b10ff 100644 --- a/glide.yaml +++ b/glide.yaml @@ -78,10 +78,12 @@ import: - go/otgrpc - package: github.com/hashicorp/golang-lru version: v0.5.0 -testImport: - package: github.com/stretchr/testify - version: 1.1.4 - subpackages: - - assert + version: v1.2.2 +- package: github.com/imdario/mergo + version: v0.3.6 +testImport: - package: gopkg.in/ory-am/dockertest.v3 version: v3.3.1 +- package: github.com/stretchr/testify + version: v1.2.2 diff --git a/hack/verify-misc.sh 
b/hack/verify-misc.sh new file mode 100755 index 00000000..0c9b3659 --- /dev/null +++ b/hack/verify-misc.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +# Miscellaneous static code checks + +# check - runs the command and adds a header with the status (OK or FAIL) to the output +check() { + msg=$1 + shift + cmd=$@ + printf "[check] ${msg}..." + output="" + if output=$(bash -c "${cmd}") ; then + printf "OK\n" + else + printf "FAIL\n" + if [ ! -z "${output}" ] ; then + echo ${output} + fi + return 1 + fi +} + +# Check if we don't accidentally use the gogo protobuf implementation, instead of the golang protobuf implementation. +check "no use of gogo-protobuf" ! grep -R 'github.com/gogo/protobuf' pkg/ cmd/ \ No newline at end of file diff --git a/pkg/api/aggregates/workflow.go b/pkg/api/aggregates/workflow.go index 9b8f807e..73c2d1cd 100644 --- a/pkg/api/aggregates/workflow.go +++ b/pkg/api/aggregates/workflow.go @@ -49,14 +49,15 @@ func (wf *Workflow) ApplyEvent(event *fes.Event) error { switch m := eventData.(type) { case *events.WorkflowCreated: - // Setup object + spec := m.GetSpec() wf.BaseEntity = fes.NewBaseEntity(wf, *event.Aggregate) wf.Workflow = &types.Workflow{ Metadata: &types.ObjectMetadata{ Id: wf.Aggregate().Id, + Name: spec.GetName(), CreatedAt: event.GetTimestamp(), }, - Spec: m.GetSpec(), + Spec: spec, Status: &types.WorkflowStatus{ Status: types.WorkflowStatus_PENDING, }, diff --git a/pkg/api/dynamic.go b/pkg/api/dynamic.go index a415393f..5f6ad799 100644 --- a/pkg/api/dynamic.go +++ b/pkg/api/dynamic.go @@ -3,6 +3,7 @@ package api import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/fission/fission-workflows/pkg/types/validate" "github.com/golang/protobuf/proto" ) @@ -23,15 +24,15 @@ func NewDynamicApi(wfAPI *Workflow, wfiAPI *Invocation) *Dynamic { // AddDynamicFlow inserts the flow as a 'dynamic task' into the workflow invocation with id invocationID as the child // of the parent task. 
-func (ap *Dynamic) AddDynamicFlow(invocationID string, parentTaskID string, flow typedvalues.Flow) error { +func (ap *Dynamic) AddDynamicFlow(invocationID string, parentTaskID string, flow controlflow.Flow) error { if err := validate.Flow(flow); err != nil { return err } switch flow.Type() { - case typedvalues.Workflow: - return ap.addDynamicWorkflow(invocationID, parentTaskID, flow.Workflow(), &types.TaskSpec{}) - case typedvalues.Task: - return ap.addDynamicTask(invocationID, parentTaskID, flow.Task()) + case controlflow.FlowTypeWorkflow: + return ap.addDynamicWorkflow(invocationID, parentTaskID, flow.GetWorkflow(), &types.TaskSpec{}) + case controlflow.FlowTypeTask: + return ap.addDynamicTask(invocationID, parentTaskID, flow.GetTask()) default: panic("validated flow was still empty") } @@ -72,7 +73,7 @@ func (ap *Dynamic) addDynamicWorkflow(invocationID string, parentTaskID string, // Generate Proxy Task proxyTaskSpec := proto.Clone(stubTask).(*types.TaskSpec) proxyTaskSpec.FunctionRef = wfRef.Format() - proxyTaskSpec.Input(types.InputParent, typedvalues.ParseString(invocationID)) + proxyTaskSpec.Input(types.InputParent, typedvalues.MustWrap(invocationID)) proxyTaskID := parentTaskID + "_child" proxyTask := types.NewTask(proxyTaskID, proxyTaskSpec.FunctionRef) proxyTask.Spec = proxyTaskSpec diff --git a/pkg/api/events/events.pb.go b/pkg/api/events/events.pb.go index 8aef9028..2c26f4f2 100644 --- a/pkg/api/events/events.pb.go +++ b/pkg/api/events/events.pb.go @@ -27,7 +27,8 @@ package events import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import fission_workflows_types "github.com/fission/fission-workflows/pkg/types" +import fission_workflows_types1 "github.com/fission/fission-workflows/pkg/types" +import fission_workflows_types "github.com/fission/fission-workflows/pkg/types/typedvalues" // Reference imports to suppress errors if they are not otherwise used. 
var _ = proto.Marshal @@ -41,7 +42,7 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type WorkflowCreated struct { - Spec *fission_workflows_types.WorkflowSpec `protobuf:"bytes,1,opt,name=spec" json:"spec,omitempty"` + Spec *fission_workflows_types1.WorkflowSpec `protobuf:"bytes,1,opt,name=spec" json:"spec,omitempty"` } func (m *WorkflowCreated) Reset() { *m = WorkflowCreated{} } @@ -49,7 +50,7 @@ func (m *WorkflowCreated) String() string { return proto.CompactTextS func (*WorkflowCreated) ProtoMessage() {} func (*WorkflowCreated) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (m *WorkflowCreated) GetSpec() *fission_workflows_types.WorkflowSpec { +func (m *WorkflowCreated) GetSpec() *fission_workflows_types1.WorkflowSpec { if m != nil { return m.Spec } @@ -65,7 +66,7 @@ func (*WorkflowDeleted) ProtoMessage() {} func (*WorkflowDeleted) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } type WorkflowParsed struct { - Tasks map[string]*fission_workflows_types.TaskStatus `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Tasks map[string]*fission_workflows_types1.TaskStatus `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` } func (m *WorkflowParsed) Reset() { *m = WorkflowParsed{} } @@ -73,7 +74,7 @@ func (m *WorkflowParsed) String() string { return proto.CompactTextSt func (*WorkflowParsed) ProtoMessage() {} func (*WorkflowParsed) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } -func (m *WorkflowParsed) GetTasks() map[string]*fission_workflows_types.TaskStatus { +func (m *WorkflowParsed) GetTasks() map[string]*fission_workflows_types1.TaskStatus { if m != nil { return m.Tasks } @@ -81,7 +82,7 @@ func (m *WorkflowParsed) GetTasks() map[string]*fission_workflows_types.TaskStat } type WorkflowParsingFailed struct { - Error *fission_workflows_types.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + Error *fission_workflows_types1.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` } func (m *WorkflowParsingFailed) Reset() { *m = WorkflowParsingFailed{} } @@ -89,7 +90,7 @@ func (m *WorkflowParsingFailed) String() string { return proto.Compac func (*WorkflowParsingFailed) ProtoMessage() {} func (*WorkflowParsingFailed) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } -func (m *WorkflowParsingFailed) GetError() *fission_workflows_types.Error { +func (m *WorkflowParsingFailed) GetError() *fission_workflows_types1.Error { if m != nil { return m.Error } @@ -97,7 +98,7 @@ func (m *WorkflowParsingFailed) GetError() *fission_workflows_types.Error { } type InvocationCreated struct { - Spec *fission_workflows_types.WorkflowInvocationSpec `protobuf:"bytes,1,opt,name=spec" json:"spec,omitempty"` + Spec *fission_workflows_types1.WorkflowInvocationSpec `protobuf:"bytes,1,opt,name=spec" json:"spec,omitempty"` } func (m *InvocationCreated) Reset() { *m = InvocationCreated{} } @@ -105,7 +106,7 @@ func (m *InvocationCreated) String() string { return proto.CompactTex func (*InvocationCreated) ProtoMessage() {} func (*InvocationCreated) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } -func (m *InvocationCreated) GetSpec() *fission_workflows_types.WorkflowInvocationSpec { +func (m *InvocationCreated) GetSpec() *fission_workflows_types1.WorkflowInvocationSpec { if m != nil { return m.Spec 
} @@ -129,7 +130,7 @@ func (m *InvocationCompleted) GetOutput() *fission_workflows_types.TypedValue { } type InvocationCanceled struct { - Error *fission_workflows_types.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + Error *fission_workflows_types1.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` } func (m *InvocationCanceled) Reset() { *m = InvocationCanceled{} } @@ -137,7 +138,7 @@ func (m *InvocationCanceled) String() string { return proto.CompactTe func (*InvocationCanceled) ProtoMessage() {} func (*InvocationCanceled) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } -func (m *InvocationCanceled) GetError() *fission_workflows_types.Error { +func (m *InvocationCanceled) GetError() *fission_workflows_types1.Error { if m != nil { return m.Error } @@ -145,7 +146,7 @@ func (m *InvocationCanceled) GetError() *fission_workflows_types.Error { } type InvocationTaskAdded struct { - Task *fission_workflows_types.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + Task *fission_workflows_types1.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` } func (m *InvocationTaskAdded) Reset() { *m = InvocationTaskAdded{} } @@ -153,7 +154,7 @@ func (m *InvocationTaskAdded) String() string { return proto.CompactT func (*InvocationTaskAdded) ProtoMessage() {} func (*InvocationTaskAdded) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } -func (m *InvocationTaskAdded) GetTask() *fission_workflows_types.Task { +func (m *InvocationTaskAdded) GetTask() *fission_workflows_types1.Task { if m != nil { return m.Task } @@ -161,7 +162,7 @@ func (m *InvocationTaskAdded) GetTask() *fission_workflows_types.Task { } type InvocationFailed struct { - Error *fission_workflows_types.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + Error *fission_workflows_types1.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` } func (m *InvocationFailed) Reset() { *m = InvocationFailed{} } @@ -169,7 +170,7 @@ func (m *InvocationFailed) String() string { return proto.CompactText func (*InvocationFailed) ProtoMessage() {} func (*InvocationFailed) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } -func (m *InvocationFailed) GetError() *fission_workflows_types.Error { +func (m *InvocationFailed) GetError() *fission_workflows_types1.Error { if m != nil { return m.Error } @@ -181,7 +182,7 @@ func (m *InvocationFailed) GetError() *fission_workflows_types.Error { // // TODO why do we need task, and not just task spec. 
type TaskStarted struct { - Spec *fission_workflows_types.TaskInvocationSpec `protobuf:"bytes,1,opt,name=spec" json:"spec,omitempty"` + Spec *fission_workflows_types1.TaskInvocationSpec `protobuf:"bytes,1,opt,name=spec" json:"spec,omitempty"` } func (m *TaskStarted) Reset() { *m = TaskStarted{} } @@ -189,7 +190,7 @@ func (m *TaskStarted) String() string { return proto.CompactTextStrin func (*TaskStarted) ProtoMessage() {} func (*TaskStarted) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } -func (m *TaskStarted) GetSpec() *fission_workflows_types.TaskInvocationSpec { +func (m *TaskStarted) GetSpec() *fission_workflows_types1.TaskInvocationSpec { if m != nil { return m.Spec } @@ -197,7 +198,7 @@ func (m *TaskStarted) GetSpec() *fission_workflows_types.TaskInvocationSpec { } type TaskSucceeded struct { - Result *fission_workflows_types.TaskInvocationStatus `protobuf:"bytes,1,opt,name=result" json:"result,omitempty"` + Result *fission_workflows_types1.TaskInvocationStatus `protobuf:"bytes,1,opt,name=result" json:"result,omitempty"` } func (m *TaskSucceeded) Reset() { *m = TaskSucceeded{} } @@ -205,7 +206,7 @@ func (m *TaskSucceeded) String() string { return proto.CompactTextStr func (*TaskSucceeded) ProtoMessage() {} func (*TaskSucceeded) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } -func (m *TaskSucceeded) GetResult() *fission_workflows_types.TaskInvocationStatus { +func (m *TaskSucceeded) GetResult() *fission_workflows_types1.TaskInvocationStatus { if m != nil { return m.Result } @@ -221,7 +222,7 @@ func (*TaskSkipped) ProtoMessage() {} func (*TaskSkipped) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } type TaskFailed struct { - Error *fission_workflows_types.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + Error *fission_workflows_types1.Error `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` } func (m *TaskFailed) Reset() { *m = TaskFailed{} } @@ -229,7 +230,7 @@ func (m *TaskFailed) String() string { return proto.CompactTextString func (*TaskFailed) ProtoMessage() {} func (*TaskFailed) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } -func (m *TaskFailed) GetError() *fission_workflows_types.Error { +func (m *TaskFailed) GetError() *fission_workflows_types1.Error { if m != nil { return m.Error } @@ -255,34 +256,35 @@ func init() { func init() { proto.RegisterFile("pkg/api/events/events.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 457 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0xe1, 0x6b, 0xd4, 0x30, - 0x18, 0xc6, 0xe9, 0xb6, 0x3b, 0xf4, 0x3d, 0xa6, 0x5b, 0x44, 0x38, 0x26, 0xca, 0x88, 0x08, 0x03, - 0x59, 0x8b, 0x9b, 0x1f, 0xdc, 0xfc, 0x20, 0x6e, 0x9e, 0x6c, 0xa2, 0x22, 0x9d, 0x4c, 0x11, 0xfc, - 0x90, 0x35, 0xef, 0xce, 0xd0, 0xae, 0x09, 0x49, 0x7a, 0xe3, 0xfe, 0x35, 0xff, 0x3a, 0x49, 0x93, - 0xda, 0x1e, 0x7a, 0xa7, 0xec, 0xbe, 0x34, 0x25, 0x7d, 0x9f, 0x5f, 0xdf, 0xe7, 0x79, 0xd3, 0xc2, - 0x03, 0x95, 0x8f, 0x13, 0xa6, 0x44, 0x82, 0x13, 0x2c, 0xad, 0x09, 0x4b, 0xac, 0xb4, 0xb4, 0x92, - 0x0c, 0x2f, 0x85, 0x31, 0x42, 0x96, 0xf1, 0xb5, 0xd4, 0xf9, 0x65, 0x21, 0xaf, 0x4d, 0xec, 0x9f, - 0x6f, 0x1d, 0x8e, 0x85, 0xfd, 0x51, 0x5d, 0xc4, 0x99, 0xbc, 0x4a, 0x42, 0x51, 0xb3, 0xee, 0xfe, - 0x2e, 0x4e, 0x1c, 0xdb, 0x4e, 0x15, 0x1a, 0x7f, 0xf5, 0x54, 0xfa, 0x1e, 0xee, 0x7e, 0x09, 0x25, - 0xc7, 0x1a, 0x99, 0x45, 0x4e, 0x0e, 0x60, 0xcd, 0x28, 0xcc, 0x86, 0xd1, 0x76, 0xb4, 0x33, 0xd8, - 0x7b, 0x12, 0xff, 0xf9, 0x5e, 0x0f, 
0x68, 0x74, 0x67, 0x0a, 0xb3, 0xb4, 0x96, 0xd0, 0xcd, 0x96, - 0xf6, 0x06, 0x0b, 0xb4, 0xc8, 0xe9, 0xcf, 0x08, 0xee, 0x34, 0x7b, 0x9f, 0x98, 0x36, 0xc8, 0xc9, - 0x29, 0xf4, 0x2c, 0x33, 0xb9, 0x19, 0x46, 0xdb, 0xab, 0x3b, 0x83, 0xbd, 0xfd, 0x78, 0x9e, 0xb3, - 0x78, 0x56, 0x18, 0x7f, 0x76, 0xaa, 0x51, 0x69, 0xf5, 0x34, 0xf5, 0x84, 0xad, 0xef, 0x00, 0xed, - 0x26, 0xd9, 0x80, 0xd5, 0x1c, 0xa7, 0x75, 0xe3, 0xb7, 0x53, 0x77, 0x4b, 0x0e, 0xa0, 0x37, 0x61, - 0x45, 0x85, 0xc3, 0x95, 0xda, 0xcc, 0xe3, 0xb9, 0x66, 0x1c, 0xe5, 0xcc, 0x32, 0x5b, 0x99, 0xd4, - 0x2b, 0x0e, 0x57, 0x5e, 0x44, 0xf4, 0x03, 0xdc, 0xef, 0xb6, 0x20, 0xca, 0xf1, 0x5b, 0x26, 0x0a, - 0xe4, 0xe4, 0x39, 0xf4, 0x50, 0x6b, 0xa9, 0x43, 0x48, 0x8f, 0xe6, 0x72, 0x47, 0xae, 0x2a, 0xf5, - 0xc5, 0xf4, 0x2b, 0x6c, 0x9e, 0x96, 0x13, 0x99, 0x31, 0x2b, 0x64, 0xd9, 0xc4, 0x7d, 0x3c, 0x13, - 0x77, 0xf2, 0xcf, 0xb8, 0x5b, 0x42, 0x27, 0xf8, 0x14, 0xee, 0x75, 0xc8, 0xf2, 0x4a, 0xd5, 0xe1, - 0x93, 0x97, 0xd0, 0x97, 0x95, 0x55, 0x95, 0x0d, 0xf4, 0x05, 0xfe, 0xa7, 0x0a, 0xf9, 0xb9, 0x33, - 0x9e, 0x06, 0x09, 0x7d, 0x07, 0xa4, 0xc3, 0x64, 0x65, 0x86, 0x37, 0x77, 0x7e, 0xd2, 0xed, 0xcf, - 0x65, 0xfd, 0x9a, 0x73, 0xe4, 0xe4, 0x19, 0xac, 0xb9, 0x39, 0x06, 0xd6, 0xc3, 0x85, 0xd3, 0x49, - 0xeb, 0x52, 0x7a, 0x02, 0x1b, 0x2d, 0x69, 0xa9, 0x69, 0x7c, 0x84, 0x41, 0x98, 0xba, 0x76, 0x59, - 0xbd, 0x9a, 0x99, 0xc3, 0xd3, 0x85, 0xbd, 0xfc, 0x75, 0x06, 0xe7, 0xb0, 0x5e, 0xf3, 0xaa, 0x2c, - 0x43, 0x74, 0xee, 0x46, 0xd0, 0xd7, 0x68, 0xaa, 0xa2, 0x49, 0x7f, 0xf7, 0x7f, 0x99, 0xfe, 0x1c, - 0x06, 0x31, 0x5d, 0x0f, 0x7d, 0xe6, 0x42, 0x29, 0xe4, 0xf4, 0xc8, 0x1f, 0xf9, 0x65, 0xac, 0x1f, - 0xdd, 0xfa, 0xd6, 0xf7, 0x5f, 0xd8, 0x45, 0xbf, 0xfe, 0x0d, 0xec, 0xff, 0x0a, 0x00, 0x00, 0xff, - 0xff, 0xcd, 0x47, 0x78, 0xe5, 0x7b, 0x04, 0x00, 0x00, + // 467 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0x5f, 0x6b, 0xd4, 0x40, + 0x14, 0xc5, 0x49, 0xdb, 0x5d, 0xf4, 0x2e, 0xd5, 0x76, 0x44, 0x58, 0x56, 0x94, 0x32, 0x22, 0x14, + 0xa4, 0x09, 0xb6, 0x3e, 0xd8, 0xfa, 0x20, 0xb6, 0xae, 0xb4, 0x52, 0x45, 0x52, 0xa9, 0x22, 0xf8, + 0x30, 0xcd, 0xdc, 0xae, 0x21, 0x69, 0x66, 0x98, 0x99, 0x6c, 0xd9, 0xaf, 0xe6, 0xa7, 0x93, 0xf9, + 0x13, 0x37, 0x8b, 0xee, 0x5a, 0xba, 0x2f, 0x99, 0x61, 0x72, 0xcf, 0x8f, 0x7b, 0xce, 0x9d, 0x04, + 0x1e, 0xc9, 0x62, 0x94, 0x30, 0x99, 0x27, 0x38, 0xc6, 0xca, 0xe8, 0xb0, 0xc4, 0x52, 0x09, 0x23, + 0x48, 0xff, 0x32, 0xd7, 0x3a, 0x17, 0x55, 0x7c, 0x2d, 0x54, 0x71, 0x59, 0x8a, 0x6b, 0x1d, 0xfb, + 0xf7, 0x83, 0x83, 0x51, 0x6e, 0x7e, 0xd6, 0x17, 0x71, 0x26, 0xae, 0x92, 0x50, 0xd4, 0xac, 0x3b, + 0x7f, 0x8a, 0x13, 0xcb, 0x36, 0x13, 0x89, 0xda, 0x3f, 0x3d, 0x75, 0x70, 0x7a, 0x0b, 0x2d, 0x1f, + 0xb3, 0xb2, 0x9e, 0xdd, 0x7b, 0x1a, 0x3d, 0x85, 0xfb, 0x5f, 0x83, 0xe8, 0x48, 0x21, 0x33, 0xc8, + 0xc9, 0x3e, 0xac, 0x69, 0x89, 0x59, 0x3f, 0xda, 0x8a, 0xb6, 0x7b, 0xbb, 0xcf, 0xe2, 0xbf, 0x5d, + 0xf8, 0x76, 0x1a, 0xdd, 0x99, 0xc4, 0x2c, 0x75, 0x12, 0xba, 0x39, 0xa5, 0xbd, 0xc3, 0x12, 0x0d, + 0x72, 0xfa, 0x2b, 0x82, 0x7b, 0xcd, 0xd9, 0x67, 0xa6, 0x34, 0x72, 0x72, 0x02, 0x1d, 0xc3, 0x74, + 0xa1, 0xfb, 0xd1, 0xd6, 0xea, 0x76, 0x6f, 0x77, 0x2f, 0x9e, 0x97, 0x53, 0x3c, 0x2b, 0x8c, 0xbf, + 0x58, 0xd5, 0xb0, 0x32, 0x6a, 0x92, 0x7a, 0xc2, 0xe0, 0x07, 0xc0, 0xf4, 0x90, 0x6c, 0xc0, 0x6a, + 0x81, 0x13, 0xd7, 0xf8, 0xdd, 0xd4, 0x6e, 0xc9, 0x3e, 0x74, 0x9c, 0xdd, 0xfe, 0x8a, 0x33, 0xf3, + 0x74, 0xae, 0x19, 0x4b, 0x39, 0x33, 0xcc, 0xd4, 0x3a, 0xf5, 0x8a, 0x83, 0x95, 0x57, 0x11, 0xfd, + 0x08, 0x0f, 0xdb, 0x2d, 0xe4, 0xd5, 0xe8, 0x3d, 0xcb, 
0x4b, 0xe4, 0xe4, 0x25, 0x74, 0x50, 0x29, + 0xa1, 0x42, 0x48, 0x4f, 0xe6, 0x72, 0x87, 0xb6, 0x2a, 0xf5, 0xc5, 0xf4, 0x1b, 0x6c, 0x9e, 0x54, + 0x63, 0x91, 0x31, 0x93, 0x8b, 0xaa, 0x89, 0xfb, 0x68, 0x26, 0xee, 0xe4, 0xbf, 0x71, 0x4f, 0x09, + 0xad, 0xe0, 0x53, 0x78, 0xd0, 0x22, 0x8b, 0x2b, 0xe9, 0xc2, 0x27, 0xaf, 0xa1, 0x2b, 0x6a, 0x23, + 0x6b, 0x13, 0xe8, 0x0b, 0xfc, 0xdb, 0x9b, 0x71, 0x6e, 0x8d, 0xa7, 0x41, 0x42, 0x3f, 0x00, 0x69, + 0x31, 0x59, 0x95, 0xe1, 0xed, 0x9d, 0x1f, 0xb7, 0xfb, 0xb3, 0x59, 0xbf, 0xe5, 0x1c, 0x39, 0x79, + 0x01, 0x6b, 0x76, 0x8e, 0x81, 0xf5, 0x78, 0xe1, 0x74, 0x52, 0x57, 0x4a, 0x8f, 0x61, 0x63, 0x4a, + 0x5a, 0x6a, 0x1a, 0x9f, 0xa0, 0x17, 0xa6, 0xae, 0x6c, 0x56, 0x6f, 0x66, 0xe6, 0xf0, 0x7c, 0x61, + 0x2f, 0xff, 0x9c, 0xc1, 0x39, 0xac, 0x3b, 0x5e, 0x9d, 0x65, 0x88, 0xd6, 0xdd, 0x10, 0xba, 0x0a, + 0x75, 0x5d, 0x36, 0xe9, 0xef, 0xdc, 0x94, 0xe9, 0xef, 0x61, 0x10, 0xd3, 0xf5, 0xd0, 0x67, 0x91, + 0x4b, 0x89, 0x9c, 0x1e, 0xfa, 0x2b, 0xbf, 0x8c, 0xf5, 0xc3, 0x3b, 0xdf, 0xbb, 0xfe, 0x0b, 0xbb, + 0xe8, 0xba, 0xdf, 0xc0, 0xde, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x9b, 0xaf, 0xd4, 0x53, 0xc9, + 0x04, 0x00, 0x00, } diff --git a/pkg/api/events/events.proto b/pkg/api/events/events.proto index 3a663e46..6dea93df 100644 --- a/pkg/api/events/events.proto +++ b/pkg/api/events/events.proto @@ -4,6 +4,7 @@ package fission.workflows.events; option go_package = "events"; import "github.com/fission/fission-workflows/pkg/types/types.proto"; +import "github.com/fission/fission-workflows/pkg/types/typedvalues/typedvalues.proto"; // // Workflow diff --git a/pkg/api/invocation.go b/pkg/api/invocation.go index 07d3022e..f8da7a82 100644 --- a/pkg/api/invocation.go +++ b/pkg/api/invocation.go @@ -8,6 +8,7 @@ import ( "github.com/fission/fission-workflows/pkg/api/events" "github.com/fission/fission-workflows/pkg/fes" "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/types/validate" "github.com/fission/fission-workflows/pkg/util" "github.com/opentracing/opentracing-go" @@ -37,6 +38,14 @@ func (ia *Invocation) Invoke(spec *types.WorkflowInvocationSpec, opts ...CallOpt return "", err } + // Ensure that te body input is also accessible on the default parameter + // TODO remove once default input field is removed + if spec.Inputs != nil && spec.Inputs[types.InputMain] == nil { + if body, ok := spec.Inputs[types.InputBody]; ok { + spec.Inputs[types.InputMain] = body + } + } + id := fmt.Sprintf("wi-%s", util.UID()) event, err := fes.NewEvent(*aggregates.NewWorkflowInvocationAggregate(id), &events.InvocationCreated{ @@ -91,7 +100,7 @@ func (ia *Invocation) Cancel(invocationID string) error { // Complete forces the completion of an invocation. This function - used by the controller - is the only way // to ensure that a workflow invocation turns into the COMPLETED state. // If the API fails to append the event to the event store, it will return an error. 
-func (ia *Invocation) Complete(invocationID string, output *types.TypedValue) error { +func (ia *Invocation) Complete(invocationID string, output *typedvalues.TypedValue) error { if len(invocationID) == 0 { return validate.NewError("invocationID", errors.New("id should not be empty")) } diff --git a/pkg/api/task.go b/pkg/api/task.go index 80c20031..0b69b78d 100644 --- a/pkg/api/task.go +++ b/pkg/api/task.go @@ -8,7 +8,7 @@ import ( "github.com/fission/fission-workflows/pkg/fes" "github.com/fission/fission-workflows/pkg/fnenv" "github.com/fission/fission-workflows/pkg/types" - "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/fission/fission-workflows/pkg/types/validate" "github.com/golang/protobuf/ptypes" "github.com/sirupsen/logrus" @@ -37,6 +37,7 @@ func NewTaskAPI(runtime map[string]fnenv.Runtime, esClient fes.Backend, api *Dyn // Currently it executes the underlying function synchronously and manage the execution until completion. // TODO make asynchronous func (ap *Task) Invoke(spec *types.TaskInvocationSpec, opts ...CallOption) (*types.TaskInvocation, error) { + log := logrus.WithField("fn", spec.FnRef).WithField("wi", spec.InvocationId).WithField("task", spec.TaskId) cfg := parseCallOptions(opts) err := validate.TaskInvocationSpec(spec) if err != nil { @@ -68,10 +69,7 @@ func (ap *Task) Invoke(spec *types.TaskInvocationSpec, opts ...CallOption) (*typ } if err != nil { // TODO improve error handling here (retries? internal or task related error?) - logrus.WithField("task", spec.FnRef). - WithField("wi", spec.InvocationId). - WithField("task", spec.TaskId). - Infof("Failed to invoke task: %v", err) + log.Infof("Failed to invoke task: %v", err) esErr := ap.Fail(spec.InvocationId, taskID, err.Error()) if esErr != nil { return nil, esErr @@ -80,9 +78,9 @@ func (ap *Task) Invoke(spec *types.TaskInvocationSpec, opts ...CallOption) (*typ } // TODO to a middleware component - if typedvalues.IsControlFlow(typedvalues.ValueType(fnResult.GetOutput().GetType())) { - logrus.Info("Adding dynamic flow") - flow, err := typedvalues.FormatControlFlow(fnResult.GetOutput()) + if controlflow.IsControlFlow(fnResult.GetOutput()) { + log.Info("Adding dynamic flow") + flow, err := controlflow.UnwrapControlFlow(fnResult.GetOutput()) if err != nil { return nil, err } diff --git a/pkg/apiserver/apiserver.pb.go b/pkg/apiserver/apiserver.pb.go index 2ff01c0a..299342cc 100644 --- a/pkg/apiserver/apiserver.pb.go +++ b/pkg/apiserver/apiserver.pb.go @@ -8,10 +8,8 @@ It is generated from these files: pkg/apiserver/apiserver.proto It has these top-level messages: - WorkflowIdentifier - SearchWorkflowResponse + WorkflowList InvocationListQuery - WorkflowInvocationIdentifier WorkflowInvocationList Health */ @@ -20,9 +18,9 @@ package apiserver import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import fission_workflows_types "github.com/fission/fission-workflows/pkg/types" +import fission_workflows_types1 "github.com/fission/fission-workflows/pkg/types" import fission_workflows_version "github.com/fission/fission-workflows/pkg/version" -import google_protobuf1 "github.com/golang/protobuf/ptypes/empty" +import google_protobuf2 "github.com/golang/protobuf/ptypes/empty" import _ "google.golang.org/genproto/googleapis/api/annotations" import ( @@ -41,32 +39,16 @@ var _ = math.Inf // 
proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package -type WorkflowIdentifier struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` -} - -func (m *WorkflowIdentifier) Reset() { *m = WorkflowIdentifier{} } -func (m *WorkflowIdentifier) String() string { return proto.CompactTextString(m) } -func (*WorkflowIdentifier) ProtoMessage() {} -func (*WorkflowIdentifier) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } - -func (m *WorkflowIdentifier) GetId() string { - if m != nil { - return m.Id - } - return "" -} - -type SearchWorkflowResponse struct { +type WorkflowList struct { Workflows []string `protobuf:"bytes,1,rep,name=workflows" json:"workflows,omitempty"` } -func (m *SearchWorkflowResponse) Reset() { *m = SearchWorkflowResponse{} } -func (m *SearchWorkflowResponse) String() string { return proto.CompactTextString(m) } -func (*SearchWorkflowResponse) ProtoMessage() {} -func (*SearchWorkflowResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (m *WorkflowList) Reset() { *m = WorkflowList{} } +func (m *WorkflowList) String() string { return proto.CompactTextString(m) } +func (*WorkflowList) ProtoMessage() {} +func (*WorkflowList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } -func (m *SearchWorkflowResponse) GetWorkflows() []string { +func (m *WorkflowList) GetWorkflows() []string { if m != nil { return m.Workflows } @@ -80,7 +62,7 @@ type InvocationListQuery struct { func (m *InvocationListQuery) Reset() { *m = InvocationListQuery{} } func (m *InvocationListQuery) String() string { return proto.CompactTextString(m) } func (*InvocationListQuery) ProtoMessage() {} -func (*InvocationListQuery) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (*InvocationListQuery) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } func (m *InvocationListQuery) GetWorkflows() []string { if m != nil { @@ -89,30 +71,14 @@ func (m *InvocationListQuery) GetWorkflows() []string { return nil } -type WorkflowInvocationIdentifier struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` -} - -func (m *WorkflowInvocationIdentifier) Reset() { *m = WorkflowInvocationIdentifier{} } -func (m *WorkflowInvocationIdentifier) String() string { return proto.CompactTextString(m) } -func (*WorkflowInvocationIdentifier) ProtoMessage() {} -func (*WorkflowInvocationIdentifier) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } - -func (m *WorkflowInvocationIdentifier) GetId() string { - if m != nil { - return m.Id - } - return "" -} - type WorkflowInvocationList struct { - Invocations []string `protobuf:"bytes,1,rep,name=store" json:"store,omitempty"` + Invocations []string `protobuf:"bytes,1,rep,name=invocations" json:"invocations,omitempty"` } func (m *WorkflowInvocationList) Reset() { *m = WorkflowInvocationList{} } func (m *WorkflowInvocationList) String() string { return proto.CompactTextString(m) } func (*WorkflowInvocationList) ProtoMessage() {} -func (*WorkflowInvocationList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (*WorkflowInvocationList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } func (m *WorkflowInvocationList) GetInvocations() []string { if m != nil { @@ -128,7 +94,7 @@ type Health struct { func (m *Health) Reset() { *m = Health{} } func (m *Health) String() string { return proto.CompactTextString(m) } func (*Health) ProtoMessage() {} -func (*Health) 
Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } +func (*Health) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } func (m *Health) GetStatus() string { if m != nil { @@ -138,10 +104,8 @@ func (m *Health) GetStatus() string { } func init() { - proto.RegisterType((*WorkflowIdentifier)(nil), "fission.workflows.apiserver.WorkflowIdentifier") - proto.RegisterType((*SearchWorkflowResponse)(nil), "fission.workflows.apiserver.SearchWorkflowResponse") + proto.RegisterType((*WorkflowList)(nil), "fission.workflows.apiserver.WorkflowList") proto.RegisterType((*InvocationListQuery)(nil), "fission.workflows.apiserver.InvocationListQuery") - proto.RegisterType((*WorkflowInvocationIdentifier)(nil), "fission.workflows.apiserver.WorkflowInvocationIdentifier") proto.RegisterType((*WorkflowInvocationList)(nil), "fission.workflows.apiserver.WorkflowInvocationList") proto.RegisterType((*Health)(nil), "fission.workflows.apiserver.Health") } @@ -157,11 +121,11 @@ const _ = grpc.SupportPackageIsVersion4 // Client API for WorkflowAPI service type WorkflowAPIClient interface { - Create(ctx context.Context, in *fission_workflows_types.WorkflowSpec, opts ...grpc.CallOption) (*WorkflowIdentifier, error) - List(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*SearchWorkflowResponse, error) - Get(ctx context.Context, in *WorkflowIdentifier, opts ...grpc.CallOption) (*fission_workflows_types.Workflow, error) - Delete(ctx context.Context, in *WorkflowIdentifier, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) - Validate(ctx context.Context, in *fission_workflows_types.WorkflowSpec, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + Create(ctx context.Context, in *fission_workflows_types1.WorkflowSpec, opts ...grpc.CallOption) (*fission_workflows_types1.ObjectMetadata, error) + List(ctx context.Context, in *google_protobuf2.Empty, opts ...grpc.CallOption) (*WorkflowList, error) + Get(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*fission_workflows_types1.Workflow, error) + Delete(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) + Validate(ctx context.Context, in *fission_workflows_types1.WorkflowSpec, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) } type workflowAPIClient struct { @@ -172,8 +136,8 @@ func NewWorkflowAPIClient(cc *grpc.ClientConn) WorkflowAPIClient { return &workflowAPIClient{cc} } -func (c *workflowAPIClient) Create(ctx context.Context, in *fission_workflows_types.WorkflowSpec, opts ...grpc.CallOption) (*WorkflowIdentifier, error) { - out := new(WorkflowIdentifier) +func (c *workflowAPIClient) Create(ctx context.Context, in *fission_workflows_types1.WorkflowSpec, opts ...grpc.CallOption) (*fission_workflows_types1.ObjectMetadata, error) { + out := new(fission_workflows_types1.ObjectMetadata) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowAPI/Create", in, out, c.cc, opts...) 
if err != nil { return nil, err @@ -181,8 +145,8 @@ func (c *workflowAPIClient) Create(ctx context.Context, in *fission_workflows_ty return out, nil } -func (c *workflowAPIClient) List(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*SearchWorkflowResponse, error) { - out := new(SearchWorkflowResponse) +func (c *workflowAPIClient) List(ctx context.Context, in *google_protobuf2.Empty, opts ...grpc.CallOption) (*WorkflowList, error) { + out := new(WorkflowList) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowAPI/List", in, out, c.cc, opts...) if err != nil { return nil, err @@ -190,8 +154,8 @@ func (c *workflowAPIClient) List(ctx context.Context, in *google_protobuf1.Empty return out, nil } -func (c *workflowAPIClient) Get(ctx context.Context, in *WorkflowIdentifier, opts ...grpc.CallOption) (*fission_workflows_types.Workflow, error) { - out := new(fission_workflows_types.Workflow) +func (c *workflowAPIClient) Get(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*fission_workflows_types1.Workflow, error) { + out := new(fission_workflows_types1.Workflow) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowAPI/Get", in, out, c.cc, opts...) if err != nil { return nil, err @@ -199,8 +163,8 @@ func (c *workflowAPIClient) Get(ctx context.Context, in *WorkflowIdentifier, opt return out, nil } -func (c *workflowAPIClient) Delete(ctx context.Context, in *WorkflowIdentifier, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { - out := new(google_protobuf1.Empty) +func (c *workflowAPIClient) Delete(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) { + out := new(google_protobuf2.Empty) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowAPI/Delete", in, out, c.cc, opts...) if err != nil { return nil, err @@ -208,8 +172,8 @@ func (c *workflowAPIClient) Delete(ctx context.Context, in *WorkflowIdentifier, return out, nil } -func (c *workflowAPIClient) Validate(ctx context.Context, in *fission_workflows_types.WorkflowSpec, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { - out := new(google_protobuf1.Empty) +func (c *workflowAPIClient) Validate(ctx context.Context, in *fission_workflows_types1.WorkflowSpec, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) { + out := new(google_protobuf2.Empty) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowAPI/Validate", in, out, c.cc, opts...) 
if err != nil { return nil, err @@ -220,11 +184,11 @@ func (c *workflowAPIClient) Validate(ctx context.Context, in *fission_workflows_ // Server API for WorkflowAPI service type WorkflowAPIServer interface { - Create(context.Context, *fission_workflows_types.WorkflowSpec) (*WorkflowIdentifier, error) - List(context.Context, *google_protobuf1.Empty) (*SearchWorkflowResponse, error) - Get(context.Context, *WorkflowIdentifier) (*fission_workflows_types.Workflow, error) - Delete(context.Context, *WorkflowIdentifier) (*google_protobuf1.Empty, error) - Validate(context.Context, *fission_workflows_types.WorkflowSpec) (*google_protobuf1.Empty, error) + Create(context.Context, *fission_workflows_types1.WorkflowSpec) (*fission_workflows_types1.ObjectMetadata, error) + List(context.Context, *google_protobuf2.Empty) (*WorkflowList, error) + Get(context.Context, *fission_workflows_types1.ObjectMetadata) (*fission_workflows_types1.Workflow, error) + Delete(context.Context, *fission_workflows_types1.ObjectMetadata) (*google_protobuf2.Empty, error) + Validate(context.Context, *fission_workflows_types1.WorkflowSpec) (*google_protobuf2.Empty, error) } func RegisterWorkflowAPIServer(s *grpc.Server, srv WorkflowAPIServer) { @@ -232,7 +196,7 @@ func RegisterWorkflowAPIServer(s *grpc.Server, srv WorkflowAPIServer) { } func _WorkflowAPI_Create_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(fission_workflows_types.WorkflowSpec) + in := new(fission_workflows_types1.WorkflowSpec) if err := dec(in); err != nil { return nil, err } @@ -244,13 +208,13 @@ func _WorkflowAPI_Create_Handler(srv interface{}, ctx context.Context, dec func( FullMethod: "/fission.workflows.apiserver.WorkflowAPI/Create", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowAPIServer).Create(ctx, req.(*fission_workflows_types.WorkflowSpec)) + return srv.(WorkflowAPIServer).Create(ctx, req.(*fission_workflows_types1.WorkflowSpec)) } return interceptor(ctx, in, info, handler) } func _WorkflowAPI_List_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(google_protobuf1.Empty) + in := new(google_protobuf2.Empty) if err := dec(in); err != nil { return nil, err } @@ -262,13 +226,13 @@ func _WorkflowAPI_List_Handler(srv interface{}, ctx context.Context, dec func(in FullMethod: "/fission.workflows.apiserver.WorkflowAPI/List", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowAPIServer).List(ctx, req.(*google_protobuf1.Empty)) + return srv.(WorkflowAPIServer).List(ctx, req.(*google_protobuf2.Empty)) } return interceptor(ctx, in, info, handler) } func _WorkflowAPI_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowIdentifier) + in := new(fission_workflows_types1.ObjectMetadata) if err := dec(in); err != nil { return nil, err } @@ -280,13 +244,13 @@ func _WorkflowAPI_Get_Handler(srv interface{}, ctx context.Context, dec func(int FullMethod: "/fission.workflows.apiserver.WorkflowAPI/Get", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowAPIServer).Get(ctx, req.(*WorkflowIdentifier)) + return srv.(WorkflowAPIServer).Get(ctx, req.(*fission_workflows_types1.ObjectMetadata)) } return interceptor(ctx, in, 
info, handler) } func _WorkflowAPI_Delete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowIdentifier) + in := new(fission_workflows_types1.ObjectMetadata) if err := dec(in); err != nil { return nil, err } @@ -298,13 +262,13 @@ func _WorkflowAPI_Delete_Handler(srv interface{}, ctx context.Context, dec func( FullMethod: "/fission.workflows.apiserver.WorkflowAPI/Delete", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowAPIServer).Delete(ctx, req.(*WorkflowIdentifier)) + return srv.(WorkflowAPIServer).Delete(ctx, req.(*fission_workflows_types1.ObjectMetadata)) } return interceptor(ctx, in, info, handler) } func _WorkflowAPI_Validate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(fission_workflows_types.WorkflowSpec) + in := new(fission_workflows_types1.WorkflowSpec) if err := dec(in); err != nil { return nil, err } @@ -316,7 +280,7 @@ func _WorkflowAPI_Validate_Handler(srv interface{}, ctx context.Context, dec fun FullMethod: "/fission.workflows.apiserver.WorkflowAPI/Validate", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowAPIServer).Validate(ctx, req.(*fission_workflows_types.WorkflowSpec)) + return srv.(WorkflowAPIServer).Validate(ctx, req.(*fission_workflows_types1.WorkflowSpec)) } return interceptor(ctx, in, info, handler) } @@ -356,21 +320,21 @@ type WorkflowInvocationAPIClient interface { // Create a new workflow invocation // // In case the invocation specification is missing fields or contains invalid fields, a HTTP 400 is returned. - Invoke(ctx context.Context, in *fission_workflows_types.WorkflowInvocationSpec, opts ...grpc.CallOption) (*WorkflowInvocationIdentifier, error) - InvokeSync(ctx context.Context, in *fission_workflows_types.WorkflowInvocationSpec, opts ...grpc.CallOption) (*fission_workflows_types.WorkflowInvocation, error) + Invoke(ctx context.Context, in *fission_workflows_types1.WorkflowInvocationSpec, opts ...grpc.CallOption) (*fission_workflows_types1.ObjectMetadata, error) + InvokeSync(ctx context.Context, in *fission_workflows_types1.WorkflowInvocationSpec, opts ...grpc.CallOption) (*fission_workflows_types1.WorkflowInvocation, error) // Cancel a workflow invocation // // This action is irreverisble. A canceled invocation cannot be resumed or restarted. // In case that an invocation already is canceled, has failed or has completed, nothing happens. // In case that an invocation does not exist a HTTP 404 error status is returned. - Cancel(ctx context.Context, in *WorkflowInvocationIdentifier, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + Cancel(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) List(ctx context.Context, in *InvocationListQuery, opts ...grpc.CallOption) (*WorkflowInvocationList, error) // Get the specification and status of a workflow invocation // // Get returns three different aspects of the workflow invocation, namely the spec (specification), status and logs. // To lighten the request load, consider using a more specific request. 
- Get(ctx context.Context, in *WorkflowInvocationIdentifier, opts ...grpc.CallOption) (*fission_workflows_types.WorkflowInvocation, error) - Validate(ctx context.Context, in *fission_workflows_types.WorkflowInvocationSpec, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) + Get(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*fission_workflows_types1.WorkflowInvocation, error) + Validate(ctx context.Context, in *fission_workflows_types1.WorkflowInvocationSpec, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) } type workflowInvocationAPIClient struct { @@ -381,8 +345,8 @@ func NewWorkflowInvocationAPIClient(cc *grpc.ClientConn) WorkflowInvocationAPICl return &workflowInvocationAPIClient{cc} } -func (c *workflowInvocationAPIClient) Invoke(ctx context.Context, in *fission_workflows_types.WorkflowInvocationSpec, opts ...grpc.CallOption) (*WorkflowInvocationIdentifier, error) { - out := new(WorkflowInvocationIdentifier) +func (c *workflowInvocationAPIClient) Invoke(ctx context.Context, in *fission_workflows_types1.WorkflowInvocationSpec, opts ...grpc.CallOption) (*fission_workflows_types1.ObjectMetadata, error) { + out := new(fission_workflows_types1.ObjectMetadata) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowInvocationAPI/Invoke", in, out, c.cc, opts...) if err != nil { return nil, err @@ -390,8 +354,8 @@ func (c *workflowInvocationAPIClient) Invoke(ctx context.Context, in *fission_wo return out, nil } -func (c *workflowInvocationAPIClient) InvokeSync(ctx context.Context, in *fission_workflows_types.WorkflowInvocationSpec, opts ...grpc.CallOption) (*fission_workflows_types.WorkflowInvocation, error) { - out := new(fission_workflows_types.WorkflowInvocation) +func (c *workflowInvocationAPIClient) InvokeSync(ctx context.Context, in *fission_workflows_types1.WorkflowInvocationSpec, opts ...grpc.CallOption) (*fission_workflows_types1.WorkflowInvocation, error) { + out := new(fission_workflows_types1.WorkflowInvocation) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowInvocationAPI/InvokeSync", in, out, c.cc, opts...) if err != nil { return nil, err @@ -399,8 +363,8 @@ func (c *workflowInvocationAPIClient) InvokeSync(ctx context.Context, in *fissio return out, nil } -func (c *workflowInvocationAPIClient) Cancel(ctx context.Context, in *WorkflowInvocationIdentifier, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { - out := new(google_protobuf1.Empty) +func (c *workflowInvocationAPIClient) Cancel(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) { + out := new(google_protobuf2.Empty) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowInvocationAPI/Cancel", in, out, c.cc, opts...) if err != nil { return nil, err @@ -417,8 +381,8 @@ func (c *workflowInvocationAPIClient) List(ctx context.Context, in *InvocationLi return out, nil } -func (c *workflowInvocationAPIClient) Get(ctx context.Context, in *WorkflowInvocationIdentifier, opts ...grpc.CallOption) (*fission_workflows_types.WorkflowInvocation, error) { - out := new(fission_workflows_types.WorkflowInvocation) +func (c *workflowInvocationAPIClient) Get(ctx context.Context, in *fission_workflows_types1.ObjectMetadata, opts ...grpc.CallOption) (*fission_workflows_types1.WorkflowInvocation, error) { + out := new(fission_workflows_types1.WorkflowInvocation) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowInvocationAPI/Get", in, out, c.cc, opts...) 
if err != nil { return nil, err @@ -426,8 +390,8 @@ func (c *workflowInvocationAPIClient) Get(ctx context.Context, in *WorkflowInvoc return out, nil } -func (c *workflowInvocationAPIClient) Validate(ctx context.Context, in *fission_workflows_types.WorkflowInvocationSpec, opts ...grpc.CallOption) (*google_protobuf1.Empty, error) { - out := new(google_protobuf1.Empty) +func (c *workflowInvocationAPIClient) Validate(ctx context.Context, in *fission_workflows_types1.WorkflowInvocationSpec, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) { + out := new(google_protobuf2.Empty) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.WorkflowInvocationAPI/Validate", in, out, c.cc, opts...) if err != nil { return nil, err @@ -441,21 +405,21 @@ type WorkflowInvocationAPIServer interface { // Create a new workflow invocation // // In case the invocation specification is missing fields or contains invalid fields, a HTTP 400 is returned. - Invoke(context.Context, *fission_workflows_types.WorkflowInvocationSpec) (*WorkflowInvocationIdentifier, error) - InvokeSync(context.Context, *fission_workflows_types.WorkflowInvocationSpec) (*fission_workflows_types.WorkflowInvocation, error) + Invoke(context.Context, *fission_workflows_types1.WorkflowInvocationSpec) (*fission_workflows_types1.ObjectMetadata, error) + InvokeSync(context.Context, *fission_workflows_types1.WorkflowInvocationSpec) (*fission_workflows_types1.WorkflowInvocation, error) // Cancel a workflow invocation // // This action is irreverisble. A canceled invocation cannot be resumed or restarted. // In case that an invocation already is canceled, has failed or has completed, nothing happens. // In case that an invocation does not exist a HTTP 404 error status is returned. - Cancel(context.Context, *WorkflowInvocationIdentifier) (*google_protobuf1.Empty, error) + Cancel(context.Context, *fission_workflows_types1.ObjectMetadata) (*google_protobuf2.Empty, error) List(context.Context, *InvocationListQuery) (*WorkflowInvocationList, error) // Get the specification and status of a workflow invocation // // Get returns three different aspects of the workflow invocation, namely the spec (specification), status and logs. // To lighten the request load, consider using a more specific request. 
- Get(context.Context, *WorkflowInvocationIdentifier) (*fission_workflows_types.WorkflowInvocation, error) - Validate(context.Context, *fission_workflows_types.WorkflowInvocationSpec) (*google_protobuf1.Empty, error) + Get(context.Context, *fission_workflows_types1.ObjectMetadata) (*fission_workflows_types1.WorkflowInvocation, error) + Validate(context.Context, *fission_workflows_types1.WorkflowInvocationSpec) (*google_protobuf2.Empty, error) } func RegisterWorkflowInvocationAPIServer(s *grpc.Server, srv WorkflowInvocationAPIServer) { @@ -463,7 +427,7 @@ func RegisterWorkflowInvocationAPIServer(s *grpc.Server, srv WorkflowInvocationA } func _WorkflowInvocationAPI_Invoke_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(fission_workflows_types.WorkflowInvocationSpec) + in := new(fission_workflows_types1.WorkflowInvocationSpec) if err := dec(in); err != nil { return nil, err } @@ -475,13 +439,13 @@ func _WorkflowInvocationAPI_Invoke_Handler(srv interface{}, ctx context.Context, FullMethod: "/fission.workflows.apiserver.WorkflowInvocationAPI/Invoke", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowInvocationAPIServer).Invoke(ctx, req.(*fission_workflows_types.WorkflowInvocationSpec)) + return srv.(WorkflowInvocationAPIServer).Invoke(ctx, req.(*fission_workflows_types1.WorkflowInvocationSpec)) } return interceptor(ctx, in, info, handler) } func _WorkflowInvocationAPI_InvokeSync_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(fission_workflows_types.WorkflowInvocationSpec) + in := new(fission_workflows_types1.WorkflowInvocationSpec) if err := dec(in); err != nil { return nil, err } @@ -493,13 +457,13 @@ func _WorkflowInvocationAPI_InvokeSync_Handler(srv interface{}, ctx context.Cont FullMethod: "/fission.workflows.apiserver.WorkflowInvocationAPI/InvokeSync", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowInvocationAPIServer).InvokeSync(ctx, req.(*fission_workflows_types.WorkflowInvocationSpec)) + return srv.(WorkflowInvocationAPIServer).InvokeSync(ctx, req.(*fission_workflows_types1.WorkflowInvocationSpec)) } return interceptor(ctx, in, info, handler) } func _WorkflowInvocationAPI_Cancel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowInvocationIdentifier) + in := new(fission_workflows_types1.ObjectMetadata) if err := dec(in); err != nil { return nil, err } @@ -511,7 +475,7 @@ func _WorkflowInvocationAPI_Cancel_Handler(srv interface{}, ctx context.Context, FullMethod: "/fission.workflows.apiserver.WorkflowInvocationAPI/Cancel", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowInvocationAPIServer).Cancel(ctx, req.(*WorkflowInvocationIdentifier)) + return srv.(WorkflowInvocationAPIServer).Cancel(ctx, req.(*fission_workflows_types1.ObjectMetadata)) } return interceptor(ctx, in, info, handler) } @@ -535,7 +499,7 @@ func _WorkflowInvocationAPI_List_Handler(srv interface{}, ctx context.Context, d } func _WorkflowInvocationAPI_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowInvocationIdentifier) + in := 
new(fission_workflows_types1.ObjectMetadata) if err := dec(in); err != nil { return nil, err } @@ -547,13 +511,13 @@ func _WorkflowInvocationAPI_Get_Handler(srv interface{}, ctx context.Context, de FullMethod: "/fission.workflows.apiserver.WorkflowInvocationAPI/Get", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowInvocationAPIServer).Get(ctx, req.(*WorkflowInvocationIdentifier)) + return srv.(WorkflowInvocationAPIServer).Get(ctx, req.(*fission_workflows_types1.ObjectMetadata)) } return interceptor(ctx, in, info, handler) } func _WorkflowInvocationAPI_Validate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(fission_workflows_types.WorkflowInvocationSpec) + in := new(fission_workflows_types1.WorkflowInvocationSpec) if err := dec(in); err != nil { return nil, err } @@ -565,7 +529,7 @@ func _WorkflowInvocationAPI_Validate_Handler(srv interface{}, ctx context.Contex FullMethod: "/fission.workflows.apiserver.WorkflowInvocationAPI/Validate", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowInvocationAPIServer).Validate(ctx, req.(*fission_workflows_types.WorkflowInvocationSpec)) + return srv.(WorkflowInvocationAPIServer).Validate(ctx, req.(*fission_workflows_types1.WorkflowInvocationSpec)) } return interceptor(ctx, in, info, handler) } @@ -606,8 +570,8 @@ var _WorkflowInvocationAPI_serviceDesc = grpc.ServiceDesc{ // Client API for AdminAPI service type AdminAPIClient interface { - Status(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*Health, error) - Version(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*fission_workflows_version.Info, error) + Status(ctx context.Context, in *google_protobuf2.Empty, opts ...grpc.CallOption) (*Health, error) + Version(ctx context.Context, in *google_protobuf2.Empty, opts ...grpc.CallOption) (*fission_workflows_version.Info, error) } type adminAPIClient struct { @@ -618,7 +582,7 @@ func NewAdminAPIClient(cc *grpc.ClientConn) AdminAPIClient { return &adminAPIClient{cc} } -func (c *adminAPIClient) Status(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*Health, error) { +func (c *adminAPIClient) Status(ctx context.Context, in *google_protobuf2.Empty, opts ...grpc.CallOption) (*Health, error) { out := new(Health) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.AdminAPI/Status", in, out, c.cc, opts...) if err != nil { @@ -627,7 +591,7 @@ func (c *adminAPIClient) Status(ctx context.Context, in *google_protobuf1.Empty, return out, nil } -func (c *adminAPIClient) Version(ctx context.Context, in *google_protobuf1.Empty, opts ...grpc.CallOption) (*fission_workflows_version.Info, error) { +func (c *adminAPIClient) Version(ctx context.Context, in *google_protobuf2.Empty, opts ...grpc.CallOption) (*fission_workflows_version.Info, error) { out := new(fission_workflows_version.Info) err := grpc.Invoke(ctx, "/fission.workflows.apiserver.AdminAPI/Version", in, out, c.cc, opts...) 
if err != nil { @@ -639,8 +603,8 @@ func (c *adminAPIClient) Version(ctx context.Context, in *google_protobuf1.Empty // Server API for AdminAPI service type AdminAPIServer interface { - Status(context.Context, *google_protobuf1.Empty) (*Health, error) - Version(context.Context, *google_protobuf1.Empty) (*fission_workflows_version.Info, error) + Status(context.Context, *google_protobuf2.Empty) (*Health, error) + Version(context.Context, *google_protobuf2.Empty) (*fission_workflows_version.Info, error) } func RegisterAdminAPIServer(s *grpc.Server, srv AdminAPIServer) { @@ -648,7 +612,7 @@ func RegisterAdminAPIServer(s *grpc.Server, srv AdminAPIServer) { } func _AdminAPI_Status_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(google_protobuf1.Empty) + in := new(google_protobuf2.Empty) if err := dec(in); err != nil { return nil, err } @@ -660,13 +624,13 @@ func _AdminAPI_Status_Handler(srv interface{}, ctx context.Context, dec func(int FullMethod: "/fission.workflows.apiserver.AdminAPI/Status", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(AdminAPIServer).Status(ctx, req.(*google_protobuf1.Empty)) + return srv.(AdminAPIServer).Status(ctx, req.(*google_protobuf2.Empty)) } return interceptor(ctx, in, info, handler) } func _AdminAPI_Version_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(google_protobuf1.Empty) + in := new(google_protobuf2.Empty) if err := dec(in); err != nil { return nil, err } @@ -678,7 +642,7 @@ func _AdminAPI_Version_Handler(srv interface{}, ctx context.Context, dec func(in FullMethod: "/fission.workflows.apiserver.AdminAPI/Version", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(AdminAPIServer).Version(ctx, req.(*google_protobuf1.Empty)) + return srv.(AdminAPIServer).Version(ctx, req.(*google_protobuf2.Empty)) } return interceptor(ctx, in, info, handler) } @@ -703,48 +667,45 @@ var _AdminAPI_serviceDesc = grpc.ServiceDesc{ func init() { proto.RegisterFile("pkg/apiserver/apiserver.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 675 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xcf, 0x6e, 0xd3, 0x4e, - 0x10, 0xc7, 0xe5, 0xb6, 0xf2, 0xaf, 0x99, 0xe8, 0x57, 0x95, 0x69, 0x09, 0x25, 0x6d, 0xd5, 0xb0, - 0x80, 0x54, 0x82, 0xf0, 0xa2, 0x56, 0x42, 0x22, 0x07, 0xa4, 0x52, 0x10, 0x44, 0xe2, 0x00, 0x0d, - 0x6a, 0xa5, 0xde, 0x5c, 0x67, 0x93, 0xac, 0x9a, 0x7a, 0x8d, 0xbd, 0x49, 0x15, 0x10, 0x97, 0x5e, - 0xb8, 0x70, 0xe3, 0xc8, 0x73, 0x70, 0xe0, 0xc6, 0x3b, 0xf0, 0x0a, 0x3c, 0x08, 0xca, 0x7a, 0xed, - 0xb8, 0xd8, 0x4e, 0x6a, 0xb8, 0xc4, 0xf1, 0xec, 0xce, 0x7c, 0xbe, 0x3b, 0x7f, 0xd6, 0xb0, 0xe9, - 0x9d, 0x76, 0xa9, 0xed, 0xf1, 0x80, 0xf9, 0x43, 0xe6, 0x4f, 0xfe, 0x59, 0x9e, 0x2f, 0xa4, 0xc0, - 0xf5, 0x0e, 0x0f, 0x02, 0x2e, 0x5c, 0xeb, 0x5c, 0xf8, 0xa7, 0x9d, 0xbe, 0x38, 0x0f, 0xac, 0x78, - 0x4b, 0xb5, 0xd1, 0xe5, 0xb2, 0x37, 0x38, 0xb1, 0x1c, 0x71, 0x46, 0xf5, 0xbe, 0xe8, 0xf9, 0x20, - 0xde, 0x4f, 0xc7, 0x00, 0x39, 0xf2, 0x58, 0x10, 0xfe, 0x86, 0x81, 0xab, 0x4f, 0xae, 0xec, 0x3b, - 0x64, 0xbe, 0x5a, 0xd5, 0x4f, 0xed, 0xbf, 0xde, 0x15, 0xa2, 0xdb, 0x67, 0x54, 0xbd, 0x9d, 0x0c, - 0x3a, 0x94, 0x9d, 0x79, 0x72, 0xa4, 0x17, 0x37, 0xf4, 0xa2, 0xed, 0x71, 0x6a, 0xbb, 0xae, 0x90, - 0xb6, 0xe4, 0xc2, 0xd5, 0x68, 0x72, 0x07, 
0xf0, 0x48, 0x13, 0x9a, 0x6d, 0xe6, 0x4a, 0xde, 0xe1, - 0xcc, 0xc7, 0x25, 0x98, 0xe3, 0xed, 0x35, 0xa3, 0x66, 0x6c, 0x97, 0x0e, 0xe6, 0x78, 0x9b, 0x3c, - 0x82, 0x4a, 0x8b, 0xd9, 0xbe, 0xd3, 0x8b, 0xf6, 0x1e, 0xb0, 0xc0, 0x13, 0x6e, 0xc0, 0x70, 0x03, - 0x4a, 0xb1, 0xc2, 0x35, 0xa3, 0x36, 0xbf, 0x5d, 0x3a, 0x98, 0x18, 0xc8, 0x2e, 0xac, 0x34, 0xdd, - 0xa1, 0x70, 0x14, 0xf2, 0x15, 0x0f, 0xe4, 0x9b, 0x01, 0xf3, 0x47, 0x33, 0x9c, 0x2c, 0xd8, 0x88, - 0x25, 0xc5, 0xce, 0x53, 0xc4, 0x35, 0xa0, 0x92, 0xde, 0x3f, 0x86, 0x61, 0x0d, 0xca, 0x3c, 0xb6, - 0x44, 0xa4, 0xa4, 0x89, 0xd4, 0xc0, 0x7c, 0xc9, 0xec, 0xbe, 0xec, 0x61, 0x05, 0xcc, 0x40, 0xda, - 0x72, 0x10, 0xe8, 0xc8, 0xfa, 0x6d, 0xe7, 0xdb, 0x02, 0x94, 0xa3, 0xf0, 0x7b, 0xaf, 0x9b, 0x38, - 0x04, 0x73, 0xdf, 0x67, 0xb6, 0x64, 0x78, 0xd7, 0x4a, 0xf7, 0x43, 0x58, 0xd5, 0x68, 0x7f, 0xcb, - 0x63, 0x4e, 0x95, 0x5a, 0x53, 0xda, 0xc6, 0x4a, 0x27, 0x9f, 0xac, 0x5e, 0xfc, 0xfc, 0xf5, 0x65, - 0x6e, 0x89, 0x94, 0x68, 0xe4, 0xd0, 0x30, 0xea, 0xd8, 0x81, 0x05, 0x75, 0xa6, 0x8a, 0x15, 0xd6, - 0xd3, 0x8a, 0x8a, 0x6d, 0x3d, 0x1f, 0x17, 0xbb, 0xba, 0x3b, 0x15, 0x93, 0x5d, 0x3d, 0x72, 0x4d, - 0xa1, 0xca, 0x38, 0x41, 0xe1, 0x3b, 0x98, 0x7f, 0xc1, 0x24, 0x16, 0x55, 0x5d, 0xbd, 0x35, 0x33, - 0x1b, 0xa4, 0xa2, 0x68, 0xcb, 0xb8, 0x14, 0xd3, 0xe8, 0x07, 0xde, 0xfe, 0x88, 0x1c, 0xcc, 0x67, - 0xac, 0xcf, 0x24, 0x2b, 0x4e, 0xcd, 0xc9, 0x46, 0x84, 0xaa, 0xff, 0x89, 0xea, 0xc1, 0xe2, 0xa1, - 0xdd, 0xe7, 0xed, 0x02, 0xf5, 0xcb, 0x43, 0x6c, 0x2a, 0xc4, 0x0d, 0x82, 0x13, 0xc4, 0x50, 0x87, - 0x6e, 0x18, 0xf5, 0x9d, 0x1f, 0x26, 0x5c, 0x4f, 0xb7, 0xe5, 0xb8, 0x83, 0x3e, 0x1b, 0x60, 0x8e, - 0x2d, 0xa7, 0xd9, 0xe7, 0xbd, 0x2c, 0x61, 0xe2, 0xaa, 0xc4, 0x3c, 0xbe, 0x5a, 0x82, 0x32, 0xc6, - 0x26, 0x4a, 0x09, 0x29, 0xd3, 0xc9, 0x00, 0x8c, 0x1b, 0xeb, 0xab, 0x01, 0x10, 0xca, 0x69, 0x8d, - 0x5c, 0xa7, 0xb8, 0xa4, 0xfb, 0x05, 0x1c, 0x08, 0x55, 0x22, 0xee, 0x91, 0xe5, 0x84, 0x08, 0x1a, - 0x8c, 0x5c, 0xa7, 0x61, 0xd4, 0x8f, 0x11, 0x53, 0x66, 0x1c, 0x80, 0xb9, 0x6f, 0xbb, 0x0e, 0xeb, - 0xe3, 0xdf, 0x1f, 0x3d, 0xb7, 0x84, 0x6b, 0x4a, 0x0d, 0xd6, 0x2f, 0x61, 0x55, 0x9f, 0x5c, 0x18, - 0x7a, 0xdc, 0x1e, 0x4e, 0xa5, 0x66, 0x5c, 0x6e, 0x33, 0x06, 0x31, 0xfb, 0xa6, 0x22, 0x2b, 0x4a, - 0xc9, 0xff, 0x98, 0x2c, 0x0e, 0x7e, 0x32, 0xc2, 0x59, 0xfc, 0x87, 0x93, 0x17, 0x2a, 0x8e, 0x4e, - 0x07, 0xa6, 0xd3, 0x21, 0x13, 0x63, 0x53, 0xb8, 0x41, 0xf2, 0xb2, 0xbf, 0xa5, 0x70, 0x37, 0xc9, - 0x6a, 0x12, 0x97, 0x1c, 0xa1, 0xef, 0x06, 0x2c, 0xee, 0xb5, 0xcf, 0xb8, 0x9a, 0x9a, 0x23, 0x30, - 0x5b, 0xea, 0x46, 0xce, 0xbd, 0x01, 0x6f, 0x4f, 0x4d, 0x53, 0x78, 0xcd, 0x93, 0x65, 0x05, 0x05, - 0x5c, 0xa4, 0x3d, 0x65, 0x78, 0x8f, 0x6f, 0xe1, 0xbf, 0xc3, 0xf0, 0x6b, 0x9a, 0x1b, 0x79, 0x2b, - 0x23, 0x72, 0xf4, 0x05, 0x6e, 0xba, 0x1d, 0x91, 0x88, 0xaa, 0xcd, 0x4f, 0xcb, 0xc7, 0xa5, 0x98, - 0x7d, 0x62, 0xaa, 0x78, 0xbb, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x7a, 0xd9, 0x98, 0xf7, 0x60, - 0x08, 0x00, 0x00, + // 625 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x6f, 0xd3, 0x30, + 0x14, 0xc0, 0x15, 0x36, 0x85, 0xd6, 0x85, 0xa9, 0x78, 0xa3, 0x8c, 0x6c, 0xd3, 0x8a, 0x11, 0x62, + 0x2b, 0x10, 0xa3, 0xf5, 0xd6, 0x03, 0xd2, 0x18, 0x08, 0x2a, 0x81, 0x80, 0x15, 0x6d, 0xd2, 0xc4, + 0xc5, 0x4d, 0xdd, 0xd6, 0x34, 0x8d, 0xa3, 0xc4, 0xed, 0xd4, 0x21, 0x2e, 0x3b, 0x71, 0xdf, 0x91, + 0x4f, 0xc2, 0xe7, 0xe0, 0x2b, 0xf0, 0x41, 0x50, 0x1c, 0xe7, 0x4f, 0xe9, 0xd2, 0x36, 0xe2, 0xd2, + 0x34, 0xcf, 0xef, 0xbd, 0xdf, 0xfb, 0xeb, 0x80, 0x1d, 0x77, 0xd0, 0xc3, 0xc4, 0x65, 0x3e, 0xf5, + 
0xc6, 0xd4, 0x4b, 0xfe, 0x99, 0xae, 0xc7, 0x05, 0x87, 0x5b, 0x5d, 0xe6, 0xfb, 0x8c, 0x3b, 0xe6, + 0x39, 0xf7, 0x06, 0x5d, 0x9b, 0x9f, 0xfb, 0x66, 0xac, 0x62, 0x34, 0x7a, 0x4c, 0xf4, 0x47, 0x6d, + 0xd3, 0xe2, 0x43, 0xac, 0xf4, 0xa2, 0xe7, 0xb3, 0x58, 0x1f, 0x07, 0x00, 0x31, 0x71, 0xa9, 0x1f, + 0xfe, 0x86, 0x8e, 0x8d, 0x17, 0x4b, 0xdb, 0x8e, 0xa9, 0x27, 0x4f, 0xd5, 0x53, 0xd9, 0x6f, 0xf5, + 0x38, 0xef, 0xd9, 0x14, 0xcb, 0xb7, 0xf6, 0xa8, 0x8b, 0xe9, 0xd0, 0x15, 0x13, 0x75, 0xb8, 0xad, + 0x0e, 0x89, 0xcb, 0x30, 0x71, 0x1c, 0x2e, 0x88, 0x60, 0xdc, 0x51, 0x68, 0xf4, 0x14, 0xdc, 0x3a, + 0x55, 0x84, 0x77, 0xcc, 0x17, 0x70, 0x1b, 0x14, 0x63, 0xe2, 0xa6, 0x56, 0x5d, 0xd9, 0x2b, 0x1e, + 0x27, 0x02, 0x54, 0x07, 0xeb, 0x4d, 0x67, 0xcc, 0x2d, 0xe9, 0x22, 0xd0, 0xff, 0x34, 0xa2, 0xde, + 0x64, 0x81, 0x51, 0x03, 0x54, 0x22, 0xc4, 0xb4, 0x31, 0xac, 0x82, 0x12, 0x8b, 0x25, 0x91, 0x65, + 0x5a, 0x84, 0xaa, 0x40, 0x7f, 0x4b, 0x89, 0x2d, 0xfa, 0xb0, 0x02, 0x74, 0x5f, 0x10, 0x31, 0x0a, + 0xd4, 0xb4, 0xbd, 0xe2, 0xb1, 0x7a, 0x3b, 0xb8, 0x5a, 0x05, 0xa5, 0xc8, 0xfd, 0xe1, 0xc7, 0x26, + 0x74, 0x80, 0x7e, 0xe4, 0x51, 0x22, 0x28, 0x7c, 0x64, 0xce, 0xf6, 0x2b, 0xac, 0x7a, 0xa4, 0xdf, + 0x72, 0xa9, 0x65, 0x3c, 0xce, 0x54, 0xfb, 0xd0, 0xfe, 0x4a, 0x2d, 0xf1, 0x9e, 0x0a, 0xd2, 0x21, + 0x82, 0xa0, 0x8d, 0xcb, 0xdf, 0x7f, 0xae, 0x6e, 0xac, 0xa1, 0x22, 0x8e, 0x14, 0x1b, 0x5a, 0x0d, + 0x7e, 0x01, 0xab, 0x32, 0x97, 0x8a, 0x19, 0xd6, 0xd9, 0x8c, 0x9a, 0x60, 0xbe, 0x0e, 0x9a, 0x60, + 0xec, 0x9b, 0x73, 0xa6, 0xc6, 0x4c, 0xd7, 0x1e, 0xdd, 0x91, 0x80, 0x12, 0x4c, 0x00, 0x90, 0x81, + 0x95, 0x37, 0x54, 0xc0, 0x65, 0x63, 0x34, 0x1e, 0x2c, 0xcc, 0x19, 0x55, 0x24, 0xa5, 0x0c, 0xd7, + 0x62, 0x0a, 0xfe, 0xc6, 0x3a, 0xdf, 0x21, 0x01, 0xfa, 0x2b, 0x6a, 0x53, 0x41, 0x97, 0xa7, 0x65, + 0xe4, 0x1c, 0x21, 0x6a, 0xff, 0x22, 0xfa, 0xa0, 0x70, 0x42, 0x6c, 0xd6, 0xc9, 0xd1, 0x9d, 0x2c, + 0xc4, 0x8e, 0x44, 0xdc, 0x43, 0x30, 0x41, 0x8c, 0x95, 0xeb, 0x86, 0x56, 0x3b, 0xf8, 0xa1, 0x83, + 0xbb, 0xb3, 0x43, 0x17, 0xcc, 0xc7, 0x05, 0xd0, 0x03, 0xc1, 0x80, 0x42, 0xbc, 0x30, 0x82, 0xc4, + 0x32, 0xdf, 0xa4, 0xa8, 0xfc, 0x51, 0x09, 0x27, 0xb3, 0x1c, 0xcc, 0xca, 0x4f, 0x0d, 0x80, 0x10, + 0xde, 0x9a, 0x38, 0x56, 0xfe, 0x00, 0x9e, 0xe4, 0x30, 0x40, 0x58, 0x06, 0xb1, 0x8f, 0xca, 0xa9, + 0x20, 0xb0, 0x3f, 0x71, 0xac, 0x86, 0x56, 0x3b, 0x83, 0x70, 0x46, 0x0c, 0x2d, 0xa0, 0x1f, 0x11, + 0xc7, 0xa2, 0xf6, 0xff, 0x0f, 0xc0, 0xa6, 0x64, 0xc3, 0xda, 0x14, 0x44, 0x8e, 0xc0, 0xa5, 0xa6, + 0xf6, 0xe5, 0xf9, 0xdc, 0xbd, 0xb8, 0xe6, 0x96, 0x31, 0xea, 0x4b, 0x6d, 0xd2, 0xb4, 0x25, 0x5a, + 0x97, 0x91, 0xdc, 0x86, 0xe9, 0x56, 0xc0, 0x51, 0xce, 0xad, 0xca, 0x55, 0x77, 0x95, 0x3b, 0x9c, + 0xcd, 0x5d, 0xa4, 0xc6, 0x3f, 0x77, 0xef, 0xb3, 0x4a, 0xbd, 0x2b, 0x71, 0xf7, 0xd1, 0x46, 0x1a, + 0x97, 0x5e, 0x85, 0x5f, 0x1a, 0x28, 0x1c, 0x76, 0x86, 0x4c, 0x4e, 0xff, 0x29, 0xd0, 0x5b, 0xf2, + 0xde, 0xcc, 0xbc, 0xaf, 0x1e, 0xce, 0xad, 0x72, 0x78, 0x19, 0xa3, 0xb2, 0x84, 0x02, 0x58, 0xc0, + 0x7d, 0x29, 0xb8, 0x80, 0x9f, 0xc1, 0xcd, 0x93, 0xf0, 0x9b, 0x94, 0xe9, 0x79, 0xf7, 0x1a, 0xcf, + 0xd1, 0x77, 0xac, 0xe9, 0x74, 0x79, 0xca, 0xab, 0x12, 0xbf, 0x2c, 0x9d, 0x15, 0x63, 0x76, 0x5b, + 0x97, 0xfe, 0xea, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xa5, 0x7e, 0xa0, 0x12, 0xa6, 0x07, 0x00, + 0x00, } diff --git a/pkg/apiserver/apiserver.pb.gw.go b/pkg/apiserver/apiserver.pb.gw.go index ccc55188..29c2d11b 100644 --- a/pkg/apiserver/apiserver.pb.gw.go +++ b/pkg/apiserver/apiserver.pb.gw.go @@ -52,8 +52,12 @@ func request_WorkflowAPI_List_0(ctx context.Context, marshaler runtime.Marshaler } +var ( + 
filter_WorkflowAPI_Get_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + func request_WorkflowAPI_Get_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowAPIClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq WorkflowIdentifier + var protoReq types.ObjectMetadata var metadata runtime.ServerMetadata var ( @@ -74,13 +78,21 @@ func request_WorkflowAPI_Get_0(ctx context.Context, marshaler runtime.Marshaler, return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_WorkflowAPI_Get_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.Get(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } +var ( + filter_WorkflowAPI_Delete_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + func request_WorkflowAPI_Delete_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowAPIClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq WorkflowIdentifier + var protoReq types.ObjectMetadata var metadata runtime.ServerMetadata var ( @@ -101,6 +113,10 @@ func request_WorkflowAPI_Delete_0(ctx context.Context, marshaler runtime.Marshal return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_WorkflowAPI_Delete_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.Delete(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err @@ -162,8 +178,12 @@ func request_WorkflowInvocationAPI_InvokeSync_1(ctx context.Context, marshaler r } +var ( + filter_WorkflowInvocationAPI_Cancel_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + func request_WorkflowInvocationAPI_Cancel_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowInvocationAPIClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq WorkflowInvocationIdentifier + var protoReq types.ObjectMetadata var metadata runtime.ServerMetadata var ( @@ -184,6 +204,10 @@ func request_WorkflowInvocationAPI_Cancel_0(ctx context.Context, marshaler runti return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_WorkflowInvocationAPI_Cancel_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.Cancel(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err @@ -206,8 +230,12 @@ func request_WorkflowInvocationAPI_List_0(ctx context.Context, marshaler runtime } +var ( + filter_WorkflowInvocationAPI_Get_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + func request_WorkflowInvocationAPI_Get_0(ctx context.Context, marshaler runtime.Marshaler, client 
WorkflowInvocationAPIClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq WorkflowInvocationIdentifier + var protoReq types.ObjectMetadata var metadata runtime.ServerMetadata var ( @@ -228,6 +256,10 @@ func request_WorkflowInvocationAPI_Get_0(ctx context.Context, marshaler runtime. return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) } + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_WorkflowInvocationAPI_Get_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.Get(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err diff --git a/pkg/apiserver/apiserver.proto b/pkg/apiserver/apiserver.proto index 7b8f1d9a..0fa27dc2 100644 --- a/pkg/apiserver/apiserver.proto +++ b/pkg/apiserver/apiserver.proto @@ -10,27 +10,27 @@ import "google/api/annotations.proto"; service WorkflowAPI { - rpc Create (fission.workflows.types.WorkflowSpec) returns (WorkflowIdentifier) { + rpc Create (fission.workflows.types.WorkflowSpec) returns (fission.workflows.types.ObjectMetadata) { option (google.api.http) = { post: "/workflow" body: "*" }; } - rpc List (google.protobuf.Empty) returns (SearchWorkflowResponse) { + rpc List (google.protobuf.Empty) returns (WorkflowList) { option (google.api.http) = { get: "/workflow" }; } - rpc Get (WorkflowIdentifier) returns (fission.workflows.types.Workflow) { + rpc Get (fission.workflows.types.ObjectMetadata) returns (fission.workflows.types.Workflow) { option (google.api.http) = { get: "/workflow/{id}" }; } - rpc Delete (WorkflowIdentifier) returns (google.protobuf.Empty) { + rpc Delete (fission.workflows.types.ObjectMetadata) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/workflow/{id}" }; @@ -44,11 +44,7 @@ service WorkflowAPI { } } -message WorkflowIdentifier { - string id = 1; -} - -message SearchWorkflowResponse { +message WorkflowList { repeated string workflows = 1; } @@ -58,7 +54,7 @@ service WorkflowInvocationAPI { // Create a new workflow invocation // // In case the invocation specification is missing fields or contains invalid fields, a HTTP 400 is returned. - rpc Invoke (fission.workflows.types.WorkflowInvocationSpec) returns (WorkflowInvocationIdentifier) { + rpc Invoke (fission.workflows.types.WorkflowInvocationSpec) returns (fission.workflows.types.ObjectMetadata) { option (google.api.http) = { post: "/invocation" body: "*" @@ -80,7 +76,7 @@ service WorkflowInvocationAPI { // This action is irreverisble. A canceled invocation cannot be resumed or restarted. // In case that an invocation already is canceled, has failed or has completed, nothing happens. // In case that an invocation does not exist a HTTP 404 error status is returned. - rpc Cancel (WorkflowInvocationIdentifier) returns (google.protobuf.Empty) { + rpc Cancel (fission.workflows.types.ObjectMetadata) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/invocation/{id}" }; @@ -96,7 +92,7 @@ service WorkflowInvocationAPI { // // Get returns three different aspects of the workflow invocation, namely the spec (specification), status and logs. // To lighten the request load, consider using a more specific request. 
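The Get operation described above is also exposed through the HTTP gateway; the rpc definition and its route follow below. A minimal sketch of fetching an invocation over plain HTTP, assuming a placeholder apiserver address and invocation id (both are illustrative and not taken from this change):

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// Assumed gateway address and invocation id; substitute real values.
	resp, err := http.Get("http://localhost:8080/invocation/abc123")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The gateway returns the JSON-encoded WorkflowInvocation (spec, status, logs).
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}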
- rpc Get (WorkflowInvocationIdentifier) returns (fission.workflows.types.WorkflowInvocation) { + rpc Get (fission.workflows.types.ObjectMetadata) returns (fission.workflows.types.WorkflowInvocation) { option (google.api.http) = { get: "/invocation/{id}" }; @@ -114,10 +110,6 @@ message InvocationListQuery { repeated string workflows = 1; } -message WorkflowInvocationIdentifier { - string id = 1; -} - message WorkflowInvocationList { repeated string invocations = 1; } @@ -134,20 +126,6 @@ service AdminAPI { get: "/version" }; } - -// rpc Resume (google.protobuf.Empty) returns (google.protobuf.Empty) { -// option (google.api.http) = { -// get: "/resume" -// }; -// } -// -// rpc Halt (google.protobuf.Empty) returns (google.protobuf.Empty) { -// option (google.api.http) = { -// get: "/halt" -// }; -// } - - // TODO add config view / edit } message Health { diff --git a/pkg/fnenv/fission/envproxy.go b/pkg/apiserver/fission/envproxy.go similarity index 87% rename from pkg/fnenv/fission/envproxy.go rename to pkg/apiserver/fission/envproxy.go index d5dca6e3..f3765391 100644 --- a/pkg/fnenv/fission/envproxy.go +++ b/pkg/apiserver/fission/envproxy.go @@ -15,7 +15,9 @@ import ( "github.com/fission/fission" "github.com/fission/fission-workflows/pkg/apiserver" "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/types/typedvalues/httpconv" + "github.com/fission/fission-workflows/pkg/util" "github.com/fission/fission/router" "github.com/golang/protobuf/jsonpb" "github.com/opentracing/opentracing-go" @@ -32,8 +34,8 @@ type Proxy struct { fissionIds syncmap.Map // map[string]bool } -// NewFissionProxyServer creates a proxy server to adheres to the Fission Environment specification. -func NewFissionProxyServer(wfiSrv apiserver.WorkflowInvocationAPIServer, wfSrv apiserver.WorkflowAPIServer) *Proxy { +// NewEnvironmentProxyServer creates a proxy server to adheres to the Fission Environment specification. +func NewEnvironmentProxyServer(wfiSrv apiserver.WorkflowInvocationAPIServer, wfSrv apiserver.WorkflowAPIServer) *Proxy { return &Proxy{ invocationServer: wfiSrv, workflowServer: wfSrv, @@ -54,6 +56,7 @@ func (fp *Proxy) handleHealthCheck(w http.ResponseWriter, r *http.Request) { func (fp *Proxy) handleRequest(w http.ResponseWriter, r *http.Request) { ctx := r.Context() + // Optional: Parse opentracing spanCtx, err := opentracing.GlobalTracer().Extract(opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(r.Header)) @@ -81,8 +84,7 @@ func (fp *Proxy) handleRequest(w http.ResponseWriter, r *http.Request) { if !ok { // Fallback 1 : check if it is in the event store somewhere if fp.hasWorkflow(ctx, fnID) { - logrus.WithField("fnID", fnID). 
- Error("Unknown fission function name") + logrus.WithField("fnID", fnID).Error("Unknown fission function name") http.Error(w, "Unknown fission function name; not specialized", 400) return } @@ -102,6 +104,11 @@ func (fp *Proxy) handleRequest(w http.ResponseWriter, r *http.Request) { } // Temporary: in case of query header 'X-Async' being present, make request async + if logrus.GetLevel() == logrus.DebugLevel { + inputs, err := typedvalues.UnwrapMapTypedValue(wfSpec.Inputs) + util.LogIfError(err) + logrus.Debugf("Fission proxy request: %v - %v", wfSpec.WorkflowId, inputs) + } if len(r.Header.Get("X-Async")) > 0 { invocationID, invokeErr := fp.invocationServer.Invoke(ctx, wfSpec) if invokeErr != nil { @@ -129,17 +136,18 @@ func (fp *Proxy) handleRequest(w http.ResponseWriter, r *http.Request) { Message: "Unknown error", } } + + // Get output + httpconv.FormatResponse(w, wi.Status.Output, wi.Status.Error) + // Logging if !wi.Status.Successful() { logrus.Errorf("Invocation not successful, was '%v': %v", wi.Status.Status.String(), wi.Status.Error.Error()) } else if wi.Status.Output == nil { logrus.Infof("Invocation '%v' has no output.", fnID) } else { - logrus.Infof("Response Content-Type: %v", httpconv.DetermineContentType(wi.Status.Output)) + logrus.Infof("Response Content-Type: %v", w.Header().Get("Content-Type")) } - - // Get output - httpconv.FormatResponse(w, wi.Status.Output, wi.Status.Error) } func (fp *Proxy) handleSpecialize(w http.ResponseWriter, r *http.Request) { @@ -166,7 +174,7 @@ func (fp *Proxy) handleSpecialize(w http.ResponseWriter, r *http.Request) { } // Attempt to specialize with the provided function load request - wfIDs, err := fp.Specialize(ctx, flr) + wfIDs, err := fp.specialize(ctx, flr) if err != nil { logrus.Errorf("failed to specialize: %v", err) if os.IsNotExist(err) { @@ -183,11 +191,13 @@ func (fp *Proxy) handleSpecialize(w http.ResponseWriter, r *http.Request) { w.Write([]byte(strings.Join(wfIDs, ";"))) } -// Specialize creates workflows provided by a Fission Load Request. +// specialize creates workflows provided by a Fission Load Request. // // The Fission package can either exist out of a single workflow file, or out of a directory filled with // solely workflow definitions. -func (fp *Proxy) Specialize(ctx context.Context, flr *fission.FunctionLoadRequest) ([]string, error) { +// +// In the FunctionLoadRequest this function expects FilePath, and the objectmetadata, with the UID and Name, to be set. +func (fp *Proxy) specialize(ctx context.Context, flr *fission.FunctionLoadRequest) ([]string, error) { if flr == nil { return nil, errors.New("no function load request provided") } @@ -267,7 +277,7 @@ func (fp *Proxy) createWorkflowFromFile(ctx context.Context, flr *fission.Functi wfSpec := &types.WorkflowSpec{} err = jsonpb.Unmarshal(bytes.NewReader(raw), wfSpec) if err != nil { - return "", fmt.Errorf("failed to parse bytes into WorkflowSpec: %v", err) + return "", fmt.Errorf("failed to parse bytes to workflow specification: %v", err) } logrus.WithField("wfSpec", wfSpec).Info("Received valid WorkflowSpec from fetcher.") @@ -282,14 +292,14 @@ func (fp *Proxy) createWorkflowFromFile(ctx context.Context, flr *fission.Functi } wfID := resp.Id - // EvalCache the id so we don't have to check whether the workflow engine already has it. + // Cache the id so we don't have to check whether the workflow engine already has it. 
fp.fissionIds.Store(fissionID, true) return wfID, nil } func (fp *Proxy) hasWorkflow(ctx context.Context, fnID string) bool { - wf, err := fp.workflowServer.Get(ctx, &apiserver.WorkflowIdentifier{Id: fnID}) + wf, err := fp.workflowServer.Get(ctx, &types.ObjectMetadata{Id: fnID}) if err != nil { logrus.Errorf("Failed to get workflow: %v; assuming it is non-existent", err) } diff --git a/pkg/apiserver/fission/envproxy_test.go b/pkg/apiserver/fission/envproxy_test.go new file mode 100644 index 00000000..994f4794 --- /dev/null +++ b/pkg/apiserver/fission/envproxy_test.go @@ -0,0 +1,137 @@ +package fission + +import ( + "context" + "io/ioutil" + "testing" + + "github.com/fission/fission" + "github.com/fission/fission-workflows/pkg/apiserver" + "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/golang/protobuf/jsonpb" + "github.com/golang/protobuf/ptypes/empty" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "k8s.io/apimachinery/pkg/apis/meta/v1" + k8stypes "k8s.io/apimachinery/pkg/types" +) + +type mockInvocationServer struct { + mock.Mock +} + +func (m *mockInvocationServer) Invoke(ctx context.Context, spec *types.WorkflowInvocationSpec) (*types.ObjectMetadata, error) { + args := m.Called(spec) + id := spec.WorkflowId + if len(id) == 0 { + id = "randomUID" + } + return &types.ObjectMetadata{Id: args.String(0)}, args.Error(1) +} + +func (m *mockInvocationServer) InvokeSync(ctx context.Context, spec *types.WorkflowInvocationSpec) (*types. + WorkflowInvocation, error) { + args := m.Called(spec) + return args.Get(0).(*types.WorkflowInvocation), args.Error(1) +} + +func (m *mockInvocationServer) Cancel(ctx context.Context, id *types.ObjectMetadata) (*empty.Empty, error) { + args := m.Called(id) + return &empty.Empty{}, args.Error(1) +} + +func (m *mockInvocationServer) List(ctx context.Context, _ *apiserver.InvocationListQuery) (*apiserver.WorkflowInvocationList, error) { + args := m.Called() + return args.Get(0).(*apiserver.WorkflowInvocationList), args.Error(1) +} + +func (m *mockInvocationServer) Get(ctx context.Context, id *types.ObjectMetadata) (*types. 
+ WorkflowInvocation, error) { + args := m.Called(id) + return args.Get(0).(*types.WorkflowInvocation), args.Error(1) +} + +func (m *mockInvocationServer) Validate(ctx context.Context, spec *types.WorkflowInvocationSpec) (*empty.Empty, error) { + args := m.Called(spec) + return &empty.Empty{}, args.Error(1) +} + +type mockWorkflowServer struct { + mock.Mock +} + +func (m *mockWorkflowServer) Create(ctx context.Context, spec *types.WorkflowSpec) (*types.ObjectMetadata, error) { + args := m.Called(spec) + return &types.ObjectMetadata{Id: args.String(0)}, args.Error(1) +} + +func (m mockWorkflowServer) List(ctx context.Context, _ *empty.Empty) (*apiserver.WorkflowList, error) { + args := m.Called() + return args.Get(0).(*apiserver.WorkflowList), args.Error(1) +} + +func (m mockWorkflowServer) Get(ctx context.Context, id *types.ObjectMetadata) (*types.Workflow, error) { + args := m.Called(id) + return args.Get(0).(*types.Workflow), args.Error(1) +} + +func (m mockWorkflowServer) Delete(ctx context.Context, id *types.ObjectMetadata) (*empty.Empty, error) { + args := m.Called(id) + return &empty.Empty{}, args.Error(1) +} + +func (m mockWorkflowServer) Validate(ctx context.Context, spec *types.WorkflowSpec) (*empty.Empty, error) { + args := m.Called(spec) + return &empty.Empty{}, args.Error(1) +} + +func TestProxy_Specialize(t *testing.T) { + workflowServer := &mockWorkflowServer{} + workflowServer.On("Create", mock.Anything).Return("mockID", nil) + env := NewEnvironmentProxyServer(nil, workflowServer) + wf := &types.WorkflowSpec{ + ApiVersion: types.WorkflowAPIVersion, + OutputTask: "fakeFinalTask", + Tasks: map[string]*types.TaskSpec{ + "fakeFinalTask": { + FunctionRef: "noop", + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("{$.Tasks.FirstTask.Output}"), + }, + Requires: map[string]*types.TaskDependencyParameters{ + "FirstTask": {}, + }, + }, + "FirstTask": { + FunctionRef: "noop", + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("{$.Invocation.Inputs.default.toUpperCase()}"), + "complex": typedvalues.MustWrap(map[string]interface{}{ + "nested": map[string]interface{}{ + "object": 42, + }, + }), + }, + }, + }, + } + + // Store workflow in a temporary file (akin to fetcher request) + fd, err := ioutil.TempFile("", "test-fission-workflows-envproxy") + assert.NoError(t, err) + err = (&jsonpb.Marshaler{}).Marshal(fd, wf) + assert.NoError(t, err) + fd.Close() + + wfIds, err := env.specialize(context.Background(), &fission.FunctionLoadRequest{ + FilePath: fd.Name(), + FunctionMetadata: &v1.ObjectMeta{ + UID: k8stypes.UID("1"), + Name: "testFn", + }, + }) + assert.NoError(t, err) + assert.Equal(t, 1, len(wfIds)) + mock.AssertExpectationsForObjects(t, workflowServer) +} diff --git a/pkg/apiserver/httpclient/invocation.go b/pkg/apiserver/httpclient/invocation.go index 0966c0ec..917ce6a2 100644 --- a/pkg/apiserver/httpclient/invocation.go +++ b/pkg/apiserver/httpclient/invocation.go @@ -21,9 +21,9 @@ func NewInvocationAPI(endpoint string, client http.Client) *InvocationAPI { } } -func (api *InvocationAPI) Invoke(ctx context.Context, spec *types.WorkflowInvocationSpec) (*apiserver. 
- WorkflowInvocationIdentifier, error) { - result := &apiserver.WorkflowInvocationIdentifier{} +func (api *InvocationAPI) Invoke(ctx context.Context, spec *types.WorkflowInvocationSpec) (*types.ObjectMetadata, + error) { + result := &types.ObjectMetadata{} err := callWithJSON(ctx, http.MethodPost, api.formatURL("/invocation"), spec, result) return result, err } diff --git a/pkg/apiserver/httpclient/workflow.go b/pkg/apiserver/httpclient/workflow.go index c47870fc..a9f97ea3 100644 --- a/pkg/apiserver/httpclient/workflow.go +++ b/pkg/apiserver/httpclient/workflow.go @@ -21,14 +21,14 @@ func NewWorkflowAPI(endpoint string, client http.Client) *WorkflowAPI { } } -func (api *WorkflowAPI) Create(ctx context.Context, spec *types.WorkflowSpec) (*apiserver.WorkflowIdentifier, error) { - result := &apiserver.WorkflowIdentifier{} +func (api *WorkflowAPI) Create(ctx context.Context, spec *types.WorkflowSpec) (*types.ObjectMetadata, error) { + result := &types.ObjectMetadata{} err := callWithJSON(ctx, http.MethodPost, api.formatURL("/workflow"), spec, result) return result, err } -func (api *WorkflowAPI) List(ctx context.Context) (*apiserver.SearchWorkflowResponse, error) { - result := &apiserver.SearchWorkflowResponse{} +func (api *WorkflowAPI) List(ctx context.Context) (*apiserver.WorkflowList, error) { + result := &apiserver.WorkflowList{} err := callWithJSON(ctx, http.MethodGet, api.formatURL("/workflow"), nil, result) return result, err } diff --git a/pkg/apiserver/invocation.go b/pkg/apiserver/invocation.go index 1039b1f5..0afb037f 100644 --- a/pkg/apiserver/invocation.go +++ b/pkg/apiserver/invocation.go @@ -36,13 +36,13 @@ func (gi *Invocation) Validate(ctx context.Context, spec *types.WorkflowInvocati return &empty.Empty{}, nil } -func (gi *Invocation) Invoke(ctx context.Context, spec *types.WorkflowInvocationSpec) (*WorkflowInvocationIdentifier, error) { +func (gi *Invocation) Invoke(ctx context.Context, spec *types.WorkflowInvocationSpec) (*types.ObjectMetadata, error) { eventID, err := gi.api.Invoke(spec, api.WithContext(ctx)) if err != nil { return nil, toErrorStatus(err) } - return &WorkflowInvocationIdentifier{eventID}, nil + return &types.ObjectMetadata{Id: eventID}, nil } func (gi *Invocation) InvokeSync(ctx context.Context, spec *types.WorkflowInvocationSpec) (*types.WorkflowInvocation, error) { @@ -53,8 +53,8 @@ func (gi *Invocation) InvokeSync(ctx context.Context, spec *types.WorkflowInvoca return wfi, nil } -func (gi *Invocation) Cancel(ctx context.Context, invocationID *WorkflowInvocationIdentifier) (*empty.Empty, error) { - err := gi.api.Cancel(invocationID.GetId()) +func (gi *Invocation) Cancel(ctx context.Context, objectMetadata *types.ObjectMetadata) (*empty.Empty, error) { + err := gi.api.Cancel(objectMetadata.GetId()) if err != nil { return nil, toErrorStatus(err) } @@ -62,8 +62,8 @@ func (gi *Invocation) Cancel(ctx context.Context, invocationID *WorkflowInvocati return &empty.Empty{}, nil } -func (gi *Invocation) Get(ctx context.Context, invocationID *WorkflowInvocationIdentifier) (*types.WorkflowInvocation, error) { - wi, err := gi.store.GetInvocation(invocationID.GetId()) +func (gi *Invocation) Get(ctx context.Context, objectMetadata *types.ObjectMetadata) (*types.WorkflowInvocation, error) { + wi, err := gi.store.GetInvocation(objectMetadata.GetId()) if err != nil { return nil, toErrorStatus(err) } diff --git a/pkg/apiserver/workflow.go b/pkg/apiserver/workflow.go index e213dc2a..f5a37abd 100644 --- a/pkg/apiserver/workflow.go +++ b/pkg/apiserver/workflow.go @@ -24,16 
+24,16 @@ func NewWorkflow(api *api.Workflow, store *store.Workflows) *Workflow { return wf } -func (ga *Workflow) Create(ctx context.Context, spec *types.WorkflowSpec) (*WorkflowIdentifier, error) { +func (ga *Workflow) Create(ctx context.Context, spec *types.WorkflowSpec) (*types.ObjectMetadata, error) { id, err := ga.api.Create(spec, api.WithContext(ctx)) if err != nil { return nil, toErrorStatus(err) } - return &WorkflowIdentifier{id}, nil + return &types.ObjectMetadata{Id: id}, nil } -func (ga *Workflow) Get(ctx context.Context, workflowID *WorkflowIdentifier) (*types.Workflow, error) { +func (ga *Workflow) Get(ctx context.Context, workflowID *types.ObjectMetadata) (*types.Workflow, error) { wf, err := ga.store.GetWorkflow(workflowID.GetId()) if err != nil { return nil, toErrorStatus(err) @@ -41,7 +41,7 @@ func (ga *Workflow) Get(ctx context.Context, workflowID *WorkflowIdentifier) (*t return wf, nil } -func (ga *Workflow) Delete(ctx context.Context, workflowID *WorkflowIdentifier) (*empty.Empty, error) { +func (ga *Workflow) Delete(ctx context.Context, workflowID *types.ObjectMetadata) (*empty.Empty, error) { err := ga.api.Delete(workflowID.GetId()) if err != nil { return nil, toErrorStatus(err) @@ -49,13 +49,13 @@ func (ga *Workflow) Delete(ctx context.Context, workflowID *WorkflowIdentifier) return &empty.Empty{}, nil } -func (ga *Workflow) List(ctx context.Context, req *empty.Empty) (*SearchWorkflowResponse, error) { +func (ga *Workflow) List(ctx context.Context, req *empty.Empty) (*WorkflowList, error) { var results []string wfs := ga.store.List() for _, result := range wfs { results = append(results, result.Id) } - return &SearchWorkflowResponse{results}, nil + return &WorkflowList{results}, nil } func (ga *Workflow) Validate(ctx context.Context, spec *types.WorkflowSpec) (*empty.Empty, error) { diff --git a/pkg/controller/expr/expr.go b/pkg/controller/expr/expr.go index b46d85b0..26415839 100644 --- a/pkg/controller/expr/expr.go +++ b/pkg/controller/expr/expr.go @@ -6,7 +6,6 @@ import ( "time" "github.com/fatih/structs" - "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/util" "github.com/robertkrimen/otto" @@ -27,13 +26,13 @@ var ( DefaultResolver = NewJavascriptExpressionParser() ) -func Resolve(rootScope interface{}, currentTask string, expr *types.TypedValue) (*types.TypedValue, error) { +func Resolve(rootScope interface{}, currentTask string, expr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) { return DefaultResolver.Resolve(rootScope, currentTask, expr) } // resolver resolves an expression within a given context/scope. type Resolver interface { - Resolve(rootScope interface{}, currentTask string, expr *types.TypedValue) (*types.TypedValue, error) + Resolve(rootScope interface{}, currentTask string, expr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) } // Function is an interface for providing functions that are able to be injected into the Otto runtime. 
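The expr resolver hunks below follow the typedvalues rename that runs through this change: Parse/Format become Wrap/Unwrap, MustParse/MustFormat become MustWrap/MustUnwrap, and type checks move from typedvalues.IsType(tv, T) to tv.ValueType() == T. A minimal sketch of the new call pattern, using only functions that appear in this diff (the main wrapper and the sample value are illustrative):

package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/types/typedvalues"
)

func main() {
	// Wrap a plain Go value into a TypedValue (previously typedvalues.Parse).
	tv, err := typedvalues.Wrap(map[string]interface{}{"answer": 42})
	if err != nil {
		panic(err)
	}

	// Type inspection now goes through ValueType() (previously typedvalues.IsType).
	if tv.ValueType() == typedvalues.TypeMap {
		// Unwrap back into a plain Go value (previously typedvalues.Format).
		value, err := typedvalues.Unwrap(tv)
		if err != nil {
			panic(err)
		}
		fmt.Println(value) // map[answer:42]
	}
}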
@@ -55,9 +54,9 @@ func NewJavascriptExpressionParser() *JavascriptExpressionParser { } func (oe *JavascriptExpressionParser) Resolve(rootScope interface{}, currentTask string, - expr *types.TypedValue) (*types.TypedValue, error) { + expr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) { - switch typedvalues.ValueType(expr.GetType()) { + switch expr.ValueType() { case typedvalues.TypeList: return oe.resolveList(rootScope, currentTask, expr) case typedvalues.TypeMap: @@ -70,9 +69,9 @@ func (oe *JavascriptExpressionParser) Resolve(rootScope interface{}, currentTask } func (oe *JavascriptExpressionParser) resolveExpr(rootScope interface{}, currentTask string, - expr *types.TypedValue) (*types.TypedValue, error) { + expr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) { - if !typedvalues.IsType(expr, typedvalues.TypeExpression) { + if expr.ValueType() != typedvalues.TypeExpression { return nil, errors.New("expected expression to resolve") } @@ -107,7 +106,7 @@ func (oe *JavascriptExpressionParser) resolveExpr(rootScope interface{}, current } }() - e, err := typedvalues.FormatExpression(expr) + e, err := typedvalues.UnwrapExpression(expr) if err != nil { return nil, fmt.Errorf("failed to format expression for resolving (%v)", err) } @@ -126,23 +125,23 @@ func (oe *JavascriptExpressionParser) resolveExpr(rootScope interface{}, current i = mp } - result, err := typedvalues.Parse(i) + result, err := typedvalues.Wrap(i) if err != nil { return nil, err } - result.SetLabel("src", e) + result.SetMetadata("src", e) return result, nil } func (oe *JavascriptExpressionParser) resolveMap(rootScope interface{}, currentTask string, - expr *types.TypedValue) (*types.TypedValue, error) { + expr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) { - if !typedvalues.IsType(expr, typedvalues.TypeMap) { + if expr.ValueType() != typedvalues.TypeMap { return nil, errors.New("expected map to resolve") } logrus.WithField("expr", expr).Debug("Resolving map") - i, err := typedvalues.Format(expr) + i, err := typedvalues.Unwrap(expr) if err != nil { return nil, err } @@ -150,7 +149,7 @@ func (oe *JavascriptExpressionParser) resolveMap(rootScope interface{}, currentT result := map[string]interface{}{} obj := i.(map[string]interface{}) for k, v := range obj { // TODO add priority here - field, err := typedvalues.Parse(v) + field, err := typedvalues.Wrap(v) if err != nil { return nil, err } @@ -160,24 +159,24 @@ func (oe *JavascriptExpressionParser) resolveMap(rootScope interface{}, currentT return nil, err } - actualVal, err := typedvalues.Format(resolved) + actualVal, err := typedvalues.Unwrap(resolved) if err != nil { return nil, err } result[k] = actualVal } - return typedvalues.Parse(result) + return typedvalues.Wrap(result) } func (oe *JavascriptExpressionParser) resolveList(rootScope interface{}, currentTask string, - expr *types.TypedValue) (*types.TypedValue, error) { + expr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) { - if !typedvalues.IsType(expr, typedvalues.TypeList) { + if expr.ValueType() != typedvalues.TypeList { return nil, errors.New("expected list to resolve") } logrus.WithField("expr", expr).Debug("Resolving list") - i, err := typedvalues.Format(expr) + i, err := typedvalues.Unwrap(expr) if err != nil { return nil, err } @@ -185,7 +184,7 @@ func (oe *JavascriptExpressionParser) resolveList(rootScope interface{}, current result := []interface{}{} obj := i.([]interface{}) for _, v := range obj { // TODO add priority here - field, err := typedvalues.Parse(v) + field, 
err := typedvalues.Wrap(v) if err != nil { return nil, err } @@ -195,13 +194,13 @@ func (oe *JavascriptExpressionParser) resolveList(rootScope interface{}, current return nil, err } - actualVal, err := typedvalues.Format(resolved) + actualVal, err := typedvalues.Unwrap(resolved) if err != nil { return nil, err } result = append(result, actualVal) } - return typedvalues.Parse(result) + return typedvalues.Wrap(result) } func injectFunctions(vm *otto.Otto, fns map[string]Function) { diff --git a/pkg/controller/expr/expr_test.go b/pkg/controller/expr/expr_test.go index e4024de4..39c894df 100644 --- a/pkg/controller/expr/expr_test.go +++ b/pkg/controller/expr/expr_test.go @@ -5,9 +5,7 @@ import ( "strings" "testing" - "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" - "github.com/golang/protobuf/ptypes" "github.com/stretchr/testify/assert" ) @@ -28,7 +26,7 @@ func TestResolveTestRootScopePath(t *testing.T) { t.Error(err) } - resolvedString, err := typedvalues.Format(resolved) + resolvedString, err := typedvalues.Unwrap(resolved) if err != nil { t.Error(err) } @@ -44,7 +42,7 @@ func TestResolveTestScopePath(t *testing.T) { resolved, err := exprParser.Resolve(rootScope, currentTask, mustParseExpr("{"+varCurrentTask+"}")) assert.NoError(t, err) - resolvedString, err := typedvalues.Format(resolved) + resolvedString, err := typedvalues.Unwrap(resolved) assert.NoError(t, err) assert.Equal(t, currentTask, resolvedString) @@ -58,7 +56,7 @@ func TestResolveLiteral(t *testing.T) { resolved, err := exprParser.Resolve(rootScope, "output", mustParseExpr(fmt.Sprintf("{'%s'}", expected))) assert.NoError(t, err) - resolvedString, _ := typedvalues.Format(resolved) + resolvedString, _ := typedvalues.Unwrap(resolved) assert.Equal(t, expected, resolvedString) } @@ -71,7 +69,7 @@ func TestResolveTransformation(t *testing.T) { resolved, err := exprParser.Resolve(rootScope, "", mustParseExpr(fmt.Sprintf("{'%s'.toUpperCase()}", src))) assert.NoError(t, err) - resolvedString, _ := typedvalues.Format(resolved) + resolvedString, _ := typedvalues.Unwrap(resolved) assert.Equal(t, expected, resolvedString) } @@ -82,75 +80,15 @@ func TestResolveInjectedFunction(t *testing.T) { resolved, err := exprParser.Resolve(rootScope, "", mustParseExpr("{uid()}")) assert.NoError(t, err) - resolvedString, _ := typedvalues.Format(resolved) + resolvedString, _ := typedvalues.Unwrap(resolved) assert.NotEmpty(t, resolvedString) } -func TestScope(t *testing.T) { - expected := "hello world" - expectedOutput, _ := typedvalues.Parse(expected) - - actualScope, _ := NewScope(&types.Workflow{ - Metadata: &types.ObjectMetadata{ - Id: "testWorkflow", - CreatedAt: ptypes.TimestampNow(), - }, - Status: &types.WorkflowStatus{ - Status: types.WorkflowStatus_READY, - UpdatedAt: ptypes.TimestampNow(), - Tasks: map[string]*types.TaskStatus{ - "fooTask": { - FnRef: &types.FnRef{ - Runtime: "fission", - ID: "resolvedFissionFunction", - }, - }, - }, - }, - Spec: &types.WorkflowSpec{ - ApiVersion: "1", - OutputTask: "fooTask", - Tasks: map[string]*types.TaskSpec{ - "fooTask": { - FunctionRef: "fissionFunction", - }, - }, - }, - }, &types.WorkflowInvocation{ - Metadata: &types.ObjectMetadata{ - Id: "testWorkflowInvocation", - CreatedAt: ptypes.TimestampNow(), - }, - Spec: &types.WorkflowInvocationSpec{ - WorkflowId: "testWorkflow", - }, - Status: &types.WorkflowInvocationStatus{ - Status: types.WorkflowInvocationStatus_IN_PROGRESS, - Tasks: 
map[string]*types.TaskInvocation{ - "fooTask": { - Spec: &types.TaskInvocationSpec{}, - Status: &types.TaskInvocationStatus{ - Output: expectedOutput, - }, - }, - }, - }, - }) - - exprParser := NewJavascriptExpressionParser() - - resolved, err := exprParser.Resolve(actualScope, "fooTask", mustParseExpr("{$.Tasks.fooTask.Output}")) - assert.NoError(t, err) - - resolvedString, _ := typedvalues.Format(resolved) - assert.Equal(t, expected, resolvedString) -} - -func mustParseExpr(s string) *types.TypedValue { - tv := typedvalues.MustParse(s) - if !typedvalues.IsType(tv, typedvalues.TypeExpression) { - panic(fmt.Sprintf("Should be an expression, but was '%v'", tv.Type)) +func mustParseExpr(s string) *typedvalues.TypedValue { + tv := typedvalues.MustWrap(s) + if tv.ValueType() != typedvalues.TypeExpression { + panic(fmt.Sprintf("Should be %v, but was '%v'", typedvalues.TypeExpression, tv.ValueType())) } return tv diff --git a/pkg/controller/expr/functions_test.go b/pkg/controller/expr/functions_test.go index f304f493..2d1b329e 100644 --- a/pkg/controller/expr/functions_test.go +++ b/pkg/controller/expr/functions_test.go @@ -11,7 +11,7 @@ import ( ) func makeTestScope() *Scope { - scope, _ := NewScope(&types.Workflow{ + scope, _ := NewScope(nil, &types.Workflow{ Metadata: &types.ObjectMetadata{ Id: "testWorkflow", CreatedAt: ptypes.TimestampNow(), @@ -34,9 +34,9 @@ func makeTestScope() *Scope { Tasks: map[string]*types.TaskSpec{ "TaskA": { FunctionRef: "fissionFunction", - Inputs: map[string]*types.TypedValue{ - types.InputMain: typedvalues.MustParse("input-default"), - "otherInput": typedvalues.MustParse("input-otherInput"), + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("input-default"), + "otherInput": typedvalues.MustWrap("input-otherInput"), }, }, }, @@ -48,9 +48,9 @@ func makeTestScope() *Scope { }, Spec: &types.WorkflowInvocationSpec{ WorkflowId: "testWorkflow", - Inputs: map[string]*types.TypedValue{ - types.InputMain: typedvalues.MustParse("body"), - "headers": typedvalues.MustParse("http-headers"), + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("body"), + "headers": typedvalues.MustWrap("http-headers"), }, }, Status: &types.WorkflowInvocationStatus{ @@ -59,7 +59,7 @@ func makeTestScope() *Scope { "TaskA": { Spec: &types.TaskInvocationSpec{}, Status: &types.TaskInvocationStatus{ - Output: typedvalues.MustParse("some output"), + Output: typedvalues.MustWrap("some output"), }, }, }, @@ -75,7 +75,7 @@ func TestOutputFn_Apply_OneArgument(t *testing.T) { result, err := parser.Resolve(testScope, "", mustParseExpr("{ output('TaskA') }")) assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, testScope.Tasks["TaskA"].Output, i) } @@ -87,7 +87,7 @@ func TestOutputFn_Apply_NoArgument(t *testing.T) { result, err := parser.Resolve(testScope, "TaskA", mustParseExpr("{ output() }")) assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, testScope.Tasks["TaskA"].Output, i) } @@ -99,7 +99,7 @@ func TestInputFn_Apply_NoArgument(t *testing.T) { result, err := parser.Resolve(testScope, "TaskA", mustParseExpr("{ input() }")) assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, "input-default", i) } @@ -111,7 +111,7 @@ func TestInputFn_Apply_OneArgument(t *testing.T) { result, err := parser.Resolve(testScope, "", mustParseExpr("{ input('TaskA') }")) 
assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, "input-default", i) } @@ -123,7 +123,7 @@ func TestInputFn_Apply_TwoArguments(t *testing.T) { result, err := parser.Resolve(testScope, "", mustParseExpr("{ input('TaskA', 'otherInput') }")) assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, "input-otherInput", i) } @@ -133,7 +133,7 @@ func TestParamFn_Apply_NoArgument(t *testing.T) { testScope := makeTestScope() result, err := parser.Resolve(testScope, "", mustParseExpr("{ param() }")) assert.NoError(t, err) - assert.Equal(t, "body", typedvalues.MustFormat(result)) + assert.Equal(t, "body", typedvalues.MustUnwrap(result)) } func TestParamFn_Apply_OneArgument(t *testing.T) { @@ -141,7 +141,7 @@ func TestParamFn_Apply_OneArgument(t *testing.T) { testScope := makeTestScope() result, err := parser.Resolve(testScope, "", mustParseExpr("{ param('headers') }")) assert.NoError(t, err) - assert.Equal(t, "http-headers", typedvalues.MustFormat(result)) + assert.Equal(t, "http-headers", typedvalues.MustUnwrap(result)) } func TestUidFn_Apply(t *testing.T) { @@ -149,7 +149,7 @@ func TestUidFn_Apply(t *testing.T) { testScope := makeTestScope() result, err := parser.Resolve(testScope, "", mustParseExpr("{ uid() }")) assert.NoError(t, err) - assert.NotEmpty(t, typedvalues.MustFormat(result)) + assert.NotEmpty(t, typedvalues.MustUnwrap(result)) } func TestTaskFn_Apply_OneArgument(t *testing.T) { @@ -158,7 +158,7 @@ func TestTaskFn_Apply_OneArgument(t *testing.T) { testScope := makeTestScope() result, err := parser.Resolve(testScope, "", mustParseExpr("{ task('TaskA') }")) assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, util.MustConvertStructsToMap(testScope.Tasks["TaskA"]), i) } @@ -170,7 +170,7 @@ func TestTaskFn_Apply_NoArgument(t *testing.T) { result, err := parser.Resolve(testScope, "TaskA", mustParseExpr("{ task() }")) assert.NoError(t, err) - i := typedvalues.MustFormat(result) + i := typedvalues.MustUnwrap(result) assert.Equal(t, util.MustConvertStructsToMap(testScope.Tasks["TaskA"]), i) } diff --git a/pkg/controller/expr/scope.go b/pkg/controller/expr/scope.go index 64beebb3..6e8874d7 100644 --- a/pkg/controller/expr/scope.go +++ b/pkg/controller/expr/scope.go @@ -1,25 +1,53 @@ package expr import ( + "fmt" + "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" + "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" "github.com/golang/protobuf/ptypes/timestamp" + "github.com/imdario/mergo" "github.com/pkg/errors" ) +var ( + ErrMergeTypeMismatch = errors.New("cannot merge incompatible types") +) + +type DeepCopier interface { + DeepCopy() DeepCopier +} + // Scope is the broadest view of the workflow invocation, which can be queried by the user. 
type Scope struct { Workflow *WorkflowScope Invocation *InvocationScope - Tasks map[string]*TaskScope + Tasks Tasks +} + +func (s *Scope) DeepCopy() DeepCopier { + if s == nil { + return nil + } + return &Scope{ + Workflow: s.Workflow.DeepCopy().(*WorkflowScope), + Invocation: s.Invocation.DeepCopy().(*InvocationScope), + Tasks: s.Tasks.DeepCopy().(Tasks), + } } +type Tasks map[string]*TaskScope + // WorkflowScope provides information about the workflow definition. type WorkflowScope struct { *ObjectMetadata UpdatedAt int64 // unix timestamp Status string // workflow status + Name string + Internal bool } // InvocationScope object provides information about the current invocation. @@ -42,50 +70,140 @@ type TaskScope struct { Inputs map[string]interface{} Requires map[string]*types.TaskDependencyParameters Output interface{} - Function *types.FnRef + Function string +} + +func (s Tasks) DeepCopy() DeepCopier { + if s == nil { + return nil + } + copied := make(Tasks, len(s)) + for k, v := range s { + copied[k] = v.DeepCopy().(*TaskScope) + } + return copied +} + +func (s *WorkflowScope) DeepCopy() DeepCopier { + if s == nil { + return nil + } + return &WorkflowScope{ + ObjectMetadata: s.ObjectMetadata.DeepCopy().(*ObjectMetadata), + UpdatedAt: s.UpdatedAt, + Status: s.Status, + Name: s.Name, + Internal: s.Internal, + } +} + +func (s *InvocationScope) DeepCopy() DeepCopier { + if s == nil { + return nil + } + return &InvocationScope{ + ObjectMetadata: s.ObjectMetadata.DeepCopy().(*ObjectMetadata), + Inputs: DeepCopy(s.Inputs).(map[string]interface{}), + } +} + +func (s *ObjectMetadata) DeepCopy() DeepCopier { + if s == nil { + return nil + } + return &ObjectMetadata{ + Id: s.Id, + CreatedAt: s.CreatedAt, + } +} + +func (s *TaskScope) DeepCopy() DeepCopier { + if s == nil { + return nil + } + var requires map[string]*types.TaskDependencyParameters + if s.Requires != nil { + requires = make(map[string]*types.TaskDependencyParameters, len(s.Requires)) + for k, v := range s.Requires { + requires[k] = DeepCopy(v).(*types.TaskDependencyParameters) + } + } + + return &TaskScope{ + ObjectMetadata: s.ObjectMetadata.DeepCopy().(*ObjectMetadata), + Status: s.Status, + UpdatedAt: s.UpdatedAt, + Inputs: DeepCopy(s.Inputs).(map[string]interface{}), + Requires: requires, + Output: DeepCopy(s.Output), + Function: s.Function, + } } // NewScope creates a new scope given the workflow invocation and its associates workflow definition. 
-func NewScope(wf *types.Workflow, wfi *types.WorkflowInvocation) (*Scope, error) { +func NewScope(base *Scope, wf *types.Workflow, wfi *types.WorkflowInvocation) (*Scope, error) { + updated := &Scope{} + if wf != nil { + updated.Workflow = formatWorkflow(wf) + } + if wfi != nil { + invocationParams, err := typedvalues.UnwrapMapTypedValue(wfi.Spec.Inputs) + if err != nil { + return nil, errors.Wrap(err, "failed to format invocation inputs") + } + updated.Invocation = &InvocationScope{ + ObjectMetadata: formatMetadata(wfi.Metadata), + Inputs: invocationParams, + } + } - tasks := map[string]*TaskScope{} for taskId, task := range types.GetTasks(wf, wfi) { + if updated.Tasks == nil { + updated.Tasks = map[string]*TaskScope{} + } + // Dep: pipe output of dynamic tasks - t := typedvalues.ResolveTaskOutput(taskId, wfi) - output, err := typedvalues.Format(t) + t := controlflow.ResolveTaskOutput(taskId, wfi) + output, err := typedvalues.Unwrap(t) if err != nil { panic(err) } - inputs, err := typedvalues.FormatTypedValueMap(typedvalues.DefaultParserFormatter, task.Spec.Inputs) + inputs, err := typedvalues.UnwrapMapTypedValue(task.Spec.Inputs) if err != nil { return nil, errors.Wrapf(err, "failed to format inputs of task %v", taskId) } - tasks[taskId] = &TaskScope{ + updated.Tasks[taskId] = &TaskScope{ ObjectMetadata: formatMetadata(task.Metadata), Status: task.Status.Status.String(), UpdatedAt: formatTimestamp(task.Status.UpdatedAt), Inputs: inputs, Requires: task.Spec.Requires, Output: output, + Function: task.Spec.FunctionRef, } } - invocInputs, err := typedvalues.FormatTypedValueMap(typedvalues.DefaultParserFormatter, wfi.Spec.Inputs) + if base == nil { + return updated, nil + } + if base.Tasks == nil { + base.Tasks = map[string]*TaskScope{} + } + err := mergo.Merge(updated, base) if err != nil { - return nil, errors.Wrap(err, "failed to format invocation inputs") + return nil, err + } + return updated, nil +} + +func formatWorkflow(wf *types.Workflow) *WorkflowScope { + return &WorkflowScope{ + ObjectMetadata: formatMetadata(wf.Metadata), + UpdatedAt: formatTimestamp(wf.Status.UpdatedAt), + Status: wf.Status.Status.String(), + Name: wf.GetMetadata().GetName(), + Internal: wf.GetSpec().GetInternal(), } - return &Scope{ - Workflow: &WorkflowScope{ - ObjectMetadata: formatMetadata(wf.Metadata), - UpdatedAt: formatTimestamp(wf.Status.UpdatedAt), - Status: wf.Status.Status.String(), - }, - Invocation: &InvocationScope{ - ObjectMetadata: formatMetadata(wfi.Metadata), - Inputs: invocInputs, - }, - Tasks: tasks, - }, nil } func formatMetadata(meta *types.ObjectMetadata) *ObjectMetadata { @@ -102,3 +220,33 @@ func formatTimestamp(pts *timestamp.Timestamp) int64 { ts, _ := ptypes.Timestamp(pts) return ts.UnixNano() } + +func DeepCopy(i interface{}) interface{} { + if i == nil { + return i + } + switch t := i.(type) { + // TODO support any function as primitive (use reflection API) + case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64, uintptr, + complex64, complex128, string, bool: + return t + case DeepCopier: + return t.DeepCopy() + case map[string]interface{}: // TODO support any map + copied := make(map[string]interface{}, len(t)) + for k, v := range t { + copied[k] = DeepCopy(v) + } + return copied + case []interface{}: // TODO support any array + copied := make([]interface{}, len(t)) + for k, v := range t { + copied[k] = DeepCopy(v) + } + return copied + case proto.Message: + return proto.Clone(t) + default: + panic(fmt.Sprintf("cannot deepcopy unknown type 
%T", t)) + } +} diff --git a/pkg/controller/expr/scope_test.go b/pkg/controller/expr/scope_test.go new file mode 100644 index 00000000..9efa5b2e --- /dev/null +++ b/pkg/controller/expr/scope_test.go @@ -0,0 +1,153 @@ +package expr + +import ( + "testing" + + "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/golang/protobuf/ptypes" + "github.com/stretchr/testify/assert" +) + +func TestScopeExpr(t *testing.T) { + expected := "hello world" + expectedOutput, _ := typedvalues.Wrap(expected) + + actualScope, err := NewScope(nil, &types.Workflow{ + Metadata: &types.ObjectMetadata{ + Id: "testWorkflow", + CreatedAt: ptypes.TimestampNow(), + }, + Status: &types.WorkflowStatus{ + Status: types.WorkflowStatus_PENDING, + UpdatedAt: ptypes.TimestampNow(), + Tasks: map[string]*types.TaskStatus{ + "fooTask": { + FnRef: &types.FnRef{ + Runtime: "fission", + ID: "resolvedFissionFunction", + }, + }, + }, + }, + Spec: &types.WorkflowSpec{ + ApiVersion: "1", + OutputTask: "fooTask", + Tasks: map[string]*types.TaskSpec{ + "fooTask": { + FunctionRef: "fissionFunction", + }, + }, + }, + }, &types.WorkflowInvocation{ + Metadata: &types.ObjectMetadata{ + Id: "testWorkflowInvocation", + CreatedAt: ptypes.TimestampNow(), + }, + Spec: &types.WorkflowInvocationSpec{ + WorkflowId: "testWorkflow", + }, + Status: &types.WorkflowInvocationStatus{ + Status: types.WorkflowInvocationStatus_IN_PROGRESS, + Tasks: map[string]*types.TaskInvocation{ + "fooTask": { + Spec: &types.TaskInvocationSpec{}, + Status: &types.TaskInvocationStatus{ + Output: expectedOutput, + }, + }, + }, + }, + }) + assert.NoError(t, err) + + exprParser := NewJavascriptExpressionParser() + + resolved, err := exprParser.Resolve(actualScope, "fooTask", mustParseExpr("{$.Tasks.fooTask.Output}")) + assert.NoError(t, err) + + resolvedString, _ := typedvalues.Unwrap(resolved) + assert.Equal(t, expected, resolvedString) +} + +func TestScopeOverride(t *testing.T) { + expected := "hello world" + expectedOutput, _ := typedvalues.Wrap(expected) + + scope1, err := NewScope(nil, &types.Workflow{ + Metadata: &types.ObjectMetadata{ + Id: "testWorkflow", + CreatedAt: ptypes.TimestampNow(), + }, + Status: &types.WorkflowStatus{ + Status: types.WorkflowStatus_PENDING, + UpdatedAt: ptypes.TimestampNow(), + Tasks: map[string]*types.TaskStatus{ + "fooTask": { + FnRef: &types.FnRef{ + Runtime: "fission", + ID: "resolvedFissionFunction", + }, + }, + }, + }, + Spec: &types.WorkflowSpec{ + ApiVersion: "1", + OutputTask: "fooTask", + Tasks: map[string]*types.TaskSpec{ + "fooTask": { + FunctionRef: "fissionFunction", + }, + }, + }, + }, &types.WorkflowInvocation{ + Metadata: &types.ObjectMetadata{ + Id: "testWorkflowInvocation", + CreatedAt: ptypes.TimestampNow(), + }, + Spec: &types.WorkflowInvocationSpec{ + WorkflowId: "testWorkflow", + }, + Status: &types.WorkflowInvocationStatus{ + Status: types.WorkflowInvocationStatus_IN_PROGRESS, + Tasks: map[string]*types.TaskInvocation{ + "fooTask": { + Spec: &types.TaskInvocationSpec{}, + Status: &types.TaskInvocationStatus{ + Output: expectedOutput, + }, + }, + }, + }, + }) + assert.NoError(t, err) + + // Test overriding with nil values + scope2, err := NewScope(scope1, nil, nil) + assert.NoError(t, err) + assert.Equal(t, scope1, scope2) + assert.False(t, scope1 == scope2) + + // Test with overriding workflow + scope3, err := NewScope(scope1, &types.Workflow{ + Status: &types.WorkflowStatus{ + Status: types.WorkflowStatus_READY, + 
}, + }, nil) + assert.NoError(t, err) + assert.NotEqual(t, scope2, scope3) + assert.Equal(t, scope2.Invocation, scope3.Invocation) + assert.Equal(t, scope2.Tasks, scope3.Tasks) + + // Test with overriding invocation + scope4, err := NewScope(scope1, nil, &types.WorkflowInvocation{ + Spec: &types.WorkflowInvocationSpec{ + Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{ + "foo": "bar", + }), + }, + }) + assert.NoError(t, err) + assert.NotEqual(t, scope2, scope4) + assert.Equal(t, scope2.Workflow, scope4.Workflow) +} diff --git a/pkg/controller/expr/store.go b/pkg/controller/expr/store.go index 64c826a4..02981d8e 100644 --- a/pkg/controller/expr/store.go +++ b/pkg/controller/expr/store.go @@ -7,18 +7,16 @@ import ( // TODO Keep old states (but prune if OOM) // TODO provide garbage collector type Store struct { - entries sync.Map // map[string]interface{} - resolver Resolver + entries sync.Map // map[string]interface{} } func NewStore() *Store { return &Store{ - entries: sync.Map{}, - resolver: DefaultResolver, + entries: sync.Map{}, } } -func (rs *Store) Set(id string, data interface{}) { +func (rs *Store) Set(id string, data *Scope) { rs.entries.Store(id, data) } @@ -26,11 +24,16 @@ func (rs *Store) Delete(id string) { rs.entries.Delete(id) } -func (rs *Store) Get(id string) (interface{}, bool) { - return rs.entries.Load(id) +func (rs *Store) Get(id string) (*Scope, bool) { + i, ok := rs.entries.Load(id) + if !ok { + return nil, ok + } + scope, ok := i.(*Scope) + return scope, ok } -func (rs *Store) Update(id string, updater func(entry interface{}) interface{}) { +func (rs *Store) Update(id string, updater func(entry *Scope) *Scope) { entry, ok := rs.Get(id) if ok { rs.Set(id, updater(entry)) @@ -39,8 +42,8 @@ func (rs *Store) Update(id string, updater func(entry interface{}) interface{}) // Range calls f sequentially for each key and value present in the map. // If f returns false, range stops the iteration. 
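The store.go change below this test narrows Store from an untyped sync.Map wrapper to one that only ever holds *Scope values, so callers no longer need type assertions. A minimal usage sketch, assuming the NewStore/Set/Get/Update/Range signatures shown in this patch; the invocation ID is made up for illustration:

package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/controller/expr"
)

func main() {
	store := expr.NewStore()

	// Set now only accepts *expr.Scope values ("wfi-123" is an illustrative ID).
	store.Set("wfi-123", &expr.Scope{})

	// Get returns a typed (*Scope, bool) pair instead of (interface{}, bool).
	if scope, ok := store.Get("wfi-123"); ok {
		fmt.Printf("found scope: %+v\n", scope)
	}

	// Update and Range likewise operate on *Scope directly.
	store.Update("wfi-123", func(entry *expr.Scope) *expr.Scope {
		return entry
	})
	store.Range(func(key string, value *expr.Scope) bool {
		return true
	})
}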
-func (rs *Store) Range(fn func(key string, value interface{}) bool) { +func (rs *Store) Range(fn func(key string, value *Scope) bool) { rs.entries.Range(func(key, value interface{}) bool { - return fn(key.(string), value) + return fn(key.(string), value.(*Scope)) }) } diff --git a/pkg/controller/invocation/actions.go b/pkg/controller/invocation/actions.go index a31e1798..0adc00f4 100644 --- a/pkg/controller/invocation/actions.go +++ b/pkg/controller/invocation/actions.go @@ -12,7 +12,6 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/util" - "github.com/imdario/mergo" "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/sirupsen/logrus" @@ -121,7 +120,7 @@ func (a *ActionInvokeTask) Apply() error { } log.Infof("Executing function: %v", spec.GetFnRef().Format()) if logrus.GetLevel() == logrus.DebugLevel { - i, err := typedvalues.FormatTypedValueMap(typedvalues.DefaultParserFormatter, spec.GetInputs()) + i, err := typedvalues.UnwrapMapTypedValue(spec.GetInputs()) if err != nil { log.Errorf("Failed to format inputs for debugging: %v", err) } else { @@ -143,7 +142,7 @@ func (a *ActionInvokeTask) postTransformer(ti *types.TaskInvocation) error { if ti.GetStatus().Successful() { output := task.GetSpec().GetOutput() if output != nil { - if output.GetType() == typedvalues.TypeExpression { + if output.ValueType() == typedvalues.TypeExpression { tv, err := a.resolveOutput(ti, output) if err != nil { return err @@ -156,29 +155,26 @@ func (a *ActionInvokeTask) postTransformer(ti *types.TaskInvocation) error { return nil } -func (a *ActionInvokeTask) resolveOutput(ti *types.TaskInvocation, outputExpr *types.TypedValue) (*types.TypedValue, error) { - log := a.logger() +func (a *ActionInvokeTask) resolveOutput(ti *types.TaskInvocation, outputExpr *typedvalues.TypedValue) (*typedvalues.TypedValue, error) { + // Inherit scope if invocation has a parent + var parentScope *expr.Scope + if len(a.Wfi.Spec.ParentId) != 0 { + var ok bool + parentScope, ok = a.StateStore.Get(a.Wfi.Spec.ParentId) + if !ok { + a.logger().Warn("Could not find parent scope (%s) of scope (%s)", a.Wfi.Spec.ParentId, a.Wfi.ID()) + } + } // Setup the scope for the expressions - scope, err := expr.NewScope(a.Wf, a.Wfi) + scope, err := expr.NewScope(parentScope, a.Wf, a.Wfi) if err != nil { return nil, errors.Wrapf(err, "failed to create scope for task '%v'", a.Task.Id) } a.StateStore.Set(a.Wfi.ID(), scope) - // Inherit scope if this invocation is part of a dynamic invocation - if len(a.Wfi.Spec.ParentId) != 0 { - parentScope, ok := a.StateStore.Get(a.Wfi.Spec.ParentId) - if ok { - err := mergo.Merge(scope, parentScope) - if err != nil { - log.Errorf("Failed to inherit parent scope: %v", err) - } - } - } - // Add the current output - scope.Tasks[a.Task.Id].Output = typedvalues.MustFormat(ti.GetStatus().GetOutput()) + scope.Tasks[a.Task.Id].Output = typedvalues.MustUnwrap(ti.GetStatus().GetOutput()) // Resolve the output expression resolvedOutput, err := expr.Resolve(scope, a.Task.Id, outputExpr) @@ -188,40 +184,43 @@ func (a *ActionInvokeTask) resolveOutput(ti *types.TaskInvocation, outputExpr *t return resolvedOutput, nil } -func (a *ActionInvokeTask) resolveInputs(inputs map[string]*types.TypedValue) (map[string]*types.TypedValue, error) { - log := a.logger() +func (a *ActionInvokeTask) resolveInputs(inputs map[string]*typedvalues.TypedValue) 
(map[string]*typedvalues.TypedValue, error) { + // Inherit scope if invocation has a parent + var parentScope *expr.Scope + if len(a.Wfi.Spec.ParentId) != 0 { + var ok bool + parentScope, ok = a.StateStore.Get(a.Wfi.Spec.ParentId) + if !ok { + a.logger().Warn("Could not find parent scope (%s) of scope (%s)", a.Wfi.Spec.ParentId, a.Wfi.ID()) + } + } // Setup the scope for the expressions - scope, err := expr.NewScope(a.Wf, a.Wfi) + scope, err := expr.NewScope(parentScope, a.Wf, a.Wfi) if err != nil { return nil, errors.Wrapf(err, "failed to create scope for task '%v'", a.Task.Id) } a.StateStore.Set(a.Wfi.ID(), scope) - // Inherit scope if this invocation is part of a dynamic invocation - if len(a.Wfi.Spec.ParentId) != 0 { - parentScope, ok := a.StateStore.Get(a.Wfi.Spec.ParentId) - if ok { - err := mergo.Merge(scope, parentScope) - if err != nil { - log.Errorf("Failed to inherit parent scope: %v", err) - } - } - } - // Resolve each of the inputs (based on priority) - resolvedInputs := map[string]*types.TypedValue{} + resolvedInputs := map[string]*typedvalues.TypedValue{} for _, input := range typedvalues.Prioritize(inputs) { resolvedInput, err := expr.Resolve(scope, a.Task.Id, input.Val) if err != nil { return nil, fmt.Errorf("failed to resolve input field %v: %v", input.Key, err) } resolvedInputs[input.Key] = resolvedInput - log.Infof("Resolved field %v: %v -> %v", input.Key, typedvalues.MustFormat(input.Val), - util.Truncate(typedvalues.MustFormat(resolvedInput), 100)) + if input.Val.ValueType() == typedvalues.TypeExpression { + log.Infof("Input field resolved '%v': %v -> %v", input.Key, + util.Truncate(typedvalues.MustUnwrap(input.Val), 100), + util.Truncate(typedvalues.MustUnwrap(resolvedInput), 100)) + } else { + log.Infof("Input field loaded '%v': %v", input.Key, + util.Truncate(typedvalues.MustUnwrap(resolvedInput), 100)) + } // Update the scope with the resolved type - scope.Tasks[a.Task.Id].Inputs[input.Key] = typedvalues.MustFormat(resolvedInput) + scope.Tasks[a.Task.Id].Inputs[input.Key] = typedvalues.MustUnwrap(resolvedInput) } return resolvedInputs, nil } diff --git a/pkg/controller/invocation/controller.go b/pkg/controller/invocation/controller.go index e9d5ba42..975bb417 100644 --- a/pkg/controller/invocation/controller.go +++ b/pkg/controller/invocation/controller.go @@ -147,7 +147,7 @@ func (cr *Controller) Notify(update *fes.Notification) error { // TODO mark to clean up later instead cr.stateStore.Delete(entity.ID()) cr.evalStore.Delete(entity.ID()) - log.Infof("Removed entity %v from eval state", entity.ID()) + log.Debugf("Removed entity %v from eval state", entity.ID()) case events.EventTaskFailed: fallthrough case events.EventTaskSucceeded: @@ -170,7 +170,6 @@ func (cr *Controller) Tick(tick uint64) error { // Long loop: to check if there are any orphans if tick%50 == 0 { - log.Debug("Checking model caches for missing invocations") err = cr.checkModelCaches() } diff --git a/pkg/controller/invocation/rules.go b/pkg/controller/invocation/rules.go index 7df9867d..5b38e36e 100644 --- a/pkg/controller/invocation/rules.go +++ b/pkg/controller/invocation/rules.go @@ -9,6 +9,7 @@ import ( "github.com/fission/fission-workflows/pkg/scheduler" "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/golang/protobuf/ptypes" ) @@ -137,9 +138,9 @@ func (cc *RuleCheckIfCompleted) Eval(cec 
controller.EvalContext) controller.Acti } } if finished { - var finalOutput *types.TypedValue + var finalOutput *typedvalues.TypedValue if len(wf.Spec.OutputTask) != 0 { - finalOutput = typedvalues.ResolveTaskOutput(wf.Spec.OutputTask, wfi) + finalOutput = controlflow.ResolveTaskOutput(wf.Spec.OutputTask, wfi) } // TODO extract to action diff --git a/pkg/fes/testutil/testutil.pb.go b/pkg/fes/testutil/testutil.pb.go index d4ab4579..3acfa018 100644 --- a/pkg/fes/testutil/testutil.pb.go +++ b/pkg/fes/testutil/testutil.pb.go @@ -27,6 +27,7 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +// DummyEvent is a stub implementation of a fes.Event type DummyEvent struct { Msg string `protobuf:"bytes,1,opt,name=msg" json:"msg,omitempty"` } diff --git a/pkg/fnenv/fission/runtime.go b/pkg/fnenv/fission/runtime.go index 0d2c91f6..7df60894 100644 --- a/pkg/fnenv/fission/runtime.go +++ b/pkg/fnenv/fission/runtime.go @@ -101,7 +101,6 @@ func (fe *FunctionEnv) Invoke(spec *types.TaskInvocationSpec, opts ...fnenv.Invo if err != nil { return nil, fmt.Errorf("error for reqUrl '%v': %v", url, err) } - defer resp.Body.Close() fnenv.FnActive.WithLabelValues(Name).Dec() fnenv.FnActive.WithLabelValues(Name).Inc() @@ -125,7 +124,7 @@ func (fe *FunctionEnv) Invoke(spec *types.TaskInvocationSpec, opts ...fnenv.Invo // Determine status of the task invocation if resp.StatusCode >= 400 { - msg, _ := typedvalues.Format(&output) + msg, _ := typedvalues.Unwrap(output) ctxLog.Warnf("[%s] Failed %v: %v", fnRef.ID, resp.StatusCode, msg) return &types.TaskInvocationStatus{ Status: types.TaskInvocationStatus_FAILED, @@ -137,7 +136,7 @@ func (fe *FunctionEnv) Invoke(spec *types.TaskInvocationSpec, opts ...fnenv.Invo return &types.TaskInvocationStatus{ Status: types.TaskInvocationStatus_SUCCEEDED, - Output: &output, + Output: output, }, nil } diff --git a/pkg/fnenv/http/http.go b/pkg/fnenv/http/http.go index e03c0108..9074a9ea 100644 --- a/pkg/fnenv/http/http.go +++ b/pkg/fnenv/http/http.go @@ -19,11 +19,17 @@ var ( ) func New() *Runtime { - return &Runtime{Client: http.DefaultClient} + mapper := httpconv.DefaultHTTPMapper.Clone() + mapper.DefaultHTTPMethod = http.MethodGet + return &Runtime{ + Client: http.DefaultClient, + httpconv: mapper, + } } type Runtime struct { - Client *http.Client + Client *http.Client + httpconv *httpconv.HTTPMapper } // Example: https://us-east1-personal-erwinvaneyk.cloudfunctions.net/helloworld @@ -57,12 +63,10 @@ func (r *Runtime) Invoke(spec *types.TaskInvocationSpec, opts ...fnenv.InvokeOpt req.URL = fnUrl // Pass task inputs to HTTP request - err = httpconv.FormatRequest(spec.GetInputs(), req) + err = r.httpconv.FormatRequest(spec.GetInputs(), req) if err != nil { return nil, err } - // Ensure that the default method is GET (not POST). 
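With the New() change above, the HTTP runtime now carries its own HTTPMapper clone (with DefaultHTTPMethod set to GET) instead of overriding req.Method per request, as the removed lines below show. As a sketch of the same pattern, a variant constructor that defaults to POST could look like the following; the function name and the POST default are purely illustrative, and it would have to live next to New() in the same file, since the httpconv field is unexported and the sketch reuses that file's existing imports:

// newPostRuntime is a hypothetical sibling of New() that defaults requests to POST.
func newPostRuntime() *Runtime {
	mapper := httpconv.DefaultHTTPMapper.Clone() // copy the shared mapper before changing it
	mapper.DefaultHTTPMethod = http.MethodPost
	return &Runtime{
		Client:   http.DefaultClient,
		httpconv: mapper,
	}
}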
- req.Method = httpconv.FormatMethod(spec.GetInputs(), http.MethodGet) logrus.Infof("HTTP request: %s %v", req.Method, req.URL) if logrus.GetLevel() == logrus.DebugLevel { @@ -89,21 +93,21 @@ func (r *Runtime) Invoke(spec *types.TaskInvocationSpec, opts ...fnenv.InvokeOpt fmt.Println("--- HTTP Response end ---") } - output, err := httpconv.ParseResponse(resp) + output, err := r.httpconv.ParseResponse(resp) if err != nil { return nil, err } if resp.StatusCode >= 400 { - msg, _ := typedvalues.Format(&output) + msg, _ := typedvalues.Unwrap(output) return &types.TaskInvocationStatus{ Status: types.TaskInvocationStatus_FAILED, Error: &types.Error{ - Message: fmt.Sprintf("http function error: %v", msg), + Message: fmt.Sprintf("HTTP runtime request error: %v", msg), }, }, nil } return &types.TaskInvocationStatus{ Status: types.TaskInvocationStatus_SUCCEEDED, - Output: &output, + Output: output, }, nil } diff --git a/pkg/fnenv/mock/mock.go b/pkg/fnenv/mock/mock.go index 210728aa..c739d81b 100644 --- a/pkg/fnenv/mock/mock.go +++ b/pkg/fnenv/mock/mock.go @@ -7,13 +7,14 @@ import ( "github.com/fission/fission-workflows/pkg/fnenv" "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/util" "github.com/golang/protobuf/ptypes" "github.com/sirupsen/logrus" ) // Func is the type for mocked functions used in the mock.Runtime -type Func func(spec *types.TaskInvocationSpec) (*types.TypedValue, error) +type Func func(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) // Runtime mocks the implementation of the various runtime. // diff --git a/pkg/fnenv/native/builtin/builtin.go b/pkg/fnenv/native/builtin/builtin.go index 051df03e..5afdbe69 100644 --- a/pkg/fnenv/native/builtin/builtin.go +++ b/pkg/fnenv/native/builtin/builtin.go @@ -26,20 +26,27 @@ var DefaultBuiltinFunctions = map[string]native.InternalFunction{ } // ensureInput verifies that the input for the given key exists and is of one of the provided types. -func ensureInput(inputs map[string]*types.TypedValue, key string, validTypes ...typedvalues.ValueType) (*types.TypedValue, error) { +func ensureInput(inputs map[string]*typedvalues.TypedValue, key string, validTypes ...string) (*typedvalues.TypedValue, error) { tv, ok := inputs[key] if !ok { return nil, fmt.Errorf("input '%s' is not set", key) } - if len(validTypes) > 0 { - valid := typedvalues.IsType(tv, validTypes...) - if !valid { - return nil, fmt.Errorf("input '%s' is not a valid type (expected: %v, was: %T)", key, validTypes, tv.Type) + if len(validTypes) == 0 { + return tv, nil + } + var found bool + for _, validType := range validTypes { + if validType == tv.ValueType() { + found = true + break } } + if !found { + return nil, fmt.Errorf("input '%s' is not a validType type (expected: %v, was: %T)", key, validTypes, tv.ValueType()) + } return tv, nil } @@ -49,7 +56,7 @@ func internalFunctionTest(t *testing.T, fn native.InternalFunction, input *types t.Fatal(err) } - outputtedTask, err := typedvalues.Format(output) + outputtedTask, err := typedvalues.Unwrap(output) if err != nil { t.Fatal(err) } @@ -61,8 +68,8 @@ func internalFunctionTest(t *testing.T, fn native.InternalFunction, input *types // getFirstDefinedTypedValue returns the first input and key of the inputs argument that matches a field in fields. 
// For example, given inputs { a : b, c : d }, getFirstDefinedTypedValue(inputs, z, x, c, a) would return (c, d) -func getFirstDefinedTypedValue(inputs map[string]*types.TypedValue, fields ...string) (string, *types.TypedValue) { - var result *types.TypedValue +func getFirstDefinedTypedValue(inputs map[string]*typedvalues.TypedValue, fields ...string) (string, *typedvalues.TypedValue) { + var result *typedvalues.TypedValue var key string for _, key = range fields { val, ok := inputs[key] diff --git a/pkg/fnenv/native/builtin/compose.go b/pkg/fnenv/native/builtin/compose.go index 8ecef44c..4179bafd 100644 --- a/pkg/fnenv/native/builtin/compose.go +++ b/pkg/fnenv/native/builtin/compose.go @@ -53,9 +53,9 @@ foo: // TODO avoid adding function-injected fields to compose type FunctionCompose struct{} -func (fn *FunctionCompose) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionCompose) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { - var output *types.TypedValue + var output *typedvalues.TypedValue switch len(spec.GetInputs()) { case 0: output = nil @@ -69,19 +69,19 @@ func (fn *FunctionCompose) Invoke(spec *types.TaskInvocationSpec) (*types.TypedV default: results := map[string]interface{}{} for k, v := range spec.GetInputs() { - i, err := typedvalues.Format(v) + i, err := typedvalues.Unwrap(v) if err != nil { return nil, err } results[k] = i } - p, err := typedvalues.Parse(results) + p, err := typedvalues.Wrap(results) if err != nil { return nil, err } output = p } - logrus.Infof("[internal://%s] %v (Type: %s, Labels: %v)", Compose, typedvalues.MustFormat(output), output.GetType(), - output.GetLabels()) + logrus.Infof("[internal://%s] %v (Type: %s, Labels: %v)", Compose, typedvalues.MustUnwrap(output), output.ValueType(), + output.GetMetadata()) return output, nil } diff --git a/pkg/fnenv/native/builtin/compose_test.go b/pkg/fnenv/native/builtin/compose_test.go index 99e8df20..201f91f7 100644 --- a/pkg/fnenv/native/builtin/compose_test.go +++ b/pkg/fnenv/native/builtin/compose_test.go @@ -12,8 +12,8 @@ func TestFunctionComposePassInput(t *testing.T) { internalFunctionTest(t, &FunctionCompose{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - ComposeInput: typedvalues.MustParse(expected), + Inputs: map[string]*typedvalues.TypedValue{ + ComposeInput: typedvalues.MustWrap(expected), }, }, expected) @@ -23,7 +23,7 @@ func TestFunctionComposeEmpty(t *testing.T) { internalFunctionTest(t, &FunctionCompose{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{}, + Inputs: map[string]*typedvalues.TypedValue{}, }, nil) } @@ -32,10 +32,10 @@ func TestFunctionComposeObject(t *testing.T) { internalFunctionTest(t, &FunctionCompose{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - "foo": typedvalues.MustParse(true), - "bar": typedvalues.MustParse(false), - "default": typedvalues.MustParse("hello"), + Inputs: map[string]*typedvalues.TypedValue{ + "foo": typedvalues.MustWrap(true), + "bar": typedvalues.MustWrap(false), + "default": typedvalues.MustWrap("hello"), }, }, map[string]interface{}{ diff --git a/pkg/fnenv/native/builtin/fail.go b/pkg/fnenv/native/builtin/fail.go index 6763d90d..8e3b9550 100644 --- a/pkg/fnenv/native/builtin/fail.go +++ b/pkg/fnenv/native/builtin/fail.go @@ -13,7 +13,7 @@ const ( FailInputMsg = types.InputMain ) -var defaultErrMsg = typedvalues.MustParse("fail function triggered") +var defaultErrMsg = typedvalues.MustWrap("fail function triggered") /* FunctionFail 
is a function that always fails. This can be used to short-circuit workflows in @@ -41,8 +41,8 @@ A runnable example of this function can be found in the [failwhale](../examples/ */ type FunctionFail struct{} -func (fn *FunctionFail) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { - var output *types.TypedValue +func (fn *FunctionFail) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { + var output *typedvalues.TypedValue switch len(spec.GetInputs()) { case 0: output = defaultErrMsg @@ -58,7 +58,7 @@ func (fn *FunctionFail) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValu "output": output, }).Info("Internal Fail-function invoked.") - msg, err := typedvalues.Format(output) + msg, err := typedvalues.Unwrap(output) if err != nil { return nil, err } diff --git a/pkg/fnenv/native/builtin/fail_test.go b/pkg/fnenv/native/builtin/fail_test.go index e34db6f6..75091c96 100644 --- a/pkg/fnenv/native/builtin/fail_test.go +++ b/pkg/fnenv/native/builtin/fail_test.go @@ -12,7 +12,7 @@ func TestFunctionFail_InvokeEmpty(t *testing.T) { fn := &FunctionFail{} out, err := fn.Invoke(&types.TaskInvocationSpec{}) assert.Nil(t, out) - assert.EqualError(t, err, typedvalues.MustFormat(defaultErrMsg).(string)) + assert.EqualError(t, err, typedvalues.MustUnwrap(defaultErrMsg).(string)) } @@ -20,7 +20,7 @@ func TestFunctionFail_InvokeString(t *testing.T) { fn := &FunctionFail{} errMsg := "custom error message" out, err := fn.Invoke(&types.TaskInvocationSpec{ - Inputs: typedvalues.Input(errMsg), + Inputs: types.Input(errMsg), }) assert.Nil(t, out) assert.EqualError(t, err, errMsg) diff --git a/pkg/fnenv/native/builtin/foreach.go b/pkg/fnenv/native/builtin/foreach.go index 481c2dba..46c6a94a 100644 --- a/pkg/fnenv/native/builtin/foreach.go +++ b/pkg/fnenv/native/builtin/foreach.go @@ -6,6 +6,7 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" ) const ( @@ -53,50 +54,50 @@ A complete example of this function can be found in the [foreachwhale](../exampl */ type FunctionForeach struct{} -func (fn *FunctionForeach) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionForeach) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { // Verify and parse foreach headerTv, err := ensureInput(spec.GetInputs(), ForeachInputForeach) if err != nil { return nil, err } - i, err := typedvalues.Format(headerTv) + i, err := typedvalues.Unwrap(headerTv) if err != nil { return nil, err } foreach, ok := i.([]interface{}) if !ok { - return nil, fmt.Errorf("condition '%v' needs to be a 'array', but was '%v'", i, headerTv.Type) + return nil, fmt.Errorf("condition '%v' needs to be a 'array', but was '%v'", i, headerTv.ValueType()) } - // Parse task - taskTv, err := ensureInput(spec.GetInputs(), ForeachInputDo, typedvalues.TypeTask) + // Wrap task + taskTv, err := ensureInput(spec.GetInputs(), ForeachInputDo, controlflow.TypeTask) if err != nil { return nil, err } - flow, err := typedvalues.FormatControlFlow(taskTv) + flow, err := controlflow.UnwrapControlFlow(taskTv) if err != nil { return nil, err } - if flow.Workflow() != nil { + if flow.GetWorkflow() != nil { return nil, errors.New("foreach does not support workflow inputs (yet)") } - // Parse collect + // Wrap collect collect := true collectTv, ok := spec.Inputs[ForeachInputCollect] if ok { - b, err := 
typedvalues.FormatBool(collectTv) + b, err := typedvalues.UnwrapBool(collectTv) if err != nil { return nil, fmt.Errorf("collect could not be parsed into a boolean: %v", err) } collect = b } - // Parse sequential + // Wrap sequential var seq bool seqTv, ok := spec.Inputs[ForeachInputSequential] if ok { - b, err := typedvalues.FormatBool(seqTv) + b, err := typedvalues.UnwrapBool(seqTv) if err != nil { return nil, fmt.Errorf("sequential could not be parsed into a boolean: %v", err) } @@ -113,12 +114,12 @@ func (fn *FunctionForeach) Invoke(spec *types.TaskInvocationSpec) (*types.TypedV var tasks []string // Needed to preserve order of the input array for k, item := range foreach { f := flow.Clone() - itemTv := typedvalues.MustParse(item) - itemTv.SetLabel("priority", "1000") // Ensure that item is resolved before other parameters + itemTv := typedvalues.MustWrap(item) + itemTv.SetMetadata(typedvalues.MetadataPriority, "1000") // Ensure that item is resolved before other parameters f.Input("_item", *itemTv) // TODO support workflows - t := f.Task() + t := f.GetTask() name := fmt.Sprintf("do_%d", k) wf.AddTask(name, t) tasks = append(tasks, name) @@ -140,8 +141,8 @@ func (fn *FunctionForeach) Invoke(spec *types.TaskInvocationSpec) (*types.TypedV output = append(output, fmt.Sprintf("{output('%s')}", k)) } } - ct.Input(ComposeInput, typedvalues.MustParse(output)) + ct.Input(ComposeInput, typedvalues.MustWrap(output)) wf.AddTask("collector", ct) - return typedvalues.Parse(wf) + return typedvalues.Wrap(wf) } diff --git a/pkg/fnenv/native/builtin/foreach_test.go b/pkg/fnenv/native/builtin/foreach_test.go index 91bc5d3e..43266661 100644 --- a/pkg/fnenv/native/builtin/foreach_test.go +++ b/pkg/fnenv/native/builtin/foreach_test.go @@ -5,25 +5,26 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/stretchr/testify/assert" ) func TestFunctionForeach_Invoke(t *testing.T) { foreachElements := []interface{}{1, 2, 3, 4, "foo"} out, err := (&FunctionForeach{}).Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - ForeachInputForeach: typedvalues.MustParse(foreachElements), - ForeachInputDo: typedvalues.MustParse(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + ForeachInputForeach: typedvalues.MustWrap(foreachElements), + ForeachInputDo: typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: Noop, }), }, }) assert.NoError(t, err) - assert.Equal(t, typedvalues.TypeWorkflow, typedvalues.ValueType(out.Type)) + assert.Equal(t, controlflow.TypeWorkflow, out.ValueType()) - wf, err := typedvalues.FormatWorkflow(out) + wf, err := controlflow.UnwrapWorkflow(out) assert.NoError(t, err) assert.Equal(t, len(foreachElements)+1, len(wf.Tasks)) // + 1 for the noop-task in the foreach loop. 
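The foreach changes above lean on the renamed typedvalues API used throughout this patch: Wrap/MustWrap replace Parse/MustParse, Unwrap/MustUnwrap replace Format/MustFormat, and labels become metadata. A minimal round-trip sketch, assuming those signatures; the map literal and the priority value are illustrative:

package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/types/typedvalues"
)

func main() {
	// Wrap a plain Go value into a TypedValue (formerly Parse/MustParse).
	tv := typedvalues.MustWrap(map[string]interface{}{"answer": 42})

	// Labels are now metadata; priority influences the order in which inputs are resolved.
	tv.SetMetadata(typedvalues.MetadataPriority, "1000")

	// Unwrap converts the TypedValue back into a plain Go value (formerly Format/MustFormat).
	val, err := typedvalues.Unwrap(tv)
	if err != nil {
		panic(err)
	}
	fmt.Println(tv.ValueType(), val)
}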
assert.NotNil(t, wf.Tasks["do_0"]) - assert.Equal(t, foreachElements[0], int(typedvalues.MustFormat(wf.Tasks["do_0"].Inputs["_item"]).(float64))) + assert.Equal(t, foreachElements[0], int(typedvalues.MustUnwrap(wf.Tasks["do_0"].Inputs["_item"]).(int32))) } diff --git a/pkg/fnenv/native/builtin/http.go b/pkg/fnenv/native/builtin/http.go index d1095fba..fabc572a 100644 --- a/pkg/fnenv/native/builtin/http.go +++ b/pkg/fnenv/native/builtin/http.go @@ -63,7 +63,8 @@ func NewFunctionHTTP() *FunctionHTTP { } } -func (fn *FunctionHTTP) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionHTTP) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { + // Get the actual url targetUrl, err := fn.determineTargetURL(spec.Inputs) if err != nil { return nil, err @@ -85,12 +86,12 @@ func (fn *FunctionHTTP) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValu return result.GetOutput(), nil } -func (fn *FunctionHTTP) determineTargetURL(inputs map[string]*types.TypedValue) (string, error) { +func (fn *FunctionHTTP) determineTargetURL(inputs map[string]*typedvalues.TypedValue) (string, error) { _, tv := getFirstDefinedTypedValue(inputs, HttpInputUrl, types.InputMain) if tv == nil { return "", errors.New("target URL is required for HTTP function") } - s, err := typedvalues.FormatString(tv) + s, err := typedvalues.UnwrapString(tv) if err != nil { return "", err } diff --git a/pkg/fnenv/native/builtin/http_test.go b/pkg/fnenv/native/builtin/http_test.go index 980b153e..6ef8f274 100644 --- a/pkg/fnenv/native/builtin/http_test.go +++ b/pkg/fnenv/native/builtin/http_test.go @@ -1,7 +1,6 @@ package builtin import ( - "encoding/json" "fmt" "io/ioutil" "net/http" @@ -29,29 +28,25 @@ func TestFunctionHttp_Invoke(t *testing.T) { panic("Header 'Foo: Bar' not present") } - w.Header().Set("Content-Type", "application/json") - bs, err := json.Marshal(string(data)) - if err != nil { - panic(err) - } - fmt.Fprint(w, string(bs)) + w.Header().Set("Content-Type", r.Header.Get("Content-Type")) + fmt.Fprint(w, string(data)) })) defer ts.Close() fn := NewFunctionHTTP() body := "body" out, err := fn.Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - types.InputMethod: typedvalues.MustParse(http.MethodPost), - HttpInputUrl: typedvalues.MustParse(ts.URL), - types.InputMain: typedvalues.MustParse(body), - types.InputHeaders: typedvalues.MustParse(map[string]interface{}{ + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMethod: typedvalues.MustWrap(http.MethodPost), + HttpInputUrl: typedvalues.MustWrap(ts.URL), + types.InputMain: typedvalues.MustWrap(body), + types.InputHeaders: typedvalues.MustWrap(map[string]interface{}{ "Foo": "Bar", }), }, }) assert.NoError(t, err) - assert.Equal(t, body, typedvalues.MustFormat(out)) + assert.Equal(t, body, typedvalues.MustUnwrap(out)) } func TestFunctionHttp_Invoke_Invalid(t *testing.T) { @@ -64,15 +59,15 @@ func TestFunctionHttp_Invoke_Invalid(t *testing.T) { fn := NewFunctionHTTP() body := "body" out, err := fn.Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - types.InputMethod: typedvalues.MustParse(http.MethodDelete), - HttpInputUrl: typedvalues.MustParse(ts.URL), - types.InputMain: typedvalues.MustParse(body), - types.InputHeaders: typedvalues.MustParse(map[string]interface{}{ + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMethod: typedvalues.MustWrap(http.MethodDelete), + HttpInputUrl: typedvalues.MustWrap(ts.URL), + types.InputMain: typedvalues.MustWrap(body), + 
types.InputHeaders: typedvalues.MustWrap(map[string]interface{}{ "Foo": "Bar", }), }, }) - assert.Error(t, err) assert.Nil(t, out) + assert.Error(t, err, "expected error\n") } diff --git a/pkg/fnenv/native/builtin/if.go b/pkg/fnenv/native/builtin/if.go index 54b423a9..133bdc63 100644 --- a/pkg/fnenv/native/builtin/if.go +++ b/pkg/fnenv/native/builtin/if.go @@ -54,7 +54,7 @@ A complete example of this function can be found in the [maybewhale](../examples */ type FunctionIf struct{} -func (fn *FunctionIf) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionIf) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { // Verify and get condition expr, err := ensureInput(spec.GetInputs(), IfInputCondition) @@ -66,14 +66,14 @@ func (fn *FunctionIf) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, consequent := spec.GetInputs()[IfInputThen] alternative := spec.GetInputs()[IfInputElse] - // Parse condition to a bool - i, err := typedvalues.Format(expr) + // Wrap condition to a bool + i, err := typedvalues.Unwrap(expr) if err != nil { return nil, err } condition, ok := i.(bool) if !ok { - return nil, fmt.Errorf("condition '%v' needs to be a 'bool', but was '%v'", i, expr.Type) + return nil, fmt.Errorf("condition '%v' needs to be a 'bool', but was '%v'", i, expr.ValueType()) } // Output consequent or alternative based on condition diff --git a/pkg/fnenv/native/builtin/if_test.go b/pkg/fnenv/native/builtin/if_test.go index 6f1087a2..98d0501e 100644 --- a/pkg/fnenv/native/builtin/if_test.go +++ b/pkg/fnenv/native/builtin/if_test.go @@ -14,9 +14,9 @@ func TestFunctionIfConsequentFlow(t *testing.T) { internalFunctionTest(t, &FunctionIf{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - IfInputCondition: typedvalues.MustParse(true), - IfInputThen: typedvalues.MustParse(expectedTask), + Inputs: map[string]*typedvalues.TypedValue{ + IfInputCondition: typedvalues.MustWrap(true), + IfInputThen: typedvalues.MustWrap(expectedTask), }, }, expectedTask) @@ -32,10 +32,10 @@ func TestFunctionIfAlternativeFlow(t *testing.T) { internalFunctionTest(t, &FunctionIf{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - IfInputCondition: typedvalues.MustParse(false), - IfInputThen: typedvalues.MustParse(task), - IfInputElse: typedvalues.MustParse(alternativeTask), + Inputs: map[string]*typedvalues.TypedValue{ + IfInputCondition: typedvalues.MustWrap(false), + IfInputThen: typedvalues.MustWrap(task), + IfInputElse: typedvalues.MustWrap(alternativeTask), }, }, alternativeTask) @@ -45,10 +45,10 @@ func TestFunctionIfLiteral(t *testing.T) { internalFunctionTest(t, &FunctionIf{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - IfInputCondition: typedvalues.MustParse(true), - IfInputThen: typedvalues.MustParse("foo"), - IfInputElse: typedvalues.MustParse("bar"), + Inputs: map[string]*typedvalues.TypedValue{ + IfInputCondition: typedvalues.MustWrap(true), + IfInputThen: typedvalues.MustWrap("foo"), + IfInputElse: typedvalues.MustWrap("bar"), }, }, "foo") @@ -58,9 +58,9 @@ func TestFunctionIfMissingAlternative(t *testing.T) { internalFunctionTest(t, &FunctionIf{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - IfInputCondition: typedvalues.MustParse(false), - IfInputThen: typedvalues.MustParse("then"), + Inputs: map[string]*typedvalues.TypedValue{ + IfInputCondition: typedvalues.MustWrap(false), + IfInputThen: typedvalues.MustWrap("then"), }, }, nil) diff --git 
a/pkg/fnenv/native/builtin/javascript.go b/pkg/fnenv/native/builtin/javascript.go index 5e533c76..a7ff2b7d 100644 --- a/pkg/fnenv/native/builtin/javascript.go +++ b/pkg/fnenv/native/builtin/javascript.go @@ -62,17 +62,17 @@ func NewFunctionJavascript() *FunctionJavascript { } } -func (fn *FunctionJavascript) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionJavascript) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { exprVal, err := ensureInput(spec.Inputs, JavascriptInputExpr, typedvalues.TypeString) argsVal, _ := spec.Inputs[JavascriptInputArgs] if err != nil { return nil, err } - expr, err := typedvalues.FormatString(exprVal) + expr, err := typedvalues.UnwrapString(exprVal) if err != nil { return nil, err } - args, err := typedvalues.Format(argsVal) + args, err := typedvalues.Unwrap(argsVal) if err != nil { return nil, err } @@ -85,7 +85,7 @@ func (fn *FunctionJavascript) Invoke(spec *types.TaskInvocationSpec) (*types.Typ logrus.WithField("taskID", spec.TaskId). Infof("[internal://%s] %v => %v", Javascript, expr, result) - return typedvalues.Parse(result) + return typedvalues.Wrap(result) } func (fn *FunctionJavascript) exec(expr string, args interface{}) (interface{}, error) { diff --git a/pkg/fnenv/native/builtin/javascript_test.go b/pkg/fnenv/native/builtin/javascript_test.go index fc963fb3..42932766 100644 --- a/pkg/fnenv/native/builtin/javascript_test.go +++ b/pkg/fnenv/native/builtin/javascript_test.go @@ -10,31 +10,31 @@ import ( func TestFunctionJavascript_InvokeMap(t *testing.T) { spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - JavascriptInputArgs: typedvalues.MustParse(map[string]interface{}{ + Inputs: map[string]*typedvalues.TypedValue{ + JavascriptInputArgs: typedvalues.MustWrap(map[string]interface{}{ "left": 2, "right": 5, }), - JavascriptInputExpr: typedvalues.MustParse("left * right"), + JavascriptInputExpr: typedvalues.MustWrap("left * right"), }, } js := NewFunctionJavascript() tv, err := js.Invoke(spec) assert.NoError(t, err) - assert.Equal(t, 10, int(typedvalues.MustFormat(tv).(float64))) + assert.Equal(t, 10, int(typedvalues.MustUnwrap(tv).(float64))) } func TestFunctionJavascript_Invoke(t *testing.T) { spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - JavascriptInputArgs: typedvalues.MustParse(10), - JavascriptInputExpr: typedvalues.MustParse("arg * 2"), + Inputs: map[string]*typedvalues.TypedValue{ + JavascriptInputArgs: typedvalues.MustWrap(10), + JavascriptInputExpr: typedvalues.MustWrap("arg * 2"), }, } js := NewFunctionJavascript() tv, err := js.Invoke(spec) assert.NoError(t, err) - assert.Equal(t, 20, int(typedvalues.MustFormat(tv).(float64))) + assert.Equal(t, 20, int(typedvalues.MustUnwrap(tv).(float64))) } diff --git a/pkg/fnenv/native/builtin/noop.go b/pkg/fnenv/native/builtin/noop.go index a3936b61..fdfeed4d 100644 --- a/pkg/fnenv/native/builtin/noop.go +++ b/pkg/fnenv/native/builtin/noop.go @@ -37,9 +37,9 @@ A complete example of this function can be found in the [fortunewhale](../exampl */ type FunctionNoop struct{} -func (fn *FunctionNoop) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionNoop) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { - var output *types.TypedValue + var output *typedvalues.TypedValue switch len(spec.GetInputs()) { case 0: output = nil @@ -54,6 +54,6 @@ func (fn *FunctionNoop) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValu "invocation": 
spec.InvocationId, "task": spec.TaskId, }).Infof("[internal://%s] %v", Noop, - typedvalues.MustFormat(output)) + typedvalues.MustUnwrap(output)) return output, nil } diff --git a/pkg/fnenv/native/builtin/noop_test.go b/pkg/fnenv/native/builtin/noop_test.go index 1258e428..52246250 100644 --- a/pkg/fnenv/native/builtin/noop_test.go +++ b/pkg/fnenv/native/builtin/noop_test.go @@ -12,8 +12,8 @@ func TestFunctionNoopPassInput(t *testing.T) { internalFunctionTest(t, &FunctionNoop{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - NoopInput: typedvalues.MustParse(expected), + Inputs: map[string]*typedvalues.TypedValue{ + NoopInput: typedvalues.MustWrap(expected), }, }, expected) @@ -23,7 +23,7 @@ func TestFunctionNoopEmpty(t *testing.T) { internalFunctionTest(t, &FunctionNoop{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{}, + Inputs: map[string]*typedvalues.TypedValue{}, }, nil) } @@ -32,10 +32,10 @@ func TestFunctionNoopObject(t *testing.T) { internalFunctionTest(t, &FunctionNoop{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - "foo": typedvalues.MustParse(true), - "bar": typedvalues.MustParse(false), - types.InputMain: typedvalues.MustParse("hello"), + Inputs: map[string]*typedvalues.TypedValue{ + "foo": typedvalues.MustWrap(true), + "bar": typedvalues.MustWrap(false), + types.InputMain: typedvalues.MustWrap("hello"), }, }, "hello") diff --git a/pkg/fnenv/native/builtin/repeat.go b/pkg/fnenv/native/builtin/repeat.go index fa2739b9..b271abab 100644 --- a/pkg/fnenv/native/builtin/repeat.go +++ b/pkg/fnenv/native/builtin/repeat.go @@ -2,10 +2,10 @@ package builtin import ( "fmt" - "strconv" "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/golang/protobuf/proto" ) @@ -51,40 +51,26 @@ A complete example of this function can be found in the [repeatwhale](../example // TODO minor: chose between unrolled loop and dynamic loop based on number of tasks for performance type FunctionRepeat struct{} -func (fn *FunctionRepeat) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionRepeat) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { n, ok := spec.Inputs[RepeatInputTimes] if !ok { return nil, fmt.Errorf("repeat needs '%s'", RepeatInputTimes) } - // Parse condition to a int - i, err := typedvalues.Format(n) + // Wrap condition to a int + times, err := typedvalues.UnwrapInt64(n) if err != nil { return nil, err } - // TODO fix int typedvalue - var times int64 - f, ok := i.(float64) - if ok { - times = int64(f) - } else { - // Fallback: attempt to convert string to int - t, err := strconv.Atoi(fmt.Sprintf("%s", i)) - if err != nil { - return nil, fmt.Errorf("condition '%s' needs to be a 'int64', but was '%T'", i, i) - } - times = int64(t) - } - - // Parse do task + // Wrap do task // TODO does a workflow also work? 
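The repeat hunk above swaps the old float64/strconv fallback for the typed UnwrapInt64 helper. A minimal sketch of that helper, assuming it accepts integer-typed values the way the updated repeat test wraps them:

package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/types/typedvalues"
)

func main() {
	// Wrap an integer input as a task would receive it.
	tv := typedvalues.MustWrap(10)

	// UnwrapInt64 reads it back as an int64 directly, with no float64 or strconv detour.
	times, err := typedvalues.UnwrapInt64(tv)
	if err != nil {
		panic(err)
	}
	fmt.Println(times) // 10
}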
doVal, ok := spec.Inputs[RepeatInputDo] if !ok { return nil, fmt.Errorf("repeat needs '%s'", RepeatInputDo) } - doTask, err := typedvalues.FormatTask(doVal) + doTask, err := controlflow.UnwrapTask(doVal) if err != nil { return nil, err } @@ -92,7 +78,7 @@ func (fn *FunctionRepeat) Invoke(spec *types.TaskInvocationSpec) (*types.TypedVa if times > 0 { // TODO add context - return typedvalues.MustParse(&types.WorkflowSpec{ + return typedvalues.MustWrap(&types.WorkflowSpec{ OutputTask: taskID(times - 1), Tasks: createRepeatTasks(doTask, times), }), nil @@ -110,8 +96,8 @@ func createRepeatTasks(task *types.TaskSpec, times int64) map[string]*types.Task prevTask := taskID(n - 1) do.Require(prevTask) // TODO move prev to a reserved namespace, to avoid conflicts - prev := typedvalues.MustParse(fmt.Sprintf("{output('%s')}", prevTask)) - prev.SetLabel("priority", "100") + prev := typedvalues.MustWrap(fmt.Sprintf("{output('%s')}", prevTask)) + prev.SetMetadata(typedvalues.MetadataPriority, "100") do.Input(RepeatInputPrev, prev) } tasks[id] = do diff --git a/pkg/fnenv/native/builtin/repeat_test.go b/pkg/fnenv/native/builtin/repeat_test.go index bcc43062..dd8f970d 100644 --- a/pkg/fnenv/native/builtin/repeat_test.go +++ b/pkg/fnenv/native/builtin/repeat_test.go @@ -5,25 +5,26 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/stretchr/testify/assert" ) func TestFunctionRepeat_Invoke(t *testing.T) { taskToRepeat := &types.TaskSpec{ FunctionRef: Noop, - Inputs: types.SingleDefaultInput(typedvalues.MustParse("foo")), + Inputs: types.SingleDefaultInput(typedvalues.MustWrap("foo")), } repeatFn := &FunctionRepeat{} spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - RepeatInputDo: typedvalues.MustParse(taskToRepeat), - RepeatInputTimes: typedvalues.MustParse(10), + Inputs: map[string]*typedvalues.TypedValue{ + RepeatInputDo: typedvalues.MustWrap(taskToRepeat), + RepeatInputTimes: typedvalues.MustWrap(10), }, } result, err := repeatFn.Invoke(spec) assert.NoError(t, err) - wf, err := typedvalues.FormatWorkflow(result) + wf, err := controlflow.UnwrapWorkflow(result) assert.NoError(t, err) assert.Equal(t, 10, len(wf.Tasks)) } diff --git a/pkg/fnenv/native/builtin/sleep.go b/pkg/fnenv/native/builtin/sleep.go index 432c619a..7df4b591 100644 --- a/pkg/fnenv/native/builtin/sleep.go +++ b/pkg/fnenv/native/builtin/sleep.go @@ -44,11 +44,11 @@ A complete example of this function can be found in the [sleepalot](../examples/ */ type FunctionSleep struct{} -func (f *FunctionSleep) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (f *FunctionSleep) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { duration := SleepInputDefault input, ok := spec.Inputs[SleepInput] if ok { - i, err := typedvalues.Format(input) + i, err := typedvalues.Unwrap(input) if err != nil { return nil, err } @@ -60,10 +60,16 @@ func (f *FunctionSleep) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValu return nil, err } duration = d + case int32: + duration = time.Duration(t) * time.Millisecond + case int64: + duration = time.Duration(t) * time.Millisecond + case float32: + duration = time.Duration(t) * time.Millisecond case float64: duration = time.Duration(t) * time.Millisecond default: - return nil, fmt.Errorf("invalid input '%v'", input.Type) + return nil, fmt.Errorf("invalid input 
'%v'", input.ValueType()) } } diff --git a/pkg/fnenv/native/builtin/sleep_test.go b/pkg/fnenv/native/builtin/sleep_test.go index 6a1042a6..6473f084 100644 --- a/pkg/fnenv/native/builtin/sleep_test.go +++ b/pkg/fnenv/native/builtin/sleep_test.go @@ -15,8 +15,8 @@ func TestSleepFunctionString(t *testing.T) { internalFunctionTest(t, &FunctionSleep{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - SleepInput: typedvalues.MustParse("1000ms"), + Inputs: map[string]*typedvalues.TypedValue{ + SleepInput: typedvalues.MustWrap("1000ms"), }, }, nil) @@ -29,8 +29,8 @@ func TestSleepFunctionInt(t *testing.T) { internalFunctionTest(t, &FunctionSleep{}, &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - SleepInput: typedvalues.MustParse(1000), + Inputs: map[string]*typedvalues.TypedValue{ + SleepInput: typedvalues.MustWrap(1000), }, }, nil) diff --git a/pkg/fnenv/native/builtin/switch.go b/pkg/fnenv/native/builtin/switch.go index 39624a78..b34eae37 100644 --- a/pkg/fnenv/native/builtin/switch.go +++ b/pkg/fnenv/native/builtin/switch.go @@ -54,7 +54,7 @@ A complete example of this function can be found in the [switchwhale](../example */ type FunctionSwitch struct{} -func (fn *FunctionSwitch) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionSwitch) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { switchVal, err := fn.getSwitch(spec.Inputs) if err != nil { return nil, err @@ -76,22 +76,22 @@ func (fn *FunctionSwitch) Invoke(spec *types.TaskInvocationSpec) (*types.TypedVa return defaultCase, nil } -func (fn *FunctionSwitch) getSwitch(inputs map[string]*types.TypedValue) (string, error) { +func (fn *FunctionSwitch) getSwitch(inputs map[string]*typedvalues.TypedValue) (string, error) { tv, err := ensureInput(inputs, SwitchInputCondition) if err != nil { return "", err } - return typedvalues.FormatString(tv) + return typedvalues.UnwrapString(tv) } -func (fn *FunctionSwitch) getCases(inputs map[string]*types.TypedValue) (map[string]*types.TypedValue, - *types.TypedValue, error) { - cases := map[string]*types.TypedValue{} +func (fn *FunctionSwitch) getCases(inputs map[string]*typedvalues.TypedValue) (map[string]*typedvalues.TypedValue, + *typedvalues.TypedValue, error) { + cases := map[string]*typedvalues.TypedValue{} defaultCase := inputs[SwitchInputDefaultCase] switchCases, ok := inputs[SwitchInputCases] if ok { - ir, err := typedvalues.FormatArray(switchCases) + ir, err := typedvalues.UnwrapArray(switchCases) if err != nil { return nil, nil, err } @@ -100,7 +100,7 @@ func (fn *FunctionSwitch) getCases(inputs map[string]*types.TypedValue) (map[str if !ok { return nil, nil, errors.New("invalid case provided") } - tva, err := typedvalues.Parse(m[SwitchCaseValue]) + tva, err := typedvalues.Wrap(m[SwitchCaseValue]) if err != nil { return nil, nil, err } diff --git a/pkg/fnenv/native/builtin/switch_test.go b/pkg/fnenv/native/builtin/switch_test.go index 66b527e1..5a0776cc 100644 --- a/pkg/fnenv/native/builtin/switch_test.go +++ b/pkg/fnenv/native/builtin/switch_test.go @@ -12,28 +12,28 @@ func TestFunctionSwitch_Invoke(t *testing.T) { val := "case1Val" fn := &FunctionSwitch{} spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - SwitchInputCondition: typedvalues.ParseString("case1"), - SwitchInputCases: typedvalues.MustParse([]interface{}{ + Inputs: map[string]*typedvalues.TypedValue{ + SwitchInputCondition: typedvalues.MustWrap("case1"), + SwitchInputCases: 
typedvalues.MustWrap([]interface{}{ switchCase("case1", val), }), - SwitchInputDefaultCase: typedvalues.MustParse("default"), + SwitchInputDefaultCase: typedvalues.MustWrap("default"), }, } out, err := fn.Invoke(spec) assert.NoError(t, err) - assert.Equal(t, "case1Val", typedvalues.MustFormat(out)) + assert.Equal(t, "case1Val", typedvalues.MustUnwrap(out)) } func TestFunctionSwitch_InvokeDefaultCase(t *testing.T) { fn := &FunctionSwitch{} spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - SwitchInputCondition: typedvalues.ParseString("case1"), - SwitchInputCases: typedvalues.MustParse([]interface{}{ + Inputs: map[string]*typedvalues.TypedValue{ + SwitchInputCondition: typedvalues.MustWrap("case1"), + SwitchInputCases: typedvalues.MustWrap([]interface{}{ switchCase("case2", "case2"), }), - SwitchInputDefaultCase: typedvalues.MustParse("default"), + SwitchInputDefaultCase: typedvalues.MustWrap("default"), }, } out, err := fn.Invoke(spec) @@ -44,9 +44,9 @@ func TestFunctionSwitch_InvokeDefaultCase(t *testing.T) { func TestFunctionSwitch_InvokeNoCase(t *testing.T) { fn := &FunctionSwitch{} spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - SwitchInputCondition: typedvalues.MustParse("case1"), - "case2": typedvalues.MustParse("case2"), + Inputs: map[string]*typedvalues.TypedValue{ + SwitchInputCondition: typedvalues.MustWrap("case1"), + "case2": typedvalues.MustWrap("case2"), }, } out, err := fn.Invoke(spec) @@ -57,8 +57,8 @@ func TestFunctionSwitch_InvokeNoCase(t *testing.T) { func TestFunctionSwitch_InvokeNoSwitch(t *testing.T) { fn := &FunctionSwitch{} spec := &types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - "case2": typedvalues.MustParse("case2"), + Inputs: map[string]*typedvalues.TypedValue{ + "case2": typedvalues.MustWrap("case2"), }, } out, err := fn.Invoke(spec) diff --git a/pkg/fnenv/native/builtin/while.go b/pkg/fnenv/native/builtin/while.go index cbbda985..a0b45bcb 100644 --- a/pkg/fnenv/native/builtin/while.go +++ b/pkg/fnenv/native/builtin/while.go @@ -7,6 +7,7 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/sirupsen/logrus" ) @@ -16,8 +17,6 @@ const ( WhileInputLimit = "limit" WhileInputDelay = "delay" WhileInputAction = "do" - - WhileDefaultDelay = time.Duration(100) * time.Millisecond ) var ( @@ -64,51 +63,48 @@ A complete example of this function can be found in the [whilewhale](../examples */ type FunctionWhile struct{} -func (fn *FunctionWhile) Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) { +func (fn *FunctionWhile) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) { // Expr exprTv, err := ensureInput(spec.Inputs, WhileInputExpr, typedvalues.TypeBool) if err != nil { return nil, err } - expr, err := typedvalues.FormatBool(exprTv) + expr, err := typedvalues.UnwrapBool(exprTv) if err != nil { return nil, fmt.Errorf("failed to format while condition to a boolean: %v", err) } - exprSrc, ok := exprTv.GetLabel("src") + exprSrc, ok := exprTv.GetMetadataValue("src") if !ok { return nil, fmt.Errorf("could not get source of '%v'", expr) } - exprSrcTv, err := typedvalues.ParseExpression(exprSrc) + exprSrcTv, err := typedvalues.Wrap(exprSrc) if err != nil { return nil, err } // Limit - limitTv, err := ensureInput(spec.Inputs, WhileInputLimit, typedvalues.TypeNumber) + limitTv, err := 
ensureInput(spec.Inputs, WhileInputLimit, typedvalues.TypeNumber...) if err != nil { return nil, err } - l, err := typedvalues.FormatNumber(limitTv) + limit, err := typedvalues.UnwrapInt64(limitTv) if err != nil { return nil, fmt.Errorf("failed to format limit to a number: %v", err) } - limit := int64(l) - // Counter var count int64 if countTv, ok := spec.Inputs["_count"]; ok { - n, err := typedvalues.FormatNumber(countTv) + count, err = typedvalues.UnwrapInt64(countTv) if err != nil { return nil, fmt.Errorf("failed to format _count to a number: %v", err) } - count = int64(n) } // Delay - delay := WhileDefaultDelay + delay := time.Duration(0) delayTv, ok := spec.Inputs[WhileInputDelay] if ok { - s, err := typedvalues.FormatString(delayTv) + s, err := typedvalues.UnwrapString(delayTv) if err != nil { return nil, fmt.Errorf("failed to parse delay (%v) to string: %v", delayTv, err) } @@ -118,7 +114,7 @@ func (fn *FunctionWhile) Invoke(spec *types.TaskInvocationSpec) (*types.TypedVal } delay = d } - delayTv = typedvalues.MustParse(delay.String()) + delayTv = typedvalues.MustWrap(delay.String()) // Action action, err := ensureInput(spec.Inputs, WhileInputAction) @@ -135,28 +131,28 @@ func (fn *FunctionWhile) Invoke(spec *types.TaskInvocationSpec) (*types.TypedVal return nil, nil } + logrus.Infof("[while] count: %v (limit %v)", count, limit) if count >= limit { return nil, ErrLimitExceeded } // Create the while-specific inputs - prevTv := typedvalues.MustParse("{output('action')}") - prevTv.SetLabel("priority", "100") - countTv := typedvalues.MustParse(count + 1) - countTv.SetLabel("priority", "100") - logrus.Infof("count: %v (<= %v)", count, limit) + prevTv := typedvalues.MustWrap("{ $.Tasks.action.Output }"). + SetMetadata(typedvalues.MetadataPriority, "100") + countTv := typedvalues.MustWrap(count+1). 
+ SetMetadata(typedvalues.MetadataPriority, "100") // If the action is a control flow construct add the while-specific inputs - if typedvalues.IsControlFlow(action.Type) { - flow, err := typedvalues.FormatControlFlow(action) + if controlflow.IsControlFlow(action) { + cf, err := controlflow.UnwrapControlFlow(action) if err != nil { return nil, fmt.Errorf("failed to format workflow action: %v", err) } if count > 0 { - flow.Input("_prev", *prevTv) + cf.Input("_prev", *prevTv) } - flow.Input("_count", *countTv) - action, err = typedvalues.ParseControlFlow(flow) + cf.Input("_count", *countTv) + action, err = typedvalues.Wrap(cf) if err != nil { return nil, fmt.Errorf("failed to format task action: %v", err) } @@ -165,22 +161,15 @@ func (fn *FunctionWhile) Invoke(spec *types.TaskInvocationSpec) (*types.TypedVal wf := &types.WorkflowSpec{ OutputTask: "condition", Tasks: map[string]*types.TaskSpec{ - "wait": { - FunctionRef: Sleep, - Inputs: map[string]*types.TypedValue{ - SleepInput: delayTv, - }, - }, "action": { FunctionRef: Noop, - Inputs: map[string]*types.TypedValue{ + Inputs: map[string]*typedvalues.TypedValue{ NoopInput: action, }, - Requires: types.Require("wait"), }, "condition": { FunctionRef: While, - Inputs: map[string]*types.TypedValue{ + Inputs: map[string]*typedvalues.TypedValue{ WhileInputExpr: exprSrcTv, WhileInputDelay: delayTv, WhileInputLimit: limitTv, @@ -193,7 +182,17 @@ func (fn *FunctionWhile) Invoke(spec *types.TaskInvocationSpec) (*types.TypedVal }, } - wfTv, err := typedvalues.Parse(wf) + if delay > 0 { + wf.Tasks["wait"] = &types.TaskSpec{ + FunctionRef: Sleep, + Inputs: map[string]*typedvalues.TypedValue{ + SleepInput: delayTv, + }, + } + wf.Tasks["action"].Require("wait") + } + + wfTv, err := typedvalues.Wrap(wf) if err != nil { return nil, fmt.Errorf("failed to create while workflow: %v", err) } diff --git a/pkg/fnenv/native/builtin/while_test.go b/pkg/fnenv/native/builtin/while_test.go index dc46fd9d..8ab0be2d 100644 --- a/pkg/fnenv/native/builtin/while_test.go +++ b/pkg/fnenv/native/builtin/while_test.go @@ -5,33 +5,34 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/stretchr/testify/assert" ) func TestFunctionWhile_Invoke(t *testing.T) { out, err := (&FunctionWhile{}).Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - WhileInputExpr: typedvalues.MustParse(true).SetLabel("src", "{}"), - WhileInputLimit: typedvalues.MustParse(10), - "_count": typedvalues.MustParse(4), - WhileInputDelay: typedvalues.MustParse("1h"), - WhileInputAction: typedvalues.MustParse(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + WhileInputExpr: typedvalues.MustWrap(true).SetMetadata("src", "{}"), + WhileInputLimit: typedvalues.MustWrap(10), + "_count": typedvalues.MustWrap(4), + WhileInputDelay: typedvalues.MustWrap("1h"), + WhileInputAction: typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: Noop, }), }, }) assert.NoError(t, err) - assert.Equal(t, typedvalues.TypeWorkflow, out.Type) + assert.Equal(t, controlflow.TypeWorkflow, out.ValueType()) } func TestFunctionWhile_InvokeCompletedInitial(t *testing.T) { out, err := (&FunctionWhile{}).Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - WhileInputExpr: typedvalues.MustParse(false).SetLabel("src", "{}"), - WhileInputLimit: typedvalues.MustParse(10), - "_count": typedvalues.MustParse(4), 
- WhileInputDelay: typedvalues.MustParse("1h"), - WhileInputAction: typedvalues.MustParse(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + WhileInputExpr: typedvalues.MustWrap(false).SetMetadata("src", "{}"), + WhileInputLimit: typedvalues.MustWrap(10), + "_count": typedvalues.MustWrap(4), + WhileInputDelay: typedvalues.MustWrap("1h"), + WhileInputAction: typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: Noop, }), }, @@ -41,14 +42,14 @@ func TestFunctionWhile_InvokeCompletedInitial(t *testing.T) { } func TestFunctionWhile_InvokeCompleted(t *testing.T) { - prev := typedvalues.MustParse("prev result") + prev := typedvalues.MustWrap("prev result") out, err := (&FunctionWhile{}).Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - WhileInputExpr: typedvalues.MustParse(false).SetLabel("src", "{}"), - WhileInputLimit: typedvalues.MustParse(10), - "_count": typedvalues.MustParse(4), - WhileInputDelay: typedvalues.MustParse("1h"), - WhileInputAction: typedvalues.MustParse(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + WhileInputExpr: typedvalues.MustWrap(false).SetMetadata("src", "{}"), + WhileInputLimit: typedvalues.MustWrap(10), + "_count": typedvalues.MustWrap(4), + WhileInputDelay: typedvalues.MustWrap("1h"), + WhileInputAction: typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: Noop, }), "_prev": prev, @@ -60,12 +61,12 @@ func TestFunctionWhile_InvokeCompleted(t *testing.T) { func TestFunctionWhile_Invoke_LimitExceeded(t *testing.T) { out, err := (&FunctionWhile{}).Invoke(&types.TaskInvocationSpec{ - Inputs: map[string]*types.TypedValue{ - WhileInputExpr: typedvalues.MustParse(true).SetLabel("src", "{}"), - WhileInputLimit: typedvalues.MustParse(10), - "_count": typedvalues.MustParse(11), - WhileInputDelay: typedvalues.MustParse("1h"), - WhileInputAction: typedvalues.MustParse(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + WhileInputExpr: typedvalues.MustWrap(true).SetMetadata("src", "{}"), + WhileInputLimit: typedvalues.MustWrap(10), + "_count": typedvalues.MustWrap(11), + WhileInputDelay: typedvalues.MustWrap("1h"), + WhileInputAction: typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: Noop, }), }, diff --git a/pkg/fnenv/native/native.go b/pkg/fnenv/native/native.go index 59cfef18..fae926a5 100644 --- a/pkg/fnenv/native/native.go +++ b/pkg/fnenv/native/native.go @@ -8,6 +8,7 @@ import ( "github.com/fission/fission-workflows/pkg/fnenv" "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/types/validate" "github.com/golang/protobuf/ptypes" "github.com/opentracing/opentracing-go" @@ -21,7 +22,7 @@ const ( // An InternalFunction is a function that will be executed in the same process as the invoker. 
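The interface right below changes Invoke to return *typedvalues.TypedValue, as part of the typedvalues migration running through this patch (MustParse becomes MustWrap, the Format* helpers become Unwrap*, SetLabel/GetLabel become SetMetadata/GetMetadataValue). A minimal sketch of an internal function written against the renamed helpers; the helper names come from this patch, while the function itself and its package are illustrative only.

package builtinsketch // illustrative package, not part of this patch

import (
	"github.com/fission/fission-workflows/pkg/types"
	"github.com/fission/fission-workflows/pkg/types/typedvalues"
)

// Echo is a hypothetical internal function: it unwraps the "default" input
// as a string and wraps the result into a new TypedValue.
type Echo struct{}

func (fn *Echo) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) {
	in, ok := spec.Inputs[types.InputMain]
	if !ok {
		// No input provided: wrap nil explicitly instead of returning a nil pointer.
		return typedvalues.MustWrap(nil), nil
	}
	s, err := typedvalues.UnwrapString(in) // formerly FormatString
	if err != nil {
		return nil, err
	}
	out := typedvalues.MustWrap(s)    // formerly MustParse
	out.SetMetadata("source", "echo") // formerly SetLabel
	return out, nil
}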
type InternalFunction interface { - Invoke(spec *types.TaskInvocationSpec) (*types.TypedValue, error) + Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) } // FunctionEnv for executing low overhead functions, such as control flow constructs, inside the workflow engine diff --git a/pkg/fnenv/resolver_test.go b/pkg/fnenv/resolver_test.go index 271f5dc2..cd6e24c0 100644 --- a/pkg/fnenv/resolver_test.go +++ b/pkg/fnenv/resolver_test.go @@ -78,11 +78,11 @@ func TestResolveInputs(t *testing.T) { tasks := map[string]*types.TaskSpec{ task1: { FunctionRef: task1Name, - Inputs: map[string]*types.TypedValue{ - "nested": typedvalues.ParseTask(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + "nested": typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: nestedTaskName, - Inputs: map[string]*types.TypedValue{ - "nested2": typedvalues.ParseTask(&types.TaskSpec{ + Inputs: map[string]*typedvalues.TypedValue{ + "nested2": typedvalues.MustWrap(&types.TaskSpec{ FunctionRef: nestedNestedTaskName, }), }, diff --git a/pkg/fnenv/workflows/workflows.go b/pkg/fnenv/workflows/workflows.go index bd1a0278..e52f1e7e 100644 --- a/pkg/fnenv/workflows/workflows.go +++ b/pkg/fnenv/workflows/workflows.go @@ -178,7 +178,7 @@ func toWorkflowSpec(spec *types.TaskInvocationSpec) (*types.WorkflowInvocationSp // Prepare inputs wfSpec := spec.ToWorkflowSpec() if parentTv, ok := spec.Inputs[types.InputParent]; ok { - parentID, err := typedvalues.FormatString(parentTv) + parentID, err := typedvalues.UnwrapString(parentTv) if err != nil { return nil, fmt.Errorf("invalid parent id %v (%v)", parentTv, err) } diff --git a/pkg/fnenv/workflows/workflows_test.go b/pkg/fnenv/workflows/workflows_test.go index 232db2de..f3f1e7af 100644 --- a/pkg/fnenv/workflows/workflows_test.go +++ b/pkg/fnenv/workflows/workflows_test.go @@ -16,6 +16,7 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/fission/fission-workflows/pkg/types/validate" + "github.com/fission/fission-workflows/pkg/util" "github.com/stretchr/testify/assert" ) @@ -45,7 +46,7 @@ func TestRuntime_InvokeWorkflow_InvalidSpec(t *testing.T) { func TestRuntime_InvokeWorkflow_SubSuccess(t *testing.T) { runtime, invocationAPI, _, cache := setup() - output := typedvalues.MustParse("foo") + output := typedvalues.MustWrap("foo") go func() { // Simulate workflow invocation time.Sleep(50 * time.Millisecond) @@ -58,7 +59,7 @@ func TestRuntime_InvokeWorkflow_SubSuccess(t *testing.T) { }() wfi, err := runtime.InvokeWorkflow(types.NewWorkflowInvocationSpec("123")) assert.NoError(t, err) - assert.Equal(t, output, wfi.GetStatus().GetOutput()) + util.AssertProtoEqual(t, output, wfi.GetStatus().GetOutput()) assert.True(t, wfi.GetStatus().Finished()) assert.True(t, wfi.GetStatus().Successful()) } @@ -68,7 +69,7 @@ func TestRuntime_InvokeWorkflow_PollSuccess(t *testing.T) { pollCache := store.NewInvocationStore(testutil.NewCache()) // ensure that cache does not support pubsub runtime.store = pollCache - output := typedvalues.MustParse("foo") + output := typedvalues.MustWrap("foo") go func() { // Simulate workflow invocation time.Sleep(50 * time.Millisecond) @@ -85,7 +86,7 @@ func TestRuntime_InvokeWorkflow_PollSuccess(t *testing.T) { }() wfi, err := runtime.InvokeWorkflow(types.NewWorkflowInvocationSpec("123")) assert.NoError(t, err) - assert.Equal(t, output, wfi.GetStatus().GetOutput()) + util.AssertProtoEqual(t, output, wfi.GetStatus().GetOutput()) 
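The tests in this patch replace assert.Equal with util.AssertProtoEqual wherever protobuf messages are compared. The pkg/util helper itself is not shown in this patch; a plausible shape for it, assuming it is a thin wrapper around proto.Equal (which compares messages semantically rather than via reflect.DeepEqual), looks like this:

package util // sketch only; the real pkg/util helper may differ

import (
	"testing"

	"github.com/golang/protobuf/proto"
)

// AssertProtoEqual fails the test when two protobuf messages are not
// semantically equal. proto.Equal tolerates internal bookkeeping state that
// can make reflect.DeepEqual-based assertions unreliable.
func AssertProtoEqual(t *testing.T, expected, actual proto.Message) {
	t.Helper()
	if !proto.Equal(expected, actual) {
		t.Errorf("protobuf messages differ:\nexpected: %v\nactual:   %v", expected, actual)
	}
}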
assert.True(t, wfi.GetStatus().Finished()) assert.True(t, wfi.GetStatus().Successful()) } @@ -134,8 +135,8 @@ func TestRuntime_Invoke(t *testing.T) { spec := types.NewTaskInvocationSpec("wi-123", "ti-123", types.NewFnRef("internal", "", "fooFn")) spec.Inputs = types.Inputs{} - spec.Inputs[types.InputParent] = typedvalues.MustParse("parentID") - output := typedvalues.MustParse("foo") + spec.Inputs[types.InputParent] = typedvalues.MustWrap("parentID") + output := typedvalues.MustWrap("foo") go func() { // Simulate workflow invocation time.Sleep(50 * time.Millisecond) @@ -149,7 +150,7 @@ func TestRuntime_Invoke(t *testing.T) { task, err := runtime.Invoke(spec) assert.NoError(t, err) - assert.Equal(t, output, task.GetOutput()) + util.AssertProtoEqual(t, output, task.GetOutput()) } func setup() (*Runtime, *api.Invocation, *mem.Backend, fes.CacheReaderWriter) { diff --git a/pkg/parse/protobuf/parser.go b/pkg/parse/protobuf/parser.go index 569c096b..099dbdc8 100644 --- a/pkg/parse/protobuf/parser.go +++ b/pkg/parse/protobuf/parser.go @@ -7,7 +7,7 @@ import ( "io/ioutil" "github.com/fission/fission-workflows/pkg/types" - "github.com/gogo/protobuf/jsonpb" + "github.com/golang/protobuf/jsonpb" "github.com/golang/protobuf/proto" ) diff --git a/pkg/parse/protobuf/parser_test.go b/pkg/parse/protobuf/parser_test.go index eff4de9b..12b037f4 100644 --- a/pkg/parse/protobuf/parser_test.go +++ b/pkg/parse/protobuf/parser_test.go @@ -7,6 +7,7 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/util" "github.com/golang/protobuf/jsonpb" "github.com/golang/protobuf/proto" "github.com/stretchr/testify/assert" @@ -19,8 +20,8 @@ func TestParseProto(t *testing.T) { Tasks: map[string]*types.TaskSpec{ "fakeFinalTask": { FunctionRef: "noop", - Inputs: map[string]*types.TypedValue{ - types.InputMain: typedvalues.MustParse("{$.Tasks.FirstTask.Output}"), + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("{$.Tasks.FirstTask.Output}"), }, Requires: map[string]*types.TaskDependencyParameters{ "FirstTask": {}, @@ -28,8 +29,8 @@ func TestParseProto(t *testing.T) { }, "FirstTask": { FunctionRef: "noop", - Inputs: map[string]*types.TypedValue{ - types.InputMain: typedvalues.MustParse("{$.Invocation.Inputs.default.toUpperCase()}"), + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("{$.Invocation.Inputs.default.toUpperCase()}"), }, }, }, @@ -38,7 +39,7 @@ func TestParseProto(t *testing.T) { assert.NoError(t, err) parsedWfSpec, err := Parse(bytes.NewReader(msg)) assert.NoError(t, err) - assert.Equal(t, originalWfSpec, parsedWfSpec) + util.AssertProtoEqual(t, originalWfSpec, parsedWfSpec) } func TestParseJson(t *testing.T) { @@ -48,8 +49,8 @@ func TestParseJson(t *testing.T) { Tasks: map[string]*types.TaskSpec{ "fakeFinalTask": { FunctionRef: "noop", - Inputs: map[string]*types.TypedValue{ - types.InputMain: typedvalues.MustParse("{$.Tasks.FirstTask.Output}"), + Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("{$.Tasks.FirstTask.Output}"), }, Requires: map[string]*types.TaskDependencyParameters{ "FirstTask": {}, @@ -57,8 +58,8 @@ func TestParseJson(t *testing.T) { }, "FirstTask": { FunctionRef: "noop", - Inputs: map[string]*types.TypedValue{ - types.InputMain: typedvalues.MustParse("{$.Invocation.Inputs.default.toUpperCase()}"), + 
Inputs: map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap("{$.Invocation.Inputs.default.toUpperCase()}"), }, }, }, diff --git a/pkg/parse/yaml/parser.go b/pkg/parse/yaml/parser.go index 3a3801dd..0709e249 100644 --- a/pkg/parse/yaml/parser.go +++ b/pkg/parse/yaml/parser.go @@ -101,14 +101,14 @@ func parseTask(t *taskSpec) (*types.TaskSpec, error) { } // parseInputs parses the inputs of a task. This is typically a map[interface{}]interface{}. -func parseInputs(i interface{}) (map[string]*types.TypedValue, error) { +func parseInputs(i interface{}) (map[string]*typedvalues.TypedValue, error) { if i == nil { - return map[string]*types.TypedValue{}, nil + return map[string]*typedvalues.TypedValue{}, nil } switch v := i.(type) { case map[string]interface{}: - result := map[string]*types.TypedValue{} + result := map[string]*typedvalues.TypedValue{} for inputKey, inputVal := range v { typedVal, err := parseInput(inputVal) if err != nil { @@ -118,7 +118,7 @@ func parseInputs(i interface{}) (map[string]*types.TypedValue, error) { } return result, nil case map[interface{}]interface{}: - result := map[string]*types.TypedValue{} + result := map[string]*typedvalues.TypedValue{} for inputKey, inputVal := range v { k := fmt.Sprintf("%v", inputKey) typedVal, err := parseInput(inputVal) @@ -133,12 +133,12 @@ func parseInputs(i interface{}) (map[string]*types.TypedValue, error) { if err != nil { return nil, err } - return map[string]*types.TypedValue{ + return map[string]*typedvalues.TypedValue{ types.InputMain: p, }, nil } -func parseInput(i interface{}) (*types.TypedValue, error) { +func parseInput(i interface{}) (*typedvalues.TypedValue, error) { // Handle special cases switch t := i.(type) { case []interface{}: @@ -184,7 +184,7 @@ func parseInput(i interface{}) (*types.TypedValue, error) { i = res } } else { - p, err := typedvalues.Parse(res) + p, err := typedvalues.Wrap(res) if err != nil { return nil, err } @@ -204,7 +204,7 @@ func parseInput(i interface{}) (*types.TypedValue, error) { i = w } - p, err := typedvalues.Parse(i) + p, err := typedvalues.Wrap(i) if err != nil { return nil, err } diff --git a/pkg/parse/yaml/parser_test.go b/pkg/parse/yaml/parser_test.go index e21e7155..51d71faf 100644 --- a/pkg/parse/yaml/parser_test.go +++ b/pkg/parse/yaml/parser_test.go @@ -7,6 +7,7 @@ import ( "fmt" "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "github.com/stretchr/testify/assert" ) @@ -81,8 +82,8 @@ tasks: assert.Equal(t, int(wf.Tasks[id].Await), len(task.Requires)) acmeDefaultInput := wf.Tasks["acme"].Inputs["default"] - assert.Equal(t, typedvalues.TypeMap, acmeDefaultInput.Type) - i, err := typedvalues.Format(acmeDefaultInput) + assert.Equal(t, typedvalues.TypeMap, acmeDefaultInput.ValueType()) + i, err := typedvalues.Unwrap(acmeDefaultInput) assert.NoError(t, err) assert.Equal(t, i, map[string]interface{}{ "a": "b", @@ -117,11 +118,11 @@ tasks: assert.True(t, ok) wfInput, ok := barTask.Inputs["default"] assert.True(t, ok) - innerWf, err := typedvalues.FormatWorkflow(wfInput) + innerWf, err := controlflow.UnwrapWorkflow(wfInput) assert.NoError(t, err) assert.Equal(t, "$.tasks.inner.dynamic", innerWf.OutputTask) assert.Equal(t, "v42", innerWf.ApiVersion) - assert.Equal(t, "foobar", typedvalues.MustFormat(innerWf.Tasks["inner"].Inputs["default"])) + assert.Equal(t, "foobar", typedvalues.MustUnwrap(innerWf.Tasks["inner"].Inputs["default"])) assert.Equal(t, "dynamic", 
innerWf.Tasks["inner"].FunctionRef) } diff --git a/pkg/scheduler/scheduler.pb.go b/pkg/scheduler/scheduler.pb.go index b8c2cfa2..73cc116d 100644 --- a/pkg/scheduler/scheduler.pb.go +++ b/pkg/scheduler/scheduler.pb.go @@ -20,7 +20,8 @@ package scheduler import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import fission_workflows_types "github.com/fission/fission-workflows/pkg/types" +import fission_workflows_types1 "github.com/fission/fission-workflows/pkg/types" +import fission_workflows_types "github.com/fission/fission-workflows/pkg/types/typedvalues" import google_protobuf "github.com/golang/protobuf/ptypes/timestamp" import google_protobuf1 "github.com/golang/protobuf/ptypes/any" @@ -105,8 +106,8 @@ func (m *Schedule) GetActions() []*Action { } type ScheduleRequest struct { - Workflow *fission_workflows_types.Workflow `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` - Invocation *fission_workflows_types.WorkflowInvocation `protobuf:"bytes,2,opt,name=invocation" json:"invocation,omitempty"` + Workflow *fission_workflows_types1.Workflow `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + Invocation *fission_workflows_types1.WorkflowInvocation `protobuf:"bytes,2,opt,name=invocation" json:"invocation,omitempty"` } func (m *ScheduleRequest) Reset() { *m = ScheduleRequest{} } @@ -114,14 +115,14 @@ func (m *ScheduleRequest) String() string { return proto.CompactTextS func (*ScheduleRequest) ProtoMessage() {} func (*ScheduleRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } -func (m *ScheduleRequest) GetWorkflow() *fission_workflows_types.Workflow { +func (m *ScheduleRequest) GetWorkflow() *fission_workflows_types1.Workflow { if m != nil { return m.Workflow } return nil } -func (m *ScheduleRequest) GetInvocation() *fission_workflows_types.WorkflowInvocation { +func (m *ScheduleRequest) GetInvocation() *fission_workflows_types1.WorkflowInvocation { if m != nil { return m.Invocation } @@ -304,39 +305,40 @@ var _Scheduler_serviceDesc = grpc.ServiceDesc{ func init() { proto.RegisterFile("pkg/scheduler/scheduler.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 537 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x5d, 0x6f, 0xd3, 0x30, - 0x14, 0x5d, 0xd2, 0xad, 0x6b, 0x6f, 0xd0, 0x56, 0x59, 0x08, 0x95, 0x22, 0x44, 0xc9, 0x34, 0x51, - 0xf1, 0xe1, 0x4a, 0xe5, 0x65, 0x2d, 0xe2, 0x21, 0x93, 0x86, 0x54, 0x55, 0x6a, 0x85, 0x17, 0x0d, - 0xc1, 0xc3, 0x90, 0x9b, 0xb8, 0x5d, 0xd4, 0x36, 0x0e, 0xb1, 0xb3, 0x91, 0x1f, 0xc3, 0x33, 0xbf, - 0x88, 0xff, 0x83, 0x9c, 0x38, 0x6d, 0x29, 0xd0, 0xf5, 0x25, 0xf1, 0xc7, 0x39, 0xc7, 0xe7, 0x9e, - 0x7b, 0xe1, 0x69, 0x34, 0x9b, 0xb6, 0x85, 0x77, 0xc3, 0xfc, 0x64, 0xce, 0xe2, 0xd5, 0x0a, 0x47, - 0x31, 0x97, 0x1c, 0x3d, 0x99, 0x04, 0x42, 0x04, 0x3c, 0xc4, 0x77, 0x3c, 0x9e, 0x4d, 0xe6, 0xfc, - 0x4e, 0xe0, 0x25, 0xa4, 0xd1, 0x9b, 0x06, 0xf2, 0x26, 0x19, 0x63, 0x8f, 0x2f, 0xda, 0x1a, 0x57, - 0xfc, 0xdf, 0x2c, 0xf1, 0x6d, 0xf5, 0x80, 0x4c, 0x23, 0x26, 0xf2, 0x6f, 0x2e, 0xdc, 0x78, 0x36, - 0xe5, 0x7c, 0x3a, 0x67, 0xed, 0x6c, 0x37, 0x4e, 0x26, 0x6d, 0x19, 0x2c, 0x98, 0x90, 0x74, 0x11, - 0x69, 0xc0, 0xe3, 0x4d, 0x00, 0x0d, 0xd3, 0xfc, 0xca, 0xfe, 0x69, 0x40, 0xe5, 0x52, 0xbb, 0x40, - 0x36, 0x3c, 0x08, 0xc2, 0x5b, 0xee, 0x51, 0x19, 0xf0, 0xb0, 0xef, 0xd7, 0x8d, 0xa6, 0xd1, 0xaa, - 0x92, 0x3f, 0xce, 0xd0, 0x19, 0x54, 0xbd, 0x98, 0x51, 0xc9, 0x7c, 0x47, 0xd6, 0xcd, 0xa6, 0xd1, 
- 0xb2, 0x3a, 0x0d, 0x9c, 0xeb, 0xe3, 0x42, 0x1f, 0xbb, 0x85, 0x01, 0xb2, 0x02, 0xa3, 0xf7, 0x70, - 0x48, 0x3d, 0xa5, 0x22, 0xea, 0xa5, 0x66, 0xa9, 0x65, 0x75, 0x4e, 0xf0, 0x96, 0x44, 0xb0, 0x93, - 0x61, 0x49, 0xc1, 0xb1, 0x7f, 0x18, 0x70, 0x5c, 0x38, 0x25, 0xec, 0x5b, 0xc2, 0x84, 0x92, 0xac, - 0x14, 0xd4, 0xcc, 0xac, 0xd5, 0x79, 0xfe, 0x0f, 0xcd, 0x3c, 0xab, 0x4f, 0x7a, 0x4f, 0x96, 0x14, - 0x34, 0x00, 0x58, 0xd5, 0xa6, 0x8b, 0x79, 0x75, 0xaf, 0x40, 0x7f, 0x49, 0x21, 0x6b, 0x74, 0x3b, - 0x81, 0x72, 0x6e, 0x19, 0xbd, 0x83, 0x7d, 0xc5, 0xc8, 0x1c, 0x1d, 0x75, 0x5e, 0xec, 0x50, 0xa5, - 0x9b, 0x46, 0x8c, 0x64, 0x24, 0x84, 0xe1, 0x30, 0xa2, 0xe9, 0x9c, 0x53, 0xbf, 0xbe, 0x9f, 0x19, - 0x7a, 0xf8, 0x57, 0xba, 0x4e, 0x98, 0x92, 0x02, 0x64, 0x9f, 0x82, 0xe5, 0x8c, 0x79, 0x2c, 0xf5, - 0xdb, 0x8f, 0xa0, 0x1c, 0x33, 0x2a, 0x78, 0xa8, 0x9b, 0xa7, 0x77, 0xf6, 0x2f, 0x03, 0x6a, 0xca, - 0xf8, 0x8c, 0xb9, 0x54, 0xcc, 0x34, 0xf8, 0x08, 0xcc, 0xa0, 0xe8, 0xb2, 0x19, 0xf8, 0xe8, 0x23, - 0x94, 0x83, 0x30, 0x4a, 0xa4, 0xa8, 0x9b, 0x59, 0x83, 0xba, 0x5b, 0xad, 0x6f, 0xca, 0xe1, 0x7e, - 0xc6, 0xbd, 0x08, 0x65, 0x9c, 0x12, 0x2d, 0xd4, 0xb8, 0x06, 0x6b, 0xed, 0x18, 0xd5, 0xa0, 0x34, - 0x63, 0xa9, 0x7e, 0x52, 0x2d, 0x51, 0x17, 0x0e, 0x6e, 0xe9, 0x3c, 0x61, 0x3a, 0xfe, 0x93, 0xff, - 0xc6, 0xaf, 0x32, 0xf2, 0xaf, 0x14, 0x94, 0xe4, 0x8c, 0x9e, 0x79, 0x66, 0xd8, 0xd7, 0x50, 0x1b, - 0x72, 0x19, 0x4c, 0xd2, 0x2d, 0x65, 0xf5, 0x00, 0xd8, 0xf7, 0x88, 0x79, 0xbb, 0xce, 0xec, 0x1a, - 0xfa, 0x65, 0x17, 0x60, 0xd5, 0x22, 0x74, 0x0c, 0x56, 0x7f, 0x78, 0x35, 0x1a, 0x5c, 0x7c, 0x75, - 0x9d, 0xcb, 0x41, 0x6d, 0x0f, 0x55, 0xe1, 0xc0, 0x39, 0x1f, 0x11, 0xb7, 0x66, 0xa8, 0xbb, 0xe1, - 0xc8, 0xed, 0x7f, 0xf8, 0x9c, 0xdf, 0x99, 0x9d, 0x10, 0xaa, 0xc5, 0xbc, 0xc6, 0x88, 0x42, 0x85, - 0x29, 0xd7, 0x54, 0x32, 0xf4, 0x7a, 0x6b, 0xac, 0x1b, 0x33, 0xde, 0x38, 0xdd, 0x09, 0x6d, 0xef, - 0x9d, 0x5b, 0x5f, 0xaa, 0xcb, 0xf3, 0x71, 0x39, 0xab, 0xeb, 0xed, 0xef, 0x00, 0x00, 0x00, 0xff, - 0xff, 0x66, 0xe2, 0x84, 0x09, 0x94, 0x04, 0x00, 0x00, + // 547 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x52, 0x5d, 0x6f, 0xd3, 0x30, + 0x14, 0x5d, 0xb2, 0xad, 0x6b, 0x6f, 0xd0, 0x56, 0x59, 0x08, 0x95, 0x22, 0x44, 0xc9, 0x34, 0x51, + 0xf1, 0xe1, 0x4a, 0xe5, 0x65, 0x2d, 0xe2, 0x21, 0x93, 0x86, 0x54, 0x15, 0xb5, 0xc2, 0x8b, 0x86, + 0xe0, 0x61, 0xc8, 0x4d, 0xdc, 0x2e, 0x6a, 0x1b, 0x87, 0xd8, 0xd9, 0xc8, 0x8f, 0xe1, 0x99, 0x5f, + 0xc4, 0xff, 0x41, 0x71, 0x9c, 0xb6, 0x2b, 0x50, 0x2a, 0x5e, 0x12, 0x7f, 0x9c, 0x73, 0x7c, 0xee, + 0xb9, 0x17, 0x1e, 0x47, 0xd3, 0x49, 0x4b, 0x78, 0xd7, 0xcc, 0x4f, 0x66, 0x2c, 0x5e, 0xae, 0x70, + 0x14, 0x73, 0xc9, 0xd1, 0xa3, 0x71, 0x20, 0x44, 0xc0, 0x43, 0x7c, 0xcb, 0xe3, 0xe9, 0x78, 0xc6, + 0x6f, 0x05, 0x5e, 0x40, 0xea, 0xdd, 0x49, 0x20, 0xaf, 0x93, 0x11, 0xf6, 0xf8, 0xbc, 0xa5, 0x71, + 0xc5, 0xff, 0xd5, 0x02, 0xdf, 0xca, 0x1e, 0x90, 0x69, 0xc4, 0x44, 0xfe, 0xcd, 0x85, 0xeb, 0xef, + 0xff, 0x83, 0xeb, 0xdf, 0xd0, 0x59, 0x72, 0x77, 0xad, 0xd5, 0x9e, 0x4c, 0x38, 0x9f, 0xcc, 0x58, + 0x4b, 0xed, 0x46, 0xc9, 0xb8, 0x25, 0x83, 0x39, 0x13, 0x92, 0xce, 0x23, 0x0d, 0x78, 0xb8, 0x0e, + 0xa0, 0x61, 0x9a, 0x5f, 0xd9, 0x3f, 0x0c, 0x28, 0x5f, 0xe8, 0x9a, 0x90, 0x0d, 0xf7, 0x82, 0xf0, + 0x86, 0x7b, 0x54, 0x06, 0x3c, 0xec, 0xf9, 0x35, 0xa3, 0x61, 0x34, 0x2b, 0xe4, 0xce, 0x19, 0x3a, + 0x85, 0x8a, 0x17, 0x33, 0x2a, 0x99, 0xef, 0xc8, 0x9a, 0xd9, 0x30, 0x9a, 0x56, 0xbb, 0x8e, 0x73, + 0x7d, 0x5c, 0xe8, 0x63, 0xb7, 0x30, 0x40, 0x96, 0x60, 0xf4, 0x16, 0x0e, 0xa8, 0x97, 0xa9, 0x88, + 0xda, 0x6e, 0x63, 
0xb7, 0x69, 0xb5, 0x8f, 0xf1, 0x86, 0x7c, 0xb1, 0xa3, 0xb0, 0xa4, 0xe0, 0xd8, + 0xdf, 0x0d, 0x38, 0x2a, 0x9c, 0x12, 0xf6, 0x35, 0x61, 0x22, 0x93, 0x2c, 0x17, 0x54, 0x65, 0xd6, + 0x6a, 0x3f, 0xfd, 0x83, 0x66, 0x9e, 0xfc, 0x47, 0xbd, 0x27, 0x0b, 0x0a, 0xea, 0x03, 0x2c, 0x6b, + 0xd3, 0xc5, 0xbc, 0xf8, 0xa7, 0x40, 0x6f, 0x41, 0x21, 0x2b, 0x74, 0x3b, 0x81, 0x52, 0x6e, 0x19, + 0xbd, 0x81, 0xbd, 0x8c, 0xa1, 0x1c, 0x1d, 0xb6, 0x9f, 0x6d, 0x51, 0xa5, 0x9b, 0x46, 0x8c, 0x28, + 0x12, 0xc2, 0x70, 0x10, 0xd1, 0x74, 0xc6, 0xa9, 0x5f, 0xdb, 0x53, 0x86, 0xee, 0xff, 0x96, 0xae, + 0x13, 0xa6, 0xa4, 0x00, 0xd9, 0x27, 0x60, 0x39, 0x23, 0x1e, 0x4b, 0xfd, 0xf6, 0x03, 0x28, 0xc5, + 0x8c, 0x0a, 0x1e, 0xea, 0xe6, 0xe9, 0x9d, 0xfd, 0xd3, 0x80, 0x6a, 0x66, 0x7c, 0xca, 0x5c, 0x2a, + 0xa6, 0x1a, 0x7c, 0x08, 0x66, 0x50, 0x74, 0xd9, 0x0c, 0x7c, 0xf4, 0x01, 0x4a, 0x41, 0x18, 0x25, + 0x52, 0xd4, 0x4c, 0xd5, 0xa0, 0xce, 0x46, 0xeb, 0xeb, 0x72, 0xb8, 0xa7, 0xb8, 0xe7, 0xa1, 0x8c, + 0x53, 0xa2, 0x85, 0xea, 0x57, 0x60, 0xad, 0x1c, 0xa3, 0x2a, 0xec, 0x4e, 0x59, 0xaa, 0x9f, 0xcc, + 0x96, 0xa8, 0x03, 0xfb, 0x6a, 0x98, 0x75, 0xfc, 0xc7, 0x7f, 0x8d, 0x3f, 0xcb, 0xc8, 0xbf, 0xcc, + 0xa0, 0x24, 0x67, 0x74, 0xcd, 0x53, 0xc3, 0xbe, 0x82, 0xea, 0x80, 0xcb, 0x60, 0x9c, 0x6e, 0x28, + 0xab, 0x0b, 0xc0, 0xbe, 0x45, 0xcc, 0xdb, 0x76, 0x66, 0x57, 0xd0, 0xcf, 0x3b, 0x00, 0xcb, 0x16, + 0xa1, 0x23, 0xb0, 0x7a, 0x83, 0xcb, 0x61, 0xff, 0xfc, 0x8b, 0xeb, 0x5c, 0xf4, 0xab, 0x3b, 0xa8, + 0x02, 0xfb, 0xce, 0xd9, 0x90, 0xb8, 0x55, 0x23, 0xbb, 0x1b, 0x0c, 0xdd, 0xde, 0xbb, 0x4f, 0xf9, + 0x9d, 0xd9, 0x0e, 0xa1, 0x52, 0xcc, 0x6b, 0x8c, 0x28, 0x94, 0x59, 0xe6, 0x9a, 0x4a, 0x86, 0x5e, + 0x6e, 0x8c, 0x75, 0x6d, 0xc6, 0xeb, 0x27, 0x5b, 0xa1, 0xed, 0x9d, 0x33, 0xeb, 0x73, 0x65, 0x71, + 0x3e, 0x2a, 0xa9, 0xba, 0x5e, 0xff, 0x0a, 0x00, 0x00, 0xff, 0xff, 0xb4, 0xa5, 0xdf, 0x7d, 0xe2, + 0x04, 0x00, 0x00, } diff --git a/pkg/scheduler/scheduler.proto b/pkg/scheduler/scheduler.proto index 13d8ac18..434ffa70 100644 --- a/pkg/scheduler/scheduler.proto +++ b/pkg/scheduler/scheduler.proto @@ -4,6 +4,7 @@ package fission.workflows.scheduler; option go_package = "scheduler"; import "github.com/fission/fission-workflows/pkg/types/types.proto"; +import "github.com/fission/fission-workflows/pkg/types/typedvalues/typedvalues.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/any.proto"; diff --git a/pkg/types/extensions.go b/pkg/types/extensions.go index 648dbfae..e54e54a5 100644 --- a/pkg/types/extensions.go +++ b/pkg/types/extensions.go @@ -1,9 +1,7 @@ package types import ( - "fmt" - "strings" - + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" ) @@ -35,46 +33,6 @@ var taskFinalStates = []TaskInvocationStatus_Status{ TaskInvocationStatus_SUCCEEDED, } -// -// TypedValue -// - -// Prints a short description of the Value -func (tv TypedValue) Short() string { - var val string - if len(tv.Value) > typedValueShortMaxLen { - val = fmt.Sprintf("%s[..%d..]", tv.Value[:typedValueShortMaxLen], len(tv.Value)-typedValueShortMaxLen) - } else { - val = fmt.Sprintf("%s", tv.Value) - } - - return fmt.Sprintf("", tv.Type, strings.Replace(val, "\n", "", -1)) -} - -func (tv *TypedValue) SetLabel(k string, v string) *TypedValue { - if tv == nil { - return tv - } - if tv.Labels == nil { - tv.Labels = map[string]string{} - } - tv.Labels[k] = v - - return tv -} - -func (tv *TypedValue) GetLabel(k string) (string, bool) { - if tv == nil { - return "", 
false - } - - if tv.Labels == nil { - tv.Labels = map[string]string{} - } - v, ok := tv.Labels[k] - return v, ok -} - // // Error // @@ -188,9 +146,9 @@ func (m *Task) ID() string { // TaskSpec // -func (m *TaskSpec) Input(key string, val *TypedValue) *TaskSpec { +func (m *TaskSpec) Input(key string, val *typedvalues.TypedValue) *TaskSpec { if len(m.Inputs) == 0 { - m.Inputs = map[string]*TypedValue{} + m.Inputs = map[string]*typedvalues.TypedValue{} } m.Inputs[key] = val @@ -268,7 +226,7 @@ func (m *Workflow) Task(id string) (*Task, bool) { // Note: this only retrieves the statically top-level defined tasks func (m *Workflow) Tasks() []*Task { var tasks []*Task - for id := range m.Spec.Tasks { + for id := range m.GetSpec().GetTasks() { task, _ := m.Task(id) tasks = append(tasks, task) } diff --git a/pkg/types/helpers.go b/pkg/types/helpers.go index 217213af..85fbfd77 100644 --- a/pkg/types/helpers.go +++ b/pkg/types/helpers.go @@ -1,6 +1,7 @@ package types import ( + "github.com/fission/fission-workflows/pkg/types/typedvalues" "github.com/golang/protobuf/ptypes" ) @@ -8,11 +9,13 @@ import ( // the invocation. func GetTasks(wf *Workflow, wfi *WorkflowInvocation) map[string]*Task { tasks := map[string]*Task{} - for _, task := range wf.Tasks() { - tasks[task.ID()] = task + if wf != nil { + for _, task := range wf.Tasks() { + tasks[task.ID()] = task + } } if wfi != nil { - for id := range wfi.Status.DynamicTasks { + for id := range wfi.GetStatus().GetDynamicTasks() { task, _ := GetTask(wf, wfi, id) tasks[task.ID()] = task } @@ -167,13 +170,13 @@ func NewTaskInvocation(id string) *TaskInvocation { } } -func SingleInput(key string, t *TypedValue) map[string]*TypedValue { - return map[string]*TypedValue{ +func SingleInput(key string, t *typedvalues.TypedValue) map[string]*typedvalues.TypedValue { + return map[string]*typedvalues.TypedValue{ key: t, } } -func SingleDefaultInput(t *TypedValue) map[string]*TypedValue { +func SingleDefaultInput(t *typedvalues.TypedValue) map[string]*typedvalues.TypedValue { return SingleInput(InputMain, t) } @@ -212,11 +215,11 @@ type TaskInstance struct { } type NamedTypedValue struct { - TypedValue + typedvalues.TypedValue name string } -type Inputs map[string]*TypedValue +type Inputs map[string]*typedvalues.TypedValue func NewTaskInvocationSpec(invocationId string, taskId string, fnRef FnRef) *TaskInvocationSpec { return &TaskInvocationSpec{ @@ -225,3 +228,9 @@ func NewTaskInvocationSpec(invocationId string, taskId string, fnRef FnRef) *Tas InvocationId: invocationId, } } + +func Input(val interface{}) map[string]*typedvalues.TypedValue { + return map[string]*typedvalues.TypedValue{ + InputMain: typedvalues.MustWrap(val), + } +} diff --git a/pkg/types/typedvalues/collections.go b/pkg/types/typedvalues/collections.go deleted file mode 100644 index 9ea87b44..00000000 --- a/pkg/types/typedvalues/collections.go +++ /dev/null @@ -1,201 +0,0 @@ -package typedvalues - -import ( - "fmt" - - "github.com/fission/fission-workflows/pkg/types" - "github.com/golang/protobuf/proto" -) - -const ( - TypeMap = "map" - TypeList = "list" -) - -// TODO add iterator utility functions - -func IsCollection(v ValueType) bool { - return v == TypeMap || v == TypeList -} - -type MapParserFormatter struct{} - -func (fp *MapParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeMap, - } -} - -func (fp *MapParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - var tvmp map[string]*types.TypedValue - switch t := i.(type) { 
- case map[string]interface{}: - mp, err := ParseToTypedValueMap(ctx, t) - if err != nil { - return nil, err - } - tvmp = mp - case map[string]*types.TypedValue: - tvmp = t - default: - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - return ParseTypedValueMap(tvmp) -} - -func (fp *MapParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - mp, err := FormatToTypedValueMap(v) - if err != nil { - return nil, err - } - - return FormatTypedValueMap(ctx, mp) -} - -func ParseToTypedValueMap(ctx Parser, mp map[string]interface{}) (map[string]*types.TypedValue, error) { - tvmp := map[string]*types.TypedValue{} - for k, v := range mp { - tvv, err := ctx.Parse(ctx, v) - if err != nil { - return nil, fmt.Errorf("failed to parse %v: %v", k, err) - } - tvmp[k] = tvv - } - return tvmp, nil -} - -func ParseTypedValueMap(mp map[string]*types.TypedValue) (*types.TypedValue, error) { - pbv := &types.TypedValueMap{Value: mp} - v, err := proto.Marshal(pbv) - if err != nil { - return nil, err - } - return &types.TypedValue{ - Type: TypeMap, - Value: v, - }, nil -} - -func FormatToTypedValueMap(v *types.TypedValue) (map[string]*types.TypedValue, error) { - if v.Type != TypeMap { - return nil, TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - - mp := &types.TypedValueMap{} - err := proto.Unmarshal(v.Value, mp) - if err != nil { - return nil, err - } - - return mp.Value, nil -} - -func FormatTypedValueMap(ctx Formatter, mp map[string]*types.TypedValue) (map[string]interface{}, error) { - result := map[string]interface{}{} - for k, v := range mp { - tvv, err := ctx.Format(ctx, v) - if err != nil { - return nil, fmt.Errorf("failed to format %v: %v", k, err) - } - result[k] = tvv - } - return result, nil -} - -type ListParserFormatter struct{} - -func (pf *ListParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeList, - } -} - -func (pf *ListParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - li, ok := i.([]interface{}) - if !ok { - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - - return ParseList(ctx, li) -} - -func (pf *ListParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return FormatList(ctx, v) -} - -func ParseList(ctx Parser, i []interface{}) (*types.TypedValue, error) { - tvli, err := ParseListInterface(ctx, i) - if err != nil { - return nil, err - } - return ParseListTypedValue(tvli), nil -} - -func ParseListInterface(ctx Parser, li []interface{}) ([]*types.TypedValue, error) { - var result []*types.TypedValue - for _, v := range li { - tv, err := ctx.Parse(ctx, v) - if err != nil { - return nil, err - } - - result = append(result, tv) - } - return result, nil -} - -func ParseListTypedValue(l []*types.TypedValue) *types.TypedValue { - tvl := &types.TypedValueList{Value: l} - bs, err := proto.Marshal(tvl) - if err != nil { - panic(err) - } - - return &types.TypedValue{ - Type: TypeList, - Value: bs, - } -} - -func FormatList(ctx Formatter, v *types.TypedValue) ([]interface{}, error) { - ltv, err := FormatToTypedValueList(v) - if err != nil { - return nil, err - } - return FormatTypedValueList(ctx, ltv) -} - -func FormatToTypedValueList(v *types.TypedValue) ([]*types.TypedValue, error) { - err := verifyTypedValue(v, TypeList) - if err != nil { - return nil, err - } - - tvl := &types.TypedValueList{} - err = proto.Unmarshal(v.Value, tvl) - if err != nil { - return nil, err - } - return tvl.Value, nil -} - -func FormatTypedValueList(ctx 
Formatter, vs []*types.TypedValue) ([]interface{}, error) { - result := []interface{}{} - for _, v := range vs { - i, err := ctx.Format(ctx, v) - if err != nil { - return nil, err - } - result = append(result, i) - } - return result, nil -} diff --git a/pkg/types/typedvalues/controlflow.go b/pkg/types/typedvalues/controlflow.go deleted file mode 100644 index b793560b..00000000 --- a/pkg/types/typedvalues/controlflow.go +++ /dev/null @@ -1,242 +0,0 @@ -package typedvalues - -import ( - "github.com/fission/fission-workflows/pkg/types" - "github.com/golang/protobuf/proto" - "github.com/pkg/errors" -) - -const ( - TypeTask ValueType = "task" - TypeWorkflow ValueType = "workflow" -) - -type ControlFlowParserFormatter struct { -} - -func (pf *ControlFlowParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeTask, - TypeWorkflow, - } -} - -func (pf *ControlFlowParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - switch cf := i.(type) { - case *types.TaskSpec: - return ParseTask(cf), nil - case *types.WorkflowSpec: - return ParseWorkflow(cf), nil - default: - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } -} - -func (pf *ControlFlowParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - switch ValueType(v.Type) { - case TypeTask: - return FormatTask(v) - case TypeWorkflow: - return FormatWorkflow(v) - default: - return nil, TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } -} - -func ParseTask(task *types.TaskSpec) *types.TypedValue { - data, err := proto.Marshal(task) - if err != nil { - panic(err) - } - return &types.TypedValue{ - Type: string(TypeTask), - Value: data, - } -} - -func ParseWorkflow(wf *types.WorkflowSpec) *types.TypedValue { - data, err := proto.Marshal(wf) - if err != nil { - panic(err) - } - return &types.TypedValue{ - Type: string(TypeWorkflow), - Value: data, - } -} - -func FormatTask(v *types.TypedValue) (*types.TaskSpec, error) { - t := &types.TaskSpec{} - err := proto.Unmarshal(v.Value, t) - if err != nil { - return nil, TypedValueErr{ - src: v, - err: errors.Wrap(err, "failed to format task"), - } - } - return t, nil -} - -func FormatWorkflow(v *types.TypedValue) (*types.WorkflowSpec, error) { - t := &types.WorkflowSpec{} - err := proto.Unmarshal(v.Value, t) - if err != nil { - return nil, TypedValueErr{ - src: v, - err: errors.Wrap(err, "failed to format workflow"), - } - } - return t, nil -} - -func IsControlFlow(v ValueType) bool { - return v == TypeTask || v == TypeWorkflow -} - -func FormatControlFlow(v *types.TypedValue) (*Flow, error) { - switch ValueType(v.Type) { - case TypeTask: - t, err := FormatTask(v) - if err != nil { - return nil, err - } - return FlowTask(t), nil - case TypeWorkflow: - wf, err := FormatWorkflow(v) - if err != nil { - return nil, err - } - return FlowWorkflow(wf), nil - default: - return nil, ErrUnsupportedType - } -} - -func ParseControlFlow(i interface{}) (*types.TypedValue, error) { - switch t := i.(type) { - case *types.TaskSpec: - return ParseTask(t), nil - case *types.WorkflowSpec: - return ParseWorkflow(t), nil - case *Flow: - return ParseControlFlow(t.Proto()) - default: - return nil, ErrUnsupportedType - } -} - -type FlowType string - -const ( - Workflow FlowType = "workflow" - Task FlowType = "task" - None FlowType = "" -) - -// Flow is a generic data type to provide a common API to working with dynamic tasks and workflows -// If a flow contains both a task and a workflow, behavior is non-standard, -// 
but should in principle default to the task. -type Flow struct { - task *types.TaskSpec - wf *types.WorkflowSpec -} - -func (f *Flow) Type() FlowType { - if f.task != nil { - return Task - } - if f.wf != nil { - return Workflow - } - return None -} - -func (f *Flow) Input(key string, i types.TypedValue) { - if f == nil { - return - } - if f.task != nil { - f.task.Input(key, &i) - } - if f.wf != nil { - // TODO support parameters in workflow spec - } -} - -func (f *Flow) Proto() proto.Message { - if f == nil { - return nil - } - if f.task != nil { - return f.task - } - return f.wf -} - -func (f *Flow) Clone() *Flow { - if f == nil { - return nil - } - if f.task != nil { - return FlowTask(proto.Clone(f.task).(*types.TaskSpec)) - } - if f.wf != nil { - return FlowWorkflow(proto.Clone(f.wf).(*types.WorkflowSpec)) - } - return nil -} - -func (f *Flow) Task() *types.TaskSpec { - if f == nil { - return nil - } - return f.task -} - -func (f *Flow) Workflow() *types.WorkflowSpec { - if f == nil { - return nil - } - return f.wf -} - -func (f *Flow) ApplyTask(fn func(t *types.TaskSpec)) { - if f != nil && f.task != nil { - fn(f.task) - } -} - -func (f *Flow) ApplyWorkflow(fn func(t *types.WorkflowSpec)) { - if f != nil && f.wf != nil { - fn(f.wf) - } -} - -func (f *Flow) IsEmpty() bool { - return f.wf == nil && f.task == nil -} - -func FlowTask(task *types.TaskSpec) *Flow { - return &Flow{task: task} -} - -func FlowWorkflow(wf *types.WorkflowSpec) *Flow { - return &Flow{wf: wf} -} - -func FlowInterface(i interface{}) (*Flow, error) { - switch t := i.(type) { - case *types.WorkflowSpec: - return FlowWorkflow(t), nil - case *types.TaskSpec: - return FlowTask(t), nil - default: - return nil, ErrUnsupportedType - } -} diff --git a/pkg/types/typedvalues/controlflow/flow.go b/pkg/types/typedvalues/controlflow/flow.go new file mode 100644 index 00000000..95f2fcb0 --- /dev/null +++ b/pkg/types/typedvalues/controlflow/flow.go @@ -0,0 +1,171 @@ +// Package controlflow adds support for workflows and tasks (together "flows") to TypedValues. +// +// With the workflow engine supporting dynamic tasks (tasks outputting other tasks or workflows) this package offers a +// useful abstraction of this mechanism in the form of a Flow. A flow is either a Workflow or Task, +// and is (like a task or workflow) wrappable into a TypedValue. 
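The new controlflow package below replaces the deleted control-flow helpers in typedvalues. A short usage sketch, assuming the helpers behave as defined in this file; the wrapped TaskSpec mirrors what a builtin produces when it emits a dynamic task:

package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/types"
	"github.com/fission/fission-workflows/pkg/types/typedvalues"
	"github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow"
)

func main() {
	// A dynamic task wrapped into a TypedValue, as a builtin output would be.
	tv := typedvalues.MustWrap(&types.TaskSpec{FunctionRef: "noop"})

	// Detect and unwrap the flow without caring whether it is a task or a workflow.
	if controlflow.IsControlFlow(tv) {
		task, err := controlflow.UnwrapTask(tv)
		if err != nil {
			panic(err)
		}
		fmt.Println("dynamic task calls:", task.FunctionRef)
	}
}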
+package controlflow + +import ( + "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/golang/protobuf/proto" + "github.com/pkg/errors" +) + +var ( + ErrEmptyFlow = errors.New("flow is empty") + ErrNotAFlow = errors.New("value is not a flow") +) + +func IsControlFlow(tv *typedvalues.TypedValue) bool { + if tv != nil { + vt := tv.ValueType() + for _, valueType := range Types { + if valueType == vt { + return true + } + } + } + return false +} + +func UnwrapControlFlow(tv *typedvalues.TypedValue) (*Flow, error) { + i, err := typedvalues.Unwrap(tv) + if err != nil { + return nil, err + } + return FlowInterface(i) +} + +func UnwrapTask(tv *typedvalues.TypedValue) (*types.TaskSpec, error) { + flow, err := UnwrapControlFlow(tv) + if err != nil { + return nil, err + } + + task := flow.GetTask() + if task == nil { + return nil, errors.Wrapf(typedvalues.ErrIllegalTypeAssertion, "failed to unwrap %s to task", tv.ValueType()) + } + return task, nil +} + +func UnwrapWorkflow(tv *typedvalues.TypedValue) (*types.WorkflowSpec, error) { + flow, err := UnwrapControlFlow(tv) + if err != nil { + return nil, err + } + + wf := flow.GetWorkflow() + if wf == nil { + return nil, errors.Wrapf(typedvalues.ErrIllegalTypeAssertion, "failed to unwrap %s to workflow", tv.ValueType()) + } + return wf, nil +} + +func (m *Flow) Type() FlowType { + if m == nil { + return FlowTypeNone + } + if m.Task != nil { + return FlowTypeTask + } + if m.Workflow != nil { + return FlowTypeWorkflow + } + return FlowTypeNone +} + +func (m *Flow) Input(key string, i typedvalues.TypedValue) { + if m == nil { + return + } + if m.Task != nil { + m.Task.Input(key, &i) + } + if m.Workflow != nil { + // TODO support parameters in workflow spec + } +} + +func (m *Flow) Proto() proto.Message { + if m == nil { + return nil + } + if m.Task != nil { + return m.Task + } + return m.Workflow +} + +func (m *Flow) Clone() *Flow { + if m == nil { + return nil + } + if m.Task != nil { + return FlowTask(proto.Clone(m.Task).(*types.TaskSpec)) + } + if m.Workflow != nil { + return FlowWorkflow(proto.Clone(m.Workflow).(*types.WorkflowSpec)) + } + return nil +} + +func (m *Flow) ApplyTask(fn func(t *types.TaskSpec)) { + if m != nil && m.Task != nil { + fn(m.Task) + } +} + +func (m *Flow) ApplyWorkflow(fn func(t *types.WorkflowSpec)) { + if m != nil && m.Workflow != nil { + fn(m.Workflow) + } +} + +func (m *Flow) IsEmpty() bool { + return m.Workflow == nil && m.Task == nil +} + +func FlowTask(task *types.TaskSpec) *Flow { + return &Flow{Task: task} +} + +func FlowWorkflow(workflow *types.WorkflowSpec) *Flow { + return &Flow{Workflow: workflow} +} + +func FlowInterface(i interface{}) (*Flow, error) { + if i == nil { + return nil, ErrEmptyFlow + } + switch t := i.(type) { + case *types.WorkflowSpec: + return FlowWorkflow(t), nil + case *types.TaskSpec: + return FlowTask(t), nil + case *Flow: + return t, nil + default: + return nil, ErrNotAFlow + } +} + +// TODO move to more appropriate package +func ResolveTaskOutput(taskID string, invocation *types.WorkflowInvocation) *typedvalues.TypedValue { + val, ok := invocation.Status.Tasks[taskID] + if !ok { + return nil + } + + output := val.Status.Output + if IsControlFlow(output) { + for outputTaskID, outputTask := range invocation.Status.DynamicTasks { + if dep, ok := outputTask.Spec.Requires[taskID]; ok && + dep.Type == types.TaskDependencyParameters_DYNAMIC_OUTPUT { + return ResolveTaskOutput(outputTaskID, 
invocation) + } + } + } + return output +} diff --git a/pkg/types/typedvalues/controlflow/flow.pb.go b/pkg/types/typedvalues/controlflow/flow.pb.go new file mode 100644 index 00000000..62597ea7 --- /dev/null +++ b/pkg/types/typedvalues/controlflow/flow.pb.go @@ -0,0 +1,78 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: pkg/types/typedvalues/controlflow/flow.proto + +/* +Package controlflow is a generated protocol buffer package. + +It is generated from these files: + pkg/types/typedvalues/controlflow/flow.proto + +It has these top-level messages: + Flow +*/ +package controlflow + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import fission_workflows_types1 "github.com/fission/fission-workflows/pkg/types" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Flow is a generic data type to provide a common API to working with dynamic tasks and workflows +// If a flow contains both a task and a workflow, behavior is non-standard, +// but should in principle default to the task. +type Flow struct { + Workflow *fission_workflows_types1.WorkflowSpec `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + Task *fission_workflows_types1.TaskSpec `protobuf:"bytes,2,opt,name=task" json:"task,omitempty"` +} + +func (m *Flow) Reset() { *m = Flow{} } +func (m *Flow) String() string { return proto.CompactTextString(m) } +func (*Flow) ProtoMessage() {} +func (*Flow) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Flow) GetWorkflow() *fission_workflows_types1.WorkflowSpec { + if m != nil { + return m.Workflow + } + return nil +} + +func (m *Flow) GetTask() *fission_workflows_types1.TaskSpec { + if m != nil { + return m.Task + } + return nil +} + +func init() { + proto.RegisterType((*Flow)(nil), "fission.workflows.types.Flow") +} + +func init() { proto.RegisterFile("pkg/types/typedvalues/controlflow/flow.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 182 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x29, 0xc8, 0x4e, 0xd7, + 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0x06, 0x93, 0x29, 0x65, 0x89, 0x39, 0xa5, 0xa9, 0xc5, 0xfa, 0xc9, + 0xf9, 0x79, 0x25, 0x45, 0xf9, 0x39, 0x69, 0x39, 0xf9, 0xe5, 0xfa, 0x20, 0x42, 0xaf, 0xa0, 0x28, + 0xbf, 0x24, 0x5f, 0x48, 0x3c, 0x2d, 0xb3, 0xb8, 0x38, 0x33, 0x3f, 0x4f, 0xaf, 0x3c, 0xbf, 0x28, + 0x1b, 0x24, 0x5e, 0xac, 0x07, 0xd6, 0x2b, 0x65, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, + 0x9c, 0x9f, 0xab, 0x0f, 0x55, 0x03, 0xa3, 0x75, 0xe1, 0x6a, 0xf5, 0x51, 0xed, 0x2a, 0x86, 0x18, + 0xaa, 0xd4, 0xc0, 0xc8, 0xc5, 0xe2, 0x96, 0x93, 0x5f, 0x2e, 0xe4, 0xc8, 0xc5, 0x01, 0x53, 0x2b, + 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x6d, 0xa4, 0xaa, 0x87, 0xc3, 0x42, 0xbd, 0x70, 0x28, 0x3f, 0xb8, + 0x20, 0x35, 0x39, 0x08, 0xae, 0x4d, 0xc8, 0x94, 0x8b, 0xa5, 0x24, 0xb1, 0x38, 0x5b, 0x82, 0x09, + 0xac, 0x5d, 0x11, 0xa7, 0xf6, 0x90, 0xc4, 0xe2, 0x6c, 0xb0, 0x56, 0xb0, 0x72, 0x27, 0xde, 0x28, + 0x6e, 0x24, 0x1f, 0x27, 0xb1, 0x81, 0x1d, 0x66, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xdb, 0x85, + 0x15, 0xbf, 
0x1d, 0x01, 0x00, 0x00, +} diff --git a/pkg/types/typedvalues/controlflow/flow.proto b/pkg/types/typedvalues/controlflow/flow.proto new file mode 100644 index 00000000..b9dda3e5 --- /dev/null +++ b/pkg/types/typedvalues/controlflow/flow.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package fission.workflows.types; +option go_package = "controlflow"; + +import "github.com/fission/fission-workflows/pkg/types/types.proto"; + +// Flow is a generic data type to provide a common API to working with dynamic tasks and workflows +// If a flow contains both a task and a workflow, behavior is non-standard, +// but should in principle default to the task. +message Flow { + fission.workflows.types.WorkflowSpec workflow = 1; + fission.workflows.types.TaskSpec task = 2; +} \ No newline at end of file diff --git a/pkg/types/typedvalues/controlflow/flow_test.go b/pkg/types/typedvalues/controlflow/flow_test.go new file mode 100644 index 00000000..4d0d404c --- /dev/null +++ b/pkg/types/typedvalues/controlflow/flow_test.go @@ -0,0 +1,149 @@ +package controlflow + +import ( + "fmt" + "testing" + "time" + + "github.com/fission/fission-workflows/pkg/types" + "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/util" + "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/assert" +) + +func TestIsControlFlow(t *testing.T) { + assert.True(t, IsControlFlow(typedvalues.MustWrap(&types.WorkflowSpec{}))) + assert.True(t, IsControlFlow(typedvalues.MustWrap(&types.TaskSpec{}))) + assert.False(t, IsControlFlow(typedvalues.MustWrap(nil))) +} + +type testCase struct { + name string + input proto.Message + expectedType string +} + +// parseFormatTestCases provides a suite of test cases. +// +// It is a function instead of a variable because of the package initialization sequence.
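The comment above points at Go's package initialization order: package-level variable initializers run before init(), so a package-level test-case variable reading TypeTask at declaration time would only see the zero value, because TypeTask is assigned in init() in valuetypes.go. A standalone sketch of that pitfall, with an illustrative value:

package main

import "fmt"

// TypeTask mirrors the valuetypes.go pattern: declared without an initializer
// and assigned in init() below.
var TypeTask string

// captured is evaluated during package variable initialization, which happens
// before init() runs, so it ends up holding the zero value "".
var captured = TypeTask

func init() {
	TypeTask = "fission.workflows.types.TaskSpec" // illustrative value
}

func main() {
	fmt.Printf("captured=%q, TypeTask=%q\n", captured, TypeTask)
	// Prints: captured="", TypeTask="fission.workflows.types.TaskSpec"
}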
+func parseFormatTestCases() []testCase { + return []testCase{ + { + input: &types.TaskSpec{ + FunctionRef: "someFn", + Inputs: map[string]*typedvalues.TypedValue{ + "foo": typedvalues.MustWrap("bar"), + }, + Requires: map[string]*types.TaskDependencyParameters{ + "prev": nil, + }, + }, + expectedType: TypeTask, + }, + { + input: &types.WorkflowSpec{ + ApiVersion: types.WorkflowAPIVersion, + OutputTask: "fakeFinalTask", + Tasks: map[string]*types.TaskSpec{ + "fakeFinalTask": { + FunctionRef: "noop", + }, + }, + }, + expectedType: TypeWorkflow, + }, + { + input: FlowTask(&types.TaskSpec{ + FunctionRef: "someFn", + Inputs: map[string]*typedvalues.TypedValue{ + "foo": typedvalues.MustWrap("bar"), + }, + Requires: map[string]*types.TaskDependencyParameters{ + "prev": nil, + }, + }), + expectedType: TypeFlow, + }, + { + input: FlowWorkflow(&types.WorkflowSpec{ + ApiVersion: types.WorkflowAPIVersion, + OutputTask: "fakeFinalTask", + Tasks: map[string]*types.TaskSpec{ + "fakeFinalTask": { + FunctionRef: "noop", + }, + }, + }), + expectedType: TypeFlow, + }, + { + input: FlowWorkflow(&types.WorkflowSpec{ + ApiVersion: types.WorkflowAPIVersion, + OutputTask: "mainTask", + Tasks: map[string]*types.TaskSpec{ + "mainTask": { // layer 1 + FunctionRef: "noop", + Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{ + types.InputMain: &types.TaskSpec{ // layer 2 + FunctionRef: "noop", + Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{ + types.InputMain: &types.TaskSpec{ // layer 3 + FunctionRef: "noop", + Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{ + types.InputMain: "{ param('default') }", + }), + }, + }), + }, + }), + }, + }, + }), + expectedType: TypeFlow, + }, + } +} + +func TestValueTester(t *testing.T) { + var i int + for _, testCase := range parseFormatTestCases() { + testName := testCase.name + if len(testName) == 0 { + testName = fmt.Sprintf("%d_%v", i, testCase.expectedType) + } + t.Run(testName, func(t *testing.T) { + fmt.Printf("Input: %+v\n", testCase) + tv, err := typedvalues.Wrap(testCase.input) + fmt.Printf("Typed value: %+v\n", tv) + assert.NoError(t, err) + assert.True(t, IsControlFlow(tv)) + assert.Equal(t, testCase.expectedType, tv.ValueType()) + i, err := typedvalues.Unwrap(tv) + assert.NoError(t, err) + util.AssertProtoEqual(t, testCase.input, i.(proto.Message)) + fmt.Printf("Output: %+v\n", i) + }) + i++ + } + time.Sleep(100 * time.Millisecond) +} + +func BenchmarkParse(b *testing.B) { + for _, testCase := range parseFormatTestCases() { + b.Run(testCase.expectedType+"_parse", func(b *testing.B) { + for n := 0; n < b.N; n++ { + typedvalues.Wrap(testCase.input) + } + }) + } + for _, testCase := range parseFormatTestCases() { + tv, _ := typedvalues.Wrap(testCase.input) + + b.Run(testCase.expectedType+"_format", func(b *testing.B) { + for n := 0; n < b.N; n++ { + typedvalues.Unwrap(tv) + } + }) + } +} diff --git a/pkg/types/typedvalues/controlflow/valuetypes.go b/pkg/types/typedvalues/controlflow/valuetypes.go new file mode 100644 index 00000000..912184a5 --- /dev/null +++ b/pkg/types/typedvalues/controlflow/valuetypes.go @@ -0,0 +1,34 @@ +package controlflow + +import ( + "github.com/fission/fission-workflows/pkg/types" + "github.com/golang/protobuf/proto" +) + +type FlowType string + +var ( + TypeTask string + TypeWorkflow string + TypeFlow string + FlowTypeTask FlowType + FlowTypeWorkflow FlowType + FlowTypeNone FlowType + Types []string +) + +// Note: ensure that this file is lexically after the generated Protobuf 
messages because of package initialization +// order. +func init() { + TypeTask = proto.MessageName(&types.TaskSpec{}) + TypeWorkflow = proto.MessageName(&types.WorkflowSpec{}) + TypeFlow = proto.MessageName(&Flow{}) + FlowTypeWorkflow = FlowType(TypeWorkflow) + FlowTypeTask = FlowType(TypeTask) + FlowTypeNone = FlowType("") + Types = []string{ + TypeTask, + TypeWorkflow, + TypeFlow, + } +} diff --git a/pkg/types/typedvalues/controlflow/valuetypes_test.go b/pkg/types/typedvalues/controlflow/valuetypes_test.go new file mode 100644 index 00000000..bfaf4692 --- /dev/null +++ b/pkg/types/typedvalues/controlflow/valuetypes_test.go @@ -0,0 +1,17 @@ +package controlflow + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTypesGenerated(t *testing.T) { + for i, v := range Types { + t.Run(fmt.Sprintf("Types[%d]", i), func(t *testing.T) { + fmt.Printf("Types[%d] = %s\n", i, v) + assert.NotEmpty(t, v) + }) + } +} diff --git a/pkg/types/typedvalues/expression.go b/pkg/types/typedvalues/expression.go deleted file mode 100644 index 834f3db2..00000000 --- a/pkg/types/typedvalues/expression.go +++ /dev/null @@ -1,71 +0,0 @@ -package typedvalues - -import ( - "regexp" - - "github.com/fission/fission-workflows/pkg/types" -) - -const ( - TypeExpression ValueType = "expression" -) - -var ( - ExpressionRe = regexp.MustCompile("^\\{(.*)\\}$") -) - -type ExpressionParserFormatter struct{} - -func (pf *ExpressionParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeExpression, - } -} - -func (pf *ExpressionParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - s, ok := i.(string) - if !ok { - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - - return ParseExpression(s) -} - -func (pf *ExpressionParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return FormatExpression(v) -} - -func ParseExpression(s string) (*types.TypedValue, error) { - if !IsExpression(s) { - return nil, TypedValueErr{ - src: s, - err: ErrUnsupportedType, - } - } - - return &types.TypedValue{ - Type: string(TypeExpression), - Value: []byte(s), - }, nil -} - -func FormatExpression(v *types.TypedValue) (string, error) { - if ValueType(v.Type) != TypeExpression { - return "", TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - return string(v.Value), nil -} - -func RemoveExpressionDelimiters(expr string) string { - return ExpressionRe.ReplaceAllString(expr, "$1") -} - -func IsExpression(s string) bool { - return ExpressionRe.MatchString(s) -} diff --git a/pkg/types/typedvalues/httpconv/httpconv.go b/pkg/types/typedvalues/httpconv/httpconv.go index 74a3f2f1..2f590357 100644 --- a/pkg/types/typedvalues/httpconv/httpconv.go +++ b/pkg/types/typedvalues/httpconv/httpconv.go @@ -1,221 +1,226 @@ -// package httpconv provides methods for mapping typedvalues to HTTP requests and HTTP responses to typedvalues. +// package httpconv provides methods for mapping TypedValues to and from HTTP requests and responses. +// +// Any interaction with HTTP requests or responses should be handled by the high-level implementations in this +// package, such as ParseResponse, FormatResponse, ParseRequest, or FormatRequest. +// +// Although you get most reliable results by properly formatting your requests (especially the Content-Type header) +// this package aims to be lenient by trying to infer content-types, and so on. 
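A sketch of how a caller might use the high-level entry points this doc comment names; ParseRequest and FormatResponse are defined further down in this file, while the handler wiring and port here are illustrative:

package main

import (
	"log"
	"net/http"

	"github.com/fission/fission-workflows/pkg/types"
	"github.com/fission/fission-workflows/pkg/types/typedvalues"
	"github.com/fission/fission-workflows/pkg/types/typedvalues/httpconv"
)

func main() {
	http.HandleFunc("/echo", func(w http.ResponseWriter, r *http.Request) {
		// Map the HTTP request onto named TypedValue inputs (body, query, headers, method).
		inputs, err := httpconv.ParseRequest(r)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}

		// Echo the body input back; FormatResponse chooses the content type
		// based on the value's type and metadata.
		var output *typedvalues.TypedValue
		if body, ok := inputs[types.InputBody]; ok {
			output = body
		}
		httpconv.FormatResponse(w, output, nil)
	})
	log.Fatal(http.ListenAndServe(":8888", nil))
}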
package httpconv import ( "bytes" - "encoding/json" "fmt" "io" "io/ioutil" "net/http" "net/url" "reflect" - "strings" "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/typedvalues" - "github.com/golang/protobuf/proto" + "github.com/fission/fission-workflows/pkg/util/mediatype" + "github.com/pkg/errors" "github.com/sirupsen/logrus" ) const ( - inputContentType = "content-type" - headerContentType = "Content-Type" - contentTypeJSON = "application/json" - contentTypeBytes = "application/octet-stream" - contentTypeText = "text/plain" - contentTypeTask = "application/vnd.fission.workflows.workflow" // Default format: protobuf, +json for json - contentTypeWorkflow = "application/vnd.fission.workflows.task" // Default format: protobuf, +json for json - contentTypeProtobuf = "application/protobuf" // Default format: protobuf, +json for json - contentTypeDefault = contentTypeText - DefaultMethod = http.MethodPost + inputContentType = "content-type" + headerContentType = "Content-Type" ) -// ParseRequest maps a HTTP request to a target map of typedvalues. -func ParseRequest(r *http.Request) (map[string]*types.TypedValue, error) { - target := map[string]*types.TypedValue{} - // Content-Type is a common problem, so log this for every request - contentType := r.Header.Get(headerContentType) - - // Map body to "main" input - bodyInput, err := ParseBody(r.Body, contentType) - defer r.Body.Close() - if err != nil { - return nil, fmt.Errorf("failed to parse request: %v", err) - } +var DefaultHTTPMapper = &HTTPMapper{ + DefaultHTTPMethod: http.MethodPost, + ValueTypeResolver: func(tv *typedvalues.TypedValue) *mediatype.MediaType { + // Check metadata of the value + if tv == nil { + return MediaTypeBytes + } - target[types.InputBody] = &bodyInput + if ct, ok := tv.GetMetadataValue(headerContentType); ok { + mt, err := mediatype.Parse(ct) + if err == nil { + return mt + } + } - // Deprecated: body is mapped to 'default' - target[types.InputMain] = &bodyInput + // Handle special cases + switch tv.ValueType() { + case typedvalues.TypeBytes: + return MediaTypeBytes + case typedvalues.TypeNil: + return MediaTypeBytes + case typedvalues.TypeString: + return MediaTypeText + } - // Map query to "query.x" - query := ParseQuery(r) - target[types.InputQuery] = &query + // For all other structured values, use JSON + return MediaTypeJSON + }, + MediaTypeResolver: func(mt *mediatype.MediaType) ParserFormatter { + if mt == nil { + return bytesMapper + } - // Map headers to "headers.x" - headers := ParseHeaders(r) - target[types.InputHeaders] = &headers + // Choose the mapper based on some hard-coded heuristics + switch mt.Identifier() { + case MediaTypeJSON.String(), "text/json": + return jsonMapper + case MediaTypeText.String(): + return textMapper + case MediaTypeBytes.String(): + return bytesMapper + case MediaTypeProtobuf.String(), "application/vnd.google.protobuf", "application/x.google.protobuf", + "application/x.protobuf": + return protobufMapper + } - // Map http method to "method" - method := ParseMethod(r) - target[types.InputMethod] = &method + // text/* -> TextMapper + if mt.Type == "text" { + return textMapper + } - return target, nil + // application/* -> BytesMapper + if mt.Type == "application" { + return bytesMapper + } + + // All other: use default + return bytesMapper + }, } -func ParseResponse(r *http.Response) (types.TypedValue, error) { - body := r.Body - defer r.Body.Close() - contentType := 
r.Header.Get(headerContentType) - output, err := ParseBody(body, contentType) - if err != nil { - return types.TypedValue{}, err - } - return output, nil +func ParseRequest(req *http.Request) (map[string]*typedvalues.TypedValue, error) { + return DefaultHTTPMapper.ParseRequest(req) } -// ParseRequest maps the body of the HTTP request to a corresponding typedvalue. -func ParseBody(data io.Reader, contentType string) (types.TypedValue, error) { - if len(contentType) == 0 { - contentType = contentTypeDefault - } +func ParseResponse(resp *http.Response) (*typedvalues.TypedValue, error) { + return DefaultHTTPMapper.ParseResponse(resp) +} + +func FormatRequest(source map[string]*typedvalues.TypedValue, target *http.Request) error { + return DefaultHTTPMapper.FormatRequest(source, target) +} - tv := types.TypedValue{} - tv.SetLabel(headerContentType, contentType) +func FormatResponse(w http.ResponseWriter, output *typedvalues.TypedValue, outputErr *types.Error) { + DefaultHTTPMapper.FormatResponse(w, output, outputErr) +} - bs, err := ioutil.ReadAll(data) - if err != nil { - return tv, err - } +type HTTPMapper struct { + DefaultHTTPMethod string + ValueTypeResolver func(tv *typedvalues.TypedValue) *mediatype.MediaType + DefaultMediaType *mediatype.MediaType + MediaTypeResolver func(mediaType *mediatype.MediaType) ParserFormatter +} - // Attempt to parse body according to provided ContentType - switch normalizeContentType(contentType) { - case contentTypeJSON: - var i interface{} - err := json.Unmarshal(bs, &i) - if err != nil { - logrus.Warnf("Failed to parse JSON data (len: %v, data: '%.50s' cause: %v), skipping further parsing.", - len(bs), string(bs), err) - tv = *typedvalues.ParseBytes(bs) - } else { - tv = *typedvalues.MustParse(i) +func (h *HTTPMapper) ParseResponse(resp *http.Response) (*typedvalues.TypedValue, error) { + contentType := h.getRequestContentType(resp.Header) + defer resp.Body.Close() + return DefaultHTTPMapper.parseBody(resp.Body, contentType) +} + +// ParseRequest maps a HTTP request to a target map of typedvalues. +func (h *HTTPMapper) ParseRequest(req *http.Request) (map[string]*typedvalues.TypedValue, error) { + // Determine content-type + contentType := h.getRequestContentType(req.Header) + defer req.Body.Close() + + var body *typedvalues.TypedValue + var err error + // TODO support multipart + switch contentType.Identifier() { + // Special case: application/x-www-form-urlencoded is the only content-type (?) 
which also stores data in the url + case "application/x-www-form-urlencoded": + req.ParseForm() + mp := map[string]interface{}{} + // simplify the map, because we don't support multi-value maps yet + for k, vs := range req.Form { + if len(vs) > 0 { + mp[k] = vs[0] + } } - case contentTypeText: - tv = *typedvalues.ParseString(string(bs)) - case contentTypeProtobuf: - fallthrough - case contentTypeTask: - fallthrough - case contentTypeWorkflow: - // TODO support json-encoded workflow/task - var m proto.Message - err := proto.Unmarshal(bs, m) + body, err = typedvalues.Wrap(mp) if err != nil { - return tv, err + return nil, errors.Errorf("failed to parse form request: %v", err) } - t, err := typedvalues.Parse(m) + + // Default case parse body using the Parser interface + default: + body, err = h.parseBody(req.Body, contentType) if err != nil { - return tv, err + return nil, errors.Errorf("failed to parse request: %v", err) } - tv = *t - default: - // In other cases do not attempt to interpret the data - fallthrough - case contentTypeBytes: - tv = *typedvalues.ParseBytes(bs) } - return tv, nil -} + return map[string]*typedvalues.TypedValue{ + // Map body to "body" input + types.InputBody: body, -// ParseMethod maps the method param from a request to a TypedValue -func ParseMethod(r *http.Request) types.TypedValue { - return *typedvalues.ParseString(r.Method) -} + // Deprecated: Map body to "main/default" input + types.InputMain: body, -// ParseHeaders maps the headers from a request to the "headers" key in the target map -func ParseHeaders(r *http.Request) types.TypedValue { - // For now we do not support multi-valued headers - headers := flattenMultimap(r.Header) + // Map query to "query.x" + types.InputQuery: h.parseQuery(req), - tv := typedvalues.MustParse(headers) - return *tv -} + // Map headers to "headers.x" + types.InputHeaders: h.parseHeaders(req), -// ParseQuery maps the query params from a request to the "query" key in the target map -func ParseQuery(r *http.Request) types.TypedValue { - // For now we do not support multi-valued query params - query := flattenMultimap(r.URL.Query()) - - tv := typedvalues.MustParse(query) - return *tv + // Map http method to "method" + types.InputMethod: h.parseMethod(req), + }, nil } -// -// formatting logic -// - // FormatResponse maps an TypedValue to an HTTP response -func FormatResponse(w http.ResponseWriter, output *types.TypedValue, outputErr *types.Error) { +func (h *HTTPMapper) FormatResponse(w http.ResponseWriter, output *typedvalues.TypedValue, outputErr *types.Error) { if w == nil { panic("cannot format response to nil") } if outputErr != nil { - // TODO provide different http codes based on error http.Error(w, outputErr.Error(), http.StatusInternalServerError) return } if output == nil { w.WriteHeader(http.StatusNoContent) - output = typedvalues.ParseNil() + output = typedvalues.MustWrap(nil) return } w.WriteHeader(http.StatusOK) - contentType := DetermineContentType(output) - w.Header().Set(headerContentType, contentType) - bs, err := FormatBody(*output, contentType) + contentType := h.ValueTypeResolver(output) + err := h.formatBody(w, output, contentType) if err != nil { - FormatResponse(w, nil, &types.Error{ + h.FormatResponse(w, nil, &types.Error{ Message: fmt.Sprintf("Failed to format response body: %v", err), }) } - w.Write(bs) return } // FormatRequest maps a map of typed values to an HTTP request -func FormatRequest(source map[string]*types.TypedValue, target *http.Request) error { +func (h *HTTPMapper) FormatRequest(source 
map[string]*typedvalues.TypedValue, target *http.Request) error { if target == nil { panic("cannot format request to nil") } // Map content-type to the request's content-type - contentType := DetermineContentTypeInputs(source) + contentType := h.determineContentTypeFromInputs(source) // Map 'body' input to the body of the request - mainInput, ok := source[types.InputBody] - if !ok { - mainInput, ok = source[types.InputMain] - } - if ok && mainInput != nil { - bs, err := FormatBody(*mainInput, contentType) + mainInput := getFirstDefined(source, types.InputBody, types.InputMain) + if mainInput != nil { + err := h.formatBody(&requestWriter{req: target}, mainInput, contentType) if err != nil { return err } - target.Body = ioutil.NopCloser(bytes.NewReader(bs)) - target.ContentLength = int64(len(bs)) } // Map method input to HTTP method - method := FormatMethod(source, DefaultMethod) + method := h.formatMethod(source) target.Method = method // Map query input to URL query - query := FormatQuery(source) + query := h.formatQuery(source) if query != nil { if target.URL == nil { panic("request has no URL") @@ -224,7 +229,7 @@ func FormatRequest(source map[string]*types.TypedValue, target *http.Request) er } // Map headers input to HTTP headers - headers := FormatHeaders(source) + headers := h.formatHeaders(source) if target.Header == nil { target.Header = headers } else { @@ -234,32 +239,67 @@ func FormatRequest(source map[string]*types.TypedValue, target *http.Request) er } } } - target.Header.Set(headerContentType, contentType) - return nil } -func FormatMethod(inputs map[string]*types.TypedValue, defaultMethod string) string { +func (h *HTTPMapper) Clone() *HTTPMapper { + return &HTTPMapper{ + DefaultMediaType: h.DefaultMediaType.Copy(), + DefaultHTTPMethod: h.DefaultHTTPMethod, + ValueTypeResolver: h.ValueTypeResolver, + MediaTypeResolver: h.MediaTypeResolver, + } +} + +// parseBody maps the body of the HTTP request to a corresponding typedvalue. 
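// A sketch of the outbound direction handled by FormatRequest above: building an
// *http.Request from a map of TypedValues, mirroring the httpconv_test.go usage further
// down in this diff. The URL and input values are illustrative only. (Assumed imports:
// net/http, net/url, and the types/typedvalues/httpconv packages of this repository.)
func buildRequest() (*http.Request, error) {
	u, err := url.Parse("http://example.com/fn")
	if err != nil {
		return nil, err
	}
	target := &http.Request{URL: u, Header: http.Header{}}

	source := map[string]*typedvalues.TypedValue{
		types.InputBody:    typedvalues.MustWrap("hello world"),
		types.InputMethod:  typedvalues.MustWrap(http.MethodPost),
		types.InputQuery:   typedvalues.MustWrap(map[string]interface{}{"verbose": "true"}),
		types.InputHeaders: typedvalues.MustWrap(map[string]interface{}{"X-Request-Id": "42"}),
	}

	// FormatRequest fills in the body, method, query, headers, and Content-Type.
	if err := httpconv.FormatRequest(source, target); err != nil {
		return nil, err
	}
	return target, nil
}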
+func (h *HTTPMapper) parseBody(data io.Reader, contentType *mediatype.MediaType) (*typedvalues.TypedValue, error) { + if contentType == nil { + contentType = h.DefaultMediaType + } + + return h.MediaTypeResolver(contentType).Parse(contentType, data) +} + +// parseMethod maps the method param from a request to a TypedValue +func (h *HTTPMapper) parseMethod(r *http.Request) *typedvalues.TypedValue { + return typedvalues.MustWrap(r.Method) +} + +// parseHeaders maps the headers from a request to the "headers" key in the target map +func (h *HTTPMapper) parseHeaders(r *http.Request) *typedvalues.TypedValue { + // For now we do not support multi-valued headers + headers := flattenMultimap(r.Header) + return typedvalues.MustWrap(headers) +} + +// parseQuery maps the query params from a request to the "query" key in the target map +func (h *HTTPMapper) parseQuery(r *http.Request) *typedvalues.TypedValue { + // For now we do not support multi-valued query params + query := flattenMultimap(r.URL.Query()) + return typedvalues.MustWrap(query) +} + +func (h *HTTPMapper) formatMethod(inputs map[string]*typedvalues.TypedValue) string { tv, ok := inputs[types.InputMethod] if ok && tv != nil { - contentType, err := typedvalues.FormatString(tv) + contentType, err := typedvalues.UnwrapString(tv) if err == nil { return contentType } logrus.Errorf("Invalid method in inputs: %+v", tv) } - return defaultMethod + return h.DefaultHTTPMethod } // FUTURE: support multivalued query params -func FormatQuery(inputs map[string]*types.TypedValue) url.Values { +func (h *HTTPMapper) formatQuery(inputs map[string]*typedvalues.TypedValue) url.Values { queryInput := inputs[types.InputQuery] if queryInput == nil { return nil } target := url.Values{} - i, err := typedvalues.Format(queryInput) + i, err := typedvalues.Unwrap(queryInput) if err != nil { logrus.Errorf("Failed to format headers: %v", err) } @@ -276,108 +316,60 @@ func FormatQuery(inputs map[string]*types.TypedValue) url.Values { return target } -func FormatBody(value types.TypedValue, contentType string) ([]byte, error) { - if len(contentType) == 0 { - contentType = contentTypeDefault +func (h *HTTPMapper) formatBody(w http.ResponseWriter, body *typedvalues.TypedValue, contentType *mediatype.MediaType) error { + if contentType == nil { + contentType = h.ValueTypeResolver(body) } - i, err := typedvalues.Format(&value) + return h.MediaTypeResolver(contentType).Format(w, body) +} + +func (h *HTTPMapper) findAndParseContentType(inputs map[string]*typedvalues.TypedValue) (*mediatype.MediaType, error) { + // Check the input[content-type] + s, err := typedvalues.UnwrapString(inputs[inputContentType]) + if err == nil { + return mediatype.Parse(s) + } + + // Check the input[headers][content-type + headers, err := typedvalues.UnwrapMap(inputs[types.InputHeaders]) if err != nil { return nil, err } - // Attempt to parse body according to provided ContentType - var bs []byte - switch normalizeContentType(contentType) { - case contentTypeJSON: - bs, err = json.Marshal(i) - if err != nil { - return nil, err - } - case contentTypeText: - switch t := i.(type) { - case string: - bs = []byte(t) - case []byte: - bs = t - default: - bs = []byte(fmt.Sprintf("%v", t)) - } - case contentTypeProtobuf: - fallthrough - case contentTypeTask: - fallthrough - case contentTypeWorkflow: - // TODO support json - m, ok := i.(proto.Message) - if !ok { - return nil, fmt.Errorf("illegal content type '%T', should be protobuf", i) - } - bs, err = proto.Marshal(m) - if err != nil { - return nil, err - } 
- default: - fallthrough - case contentTypeBytes: - var ok bool - bs, ok = i.([]byte) - if !ok { - return nil, fmt.Errorf("illegal content type '%T', should be []byte", i) - } + ctHeader, ok := headers[headerContentType].(string) + if !ok { + return nil, errors.New("cannot find or parse content-type") } - return bs, nil -} -func DetermineContentType(value *types.TypedValue) string { - if value == nil { - return contentTypeBytes - } + return mediatype.Parse(ctHeader) +} - ct, ok := value.GetLabel(headerContentType) - if ok && len(ct) > 0 { - return ct +func (h *HTTPMapper) determineContentTypeFromInputs(inputs map[string]*typedvalues.TypedValue) *mediatype.MediaType { + if inputs == nil { + return h.DefaultMediaType } - // Otherwise, check for primitive types of the main input - switch typedvalues.ValueType(value.Type) { - // TODO task and workflow - case typedvalues.TypeMap: - fallthrough - case typedvalues.TypeList: - return contentTypeJSON - case typedvalues.TypeNumber: - fallthrough - case typedvalues.TypeExpression: - fallthrough - case typedvalues.TypeString: - return contentTypeText - default: - return contentTypeBytes + mt, err := h.findAndParseContentType(inputs) + if err != nil { + mt = h.ValueTypeResolver(getFirstDefined(inputs, types.InputBody, types.InputMain)) } + return mt } -func DetermineContentTypeInputs(inputs map[string]*types.TypedValue) string { - // Check for forced contentType in inputs - ctTv, ok := inputs[inputContentType] - if ok && ctTv != nil { - contentType, err := typedvalues.FormatString(ctTv) - if err == nil { - return contentType - } - logrus.Errorf("Invalid content type in inputs: %+v", ctTv) - } - - // Otherwise, check for label on body input - if inputs[types.InputBody] != nil { - return DetermineContentType(inputs[types.InputBody]) +func (h *HTTPMapper) getRequestContentType(headers http.Header) *mediatype.MediaType { + var contentType *mediatype.MediaType + ct, err := mediatype.Parse(headers.Get(headerContentType)) + if err != nil { + contentType = h.DefaultMediaType } else { - return DetermineContentType(inputs[types.InputMain]) + contentType = ct } + return contentType } // FUTURE: support multi-headers at some point -func FormatHeaders(inputs map[string]*types.TypedValue) http.Header { +func (h *HTTPMapper) formatHeaders(inputs map[string]*typedvalues.TypedValue) http.Header { headers := http.Header{} rawHeaders, ok := inputs[types.InputHeaders] if !ok || rawHeaders == nil { @@ -385,7 +377,7 @@ func FormatHeaders(inputs map[string]*types.TypedValue) http.Header { } // TODO handle partial map - i, err := typedvalues.Format(rawHeaders) + i, err := typedvalues.Unwrap(rawHeaders) if err != nil { logrus.Errorf("Failed to format headers: %v", err) } @@ -414,18 +406,41 @@ func flattenMultimap(mm map[string][]string) map[string]interface{} { return target } -func normalizeContentType(contentType string) string { - contentType = strings.ToLower(strings.TrimSpace(contentType)) - matchContentType := contentType - // Heuristics, because everything to do with HTTP is ambiguous... 
- if strings.Contains(contentType, "json") { // TODO exclude JSON representation of protobuf objects - matchContentType = contentTypeJSON +// requestWriter is a wrapper over http.Request to ensure that it conforms with the http.ResponseWriter interface +type requestWriter struct { + req *http.Request + buf *bytes.Buffer +} + +func (rw *requestWriter) Header() http.Header { + if rw.req.Header == nil { + rw.req.Header = http.Header{} + } + return rw.req.Header +} + +func (rw *requestWriter) Write(data []byte) (int, error) { + if rw.buf == nil { + rw.buf = &bytes.Buffer{} + rw.req.Body = ioutil.NopCloser(rw.buf) } - if strings.HasPrefix(contentType, "text") { - matchContentType = contentTypeText + n, err := rw.buf.Write(data) + if err != nil { + return n, err } - if strings.Contains(contentType, "protobuf") { - matchContentType = contentTypeProtobuf + rw.Header().Set("Content-Length", fmt.Sprintf("%d", rw.buf.Len())) + return n, nil +} + +func (rw *requestWriter) WriteHeader(statusCode int) { + return // Not relevant for http.Request +} + +func getFirstDefined(inputs map[string]*typedvalues.TypedValue, keys ...string) *typedvalues.TypedValue { + for _, key := range keys { + if val, ok := inputs[key]; ok { + return val + } } - return matchContentType + return nil } diff --git a/pkg/types/typedvalues/httpconv/httpconv_test.go b/pkg/types/typedvalues/httpconv/httpconv_test.go index 73911bb4..561fb19f 100644 --- a/pkg/types/typedvalues/httpconv/httpconv_test.go +++ b/pkg/types/typedvalues/httpconv/httpconv_test.go @@ -27,23 +27,23 @@ func TestFormatRequest(t *testing.T) { panic(err) } target := &http.Request{ - URL: reqURL, - // TODO verify that existing headers, query params, etc stay in tact. + URL: reqURL, + Header: http.Header{}, } - source := map[string]*types.TypedValue{ - types.InputMain: unsafe(typedvalues.Parse(body)), - types.InputQuery: unsafe(typedvalues.Parse(query)), - types.InputHeaders: unsafe(typedvalues.Parse(headers)), - types.InputMethod: unsafe(typedvalues.Parse(method)), + source := map[string]*typedvalues.TypedValue{ + types.InputMain: typedvalues.MustWrap(body), + types.InputQuery: typedvalues.MustWrap(query), + types.InputHeaders: typedvalues.MustWrap(headers), + types.InputMethod: typedvalues.MustWrap(method), } err = FormatRequest(source, target) assert.NoError(t, err) - // Check body bs, err := ioutil.ReadAll(target.Body) assert.NoError(t, err) assert.Equal(t, body, string(bs)) + assert.Equal(t, target.Header.Get(headerContentType), "text/plain") // Check headers assert.Equal(t, headers["Header-Key"], target.Header["Header-Key"][0]) @@ -66,17 +66,17 @@ func TestParseRequestComplete(t *testing.T) { assert.NoError(t, err) // Check body - ibody, err := typedvalues.Format(target[types.InputMain]) + ibody, err := typedvalues.Unwrap(target[types.InputBody]) assert.NoError(t, err) assert.Equal(t, body, ibody) // Check method - method, err := typedvalues.Format(target[types.InputMethod]) + method, err := typedvalues.Unwrap(target[types.InputMethod]) assert.NoError(t, err) assert.Equal(t, http.MethodPut, method) // Check headers - rawHeader, err := typedvalues.Format(target[types.InputHeaders]) + rawHeader, err := typedvalues.Unwrap(target[types.InputHeaders]) assert.NoError(t, err) headers := rawHeader.(map[string]interface{}) assert.IsType(t, map[string]interface{}{}, rawHeader) @@ -84,7 +84,7 @@ func TestParseRequestComplete(t *testing.T) { assert.Equal(t, nil, headers["nonExistent"]) // Check query - rawQuery, err := typedvalues.Format(target[types.InputQuery]) + rawQuery, 
err := typedvalues.Unwrap(target[types.InputQuery]) assert.NoError(t, err) assert.IsType(t, map[string]interface{}{}, rawQuery) query := rawQuery.(map[string]interface{}) @@ -103,24 +103,57 @@ func TestParseRequestMinimal(t *testing.T) { assert.NoError(t, err) // Check body - ibody, err := typedvalues.Format(target[types.InputMain]) + ibody, err := typedvalues.Unwrap(target[types.InputBody]) assert.NoError(t, err) assert.Equal(t, body, ibody) // Check method - method, err := typedvalues.Format(target[types.InputMethod]) + method, err := typedvalues.Unwrap(target[types.InputMethod]) + assert.NoError(t, err) + assert.Equal(t, http.MethodPut, method) + + // Check headers + rawHeader, err := typedvalues.Unwrap(target[types.InputHeaders]) + assert.NoError(t, err) + assert.IsType(t, map[string]interface{}{}, rawHeader) + headers := rawHeader.(map[string]interface{}) + assert.Equal(t, nil, headers["nonExistent"]) + + // Check query + rawQuery, err := typedvalues.Unwrap(target[types.InputQuery]) + assert.NoError(t, err) + assert.IsType(t, map[string]interface{}{}, rawQuery) + query := rawQuery.(map[string]interface{}) + assert.Equal(t, nil, query["nonExistent"]) +} + +func TestParseRequestWithoutContentType(t *testing.T) { + body := "hello world!" + req := createRequest(http.MethodPut, "http://foo.example", map[string]string{}, + strings.NewReader(body)) + + target, err := ParseRequest(req) + assert.NoError(t, err) + + // Check body + ibody, err := typedvalues.Unwrap(target[types.InputBody]) + assert.NoError(t, err) + assert.Equal(t, body, string(ibody.([]byte))) + + // Check method + method, err := typedvalues.Unwrap(target[types.InputMethod]) assert.NoError(t, err) assert.Equal(t, http.MethodPut, method) // Check headers - rawHeader, err := typedvalues.Format(target[types.InputHeaders]) + rawHeader, err := typedvalues.Unwrap(target[types.InputHeaders]) assert.NoError(t, err) assert.IsType(t, map[string]interface{}{}, rawHeader) headers := rawHeader.(map[string]interface{}) assert.Equal(t, nil, headers["nonExistent"]) // Check query - rawQuery, err := typedvalues.Format(target[types.InputQuery]) + rawQuery, err := typedvalues.Unwrap(target[types.InputQuery]) assert.NoError(t, err) assert.IsType(t, map[string]interface{}{}, rawQuery) query := rawQuery.(map[string]interface{}) @@ -141,10 +174,3 @@ func createRequest(method string, rawURL string, headers map[string]string, body Body: body, } } - -func unsafe(i *types.TypedValue, e error) *types.TypedValue { - if e != nil { - panic(e) - } - return i -} diff --git a/pkg/types/typedvalues/httpconv/mappers.go b/pkg/types/typedvalues/httpconv/mappers.go new file mode 100644 index 00000000..5cb55b21 --- /dev/null +++ b/pkg/types/typedvalues/httpconv/mappers.go @@ -0,0 +1,267 @@ +package httpconv + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "reflect" + + "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/util/mediatype" + "github.com/golang/protobuf/jsonpb" + "github.com/golang/protobuf/proto" + "github.com/pkg/errors" +) + +// TODO set original content type as metadata +// TODO support multipart/form-data + +var ( + // Common media types + MediaTypeBytes = mediatype.MustParse("application/octet-stream") + MediaTypeJSON = mediatype.MustParse("application/json") + MediaTypeProtobuf = mediatype.MustParse("application/protobuf") + MediaTypeText = mediatype.MustParse("text/plain") + + // Media type parameter for protobuf message 
addressing. + messageTypeParam = "proto" + ErrMessageTypeNotFound = errors.New("media type did not contain message type parameter") + + // Default implementations + jsonMapper = &JSONMapper{} + textMapper = &TextMapper{} + bytesMapper = &BytesMapper{} + protobufMapper = &ProtobufMapper{} +) + +type ParserFormatter interface { + Formatter + Parser +} + +type Parser interface { + Parse(mt *mediatype.MediaType, reader io.Reader) (*typedvalues.TypedValue, error) +} + +type Formatter interface { + Format(w http.ResponseWriter, body *typedvalues.TypedValue) error +} + +type BytesMapper struct { +} + +func (p *BytesMapper) Parse(mt *mediatype.MediaType, reader io.Reader) (*typedvalues.TypedValue, error) { + bs, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + return typedvalues.Wrap(bs) +} + +func (p *BytesMapper) Format(w http.ResponseWriter, body *typedvalues.TypedValue) error { + var bs []byte + switch body.ValueType() { + case typedvalues.TypeString: + s, err := typedvalues.UnwrapString(body) + if err != nil { + return err + } + bs = []byte(s) + case typedvalues.TypeBytes: + b, err := typedvalues.UnwrapBytes(body) + if err != nil { + return err + } + bs = b + case typedvalues.TypeNil: + // Do nothing + default: + return errors.Wrapf(typedvalues.ErrUnsupportedType, "cannot format %s to bytes", body.ValueType()) + } + _, err := w.Write(bs) + if err != nil { + return err + } + + mediatype.SetContentTypeHeader(MediaTypeBytes, w) + return nil +} + +type JSONMapper struct { +} + +func (m *JSONMapper) Format(w http.ResponseWriter, body *typedvalues.TypedValue) error { + i, err := typedvalues.Unwrap(body) + if err != nil { + return err + } + bs, err := json.Marshal(i) + if err != nil { + return errors.WithStack(err) + } + _, err = w.Write(bs) + if err != nil { + return err + } + mt := MediaTypeJSON.Copy() + mt.SetParam(messageTypeParam, body.GetValue().GetTypeUrl()) + mediatype.SetContentTypeHeader(mt, w) + return nil +} + +func (m *JSONMapper) Parse(mt *mediatype.MediaType, reader io.Reader) (*typedvalues.TypedValue, error) { + data, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + + // We borrow the proto parameter to do type inference + if msgName, ok := mt.Parameters[messageTypeParam]; ok { + // try to use type + tv, err := m.parseJSONWithType(data, msgName) + if err != nil { + return nil, err + } + return tv, nil + } + + // Alternatively, we use JSON's dynamic structure + var i interface{} + err = json.Unmarshal(data, &i) + if err != nil { + return nil, errors.WithStack(err) + } + return typedvalues.Wrap(i) +} + +func (m *JSONMapper) parseJSONWithType(data []byte, msgName string) (*typedvalues.TypedValue, error) { + msgType := proto.MessageType(msgName) + if msgType == nil { + return nil, ErrMessageTypeNotFound + } + + msg := reflect.New(msgType).Interface() + err := json.Unmarshal(data, msg) + if err != nil { + return nil, errors.WithStack(err) + } + + return typedvalues.Wrap(msg) +} + +type TextMapper struct{} + +func (m *TextMapper) Format(w http.ResponseWriter, body *typedvalues.TypedValue) error { + // The TextMapper is permissive, all types that make sense to convert to a string, are supported. 
+ i, err := typedvalues.Unwrap(body) + if err != nil { + return err + } + var output string + switch body.ValueType() { + case typedvalues.TypeExpression: + fallthrough + case typedvalues.TypeString: + output = i.(string) + case typedvalues.TypeBytes: + output = string(i.([]byte)) + case typedvalues.TypeUInt64: + fallthrough + case typedvalues.TypeUInt32: + fallthrough + case typedvalues.TypeInt32: + fallthrough + case typedvalues.TypeInt64: + fallthrough + case typedvalues.TypeFloat32: + fallthrough + case typedvalues.TypeFloat64: + output = fmt.Sprintf("%d", i) + case typedvalues.TypeNil: + // Do nothing + default: + return errors.Wrapf(typedvalues.ErrUnsupportedType, "could not format %s to text", body.ValueType()) + } + _, err = w.Write([]byte(output)) + if err != nil { + return err + } + mediatype.SetContentTypeHeader(MediaTypeText, w) + return nil +} + +func (m *TextMapper) Parse(mt *mediatype.MediaType, reader io.Reader) (*typedvalues.TypedValue, error) { + bs, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + return typedvalues.Wrap(string(bs)) +} + +type ProtobufMapper struct{} + +func (p *ProtobufMapper) Format(w http.ResponseWriter, body *typedvalues.TypedValue) error { + _, err := w.Write(body.GetValue().Value) + if err != nil { + return err + } + + mt := MediaTypeProtobuf.Copy() + mt.SetParam(messageTypeParam, body.GetValue().GetTypeUrl()) + mediatype.SetContentTypeHeader(mt, w) + return nil +} + +func (p *ProtobufMapper) Parse(mt *mediatype.MediaType, reader io.Reader) (*typedvalues.TypedValue, error) { + // Do not check if the MediaType actually is application/protobuf, to allow users to parse mislabeled MediaTypes. + + // Fetch the message type from the media type parameters + msgName, ok := mt.Parameters[messageTypeParam] + if !ok { + return nil, ErrMessageTypeNotFound + } + + switch mt.Suffix { + case "json": + return p.parseJSONWithType(reader, msgName) + default: + return p.parseProtoWithType(reader, msgName) + } +} + +func (p *ProtobufMapper) parseJSONWithType(reader io.Reader, msgName string) (*typedvalues.TypedValue, error) { + msgType := proto.MessageType(msgName) + if msgType == nil { + return nil, ErrMessageTypeNotFound + } + + msg := reflect.New(msgType).Interface().(proto.Message) + err := jsonpb.Unmarshal(reader, msg) + if err != nil { + return nil, errors.WithStack(err) + } + + return typedvalues.Wrap(msg) +} + +func (p *ProtobufMapper) parseProtoWithType(reader io.Reader, msgName string) (*typedvalues.TypedValue, error) { + bs, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + + msgType := proto.MessageType(msgName) + if msgType == nil { + return nil, ErrMessageTypeNotFound + } + + msg := reflect.New(msgType).Interface().(proto.Message) + err = proto.Unmarshal(bs, msg) + if err != nil { + return nil, errors.WithStack(err) + } + + return typedvalues.Wrap(msg) +} diff --git a/pkg/types/typedvalues/primitives.go b/pkg/types/typedvalues/primitives.go deleted file mode 100644 index c0258b12..00000000 --- a/pkg/types/typedvalues/primitives.go +++ /dev/null @@ -1,286 +0,0 @@ -package typedvalues - -import ( - "reflect" - - "github.com/fission/fission-workflows/pkg/types" - "github.com/golang/protobuf/proto" - "github.com/golang/protobuf/ptypes/wrappers" -) - -const ( - TypeBool ValueType = "bool" - TypeNumber ValueType = "number" - TypeNil ValueType = "nil" - TypeString ValueType = "string" - TypeBytes ValueType = "bytes" -) - -func IsPrimitive(v ValueType) bool { - return v == TypeBool || v == 
TypeNumber || v == TypeString || v == TypeNil || v == TypeBytes -} - -// var DefaultPrimitiveParserFormatter - -type BoolParserFormatter struct{} - -func (pf *BoolParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeBool, - } -} - -func (pf *BoolParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - b, ok := i.(bool) - if !ok { - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - - return ParseBool(b), nil -} - -func (pf *BoolParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return FormatBool(v) -} - -func ParseBool(b bool) *types.TypedValue { - var v byte - if b { - v = 1 - } - return &types.TypedValue{ - Type: string(TypeBool), - Value: []byte{v}, - } -} - -func FormatBool(v *types.TypedValue) (bool, error) { - err := verifyTypedValue(v, TypeBool) - if err != nil { - return false, err - } - if v.Value == nil || len(v.Value) < 1 { - return false, TypedValueErr{ - src: v, - err: ErrValueConversion, - } - } - return v.Value[0] == 1, nil -} - -type NumberParserFormatter struct { -} - -func (pf *NumberParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeNumber, - } -} - -func (pf *NumberParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - var f float64 - switch t := i.(type) { - case float32: - f = float64(t) - case float64: - f = t - case int64: - f = float64(t) - case int: - f = float64(t) - default: - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - tv := ParseNumber(f) - - t := reflect.TypeOf(i) - tv.SetLabel("original_type", t.String()) - return tv, nil -} - -func (pf *NumberParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return FormatNumber(v) -} - -// TODO support utility conversions for other number types -func ParseNumber(f float64) *types.TypedValue { - w := &wrappers.DoubleValue{Value: f} - bs, err := proto.Marshal(w) - if err != nil { - panic(err) - } - return &types.TypedValue{ - Type: string(TypeNumber), - Value: bs, - } -} - -func FormatNumber(v *types.TypedValue) (float64, error) { - if ValueType(v.Type) != TypeNumber { - return 0, TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - if v.Value == nil { - return 0, nil - } - w := &wrappers.DoubleValue{} - err := proto.Unmarshal(v.Value, w) - if err != nil { - return 0, TypedValueErr{ - src: v, - err: err, - } - } - - return w.Value, nil -} - -type NilParserFormatter struct { -} - -func (fp *NilParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeNil, - } -} - -func (fp *NilParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - if i != nil { - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - return ParseNil(), nil -} - -func (fp *NilParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return nil, FormatNil(v) -} - -func ParseNil() *types.TypedValue { - return &types.TypedValue{ - Type: string(TypeNil), - Value: nil, - } -} - -func FormatNil(v *types.TypedValue) error { - if v != nil && ValueType(v.Type) != TypeNil { - return TypedValueErr{ - src: v, - err: ErrValueConversion, - } - } - return nil -} - -type StringParserFormatter struct{} - -func (pf *StringParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeString, - } -} - -func (pf *StringParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - s, ok := i.(string) - if !ok { - return nil, TypedValueErr{ - src: i, - err: 
ErrUnsupportedType, - } - } - - return ParseString(s), nil -} - -func (pf *StringParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return FormatString(v) -} - -func ParseString(s string) *types.TypedValue { - return &types.TypedValue{ - Type: string(TypeString), - Value: []byte(s), - } -} - -func FormatString(v *types.TypedValue) (string, error) { - if ValueType(v.Type) != TypeString { - return "", TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - return string(v.Value), nil -} - -type BytesParserFormatter struct{} - -func (pf *BytesParserFormatter) Accepts() []ValueType { - return []ValueType{ - TypeBytes, - } -} - -func (pf *BytesParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - bs, ok := i.([]byte) - if !ok { - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } - } - - return ParseBytes(bs), nil -} - -func (pf *BytesParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return FormatBytes(v) -} - -func ParseBytes(bs []byte) *types.TypedValue { - return &types.TypedValue{ - Type: string(TypeBytes), - Value: bs, - } -} - -func FormatBytes(v *types.TypedValue) ([]byte, error) { - if ValueType(v.Type) != TypeBytes { - return nil, TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - return v.Value, nil -} - -type IdentityParserFormatter struct{} - -func (pf *IdentityParserFormatter) Accepts() []ValueType { - return []ValueType{} -} - -func (pf *IdentityParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - if tv, ok := i.(*types.TypedValue); ok { - return tv, nil - } - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, - } -} - -func (pf *IdentityParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - return v, nil -} diff --git a/pkg/types/typedvalues/priority.go b/pkg/types/typedvalues/priority.go new file mode 100644 index 00000000..e48c8617 --- /dev/null +++ b/pkg/types/typedvalues/priority.go @@ -0,0 +1,63 @@ +package typedvalues + +import ( + "sort" + "strconv" + + "github.com/sirupsen/logrus" +) + +const ( + MetadataPriority = "priority" +) + +// NamedInput provides the TypedValue along with an associated key. 
+type NamedInput struct { + Key string + Val *TypedValue +} + +type namedInputSlice []NamedInput + +func (n namedInputSlice) Len() int { return len(n) } +func (n namedInputSlice) Swap(i, j int) { n[i], n[j] = n[j], n[i] } +func (n namedInputSlice) Less(i, j int) bool { + return priority(n[i].Val) < priority(n[j].Val) +} + +func sortNamedInputSlices(inputs []NamedInput) { + sort.Sort(sort.Reverse(namedInputSlice(inputs))) +} + +func priority(t *TypedValue) int { + var p int + if ps, ok := t.GetMetadata()[MetadataPriority]; ok { + i, err := strconv.Atoi(ps) + if err != nil { + logrus.Warnf("Ignoring invalid priority: %v", ps) + } else { + p = i + } + } + return p +} + +func toNamedInputs(inputs map[string]*TypedValue) []NamedInput { + out := make([]NamedInput, len(inputs)) + var i int + for k, v := range inputs { + out[i] = NamedInput{ + Val: v, + Key: k, + } + i++ + } + return out +} + +// Prioritize sorts the inputs based on the priority label (descending order) +func Prioritize(inputs map[string]*TypedValue) []NamedInput { + namedInputs := toNamedInputs(inputs) + sortNamedInputSlices(namedInputs) + return namedInputs +} diff --git a/pkg/types/typedvalues/typedvalues.go b/pkg/types/typedvalues/typedvalues.go index 7ae5a4be..23bb780f 100644 --- a/pkg/types/typedvalues/typedvalues.go +++ b/pkg/types/typedvalues/typedvalues.go @@ -1,185 +1,478 @@ +// package typedvalues provides a data container for annotating, interpreting, and transferring arbitrary data. +// +// It revolves around the TypedValue struct type. Users typically serialize generic (though not entirely generic yet) +// Golang data to a TypedValue in order to serialize and transfer it. Users can set and get annotations from the +// TypedValue, which is for example used to preserve important headers from a HTTP request from which the entity +// was parsed. +// +// The package relies heavily on Protobuf. Besides the primitive types, it supports any entity implementing the +// proto.Message interface. Internally the TypedValue uses the Protobuf Golang implementation for serializing and +// deserializing the TypedValue. +// +// In Workflows TypedValues are used for: serialization, allowing it to store the data in the same format as the +// workflow structures; storing metadata of task outputs, by annotating the TypedValues; and data evaluation, +// by parsing and formatting task inputs and outputs into structured data (where possible). 
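// A minimal round-trip sketch for the Wrap/Unwrap API defined in this file. The values
// and metadata key are arbitrary; only fmt and this typedvalues package are assumed as
// imports.
func roundTripExample() error {
	// Wrap serializes a supported Go value into a TypedValue (here a map).
	tv, err := typedvalues.Wrap(map[string]interface{}{
		"answer":  float64(42),
		"message": "hello",
	})
	if err != nil {
		return err
	}

	// Annotations travel with the value; httpconv's ValueTypeResolver, for example,
	// checks for a Content-Type metadata entry.
	tv.SetMetadata("Content-Type", "application/json")

	// Unwrap restores the generic Go representation.
	val, err := typedvalues.Unwrap(tv)
	if err != nil {
		return err
	}
	fmt.Printf("%s: %v\n", tv.ValueType(), val)
	return nil
}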
package typedvalues import ( "fmt" + "regexp" + "strings" - "github.com/fission/fission-workflows/pkg/types" + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" + "github.com/golang/protobuf/ptypes/any" + "github.com/golang/protobuf/ptypes/wrappers" "github.com/pkg/errors" - "github.com/sirupsen/logrus" ) -type TypedValue = types.TypedValue +const ( + TypeUrlPrefix = "types.fission.io/" +) var ( - // TODO unsupported -> non-fatal - ErrUnsupportedType = errors.New("unsupported type") // Error to indicate parserFormatter cannot handle type - ErrValueConversion = errors.New("failed to convert") // Error to indicate parserFormatter internal error + expressionRe = regexp.MustCompile("^\\{(.*)\\}$") + ErrIllegalTypeAssertion = errors.New("illegal type assertion") + ErrUnsupportedType = errors.New("unsupported type") ) -type TypedValueErr struct { - src interface{} - err error +// TODO add caching to formatting and parsing + +func (m *TypedValue) ValueType() string { + if m == nil || m.Value == nil { + return "" + } + typeUrl := m.Value.TypeUrl + pos := strings.Index(typeUrl, TypeUrlPrefix) + if pos == 0 { + return typeUrl[len(TypeUrlPrefix):] + } + return typeUrl +} + +func (m *TypedValue) Interface() interface{} { + if m == nil || m.Value == nil { + return nil + } + return MustUnwrap(m) +} + +func (m *TypedValue) Float64() float64 { + val := m.Interface() + switch t := val.(type) { + case int32: + return float64(t) + case int64: + return float64(t) + case uint32: + return float64(t) + case uint64: + return float64(t) + case float32: + return float64(t) + case float64: + return float64(t) + default: + panic("TypedValue: not a number") + } } -func (tve TypedValueErr) Error() string { - return fmt.Sprintf("%v (%+v)", tve.err.Error(), tve.src) +// Short prints a short description of the Value +func (m *TypedValue) Short() string { + if m == nil { + return "" + } + return fmt.Sprintf("", m.ValueType(), len(m.GetValue().GetValue())) } -type ValueType = string +func (m *TypedValue) SetMetadata(k string, v string) *TypedValue { + if m == nil { + return m + } + if m.Metadata == nil { + m.Metadata = map[string]string{} + } + m.Metadata[k] = v -type Parser interface { - Parse(ctx Parser, i interface{}) (*types.TypedValue, error) + return m } -type Formatter interface { - Format(ctx Formatter, v *types.TypedValue) (interface{}, error) +func (m *TypedValue) GetMetadataValue(k string) (string, bool) { + if m == nil { + return "", false + } + + if m.Metadata == nil { + m.Metadata = map[string]string{} + } + v, ok := m.Metadata[k] + return v, ok } -type ParserFormatter interface { - Accepts() []ValueType - Parser - Formatter +func (m *TypedValue) Equals(other *TypedValue) bool { + return proto.Equal(m, other) } -var DefaultParserFormatter = newDefaultParserFormatter() +func Unwrap(tv *TypedValue) (interface{}, error) { + if tv == nil { + return nil, nil + } + + msg, err := UnwrapProto(tv) + if err != nil { + return nil, err + } + + var i interface{} + switch t := msg.(type) { + case *MapValue: + mapValue := make(map[string]interface{}, len(t.Value)) + for k, v := range t.Value { + entry, err := Unwrap(v) + if err != nil { + return TypedValue{}, errors.Wrapf(err, "failed to format map[%s]", k) + } + mapValue[k] = entry + } + i = mapValue + case *ArrayValue: + arrayValue := make([]interface{}, len(t.Value)) + for k, v := range t.Value { + entry, err := Unwrap(v) + if err != nil { + return TypedValue{}, errors.Wrapf(err, "failed to 
format array[%d]", k) + } + arrayValue[k] = entry + } + i = arrayValue + case *Expression: + i = t.Value + case *wrappers.BoolValue: + i = t.Value + case *wrappers.FloatValue: + i = t.Value + case *wrappers.DoubleValue: + i = t.Value + case *wrappers.Int32Value: + i = t.Value + case *wrappers.Int64Value: + i = t.Value + case *wrappers.UInt64Value: + i = t.Value + case *wrappers.UInt32Value: + i = t.Value + case *wrappers.StringValue: + i = t.Value + case *wrappers.BytesValue: + i = t.Value + case *NilValue: + i = nil + default: + // Message does not have to be unwrapped(?) + i = t + } -func Parse(i interface{}) (*types.TypedValue, error) { - return DefaultParserFormatter.Parse(DefaultParserFormatter, i) + return i, nil } -func Format(v *types.TypedValue) (interface{}, error) { - return DefaultParserFormatter.Format(DefaultParserFormatter, v) +func Wrap(val interface{}) (*TypedValue, error) { + var msg proto.Message + switch t := val.(type) { + case *TypedValue: + return t, nil + case []*TypedValue: + msg = &ArrayValue{Value: t} + case map[string]*TypedValue: + msg = &MapValue{Value: t} + case map[string]interface{}: + values := make(map[string]*TypedValue, len(t)) + for k, v := range t { + tv, err := Wrap(v) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse map[%s]", k) + } + values[k] = tv + } + msg = &MapValue{Value: values} + case []interface{}: + values := make([]*TypedValue, len(t)) + for i, v := range t { + tv, err := Wrap(v) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse array[%d]", i) + } + values[i] = tv + } + msg = &ArrayValue{Value: values} + case proto.Message: + msg = t + case string: + if expressionRe.MatchString(t) { + msg = &Expression{Value: t} + } else { + msg = &wrappers.StringValue{Value: t} + } + case bool: + msg = &wrappers.BoolValue{Value: t} + case float32: + msg = &wrappers.FloatValue{Value: t} + case float64: + msg = &wrappers.DoubleValue{Value: t} + case int64: + msg = &wrappers.Int64Value{Value: t} + case int: + msg = &wrappers.Int32Value{Value: int32(t)} + case int32: + msg = &wrappers.Int32Value{Value: t} + case uint32: + msg = &wrappers.UInt32Value{Value: t} + case uint64: + msg = &wrappers.UInt64Value{Value: t} + case []byte: + msg = &wrappers.BytesValue{Value: t} + case nil: + msg = &NilValue{} + default: + return nil, errors.Wrapf(ErrUnsupportedType, "parse %T", t) + } + marshaled, err := marshalAny(msg) + if err != nil { + return nil, err + } + // TODO cache already if safe (copied/cloned/primitive) + return &TypedValue{ + Value: marshaled, + }, nil } -// MustParse transforms the value into a TypedValue or panics. -func MustParse(i interface{}) *types.TypedValue { - tv, err := Parse(i) +func MustWrap(val interface{}) *TypedValue { + tv, err := Wrap(val) if err != nil { panic(err) } return tv } -// MustParse transforms the TypedValue into a value or panics. 
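// Protobuf messages also round-trip through TypedValue: Wrap (above) accepts any
// proto.Message directly, and UnwrapProto (below) restores it via the registered message
// type. TaskSpec is used purely as an example message from this repository; assumed
// imports are fmt and the types/typedvalues packages.
func wrapProtoExample() error {
	spec := types.NewTaskSpec("noop")

	tv, err := typedvalues.Wrap(spec)
	if err != nil {
		return err
	}

	msg, err := typedvalues.UnwrapProto(tv)
	if err != nil {
		return err
	}
	// msg is a *types.TaskSpec again; a type assertion recovers the concrete type.
	restored, ok := msg.(*types.TaskSpec)
	if !ok {
		return fmt.Errorf("unexpected message type %T", msg)
	}
	fmt.Println(restored)
	return nil
}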
-func MustFormat(tv *types.TypedValue) interface{} { - i, err := Format(tv) +func MustUnwrap(tv *TypedValue) interface{} { + i, err := Unwrap(tv) if err != nil { panic(err) } return i } -// ComposedParserFormatter is used to group multiple ParserFormatters together -type ComposedParserFormatter struct { - formatters map[ValueType][]Formatter - parsers []Parser // TODO also limit to types for parsers - supported []ValueType +func UnwrapString(tv *TypedValue) (string, error) { + i, err := Unwrap(tv) + if err != nil { + return "", err + } + + if s, ok := i.(string); ok { + return s, nil + } + + if bs, ok := i.([]byte); ok { + return string(bs), nil + } + + return "", errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to string", tv.ValueType()) } -func (pf *ComposedParserFormatter) Accepts() []ValueType { - return pf.supported +func UnwrapProto(tv *TypedValue) (proto.Message, error) { + var dynamic ptypes.DynamicAny + err := ptypes.UnmarshalAny(tv.Value, &dynamic) + if err != nil { + return nil, errors.WithStack(err) + } + + return dynamic.Message, nil } -func (pf *ComposedParserFormatter) Parse(ctx Parser, i interface{}) (*types.TypedValue, error) { - for _, parser := range pf.parsers { - logrus.Debugf("Trying to parse with: %T", parser) - tv, err := parser.Parse(ctx, i) - if err != nil { - logrus.Debugf("Parser error: %v", err) - if isErrUnsupported(err) { - continue - } else { - return nil, err - } - } - return tv, nil +func UnwrapBytes(tv *TypedValue) ([]byte, error) { + i, err := Unwrap(tv) + if err != nil { + return nil, err } - logrus.Debugf("No parsers for %T", i) - return nil, TypedValueErr{ - src: i, - err: ErrUnsupportedType, + + if d, ok := i.([]byte); ok { + return d, nil + } + if s, ok := i.(string); ok { + return []byte(s), nil } + + return nil, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to bytes", tv.ValueType()) } -func (pf *ComposedParserFormatter) Format(ctx Formatter, v *types.TypedValue) (interface{}, error) { - if v == nil || (len(v.Type) == 0 && len(v.Value) == 0) { - return nil, nil +func UnwrapBool(tv *TypedValue) (bool, error) { + i, err := Unwrap(tv) + if err != nil { + return false, err } - formatters, ok := pf.formatters[ValueType(v.Type)] + + s, ok := i.(bool) if !ok { - logrus.Warnf("No known formatter for: %v (%+v) (%v, %v)", v.Type, v, len(v.Type), len(v.Value)) - return 0, TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } + return false, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to bool", tv.ValueType()) + } + return s, nil +} + +func UnwrapArray(tv *TypedValue) ([]interface{}, error) { + i, err := Unwrap(tv) + if err != nil { + return nil, err + } + + s, ok := i.([]interface{}) + if !ok { + return nil, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to array", tv.ValueType()) + } + return s, nil +} + +func UnwrapTypedValueArray(tv *TypedValue) ([]*TypedValue, error) { + arrayWrapper := &ArrayValue{} + err := ptypes.UnmarshalAny(tv.Value, arrayWrapper) + if err != nil { + return nil, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to TypedValue-array", tv.ValueType()) + } + + arrayValue := make([]*TypedValue, len(arrayWrapper.Value)) + for k, v := range arrayWrapper.Value { + arrayValue[k] = v } - logrus.Debugf("Formatter options for %v: %T", v.Type, formatters) - for _, formatter := range formatters { - logrus.Debugf("Trying to format with: %T", formatter) - tv, err := formatter.Format(ctx, v) + return arrayValue, nil +} + +func UnwrapTypedValueMap(tv *TypedValue) (map[string]*TypedValue, 
error) { + mapWrapper := &MapValue{} + err := ptypes.UnmarshalAny(tv.Value, mapWrapper) + if err != nil { + return nil, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to TypedValue-map", tv.ValueType()) + } + + mapValue := make(map[string]*TypedValue, len(mapWrapper.Value)) + for k, v := range mapWrapper.Value { + mapValue[k] = v + } + return mapValue, nil +} + +func UnwrapMapTypedValue(tvs map[string]*TypedValue) (map[string]interface{}, error) { + tv, err := Wrap(tvs) + if err != nil { + return nil, err + } + return UnwrapMap(tv) +} + +func WrapMapTypedValue(tvs map[string]interface{}) (map[string]*TypedValue, error) { + entries := make(map[string]*TypedValue, len(tvs)) + for k, v := range tvs { + e, err := Wrap(v) if err != nil { - logrus.Warnf("Formatter error %t", err) - if isErrUnsupported(err) { - continue - } else { - return nil, err - } + return nil, err } - logrus.Debugf("Formatter success: %t", tv) - return tv, nil + entries[k] = e } - return nil, TypedValueErr{ - src: v, - err: ErrUnsupportedType, + return entries, nil +} + +func MustWrapMapTypedValue(tvs map[string]interface{}) map[string]*TypedValue { + tv, err := WrapMapTypedValue(tvs) + if err != nil { + panic(err) } + return tv } -func NewComposedParserFormatter(pfs []ParserFormatter) *ComposedParserFormatter { - formatters := map[ValueType][]Formatter{} - var parsers []Parser +func UnwrapMap(tv *TypedValue) (map[string]interface{}, error) { + i, err := Unwrap(tv) + if err != nil { + return nil, err + } - for _, v := range pfs { - parsers = append(parsers, v) - for _, t := range v.Accepts() { - vts := formatters[t] - if vts == nil { - vts = []Formatter{} - } - vts = append(vts, v) - formatters[t] = vts - } + s, ok := i.(map[string]interface{}) + if !ok { + return nil, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to map", tv.ValueType()) } + return s, nil +} - var ts []ValueType - for k := range formatters { - ts = append(ts, k) +// TODO reduce verbosity of these numberic implementations +func UnwrapInt64(tv *TypedValue) (int64, error) { + i, err := Unwrap(tv) + if err != nil { + return 0, err } - return &ComposedParserFormatter{ - formatters: formatters, - parsers: parsers, - supported: ts, + switch t := i.(type) { + case int64: + return int64(t), nil + case int32: + return int64(t), nil + case uint32: + return int64(t), nil + case uint64: + return int64(t), nil + case float32: + return int64(t), nil + case float64: + return int64(t), nil + default: + return 0, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to int64", tv.ValueType()) } } -func isErrUnsupported(err error) bool { - if tverr, ok := err.(TypedValueErr); ok { - return errors.Cause(tverr.err) == ErrUnsupportedType +func UnwrapFloat64(tv *TypedValue) (float64, error) { + i, err := Unwrap(tv) + if err != nil { + return 0, err } - return false + + switch t := i.(type) { + case int64: + return float64(t), nil + case int32: + return float64(t), nil + case uint32: + return float64(t), nil + case uint64: + return float64(t), nil + case float32: + return float64(t), nil + case float64: + return float64(t), nil + default: + return 0, errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to float64", tv.ValueType()) + } +} + +func UnwrapExpression(tv *TypedValue) (string, error) { + s, err := UnwrapString(tv) + if err != nil { + return "", err + } + + if !IsExpression(s) { + return "", errors.Wrapf(ErrIllegalTypeAssertion, "failed to unwrap %s to expression", tv.ValueType()) + } + return s, nil } -func newDefaultParserFormatter() 
ParserFormatter { - return NewComposedParserFormatter([]ParserFormatter{ - &IdentityParserFormatter{}, - &MapParserFormatter{}, - &ListParserFormatter{}, - &ControlFlowParserFormatter{}, - &ExpressionParserFormatter{}, - &BoolParserFormatter{}, - &NumberParserFormatter{}, - &StringParserFormatter{}, - &NilParserFormatter{}, - &BytesParserFormatter{}, - }) +func RemoveExpressionDelimiters(expr string) string { + return expressionRe.ReplaceAllString(expr, "$1") +} + +func IsExpression(s string) bool { + return expressionRe.MatchString(s) +} + +// marshalAny takes the protocol buffer and encodes it into google.protobuf.Any, without prepending the Google API URL. +func marshalAny(pb proto.Message) (*any.Any, error) { + value, err := proto.Marshal(pb) + if err != nil { + return nil, errors.WithStack(err) + } + return &any.Any{TypeUrl: TypeUrlPrefix + proto.MessageName(pb), Value: value}, nil } diff --git a/pkg/types/typedvalues/typedvalues.pb.go b/pkg/types/typedvalues/typedvalues.pb.go new file mode 100644 index 00000000..155aa3b2 --- /dev/null +++ b/pkg/types/typedvalues/typedvalues.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: pkg/types/typedvalues/typedvalues.proto + +/* +Package typedvalues is a generated protocol buffer package. + +It is generated from these files: + pkg/types/typedvalues/typedvalues.proto + +It has these top-level messages: + TypedValue + Expression + MapValue + ArrayValue + NilValue +*/ +package typedvalues + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// TypedValue is used to serialize, deserialize, transfer data values across the workflow engine. +// +// It consists partly copy of protobuf's Any, to avoid protobuf requirement of a protobuf-based type. +type TypedValue struct { + // Value holds the actual value in a serialized form. + Value *google_protobuf.Any `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` + // Labels hold metadata about the value. It is used for example to store origins of data, past transformations, + // and information needed by serialization processes. 
+ Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *TypedValue) Reset() { *m = TypedValue{} } +func (m *TypedValue) String() string { return proto.CompactTextString(m) } +func (*TypedValue) ProtoMessage() {} +func (*TypedValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *TypedValue) GetValue() *google_protobuf.Any { + if m != nil { + return m.Value + } + return nil +} + +func (m *TypedValue) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +type Expression struct { + Value string `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` +} + +func (m *Expression) Reset() { *m = Expression{} } +func (m *Expression) String() string { return proto.CompactTextString(m) } +func (*Expression) ProtoMessage() {} +func (*Expression) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Expression) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +type MapValue struct { + Value map[string]*TypedValue `protobuf:"bytes,1,rep,name=value" json:"value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *MapValue) Reset() { *m = MapValue{} } +func (m *MapValue) String() string { return proto.CompactTextString(m) } +func (*MapValue) ProtoMessage() {} +func (*MapValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *MapValue) GetValue() map[string]*TypedValue { + if m != nil { + return m.Value + } + return nil +} + +type ArrayValue struct { + Value []*TypedValue `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` +} + +func (m *ArrayValue) Reset() { *m = ArrayValue{} } +func (m *ArrayValue) String() string { return proto.CompactTextString(m) } +func (*ArrayValue) ProtoMessage() {} +func (*ArrayValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *ArrayValue) GetValue() []*TypedValue { + if m != nil { + return m.Value + } + return nil +} + +type NilValue struct { +} + +func (m *NilValue) Reset() { *m = NilValue{} } +func (m *NilValue) String() string { return proto.CompactTextString(m) } +func (*NilValue) ProtoMessage() {} +func (*NilValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func init() { + proto.RegisterType((*TypedValue)(nil), "fission.workflows.types.TypedValue") + proto.RegisterType((*Expression)(nil), "fission.workflows.types.Expression") + proto.RegisterType((*MapValue)(nil), "fission.workflows.types.MapValue") + proto.RegisterType((*ArrayValue)(nil), "fission.workflows.types.ArrayValue") + proto.RegisterType((*NilValue)(nil), "fission.workflows.types.NilValue") +} + +func init() { proto.RegisterFile("pkg/types/typedvalues/typedvalues.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x2f, 0xc8, 0x4e, 0xd7, + 0x2f, 0xa9, 0x2c, 0x48, 0x2d, 0x06, 0x93, 0x29, 0x65, 0x89, 0x39, 0xa5, 0xa8, 0x6c, 0xbd, 0x82, + 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0xf1, 0xb4, 0xcc, 0xe2, 0xe2, 0xcc, 0xfc, 0x3c, 0xbd, 0xf2, 0xfc, + 0xa2, 0xec, 0xb4, 0x9c, 0xfc, 0xf2, 0x62, 0x3d, 0xb0, 0x36, 0x29, 0xc9, 0xf4, 0xfc, 0xfc, 0xf4, + 0x9c, 0x54, 0x7d, 0xb0, 0xb2, 0xa4, 0xd2, 0x34, 0xfd, 0xc4, 0xbc, 0x4a, 0x88, 0x1e, 0xa5, 0x23, + 0x8c, 0x5c, 0x5c, 0x21, 0x20, 0x93, 0xc2, 0x40, 0x26, 0x09, 0x69, 0x71, 0xb1, 
0x82, 0x8d, 0x94, + 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x36, 0x12, 0xd1, 0x83, 0xe8, 0xd4, 0x83, 0xe9, 0xd4, 0x73, 0xcc, + 0xab, 0x0c, 0x82, 0x28, 0x11, 0xf2, 0xe5, 0xe2, 0xc8, 0x4d, 0x2d, 0x49, 0x4c, 0x49, 0x2c, 0x49, + 0x94, 0x60, 0x56, 0x60, 0xd6, 0xe0, 0x36, 0x32, 0xd4, 0xc3, 0xe1, 0x02, 0x3d, 0x84, 0x15, 0x7a, + 0xbe, 0x50, 0x3d, 0xae, 0x79, 0x25, 0x45, 0x95, 0x41, 0x70, 0x23, 0xa4, 0xac, 0xb9, 0x78, 0x51, + 0xa4, 0x84, 0x04, 0xb8, 0x98, 0xb3, 0x53, 0x2b, 0xc1, 0x2e, 0xe1, 0x0c, 0x02, 0x31, 0x85, 0x44, + 0x60, 0xae, 0x63, 0x02, 0x8b, 0x41, 0x38, 0x56, 0x4c, 0x16, 0x8c, 0x4a, 0x4a, 0x5c, 0x5c, 0xae, + 0x15, 0x05, 0x45, 0xa9, 0x60, 0xdb, 0x11, 0xea, 0x18, 0x91, 0xd4, 0x29, 0xad, 0x65, 0xe4, 0xe2, + 0xf0, 0x4d, 0x2c, 0x80, 0x78, 0xd4, 0x09, 0xa1, 0x04, 0xe4, 0x72, 0x1d, 0x9c, 0x2e, 0x87, 0xe9, + 0xd0, 0x03, 0x93, 0x10, 0x47, 0x43, 0xb4, 0x4a, 0xc5, 0x72, 0x71, 0x21, 0x04, 0xb1, 0x38, 0xd7, + 0x12, 0xd9, 0xb9, 0xdc, 0x46, 0xca, 0x44, 0x84, 0x0e, 0xb2, 0x9f, 0xdc, 0xb9, 0xb8, 0x1c, 0x8b, + 0x8a, 0x12, 0x2b, 0x21, 0x0e, 0xb6, 0x44, 0x75, 0x30, 0x09, 0x86, 0x29, 0x71, 0x71, 0x71, 0xf8, + 0x65, 0xe6, 0x80, 0x85, 0x9c, 0x78, 0xa3, 0xb8, 0x91, 0x12, 0x4e, 0x12, 0x1b, 0x38, 0x62, 0x8d, + 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x80, 0xbc, 0x9e, 0x1f, 0x64, 0x02, 0x00, 0x00, +} diff --git a/pkg/types/typedvalues/typedvalues.proto b/pkg/types/typedvalues/typedvalues.proto new file mode 100644 index 00000000..3c5e266f --- /dev/null +++ b/pkg/types/typedvalues/typedvalues.proto @@ -0,0 +1,33 @@ +syntax = "proto3"; + +package fission.workflows.types; +option go_package = "typedvalues"; + +import "google/protobuf/any.proto"; + +// TypedValue is used to serialize, deserialize, transfer data values across the workflow engine. +// +// It consists partly copy of protobuf's Any, to avoid protobuf requirement of a protobuf-based type. +message TypedValue { + + // Value holds the actual value in a serialized form. + google.protobuf.Any value = 1; + + // Labels hold metadata about the value. It is used for example to store origins of data, past transformations, + // and information needed by serialization processes. + map metadata = 3; +} + +message Expression { + string value = 1; +} + +message MapValue { + map value = 1; +} + +message ArrayValue { + repeated TypedValue value = 1; +} + +message NilValue {} \ No newline at end of file diff --git a/pkg/types/typedvalues/typedvalues_test.go b/pkg/types/typedvalues/typedvalues_test.go index 87cbf120..6cfd9ff6 100644 --- a/pkg/types/typedvalues/typedvalues_test.go +++ b/pkg/types/typedvalues/typedvalues_test.go @@ -5,24 +5,113 @@ import ( "testing" "time" - "github.com/fission/fission-workflows/pkg/types" "github.com/stretchr/testify/assert" ) +type testCase struct { + name string + input interface{} + expectedType string +} + +// parseFormatTestCases provides a suit of test cases. +// +// It is a function instead of variable because of the package initialization sequence. 
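// A short sketch of the expression handling referenced by the test cases below: Wrap
// treats any string matching "{...}" as an Expression, while other strings stay plain
// strings. The expression text itself is illustrative; assumed imports are fmt and this
// typedvalues package.
func expressionExample() {
	raw := "{ $.Tasks.foo.Output }"
	tv := typedvalues.MustWrap(raw)

	if typedvalues.IsExpression(raw) {
		expr, err := typedvalues.UnwrapExpression(tv)
		if err == nil {
			// Strip the surrounding braces to obtain the expression body.
			fmt.Println(typedvalues.RemoveExpressionDelimiters(expr))
		}
	}
}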
+func parseFormatTestCases() []testCase { + return []testCase{ + { + input: nil, + expectedType: TypeNil, + }, + { + input: true, + expectedType: TypeBool, + }, + { + input: false, + expectedType: TypeBool, + }, + { + input: float64(0), + expectedType: TypeFloat64, + }, + { + input: float64(42), + expectedType: TypeFloat64, + }, + { + input: []byte("foo bar"), + expectedType: TypeBytes, + }, + { + input: []byte(nil), + expectedType: TypeBytes, + }, + { + input: "", + expectedType: TypeString, + }, + { + input: "foo bar", + expectedType: TypeString, + }, + { + input: "{", + expectedType: TypeString, + }, + { + input: "{foo}", + expectedType: TypeExpression, + }, + { + input: "{}", + expectedType: TypeExpression, + }, + { + input: []interface{}{}, + expectedType: TypeList, + }, + { + input: []interface{}{float64(42), "foo"}, + expectedType: TypeList, + }, + { + input: map[string]interface{}{"foo": float64(42), "bar": true}, + expectedType: TypeMap, + }, + { + input: map[string]interface{}{}, + expectedType: TypeMap, + }, + { + // Complex + name: "recursiveList", + input: []interface{}{[]interface{}{[]interface{}{"foo"}}}, + expectedType: TypeList, + }, + { + // Complex + name: "recursiveMap", + input: map[string]interface{}{"a": map[string]interface{}{"b": map[string]interface{}{"c": "{d}"}}}, + expectedType: TypeMap, + }, + } +} + func TestValueTester(t *testing.T) { var i int - for _, testCase := range parseFormatTests { + for _, testCase := range parseFormatTestCases() { testName := testCase.name if len(testName) == 0 { testName = fmt.Sprintf("%d_%v", i, testCase.expectedType) } t.Run(testName, func(t *testing.T) { fmt.Printf("Input: %+v\n", testCase) - tv, err := Parse(testCase.input) + tv, err := Wrap(testCase.input) fmt.Printf("Typed value: %+v\n", tv) assert.NoError(t, err) - assert.Equal(t, testCase.expectedType, ValueType(tv.Type)) - i, err := Format(tv) + assert.Equal(t, testCase.expectedType, tv.ValueType()) + i, err := Unwrap(tv) assert.NoError(t, err) assert.Equal(t, testCase.input, i) fmt.Printf("Output: %+v\n", i) @@ -32,101 +121,21 @@ func TestValueTester(t *testing.T) { time.Sleep(100 * time.Millisecond) } -type testCase struct { - name string - input interface{} - expectedType ValueType -} +func BenchmarkParse(b *testing.B) { + for _, testCase := range parseFormatTestCases() { + b.Run(testCase.expectedType+"_parse", func(b *testing.B) { + for n := 0; n < b.N; n++ { + Wrap(testCase.input) + } + }) + } + for _, testCase := range parseFormatTestCases() { + tv, _ := Wrap(testCase.input) -var parseFormatTests = []testCase{ - { - input: nil, - expectedType: TypeNil, - }, - { - input: true, - expectedType: TypeBool, - }, - { - input: false, - expectedType: TypeBool, - }, - { - input: float64(0), - expectedType: TypeNumber, - }, - { - input: float64(42), - expectedType: TypeNumber, - }, - { - input: []byte("foo bar"), - expectedType: TypeBytes, - }, - { - input: []byte{}, - expectedType: TypeBytes, - }, - { - input: "", - expectedType: TypeString, - }, - { - input: "foo bar", - expectedType: TypeString, - }, - { - input: "{", - expectedType: TypeString, - }, - { - input: "{foo}", - expectedType: TypeExpression, - }, - { - input: "{}", - expectedType: TypeExpression, - }, - { - input: []interface{}{}, - expectedType: TypeList, - }, - { - input: []interface{}{float64(42), "foo"}, - expectedType: TypeList, - }, - { - input: map[string]interface{}{"foo": float64(42), "bar": true}, - expectedType: TypeMap, - }, - { - input: map[string]interface{}{}, - expectedType: TypeMap, - }, - { - 
// Complex - name: "recursiveList", - input: []interface{}{[]interface{}{[]interface{}{"foo"}}}, - expectedType: TypeList, - }, - { - // Complex - name: "recursiveMap", - input: map[string]interface{}{"a": map[string]interface{}{"b": map[string]interface{}{"c": "{d}"}}}, - expectedType: TypeMap, - }, - { - input: types.NewTaskSpec("fn1").Input("inputK", MustParse("e2")), - expectedType: TypeTask, - }, - { - input: &types.WorkflowSpec{ - ApiVersion: "v1", - OutputTask: "t1", - Tasks: types.Tasks{ - "t1": types.NewTaskSpec("fn1").Input("inputK", MustParse("e2")), - }, - }, - expectedType: TypeWorkflow, - }, + b.Run(testCase.expectedType+"_format", func(b *testing.B) { + for n := 0; n < b.N; n++ { + Unwrap(tv) + } + }) + } } diff --git a/pkg/types/typedvalues/util.go b/pkg/types/typedvalues/util.go deleted file mode 100644 index 2b2ebcb0..00000000 --- a/pkg/types/typedvalues/util.go +++ /dev/null @@ -1,152 +0,0 @@ -package typedvalues - -import ( - "errors" - "sort" - "strconv" - - "github.com/fission/fission-workflows/pkg/types" - "github.com/sirupsen/logrus" -) - -// TODO move to more appropriate package -func ResolveTaskOutput(taskID string, invoc *types.WorkflowInvocation) *types.TypedValue { - val, ok := invoc.Status.Tasks[taskID] - if !ok { - return nil - } - - output := val.Status.Output - if output == nil { - return nil - } - - // TODO to flow - switch ValueType(output.Type) { - case TypeTask: - for outputTaskID, outputTask := range invoc.Status.DynamicTasks { - if dep, ok := outputTask.Spec.Requires[taskID]; ok && dep.Type == types.TaskDependencyParameters_DYNAMIC_OUTPUT { - return ResolveTaskOutput(outputTaskID, invoc) - } - } - return nil - case TypeWorkflow: - for outputTaskID, outputTask := range invoc.Status.DynamicTasks { - if dep, ok := outputTask.Spec.Requires[taskID]; ok && dep.Type == types.TaskDependencyParameters_DYNAMIC_OUTPUT { - return ResolveTaskOutput(outputTaskID, invoc) - } - } - return nil - } - return output -} - -func FormatMap(t *types.TypedValue) (map[string]interface{}, error) { - i, err := Format(t) - if err != nil { - return nil, err - } - v, ok := i.(map[string]interface{}) - if !ok { - return nil, errors.New("invalid type") - } - return v, nil -} - -func FormatArray(t *types.TypedValue) ([]interface{}, error) { - i, err := Format(t) - if err != nil { - return nil, err - } - v, ok := i.([]interface{}) - if !ok { - return nil, errors.New("invalid type") - } - return v, nil -} - -func Input(i interface{}) types.Inputs { - in := types.Inputs{} - in[types.InputMain] = MustParse(i) - return in -} - -func verifyTypedValue(v *types.TypedValue, acceptableTypes ...ValueType) error { - if v == nil { - return TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - if !IsType(v, acceptableTypes...) { - return TypedValueErr{ - src: v, - err: ErrUnsupportedType, - } - } - return nil -} - -func IsType(v *types.TypedValue, ts ...ValueType) bool { - if v == nil { - return false - } - vt := ValueType(v.Type) - for _, t := range ts { - if t == vt { - return true - } - } - return false -} - -// NamedInput provides the value along with the associated key. 
-type NamedInput struct { - Key string - Val *types.TypedValue -} - -type namedInputSlice []NamedInput - -func (n namedInputSlice) Len() int { return len(n) } -func (n namedInputSlice) Swap(i, j int) { n[i], n[j] = n[j], n[i] } -func (n namedInputSlice) Less(i, j int) bool { - return priority(n[i].Val) < priority(n[j].Val) -} - -func sortNamedInputSlices(inputs []NamedInput) { - sort.Sort(sort.Reverse(namedInputSlice(inputs))) -} - -func priority(t *types.TypedValue) int { - var p int - if ps, ok := t.GetLabels()["priority"]; ok { - i, err := strconv.Atoi(ps) - if err != nil { - logrus.Warnf("Ignoring invalid priority: %v", ps) - } else { - p = i - } - } - return p -} - -func toNamedInputs(inputs map[string]*types.TypedValue) []NamedInput { - out := make([]NamedInput, len(inputs)) - var i int - for k, v := range inputs { - out[i] = NamedInput{ - Val: v, - Key: k, - } - i++ - } - return out -} - -// Prioritize sorts the inputs based on the priority label (descending order) -func Prioritize(inputs map[string]*types.TypedValue) []NamedInput { - namedInputs := toNamedInputs(inputs) - sortNamedInputSlices(namedInputs) - return namedInputs -} diff --git a/pkg/types/typedvalues/util_test.go b/pkg/types/typedvalues/util_test.go deleted file mode 100644 index 054f74e4..00000000 --- a/pkg/types/typedvalues/util_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package typedvalues - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestPrioritizeInputs(t *testing.T) { - invalidPrioInput := MustParse("bar") - invalidPrioInput.SetLabel("priority", "NaN") - regularInput := MustParse("foo") - regularInput.SetLabel("priority", "1") - prioInput := MustParse("zzz") - prioInput.SetLabel("priority", "2") - highPrioInput := MustParse("important") - highPrioInput.SetLabel("priority", "3") - inputs := Prioritize(map[string]*TypedValue{ - "a": invalidPrioInput, - "b": regularInput, - "c": prioInput, - "d": highPrioInput, - }) - assert.Equal(t, highPrioInput, inputs[0].Val) - assert.Equal(t, prioInput, inputs[1].Val) - assert.Equal(t, regularInput, inputs[2].Val) - assert.Equal(t, invalidPrioInput, inputs[3].Val) -} diff --git a/pkg/types/typedvalues/valuetypes.go b/pkg/types/typedvalues/valuetypes.go new file mode 100644 index 00000000..d4bda49f --- /dev/null +++ b/pkg/types/typedvalues/valuetypes.go @@ -0,0 +1,64 @@ +package typedvalues + +import ( + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes/wrappers" +) + +var ( + TypeBool string + TypeInt32 string + TypeInt64 string + TypeUInt32 string + TypeUInt64 string + TypeFloat32 string + TypeFloat64 string + TypeString string + TypeBytes string + TypeNil string + TypeExpression string + TypeMap string + TypeList string + TypeNumber []string + Types []string +) + +// Note: ensure that this file is lexically after the generated Protobufs because of package initialization order. 
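As a hedged illustration of what the constants in the init function below resolve to (not part of the patch): proto.MessageName returns the fully qualified protobuf message name, which typedvalues now uses as its type identifier instead of the package's previous ValueType constants.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/ptypes/wrappers"
)

func main() {
	// Each type constant becomes the registered protobuf message name of the
	// corresponding wrapper (or typedvalues) message.
	fmt.Println(proto.MessageName(&wrappers.BoolValue{}))   // google.protobuf.BoolValue
	fmt.Println(proto.MessageName(&wrappers.DoubleValue{})) // google.protobuf.DoubleValue
	fmt.Println(proto.MessageName(&wrappers.StringValue{})) // google.protobuf.StringValue
	fmt.Println(proto.MessageName(&wrappers.BytesValue{}))  // google.protobuf.BytesValue
}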
+func init() { + TypeBool = proto.MessageName(&wrappers.BoolValue{}) + TypeInt32 = proto.MessageName(&wrappers.Int32Value{}) + TypeInt64 = proto.MessageName(&wrappers.Int64Value{}) + TypeUInt32 = proto.MessageName(&wrappers.UInt32Value{}) + TypeUInt64 = proto.MessageName(&wrappers.UInt64Value{}) + TypeFloat32 = proto.MessageName(&wrappers.FloatValue{}) + TypeFloat64 = proto.MessageName(&wrappers.DoubleValue{}) + TypeString = proto.MessageName(&wrappers.StringValue{}) + TypeBytes = proto.MessageName(&wrappers.BytesValue{}) + TypeNil = proto.MessageName(&NilValue{}) + TypeExpression = proto.MessageName(&Expression{}) + TypeMap = proto.MessageName(&MapValue{}) + TypeList = proto.MessageName(&ArrayValue{}) + TypeNumber = []string{ + TypeFloat64, + TypeFloat32, + TypeInt32, + TypeInt64, + TypeUInt32, + TypeUInt64, + } + Types = []string{ + TypeBool, + TypeInt32, + TypeInt64, + TypeUInt32, + TypeUInt64, + TypeFloat32, + TypeFloat64, + TypeString, + TypeBytes, + TypeNil, + TypeExpression, + TypeMap, + TypeList, + } +} diff --git a/pkg/types/typedvalues/valuetypes_test.go b/pkg/types/typedvalues/valuetypes_test.go new file mode 100644 index 00000000..73957edd --- /dev/null +++ b/pkg/types/typedvalues/valuetypes_test.go @@ -0,0 +1,17 @@ +package typedvalues + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTypesGenerated(t *testing.T) { + for i, v := range Types { + t.Run(fmt.Sprintf("Types[%d]", i), func(t *testing.T) { + fmt.Printf("Types[%d] = %s\n", i, v) + assert.NotEmpty(t, v) + }) + } +} diff --git a/pkg/types/types.pb.go b/pkg/types/types.pb.go index e75e4caa..3d0eaa64 100644 --- a/pkg/types/types.pb.go +++ b/pkg/types/types.pb.go @@ -23,7 +23,6 @@ It has these top-level messages: TaskInvocationSpec TaskInvocationStatus ObjectMetadata - TypedValue Error FnRef TypedValueMap @@ -35,6 +34,7 @@ import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import google_protobuf "github.com/golang/protobuf/ptypes/timestamp" +import fission_workflows_types "github.com/fission/fission-workflows/pkg/types/typedvalues" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -392,8 +392,8 @@ func (m *WorkflowInvocation) GetStatus() *WorkflowInvocationStatus { // Workflow Invocation Model type WorkflowInvocationSpec struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflowId" json:"workflowId,omitempty"` - Inputs map[string]*TypedValue `protobuf:"bytes,2,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + WorkflowId string `protobuf:"bytes,1,opt,name=workflowId" json:"workflowId,omitempty"` + Inputs map[string]*fission_workflows_types.TypedValue `protobuf:"bytes,2,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // ParentId contains the id of the encapsulating workflow invocation. // // This used within the workflow engine; for user-provided workflow invocations the parentId is ignored. 
@@ -412,7 +412,7 @@ func (m *WorkflowInvocationSpec) GetWorkflowId() string { return "" } -func (m *WorkflowInvocationSpec) GetInputs() map[string]*TypedValue { +func (m *WorkflowInvocationSpec) GetInputs() map[string]*fission_workflows_types.TypedValue { if m != nil { return m.Inputs } @@ -427,10 +427,10 @@ func (m *WorkflowInvocationSpec) GetParentId() string { } type WorkflowInvocationStatus struct { - Status WorkflowInvocationStatus_Status `protobuf:"varint,1,opt,name=status,enum=fission.workflows.types.WorkflowInvocationStatus_Status" json:"status,omitempty"` - UpdatedAt *google_protobuf.Timestamp `protobuf:"bytes,2,opt,name=updatedAt" json:"updatedAt,omitempty"` - Tasks map[string]*TaskInvocation `protobuf:"bytes,3,rep,name=tasks" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Output *TypedValue `protobuf:"bytes,4,opt,name=output" json:"output,omitempty"` + Status WorkflowInvocationStatus_Status `protobuf:"varint,1,opt,name=status,enum=fission.workflows.types.WorkflowInvocationStatus_Status" json:"status,omitempty"` + UpdatedAt *google_protobuf.Timestamp `protobuf:"bytes,2,opt,name=updatedAt" json:"updatedAt,omitempty"` + Tasks map[string]*TaskInvocation `protobuf:"bytes,3,rep,name=tasks" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Output *fission_workflows_types.TypedValue `protobuf:"bytes,4,opt,name=output" json:"output,omitempty"` // In case the task ID also exists in the workflow spec, the dynamic task will be // used as an overlay over the static task. DynamicTasks map[string]*Task `protobuf:"bytes,5,rep,name=dynamicTasks" json:"dynamicTasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` @@ -463,7 +463,7 @@ func (m *WorkflowInvocationStatus) GetTasks() map[string]*TaskInvocation { return nil } -func (m *WorkflowInvocationStatus) GetOutput() *TypedValue { +func (m *WorkflowInvocationStatus) GetOutput() *fission_workflows_types.TypedValue { if m != nil { return m.Output } @@ -551,14 +551,14 @@ func (m *Task) GetStatus() *TaskStatus { // Id is specified outside of TaskSpec type TaskSpec struct { // Name/identifier of the function - FunctionRef string `protobuf:"bytes,1,opt,name=functionRef" json:"functionRef,omitempty"` - Inputs map[string]*TypedValue `protobuf:"bytes,2,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + FunctionRef string `protobuf:"bytes,1,opt,name=functionRef" json:"functionRef,omitempty"` + Inputs map[string]*fission_workflows_types.TypedValue `protobuf:"bytes,2,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Dependencies for this task to execute Requires map[string]*TaskDependencyParameters `protobuf:"bytes,3,rep,name=requires" json:"requires,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // Number of dependencies to wait for Await int32 `protobuf:"varint,4,opt,name=await" json:"await,omitempty"` // Transform the output, or override the output with a literal - Output *TypedValue `protobuf:"bytes,5,opt,name=output" json:"output,omitempty"` + Output *fission_workflows_types.TypedValue `protobuf:"bytes,5,opt,name=output" json:"output,omitempty"` } func (m *TaskSpec) Reset() { *m = TaskSpec{} } @@ -573,7 +573,7 @@ func (m *TaskSpec) GetFunctionRef() string { return "" } -func (m *TaskSpec) GetInputs() map[string]*TypedValue { +func (m *TaskSpec) 
GetInputs() map[string]*fission_workflows_types.TypedValue { if m != nil { return m.Inputs } @@ -594,7 +594,7 @@ func (m *TaskSpec) GetAwait() int32 { return 0 } -func (m *TaskSpec) GetOutput() *TypedValue { +func (m *TaskSpec) GetOutput() *fission_workflows_types.TypedValue { if m != nil { return m.Output } @@ -706,7 +706,7 @@ type TaskInvocationSpec struct { // TaskId is the id of the task within the workflow TaskId string `protobuf:"bytes,2,opt,name=taskId" json:"taskId,omitempty"` // Inputs contain all inputs to the task invocation - Inputs map[string]*TypedValue `protobuf:"bytes,3,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Inputs map[string]*fission_workflows_types.TypedValue `protobuf:"bytes,3,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // InvocationId string `protobuf:"bytes,4,opt,name=invocationId" json:"invocationId,omitempty"` } @@ -730,7 +730,7 @@ func (m *TaskInvocationSpec) GetTaskId() string { return "" } -func (m *TaskInvocationSpec) GetInputs() map[string]*TypedValue { +func (m *TaskInvocationSpec) GetInputs() map[string]*fission_workflows_types.TypedValue { if m != nil { return m.Inputs } @@ -745,10 +745,10 @@ func (m *TaskInvocationSpec) GetInvocationId() string { } type TaskInvocationStatus struct { - Status TaskInvocationStatus_Status `protobuf:"varint,1,opt,name=status,enum=fission.workflows.types.TaskInvocationStatus_Status" json:"status,omitempty"` - UpdatedAt *google_protobuf.Timestamp `protobuf:"bytes,2,opt,name=updatedAt" json:"updatedAt,omitempty"` - Output *TypedValue `protobuf:"bytes,3,opt,name=output" json:"output,omitempty"` - Error *Error `protobuf:"bytes,4,opt,name=error" json:"error,omitempty"` + Status TaskInvocationStatus_Status `protobuf:"varint,1,opt,name=status,enum=fission.workflows.types.TaskInvocationStatus_Status" json:"status,omitempty"` + UpdatedAt *google_protobuf.Timestamp `protobuf:"bytes,2,opt,name=updatedAt" json:"updatedAt,omitempty"` + Output *fission_workflows_types.TypedValue `protobuf:"bytes,3,opt,name=output" json:"output,omitempty"` + Error *Error `protobuf:"bytes,4,opt,name=error" json:"error,omitempty"` } func (m *TaskInvocationStatus) Reset() { *m = TaskInvocationStatus{} } @@ -770,7 +770,7 @@ func (m *TaskInvocationStatus) GetUpdatedAt() *google_protobuf.Timestamp { return nil } -func (m *TaskInvocationStatus) GetOutput() *TypedValue { +func (m *TaskInvocationStatus) GetOutput() *fission_workflows_types.TypedValue { if m != nil { return m.Output } @@ -835,46 +835,6 @@ func (m *ObjectMetadata) GetGeneration() int64 { return 0 } -// TypedValue is used to serialize, deserialize, transfer data values across the workflow engine. -// -// It consists partly copy of protobuf's Any, to avoid protobuf requirement of a protobuf-based type. -type TypedValue struct { - // Type is an arbitrary string representation of a type. Each type has an associated parser/formatter. - Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` - // Value holds the actual value in an arbitrary serialized form. A parser should be able to parse this format - // based on the type. - Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - // Labels hold metadata about the value. It is used for example to store origins of data, past transformations, - // and information needed by serialization processes. 
- Labels map[string]string `protobuf:"bytes,3,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` -} - -func (m *TypedValue) Reset() { *m = TypedValue{} } -func (m *TypedValue) String() string { return proto.CompactTextString(m) } -func (*TypedValue) ProtoMessage() {} -func (*TypedValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } - -func (m *TypedValue) GetType() string { - if m != nil { - return m.Type - } - return "" -} - -func (m *TypedValue) GetValue() []byte { - if m != nil { - return m.Value - } - return nil -} - -func (m *TypedValue) GetLabels() map[string]string { - if m != nil { - return m.Labels - } - return nil -} - type Error struct { // string code = 1; Message string `protobuf:"bytes,2,opt,name=message" json:"message,omitempty"` @@ -883,7 +843,7 @@ type Error struct { func (m *Error) Reset() { *m = Error{} } func (m *Error) String() string { return proto.CompactTextString(m) } func (*Error) ProtoMessage() {} -func (*Error) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } +func (*Error) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } func (m *Error) GetMessage() string { if m != nil { @@ -907,7 +867,7 @@ type FnRef struct { func (m *FnRef) Reset() { *m = FnRef{} } func (m *FnRef) String() string { return proto.CompactTextString(m) } func (*FnRef) ProtoMessage() {} -func (*FnRef) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } +func (*FnRef) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } func (m *FnRef) GetRuntime() string { if m != nil { @@ -932,15 +892,15 @@ func (m *FnRef) GetID() string { // Utility wrapper for a TypedValue map type TypedValueMap struct { - Value map[string]*TypedValue `protobuf:"bytes,1,rep,name=Value" json:"Value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Value map[string]*fission_workflows_types.TypedValue `protobuf:"bytes,1,rep,name=Value" json:"Value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` } func (m *TypedValueMap) Reset() { *m = TypedValueMap{} } func (m *TypedValueMap) String() string { return proto.CompactTextString(m) } func (*TypedValueMap) ProtoMessage() {} -func (*TypedValueMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } +func (*TypedValueMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } -func (m *TypedValueMap) GetValue() map[string]*TypedValue { +func (m *TypedValueMap) GetValue() map[string]*fission_workflows_types.TypedValue { if m != nil { return m.Value } @@ -949,15 +909,15 @@ func (m *TypedValueMap) GetValue() map[string]*TypedValue { // Utility wrapper for a TypedValue list type TypedValueList struct { - Value []*TypedValue `protobuf:"bytes,1,rep,name=Value" json:"Value,omitempty"` + Value []*fission_workflows_types.TypedValue `protobuf:"bytes,1,rep,name=Value" json:"Value,omitempty"` } func (m *TypedValueList) Reset() { *m = TypedValueList{} } func (m *TypedValueList) String() string { return proto.CompactTextString(m) } func (*TypedValueList) ProtoMessage() {} -func (*TypedValueList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } +func (*TypedValueList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } -func (m *TypedValueList) GetValue() []*TypedValue { +func (m *TypedValueList) GetValue() []*fission_workflows_types.TypedValue { if m != nil { return m.Value } @@ -980,7 +940,6 @@ func init() { 
proto.RegisterType((*TaskInvocationSpec)(nil), "fission.workflows.types.TaskInvocationSpec") proto.RegisterType((*TaskInvocationStatus)(nil), "fission.workflows.types.TaskInvocationStatus") proto.RegisterType((*ObjectMetadata)(nil), "fission.workflows.types.ObjectMetadata") - proto.RegisterType((*TypedValue)(nil), "fission.workflows.types.TypedValue") proto.RegisterType((*Error)(nil), "fission.workflows.types.Error") proto.RegisterType((*FnRef)(nil), "fission.workflows.types.FnRef") proto.RegisterType((*TypedValueMap)(nil), "fission.workflows.types.TypedValueMap") @@ -995,93 +954,92 @@ func init() { func init() { proto.RegisterFile("pkg/types/types.proto", fileDescriptor0) } var fileDescriptor0 = []byte{ - // 1405 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xcf, 0x6f, 0x1b, 0xc5, - 0x17, 0xef, 0xae, 0xbd, 0x8e, 0xfd, 0x9c, 0xfa, 0xeb, 0xef, 0xa8, 0x14, 0xcb, 0x82, 0x92, 0x2e, - 0x42, 0xad, 0x80, 0x6e, 0x68, 0x5a, 0xd4, 0x84, 0x80, 0x8a, 0xeb, 0xdd, 0xa4, 0xab, 0x24, 0xb6, - 0xb5, 0x76, 0x5a, 0x15, 0xd4, 0x56, 0x13, 0x7b, 0x6c, 0x6d, 0x63, 0xaf, 0x97, 0xdd, 0x75, 0x2b, - 0xff, 0x09, 0x5c, 0xf8, 0x2b, 0x38, 0x72, 0xaf, 0xc4, 0x85, 0x03, 0x47, 0xfe, 0x06, 0xc4, 0x81, - 0x13, 0x07, 0xee, 0x5c, 0x90, 0xd0, 0xcc, 0xfe, 0x9a, 0x75, 0xec, 0xd8, 0x2e, 0x0e, 0x97, 0x64, - 0x67, 0xf6, 0xbd, 0x37, 0x6f, 0x3e, 0xef, 0xf3, 0x3e, 0x33, 0x6b, 0x78, 0xcb, 0x3e, 0xed, 0x6d, - 0x7a, 0x63, 0x9b, 0xb8, 0xfe, 0x5f, 0xc5, 0x76, 0x86, 0xde, 0x10, 0xbd, 0xdd, 0x35, 0x5d, 0xd7, - 0x1c, 0x5a, 0xca, 0xab, 0xa1, 0x73, 0xda, 0xed, 0x0f, 0x5f, 0xb9, 0x0a, 0x7b, 0x5d, 0x7e, 0xaf, - 0x37, 0x1c, 0xf6, 0xfa, 0x64, 0x93, 0x99, 0x9d, 0x8c, 0xba, 0x9b, 0x9e, 0x39, 0x20, 0xae, 0x87, - 0x07, 0xb6, 0xef, 0x29, 0xff, 0x22, 0x40, 0xf6, 0x71, 0xe0, 0x84, 0xaa, 0x90, 0x1d, 0x10, 0x0f, - 0x77, 0xb0, 0x87, 0x4b, 0xc2, 0x86, 0x70, 0x33, 0xbf, 0x75, 0x43, 0x99, 0x11, 0x59, 0xa9, 0x9f, - 0xbc, 0x20, 0x6d, 0xef, 0x28, 0x30, 0x37, 0x22, 0x47, 0xb4, 0x03, 0x69, 0xd7, 0x26, 0xed, 0x92, - 0xc8, 0x02, 0x7c, 0x30, 0x33, 0x40, 0xb8, 0x6a, 0xd3, 0x26, 0x6d, 0x83, 0xb9, 0xa0, 0xfb, 0x90, - 0x71, 0x3d, 0xec, 0x8d, 0xdc, 0x52, 0x6a, 0xce, 0xea, 0x91, 0x33, 0x33, 0x37, 0x02, 0x37, 0xf9, - 0x37, 0x11, 0xd6, 0xf9, 0xb8, 0xe8, 0x1a, 0x00, 0xb6, 0xcd, 0x47, 0xc4, 0xa1, 0x51, 0xd8, 0x9e, - 0x72, 0x06, 0x37, 0x83, 0xf6, 0x40, 0xf2, 0xb0, 0x7b, 0xea, 0x96, 0xc4, 0x8d, 0xd4, 0xcd, 0xfc, - 0xd6, 0x27, 0x0b, 0x65, 0xab, 0xb4, 0xa8, 0x8b, 0x66, 0x79, 0xce, 0xd8, 0xf0, 0xdd, 0xe9, 0x3a, - 0xc3, 0x91, 0x67, 0x8f, 0x3c, 0xfa, 0x8a, 0x65, 0x9f, 0x33, 0xb8, 0x19, 0xb4, 0x01, 0xf9, 0x0e, - 0x71, 0xdb, 0x8e, 0x69, 0x7b, 0x34, 0x91, 0x34, 0x33, 0xe0, 0xa7, 0x50, 0x09, 0xd6, 0xba, 0x43, - 0xa7, 0x4d, 0xf4, 0x4e, 0x49, 0x62, 0x6f, 0xc3, 0x21, 0x42, 0x90, 0xb6, 0xf0, 0x80, 0x94, 0x32, - 0x6c, 0x9a, 0x3d, 0xa3, 0x32, 0x64, 0x4d, 0xcb, 0x23, 0x8e, 0x85, 0xfb, 0xa5, 0xb5, 0x0d, 0xe1, - 0x66, 0xd6, 0x88, 0xc6, 0xe5, 0xaf, 0x01, 0xe2, 0x04, 0x51, 0x11, 0x52, 0xa7, 0x64, 0x1c, 0x6c, - 0x9d, 0x3e, 0xa2, 0x7b, 0x20, 0xbd, 0xc4, 0xfd, 0x11, 0x09, 0x2a, 0x74, 0x7d, 0xe6, 0x9e, 0x69, - 0x14, 0x56, 0x1d, 0xdf, 0xfe, 0x33, 0x71, 0x5b, 0x90, 0x7f, 0x48, 0x41, 0x21, 0x09, 0x3e, 0xda, - 0x8b, 0xaa, 0x46, 0x17, 0x29, 0x6c, 0x29, 0x0b, 0x56, 0x4d, 0x49, 0x16, 0x0f, 0x6d, 0x43, 0x6e, - 0x64, 0x77, 0xb0, 0x47, 0x3a, 0x15, 0x2f, 0xc8, 0xad, 0xac, 0xf8, 0xfc, 0x55, 0x42, 0xfe, 0x2a, - 0xad, 0x90, 0xbf, 0x46, 0x6c, 0x8c, 0x1e, 0x86, 0x55, 0x4c, 0xb1, 0x2a, 0x6e, 0x2d, 0x9a, 0xc0, - 0xd9, 0x3a, 0xde, 0x05, 0x89, 0x38, 0xce, 
0xd0, 0x61, 0x15, 0xca, 0x6f, 0x5d, 0x9b, 0x19, 0x49, - 0xa3, 0x56, 0x86, 0x6f, 0x5c, 0x7e, 0x3a, 0x07, 0xf1, 0x9d, 0x24, 0xe2, 0xef, 0x9f, 0x8f, 0xb8, - 0x8f, 0x0a, 0x87, 0xf9, 0x0e, 0x64, 0x02, 0xa8, 0xf3, 0xb0, 0xd6, 0xd0, 0x6a, 0xaa, 0x5e, 0xdb, - 0x2f, 0x5e, 0x42, 0x39, 0x90, 0x0c, 0xad, 0xa2, 0x3e, 0x29, 0x8a, 0x08, 0x20, 0xb3, 0x57, 0xd1, - 0x0f, 0x35, 0xb5, 0x98, 0xa2, 0x36, 0xaa, 0x76, 0xa8, 0xb5, 0x34, 0xb5, 0x98, 0x96, 0xff, 0x10, - 0x00, 0x85, 0x9b, 0xd6, 0xad, 0x97, 0xc3, 0x36, 0x66, 0x64, 0x5b, 0x49, 0xa3, 0x57, 0x13, 0x8d, - 0xbe, 0x39, 0x17, 0xf4, 0x78, 0x7d, 0xae, 0xe5, 0xf5, 0x89, 0x96, 0xbf, 0xbd, 0x4c, 0x98, 0x64, - 0xf3, 0x7f, 0x2b, 0xc2, 0xd5, 0xe9, 0x6b, 0xd1, 0xf6, 0x0c, 0xc3, 0xe9, 0x9d, 0x50, 0x06, 0xe2, - 0x19, 0xd4, 0x84, 0x8c, 0x69, 0xd9, 0x23, 0x2f, 0xd4, 0x81, 0xdd, 0x25, 0x37, 0xa3, 0xe8, 0xcc, - 0xdb, 0xa7, 0x52, 0x10, 0x8a, 0xf6, 0xa8, 0x8d, 0x1d, 0x62, 0x79, 0x7a, 0x27, 0x50, 0x84, 0x68, - 0x5c, 0x7e, 0x06, 0x79, 0xce, 0xe5, 0x5f, 0x51, 0x66, 0x6c, 0x93, 0xce, 0x23, 0x6a, 0xca, 0x53, - 0xe6, 0x2f, 0x09, 0x4a, 0xb3, 0x00, 0x43, 0x8d, 0x89, 0x86, 0xdd, 0x5e, 0x1a, 0xf3, 0xd5, 0xb5, - 0xae, 0x91, 0x6c, 0xdd, 0xcf, 0x97, 0x4f, 0xe5, 0x6c, 0x13, 0xef, 0x42, 0xc6, 0x97, 0xde, 0xa0, - 0x8b, 0x17, 0x02, 0x2f, 0x70, 0x41, 0x3d, 0x58, 0xef, 0x8c, 0x2d, 0x3c, 0x30, 0xdb, 0x2c, 0x70, - 0x49, 0x62, 0x79, 0x55, 0x97, 0xcf, 0x4b, 0xe5, 0xa2, 0xf8, 0xe9, 0x25, 0x02, 0xc7, 0x52, 0x93, - 0x59, 0x46, 0x6a, 0xf0, 0x1c, 0xa9, 0xf9, 0x22, 0xc9, 0x9b, 0x1b, 0xe7, 0x4a, 0x4d, 0x9c, 0x33, - 0xc7, 0x9d, 0xf2, 0x33, 0xf8, 0xff, 0x99, 0xdc, 0xa7, 0xac, 0x74, 0x27, 0xb9, 0xd2, 0xbb, 0xe7, - 0xae, 0xc4, 0x73, 0xf3, 0x29, 0x2f, 0x67, 0xc7, 0xb5, 0x83, 0x5a, 0xfd, 0x71, 0xad, 0x78, 0x09, - 0x5d, 0x86, 0x5c, 0xb3, 0xfa, 0x50, 0x53, 0x8f, 0xa9, 0x8c, 0x09, 0xe8, 0x7f, 0x90, 0xd7, 0x6b, - 0xcf, 0x1b, 0x46, 0x7d, 0xdf, 0xd0, 0x9a, 0xcd, 0xa2, 0xc8, 0xde, 0x1f, 0x57, 0xab, 0x9a, 0xa6, - 0x32, 0x99, 0x8b, 0x25, 0x2f, 0x4d, 0xe3, 0x54, 0x1e, 0xd4, 0x0d, 0x2a, 0x79, 0x92, 0xfc, 0xa7, - 0x00, 0x45, 0x95, 0xd8, 0xc4, 0xea, 0x10, 0xab, 0x3d, 0xae, 0x0e, 0xad, 0xae, 0xd9, 0x43, 0x4d, - 0xc8, 0x3a, 0xe4, 0x9b, 0x91, 0xe9, 0x10, 0x4a, 0x7a, 0x5a, 0xd1, 0x7b, 0x33, 0xf3, 0x9d, 0x74, - 0x56, 0x8c, 0xc0, 0xd3, 0xaf, 0x62, 0x14, 0x08, 0x5d, 0x01, 0x09, 0xbf, 0xc2, 0xa6, 0xcf, 0x78, - 0xc9, 0xf0, 0x07, 0x65, 0x0b, 0x2e, 0x27, 0x1c, 0xa6, 0x40, 0xb7, 0x9f, 0x84, 0xee, 0xf6, 0xb9, - 0xd0, 0xc5, 0xe9, 0x34, 0xb0, 0x83, 0x07, 0xc4, 0x23, 0x4e, 0xe2, 0x74, 0xf8, 0x49, 0x80, 0x34, - 0xbb, 0x63, 0xac, 0x44, 0xd4, 0x3f, 0x4d, 0x88, 0xfa, 0x02, 0x77, 0x03, 0x5f, 0xc6, 0x77, 0x27, - 0x64, 0x7c, 0xa1, 0x23, 0x2e, 0x14, 0xee, 0xdf, 0x53, 0x90, 0x0d, 0xe3, 0xd1, 0x9b, 0x52, 0x77, - 0x64, 0xb5, 0x19, 0x29, 0x49, 0x37, 0x40, 0x8d, 0x9f, 0x42, 0xda, 0x84, 0x58, 0xdf, 0x9a, 0x9b, - 0xe4, 0x54, 0x79, 0x3e, 0xe0, 0x28, 0xe1, 0x8b, 0xcf, 0xe6, 0xfc, 0x40, 0x73, 0xa9, 0x90, 0xe6, - 0xa8, 0xc0, 0x09, 0x91, 0xb4, 0xb4, 0x10, 0x5d, 0xf4, 0x11, 0xf1, 0x9f, 0xf3, 0xf4, 0x7b, 0xd1, - 0x97, 0xae, 0xa0, 0xf7, 0x1f, 0x4c, 0x1c, 0x42, 0x1f, 0x2e, 0xc0, 0x98, 0xd5, 0x1d, 0x3b, 0x77, - 0x41, 0xea, 0x32, 0x7e, 0xa5, 0xe6, 0x88, 0xef, 0x1e, 0xb5, 0x32, 0x7c, 0xe3, 0x37, 0xbb, 0x1d, - 0xca, 0x1f, 0xf3, 0x7a, 0xd7, 0x6c, 0x55, 0x98, 0x4e, 0x71, 0xd7, 0x37, 0x81, 0xd3, 0x32, 0x51, - 0xfe, 0x59, 0x80, 0xd2, 0x2c, 0x38, 0x51, 0x0b, 0xd2, 0x74, 0x81, 0x00, 0xb2, 0x2f, 0x97, 0xae, - 0x07, 0xa7, 0x6d, 0x94, 0x14, 0x06, 0x8b, 0xc6, 0xc8, 0xdb, 0x37, 0xb1, 0xcb, 0x20, 0xcc, 0x19, - 0xfe, 0x40, 0xde, 0x85, 0x42, 0xd2, 0x1a, 0x65, 0x21, 0xad, 0x56, 
0x5a, 0x95, 0xe2, 0x25, 0xba, - 0x91, 0x6a, 0xbd, 0xd6, 0x32, 0xea, 0x87, 0x45, 0x01, 0x21, 0x28, 0xa8, 0x4f, 0x6a, 0x95, 0x23, - 0xbd, 0xfa, 0xbc, 0x7e, 0xdc, 0x6a, 0x1c, 0xb7, 0x8a, 0xa2, 0xfc, 0xab, 0x00, 0x85, 0xe4, 0x09, - 0xb3, 0x1a, 0x79, 0xba, 0x9f, 0x90, 0xa7, 0x8f, 0x16, 0x3c, 0xdd, 0x38, 0xa1, 0xd2, 0x26, 0x84, - 0xea, 0xd6, 0xa2, 0x21, 0x92, 0x92, 0xf5, 0x5a, 0x04, 0x74, 0x76, 0x8d, 0x98, 0x56, 0xc2, 0x32, - 0xb4, 0xba, 0x0a, 0x19, 0x7a, 0x71, 0xd1, 0x3b, 0x41, 0x01, 0x82, 0x11, 0xaa, 0x47, 0x42, 0x97, - 0x9a, 0x73, 0x64, 0x9d, 0x4d, 0x65, 0xaa, 0xe4, 0xc9, 0xb0, 0x6e, 0x46, 0x56, 0x7a, 0x27, 0xf8, - 0x0c, 0x4d, 0xcc, 0x5d, 0xf8, 0xcd, 0xf4, 0x6f, 0x11, 0xae, 0x4c, 0x83, 0x16, 0x1d, 0x4e, 0x08, - 0xc2, 0xdd, 0xa5, 0x2a, 0xb3, 0x3a, 0x69, 0x88, 0x45, 0x3b, 0xb5, 0xfc, 0xed, 0xf1, 0xcd, 0x14, - 0xe2, 0xc5, 0x85, 0xde, 0x88, 0x98, 0xec, 0x1c, 0xe8, 0x8d, 0x86, 0xa6, 0x16, 0x33, 0xf2, 0x77, - 0x02, 0x14, 0x92, 0xed, 0x85, 0x0a, 0x20, 0x9a, 0xe1, 0x57, 0x91, 0x68, 0xc6, 0x3f, 0x38, 0x88, - 0xdc, 0x0f, 0x0e, 0xdb, 0x90, 0x6b, 0x3b, 0x24, 0xc0, 0x33, 0x35, 0x1f, 0xcf, 0xc8, 0x98, 0x7e, - 0x7b, 0xf5, 0x88, 0x45, 0x1c, 0x1c, 0xfd, 0xf2, 0x91, 0x32, 0xb8, 0x19, 0xf9, 0x47, 0x01, 0x20, - 0x46, 0x92, 0x2e, 0x1e, 0x49, 0x5c, 0x2e, 0x16, 0xa8, 0x98, 0x72, 0xeb, 0x01, 0x9b, 0xd0, 0x3e, - 0x64, 0xfa, 0xf8, 0x84, 0xf4, 0x17, 0x38, 0xbe, 0xa3, 0xf0, 0xca, 0x21, 0xf3, 0x08, 0xda, 0xc2, - 0x77, 0x2f, 0xef, 0x40, 0x9e, 0x9b, 0x9e, 0x42, 0xf9, 0xc4, 0xfa, 0x39, 0x9e, 0xcd, 0xd7, 0x41, - 0x62, 0x95, 0x44, 0x25, 0x58, 0x1b, 0x10, 0xd7, 0xc5, 0xbd, 0xd0, 0x28, 0x1c, 0xca, 0x75, 0x90, - 0x58, 0xb7, 0x53, 0x13, 0x67, 0x64, 0x79, 0x66, 0x84, 0x6c, 0x38, 0x44, 0xef, 0x40, 0x8e, 0x82, - 0xec, 0xda, 0xb8, 0x4d, 0x82, 0x4f, 0xc5, 0x78, 0x82, 0x96, 0x47, 0x57, 0x83, 0x5e, 0x15, 0x75, - 0x55, 0x7e, 0x2d, 0xc0, 0xe5, 0x78, 0x47, 0x47, 0xd8, 0xa6, 0xe7, 0x34, 0x7b, 0x0e, 0xae, 0xb6, - 0xb7, 0x17, 0x00, 0xe2, 0x08, 0xdb, 0x0a, 0x7b, 0x08, 0xbe, 0x9c, 0xd8, 0x73, 0xf9, 0x29, 0x40, - 0x3c, 0xb9, 0xfa, 0xde, 0x3f, 0x80, 0x42, 0xfc, 0xe2, 0xd0, 0x74, 0x3d, 0x1a, 0x90, 0xcf, 0x7c, - 0xb1, 0x80, 0xec, 0xdf, 0x83, 0xb5, 0xaf, 0x24, 0xf6, 0xea, 0x24, 0xc3, 0xf8, 0x77, 0xe7, 0x9f, - 0x00, 0x00, 0x00, 0xff, 0xff, 0x36, 0x18, 0xb2, 0x7c, 0x1c, 0x15, 0x00, 0x00, + // 1380 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xcf, 0x73, 0xdb, 0xc4, + 0x17, 0xaf, 0x64, 0xcb, 0xb1, 0x9f, 0x5b, 0x7f, 0xfd, 0xdd, 0x29, 0xc5, 0xe3, 0x81, 0x92, 0x8a, + 0x61, 0xda, 0x01, 0x2a, 0xd3, 0xb4, 0x4c, 0x53, 0x02, 0x53, 0x5c, 0x4b, 0x69, 0x35, 0x49, 0x6c, + 0x8f, 0xec, 0xb4, 0x53, 0x98, 0xb6, 0xb3, 0xb1, 0xd7, 0x46, 0x4d, 0x2c, 0x09, 0x49, 0x6e, 0x27, + 0x7f, 0x02, 0x17, 0xfe, 0x0a, 0x8e, 0xdc, 0x7b, 0xe4, 0xc0, 0x91, 0xbf, 0x81, 0xe1, 0xc0, 0x89, + 0x03, 0x77, 0x2e, 0xcc, 0x30, 0xbb, 0x5a, 0x59, 0x2b, 0x27, 0x8e, 0xed, 0xe2, 0x72, 0x49, 0xf6, + 0xc7, 0x7b, 0x9f, 0xf7, 0xf6, 0xfd, 0xf8, 0xec, 0xca, 0xf0, 0x96, 0x77, 0x38, 0xac, 0x85, 0xc7, + 0x1e, 0x09, 0xa2, 0xbf, 0x9a, 0xe7, 0xbb, 0xa1, 0x8b, 0xde, 0x1e, 0xd8, 0x41, 0x60, 0xbb, 0x8e, + 0xf6, 0xd2, 0xf5, 0x0f, 0x07, 0x47, 0xee, 0xcb, 0x40, 0x63, 0xdb, 0xd5, 0xf7, 0x86, 0xae, 0x3b, + 0x3c, 0x22, 0x35, 0x26, 0x76, 0x30, 0x1e, 0xd4, 0x42, 0x7b, 0x44, 0x82, 0x10, 0x8f, 0xbc, 0x48, + 0xb3, 0xba, 0x3b, 0xb4, 0xc3, 0x6f, 0xc6, 0x07, 0x5a, 0xcf, 0x1d, 0xd5, 0x38, 0x48, 0xfc, 0xff, + 0xfa, 0x04, 0xac, 0x96, 0xb6, 0xda, 0x7f, 0x81, 0x8f, 0xc6, 0xe9, 0x71, 0x84, 0xa6, 0xfe, 0x22, + 0x41, 0xfe, 0x11, 0xd7, 0x42, 0x0d, 0xc8, 0x8f, 0x48, 0x88, 
0xfb, 0x38, 0xc4, 0x15, 0x69, 0x5d, + 0xba, 0x56, 0xdc, 0xb8, 0xaa, 0xcd, 0xf0, 0x53, 0x6b, 0x1d, 0x3c, 0x27, 0xbd, 0x70, 0x8f, 0x8b, + 0x5b, 0x13, 0x45, 0x74, 0x07, 0xb2, 0x81, 0x47, 0x7a, 0x15, 0x99, 0x01, 0x7c, 0x30, 0x13, 0x20, + 0xb6, 0xda, 0xf1, 0x48, 0xcf, 0x62, 0x2a, 0xe8, 0x2e, 0xe4, 0x82, 0x10, 0x87, 0xe3, 0xa0, 0x92, + 0x99, 0x63, 0x7d, 0xa2, 0xcc, 0xc4, 0x2d, 0xae, 0xa6, 0xfe, 0x26, 0xc3, 0x79, 0x11, 0x17, 0x5d, + 0x06, 0xc0, 0x9e, 0xfd, 0x90, 0xf8, 0x14, 0x85, 0x9d, 0xa9, 0x60, 0x09, 0x2b, 0x68, 0x1b, 0x94, + 0x10, 0x07, 0x87, 0x41, 0x45, 0x5e, 0xcf, 0x5c, 0x2b, 0x6e, 0x7c, 0xb2, 0x90, 0xb7, 0x5a, 0x97, + 0xaa, 0x18, 0x4e, 0xe8, 0x1f, 0x5b, 0x91, 0x3a, 0xb5, 0xe3, 0x8e, 0x43, 0x6f, 0x1c, 0xd2, 0x2d, + 0xe6, 0x7d, 0xc1, 0x12, 0x56, 0xd0, 0x3a, 0x14, 0xfb, 0x24, 0xe8, 0xf9, 0xb6, 0x17, 0x52, 0x47, + 0xb2, 0x4c, 0x40, 0x5c, 0x42, 0x15, 0x58, 0x1b, 0xb8, 0x7e, 0x8f, 0x98, 0xfd, 0x8a, 0xc2, 0x76, + 0xe3, 0x29, 0x42, 0x90, 0x75, 0xf0, 0x88, 0x54, 0x72, 0x6c, 0x99, 0x8d, 0x51, 0x15, 0xf2, 0xb6, + 0x13, 0x12, 0xdf, 0xc1, 0x47, 0x95, 0xb5, 0x75, 0xe9, 0x5a, 0xde, 0x9a, 0xcc, 0xab, 0x5f, 0x03, + 0x24, 0x0e, 0xa2, 0x32, 0x64, 0x0e, 0xc9, 0x31, 0x3f, 0x3a, 0x1d, 0xa2, 0xdb, 0xa0, 0xb0, 0x12, + 0xe0, 0x19, 0xba, 0x32, 0xf3, 0xcc, 0x14, 0x85, 0x65, 0x27, 0x92, 0xff, 0x4c, 0xde, 0x94, 0xd4, + 0x1f, 0x33, 0x50, 0x4a, 0x07, 0x1f, 0x6d, 0x4f, 0xb2, 0x46, 0x8d, 0x94, 0x36, 0xb4, 0x05, 0xb3, + 0xa6, 0xa5, 0x93, 0x87, 0x36, 0xa1, 0x30, 0xf6, 0xfa, 0x38, 0x24, 0xfd, 0x7a, 0xc8, 0x7d, 0xab, + 0x6a, 0x51, 0x37, 0x68, 0x71, 0x37, 0x68, 0xdd, 0xb8, 0x1b, 0xac, 0x44, 0x18, 0x3d, 0x88, 0xb3, + 0x98, 0x61, 0x59, 0xdc, 0x58, 0xd4, 0x81, 0x93, 0x79, 0xbc, 0x05, 0x0a, 0xf1, 0x7d, 0xd7, 0x67, + 0x19, 0x2a, 0x6e, 0x5c, 0x9e, 0x89, 0x64, 0x50, 0x29, 0x2b, 0x12, 0xae, 0x3e, 0x99, 0x13, 0xf1, + 0x3b, 0xe9, 0x88, 0xbf, 0x7f, 0x76, 0xc4, 0xa3, 0xa8, 0x08, 0x31, 0xbf, 0x03, 0x39, 0x1e, 0xea, + 0x22, 0xac, 0xb5, 0x8d, 0xa6, 0x6e, 0x36, 0xef, 0x97, 0xcf, 0xa1, 0x02, 0x28, 0x96, 0x51, 0xd7, + 0x1f, 0x97, 0x65, 0x04, 0x90, 0xdb, 0xae, 0x9b, 0xbb, 0x86, 0x5e, 0xce, 0x50, 0x19, 0xdd, 0xd8, + 0x35, 0xba, 0x86, 0x5e, 0xce, 0xaa, 0x7f, 0x48, 0x80, 0xe2, 0x43, 0x9b, 0xce, 0x0b, 0xb7, 0x87, + 0x59, 0xb1, 0xad, 0xa4, 0xd1, 0x1b, 0xa9, 0x46, 0xaf, 0xcd, 0x0d, 0x7a, 0x62, 0x5f, 0x68, 0x79, + 0x73, 0xaa, 0xe5, 0x6f, 0x2c, 0x03, 0x93, 0x6e, 0xfe, 0xef, 0x64, 0xb8, 0x74, 0xba, 0x2d, 0xda, + 0x9e, 0x31, 0x9c, 0xd9, 0x8f, 0x69, 0x20, 0x59, 0x41, 0x1d, 0xc8, 0xd9, 0x8e, 0x37, 0x0e, 0x63, + 0x1e, 0xd8, 0x5a, 0xf2, 0x30, 0x9a, 0xc9, 0xb4, 0xa3, 0x52, 0xe2, 0x50, 0xb4, 0x47, 0x3d, 0xec, + 0x13, 0x27, 0x34, 0xfb, 0x9c, 0x11, 0x26, 0xf3, 0xea, 0x53, 0x28, 0x0a, 0x2a, 0xff, 0xaa, 0x64, + 0x28, 0xa5, 0x3f, 0xa4, 0xa2, 0x62, 0xc9, 0xfc, 0xa5, 0x40, 0x65, 0x56, 0xc0, 0x50, 0x7b, 0xaa, + 0x61, 0x37, 0x97, 0x8e, 0xf9, 0xea, 0x5a, 0xd7, 0x4a, 0xb7, 0xee, 0xe7, 0xcb, 0xbb, 0x72, 0xb2, + 0x89, 0xb7, 0x20, 0x17, 0x51, 0x2f, 0xef, 0xe2, 0x85, 0x82, 0xc7, 0x55, 0xd0, 0x10, 0xce, 0xf7, + 0x8f, 0x1d, 0x3c, 0xb2, 0x7b, 0x0c, 0xb8, 0xa2, 0x30, 0xbf, 0x1a, 0xcb, 0xfb, 0xa5, 0x0b, 0x28, + 0x91, 0x7b, 0x29, 0xe0, 0x84, 0x6a, 0x72, 0xcb, 0x50, 0x0d, 0x9e, 0x43, 0x35, 0x5f, 0xa4, 0xeb, + 0xe6, 0xea, 0x99, 0x54, 0x93, 0xf8, 0x2c, 0xd4, 0x4e, 0xf5, 0x29, 0xfc, 0xff, 0x84, 0xef, 0xa7, + 0x58, 0xba, 0x99, 0xb6, 0xf4, 0xee, 0x99, 0x96, 0xc4, 0xda, 0x7c, 0x22, 0xd2, 0xd9, 0x7e, 0x73, + 0xa7, 0xd9, 0x7a, 0xd4, 0x2c, 0x9f, 0x43, 0x17, 0xa0, 0xd0, 0x69, 0x3c, 0x30, 0xf4, 0x7d, 0x4a, + 0x63, 0x12, 0xfa, 0x1f, 0x14, 0xcd, 0xe6, 0xb3, 0xb6, 0xd5, 0xba, 0x6f, 0x19, 0x9d, 
0x4e, 0x59, + 0x66, 0xfb, 0xfb, 0x8d, 0x86, 0x61, 0xe8, 0x8c, 0xe6, 0x12, 0xca, 0xcb, 0x52, 0x9c, 0xfa, 0xbd, + 0x96, 0x45, 0x29, 0x4f, 0x51, 0xff, 0x94, 0xa0, 0xac, 0x13, 0x8f, 0x38, 0x7d, 0xe2, 0xf4, 0x8e, + 0x1b, 0xae, 0x33, 0xb0, 0x87, 0xa8, 0x03, 0x79, 0x9f, 0x7c, 0x3b, 0xb6, 0x7d, 0x42, 0x8b, 0x9e, + 0x66, 0xf4, 0xf6, 0x4c, 0x7f, 0xa7, 0x95, 0x35, 0x8b, 0x6b, 0x46, 0x59, 0x9c, 0x00, 0xa1, 0x8b, + 0xa0, 0xe0, 0x97, 0xd8, 0x8e, 0x2a, 0x5e, 0xb1, 0xa2, 0x49, 0xd5, 0x81, 0x0b, 0x29, 0x85, 0x53, + 0x42, 0x77, 0x3f, 0x1d, 0xba, 0x1b, 0x67, 0x86, 0x2e, 0x71, 0xa7, 0x8d, 0x7d, 0x3c, 0x22, 0x21, + 0xf1, 0x53, 0xb7, 0xc3, 0x4f, 0x12, 0x64, 0xd9, 0x1b, 0x63, 0x25, 0xa4, 0xfe, 0x69, 0x8a, 0xd4, + 0x17, 0x78, 0x1b, 0x44, 0x34, 0xbe, 0x35, 0x45, 0xe3, 0x0b, 0x5d, 0x71, 0x31, 0x71, 0xff, 0x9e, + 0x81, 0x7c, 0x8c, 0x47, 0x5f, 0x4a, 0x83, 0xb1, 0xd3, 0x63, 0x45, 0x49, 0x06, 0x3c, 0x6a, 0xe2, + 0x12, 0x32, 0xa6, 0xc8, 0xfa, 0xfa, 0x5c, 0x27, 0x4f, 0xa5, 0xe7, 0x1d, 0xa1, 0x24, 0x22, 0xf2, + 0xa9, 0xcd, 0x07, 0x9a, 0x5b, 0x0a, 0x59, 0xa1, 0x14, 0x04, 0x22, 0x52, 0x96, 0x26, 0xa2, 0x37, + 0x7d, 0x45, 0xfc, 0xe7, 0x75, 0xfa, 0x83, 0x1c, 0x51, 0x17, 0xef, 0xfd, 0x7b, 0x53, 0x97, 0xd0, + 0x87, 0x0b, 0x54, 0xcc, 0xea, 0xae, 0x9d, 0x5b, 0xa0, 0x0c, 0x58, 0x7d, 0x65, 0xe6, 0x90, 0xef, + 0x36, 0x95, 0xb2, 0x22, 0xe1, 0xd7, 0x7b, 0x1d, 0xaa, 0x1f, 0x8b, 0x7c, 0xd7, 0xe9, 0xd6, 0x19, + 0x4f, 0x09, 0xcf, 0x37, 0x49, 0xe0, 0x32, 0x59, 0xfd, 0x59, 0x82, 0xca, 0xac, 0x70, 0xa2, 0x2e, + 0x64, 0xa9, 0x01, 0x1e, 0xb2, 0x2f, 0x97, 0xce, 0x87, 0xc0, 0x6d, 0xb4, 0x28, 0x2c, 0x86, 0xc6, + 0x8a, 0xf7, 0xc8, 0xc6, 0x01, 0x0b, 0x61, 0xc1, 0x8a, 0x26, 0xea, 0x16, 0x94, 0xd2, 0xd2, 0x28, + 0x0f, 0x59, 0xbd, 0xde, 0xad, 0x97, 0xcf, 0xd1, 0x83, 0x34, 0x5a, 0xcd, 0xae, 0xd5, 0xda, 0x2d, + 0x4b, 0x08, 0x41, 0x49, 0x7f, 0xdc, 0xac, 0xef, 0x99, 0x8d, 0x67, 0xad, 0xfd, 0x6e, 0x7b, 0xbf, + 0x5b, 0x96, 0xd5, 0x5f, 0x25, 0x28, 0xa5, 0x6f, 0x98, 0xd5, 0xd0, 0xd3, 0xdd, 0x14, 0x3d, 0x7d, + 0xb4, 0xe0, 0xed, 0x26, 0x10, 0x95, 0x31, 0x45, 0x54, 0xd7, 0x17, 0x85, 0x48, 0x53, 0xd6, 0x2b, + 0x19, 0xd0, 0x49, 0x1b, 0x49, 0x59, 0x49, 0xcb, 0x94, 0xd5, 0x25, 0xc8, 0xd1, 0x87, 0x8b, 0xd9, + 0xe7, 0x09, 0xe0, 0x33, 0xd4, 0x9a, 0x10, 0x5d, 0x66, 0xce, 0x95, 0x75, 0xd2, 0x95, 0x53, 0x29, + 0x4f, 0x85, 0xf3, 0xf6, 0x44, 0xca, 0xec, 0xf3, 0xcf, 0xd0, 0xd4, 0xda, 0x1b, 0x7f, 0x99, 0xfe, + 0x2d, 0xc3, 0xc5, 0xd3, 0x42, 0x8b, 0x76, 0xa7, 0x08, 0xe1, 0xd6, 0x52, 0x99, 0x59, 0x1d, 0x35, + 0x24, 0xa4, 0x9d, 0x59, 0xfe, 0xf5, 0xf8, 0x7a, 0x0c, 0xf1, 0xfc, 0x8d, 0xbe, 0x88, 0x18, 0xed, + 0xec, 0x98, 0xed, 0xb6, 0xa1, 0x97, 0x73, 0xea, 0xf7, 0x12, 0x94, 0xd2, 0xed, 0x85, 0x4a, 0x20, + 0xdb, 0xf1, 0x57, 0x91, 0x6c, 0x27, 0x3f, 0x38, 0xc8, 0xc2, 0x0f, 0x0e, 0x9b, 0x50, 0xe8, 0xf9, + 0x84, 0xc7, 0x33, 0x33, 0x3f, 0x9e, 0x13, 0x61, 0xfa, 0xed, 0x35, 0x24, 0x0e, 0xf1, 0xf1, 0xe4, + 0x97, 0x8f, 0x8c, 0x25, 0xac, 0xa8, 0x57, 0x40, 0x61, 0xc1, 0x40, 0x15, 0x58, 0x1b, 0x91, 0x20, + 0xc0, 0xc3, 0xd8, 0x72, 0x3c, 0x55, 0x5b, 0xa0, 0xb0, 0x86, 0xa1, 0x22, 0xfe, 0xd8, 0x09, 0xed, + 0x89, 0x73, 0xf1, 0x14, 0xbd, 0x03, 0x05, 0xea, 0x67, 0xe0, 0xe1, 0x1e, 0xe1, 0x5f, 0x5b, 0xc9, + 0x02, 0x3d, 0xa1, 0xa9, 0xf3, 0x72, 0x97, 0x4d, 0x5d, 0x7d, 0x25, 0xc1, 0x85, 0x24, 0x7b, 0x7b, + 0xd8, 0xa3, 0x57, 0x1d, 0x1b, 0xf3, 0xd7, 0xe1, 0x8d, 0x05, 0x92, 0xbe, 0x87, 0x3d, 0x8d, 0x0d, + 0xf8, 0xc7, 0x07, 0x1b, 0x57, 0x9f, 0x00, 0x24, 0x8b, 0xab, 0x6f, 0x9f, 0x1d, 0x28, 0x25, 0x1b, + 0xbb, 0x76, 0x10, 0x52, 0x40, 0xd1, 0xf3, 0xc5, 0x00, 0xd9, 0xbf, 0x7b, 0x6b, 0x5f, 0x29, 0x6c, + 0xeb, 0x20, 
0xc7, 0x52, 0x78, 0xf3, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc8, 0x82, 0x34, 0x70, + 0xad, 0x14, 0x00, 0x00, } diff --git a/pkg/types/types.proto b/pkg/types/types.proto index ad43f09c..69d0857d 100644 --- a/pkg/types/types.proto +++ b/pkg/types/types.proto @@ -4,6 +4,7 @@ package fission.workflows.types; option go_package = "types"; import "google/protobuf/timestamp.proto"; +import "github.com/fission/fission-workflows/pkg/types/typedvalues/typedvalues.proto"; // // Workflow Model @@ -232,23 +233,6 @@ message ObjectMetadata { int64 generation = 4; } -// TypedValue is used to serialize, deserialize, transfer data values across the workflow engine. -// -// It consists partly copy of protobuf's Any, to avoid protobuf requirement of a protobuf-based type. -message TypedValue { - - // Type is an arbitrary string representation of a type. Each type has an associated parser/formatter. - string type = 1; - - // Value holds the actual value in an arbitrary serialized form. A parser should be able to parse this format - // based on the type. - bytes value = 2; - - // Labels hold metadata about the value. It is used for example to store origins of data, past transformations, - // and information needed by serialization processes. - map labels = 3; -} - message Error { // string code = 1; string message = 2; @@ -277,5 +261,3 @@ message TypedValueMap { message TypedValueList { repeated TypedValue Value = 1; } - - diff --git a/pkg/types/validate/validate.go b/pkg/types/validate/validate.go index 2e7e501d..3e4a2a9d 100644 --- a/pkg/types/validate/validate.go +++ b/pkg/types/validate/validate.go @@ -11,7 +11,7 @@ import ( "github.com/fission/fission-workflows/pkg/types" "github.com/fission/fission-workflows/pkg/types/graph" - "github.com/fission/fission-workflows/pkg/types/typedvalues" + "github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow" "gonum.org/v1/gonum/graph/topo" ) @@ -297,15 +297,15 @@ func format(rawErr error, depth int) string { } } -func Flow(flow typedvalues.Flow) error { +func Flow(flow controlflow.Flow) error { if flow.IsEmpty() { return ErrObjectEmpty } - wf := flow.Workflow() + wf := flow.GetWorkflow() if wf != nil { return WorkflowSpec(wf) } - return TaskSpec(flow.Task()) + return TaskSpec(flow.GetTask()) } func NewError(subject string, errs ...error) error { diff --git a/pkg/util/mediatype/mediatype.go b/pkg/util/mediatype/mediatype.go new file mode 100644 index 00000000..fe20992d --- /dev/null +++ b/pkg/util/mediatype/mediatype.go @@ -0,0 +1,124 @@ +// Package mediatype implements the IANA Media Type standard. +// +// Although a partial implementation exists in the standard library under pkg/mime, it misses functionality, such as +// representation of suffices, and an explicit container format for media types. This package wraps the library, +// providing those functions. 
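For the mediatype package introduced here, a brief usage sketch (illustrative only, not part of the patch; it mirrors the Parse and String behaviour exercised in mediatype_test.go below):

package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/util/mediatype"
)

func main() {
	// Parse splits the identifier into type, subtype and optional suffix,
	// and keeps any parameters.
	mt, err := mediatype.Parse("application/protobuf+json; charset=utf-8")
	if err != nil {
		panic(err)
	}
	fmt.Println(mt.Type)                  // application
	fmt.Println(mt.Subtype)               // protobuf
	fmt.Println(mt.Suffix)                // json
	fmt.Println(mt.Parameters["charset"]) // utf-8

	// String re-serializes the media type, including the suffix and parameters.
	fmt.Println(mt.String()) // application/protobuf+json; charset=utf-8
}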
+// +// For Further information on media types see https://www.iana.org/assignments/media-types/media-types.xhtml +package mediatype + +import ( + "mime" + "net/http" + "strings" + + "github.com/pkg/errors" +) + +const ( + HeaderContentType = "Content-Type" +) + +// See: https://en.wikipedia.org/wiki/Media_type +type MediaType struct { + Parameters map[string]string + Type string + Subtype string + Suffix string +} + +func (m *MediaType) Copy() *MediaType { + if m == nil { + return &MediaType{} + } + copiedParams := map[string]string{} + for k, v := range m.Parameters { + copiedParams[k] = v + } + return &MediaType{ + Parameters: copiedParams, + Type: m.Type, + Subtype: m.Subtype, + Suffix: m.Suffix, + } +} + +func (m *MediaType) ensureParametersExist() { + if m.Parameters == nil { + m.Parameters = map[string]string{} + } +} + +func (m *MediaType) Identifier() string { + if m == nil { + return "" + } + return m.Type + "/" + m.Subtype +} + +func (m *MediaType) GetParam(key string) (string, bool) { + m.ensureParametersExist() + val, ok := m.Parameters[key] + return val, ok +} + +func (m *MediaType) SetParam(key string, val string) bool { + m.ensureParametersExist() + _, ok := m.Parameters[key] + m.Parameters[key] = val + return ok +} + +func (m *MediaType) TypeEquals(other *MediaType) bool { + if m == nil || other == nil { + return m == other + } + + return m.Type == other.Type && m.Subtype == other.Subtype +} + +func (m *MediaType) String() string { + if m == nil { + return "" + } + var builder strings.Builder + builder.WriteString(m.Type + "/" + m.Subtype) + if len(m.Suffix) > 0 { + builder.WriteString("+" + m.Suffix) + } + return mime.FormatMediaType(builder.String(), m.Parameters) +} + +func SetContentTypeHeader(m *MediaType, w http.ResponseWriter) { + w.Header().Set(HeaderContentType, m.String()) +} + +func Parse(s string) (*MediaType, error) { + identifier, params, err := mime.ParseMediaType(s) + if err != nil { + return nil, errors.WithStack(err) + } + var mediaType, subType, suffix string + if pos := strings.Index(identifier, "+"); pos >= 0 { + suffix = identifier[pos+1:] + identifier = identifier[:pos] + } + if pos := strings.Index(identifier, "/"); pos >= 0 { + mediaType = identifier[:pos] + subType = identifier[pos+1:] + } + return &MediaType{ + Parameters: params, + Subtype: subType, + Type: mediaType, + Suffix: suffix, + }, nil +} + +func MustParse(s string) *MediaType { + mt, err := Parse(s) + if err != nil { + panic(err) + } + return mt +} diff --git a/pkg/util/mediatype/mediatype_test.go b/pkg/util/mediatype/mediatype_test.go new file mode 100644 index 00000000..48d25d47 --- /dev/null +++ b/pkg/util/mediatype/mediatype_test.go @@ -0,0 +1,81 @@ +package mediatype + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +type testCase struct { + input string + valid bool + parsed *MediaType +} + +var testCases = []testCase{ + { // simple + input: "application/json", + valid: true, + parsed: &MediaType{ + Type: "application", + Subtype: "json", + Parameters: map[string]string{}, + }, + }, + { // single tag + input: "text/plain; proto=org.some.Message", + valid: true, + parsed: &MediaType{ + Type: "text", + Subtype: "plain", + Parameters: map[string]string{ + "proto": "org.some.Message", + }, + }, + }, + { // media type with suffix + input: "application/protobuf+json; charset=utf-8", + valid: true, + parsed: &MediaType{ + Type: "application", + Subtype: "protobuf", + Suffix: "json", + Parameters: map[string]string{ + "charset": "utf-8", + }, + 
}, + }, + { // multiple tags + input: "application/vnd.google.protobuf; proto=com.example.SomeMessage; zoo=bar", + valid: true, + parsed: &MediaType{ + Type: "application", + Subtype: "vnd.google.protobuf", + Parameters: map[string]string{ + "proto": "com.example.SomeMessage", + "zoo": "bar", + }, + }, + }, +} + +func TestParse(t *testing.T) { + for i, testCase := range testCases { + t.Run(fmt.Sprintf("test_%d", i), func(t *testing.T) { + // Test parsing + mt, err := Parse(testCase.input) + if !testCase.valid { + assert.Error(t, err) + assert.Nil(t, mt) + return + } + assert.NoError(t, err) + assert.EqualValues(t, testCase.parsed, mt) + + // Test formatting (assuming that the input is correctly formatted) + assert.Equal(t, testCase.input, testCase.parsed.String()) + assert.Equal(t, testCase.input, mt.String()) + }) + } +} diff --git a/pkg/util/util.go b/pkg/util/util.go index 9212dc59..a44cc0fe 100644 --- a/pkg/util/util.go +++ b/pkg/util/util.go @@ -2,13 +2,17 @@ package util import ( "encoding/json" + "errors" "fmt" "sync" "sync/atomic" + "testing" "time" + "github.com/golang/protobuf/proto" "github.com/satori/go.uuid" "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" ) // UID generates a unique id @@ -113,3 +117,39 @@ func LogIfError(err error) { logrus.Error(err) } } + +func AssertProtoEqual(t *testing.T, expected, actual proto.Message) { + assert.True(t, proto.Equal(expected, actual), "expected: %v, actual: %v", expected, actual) +} + +// Numeric is a representation +type Number struct { + val float64 // Fix loss of precision in uint64 and int64 +} + +func (n Number) Value() interface{} { + // TODO return original type + return n.val +} + +func ToNumber(val interface{}) (Number, error) { + switch t := val.(type) { + case float64: + return Number{val: t}, nil + case float32: + return Number{val: float64(t)}, nil + case int: + return Number{val: float64(t)}, nil + case int32: + return Number{val: float64(t)}, nil + case int16: + return Number{val: float64(t)}, nil + case int64: + return Number{val: float64(t)}, nil + case int8: + return Number{val: float64(t)}, nil + default: + return Number{}, errors.New("not a supported number (int, int8, int16, int32, int64, float32, " + + "and float64)") + } +} diff --git a/pkg/version/version.gen.go b/pkg/version/version.gen.go index 37427511..20ddf1c5 100644 --- a/pkg/version/version.gen.go +++ b/pkg/version/version.gen.go @@ -7,14 +7,14 @@ const ( dateFormat string = time.RFC1123Z // Git commit (e.g. 
1b4716ab84903b2e477135a3dc5afdb07f685cb7) - GitCommit string = "75c8e3a498cb5f1928275ab901031f74baaf05b9" + GitCommit string = "cd0b48b8da2df8725ca8461791123e6733984024" // Version contains a (potentially) human-readable version // For example 1.1.0 or 1b4716ab84903b2e477135a3dc5afdb07f685cb7 - Version string = "75c8e3a498cb5f1928275ab901031f74baaf05b9" + Version string = "cd0b48b8da2df8725ca8461791123e6733984024" // gitDate is a date in RFC1123Z format - gitDate string = "Tue, 24 Jul 2018 21:35:17 +0200" + gitDate string = "Tue, 02 Oct 2018 17:28:16 +0200" ) var ( diff --git a/test/e2e/buildtest.sh b/test/e2e/buildtest.sh index 5ce1c16c..3345f5bf 100755 --- a/test/e2e/buildtest.sh +++ b/test/e2e/buildtest.sh @@ -13,7 +13,7 @@ DOCKER_REPO=gcr.io/fission-ci WORKFLOWS_ENV_IMAGE=${DOCKER_REPO}/workflow-env WORKFLOWS_BUILD_ENV_IMAGE=${DOCKER_REPO}/workflow-build-env WORKFLOWS_BUNDLE_IMAGE=${DOCKER_REPO}/fission-workflows-bundle -TAG=test +TAG=ci-test NS=fission NS_FUNCTION=fission-function NS_BUILDER=fission-builder @@ -59,6 +59,7 @@ cleanup_fission_workflows ${fissionWorkflowsHelmId} || true # # Build docker images emph "Building images..." +echo "Repo: ${DOCKER_REPO}, Tag: ${TAG}" bash ${ROOT}/build/docker.sh ${DOCKER_REPO} ${TAG} # Ensure cli is in path diff --git a/test/e2e/install-clients.sh b/test/e2e/install-clients.sh index ac708b37..b8c44a7c 100755 --- a/test/e2e/install-clients.sh +++ b/test/e2e/install-clients.sh @@ -5,19 +5,19 @@ set -euo pipefail . $(dirname $0)/utils.sh BIN_DIR=${BIN_DIR:-/tmp/fission-workflow-ci/bin} -HELM_VERSION=${HELM_VERSION:-2.8.2} -KUBECTL_VERSION=${KUBECTL_VERSION:-1.9.6} +HELM_VERSION=${HELM_VERSION:-2.11.0} FISSION_VERSION=${FISSION_VERSION:-0.10.0} # Install kubectl -if ! kubectl version -c 2>/dev/null | grep ${KUBECTL_VERSION} >/dev/null; then - emph "Installing kubectl ${KUBECTL_VERSION} to ${BIN_DIR}/kubectl..." - curl -sLO https://storage.googleapis.com/kubernetes-release/release/v${KUBECTL_VERSION}/bin/linux/amd64/kubectl - chmod +x ./kubectl - mv -f kubectl ${BIN_DIR}/kubectl -else - emph "Kubectl ${KUBECTL_VERSION} already present." +if ! kubectl version ; then + sudo apt-get install -y apt-transport-https + curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add - + sudo touch /etc/apt/sources.list.d/kubernetes.list + echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee -a /etc/apt/sources.list.d/kubernetes.list + sudo apt-get update + sudo apt-get install -y kubectl fi +emph "Using kubectl $(kubectl version --client --short) already present." 
mkdir -p ${HOME}/.kube which kubectl diff --git a/test/e2e/tests/test_inputs.sh b/test/e2e/tests/test_inputs.sh index e67214a1..c2bcad17 100755 --- a/test/e2e/tests/test_inputs.sh +++ b/test/e2e/tests/test_inputs.sh @@ -2,18 +2,18 @@ set -exuo pipefail +FN_NAME=inputs EXAMPLE_DIR=$(dirname $0)/../../../examples/misc cleanup() { - fission fn delete --name inputs + fission fn delete --name ${FN_NAME} } trap cleanup EXIT -fission fn create --name inputs --env workflow --src ${EXAMPLE_DIR}/inputs.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands -fission fn test --name inputs -b 'foobar' -H 'HEADER_KEY: HEADER_VAL' --method PUT \ +fission fn create --name ${FN_NAME} --env workflow --src ${EXAMPLE_DIR}/inputs.wf.yaml +fission fn test --name ${FN_NAME} -b 'foobar' -H 'HEADER_KEY: HEADER_VAL' -H 'Content-Type: text/plain' --method PUT \ | tee /dev/tty \ - | grep -i Header_Val \ + | grep -i Header_Key \ | grep HEADER_VAL \ | grep -i PUT \ | grep -q foobar \ No newline at end of file diff --git a/test/e2e/tests/test_whales.sh b/test/e2e/tests/test_whales.sh index 374d33c9..bdbf71c3 100755 --- a/test/e2e/tests/test_whales.sh +++ b/test/e2e/tests/test_whales.sh @@ -54,66 +54,55 @@ fission fn create --name whalesay --env binary --deploy ${WHALES_DIR}/whalesay.s fission fn create --name fortune --env binary --deploy ${WHALES_DIR}/fortune.sh # Ensure that functions are available -retry fission fn test --name fortune +retry fission fn test --name fortune > /dev/null # test 1: fortunewhale - simplest example echo "[Test 1]: fortunewhale" fission fn create --name fortunewhale --env workflow --src ${WHALES_DIR}/fortunewhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name fortunewhale | tee /dev/tty | grep -q "## ## ## ## ##" # test 2: echowhale - parses body input echo "[Test 2]: echowhale" fission fn create --name echowhale --env workflow --src ${WHALES_DIR}/echowhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands -fission fn test --name echowhale -b "Test plz ignore" | tee /dev/tty | grep -q "Test plz ignore" +fission fn test --name echowhale -b "Test plz ignore" -H "Content-Type: text/plain" | tee /dev/tty | grep -q "Test plz ignore" # test 3: metadata - parses metadata (headers, query params...) 
echo "[Test 3]: metadatawhale" fission fn create --name metadatawhale --env workflow --src ${WHALES_DIR}/metadatawhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name metadatawhale -H "Prefix: The test says:" | tee /dev/tty | grep -q "The test says:" # Test 4: nestedwhale - shows nesting workflows echo "[Test 4]: nestedwhale" fission fn create --name nestedwhale --env workflow --src ${WHALES_DIR}/nestedwhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name nestedwhale | tee /dev/tty | grep -q "## ## ## ## ##" # Test 5: maybewhale - shows of dynamic tasks echo "[Test 5]: maybewhale" fission fn create --name maybewhale --env workflow --src ${WHALES_DIR}/maybewhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name maybewhale | tee /dev/tty | grep -q "## ## ## ## ##" echo "[Test 6]: failwhale" fission fn create --name failwhale --env workflow --src ${WHALES_DIR}/failwhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name failwhale | tee /dev/tty | grep -q "all has failed" echo "[Test 7]: foreachwhale" fission fn create --name foreachwhale --env workflow --src ${WHALES_DIR}/foreachwhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name foreachwhale | tee /dev/tty | grep -q "[10,20,30,40,50]" echo "[Test 8]: switchwhale" fission fn create --name switchwhale --env workflow --src ${WHALES_DIR}/switchwhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name switchwhale -b 'hello' | tee /dev/tty | grep -q "world" fission fn test --name switchwhale -b 'foo' | tee /dev/tty | grep -q "bar" fission fn test --name switchwhale -b 'acme' | tee /dev/tty | grep -q "right..." echo "[Test 9]: whilewhale" fission fn create --name whilewhale --env workflow --src ${WHALES_DIR}/whilewhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name whilewhale | tee /dev/tty | grep -q "5" echo "[Test 10]: httpwhale" fission fn create --name httpwhale --env workflow --src ${WHALES_DIR}/httpwhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name httpwhale | tee /dev/tty | grep -q "## ## ## ## ##" echo "[Test 11]: scopedwhale" fission fn create --name scopedwhale --env workflow --src ${WHALES_DIR}/scopedwhale.wf.yaml -sleep 5 # TODO remove this once we can initiate synchronous commands fission fn test --name scopedwhale | tee /dev/tty | grep -q "## ## ## ## ##" \ No newline at end of file diff --git a/test/e2e/travis-setup.sh b/test/e2e/travis-setup.sh index 21ba6d37..a6032de7 100755 --- a/test/e2e/travis-setup.sh +++ b/test/e2e/travis-setup.sh @@ -52,7 +52,7 @@ echo "Setting up helm..." helm init printf "Waiting for Helm" -until helm list >/dev/null 2>&1 +until helm list do printf "." 
diff --git a/test/integration/bundle/bundle_test.go b/test/integration/bundle/bundle_test.go
index df5d2621..41696a02 100644
--- a/test/integration/bundle/bundle_test.go
+++ b/test/integration/bundle/bundle_test.go
@@ -3,6 +3,8 @@ package bundle
 import (
 	"context"
+	"encoding/json"
+	"fmt"
 	"os"
 	"strings"
 	"testing"
@@ -13,6 +15,7 @@ import (
 	"github.com/fission/fission-workflows/pkg/fnenv/native/builtin"
 	"github.com/fission/fission-workflows/pkg/types"
 	"github.com/fission/fission-workflows/pkg/types/typedvalues"
+	"github.com/fission/fission-workflows/pkg/util"
 	"github.com/fission/fission-workflows/test/integration"
 	"github.com/golang/protobuf/ptypes"
 	"github.com/golang/protobuf/ptypes/empty"
@@ -94,8 +97,8 @@ func TestWorkflowInvocation(t *testing.T) {
 		Tasks: map[string]*types.TaskSpec{
 			"fakeFinalTask": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("{$.Tasks.FirstTask.Output}"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("{$.Tasks.FirstTask.Output}"),
 				},
 				Requires: map[string]*types.TaskDependencyParameters{
 					"FirstTask": {},
@@ -103,8 +106,8 @@
 			},
 			"FirstTask": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("{$.Invocation.Inputs.default.toUpperCase()}"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("{$.Invocation.Inputs.default.toUpperCase()}"),
 				},
 			},
 		},
@@ -117,13 +120,13 @@
 	// Create invocation
 	expectedOutput := "Hello world!"
-	tv, err := typedvalues.Parse(expectedOutput)
-	etv, err := typedvalues.Parse(strings.ToUpper(expectedOutput))
+	tv, err := typedvalues.Wrap(expectedOutput)
+	etv, err := typedvalues.Wrap(strings.ToUpper(expectedOutput))
 	assert.NoError(t, err)
 	wiSpec := &types.WorkflowInvocationSpec{
 		WorkflowId: wfResp.Id,
-		Inputs: map[string]*types.TypedValue{
+		Inputs: map[string]*typedvalues.TypedValue{
 			types.InputMain: tv,
 		},
 	}
@@ -143,7 +146,7 @@
 	deadline := time.Now().Add(time.Duration(10) * time.Second)
 	tick := time.NewTicker(time.Duration(100) * time.Millisecond)
 	for ti := range tick.C {
-		invoc, err := wi.Get(ctx, &apiserver.WorkflowInvocationIdentifier{Id: wiId})
+		invoc, err := wi.Get(ctx, &types.ObjectMetadata{Id: wiId})
 		assert.NoError(t, err)
 		if invoc.Status.Finished() || ti.After(deadline) {
 			invocation = invoc
@@ -151,7 +154,7 @@
 			break
 		}
 	}
-	assert.Equal(t, wiSpec, invocation.Spec)
+	util.AssertProtoEqual(t, wiSpec, invocation.Spec)
 	assert.Equal(t, etv.Value, invocation.Status.Output.Value)
 	assert.True(t, invocation.Status.Successful())
 }
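Note: two smaller shifts show up in the hunks above: invocation lookups now take a generic *types.ObjectMetadata instead of the dedicated apiserver.WorkflowInvocationIdentifier, and generated protobuf structs are compared with util.AssertProtoEqual rather than reflect-based assert.Equal. The helper's body is not part of this diff; a hedged sketch of what such an assertion typically wraps (proto.Equal) might look like this:

```go
package util

import (
	"testing"

	"github.com/golang/protobuf/proto"
)

// AssertProtoEqual compares two protobuf messages semantically.
// Illustrative sketch only: the real helper lives in pkg/util and its
// implementation is not shown in this diff.
func AssertProtoEqual(t *testing.T, expected, actual proto.Message) {
	t.Helper()
	if !proto.Equal(expected, actual) {
		t.Errorf("proto messages differ:\nexpected: %v\nactual:   %v", expected, actual)
	}
}
```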
@@ -167,8 +170,8 @@ func TestDynamicWorkflowInvocation(t *testing.T) {
 		Tasks: map[string]*types.TaskSpec{
 			"fakeFinalTask": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("{$.Tasks.someConditionalTask.Output}"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("{$.Tasks.someConditionalTask.Output}"),
 				},
 				Requires: map[string]*types.TaskDependencyParameters{
 					"FirstTask": {},
@@ -177,24 +180,24 @@
 			},
 			"FirstTask": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("{$.Invocation.Inputs.default.toUpperCase()}"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("{$.Invocation.Inputs.default.toUpperCase()}"),
 				},
 			},
 			"someConditionalTask": {
 				FunctionRef: "if",
-				Inputs: map[string]*types.TypedValue{
-					builtin.IfInputCondition: typedvalues.MustParse("{$.Invocation.Inputs.default == 'FOO'}"),
-					builtin.IfInputThen: typedvalues.ParseTask(&types.TaskSpec{
+				Inputs: map[string]*typedvalues.TypedValue{
+					builtin.IfInputCondition: typedvalues.MustWrap("{$.Invocation.Inputs.default == 'FOO'}"),
+					builtin.IfInputThen: typedvalues.MustWrap(&types.TaskSpec{
 						FunctionRef: "noop",
-						Inputs: map[string]*types.TypedValue{
-							types.InputMain: typedvalues.MustParse("{'consequent: ' + $.Tasks.FirstTask.Output}"),
+						Inputs: map[string]*typedvalues.TypedValue{
+							types.InputMain: typedvalues.MustWrap("{'consequent: ' + $.Tasks.FirstTask.Output}"),
 						},
 					}),
-					builtin.IfInputElse: typedvalues.ParseTask(&types.TaskSpec{
+					builtin.IfInputElse: typedvalues.MustWrap(&types.TaskSpec{
 						FunctionRef: "noop",
-						Inputs: map[string]*types.TypedValue{
-							types.InputMain: typedvalues.MustParse("{'alternative: ' + $.Tasks.FirstTask.Output}"),
+						Inputs: map[string]*typedvalues.TypedValue{
+							types.InputMain: typedvalues.MustWrap("{'alternative: ' + $.Tasks.FirstTask.Output}"),
 						},
 					}),
 				},
@@ -210,10 +213,11 @@
 	assert.NotNil(t, wfResp)
 	assert.NotEmpty(t, wfResp.Id)
 
+	// Test with main input
 	wiSpec := &types.WorkflowInvocationSpec{
 		WorkflowId: wfResp.Id,
-		Inputs: map[string]*types.TypedValue{
-			types.InputMain: typedvalues.MustParse("foo"),
+		Inputs: map[string]*typedvalues.TypedValue{
+			types.InputMain: typedvalues.MustWrap("foo"),
 		},
 	}
 	wfi, err := wi.InvokeSync(ctx, wiSpec)
@@ -222,8 +226,23 @@
 	assert.True(t, wfi.Status.Finished())
 	assert.True(t, wfi.Status.Successful())
 	assert.Equal(t, 4, len(wfi.Status.Tasks))
+	output := typedvalues.MustUnwrap(wfi.Status.Output)
+	assert.Equal(t, "alternative: FOO", output)
 
-	output := typedvalues.MustFormat(wfi.Status.Output)
+	// Test with body input
+	wiSpec = &types.WorkflowInvocationSpec{
+		WorkflowId: wfResp.Id,
+		Inputs: map[string]*typedvalues.TypedValue{
+			types.InputBody: typedvalues.MustWrap("foo"),
+		},
+	}
+	wfi, err = wi.InvokeSync(ctx, wiSpec)
+	assert.NoError(t, err)
+	assert.NotEmpty(t, wfi.Status.DynamicTasks)
+	assert.True(t, wfi.Status.Finished())
+	assert.True(t, wfi.Status.Successful())
+	assert.Equal(t, 4, len(wfi.Status.Tasks))
+	output = typedvalues.MustUnwrap(wfi.Status.Output)
 	assert.Equal(t, "alternative: FOO", output)
 }
@@ -238,20 +257,20 @@ func TestInlineWorkflowInvocation(t *testing.T) {
 		Tasks: map[string]*types.TaskSpec{
 			"nestedTask": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					builtin.NoopInput: typedvalues.ParseWorkflow(&types.WorkflowSpec{
+				Inputs: map[string]*typedvalues.TypedValue{
+					builtin.NoopInput: typedvalues.MustWrap(&types.WorkflowSpec{
 						OutputTask: "b",
 						Tasks: map[string]*types.TaskSpec{
 							"a": {
 								FunctionRef: "noop",
-								Inputs: map[string]*types.TypedValue{
-									types.InputMain: typedvalues.MustParse("inner1"),
+								Inputs: map[string]*typedvalues.TypedValue{
+									types.InputMain: typedvalues.MustWrap("inner1"),
 								},
 							},
 							"b": {
 								FunctionRef: "noop",
-								Inputs: map[string]*types.TypedValue{
-									types.InputMain: typedvalues.MustParse("{output('a')}"),
+								Inputs: map[string]*typedvalues.TypedValue{
+									types.InputMain: typedvalues.MustWrap("{output('a')}"),
 								},
 								Requires: map[string]*types.TaskDependencyParameters{
 									"a": nil,
@@ -263,8 +282,8 @@ func TestInlineWorkflowInvocation(t *testing.T) {
 			},
 			"finalTask": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("output('nestedTask')"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("output('nestedTask')"),
 				},
 				Requires: map[string]*types.TaskDependencyParameters{
 					"nestedTask": {},
@@ -288,7 +307,7 @@
 	assert.True(t, wfi.Status.Successful())
 	assert.Equal(t, 3, len(wfi.Status.Tasks))
 
-	_, err = typedvalues.Format(wfi.Status.Output)
+	_, err = typedvalues.Unwrap(wfi.Status.Output)
 	assert.NoError(t, err)
 }
@@ -305,7 +324,7 @@ func TestParallelInvocation(t *testing.T) {
 	taskSpec := &types.TaskSpec{
 		FunctionRef: builtin.Sleep,
-		Inputs:      typedvalues.Input("2s"),
+		Inputs:      types.Input("25ms"),
 	}
 	wfSpec.AddTask("p1", taskSpec)
@@ -315,7 +334,7 @@
 	wfSpec.AddTask("p5", taskSpec)
 	wfSpec.AddTask("await", &types.TaskSpec{
 		FunctionRef: builtin.Sleep,
-		Inputs:      typedvalues.Input("1s"),
+		Inputs:      types.Input("10ms"),
 		Requires:    types.Require("p1", "p2", "p3", "p4", "p5"),
 	})
 	wfSpec.SetOutput("await")
@@ -362,36 +381,36 @@ func TestLongRunningWorkflowInvocation(t *testing.T) {
 		Tasks: types.Tasks{
 			"longSleep": {
 				FunctionRef: builtin.Sleep,
-				Inputs:      typedvalues.Input("5s"),
+				Inputs:      types.Input("5s"),
 			},
 			"afterSleep": {
 				FunctionRef: builtin.Noop,
-				Inputs:      typedvalues.Input("{ '4' }"),
+				Inputs:      types.Input("{ '4' }"),
 				Requires:    types.Require("longSleep"),
 			},
 			"parallel1": {
 				FunctionRef: builtin.Noop,
-				Inputs:      typedvalues.Input("{ '1' }"),
+				Inputs:      types.Input("{ '1' }"),
 				Requires:    types.Require("longSleep"),
 			},
 			"parallel2": {
 				FunctionRef: builtin.Noop,
-				Inputs:      typedvalues.Input("{ output('parallel1') + '2' }"),
+				Inputs:      types.Input("{ output('parallel1') + '2' }"),
 				Requires:    types.Require("parallel1"),
 			},
 			"parallel3": {
 				FunctionRef: builtin.Noop,
-				Inputs:      typedvalues.Input("{ output('parallel2') + '3' }"),
+				Inputs:      types.Input("{ output('parallel2') + '3' }"),
 				Requires:    types.Require("parallel2"),
 			},
 			"merge": {
 				FunctionRef: builtin.Noop,
-				Inputs:      typedvalues.Input("{ output('parallel3') + output('afterSleep') }"),
+				Inputs:      types.Input("{ output('parallel3') + output('afterSleep') }"),
 				Requires:    types.Require("parallel3", "afterSleep"),
 			},
 			"final": {
 				FunctionRef: builtin.Noop,
-				Inputs:      typedvalues.Input("{ output('merge') }"),
+				Inputs:      types.Input("{ output('merge') }"),
 				Requires:    types.Require("merge"),
 			},
 		},
@@ -410,7 +429,7 @@
 	assert.True(t, wfi.Status.Successful())
 	assert.Equal(t, len(wfSpec.Tasks), len(wfi.Status.Tasks))
 
-	output := typedvalues.MustFormat(wfi.Status.Output)
+	output := typedvalues.MustUnwrap(wfi.Status.Output)
 	assert.Equal(t, "1234", output)
 }
@@ -424,11 +443,11 @@ func TestWorkflowCancellation(t *testing.T) {
 		Tasks: types.Tasks{
 			"longSleep": {
 				FunctionRef: builtin.Sleep,
-				Inputs:      typedvalues.Input("250ms"),
+				Inputs:      types.Input("250ms"),
 			},
 			"longSleep2": {
 				FunctionRef: builtin.Sleep,
-				Inputs:      typedvalues.Input("5s"),
+				Inputs:      types.Input("5s"),
 				Requires:    types.Require("longSleep"),
 			},
 		},
@@ -458,7 +477,7 @@
 	})
 	assert.NoError(t, err)
 	wfiID := wfis.Invocations[0]
-	wfi, err := wi.Get(ctx, &apiserver.WorkflowInvocationIdentifier{Id: wfiID})
+	wfi, err := wi.Get(ctx, &types.ObjectMetadata{Id: wfiID})
 	assert.NoError(t, err)
 	assert.False(t, wfi.GetStatus().Successful())
 	assert.True(t, wfi.GetStatus().Finished())
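Note: the fixture helpers move as well: single-value inputs are now built with types.Input instead of typedvalues.Input, alongside the existing types.Require for task dependencies. A brief sketch of the cancellation fixture above rebuilt with the new helpers (the OutputTask value is an assumption for illustration; it is not visible in the hunk):

```go
package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/fnenv/native/builtin"
	"github.com/fission/fission-workflows/pkg/types"
)

// cancellationSpec mirrors the TestWorkflowCancellation tasks above: a short
// sleep followed by a long sleep, so the invocation can be cancelled mid-run.
func cancellationSpec() *types.WorkflowSpec {
	return &types.WorkflowSpec{
		ApiVersion: types.WorkflowAPIVersion,
		OutputTask: "longSleep2", // assumed output task; not shown in the diff
		Tasks: types.Tasks{
			"longSleep": {
				FunctionRef: builtin.Sleep,
				Inputs:      types.Input("250ms"), // was typedvalues.Input("250ms")
			},
			"longSleep2": {
				FunctionRef: builtin.Sleep,
				Inputs:      types.Input("5s"),
				Requires:    types.Require("longSleep"),
			},
		},
	}
}

func main() {
	fmt.Println(len(cancellationSpec().Tasks)) // 2
}
```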
@@ -495,7 +514,7 @@ func TestInvocationFailed(t *testing.T) {
 		Tasks: types.Tasks{
 			"task1": {
 				FunctionRef: builtin.Fail,
-				Inputs:      typedvalues.Input(msg),
+				Inputs:      types.Input(msg),
 			},
 		},
 	}
@@ -522,7 +541,7 @@ func TestInvocationWithForcedOutputs(t *testing.T) {
 	cl, wi := setup()
 
 	// Test workflow creation
-	output := typedvalues.MustParse("overrided output")
+	output := typedvalues.MustWrap("overrided output")
 	wfSpec := &types.WorkflowSpec{
 		ApiVersion: types.WorkflowAPIVersion,
 		OutputTask: "t3",
@@ -534,25 +553,25 @@
 			},
 			"t2": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("{$.Tasks.t1.Output}"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("{$.Tasks.t1.Output}"),
 				},
 				Requires: map[string]*types.TaskDependencyParameters{
 					"t1": {},
 				},
 				// Self-referencing output
-				Output: typedvalues.MustParse("{$.Tasks.t2.Output}"),
+				Output: typedvalues.MustWrap("{$.Tasks.t2.Output}"),
 			},
 			"t3": {
 				FunctionRef: "noop",
-				Inputs: map[string]*types.TypedValue{
-					types.InputMain: typedvalues.MustParse("initial output 2"),
+				Inputs: map[string]*typedvalues.TypedValue{
+					types.InputMain: typedvalues.MustWrap("initial output 2"),
 				},
 				Requires: map[string]*types.TaskDependencyParameters{
 					"t2": {},
 				},
 				// Referencing output of another task
-				Output: typedvalues.MustParse("{$.Tasks.t2.Output}"),
+				Output: typedvalues.MustWrap("{$.Tasks.t2.Output}"),
 			},
 		},
 	}
@@ -562,9 +581,72 @@
 		WorkflowId: wfID.GetId(),
 	})
 	assert.NoError(t, err)
-	assert.Equal(t, string(output.GetValue()), string(wfi.GetStatus().GetTasks()["t1"].GetStatus().GetOutput().GetValue()))
-	assert.Equal(t, string(output.GetValue()), string(wfi.GetStatus().GetTasks()["t2"].GetStatus().GetOutput().GetValue()))
-	assert.Equal(t, string(output.GetValue()), string(wfi.GetStatus().GetOutput().GetValue()))
+	util.AssertProtoEqual(t, output.GetValue(), wfi.GetStatus().GetTasks()["t1"].GetStatus().GetOutput().GetValue())
+	util.AssertProtoEqual(t, output.GetValue(), wfi.GetStatus().GetTasks()["t2"].GetStatus().GetOutput().GetValue())
+	util.AssertProtoEqual(t, output.GetValue(), wfi.GetStatus().GetOutput().GetValue())
+}
+
+func TestDeepRecursion(t *testing.T) {
+	ctx := context.Background()
+	conn, err := grpc.Dial(gRPCAddress, grpc.WithInsecure())
+	if err != nil {
+		panic(err)
+	}
+	cl := apiserver.NewWorkflowAPIClient(conn)
+	wi := apiserver.NewWorkflowInvocationAPIClient(conn)
+
+	// Test workflow creation
+	wfSpec := &types.WorkflowSpec{
+		ApiVersion: types.WorkflowAPIVersion,
+		OutputTask: "mainTask",
+		Tasks: map[string]*types.TaskSpec{
+			"mainTask": { // layer 1
+				FunctionRef: builtin.Noop,
+				Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
+					types.InputMain: &types.TaskSpec{ // layer 2
+						FunctionRef: builtin.Noop,
+						Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
+							types.InputMain: &types.TaskSpec{ // layer 3
+								FunctionRef: builtin.Noop,
+								Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
+									types.InputMain: &types.TaskSpec{ // layer 4
+										FunctionRef: builtin.Noop,
+										Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
+											types.InputMain: "foo",
+										}),
+									},
+								}),
+							},
+						}),
+					},
+				}),
+			},
+		},
+	}
+
+	wfResp, err := cl.Create(ctx, wfSpec)
+	defer cl.Delete(ctx, wfResp)
+
+	assert.NoError(t, err)
+	assert.NotNil(t, wfResp)
+	assert.NotEmpty(t, wfResp.Id)
+
+	wiSpec := &types.WorkflowInvocationSpec{
+		WorkflowId: wfResp.Id,
+		Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
+			types.InputMain: "foo",
+		}),
+	}
+	wfi, err := wi.InvokeSync(ctx, wiSpec)
+	assert.NoError(t, err)
+	assert.NotEmpty(t, wfi.Status.DynamicTasks)
+	assert.True(t, wfi.Status.Finished())
+	assert.True(t, wfi.Status.Successful())
+
+	output := typedvalues.MustUnwrap(wfi.Status.Output)
+	d, _ := json.Marshal(output)
+	fmt.Println(string(d))
+	assert.Equal(t, typedvalues.MustUnwrap(wiSpec.Inputs[types.InputMain]), output)
 }
 
 func TestDeeplyNestedInvocation(t *testing.T) {
@@ -584,12 +666,12 @@
 		"CountUntil": {
 			FunctionRef: builtin.While,
 			Inputs: types.Inputs{
-				builtin.WhileInputExpr:  typedvalues.MustParse("{ !task().Inputs._prev || task().Inputs._prev < 5 }"),
-				builtin.WhileInputLimit: typedvalues.MustParse(10),
-				builtin.WhileInputAction: typedvalues.MustParse(&types.TaskSpec{
+				builtin.WhileInputExpr:  typedvalues.MustWrap("{ !task().Inputs._prev || task().Inputs._prev < 5 }"),
+				builtin.WhileInputLimit: typedvalues.MustWrap(10),
+				builtin.WhileInputAction: typedvalues.MustWrap(&types.TaskSpec{
 					FunctionRef: builtin.Noop,
 					Inputs: types.Inputs{
-						builtin.NoopInput: typedvalues.MustParse("{ (task().Inputs._prev || 0) + 1 }"),
+						builtin.NoopInput: typedvalues.MustWrap("{ (task().Inputs._prev || 0) + 1 }"),
 					},
 				}),
 			},
@@ -612,7 +694,7 @@
 	assert.True(t, wfi.Status.Finished())
 	assert.True(t, wfi.Status.Successful())
 
-	output := typedvalues.MustFormat(wfi.Status.Output)
+	output := typedvalues.MustUnwrap(wfi.Status.Output)
 	assert.Equal(t, float64(5), output)
 }
diff --git a/test/integration/fission/runtime_test.go b/test/integration/fission/runtime_test.go
index 49493853..a7ebb9ef 100644
--- a/test/integration/fission/runtime_test.go
+++ b/test/integration/fission/runtime_test.go
@@ -106,14 +106,14 @@ func TestFnenvInvoke(t *testing.T) {
 		TaskId:       "fooTask",
 		InvocationId: "fooInvocation",
 		Inputs: types.Inputs{
-			"default": typedvalues.MustParse(body),
-			"headers": typedvalues.MustParse(map[string]interface{}{
+			"default": typedvalues.MustWrap(body),
+			"headers": typedvalues.MustWrap(map[string]interface{}{
 				headerKey: headerVal,
 			}),
 		},
 		FnRef: &fnref,
 	})
-	output := typedvalues.MustFormat(result.Output)
+	output := typedvalues.MustUnwrap(result.Output)
 	assert.NoError(t, err)
 	assert.True(t, result.Finished())
 	assert.NotEmpty(t, output)
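Note: the new TestDeepRecursion fixture builds its nested inputs with typedvalues.MustWrapMapTypedValue, which wraps every value of a map[string]interface{} in one call. A one-level sketch of the same pattern, under the same signatures used in the hunks above:

```go
package main

import (
	"fmt"

	"github.com/fission/fission-workflows/pkg/fnenv/native/builtin"
	"github.com/fission/fission-workflows/pkg/types"
	"github.com/fission/fission-workflows/pkg/types/typedvalues"
)

func main() {
	// A noop task whose main input is itself a TaskSpec, wrapped in one call;
	// TestDeepRecursion repeats this nesting four layers deep.
	inner := &types.TaskSpec{
		FunctionRef: builtin.Noop,
		Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
			types.InputMain: "foo",
		}),
	}
	outer := &types.TaskSpec{
		FunctionRef: builtin.Noop,
		Inputs: typedvalues.MustWrapMapTypedValue(map[string]interface{}{
			types.InputMain: inner,
		}),
	}

	// Unwrapping the outer input recovers the nested spec again.
	fmt.Printf("%T\n", typedvalues.MustUnwrap(outer.Inputs[types.InputMain]))
}
```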