Skip to content

Commit

Permalink
chore: migrate verb schema parsing
Browse files Browse the repository at this point in the history
  • Loading branch information
worstell committed Jun 8, 2024
1 parent 6f08f0f commit 3333831
Show file tree
Hide file tree
Showing 7 changed files with 361 additions and 203 deletions.
178 changes: 1 addition & 177 deletions go-runtime/compile/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,7 @@ import (
)

var (
fset = token.NewFileSet()
contextIfaceType = once(func() *types.Interface {
return mustLoadRef("context", "Context").Type().Underlying().(*types.Interface) //nolint:forcetypeassert
})
errorIFaceType = once(func() *types.Interface {
return mustLoadRef("builtin", "error").Type().Underlying().(*types.Interface) //nolint:forcetypeassert
})
fset = token.NewFileSet()

ftlCallFuncPath = "github.com/TBD54566975/ftl/go-runtime/ftl.Call"
ftlFSMFuncPath = "github.com/TBD54566975/ftl/go-runtime/ftl.FSM"
Expand Down Expand Up @@ -132,9 +126,6 @@ func legacyExtractModuleSchema(dir string, sch *schema.Schema, out *analyzers.Ex
case *ast.File:
visitFile(pctx, node)

case *ast.FuncDecl:
visitFuncDecl(pctx, node)

case *ast.GenDecl:
visitGenDecl(pctx, node)

Expand Down Expand Up @@ -728,58 +719,6 @@ func visitFile(pctx *parseContext, node *ast.File) {
pctx.module.Comments = parseComments(node.Doc)
}

func isType[T types.Type](t types.Type) bool {
if _, ok := t.(*types.Named); ok {
t = t.Underlying()
}
_, ok := t.(T)
return ok
}

// checkSignature validates that a verb function has the expected shape:
//
//	func(ctx context.Context[, req Struct]) ([resp Struct,] error)
//
// All violations are accumulated in pctx.errors rather than returned. The
// request parameter and response result are returned when present so the
// caller can extract their schema types.
func checkSignature(pctx *parseContext, node *ast.FuncDecl, sig *types.Signature) (req, resp optional.Option[*types.Var]) {
	params := sig.Params()
	results := sig.Results()

	if params.Len() > 2 {
		pctx.errors.add(errorf(node, "must have at most two parameters (context.Context, struct)"))
	}
	if params.Len() == 0 {
		pctx.errors.add(errorf(node, "first parameter must be context.Context"))
	} else if !types.AssertableTo(contextIfaceType(), params.At(0).Type()) {
		pctx.errors.add(tokenErrorf(params.At(0).Pos(), params.At(0).Name(), "first parameter must be of type context.Context but is %s", params.At(0).Type()))
	}

	if params.Len() == 2 {
		if !isType[*types.Struct](params.At(1).Type()) {
			pctx.errors.add(tokenErrorf(params.At(1).Pos(), params.At(1).Name(), "second parameter must be a struct but is %s", params.At(1).Type()))
		}
		if params.At(1).Type().String() == ftlUnitTypePath {
			pctx.errors.add(tokenErrorf(params.At(1).Pos(), params.At(1).Name(), "second parameter must not be ftl.Unit"))
		}

		req = optional.Some(params.At(1))
	}

	if results.Len() > 2 {
		pctx.errors.add(errorf(node, "must have at most two results (struct, error)"))
	}
	if results.Len() == 0 {
		pctx.errors.add(errorf(node, "must at least return an error"))
	} else if last := results.At(results.Len() - 1); !types.AssertableTo(errorIFaceType(), last.Type()) {
		// Bug fix: report the offending (last) result's type; previously this
		// printed results.At(0).Type() while checking the last result.
		pctx.errors.add(tokenErrorf(last.Pos(), last.Name(), "must return an error but is %s", last.Type()))
	}
	if results.Len() == 2 {
		if !isType[*types.Struct](results.At(0).Type()) {
			pctx.errors.add(tokenErrorf(results.At(0).Pos(), results.At(0).Name(), "first result must be a struct but is %s", results.At(0).Type()))
		}
		if results.At(1).Type().String() == ftlUnitTypePath {
			pctx.errors.add(tokenErrorf(results.At(1).Pos(), results.At(1).Name(), "second result must not be ftl.Unit"))
		}
		resp = optional.Some(results.At(0))
	}
	return req, resp
}

// goPosToSchemaPos converts a go/token position into a schema.Position,
// resolving it against the package-level fset.
func goPosToSchemaPos(pos token.Pos) schema.Position {
	p := fset.Position(pos)
	return schema.Position{Filename: p.Filename, Line: p.Line, Column: p.Column, Offset: p.Offset}
Expand Down Expand Up @@ -1102,121 +1041,6 @@ func maybeErrorOnInvalidEnumMixing(pctx *parseContext, node *ast.ValueSpec, enum
}
}

// visitFuncDecl inspects a top-level function declaration and, when its doc
// comment carries a verb-producing directive (verb, ingress, cron job, or
// subscriber), registers it on the module as a schema.Verb. It returns nil
// when the function is not a verb or fails validation; all problems are
// accumulated in pctx.errors rather than aborting the walk.
func visitFuncDecl(pctx *parseContext, node *ast.FuncDecl) (verb *schema.Verb) {
	// Directives live in the doc comment; without one this cannot be a verb.
	if node.Doc == nil {
		return nil
	}
	directives, err := parseDirectives(node, fset, node.Doc)
	if err != nil {
		pctx.errors.add(err)
	}
	var metadata []schema.Metadata
	isVerb := false
	isExported := false
	// Translate each recognized directive into verb metadata and flags.
	for _, dir := range directives {
		switch dir := dir.(type) {
		case *directiveVerb:
			isVerb = true
			isExported = dir.Export
			// The first verb seen names the module; any later verb must come
			// from the same package.
			if pctx.module.Name == "" {
				pctx.module.Name = pctx.pkg.Name
			} else if pctx.module.Name != pctx.pkg.Name {
				pctx.errors.add(errorf(node, "function verb directive must be in the module package"))
			}
		case *directiveIngress:
			// Ingress verbs are always exported; the ingress type defaults to "http".
			isVerb = true
			isExported = true
			typ := dir.Type
			if typ == "" {
				typ = "http"
			}
			metadata = append(metadata, &schema.MetadataIngress{
				Pos:    dir.Pos,
				Type:   typ,
				Method: dir.Method,
				Path:   dir.Path,
			})
		case *directiveCronJob:
			// Cron verbs are never exported.
			isVerb = true
			isExported = false
			metadata = append(metadata, &schema.MetadataCronJob{
				Pos:  dir.Pos,
				Cron: dir.Cron,
			})
		case *directiveRetry:
			// Retry alone does not make the function a verb; it only adds metadata.
			metadata = append(metadata, &schema.MetadataRetry{
				Pos:        dir.Pos,
				Count:      dir.Count,
				MinBackoff: dir.MinBackoff,
				MaxBackoff: dir.MaxBackoff,
			})
		case *directiveSubscriber:
			isVerb = true
			metadata = append(metadata, &schema.MetadataSubscriber{
				Pos:  dir.Pos,
				Name: dir.Name,
			})
		case *directiveData, *directiveEnum, *directiveTypeAlias, *directiveExport:
			// These directives apply to types/values, not function declarations.
			pctx.errors.add(errorf(node, "unexpected directive %T", dir))
		}
	}
	if !isVerb {
		return nil
	}

	// Reject verbs whose Go name collides with an already-registered decl.
	for _, name := range pctx.nativeNames {
		if name == node.Name.Name {
			pctx.errors.add(noEndColumnErrorf(node.Pos(), "duplicate verb name %q", node.Name.Name))
			return nil
		}
	}

	fnt := pctx.pkg.TypesInfo.Defs[node.Name].(*types.Func) //nolint:forcetypeassert
	sig := fnt.Type().(*types.Signature)                    //nolint:forcetypeassert
	if sig.Recv() != nil {
		pctx.errors.add(errorf(node, "ftl:verb cannot be a method"))
		return nil
	}
	params := sig.Params()
	results := sig.Results()
	reqt, respt := checkSignature(pctx, node, sig)

	// A missing request or response type defaults to the unit type.
	var req optional.Option[schema.Type]
	if reqt.Ok() {
		req = visitType(pctx, node.Pos(), params.At(1).Type(), isExported)
	} else {
		req = optional.Some[schema.Type](&schema.Unit{})
	}
	var resp optional.Option[schema.Type]
	if respt.Ok() {
		resp = visitType(pctx, node.Pos(), results.At(0).Type(), isExported)
	} else {
		resp = optional.Some[schema.Type](&schema.Unit{})
	}
	reqV, reqOk := req.Get()
	resV, respOk := resp.Get()
	if !reqOk {
		pctx.errors.add(tokenErrorf(params.At(1).Pos(), params.At(1).Name(),
			"unsupported request type %q", params.At(1).Type()))
	}
	if !respOk {
		pctx.errors.add(tokenErrorf(results.At(0).Pos(), results.At(0).Name(),
			"unsupported response type %q", results.At(0).Type()))
	}
	verb = &schema.Verb{
		Pos:      goPosToSchemaPos(node.Pos()),
		Comments: parseComments(node.Doc),
		Export:   isExported,
		Name:     strcase.ToLowerCamel(node.Name.Name),
		Request:  reqV,
		Response: resV,
		Metadata: metadata,
	}
	// Remember the original Go identifier so later passes can map back to source.
	pctx.nativeNames[verb] = node.Name.Name
	pctx.module.Decls = append(pctx.module.Decls, verb)
	return verb
}

func parseComments(doc *ast.CommentGroup) []string {
comments := []string{}
if doc := doc.Text(); doc != "" {
Expand Down
76 changes: 69 additions & 7 deletions go-runtime/schema/analyzers/common.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,15 @@ import (
"github.com/TBD54566975/ftl/backend/schema"
"github.com/TBD54566975/ftl/backend/schema/strcase"
"github.com/TBD54566975/golang-tools/go/analysis"
"github.com/TBD54566975/golang-tools/go/analysis/passes/inspect"
"github.com/TBD54566975/golang-tools/go/ast/astutil"
"github.com/alecthomas/types/optional"
"golang.org/x/exp/maps"
)

var (
aliasFieldTag = "json"
ftlUnitTypePath = "github.com/TBD54566975/ftl/go-runtime/ftl.Unit"
)

type DiagnosticCategory string
Expand All @@ -40,16 +47,29 @@ func (e DiagnosticCategory) ToErrorLevel() schema.ErrorLevel {

type NativeNames map[schema.Node]string

var (
aliasFieldTag = "json"
)

// TODO: maybe don't need NativeNames from extractors once we process refs/native names as an initial analyzer?
type result struct {
// extractorResult is the per-extractor output consumed by the finalizer: the
// decls the extractor produced plus a mapping from each decl back to its
// native Go identifier.
type extractorResult struct {
	decls       []schema.Decl // extracted schema declarations
	nativeNames NativeNames   // decl -> original Go name
}

// newExtractorResult bundles an extractor's own decls and native names
// together with any transitive decls recorded as object facts on the pass.
func newExtractorResult(pass *analysis.Pass, decls []schema.Decl, nativeNames NativeNames) extractorResult {
	transitive, names := getTransitiveDecls(pass)
	maps.Copy(names, nativeNames)
	allDecls := append(decls, transitive...)
	return extractorResult{
		decls:       allDecls,
		nativeNames: names,
	}
}

// newExtractor constructs an analysis.Analyzer that extracts the named kind
// of schema element, exporting facts of the given type for transitive decls.
func newExtractor(name string, run func(*analysis.Pass) (interface{}, error), factType analysis.Fact) *analysis.Analyzer {
	doc := fmt.Sprintf("extracts %s schema elements to the module", name)
	analyzer := &analysis.Analyzer{
		Name:             name,
		Doc:              doc,
		Run:              run,
		Requires:         []*analysis.Analyzer{inspect.Analyzer, Initializer},
		ResultType:       reflect.TypeFor[extractorResult](),
		RunDespiteErrors: true,
		FactTypes:        []analysis.Fact{factType},
	}
	return analyzer
}

func extractComments(doc *ast.CommentGroup) []string {
comments := []string{}
if doc := doc.Text(); doc != "" {
Expand Down Expand Up @@ -306,10 +326,35 @@ func extractData(pass *analysis.Pass, pos token.Pos, tnode types.Type, isExporte
return optional.None[schema.Type]()
}

// ectx.module.AddData(out) <-- TODO: add back when data extractor is implemented
pass.ExportObjectFact(named.Obj(), newFact(pass, out))
return optional.Some[schema.Type](dataRef)
}

// schemaDeclFact is implemented by analyzer fact types that carry a single
// schema.Decl, allowing decls to be shared between passes as object facts.
type schemaDeclFact interface {
	analysis.Fact
	setValue(d schema.Decl) // store the extracted decl on the fact
	getValue() schema.Decl  // retrieve the stored decl
}

// newFact allocates a fresh instance of the analyzer's declared fact type
// (pass.Analyzer.FactTypes[0]) and stores decl on it.
//
// NOTE(review): assumes every analyzer using this helper registers exactly
// one pointer-typed fact implementing schemaDeclFact (as newExtractor does);
// an empty FactTypes slice or a non-conforming fact type would panic here —
// confirm all callers are built via newExtractor.
func newFact(pass *analysis.Pass, decl schema.Decl) schemaDeclFact {
	factType := reflect.TypeOf(pass.Analyzer.FactTypes[0]).Elem()
	factValue := reflect.New(factType).Interface().(schemaDeclFact) //nolint:forcetypeassert
	factValue.setValue(decl)
	return factValue
}

// getTransitiveDecls collects every schema decl exported as an object fact on
// the pass, along with each decl's native Go identifier.
func getTransitiveDecls(pass *analysis.Pass) ([]schema.Decl, NativeNames) {
	decls := []schema.Decl{}
	names := make(NativeNames)
	for _, objectFact := range pass.AllObjectFacts() {
		declFact, ok := objectFact.Fact.(schemaDeclFact)
		if !ok {
			continue
		}
		decl := declFact.getValue()
		decls = append(decls, decl)
		names[decl] = objectFact.Object.Name()
	}
	return decls, names
}

func extractMap(pass *analysis.Pass, pos token.Pos, tnode *types.Map, isExported bool) optional.Option[schema.Type] {
key, ok := extractType(pass, pos, tnode.Key(), isExported).Get()
if !ok {
Expand Down Expand Up @@ -392,6 +437,23 @@ func ftlModuleFromGoModule(pkgPath string) (string, error) {
return strings.TrimSuffix(parts[1], "_test"), nil
}

func isType[T types.Type](t types.Type) bool {
if _, ok := t.(*types.Named); ok {
t = t.Underlying()
}
_, ok := t.(T)
return ok
}

// getDirective returns the first directive of concrete type T found in
// directives, or None when no directive of that type is present.
func getDirective[T directive](directives []directive) optional.Option[T] {
	for _, dir := range directives {
		if match, ok := dir.(T); ok {
			return optional.Some(match)
		}
	}
	return optional.None[T]()
}

// errorf creates an error-level diagnostic spanning the given AST node.
func errorf(node ast.Node, format string, args ...interface{}) analysis.Diagnostic {
	return errorfAtPos(node.Pos(), node.End(), format, args...)
}
Expand Down
13 changes: 8 additions & 5 deletions go-runtime/schema/analyzers/finalize.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,25 +6,28 @@ import (

"github.com/TBD54566975/ftl/backend/schema"
"github.com/TBD54566975/golang-tools/go/analysis"
"github.com/TBD54566975/golang-tools/go/analysis/passes/inspect"
"golang.org/x/exp/maps"
)

// Extractors is a list of all schema extractors that must run.
// Extractors is a list of all schema extractors that must run. They are not ordered and may run concurrently, unless a
// dependency is explicitly specified.
var Extractors = []*analysis.Analyzer{
TypeAliasExtractor,
VerbExtractor,
}

// Finalizer aggregates the results of all extractors into the final module
// schema and writes it to the output destination.
var Finalizer = &analysis.Analyzer{
	Name:             "finalizer",
	Doc:              "finalizes module schema and writes to the output destination",
	Run:              runFinalizer,
	Requires:         append(Extractors, inspect.Analyzer), // runs after every extractor and the inspector
	ResultType:       reflect.TypeFor[ExtractResult](),
	RunDespiteErrors: true, // still aggregate a partial schema when earlier passes reported errors
}

// ExtractResult contains the final schema extraction result.
// ExtractResult contains the final schema extraction result.
type ExtractResult struct {
// Module is the extracted module schema.
Module *schema.Module
Expand Down Expand Up @@ -57,9 +60,9 @@ func buildModuleSchema(pass *analysis.Pass) (*schema.Module, NativeNames, error)
module := &schema.Module{Name: moduleName}
nn := NativeNames{}
for _, e := range Extractors {
r, ok := pass.ResultOf[e].(result)
r, ok := pass.ResultOf[e].(extractorResult)
if !ok {
return nil, nil, fmt.Errorf("failed to extract result of %s", e.Name)
return nil, nil, fmt.Errorf("failed to extract extractorResult of %s", e.Name)
}
module.AddDecls(r.decls)
maps.Copy(nn, r.nativeNames)
Expand Down
Loading

0 comments on commit 3333831

Please sign in to comment.