diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index f160db9..40c0258 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -25,3 +25,20 @@ jobs:
- name: Test
run: go test -v ./...
+
+ golangci-lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ submodules: true
+
+ - name: Install Go
+ uses: actions/setup-go@v3
+ with:
+ go-version: ${{ env.GO_VERSION }}
+
+ - name: golangci-lint
+ uses: golangci/golangci-lint-action@v3.1.0
+ with:
+ version: latest
diff --git a/README.md b/README.md
index 125c21e..2960805 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,7 @@
[![godoc reference](https://godoc.org/github.com/observeinc/mustache?status.svg)](https://godoc.org/github.com/observeinc/mustache)
[![Tests Actions Status](https://github.com/observeinc/mustache/workflows/Go/badge.svg)](https://github.com/observeinc/mustache/actions)
+[![Go Report Card](https://goreportcard.com/badge/github.com/observeinc/mustache)](https://goreportcard.com/report/github.com/observeinc/mustache)
This is an implementation of the mustache templating language in Go.
diff --git a/doc.go b/doc.go
index a251ff8..a216029 100644
--- a/doc.go
+++ b/doc.go
@@ -18,7 +18,7 @@ http://mustache.github.io.
There are several wrappers of Parse and Render to help with different input or
output types. It is quite common to need to write the output of the template to
-an http.ResponseWriter. In this case the Render function is the most apropriate.
+an http.ResponseWriter. In this case the Render function is the most appropriate.
import "net/http"
import "github.com/observeinc/mustache"
diff --git a/example_test.go b/example_test.go
index 80379ec..73eb747 100644
--- a/example_test.go
+++ b/example_test.go
@@ -11,31 +11,46 @@ import (
func ExampleTemplate_basic() {
template := New()
- template.ParseString(`{{#foo}}{{bar}}{{/foo}}`)
+ parseErr := template.ParseString(`{{#foo}}{{bar}}{{/foo}}`)
+ if parseErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", parseErr)
+ }
context := map[string]interface{}{
"foo": true,
"bar": "bazinga!",
}
- output, _ := template.RenderString(context)
+ output, err := template.RenderString(context)
fmt.Println(output)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "failed to render template: %s\n", err)
+ }
// Output: bazinga!
}
func ExampleTemplate_partials() {
partial := New(Name("partial"))
- partial.ParseString(`{{bar}}`)
+ parseErr := partial.ParseString(`{{bar}}`)
+ if parseErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", parseErr)
+ }
template := New(Partial(partial))
- template.ParseString(`{{#foo}}{{>partial}}{{/foo}}`)
+ templateErr := template.ParseString(`{{#foo}}{{>partial}}{{/foo}}`)
+ if templateErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", templateErr)
+ }
context := map[string]interface{}{
"foo": true,
"bar": "bazinga!",
}
- template.Render(os.Stdout, context)
+ err := template.Render(os.Stdout, context)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "failed to render template: %s\n", err)
+ }
// Output: bazinga!
}
@@ -48,7 +63,11 @@ func ExampleTemplate_reader() {
if err != nil {
fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", err)
}
- t.Render(os.Stdout, nil)
+ err = t.Render(os.Stdout, nil)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "failed to render template: %s\n", err)
+ }
+
}
func ExampleTemplate_http() {
@@ -56,12 +75,19 @@ func ExampleTemplate_http() {
request, _ := http.NewRequest("GET", "http://example.com?foo=bar&bar=one&bar=two", nil)
template := New()
- template.ParseString(`
+ err := template.ParseString(`
`)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", err)
+ }
+
handler := func(w http.ResponseWriter, r *http.Request) {
- template.Render(w, r.URL.Query())
+ err := template.Render(w, r.URL.Query())
+ if err != nil {
+ fmt.Fprint(w, err.Error())
+ }
}
handler(writer, request)
@@ -73,12 +99,20 @@ func ExampleTemplate_http() {
}
func ExampleOption() {
- title := New(Name("header")) // instantiate and name the template
- title.ParseString("{{title}}") // parse a template string
+ title := New(Name("header")) // instantiate and name the template
+ titleErr := title.ParseString("{{title}}") // parse a template string
+ // If there was an error, do something with it.
+ if titleErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", titleErr)
+ }
body := New()
body.Option(Name("body")) // options can be defined after we instantiate too
- body.ParseString("{{content}}")
+ parseErr := body.ParseString("{{content}}")
+ // If there was an error, do something with it.
+ if parseErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", parseErr)
+ }
template := New(
Delimiters("|", "|"), // set the mustache delimiters to | instead of {{
@@ -86,14 +120,22 @@ func ExampleOption() {
Partial(title), // register a partial
Partial(body)) // and another one...
- template.ParseString("|>header|\n|>body|")
+ templateErr := template.ParseString("|>header|\n|>body|")
+ if templateErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to parse template: %s\n", templateErr)
+ }
context := map[string]interface{}{
"title": "Mustache",
"content": "Logic less templates with Mustache!",
}
- template.Render(os.Stdout, context)
+ renderErr := template.Render(os.Stdout, context)
+ // If there was an error, do something with it.
+ if renderErr != nil {
+ fmt.Fprintf(os.Stderr, "failed to render template: %s\n", renderErr)
+ }
+
// Output: Mustache
// Logic less templates with Mustache!
}
diff --git a/lex.go b/lex.go
index ffbee6a..2868639 100644
--- a/lex.go
+++ b/lex.go
@@ -85,7 +85,6 @@ type stateFn func(*lexer) stateFn
// lexer holds the state of the scanner.
type lexer struct {
- name string // the name of the input; used only for error reports.
input string // the string being scanned.
leftDelim string // start of action.
rightDelim string // end of action.
@@ -244,7 +243,7 @@ func stateLeftDelim(l *lexer) stateFn {
if l.peek() == '=' {
// When the lexer encounters "{{=" it proceeds to the set delimiter
// state which alters the left and right delimiters. This operation is
- // hidden from the parser and no tokens are emited.
+ // hidden from the parser and no tokens are emitted.
l.next()
return stateSetDelim
}
@@ -378,7 +377,7 @@ func stateIdentWithMode(exitState stateFn) stateFn {
l.next()
default:
// We've found presumably the closing bracket.
- // backup by the ammount of the counted whitespace so as to not include it
+ // Back up by the amount of the counted whitespace so as not to include it
// in the ident token.
//
// This whitespace we add back will be ignored as part of the stateTag
diff --git a/lookup.go b/lookup.go
index 284aeef..dfb049a 100644
--- a/lookup.go
+++ b/lookup.go
@@ -36,53 +36,26 @@ func lookup(name string, context ...interface{}) (interface{}, bool) {
// If the current context is a map, we'll look for a key in that map
// that matches the name.
case reflect.Map:
- item := reflectValue.MapIndex(reflect.ValueOf(name))
- if item.IsValid() {
- return item.Interface(), truth(item)
+ val, ok, found := lookup_map(name, reflectValue)
+ if found {
+ return val, ok
}
+
// If the current context is a struct, we'll look for a property in that
- // struct that matches the name. In the near future I'd like to add
- // support for matching struct names to tags so we can use lower_case
- // names in our templates which makes it more mustache like.
+ // struct that matches the name.
case reflect.Struct:
- field := reflectValue.FieldByName(name)
- if field.IsValid() && field.CanInterface() {
- return field.Interface(), truth(field)
- }
- method := reflectValue.MethodByName(name)
- if method.IsValid() && method.Type().NumIn() == 1 {
- out := method.Call(nil)[0]
- return out.Interface(), truth(out)
- }
-
- typ := reflectValue.Type()
- for i := 0; i < typ.NumField(); i++ {
- f := typ.Field(i)
- if f.PkgPath != "" {
- continue
- }
- tag := f.Tag.Get("mustache")
- if tag == name {
- field := reflectValue.Field(i)
- if field.IsValid() {
- return field.Interface(), truth(field)
- }
- }
+ val, ok, found := lookup_struct(name, reflectValue)
+ if found {
+ return val, ok
}
+ // If the current context is an array or slice, we'll try to find the current
+ // name as an index in the context.
case reflect.Array, reflect.Slice:
- idx, err := strconv.Atoi(name)
- if err != nil {
- continue
- }
- if reflectValue.Len() <= idx || idx < 0 {
- continue
- }
- field := reflectValue.Index(idx)
- if field.IsValid() {
- return field.Interface(), truth(field)
+ val, ok, found := lookup_array(name, reflectValue)
+ if found {
+ return val, ok
}
-
}
// If by this point no value was matched, we'll move up a step in the
// chain and try to match a value there.
@@ -92,6 +65,59 @@ func lookup(name string, context ...interface{}) (interface{}, bool) {
return nil, false
}
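+// lookup_map looks for a key in the map that matches name. It returns the
+// value, its truthiness and whether a match was found.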
+func lookup_map(name string, reflectValue reflect.Value) (value interface{}, ok bool, found bool) {
+ item := reflectValue.MapIndex(reflect.ValueOf(name))
+ if item.IsValid() {
+ return item.Interface(), truth(item), true
+ }
+ return nil, false, false
+
+}
+
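+// lookup_struct looks for a field, method or mustache-tagged field of the
+// struct that matches name. It returns the value, its truthiness and whether
+// a match was found.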
+func lookup_struct(name string, reflectValue reflect.Value) (value interface{}, ok bool, found bool) {
+ field := reflectValue.FieldByName(name)
+ if field.IsValid() && field.CanInterface() {
+ return field.Interface(), truth(field), true
+ }
+ method := reflectValue.MethodByName(name)
+ if method.IsValid() && method.Type().NumIn() == 1 {
+ out := method.Call(nil)[0]
+ return out.Interface(), truth(out), true
+ }
+
+ typ := reflectValue.Type()
+ for i := 0; i < typ.NumField(); i++ {
+ f := typ.Field(i)
+ if f.PkgPath != "" {
+ continue
+ }
+ tag := f.Tag.Get("mustache")
+ if tag == name {
+ field := reflectValue.Field(i)
+ if field.IsValid() {
+ return field.Interface(), truth(field), true
+ }
+ }
+ }
+ return nil, false, false
+}
+
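+// lookup_array interprets name as an index into the array or slice. It
+// returns the value, its truthiness and whether the index was in range.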
+func lookup_array(name string, reflectValue reflect.Value) (value interface{}, ok bool, found bool) {
+ idx, err := strconv.Atoi(name)
+ if err != nil {
+ return nil, false, false
+ }
+ if reflectValue.Len() <= idx || idx < 0 {
+ return nil, false, false
+ }
+ field := reflectValue.Index(idx)
+ if field.IsValid() {
+ return field.Interface(), truth(field), true
+ }
+
+ return nil, false, false
+}
+
// The truth function will tell us if r is a truthy value or not. This is
// important for sections as they will render their content based on the output
// of this function.
diff --git a/mustache.go b/mustache.go
index ad73426..542b4e1 100644
--- a/mustache.go
+++ b/mustache.go
@@ -12,6 +12,25 @@ import (
"strings"
)
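+// ErrorSlice collects the errors encountered while rendering so they can be
+// reported as a single error.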
+type ErrorSlice []error
+
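+// Error formats the collected errors as a bracketed, comma-separated list.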
+func (es ErrorSlice) Error() string {
+ b := strings.Builder{}
+ b.WriteRune('[')
+ first := true
+ for _, e := range es {
+ if first {
+ first = false
+ } else {
+ b.WriteString(", ")
+ }
+ b.WriteString(e.Error())
+ }
+ b.WriteRune(']')
+
+ return b.String()
+}
+
// The node type is the base type that represents a node in the parse tree.
type node interface {
// The render function should be defined by any type wishing to satisfy the
@@ -96,11 +115,18 @@ type sectionNode struct {
func (n *sectionNode) render(t *Template, w *writer, c ...interface{}) error {
w.tag()
defer w.tag()
+
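+ // Gather errors from rendering the child elements; they are returned as an
+ // ErrorSlice unless the template silently ignores misses.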
+ errs := ErrorSlice{}
+
elemFn := func(v ...interface{}) {
for _, elem := range n.elems {
- elem.render(t, w, append(v, c...)...)
+ err := elem.render(t, w, append(v, c...)...)
+ if err != nil {
+ errs = append(errs, err)
+ }
}
}
+
v, ok := lookup(n.name, c...)
if ok != n.inverted {
r := reflect.ValueOf(v)
@@ -116,9 +142,13 @@ func (n *sectionNode) render(t *Template, w *writer, c ...interface{}) error {
default:
elemFn(v)
}
- return nil
}
- return fmt.Errorf("failed to lookup %s", n.name)
+ if len(errs) != 0 {
+ if !t.silentMiss {
+ return errs
+ }
+ }
+ return nil
}
// The testNode type is a complex node which recursively renders its child
@@ -132,16 +162,25 @@ type testNode struct {
func (n *testNode) render(t *Template, w *writer, c ...interface{}) error {
w.tag()
defer w.tag()
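+ // As in sectionNode, collect child render errors and return them unless the
+ // template silently ignores misses.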
+ errs := ErrorSlice{}
v, _ := lookup(n.testIdent, c...)
if v != nil {
vs := strings.Builder{}
print(&vs, v, noEscape)
if vs.String() == n.testVal {
for _, elem := range n.elems {
- elem.render(t, w, c...)
+ err := elem.render(t, w, c...)
+ if err != nil {
+ errs = append(errs, err)
+ }
}
}
}
+ if len(errs) != 0 {
+ if !t.silentMiss {
+ return errs
+ }
+ }
return nil
}
@@ -149,7 +188,7 @@ func (n *sectionNode) String() string {
return fmt.Sprintf("[section: %q inv: %t elems: %s]", n.name, n.inverted, n.elems)
}
-// The commentNode type is a part of the template wich gets ignored. Perhaps it
+// The commentNode type is a part of the template which gets ignored. Perhaps it
// can be optionally enabled to print comments.
type commentNode string
@@ -171,8 +210,14 @@ func (p *partialNode) render(t *Template, w *writer, c ...interface{}) error {
w.tag()
if template, ok := t.partials[p.name]; ok {
template.partials = t.partials
- template.render(w, c...)
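+ // Surface the partial's render error unless the template silently ignores
+ // misses.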
+ err := template.render(w, c...)
+ if err != nil {
+ if !t.silentMiss {
+ return err
+ }
+ }
}
+
return nil
}
@@ -222,7 +267,7 @@ func print(w io.Writer, v interface{}, needEscape escapeType) {
// The escape function replicates the text/template.HTMLEscapeString but keeps
// "'" and """ for compatibility with the mustache spec.
func escapeHtml(s string) string {
- if strings.IndexAny(s, `'"&<>`) < 0 {
+ if !strings.ContainsAny(s, `'"&<>`) {
return s
}
var b bytes.Buffer
diff --git a/parse.go b/parse.go
index cbdba4d..0385aa6 100644
--- a/parse.go
+++ b/parse.go
@@ -4,14 +4,12 @@ package mustache
import (
"fmt"
- "io"
)
type parser struct {
lexer *lexer
escape escapeType
buf []token
- ast []node
}
// read returns the next token from the lexer and advances the cursor. This
@@ -25,21 +23,6 @@ func (p *parser) read() token {
return p.lexer.token()
}
-// readn returns the next n tokens from the lexer and advances the cursor. If it
-// coundn't read all n tokens, for example if a tokenEOF was returned by the
-// lexer, an error is returned and the returned slice will have all tokens read
-// until that point, including tokenEOF.
-func (p *parser) readn(n int) ([]token, error) {
- tokens := make([]token, 0, n) // make a slice capable of storing up to n tokens
- for i := 0; i < n; i++ {
- tokens = append(tokens, p.read())
- if tokens[i].typ == tokenEOF {
- return tokens, io.EOF
- }
- }
- return tokens, nil
-}
-
// readt returns the tokens starting from the current position until the first
// match of t. Similar to readn it will return an error if a tokenEOF was
// returned by the lexer before a match was made.
@@ -75,56 +58,6 @@ func (p *parser) readv(t token) ([]token, error) {
return tokens, nil
}
-// peek returns the next token without advancing the cursor. Consecutive calls
-// of peek would result in the same token being retuned. To advance the cursor,
-// a read must be made.
-func (p *parser) peek() token {
- if len(p.buf) > 0 {
- return p.buf[0]
- }
- t := p.lexer.token()
- p.buf = append(p.buf, t)
- return t
-}
-
-// peekn returns the next n tokens without advancing the cursor.
-func (p *parser) peekn(n int) ([]token, error) {
- if len(p.buf) > n {
- return p.buf[:n], nil
- }
- for i := len(p.buf) - 1; i < n; i++ {
- t := p.lexer.token()
- p.buf = append(p.buf, t)
- if t.typ == tokenEOF {
- return p.buf, io.EOF
- }
- }
- return p.buf, nil
-}
-
-// peekt returns the tokens from the current postition until the first match of
-// t. it will not advance the cursor.
-func (p *parser) peekt(t tokenType) ([]token, error) {
- for i := 0; i < len(p.buf); i++ {
- switch p.buf[i].typ {
- case t:
- return p.buf[:i], nil
- case tokenEOF:
- return p.buf[:i], io.EOF
- }
- }
- for {
- token := p.lexer.token()
- p.buf = append(p.buf, token)
- switch token.typ {
- case t:
- return p.buf, nil
- case tokenEOF:
- return p.buf, io.EOF
- }
- }
-}
-
func (p *parser) errorf(t token, format string, v ...interface{}) error {
return fmt.Errorf("%d:%d syntax error: %s", t.line, t.col, fmt.Sprintf(format, v...))
}
@@ -161,7 +94,7 @@ loop:
return nodes, nil
}
-// parseTag parses a beggining of a mustache tag. It is assumed that a leftDelim
+// parseTag parses the beginning of a mustache tag. It is assumed that a leftDelim
// was already read by the parser.
func (p *parser) parseTag() (node, error) {
token := p.read()
@@ -281,7 +214,7 @@ func (p *parser) parseSectionInternal(t token) ([]node, error) {
}
tokens = append(tokens, read...)
if len(read) > 1 {
- // Check the token that preceeded the matching identifier. For
+ // Check the token that preceded the matching identifier. For
// section start and inverse tokens we increase the stack for sections or testValue for those special sections, otherwise
// decrease.
tt := read[len(read)-2]