main.go
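// This program reads a source file and a cursor position from the command
// line, feeds the text up to that position to the SOQL parser, and prints
// the token names the parser expects at that point as a JSON array.
//
// Arguments: a path (a "file://" URI is also accepted), a line number, and a
// column number; judging from how they are used below, the line appears to be
// zero-based and the column one-based.
//
// Illustrative invocation:
//
//	go run main.go file:///path/to/query.soql 0 7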
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"strconv"
	"strings"

	"github.com/antlr/antlr4/runtime/Go/antlr"
	"github.com/tzmfreedom/soql-parser/parser"
)

func main() {
	filename := os.Args[1]
	lineNo, err := strconv.Atoi(os.Args[2])
	if err != nil {
		panic(err)
	}
	charNo, err := strconv.Atoi(os.Args[3])
	if err != nil {
		panic(err)
	}

	// Accept either a plain path or a "file://" URI.
	fp, err := os.Open(strings.TrimPrefix(filename, "file://"))
	if err != nil {
		panic(err)
	}
	defer fp.Close()

	// Read the file up to the cursor position: full lines until lineNo
	// reaches zero, then only the text before the cursor column.
	reader := bufio.NewReaderSize(fp, 4096)
	body := ""
	for {
		line, _, err := reader.ReadLine()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		if lineNo == 0 {
			body += string(line)[0 : charNo-1]
			break
		}
		// Join full lines with a newline so tokens on adjacent lines do not
		// run together.
		body += string(line) + "\n"
		lineNo--
	}

	expectedTokens, _ := Parse(body)
	res, err := json.Marshal(expectedTokens)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(res))
}

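// Parse runs the SOQL parser over src and returns the token names reported
// as expected at any syntax error, together with the resulting parse tree.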
func Parse(src string) ([]string, antlr.ParserRuleContext) {
	input := antlr.NewInputStream(src)
	lexer := parser.NewSOQLLexer(input)
	stream := antlr.NewCommonTokenStream(lexer, 0)
	p := parser.NewSOQLParser(stream)
	p.RemoveErrorListeners()
	listener := NewErrorListener(p)
	p.AddErrorListener(listener)
	p.BuildParseTrees = true
	tree := p.Soql_query()
	return listener.ExpectedTokens, tree
}

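// ErrorListener records, for every syntax error the parser reports, the
// names of the tokens that were expected at that position.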
type ErrorListener struct {
	*antlr.DefaultErrorListener
	parser         antlr.Parser
	ExpectedTokens []string
}

func NewErrorListener(p antlr.Parser) *ErrorListener {
	return &ErrorListener{
		parser:         p,
		ExpectedTokens: []string{},
	}
}

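// SyntaxError is invoked by ANTLR on each syntax error. It asks the parser
// for its current expected-token set and appends the token names to
// ExpectedTokens.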
func (l *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol interface{}, line, column int, msg string, e antlr.RecognitionException) {
	verbose := l.parser.GetExpectedTokens().StringVerbose(
		l.parser.GetLiteralNames(),
		l.parser.GetSymbolicNames(),
		false,
	)
	if strings.Contains(verbose, ",") {
		// Multiple candidates are rendered as "{a, b, c}": strip the braces
		// and split on the commas.
		tags := strings.Split(verbose[1:len(verbose)-1], ",")
		expectedTokens := make([]string, len(tags))
		for i, tag := range tags {
			expectedTokens[i] = strings.TrimSpace(tag)
		}
		l.ExpectedTokens = append(l.ExpectedTokens, expectedTokens...)
	} else {
		// A single candidate is rendered without braces.
		l.ExpectedTokens = append(l.ExpectedTokens, verbose)
	}
}