Final Toml-Path Solution

* Refactored type names and file names to mesh more closely with the existing TOML library
* Added a QueryResult structure that provides values and position data (see the usage sketch below)
* Added a Query() method to the TomlTree type
* Tests, tests, and more tests
* Fixed a bug where positions returned from some tables were invalid
* Added a test case for the bug fix
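
A minimal usage sketch of the query API as it appears in this diff (Load, Compile, Query.Execute, QueryResult.Values/Positions). The Query() method on TomlTree is not shown in these hunks, so the sketch goes through Compile and Execute directly; the main wrapper and the sample document are illustrative only:

```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// Parse a small document into a TomlTree.
	tree, err := toml.Load("[foo]\na = [1,2,3]")
	if err != nil {
		panic(err)
	}

	// Compile now returns an error for malformed queries instead of panicking.
	query, err := toml.Compile("$.foo.a[0:2]")
	if err != nil {
		panic(err)
	}

	// Execute returns a QueryResult that pairs each matched value with the
	// source position it was parsed at.
	results := query.Execute(tree)
	for i, value := range results.Values() {
		fmt.Println(results.Positions()[i], value)
	}
}
```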

The bugfix was an interesting case: position information wasn't being
set when createSubTree was called, so a table header like [foo.bar]
would result in table 'foo' having no position.
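
A short sketch of the behavior the fix restores, mirroring the new TestNestedTreePosition case (the expected line/column values in the comments come from that test; the main wrapper is illustrative):

```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// The [foo.bar] header implicitly creates the parent table "foo"
	// via createSubTree.
	tree, err := toml.Load("[foo.bar]\na = 42")
	if err != nil {
		panic(err)
	}

	// Before the fix, implicitly created tables were left with an
	// invalid (zero) position; now they carry the position of the
	// header that created them.
	fmt.Println(tree.GetPosition("foo").Invalid()) // false
	fmt.Println(tree.GetPosition("foo"))           // line 1, column 1
	fmt.Println(tree.GetPosition("foo.bar"))       // line 1, column 1
	fmt.Println(tree.GetPosition("foo.bar.a"))     // line 2, column 1
}
```
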
Author: eanderton
Date:   2014-09-10 21:32:04 -04:00
Parent: 2811a1a3c9
Commit: 081f3db916
15 changed files with 713 additions and 779 deletions
-261
@@ -1,261 +0,0 @@
package jpath
import (
"fmt"
. "github.com/pelletier/go-toml"
"testing"
"sort"
"strings"
)
func valueString(root interface{}) string {
result := "" //fmt.Sprintf("%T:", root)
switch node := root.(type) {
case []interface{}:
items := []string{}
for _, v := range node {
items = append(items, valueString(v))
}
sort.Strings(items)
result = "[" + strings.Join(items, ", ") + "]"
case *TomlTree:
// workaround for unreliable map key ordering
items := []string{}
for _, k := range node.Keys() {
v := node.GetPath([]string{k})
items = append(items, k + ":" + valueString(v))
}
sort.Strings(items)
result = "{" + strings.Join(items, ", ") + "}"
case map[string]interface{}:
// workaround for unreliable map key ordering
items := []string{}
for k, v := range node {
items = append(items, k + ":" + valueString(v))
}
sort.Strings(items)
result = "{" + strings.Join(items, ", ") + "}"
case int64:
result += fmt.Sprintf("%d", node)
case string:
result += "'" + node + "'"
}
return result
}
func assertValue(t *testing.T, result, ref interface{}) {
pathStr := valueString(result)
refStr := valueString(ref)
if pathStr != refStr {
t.Errorf("values do not match")
t.Log("test:", pathStr)
t.Log("ref: ", refStr)
}
}
func assertQuery(t *testing.T, toml, query string, ref []interface{}) {
tree, err := Load(toml)
if err != nil {
t.Errorf("Non-nil toml parse error: %v", err)
return
}
results := Compile(query).Execute(tree)
assertValue(t, results.Values(), ref)
}
func TestQueryRoot(t *testing.T) {
assertQuery(t,
"a = 42",
"$",
[]interface{}{
map[string]interface{}{
"a": int64(42),
},
})
}
func TestQueryKey(t *testing.T) {
assertQuery(t,
"[foo]\na = 42",
"$.foo.a",
[]interface{}{
int64(42),
})
}
func TestQueryKeyString(t *testing.T) {
assertQuery(t,
"[foo]\na = 42",
"$.foo['a']",
[]interface{}{
int64(42),
})
}
func TestQueryIndex(t *testing.T) {
assertQuery(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0]",
[]interface{}{
int64(1),
})
}
func TestQuerySliceRange(t *testing.T) {
assertQuery(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0:5]",
[]interface{}{
int64(1),
int64(2),
int64(3),
int64(4),
int64(5),
})
}
func TestQuerySliceStep(t *testing.T) {
assertQuery(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0:5:2]",
[]interface{}{
int64(1),
int64(3),
int64(5),
})
}
func TestQueryAny(t *testing.T) {
assertQuery(t,
"[foo.bar]\na=1\nb=2\n[foo.baz]\na=3\nb=4",
"$.foo.*",
[]interface{}{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
})
}
func TestQueryUnionSimple(t *testing.T) {
assertQuery(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$.*[bar,foo]",
[]interface{}{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
})
}
func TestQueryRecursionAll(t *testing.T) {
assertQuery(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$..*",
[]interface{}{
map[string]interface{}{
"bar": map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
},
map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
int64(1),
int64(2),
map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
},
map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
int64(3),
int64(4),
map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
},
map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
int64(5),
int64(6),
})
}
func TestQueryRecursionUnionSimple(t *testing.T) {
assertQuery(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$..['foo','bar']",
[]interface{}{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
})
}
func TestQueryScriptFnLast(t *testing.T) {
assertQuery(t,
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
"$.foo.a[(last)]",
[]interface{}{
int64(9),
})
}
func TestQueryFilterFnOdd(t *testing.T) {
assertQuery(t,
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
"$.foo.a[?(odd)]",
[]interface{}{
int64(1),
int64(3),
int64(5),
int64(7),
int64(9),
})
}
func TestQueryFilterFnEven(t *testing.T) {
assertQuery(t,
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
"$.foo.a[?(even)]",
[]interface{}{
int64(0),
int64(2),
int64(4),
int64(6),
int64(8),
})
}
+67 -169
@@ -10,115 +10,16 @@ import (
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
"unicode"
"unicode/utf8" "unicode/utf8"
) )
var dateRegexp *regexp.Regexp var dateRegexp *regexp.Regexp
// Define tokens // Define state functions
type tokenType int type tomlLexStateFn func() tomlLexStateFn
const (
eof = -(iota + 1)
)
const (
tokenError tokenType = iota
tokenEOF
tokenComment
tokenKey
tokenEqual
tokenString
tokenInteger
tokenTrue
tokenFalse
tokenFloat
tokenLeftBracket
tokenRightBracket
tokenDoubleLeftBracket
tokenDoubleRightBracket
tokenDate
tokenKeyGroup
tokenKeyGroupArray
tokenComma
tokenEOL
)
var tokenTypeNames = []string{
"EOF",
"Comment",
"Key",
"=",
"\"",
"Integer",
"True",
"False",
"Float",
"[",
"[",
"]]",
"[[",
"Date",
"KeyGroup",
"KeyGroupArray",
",",
"EOL",
}
type token struct {
Position
typ tokenType
val string
}
func (tt tokenType) String() string {
idx := int(tt)
if idx < len(tokenTypeNames) {
return tokenTypeNames[idx]
}
return "Unknown"
}
func (i token) String() string {
switch i.typ {
case tokenEOF:
return "EOF"
case tokenError:
return i.val
}
if len(i.val) > 10 {
return fmt.Sprintf("%.10q...", i.val)
}
return fmt.Sprintf("%q", i.val)
}
func isSpace(r rune) bool {
return r == ' ' || r == '\t'
}
func isAlphanumeric(r rune) bool {
return unicode.IsLetter(r) || r == '_'
}
func isKeyChar(r rune) bool {
// "Keys start with the first non-whitespace character and end with the last
// non-whitespace character before the equals sign."
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=')
}
func isDigit(r rune) bool {
return unicode.IsNumber(r)
}
func isHexDigit(r rune) bool {
return isDigit(r) ||
r == 'A' || r == 'B' || r == 'C' || r == 'D' || r == 'E' || r == 'F'
}
// Define lexer // Define lexer
type lexer struct { type tomlLexer struct {
input string input string
start int start int
pos int pos int
@@ -129,14 +30,14 @@ type lexer struct {
col int col int
} }
func (l *lexer) run() { func (l *tomlLexer) run() {
for state := lexVoid; state != nil; { for state := l.lexVoid; state != nil; {
state = state(l) state = state()
} }
close(l.tokens) close(l.tokens)
} }
func (l *lexer) nextStart() { func (l *tomlLexer) nextStart() {
// iterate by runes (utf8 characters) // iterate by runes (utf8 characters)
// search for newlines and advance line/col counts // search for newlines and advance line/col counts
for i := l.start; i < l.pos; { for i := l.start; i < l.pos; {
@@ -153,7 +54,7 @@ func (l *lexer) nextStart() {
l.start = l.pos l.start = l.pos
} }
func (l *lexer) emit(t tokenType) { func (l *tomlLexer) emit(t tokenType) {
l.tokens <- token{ l.tokens <- token{
Position: Position{l.line, l.col}, Position: Position{l.line, l.col},
typ: t, typ: t,
@@ -162,7 +63,7 @@ func (l *lexer) emit(t tokenType) {
l.nextStart() l.nextStart()
} }
func (l *lexer) emitWithValue(t tokenType, value string) { func (l *tomlLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{ l.tokens <- token{
Position: Position{l.line, l.col}, Position: Position{l.line, l.col},
typ: t, typ: t,
@@ -171,7 +72,7 @@ func (l *lexer) emitWithValue(t tokenType, value string) {
l.nextStart() l.nextStart()
} }
func (l *lexer) next() rune { func (l *tomlLexer) next() rune {
if l.pos >= len(l.input) { if l.pos >= len(l.input) {
l.width = 0 l.width = 0
return eof return eof
@@ -182,15 +83,15 @@ func (l *lexer) next() rune {
return r return r
} }
func (l *lexer) ignore() { func (l *tomlLexer) ignore() {
l.nextStart() l.nextStart()
} }
func (l *lexer) backup() { func (l *tomlLexer) backup() {
l.pos -= l.width l.pos -= l.width
} }
func (l *lexer) errorf(format string, args ...interface{}) stateFn { func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
l.tokens <- token{ l.tokens <- token{
Position: Position{l.line, l.col}, Position: Position{l.line, l.col},
typ: tokenError, typ: tokenError,
@@ -199,13 +100,13 @@ func (l *lexer) errorf(format string, args ...interface{}) stateFn {
return nil return nil
} }
func (l *lexer) peek() rune { func (l *tomlLexer) peek() rune {
r := l.next() r := l.next()
l.backup() l.backup()
return r return r
} }
func (l *lexer) accept(valid string) bool { func (l *tomlLexer) accept(valid string) bool {
if strings.IndexRune(valid, l.next()) >= 0 { if strings.IndexRune(valid, l.next()) >= 0 {
return true return true
} }
@@ -213,23 +114,20 @@ func (l *lexer) accept(valid string) bool {
return false return false
} }
func (l *lexer) follow(next string) bool { func (l *tomlLexer) follow(next string) bool {
return strings.HasPrefix(l.input[l.pos:], next) return strings.HasPrefix(l.input[l.pos:], next)
} }
// Define state functions func (l *tomlLexer) lexVoid() tomlLexStateFn {
type stateFn func(*lexer) stateFn
func lexVoid(l *lexer) stateFn {
for { for {
next := l.peek() next := l.peek()
switch next { switch next {
case '[': case '[':
return lexKeyGroup return l.lexKeyGroup
case '#': case '#':
return lexComment return l.lexComment
case '=': case '=':
return lexEqual return l.lexEqual
} }
if isSpace(next) { if isSpace(next) {
@@ -237,11 +135,11 @@ func lexVoid(l *lexer) stateFn {
} }
if l.depth > 0 { if l.depth > 0 {
return lexRvalue return l.lexRvalue
} }
if isKeyChar(next) { if isKeyChar(next) {
return lexKey return l.lexKey
} }
if l.next() == eof { if l.next() == eof {
@@ -253,7 +151,7 @@ func lexVoid(l *lexer) stateFn {
return nil return nil
} }
func lexRvalue(l *lexer) stateFn { func (l *tomlLexer) lexRvalue() tomlLexStateFn {
for { for {
next := l.peek() next := l.peek()
switch next { switch next {
@@ -263,43 +161,43 @@ func lexRvalue(l *lexer) stateFn {
return l.errorf("cannot have multiple equals for the same key") return l.errorf("cannot have multiple equals for the same key")
case '[': case '[':
l.depth++ l.depth++
return lexLeftBracket return l.lexLeftBracket
case ']': case ']':
l.depth-- l.depth--
return lexRightBracket return l.lexRightBracket
case '#': case '#':
return lexComment return l.lexComment
case '"': case '"':
return lexString return l.lexString
case ',': case ',':
return lexComma return l.lexComma
case '\n': case '\n':
l.ignore() l.ignore()
l.pos++ l.pos++
if l.depth == 0 { if l.depth == 0 {
return lexVoid return l.lexVoid
} }
return lexRvalue return l.lexRvalue
} }
if l.follow("true") { if l.follow("true") {
return lexTrue return l.lexTrue
} }
if l.follow("false") { if l.follow("false") {
return lexFalse return l.lexFalse
} }
if isAlphanumeric(next) { if isAlphanumeric(next) {
return lexKey return l.lexKey
} }
if dateRegexp.FindString(l.input[l.pos:]) != "" { if dateRegexp.FindString(l.input[l.pos:]) != "" {
return lexDate return l.lexDate
} }
if next == '+' || next == '-' || isDigit(next) { if next == '+' || next == '-' || isDigit(next) {
return lexNumber return l.lexNumber
} }
if isSpace(next) { if isSpace(next) {
@@ -315,51 +213,51 @@ func lexRvalue(l *lexer) stateFn {
return nil return nil
} }
func lexDate(l *lexer) stateFn { func (l *tomlLexer) lexDate() tomlLexStateFn {
l.ignore() l.ignore()
l.pos += 20 // Fixed size of a date in TOML l.pos += 20 // Fixed size of a date in TOML
l.emit(tokenDate) l.emit(tokenDate)
return lexRvalue return l.lexRvalue
} }
func lexTrue(l *lexer) stateFn { func (l *tomlLexer) lexTrue() tomlLexStateFn {
l.ignore() l.ignore()
l.pos += 4 l.pos += 4
l.emit(tokenTrue) l.emit(tokenTrue)
return lexRvalue return l.lexRvalue
} }
func lexFalse(l *lexer) stateFn { func (l *tomlLexer) lexFalse() tomlLexStateFn {
l.ignore() l.ignore()
l.pos += 5 l.pos += 5
l.emit(tokenFalse) l.emit(tokenFalse)
return lexRvalue return l.lexRvalue
} }
func lexEqual(l *lexer) stateFn { func (l *tomlLexer) lexEqual() tomlLexStateFn {
l.ignore() l.ignore()
l.accept("=") l.accept("=")
l.emit(tokenEqual) l.emit(tokenEqual)
return lexRvalue return l.lexRvalue
} }
func lexComma(l *lexer) stateFn { func (l *tomlLexer) lexComma() tomlLexStateFn {
l.ignore() l.ignore()
l.accept(",") l.accept(",")
l.emit(tokenComma) l.emit(tokenComma)
return lexRvalue return l.lexRvalue
} }
func lexKey(l *lexer) stateFn { func (l *tomlLexer) lexKey() tomlLexStateFn {
l.ignore() l.ignore()
for isKeyChar(l.next()) { for isKeyChar(l.next()) {
} }
l.backup() l.backup()
l.emit(tokenKey) l.emit(tokenKey)
return lexVoid return l.lexVoid
} }
func lexComment(l *lexer) stateFn { func (l *tomlLexer) lexComment() tomlLexStateFn {
for { for {
next := l.next() next := l.next()
if next == '\n' || next == eof { if next == '\n' || next == eof {
@@ -367,17 +265,17 @@ func lexComment(l *lexer) stateFn {
} }
} }
l.ignore() l.ignore()
return lexVoid return l.lexVoid
} }
func lexLeftBracket(l *lexer) stateFn { func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
l.ignore() l.ignore()
l.pos++ l.pos++
l.emit(tokenLeftBracket) l.emit(tokenLeftBracket)
return lexRvalue return l.lexRvalue
} }
func lexString(l *lexer) stateFn { func (l *tomlLexer) lexString() tomlLexStateFn {
l.pos++ l.pos++
l.ignore() l.ignore()
growingString := "" growingString := ""
@@ -387,7 +285,7 @@ func lexString(l *lexer) stateFn {
l.emitWithValue(tokenString, growingString) l.emitWithValue(tokenString, growingString)
l.pos++ l.pos++
l.ignore() l.ignore()
return lexRvalue return l.lexRvalue
} }
if l.follow("\\\"") { if l.follow("\\\"") {
@@ -446,7 +344,7 @@ func lexString(l *lexer) stateFn {
return l.errorf("unclosed string") return l.errorf("unclosed string")
} }
func lexKeyGroup(l *lexer) stateFn { func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
l.ignore() l.ignore()
l.pos++ l.pos++
@@ -454,14 +352,14 @@ func lexKeyGroup(l *lexer) stateFn {
// token '[[' signifies an array of anonymous key groups // token '[[' signifies an array of anonymous key groups
l.pos++ l.pos++
l.emit(tokenDoubleLeftBracket) l.emit(tokenDoubleLeftBracket)
return lexInsideKeyGroupArray return l.lexInsideKeyGroupArray
} }
// vanilla key group // vanilla key group
l.emit(tokenLeftBracket) l.emit(tokenLeftBracket)
return lexInsideKeyGroup return l.lexInsideKeyGroup
} }
func lexInsideKeyGroupArray(l *lexer) stateFn { func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
for { for {
if l.peek() == ']' { if l.peek() == ']' {
if l.pos > l.start { if l.pos > l.start {
@@ -474,7 +372,7 @@ func lexInsideKeyGroupArray(l *lexer) stateFn {
} }
l.pos++ l.pos++
l.emit(tokenDoubleRightBracket) l.emit(tokenDoubleRightBracket)
return lexVoid return l.lexVoid
} else if l.peek() == '[' { } else if l.peek() == '[' {
return l.errorf("group name cannot contain ']'") return l.errorf("group name cannot contain ']'")
} }
@@ -486,7 +384,7 @@ func lexInsideKeyGroupArray(l *lexer) stateFn {
return l.errorf("unclosed key group array") return l.errorf("unclosed key group array")
} }
func lexInsideKeyGroup(l *lexer) stateFn { func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
for { for {
if l.peek() == ']' { if l.peek() == ']' {
if l.pos > l.start { if l.pos > l.start {
@@ -495,7 +393,7 @@ func lexInsideKeyGroup(l *lexer) stateFn {
l.ignore() l.ignore()
l.pos++ l.pos++
l.emit(tokenRightBracket) l.emit(tokenRightBracket)
return lexVoid return l.lexVoid
} else if l.peek() == '[' { } else if l.peek() == '[' {
return l.errorf("group name cannot contain ']'") return l.errorf("group name cannot contain ']'")
} }
@@ -507,14 +405,14 @@ func lexInsideKeyGroup(l *lexer) stateFn {
return l.errorf("unclosed key group") return l.errorf("unclosed key group")
} }
func lexRightBracket(l *lexer) stateFn { func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
l.ignore() l.ignore()
l.pos++ l.pos++
l.emit(tokenRightBracket) l.emit(tokenRightBracket)
return lexRvalue return l.lexRvalue
} }
func lexNumber(l *lexer) stateFn { func (l *tomlLexer) lexNumber() tomlLexStateFn {
l.ignore() l.ignore()
if !l.accept("+") { if !l.accept("+") {
l.accept("-") l.accept("-")
@@ -550,7 +448,7 @@ func lexNumber(l *lexer) stateFn {
} else { } else {
l.emit(tokenInteger) l.emit(tokenInteger)
} }
return lexRvalue return l.lexRvalue
} }
func init() { func init() {
@@ -558,13 +456,13 @@ func init() {
} }
// Entry point // Entry point
func lex(input string) (*lexer, chan token) { func lexToml(input string) chan token {
l := &lexer{ l := &tomlLexer{
input: input, input: input,
tokens: make(chan token), tokens: make(chan token),
line: 1, line: 1,
col: 1, col: 1,
} }
go l.run() go l.run()
return l, l.tokens return l.tokens
} }
+1 -1
@@ -3,7 +3,7 @@ package toml
import "testing" import "testing"
func testFlow(t *testing.T, input string, expectedFlow []token) { func testFlow(t *testing.T, input string, expectedFlow []token) {
_, ch := lex(input) ch := lexToml(input)
for _, expected := range expectedFlow { for _, expected := range expectedFlow {
token := <-ch token := <-ch
if token != expected { if token != expected {
+43 -26
@@ -1,10 +1,27 @@
package jpath package toml
import ( import (
"fmt" "fmt"
. "github.com/pelletier/go-toml"
) )
// support function to set positions for tomlValues
// NOTE: this is done to allow ctx.lastPosition to indicate the start of any
// values returned by the query engines
func tomlValueCheck(node interface{}, ctx *queryContext) interface{} {
switch castNode := node.(type) {
case *tomlValue:
ctx.lastPosition = castNode.position
return castNode.value
case []*TomlTree:
if len(castNode) > 0 {
ctx.lastPosition = castNode[0].position
}
return node
default:
return node
}
}
// base match // base match
type matchBase struct { type matchBase struct {
next PathFn next PathFn
@@ -28,12 +45,15 @@ func (f *terminatingFn) SetNext(next PathFn) {
} }
func (f *terminatingFn) Call(node interface{}, ctx *queryContext) { func (f *terminatingFn) Call(node interface{}, ctx *queryContext) {
ctx.result.appendResult(node) switch castNode := node.(type) {
} case *TomlTree:
ctx.result.appendResult(node, castNode.position)
// shim to ease functor writing case *tomlValue:
func treeValue(tree *TomlTree, key string) interface{} { ctx.result.appendResult(node, castNode.position)
return tree.GetPath([]string{key}) default:
// use last position for scalars
ctx.result.appendResult(node, ctx.lastPosition)
}
} }
// match single key // match single key
@@ -48,7 +68,7 @@ func newMatchKeyFn(name string) *matchKeyFn {
func (f *matchKeyFn) Call(node interface{}, ctx *queryContext) { func (f *matchKeyFn) Call(node interface{}, ctx *queryContext) {
if tree, ok := node.(*TomlTree); ok { if tree, ok := node.(*TomlTree); ok {
item := treeValue(tree, f.Name) item := tree.values[f.Name]
if item != nil { if item != nil {
f.next.Call(item, ctx) f.next.Call(item, ctx)
} }
@@ -66,11 +86,11 @@ func newMatchIndexFn(idx int) *matchIndexFn {
} }
func (f *matchIndexFn) Call(node interface{}, ctx *queryContext) { func (f *matchIndexFn) Call(node interface{}, ctx *queryContext) {
if arr, ok := node.([]interface{}); ok { if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
if f.Idx < len(arr) && f.Idx >= 0 { if f.Idx < len(arr) && f.Idx >= 0 {
f.next.Call(arr[f.Idx], ctx) f.next.Call(arr[f.Idx], ctx)
} }
} }
} }
// filter by slicing // filter by slicing
@@ -84,7 +104,7 @@ func newMatchSliceFn(start, end, step int) *matchSliceFn {
} }
func (f *matchSliceFn) Call(node interface{}, ctx *queryContext) { func (f *matchSliceFn) Call(node interface{}, ctx *queryContext) {
if arr, ok := node.([]interface{}); ok { if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
// adjust indexes for negative values, reverse ordering // adjust indexes for negative values, reverse ordering
realStart, realEnd := f.Start, f.End realStart, realEnd := f.Start, f.End
if realStart < 0 { if realStart < 0 {
@@ -114,9 +134,8 @@ func newMatchAnyFn() *matchAnyFn {
func (f *matchAnyFn) Call(node interface{}, ctx *queryContext) { func (f *matchAnyFn) Call(node interface{}, ctx *queryContext) {
if tree, ok := node.(*TomlTree); ok { if tree, ok := node.(*TomlTree); ok {
for _, key := range tree.Keys() { for _,v := range tree.values {
item := treeValue(tree, key) f.next.Call(v, ctx)
f.next.Call(item, ctx)
} }
} }
} }
@@ -151,10 +170,9 @@ func (f *matchRecursiveFn) Call(node interface{}, ctx *queryContext) {
if tree, ok := node.(*TomlTree); ok { if tree, ok := node.(*TomlTree); ok {
var visit func(tree *TomlTree) var visit func(tree *TomlTree)
visit = func(tree *TomlTree) { visit = func(tree *TomlTree) {
for _, key := range tree.Keys() { for _, v := range tree.values {
item := treeValue(tree, key) f.next.Call(v, ctx)
f.next.Call(item, ctx) switch node := v.(type) {
switch node := item.(type) {
case *TomlTree: case *TomlTree:
visit(node) visit(node)
case []*TomlTree: case []*TomlTree:
@@ -185,10 +203,9 @@ func (f *matchFilterFn) Call(node interface{}, ctx *queryContext) {
panic(fmt.Sprintf("%s: query context does not have filter '%s'", panic(fmt.Sprintf("%s: query context does not have filter '%s'",
f.Pos, f.Name)) f.Pos, f.Name))
} }
switch castNode := node.(type) { switch castNode := tomlValueCheck(node, ctx).(type) {
case *TomlTree: case *TomlTree:
for _, k := range castNode.Keys() { for _, v := range castNode.values {
v := castNode.GetPath([]string{k})
if fn(v) { if fn(v) {
f.next.Call(v, ctx) f.next.Call(v, ctx)
} }
@@ -219,7 +236,7 @@ func (f *matchScriptFn) Call(node interface{}, ctx *queryContext) {
panic(fmt.Sprintf("%s: query context does not have script '%s'", panic(fmt.Sprintf("%s: query context does not have script '%s'",
f.Pos, f.Name)) f.Pos, f.Name))
} }
switch result := fn(node).(type) { switch result := fn(tomlValueCheck(node, ctx)).(type) {
case string: case string:
nextMatch := newMatchKeyFn(result) nextMatch := newMatchKeyFn(result)
nextMatch.SetNext(f.next) nextMatch.SetNext(f.next)
+2 -4
@@ -1,8 +1,7 @@
package jpath package toml
import ( import (
"fmt" "fmt"
. "github.com/pelletier/go-toml"
"math" "math"
"testing" "testing"
) )
@@ -58,8 +57,7 @@ func assertPathMatch(t *testing.T, path, ref *Query) bool {
} }
func assertPath(t *testing.T, query string, ref *Query) { func assertPath(t *testing.T, query string, ref *Query) {
_, flow := lex(query) path, _:= parseQuery(lexQuery(query))
path := parse(flow)
assertPathMatch(t, path, ref) assertPathMatch(t, path, ref)
} }
+28 -30
@@ -10,7 +10,7 @@ import (
"time" "time"
) )
type parser struct { type tomlParser struct {
flow chan token flow chan token
tree *TomlTree tree *TomlTree
tokensBuffer []token tokensBuffer []token
@@ -18,20 +18,20 @@ type parser struct {
seenGroupKeys []string seenGroupKeys []string
} }
type parserStateFn func(*parser) parserStateFn type tomlParserStateFn func() tomlParserStateFn
// Formats and panics an error message based on a token // Formats and panics an error message based on a token
func (p *parser) raiseError(tok *token, msg string, args ...interface{}) { func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) {
panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...)) panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...))
} }
func (p *parser) run() { func (p *tomlParser) run() {
for state := parseStart; state != nil; { for state := p.parseStart; state != nil; {
state = state(p) state = state()
} }
} }
func (p *parser) peek() *token { func (p *tomlParser) peek() *token {
if len(p.tokensBuffer) != 0 { if len(p.tokensBuffer) != 0 {
return &(p.tokensBuffer[0]) return &(p.tokensBuffer[0])
} }
@@ -44,7 +44,7 @@ func (p *parser) peek() *token {
return &tok return &tok
} }
func (p *parser) assume(typ tokenType) { func (p *tomlParser) assume(typ tokenType) {
tok := p.getToken() tok := p.getToken()
if tok == nil { if tok == nil {
p.raiseError(tok, "was expecting token %s, but token stream is empty", tok) p.raiseError(tok, "was expecting token %s, but token stream is empty", tok)
@@ -54,7 +54,7 @@ func (p *parser) assume(typ tokenType) {
} }
} }
func (p *parser) getToken() *token { func (p *tomlParser) getToken() *token {
if len(p.tokensBuffer) != 0 { if len(p.tokensBuffer) != 0 {
tok := p.tokensBuffer[0] tok := p.tokensBuffer[0]
p.tokensBuffer = p.tokensBuffer[1:] p.tokensBuffer = p.tokensBuffer[1:]
@@ -67,7 +67,7 @@ func (p *parser) getToken() *token {
return &tok return &tok
} }
func parseStart(p *parser) parserStateFn { func (p *tomlParser) parseStart() tomlParserStateFn {
tok := p.peek() tok := p.peek()
// end of stream, parsing is finished // end of stream, parsing is finished
@@ -77,11 +77,11 @@ func parseStart(p *parser) parserStateFn {
switch tok.typ { switch tok.typ {
case tokenDoubleLeftBracket: case tokenDoubleLeftBracket:
return parseGroupArray return p.parseGroupArray
case tokenLeftBracket: case tokenLeftBracket:
return parseGroup return p.parseGroup
case tokenKey: case tokenKey:
return parseAssign return p.parseAssign
case tokenEOF: case tokenEOF:
return nil return nil
default: default:
@@ -90,7 +90,7 @@ func parseStart(p *parser) parserStateFn {
return nil return nil
} }
func parseGroupArray(p *parser) parserStateFn { func (p *tomlParser) parseGroupArray() tomlParserStateFn {
startToken := p.getToken() // discard the [[ startToken := p.getToken() // discard the [[
key := p.getToken() key := p.getToken()
if key.typ != tokenKeyGroupArray { if key.typ != tokenKeyGroupArray {
@@ -99,7 +99,7 @@ func parseGroupArray(p *parser) parserStateFn {
// get or create group array element at the indicated part in the path // get or create group array element at the indicated part in the path
keys := strings.Split(key.val, ".") keys := strings.Split(key.val, ".")
p.tree.createSubTree(keys[:len(keys)-1]) // create parent entries p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
destTree := p.tree.GetPath(keys) destTree := p.tree.GetPath(keys)
var array []*TomlTree var array []*TomlTree
if destTree == nil { if destTree == nil {
@@ -137,10 +137,10 @@ func parseGroupArray(p *parser) parserStateFn {
// move to next parser state // move to next parser state
p.assume(tokenDoubleRightBracket) p.assume(tokenDoubleRightBracket)
return parseStart(p) return p.parseStart
} }
func parseGroup(p *parser) parserStateFn { func (p *tomlParser) parseGroup() tomlParserStateFn {
startToken := p.getToken() // discard the [ startToken := p.getToken() // discard the [
key := p.getToken() key := p.getToken()
if key.typ != tokenKeyGroup { if key.typ != tokenKeyGroup {
@@ -154,20 +154,18 @@ func parseGroup(p *parser) parserStateFn {
p.seenGroupKeys = append(p.seenGroupKeys, key.val) p.seenGroupKeys = append(p.seenGroupKeys, key.val)
keys := strings.Split(key.val, ".") keys := strings.Split(key.val, ".")
if err := p.tree.createSubTree(keys); err != nil { if err := p.tree.createSubTree(keys,startToken.Position); err != nil {
p.raiseError(key, "%s", err) p.raiseError(key, "%s", err)
} }
p.assume(tokenRightBracket) p.assume(tokenRightBracket)
p.currentGroup = keys p.currentGroup = keys
targetTree := p.tree.GetPath(p.currentGroup).(*TomlTree) return p.parseStart
targetTree.position = startToken.Position
return parseStart(p)
} }
func parseAssign(p *parser) parserStateFn { func (p *tomlParser) parseAssign() tomlParserStateFn {
key := p.getToken() key := p.getToken()
p.assume(tokenEqual) p.assume(tokenEqual)
value := parseRvalue(p) value := p.parseRvalue()
var groupKey []string var groupKey []string
if len(p.currentGroup) > 0 { if len(p.currentGroup) > 0 {
groupKey = p.currentGroup groupKey = p.currentGroup
@@ -195,10 +193,10 @@ func parseAssign(p *parser) parserStateFn {
strings.Join(finalKey, ".")) strings.Join(finalKey, "."))
} }
targetNode.values[key.val] = &tomlValue{value, key.Position} targetNode.values[key.val] = &tomlValue{value, key.Position}
return parseStart(p) return p.parseStart
} }
func parseRvalue(p *parser) interface{} { func (p *tomlParser) parseRvalue() interface{} {
tok := p.getToken() tok := p.getToken()
if tok == nil || tok.typ == tokenEOF { if tok == nil || tok.typ == tokenEOF {
p.raiseError(tok, "expecting a value") p.raiseError(tok, "expecting a value")
@@ -230,7 +228,7 @@ func parseRvalue(p *parser) interface{} {
} }
return val return val
case tokenLeftBracket: case tokenLeftBracket:
return parseArray(p) return p.parseArray()
case tokenError: case tokenError:
p.raiseError(tok, "%s", tok) p.raiseError(tok, "%s", tok)
} }
@@ -240,7 +238,7 @@ func parseRvalue(p *parser) interface{} {
return nil return nil
} }
func parseArray(p *parser) []interface{} { func (p *tomlParser) parseArray() []interface{} {
var array []interface{} var array []interface{}
arrayType := reflect.TypeOf(nil) arrayType := reflect.TypeOf(nil)
for { for {
@@ -252,7 +250,7 @@ func parseArray(p *parser) []interface{} {
p.getToken() p.getToken()
return array return array
} }
val := parseRvalue(p) val := p.parseRvalue()
if arrayType == nil { if arrayType == nil {
arrayType = reflect.TypeOf(val) arrayType = reflect.TypeOf(val)
} }
@@ -274,10 +272,10 @@ func parseArray(p *parser) []interface{} {
return array return array
} }
func parse(flow chan token) *TomlTree { func parseToml(flow chan token) *TomlTree {
result := newTomlTree() result := newTomlTree()
result.position = Position{1,1} result.position = Position{1,1}
parser := &parser{ parser := &tomlParser{
flow: flow, flow: flow,
tree: result, tree: result,
tokensBuffer: make([]token, 0), tokensBuffer: make([]token, 0),
+14 -2
@@ -31,7 +31,7 @@ func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interfac
func TestCreateSubTree(t *testing.T) { func TestCreateSubTree(t *testing.T) {
tree := newTomlTree() tree := newTomlTree()
tree.createSubTree([]string{"a", "b", "c"}) tree.createSubTree([]string{"a", "b", "c"}, Position{})
tree.Set("a.b.c", 42) tree.Set("a.b.c", 42)
if tree.Get("a.b.c") != 42 { if tree.Get("a.b.c") != 42 {
t.Fail() t.Fail()
@@ -385,7 +385,7 @@ func assertPosition(t *testing.T, text string, ref map[string]Position) {
for path, pos := range ref { for path, pos := range ref {
testPos := tree.GetPosition(path) testPos := tree.GetPosition(path)
if testPos.Invalid() { if testPos.Invalid() {
t.Errorf("Failed to query tree path: %s", path) t.Errorf("Failed to query tree path or path has invalid position: %s", path)
} else if pos != testPos { } else if pos != testPos {
t.Errorf("Expected position %v, got %v instead", pos, testPos) t.Errorf("Expected position %v, got %v instead", pos, testPos)
} }
@@ -424,3 +424,15 @@ func TestDocumentPositionsWithGroupArray(t *testing.T) {
"foo.baz": Position{3, 1}, "foo.baz": Position{3, 1},
}) })
} }
func TestNestedTreePosition(t *testing.T) {
assertPosition(t,
"[foo.bar]\na=42\nb=69",
map[string]Position{
"": Position{1, 1},
"foo": Position{1, 1},
"foo.bar": Position{1, 1},
"foo.bar.a": Position{2, 1},
"foo.bar.b": Position{3, 1},
})
}
+8 -20
@@ -1,8 +1,4 @@
package jpath package toml
import (
. "github.com/pelletier/go-toml"
)
type nodeFilterFn func(node interface{}) bool type nodeFilterFn func(node interface{}) bool
type nodeFn func(node interface{}) interface{} type nodeFn func(node interface{}) interface{}
@@ -12,18 +8,9 @@ type QueryResult struct {
positions []Position positions []Position
} }
// TODO: modify after merging with rest of lib func (r *QueryResult) appendResult(node interface{}, pos Position) {
func (r *QueryResult) appendResult(node interface{}) {
r.items = append(r.items, node) r.items = append(r.items, node)
switch castNode := node.(type) { r.positions = append(r.positions, pos)
case *TomlTree:
r.positions = append(r.positions, castNode.GetPosition(""))
//r.positions = append(r.positions, castNode.position)
//case *tomlValue:
//r.positions = append(r.positions, castNode.position)
default:
r.positions = append(r.positions, Position{})
}
} }
func (r *QueryResult) Values() []interface{} { func (r *QueryResult) Values() []interface{} {
@@ -39,6 +26,7 @@ type queryContext struct {
result *QueryResult result *QueryResult
filters *map[string]nodeFilterFn filters *map[string]nodeFilterFn
scripts *map[string]nodeFn scripts *map[string]nodeFn
lastPosition Position
} }
// generic path functor interface // generic path functor interface
@@ -74,9 +62,9 @@ func (q *Query) appendPath(next PathFn) {
next.SetNext(newTerminatingFn()) // init the next functor next.SetNext(newTerminatingFn()) // init the next functor
} }
func Compile(path string) *Query { // TODO: return (err,query) instead
_, flow := lex(path) func Compile(path string) (*Query, error) {
return parse(flow) return parseQuery(lexQuery(path))
} }
func (q *Query) Execute(tree *TomlTree) *QueryResult { func (q *Query) Execute(tree *TomlTree) *QueryResult {
@@ -85,7 +73,7 @@ func (q *Query) Execute(tree *TomlTree) *QueryResult {
positions: []Position{}, positions: []Position{},
} }
if q.root == nil { if q.root == nil {
result.appendResult(tree) result.appendResult(tree, tree.GetPosition(""))
} else { } else {
ctx := &queryContext{ ctx := &queryContext{
result: result, result: result,
+37 -152
@@ -1,131 +1,22 @@
// TOML JSONPath lexer. // TOML JSONPath lexer.
// //
// Written using the principles developped by Rob Pike in // Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE // http://www.youtube.com/watch?v=HxaD_trXwRE
package jpath package toml
import ( import (
"fmt" "fmt"
. "github.com/pelletier/go-toml"
"regexp"
"strconv" "strconv"
"strings" "strings"
"unicode"
"unicode/utf8" "unicode/utf8"
) )
var dateRegexp *regexp.Regexp // Lexer state function
type queryLexStateFn func() queryLexStateFn
// Define tokens // Lexer definition
type tokenType int type queryLexer struct {
const (
eof = -(iota + 1)
)
const (
tokenError tokenType = iota
tokenEOF
tokenKey
tokenString
tokenFloat
tokenInteger
tokenAtCost
tokenDollar
tokenLBracket
tokenRBracket
tokenDot
tokenDotDot
tokenStar
tokenComma
tokenColon
tokenQuestion
tokenLParen
tokenRParen
)
var tokenTypeNames = []string{
"EOF",
"Key",
"String",
"Float",
"Integer",
"@",
"$",
"[",
"]",
".",
"..",
"*",
",",
":",
"?",
"(",
")",
}
type token struct {
Position
typ tokenType
val string
}
func (tt tokenType) String() string {
idx := int(tt)
if idx < len(tokenTypeNames) {
return tokenTypeNames[idx]
}
return "Unknown"
}
func (t token) Int() int {
if result, err := strconv.Atoi(t.val); err != nil {
panic(err)
} else {
return result
}
}
func (t token) String() string {
switch t.typ {
case tokenEOF:
return "EOF"
case tokenError:
return t.val
}
if len(t.val) > 10 {
return fmt.Sprintf("%.10q...", t.val)
}
return fmt.Sprintf("%q", t.val)
}
func isSpace(r rune) bool {
return r == ' ' || r == '\t'
}
func isAlphanumeric(r rune) bool {
return unicode.IsLetter(r) || r == '_'
}
func isKeyChar(r rune) bool {
// "Keys start with the first non-whitespace character and end with the last
// non-whitespace character before the equals sign."
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=')
}
func isDigit(r rune) bool {
return unicode.IsNumber(r)
}
func isHexDigit(r rune) bool {
return isDigit(r) ||
r == 'A' || r == 'B' || r == 'C' || r == 'D' || r == 'E' || r == 'F'
}
// Define lexer
type lexer struct {
input string input string
start int start int
pos int pos int
@@ -137,14 +28,14 @@ type lexer struct {
stringTerm string stringTerm string
} }
func (l *lexer) run() { func (l *queryLexer) run() {
for state := lexVoid; state != nil; { for state := l.lexVoid; state != nil; {
state = state(l) state = state()
} }
close(l.tokens) close(l.tokens)
} }
func (l *lexer) nextStart() { func (l *queryLexer) nextStart() {
// iterate by runes (utf8 characters) // iterate by runes (utf8 characters)
// search for newlines and advance line/col counts // search for newlines and advance line/col counts
for i := l.start; i < l.pos; { for i := l.start; i < l.pos; {
@@ -161,7 +52,7 @@ func (l *lexer) nextStart() {
l.start = l.pos l.start = l.pos
} }
func (l *lexer) emit(t tokenType) { func (l *queryLexer) emit(t tokenType) {
l.tokens <- token{ l.tokens <- token{
Position: Position{l.line, l.col}, Position: Position{l.line, l.col},
typ: t, typ: t,
@@ -170,7 +61,7 @@ func (l *lexer) emit(t tokenType) {
l.nextStart() l.nextStart()
} }
func (l *lexer) emitWithValue(t tokenType, value string) { func (l *queryLexer) emitWithValue(t tokenType, value string) {
l.tokens <- token{ l.tokens <- token{
Position: Position{l.line, l.col}, Position: Position{l.line, l.col},
typ: t, typ: t,
@@ -179,7 +70,7 @@ func (l *lexer) emitWithValue(t tokenType, value string) {
l.nextStart() l.nextStart()
} }
func (l *lexer) next() rune { func (l *queryLexer) next() rune {
if l.pos >= len(l.input) { if l.pos >= len(l.input) {
l.width = 0 l.width = 0
return eof return eof
@@ -190,15 +81,15 @@ func (l *lexer) next() rune {
return r return r
} }
func (l *lexer) ignore() { func (l *queryLexer) ignore() {
l.nextStart() l.nextStart()
} }
func (l *lexer) backup() { func (l *queryLexer) backup() {
l.pos -= l.width l.pos -= l.width
} }
func (l *lexer) errorf(format string, args ...interface{}) stateFn { func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
l.tokens <- token{ l.tokens <- token{
Position: Position{l.line, l.col}, Position: Position{l.line, l.col},
typ: tokenError, typ: tokenError,
@@ -207,13 +98,13 @@ func (l *lexer) errorf(format string, args ...interface{}) stateFn {
return nil return nil
} }
func (l *lexer) peek() rune { func (l *queryLexer) peek() rune {
r := l.next() r := l.next()
l.backup() l.backup()
return r return r
} }
func (l *lexer) accept(valid string) bool { func (l *queryLexer) accept(valid string) bool {
if strings.IndexRune(valid, l.next()) >= 0 { if strings.IndexRune(valid, l.next()) >= 0 {
return true return true
} }
@@ -221,14 +112,12 @@ func (l *lexer) accept(valid string) bool {
return false return false
} }
func (l *lexer) follow(next string) bool { func (l *queryLexer) follow(next string) bool {
return strings.HasPrefix(l.input[l.pos:], next) return strings.HasPrefix(l.input[l.pos:], next)
} }
// Define state functions
type stateFn func(*lexer) stateFn
func lexVoid(l *lexer) stateFn { func (l *queryLexer) lexVoid() queryLexStateFn {
for { for {
next := l.peek() next := l.peek()
switch next { switch next {
@@ -245,17 +134,13 @@ func lexVoid(l *lexer) stateFn {
l.emit(tokenDot) l.emit(tokenDot)
} }
continue continue
case '@':
l.pos++
l.emit(tokenAtCost)
continue
case '[': case '[':
l.pos++ l.pos++
l.emit(tokenLBracket) l.emit(tokenLeftBracket)
continue continue
case ']': case ']':
l.pos++ l.pos++
l.emit(tokenRBracket) l.emit(tokenRightBracket)
continue continue
case ',': case ',':
l.pos++ l.pos++
@@ -267,11 +152,11 @@ func lexVoid(l *lexer) stateFn {
continue continue
case '(': case '(':
l.pos++ l.pos++
l.emit(tokenLParen) l.emit(tokenLeftParen)
continue continue
case ')': case ')':
l.pos++ l.pos++
l.emit(tokenRParen) l.emit(tokenRightParen)
continue continue
case '?': case '?':
l.pos++ l.pos++
@@ -284,11 +169,11 @@ func lexVoid(l *lexer) stateFn {
case '\'': case '\'':
l.ignore() l.ignore()
l.stringTerm = string(next) l.stringTerm = string(next)
return lexString return l.lexString
case '"': case '"':
l.ignore() l.ignore()
l.stringTerm = string(next) l.stringTerm = string(next)
return lexString return l.lexString
} }
if isSpace(next) { if isSpace(next) {
@@ -298,11 +183,11 @@ func lexVoid(l *lexer) stateFn {
} }
if isAlphanumeric(next) { if isAlphanumeric(next) {
return lexKey return l.lexKey
} }
if next == '+' || next == '-' || isDigit(next) { if next == '+' || next == '-' || isDigit(next) {
return lexNumber return l.lexNumber
} }
if l.next() == eof { if l.next() == eof {
@@ -315,12 +200,12 @@ func lexVoid(l *lexer) stateFn {
return nil return nil
} }
func lexKey(l *lexer) stateFn { func (l *queryLexer) lexKey() queryLexStateFn {
for { for {
next := l.peek() next := l.peek()
if !isAlphanumeric(next) { if !isAlphanumeric(next) {
l.emit(tokenKey) l.emit(tokenKey)
return lexVoid return l.lexVoid
} }
if l.next() == eof { if l.next() == eof {
@@ -331,7 +216,7 @@ func lexKey(l *lexer) stateFn {
return nil return nil
} }
func lexString(l *lexer) stateFn { func (l *queryLexer) lexString() queryLexStateFn {
l.pos++ l.pos++
l.ignore() l.ignore()
growingString := "" growingString := ""
@@ -341,7 +226,7 @@ func lexString(l *lexer) stateFn {
l.emitWithValue(tokenString, growingString) l.emitWithValue(tokenString, growingString)
l.pos++ l.pos++
l.ignore() l.ignore()
return lexVoid return l.lexVoid
} }
if l.follow("\\\"") { if l.follow("\\\"") {
@@ -403,7 +288,7 @@ func lexString(l *lexer) stateFn {
return l.errorf("unclosed string") return l.errorf("unclosed string")
} }
func lexNumber(l *lexer) stateFn { func (l *queryLexer) lexNumber() queryLexStateFn {
l.ignore() l.ignore()
if !l.accept("+") { if !l.accept("+") {
l.accept("-") l.accept("-")
@@ -439,17 +324,17 @@ func lexNumber(l *lexer) stateFn {
} else { } else {
l.emit(tokenInteger) l.emit(tokenInteger)
} }
return lexVoid return l.lexVoid
} }
// Entry point // Entry point
func lex(input string) (*lexer, chan token) { func lexQuery(input string) chan token {
l := &lexer{ l := &queryLexer{
input: input, input: input,
tokens: make(chan token), tokens: make(chan token),
line: 1, line: 1,
col: 1, col: 1,
} }
go l.run() go l.run()
return l, l.tokens return l.tokens
} }
+16 -18
@@ -1,12 +1,11 @@
package jpath package toml
import ( import (
. "github.com/pelletier/go-toml"
"testing" "testing"
) )
func testFlow(t *testing.T, input string, expectedFlow []token) { func testQLFlow(t *testing.T, input string, expectedFlow []token) {
_, ch := lex(input) ch := lexQuery(input)
for idx, expected := range expectedFlow { for idx, expected := range expectedFlow {
token := <-ch token := <-ch
if token != expected { if token != expected {
@@ -34,15 +33,14 @@ func testFlow(t *testing.T, input string, expectedFlow []token) {
} }
func TestLexSpecialChars(t *testing.T) { func TestLexSpecialChars(t *testing.T) {
testFlow(t, "@.$[]..()?*", []token{ testQLFlow(t, " .$[]..()?*", []token{
token{Position{1, 1}, tokenAtCost, "@"},
token{Position{1, 2}, tokenDot, "."}, token{Position{1, 2}, tokenDot, "."},
token{Position{1, 3}, tokenDollar, "$"}, token{Position{1, 3}, tokenDollar, "$"},
token{Position{1, 4}, tokenLBracket, "["}, token{Position{1, 4}, tokenLeftBracket, "["},
token{Position{1, 5}, tokenRBracket, "]"}, token{Position{1, 5}, tokenRightBracket, "]"},
token{Position{1, 6}, tokenDotDot, ".."}, token{Position{1, 6}, tokenDotDot, ".."},
token{Position{1, 8}, tokenLParen, "("}, token{Position{1, 8}, tokenLeftParen, "("},
token{Position{1, 9}, tokenRParen, ")"}, token{Position{1, 9}, tokenRightParen, ")"},
token{Position{1, 10}, tokenQuestion, "?"}, token{Position{1, 10}, tokenQuestion, "?"},
token{Position{1, 11}, tokenStar, "*"}, token{Position{1, 11}, tokenStar, "*"},
token{Position{1, 12}, tokenEOF, ""}, token{Position{1, 12}, tokenEOF, ""},
@@ -50,28 +48,28 @@ func TestLexSpecialChars(t *testing.T) {
} }
func TestLexString(t *testing.T) { func TestLexString(t *testing.T) {
testFlow(t, "'foo'", []token{ testQLFlow(t, "'foo'", []token{
token{Position{1, 2}, tokenString, "foo"}, token{Position{1, 2}, tokenString, "foo"},
token{Position{1, 6}, tokenEOF, ""}, token{Position{1, 6}, tokenEOF, ""},
}) })
} }
func TestLexDoubleString(t *testing.T) { func TestLexDoubleString(t *testing.T) {
testFlow(t, `"bar"`, []token{ testQLFlow(t, `"bar"`, []token{
token{Position{1, 2}, tokenString, "bar"}, token{Position{1, 2}, tokenString, "bar"},
token{Position{1, 6}, tokenEOF, ""}, token{Position{1, 6}, tokenEOF, ""},
}) })
} }
func TestLexKey(t *testing.T) { func TestLexKey(t *testing.T) {
testFlow(t, "foo", []token{ testQLFlow(t, "foo", []token{
token{Position{1, 1}, tokenKey, "foo"}, token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 4}, tokenEOF, ""}, token{Position{1, 4}, tokenEOF, ""},
}) })
} }
func TestLexRecurse(t *testing.T) { func TestLexRecurse(t *testing.T) {
testFlow(t, "$..*", []token{ testQLFlow(t, "$..*", []token{
token{Position{1, 1}, tokenDollar, "$"}, token{Position{1, 1}, tokenDollar, "$"},
token{Position{1, 2}, tokenDotDot, ".."}, token{Position{1, 2}, tokenDotDot, ".."},
token{Position{1, 4}, tokenStar, "*"}, token{Position{1, 4}, tokenStar, "*"},
@@ -80,17 +78,17 @@ func TestLexRecurse(t *testing.T) {
} }
func TestLexBracketKey(t *testing.T) { func TestLexBracketKey(t *testing.T) {
testFlow(t, "$[foo]", []token{ testQLFlow(t, "$[foo]", []token{
token{Position{1, 1}, tokenDollar, "$"}, token{Position{1, 1}, tokenDollar, "$"},
token{Position{1, 2}, tokenLBracket, "["}, token{Position{1, 2}, tokenLeftBracket, "["},
token{Position{1, 3}, tokenKey, "foo"}, token{Position{1, 3}, tokenKey, "foo"},
token{Position{1, 6}, tokenRBracket, "]"}, token{Position{1, 6}, tokenRightBracket, "]"},
token{Position{1, 7}, tokenEOF, ""}, token{Position{1, 7}, tokenEOF, ""},
}) })
} }
func TestLexSpace(t *testing.T) { func TestLexSpace(t *testing.T) {
testFlow(t, "foo bar baz", []token{ testQLFlow(t, "foo bar baz", []token{
token{Position{1, 1}, tokenKey, "foo"}, token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenKey, "bar"}, token{Position{1, 5}, tokenKey, "bar"},
token{Position{1, 9}, tokenKey, "baz"}, token{Position{1, 9}, tokenKey, "baz"},
+73 -71
@@ -5,38 +5,40 @@
https://code.google.com/p/json-path/ https://code.google.com/p/json-path/
*/ */
package jpath package toml
import ( import (
"fmt" "fmt"
"math" "math"
) )
type parser struct { type queryParser struct {
flow chan token flow chan token
tokensBuffer []token tokensBuffer []token
path *Query query *Query
union []PathFn union []PathFn
err error
} }
type parserStateFn func(*parser) parserStateFn type queryParserStateFn func() queryParserStateFn
// Formats and panics an error message based on a token // Formats and panics an error message based on a token
func (p *parser) raiseError(tok *token, msg string, args ...interface{}) { func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...)) p.err = fmt.Errorf(tok.Position.String() + ": " + msg, args...)
return nil // trigger parse to end
} }
func (p *parser) run() { func (p *queryParser) run() {
for state := parseStart; state != nil; { for state := p.parseStart; state != nil; {
state = state(p) state = state()
} }
} }
func (p *parser) backup(tok *token) { func (p *queryParser) backup(tok *token) {
p.tokensBuffer = append(p.tokensBuffer, *tok) p.tokensBuffer = append(p.tokensBuffer, *tok)
} }
func (p *parser) peek() *token { func (p *queryParser) peek() *token {
if len(p.tokensBuffer) != 0 { if len(p.tokensBuffer) != 0 {
return &(p.tokensBuffer[0]) return &(p.tokensBuffer[0])
} }
@@ -49,7 +51,7 @@ func (p *parser) peek() *token {
return &tok return &tok
} }
func (p *parser) lookahead(types ...tokenType) bool { func (p *queryParser) lookahead(types ...tokenType) bool {
result := true result := true
buffer := []token{} buffer := []token{}
@@ -70,7 +72,7 @@ func (p *parser) lookahead(types ...tokenType) bool {
return result return result
} }
func (p *parser) getToken() *token { func (p *queryParser) getToken() *token {
if len(p.tokensBuffer) != 0 { if len(p.tokensBuffer) != 0 {
tok := p.tokensBuffer[0] tok := p.tokensBuffer[0]
p.tokensBuffer = p.tokensBuffer[1:] p.tokensBuffer = p.tokensBuffer[1:]
@@ -83,7 +85,7 @@ func (p *parser) getToken() *token {
return &tok return &tok
} }
func parseStart(p *parser) parserStateFn { func (p *queryParser) parseStart() queryParserStateFn {
tok := p.getToken() tok := p.getToken()
if tok == nil || tok.typ == tokenEOF { if tok == nil || tok.typ == tokenEOF {
@@ -91,29 +93,29 @@ func parseStart(p *parser) parserStateFn {
} }
if tok.typ != tokenDollar { if tok.typ != tokenDollar {
p.raiseError(tok, "Expected '$' at start of expression") return p.parseError(tok, "Expected '$' at start of expression")
} }
return parseMatchExpr return p.parseMatchExpr
} }
// handle '.' prefix, '[]', and '..' // handle '.' prefix, '[]', and '..'
func parseMatchExpr(p *parser) parserStateFn { func (p *queryParser) parseMatchExpr() queryParserStateFn {
tok := p.getToken() tok := p.getToken()
switch tok.typ { switch tok.typ {
case tokenDotDot: case tokenDotDot:
p.path.appendPath(&matchRecursiveFn{}) p.query.appendPath(&matchRecursiveFn{})
// nested parse for '..' // nested parse for '..'
tok := p.getToken() tok := p.getToken()
switch tok.typ { switch tok.typ {
case tokenKey: case tokenKey:
p.path.appendPath(newMatchKeyFn(tok.val)) p.query.appendPath(newMatchKeyFn(tok.val))
return parseMatchExpr return p.parseMatchExpr
case tokenLBracket: case tokenLeftBracket:
return parseBracketExpr return p.parseBracketExpr
case tokenStar: case tokenStar:
// do nothing - the recursive predicate is enough // do nothing - the recursive predicate is enough
return parseMatchExpr return p.parseMatchExpr
} }
case tokenDot: case tokenDot:
@@ -121,34 +123,34 @@ func parseMatchExpr(p *parser) parserStateFn {
tok := p.getToken() tok := p.getToken()
switch tok.typ { switch tok.typ {
case tokenKey: case tokenKey:
p.path.appendPath(newMatchKeyFn(tok.val)) p.query.appendPath(newMatchKeyFn(tok.val))
return parseMatchExpr return p.parseMatchExpr
case tokenStar: case tokenStar:
p.path.appendPath(&matchAnyFn{}) p.query.appendPath(&matchAnyFn{})
return parseMatchExpr return p.parseMatchExpr
} }
case tokenLBracket: case tokenLeftBracket:
return parseBracketExpr return p.parseBracketExpr
case tokenEOF: case tokenEOF:
return nil // allow EOF at this stage return nil // allow EOF at this stage
} }
p.raiseError(tok, "expected match expression") return p.parseError(tok, "expected match expression")
return nil return nil
} }
func parseBracketExpr(p *parser) parserStateFn { func (p *queryParser) parseBracketExpr() queryParserStateFn {
if p.lookahead(tokenInteger, tokenColon) { if p.lookahead(tokenInteger, tokenColon) {
return parseSliceExpr return p.parseSliceExpr
} }
if p.peek().typ == tokenColon { if p.peek().typ == tokenColon {
return parseSliceExpr return p.parseSliceExpr
} }
return parseUnionExpr return p.parseUnionExpr
} }
func parseUnionExpr(p *parser) parserStateFn { func (p *queryParser) parseUnionExpr() queryParserStateFn {
var tok *token var tok *token
// this state can be traversed after some sub-expressions // this state can be traversed after some sub-expressions
@@ -165,10 +167,10 @@ loop: // labeled loop for easy breaking
switch tok.typ { switch tok.typ {
case tokenComma: case tokenComma:
// do nothing // do nothing
case tokenRBracket: case tokenRightBracket:
break loop break loop
default: default:
p.raiseError(tok, "expected ',' or ']', not '%s'", tok.val) return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
} }
} }
@@ -182,26 +184,26 @@ loop: // labeled loop for easy breaking
case tokenString: case tokenString:
p.union = append(p.union, newMatchKeyFn(tok.val)) p.union = append(p.union, newMatchKeyFn(tok.val))
case tokenQuestion: case tokenQuestion:
return parseFilterExpr return p.parseFilterExpr
case tokenLParen: case tokenLeftParen:
return parseScriptExpr return p.parseScriptExpr
default: default:
p.raiseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union)) return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
} }
} }
// if there is only one sub-expression, use that instead // if there is only one sub-expression, use that instead
if len(p.union) == 1 { if len(p.union) == 1 {
p.path.appendPath(p.union[0]) p.query.appendPath(p.union[0])
} else { } else {
p.path.appendPath(&matchUnionFn{p.union}) p.query.appendPath(&matchUnionFn{p.union})
} }
p.union = nil // clear out state p.union = nil // clear out state
return parseMatchExpr return p.parseMatchExpr
} }
func parseSliceExpr(p *parser) parserStateFn { func (p *queryParser) parseSliceExpr() queryParserStateFn {
// init slice to grab all elements // init slice to grab all elements
start, end, step := 0, math.MaxInt64, 1 start, end, step := 0, math.MaxInt64, 1
@@ -212,7 +214,7 @@ func parseSliceExpr(p *parser) parserStateFn {
tok = p.getToken() tok = p.getToken()
} }
if tok.typ != tokenColon { if tok.typ != tokenColon {
p.raiseError(tok, "expected ':'") return p.parseError(tok, "expected ':'")
} }
// parse optional end // parse optional end
@@ -221,12 +223,12 @@ func parseSliceExpr(p *parser) parserStateFn {
end = tok.Int() end = tok.Int()
tok = p.getToken() tok = p.getToken()
} }
if tok.typ == tokenRBracket { if tok.typ == tokenRightBracket {
p.path.appendPath(newMatchSliceFn(start, end, step)) p.query.appendPath(newMatchSliceFn(start, end, step))
return parseMatchExpr return p.parseMatchExpr
} }
if tok.typ != tokenColon { if tok.typ != tokenColon {
p.raiseError(tok, "expected ']' or ':'") return p.parseError(tok, "expected ']' or ':'")
} }
// parse optional step // parse optional step
@@ -234,56 +236,56 @@ func parseSliceExpr(p *parser) parserStateFn {
if tok.typ == tokenInteger { if tok.typ == tokenInteger {
step = tok.Int() step = tok.Int()
if step < 0 { if step < 0 {
p.raiseError(tok, "step must be a positive value") return p.parseError(tok, "step must be a positive value")
} }
tok = p.getToken() tok = p.getToken()
} }
if tok.typ != tokenRBracket { if tok.typ != tokenRightBracket {
p.raiseError(tok, "expected ']'") return p.parseError(tok, "expected ']'")
} }
p.path.appendPath(newMatchSliceFn(start, end, step)) p.query.appendPath(newMatchSliceFn(start, end, step))
return parseMatchExpr return p.parseMatchExpr
} }
func parseFilterExpr(p *parser) parserStateFn { func (p *queryParser) parseFilterExpr() queryParserStateFn {
tok := p.getToken() tok := p.getToken()
if tok.typ != tokenLParen { if tok.typ != tokenLeftParen {
p.raiseError(tok, "expected left-parenthesis for filter expression") return p.parseError(tok, "expected left-parenthesis for filter expression")
} }
tok = p.getToken() tok = p.getToken()
if tok.typ != tokenKey && tok.typ != tokenString { if tok.typ != tokenKey && tok.typ != tokenString {
p.raiseError(tok, "expected key or string for filter funciton name") return p.parseError(tok, "expected key or string for filter funciton name")
} }
name := tok.val name := tok.val
tok = p.getToken() tok = p.getToken()
if tok.typ != tokenRParen { if tok.typ != tokenRightParen {
p.raiseError(tok, "expected right-parenthesis for filter expression") return p.parseError(tok, "expected right-parenthesis for filter expression")
} }
p.union = append(p.union, newMatchFilterFn(name, tok.Position)) p.union = append(p.union, newMatchFilterFn(name, tok.Position))
return parseUnionExpr return p.parseUnionExpr
} }
func parseScriptExpr(p *parser) parserStateFn { func (p *queryParser) parseScriptExpr() queryParserStateFn {
tok := p.getToken() tok := p.getToken()
if tok.typ != tokenKey && tok.typ != tokenString { if tok.typ != tokenKey && tok.typ != tokenString {
p.raiseError(tok, "expected key or string for script funciton name") return p.parseError(tok, "expected key or string for script funciton name")
} }
name := tok.val name := tok.val
tok = p.getToken() tok = p.getToken()
if tok.typ != tokenRParen { if tok.typ != tokenRightParen {
p.raiseError(tok, "expected right-parenthesis for script expression") return p.parseError(tok, "expected right-parenthesis for script expression")
} }
p.union = append(p.union, newMatchScriptFn(name, tok.Position)) p.union = append(p.union, newMatchScriptFn(name, tok.Position))
return parseUnionExpr return p.parseUnionExpr
} }
func parse(flow chan token) *Query { func parseQuery(flow chan token) (*Query, error) {
parser := &parser{ parser := &queryParser{
flow: flow, flow: flow,
tokensBuffer: []token{}, tokensBuffer: []token{},
path: newQuery(), query: newQuery(),
} }
parser.run() parser.run()
return parser.path return parser.query, parser.err
} }
+368
@@ -0,0 +1,368 @@
package toml
import (
"fmt"
"testing"
"sort"
"strings"
)
type queryTestNode struct {
value interface{}
position Position
}
func valueString(root interface{}) string {
result := "" //fmt.Sprintf("%T:", root)
switch node := root.(type) {
case *tomlValue:
return valueString(node.value)
case *QueryResult:
items := []string{}
for i, v := range node.Values() {
items = append(items, fmt.Sprintf("%s:%s",
node.Positions()[i].String(), valueString(v)))
}
sort.Strings(items)
result = "[" + strings.Join(items, ", ") + "]"
case queryTestNode:
result = fmt.Sprintf("%s:%s",
node.position.String(), valueString(node.value))
case []interface{}:
items := []string{}
for _, v := range node {
items = append(items, valueString(v))
}
sort.Strings(items)
result = "[" + strings.Join(items, ", ") + "]"
case *TomlTree:
// workaround for unreliable map key ordering
items := []string{}
for _, k := range node.Keys() {
v := node.GetPath([]string{k})
items = append(items, k + ":" + valueString(v))
}
sort.Strings(items)
result = "{" + strings.Join(items, ", ") + "}"
case map[string]interface{}:
// workaround for unreliable map key ordering
items := []string{}
for k, v := range node {
items = append(items, k + ":" + valueString(v))
}
sort.Strings(items)
result = "{" + strings.Join(items, ", ") + "}"
case int64:
result += fmt.Sprintf("%d", node)
case string:
result += "'" + node + "'"
}
return result
}
func assertValue(t *testing.T, result, ref interface{}) {
pathStr := valueString(result)
refStr := valueString(ref)
if pathStr != refStr {
t.Errorf("values do not match")
t.Log("test:", pathStr)
t.Log("ref: ", refStr)
}
}
func assertQueryPositions(t *testing.T, toml, query string, ref []interface{}) {
tree, err := Load(toml)
if err != nil {
t.Errorf("Non-nil toml parse error: %v", err)
return
}
q, err := Compile(query)
if err != nil {
t.Error(err)
return
}
results := q.Execute(tree)
assertValue(t, results, ref)
}
func TestQueryRoot(t *testing.T) {
assertQueryPositions(t,
"a = 42",
"$",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(42),
}, Position{1, 1},
},
})
}
func TestQueryKey(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = 42",
"$.foo.a",
[]interface{}{
queryTestNode{
int64(42), Position{2,1},
},
})
}
func TestQueryKeyString(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = 42",
"$.foo['a']",
[]interface{}{
queryTestNode{
int64(42), Position{2,1},
},
})
}
func TestQueryIndex(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[5]",
[]interface{}{
queryTestNode{
int64(6), Position{2,1},
},
})
}
func TestQuerySliceRange(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0:5]",
[]interface{}{
queryTestNode{
int64(1), Position{2,1},
},
queryTestNode{
int64(2), Position{2,1},
},
queryTestNode{
int64(3), Position{2,1},
},
queryTestNode{
int64(4), Position{2,1},
},
queryTestNode{
int64(5), Position{2,1},
},
})
}
func TestQuerySliceStep(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
"$.foo.a[0:5:2]",
[]interface{}{
queryTestNode{
int64(1), Position{2,1},
},
queryTestNode{
int64(3), Position{2,1},
},
queryTestNode{
int64(5), Position{2,1},
},
})
}
func TestQueryAny(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[foo.baz]\na=3\nb=4",
"$.foo.*",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, Position{1,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, Position{4,1},
},
})
}
func TestQueryUnionSimple(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$.*[bar,foo]",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, Position{1,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, Position{4,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(5),
"b": int64(6),
}, Position{7,1},
},
})
}
func TestQueryRecursionAll(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$..*",
[]interface{}{
queryTestNode{
map[string]interface{}{
"bar": map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
}, Position{1,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, Position{1,1},
},
queryTestNode{
int64(1), Position{2,1},
},
queryTestNode{
int64(2), Position{3,1},
},
queryTestNode{
map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
}, Position{4,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, Position{4,1},
},
queryTestNode{
int64(3), Position{5,1},
},
queryTestNode{
int64(4), Position{6,1},
},
queryTestNode{
map[string]interface{}{
"foo": map[string]interface{}{
"a": int64(5),
"b": int64(6),
},
}, Position{7,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(5),
"b": int64(6),
}, Position{7,1},
},
queryTestNode{
int64(5), Position{8,1},
},
queryTestNode{
int64(6), Position{9,1},
},
})
}
func TestQueryRecursionUnionSimple(t *testing.T) {
assertQueryPositions(t,
"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
"$..['foo','bar']",
[]interface{}{
queryTestNode{
map[string]interface{}{
"a": int64(1),
"b": int64(2),
}, Position{1,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(3),
"b": int64(4),
}, Position{4,1},
},
queryTestNode{
map[string]interface{}{
"a": int64(5),
"b": int64(6),
}, Position{7,1},
},
})
}
func TestQueryScriptFnLast(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
"$.foo.a[(last)]",
[]interface{}{
queryTestNode{
int64(9), Position{2,1},
},
})
}
func TestQueryFilterFnOdd(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
"$.foo.a[?(odd)]",
[]interface{}{
queryTestNode{
int64(1), Position{2,1},
},
queryTestNode{
int64(3), Position{2,1},
},
queryTestNode{
int64(5), Position{2,1},
},
queryTestNode{
int64(7), Position{2,1},
},
queryTestNode{
int64(9), Position{2,1},
},
})
}
func TestQueryFilterFnEven(t *testing.T) {
assertQueryPositions(t,
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
"$.foo.a[?(even)]",
[]interface{}{
queryTestNode{
int64(0), Position{2,1},
},
queryTestNode{
int64(2), Position{2,1},
},
queryTestNode{
int64(4), Position{2,1},
},
queryTestNode{
int64(6), Position{2,1},
},
queryTestNode{
int64(8), Position{2,1},
},
})
}
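The assertions above rely on QueryResult exposing two index-aligned slices, Values() and Positions(). As a hypothetical illustration (not part of this commit), application code could walk them in parallel like so:

package main

import (
	"fmt"

	"github.com/pelletier/go-toml"
)

// printMatches pairs each matched value with the position it was parsed at.
func printMatches(results *toml.QueryResult) {
	positions := results.Positions()
	for i, value := range results.Values() {
		// Position records where the matched node appeared in the source document.
		fmt.Printf("%s: %v\n", positions[i].String(), value)
	}
}

func main() {
	tree, _ := toml.Load("[foo]\na = 1\nb = 2\n")
	q, _ := toml.Compile("$.foo.*")
	printMatches(q.Execute(tree))
}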
-3
@@ -21,11 +21,8 @@ go build -o toml-test github.com/BurntSushi/toml-test
 mkdir -p src/github.com/pelletier/go-toml/cmd
 cp *.go *.toml src/github.com/pelletier/go-toml
 cp cmd/*.go src/github.com/pelletier/go-toml/cmd
-mkdir -p src/github.com/pelletier/go-toml/jpath
-cp jpath/*.go src/github.com/pelletier/go-toml/jpath
 go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go

 # Run basic unit tests and then the BurntSushi test suite
-go test -v github.com/pelletier/go-toml/jpath
 go test -v github.com/pelletier/go-toml
 ./toml-test ./test_program_bin | tee test_out
+31 -22
@@ -199,7 +199,7 @@ func (t *TomlTree) SetPath(keys []string, value interface{}) {
 // and tree[a][b][c]
 //
 // Returns nil on success, error object on failure
-func (t *TomlTree) createSubTree(keys []string) error {
+func (t *TomlTree) createSubTree(keys []string, pos Position) error {
 	subtree := t
 	for _, intermediateKey := range keys {
 		if intermediateKey == "" {
@@ -207,8 +207,10 @@ func (t *TomlTree) createSubTree(keys []string) error {
 		}
 		nextTree, exists := subtree.values[intermediateKey]
 		if !exists {
-			nextTree = newTomlTree()
-			subtree.values[intermediateKey] = nextTree
+			tree := newTomlTree()
+			tree.position = pos
+			subtree.values[intermediateKey] = tree
+			nextTree = tree
 		}
 		switch node := nextTree.(type) {
@@ -317,6 +319,14 @@ func (t *TomlTree) toToml(indent, keyspace string) string {
 	return result
 }

+func (t *TomlTree) Query(query string) (*QueryResult, error) {
+	if q, err := Compile(query); err != nil {
+		return nil, err
+	} else {
+		return q.Execute(t), nil
+	}
+}

 // ToString generates a human-readable representation of the current tree.
 // Output spans multiple lines, and is suitable for ingest by a TOML parser
 func (t *TomlTree) ToString() string {
@@ -325,27 +335,26 @@ func (t *TomlTree) ToString() string {
 // Load creates a TomlTree from a string.
 func Load(content string) (tree *TomlTree, err error) {
 	defer func() {
 		if r := recover(); r != nil {
 			if _, ok := r.(runtime.Error); ok {
 				panic(r)
 			}
 			err = errors.New(r.(string))
 		}
 	}()
-	_, flow := lex(content)
-	tree = parse(flow)
-	return
+	tree = parseToml(lexToml(content))
+	return
 }

 // LoadFile creates a TomlTree from a file.
 func LoadFile(path string) (tree *TomlTree, err error) {
 	buff, ferr := ioutil.ReadFile(path)
 	if ferr != nil {
 		err = ferr
 	} else {
 		s := string(buff)
 		tree, err = Load(s)
 	}
 	return
 }
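A hypothetical end-to-end sketch (again, not part of the diff) tying these changes together: TomlTree.Query compiles and runs a query in one call, and because createSubTree now stamps a Position on the tables it creates, even a table that only exists implicitly, such as foo created by the [foo.bar] header, reports a real position. The TestTomlQuery added below exercises the same method:

package main

import (
	"fmt"

	"github.com/pelletier/go-toml"
)

func main() {
	tree, err := toml.Load("[foo.bar]\na = 1\n")
	if err != nil {
		panic(err)
	}

	// "foo" was never written as its own table header; before this change
	// createSubTree left its position unset.
	result, err := tree.Query("$.foo")
	if err != nil {
		panic(err) // invalid query string
	}
	for _, pos := range result.Positions() {
		fmt.Println("matched table defined at", pos.String())
	}
}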
+25
@@ -47,3 +47,28 @@ func TestTomlGetPath(t *testing.T) {
 		}
 	}
 }
func TestTomlQuery(t *testing.T) {
tree, err := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
if err != nil {
t.Error(err)
return
}
result, err := tree.Query("$.foo.bar")
if err != nil {
t.Error(err)
return
}
values := result.Values()
if len(values) != 1 {
t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
}
if tt, ok := values[0].(*TomlTree); !ok {
t.Errorf("Expected type of TomlTree: %T Tv", values[0], values[0])
} else if tt.Get("a") != int64(1) {
t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
} else if tt.Get("b") != int64(2) {
t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
}
}