semi-functional prototype

This commit is contained in:
eanderton
2014-09-03 21:33:25 -04:00
parent fb5423fba2
commit 9942463786
6 changed files with 1334 additions and 0 deletions
+449
View File
@@ -0,0 +1,449 @@
// TOML JSONPath lexer.
//
// Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE
package jpath
import (
. "github.com/pelletier/go-toml"
"fmt"
"regexp"
"strconv"
"strings"
"unicode"
"unicode/utf8"
)
// dateRegexp is reserved for matching date literals.
// NOTE(review): never assigned or read in this file — confirm it is used
// elsewhere in the package or remove it.
var dateRegexp *regexp.Regexp

// Define tokens

// tokenType identifies the kind of a lexed token.
type tokenType int

const (
	// eof is the sentinel rune returned by lexer.next once input is
	// exhausted; it is negative so it cannot collide with any valid rune.
	eof = -(iota + 1)
)
// Token kinds. The order is significant: tokenTypeNames (below) is indexed
// by these values, so the two declarations must be kept in sync.
const (
	tokenError tokenType = iota
	tokenEOF
	tokenKey
	tokenString
	tokenFloat
	tokenInteger
	tokenAtCost
	tokenDollar
	tokenLBracket
	tokenRBracket
	tokenDot
	tokenDotDot
	tokenStar
	tokenComma
	tokenColon
	tokenQuestion
	tokenLParen
	tokenRParen
)
// tokenTypeNames maps a tokenType to its display name, indexed by the
// tokenType constant values. "Error" (for tokenError, value 0) was
// previously missing, which shifted every name by one (tokenEOF printed as
// "Key", etc.) and pushed tokenRParen off the end of the table.
var tokenTypeNames = []string{
	"Error",
	"EOF",
	"Key",
	"String",
	"Float",
	"Integer",
	"@",
	"$",
	"[",
	"]",
	".",
	"..",
	"*",
	",",
	":",
	"?",
	"(",
	")",
}
// token is a single lexeme: where it starts in the input, its kind, and its
// text (raw input span, or the decoded value for strings).
type token struct {
	Position // start line/column (type dot-imported from go-toml)
	typ      tokenType
	val      string
}
// String renders a tokenType via the tokenTypeNames table, falling back to
// "Unknown" for values outside the table.
func (tt tokenType) String() string {
	if idx := int(tt); idx < len(tokenTypeNames) {
		return tokenTypeNames[idx]
	}
	return "Unknown"
}
// String renders a token for debugging: errors show their message, EOF is
// symbolic, and values longer than ten characters are truncated.
func (i token) String() string {
	if i.typ == tokenEOF {
		return "EOF"
	}
	if i.typ == tokenError {
		return i.val
	}
	if len(i.val) > 10 {
		return fmt.Sprintf("%.10q...", i.val)
	}
	return fmt.Sprintf("%q", i.val)
}
// isSpace reports whether r is insignificant blank space (space or tab).
func isSpace(r rune) bool {
	switch r {
	case ' ', '\t':
		return true
	}
	return false
}
// isAlphanumeric reports whether r may appear in a bare key.
// NOTE(review): despite the name, digits are NOT accepted. lexVoid tests
// this predicate before its digit test, so admitting digits here would turn
// bare integers into keys; as written, a key like "foo2" lexes as
// Key("foo") followed by Integer(2) — confirm that is intended.
func isAlphanumeric(r rune) bool {
	return unicode.IsLetter(r) || r == '_'
}
// isKeyChar reports whether r may appear in a TOML key.
// "Keys start with the first non-whitespace character and end with the last
// non-whitespace character before the equals sign."
// NOTE(review): not referenced anywhere in this file (lexKey uses
// isAlphanumeric instead) — confirm it is still needed.
func isKeyChar(r rune) bool {
	return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=')
}
// isDigit reports whether r is a numeric rune.
// NOTE(review): unicode.IsNumber also matches non-decimal numeric runes
// (Roman numerals, fractions); unicode.IsDigit may be the stricter intent.
func isDigit(r rune) bool {
	return unicode.IsNumber(r)
}
// isHexDigit reports whether r is a hexadecimal digit, accepting both upper
// and lower case letters. The previous version only accepted 'A'-'F', which
// rejected valid lowercase escapes such as "\u00ff" even though
// strconv.ParseInt(code, 16, ...) — used to decode them — accepts either
// case.
func isHexDigit(r rune) bool {
	return isDigit(r) ||
		(r >= 'a' && r <= 'f') ||
		(r >= 'A' && r <= 'F')
}
// Define lexer

// lexer holds the scanning state for one input string and streams tokens to
// a consumer over a channel.
type lexer struct {
	input      string     // text being scanned
	start      int        // byte offset where the pending token begins
	pos        int        // current byte offset in input
	width      int        // byte width of the last rune read (for backup)
	tokens     chan token // destination for emitted tokens; closed by run
	depth      int        // NOTE(review): never used in this file — confirm
	line       int        // 1-based line of the pending token start
	col        int        // 1-based column of the pending token start
	stringTerm string     // quote character terminating the current string
}
// run drives the state machine: each state function returns the next state,
// and a nil state ends the loop. The token channel is closed afterwards so
// consumers ranging over it stop cleanly.
func (l *lexer) run() {
	for state := lexVoid; state != nil; {
		state = state(l)
	}
	close(l.tokens)
}
// nextStart advances the token start marker to the current position,
// updating the line/column counters by walking the consumed span rune by
// rune (newlines reset the column).
func (l *lexer) nextStart() {
	for _, r := range l.input[l.start:l.pos] {
		if r == '\n' {
			l.line++
			l.col = 1
		} else {
			l.col++
		}
	}
	l.start = l.pos
}
// emit sends the pending span [start, pos) as a token of type t, stamped
// with the span's starting position, then advances start past it.
func (l *lexer) emit(t tokenType) {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      t,
		val:      l.input[l.start:l.pos],
	}
	l.nextStart()
}
// emitWithValue is like emit but sends an explicit value instead of the raw
// input span — used for strings, whose escapes are decoded while lexing.
func (l *lexer) emitWithValue(t tokenType, value string) {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      t,
		val:      value,
	}
	l.nextStart()
}
// next consumes and returns the next rune, or eof once the input is
// exhausted. width records the rune's byte size so backup can undo exactly
// one read.
func (l *lexer) next() rune {
	if l.pos >= len(l.input) {
		l.width = 0
		return eof
	}
	var r rune
	r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
	l.pos += l.width
	return r
}
// ignore discards the pending span, moving start up to the current position
// (line/column counters are still advanced over the skipped text).
func (l *lexer) ignore() {
	l.nextStart()
}
// backup undoes the most recent next. Valid only once per call to next,
// since only the last rune's width is remembered.
func (l *lexer) backup() {
	l.pos -= l.width
}
// errorf emits a tokenError carrying the formatted message and returns nil
// so the state machine terminates.
func (l *lexer) errorf(format string, args ...interface{}) stateFn {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      tokenError,
		val:      fmt.Sprintf(format, args...),
	}
	return nil
}
// peek returns the next rune without consuming it.
func (l *lexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}
// accept consumes the next rune if it is one of valid, reporting whether it
// did so; otherwise the rune is pushed back.
func (l *lexer) accept(valid string) bool {
	if strings.ContainsRune(valid, l.next()) {
		return true
	}
	l.backup()
	return false
}
// follow reports whether the unconsumed input begins with next.
func (l *lexer) follow(next string) bool {
	return strings.HasPrefix(l.input[l.pos:], next)
}
// Define state functions

// stateFn is one state of the lexer; it returns the next state, or nil to
// stop.
type stateFn func(*lexer) stateFn
// lexVoid is the top-level state: it emits the single- and two-character
// operator tokens directly and hands off to the string, key, and number
// sub-states. Fixes over the previous version: the isAlphanumeric test was
// duplicated (the second copy was unreachable), and whitespace fell through
// to the "unexpected char" error instead of being skipped.
func lexVoid(l *lexer) stateFn {
	for {
		next := l.peek()
		switch next {
		case '$':
			l.pos++
			l.emit(tokenDollar)
			continue
		case '.':
			if l.follow("..") {
				l.pos += 2
				l.emit(tokenDotDot)
			} else {
				l.pos++
				l.emit(tokenDot)
			}
			continue
		case '@':
			l.pos++
			l.emit(tokenAtCost)
			continue
		case '[':
			l.pos++
			l.emit(tokenLBracket)
			continue
		case ']':
			l.pos++
			l.emit(tokenRBracket)
			continue
		case ',':
			l.pos++
			l.emit(tokenComma)
			continue
		case '*':
			l.pos++
			l.emit(tokenStar)
			continue
		case '(':
			l.pos++
			l.emit(tokenLParen)
			continue
		case ')':
			l.pos++
			l.emit(tokenRParen)
			continue
		case '?':
			l.pos++
			l.emit(tokenQuestion)
			continue
		case ':':
			l.pos++
			l.emit(tokenColon)
			continue
		case '\'', '"':
			l.ignore()
			l.stringTerm = string(next)
			return lexString
		}
		if isAlphanumeric(next) {
			return lexKey
		}
		if next == '+' || next == '-' || isDigit(next) {
			return lexNumber
		}
		// skip insignificant whitespace between tokens
		if isSpace(next) {
			l.next()
			l.ignore()
			continue
		}
		if next == eof {
			break
		}
		return l.errorf("unexpected char: '%v'", next)
	}
	l.emit(tokenEOF)
	return nil
}
// lexKey consumes a run of key characters (letters and '_', per
// isAlphanumeric) and emits it as a tokenKey before returning to lexVoid.
func lexKey(l *lexer) stateFn {
	for {
		next := l.peek()
		if !isAlphanumeric(next) {
			// End of the key. This also covers eof (not a key character),
			// so the tokenEOF emit below is effectively unreachable.
			l.emit(tokenKey)
			return lexVoid
		}
		if l.next() == eof {
			break
		}
	}
	l.emit(tokenEOF)
	return nil
}
// lexString decodes a quoted string. On entry l.pos sits on the opening
// quote and l.stringTerm holds the terminator. Escape sequences are decoded
// into growingString, which is emitted via emitWithValue since it differs
// from the raw input span.
func lexString(l *lexer) stateFn {
	// step over the opening quote (assumed one byte: ' or ")
	l.pos++
	l.ignore()
	growingString := ""
	for {
		if l.follow(l.stringTerm) {
			l.emitWithValue(tokenString, growingString)
			// step over the closing quote
			l.pos++
			l.ignore()
			return lexVoid
		}
		// Each escape branch advances pos past the backslash only; the
		// l.next() at the bottom of the loop consumes the second character
		// of the sequence.
		if l.follow("\\\"") {
			l.pos++
			growingString += "\""
		} else if l.follow("\\'") {
			l.pos++
			growingString += "'"
		} else if l.follow("\\n") {
			l.pos++
			growingString += "\n"
		} else if l.follow("\\b") {
			l.pos++
			growingString += "\b"
		} else if l.follow("\\f") {
			l.pos++
			growingString += "\f"
		} else if l.follow("\\/") {
			l.pos++
			growingString += "/"
		} else if l.follow("\\t") {
			l.pos++
			growingString += "\t"
		} else if l.follow("\\r") {
			l.pos++
			growingString += "\r"
		} else if l.follow("\\\\") {
			l.pos++
			growingString += "\\"
		} else if l.follow("\\u") {
			// unicode escape: \uXXXX, exactly four hex digits
			l.pos += 2
			code := ""
			for i := 0; i < 4; i++ {
				c := l.peek()
				l.pos++
				if !isHexDigit(c) {
					return l.errorf("unfinished unicode escape")
				}
				code = code + string(c)
			}
			// back up one so the l.next() below lands on the last digit
			l.pos--
			intcode, err := strconv.ParseInt(code, 16, 32)
			if err != nil {
				return l.errorf("invalid unicode escape: \\u" + code)
			}
			growingString += string(rune(intcode))
		} else if l.follow("\\") {
			l.pos++
			return l.errorf("invalid escape sequence: \\" + string(l.peek()))
		} else {
			// ordinary character; consumed by the l.next() below
			growingString += string(l.peek())
		}
		if l.next() == eof {
			break
		}
	}
	return l.errorf("unclosed string")
}
// lexNumber scans an optionally signed integer or float and emits
// tokenInteger or tokenFloat. Floats require digits on both sides of a
// single decimal point.
func lexNumber(l *lexer) stateFn {
	l.ignore()
	// consume at most one sign
	if !l.accept("+") {
		l.accept("-")
	}
	pointSeen := false
	digitSeen := false
	for {
		next := l.next()
		if next == '.' {
			if pointSeen {
				return l.errorf("cannot have two dots in one float")
			}
			if !isDigit(l.peek()) {
				return l.errorf("float cannot end with a dot")
			}
			pointSeen = true
		} else if isDigit(next) {
			digitSeen = true
		} else {
			// first non-numeric rune ends the literal; push it back
			l.backup()
			break
		}
		if pointSeen && !digitSeen {
			// a '.' was seen before any digit
			return l.errorf("cannot start float with a dot")
		}
	}
	if !digitSeen {
		return l.errorf("no digit in that number")
	}
	if pointSeen {
		l.emit(tokenFloat)
	} else {
		l.emit(tokenInteger)
	}
	return lexVoid
}
// Entry point

// lex starts lexing input in a new goroutine and returns the lexer together
// with the channel its tokens will be delivered on. The channel is closed
// after the final token (tokenEOF or tokenError).
func lex(input string) (*lexer, chan token) {
	l := &lexer{
		input:  input,
		tokens: make(chan token),
		line:   1,
		col:    1,
	}
	go l.run()
	return l, l.tokens
}
+70
View File
@@ -0,0 +1,70 @@
package jpath
import (
. "github.com/pelletier/go-toml"
"testing"
)
// testFlow lexes input, compares the emitted tokens against expectedFlow in
// order, and then verifies the channel was closed with nothing left over.
func testFlow(t *testing.T, input string, expectedFlow []token) {
	_, ch := lex(input)
	for idx, expected := range expectedFlow {
		token := <-ch
		if token != expected {
			t.Log("While testing #", idx, ":", input)
			t.Log("compared", token, "to", expected)
			t.Log(token.val, "<->", expected.val)
			t.Log(token.typ, "<->", expected.typ)
			t.Log(token.Line, "<->", expected.Line)
			t.Log(token.Col, "<->", expected.Col)
			t.FailNow()
		}
	}
	// the lexer must close the channel after the last expected token
	tok, ok := <-ch
	if ok {
		t.Log("channel is not closed!")
		t.Log(len(ch)+1, "tokens remaining:")
		t.Log("token ->", tok)
		for token := range ch {
			t.Log("token ->", token)
		}
		t.FailNow()
	}
}
// TestLexSpecialChars checks every operator token and its reported
// position, including the two-character ".." token.
func TestLexSpecialChars(t *testing.T) {
	testFlow(t, "@.$[]..()?*", []token{
		{Position{1, 1}, tokenAtCost, "@"},
		{Position{1, 2}, tokenDot, "."},
		{Position{1, 3}, tokenDollar, "$"},
		{Position{1, 4}, tokenLBracket, "["},
		{Position{1, 5}, tokenRBracket, "]"},
		{Position{1, 6}, tokenDotDot, ".."},
		{Position{1, 8}, tokenLParen, "("},
		{Position{1, 9}, tokenRParen, ")"},
		{Position{1, 10}, tokenQuestion, "?"},
		{Position{1, 11}, tokenStar, "*"},
		{Position{1, 12}, tokenEOF, ""},
	})
}
// TestLexString checks single- and double-quoted strings; the token value
// excludes the quotes while the position points just past the opening one.
func TestLexString(t *testing.T) {
	testFlow(t, "'foo'", []token{
		{Position{1, 2}, tokenString, "foo"},
		{Position{1, 6}, tokenEOF, ""},
	})
	testFlow(t, `"bar"`, []token{
		{Position{1, 2}, tokenString, "bar"},
		{Position{1, 6}, tokenEOF, ""},
	})
}
// TestLexKey checks that a bare identifier lexes as a single key token.
func TestLexKey(t *testing.T) {
	testFlow(t, "foo", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 4}, tokenEOF, ""},
	})
}
+122
View File
@@ -0,0 +1,122 @@
package jpath
import (
. "github.com/pelletier/go-toml"
)
// PathFn is one compiled step of a jsonpath query: it maps a context node
// to the list of nodes this step matches.
type PathFn func(context interface{}) []interface{}

// treeValue fetches the value stored under a single key of a TOML tree.
func treeValue(tree *TomlTree, key string) interface{} {
	return tree.GetPath([]string{key})
}
// matchKeyFn builds a PathFn matching the single key name on a TOML tree
// context; non-tree contexts and missing keys yield no matches.
func matchKeyFn(name string) PathFn {
	return func(context interface{}) []interface{} {
		tree, ok := context.(*TomlTree)
		if !ok {
			return []interface{}{}
		}
		item := treeValue(tree, name)
		if item == nil {
			return []interface{}{}
		}
		return []interface{}{item}
	}
}
// matchIndexFn builds a PathFn matching a single array index; out-of-range
// indexes and non-array contexts yield no matches.
func matchIndexFn(idx int) PathFn {
	return func(context interface{}) []interface{} {
		arr, ok := context.([]interface{})
		if !ok || idx < 0 || idx >= len(arr) {
			return []interface{}{}
		}
		return arr[idx : idx+1]
	}
}
// matchSliceFn builds a PathFn implementing the [start:end:step] slice
// operator over array contexts. Negative start/end index from the end of
// the array. Both bounds are clamped into [0, len(arr)] — previously the
// parser's open-ended default (end == math.MaxInt64) and out-of-range
// negative values drove the gather loop straight into an index-out-of-range
// panic. A non-positive step (which would loop forever) is treated as 1.
// Non-array contexts yield no matches.
func matchSliceFn(start, end, step int) PathFn {
	return func(context interface{}) []interface{} {
		result := []interface{}{}
		if arr, ok := context.([]interface{}); ok {
			// adjust indexes for negative values
			realStart, realEnd := start, end
			if realStart < 0 {
				realStart = len(arr) + realStart
			}
			if realEnd < 0 {
				realEnd = len(arr) + realEnd
			}
			// clamp both bounds into [0, len(arr)] so the loop below can
			// never read outside the array
			if realStart < 0 {
				realStart = 0
			}
			if realStart > len(arr) {
				realStart = len(arr)
			}
			if realEnd < 0 {
				realEnd = 0
			}
			if realEnd > len(arr) {
				realEnd = len(arr)
			}
			if realEnd < realStart {
				realEnd, realStart = realStart, realEnd // swap
			}
			// guard against a zero/negative step hanging the query
			if step < 1 {
				step = 1
			}
			// loop and gather
			for idx := realStart; idx < realEnd; idx += step {
				result = append(result, arr[idx])
			}
		}
		return result
	}
}
// matchAnyFn builds a PathFn implementing the '*' wildcard: every direct
// child value of a TOML tree context. Non-tree contexts yield no matches.
func matchAnyFn() PathFn {
	return func(context interface{}) []interface{} {
		result := []interface{}{}
		if tree, ok := context.(*TomlTree); ok {
			for _, key := range tree.Keys() {
				item := treeValue(tree, key)
				result = append(result, item)
			}
		}
		return result
	}
}
// matchUnionFn builds a PathFn that concatenates the matches of every
// sub-expression of a bracketed union (e.g. "[a,b]"), in order.
func matchUnionFn(union []PathFn) PathFn {
	return func(context interface{}) []interface{} {
		result := []interface{}{}
		for _, fn := range union {
			result = append(result, fn(context)...)
		}
		return result
	}
}
// matchRecurseFn builds a PathFn implementing '..': the context itself plus
// every descendant value, gathered by a depth-first walk into sub-trees and
// arrays of sub-trees. Non-tree contexts match only themselves.
func matchRecurseFn() PathFn {
	return func(context interface{}) []interface{} {
		result := []interface{}{context}
		if tree, ok := context.(*TomlTree); ok {
			var visit func(tree *TomlTree)
			visit = func(tree *TomlTree) {
				for _, key := range tree.Keys() {
					item := treeValue(tree, key)
					result = append(result, item)
					switch node := item.(type) {
					case *TomlTree:
						visit(node)
					case []*TomlTree:
						for _, subtree := range node {
							visit(subtree)
						}
					}
				}
			}
			visit(tree)
		}
		return result
	}
}
// processPath evaluates a compiled query: each PathFn is applied to every
// node matched by the previous step, starting from the root context.
// Evaluation stops early as soon as a step matches nothing.
func processPath(path []PathFn, context interface{}) []interface{} {
	current := []interface{}{context} // start with the root
	for _, fn := range path {
		matched := []interface{}{}
		for _, ctx := range current {
			matched = append(matched, fn(ctx)...)
		}
		if len(matched) == 0 {
			// nothing left to search
			return matched
		}
		current = matched
	}
	return current
}
+225
View File
@@ -0,0 +1,225 @@
package jpath
import (
"fmt"
"math"
"strconv"
)
// parser consumes the lexer's token stream and compiles it into a sequence
// of PathFn matching steps.
type parser struct {
	flow         chan token // token stream produced by the lexer
	tokensBuffer []token    // pushback buffer used by peek/backup
	path         []PathFn   // compiled query steps, in evaluation order
}

// parserStateFn is one parser state; it returns the next state, or nil to
// stop.
type parserStateFn func(*parser) parserStateFn
// raiseError formats an error message, prefixes it with the offending
// token's position, and panics with the result.
// NOTE(review): the panic is not recovered anywhere in this file — confirm
// callers of parse are expected to recover it.
func (p *parser) raiseError(tok *token, msg string, args ...interface{}) {
	panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...))
}
// run drives the parser state machine until a state returns nil.
func (p *parser) run() {
	for state := parseStart; state != nil; {
		state = state(p)
	}
}
// backup pushes tok onto the pushback buffer so it is seen again by a later
// peek/getToken.
// NOTE(review): the token is appended at the TAIL of the buffer while
// getToken/peek read from the HEAD — pushing back while another token is
// already buffered reverses their order. Callers must account for this.
func (p *parser) backup(tok *token) {
	p.tokensBuffer = append(p.tokensBuffer, *tok)
}
// peek returns the next token without consuming it (buffering it if it had
// to be read from the channel), or nil once the stream is exhausted.
func (p *parser) peek() *token {
	if len(p.tokensBuffer) != 0 {
		return &(p.tokensBuffer[0])
	}
	tok, ok := <-p.flow
	if !ok {
		return nil
	}
	p.backup(&tok)
	return &tok
}
// getToken consumes and returns the next token — pushback buffer first,
// then the lexer channel — or nil once the stream is exhausted.
func (p *parser) getToken() *token {
	if len(p.tokensBuffer) != 0 {
		tok := p.tokensBuffer[0]
		p.tokensBuffer = p.tokensBuffer[1:]
		return &tok
	}
	tok, ok := <-p.flow
	if !ok {
		return nil
	}
	return &tok
}
// appendPath adds one compiled matching step to the query under
// construction.
func (p *parser) appendPath(fn PathFn) {
	p.path = append(p.path, fn)
}
// parseStart expects the '$' root marker (an empty query is also accepted)
// and hands off to the match-expression state.
func parseStart(p *parser) parserStateFn {
	tok := p.getToken()
	if tok == nil || tok.typ == tokenEOF {
		return nil
	}
	if tok.typ != tokenDollar {
		p.raiseError(tok, "Expected '$' at start of expression")
	}
	return parseMatchExpr
}
// parseMatchExpr parses the expression that follows the current position:
// '.key', '..', '[...]', '*', or end of input. Fix over the previous
// version: the tokenDot case registered matchKeyFn(tok.val) — i.e. a match
// on the literal key "." — and never consumed the key token that follows
// the dot, so "$.foo" failed with "expected match expression". The dot now
// defers to parseSimpleMatchExpr, which consumes the key/star/bracket term.
func parseMatchExpr(p *parser) parserStateFn {
	tok := p.getToken()
	if tok == nil {
		return nil // token stream exhausted
	}
	switch tok.typ {
	case tokenDot:
		// the term being matched is the NEXT token
		return parseSimpleMatchExpr
	case tokenDotDot:
		p.appendPath(matchRecurseFn())
		return parseSimpleMatchExpr
	case tokenLBracket:
		return parseBracketExpr
	case tokenStar:
		p.appendPath(matchAnyFn())
		return parseMatchExpr
	case tokenEOF:
		return nil // allow EOF at this stage
	}
	p.raiseError(tok, "expected match expression")
	return nil
}
// parseSimpleMatchExpr parses the single term that must follow '.' or '..':
// a key, a '*' wildcard, or a bracket expression.
func parseSimpleMatchExpr(p *parser) parserStateFn {
	tok := p.getToken()
	switch tok.typ {
	case tokenLBracket:
		return parseBracketExpr
	case tokenKey:
		p.appendPath(matchKeyFn(tok.val))
		return parseMatchExpr
	case tokenStar:
		p.appendPath(matchAnyFn())
		return parseMatchExpr
	}
	p.raiseError(tok, "expected match expression")
	return nil
}
// parseBracketExpr dispatches a '[...]' expression to the slice or union
// parser. A leading integer needs a second token of lookahead to tell a
// slice ("[1:2]") from a union ("[1,2]" / "[1]"). Fix over the previous
// version: it restored the integer with p.backup, which appends to the TAIL
// of the pushback buffer while getToken pops the HEAD, so the two buffered
// tokens came back in reversed order. The integer is now re-inserted at the
// front of the buffer.
func parseBracketExpr(p *parser) parserStateFn {
	tok := p.peek()
	switch tok.typ {
	case tokenInteger:
		head := *p.getToken() // consume the integer...
		next := p.peek()      // ...peek at what follows it...
		// ...and push the integer back at the FRONT so the next state sees
		// the tokens in their original order
		p.tokensBuffer = append([]token{head}, p.tokensBuffer...)
		if next.typ == tokenColon {
			return parseSliceExpr
		}
		return parseUnionExpr
	case tokenColon:
		return parseSliceExpr
	}
	return parseUnionExpr
}
// parseUnionExpr parses a bracketed union of indexes and/or keys — "[1,2]",
// "[a,b]", or a single term — terminated by ']'. Filter ('?') and script
// ('(') sub-expressions are recognized but unsupported. Fix over the
// previous version: the terminating 'break' only exited the switch, not the
// for loop, so the loop never ended on ']' and the appendPath call below it
// was unreachable; the loop is now labeled so 'break loop' exits it.
func parseUnionExpr(p *parser) parserStateFn {
	union := []PathFn{}
loop:
	for {
		// parse sub expression
		tok := p.getToken()
		switch tok.typ {
		case tokenInteger:
			idx, _ := strconv.Atoi(tok.val)
			union = append(union, matchIndexFn(idx))
		case tokenKey:
			union = append(union, matchKeyFn(tok.val))
		case tokenQuestion:
			return parseFilterExpr
		case tokenLParen:
			return parseScriptExpr
		default:
			p.raiseError(tok, "expected union sub expression")
		}
		// parse delimiter or terminator
		tok = p.getToken()
		switch tok.typ {
		case tokenComma:
			continue
		case tokenRBracket:
			break loop
		default:
			p.raiseError(tok, "expected ',' or ']'")
		}
	}
	p.appendPath(matchUnionFn(union))
	return parseMatchExpr
}
// parseSliceExpr parses "[start:end:step]" where every component is
// optional; the defaults (0, math.MaxInt64, 1) select every element. Fixes
// over the previous version: the guard "tok.typ != tokenColon || tok.typ !=
// tokenRBracket" was always true (a token cannot be both), and a slice
// terminated by ']' right after the end bound (e.g. "[1:2]") mistakenly
// went on to read step tokens past the bracket.
func parseSliceExpr(p *parser) parserStateFn {
	// init slice to grab all elements
	start, end, step := 0, math.MaxInt64, 1
	// parse optional start
	tok := p.getToken()
	if tok.typ == tokenInteger {
		start, _ = strconv.Atoi(tok.val)
		tok = p.getToken()
	}
	if tok.typ != tokenColon {
		p.raiseError(tok, "expected ':'")
	}
	// parse optional end
	tok = p.getToken()
	if tok.typ == tokenInteger {
		end, _ = strconv.Atoi(tok.val)
		tok = p.getToken()
	}
	switch tok.typ {
	case tokenRBracket:
		// no step given; defaults stand
	case tokenColon:
		// parse optional step
		tok = p.getToken()
		if tok.typ == tokenInteger {
			step, _ = strconv.Atoi(tok.val)
			if step < 0 {
				p.raiseError(tok, "step must be a positive value")
			}
			tok = p.getToken()
		}
		if tok.typ != tokenRBracket {
			p.raiseError(tok, "expected ']'")
		}
	default:
		p.raiseError(tok, "expected ']' or ':'")
	}
	p.appendPath(matchSliceFn(start, end, step))
	return parseMatchExpr
}
// parseFilterExpr handles '?(...)' filter expressions, which this prototype
// does not implement yet; it always raises an error.
func parseFilterExpr(p *parser) parserStateFn {
	p.raiseError(p.peek(), "filter expressions are unsupported")
	return nil
}
// parseScriptExpr handles '(...)' script expressions, which this prototype
// does not implement yet; it always raises an error.
func parseScriptExpr(p *parser) parserStateFn {
	p.raiseError(p.peek(), "script expressions are unsupported")
	return nil
}
// parse consumes the token stream and compiles it into the sequence of path
// matching functions to evaluate. Fix over the previous version: it
// returned a local slice captured before running the parser — appendPath's
// append reallocates, so that local stayed empty and every query compiled
// to zero steps. The parser's own accumulated path is returned instead.
func parse(flow chan token) []PathFn {
	p := &parser{
		flow:         flow,
		tokensBuffer: []token{},
		path:         []PathFn{},
	}
	p.run()
	return p.path
}
+465
View File
@@ -0,0 +1,465 @@
package jpath
import (
"fmt"
"testing"
. "github.com/pelletier/go-toml"
)
// assertQuery loads a TOML document, runs a jsonpath query against it, and
// compares the result set to ref. Fix over the previous version: it
// re-checked the stale err from Load after calling lex — lex has no error
// return, so that branch was dead code.
func assertQuery(t *testing.T, toml, query string, ref []interface{}) {
	tree, err := Load(toml)
	if err != nil {
		t.Errorf("Non-nil toml parse error: %v", err)
		return
	}
	// lex reports problems through tokenError tokens, not an error return
	_, flow := lex(query)
	path := parse(flow)
	result := processPath(path, tree)
	assertValue(t, result, ref, "")
}
// assertValue recursively compares a query result against a reference
// value, reporting type and content mismatches together with their location
// within the reference structure. Fix over the previous version: the
// default branch compared node against ref — but node IS ref after the type
// switch, so the comparison could never fail; it now compares result
// against ref.
func assertValue(t *testing.T, result, ref interface{}, location string) {
	switch node := ref.(type) {
	case []interface{}:
		if resultNode, ok := result.([]interface{}); !ok {
			t.Errorf("{%s} result value not of type %T: %T",
				location, node, resultNode)
		} else {
			for i, v := range node {
				assertValue(t, resultNode[i], v, fmt.Sprintf("%s[%d]", location, i))
			}
		}
	case map[string]interface{}:
		// a map reference is matched against a TOML tree, key by key
		if resultNode, ok := result.(*TomlTree); !ok {
			t.Errorf("{%s} result value not of type %T: %T",
				location, node, resultNode)
		} else {
			for k, v := range node {
				assertValue(t, resultNode.GetPath([]string{k}), v, location+"."+k)
			}
		}
	case int64:
		if resultNode, ok := result.(int64); !ok {
			t.Errorf("{%s} result value not of type %T: %T",
				location, node, resultNode)
		} else if node != resultNode {
			t.Errorf("{%s} result value does not match", location)
		}
	case string:
		if resultNode, ok := result.(string); !ok {
			t.Errorf("{%s} result value not of type %T: %T",
				location, node, resultNode)
		} else if node != resultNode {
			t.Errorf("{%s} result value does not match", location)
		}
	default:
		// fall back to a formatted comparison of result (not node, which is
		// merely ref after the type switch) against the reference
		if fmt.Sprintf("%v", result) != fmt.Sprintf("%v", ref) {
			t.Errorf("{%s} result value does not match: %v != %v",
				location, result, ref)
		}
	}
}
// TestQueryRoot checks that the bare root query "$" returns the whole
// document as its single match.
func TestQueryRoot(t *testing.T) {
	assertQuery(t,
		"a = 42",
		"$",
		[]interface{}{
			map[string]interface{}{
				"a": int64(42),
			},
		})
}
/*
// NOTE: from the BurntSushi test suite
// NOTE: this test is pure evil due to the embedded '.'
func TestSpecialKV(t *testing.T) {
tree, err := Load("~!@#$^&*()_+-`1234567890[]\\|/?><.,;: = 1")
assertTree(t, tree, err, map[string]interface{}{
"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:": int64(1),
})
}
func TestSimpleNumbers(t *testing.T) {
tree, err := Load("a = +42\nb = -21\nc = +4.2\nd = -2.1")
assertTree(t, tree, err, map[string]interface{}{
"a": int64(42),
"b": int64(-21),
"c": float64(4.2),
"d": float64(-2.1),
})
}
func TestSimpleDate(t *testing.T) {
tree, err := Load("a = 1979-05-27T07:32:00Z")
assertTree(t, tree, err, map[string]interface{}{
"a": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
})
}
func TestSimpleString(t *testing.T) {
tree, err := Load("a = \"hello world\"")
assertTree(t, tree, err, map[string]interface{}{
"a": "hello world",
})
}
func TestStringEscapables(t *testing.T) {
tree, err := Load("a = \"a \\n b\"")
assertTree(t, tree, err, map[string]interface{}{
"a": "a \n b",
})
tree, err = Load("a = \"a \\t b\"")
assertTree(t, tree, err, map[string]interface{}{
"a": "a \t b",
})
tree, err = Load("a = \"a \\r b\"")
assertTree(t, tree, err, map[string]interface{}{
"a": "a \r b",
})
tree, err = Load("a = \"a \\\\ b\"")
assertTree(t, tree, err, map[string]interface{}{
"a": "a \\ b",
})
}
func TestBools(t *testing.T) {
tree, err := Load("a = true\nb = false")
assertTree(t, tree, err, map[string]interface{}{
"a": true,
"b": false,
})
}
func TestNestedKeys(t *testing.T) {
tree, err := Load("[a.b.c]\nd = 42")
assertTree(t, tree, err, map[string]interface{}{
"a": map[string]interface{}{
"b": map[string]interface{}{
"c": map[string]interface{}{
"d": int64(42),
},
},
},
})
}
func TestArrayOne(t *testing.T) {
tree, err := Load("a = [1]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(1)},
})
}
func TestArrayZero(t *testing.T) {
tree, err := Load("a = []")
assertTree(t, tree, err, map[string]interface{}{
"a": []interface{}{},
})
}
func TestArraySimple(t *testing.T) {
tree, err := Load("a = [42, 21, 10]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(42), int64(21), int64(10)},
})
tree, _ = Load("a = [42, 21, 10,]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(42), int64(21), int64(10)},
})
}
func TestArrayMultiline(t *testing.T) {
tree, err := Load("a = [42,\n21, 10,]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(42), int64(21), int64(10)},
})
}
func TestArrayNested(t *testing.T) {
tree, err := Load("a = [[42, 21], [10]]")
assertTree(t, tree, err, map[string]interface{}{
"a": [][]int64{[]int64{int64(42), int64(21)}, []int64{int64(10)}},
})
}
func TestNestedEmptyArrays(t *testing.T) {
tree, err := Load("a = [[[]]]")
assertTree(t, tree, err, map[string]interface{}{
"a": [][][]interface{}{[][]interface{}{[]interface{}{}}},
})
}
func TestArrayMixedTypes(t *testing.T) {
_, err := Load("a = [42, 16.0]")
if err.Error() != "(1, 10): mixed types in array" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("a = [42, \"hello\"]")
if err.Error() != "(1, 11): mixed types in array" {
t.Error("Bad error message:", err.Error())
}
}
func TestArrayNestedStrings(t *testing.T) {
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
assertTree(t, tree, err, map[string]interface{}{
"data": [][]string{[]string{"gamma", "delta"}, []string{"Foo"}},
})
}
func TestMissingValue(t *testing.T) {
_, err := Load("a = ")
if err.Error() != "(1, 4): expecting a value" {
t.Error("Bad error message:", err.Error())
}
}
func TestUnterminatedArray(t *testing.T) {
_, err := Load("a = [1,")
if err.Error() != "(1, 8): unterminated array" {
t.Error("Bad error message:", err.Error())
}
}
func TestNewlinesInArrays(t *testing.T) {
tree, err := Load("a = [1,\n2,\n3]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(1), int64(2), int64(3)},
})
}
func TestArrayWithExtraComma(t *testing.T) {
tree, err := Load("a = [1,\n2,\n3,\n]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(1), int64(2), int64(3)},
})
}
func TestArrayWithExtraCommaComment(t *testing.T) {
tree, err := Load("a = [1, # wow\n2, # such items\n3, # so array\n]")
assertTree(t, tree, err, map[string]interface{}{
"a": []int64{int64(1), int64(2), int64(3)},
})
}
func TestDuplicateGroups(t *testing.T) {
_, err := Load("[foo]\na=2\n[foo]b=3")
if err.Error() != "(3, 2): duplicated tables" {
t.Error("Bad error message:", err.Error())
}
}
func TestDuplicateKeys(t *testing.T) {
_, err := Load("foo = 2\nfoo = 3")
if err.Error() != "(2, 1): The following key was defined twice: foo" {
t.Error("Bad error message:", err.Error())
}
}
func TestEmptyIntermediateTable(t *testing.T) {
_, err := Load("[foo..bar]")
if err.Error() != "(1, 2): empty intermediate table" {
t.Error("Bad error message:", err.Error())
}
}
func TestImplicitDeclarationBefore(t *testing.T) {
tree, err := Load("[a.b.c]\nanswer = 42\n[a]\nbetter = 43")
assertTree(t, tree, err, map[string]interface{}{
"a": map[string]interface{}{
"b": map[string]interface{}{
"c": map[string]interface{}{
"answer": int64(42),
},
},
"better": int64(43),
},
})
}
func TestFloatsWithoutLeadingZeros(t *testing.T) {
_, err := Load("a = .42")
if err.Error() != "(1, 4): cannot start float with a dot" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("a = -.42")
if err.Error() != "(1, 5): cannot start float with a dot" {
t.Error("Bad error message:", err.Error())
}
}
func TestMissingFile(t *testing.T) {
_, err := LoadFile("foo.toml")
if err.Error() != "open foo.toml: no such file or directory" {
t.Error("Bad error message:", err.Error())
}
}
func TestParseFile(t *testing.T) {
tree, err := LoadFile("example.toml")
assertTree(t, tree, err, map[string]interface{}{
"title": "TOML Example",
"owner": map[string]interface{}{
"name": "Tom Preston-Werner",
"organization": "GitHub",
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
"dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
},
"database": map[string]interface{}{
"server": "192.168.1.1",
"ports": []int64{8001, 8001, 8002},
"connection_max": 5000,
"enabled": true,
},
"servers": map[string]interface{}{
"alpha": map[string]interface{}{
"ip": "10.0.0.1",
"dc": "eqdc10",
},
"beta": map[string]interface{}{
"ip": "10.0.0.2",
"dc": "eqdc10",
},
},
"clients": map[string]interface{}{
"data": []interface{}{
[]string{"gamma", "delta"},
[]int64{1, 2},
},
},
})
}
func TestParseKeyGroupArray(t *testing.T) {
tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
assertTree(t, tree, err, map[string]interface{}{
"foo": map[string]interface{}{
"bar": []map[string]interface{}{
{"a": int64(42)},
{"a": int64(69)},
},
},
})
}
func TestParseKeyGroupArraySpec(t *testing.T) {
tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
assertTree(t, tree, err, map[string]interface{}{
"fruit": []map[string]interface{}{
{"name": "apple", "physical": map[string]interface{}{"color": "red", "shape": "round"}},
{"name": "banana"},
},
})
}
func TestToTomlValue(t *testing.T) {
for idx, item := range []struct {
Value interface{}
Expect string
}{
{int64(12345), "12345"},
{float64(123.45), "123.45"},
{bool(true), "true"},
{"hello world", "\"hello world\""},
{"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
{"\x05", "\"\\u0005\""},
{time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
"1979-05-27T07:32:00Z"},
{[]interface{}{"gamma", "delta"},
"[\n \"gamma\",\n \"delta\",\n]"},
} {
result := toTomlValue(item.Value, 0)
if result != item.Expect {
t.Errorf("Test %d - got '%s', expected '%s'", idx, result, item.Expect)
}
}
}
func TestToString(t *testing.T) {
tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
if err != nil {
t.Errorf("Test failed to parse: %v", err)
return
}
result := tree.ToString()
expected := "\n[foo]\n\n [[foo.bar]]\n a = 42\n\n [[foo.bar]]\n a = 69\n"
if result != expected {
t.Errorf("Expected got '%s', expected '%s'", result, expected)
}
}
func assertPosition(t *testing.T, text string, ref map[string]Position) {
tree, err := Load(text)
if err != nil {
t.Errorf("Error loading document text: `%v`", text)
t.Errorf("Error: %v", err)
}
for path, pos := range ref {
testPos := tree.GetPosition(path)
if testPos.Invalid() {
t.Errorf("Failed to query tree path: %s", path)
} else if pos != testPos {
t.Errorf("Expected position %v, got %v instead", pos, testPos)
}
}
}
func TestDocumentPositions(t *testing.T) {
assertPosition(t,
"[foo]\nbar=42\nbaz=69",
map[string]Position{
"foo": Position{1, 1},
"foo.bar": Position{2, 1},
"foo.baz": Position{3, 1},
})
}
func TestDocumentPositionsWithSpaces(t *testing.T) {
assertPosition(t,
" [foo]\n bar=42\n baz=69",
map[string]Position{
"foo": Position{1, 3},
"foo.bar": Position{2, 3},
"foo.baz": Position{3, 3},
})
}
func TestDocumentPositionsWithGroupArray(t *testing.T) {
assertPosition(t,
"[[foo]]\nbar=42\nbaz=69",
map[string]Position{
"foo": Position{1, 1},
"foo.bar": Position{2, 1},
"foo.baz": Position{3, 1},
})
}
func TestDocumentPositionsEmptyPath(t *testing.T) {
text := "[foo]\nbar=42\nbaz=69"
tree, err := Load(text)
if err != nil {
t.Errorf("Error loading document text: `%v`", text)
t.Errorf("Error: %v", err)
}
if pos := tree.GetPosition(""); !pos.Invalid() {
t.Errorf("Valid position was returned for empty path")
}
}
*/
+3
View File
@@ -21,8 +21,11 @@ go build -o toml-test github.com/BurntSushi/toml-test
# Vendor the library (and the new jpath package) into a local GOPATH layout
mkdir -p src/github.com/pelletier/go-toml/cmd
cp *.go *.toml src/github.com/pelletier/go-toml
cp cmd/*.go src/github.com/pelletier/go-toml/cmd
mkdir -p src/github.com/pelletier/go-toml/jpath
cp jpath/*.go src/github.com/pelletier/go-toml/jpath
# Build the driver program for the BurntSushi suite
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go
# Run basic unit tests and then the BurntSushi test suite
go test -v github.com/pelletier/go-toml/jpath
go test -v github.com/pelletier/go-toml
./toml-test ./test_program_bin | tee test_out