Compare commits

...

56 Commits

Author SHA1 Message Date
Thomas Pelletier 13d49d4606 Fix coveralls (#136) 2017-03-02 09:43:01 -08:00
Thomas Pelletier 7e6e4b1314 Rewrite TomlTree encoding (#133)
* Rewrite `TomlTree` encoding
* Introduce `TomlTree.WriteTo`
2017-03-02 09:17:06 -08:00
Thomas Pelletier 3616783228 Run go vet as part of the test suite (#132)
* Run go vet as part of the test suite
2017-02-27 14:29:04 -08:00
Thomas Pelletier d0ec4317d3 Fix compatibility with latest go-buffruneio (#131) 2017-02-27 14:18:12 -08:00
Thomas Pelletier 22139eb546 Test with go 1.8 (#129) 2017-02-16 17:27:36 -08:00
Thomas Pelletier c9506ee963 Update license (#128)
* Update LICENSE badge
* Update license year to 2017
2017-02-09 13:38:35 -08:00
David Brown 3a6d01f7a0 Fix syntax errors in package-level documentation (#126) 2017-02-09 13:23:28 -08:00
Thomas Pelletier d1fa2118c1 Bump test go to 1.7.5 (#127)
* Bump test go to 1.7.5
* Use travis container infrastructure
* Don't run the tests twice on PRs
2017-02-03 13:36:21 -08:00
Thomas Pelletier a1f048ba24 Make ToString() return an error instead of panic (#117)
Fixes #100
2017-01-15 18:49:11 -08:00
Jordan Bach ee2c0b51cf Fix typo in README tomljson installation instructions (#125) 2017-01-15 18:48:04 -08:00
Thomas Pelletier 439fbba1f8 Make lexComment jump back to the previous state (#122)
When a comment appears in an rvalue, the lexer needs to jump back to
lexRValue, not to lexVoid.

Fixes #120.
2016-12-29 19:51:04 +01:00
Christopher Mancini 017119f7a7 Use a single line for slice encoding (#119) 2016-12-13 15:20:06 +01:00
Thomas Pelletier ce7be745f0 Rename group to table (#115)
* Rename Group to Table. Fixes #45
* Change fmt.Errorf to errors.new for simple strings
2016-12-03 12:32:16 +01:00
Thomas Pelletier d464759235 Bump test go patchlevels (#113)
* 1.6.4
* 1.7.4
2016-12-02 11:42:58 +01:00
Thomas Pelletier 7cb988051d Make values come before tables in ToString output (#111)
If no order on the key is enforced in ToString, the following tree:

foo = 1
bar = "baz"
foobar = true
[qux]
  foo = 1
  bar = "baz"

may come out as:

bar = "baz"
foobar = true
[qux]
  foo = 1
  bar = "baz"
foo = 1

which is incorrect, since putting that back to the parser would panic
because of a duplicated key (qux.foo). Those changes make sure that
leaf values come before tables in the ToString output.
2016-11-23 16:24:52 +01:00
Thomas Pelletier 3ddb37c944 Fix []*Toml.Tree being wrapped in *Toml.Value (#110)
Nodes can be either *Toml.Tree, []*Toml.Tree, or *Toml.Value.
Arrays of trees were incorrectly wrapped in a *Toml.Value,
making the conversion functions think they were leaf nodes.
2016-11-23 15:48:39 +01:00
Thomas Pelletier f7f14983c3 Update travis to go1.7.3 (#109) 2016-11-23 15:21:57 +01:00
Cameron Moore 45932ad32d Handle nil, map[string]string, and map[interface{}]interface{} input (#103)
* Handle map[string]string and map[interface{}]interface{} input
* Handle nil values

Fixes #99
2016-09-20 09:07:15 +02:00
Cameron Moore 67b7b944a8 Support all numeric type conversions (#102)
Fixes #101
2016-09-20 09:04:39 +02:00
Thomas Pelletier 31055c2ff0 Allow empty quoted keys (#97) 2016-09-06 22:25:57 +02:00
Cameron Moore 5a62685873 Add license and Go Report Card badges to README (#93) 2016-08-23 09:47:07 +02:00
Cameron Moore d05a14897c Fix typo in comment (#94) 2016-08-23 09:46:25 +02:00
Cameron Moore 0599275eb9 Simplify redundant types in literals (#95)
Using `gofmt -s`
2016-08-23 09:45:54 +02:00
Cameron Moore 0049ab3dc4 Update Travis build (#89)
* Test with the latest releases.
* Allow tip to fail.
2016-08-22 14:27:12 +02:00
Cameron Moore bfe4a7e160 Fix gofmt and golint issues (#90) 2016-08-22 11:20:25 +02:00
Thomas Pelletier e6271032cc Move license to LICENSE file (#91) 2016-08-22 11:17:53 +02:00
Cameron Moore 887411a2a8 Add \U support to query lexer (#88) 2016-08-22 10:55:12 +02:00
Thomas Pelletier 31c735e72c Test with go 1.7. Stop testing with 1.4 (#87) 2016-08-16 14:03:31 +02:00
Thomas Pelletier 06484b677b Fix ToMap conversion of array of tables (#83) 2016-08-15 21:00:14 +02:00
Thomas Pelletier de2e921d55 TOML to JSON cli tool (#85)
* Implement tomljson
* Add note about tools in README
2016-08-14 13:50:18 +02:00
Thomas Pelletier 7f292800de Target latest Go patch level in Travis (#80) 2016-07-25 09:41:11 +02:00
Sam Broughton 923742e542 Fix String() comment (#79) 2016-07-22 09:53:40 +02:00
Sam Broughton 65ad89c1a7 Pointer cleanup (#78)
Remove unnecessary pointer receivers for Position and QueryResult
2016-07-21 16:42:51 +02:00
Thomas Pelletier 64ff1ea4d5 Don't hang when reading an invalid rvalue (#77)
Fixes #76
2016-06-30 16:21:25 +02:00
Sam Broughton b39f6ef1f9 Add a toml linter (#74)
* Add a toml linter

* Use if/else instead of os.Exit(0)

* Add usage warning about destructive changes
2016-06-06 12:29:13 +02:00
Sam Broughton c187221f01 Implement fmt.Stringer and alias ToString (#73) 2016-06-06 10:23:55 +02:00
Thomas Pelletier 8e6ab94eec Fix inline tables parsing
Inline tables were wrapped inside a TomlValue, although they should
just be part of the tree.
2016-04-22 17:38:16 +02:00
Thomas Pelletier 8d9c606c69 Improve test coverage (#66) 2016-04-22 14:26:15 +02:00
Thomas Pelletier 288bc57940 Better logging for parser tests (#65)
* Better logging for parser tests

* Add spew to tests deps list
2016-04-22 11:01:31 +02:00
Thomas Pelletier e3b2497729 TomlTree.ToMap (#59)
* Extract TomlTree conversion to its own file

* Implement ToMap

* Reorder imports in tomltree_conversions
2016-04-22 09:46:28 +02:00
Thomas Pelletier 1a8565204c Fix multiline strings (#62) 2016-04-21 17:47:41 +02:00
Thomas Pelletier e58cfd32d4 Bump to golang 1.6.2 on Travis 2016-04-21 09:22:47 +02:00
Cameron Moore a2ae216b47 Add more token tests (#58) 2016-04-19 09:43:26 +02:00
Thomas Pelletier 8645be8dc7 Merge pull request #57 from moorereason/simplify
Fix a couple issues found by gosimple
2016-04-19 09:41:51 +02:00
Cameron Moore 99b9371c53 Use strings.ContainsRune instead of IndexRune 2016-04-18 17:14:57 -05:00
Cameron Moore 92c565e02b Use literal string for regexp pattern 2016-04-18 17:14:18 -05:00
Cameron Moore 6e26017b00 Clean up lint (#56)
The only real change in this commit is that MaxInt is made private.
Everything else should be gofmt'ing, docs and cleanup of lint.
2016-04-18 16:58:23 +02:00
Thomas Pelletier 9d93af61de Add couple tests 2016-04-18 16:46:44 +02:00
Thomas Pelletier 4d8fb95ffe Update coveralls badge 2016-04-18 10:02:19 +02:00
Thomas Pelletier 0e41db2176 Update documentation for Query
Fix #54
2016-04-18 09:51:42 +02:00
Thomas Pelletier afca7f3334 Hardcode Go versions in .travis.yml 2016-04-13 09:23:15 +02:00
Thomas Pelletier d6a90e60ed Fix #52: query matcher doesn't handle arrays tables
Also improve coverage of query matcher.
2016-03-16 09:56:04 -07:00
Thomas Pelletier fe63e9f76d Run tests for 1.6 2016-02-20 13:29:42 +01:00
Thomas Pelletier 7f50e4c339 Merge pull request #51 from pelletier/pelletier/fix-crlf-support
Fix support for CRLF line ending
2016-02-20 13:20:03 +01:00
Thomas Pelletier a402e618c3 sudo is not needed by travis anymore 2016-02-19 14:17:07 +01:00
Thomas Pelletier 2df083520a Fix support for CRLF line ending 2016-02-19 14:12:13 +01:00
31 changed files with 1943 additions and 665 deletions
+14 -7
View File
@@ -1,14 +1,21 @@
language: go
script: "./test.sh"
sudo: false
language: go
go:
- 1.3.3
- 1.4.2
- 1.5.3
- tip
- 1.6.4
- 1.7.5
- 1.8
- tip
matrix:
allow_failures:
- go: tip
fast_finish: true
script:
- ./test.sh
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
branches:
only: [master]
after_success:
- $HOME/gopath/bin/goveralls -service=travis-ci
- $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN
+21
View File
@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+21 -20
View File
@@ -6,8 +6,10 @@ This library supports TOML version
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
[![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml)
[![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE)
[![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml)
[![Coverage Status](https://coveralls.io/repos/pelletier/go-toml/badge.svg?branch=master&service=github)](https://coveralls.io/github/pelletier/go-toml?branch=master)
[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml)
## Features
@@ -81,6 +83,23 @@ if err != nil {
The documentation and additional examples are available at
[godoc.org](http://godoc.org/github.com/pelletier/go-toml).
## Tools
Go-toml provides two handy command line tools:
* `tomll`: Reads TOML files and lint them.
```
go install github.com/pelletier/go-toml/cmd/tomll
tomll --help
```
* `tomljson`: Reads a TOML file and outputs its JSON representation.
```
go install github.com/pelletier/go-toml/cmd/tomljson
tomljson --help
```
## Contribute
Feel free to report bugs and patches using GitHub's pull requests system on
@@ -98,22 +117,4 @@ You can run both of them using `./test.sh`.
## License
Copyright (c) 2013 - 2016 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
The MIT License (MIT). Read [LICENSE](LICENSE).
+2 -1
View File
@@ -3,11 +3,12 @@ package main
import (
"encoding/json"
"fmt"
"github.com/pelletier/go-toml"
"io/ioutil"
"log"
"os"
"time"
"github.com/pelletier/go-toml"
)
func main() {
+67
View File
@@ -0,0 +1,67 @@
package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"os"
"github.com/pelletier/go-toml"
)
func main() {
flag.Usage = func() {
fmt.Fprintln(os.Stderr, `tomljson can be used in two ways:
Writing to STDIN and reading from STDOUT:
cat file.toml | tomljson > file.json
Reading from a file name:
tomljson file.toml
`)
}
flag.Parse()
os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
}
func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
// read from stdin and print to stdout
inputReader := defaultInput
if len(files) > 0 {
var err error
inputReader, err = os.Open(files[0])
if err != nil {
printError(err, errorOutput)
return -1
}
}
s, err := reader(inputReader)
if err != nil {
printError(err, errorOutput)
return -1
}
io.WriteString(output, s+"\n")
return 0
}
func printError(err error, output io.Writer) {
io.WriteString(output, err.Error()+"\n")
}
func reader(r io.Reader) (string, error) {
tree, err := toml.LoadReader(r)
if err != nil {
return "", err
}
return mapToJSON(tree)
}
func mapToJSON(tree *toml.TomlTree) (string, error) {
treeMap := tree.ToMap()
bytes, err := json.MarshalIndent(treeMap, "", " ")
if err != nil {
return "", err
}
return string(bytes[:]), nil
}
+82
View File
@@ -0,0 +1,82 @@
package main
import (
"bytes"
"io/ioutil"
"os"
"strings"
"testing"
)
func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
output := buffer.String()
if output != expected {
t.Errorf("incorrect %s:\n%s\n\nexpected %s:\n%s", name, output, name, expected)
t.Log([]rune(output))
t.Log([]rune(expected))
}
}
func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
inputReader := strings.NewReader(input)
outputBuffer := new(bytes.Buffer)
errorBuffer := new(bytes.Buffer)
returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
expectBufferEquality(t, "output", outputBuffer, expectedOutput)
expectBufferEquality(t, "error", errorBuffer, expectedError)
if returnCode != exitCode {
t.Error("incorrect return code:", returnCode, "expected", exitCode)
}
}
func TestProcessMainReadFromStdin(t *testing.T) {
input := `
[mytoml]
a = 42`
expectedOutput := `{
"mytoml": {
"a": 42
}
}
`
expectedError := ``
expectedExitCode := 0
expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
}
func TestProcessMainReadFromFile(t *testing.T) {
input := `
[mytoml]
a = 42`
tmpfile, err := ioutil.TempFile("", "example.toml")
if err != nil {
t.Fatal(err)
}
if _, err := tmpfile.Write([]byte(input)); err != nil {
t.Fatal(err)
}
defer os.Remove(tmpfile.Name())
expectedOutput := `{
"mytoml": {
"a": 42
}
}
`
expectedError := ``
expectedExitCode := 0
expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
}
func TestProcessMainReadFromMissingFile(t *testing.T) {
expectedError := `open /this/file/does/not/exist: no such file or directory
`
expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
}
+61
View File
@@ -0,0 +1,61 @@
package main
import (
"flag"
"fmt"
"io"
"io/ioutil"
"os"
"github.com/pelletier/go-toml"
)
func main() {
flag.Usage = func() {
fmt.Fprintln(os.Stderr, `tomll can be used in two ways:
Writing to STDIN and reading from STDOUT:
cat file.toml | tomll > file.toml
Reading and updating a list of files:
tomll a.toml b.toml c.toml
When given a list of files, tomll will modify all files in place without asking.
`)
}
flag.Parse()
// read from stdin and print to stdout
if flag.NArg() == 0 {
s, err := lintReader(os.Stdin)
if err != nil {
io.WriteString(os.Stderr, err.Error())
os.Exit(-1)
}
io.WriteString(os.Stdout, s)
} else {
// otherwise modify a list of files
for _, filename := range flag.Args() {
s, err := lintFile(filename)
if err != nil {
io.WriteString(os.Stderr, err.Error())
os.Exit(-1)
}
ioutil.WriteFile(filename, []byte(s), 0644)
}
}
}
func lintFile(filename string) (string, error) {
tree, err := toml.LoadFile(filename)
if err != nil {
return "", err
}
return tree.String(), nil
}
func lintReader(r io.Reader) (string, error) {
tree, err := toml.LoadReader(r)
if err != nil {
return "", err
}
return tree.String(), nil
}
+11 -6
View File
@@ -22,8 +22,8 @@
// After parsing TOML data with Load() or LoadFile(), use the Has() and Get()
// methods on the returned TomlTree, to find your way through the document data.
//
// if tree.Has('foo') {
// fmt.Prinln("foo is: %v", tree.Get('foo'))
// if tree.Has("foo") {
// fmt.Println("foo is:", tree.Get("foo"))
// }
//
// Working with Paths
@@ -44,10 +44,10 @@
// it avoids having to parse the passed key for '.' delimiters.
//
// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
// tree.HasPath(string{}{"foo","bar","baz"})
// tree.HasPath([]string{"foo","bar","baz"})
//
// // returns the key at this path, if it is there
// tree.GetPath(string{}{"foo","bar","baz"})
// tree.GetPath([]string{"foo","bar","baz"})
//
// Note that this is distinct from the heavyweight query syntax supported by
// TomlTree.Query() and the Query() struct (see below).
@@ -83,9 +83,9 @@
// The idea behind a query path is to allow quick access to any element, or set
// of elements within TOML document, with a single expression.
//
// result := tree.Query("$.foo.bar.baz") // result is 'nil' if the path is not present
// result, err := tree.Query("$.foo.bar.baz")
//
// This is equivalent to:
// This is roughly equivalent to:
//
// next := tree.Get("foo")
// if next != nil {
@@ -96,6 +96,11 @@
// }
// result := next
//
// err is nil if any parsing exception occurs.
//
// If no node in the tree matches the query, result will simply contain an empty list of
// items.
//
// As illustrated above, the query path is much more efficient, especially since
// the structure of the TOML file can vary. Rather than making assumptions about
// a document's structure, a query allows the programmer to make structured
+29
View File
@@ -0,0 +1,29 @@
# This is a TOML document. Boom.
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
organization = "GitHub"
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true
[servers]
# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
[clients]
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
+19 -6
View File
@@ -4,6 +4,7 @@ package toml
import (
"bytes"
"errors"
"fmt"
"unicode"
)
@@ -12,6 +13,7 @@ func parseKey(key string) ([]string, error) {
groups := []string{}
var buffer bytes.Buffer
inQuotes := false
wasInQuotes := false
escapeNext := false
ignoreSpace := true
expectDot := false
@@ -33,16 +35,27 @@ func parseKey(key string) ([]string, error) {
escapeNext = true
continue
case '"':
if inQuotes {
groups = append(groups, buffer.String())
buffer.Reset()
wasInQuotes = true
}
inQuotes = !inQuotes
expectDot = false
case '.':
if inQuotes {
buffer.WriteRune(char)
} else {
groups = append(groups, buffer.String())
buffer.Reset()
if !wasInQuotes {
if buffer.Len() == 0 {
return nil, errors.New("empty table key")
}
groups = append(groups, buffer.String())
buffer.Reset()
}
ignoreSpace = true
expectDot = false
wasInQuotes = false
}
case ' ':
if inQuotes {
@@ -55,23 +68,23 @@ func parseKey(key string) ([]string, error) {
return nil, fmt.Errorf("invalid bare character: %c", char)
}
if !inQuotes && expectDot {
return nil, fmt.Errorf("what?")
return nil, errors.New("what?")
}
buffer.WriteRune(char)
expectDot = false
}
}
if inQuotes {
return nil, fmt.Errorf("mismatched quotes")
return nil, errors.New("mismatched quotes")
}
if escapeNext {
return nil, fmt.Errorf("unfinished escape sequence")
return nil, errors.New("unfinished escape sequence")
}
if buffer.Len() > 0 {
groups = append(groups, buffer.String())
}
if len(groups) == 0 {
return nil, fmt.Errorf("empty key")
return nil, errors.New("empty key")
}
return groups, nil
}
+7
View File
@@ -7,6 +7,7 @@ import (
func testResult(t *testing.T, key string, expected []string) {
parsed, err := parseKey(key)
t.Logf("key=%s expected=%s parsed=%s", key, expected, parsed)
if err != nil {
t.Fatal("Unexpected error:", err)
}
@@ -43,7 +44,13 @@ func TestBaseKeyPound(t *testing.T) {
testError(t, "hello#world", "invalid bare character: #")
}
func TestQuotedKeys(t *testing.T) {
testResult(t, `hello."foo".bar`, []string{"hello", "foo", "bar"})
testResult(t, `"hello!"`, []string{"hello!"})
}
func TestEmptyKey(t *testing.T) {
testError(t, "", "empty key")
testError(t, " ", "empty key")
testResult(t, `""`, []string{""})
}
+127 -67
View File
@@ -1,17 +1,19 @@
// TOML lexer.
//
// Written using the principles developped by Rob Pike in
// Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE
package toml
import (
"errors"
"fmt"
"github.com/pelletier/go-buffruneio"
"io"
"regexp"
"strconv"
"strings"
"github.com/pelletier/go-buffruneio"
)
var dateRegexp *regexp.Regexp
@@ -34,7 +36,7 @@ type tomlLexer struct {
// Basic read operations on input
func (l *tomlLexer) read() rune {
r, err := l.input.ReadRune()
r, _, err := l.input.ReadRune()
if err != nil {
panic(err)
}
@@ -87,7 +89,7 @@ func (l *tomlLexer) emit(t tokenType) {
}
func (l *tomlLexer) peek() rune {
r, err := l.input.ReadRune()
r, _, err := l.input.ReadRune()
if err != nil {
panic(err)
}
@@ -97,7 +99,7 @@ func (l *tomlLexer) peek() rune {
func (l *tomlLexer) follow(next string) bool {
for _, expectedRune := range next {
r, err := l.input.ReadRune()
r, _, err := l.input.ReadRune()
defer l.input.UnreadRune()
if err != nil {
panic(err)
@@ -127,11 +129,13 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
next := l.peek()
switch next {
case '[':
return l.lexKeyGroup
return l.lexTableKey
case '#':
return l.lexComment
return l.lexComment(l.lexVoid)
case '=':
return l.lexEqual
case '\r':
fallthrough
case '\n':
l.skip()
continue
@@ -178,13 +182,15 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
case '}':
return l.lexRightCurlyBrace
case '#':
return l.lexComment
return l.lexComment(l.lexRvalue)
case '"':
return l.lexString
case '\'':
return l.lexLiteralString
case ',':
return l.lexComma
case '\r':
fallthrough
case '\n':
l.skip()
if l.depth == 0 {
@@ -213,7 +219,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
break
}
possibleDate := string(l.input.Peek(35))
possibleDate := string(l.input.PeekRunes(35))
dateMatch := dateRegexp.FindString(possibleDate)
if dateMatch != "" {
l.fastForward(len(dateMatch))
@@ -228,6 +234,7 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
return l.lexKey
}
return l.errorf("no value can start with %c", next)
}
l.emit(tokenEOF)
@@ -276,29 +283,43 @@ func (l *tomlLexer) lexComma() tomlLexStateFn {
}
func (l *tomlLexer) lexKey() tomlLexStateFn {
inQuotes := false
for r := l.peek(); isKeyChar(r) || r == '\n'; r = l.peek() {
growingString := ""
for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
if r == '"' {
inQuotes = !inQuotes
l.next()
str, err := l.lexStringAsString(`"`, false, true)
if err != nil {
return l.errorf(err.Error())
}
growingString += `"` + str + `"`
l.next()
continue
} else if r == '\n' {
return l.errorf("keys cannot contain new lines")
} else if isSpace(r) && !inQuotes {
} else if isSpace(r) {
break
} else if !isValidBareChar(r) && !inQuotes {
} else if !isValidBareChar(r) {
return l.errorf("keys cannot contain %c character", r)
}
growingString += string(r)
l.next()
}
l.emit(tokenKey)
l.emitWithValue(tokenKey, growingString)
return l.lexVoid
}
func (l *tomlLexer) lexComment() tomlLexStateFn {
for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
l.next()
func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
return func() tomlLexStateFn {
for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
if next == '\r' && l.follow("\r\n") {
break
}
l.next()
}
l.ignore()
return previousState
}
l.ignore()
return l.lexVoid
}
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
@@ -307,19 +328,14 @@ func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
return l.lexRvalue
}
func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
l.skip()
func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
growingString := ""
// handle special case for triple-quote
terminator := "'"
if l.follow("''") {
l.skip()
l.skip()
terminator = "'''"
// special case: discard leading newline
if l.peek() == '\n' {
if discardLeadingNewLine {
if l.follow("\r\n") {
l.skip()
l.skip()
} else if l.peek() == '\n' {
l.skip()
}
}
@@ -327,10 +343,7 @@ func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
// find end of string
for {
if l.follow(terminator) {
l.emitWithValue(tokenString, growingString)
l.fastForward(len(terminator))
l.ignore()
return l.lexRvalue
return growingString, nil
}
next := l.peek()
@@ -340,32 +353,51 @@ func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
growingString += string(l.next())
}
return l.errorf("unclosed string")
return "", errors.New("unclosed string")
}
func (l *tomlLexer) lexString() tomlLexStateFn {
func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
l.skip()
growingString := ""
// handle special case for triple-quote
terminator := "\""
if l.follow("\"\"") {
terminator := "'"
discardLeadingNewLine := false
if l.follow("''") {
l.skip()
l.skip()
terminator = "\"\"\""
terminator = "'''"
discardLeadingNewLine = true
}
// special case: discard leading newline
if l.peek() == '\n' {
str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine)
if err != nil {
return l.errorf(err.Error())
}
l.emitWithValue(tokenString, str)
l.fastForward(len(terminator))
l.ignore()
return l.lexRvalue
}
// Lex a string and return the results as a string.
// Terminator is the substring indicating the end of the token.
// The resulting string does not include the terminator.
func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
growingString := ""
if discardLeadingNewLine {
if l.follow("\r\n") {
l.skip()
l.skip()
} else if l.peek() == '\n' {
l.skip()
}
}
for {
if l.follow(terminator) {
l.emitWithValue(tokenString, growingString)
l.fastForward(len(terminator))
l.ignore()
return l.lexRvalue
return growingString, nil
}
if l.follow("\\") {
@@ -412,14 +444,14 @@ func (l *tomlLexer) lexString() tomlLexStateFn {
for i := 0; i < 4; i++ {
c := l.peek()
if !isHexDigit(c) {
return l.errorf("unfinished unicode escape")
return "", errors.New("unfinished unicode escape")
}
l.next()
code = code + string(c)
}
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return l.errorf("invalid unicode escape: \\u" + code)
return "", errors.New("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
case 'U':
@@ -428,23 +460,24 @@ func (l *tomlLexer) lexString() tomlLexStateFn {
for i := 0; i < 8; i++ {
c := l.peek()
if !isHexDigit(c) {
return l.errorf("unfinished unicode escape")
return "", errors.New("unfinished unicode escape")
}
l.next()
code = code + string(c)
}
intcode, err := strconv.ParseInt(code, 16, 64)
if err != nil {
return l.errorf("invalid unicode escape: \\U" + code)
return "", errors.New("invalid unicode escape: \\U" + code)
}
growingString += string(rune(intcode))
default:
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
}
} else {
r := l.peek()
if 0x00 <= r && r <= 0x1F {
return l.errorf("unescaped control character %U", r)
if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
return "", fmt.Errorf("unescaped control character %U", r)
}
l.next()
growingString += string(r)
@@ -455,24 +488,51 @@ func (l *tomlLexer) lexString() tomlLexStateFn {
}
}
return l.errorf("unclosed string")
return "", errors.New("unclosed string")
}
func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
func (l *tomlLexer) lexString() tomlLexStateFn {
l.skip()
// handle special case for triple-quote
terminator := `"`
discardLeadingNewLine := false
acceptNewLines := false
if l.follow(`""`) {
l.skip()
l.skip()
terminator = `"""`
discardLeadingNewLine = true
acceptNewLines = true
}
str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
if err != nil {
return l.errorf(err.Error())
}
l.emitWithValue(tokenString, str)
l.fastForward(len(terminator))
l.ignore()
return l.lexRvalue
}
func (l *tomlLexer) lexTableKey() tomlLexStateFn {
l.next()
if l.peek() == '[' {
// token '[[' signifies an array of anonymous key groups
// token '[[' signifies an array of tables
l.next()
l.emit(tokenDoubleLeftBracket)
return l.lexInsideKeyGroupArray
return l.lexInsideTableArrayKey
}
// vanilla key group
// vanilla table key
l.emit(tokenLeftBracket)
return l.lexInsideKeyGroup
return l.lexInsideTableKey
}
func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
@@ -487,15 +547,15 @@ func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
l.emit(tokenDoubleRightBracket)
return l.lexVoid
case '[':
return l.errorf("group name cannot contain ']'")
return l.errorf("table array key cannot contain ']'")
default:
l.next()
}
}
return l.errorf("unclosed key group array")
return l.errorf("unclosed table array key")
}
func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
for r := l.peek(); r != eof; r = l.peek() {
switch r {
case ']':
@@ -506,12 +566,12 @@ func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
l.emit(tokenRightBracket)
return l.lexVoid
case '[':
return l.errorf("group name cannot contain ']'")
return l.errorf("table key cannot contain ']'")
default:
l.next()
}
}
return l.errorf("unclosed key group")
return l.errorf("unclosed table key")
}
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
@@ -578,7 +638,7 @@ func (l *tomlLexer) run() {
}
func init() {
dateRegexp = regexp.MustCompile("^\\d{1,4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(\\.\\d{1,9})?(Z|[+-]\\d{2}:\\d{2})")
dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
}
// Entry point
+388 -295
View File
@@ -37,248 +37,278 @@ func testFlow(t *testing.T, input string, expectedFlow []token) {
func TestValidKeyGroup(t *testing.T) {
testFlow(t, "[hello world]", []token{
token{Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, "hello world"},
token{Position{1, 13}, tokenRightBracket, "]"},
token{Position{1, 14}, tokenEOF, ""},
{Position{1, 1}, tokenLeftBracket, "["},
{Position{1, 2}, tokenKeyGroup, "hello world"},
{Position{1, 13}, tokenRightBracket, "]"},
{Position{1, 14}, tokenEOF, ""},
})
}
func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
testFlow(t, `[ j . "ʞ" . l ]`, []token{
token{Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
token{Position{1, 15}, tokenRightBracket, "]"},
token{Position{1, 16}, tokenEOF, ""},
{Position{1, 1}, tokenLeftBracket, "["},
{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
{Position{1, 15}, tokenRightBracket, "]"},
{Position{1, 16}, tokenEOF, ""},
})
}
func TestUnclosedKeyGroup(t *testing.T) {
testFlow(t, "[hello world", []token{
token{Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenError, "unclosed key group"},
{Position{1, 1}, tokenLeftBracket, "["},
{Position{1, 2}, tokenError, "unclosed table key"},
})
}
func TestComment(t *testing.T) {
testFlow(t, "# blahblah", []token{
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestKeyGroupComment(t *testing.T) {
testFlow(t, "[hello world] # blahblah", []token{
token{Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, "hello world"},
token{Position{1, 13}, tokenRightBracket, "]"},
token{Position{1, 25}, tokenEOF, ""},
{Position{1, 1}, tokenLeftBracket, "["},
{Position{1, 2}, tokenKeyGroup, "hello world"},
{Position{1, 13}, tokenRightBracket, "]"},
{Position{1, 25}, tokenEOF, ""},
})
}
func TestMultipleKeyGroupsComment(t *testing.T) {
testFlow(t, "[hello world] # blahblah\n[test]", []token{
token{Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, "hello world"},
token{Position{1, 13}, tokenRightBracket, "]"},
token{Position{2, 1}, tokenLeftBracket, "["},
token{Position{2, 2}, tokenKeyGroup, "test"},
token{Position{2, 6}, tokenRightBracket, "]"},
token{Position{2, 7}, tokenEOF, ""},
{Position{1, 1}, tokenLeftBracket, "["},
{Position{1, 2}, tokenKeyGroup, "hello world"},
{Position{1, 13}, tokenRightBracket, "]"},
{Position{2, 1}, tokenLeftBracket, "["},
{Position{2, 2}, tokenKeyGroup, "test"},
{Position{2, 6}, tokenRightBracket, "]"},
{Position{2, 7}, tokenEOF, ""},
})
}
func TestSimpleWindowsCRLF(t *testing.T) {
testFlow(t, "a=4\r\nb=2", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 2}, tokenEqual, "="},
{Position{1, 3}, tokenInteger, "4"},
{Position{2, 1}, tokenKey, "b"},
{Position{2, 2}, tokenEqual, "="},
{Position{2, 3}, tokenInteger, "2"},
{Position{2, 4}, tokenEOF, ""},
})
}
func TestBasicKey(t *testing.T) {
testFlow(t, "hello", []token{
token{Position{1, 1}, tokenKey, "hello"},
token{Position{1, 6}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "hello"},
{Position{1, 6}, tokenEOF, ""},
})
}
func TestBasicKeyWithUnderscore(t *testing.T) {
testFlow(t, "hello_hello", []token{
token{Position{1, 1}, tokenKey, "hello_hello"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "hello_hello"},
{Position{1, 12}, tokenEOF, ""},
})
}
func TestBasicKeyWithDash(t *testing.T) {
testFlow(t, "hello-world", []token{
token{Position{1, 1}, tokenKey, "hello-world"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "hello-world"},
{Position{1, 12}, tokenEOF, ""},
})
}
func TestBasicKeyWithUppercaseMix(t *testing.T) {
testFlow(t, "helloHELLOHello", []token{
token{Position{1, 1}, tokenKey, "helloHELLOHello"},
token{Position{1, 16}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "helloHELLOHello"},
{Position{1, 16}, tokenEOF, ""},
})
}
func TestBasicKeyWithInternationalCharacters(t *testing.T) {
testFlow(t, "héllÖ", []token{
token{Position{1, 1}, tokenKey, "héllÖ"},
token{Position{1, 6}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "héllÖ"},
{Position{1, 6}, tokenEOF, ""},
})
}
func TestBasicKeyAndEqual(t *testing.T) {
testFlow(t, "hello =", []token{
token{Position{1, 1}, tokenKey, "hello"},
token{Position{1, 7}, tokenEqual, "="},
token{Position{1, 8}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "hello"},
{Position{1, 7}, tokenEqual, "="},
{Position{1, 8}, tokenEOF, ""},
})
}
func TestKeyWithSharpAndEqual(t *testing.T) {
testFlow(t, "key#name = 5", []token{
token{Position{1, 1}, tokenError, "keys cannot contain # character"},
{Position{1, 1}, tokenError, "keys cannot contain # character"},
})
}
func TestKeyWithSymbolsAndEqual(t *testing.T) {
testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
token{Position{1, 1}, tokenError, "keys cannot contain ~ character"},
{Position{1, 1}, tokenError, "keys cannot contain ~ character"},
})
}
func TestKeyEqualStringEscape(t *testing.T) {
testFlow(t, `foo = "hello\""`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello\""},
token{Position{1, 16}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "hello\""},
{Position{1, 16}, tokenEOF, ""},
})
}
func TestKeyEqualStringUnfinished(t *testing.T) {
testFlow(t, `foo = "bar`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenError, "unclosed string"},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unclosed string"},
})
}
func TestKeyEqualString(t *testing.T) {
testFlow(t, `foo = "bar"`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "bar"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "bar"},
{Position{1, 12}, tokenEOF, ""},
})
}
func TestKeyEqualTrue(t *testing.T) {
testFlow(t, "foo = true", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenTrue, "true"},
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenTrue, "true"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestKeyEqualFalse(t *testing.T) {
testFlow(t, "foo = false", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFalse, "false"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenFalse, "false"},
{Position{1, 12}, tokenEOF, ""},
})
}
func TestArrayNestedString(t *testing.T) {
testFlow(t, `a = [ ["hello", "world"] ]`, []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 9}, tokenString, "hello"},
token{Position{1, 15}, tokenComma, ","},
token{Position{1, 18}, tokenString, "world"},
token{Position{1, 24}, tokenRightBracket, "]"},
token{Position{1, 26}, tokenRightBracket, "]"},
token{Position{1, 27}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenLeftBracket, "["},
{Position{1, 7}, tokenLeftBracket, "["},
{Position{1, 9}, tokenString, "hello"},
{Position{1, 15}, tokenComma, ","},
{Position{1, 18}, tokenString, "world"},
{Position{1, 24}, tokenRightBracket, "]"},
{Position{1, 26}, tokenRightBracket, "]"},
{Position{1, 27}, tokenEOF, ""},
})
}
func TestArrayNestedInts(t *testing.T) {
testFlow(t, "a = [ [42, 21], [10] ]", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenInteger, "42"},
token{Position{1, 10}, tokenComma, ","},
token{Position{1, 12}, tokenInteger, "21"},
token{Position{1, 14}, tokenRightBracket, "]"},
token{Position{1, 15}, tokenComma, ","},
token{Position{1, 17}, tokenLeftBracket, "["},
token{Position{1, 18}, tokenInteger, "10"},
token{Position{1, 20}, tokenRightBracket, "]"},
token{Position{1, 22}, tokenRightBracket, "]"},
token{Position{1, 23}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenLeftBracket, "["},
{Position{1, 7}, tokenLeftBracket, "["},
{Position{1, 8}, tokenInteger, "42"},
{Position{1, 10}, tokenComma, ","},
{Position{1, 12}, tokenInteger, "21"},
{Position{1, 14}, tokenRightBracket, "]"},
{Position{1, 15}, tokenComma, ","},
{Position{1, 17}, tokenLeftBracket, "["},
{Position{1, 18}, tokenInteger, "10"},
{Position{1, 20}, tokenRightBracket, "]"},
{Position{1, 22}, tokenRightBracket, "]"},
{Position{1, 23}, tokenEOF, ""},
})
}
func TestArrayInts(t *testing.T) {
testFlow(t, "a = [ 42, 21, 10, ]", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 7}, tokenInteger, "42"},
token{Position{1, 9}, tokenComma, ","},
token{Position{1, 11}, tokenInteger, "21"},
token{Position{1, 13}, tokenComma, ","},
token{Position{1, 15}, tokenInteger, "10"},
token{Position{1, 17}, tokenComma, ","},
token{Position{1, 19}, tokenRightBracket, "]"},
token{Position{1, 20}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenLeftBracket, "["},
{Position{1, 7}, tokenInteger, "42"},
{Position{1, 9}, tokenComma, ","},
{Position{1, 11}, tokenInteger, "21"},
{Position{1, 13}, tokenComma, ","},
{Position{1, 15}, tokenInteger, "10"},
{Position{1, 17}, tokenComma, ","},
{Position{1, 19}, tokenRightBracket, "]"},
{Position{1, 20}, tokenEOF, ""},
})
}
func TestMultilineArrayComments(t *testing.T) {
testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 6}, tokenInteger, "1"},
token{Position{1, 7}, tokenComma, ","},
token{Position{2, 1}, tokenInteger, "2"},
token{Position{2, 2}, tokenComma, ","},
token{Position{3, 1}, tokenInteger, "3"},
token{Position{3, 2}, tokenComma, ","},
token{Position{4, 1}, tokenRightBracket, "]"},
token{Position{4, 2}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenLeftBracket, "["},
{Position{1, 6}, tokenInteger, "1"},
{Position{1, 7}, tokenComma, ","},
{Position{2, 1}, tokenInteger, "2"},
{Position{2, 2}, tokenComma, ","},
{Position{3, 1}, tokenInteger, "3"},
{Position{3, 2}, tokenComma, ","},
{Position{4, 1}, tokenRightBracket, "]"},
{Position{4, 2}, tokenEOF, ""},
})
}
func TestNestedArraysComment(t *testing.T) {
toml := `
someArray = [
# does not work
["entry1"]
]`
testFlow(t, toml, []token{
{Position{2, 1}, tokenKey, "someArray"},
{Position{2, 11}, tokenEqual, "="},
{Position{2, 13}, tokenLeftBracket, "["},
{Position{4, 1}, tokenLeftBracket, "["},
{Position{4, 3}, tokenString, "entry1"},
{Position{4, 10}, tokenRightBracket, "]"},
{Position{5, 1}, tokenRightBracket, "]"},
{Position{5, 2}, tokenEOF, ""},
})
}
func TestKeyEqualArrayBools(t *testing.T) {
testFlow(t, "foo = [true, false, true]", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenTrue, "true"},
token{Position{1, 12}, tokenComma, ","},
token{Position{1, 14}, tokenFalse, "false"},
token{Position{1, 19}, tokenComma, ","},
token{Position{1, 21}, tokenTrue, "true"},
token{Position{1, 25}, tokenRightBracket, "]"},
token{Position{1, 26}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenLeftBracket, "["},
{Position{1, 8}, tokenTrue, "true"},
{Position{1, 12}, tokenComma, ","},
{Position{1, 14}, tokenFalse, "false"},
{Position{1, 19}, tokenComma, ","},
{Position{1, 21}, tokenTrue, "true"},
{Position{1, 25}, tokenRightBracket, "]"},
{Position{1, 26}, tokenEOF, ""},
})
}
func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
testFlow(t, "foo = [true, false, true] # YEAH", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenTrue, "true"},
token{Position{1, 12}, tokenComma, ","},
token{Position{1, 14}, tokenFalse, "false"},
token{Position{1, 19}, tokenComma, ","},
token{Position{1, 21}, tokenTrue, "true"},
token{Position{1, 25}, tokenRightBracket, "]"},
token{Position{1, 33}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenLeftBracket, "["},
{Position{1, 8}, tokenTrue, "true"},
{Position{1, 12}, tokenComma, ","},
{Position{1, 14}, tokenFalse, "false"},
{Position{1, 19}, tokenComma, ","},
{Position{1, 21}, tokenTrue, "true"},
{Position{1, 25}, tokenRightBracket, "]"},
{Position{1, 33}, tokenEOF, ""},
})
}
@@ -296,338 +326,401 @@ func TestDateRegexp(t *testing.T) {
func TestKeyEqualDate(t *testing.T) {
testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
token{Position{1, 27}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
{Position{1, 27}, tokenEOF, ""},
})
testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
token{Position{1, 32}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
{Position{1, 32}, tokenEOF, ""},
})
testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
token{Position{1, 39}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
{Position{1, 39}, tokenEOF, ""},
})
}
func TestFloatEndingWithDot(t *testing.T) {
testFlow(t, "foo = 42.", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenError, "float cannot end with a dot"},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenError, "float cannot end with a dot"},
})
}
func TestFloatWithTwoDots(t *testing.T) {
testFlow(t, "foo = 4.2.", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenError, "cannot have two dots in one float"},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenError, "cannot have two dots in one float"},
})
}
func TestFloatWithExponent1(t *testing.T) {
testFlow(t, "a = 5e+22", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenFloat, "5e+22"},
token{Position{1, 10}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "5e+22"},
{Position{1, 10}, tokenEOF, ""},
})
}
func TestFloatWithExponent2(t *testing.T) {
testFlow(t, "a = 5E+22", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenFloat, "5E+22"},
token{Position{1, 10}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "5E+22"},
{Position{1, 10}, tokenEOF, ""},
})
}
func TestFloatWithExponent3(t *testing.T) {
testFlow(t, "a = -5e+22", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenFloat, "-5e+22"},
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "-5e+22"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestFloatWithExponent4(t *testing.T) {
testFlow(t, "a = -5e-22", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenFloat, "-5e-22"},
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "-5e-22"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestFloatWithExponent5(t *testing.T) {
testFlow(t, "a = 6.626e-34", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenFloat, "6.626e-34"},
token{Position{1, 14}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "6.626e-34"},
{Position{1, 14}, tokenEOF, ""},
})
}
func TestInvalidEsquapeSequence(t *testing.T) {
testFlow(t, `foo = "\x"`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
})
}
func TestNestedArrays(t *testing.T) {
testFlow(t, "foo = [[[]]]", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenLeftBracket, "["},
token{Position{1, 9}, tokenLeftBracket, "["},
token{Position{1, 10}, tokenRightBracket, "]"},
token{Position{1, 11}, tokenRightBracket, "]"},
token{Position{1, 12}, tokenRightBracket, "]"},
token{Position{1, 13}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenLeftBracket, "["},
{Position{1, 8}, tokenLeftBracket, "["},
{Position{1, 9}, tokenLeftBracket, "["},
{Position{1, 10}, tokenRightBracket, "]"},
{Position{1, 11}, tokenRightBracket, "]"},
{Position{1, 12}, tokenRightBracket, "]"},
{Position{1, 13}, tokenEOF, ""},
})
}
func TestKeyEqualNumber(t *testing.T) {
testFlow(t, "foo = 42", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "42"},
token{Position{1, 9}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "42"},
{Position{1, 9}, tokenEOF, ""},
})
testFlow(t, "foo = +42", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "+42"},
token{Position{1, 10}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "+42"},
{Position{1, 10}, tokenEOF, ""},
})
testFlow(t, "foo = -42", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "-42"},
token{Position{1, 10}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "-42"},
{Position{1, 10}, tokenEOF, ""},
})
testFlow(t, "foo = 4.2", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFloat, "4.2"},
token{Position{1, 10}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenFloat, "4.2"},
{Position{1, 10}, tokenEOF, ""},
})
testFlow(t, "foo = +4.2", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFloat, "+4.2"},
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenFloat, "+4.2"},
{Position{1, 11}, tokenEOF, ""},
})
testFlow(t, "foo = -4.2", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFloat, "-4.2"},
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenFloat, "-4.2"},
{Position{1, 11}, tokenEOF, ""},
})
testFlow(t, "foo = 1_000", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "1_000"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "1_000"},
{Position{1, 12}, tokenEOF, ""},
})
testFlow(t, "foo = 5_349_221", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "5_349_221"},
token{Position{1, 16}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "5_349_221"},
{Position{1, 16}, tokenEOF, ""},
})
testFlow(t, "foo = 1_2_3_4_5", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
token{Position{1, 16}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
{Position{1, 16}, tokenEOF, ""},
})
testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
token{Position{1, 1}, tokenKey, "flt8"},
token{Position{1, 6}, tokenEqual, "="},
token{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
token{Position{1, 33}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "flt8"},
{Position{1, 6}, tokenEqual, "="},
{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
{Position{1, 33}, tokenEOF, ""},
})
testFlow(t, "foo = +", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenError, "no digit in that number"},
})
}
func TestMultiline(t *testing.T) {
testFlow(t, "foo = 42\nbar=21", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "42"},
token{Position{2, 1}, tokenKey, "bar"},
token{Position{2, 4}, tokenEqual, "="},
token{Position{2, 5}, tokenInteger, "21"},
token{Position{2, 7}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "42"},
{Position{2, 1}, tokenKey, "bar"},
{Position{2, 4}, tokenEqual, "="},
{Position{2, 5}, tokenInteger, "21"},
{Position{2, 7}, tokenEOF, ""},
})
}
func TestKeyEqualStringUnicodeEscape(t *testing.T) {
testFlow(t, `foo = "hello \u2665"`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello ♥"},
token{Position{1, 21}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "hello ♥"},
{Position{1, 21}, tokenEOF, ""},
})
testFlow(t, `foo = "hello \U000003B4"`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello δ"},
token{Position{1, 25}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "hello δ"},
{Position{1, 25}, tokenEOF, ""},
})
testFlow(t, `foo = "\u2"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unfinished unicode escape"},
})
testFlow(t, `foo = "\U2"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unfinished unicode escape"},
})
}
func TestKeyEqualStringNoEscape(t *testing.T) {
testFlow(t, "foo = \"hello \u0002\"", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenError, "unescaped control character U+0002"},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unescaped control character U+0002"},
})
testFlow(t, "foo = \"hello \u001F\"", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenError, "unescaped control character U+001F"},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unescaped control character U+001F"},
})
}
func TestLiteralString(t *testing.T) {
testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
token{Position{1, 34}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
token{Position{1, 35}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
{Position{1, 35}, tokenEOF, ""},
})
testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
token{Position{1, 34}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, `foo = '<\i\c*\s*>'`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, `<\i\c*\s*>`},
token{Position{1, 19}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `<\i\c*\s*>`},
{Position{1, 19}, tokenEOF, ""},
})
testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unclosed string"},
})
}
func TestMultilineLiteralString(t *testing.T) {
testFlow(t, `foo = '''hello 'literal' world'''`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 10}, tokenString, `hello 'literal' world`},
token{Position{1, 34}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 10}, tokenString, `hello 'literal' world`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
token{Position{4, 9}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
{Position{4, 9}, tokenEOF, ""},
})
testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
{Position{4, 9}, tokenEOF, ""},
})
}
func TestMultilineString(t *testing.T) {
testFlow(t, `foo = """hello "literal" world"""`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 10}, tokenString, `hello "literal" world`},
token{Position{1, 34}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 10}, tokenString, `hello "literal" world`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, "foo = \"\"\"\nhello\\\n\"literal\"\\\nworld\"\"\"", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{2, 1}, tokenString, "hello\"literal\"world"},
token{Position{4, 9}, tokenEOF, ""},
testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{2, 1}, tokenString, "hello\"literal\"world"},
{Position{4, 9}, tokenEOF, ""},
})
testFlow(t, "foo = \"\"\"\\\n \\\n \\\n hello\\\nmultiline\\\nworld\"\"\"", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 10}, tokenString, "hellomultilineworld"},
token{Position{6, 9}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 10}, tokenString, "hellomultilineworld"},
{Position{6, 9}, tokenEOF, ""},
})
testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
token{Position{1, 1}, tokenKey, "key2"},
token{Position{1, 6}, tokenEqual, "="},
token{Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
token{Position{6, 21}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
{Position{6, 21}, tokenEOF, ""},
})
testFlow(t, "key2 = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"", []token{
token{Position{1, 1}, tokenKey, "key2"},
token{Position{1, 6}, tokenEqual, "="},
token{Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
token{Position{5, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
{Position{5, 11}, tokenEOF, ""},
})
testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
{Position{1, 41}, tokenEOF, ""},
})
testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
{Position{3, 20}, tokenEOF, ""},
})
}
func TestUnicodeString(t *testing.T) {
testFlow(t, `foo = "hello ♥ world"`, []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello ♥ world"},
token{Position{1, 22}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "hello ♥ world"},
{Position{1, 22}, tokenEOF, ""},
})
}
func TestEscapeInString(t *testing.T) {
testFlow(t, `foo = "\b\f\/"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "\b\f/"},
{Position{1, 15}, tokenEOF, ""},
})
}
func TestKeyGroupArray(t *testing.T) {
testFlow(t, "[[foo]]", []token{
token{Position{1, 1}, tokenDoubleLeftBracket, "[["},
token{Position{1, 3}, tokenKeyGroupArray, "foo"},
token{Position{1, 6}, tokenDoubleRightBracket, "]]"},
token{Position{1, 8}, tokenEOF, ""},
{Position{1, 1}, tokenDoubleLeftBracket, "[["},
{Position{1, 3}, tokenKeyGroupArray, "foo"},
{Position{1, 6}, tokenDoubleRightBracket, "]]"},
{Position{1, 8}, tokenEOF, ""},
})
}
func TestQuotedKey(t *testing.T) {
testFlow(t, "\"a b\" = 42", []token{
token{Position{1, 1}, tokenKey, "\"a b\""},
token{Position{1, 7}, tokenEqual, "="},
token{Position{1, 9}, tokenInteger, "42"},
token{Position{1, 11}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "\"a b\""},
{Position{1, 7}, tokenEqual, "="},
{Position{1, 9}, tokenInteger, "42"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestKeyNewline(t *testing.T) {
testFlow(t, "a\n= 4", []token{
token{Position{1, 1}, tokenError, "keys cannot contain new lines"},
{Position{1, 1}, tokenError, "keys cannot contain new lines"},
})
}
func TestInvalidFloat(t *testing.T) {
testFlow(t, "a=7e1_", []token{
token{Position{1, 1}, tokenKey, "a"},
token{Position{1, 2}, tokenEqual, "="},
token{Position{1, 3}, tokenFloat, "7e1_"},
token{Position{1, 7}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "a"},
{Position{1, 2}, tokenEqual, "="},
{Position{1, 3}, tokenFloat, "7e1_"},
{Position{1, 7}, tokenEOF, ""},
})
}
func TestLexUnknownRvalue(t *testing.T) {
testFlow(t, `a = !b`, []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenError, "no value can start with !"},
})
testFlow(t, `a = \b`, []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenError, `no value can start with \`},
})
}
+8 -1
View File
@@ -67,7 +67,14 @@ func newMatchKeyFn(name string) *matchKeyFn {
}
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
if tree, ok := node.(*TomlTree); ok {
if array, ok := node.([]*TomlTree); ok {
for _, tree := range array {
item := tree.values[f.Name]
if item != nil {
f.next.call(item, ctx)
}
}
} else if tree, ok := node.(*TomlTree); ok {
item := tree.values[f.Name]
if item != nil {
f.next.call(item, ctx)
+3 -3
View File
@@ -109,7 +109,7 @@ func TestPathSliceStart(t *testing.T) {
assertPath(t,
"$[123:]",
buildPath(
newMatchSliceFn(123, MaxInt, 1),
newMatchSliceFn(123, maxInt, 1),
))
}
@@ -133,7 +133,7 @@ func TestPathSliceStartStep(t *testing.T) {
assertPath(t,
"$[123::7]",
buildPath(
newMatchSliceFn(123, MaxInt, 7),
newMatchSliceFn(123, maxInt, 7),
))
}
@@ -149,7 +149,7 @@ func TestPathSliceStep(t *testing.T) {
assertPath(t,
"$[::7]",
buildPath(
newMatchSliceFn(0, MaxInt, 7),
newMatchSliceFn(0, maxInt, 7),
))
}
+44 -35
View File
@@ -3,6 +3,7 @@
package toml
import (
"errors"
"fmt"
"reflect"
"regexp"
@@ -15,8 +16,8 @@ type tomlParser struct {
flow chan token
tree *TomlTree
tokensBuffer []token
currentGroup []string
seenGroupKeys []string
currentTable []string
seenTableKeys []string
}
type tomlParserStateFn func() tomlParserStateFn
@@ -95,13 +96,13 @@ func (p *tomlParser) parseGroupArray() tomlParserStateFn {
startToken := p.getToken() // discard the [[
key := p.getToken()
if key.typ != tokenKeyGroupArray {
p.raiseError(key, "unexpected token %s, was expecting a key group array", key)
p.raiseError(key, "unexpected token %s, was expecting a table array key", key)
}
// get or create group array element at the indicated part in the path
// get or create table array element at the indicated part in the path
keys, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "invalid group array key: %s", err)
p.raiseError(key, "invalid table array key: %s", err)
}
p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
destTree := p.tree.GetPath(keys)
@@ -111,32 +112,32 @@ func (p *tomlParser) parseGroupArray() tomlParserStateFn {
} else if target, ok := destTree.([]*TomlTree); ok && target != nil {
array = destTree.([]*TomlTree)
} else {
p.raiseError(key, "key %s is already assigned and not of type group array", key)
p.raiseError(key, "key %s is already assigned and not of type table array", key)
}
p.currentGroup = keys
p.currentTable = keys
// add a new tree to the end of the group array
// add a new tree to the end of the table array
newTree := newTomlTree()
newTree.position = startToken.Position
array = append(array, newTree)
p.tree.SetPath(p.currentGroup, array)
p.tree.SetPath(p.currentTable, array)
// remove all keys that were children of this group array
// remove all keys that were children of this table array
prefix := key.val + "."
found := false
for ii := 0; ii < len(p.seenGroupKeys); {
groupKey := p.seenGroupKeys[ii]
if strings.HasPrefix(groupKey, prefix) {
p.seenGroupKeys = append(p.seenGroupKeys[:ii], p.seenGroupKeys[ii+1:]...)
for ii := 0; ii < len(p.seenTableKeys); {
tableKey := p.seenTableKeys[ii]
if strings.HasPrefix(tableKey, prefix) {
p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...)
} else {
found = (groupKey == key.val)
found = (tableKey == key.val)
ii++
}
}
// keep this key name from use by other kinds of assignments
if !found {
p.seenGroupKeys = append(p.seenGroupKeys, key.val)
p.seenTableKeys = append(p.seenTableKeys, key.val)
}
// move to next parser state
@@ -148,24 +149,24 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
startToken := p.getToken() // discard the [
key := p.getToken()
if key.typ != tokenKeyGroup {
p.raiseError(key, "unexpected token %s, was expecting a key group", key)
p.raiseError(key, "unexpected token %s, was expecting a table key", key)
}
for _, item := range p.seenGroupKeys {
for _, item := range p.seenTableKeys {
if item == key.val {
p.raiseError(key, "duplicated tables")
}
}
p.seenGroupKeys = append(p.seenGroupKeys, key.val)
p.seenTableKeys = append(p.seenTableKeys, key.val)
keys, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "invalid group array key: %s", err)
p.raiseError(key, "invalid table array key: %s", err)
}
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
p.raiseError(key, "%s", err)
}
p.assume(tokenRightBracket)
p.currentGroup = keys
p.currentTable = keys
return p.parseStart
}
@@ -174,26 +175,26 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
p.assume(tokenEqual)
value := p.parseRvalue()
var groupKey []string
if len(p.currentGroup) > 0 {
groupKey = p.currentGroup
var tableKey []string
if len(p.currentTable) > 0 {
tableKey = p.currentTable
} else {
groupKey = []string{}
tableKey = []string{}
}
// find the group to assign, looking out for arrays of groups
// find the table to assign, looking out for arrays of tables
var targetNode *TomlTree
switch node := p.tree.GetPath(groupKey).(type) {
switch node := p.tree.GetPath(tableKey).(type) {
case []*TomlTree:
targetNode = node[len(node)-1]
case *TomlTree:
targetNode = node
default:
p.raiseError(key, "Unknown group type for path: %s",
strings.Join(groupKey, "."))
p.raiseError(key, "Unknown table type for path: %s",
strings.Join(tableKey, "."))
}
// assign value to the found group
// assign value to the found table
keyVals, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "%s", err)
@@ -203,12 +204,20 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
}
keyVal := keyVals[0]
localKey := []string{keyVal}
finalKey := append(groupKey, keyVal)
finalKey := append(tableKey, keyVal)
if targetNode.GetPath(localKey) != nil {
p.raiseError(key, "The following key was defined twice: %s",
strings.Join(finalKey, "."))
}
targetNode.values[keyVal] = &tomlValue{value, key.Position}
var toInsert interface{}
switch value.(type) {
case *TomlTree, []*TomlTree:
toInsert = value
default:
toInsert = &tomlValue{value, key.Position}
}
targetNode.values[keyVal] = toInsert
return p.parseStart
}
@@ -216,7 +225,7 @@ var numberUnderscoreInvalidRegexp *regexp.Regexp
func cleanupNumberToken(value string) (string, error) {
if numberUnderscoreInvalidRegexp.MatchString(value) {
return "", fmt.Errorf("invalid use of _ in number")
return "", errors.New("invalid use of _ in number")
}
cleanedVal := strings.Replace(value, "_", "", -1)
return cleanedVal, nil
@@ -372,8 +381,8 @@ func parseToml(flow chan token) *TomlTree {
flow: flow,
tree: result,
tokensBuffer: make([]token, 0),
currentGroup: make([]string, 0),
seenGroupKeys: make([]string, 0),
currentTable: make([]string, 0),
seenTableKeys: make([]string, 0),
}
parser.run()
return result
+166 -40
View File
@@ -2,26 +2,34 @@ package toml
import (
"fmt"
"reflect"
"testing"
"time"
"github.com/davecgh/go-spew/spew"
)
func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interface{}) {
func assertSubTree(t *testing.T, path []string, tree *TomlTree, err error, ref map[string]interface{}) {
if err != nil {
t.Error("Non-nil error:", err.Error())
return
}
for k, v := range ref {
nextPath := append(path, k)
t.Log("asserting path", nextPath)
// NOTE: directly access key instead of resolve by path
// NOTE: see TestSpecialKV
switch node := tree.GetPath([]string{k}).(type) {
case []*TomlTree:
t.Log("\tcomparing key", nextPath, "by array iteration")
for idx, item := range node {
assertTree(t, item, err, v.([]map[string]interface{})[idx])
assertSubTree(t, nextPath, item, err, v.([]map[string]interface{})[idx])
}
case *TomlTree:
assertTree(t, node, err, v.(map[string]interface{}))
t.Log("\tcomparing key", nextPath, "by subtree assestion")
assertSubTree(t, nextPath, node, err, v.(map[string]interface{}))
default:
t.Log("\tcomparing key", nextPath, "by string representation because it's of type", reflect.TypeOf(node))
if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) {
t.Errorf("was expecting %v at %v but got %v", v, k, node)
}
@@ -29,6 +37,12 @@ func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interfac
}
}
func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interface{}) {
t.Log("Asserting tree:\n", spew.Sdump(tree))
assertSubTree(t, []string{}, tree, err, ref)
t.Log("Finished tree assertion.")
}
func TestCreateSubTree(t *testing.T) {
tree := newTomlTree()
tree.createSubTree([]string{"a", "b", "c"}, Position{})
@@ -163,6 +177,16 @@ func TestStringEscapables(t *testing.T) {
})
}
func TestEmptyQuotedString(t *testing.T) {
tree, err := Load(`[""]
"" = 1`)
assertTree(t, tree, err, map[string]interface{}{
"": map[string]interface{}{
"": int64(1),
},
})
}
func TestBools(t *testing.T) {
tree, err := Load("a = true\nb = false")
assertTree(t, tree, err, map[string]interface{}{
@@ -255,14 +279,25 @@ func TestArrayMultiline(t *testing.T) {
func TestArrayNested(t *testing.T) {
tree, err := Load("a = [[42, 21], [10]]")
assertTree(t, tree, err, map[string]interface{}{
"a": [][]int64{[]int64{int64(42), int64(21)}, []int64{int64(10)}},
"a": [][]int64{{int64(42), int64(21)}, {int64(10)}},
})
}
func TestNestedArrayComment(t *testing.T) {
tree, err := Load(`
someArray = [
# does not work
["entry1"]
]`)
assertTree(t, tree, err, map[string]interface{}{
"someArray": [][]string{{"entry1"}},
})
}
func TestNestedEmptyArrays(t *testing.T) {
tree, err := Load("a = [[[]]]")
assertTree(t, tree, err, map[string]interface{}{
"a": [][][]interface{}{[][]interface{}{[]interface{}{}}},
"a": [][][]interface{}{{{}}},
})
}
@@ -281,10 +316,22 @@ func TestArrayMixedTypes(t *testing.T) {
func TestArrayNestedStrings(t *testing.T) {
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
assertTree(t, tree, err, map[string]interface{}{
"data": [][]string{[]string{"gamma", "delta"}, []string{"Foo"}},
"data": [][]string{{"gamma", "delta"}, {"Foo"}},
})
}
func TestParseUnknownRvalue(t *testing.T) {
_, err := Load("a = !bssss")
if err == nil {
t.Error("Expecting a parse error")
}
_, err = Load("a = /b")
if err == nil {
t.Error("Expecting a parse error")
}
}
func TestMissingValue(t *testing.T) {
_, err := Load("a = ")
if err.Error() != "(1, 5): expecting a value" {
@@ -368,7 +415,7 @@ func TestExampleInlineGroupInArray(t *testing.T) {
tree, err := Load(`points = [{ x = 1, y = 2 }]`)
assertTree(t, tree, err, map[string]interface{}{
"points": []map[string]interface{}{
map[string]interface{}{
{
"x": int64(1),
"y": int64(2),
},
@@ -420,7 +467,7 @@ func TestDuplicateKeys(t *testing.T) {
func TestEmptyIntermediateTable(t *testing.T) {
_, err := Load("[foo..bar]")
if err.Error() != "(1, 2): empty intermediate table" {
if err.Error() != "(1, 2): invalid table array key: empty table key" {
t.Error("Bad error message:", err.Error())
}
}
@@ -494,6 +541,42 @@ func TestParseFile(t *testing.T) {
})
}
func TestParseFileCRLF(t *testing.T) {
tree, err := LoadFile("example-crlf.toml")
assertTree(t, tree, err, map[string]interface{}{
"title": "TOML Example",
"owner": map[string]interface{}{
"name": "Tom Preston-Werner",
"organization": "GitHub",
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
"dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
},
"database": map[string]interface{}{
"server": "192.168.1.1",
"ports": []int64{8001, 8001, 8002},
"connection_max": 5000,
"enabled": true,
},
"servers": map[string]interface{}{
"alpha": map[string]interface{}{
"ip": "10.0.0.1",
"dc": "eqdc10",
},
"beta": map[string]interface{}{
"ip": "10.0.0.2",
"dc": "eqdc10",
},
},
"clients": map[string]interface{}{
"data": []interface{}{
[]string{"gamma", "delta"},
[]int64{1, 2},
},
},
})
}
func TestParseKeyGroupArray(t *testing.T) {
tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
assertTree(t, tree, err, map[string]interface{}{
@@ -506,6 +589,40 @@ func TestParseKeyGroupArray(t *testing.T) {
})
}
func TestParseKeyGroupArrayUnfinished(t *testing.T) {
_, err := Load("[[foo.bar]\na = 42")
if err.Error() != "(1, 10): was expecting token [[, but got unclosed table array key instead" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("[[foo.[bar]\na = 42")
if err.Error() != "(1, 3): unexpected token table array key cannot contain ']', was expecting a table array key" {
t.Error("Bad error message:", err.Error())
}
}
func TestParseKeyGroupArrayQueryExample(t *testing.T) {
tree, err := Load(`
[[book]]
title = "The Stand"
author = "Stephen King"
[[book]]
title = "For Whom the Bell Tolls"
author = "Ernest Hemmingway"
[[book]]
title = "Neuromancer"
author = "William Gibson"
`)
assertTree(t, tree, err, map[string]interface{}{
"book": []map[string]interface{}{
{"title": "The Stand", "author": "Stephen King"},
{"title": "For Whom the Bell Tolls", "author": "Ernest Hemmingway"},
{"title": "Neuromancer", "author": "William Gibson"},
},
})
}
func TestParseKeyGroupArraySpec(t *testing.T) {
tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
assertTree(t, tree, err, map[string]interface{}{
@@ -516,12 +633,13 @@ func TestParseKeyGroupArraySpec(t *testing.T) {
})
}
func TestToTomlValue(t *testing.T) {
func TestTomlValueStringRepresentation(t *testing.T) {
for idx, item := range []struct {
Value interface{}
Expect string
}{
{int64(12345), "12345"},
{uint64(50), "50"},
{float64(123.45), "123.45"},
{bool(true), "true"},
{"hello world", "\"hello world\""},
@@ -530,25 +648,28 @@ func TestToTomlValue(t *testing.T) {
{time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
"1979-05-27T07:32:00Z"},
{[]interface{}{"gamma", "delta"},
"[\n \"gamma\",\n \"delta\",\n]"},
"[\"gamma\",\"delta\"]"},
{nil, ""},
} {
result := toTomlValue(item.Value, 0)
result, err := tomlValueStringRepresentation(item.Value)
if err != nil {
t.Errorf("Test %d - unexpected error: %s", idx, err)
}
if result != item.Expect {
t.Errorf("Test %d - got '%s', expected '%s'", idx, result, item.Expect)
}
}
}
func TestToString(t *testing.T) {
tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
if err != nil {
t.Errorf("Test failed to parse: %v", err)
return
}
result := tree.ToString()
expected := "\n[foo]\n\n [[foo.bar]]\n a = 42\n\n [[foo.bar]]\n a = 69\n"
if result != expected {
t.Errorf("Expected got '%s', expected '%s'", result, expected)
func TestToStringMapStringString(t *testing.T) {
in := map[string]interface{}{"m": TreeFromMap(map[string]interface{}{
"v": &tomlValue{"abc", Position{0, 0}}})}
want := "\n[m]\n v = \"abc\"\n"
tree := TreeFromMap(in)
got := tree.String()
if got != want {
t.Errorf("want:\n%q\ngot:\n%q", want, got)
}
}
@@ -572,10 +693,10 @@ func TestDocumentPositions(t *testing.T) {
assertPosition(t,
"[foo]\nbar=42\nbaz=69",
map[string]Position{
"": Position{1, 1},
"foo": Position{1, 1},
"foo.bar": Position{2, 1},
"foo.baz": Position{3, 1},
"": {1, 1},
"foo": {1, 1},
"foo.bar": {2, 1},
"foo.baz": {3, 1},
})
}
@@ -583,10 +704,10 @@ func TestDocumentPositionsWithSpaces(t *testing.T) {
assertPosition(t,
" [foo]\n bar=42\n baz=69",
map[string]Position{
"": Position{1, 1},
"foo": Position{1, 3},
"foo.bar": Position{2, 3},
"foo.baz": Position{3, 3},
"": {1, 1},
"foo": {1, 3},
"foo.bar": {2, 3},
"foo.baz": {3, 3},
})
}
@@ -594,10 +715,10 @@ func TestDocumentPositionsWithGroupArray(t *testing.T) {
assertPosition(t,
"[[foo]]\nbar=42\nbaz=69",
map[string]Position{
"": Position{1, 1},
"foo": Position{1, 1},
"foo.bar": Position{2, 1},
"foo.baz": Position{3, 1},
"": {1, 1},
"foo": {1, 1},
"foo.bar": {2, 1},
"foo.baz": {3, 1},
})
}
@@ -605,19 +726,24 @@ func TestNestedTreePosition(t *testing.T) {
assertPosition(t,
"[foo.bar]\na=42\nb=69",
map[string]Position{
"": Position{1, 1},
"foo": Position{1, 1},
"foo.bar": Position{1, 1},
"foo.bar.a": Position{2, 1},
"foo.bar.b": Position{3, 1},
"": {1, 1},
"foo": {1, 1},
"foo.bar": {1, 1},
"foo.bar.a": {2, 1},
"foo.bar.b": {3, 1},
})
}
func TestInvalidGroupArray(t *testing.T) {
_, err := Load("[key#group]\nanswer = 42")
_, err := Load("[table#key]\nanswer = 42")
if err == nil {
t.Error("Should error")
}
_, err = Load("[foo.[bar]\na = 42")
if err.Error() != "(1, 2): unexpected token table key cannot contain ']', was expecting a table key" {
t.Error("Bad error message:", err.Error())
}
}
func TestDoubleEqual(t *testing.T) {
@@ -629,7 +755,7 @@ func TestDoubleEqual(t *testing.T) {
func TestGroupArrayReassign(t *testing.T) {
_, err := Load("[hello]\n[[hello]]")
if err.Error() != "(2, 3): key \"hello\" is already assigned and not of type group array" {
if err.Error() != "(2, 3): key \"hello\" is already assigned and not of type table array" {
t.Error("Bad error message:", err.Error())
}
}
+2 -2
View File
@@ -18,12 +18,12 @@ type Position struct {
// String representation of the position.
// Displays 1-indexed line and column numbers.
func (p *Position) String() string {
func (p Position) String() string {
return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
}
// Invalid returns whether or not the position is valid (i.e. with negative or
// null values)
func (p *Position) Invalid() bool {
func (p Position) Invalid() bool {
return p.Line <= 0 || p.Col <= 0
}
+3 -3
View File
@@ -18,9 +18,9 @@ func TestPositionString(t *testing.T) {
func TestInvalid(t *testing.T) {
for i, v := range []Position{
Position{0, 1234},
Position{1234, 0},
Position{0, 0},
{0, 1234},
{1234, 0},
{0, 0},
} {
if !v.Invalid() {
t.Errorf("Position at %v is valid: %v", i, v)
+31 -20
View File
@@ -4,37 +4,48 @@ import (
"time"
)
// Type of a user-defined filter function, for use with Query.SetFilter().
// NodeFilterFn represents a user-defined filter function, for use with
// Query.SetFilter().
//
// The return value of the function must indicate if 'node' is to be included
// at this stage of the TOML path. Returning true will include the node, and
// returning false will exclude it.
// The return value of the function must indicate if 'node' is to be included
// at this stage of the TOML path. Returning true will include the node, and
// returning false will exclude it.
//
// NOTE: Care should be taken to write script callbacks such that they are safe
// to use from multiple goroutines.
// NOTE: Care should be taken to write script callbacks such that they are safe
// to use from multiple goroutines.
type NodeFilterFn func(node interface{}) bool
// The result of Executing a Query
// QueryResult is the result of Executing a Query.
type QueryResult struct {
items []interface{}
positions []Position
}
// appends a value/position pair to the result set
// appends a value/position pair to the result set.
func (r *QueryResult) appendResult(node interface{}, pos Position) {
r.items = append(r.items, node)
r.positions = append(r.positions, pos)
}
// Set of values within a QueryResult. The order of values is not guaranteed
// to be in document order, and may be different each time a query is executed.
func (r *QueryResult) Values() []interface{} {
return r.items
// Values is a set of values within a QueryResult. The order of values is not
// guaranteed to be in document order, and may be different each time a query is
// executed.
func (r QueryResult) Values() []interface{} {
values := make([]interface{}, len(r.items))
for i, v := range r.items {
o, ok := v.(*tomlValue)
if ok {
values[i] = o.value
} else {
values[i] = v
}
}
return values
}
// Set of positions for values within a QueryResult. Each index in Positions()
// corresponds to the entry in Value() of the same index.
func (r *QueryResult) Positions() []Position {
// Positions is a set of positions for values within a QueryResult. Each index
// in Positions() corresponds to the entry in Value() of the same index.
func (r QueryResult) Positions() []Position {
return r.positions
}
@@ -77,13 +88,13 @@ func (q *Query) appendPath(next pathFn) {
next.setNext(newTerminatingFn()) // init the next functor
}
// Compiles a TOML path expression. The returned Query can be used to match
// elements within a TomlTree and its descendants.
// CompileQuery compiles a TOML path expression. The returned Query can be used
// to match elements within a TomlTree and its descendants.
func CompileQuery(path string) (*Query, error) {
return parseQuery(lexQuery(path))
}
// Executes a query against a TomlTree, and returns the result of the query.
// Execute executes a query against a TomlTree, and returns the result of the query.
func (q *Query) Execute(tree *TomlTree) *QueryResult {
result := &QueryResult{
items: []interface{}{},
@@ -101,8 +112,8 @@ func (q *Query) Execute(tree *TomlTree) *QueryResult {
return result
}
// Sets a user-defined filter function. These may be used inside "?(..)" query
// expressions to filter TOML document elements within a query.
// SetFilter sets a user-defined filter function. These may be used inside
// "?(..)" query expressions to filter TOML document elements within a query.
func (q *Query) SetFilter(name string, fn NodeFilterFn) {
if q.filters == &defaultFilterFunctions {
// clone the static table
+70
View File
@@ -0,0 +1,70 @@
package toml
import (
"testing"
)
func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
if len(array) != len(objects) {
t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
}
for _, o := range objects {
found := false
for _, a := range array {
if a == o {
found = true
break
}
}
if !found {
t.Fatal(o, "not found in array", array)
}
}
}
func TestQueryExample(t *testing.T) {
config, _ := Load(`
[[book]]
title = "The Stand"
author = "Stephen King"
[[book]]
title = "For Whom the Bell Tolls"
author = "Ernest Hemmingway"
[[book]]
title = "Neuromancer"
author = "William Gibson"
`)
authors, _ := config.Query("$.book.author")
names := authors.Values()
if len(names) != 3 {
t.Fatalf("query should return 3 names but returned %d", len(names))
}
assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
}
func TestQueryReadmeExample(t *testing.T) {
config, _ := Load(`
[postgres]
user = "pelletier"
password = "mypassword"
`)
results, _ := config.Query("$..[user,password]")
values := results.Values()
if len(values) != 2 {
t.Fatalf("query should return 2 values but returned %d", len(values))
}
assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
}
func TestQueryPathNotPresent(t *testing.T) {
config, _ := Load(`a = "hello"`)
results, err := config.Query("$.foo.bar")
if err != nil {
t.Fatalf("err should be nil. got %s instead", err)
}
if len(results.items) != 0 {
t.Fatalf("no items should be matched. %d matched instead", len(results.items))
}
}
+18 -1
View File
@@ -105,7 +105,7 @@ func (l *queryLexer) peek() rune {
}
func (l *queryLexer) accept(valid string) bool {
if strings.IndexRune(valid, l.next()) >= 0 {
if strings.ContainsRune(valid, l.next()) {
return true
}
l.backup()
@@ -272,6 +272,23 @@ func (l *queryLexer) lexString() queryLexStateFn {
return l.errorf("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
} else if l.follow("\\U") {
l.pos += 2
code := ""
for i := 0; i < 8; i++ {
c := l.peek()
l.pos++
if !isHexDigit(c) {
return l.errorf("unfinished unicode escape")
}
code = code + string(c)
}
l.pos--
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return l.errorf("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
} else if l.follow("\\") {
l.pos++
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
+115 -34
View File
@@ -10,11 +10,13 @@ func testQLFlow(t *testing.T, input string, expectedFlow []token) {
token := <-ch
if token != expected {
t.Log("While testing #", idx, ":", input)
t.Log("compared (got)", token, "to (expected)", expected)
t.Log("\tvalue:", token.val, "<->", expected.val)
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
t.Log("\tline:", token.Line, "<->", expected.Line)
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
t.Log("compared", token, "to", expected)
t.Log(token.val, "<->", expected.val)
t.Log(token.typ, "<->", expected.typ)
t.Log(token.Line, "<->", expected.Line)
t.Log(token.Col, "<->", expected.Col)
t.FailNow()
}
}
@@ -34,64 +36,143 @@ func testQLFlow(t *testing.T, input string, expectedFlow []token) {
func TestLexSpecialChars(t *testing.T) {
testQLFlow(t, " .$[]..()?*", []token{
token{Position{1, 2}, tokenDot, "."},
token{Position{1, 3}, tokenDollar, "$"},
token{Position{1, 4}, tokenLeftBracket, "["},
token{Position{1, 5}, tokenRightBracket, "]"},
token{Position{1, 6}, tokenDotDot, ".."},
token{Position{1, 8}, tokenLeftParen, "("},
token{Position{1, 9}, tokenRightParen, ")"},
token{Position{1, 10}, tokenQuestion, "?"},
token{Position{1, 11}, tokenStar, "*"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 2}, tokenDot, "."},
{Position{1, 3}, tokenDollar, "$"},
{Position{1, 4}, tokenLeftBracket, "["},
{Position{1, 5}, tokenRightBracket, "]"},
{Position{1, 6}, tokenDotDot, ".."},
{Position{1, 8}, tokenLeftParen, "("},
{Position{1, 9}, tokenRightParen, ")"},
{Position{1, 10}, tokenQuestion, "?"},
{Position{1, 11}, tokenStar, "*"},
{Position{1, 12}, tokenEOF, ""},
})
}
func TestLexString(t *testing.T) {
testQLFlow(t, "'foo'", []token{
token{Position{1, 2}, tokenString, "foo"},
token{Position{1, 6}, tokenEOF, ""},
testQLFlow(t, "'foo\n'", []token{
{Position{1, 2}, tokenString, "foo\n"},
{Position{2, 2}, tokenEOF, ""},
})
}
func TestLexDoubleString(t *testing.T) {
testQLFlow(t, `"bar"`, []token{
token{Position{1, 2}, tokenString, "bar"},
token{Position{1, 6}, tokenEOF, ""},
{Position{1, 2}, tokenString, "bar"},
{Position{1, 6}, tokenEOF, ""},
})
}
func TestLexStringEscapes(t *testing.T) {
testQLFlow(t, `"foo \" \' \b \f \/ \t \r \\ \u03A9 \U00012345 \n bar"`, []token{
{Position{1, 2}, tokenString, "foo \" ' \b \f / \t \r \\ \u03A9 \U00012345 \n bar"},
{Position{1, 55}, tokenEOF, ""},
})
}
func TestLexStringUnfinishedUnicode4(t *testing.T) {
testQLFlow(t, `"\u000"`, []token{
{Position{1, 2}, tokenError, "unfinished unicode escape"},
})
}
func TestLexStringUnfinishedUnicode8(t *testing.T) {
testQLFlow(t, `"\U0000"`, []token{
{Position{1, 2}, tokenError, "unfinished unicode escape"},
})
}
func TestLexStringInvalidEscape(t *testing.T) {
testQLFlow(t, `"\x"`, []token{
{Position{1, 2}, tokenError, "invalid escape sequence: \\x"},
})
}
func TestLexStringUnfinished(t *testing.T) {
testQLFlow(t, `"bar`, []token{
{Position{1, 2}, tokenError, "unclosed string"},
})
}
func TestLexKey(t *testing.T) {
testQLFlow(t, "foo", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 4}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 4}, tokenEOF, ""},
})
}
func TestLexRecurse(t *testing.T) {
testQLFlow(t, "$..*", []token{
token{Position{1, 1}, tokenDollar, "$"},
token{Position{1, 2}, tokenDotDot, ".."},
token{Position{1, 4}, tokenStar, "*"},
token{Position{1, 5}, tokenEOF, ""},
{Position{1, 1}, tokenDollar, "$"},
{Position{1, 2}, tokenDotDot, ".."},
{Position{1, 4}, tokenStar, "*"},
{Position{1, 5}, tokenEOF, ""},
})
}
func TestLexBracketKey(t *testing.T) {
testQLFlow(t, "$[foo]", []token{
token{Position{1, 1}, tokenDollar, "$"},
token{Position{1, 2}, tokenLeftBracket, "["},
token{Position{1, 3}, tokenKey, "foo"},
token{Position{1, 6}, tokenRightBracket, "]"},
token{Position{1, 7}, tokenEOF, ""},
{Position{1, 1}, tokenDollar, "$"},
{Position{1, 2}, tokenLeftBracket, "["},
{Position{1, 3}, tokenKey, "foo"},
{Position{1, 6}, tokenRightBracket, "]"},
{Position{1, 7}, tokenEOF, ""},
})
}
func TestLexSpace(t *testing.T) {
testQLFlow(t, "foo bar baz", []token{
token{Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenKey, "bar"},
token{Position{1, 9}, tokenKey, "baz"},
token{Position{1, 12}, tokenEOF, ""},
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenKey, "bar"},
{Position{1, 9}, tokenKey, "baz"},
{Position{1, 12}, tokenEOF, ""},
})
}
func TestLexInteger(t *testing.T) {
testQLFlow(t, "100 +200 -300", []token{
{Position{1, 1}, tokenInteger, "100"},
{Position{1, 5}, tokenInteger, "+200"},
{Position{1, 10}, tokenInteger, "-300"},
{Position{1, 14}, tokenEOF, ""},
})
}
func TestLexFloat(t *testing.T) {
testQLFlow(t, "100.0 +200.0 -300.0", []token{
{Position{1, 1}, tokenFloat, "100.0"},
{Position{1, 7}, tokenFloat, "+200.0"},
{Position{1, 14}, tokenFloat, "-300.0"},
{Position{1, 20}, tokenEOF, ""},
})
}
func TestLexFloatWithMultipleDots(t *testing.T) {
testQLFlow(t, "4.2.", []token{
{Position{1, 1}, tokenError, "cannot have two dots in one float"},
})
}
func TestLexFloatLeadingDot(t *testing.T) {
testQLFlow(t, "+.1", []token{
{Position{1, 1}, tokenError, "cannot start float with a dot"},
})
}
func TestLexFloatWithTrailingDot(t *testing.T) {
testQLFlow(t, "42.", []token{
{Position{1, 1}, tokenError, "float cannot end with a dot"},
})
}
func TestLexNumberWithoutDigit(t *testing.T) {
testQLFlow(t, "+", []token{
{Position{1, 1}, tokenError, "no digit in that number"},
})
}
func TestLexUnknown(t *testing.T) {
testQLFlow(t, "^", []token{
{Position{1, 1}, tokenError, "unexpected char: '94'"},
})
}
+2 -2
View File
@@ -11,7 +11,7 @@ import (
"fmt"
)
const MaxInt = int(^uint(0) >> 1)
const maxInt = int(^uint(0) >> 1)
type queryParser struct {
flow chan token
@@ -203,7 +203,7 @@ loop: // labeled loop for easy breaking
func (p *queryParser) parseSliceExpr() queryParserStateFn {
// init slice to grab all elements
start, end, step := 0, MaxInt, 1
start, end, step := 0, maxInt, 1
// parse optional start
tok := p.getToken()
+7 -2
View File
@@ -19,7 +19,11 @@ function git_clone() {
popd
}
# Run go vet
go vet ./...
go get github.com/pelletier/go-buffruneio
go get github.com/davecgh/go-spew/spew
# get code for BurntSushi TOML validation
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)
@@ -33,11 +37,12 @@ go build -o toml-test github.com/BurntSushi/toml-test
# NOTE: this basically mocks an install without having to go back out to github for code
mkdir -p src/github.com/pelletier/go-toml/cmd
cp *.go *.toml src/github.com/pelletier/go-toml
cp cmd/*.go src/github.com/pelletier/go-toml/cmd
cp -R cmd/* src/github.com/pelletier/go-toml/cmd
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go
# Run basic unit tests
go test -v github.com/pelletier/go-toml
go test github.com/pelletier/go-toml -v -covermode=count -coverprofile=coverage.out
go test github.com/pelletier/go-toml/cmd/tomljson
# run the entire BurntSushi test suite
if [[ $# -eq 0 ]] ; then
-3
View File
@@ -107,9 +107,6 @@ func (t token) String() string {
return t.val
}
if len(t.val) > 10 {
return fmt.Sprintf("%.10q...", t.val)
}
return fmt.Sprintf("%q", t.val)
}
+67
View File
@@ -0,0 +1,67 @@
package toml
import "testing"
func TestTokenStringer(t *testing.T) {
var tests = []struct {
tt tokenType
expect string
}{
{tokenError, "Error"},
{tokenEOF, "EOF"},
{tokenComment, "Comment"},
{tokenKey, "Key"},
{tokenString, "String"},
{tokenInteger, "Integer"},
{tokenTrue, "True"},
{tokenFalse, "False"},
{tokenFloat, "Float"},
{tokenEqual, "="},
{tokenLeftBracket, "["},
{tokenRightBracket, "]"},
{tokenLeftCurlyBrace, "{"},
{tokenRightCurlyBrace, "}"},
{tokenLeftParen, "("},
{tokenRightParen, ")"},
{tokenDoubleLeftBracket, "]]"},
{tokenDoubleRightBracket, "[["},
{tokenDate, "Date"},
{tokenKeyGroup, "KeyGroup"},
{tokenKeyGroupArray, "KeyGroupArray"},
{tokenComma, ","},
{tokenColon, ":"},
{tokenDollar, "$"},
{tokenStar, "*"},
{tokenQuestion, "?"},
{tokenDot, "."},
{tokenDotDot, ".."},
{tokenEOL, "EOL"},
{tokenEOL + 1, "Unknown"},
}
for i, test := range tests {
got := test.tt.String()
if got != test.expect {
t.Errorf("[%d] invalid string of token type; got %q, expected %q", i, got, test.expect)
}
}
}
func TestTokenString(t *testing.T) {
var tests = []struct {
tok token
expect string
}{
{token{Position{1, 1}, tokenEOF, ""}, "EOF"},
{token{Position{1, 1}, tokenError, "Δt"}, "Δt"},
{token{Position{1, 1}, tokenString, "bar"}, `"bar"`},
{token{Position{1, 1}, tokenString, "123456789012345"}, `"123456789012345"`},
}
for i, test := range tests {
got := test.tok.String()
if got != test.expect {
t.Errorf("[%d] invalid of string token; got %q, expected %q", i, got, test.expect)
}
}
}
+8 -117
View File
@@ -6,19 +6,17 @@ import (
"io"
"os"
"runtime"
"strconv"
"strings"
"time"
)
type tomlValue struct {
value interface{}
value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list
position Position
}
// TomlTree is the result of the parsing of a TOML file.
type TomlTree struct {
values map[string]interface{}
values map[string]interface{} // string -> *tomlValue, *TomlTree, []*TomlTree
position Position
}
@@ -29,6 +27,7 @@ func newTomlTree() *TomlTree {
}
}
// TreeFromMap initializes a new TomlTree object using the given map.
func TreeFromMap(m map[string]interface{}) *TomlTree {
return &TomlTree{
values: m,
@@ -95,7 +94,7 @@ func (t *TomlTree) GetPath(keys []string) interface{} {
}
subtree = node[len(node)-1]
default:
return nil // cannot naigate through other node types
return nil // cannot navigate through other node types
}
}
// branch based on final node type
@@ -223,9 +222,6 @@ func (t *TomlTree) SetPath(keys []string, value interface{}) {
func (t *TomlTree) createSubTree(keys []string, pos Position) error {
subtree := t
for _, intermediateKey := range keys {
if intermediateKey == "" {
return fmt.Errorf("empty intermediate table")
}
nextTree, exists := subtree.values[intermediateKey]
if !exists {
tree := newTomlTree()
@@ -247,118 +243,13 @@ func (t *TomlTree) createSubTree(keys []string, pos Position) error {
return nil
}
// encodes a string to a TOML-compliant string value
func encodeTomlString(value string) string {
result := ""
for _, rr := range value {
intRr := uint16(rr)
switch rr {
case '\b':
result += "\\b"
case '\t':
result += "\\t"
case '\n':
result += "\\n"
case '\f':
result += "\\f"
case '\r':
result += "\\r"
case '"':
result += "\\\""
case '\\':
result += "\\\\"
default:
if intRr < 0x001F {
result += fmt.Sprintf("\\u%0.4X", intRr)
} else {
result += string(rr)
}
}
}
return result
}
// Value print support function for ToString()
// Outputs the TOML compliant string representation of a value
func toTomlValue(item interface{}, indent int) string {
tab := strings.Repeat(" ", indent)
switch value := item.(type) {
case int64:
return tab + strconv.FormatInt(value, 10)
case float64:
return tab + strconv.FormatFloat(value, 'f', -1, 64)
case string:
return tab + "\"" + encodeTomlString(value) + "\""
case bool:
if value {
return "true"
}
return "false"
case time.Time:
return tab + value.Format(time.RFC3339)
case []interface{}:
result := tab + "[\n"
for _, item := range value {
result += toTomlValue(item, indent+2) + ",\n"
}
return result + tab + "]"
default:
panic(fmt.Sprintf("unsupported value type: %v", value))
}
}
// Recursive support function for ToString()
// Outputs a tree, using the provided keyspace to prefix group names
func (t *TomlTree) toToml(indent, keyspace string) string {
result := ""
for k, v := range t.values {
// figure out the keyspace
combinedKey := k
if keyspace != "" {
combinedKey = keyspace + "." + combinedKey
}
// output based on type
switch node := v.(type) {
case []*TomlTree:
for _, item := range node {
if len(item.Keys()) > 0 {
result += fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
}
result += item.toToml(indent+" ", combinedKey)
}
case *TomlTree:
if len(node.Keys()) > 0 {
result += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
result += node.toToml(indent+" ", combinedKey)
case map[string]interface{}:
sub := TreeFromMap(node)
if len(sub.Keys()) > 0 {
result += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
result += sub.toToml(indent+" ", combinedKey)
case *tomlValue:
result += fmt.Sprintf("%s%s = %s\n", indent, k, toTomlValue(node.value, 0))
default:
result += fmt.Sprintf("%s%s = %s\n", indent, k, toTomlValue(v, 0))
}
}
return result
}
// Query compiles and executes a query on a tree and returns the query result.
func (t *TomlTree) Query(query string) (*QueryResult, error) {
if q, err := CompileQuery(query); err != nil {
q, err := CompileQuery(query)
if err != nil {
return nil, err
} else {
return q.Execute(t), nil
}
}
// ToString generates a human-readable representation of the current tree.
// Output spans multiple lines, and is suitable for ingest by a TOML parser
func (t *TomlTree) ToString() string {
return t.toToml("", "")
return q.Execute(t), nil
}
// LoadReader creates a TomlTree from any io.Reader.
+54
View File
@@ -15,6 +15,47 @@ func TestTomlHas(t *testing.T) {
if !tree.Has("test.key") {
t.Errorf("Has - expected test.key to exists")
}
if tree.Has("") {
t.Errorf("Should return false if the key is not provided")
}
}
func TestTomlGet(t *testing.T) {
tree, _ := Load(`
[test]
key = "value"
`)
if tree.Get("") != tree {
t.Errorf("Get should return the tree itself when given an empty path")
}
if tree.Get("test.key") != "value" {
t.Errorf("Get should return the value")
}
if tree.Get(`\`) != nil {
t.Errorf("should return nil when the key is malformed")
}
}
func TestTomlGetDefault(t *testing.T) {
tree, _ := Load(`
[test]
key = "value"
`)
if tree.GetDefault("", "hello") != tree {
t.Error("GetDefault should return the tree itself when given an empty path")
}
if tree.GetDefault("test.key", "hello") != "value" {
t.Error("Get should return the value")
}
if tree.GetDefault("whatever", "hello") != "hello" {
t.Error("GetDefault should return the default value if the key does not exist")
}
}
func TestTomlHasPath(t *testing.T) {
@@ -46,6 +87,11 @@ func TestTomlGetPath(t *testing.T) {
t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result)
}
}
tree, _ := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
if tree.GetPath([]string{"whatever"}) != nil {
t.Error("GetPath should return nil when the key does not exist")
}
}
func TestTomlQuery(t *testing.T) {
@@ -72,3 +118,11 @@ func TestTomlQuery(t *testing.T) {
t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
}
}
func TestTomlFromMap(t *testing.T) {
simpleMap := map[string]interface{}{"hello": 42}
tree := TreeFromMap(simpleMap)
if tree.Get("hello") != 42 {
t.Fatal("hello should be 42, not", tree.Get("hello"))
}
}
+212
View File
@@ -0,0 +1,212 @@
package toml
import (
"bytes"
"fmt"
"io"
"sort"
"strconv"
"strings"
"time"
)
// encodeTomlString encodes a string to a TOML-compliant basic string value,
// escaping backslash, double quote, and the control characters
// U+0000..U+001F required by the TOML spec.
func encodeTomlString(value string) string {
	// Build through a buffer: the previous += concatenation was quadratic.
	var b bytes.Buffer
	for _, rr := range value {
		switch rr {
		case '\b':
			b.WriteString(`\b`)
		case '\t':
			b.WriteString(`\t`)
		case '\n':
			b.WriteString(`\n`)
		case '\f':
			b.WriteString(`\f`)
		case '\r':
			b.WriteString(`\r`)
		case '"':
			b.WriteString(`\"`)
		case '\\':
			b.WriteString(`\\`)
		default:
			// Compare the rune directly: truncating to uint16 mangled
			// supplementary-plane runes (e.g. U+1001E), and `< 0x1F`
			// left U+001F unescaped even though TOML requires escaping
			// every control character U+0000..U+001F.
			if rr < 0x20 {
				b.WriteString(fmt.Sprintf("\\u%0.4X", rr))
			} else {
				b.WriteRune(rr)
			}
		}
	}
	return b.String()
}
// tomlValueStringRepresentation returns the TOML string representation of a
// simple value (uint64, int64, float64, string, bool, time.Time, nil, or a
// []interface{} of those), or an error for any other type.
func tomlValueStringRepresentation(v interface{}) (string, error) {
	switch value := v.(type) {
	case uint64:
		return strconv.FormatUint(value, 10), nil
	case int64:
		return strconv.FormatInt(value, 10), nil
	case float64:
		// bitSize must be 64: formatting a float64 with bitSize 32 rounds
		// the value to float32 precision and corrupts the output.
		return strconv.FormatFloat(value, 'f', -1, 64), nil
	case string:
		return "\"" + encodeTomlString(value) + "\"", nil
	case bool:
		if value {
			return "true", nil
		}
		return "false", nil
	case time.Time:
		return value.Format(time.RFC3339), nil
	case nil:
		// nil renders as an empty string; callers decide whether that is valid.
		return "", nil
	case []interface{}:
		// Recurse over the elements; the first unsupported element aborts.
		values := make([]string, 0, len(value))
		for _, item := range value {
			itemRepr, err := tomlValueStringRepresentation(item)
			if err != nil {
				return "", err
			}
			values = append(values, itemRepr)
		}
		return "[" + strings.Join(values, ",") + "]", nil
	default:
		return "", fmt.Errorf("unsupported value type %T: %v", value, value)
	}
}
// writeTo recursively serializes the tree as TOML into w.
//
// Simple values are emitted before sub-tables / arrays of sub-tables so a
// value line can never be mis-attributed to a preceding table header; each
// group is sorted by key for deterministic output (map iteration order is
// random). bytesCount is the running total of bytes written so far and is
// threaded through recursive calls; the returned count is valid even when
// an error is returned alongside it.
func (t *TomlTree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (int64, error) {
	// Partition keys into non-table values vs. sub-trees / table arrays.
	simpleValuesKeys := make([]string, 0)
	complexValuesKeys := make([]string, 0)
	for k := range t.values {
		v := t.values[k]
		switch v.(type) {
		case *TomlTree, []*TomlTree:
			complexValuesKeys = append(complexValuesKeys, k)
		default:
			simpleValuesKeys = append(simpleValuesKeys, k)
		}
	}
	sort.Strings(simpleValuesKeys)
	sort.Strings(complexValuesKeys)
	for _, k := range simpleValuesKeys {
		// Every simple entry must be wrapped in *tomlValue by the parser.
		v, ok := t.values[k].(*tomlValue)
		if !ok {
			return bytesCount, fmt.Errorf("invalid key type at %s: %T", k, t.values[k])
		}
		repr, err := tomlValueStringRepresentation(v.value)
		if err != nil {
			return bytesCount, err
		}
		kvRepr := fmt.Sprintf("%s%s = %s\n", indent, k, repr)
		writtenBytesCount, err := w.Write([]byte(kvRepr))
		// Count what was actually written even when the write failed.
		bytesCount += int64(writtenBytesCount)
		if err != nil {
			return bytesCount, err
		}
	}
	for _, k := range complexValuesKeys {
		v := t.values[k]
		// Qualify the key with the enclosing table's name, e.g. "a.b.c".
		combinedKey := k
		if keyspace != "" {
			combinedKey = keyspace + "." + combinedKey
		}
		switch node := v.(type) {
		// node has to be of those two types given how keys are sorted above
		case *TomlTree:
			tableName := fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
			writtenBytesCount, err := w.Write([]byte(tableName))
			bytesCount += int64(writtenBytesCount)
			if err != nil {
				return bytesCount, err
			}
			bytesCount, err = node.writeTo(w, indent+" ", combinedKey, bytesCount)
			if err != nil {
				return bytesCount, err
			}
		case []*TomlTree:
			for _, subTree := range node {
				// Empty elements are skipped entirely — presumably to avoid
				// emitting a bare [[table]] header; confirm against the parser.
				if len(subTree.values) > 0 {
					tableArrayName := fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
					writtenBytesCount, err := w.Write([]byte(tableArrayName))
					bytesCount += int64(writtenBytesCount)
					if err != nil {
						return bytesCount, err
					}
					bytesCount, err = subTree.writeTo(w, indent+" ", combinedKey, bytesCount)
					if err != nil {
						return bytesCount, err
					}
				}
			}
		}
	}
	return bytesCount, nil
}
// WriteTo encodes the TomlTree as TOML and writes it to the writer w.
// Returns the number of bytes written in case of success, or an error if anything happened.
func (t *TomlTree) WriteTo(w io.Writer) (int64, error) {
	return t.writeTo(w, "", "", 0)
}
// ToTomlString generates a human-readable TOML representation of the
// current tree. The output spans multiple lines and is suitable for ingest
// by a TOML parser. A non-nil error is returned when the tree cannot be
// serialized, in which case the string is empty.
func (t *TomlTree) ToTomlString() (string, error) {
	buf := new(bytes.Buffer)
	if _, err := t.WriteTo(buf); err != nil {
		return "", err
	}
	return buf.String(), nil
}
// String generates a human-readable TOML representation of the current tree.
// Alias of ToTomlString. Present to implement the fmt.Stringer interface.
// Any serialization error is discarded, yielding an empty string.
func (t *TomlTree) String() string {
	result, _ := t.ToTomlString()
	return result
}
// ToMap recursively converts the tree into plain Go data structures.
// The following types are used:
// * uint64
// * int64
// * bool
// * string
// * time.Time
// * map[string]interface{} (where interface{} is any of this list)
// * []interface{} (where interface{} is any of this list)
func (t *TomlTree) ToMap() map[string]interface{} {
	out := map[string]interface{}{}
	for key, entry := range t.values {
		switch typed := entry.(type) {
		case *tomlValue:
			out[key] = typed.value
		case *TomlTree:
			out[key] = typed.ToMap()
		case map[string]interface{}:
			out[key] = TreeFromMap(typed).ToMap()
		case []*TomlTree:
			var converted []interface{}
			for _, subTree := range typed {
				converted = append(converted, subTree.ToMap())
			}
			out[key] = converted
		}
	}
	return out
}
+284
View File
@@ -0,0 +1,284 @@
package toml
import (
"bytes"
"errors"
"fmt"
"reflect"
"strings"
"testing"
"time"
)
// failingWriter is an io.Writer test double that accepts bytes up to failAt
// and then returns an error from Write.
type failingWriter struct {
	failAt  int          // byte offset at which Write starts failing
	written int          // bytes accounted for so far (see Write's value receiver)
	buffer  bytes.Buffer // collects the bytes accepted before the failure
}
// Write implements io.Writer: it accepts the slice while the failAt budget
// allows, otherwise writes a prefix and returns an error.
//
// NOTE(review): the value receiver means the updates to f.written and
// f.buffer below do not persist across calls — every Write sees the struct
// as constructed. The failAt offsets asserted in this file are tuned to
// that behavior, so switching to a pointer receiver would change which
// write fails; confirm before "fixing".
func (f failingWriter) Write(p []byte) (n int, err error) {
	count := len(p)
	toWrite := f.failAt - count + f.written
	if toWrite < 0 {
		toWrite = 0
	}
	if toWrite > count {
		// Whole slice fits under the failure threshold.
		f.written += count
		f.buffer.WriteString(string(p))
		return count, nil
	}
	// Partial write up to the failure point, then fail. The "writting"
	// typo is asserted verbatim by the tests below — do not correct it here.
	f.buffer.WriteString(string(p[:toWrite]))
	f.written = f.failAt
	return f.written, fmt.Errorf("failingWriter failed after writting %d bytes", f.written)
}
func assertErrorString(t *testing.T, expected string, err error) {
expectedErr := errors.New(expected)
if err.Error() != expectedErr.Error() {
t.Errorf("expecting error %s, but got %s instead", expected, err)
}
}
// TestTomlTreeWriteToTomlString checks that a tree built from inline tables
// round-trips: serialize with ToTomlString, re-parse, compare trees.
func TestTomlTreeWriteToTomlString(t *testing.T) {
	toml, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
points = { x = 1, y = 2 }`)
	if err != nil {
		t.Fatal("Unexpected error:", err)
	}
	// Serialization error deliberately ignored: a broken string would make
	// the re-parse below fail anyway.
	tomlString, _ := toml.ToTomlString()
	reparsedTree, err := Load(tomlString)
	assertTree(t, reparsedTree, err, map[string]interface{}{
		"name": map[string]interface{}{
			"first": "Tom",
			"last":  "Preston-Werner",
		},
		"points": map[string]interface{}{
			"x": int64(1),
			"y": int64(2),
		},
	})
}
// TestTomlTreeWriteToTomlStringSimple pins the exact serialized form of a
// table containing an array of tables, including blank lines and the
// per-level indentation produced by writeTo.
func TestTomlTreeWriteToTomlStringSimple(t *testing.T) {
	tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
	if err != nil {
		t.Errorf("Test failed to parse: %v", err)
		return
	}
	result, err := tree.ToTomlString()
	if err != nil {
		t.Errorf("Unexpected error: %s", err)
	}
	expected := "\n[foo]\n\n [[foo.bar]]\n a = 42\n\n [[foo.bar]]\n a = 69\n"
	if result != expected {
		// Note: the message's arguments are (actual, expected).
		t.Errorf("Expected got '%s', expected '%s'", result, expected)
	}
}
// TestTomlTreeWriteToTomlStringKeysOrders serializes and re-parses the same
// tree 100 times. Go randomizes map iteration order, so the repetition
// exercises the writer's key sorting and checks the output is always a
// parseable, equivalent document.
func TestTomlTreeWriteToTomlStringKeysOrders(t *testing.T) {
	for i := 0; i < 100; i++ {
		tree, _ := Load(`
foobar = true
bar = "baz"
foo = 1
[qux]
foo = 1
bar = "baz2"`)
		stringRepr, _ := tree.ToTomlString()
		t.Log("Intermediate string representation:")
		t.Log(stringRepr)
		r := strings.NewReader(stringRepr)
		toml, err := LoadReader(r)
		if err != nil {
			t.Fatal("Unexpected error:", err)
		}
		assertTree(t, toml, err, map[string]interface{}{
			"foobar": true,
			"bar":    "baz",
			"foo":    1,
			"qux": map[string]interface{}{
				"foo": 1,
				"bar": "baz2",
			},
		})
	}
}
// testMaps fails the test when actual and expected differ (deep comparison).
func testMaps(t *testing.T, actual, expected map[string]interface{}) {
	if reflect.DeepEqual(actual, expected) {
		return
	}
	t.Fatal("trees aren't equal.\n", "Expected:\n", expected, "\nActual:\n", actual)
}
// TestToTomlStringTypeConversionError checks that serializing a tree whose
// *tomlValue wraps an unsupported type ([]string here) reports a
// descriptive error.
func TestToTomlStringTypeConversionError(t *testing.T) {
	tree := TomlTree{
		values: map[string]interface{}{
			"thing": &tomlValue{[]string{"unsupported"}, Position{}},
		},
	}
	_, err := tree.ToTomlString()
	// Use the shared helper instead of duplicating its comparison inline,
	// for consistency with the other error tests in this file.
	assertErrorString(t, "unsupported value type []string: [unsupported]", err)
}
// TestTomlTreeWriteToMapSimple checks ToMap on a flat tree of two integers.
func TestTomlTreeWriteToMapSimple(t *testing.T) {
	tree, _ := Load("a = 42\nb = 17")
	want := map[string]interface{}{
		"a": int64(42),
		"b": int64(17),
	}
	testMaps(t, tree.ToMap(), want)
}
// TestTomlTreeWriteToInvalidTreeSimpleValue checks the error reported when a
// tree entry is a bare value not wrapped in *tomlValue.
func TestTomlTreeWriteToInvalidTreeSimpleValue(t *testing.T) {
	badTree := TomlTree{values: map[string]interface{}{"foo": int8(1)}}
	_, err := badTree.ToTomlString()
	assertErrorString(t, "invalid key type at foo: int8", err)
}
// TestTomlTreeWriteToInvalidTreeTomlValue checks the error reported when a
// *tomlValue wraps a type the writer does not support.
func TestTomlTreeWriteToInvalidTreeTomlValue(t *testing.T) {
	badTree := TomlTree{values: map[string]interface{}{"foo": &tomlValue{int8(1), Position{}}}}
	_, err := badTree.ToTomlString()
	assertErrorString(t, "unsupported value type int8: 1", err)
}
// TestTomlTreeWriteToInvalidTreeTomlValueArray checks that the
// unsupported-type error also surfaces from inside an array value.
func TestTomlTreeWriteToInvalidTreeTomlValueArray(t *testing.T) {
	badTree := TomlTree{values: map[string]interface{}{"foo": &tomlValue{[]interface{}{int8(1)}, Position{}}}}
	_, err := badTree.ToTomlString()
	assertErrorString(t, "unsupported value type int8: 1", err)
}
// TestTomlTreeWriteToFailingWriterInSimpleValue checks that WriteTo surfaces
// a writer error raised while emitting a simple key/value pair.
func TestTomlTreeWriteToFailingWriterInSimpleValue(t *testing.T) {
	tree, _ := Load(`a = 2`)
	w := failingWriter{failAt: 0, written: 0}
	_, err := tree.WriteTo(w)
	assertErrorString(t, "failingWriter failed after writting 0 bytes", err)
}
// TestTomlTreeWriteToFailingWriterInTable checks that WriteTo reports writer
// errors raised while emitting a table header (failAt 2) and a key/value
// line (failAt 13). The offsets depend on failingWriter.Write's
// value-receiver accounting — TODO confirm them if that receiver changes.
func TestTomlTreeWriteToFailingWriterInTable(t *testing.T) {
	toml, _ := Load(`
[b]
a = 2`)
	writer := failingWriter{failAt: 2, written: 0}
	_, err := toml.WriteTo(writer)
	assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
	writer = failingWriter{failAt: 13, written: 0}
	_, err = toml.WriteTo(writer)
	assertErrorString(t, "failingWriter failed after writting 13 bytes", err)
}
// TestTomlTreeWriteToFailingWriterInArray is the table-array counterpart of
// the test above: errors on the [[b]] header (failAt 2) and on the value
// line (failAt 15). Offsets are tied to failingWriter.Write's value-receiver
// accounting — TODO confirm them if that receiver changes.
func TestTomlTreeWriteToFailingWriterInArray(t *testing.T) {
	toml, _ := Load(`
[[b]]
a = 2`)
	writer := failingWriter{failAt: 2, written: 0}
	_, err := toml.WriteTo(writer)
	assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
	writer = failingWriter{failAt: 15, written: 0}
	_, err = toml.WriteTo(writer)
	assertErrorString(t, "failingWriter failed after writting 15 bytes", err)
}
// TestTomlTreeWriteToMapExampleFile converts the example.toml fixture to a
// nested Go map and compares it against the full expected structure.
// Requires example.toml to be present in the working directory; the Load
// error is ignored, so a missing file shows up as a map mismatch.
func TestTomlTreeWriteToMapExampleFile(t *testing.T) {
	tree, _ := LoadFile("example.toml")
	expected := map[string]interface{}{
		"title": "TOML Example",
		"owner": map[string]interface{}{
			"name":         "Tom Preston-Werner",
			"organization": "GitHub",
			"bio":          "GitHub Cofounder & CEO\nLikes tater tots and beer.",
			"dob":          time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
		},
		"database": map[string]interface{}{
			"server":         "192.168.1.1",
			"ports":          []interface{}{int64(8001), int64(8001), int64(8002)},
			"connection_max": int64(5000),
			"enabled":        true,
		},
		"servers": map[string]interface{}{
			"alpha": map[string]interface{}{
				"ip": "10.0.0.1",
				"dc": "eqdc10",
			},
			"beta": map[string]interface{}{
				"ip": "10.0.0.2",
				"dc": "eqdc10",
			},
		},
		"clients": map[string]interface{}{
			"data": []interface{}{
				[]interface{}{"gamma", "delta"},
				[]interface{}{int64(1), int64(2)},
			},
		},
	}
	testMaps(t, tree.ToMap(), expected)
}
// TestTomlTreeWriteToMapWithTablesInMultipleChunks checks that repeated
// [[menu.main]] table-array entries become a []interface{} of maps in ToMap
// output, one element per chunk.
func TestTomlTreeWriteToMapWithTablesInMultipleChunks(t *testing.T) {
	tree, _ := Load(`
[[menu.main]]
a = "menu 1"
b = "menu 2"
[[menu.main]]
c = "menu 3"
d = "menu 4"`)
	expected := map[string]interface{}{
		"menu": map[string]interface{}{
			"main": []interface{}{
				map[string]interface{}{"a": "menu 1", "b": "menu 2"},
				map[string]interface{}{"c": "menu 3", "d": "menu 4"},
			},
		},
	}
	treeMap := tree.ToMap()
	testMaps(t, treeMap, expected)
}
// TestTomlTreeWriteToMapWithArrayOfInlineTables checks that an array of
// inline tables converts to a []interface{} of maps in ToMap output.
func TestTomlTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
	tree, _ := Load(`
[params]
language_tabs = [
{ key = "shell", name = "Shell" },
{ key = "ruby", name = "Ruby" },
{ key = "python", name = "Python" }
]`)
	expected := map[string]interface{}{
		"params": map[string]interface{}{
			"language_tabs": []interface{}{
				map[string]interface{}{
					"key":  "shell",
					"name": "Shell",
				},
				map[string]interface{}{
					"key":  "ruby",
					"name": "Ruby",
				},
				map[string]interface{}{
					"key":  "python",
					"name": "Python",
				},
			},
		},
	}
	treeMap := tree.ToMap()
	testMaps(t, treeMap, expected)
}