Compare commits

...

100 Commits

Author SHA1 Message Date
Thomas Pelletier d464759235 Bump test go patchlevels (#113)
* 1.6.4
* 1.7.4
2016-12-02 11:42:58 +01:00
Thomas Pelletier 7cb988051d Make values come before tables in ToString output (#111)
If no order on the key is enforced in ToString, the following tree:

foo = 1
bar = "baz"
foobar = true
[qux]
  foo = 1
  bar = "baz"

may come out as:

bar = "baz"
foobar = true
[qux]
  foo = 1
  bar = "baz"
foo = 1

which is incorrect, since putting that back to the parser would panic
because of a duplicated key (qux.foo). Those changes make sure that
leaf values come before tables in the ToString output.
2016-11-23 16:24:52 +01:00
Thomas Pelletier 3ddb37c944 Fix []*Toml.Tree being wrapped in *Toml.Value (#110)
Nodes can be either *Toml.Tree, []*Toml.Tree, or *Toml.Value.
Arrays of trees were incorrectly wrapped in a *Toml.Value,
making the conversion functions think they were leaf nodes.
2016-11-23 15:48:39 +01:00
Thomas Pelletier f7f14983c3 Update travis to go1.7.3 (#109) 2016-11-23 15:21:57 +01:00
Cameron Moore 45932ad32d Handle nil, map[string]string, and map[interface{}]interface{} input (#103)
* Handle map[string]string and map[interface{}]interface{} input
* Handle nil values

Fixes #99
2016-09-20 09:07:15 +02:00
Cameron Moore 67b7b944a8 Support all numeric type conversions (#102)
Fixes #101
2016-09-20 09:04:39 +02:00
Thomas Pelletier 31055c2ff0 Allow empty quoted keys (#97) 2016-09-06 22:25:57 +02:00
Cameron Moore 5a62685873 Add license and Go Report Card badges to README (#93) 2016-08-23 09:47:07 +02:00
Cameron Moore d05a14897c Fix typo in comment (#94) 2016-08-23 09:46:25 +02:00
Cameron Moore 0599275eb9 Simplify redundant types in literals (#95)
Using `gofmt -s`
2016-08-23 09:45:54 +02:00
Cameron Moore 0049ab3dc4 Update Travis build (#89)
* Test with the latest releases.
* Allow tip to fail.
2016-08-22 14:27:12 +02:00
Cameron Moore bfe4a7e160 Fix gofmt and golint issues (#90) 2016-08-22 11:20:25 +02:00
Thomas Pelletier e6271032cc Move license to LICENSE file (#91) 2016-08-22 11:17:53 +02:00
Cameron Moore 887411a2a8 Add \U support to query lexer (#88) 2016-08-22 10:55:12 +02:00
Thomas Pelletier 31c735e72c Test with go 1.7. Stop testing with 1.4 (#87) 2016-08-16 14:03:31 +02:00
Thomas Pelletier 06484b677b Fix ToMap conversion of array of tables (#83) 2016-08-15 21:00:14 +02:00
Thomas Pelletier de2e921d55 TOML to JSON cli tool (#85)
* Implement tomljson
* Add note about tools in README
2016-08-14 13:50:18 +02:00
Thomas Pelletier 7f292800de Target latest Go patch level in Travis (#80) 2016-07-25 09:41:11 +02:00
Sam Broughton 923742e542 Fix String() comment (#79) 2016-07-22 09:53:40 +02:00
Sam Broughton 65ad89c1a7 Pointer cleanup (#78)
Remove unnecessary pointer receivers for Position and QueryResult
2016-07-21 16:42:51 +02:00
Thomas Pelletier 64ff1ea4d5 Don't hang when reading an invalid rvalue (#77)
Fixes #76
2016-06-30 16:21:25 +02:00
Sam Broughton b39f6ef1f9 Add a toml linter (#74)
* Add a toml linter

* Use if/else instead of os.Exit(0)

* Add usage warning about destructive changes
2016-06-06 12:29:13 +02:00
Sam Broughton c187221f01 Implement fmt.Stringer and alias ToString (#73) 2016-06-06 10:23:55 +02:00
Thomas Pelletier 8e6ab94eec Fix inline tables parsing
Inline tables were wrapped inside a TomlValue, although they should
just be part of the tree.
2016-04-22 17:38:16 +02:00
Thomas Pelletier 8d9c606c69 Improve test coverage (#66) 2016-04-22 14:26:15 +02:00
Thomas Pelletier 288bc57940 Better logging for parser tests (#65)
* Better logging for parser tests

* Add spew to tests deps list
2016-04-22 11:01:31 +02:00
Thomas Pelletier e3b2497729 TomlTree.ToMap (#59)
* Extract TomlTree conversion to its own file

* Implement ToMap

* Reorder imports in tomltree_conversions
2016-04-22 09:46:28 +02:00
Thomas Pelletier 1a8565204c Fix multiline strings (#62) 2016-04-21 17:47:41 +02:00
Thomas Pelletier e58cfd32d4 Bump to golang 1.6.2 on Travis 2016-04-21 09:22:47 +02:00
Cameron Moore a2ae216b47 Add more token tests (#58) 2016-04-19 09:43:26 +02:00
Thomas Pelletier 8645be8dc7 Merge pull request #57 from moorereason/simplify
Fix a couple issues found by gosimple
2016-04-19 09:41:51 +02:00
Cameron Moore 99b9371c53 Use strings.ContainsRune instead of IndexRune 2016-04-18 17:14:57 -05:00
Cameron Moore 92c565e02b Use literal string for regexp pattern 2016-04-18 17:14:18 -05:00
Cameron Moore 6e26017b00 Clean up lint (#56)
The only real change in this commit is that MaxInt is made private.
Everything else should be gofmt'ing, docs and cleanup of lint.
2016-04-18 16:58:23 +02:00
Thomas Pelletier 9d93af61de Add couple tests 2016-04-18 16:46:44 +02:00
Thomas Pelletier 4d8fb95ffe Update coveralls badge 2016-04-18 10:02:19 +02:00
Thomas Pelletier 0e41db2176 Update documentation for Query
Fix #54
2016-04-18 09:51:42 +02:00
Thomas Pelletier afca7f3334 Hardcode Go versions in .travis.yml 2016-04-13 09:23:15 +02:00
Thomas Pelletier d6a90e60ed Fix #52: query matcher doesn't handle arrays tables
Also improve coverage of query matcher.
2016-03-16 09:56:04 -07:00
Thomas Pelletier fe63e9f76d Run tests for 1.6 2016-02-20 13:29:42 +01:00
Thomas Pelletier 7f50e4c339 Merge pull request #51 from pelletier/pelletier/fix-crlf-support
Fix support for CRLF line ending
2016-02-20 13:20:03 +01:00
Thomas Pelletier a402e618c3 sudo is not needed by travis anymore 2016-02-19 14:17:07 +01:00
Thomas Pelletier 2df083520a Fix support for CRLF line ending 2016-02-19 14:12:13 +01:00
Thomas Pelletier 8176e30b38 Fix printf formatting 2016-01-31 17:07:37 +01:00
Thomas Pelletier 14c964fc02 Merge pull request #49 from pelletier/generic-input
Generic input
2016-01-31 16:57:17 +01:00
Thomas Pelletier f963bc320f Generic input
Fixes #47
2016-01-31 16:54:40 +01:00
Thomas Pelletier 0488b850c6 Have Travis run 1.5.3 2016-01-14 11:33:30 +01:00
Thomas Pelletier 346e676fa2 2015 -> 2016 2016-01-05 10:06:54 +01:00
Thomas Pelletier 6d743bb19f Improve error checking on number parsing 2015-12-01 14:38:33 +01:00
Thomas Pelletier fa1c2ab68c Error when parsing an empty key 2015-12-01 14:02:02 +01:00
Thomas Pelletier a6c6ad1f5f Don't crash when assigning group array to array 2015-12-01 13:56:31 +01:00
Thomas Pelletier ab7a652912 Fix TOML URL in doc.go 2015-12-01 09:53:09 +01:00
Thomas Pelletier 3102b98900 Update to TOML v0.4.0 2015-11-03 16:07:50 +01:00
Thomas Pelletier f0cae62430 Merge pull request #46 from pelletier/pelletier/inline-tables
Implement inline tables
2015-11-03 16:05:32 +01:00
Thomas Pelletier 56c6106477 Specify point versions in Travis 2015-09-10 09:51:39 +01:00
Thomas Pelletier 7d69e5a5c5 Tests for erroneous inline tables 2015-09-09 17:40:27 +01:00
Thomas Pelletier 07d0c2e4d3 Merge branch 'master' into pelletier/inline-tables 2015-09-09 17:35:03 +01:00
Thomas Pelletier 6b9002d8f9 Harden tests for bad arrays 2015-09-09 17:33:28 +01:00
Thomas Pelletier 5753e884d0 Fix floating points with underscores 2015-09-09 17:17:08 +01:00
Thomas Pelletier d467309bdd Add comment to justify this madness 2015-09-09 17:04:36 +01:00
Thomas Pelletier 821a80e635 Add removed test 2015-09-09 17:01:05 +01:00
Thomas Pelletier dd4c4ffc2b Implement inline tables 2015-09-09 16:56:18 +01:00
Thomas Pelletier da703daafe Add go 1.5 to tested versions 2015-08-19 10:24:53 -07:00
Thomas Pelletier f58048cec0 Merge pull request #39 from pelletier/pelletier/integers_underscores
Accept underscores in integers
2015-07-17 16:54:19 -07:00
Thomas Pelletier 440592fa85 Merge pull request #40 from pelletier/pelletier/space-in-keys
Accept spaces in keys
2015-07-17 16:53:53 -07:00
Thomas Pelletier f4f2456dcd Merge pull request #38 from pelletier/pelletier/multiline
Reject full 00 - 1F unicode range
2015-07-17 16:52:59 -07:00
Thomas Pelletier a77f30ea80 Add coveralls badge to readme 2015-07-16 23:55:56 -07:00
Thomas Pelletier d61c80733b Add goveralls 2015-07-16 23:51:41 -07:00
Thomas Pelletier 894e775e38 Accept spaces in keys 2015-07-16 23:04:13 -07:00
Thomas Pelletier 8e75093380 Accept underscores in integers 2015-07-16 22:07:16 -07:00
Thomas Pelletier cf5ad6a245 Fixes #27: Reject full 00 - 1F unicode range 2015-07-16 21:54:10 -07:00
Thomas Pelletier 8fc7451ffc Merge pull request #37 from pelletier/pelletier/better_keys_parsing
Update keys parsing
2015-07-16 17:47:46 -07:00
Thomas Pelletier 9defd66d3c Parse datetimes in UTC 2015-07-15 10:58:08 -07:00
Thomas Pelletier 6adf8057ed Use the new Travis container infrastructure
http://docs.travis-ci.com/user/migrating-from-legacy/#Why-migrate-to-container-based-infrastructure%3F
2015-07-15 09:12:52 -07:00
Thomas Pelletier 36e1197190 Test datetimes differently 2015-07-15 08:17:28 -07:00
Thomas Pelletier 6dd2de38a9 We have been in 2015 for quite a while now 2015-07-14 20:18:44 -07:00
Thomas Pelletier 209315c2af Fixes #35: Retrieve dotted keys 2015-07-14 20:15:02 -07:00
Thomas Pelletier 41a8959f14 Reject new lines in keys 2015-07-14 20:07:43 -07:00
Thomas Pelletier 16a681db2a Allow numbers in keys parsing 2015-07-14 19:56:28 -07:00
Thomas Pelletier 9f36448571 Basic keys parsing 2015-07-14 16:33:33 -07:00
Thomas Pelletier 222e90a7d3 Parse long unicode 2015-05-21 18:52:26 -07:00
Thomas Pelletier a8327d781a Specify timezone name 2015-04-23 15:42:25 -07:00
Thomas Pelletier 61449e9d32 Test for Go 1.4.1 2015-04-23 15:36:06 -07:00
Thomas Pelletier 48c977fb58 Test for golang 1.4 2015-04-23 15:33:31 -07:00
Thomas Pelletier 42e7853ef6 Merge pull request #34 from pelletier/issue-29
Changes to support #29 - Support multi-line literal strings
2015-02-27 14:48:13 +01:00
eanderton 1f3d0e03c3 Changes to support #29 - Support multi-line literal strings
* Added error output to test_program.go
* Added multi-line literal string support to lexer
* Added multi-line string support to lexer
* Added unit-test for new string support
* Modified test.sh to take an optional parameter to run an individual BurntSushi test suite.
* Fixed formatting
2015-02-26 18:03:30 -05:00
Thomas Pelletier 36d65b681a Merge branch 'toml-0.3.1' 2014-12-06 15:27:39 +01:00
Thomas Pelletier a56707c85f Fixes #28 : Support of literal strings 2014-12-06 15:23:37 +01:00
Thomas Pelletier 4b47f52cb0 Fixes #31 : Use RFC 3339 for datetimes 2014-12-06 15:00:24 +01:00
Thomas Pelletier 2f2f28631b Fixes #32 : Ensure keys are correctly parsed 2014-12-06 14:16:42 +01:00
Thomas Pelletier 543444f747 Fixes #30: Implement exp notation in floats 2014-12-06 13:56:27 +01:00
Thomas Pelletier b814e1a94f Merge pull request #25 from vektra/master
Make it possible to use lib to make new Toml Trees
2014-11-05 19:08:21 +01:00
Evan Phoenix 1fe62f3000 Merge remote-tracking branch 'prim/master'
Conflicts:
	match_test.go
	queryparser.go
2014-11-05 09:52:03 -08:00
Evan Phoenix 709382e9c1 Fix usage on 32bit machines 2014-11-05 09:24:08 -08:00
Evan Phoenix 71e7762db5 Don't wrap native types in a tomlValue{} 2014-11-05 09:23:41 -08:00
Evan Phoenix 34da10d880 Report the type and value that generated the error 2014-11-05 09:23:28 -08:00
Thomas Pelletier db15f8a481 Merge pull request #24 from pelletier/pelletier/integer_overflow
Int overflow in queryparser
2014-11-03 22:09:12 +01:00
Evan Phoenix 8ef71920bd Expose ability to make an empty tree and handle raw values 2014-10-28 11:49:50 -07:00
Evan Phoenix fa055bcbba Fix inserting values into a tree 2014-10-28 11:49:14 -07:00
Thomas Pelletier 7337a63f5a Use MaxInt instead of MaxInt64 for ints
This is causing an integer overflow on 386 go builds, because ints are
int32 and not int64 on this platform.
2014-10-16 05:58:50 -07:00
32 changed files with 2730 additions and 688 deletions
+16 -5
View File
@@ -1,7 +1,18 @@
language: go language: go
script: "./test.sh"
go: go:
- 1.1 - 1.5.4
- 1.2 - 1.6.4
- 1.3 - 1.7.4
- tip - tip
matrix:
allow_failures:
- go: tip
fast_finish: true
script:
- ./test.sh
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
after_success:
- $HOME/gopath/bin/goveralls -service=travis-ci
+22
View File
@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2013 - 2016 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+22 -20
View File
@@ -3,10 +3,13 @@
Go library for the [TOML](https://github.com/mojombo/toml) format. Go library for the [TOML](https://github.com/mojombo/toml) format.
This library supports TOML version This library supports TOML version
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) [v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
[![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml) [![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml)
[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/goadesign/goa/blob/master/LICENSE)
[![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml) [![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml)
[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml)
## Features ## Features
@@ -80,6 +83,23 @@ if err != nil {
The documentation and additional examples are available at The documentation and additional examples are available at
[godoc.org](http://godoc.org/github.com/pelletier/go-toml). [godoc.org](http://godoc.org/github.com/pelletier/go-toml).
## Tools
Go-toml provides two handy command line tools:
* `tomll`: Reads TOML files and lint them.
```
go install github.com/pelletier/go-toml/cmd/tomll
tomll --help
```
* `tomljson`: Reads a TOML file and outputs its JSON representation.
```
go install github.com/pelletier/go-toml/cmd/tomljson
tomljson --help
```
## Contribute ## Contribute
Feel free to report bugs and patches using GitHub's pull requests system on Feel free to report bugs and patches using GitHub's pull requests system on
@@ -97,22 +117,4 @@ You can run both of them using `./test.sh`.
## License ## License
Copyright (c) 2013, 2014 Thomas Pelletier, Eric Anderton The MIT License (MIT). Read [LICENSE](LICENSE).
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+5 -1
View File
@@ -3,20 +3,23 @@ package main
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/pelletier/go-toml"
"io/ioutil" "io/ioutil"
"log" "log"
"os" "os"
"time" "time"
"github.com/pelletier/go-toml"
) )
func main() { func main() {
bytes, err := ioutil.ReadAll(os.Stdin) bytes, err := ioutil.ReadAll(os.Stdin)
if err != nil { if err != nil {
log.Fatalf("Error during TOML read: %s", err)
os.Exit(2) os.Exit(2)
} }
tree, err := toml.Load(string(bytes)) tree, err := toml.Load(string(bytes))
if err != nil { if err != nil {
log.Fatalf("Error during TOML load: %s", err)
os.Exit(1) os.Exit(1)
} }
@@ -24,6 +27,7 @@ func main() {
if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil { if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil {
log.Fatalf("Error encoding JSON: %s", err) log.Fatalf("Error encoding JSON: %s", err)
os.Exit(3)
} }
os.Exit(0) os.Exit(0)
+67
View File
@@ -0,0 +1,67 @@
package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"os"
"github.com/pelletier/go-toml"
)
func main() {
flag.Usage = func() {
fmt.Fprintln(os.Stderr, `tomljson can be used in two ways:
Writing to STDIN and reading from STDOUT:
cat file.toml | tomljson > file.json
Reading from a file name:
tomljson file.toml
`)
}
flag.Parse()
os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
}
func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
// read from stdin and print to stdout
inputReader := defaultInput
if len(files) > 0 {
var err error
inputReader, err = os.Open(files[0])
if err != nil {
printError(err, errorOutput)
return -1
}
}
s, err := reader(inputReader)
if err != nil {
printError(err, errorOutput)
return -1
}
io.WriteString(output, s+"\n")
return 0
}
func printError(err error, output io.Writer) {
io.WriteString(output, err.Error()+"\n")
}
func reader(r io.Reader) (string, error) {
tree, err := toml.LoadReader(r)
if err != nil {
return "", err
}
return mapToJSON(tree)
}
func mapToJSON(tree *toml.TomlTree) (string, error) {
treeMap := tree.ToMap()
bytes, err := json.MarshalIndent(treeMap, "", " ")
if err != nil {
return "", err
}
return string(bytes[:]), nil
}
+82
View File
@@ -0,0 +1,82 @@
package main
import (
"bytes"
"io/ioutil"
"os"
"strings"
"testing"
)
func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
output := buffer.String()
if output != expected {
t.Errorf("incorrect %s:\n%s\n\nexpected %s:\n%s", name, output, name, expected)
t.Log([]rune(output))
t.Log([]rune(expected))
}
}
func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
inputReader := strings.NewReader(input)
outputBuffer := new(bytes.Buffer)
errorBuffer := new(bytes.Buffer)
returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
expectBufferEquality(t, "output", outputBuffer, expectedOutput)
expectBufferEquality(t, "error", errorBuffer, expectedError)
if returnCode != exitCode {
t.Error("incorrect return code:", returnCode, "expected", exitCode)
}
}
func TestProcessMainReadFromStdin(t *testing.T) {
input := `
[mytoml]
a = 42`
expectedOutput := `{
"mytoml": {
"a": 42
}
}
`
expectedError := ``
expectedExitCode := 0
expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
}
func TestProcessMainReadFromFile(t *testing.T) {
input := `
[mytoml]
a = 42`
tmpfile, err := ioutil.TempFile("", "example.toml")
if err != nil {
t.Fatal(err)
}
if _, err := tmpfile.Write([]byte(input)); err != nil {
t.Fatal(err)
}
defer os.Remove(tmpfile.Name())
expectedOutput := `{
"mytoml": {
"a": 42
}
}
`
expectedError := ``
expectedExitCode := 0
expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
}
func TestProcessMainReadFromMissingFile(t *testing.T) {
expectedError := `open /this/file/does/not/exist: no such file or directory
`
expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
}
+61
View File
@@ -0,0 +1,61 @@
package main
import (
"flag"
"fmt"
"io"
"io/ioutil"
"os"
"github.com/pelletier/go-toml"
)
func main() {
flag.Usage = func() {
fmt.Fprintln(os.Stderr, `tomll can be used in two ways:
Writing to STDIN and reading from STDOUT:
cat file.toml | tomll > file.toml
Reading and updating a list of files:
tomll a.toml b.toml c.toml
When given a list of files, tomll will modify all files in place without asking.
`)
}
flag.Parse()
// read from stdin and print to stdout
if flag.NArg() == 0 {
s, err := lintReader(os.Stdin)
if err != nil {
io.WriteString(os.Stderr, err.Error())
os.Exit(-1)
}
io.WriteString(os.Stdout, s)
} else {
// otherwise modify a list of files
for _, filename := range flag.Args() {
s, err := lintFile(filename)
if err != nil {
io.WriteString(os.Stderr, err.Error())
os.Exit(-1)
}
ioutil.WriteFile(filename, []byte(s), 0644)
}
}
}
func lintFile(filename string) (string, error) {
tree, err := toml.LoadFile(filename)
if err != nil {
return "", err
}
return tree.String(), nil
}
func lintReader(r io.Reader) (string, error) {
tree, err := toml.LoadReader(r)
if err != nil {
return "", err
}
return tree.String(), nil
}
+8 -3
View File
@@ -1,7 +1,7 @@
// Package toml is a TOML markup language parser. // Package toml is a TOML markup language parser.
// //
// This version supports the specification as described in // This version supports the specification as described in
// https://github.com/toml-lang/toml/blob/master/versions/toml-v0.2.0.md // https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
// //
// TOML Parsing // TOML Parsing
// //
@@ -83,9 +83,9 @@
// The idea behind a query path is to allow quick access to any element, or set // The idea behind a query path is to allow quick access to any element, or set
// of elements within TOML document, with a single expression. // of elements within TOML document, with a single expression.
// //
// result := tree.Query("$.foo.bar.baz") // result is 'nil' if the path is not present // result, err := tree.Query("$.foo.bar.baz")
// //
// This is equivalent to: // This is roughly equivalent to:
// //
// next := tree.Get("foo") // next := tree.Get("foo")
// if next != nil { // if next != nil {
@@ -96,6 +96,11 @@
// } // }
// result := next // result := next
// //
// err is nil if any parsing exception occurs.
//
// If no node in the tree matches the query, result will simply contain an empty list of
// items.
//
// As illustrated above, the query path is much more efficient, especially since // As illustrated above, the query path is much more efficient, especially since
// the structure of the TOML file can vary. Rather than making assumptions about // the structure of the TOML file can vary. Rather than making assumptions about
// a document's structure, a query allows the programmer to make structured // a document's structure, a query allows the programmer to make structured
+3 -3
View File
@@ -69,13 +69,13 @@ func Example_comprehensiveExample() {
fmt.Println("User is ", user, ". Password is ", password) fmt.Println("User is ", user, ". Password is ", password)
// show where elements are in the file // show where elements are in the file
fmt.Println("User position: %v", configTree.GetPosition("user")) fmt.Printf("User position: %v\n", configTree.GetPosition("user"))
fmt.Println("Password position: %v", configTree.GetPosition("password")) fmt.Printf("Password position: %v\n", configTree.GetPosition("password"))
// use a query to gather elements without walking the tree // use a query to gather elements without walking the tree
results, _ := config.Query("$..[user,password]") results, _ := config.Query("$..[user,password]")
for ii, item := range results.Values() { for ii, item := range results.Values() {
fmt.Println("Query result %d: %v", ii, item) fmt.Printf("Query result %d: %v\n", ii, item)
} }
} }
} }
+29
View File
@@ -0,0 +1,29 @@
# This is a TOML document. Boom.
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
organization = "GitHub"
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true
[servers]
# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
[clients]
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
+93
View File
@@ -0,0 +1,93 @@
// Parsing keys handling both bare and quoted keys.
package toml
import (
"bytes"
"fmt"
"unicode"
)
func parseKey(key string) ([]string, error) {
groups := []string{}
var buffer bytes.Buffer
inQuotes := false
wasInQuotes := false
escapeNext := false
ignoreSpace := true
expectDot := false
for _, char := range key {
if ignoreSpace {
if char == ' ' {
continue
}
ignoreSpace = false
}
if escapeNext {
buffer.WriteRune(char)
escapeNext = false
continue
}
switch char {
case '\\':
escapeNext = true
continue
case '"':
if inQuotes {
groups = append(groups, buffer.String())
buffer.Reset()
wasInQuotes = true
}
inQuotes = !inQuotes
expectDot = false
case '.':
if inQuotes {
buffer.WriteRune(char)
} else {
if !wasInQuotes {
if buffer.Len() == 0 {
return nil, fmt.Errorf("empty key group")
}
groups = append(groups, buffer.String())
buffer.Reset()
}
ignoreSpace = true
expectDot = false
wasInQuotes = false
}
case ' ':
if inQuotes {
buffer.WriteRune(char)
} else {
expectDot = true
}
default:
if !inQuotes && !isValidBareChar(char) {
return nil, fmt.Errorf("invalid bare character: %c", char)
}
if !inQuotes && expectDot {
return nil, fmt.Errorf("what?")
}
buffer.WriteRune(char)
expectDot = false
}
}
if inQuotes {
return nil, fmt.Errorf("mismatched quotes")
}
if escapeNext {
return nil, fmt.Errorf("unfinished escape sequence")
}
if buffer.Len() > 0 {
groups = append(groups, buffer.String())
}
if len(groups) == 0 {
return nil, fmt.Errorf("empty key")
}
return groups, nil
}
func isValidBareChar(r rune) bool {
return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r)
}
+56
View File
@@ -0,0 +1,56 @@
package toml
import (
"fmt"
"testing"
)
func testResult(t *testing.T, key string, expected []string) {
parsed, err := parseKey(key)
t.Logf("key=%s expected=%s parsed=%s", key, expected, parsed)
if err != nil {
t.Fatal("Unexpected error:", err)
}
if len(expected) != len(parsed) {
t.Fatal("Expected length", len(expected), "but", len(parsed), "parsed")
}
for index, expectedKey := range expected {
if expectedKey != parsed[index] {
t.Fatal("Expected", expectedKey, "at index", index, "but found", parsed[index])
}
}
}
func testError(t *testing.T, key string, expectedError string) {
_, err := parseKey(key)
if fmt.Sprintf("%s", err) != expectedError {
t.Fatalf("Expected error \"%s\", but got \"%s\".", expectedError, err)
}
}
func TestBareKeyBasic(t *testing.T) {
testResult(t, "test", []string{"test"})
}
func TestBareKeyDotted(t *testing.T) {
testResult(t, "this.is.a.key", []string{"this", "is", "a", "key"})
}
func TestDottedKeyBasic(t *testing.T) {
testResult(t, "\"a.dotted.key\"", []string{"a.dotted.key"})
}
func TestBaseKeyPound(t *testing.T) {
testError(t, "hello#world", "invalid bare character: #")
}
func TestQuotedKeys(t *testing.T) {
testResult(t, `hello."foo".bar`, []string{"hello", "foo", "bar"})
testResult(t, `"hello!"`, []string{"hello!"})
}
func TestEmptyKey(t *testing.T) {
testError(t, "", "empty key")
testError(t, " ", "empty key")
testResult(t, `""`, []string{""})
}
+376 -189
View File
@@ -1,16 +1,19 @@
// TOML lexer. // TOML lexer.
// //
// Written using the principles developped by Rob Pike in // Written using the principles developed by Rob Pike in
// http://www.youtube.com/watch?v=HxaD_trXwRE // http://www.youtube.com/watch?v=HxaD_trXwRE
package toml package toml
import ( import (
"errors"
"fmt" "fmt"
"io"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
"unicode/utf8"
"github.com/pelletier/go-buffruneio"
) )
var dateRegexp *regexp.Regexp var dateRegexp *regexp.Regexp
@@ -20,47 +23,56 @@ type tomlLexStateFn func() tomlLexStateFn
// Define lexer // Define lexer
type tomlLexer struct { type tomlLexer struct {
input string input *buffruneio.Reader // Textual source
start int buffer []rune // Runes composing the current token
pos int tokens chan token
width int depth int
tokens chan token line int
depth int col int
line int endbufferLine int
col int endbufferCol int
} }
func (l *tomlLexer) run() { // Basic read operations on input
for state := l.lexVoid; state != nil; {
state = state() func (l *tomlLexer) read() rune {
r, err := l.input.ReadRune()
if err != nil {
panic(err)
} }
close(l.tokens) if r == '\n' {
l.endbufferLine++
l.endbufferCol = 1
} else {
l.endbufferCol++
}
return r
} }
func (l *tomlLexer) nextStart() { func (l *tomlLexer) next() rune {
// iterate by runes (utf8 characters) r := l.read()
// search for newlines and advance line/col counts
for i := l.start; i < l.pos; { if r != eof {
r, width := utf8.DecodeRuneInString(l.input[i:]) l.buffer = append(l.buffer, r)
if r == '\n' {
l.line++
l.col = 1
} else {
l.col++
}
i += width
} }
// advance start position to next token return r
l.start = l.pos
} }
func (l *tomlLexer) emit(t tokenType) { func (l *tomlLexer) ignore() {
l.tokens <- token{ l.buffer = make([]rune, 0)
Position: Position{l.line, l.col}, l.line = l.endbufferLine
typ: t, l.col = l.endbufferCol
val: l.input[l.start:l.pos], }
func (l *tomlLexer) skip() {
l.next()
l.ignore()
}
func (l *tomlLexer) fastForward(n int) {
for i := 0; i < n; i++ {
l.next()
} }
l.nextStart()
} }
func (l *tomlLexer) emitWithValue(t tokenType, value string) { func (l *tomlLexer) emitWithValue(t tokenType, value string) {
@@ -69,27 +81,37 @@ func (l *tomlLexer) emitWithValue(t tokenType, value string) {
typ: t, typ: t,
val: value, val: value,
} }
l.nextStart() l.ignore()
} }
func (l *tomlLexer) next() rune { func (l *tomlLexer) emit(t tokenType) {
if l.pos >= len(l.input) { l.emitWithValue(t, string(l.buffer))
l.width = 0 }
return eof
func (l *tomlLexer) peek() rune {
r, err := l.input.ReadRune()
if err != nil {
panic(err)
} }
var r rune l.input.UnreadRune()
r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
l.pos += l.width
return r return r
} }
func (l *tomlLexer) ignore() { func (l *tomlLexer) follow(next string) bool {
l.nextStart() for _, expectedRune := range next {
r, err := l.input.ReadRune()
defer l.input.UnreadRune()
if err != nil {
panic(err)
}
if expectedRune != r {
return false
}
}
return true
} }
func (l *tomlLexer) backup() { // Error management
l.pos -= l.width
}
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn { func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
l.tokens <- token{ l.tokens <- token{
@@ -100,23 +122,7 @@ func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
return nil return nil
} }
func (l *tomlLexer) peek() rune { // State functions
r := l.next()
l.backup()
return r
}
func (l *tomlLexer) accept(valid string) bool {
if strings.IndexRune(valid, l.next()) >= 0 {
return true
}
l.backup()
return false
}
func (l *tomlLexer) follow(next string) bool {
return strings.HasPrefix(l.input[l.pos:], next)
}
func (l *tomlLexer) lexVoid() tomlLexStateFn { func (l *tomlLexer) lexVoid() tomlLexStateFn {
for { for {
@@ -128,21 +134,27 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
return l.lexComment return l.lexComment
case '=': case '=':
return l.lexEqual return l.lexEqual
case '\r':
fallthrough
case '\n':
l.skip()
continue
} }
if isSpace(next) { if isSpace(next) {
l.ignore() l.skip()
} }
if l.depth > 0 { if l.depth > 0 {
return l.lexRvalue return l.lexRvalue
} }
if isKeyChar(next) { if isKeyStartChar(next) {
return l.lexKey return l.lexKey
} }
if l.next() == eof { if next == eof {
l.next()
break break
} }
} }
@@ -158,26 +170,35 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
case '.': case '.':
return l.errorf("cannot start float with a dot") return l.errorf("cannot start float with a dot")
case '=': case '=':
return l.errorf("cannot have multiple equals for the same key") return l.lexEqual
case '[': case '[':
l.depth++ l.depth++
return l.lexLeftBracket return l.lexLeftBracket
case ']': case ']':
l.depth-- l.depth--
return l.lexRightBracket return l.lexRightBracket
case '{':
return l.lexLeftCurlyBrace
case '}':
return l.lexRightCurlyBrace
case '#': case '#':
return l.lexComment return l.lexComment
case '"': case '"':
return l.lexString return l.lexString
case '\'':
return l.lexLiteralString
case ',': case ',':
return l.lexComma return l.lexComma
case '\r':
fallthrough
case '\n': case '\n':
l.ignore() l.skip()
l.pos++
if l.depth == 0 { if l.depth == 0 {
return l.lexVoid return l.lexVoid
} }
return l.lexRvalue return l.lexRvalue
case '_':
return l.errorf("cannot start number with underscore")
} }
if l.follow("true") { if l.follow("true") {
@@ -188,11 +209,20 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
return l.lexFalse return l.lexFalse
} }
if isAlphanumeric(next) { if isSpace(next) {
return l.lexKey l.skip()
continue
} }
if dateRegexp.FindString(l.input[l.pos:]) != "" { if next == eof {
l.next()
break
}
possibleDate := string(l.input.Peek(35))
dateMatch := dateRegexp.FindString(possibleDate)
if dateMatch != "" {
l.fastForward(len(dateMatch))
return l.lexDate return l.lexDate
} }
@@ -200,157 +230,298 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
return l.lexNumber return l.lexNumber
} }
if isSpace(next) { if isAlphanumeric(next) {
l.ignore() return l.lexKey
} }
if l.next() == eof { return l.errorf("no value can start with %c", next)
break
}
} }
l.emit(tokenEOF) l.emit(tokenEOF)
return nil return nil
} }
func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
l.next()
l.emit(tokenLeftCurlyBrace)
return l.lexRvalue
}
func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
l.next()
l.emit(tokenRightCurlyBrace)
return l.lexRvalue
}
func (l *tomlLexer) lexDate() tomlLexStateFn { func (l *tomlLexer) lexDate() tomlLexStateFn {
l.ignore()
l.pos += 20 // Fixed size of a date in TOML
l.emit(tokenDate) l.emit(tokenDate)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexTrue() tomlLexStateFn { func (l *tomlLexer) lexTrue() tomlLexStateFn {
l.ignore() l.fastForward(4)
l.pos += 4
l.emit(tokenTrue) l.emit(tokenTrue)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexFalse() tomlLexStateFn { func (l *tomlLexer) lexFalse() tomlLexStateFn {
l.ignore() l.fastForward(5)
l.pos += 5
l.emit(tokenFalse) l.emit(tokenFalse)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexEqual() tomlLexStateFn { func (l *tomlLexer) lexEqual() tomlLexStateFn {
l.ignore() l.next()
l.accept("=")
l.emit(tokenEqual) l.emit(tokenEqual)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexComma() tomlLexStateFn { func (l *tomlLexer) lexComma() tomlLexStateFn {
l.ignore() l.next()
l.accept(",")
l.emit(tokenComma) l.emit(tokenComma)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexKey() tomlLexStateFn { func (l *tomlLexer) lexKey() tomlLexStateFn {
l.ignore() growingString := ""
for isKeyChar(l.next()) {
for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
if r == '"' {
l.next()
str, err := l.lexStringAsString(`"`, false, true)
if err != nil {
return l.errorf(err.Error())
}
growingString += `"` + str + `"`
l.next()
continue
} else if r == '\n' {
return l.errorf("keys cannot contain new lines")
} else if isSpace(r) {
break
} else if !isValidBareChar(r) {
return l.errorf("keys cannot contain %c character", r)
}
growingString += string(r)
l.next()
} }
l.backup() l.emitWithValue(tokenKey, growingString)
l.emit(tokenKey)
return l.lexVoid return l.lexVoid
} }
func (l *tomlLexer) lexComment() tomlLexStateFn { func (l *tomlLexer) lexComment() tomlLexStateFn {
for { for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
next := l.next() if next == '\r' && l.follow("\r\n") {
if next == '\n' || next == eof {
break break
} }
l.next()
} }
l.ignore() l.ignore()
return l.lexVoid return l.lexVoid
} }
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn { func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
l.ignore() l.next()
l.pos++
l.emit(tokenLeftBracket) l.emit(tokenLeftBracket)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexString() tomlLexStateFn { func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
l.pos++
l.ignore()
growingString := "" growingString := ""
if discardLeadingNewLine {
if l.follow("\r\n") {
l.skip()
l.skip()
} else if l.peek() == '\n' {
l.skip()
}
}
// find end of string
for { for {
if l.peek() == '"' { if l.follow(terminator) {
l.emitWithValue(tokenString, growingString) return growingString, nil
l.pos++
l.ignore()
return l.lexRvalue
} }
if l.follow("\\\"") { next := l.peek()
l.pos++ if next == eof {
growingString += "\"" break
} else if l.follow("\\n") { }
l.pos++ growingString += string(l.next())
growingString += "\n" }
} else if l.follow("\\b") {
l.pos++ return "", errors.New("unclosed string")
growingString += "\b" }
} else if l.follow("\\f") {
l.pos++ func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
growingString += "\f" l.skip()
} else if l.follow("\\/") {
l.pos++ // handle special case for triple-quote
growingString += "/" terminator := "'"
} else if l.follow("\\t") { discardLeadingNewLine := false
l.pos++ if l.follow("''") {
growingString += "\t" l.skip()
} else if l.follow("\\r") { l.skip()
l.pos++ terminator = "'''"
growingString += "\r" discardLeadingNewLine = true
} else if l.follow("\\\\") { }
l.pos++
growingString += "\\" str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine)
} else if l.follow("\\u") { if err != nil {
l.pos += 2 return l.errorf(err.Error())
code := "" }
for i := 0; i < 4; i++ {
c := l.peek() l.emitWithValue(tokenString, str)
l.pos++ l.fastForward(len(terminator))
if !isHexDigit(c) { l.ignore()
return l.errorf("unfinished unicode escape") return l.lexRvalue
}
// Lex a string and return the results as a string.
// Terminator is the substring indicating the end of the token.
// The resulting string does not include the terminator.
func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
growingString := ""
if discardLeadingNewLine {
if l.follow("\r\n") {
l.skip()
l.skip()
} else if l.peek() == '\n' {
l.skip()
}
}
for {
if l.follow(terminator) {
return growingString, nil
}
if l.follow("\\") {
l.next()
switch l.peek() {
case '\r':
fallthrough
case '\n':
fallthrough
case '\t':
fallthrough
case ' ':
// skip all whitespace chars following backslash
for strings.ContainsRune("\r\n\t ", l.peek()) {
l.next()
} }
code = code + string(c) case '"':
growingString += "\""
l.next()
case 'n':
growingString += "\n"
l.next()
case 'b':
growingString += "\b"
l.next()
case 'f':
growingString += "\f"
l.next()
case '/':
growingString += "/"
l.next()
case 't':
growingString += "\t"
l.next()
case 'r':
growingString += "\r"
l.next()
case '\\':
growingString += "\\"
l.next()
case 'u':
l.next()
code := ""
for i := 0; i < 4; i++ {
c := l.peek()
if !isHexDigit(c) {
return "", errors.New("unfinished unicode escape")
}
l.next()
code = code + string(c)
}
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return "", errors.New("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
case 'U':
l.next()
code := ""
for i := 0; i < 8; i++ {
c := l.peek()
if !isHexDigit(c) {
return "", errors.New("unfinished unicode escape")
}
l.next()
code = code + string(c)
}
intcode, err := strconv.ParseInt(code, 16, 64)
if err != nil {
return "", errors.New("invalid unicode escape: \\U" + code)
}
growingString += string(rune(intcode))
default:
return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
} }
l.pos--
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return l.errorf("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
} else if l.follow("\\") {
l.pos++
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
} else { } else {
growingString += string(l.peek()) r := l.peek()
if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
return "", fmt.Errorf("unescaped control character %U", r)
}
l.next()
growingString += string(r)
} }
if l.next() == eof { if l.peek() == eof {
break break
} }
} }
return l.errorf("unclosed string") return "", errors.New("unclosed string")
}
func (l *tomlLexer) lexString() tomlLexStateFn {
l.skip()
// handle special case for triple-quote
terminator := `"`
discardLeadingNewLine := false
acceptNewLines := false
if l.follow(`""`) {
l.skip()
l.skip()
terminator = `"""`
discardLeadingNewLine = true
acceptNewLines = true
}
str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
if err != nil {
return l.errorf(err.Error())
}
l.emitWithValue(tokenString, str)
l.fastForward(len(terminator))
l.ignore()
return l.lexRvalue
} }
func (l *tomlLexer) lexKeyGroup() tomlLexStateFn { func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
l.ignore() l.next()
l.pos++
if l.peek() == '[' { if l.peek() == '[' {
// token '[[' signifies an array of anonymous key groups // token '[[' signifies an array of anonymous key groups
l.pos++ l.next()
l.emit(tokenDoubleLeftBracket) l.emit(tokenDoubleLeftBracket)
return l.lexInsideKeyGroupArray return l.lexInsideKeyGroupArray
} }
@@ -360,79 +531,85 @@ func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
} }
func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn { func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
for { for r := l.peek(); r != eof; r = l.peek() {
if l.peek() == ']' { switch r {
if l.pos > l.start { case ']':
if len(l.buffer) > 0 {
l.emit(tokenKeyGroupArray) l.emit(tokenKeyGroupArray)
} }
l.ignore() l.next()
l.pos++
if l.peek() != ']' { if l.peek() != ']' {
break // error break
} }
l.pos++ l.next()
l.emit(tokenDoubleRightBracket) l.emit(tokenDoubleRightBracket)
return l.lexVoid return l.lexVoid
} else if l.peek() == '[' { case '[':
return l.errorf("group name cannot contain ']'") return l.errorf("group name cannot contain ']'")
} default:
l.next()
if l.next() == eof {
break
} }
} }
return l.errorf("unclosed key group array") return l.errorf("unclosed key group array")
} }
func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn { func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
for { for r := l.peek(); r != eof; r = l.peek() {
if l.peek() == ']' { switch r {
if l.pos > l.start { case ']':
if len(l.buffer) > 0 {
l.emit(tokenKeyGroup) l.emit(tokenKeyGroup)
} }
l.ignore() l.next()
l.pos++
l.emit(tokenRightBracket) l.emit(tokenRightBracket)
return l.lexVoid return l.lexVoid
} else if l.peek() == '[' { case '[':
return l.errorf("group name cannot contain ']'") return l.errorf("group name cannot contain ']'")
} default:
l.next()
if l.next() == eof {
break
} }
} }
return l.errorf("unclosed key group") return l.errorf("unclosed key group")
} }
func (l *tomlLexer) lexRightBracket() tomlLexStateFn { func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
l.ignore() l.next()
l.pos++
l.emit(tokenRightBracket) l.emit(tokenRightBracket)
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) lexNumber() tomlLexStateFn { func (l *tomlLexer) lexNumber() tomlLexStateFn {
l.ignore() r := l.peek()
if !l.accept("+") { if r == '+' || r == '-' {
l.accept("-") l.next()
} }
pointSeen := false pointSeen := false
expSeen := false
digitSeen := false digitSeen := false
for { for {
next := l.next() next := l.peek()
if next == '.' { if next == '.' {
if pointSeen { if pointSeen {
return l.errorf("cannot have two dots in one float") return l.errorf("cannot have two dots in one float")
} }
l.next()
if !isDigit(l.peek()) { if !isDigit(l.peek()) {
return l.errorf("float cannot end with a dot") return l.errorf("float cannot end with a dot")
} }
pointSeen = true pointSeen = true
} else if next == 'e' || next == 'E' {
expSeen = true
l.next()
r := l.peek()
if r == '+' || r == '-' {
l.next()
}
} else if isDigit(next) { } else if isDigit(next) {
digitSeen = true digitSeen = true
l.next()
} else if next == '_' {
l.next()
} else { } else {
l.backup()
break break
} }
if pointSeen && !digitSeen { if pointSeen && !digitSeen {
@@ -443,7 +620,7 @@ func (l *tomlLexer) lexNumber() tomlLexStateFn {
if !digitSeen { if !digitSeen {
return l.errorf("no digit in that number") return l.errorf("no digit in that number")
} }
if pointSeen { if pointSeen || expSeen {
l.emit(tokenFloat) l.emit(tokenFloat)
} else { } else {
l.emit(tokenInteger) l.emit(tokenInteger)
@@ -451,17 +628,27 @@ func (l *tomlLexer) lexNumber() tomlLexStateFn {
return l.lexRvalue return l.lexRvalue
} }
func (l *tomlLexer) run() {
for state := l.lexVoid; state != nil; {
state = state()
}
close(l.tokens)
}
func init() { func init() {
dateRegexp = regexp.MustCompile("^\\d{1,4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z") dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
} }
// Entry point // Entry point
func lexToml(input string) chan token { func lexToml(input io.Reader) chan token {
bufferedInput := buffruneio.NewReader(input)
l := &tomlLexer{ l := &tomlLexer{
input: input, input: bufferedInput,
tokens: make(chan token), tokens: make(chan token),
line: 1, line: 1,
col: 1, col: 1,
endbufferLine: 1,
endbufferCol: 1,
} }
go l.run() go l.run()
return l.tokens return l.tokens
+491 -202
View File
@@ -1,18 +1,23 @@
package toml package toml
import "testing" import (
"strings"
"testing"
)
func testFlow(t *testing.T, input string, expectedFlow []token) { func testFlow(t *testing.T, input string, expectedFlow []token) {
ch := lexToml(input) ch := lexToml(strings.NewReader(input))
for _, expected := range expectedFlow { for _, expected := range expectedFlow {
token := <-ch token := <-ch
if token != expected { if token != expected {
t.Log("While testing: ", input) t.Log("While testing: ", input)
t.Log("compared (got)", token, "to (expected)", expected)
t.Log("\tvalue:", token.val, "<->", expected.val)
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
t.Log("\tline:", token.Line, "<->", expected.Line)
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
t.Log("compared", token, "to", expected) t.Log("compared", token, "to", expected)
t.Log(token.val, "<->", expected.val)
t.Log(token.typ, "<->", expected.typ)
t.Log(token.Line, "<->", expected.Line)
t.Log(token.Col, "<->", expected.Col)
t.FailNow() t.FailNow()
} }
} }
@@ -32,388 +37,672 @@ func testFlow(t *testing.T, input string, expectedFlow []token) {
func TestValidKeyGroup(t *testing.T) { func TestValidKeyGroup(t *testing.T) {
testFlow(t, "[hello world]", []token{ testFlow(t, "[hello world]", []token{
token{Position{1, 1}, tokenLeftBracket, "["}, {Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, "hello world"}, {Position{1, 2}, tokenKeyGroup, "hello world"},
token{Position{1, 13}, tokenRightBracket, "]"}, {Position{1, 13}, tokenRightBracket, "]"},
token{Position{1, 14}, tokenEOF, ""}, {Position{1, 14}, tokenEOF, ""},
})
}
func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
testFlow(t, `[ j . "ʞ" . l ]`, []token{
{Position{1, 1}, tokenLeftBracket, "["},
{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
{Position{1, 15}, tokenRightBracket, "]"},
{Position{1, 16}, tokenEOF, ""},
}) })
} }
func TestUnclosedKeyGroup(t *testing.T) { func TestUnclosedKeyGroup(t *testing.T) {
testFlow(t, "[hello world", []token{ testFlow(t, "[hello world", []token{
token{Position{1, 1}, tokenLeftBracket, "["}, {Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenError, "unclosed key group"}, {Position{1, 2}, tokenError, "unclosed key group"},
}) })
} }
func TestComment(t *testing.T) { func TestComment(t *testing.T) {
testFlow(t, "# blahblah", []token{ testFlow(t, "# blahblah", []token{
token{Position{1, 11}, tokenEOF, ""}, {Position{1, 11}, tokenEOF, ""},
}) })
} }
func TestKeyGroupComment(t *testing.T) { func TestKeyGroupComment(t *testing.T) {
testFlow(t, "[hello world] # blahblah", []token{ testFlow(t, "[hello world] # blahblah", []token{
token{Position{1, 1}, tokenLeftBracket, "["}, {Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, "hello world"}, {Position{1, 2}, tokenKeyGroup, "hello world"},
token{Position{1, 13}, tokenRightBracket, "]"}, {Position{1, 13}, tokenRightBracket, "]"},
token{Position{1, 25}, tokenEOF, ""}, {Position{1, 25}, tokenEOF, ""},
}) })
} }
func TestMultipleKeyGroupsComment(t *testing.T) { func TestMultipleKeyGroupsComment(t *testing.T) {
testFlow(t, "[hello world] # blahblah\n[test]", []token{ testFlow(t, "[hello world] # blahblah\n[test]", []token{
token{Position{1, 1}, tokenLeftBracket, "["}, {Position{1, 1}, tokenLeftBracket, "["},
token{Position{1, 2}, tokenKeyGroup, "hello world"}, {Position{1, 2}, tokenKeyGroup, "hello world"},
token{Position{1, 13}, tokenRightBracket, "]"}, {Position{1, 13}, tokenRightBracket, "]"},
token{Position{2, 1}, tokenLeftBracket, "["}, {Position{2, 1}, tokenLeftBracket, "["},
token{Position{2, 2}, tokenKeyGroup, "test"}, {Position{2, 2}, tokenKeyGroup, "test"},
token{Position{2, 6}, tokenRightBracket, "]"}, {Position{2, 6}, tokenRightBracket, "]"},
token{Position{2, 7}, tokenEOF, ""}, {Position{2, 7}, tokenEOF, ""},
})
}
func TestSimpleWindowsCRLF(t *testing.T) {
testFlow(t, "a=4\r\nb=2", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 2}, tokenEqual, "="},
{Position{1, 3}, tokenInteger, "4"},
{Position{2, 1}, tokenKey, "b"},
{Position{2, 2}, tokenEqual, "="},
{Position{2, 3}, tokenInteger, "2"},
{Position{2, 4}, tokenEOF, ""},
}) })
} }
func TestBasicKey(t *testing.T) { func TestBasicKey(t *testing.T) {
testFlow(t, "hello", []token{ testFlow(t, "hello", []token{
token{Position{1, 1}, tokenKey, "hello"}, {Position{1, 1}, tokenKey, "hello"},
token{Position{1, 6}, tokenEOF, ""}, {Position{1, 6}, tokenEOF, ""},
}) })
} }
func TestBasicKeyWithUnderscore(t *testing.T) { func TestBasicKeyWithUnderscore(t *testing.T) {
testFlow(t, "hello_hello", []token{ testFlow(t, "hello_hello", []token{
token{Position{1, 1}, tokenKey, "hello_hello"}, {Position{1, 1}, tokenKey, "hello_hello"},
token{Position{1, 12}, tokenEOF, ""}, {Position{1, 12}, tokenEOF, ""},
}) })
} }
func TestBasicKeyWithDash(t *testing.T) { func TestBasicKeyWithDash(t *testing.T) {
testFlow(t, "hello-world", []token{ testFlow(t, "hello-world", []token{
token{Position{1, 1}, tokenKey, "hello-world"}, {Position{1, 1}, tokenKey, "hello-world"},
token{Position{1, 12}, tokenEOF, ""}, {Position{1, 12}, tokenEOF, ""},
}) })
} }
func TestBasicKeyWithUppercaseMix(t *testing.T) { func TestBasicKeyWithUppercaseMix(t *testing.T) {
testFlow(t, "helloHELLOHello", []token{ testFlow(t, "helloHELLOHello", []token{
token{Position{1, 1}, tokenKey, "helloHELLOHello"}, {Position{1, 1}, tokenKey, "helloHELLOHello"},
token{Position{1, 16}, tokenEOF, ""}, {Position{1, 16}, tokenEOF, ""},
}) })
} }
func TestBasicKeyWithInternationalCharacters(t *testing.T) { func TestBasicKeyWithInternationalCharacters(t *testing.T) {
testFlow(t, "héllÖ", []token{ testFlow(t, "héllÖ", []token{
token{Position{1, 1}, tokenKey, "héllÖ"}, {Position{1, 1}, tokenKey, "héllÖ"},
token{Position{1, 6}, tokenEOF, ""}, {Position{1, 6}, tokenEOF, ""},
}) })
} }
func TestBasicKeyAndEqual(t *testing.T) { func TestBasicKeyAndEqual(t *testing.T) {
testFlow(t, "hello =", []token{ testFlow(t, "hello =", []token{
token{Position{1, 1}, tokenKey, "hello"}, {Position{1, 1}, tokenKey, "hello"},
token{Position{1, 7}, tokenEqual, "="}, {Position{1, 7}, tokenEqual, "="},
token{Position{1, 8}, tokenEOF, ""}, {Position{1, 8}, tokenEOF, ""},
}) })
} }
func TestKeyWithSharpAndEqual(t *testing.T) { func TestKeyWithSharpAndEqual(t *testing.T) {
testFlow(t, "key#name = 5", []token{ testFlow(t, "key#name = 5", []token{
token{Position{1, 1}, tokenKey, "key#name"}, {Position{1, 1}, tokenError, "keys cannot contain # character"},
token{Position{1, 10}, tokenEqual, "="},
token{Position{1, 12}, tokenInteger, "5"},
token{Position{1, 13}, tokenEOF, ""},
}) })
} }
func TestKeyWithSymbolsAndEqual(t *testing.T) { func TestKeyWithSymbolsAndEqual(t *testing.T) {
testFlow(t, "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{ testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
token{Position{1, 1}, tokenKey, "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'"}, {Position{1, 1}, tokenError, "keys cannot contain ~ character"},
token{Position{1, 39}, tokenEqual, "="},
token{Position{1, 41}, tokenInteger, "5"},
token{Position{1, 42}, tokenEOF, ""},
}) })
} }
func TestKeyEqualStringEscape(t *testing.T) { func TestKeyEqualStringEscape(t *testing.T) {
testFlow(t, `foo = "hello\""`, []token{ testFlow(t, `foo = "hello\""`, []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello\""}, {Position{1, 8}, tokenString, "hello\""},
token{Position{1, 16}, tokenEOF, ""}, {Position{1, 16}, tokenEOF, ""},
}) })
} }
func TestKeyEqualStringUnfinished(t *testing.T) { func TestKeyEqualStringUnfinished(t *testing.T) {
testFlow(t, `foo = "bar`, []token{ testFlow(t, `foo = "bar`, []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenError, "unclosed string"}, {Position{1, 8}, tokenError, "unclosed string"},
}) })
} }
func TestKeyEqualString(t *testing.T) { func TestKeyEqualString(t *testing.T) {
testFlow(t, `foo = "bar"`, []token{ testFlow(t, `foo = "bar"`, []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "bar"}, {Position{1, 8}, tokenString, "bar"},
token{Position{1, 12}, tokenEOF, ""}, {Position{1, 12}, tokenEOF, ""},
}) })
} }
func TestKeyEqualTrue(t *testing.T) { func TestKeyEqualTrue(t *testing.T) {
testFlow(t, "foo = true", []token{ testFlow(t, "foo = true", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenTrue, "true"}, {Position{1, 7}, tokenTrue, "true"},
token{Position{1, 11}, tokenEOF, ""}, {Position{1, 11}, tokenEOF, ""},
}) })
} }
func TestKeyEqualFalse(t *testing.T) { func TestKeyEqualFalse(t *testing.T) {
testFlow(t, "foo = false", []token{ testFlow(t, "foo = false", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFalse, "false"}, {Position{1, 7}, tokenFalse, "false"},
token{Position{1, 12}, tokenEOF, ""}, {Position{1, 12}, tokenEOF, ""},
}) })
} }
func TestArrayNestedString(t *testing.T) { func TestArrayNestedString(t *testing.T) {
testFlow(t, `a = [ ["hello", "world"] ]`, []token{ testFlow(t, `a = [ ["hello", "world"] ]`, []token{
token{Position{1, 1}, tokenKey, "a"}, {Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="}, {Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["}, {Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 7}, tokenLeftBracket, "["}, {Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 9}, tokenString, "hello"}, {Position{1, 9}, tokenString, "hello"},
token{Position{1, 15}, tokenComma, ","}, {Position{1, 15}, tokenComma, ","},
token{Position{1, 18}, tokenString, "world"}, {Position{1, 18}, tokenString, "world"},
token{Position{1, 24}, tokenRightBracket, "]"}, {Position{1, 24}, tokenRightBracket, "]"},
token{Position{1, 26}, tokenRightBracket, "]"}, {Position{1, 26}, tokenRightBracket, "]"},
token{Position{1, 27}, tokenEOF, ""}, {Position{1, 27}, tokenEOF, ""},
}) })
} }
func TestArrayNestedInts(t *testing.T) { func TestArrayNestedInts(t *testing.T) {
testFlow(t, "a = [ [42, 21], [10] ]", []token{ testFlow(t, "a = [ [42, 21], [10] ]", []token{
token{Position{1, 1}, tokenKey, "a"}, {Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="}, {Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["}, {Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 7}, tokenLeftBracket, "["}, {Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenInteger, "42"}, {Position{1, 8}, tokenInteger, "42"},
token{Position{1, 10}, tokenComma, ","}, {Position{1, 10}, tokenComma, ","},
token{Position{1, 12}, tokenInteger, "21"}, {Position{1, 12}, tokenInteger, "21"},
token{Position{1, 14}, tokenRightBracket, "]"}, {Position{1, 14}, tokenRightBracket, "]"},
token{Position{1, 15}, tokenComma, ","}, {Position{1, 15}, tokenComma, ","},
token{Position{1, 17}, tokenLeftBracket, "["}, {Position{1, 17}, tokenLeftBracket, "["},
token{Position{1, 18}, tokenInteger, "10"}, {Position{1, 18}, tokenInteger, "10"},
token{Position{1, 20}, tokenRightBracket, "]"}, {Position{1, 20}, tokenRightBracket, "]"},
token{Position{1, 22}, tokenRightBracket, "]"}, {Position{1, 22}, tokenRightBracket, "]"},
token{Position{1, 23}, tokenEOF, ""}, {Position{1, 23}, tokenEOF, ""},
}) })
} }
func TestArrayInts(t *testing.T) { func TestArrayInts(t *testing.T) {
testFlow(t, "a = [ 42, 21, 10, ]", []token{ testFlow(t, "a = [ 42, 21, 10, ]", []token{
token{Position{1, 1}, tokenKey, "a"}, {Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="}, {Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["}, {Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 7}, tokenInteger, "42"}, {Position{1, 7}, tokenInteger, "42"},
token{Position{1, 9}, tokenComma, ","}, {Position{1, 9}, tokenComma, ","},
token{Position{1, 11}, tokenInteger, "21"}, {Position{1, 11}, tokenInteger, "21"},
token{Position{1, 13}, tokenComma, ","}, {Position{1, 13}, tokenComma, ","},
token{Position{1, 15}, tokenInteger, "10"}, {Position{1, 15}, tokenInteger, "10"},
token{Position{1, 17}, tokenComma, ","}, {Position{1, 17}, tokenComma, ","},
token{Position{1, 19}, tokenRightBracket, "]"}, {Position{1, 19}, tokenRightBracket, "]"},
token{Position{1, 20}, tokenEOF, ""}, {Position{1, 20}, tokenEOF, ""},
}) })
} }
func TestMultilineArrayComments(t *testing.T) { func TestMultilineArrayComments(t *testing.T) {
testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{ testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
token{Position{1, 1}, tokenKey, "a"}, {Position{1, 1}, tokenKey, "a"},
token{Position{1, 3}, tokenEqual, "="}, {Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenLeftBracket, "["}, {Position{1, 5}, tokenLeftBracket, "["},
token{Position{1, 6}, tokenInteger, "1"}, {Position{1, 6}, tokenInteger, "1"},
token{Position{1, 7}, tokenComma, ","}, {Position{1, 7}, tokenComma, ","},
token{Position{2, 1}, tokenInteger, "2"}, {Position{2, 1}, tokenInteger, "2"},
token{Position{2, 2}, tokenComma, ","}, {Position{2, 2}, tokenComma, ","},
token{Position{3, 1}, tokenInteger, "3"}, {Position{3, 1}, tokenInteger, "3"},
token{Position{3, 2}, tokenComma, ","}, {Position{3, 2}, tokenComma, ","},
token{Position{4, 1}, tokenRightBracket, "]"}, {Position{4, 1}, tokenRightBracket, "]"},
token{Position{4, 2}, tokenEOF, ""}, {Position{4, 2}, tokenEOF, ""},
}) })
} }
func TestKeyEqualArrayBools(t *testing.T) { func TestKeyEqualArrayBools(t *testing.T) {
testFlow(t, "foo = [true, false, true]", []token{ testFlow(t, "foo = [true, false, true]", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenLeftBracket, "["}, {Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenTrue, "true"}, {Position{1, 8}, tokenTrue, "true"},
token{Position{1, 12}, tokenComma, ","}, {Position{1, 12}, tokenComma, ","},
token{Position{1, 14}, tokenFalse, "false"}, {Position{1, 14}, tokenFalse, "false"},
token{Position{1, 19}, tokenComma, ","}, {Position{1, 19}, tokenComma, ","},
token{Position{1, 21}, tokenTrue, "true"}, {Position{1, 21}, tokenTrue, "true"},
token{Position{1, 25}, tokenRightBracket, "]"}, {Position{1, 25}, tokenRightBracket, "]"},
token{Position{1, 26}, tokenEOF, ""}, {Position{1, 26}, tokenEOF, ""},
}) })
} }
func TestKeyEqualArrayBoolsWithComments(t *testing.T) { func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
testFlow(t, "foo = [true, false, true] # YEAH", []token{ testFlow(t, "foo = [true, false, true] # YEAH", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenLeftBracket, "["}, {Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenTrue, "true"}, {Position{1, 8}, tokenTrue, "true"},
token{Position{1, 12}, tokenComma, ","}, {Position{1, 12}, tokenComma, ","},
token{Position{1, 14}, tokenFalse, "false"}, {Position{1, 14}, tokenFalse, "false"},
token{Position{1, 19}, tokenComma, ","}, {Position{1, 19}, tokenComma, ","},
token{Position{1, 21}, tokenTrue, "true"}, {Position{1, 21}, tokenTrue, "true"},
token{Position{1, 25}, tokenRightBracket, "]"}, {Position{1, 25}, tokenRightBracket, "]"},
token{Position{1, 33}, tokenEOF, ""}, {Position{1, 33}, tokenEOF, ""},
}) })
} }
func TestDateRegexp(t *testing.T) { func TestDateRegexp(t *testing.T) {
if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" { if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" {
t.Fail() t.Error("basic lexing")
}
if dateRegexp.FindString("1979-05-27T00:32:00-07:00") == "" {
t.Error("offset lexing")
}
if dateRegexp.FindString("1979-05-27T00:32:00.999999-07:00") == "" {
t.Error("nano precision lexing")
} }
} }
func TestKeyEqualDate(t *testing.T) { func TestKeyEqualDate(t *testing.T) {
testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{ testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"}, {Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
token{Position{1, 27}, tokenEOF, ""}, {Position{1, 27}, tokenEOF, ""},
})
testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
{Position{1, 32}, tokenEOF, ""},
})
testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
{Position{1, 39}, tokenEOF, ""},
}) })
} }
func TestFloatEndingWithDot(t *testing.T) { func TestFloatEndingWithDot(t *testing.T) {
testFlow(t, "foo = 42.", []token{ testFlow(t, "foo = 42.", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenError, "float cannot end with a dot"}, {Position{1, 7}, tokenError, "float cannot end with a dot"},
}) })
} }
func TestFloatWithTwoDots(t *testing.T) { func TestFloatWithTwoDots(t *testing.T) {
testFlow(t, "foo = 4.2.", []token{ testFlow(t, "foo = 4.2.", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenError, "cannot have two dots in one float"}, {Position{1, 7}, tokenError, "cannot have two dots in one float"},
}) })
} }
func TestDoubleEqualKey(t *testing.T) { func TestFloatWithExponent1(t *testing.T) {
testFlow(t, "foo= = 2", []token{ testFlow(t, "a = 5e+22", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "a"},
token{Position{1, 4}, tokenEqual, "="}, {Position{1, 3}, tokenEqual, "="},
token{Position{1, 5}, tokenError, "cannot have multiple equals for the same key"}, {Position{1, 5}, tokenFloat, "5e+22"},
{Position{1, 10}, tokenEOF, ""},
})
}
func TestFloatWithExponent2(t *testing.T) {
testFlow(t, "a = 5E+22", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "5E+22"},
{Position{1, 10}, tokenEOF, ""},
})
}
func TestFloatWithExponent3(t *testing.T) {
testFlow(t, "a = -5e+22", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "-5e+22"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestFloatWithExponent4(t *testing.T) {
testFlow(t, "a = -5e-22", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "-5e-22"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestFloatWithExponent5(t *testing.T) {
testFlow(t, "a = 6.626e-34", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenFloat, "6.626e-34"},
{Position{1, 14}, tokenEOF, ""},
}) })
} }
func TestInvalidEsquapeSequence(t *testing.T) { func TestInvalidEsquapeSequence(t *testing.T) {
testFlow(t, `foo = "\x"`, []token{ testFlow(t, `foo = "\x"`, []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenError, "invalid escape sequence: \\x"}, {Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
}) })
} }
func TestNestedArrays(t *testing.T) { func TestNestedArrays(t *testing.T) {
testFlow(t, "foo = [[[]]]", []token{ testFlow(t, "foo = [[[]]]", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenLeftBracket, "["}, {Position{1, 7}, tokenLeftBracket, "["},
token{Position{1, 8}, tokenLeftBracket, "["}, {Position{1, 8}, tokenLeftBracket, "["},
token{Position{1, 9}, tokenLeftBracket, "["}, {Position{1, 9}, tokenLeftBracket, "["},
token{Position{1, 10}, tokenRightBracket, "]"}, {Position{1, 10}, tokenRightBracket, "]"},
token{Position{1, 11}, tokenRightBracket, "]"}, {Position{1, 11}, tokenRightBracket, "]"},
token{Position{1, 12}, tokenRightBracket, "]"}, {Position{1, 12}, tokenRightBracket, "]"},
token{Position{1, 13}, tokenEOF, ""}, {Position{1, 13}, tokenEOF, ""},
}) })
} }
func TestKeyEqualNumber(t *testing.T) { func TestKeyEqualNumber(t *testing.T) {
testFlow(t, "foo = 42", []token{ testFlow(t, "foo = 42", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "42"}, {Position{1, 7}, tokenInteger, "42"},
token{Position{1, 9}, tokenEOF, ""}, {Position{1, 9}, tokenEOF, ""},
}) })
testFlow(t, "foo = +42", []token{ testFlow(t, "foo = +42", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "+42"}, {Position{1, 7}, tokenInteger, "+42"},
token{Position{1, 10}, tokenEOF, ""}, {Position{1, 10}, tokenEOF, ""},
}) })
testFlow(t, "foo = -42", []token{ testFlow(t, "foo = -42", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "-42"}, {Position{1, 7}, tokenInteger, "-42"},
token{Position{1, 10}, tokenEOF, ""}, {Position{1, 10}, tokenEOF, ""},
}) })
testFlow(t, "foo = 4.2", []token{ testFlow(t, "foo = 4.2", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFloat, "4.2"}, {Position{1, 7}, tokenFloat, "4.2"},
token{Position{1, 10}, tokenEOF, ""}, {Position{1, 10}, tokenEOF, ""},
}) })
testFlow(t, "foo = +4.2", []token{ testFlow(t, "foo = +4.2", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFloat, "+4.2"}, {Position{1, 7}, tokenFloat, "+4.2"},
token{Position{1, 11}, tokenEOF, ""}, {Position{1, 11}, tokenEOF, ""},
}) })
testFlow(t, "foo = -4.2", []token{ testFlow(t, "foo = -4.2", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenFloat, "-4.2"}, {Position{1, 7}, tokenFloat, "-4.2"},
token{Position{1, 11}, tokenEOF, ""}, {Position{1, 11}, tokenEOF, ""},
})
testFlow(t, "foo = 1_000", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "1_000"},
{Position{1, 12}, tokenEOF, ""},
})
testFlow(t, "foo = 5_349_221", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "5_349_221"},
{Position{1, 16}, tokenEOF, ""},
})
testFlow(t, "foo = 1_2_3_4_5", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
{Position{1, 16}, tokenEOF, ""},
})
testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
{Position{1, 1}, tokenKey, "flt8"},
{Position{1, 6}, tokenEqual, "="},
{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
{Position{1, 33}, tokenEOF, ""},
})
testFlow(t, "foo = +", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 7}, tokenError, "no digit in that number"},
}) })
} }
func TestMultiline(t *testing.T) { func TestMultiline(t *testing.T) {
testFlow(t, "foo = 42\nbar=21", []token{ testFlow(t, "foo = 42\nbar=21", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 7}, tokenInteger, "42"}, {Position{1, 7}, tokenInteger, "42"},
token{Position{2, 1}, tokenKey, "bar"}, {Position{2, 1}, tokenKey, "bar"},
token{Position{2, 4}, tokenEqual, "="}, {Position{2, 4}, tokenEqual, "="},
token{Position{2, 5}, tokenInteger, "21"}, {Position{2, 5}, tokenInteger, "21"},
token{Position{2, 7}, tokenEOF, ""}, {Position{2, 7}, tokenEOF, ""},
}) })
} }
func TestKeyEqualStringUnicodeEscape(t *testing.T) { func TestKeyEqualStringUnicodeEscape(t *testing.T) {
testFlow(t, `foo = "hello \u2665"`, []token{ testFlow(t, `foo = "hello \u2665"`, []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello ♥"}, {Position{1, 8}, tokenString, "hello ♥"},
token{Position{1, 21}, tokenEOF, ""}, {Position{1, 21}, tokenEOF, ""},
})
testFlow(t, `foo = "hello \U000003B4"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "hello δ"},
{Position{1, 25}, tokenEOF, ""},
})
testFlow(t, `foo = "\u2"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unfinished unicode escape"},
})
testFlow(t, `foo = "\U2"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unfinished unicode escape"},
})
}
func TestKeyEqualStringNoEscape(t *testing.T) {
testFlow(t, "foo = \"hello \u0002\"", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unescaped control character U+0002"},
})
testFlow(t, "foo = \"hello \u001F\"", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unescaped control character U+001F"},
})
}
func TestLiteralString(t *testing.T) {
testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
{Position{1, 35}, tokenEOF, ""},
})
testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, `foo = '<\i\c*\s*>'`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, `<\i\c*\s*>`},
{Position{1, 19}, tokenEOF, ""},
})
testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenError, "unclosed string"},
})
}
func TestMultilineLiteralString(t *testing.T) {
testFlow(t, `foo = '''hello 'literal' world'''`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 10}, tokenString, `hello 'literal' world`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
{Position{4, 9}, tokenEOF, ""},
})
testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
{Position{4, 9}, tokenEOF, ""},
})
}
func TestMultilineString(t *testing.T) {
testFlow(t, `foo = """hello "literal" world"""`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 10}, tokenString, `hello "literal" world`},
{Position{1, 34}, tokenEOF, ""},
})
testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{2, 1}, tokenString, "hello\"literal\"world"},
{Position{4, 9}, tokenEOF, ""},
})
testFlow(t, "foo = \"\"\"\\\n \\\n \\\n hello\\\nmultiline\\\nworld\"\"\"", []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 10}, tokenString, "hellomultilineworld"},
{Position{6, 9}, tokenEOF, ""},
})
testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
{Position{6, 21}, tokenEOF, ""},
})
testFlow(t, "key2 = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"", []token{
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
{Position{5, 11}, tokenEOF, ""},
})
testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
{Position{1, 41}, tokenEOF, ""},
})
testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
{Position{1, 1}, tokenKey, "key2"},
{Position{1, 6}, tokenEqual, "="},
{Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
{Position{3, 20}, tokenEOF, ""},
}) })
} }
func TestUnicodeString(t *testing.T) { func TestUnicodeString(t *testing.T) {
testFlow(t, `foo = "hello ♥ world"`, []token{ testFlow(t, `foo = "hello ♥ world"`, []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenEqual, "="}, {Position{1, 5}, tokenEqual, "="},
token{Position{1, 8}, tokenString, "hello ♥ world"}, {Position{1, 8}, tokenString, "hello ♥ world"},
token{Position{1, 22}, tokenEOF, ""}, {Position{1, 22}, tokenEOF, ""},
})
}
func TestEscapeInString(t *testing.T) {
testFlow(t, `foo = "\b\f\/"`, []token{
{Position{1, 1}, tokenKey, "foo"},
{Position{1, 5}, tokenEqual, "="},
{Position{1, 8}, tokenString, "\b\f/"},
{Position{1, 15}, tokenEOF, ""},
}) })
} }
func TestKeyGroupArray(t *testing.T) { func TestKeyGroupArray(t *testing.T) {
testFlow(t, "[[foo]]", []token{ testFlow(t, "[[foo]]", []token{
token{Position{1, 1}, tokenDoubleLeftBracket, "[["}, {Position{1, 1}, tokenDoubleLeftBracket, "[["},
token{Position{1, 3}, tokenKeyGroupArray, "foo"}, {Position{1, 3}, tokenKeyGroupArray, "foo"},
token{Position{1, 6}, tokenDoubleRightBracket, "]]"}, {Position{1, 6}, tokenDoubleRightBracket, "]]"},
token{Position{1, 8}, tokenEOF, ""}, {Position{1, 8}, tokenEOF, ""},
})
}
func TestQuotedKey(t *testing.T) {
testFlow(t, "\"a b\" = 42", []token{
{Position{1, 1}, tokenKey, "\"a b\""},
{Position{1, 7}, tokenEqual, "="},
{Position{1, 9}, tokenInteger, "42"},
{Position{1, 11}, tokenEOF, ""},
})
}
func TestKeyNewline(t *testing.T) {
testFlow(t, "a\n= 4", []token{
{Position{1, 1}, tokenError, "keys cannot contain new lines"},
})
}
func TestInvalidFloat(t *testing.T) {
testFlow(t, "a=7e1_", []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 2}, tokenEqual, "="},
{Position{1, 3}, tokenFloat, "7e1_"},
{Position{1, 7}, tokenEOF, ""},
})
}
func TestLexUnknownRvalue(t *testing.T) {
testFlow(t, `a = !b`, []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenError, "no value can start with !"},
})
testFlow(t, `a = \b`, []token{
{Position{1, 1}, tokenKey, "a"},
{Position{1, 3}, tokenEqual, "="},
{Position{1, 5}, tokenError, `no value can start with \`},
}) })
} }
+9 -2
View File
@@ -67,7 +67,14 @@ func newMatchKeyFn(name string) *matchKeyFn {
} }
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) { func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
if tree, ok := node.(*TomlTree); ok { if array, ok := node.([]*TomlTree); ok {
for _, tree := range array {
item := tree.values[f.Name]
if item != nil {
f.next.call(item, ctx)
}
}
} else if tree, ok := node.(*TomlTree); ok {
item := tree.values[f.Name] item := tree.values[f.Name]
if item != nil { if item != nil {
f.next.call(item, ctx) f.next.call(item, ctx)
@@ -202,7 +209,7 @@ func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
fn, ok := (*ctx.filters)[f.Name] fn, ok := (*ctx.filters)[f.Name]
if !ok { if !ok {
panic(fmt.Sprintf("%s: query context does not have filter '%s'", panic(fmt.Sprintf("%s: query context does not have filter '%s'",
f.Pos, f.Name)) f.Pos.String(), f.Name))
} }
switch castNode := tomlValueCheck(node, ctx).(type) { switch castNode := tomlValueCheck(node, ctx).(type) {
case *TomlTree: case *TomlTree:
+3 -4
View File
@@ -2,7 +2,6 @@ package toml
import ( import (
"fmt" "fmt"
"math"
"testing" "testing"
) )
@@ -110,7 +109,7 @@ func TestPathSliceStart(t *testing.T) {
assertPath(t, assertPath(t,
"$[123:]", "$[123:]",
buildPath( buildPath(
newMatchSliceFn(123, math.MaxInt64, 1), newMatchSliceFn(123, maxInt, 1),
)) ))
} }
@@ -134,7 +133,7 @@ func TestPathSliceStartStep(t *testing.T) {
assertPath(t, assertPath(t,
"$[123::7]", "$[123::7]",
buildPath( buildPath(
newMatchSliceFn(123, math.MaxInt64, 7), newMatchSliceFn(123, maxInt, 7),
)) ))
} }
@@ -150,7 +149,7 @@ func TestPathSliceStep(t *testing.T) {
assertPath(t, assertPath(t,
"$[::7]", "$[::7]",
buildPath( buildPath(
newMatchSliceFn(0, math.MaxInt64, 7), newMatchSliceFn(0, maxInt, 7),
)) ))
} }
+117 -12
View File
@@ -5,6 +5,7 @@ package toml
import ( import (
"fmt" "fmt"
"reflect" "reflect"
"regexp"
"strconv" "strconv"
"strings" "strings"
"time" "time"
@@ -98,13 +99,16 @@ func (p *tomlParser) parseGroupArray() tomlParserStateFn {
} }
// get or create group array element at the indicated part in the path // get or create group array element at the indicated part in the path
keys := strings.Split(key.val, ".") keys, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "invalid group array key: %s", err)
}
p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
destTree := p.tree.GetPath(keys) destTree := p.tree.GetPath(keys)
var array []*TomlTree var array []*TomlTree
if destTree == nil { if destTree == nil {
array = make([]*TomlTree, 0) array = make([]*TomlTree, 0)
} else if destTree.([]*TomlTree) != nil { } else if target, ok := destTree.([]*TomlTree); ok && target != nil {
array = destTree.([]*TomlTree) array = destTree.([]*TomlTree)
} else { } else {
p.raiseError(key, "key %s is already assigned and not of type group array", key) p.raiseError(key, "key %s is already assigned and not of type group array", key)
@@ -153,7 +157,10 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
} }
p.seenGroupKeys = append(p.seenGroupKeys, key.val) p.seenGroupKeys = append(p.seenGroupKeys, key.val)
keys := strings.Split(key.val, ".") keys, err := parseKey(key.val)
if err != nil {
p.raiseError(key, "invalid group array key: %s", err)
}
if err := p.tree.createSubTree(keys, startToken.Position); err != nil { if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
p.raiseError(key, "%s", err) p.raiseError(key, "%s", err)
} }
@@ -165,6 +172,7 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
func (p *tomlParser) parseAssign() tomlParserStateFn { func (p *tomlParser) parseAssign() tomlParserStateFn {
key := p.getToken() key := p.getToken()
p.assume(tokenEqual) p.assume(tokenEqual)
value := p.parseRvalue() value := p.parseRvalue()
var groupKey []string var groupKey []string
if len(p.currentGroup) > 0 { if len(p.currentGroup) > 0 {
@@ -186,16 +194,42 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
} }
// assign value to the found group // assign value to the found group
localKey := []string{key.val} keyVals, err := parseKey(key.val)
finalKey := append(groupKey, key.val) if err != nil {
p.raiseError(key, "%s", err)
}
if len(keyVals) != 1 {
p.raiseError(key, "Invalid key")
}
keyVal := keyVals[0]
localKey := []string{keyVal}
finalKey := append(groupKey, keyVal)
if targetNode.GetPath(localKey) != nil { if targetNode.GetPath(localKey) != nil {
p.raiseError(key, "The following key was defined twice: %s", p.raiseError(key, "The following key was defined twice: %s",
strings.Join(finalKey, ".")) strings.Join(finalKey, "."))
} }
targetNode.values[key.val] = &tomlValue{value, key.Position} var toInsert interface{}
switch value.(type) {
case *TomlTree, []*TomlTree:
toInsert = value
default:
toInsert = &tomlValue{value, key.Position}
}
targetNode.values[keyVal] = toInsert
return p.parseStart return p.parseStart
} }
var numberUnderscoreInvalidRegexp *regexp.Regexp
func cleanupNumberToken(value string) (string, error) {
if numberUnderscoreInvalidRegexp.MatchString(value) {
return "", fmt.Errorf("invalid use of _ in number")
}
cleanedVal := strings.Replace(value, "_", "", -1)
return cleanedVal, nil
}
func (p *tomlParser) parseRvalue() interface{} { func (p *tomlParser) parseRvalue() interface{} {
tok := p.getToken() tok := p.getToken()
if tok == nil || tok.typ == tokenEOF { if tok == nil || tok.typ == tokenEOF {
@@ -210,25 +244,37 @@ func (p *tomlParser) parseRvalue() interface{} {
case tokenFalse: case tokenFalse:
return false return false
case tokenInteger: case tokenInteger:
val, err := strconv.ParseInt(tok.val, 10, 64) cleanedVal, err := cleanupNumberToken(tok.val)
if err != nil {
p.raiseError(tok, "%s", err)
}
val, err := strconv.ParseInt(cleanedVal, 10, 64)
if err != nil { if err != nil {
p.raiseError(tok, "%s", err) p.raiseError(tok, "%s", err)
} }
return val return val
case tokenFloat: case tokenFloat:
val, err := strconv.ParseFloat(tok.val, 64) cleanedVal, err := cleanupNumberToken(tok.val)
if err != nil {
p.raiseError(tok, "%s", err)
}
val, err := strconv.ParseFloat(cleanedVal, 64)
if err != nil { if err != nil {
p.raiseError(tok, "%s", err) p.raiseError(tok, "%s", err)
} }
return val return val
case tokenDate: case tokenDate:
val, err := time.Parse(time.RFC3339, tok.val) val, err := time.ParseInLocation(time.RFC3339Nano, tok.val, time.UTC)
if err != nil { if err != nil {
p.raiseError(tok, "%s", err) p.raiseError(tok, "%s", err)
} }
return val return val
case tokenLeftBracket: case tokenLeftBracket:
return p.parseArray() return p.parseArray()
case tokenLeftCurlyBrace:
return p.parseInlineTable()
case tokenEqual:
p.raiseError(tok, "cannot have multiple equals for the same key")
case tokenError: case tokenError:
p.raiseError(tok, "%s", tok) p.raiseError(tok, "%s", tok)
} }
@@ -238,7 +284,51 @@ func (p *tomlParser) parseRvalue() interface{} {
return nil return nil
} }
func (p *tomlParser) parseArray() []interface{} { func tokenIsComma(t *token) bool {
return t != nil && t.typ == tokenComma
}
func (p *tomlParser) parseInlineTable() *TomlTree {
tree := newTomlTree()
var previous *token
Loop:
for {
follow := p.peek()
if follow == nil || follow.typ == tokenEOF {
p.raiseError(follow, "unterminated inline table")
}
switch follow.typ {
case tokenRightCurlyBrace:
p.getToken()
break Loop
case tokenKey:
if !tokenIsComma(previous) && previous != nil {
p.raiseError(follow, "comma expected between fields in inline table")
}
key := p.getToken()
p.assume(tokenEqual)
value := p.parseRvalue()
tree.Set(key.val, value)
case tokenComma:
if previous == nil {
p.raiseError(follow, "inline table cannot start with a comma")
}
if tokenIsComma(previous) {
p.raiseError(follow, "need field between two commas in inline table")
}
p.getToken()
default:
p.raiseError(follow, "unexpected token type in inline table: %s", follow.typ.String())
}
previous = follow
}
if tokenIsComma(previous) {
p.raiseError(previous, "trailing comma at the end of inline table")
}
return tree
}
func (p *tomlParser) parseArray() interface{} {
var array []interface{} var array []interface{}
arrayType := reflect.TypeOf(nil) arrayType := reflect.TypeOf(nil)
for { for {
@@ -248,7 +338,7 @@ func (p *tomlParser) parseArray() []interface{} {
} }
if follow.typ == tokenRightBracket { if follow.typ == tokenRightBracket {
p.getToken() p.getToken()
return array break
} }
val := p.parseRvalue() val := p.parseRvalue()
if arrayType == nil { if arrayType == nil {
@@ -259,7 +349,7 @@ func (p *tomlParser) parseArray() []interface{} {
} }
array = append(array, val) array = append(array, val)
follow = p.peek() follow = p.peek()
if follow == nil { if follow == nil || follow.typ == tokenEOF {
p.raiseError(follow, "unterminated array") p.raiseError(follow, "unterminated array")
} }
if follow.typ != tokenRightBracket && follow.typ != tokenComma { if follow.typ != tokenRightBracket && follow.typ != tokenComma {
@@ -269,6 +359,17 @@ func (p *tomlParser) parseArray() []interface{} {
p.getToken() p.getToken()
} }
} }
// An array of TomlTrees is actually an array of inline
// tables, which is a shorthand for a table array. If the
// array was not converted from []interface{} to []*TomlTree,
// the two notations would not be equivalent.
if arrayType == reflect.TypeOf(newTomlTree()) {
tomlArray := make([]*TomlTree, len(array))
for i, v := range array {
tomlArray[i] = v.(*TomlTree)
}
return tomlArray
}
return array return array
} }
@@ -285,3 +386,7 @@ func parseToml(flow chan token) *TomlTree {
parser.run() parser.run()
return result return result
} }
func init() {
numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d]|_$|^_)`)
}
+394 -31
View File
@@ -2,26 +2,34 @@ package toml
import ( import (
"fmt" "fmt"
"reflect"
"testing" "testing"
"time" "time"
"github.com/davecgh/go-spew/spew"
) )
func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interface{}) { func assertSubTree(t *testing.T, path []string, tree *TomlTree, err error, ref map[string]interface{}) {
if err != nil { if err != nil {
t.Error("Non-nil error:", err.Error()) t.Error("Non-nil error:", err.Error())
return return
} }
for k, v := range ref { for k, v := range ref {
nextPath := append(path, k)
t.Log("asserting path", nextPath)
// NOTE: directly access key instead of resolve by path // NOTE: directly access key instead of resolve by path
// NOTE: see TestSpecialKV // NOTE: see TestSpecialKV
switch node := tree.GetPath([]string{k}).(type) { switch node := tree.GetPath([]string{k}).(type) {
case []*TomlTree: case []*TomlTree:
t.Log("\tcomparing key", nextPath, "by array iteration")
for idx, item := range node { for idx, item := range node {
assertTree(t, item, err, v.([]map[string]interface{})[idx]) assertSubTree(t, nextPath, item, err, v.([]map[string]interface{})[idx])
} }
case *TomlTree: case *TomlTree:
assertTree(t, node, err, v.(map[string]interface{})) t.Log("\tcomparing key", nextPath, "by subtree assestion")
assertSubTree(t, nextPath, node, err, v.(map[string]interface{}))
default: default:
t.Log("\tcomparing key", nextPath, "by string representation because it's of type", reflect.TypeOf(node))
if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) { if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) {
t.Errorf("was expecting %v at %v but got %v", v, k, node) t.Errorf("was expecting %v at %v but got %v", v, k, node)
} }
@@ -29,6 +37,12 @@ func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interfac
} }
} }
func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interface{}) {
t.Log("Asserting tree:\n", spew.Sdump(tree))
assertSubTree(t, []string{}, tree, err, ref)
t.Log("Finished tree assertion.")
}
func TestCreateSubTree(t *testing.T) { func TestCreateSubTree(t *testing.T) {
tree := newTomlTree() tree := newTomlTree()
tree.createSubTree([]string{"a", "b", "c"}, Position{}) tree.createSubTree([]string{"a", "b", "c"}, Position{})
@@ -51,12 +65,10 @@ func TestSimpleKV(t *testing.T) {
}) })
} }
// NOTE: from the BurntSushi test suite func TestNumberInKey(t *testing.T) {
// NOTE: this test is pure evil due to the embedded '.' tree, err := Load("hello2 = 42")
func TestSpecialKV(t *testing.T) {
tree, err := Load("~!@#$^&*()_+-`1234567890[]\\|/?><.,;: = 1")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:": int64(1), "hello2": int64(42),
}) })
} }
@@ -70,6 +82,44 @@ func TestSimpleNumbers(t *testing.T) {
}) })
} }
func TestNumbersWithUnderscores(t *testing.T) {
tree, err := Load("a = 1_000")
assertTree(t, tree, err, map[string]interface{}{
"a": int64(1000),
})
tree, err = Load("a = 5_349_221")
assertTree(t, tree, err, map[string]interface{}{
"a": int64(5349221),
})
tree, err = Load("a = 1_2_3_4_5")
assertTree(t, tree, err, map[string]interface{}{
"a": int64(12345),
})
tree, err = Load("flt8 = 9_224_617.445_991_228_313")
assertTree(t, tree, err, map[string]interface{}{
"flt8": float64(9224617.445991228313),
})
tree, err = Load("flt9 = 1e1_00")
assertTree(t, tree, err, map[string]interface{}{
"flt9": float64(1e100),
})
}
func TestFloatsWithExponents(t *testing.T) {
tree, err := Load("a = 5e+22\nb = 5E+22\nc = -5e+22\nd = -5e-22\ne = 6.626e-34")
assertTree(t, tree, err, map[string]interface{}{
"a": float64(5e+22),
"b": float64(5E+22),
"c": float64(-5e+22),
"d": float64(-5e-22),
"e": float64(6.626e-34),
})
}
func TestSimpleDate(t *testing.T) { func TestSimpleDate(t *testing.T) {
tree, err := Load("a = 1979-05-27T07:32:00Z") tree, err := Load("a = 1979-05-27T07:32:00Z")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -77,6 +127,20 @@ func TestSimpleDate(t *testing.T) {
}) })
} }
func TestDateOffset(t *testing.T) {
tree, err := Load("a = 1979-05-27T00:32:00-07:00")
assertTree(t, tree, err, map[string]interface{}{
"a": time.Date(1979, time.May, 27, 0, 32, 0, 0, time.FixedZone("", -7*60*60)),
})
}
func TestDateNano(t *testing.T) {
tree, err := Load("a = 1979-05-27T00:32:00.999999999-07:00")
assertTree(t, tree, err, map[string]interface{}{
"a": time.Date(1979, time.May, 27, 0, 32, 0, 999999999, time.FixedZone("", -7*60*60)),
})
}
func TestSimpleString(t *testing.T) { func TestSimpleString(t *testing.T) {
tree, err := Load("a = \"hello world\"") tree, err := Load("a = \"hello world\"")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -84,6 +148,13 @@ func TestSimpleString(t *testing.T) {
}) })
} }
func TestSpaceKey(t *testing.T) {
tree, err := Load("\"a b\" = \"hello world\"")
assertTree(t, tree, err, map[string]interface{}{
"a b": "hello world",
})
}
func TestStringEscapables(t *testing.T) { func TestStringEscapables(t *testing.T) {
tree, err := Load("a = \"a \\n b\"") tree, err := Load("a = \"a \\n b\"")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -106,6 +177,16 @@ func TestStringEscapables(t *testing.T) {
}) })
} }
func TestEmptyQuotedString(t *testing.T) {
tree, err := Load(`[""]
"" = 1`)
assertTree(t, tree, err, map[string]interface{}{
"": map[string]interface{}{
"": int64(1),
},
})
}
func TestBools(t *testing.T) { func TestBools(t *testing.T) {
tree, err := Load("a = true\nb = false") tree, err := Load("a = true\nb = false")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -127,6 +208,41 @@ func TestNestedKeys(t *testing.T) {
}) })
} }
func TestNestedQuotedUnicodeKeys(t *testing.T) {
tree, err := Load("[ j . \"ʞ\" . l ]\nd = 42")
assertTree(t, tree, err, map[string]interface{}{
"j": map[string]interface{}{
"ʞ": map[string]interface{}{
"l": map[string]interface{}{
"d": int64(42),
},
},
},
})
tree, err = Load("[ g . h . i ]\nd = 42")
assertTree(t, tree, err, map[string]interface{}{
"g": map[string]interface{}{
"h": map[string]interface{}{
"i": map[string]interface{}{
"d": int64(42),
},
},
},
})
tree, err = Load("[ d.e.f ]\nk = 42")
assertTree(t, tree, err, map[string]interface{}{
"d": map[string]interface{}{
"e": map[string]interface{}{
"f": map[string]interface{}{
"k": int64(42),
},
},
},
})
}
func TestArrayOne(t *testing.T) { func TestArrayOne(t *testing.T) {
tree, err := Load("a = [1]") tree, err := Load("a = [1]")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -163,14 +279,14 @@ func TestArrayMultiline(t *testing.T) {
func TestArrayNested(t *testing.T) { func TestArrayNested(t *testing.T) {
tree, err := Load("a = [[42, 21], [10]]") tree, err := Load("a = [[42, 21], [10]]")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
"a": [][]int64{[]int64{int64(42), int64(21)}, []int64{int64(10)}}, "a": [][]int64{{int64(42), int64(21)}, {int64(10)}},
}) })
} }
func TestNestedEmptyArrays(t *testing.T) { func TestNestedEmptyArrays(t *testing.T) {
tree, err := Load("a = [[[]]]") tree, err := Load("a = [[[]]]")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
"a": [][][]interface{}{[][]interface{}{[]interface{}{}}}, "a": [][][]interface{}{{{}}},
}) })
} }
@@ -189,13 +305,25 @@ func TestArrayMixedTypes(t *testing.T) {
func TestArrayNestedStrings(t *testing.T) { func TestArrayNestedStrings(t *testing.T) {
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]") tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
"data": [][]string{[]string{"gamma", "delta"}, []string{"Foo"}}, "data": [][]string{{"gamma", "delta"}, {"Foo"}},
}) })
} }
func TestParseUnknownRvalue(t *testing.T) {
_, err := Load("a = !bssss")
if err == nil {
t.Error("Expecting a parse error")
}
_, err = Load("a = /b")
if err == nil {
t.Error("Expecting a parse error")
}
}
func TestMissingValue(t *testing.T) { func TestMissingValue(t *testing.T) {
_, err := Load("a = ") _, err := Load("a = ")
if err.Error() != "(1, 4): expecting a value" { if err.Error() != "(1, 5): expecting a value" {
t.Error("Bad error message:", err.Error()) t.Error("Bad error message:", err.Error())
} }
} }
@@ -205,6 +333,16 @@ func TestUnterminatedArray(t *testing.T) {
if err.Error() != "(1, 8): unterminated array" { if err.Error() != "(1, 8): unterminated array" {
t.Error("Bad error message:", err.Error()) t.Error("Bad error message:", err.Error())
} }
_, err = Load("a = [1")
if err.Error() != "(1, 7): unterminated array" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("a = [1 2")
if err.Error() != "(1, 8): missing comma" {
t.Error("Bad error message:", err.Error())
}
} }
func TestNewlinesInArrays(t *testing.T) { func TestNewlinesInArrays(t *testing.T) {
@@ -228,6 +366,80 @@ func TestArrayWithExtraCommaComment(t *testing.T) {
}) })
} }
func TestSimpleInlineGroup(t *testing.T) {
tree, err := Load("key = {a = 42}")
assertTree(t, tree, err, map[string]interface{}{
"key": map[string]interface{}{
"a": int64(42),
},
})
}
func TestDoubleInlineGroup(t *testing.T) {
tree, err := Load("key = {a = 42, b = \"foo\"}")
assertTree(t, tree, err, map[string]interface{}{
"key": map[string]interface{}{
"a": int64(42),
"b": "foo",
},
})
}
func TestExampleInlineGroup(t *testing.T) {
tree, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
point = { x = 1, y = 2 }`)
assertTree(t, tree, err, map[string]interface{}{
"name": map[string]interface{}{
"first": "Tom",
"last": "Preston-Werner",
},
"point": map[string]interface{}{
"x": int64(1),
"y": int64(2),
},
})
}
func TestExampleInlineGroupInArray(t *testing.T) {
tree, err := Load(`points = [{ x = 1, y = 2 }]`)
assertTree(t, tree, err, map[string]interface{}{
"points": []map[string]interface{}{
{
"x": int64(1),
"y": int64(2),
},
},
})
}
func TestInlineTableUnterminated(t *testing.T) {
_, err := Load("foo = {")
if err.Error() != "(1, 8): unterminated inline table" {
t.Error("Bad error message:", err.Error())
}
}
func TestInlineTableCommaExpected(t *testing.T) {
_, err := Load("foo = {hello = 53 test = foo}")
if err.Error() != "(1, 19): comma expected between fields in inline table" {
t.Error("Bad error message:", err.Error())
}
}
func TestInlineTableCommaStart(t *testing.T) {
_, err := Load("foo = {, hello = 53}")
if err.Error() != "(1, 8): inline table cannot start with a comma" {
t.Error("Bad error message:", err.Error())
}
}
func TestInlineTableDoubleComma(t *testing.T) {
_, err := Load("foo = {hello = 53,, foo = 17}")
if err.Error() != "(1, 19): need field between two commas in inline table" {
t.Error("Bad error message:", err.Error())
}
}
func TestDuplicateGroups(t *testing.T) { func TestDuplicateGroups(t *testing.T) {
_, err := Load("[foo]\na=2\n[foo]b=3") _, err := Load("[foo]\na=2\n[foo]b=3")
if err.Error() != "(3, 2): duplicated tables" { if err.Error() != "(3, 2): duplicated tables" {
@@ -244,7 +456,7 @@ func TestDuplicateKeys(t *testing.T) {
func TestEmptyIntermediateTable(t *testing.T) { func TestEmptyIntermediateTable(t *testing.T) {
_, err := Load("[foo..bar]") _, err := Load("[foo..bar]")
if err.Error() != "(1, 2): empty intermediate table" { if err.Error() != "(1, 2): invalid group array key: empty key group" {
t.Error("Bad error message:", err.Error()) t.Error("Bad error message:", err.Error())
} }
} }
@@ -265,7 +477,7 @@ func TestImplicitDeclarationBefore(t *testing.T) {
func TestFloatsWithoutLeadingZeros(t *testing.T) { func TestFloatsWithoutLeadingZeros(t *testing.T) {
_, err := Load("a = .42") _, err := Load("a = .42")
if err.Error() != "(1, 4): cannot start float with a dot" { if err.Error() != "(1, 5): cannot start float with a dot" {
t.Error("Bad error message:", err.Error()) t.Error("Bad error message:", err.Error())
} }
@@ -318,6 +530,42 @@ func TestParseFile(t *testing.T) {
}) })
} }
func TestParseFileCRLF(t *testing.T) {
tree, err := LoadFile("example-crlf.toml")
assertTree(t, tree, err, map[string]interface{}{
"title": "TOML Example",
"owner": map[string]interface{}{
"name": "Tom Preston-Werner",
"organization": "GitHub",
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
"dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
},
"database": map[string]interface{}{
"server": "192.168.1.1",
"ports": []int64{8001, 8001, 8002},
"connection_max": 5000,
"enabled": true,
},
"servers": map[string]interface{}{
"alpha": map[string]interface{}{
"ip": "10.0.0.1",
"dc": "eqdc10",
},
"beta": map[string]interface{}{
"ip": "10.0.0.2",
"dc": "eqdc10",
},
},
"clients": map[string]interface{}{
"data": []interface{}{
[]string{"gamma", "delta"},
[]int64{1, 2},
},
},
})
}
func TestParseKeyGroupArray(t *testing.T) { func TestParseKeyGroupArray(t *testing.T) {
tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69") tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -330,6 +578,40 @@ func TestParseKeyGroupArray(t *testing.T) {
}) })
} }
func TestParseKeyGroupArrayUnfinished(t *testing.T) {
_, err := Load("[[foo.bar]\na = 42")
if err.Error() != "(1, 10): was expecting token [[, but got unclosed key group array instead" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("[[foo.[bar]\na = 42")
if err.Error() != "(1, 3): unexpected token group name cannot contain ']', was expecting a key group array" {
t.Error("Bad error message:", err.Error())
}
}
func TestParseKeyGroupArrayQueryExample(t *testing.T) {
tree, err := Load(`
[[book]]
title = "The Stand"
author = "Stephen King"
[[book]]
title = "For Whom the Bell Tolls"
author = "Ernest Hemmingway"
[[book]]
title = "Neuromancer"
author = "William Gibson"
`)
assertTree(t, tree, err, map[string]interface{}{
"book": []map[string]interface{}{
{"title": "The Stand", "author": "Stephen King"},
{"title": "For Whom the Bell Tolls", "author": "Ernest Hemmingway"},
{"title": "Neuromancer", "author": "William Gibson"},
},
})
}
func TestParseKeyGroupArraySpec(t *testing.T) { func TestParseKeyGroupArraySpec(t *testing.T) {
tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"") tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
assertTree(t, tree, err, map[string]interface{}{ assertTree(t, tree, err, map[string]interface{}{
@@ -345,7 +627,17 @@ func TestToTomlValue(t *testing.T) {
Value interface{} Value interface{}
Expect string Expect string
}{ }{
{int(1), "1"},
{int8(2), "2"},
{int16(3), "3"},
{int32(4), "4"},
{int64(12345), "12345"}, {int64(12345), "12345"},
{uint(10), "10"},
{uint8(20), "20"},
{uint16(30), "30"},
{uint32(40), "40"},
{uint64(50), "50"},
{float32(12.456), "12.456"},
{float64(123.45), "123.45"}, {float64(123.45), "123.45"},
{bool(true), "true"}, {bool(true), "true"},
{"hello world", "\"hello world\""}, {"hello world", "\"hello world\""},
@@ -355,6 +647,7 @@ func TestToTomlValue(t *testing.T) {
"1979-05-27T07:32:00Z"}, "1979-05-27T07:32:00Z"},
{[]interface{}{"gamma", "delta"}, {[]interface{}{"gamma", "delta"},
"[\n \"gamma\",\n \"delta\",\n]"}, "[\n \"gamma\",\n \"delta\",\n]"},
{nil, ""},
} { } {
result := toTomlValue(item.Value, 0) result := toTomlValue(item.Value, 0)
if result != item.Expect { if result != item.Expect {
@@ -376,6 +669,28 @@ func TestToString(t *testing.T) {
} }
} }
func TestToStringMapStringString(t *testing.T) {
in := map[string]interface{}{"m": map[string]string{"v": "abc"}}
want := "\n[m]\n v = \"abc\"\n"
tree := TreeFromMap(in)
got := tree.String()
if got != want {
t.Errorf("want:\n%q\ngot:\n%q", want, got)
}
}
func TestToStringMapInterfaceInterface(t *testing.T) {
in := map[string]interface{}{"m": map[interface{}]interface{}{"v": "abc"}}
want := "\n[m]\n v = \"abc\"\n"
tree := TreeFromMap(in)
got := tree.String()
if got != want {
t.Errorf("want:\n%q\ngot:\n%q", want, got)
}
}
func assertPosition(t *testing.T, text string, ref map[string]Position) { func assertPosition(t *testing.T, text string, ref map[string]Position) {
tree, err := Load(text) tree, err := Load(text)
if err != nil { if err != nil {
@@ -396,10 +711,10 @@ func TestDocumentPositions(t *testing.T) {
assertPosition(t, assertPosition(t,
"[foo]\nbar=42\nbaz=69", "[foo]\nbar=42\nbaz=69",
map[string]Position{ map[string]Position{
"": Position{1, 1}, "": {1, 1},
"foo": Position{1, 1}, "foo": {1, 1},
"foo.bar": Position{2, 1}, "foo.bar": {2, 1},
"foo.baz": Position{3, 1}, "foo.baz": {3, 1},
}) })
} }
@@ -407,10 +722,10 @@ func TestDocumentPositionsWithSpaces(t *testing.T) {
assertPosition(t, assertPosition(t,
" [foo]\n bar=42\n baz=69", " [foo]\n bar=42\n baz=69",
map[string]Position{ map[string]Position{
"": Position{1, 1}, "": {1, 1},
"foo": Position{1, 3}, "foo": {1, 3},
"foo.bar": Position{2, 3}, "foo.bar": {2, 3},
"foo.baz": Position{3, 3}, "foo.baz": {3, 3},
}) })
} }
@@ -418,10 +733,10 @@ func TestDocumentPositionsWithGroupArray(t *testing.T) {
assertPosition(t, assertPosition(t,
"[[foo]]\nbar=42\nbaz=69", "[[foo]]\nbar=42\nbaz=69",
map[string]Position{ map[string]Position{
"": Position{1, 1}, "": {1, 1},
"foo": Position{1, 1}, "foo": {1, 1},
"foo.bar": Position{2, 1}, "foo.bar": {2, 1},
"foo.baz": Position{3, 1}, "foo.baz": {3, 1},
}) })
} }
@@ -429,10 +744,58 @@ func TestNestedTreePosition(t *testing.T) {
assertPosition(t, assertPosition(t,
"[foo.bar]\na=42\nb=69", "[foo.bar]\na=42\nb=69",
map[string]Position{ map[string]Position{
"": Position{1, 1}, "": {1, 1},
"foo": Position{1, 1}, "foo": {1, 1},
"foo.bar": Position{1, 1}, "foo.bar": {1, 1},
"foo.bar.a": Position{2, 1}, "foo.bar.a": {2, 1},
"foo.bar.b": Position{3, 1}, "foo.bar.b": {3, 1},
}) })
} }
func TestInvalidGroupArray(t *testing.T) {
_, err := Load("[key#group]\nanswer = 42")
if err == nil {
t.Error("Should error")
}
_, err = Load("[foo.[bar]\na = 42")
if err.Error() != "(1, 2): unexpected token group name cannot contain ']', was expecting a key group" {
t.Error("Bad error message:", err.Error())
}
}
func TestDoubleEqual(t *testing.T) {
_, err := Load("foo= = 2")
if err.Error() != "(1, 6): cannot have multiple equals for the same key" {
t.Error("Bad error message:", err.Error())
}
}
func TestGroupArrayReassign(t *testing.T) {
_, err := Load("[hello]\n[[hello]]")
if err.Error() != "(2, 3): key \"hello\" is already assigned and not of type group array" {
t.Error("Bad error message:", err.Error())
}
}
func TestInvalidFloatParsing(t *testing.T) {
_, err := Load("a=1e_2")
if err.Error() != "(1, 3): invalid use of _ in number" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("a=1e2_")
if err.Error() != "(1, 3): invalid use of _ in number" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("a=1__2")
if err.Error() != "(1, 3): invalid use of _ in number" {
t.Error("Bad error message:", err.Error())
}
_, err = Load("a=_1_2")
if err.Error() != "(1, 3): cannot start number with underscore" {
t.Error("Bad error message:", err.Error())
}
}
+8 -10
View File
@@ -6,13 +6,11 @@ import (
"fmt" "fmt"
) )
/* // Position of a document element within a TOML document.
Position of a document element within a TOML document. //
// Line and Col are both 1-indexed positions for the element's line number and
Line and Col are both 1-indexed positions for the element's line number and // column number, respectively. Values of zero or less will cause Invalid(),
column number, respectively. Values of zero or less will cause Invalid(), // to return true.
to return true.
*/
type Position struct { type Position struct {
Line int // line within the document Line int // line within the document
Col int // column within the line Col int // column within the line
@@ -20,12 +18,12 @@ type Position struct {
// String representation of the position. // String representation of the position.
// Displays 1-indexed line and column numbers. // Displays 1-indexed line and column numbers.
func (p *Position) String() string { func (p Position) String() string {
return fmt.Sprintf("(%d, %d)", p.Line, p.Col) return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
} }
// Returns whether or not the position is valid (i.e. with negative or // Invalid returns whether or not the position is valid (i.e. with negative or
// null values) // null values)
func (p *Position) Invalid() bool { func (p Position) Invalid() bool {
return p.Line <= 0 || p.Col <= 0 return p.Line <= 0 || p.Col <= 0
} }
+3 -3
View File
@@ -18,9 +18,9 @@ func TestPositionString(t *testing.T) {
func TestInvalid(t *testing.T) { func TestInvalid(t *testing.T) {
for i, v := range []Position{ for i, v := range []Position{
Position{0, 1234}, {0, 1234},
Position{1234, 0}, {1234, 0},
Position{0, 0}, {0, 0},
} { } {
if !v.Invalid() { if !v.Invalid() {
t.Errorf("Position at %v is valid: %v", i, v) t.Errorf("Position at %v is valid: %v", i, v)
+31 -20
View File
@@ -4,37 +4,48 @@ import (
"time" "time"
) )
// Type of a user-defined filter function, for use with Query.SetFilter(). // NodeFilterFn represents a user-defined filter function, for use with
// Query.SetFilter().
// //
// The return value of the function must indicate if 'node' is to be included // The return value of the function must indicate if 'node' is to be included
// at this stage of the TOML path. Returning true will include the node, and // at this stage of the TOML path. Returning true will include the node, and
// returning false will exclude it. // returning false will exclude it.
// //
// NOTE: Care should be taken to write script callbacks such that they are safe // NOTE: Care should be taken to write script callbacks such that they are safe
// to use from multiple goroutines. // to use from multiple goroutines.
type NodeFilterFn func(node interface{}) bool type NodeFilterFn func(node interface{}) bool
// The result of Executing a Query // QueryResult is the result of Executing a Query.
type QueryResult struct { type QueryResult struct {
items []interface{} items []interface{}
positions []Position positions []Position
} }
// appends a value/position pair to the result set // appends a value/position pair to the result set.
func (r *QueryResult) appendResult(node interface{}, pos Position) { func (r *QueryResult) appendResult(node interface{}, pos Position) {
r.items = append(r.items, node) r.items = append(r.items, node)
r.positions = append(r.positions, pos) r.positions = append(r.positions, pos)
} }
// Set of values within a QueryResult. The order of values is not guaranteed // Values is a set of values within a QueryResult. The order of values is not
// to be in document order, and may be different each time a query is executed. // guaranteed to be in document order, and may be different each time a query is
func (r *QueryResult) Values() []interface{} { // executed.
return r.items func (r QueryResult) Values() []interface{} {
values := make([]interface{}, len(r.items))
for i, v := range r.items {
o, ok := v.(*tomlValue)
if ok {
values[i] = o.value
} else {
values[i] = v
}
}
return values
} }
// Set of positions for values within a QueryResult. Each index in Positions() // Positions is a set of positions for values within a QueryResult. Each index
// corresponds to the entry in Value() of the same index. // in Positions() corresponds to the entry in Value() of the same index.
func (r *QueryResult) Positions() []Position { func (r QueryResult) Positions() []Position {
return r.positions return r.positions
} }
@@ -77,13 +88,13 @@ func (q *Query) appendPath(next pathFn) {
next.setNext(newTerminatingFn()) // init the next functor next.setNext(newTerminatingFn()) // init the next functor
} }
// Compiles a TOML path expression. The returned Query can be used to match // CompileQuery compiles a TOML path expression. The returned Query can be used
// elements within a TomlTree and its descendants. // to match elements within a TomlTree and its descendants.
func CompileQuery(path string) (*Query, error) { func CompileQuery(path string) (*Query, error) {
return parseQuery(lexQuery(path)) return parseQuery(lexQuery(path))
} }
// Executes a query against a TomlTree, and returns the result of the query. // Execute executes a query against a TomlTree, and returns the result of the query.
func (q *Query) Execute(tree *TomlTree) *QueryResult { func (q *Query) Execute(tree *TomlTree) *QueryResult {
result := &QueryResult{ result := &QueryResult{
items: []interface{}{}, items: []interface{}{},
@@ -101,8 +112,8 @@ func (q *Query) Execute(tree *TomlTree) *QueryResult {
return result return result
} }
// Sets a user-defined filter function. These may be used inside "?(..)" query // SetFilter sets a user-defined filter function. These may be used inside
// expressions to filter TOML document elements within a query. // "?(..)" query expressions to filter TOML document elements within a query.
func (q *Query) SetFilter(name string, fn NodeFilterFn) { func (q *Query) SetFilter(name string, fn NodeFilterFn) {
if q.filters == &defaultFilterFunctions { if q.filters == &defaultFilterFunctions {
// clone the static table // clone the static table
+70
View File
@@ -0,0 +1,70 @@
package toml
import (
"testing"
)
func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
if len(array) != len(objects) {
t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
}
for _, o := range objects {
found := false
for _, a := range array {
if a == o {
found = true
break
}
}
if !found {
t.Fatal(o, "not found in array", array)
}
}
}
func TestQueryExample(t *testing.T) {
config, _ := Load(`
[[book]]
title = "The Stand"
author = "Stephen King"
[[book]]
title = "For Whom the Bell Tolls"
author = "Ernest Hemmingway"
[[book]]
title = "Neuromancer"
author = "William Gibson"
`)
authors, _ := config.Query("$.book.author")
names := authors.Values()
if len(names) != 3 {
t.Fatalf("query should return 3 names but returned %d", len(names))
}
assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
}
func TestQueryReadmeExample(t *testing.T) {
config, _ := Load(`
[postgres]
user = "pelletier"
password = "mypassword"
`)
results, _ := config.Query("$..[user,password]")
values := results.Values()
if len(values) != 2 {
t.Fatalf("query should return 2 values but returned %d", len(values))
}
assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
}
func TestQueryPathNotPresent(t *testing.T) {
config, _ := Load(`a = "hello"`)
results, err := config.Query("$.foo.bar")
if err != nil {
t.Fatalf("err should be nil. got %s instead", err)
}
if len(results.items) != 0 {
t.Fatalf("no items should be matched. %d matched instead", len(results.items))
}
}
+18 -1
View File
@@ -105,7 +105,7 @@ func (l *queryLexer) peek() rune {
} }
func (l *queryLexer) accept(valid string) bool { func (l *queryLexer) accept(valid string) bool {
if strings.IndexRune(valid, l.next()) >= 0 { if strings.ContainsRune(valid, l.next()) {
return true return true
} }
l.backup() l.backup()
@@ -272,6 +272,23 @@ func (l *queryLexer) lexString() queryLexStateFn {
return l.errorf("invalid unicode escape: \\u" + code) return l.errorf("invalid unicode escape: \\u" + code)
} }
growingString += string(rune(intcode)) growingString += string(rune(intcode))
} else if l.follow("\\U") {
l.pos += 2
code := ""
for i := 0; i < 8; i++ {
c := l.peek()
l.pos++
if !isHexDigit(c) {
return l.errorf("unfinished unicode escape")
}
code = code + string(c)
}
l.pos--
intcode, err := strconv.ParseInt(code, 16, 32)
if err != nil {
return l.errorf("invalid unicode escape: \\u" + code)
}
growingString += string(rune(intcode))
} else if l.follow("\\") { } else if l.follow("\\") {
l.pos++ l.pos++
return l.errorf("invalid escape sequence: \\" + string(l.peek())) return l.errorf("invalid escape sequence: \\" + string(l.peek()))
+115 -34
View File
@@ -10,11 +10,13 @@ func testQLFlow(t *testing.T, input string, expectedFlow []token) {
token := <-ch token := <-ch
if token != expected { if token != expected {
t.Log("While testing #", idx, ":", input) t.Log("While testing #", idx, ":", input)
t.Log("compared (got)", token, "to (expected)", expected)
t.Log("\tvalue:", token.val, "<->", expected.val)
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
t.Log("\tline:", token.Line, "<->", expected.Line)
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
t.Log("compared", token, "to", expected) t.Log("compared", token, "to", expected)
t.Log(token.val, "<->", expected.val)
t.Log(token.typ, "<->", expected.typ)
t.Log(token.Line, "<->", expected.Line)
t.Log(token.Col, "<->", expected.Col)
t.FailNow() t.FailNow()
} }
} }
@@ -34,64 +36,143 @@ func testQLFlow(t *testing.T, input string, expectedFlow []token) {
func TestLexSpecialChars(t *testing.T) { func TestLexSpecialChars(t *testing.T) {
testQLFlow(t, " .$[]..()?*", []token{ testQLFlow(t, " .$[]..()?*", []token{
token{Position{1, 2}, tokenDot, "."}, {Position{1, 2}, tokenDot, "."},
token{Position{1, 3}, tokenDollar, "$"}, {Position{1, 3}, tokenDollar, "$"},
token{Position{1, 4}, tokenLeftBracket, "["}, {Position{1, 4}, tokenLeftBracket, "["},
token{Position{1, 5}, tokenRightBracket, "]"}, {Position{1, 5}, tokenRightBracket, "]"},
token{Position{1, 6}, tokenDotDot, ".."}, {Position{1, 6}, tokenDotDot, ".."},
token{Position{1, 8}, tokenLeftParen, "("}, {Position{1, 8}, tokenLeftParen, "("},
token{Position{1, 9}, tokenRightParen, ")"}, {Position{1, 9}, tokenRightParen, ")"},
token{Position{1, 10}, tokenQuestion, "?"}, {Position{1, 10}, tokenQuestion, "?"},
token{Position{1, 11}, tokenStar, "*"}, {Position{1, 11}, tokenStar, "*"},
token{Position{1, 12}, tokenEOF, ""}, {Position{1, 12}, tokenEOF, ""},
}) })
} }
func TestLexString(t *testing.T) { func TestLexString(t *testing.T) {
testQLFlow(t, "'foo'", []token{ testQLFlow(t, "'foo\n'", []token{
token{Position{1, 2}, tokenString, "foo"}, {Position{1, 2}, tokenString, "foo\n"},
token{Position{1, 6}, tokenEOF, ""}, {Position{2, 2}, tokenEOF, ""},
}) })
} }
func TestLexDoubleString(t *testing.T) { func TestLexDoubleString(t *testing.T) {
testQLFlow(t, `"bar"`, []token{ testQLFlow(t, `"bar"`, []token{
token{Position{1, 2}, tokenString, "bar"}, {Position{1, 2}, tokenString, "bar"},
token{Position{1, 6}, tokenEOF, ""}, {Position{1, 6}, tokenEOF, ""},
})
}
func TestLexStringEscapes(t *testing.T) {
testQLFlow(t, `"foo \" \' \b \f \/ \t \r \\ \u03A9 \U00012345 \n bar"`, []token{
{Position{1, 2}, tokenString, "foo \" ' \b \f / \t \r \\ \u03A9 \U00012345 \n bar"},
{Position{1, 55}, tokenEOF, ""},
})
}
func TestLexStringUnfinishedUnicode4(t *testing.T) {
testQLFlow(t, `"\u000"`, []token{
{Position{1, 2}, tokenError, "unfinished unicode escape"},
})
}
func TestLexStringUnfinishedUnicode8(t *testing.T) {
testQLFlow(t, `"\U0000"`, []token{
{Position{1, 2}, tokenError, "unfinished unicode escape"},
})
}
func TestLexStringInvalidEscape(t *testing.T) {
testQLFlow(t, `"\x"`, []token{
{Position{1, 2}, tokenError, "invalid escape sequence: \\x"},
})
}
func TestLexStringUnfinished(t *testing.T) {
testQLFlow(t, `"bar`, []token{
{Position{1, 2}, tokenError, "unclosed string"},
}) })
} }
func TestLexKey(t *testing.T) { func TestLexKey(t *testing.T) {
testQLFlow(t, "foo", []token{ testQLFlow(t, "foo", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 4}, tokenEOF, ""}, {Position{1, 4}, tokenEOF, ""},
}) })
} }
func TestLexRecurse(t *testing.T) { func TestLexRecurse(t *testing.T) {
testQLFlow(t, "$..*", []token{ testQLFlow(t, "$..*", []token{
token{Position{1, 1}, tokenDollar, "$"}, {Position{1, 1}, tokenDollar, "$"},
token{Position{1, 2}, tokenDotDot, ".."}, {Position{1, 2}, tokenDotDot, ".."},
token{Position{1, 4}, tokenStar, "*"}, {Position{1, 4}, tokenStar, "*"},
token{Position{1, 5}, tokenEOF, ""}, {Position{1, 5}, tokenEOF, ""},
}) })
} }
func TestLexBracketKey(t *testing.T) { func TestLexBracketKey(t *testing.T) {
testQLFlow(t, "$[foo]", []token{ testQLFlow(t, "$[foo]", []token{
token{Position{1, 1}, tokenDollar, "$"}, {Position{1, 1}, tokenDollar, "$"},
token{Position{1, 2}, tokenLeftBracket, "["}, {Position{1, 2}, tokenLeftBracket, "["},
token{Position{1, 3}, tokenKey, "foo"}, {Position{1, 3}, tokenKey, "foo"},
token{Position{1, 6}, tokenRightBracket, "]"}, {Position{1, 6}, tokenRightBracket, "]"},
token{Position{1, 7}, tokenEOF, ""}, {Position{1, 7}, tokenEOF, ""},
}) })
} }
func TestLexSpace(t *testing.T) { func TestLexSpace(t *testing.T) {
testQLFlow(t, "foo bar baz", []token{ testQLFlow(t, "foo bar baz", []token{
token{Position{1, 1}, tokenKey, "foo"}, {Position{1, 1}, tokenKey, "foo"},
token{Position{1, 5}, tokenKey, "bar"}, {Position{1, 5}, tokenKey, "bar"},
token{Position{1, 9}, tokenKey, "baz"}, {Position{1, 9}, tokenKey, "baz"},
token{Position{1, 12}, tokenEOF, ""}, {Position{1, 12}, tokenEOF, ""},
})
}
func TestLexInteger(t *testing.T) {
testQLFlow(t, "100 +200 -300", []token{
{Position{1, 1}, tokenInteger, "100"},
{Position{1, 5}, tokenInteger, "+200"},
{Position{1, 10}, tokenInteger, "-300"},
{Position{1, 14}, tokenEOF, ""},
})
}
func TestLexFloat(t *testing.T) {
testQLFlow(t, "100.0 +200.0 -300.0", []token{
{Position{1, 1}, tokenFloat, "100.0"},
{Position{1, 7}, tokenFloat, "+200.0"},
{Position{1, 14}, tokenFloat, "-300.0"},
{Position{1, 20}, tokenEOF, ""},
})
}
func TestLexFloatWithMultipleDots(t *testing.T) {
testQLFlow(t, "4.2.", []token{
{Position{1, 1}, tokenError, "cannot have two dots in one float"},
})
}
func TestLexFloatLeadingDot(t *testing.T) {
testQLFlow(t, "+.1", []token{
{Position{1, 1}, tokenError, "cannot start float with a dot"},
})
}
func TestLexFloatWithTrailingDot(t *testing.T) {
testQLFlow(t, "42.", []token{
{Position{1, 1}, tokenError, "float cannot end with a dot"},
})
}
func TestLexNumberWithoutDigit(t *testing.T) {
testQLFlow(t, "+", []token{
{Position{1, 1}, tokenError, "no digit in that number"},
})
}
func TestLexUnknown(t *testing.T) {
testQLFlow(t, "^", []token{
{Position{1, 1}, tokenError, "unexpected char: '94'"},
}) })
} }
+3 -3
View File
@@ -9,9 +9,10 @@ package toml
import ( import (
"fmt" "fmt"
"math"
) )
const maxInt = int(^uint(0) >> 1)
type queryParser struct { type queryParser struct {
flow chan token flow chan token
tokensBuffer []token tokensBuffer []token
@@ -137,7 +138,6 @@ func (p *queryParser) parseMatchExpr() queryParserStateFn {
return nil // allow EOF at this stage return nil // allow EOF at this stage
} }
return p.parseError(tok, "expected match expression") return p.parseError(tok, "expected match expression")
return nil
} }
func (p *queryParser) parseBracketExpr() queryParserStateFn { func (p *queryParser) parseBracketExpr() queryParserStateFn {
@@ -203,7 +203,7 @@ loop: // labeled loop for easy breaking
func (p *queryParser) parseSliceExpr() queryParserStateFn { func (p *queryParser) parseSliceExpr() queryParserStateFn {
// init slice to grab all elements // init slice to grab all elements
start, end, step := 0, math.MaxInt64, 1 start, end, step := 0, maxInt, 1
// parse optional start // parse optional start
tok := p.getToken() tok := p.getToken()
+64 -13
View File
@@ -5,24 +5,75 @@ set -e
# set the path to the present working directory # set the path to the present working directory
export GOPATH=`pwd` export GOPATH=`pwd`
# Vendorize the BurntSushi test suite function git_clone() {
# NOTE: this gets a specific release to avoid versioning issues path=$1
if [ ! -d 'src/github.com/BurntSushi/toml-test' ]; then branch=$2
mkdir -p src/github.com/BurntSushi version=$3
git clone https://github.com/BurntSushi/toml-test.git src/github.com/BurntSushi/toml-test if [ ! -d "src/$path" ]; then
fi mkdir -p src/$path
pushd src/github.com/BurntSushi/toml-test git clone https://$path.git src/$path
git reset --hard '0.2.0' # use the released version, NOT tip fi
popd pushd src/$path
git checkout "$branch"
git reset --hard "$version"
popd
}
go get github.com/pelletier/go-buffruneio
go get github.com/davecgh/go-spew/spew
# get code for BurntSushi TOML validation
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)
git_clone github.com/BurntSushi/toml master HEAD
git_clone github.com/BurntSushi/toml-test master HEAD #was: 0.2.0 HEAD
# build the BurntSushi test application
go build -o toml-test github.com/BurntSushi/toml-test go build -o toml-test github.com/BurntSushi/toml-test
# vendorize the current lib for testing # vendorize the current lib for testing
# NOTE: this basically mocks an install without having to go back out to github for code # NOTE: this basically mocks an install without having to go back out to github for code
mkdir -p src/github.com/pelletier/go-toml/cmd mkdir -p src/github.com/pelletier/go-toml/cmd
cp *.go *.toml src/github.com/pelletier/go-toml cp *.go *.toml src/github.com/pelletier/go-toml
cp cmd/*.go src/github.com/pelletier/go-toml/cmd cp -R cmd/* src/github.com/pelletier/go-toml/cmd
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go
# Run basic unit tests and then the BurntSushi test suite # Run basic unit tests
go test -v github.com/pelletier/go-toml go test github.com/pelletier/go-toml \
./toml-test ./test_program_bin | tee test_out github.com/pelletier/go-toml/cmd/tomljson
# run the entire BurntSushi test suite
if [[ $# -eq 0 ]] ; then
echo "Running all BurntSushi tests"
./toml-test ./test_program_bin | tee test_out
else
# run a specific test
test=$1
test_path='src/github.com/BurntSushi/toml-test/tests'
valid_test="$test_path/valid/$test"
invalid_test="$test_path/invalid/$test"
if [ -e "$valid_test.toml" ]; then
echo "Valid Test TOML for $test:"
echo "===="
cat "$valid_test.toml"
echo "Valid Test JSON for $test:"
echo "===="
cat "$valid_test.json"
echo "Go-TOML Output for $test:"
echo "===="
cat "$valid_test.toml" | ./test_program_bin
fi
if [ -e "$invalid_test.toml" ]; then
echo "Invalid Test TOML for $test:"
echo "===="
cat "$invalid_test.toml"
echo "Go-TOML Output for $test:"
echo "===="
echo "go-toml Output:"
cat "$invalid_test.toml" | ./test_program_bin
fi
fi
+14 -7
View File
@@ -26,6 +26,8 @@ const (
tokenEqual tokenEqual
tokenLeftBracket tokenLeftBracket
tokenRightBracket tokenRightBracket
tokenLeftCurlyBrace
tokenRightCurlyBrace
tokenLeftParen tokenLeftParen
tokenRightParen tokenRightParen
tokenDoubleLeftBracket tokenDoubleLeftBracket
@@ -44,6 +46,7 @@ const (
) )
var tokenTypeNames = []string{ var tokenTypeNames = []string{
"Error",
"EOF", "EOF",
"Comment", "Comment",
"Key", "Key",
@@ -54,7 +57,9 @@ var tokenTypeNames = []string{
"Float", "Float",
"=", "=",
"[", "[",
"[", "]",
"{",
"}",
"(", "(",
")", ")",
"]]", "]]",
@@ -102,9 +107,6 @@ func (t token) String() string {
return t.val return t.val
} }
if len(t.val) > 10 {
return fmt.Sprintf("%.10q...", t.val)
}
return fmt.Sprintf("%q", t.val) return fmt.Sprintf("%q", t.val)
} }
@@ -117,9 +119,14 @@ func isAlphanumeric(r rune) bool {
} }
func isKeyChar(r rune) bool { func isKeyChar(r rune) bool {
// "Keys start with the first non-whitespace character and end with the last // Keys start with the first character that isn't whitespace or [ and end
// non-whitespace character before the equals sign." // with the last non-whitespace character before the equals sign. Keys
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=') // cannot contain a # character."
return !(r == '\r' || r == '\n' || r == eof || r == '=')
}
func isKeyStartChar(r rune) bool {
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[')
} }
func isDigit(r rune) bool { func isDigit(r rune) bool {
+67
View File
@@ -0,0 +1,67 @@
package toml
import "testing"
func TestTokenStringer(t *testing.T) {
var tests = []struct {
tt tokenType
expect string
}{
{tokenError, "Error"},
{tokenEOF, "EOF"},
{tokenComment, "Comment"},
{tokenKey, "Key"},
{tokenString, "String"},
{tokenInteger, "Integer"},
{tokenTrue, "True"},
{tokenFalse, "False"},
{tokenFloat, "Float"},
{tokenEqual, "="},
{tokenLeftBracket, "["},
{tokenRightBracket, "]"},
{tokenLeftCurlyBrace, "{"},
{tokenRightCurlyBrace, "}"},
{tokenLeftParen, "("},
{tokenRightParen, ")"},
{tokenDoubleLeftBracket, "]]"},
{tokenDoubleRightBracket, "[["},
{tokenDate, "Date"},
{tokenKeyGroup, "KeyGroup"},
{tokenKeyGroupArray, "KeyGroupArray"},
{tokenComma, ","},
{tokenColon, ":"},
{tokenDollar, "$"},
{tokenStar, "*"},
{tokenQuestion, "?"},
{tokenDot, "."},
{tokenDotDot, ".."},
{tokenEOL, "EOL"},
{tokenEOL + 1, "Unknown"},
}
for i, test := range tests {
got := test.tt.String()
if got != test.expect {
t.Errorf("[%d] invalid string of token type; got %q, expected %q", i, got, test.expect)
}
}
}
func TestTokenString(t *testing.T) {
var tests = []struct {
tok token
expect string
}{
{token{Position{1, 1}, tokenEOF, ""}, "EOF"},
{token{Position{1, 1}, tokenError, "Δt"}, "Δt"},
{token{Position{1, 1}, tokenString, "bar"}, `"bar"`},
{token{Position{1, 1}, tokenString, "123456789012345"}, `"123456789012345"`},
}
for i, test := range tests {
got := test.tok.String()
if got != test.expect {
t.Errorf("[%d] invalid of string token; got %q, expected %q", i, got, test.expect)
}
}
}
+50 -124
View File
@@ -3,11 +3,10 @@ package toml
import ( import (
"errors" "errors"
"fmt" "fmt"
"io/ioutil" "io"
"os"
"runtime" "runtime"
"strconv"
"strings" "strings"
"time"
) )
type tomlValue struct { type tomlValue struct {
@@ -28,6 +27,13 @@ func newTomlTree() *TomlTree {
} }
} }
// TreeFromMap initializes a new TomlTree object using the given map.
func TreeFromMap(m map[string]interface{}) *TomlTree {
return &TomlTree{
values: m,
}
}
// Has returns a boolean indicating if the given key exists. // Has returns a boolean indicating if the given key exists.
func (t *TomlTree) Has(key string) bool { func (t *TomlTree) Has(key string) bool {
if key == "" { if key == "" {
@@ -59,7 +65,11 @@ func (t *TomlTree) Get(key string) interface{} {
if key == "" { if key == "" {
return t return t
} }
return t.GetPath(strings.Split(key, ".")) comps, err := parseKey(key)
if err != nil {
return nil
}
return t.GetPath(comps)
} }
// GetPath returns the element in the tree indicated by 'keys'. // GetPath returns the element in the tree indicated by 'keys'.
@@ -84,7 +94,7 @@ func (t *TomlTree) GetPath(keys []string) interface{} {
} }
subtree = node[len(node)-1] subtree = node[len(node)-1]
default: default:
return nil // cannot naigate through other node types return nil // cannot navigate through other node types
} }
} }
// branch based on final node type // branch based on final node type
@@ -171,7 +181,7 @@ func (t *TomlTree) SetPath(keys []string, value interface{}) {
nextTree, exists := subtree.values[intermediateKey] nextTree, exists := subtree.values[intermediateKey]
if !exists { if !exists {
nextTree = newTomlTree() nextTree = newTomlTree()
subtree.values[intermediateKey] = &nextTree // add new element here subtree.values[intermediateKey] = nextTree // add new element here
} }
switch node := nextTree.(type) { switch node := nextTree.(type) {
case *TomlTree: case *TomlTree:
@@ -185,7 +195,21 @@ func (t *TomlTree) SetPath(keys []string, value interface{}) {
subtree = node[len(node)-1] subtree = node[len(node)-1]
} }
} }
subtree.values[keys[len(keys)-1]] = value
var toInsert interface{}
switch value.(type) {
case *TomlTree:
toInsert = value
case []*TomlTree:
toInsert = value
case *tomlValue:
toInsert = value
default:
toInsert = &tomlValue{value: value}
}
subtree.values[keys[len(keys)-1]] = toInsert
} }
// createSubTree takes a tree and a key and create the necessary intermediate // createSubTree takes a tree and a key and create the necessary intermediate
@@ -198,9 +222,6 @@ func (t *TomlTree) SetPath(keys []string, value interface{}) {
func (t *TomlTree) createSubTree(keys []string, pos Position) error { func (t *TomlTree) createSubTree(keys []string, pos Position) error {
subtree := t subtree := t
for _, intermediateKey := range keys { for _, intermediateKey := range keys {
if intermediateKey == "" {
return fmt.Errorf("empty intermediate table")
}
nextTree, exists := subtree.values[intermediateKey] nextTree, exists := subtree.values[intermediateKey]
if !exists { if !exists {
tree := newTomlTree() tree := newTomlTree()
@@ -215,122 +236,24 @@ func (t *TomlTree) createSubTree(keys []string, pos Position) error {
case *TomlTree: case *TomlTree:
subtree = node subtree = node
default: default:
return fmt.Errorf("unknown type for path %s (%s)", return fmt.Errorf("unknown type for path %s (%s): %T (%#v)",
strings.Join(keys, "."), intermediateKey) strings.Join(keys, "."), intermediateKey, nextTree, nextTree)
} }
} }
return nil return nil
} }
// encodes a string to a TOML-compliant string value // Query compiles and executes a query on a tree and returns the query result.
func encodeTomlString(value string) string {
result := ""
for _, rr := range value {
intRr := uint16(rr)
switch rr {
case '\b':
result += "\\b"
case '\t':
result += "\\t"
case '\n':
result += "\\n"
case '\f':
result += "\\f"
case '\r':
result += "\\r"
case '"':
result += "\\\""
case '\\':
result += "\\\\"
default:
if intRr < 0x001F {
result += fmt.Sprintf("\\u%0.4X", intRr)
} else {
result += string(rr)
}
}
}
return result
}
// Value print support function for ToString()
// Outputs the TOML compliant string representation of a value
func toTomlValue(item interface{}, indent int) string {
tab := strings.Repeat(" ", indent)
switch value := item.(type) {
case int64:
return tab + strconv.FormatInt(value, 10)
case float64:
return tab + strconv.FormatFloat(value, 'f', -1, 64)
case string:
return tab + "\"" + encodeTomlString(value) + "\""
case bool:
if value {
return "true"
}
return "false"
case time.Time:
return tab + value.Format(time.RFC3339)
case []interface{}:
result := tab + "[\n"
for _, item := range value {
result += toTomlValue(item, indent+2) + ",\n"
}
return result + tab + "]"
default:
panic(fmt.Sprintf("unsupported value type: %v", value))
}
}
// Recursive support function for ToString()
// Outputs a tree, using the provided keyspace to prefix group names
func (t *TomlTree) toToml(indent, keyspace string) string {
result := ""
for k, v := range t.values {
// figure out the keyspace
combinedKey := k
if keyspace != "" {
combinedKey = keyspace + "." + combinedKey
}
// output based on type
switch node := v.(type) {
case []*TomlTree:
for _, item := range node {
if len(item.Keys()) > 0 {
result += fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
}
result += item.toToml(indent+" ", combinedKey)
}
case *TomlTree:
if len(node.Keys()) > 0 {
result += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
result += node.toToml(indent+" ", combinedKey)
case *tomlValue:
result += fmt.Sprintf("%s%s = %s\n", indent, k, toTomlValue(node.value, 0))
default:
panic(fmt.Sprintf("unsupported node type: %v", node))
}
}
return result
}
func (t *TomlTree) Query(query string) (*QueryResult, error) { func (t *TomlTree) Query(query string) (*QueryResult, error) {
if q, err := CompileQuery(query); err != nil { q, err := CompileQuery(query)
if err != nil {
return nil, err return nil, err
} else {
return q.Execute(t), nil
} }
return q.Execute(t), nil
} }
// ToString generates a human-readable representation of the current tree. // LoadReader creates a TomlTree from any io.Reader.
// Output spans multiple lines, and is suitable for ingest by a TOML parser func LoadReader(reader io.Reader) (tree *TomlTree, err error) {
func (t *TomlTree) ToString() string {
return t.toToml("", "")
}
// Load creates a TomlTree from a string.
func Load(content string) (tree *TomlTree, err error) {
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok { if _, ok := r.(runtime.Error); ok {
@@ -339,18 +262,21 @@ func Load(content string) (tree *TomlTree, err error) {
err = errors.New(r.(string)) err = errors.New(r.(string))
} }
}() }()
tree = parseToml(lexToml(content)) tree = parseToml(lexToml(reader))
return return
} }
// Load creates a TomlTree from a string.
func Load(content string) (tree *TomlTree, err error) {
return LoadReader(strings.NewReader(content))
}
// LoadFile creates a TomlTree from a file. // LoadFile creates a TomlTree from a file.
func LoadFile(path string) (tree *TomlTree, err error) { func LoadFile(path string) (tree *TomlTree, err error) {
buff, ferr := ioutil.ReadFile(path) file, err := os.Open(path)
if ferr != nil { if err != nil {
err = ferr return nil, err
} else {
s := string(buff)
tree, err = Load(s)
} }
return defer file.Close()
return LoadReader(file)
} }
+55 -1
View File
@@ -15,6 +15,47 @@ func TestTomlHas(t *testing.T) {
if !tree.Has("test.key") { if !tree.Has("test.key") {
t.Errorf("Has - expected test.key to exists") t.Errorf("Has - expected test.key to exists")
} }
if tree.Has("") {
t.Errorf("Should return false if the key is not provided")
}
}
func TestTomlGet(t *testing.T) {
tree, _ := Load(`
[test]
key = "value"
`)
if tree.Get("") != tree {
t.Errorf("Get should return the tree itself when given an empty path")
}
if tree.Get("test.key") != "value" {
t.Errorf("Get should return the value")
}
if tree.Get(`\`) != nil {
t.Errorf("should return nil when the key is malformed")
}
}
func TestTomlGetDefault(t *testing.T) {
tree, _ := Load(`
[test]
key = "value"
`)
if tree.GetDefault("", "hello") != tree {
t.Error("GetDefault should return the tree itself when given an empty path")
}
if tree.GetDefault("test.key", "hello") != "value" {
t.Error("Get should return the value")
}
if tree.GetDefault("whatever", "hello") != "hello" {
t.Error("GetDefault should return the default value if the key does not exist")
}
} }
func TestTomlHasPath(t *testing.T) { func TestTomlHasPath(t *testing.T) {
@@ -46,6 +87,11 @@ func TestTomlGetPath(t *testing.T) {
t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result) t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result)
} }
} }
tree, _ := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
if tree.GetPath([]string{"whatever"}) != nil {
t.Error("GetPath should return nil when the key does not exist")
}
} }
func TestTomlQuery(t *testing.T) { func TestTomlQuery(t *testing.T) {
@@ -65,10 +111,18 @@ func TestTomlQuery(t *testing.T) {
} }
if tt, ok := values[0].(*TomlTree); !ok { if tt, ok := values[0].(*TomlTree); !ok {
t.Errorf("Expected type of TomlTree: %T Tv", values[0], values[0]) t.Errorf("Expected type of TomlTree: %T", values[0])
} else if tt.Get("a") != int64(1) { } else if tt.Get("a") != int64(1) {
t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a")) t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
} else if tt.Get("b") != int64(2) { } else if tt.Get("b") != int64(2) {
t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b")) t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
} }
} }
func TestTomlFromMap(t *testing.T) {
simpleMap := map[string]interface{}{"hello": 42}
tree := TreeFromMap(simpleMap)
if tree.Get("hello") != 42 {
t.Fatal("hello should be 42, not", tree.Get("hello"))
}
}
+207
View File
@@ -0,0 +1,207 @@
package toml
// Tools to convert a TomlTree to different representations
import (
"fmt"
"strconv"
"strings"
"time"
)
// encodes a string to a TOML-compliant string value
func encodeTomlString(value string) string {
result := ""
for _, rr := range value {
intRr := uint16(rr)
switch rr {
case '\b':
result += "\\b"
case '\t':
result += "\\t"
case '\n':
result += "\\n"
case '\f':
result += "\\f"
case '\r':
result += "\\r"
case '"':
result += "\\\""
case '\\':
result += "\\\\"
default:
if intRr < 0x001F {
result += fmt.Sprintf("\\u%0.4X", intRr)
} else {
result += string(rr)
}
}
}
return result
}
// Value print support function for ToString()
// Outputs the TOML compliant string representation of a value
func toTomlValue(item interface{}, indent int) string {
tab := strings.Repeat(" ", indent)
switch value := item.(type) {
case int:
return tab + strconv.FormatInt(int64(value), 10)
case int8:
return tab + strconv.FormatInt(int64(value), 10)
case int16:
return tab + strconv.FormatInt(int64(value), 10)
case int32:
return tab + strconv.FormatInt(int64(value), 10)
case int64:
return tab + strconv.FormatInt(value, 10)
case uint:
return tab + strconv.FormatUint(uint64(value), 10)
case uint8:
return tab + strconv.FormatUint(uint64(value), 10)
case uint16:
return tab + strconv.FormatUint(uint64(value), 10)
case uint32:
return tab + strconv.FormatUint(uint64(value), 10)
case uint64:
return tab + strconv.FormatUint(value, 10)
case float32:
return tab + strconv.FormatFloat(float64(value), 'f', -1, 32)
case float64:
return tab + strconv.FormatFloat(value, 'f', -1, 64)
case string:
return tab + "\"" + encodeTomlString(value) + "\""
case bool:
if value {
return "true"
}
return "false"
case time.Time:
return tab + value.Format(time.RFC3339)
case []interface{}:
result := tab + "[\n"
for _, item := range value {
result += toTomlValue(item, indent+2) + ",\n"
}
return result + tab + "]"
case nil:
return ""
default:
panic(fmt.Sprintf("unsupported value type %T: %v", value, value))
}
}
// Recursive support function for ToString()
// Outputs a tree, using the provided keyspace to prefix group names
func (t *TomlTree) toToml(indent, keyspace string) string {
resultChunks := []string{}
for k, v := range t.values {
// figure out the keyspace
combinedKey := k
if keyspace != "" {
combinedKey = keyspace + "." + combinedKey
}
resultChunk := ""
// output based on type
switch node := v.(type) {
case []*TomlTree:
for _, item := range node {
if len(item.Keys()) > 0 {
resultChunk += fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
}
resultChunk += item.toToml(indent+" ", combinedKey)
}
resultChunks = append(resultChunks, resultChunk)
case *TomlTree:
if len(node.Keys()) > 0 {
resultChunk += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
resultChunk += node.toToml(indent+" ", combinedKey)
resultChunks = append(resultChunks, resultChunk)
case map[string]interface{}:
sub := TreeFromMap(node)
if len(sub.Keys()) > 0 {
resultChunk += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
resultChunk += sub.toToml(indent+" ", combinedKey)
resultChunks = append(resultChunks, resultChunk)
case map[string]string:
sub := TreeFromMap(convertMapStringString(node))
if len(sub.Keys()) > 0 {
resultChunk += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
resultChunk += sub.toToml(indent+" ", combinedKey)
resultChunks = append(resultChunks, resultChunk)
case map[interface{}]interface{}:
sub := TreeFromMap(convertMapInterfaceInterface(node))
if len(sub.Keys()) > 0 {
resultChunk += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
}
resultChunk += sub.toToml(indent+" ", combinedKey)
resultChunks = append(resultChunks, resultChunk)
case *tomlValue:
resultChunk = fmt.Sprintf("%s%s = %s\n", indent, k, toTomlValue(node.value, 0))
resultChunks = append([]string{resultChunk}, resultChunks...)
default:
resultChunk = fmt.Sprintf("%s%s = %s\n", indent, k, toTomlValue(v, 0))
resultChunks = append([]string{resultChunk}, resultChunks...)
}
}
return strings.Join(resultChunks, "")
}
func convertMapStringString(in map[string]string) map[string]interface{} {
result := make(map[string]interface{}, len(in))
for k, v := range in {
result[k] = v
}
return result
}
func convertMapInterfaceInterface(in map[interface{}]interface{}) map[string]interface{} {
result := make(map[string]interface{}, len(in))
for k, v := range in {
result[k.(string)] = v
}
return result
}
// ToString is an alias for String
func (t *TomlTree) ToString() string {
return t.String()
}
// String generates a human-readable representation of the current tree.
// Output spans multiple lines, and is suitable for ingest by a TOML parser
func (t *TomlTree) String() string {
return t.toToml("", "")
}
// ToMap recursively generates a representation of the current tree using map[string]interface{}.
func (t *TomlTree) ToMap() map[string]interface{} {
result := map[string]interface{}{}
for k, v := range t.values {
switch node := v.(type) {
case []*TomlTree:
var array []interface{}
for _, item := range node {
array = append(array, item.ToMap())
}
result[k] = array
case *TomlTree:
result[k] = node.ToMap()
case map[string]interface{}:
sub := TreeFromMap(node)
result[k] = sub.ToMap()
case *tomlValue:
result[k] = node.value
}
}
return result
}
+171
View File
@@ -0,0 +1,171 @@
package toml
import (
"reflect"
"testing"
"time"
"strings"
)
func TestTomlTreeConversionToString(t *testing.T) {
toml, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
points = { x = 1, y = 2 }`)
if err != nil {
t.Fatal("Unexpected error:", err)
}
reparsedTree, err := Load(toml.ToString())
assertTree(t, reparsedTree, err, map[string]interface{}{
"name": map[string]interface{}{
"first": "Tom",
"last": "Preston-Werner",
},
"points": map[string]interface{}{
"x": int64(1),
"y": int64(2),
},
})
}
func TestTomlTreeConversionToStringKeysOrders(t *testing.T) {
for i := 0; i < 100; i++ {
tree, _ := Load(`
foobar = true
bar = "baz"
foo = 1
[qux]
foo = 1
bar = "baz2"`)
stringRepr := tree.ToString()
t.Log("Intermediate string representation:")
t.Log(stringRepr)
r := strings.NewReader(stringRepr)
toml, err := LoadReader(r)
if err != nil {
t.Fatal("Unexpected error:", err)
}
assertTree(t, toml, err, map[string]interface{}{
"foobar": true,
"bar": "baz",
"foo": 1,
"qux": map[string]interface{}{
"foo": 1,
"bar": "baz2",
},
})
}
}
func testMaps(t *testing.T, actual, expected map[string]interface{}) {
if !reflect.DeepEqual(actual, expected) {
t.Fatal("trees aren't equal.\n", "Expected:\n", expected, "\nActual:\n", actual)
}
}
func TestTomlTreeConversionToMapSimple(t *testing.T) {
tree, _ := Load("a = 42\nb = 17")
expected := map[string]interface{}{
"a": int64(42),
"b": int64(17),
}
testMaps(t, tree.ToMap(), expected)
}
func TestTomlTreeConversionToMapExampleFile(t *testing.T) {
tree, _ := LoadFile("example.toml")
expected := map[string]interface{}{
"title": "TOML Example",
"owner": map[string]interface{}{
"name": "Tom Preston-Werner",
"organization": "GitHub",
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
"dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
},
"database": map[string]interface{}{
"server": "192.168.1.1",
"ports": []interface{}{int64(8001), int64(8001), int64(8002)},
"connection_max": int64(5000),
"enabled": true,
},
"servers": map[string]interface{}{
"alpha": map[string]interface{}{
"ip": "10.0.0.1",
"dc": "eqdc10",
},
"beta": map[string]interface{}{
"ip": "10.0.0.2",
"dc": "eqdc10",
},
},
"clients": map[string]interface{}{
"data": []interface{}{
[]interface{}{"gamma", "delta"},
[]interface{}{int64(1), int64(2)},
},
},
}
testMaps(t, tree.ToMap(), expected)
}
func TestTomlTreeConversionToMapWithTablesInMultipleChunks(t *testing.T) {
tree, _ := Load(`
[[menu.main]]
a = "menu 1"
b = "menu 2"
[[menu.main]]
c = "menu 3"
d = "menu 4"`)
expected := map[string]interface{}{
"menu": map[string]interface{}{
"main": []interface{}{
map[string]interface{}{"a": "menu 1", "b": "menu 2"},
map[string]interface{}{"c": "menu 3", "d": "menu 4"},
},
},
}
treeMap := tree.ToMap()
testMaps(t, treeMap, expected)
}
func TestTomlTreeConversionToMapWithArrayOfInlineTables(t *testing.T) {
tree, _ := Load(`
[params]
language_tabs = [
{ key = "shell", name = "Shell" },
{ key = "ruby", name = "Ruby" },
{ key = "python", name = "Python" }
]`)
expected := map[string]interface{}{
"params": map[string]interface{}{
"language_tabs": []interface{}{
map[string]interface{}{
"key": "shell",
"name": "Shell",
},
map[string]interface{}{
"key": "ruby",
"name": "Ruby",
},
map[string]interface{}{
"key": "python",
"name": "Python",
},
},
},
}
treeMap := tree.ToMap()
testMaps(t, treeMap, expected)
}