Compare commits
212 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 8fe62057ea | |||
| 5f42261979 | |||
| 75654e60b8 | |||
| 091e2dc498 | |||
| 095a905e04 | |||
| ec312409d3 | |||
| 26fd12ff54 | |||
| b40204d36a | |||
| 4d5afd743f | |||
| 3ded2e09ee | |||
| 781fbae71e | |||
| 68063a447e | |||
| 84da2c4a25 | |||
| dba45d427f | |||
| 728039f679 | |||
| 1d8903f1d0 | |||
| 65b27e6823 | |||
| 6ea91ef590 | |||
| 51edd0ca49 | |||
| d95bfe020e | |||
| 63909f0a90 | |||
| f9070d3b40 | |||
| 405d48dc28 | |||
| 690ec00a4b | |||
| bef2d19cb0 | |||
| e1803f96f6 | |||
| d9a27b8052 | |||
| ad2aec1dcc | |||
| 489c49b1b4 | |||
| 27c6b39a13 | |||
| 539dd095b3 | |||
| b56e1b27b4 | |||
| 19cbd226da | |||
| 0a1666a81f | |||
| aa79e12a97 | |||
| 81a861c69d | |||
| 78b76feda6 | |||
| 90d6f96e9e | |||
| e33f654429 | |||
| 4edab6691b | |||
| c2dbbc24a9 | |||
| 14d3ac30da | |||
| 5c5490133d | |||
| 216c9ec838 | |||
| a295f02a64 | |||
| dbe63ccdd0 | |||
| 603baefff9 | |||
| c01d1270ff | |||
| 66540cf1fc | |||
| 05bcc0fb0d | |||
| acdc450948 | |||
| 778c285afa | |||
| a1e8a8d702 | |||
| 03c6bf4172 | |||
| a1b12e18b7 | |||
| 4874e8477b | |||
| 9bf0212445 | |||
| 0131db6d73 | |||
| 861c4734ac | |||
| b8b5e76965 | |||
| 4e9e0ee19b | |||
| 8c31c2ec65 | |||
| 6d858869d3 | |||
| 1916042ba2 | |||
| a410399d2c | |||
| 878c11e70e | |||
| 19ece5dc77 | |||
| d01db88be9 | |||
| 2009e44b6f | |||
| 690dbc9ee7 | |||
| 16398bac15 | |||
| 1d6b12b7cb | |||
| 9c1b4e331f | |||
| 4692b8f9ba | |||
| 69d355db53 | |||
| ef23ce9e92 | |||
| 4a000a21a4 | |||
| fe7536c3de | |||
| e94d595cd4 | |||
| 0d5a6db8dd | |||
| a60c71373e | |||
| 5ccdfb18c7 | |||
| 40ecdac242 | |||
| 26ae43fdee | |||
| 048765b449 | |||
| 5c26a6ff6f | |||
| 685a1f1cb7 | |||
| 23f644976a | |||
| 64bc956d5e | |||
| 53be957dac | |||
| 97253b98df | |||
| 76c552dcd7 | |||
| fe206efb84 | |||
| e32a2e0474 | |||
| f6e7596e8d | |||
| 25e50242f6 | |||
| 62e2d802ed | |||
| fee7787d3f | |||
| 3b00596b2e | |||
| 13d49d4606 | |||
| 7e6e4b1314 | |||
| 3616783228 | |||
| d0ec4317d3 | |||
| 22139eb546 | |||
| c9506ee963 | |||
| 3a6d01f7a0 | |||
| d1fa2118c1 | |||
| a1f048ba24 | |||
| ee2c0b51cf | |||
| 439fbba1f8 | |||
| 017119f7a7 | |||
| ce7be745f0 | |||
| d464759235 | |||
| 7cb988051d | |||
| 3ddb37c944 | |||
| f7f14983c3 | |||
| 45932ad32d | |||
| 67b7b944a8 | |||
| 31055c2ff0 | |||
| 5a62685873 | |||
| d05a14897c | |||
| 0599275eb9 | |||
| 0049ab3dc4 | |||
| bfe4a7e160 | |||
| e6271032cc | |||
| 887411a2a8 | |||
| 31c735e72c | |||
| 06484b677b | |||
| de2e921d55 | |||
| 7f292800de | |||
| 923742e542 | |||
| 65ad89c1a7 | |||
| 64ff1ea4d5 | |||
| b39f6ef1f9 | |||
| c187221f01 | |||
| 8e6ab94eec | |||
| 8d9c606c69 | |||
| 288bc57940 | |||
| e3b2497729 | |||
| 1a8565204c | |||
| e58cfd32d4 | |||
| a2ae216b47 | |||
| 8645be8dc7 | |||
| 99b9371c53 | |||
| 92c565e02b | |||
| 6e26017b00 | |||
| 9d93af61de | |||
| 4d8fb95ffe | |||
| 0e41db2176 | |||
| afca7f3334 | |||
| d6a90e60ed | |||
| fe63e9f76d | |||
| 7f50e4c339 | |||
| a402e618c3 | |||
| 2df083520a | |||
| 8176e30b38 | |||
| 14c964fc02 | |||
| f963bc320f | |||
| 0488b850c6 | |||
| 346e676fa2 | |||
| 6d743bb19f | |||
| fa1c2ab68c | |||
| a6c6ad1f5f | |||
| ab7a652912 | |||
| 3102b98900 | |||
| f0cae62430 | |||
| 56c6106477 | |||
| 7d69e5a5c5 | |||
| 07d0c2e4d3 | |||
| 6b9002d8f9 | |||
| 5753e884d0 | |||
| d467309bdd | |||
| 821a80e635 | |||
| dd4c4ffc2b | |||
| da703daafe | |||
| f58048cec0 | |||
| 440592fa85 | |||
| f4f2456dcd | |||
| a77f30ea80 | |||
| d61c80733b | |||
| 894e775e38 | |||
| 8e75093380 | |||
| cf5ad6a245 | |||
| 8fc7451ffc | |||
| 9defd66d3c | |||
| 6adf8057ed | |||
| 36e1197190 | |||
| 6dd2de38a9 | |||
| 209315c2af | |||
| 41a8959f14 | |||
| 16a681db2a | |||
| 9f36448571 | |||
| 222e90a7d3 | |||
| a8327d781a | |||
| 61449e9d32 | |||
| 48c977fb58 | |||
| 42e7853ef6 | |||
| 1f3d0e03c3 | |||
| 36d65b681a | |||
| a56707c85f | |||
| 4b47f52cb0 | |||
| 2f2f28631b | |||
| 543444f747 | |||
| b814e1a94f | |||
| 1fe62f3000 | |||
| 709382e9c1 | |||
| 71e7762db5 | |||
| 34da10d880 | |||
| db15f8a481 | |||
| 8ef71920bd | |||
| fa055bcbba | |||
| 7337a63f5a |
@@ -0,0 +1,2 @@
|
|||||||
|
cmd/tomll/tomll
|
||||||
|
cmd/tomljson/tomljson
|
||||||
@@ -0,0 +1,22 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Describe the bug**
|
||||||
|
A clear and concise description of what the bug is.
|
||||||
|
|
||||||
|
**To Reproduce**
|
||||||
|
Steps to reproduce the behavior. Including TOML files.
|
||||||
|
|
||||||
|
**Expected behavior**
|
||||||
|
A clear and concise description of what you expected to happen, if other than "should work".
|
||||||
|
|
||||||
|
**Versions**
|
||||||
|
- go-toml: version (git sha)
|
||||||
|
- go: version
|
||||||
|
- operating system: e.g. macOS, Windows, Linux
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context about the problem here that you think may help to diagnose.
|
||||||
@@ -0,0 +1,17 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||||
|
|
||||||
|
**Describe the solution you'd like**
|
||||||
|
A clear and concise description of what you want to happen.
|
||||||
|
|
||||||
|
**Describe alternatives you've considered**
|
||||||
|
A clear and concise description of any alternative solutions or features you've considered.
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context or screenshots about the feature request here.
|
||||||
@@ -1 +1,5 @@
|
|||||||
test_program/test_program_bin
|
test_program/test_program_bin
|
||||||
|
fuzz/
|
||||||
|
cmd/tomll/tomll
|
||||||
|
cmd/tomljson/tomljson
|
||||||
|
cmd/tomltestgen/tomltestgen
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
language: go
|
|
||||||
script: "./test.sh"
|
|
||||||
go:
|
|
||||||
- 1.1
|
|
||||||
- 1.2
|
|
||||||
- 1.3
|
|
||||||
- tip
|
|
||||||
+132
@@ -0,0 +1,132 @@
|
|||||||
|
## Contributing
|
||||||
|
|
||||||
|
Thank you for your interest in go-toml! We appreciate you considering
|
||||||
|
contributing to go-toml!
|
||||||
|
|
||||||
|
The main goal is the project is to provide an easy-to-use TOML
|
||||||
|
implementation for Go that gets the job done and gets out of your way –
|
||||||
|
dealing with TOML is probably not the central piece of your project.
|
||||||
|
|
||||||
|
As the single maintainer of go-toml, time is scarce. All help, big or
|
||||||
|
small, is more than welcomed!
|
||||||
|
|
||||||
|
### Ask questions
|
||||||
|
|
||||||
|
Any question you may have, somebody else might have it too. Always feel
|
||||||
|
free to ask them on the [issues tracker][issues-tracker]. We will try to
|
||||||
|
answer them as clearly and quickly as possible, time permitting.
|
||||||
|
|
||||||
|
Asking questions also helps us identify areas where the documentation needs
|
||||||
|
improvement, or new features that weren't envisioned before. Sometimes, a
|
||||||
|
seemingly innocent question leads to the fix of a bug. Don't hesitate and
|
||||||
|
ask away!
|
||||||
|
|
||||||
|
### Improve the documentation
|
||||||
|
|
||||||
|
The best way to share your knowledge and experience with go-toml is to
|
||||||
|
improve the documentation. Fix a typo, clarify an interface, add an
|
||||||
|
example, anything goes!
|
||||||
|
|
||||||
|
The documentation is present in the [README][readme] and thorough the
|
||||||
|
source code. On release, it gets updated on [GoDoc][godoc]. To make a
|
||||||
|
change to the documentation, create a pull request with your proposed
|
||||||
|
changes. For simple changes like that, the easiest way to go is probably
|
||||||
|
the "Fork this project and edit the file" button on Github, displayed at
|
||||||
|
the top right of the file. Unless it's a trivial change (for example a
|
||||||
|
typo), provide a little bit of context in your pull request description or
|
||||||
|
commit message.
|
||||||
|
|
||||||
|
### Report a bug
|
||||||
|
|
||||||
|
Found a bug! Sorry to hear that :(. Help us and other track them down and
|
||||||
|
fix by reporting it. [File a new bug report][bug-report] on the [issues
|
||||||
|
tracker][issues-tracker]. The template should provide enough guidance on
|
||||||
|
what to include. When in doubt: add more details! By reducing ambiguity and
|
||||||
|
providing more information, it decreases back and forth and saves everyone
|
||||||
|
time.
|
||||||
|
|
||||||
|
### Code changes
|
||||||
|
|
||||||
|
Want to contribute a patch? Very happy to hear that!
|
||||||
|
|
||||||
|
First, some high-level rules:
|
||||||
|
|
||||||
|
* A short proposal with some POC code is better than a lengthy piece of
|
||||||
|
text with no code. Code speaks louder than words.
|
||||||
|
* No backward-incompatible patch will be accepted unless discussed.
|
||||||
|
Sometimes it's hard, and Go's lack of versioning by default does not
|
||||||
|
help, but we try not to break people's programs unless we absolutely have
|
||||||
|
to.
|
||||||
|
* If you are writing a new feature or extending an existing one, make sure
|
||||||
|
to write some documentation.
|
||||||
|
* Bug fixes need to be accompanied with regression tests.
|
||||||
|
* New code needs to be tested.
|
||||||
|
* Your commit messages need to explain why the change is needed, even if
|
||||||
|
already included in the PR description.
|
||||||
|
|
||||||
|
It does sound like a lot, but those best practices are here to save time
|
||||||
|
overall and continuously improve the quality of the project, which is
|
||||||
|
something everyone benefits from.
|
||||||
|
|
||||||
|
#### Get started
|
||||||
|
|
||||||
|
The fairly standard code contribution process looks like that:
|
||||||
|
|
||||||
|
1. [Fork the project][fork].
|
||||||
|
2. Make your changes, commit on any branch you like.
|
||||||
|
3. [Open up a pull request][pull-request]
|
||||||
|
4. Review, potentially ask for changes.
|
||||||
|
5. Merge. You're in!
|
||||||
|
|
||||||
|
Feel free to ask for help! You can create draft pull requests to gather
|
||||||
|
some early feedback!
|
||||||
|
|
||||||
|
#### Run the tests
|
||||||
|
|
||||||
|
You can run tests for go-toml using Go's test tool: `go test ./...`.
|
||||||
|
When creating a pull request, all tests will be run on Linux on a few Go
|
||||||
|
versions (Travis CI), and on Windows using the latest Go version
|
||||||
|
(AppVeyor).
|
||||||
|
|
||||||
|
#### Style
|
||||||
|
|
||||||
|
Try to look around and follow the same format and structure as the rest of
|
||||||
|
the code. We enforce using `go fmt` on the whole code base.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Maintainers-only
|
||||||
|
|
||||||
|
#### Merge pull request
|
||||||
|
|
||||||
|
Checklist:
|
||||||
|
|
||||||
|
* Passing CI.
|
||||||
|
* Does not introduce backward-incompatible changes (unless discussed).
|
||||||
|
* Has relevant doc changes.
|
||||||
|
* Has relevant unit tests.
|
||||||
|
|
||||||
|
1. Merge using "squash and merge".
|
||||||
|
2. Make sure to edit the commit message to keep all the useful information
|
||||||
|
nice and clean.
|
||||||
|
3. Make sure the commit title is clear and contains the PR number (#123).
|
||||||
|
|
||||||
|
#### New release
|
||||||
|
|
||||||
|
1. Go to [releases][releases]. Click on "X commits to master since this
|
||||||
|
release".
|
||||||
|
2. Make note of all the changes. Look for backward incompatible changes,
|
||||||
|
new features, and bug fixes.
|
||||||
|
3. Pick the new version using the above and semver.
|
||||||
|
4. Create a [new release][new-release].
|
||||||
|
5. Follow the same format as [1.1.0][release-110].
|
||||||
|
|
||||||
|
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
||||||
|
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
||||||
|
[godoc]: https://godoc.org/github.com/pelletier/go-toml
|
||||||
|
[readme]: ./README.md
|
||||||
|
[fork]: https://help.github.com/articles/fork-a-repo
|
||||||
|
[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
|
||||||
|
[releases]: https://github.com/pelletier/go-toml/releases
|
||||||
|
[new-release]: https://github.com/pelletier/go-toml/releases/new
|
||||||
|
[release-110]: https://github.com/pelletier/go-toml/releases/tag/v1.1.0
|
||||||
+11
@@ -0,0 +1,11 @@
|
|||||||
|
FROM golang:1.12-alpine3.9 as builder
|
||||||
|
WORKDIR /go/src/github.com/pelletier/go-toml
|
||||||
|
COPY . .
|
||||||
|
ENV CGO_ENABLED=0
|
||||||
|
ENV GOOS=linux
|
||||||
|
RUN go install ./...
|
||||||
|
|
||||||
|
FROM scratch
|
||||||
|
COPY --from=builder /go/bin/tomll /usr/bin/tomll
|
||||||
|
COPY --from=builder /go/bin/tomljson /usr/bin/tomljson
|
||||||
|
COPY --from=builder /go/bin/jsontoml /usr/bin/jsontoml
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
**Issue:** add link to pelletier/go-toml issue here
|
||||||
|
|
||||||
|
Explanation of what this pull request does.
|
||||||
|
|
||||||
|
More detailed description of the decisions being made and the reasons why (if the patch is non-trivial).
|
||||||
@@ -3,75 +3,78 @@
|
|||||||
Go library for the [TOML](https://github.com/mojombo/toml) format.
|
Go library for the [TOML](https://github.com/mojombo/toml) format.
|
||||||
|
|
||||||
This library supports TOML version
|
This library supports TOML version
|
||||||
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
|
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
|
||||||
|
|
||||||
[](http://godoc.org/github.com/pelletier/go-toml)
|
[](http://godoc.org/github.com/pelletier/go-toml)
|
||||||
[](https://travis-ci.org/pelletier/go-toml)
|
[](https://github.com/pelletier/go-toml/blob/master/LICENSE)
|
||||||
|
[](https://dev.azure.com/pelletierthomas/go-toml-ci/_build/latest?definitionId=1&branchName=master)
|
||||||
|
[](https://codecov.io/gh/pelletier/go-toml)
|
||||||
|
[](https://goreportcard.com/report/github.com/pelletier/go-toml)
|
||||||
|
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield)
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
Go-toml provides the following features for using data parsed from TOML documents:
|
Go-toml provides the following features for using data parsed from TOML documents:
|
||||||
|
|
||||||
* Load TOML documents from files and string data
|
* Load TOML documents from files and string data
|
||||||
* Easily navigate TOML structure using TomlTree
|
* Easily navigate TOML structure using Tree
|
||||||
|
* Mashaling and unmarshaling to and from data structures
|
||||||
* Line & column position data for all parsed elements
|
* Line & column position data for all parsed elements
|
||||||
* Query support similar to JSON-Path
|
* [Query support similar to JSON-Path](query/)
|
||||||
* Syntax errors contain line and column numbers
|
* Syntax errors contain line and column numbers
|
||||||
|
|
||||||
Go-toml is designed to help cover use-cases not covered by reflection-based TOML parsing:
|
|
||||||
|
|
||||||
* Semantic evaluation of parsed TOML
|
|
||||||
* Informing a user of mistakes in the source document, after it has been parsed
|
|
||||||
* Programatic handling of default values on a case-by-case basis
|
|
||||||
* Using a TOML document as a flexible data-store
|
|
||||||
|
|
||||||
## Import
|
## Import
|
||||||
|
|
||||||
import "github.com/pelletier/go-toml"
|
```go
|
||||||
|
import "github.com/pelletier/go-toml"
|
||||||
## Usage
|
|
||||||
|
|
||||||
### Example
|
|
||||||
|
|
||||||
Say you have a TOML file that looks like this:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
[postgres]
|
|
||||||
user = "pelletier"
|
|
||||||
password = "mypassword"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Read the username and password like this:
|
## Usage example
|
||||||
|
|
||||||
|
Read a TOML document:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
import (
|
config, _ := toml.Load(`
|
||||||
"fmt"
|
[postgres]
|
||||||
"github.com/pelletier/go-toml"
|
user = "pelletier"
|
||||||
)
|
password = "mypassword"`)
|
||||||
|
// retrieve data directly
|
||||||
|
user := config.Get("postgres.user").(string)
|
||||||
|
|
||||||
config, err := toml.LoadFile("config.toml")
|
// or using an intermediate object
|
||||||
if err != nil {
|
postgresConfig := config.Get("postgres").(*toml.Tree)
|
||||||
fmt.Println("Error ", err.Error())
|
password := postgresConfig.Get("password").(string)
|
||||||
} else {
|
```
|
||||||
// retrieve data directly
|
|
||||||
user := config.Get("postgres.user").(string)
|
|
||||||
password := config.Get("postgres.password").(string)
|
|
||||||
|
|
||||||
// or using an intermediate object
|
Or use Unmarshal:
|
||||||
configTree := config.Get("postgres").(*toml.TomlTree)
|
|
||||||
user = configTree.Get("user").(string)
|
|
||||||
password = configTree.Get("password").(string)
|
|
||||||
fmt.Println("User is ", user, ". Password is ", password)
|
|
||||||
|
|
||||||
// show where elements are in the file
|
```go
|
||||||
fmt.Println("User position: %v", configTree.GetPosition("user"))
|
type Postgres struct {
|
||||||
fmt.Println("Password position: %v", configTree.GetPosition("password"))
|
User string
|
||||||
|
Password string
|
||||||
|
}
|
||||||
|
type Config struct {
|
||||||
|
Postgres Postgres
|
||||||
|
}
|
||||||
|
|
||||||
// use a query to gather elements without walking the tree
|
doc := []byte(`
|
||||||
results, _ := config.Query("$..[user,password]")
|
[Postgres]
|
||||||
for ii, item := range results.Values() {
|
User = "pelletier"
|
||||||
fmt.Println("Query result %d: %v", ii, item)
|
Password = "mypassword"`)
|
||||||
}
|
|
||||||
|
config := Config{}
|
||||||
|
toml.Unmarshal(doc, &config)
|
||||||
|
fmt.Println("user=", config.Postgres.User)
|
||||||
|
```
|
||||||
|
|
||||||
|
Or use a query:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// use a query to gather elements without walking the tree
|
||||||
|
q, _ := query.Compile("$..[user,password]")
|
||||||
|
results := q.Execute(config)
|
||||||
|
for ii, item := range results.Values() {
|
||||||
|
fmt.Println("Query result %d: %v", ii, item)
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -80,6 +83,47 @@ if err != nil {
|
|||||||
The documentation and additional examples are available at
|
The documentation and additional examples are available at
|
||||||
[godoc.org](http://godoc.org/github.com/pelletier/go-toml).
|
[godoc.org](http://godoc.org/github.com/pelletier/go-toml).
|
||||||
|
|
||||||
|
## Tools
|
||||||
|
|
||||||
|
Go-toml provides two handy command line tools:
|
||||||
|
|
||||||
|
* `tomll`: Reads TOML files and lint them.
|
||||||
|
|
||||||
|
```
|
||||||
|
go install github.com/pelletier/go-toml/cmd/tomll
|
||||||
|
tomll --help
|
||||||
|
```
|
||||||
|
* `tomljson`: Reads a TOML file and outputs its JSON representation.
|
||||||
|
|
||||||
|
```
|
||||||
|
go install github.com/pelletier/go-toml/cmd/tomljson
|
||||||
|
tomljson --help
|
||||||
|
```
|
||||||
|
|
||||||
|
* `jsontoml`: Reads a JSON file and outputs a TOML representation.
|
||||||
|
|
||||||
|
```
|
||||||
|
go install github.com/pelletier/go-toml/cmd/jsontoml
|
||||||
|
jsontoml --help
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker image
|
||||||
|
|
||||||
|
Those tools are also availble as a Docker image from
|
||||||
|
[dockerhub](https://hub.docker.com/r/pelletier/go-toml). For example, to
|
||||||
|
use `tomljson`:
|
||||||
|
|
||||||
|
```
|
||||||
|
docker run -v $PWD:/workdir pelletier/go-toml tomljson /workdir/example.toml
|
||||||
|
```
|
||||||
|
|
||||||
|
Only master (`latest`) and tagged versions are published to dockerhub. You
|
||||||
|
can build your own image as usual:
|
||||||
|
|
||||||
|
```
|
||||||
|
docker build -t go-toml .
|
||||||
|
```
|
||||||
|
|
||||||
## Contribute
|
## Contribute
|
||||||
|
|
||||||
Feel free to report bugs and patches using GitHub's pull requests system on
|
Feel free to report bugs and patches using GitHub's pull requests system on
|
||||||
@@ -88,31 +132,20 @@ much appreciated!
|
|||||||
|
|
||||||
### Run tests
|
### Run tests
|
||||||
|
|
||||||
You have to make sure two kind of tests run:
|
`go test ./...`
|
||||||
|
|
||||||
1. The Go unit tests
|
### Fuzzing
|
||||||
2. The TOML examples base
|
|
||||||
|
|
||||||
You can run both of them using `./test.sh`.
|
The script `./fuzz.sh` is available to
|
||||||
|
run [go-fuzz](https://github.com/dvyukov/go-fuzz) on go-toml.
|
||||||
|
|
||||||
|
## Versioning
|
||||||
|
|
||||||
|
Go-toml follows [Semantic Versioning](http://semver.org/). The supported version
|
||||||
|
of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
|
||||||
|
this document. The last two major versions of Go are supported
|
||||||
|
(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Copyright (c) 2013, 2014 Thomas Pelletier, Eric Anderton
|
The MIT License (MIT). Read [LICENSE](LICENSE).
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
|
||||||
of the Software, and to permit persons to whom the Software is furnished to do
|
|
||||||
so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
|
|||||||
@@ -0,0 +1,167 @@
|
|||||||
|
trigger:
|
||||||
|
- master
|
||||||
|
|
||||||
|
stages:
|
||||||
|
- stage: fuzzit
|
||||||
|
displayName: "Run Fuzzit"
|
||||||
|
dependsOn: []
|
||||||
|
condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master'))
|
||||||
|
jobs:
|
||||||
|
- job: submit
|
||||||
|
displayName: "Submit"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.13"
|
||||||
|
inputs:
|
||||||
|
version: "1.13"
|
||||||
|
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||||
|
- script: mkdir -p ${HOME}/go/src/github.com/pelletier/go-toml
|
||||||
|
- script: cp -R . ${HOME}/go/src/github.com/pelletier/go-toml
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
filePath: './fuzzit.sh'
|
||||||
|
env:
|
||||||
|
TYPE: fuzzing
|
||||||
|
FUZZIT_API_KEY: $(FUZZIT_API_KEY)
|
||||||
|
|
||||||
|
- stage: run_checks
|
||||||
|
displayName: "Check"
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- job: fmt
|
||||||
|
displayName: "fmt"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.13"
|
||||||
|
inputs:
|
||||||
|
version: "1.13"
|
||||||
|
- task: Go@0
|
||||||
|
displayName: "go fmt ./..."
|
||||||
|
inputs:
|
||||||
|
command: 'custom'
|
||||||
|
customCommand: 'fmt'
|
||||||
|
arguments: './...'
|
||||||
|
- job: coverage
|
||||||
|
displayName: "coverage"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.13"
|
||||||
|
inputs:
|
||||||
|
version: "1.13"
|
||||||
|
- task: Go@0
|
||||||
|
displayName: "Generate coverage"
|
||||||
|
inputs:
|
||||||
|
command: 'test'
|
||||||
|
arguments: "-race -coverprofile=coverage.txt -covermode=atomic"
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
targetType: 'inline'
|
||||||
|
script: 'bash <(curl -s https://codecov.io/bash) -t $(CODECOV_TOKEN)'
|
||||||
|
- job: benchmark
|
||||||
|
displayName: "benchmark"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.13"
|
||||||
|
inputs:
|
||||||
|
version: "1.13"
|
||||||
|
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
filePath: './benchmark.sh'
|
||||||
|
arguments: "master $(Build.Repository.Uri)"
|
||||||
|
|
||||||
|
- job: fuzzing
|
||||||
|
displayName: "fuzzing"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.13"
|
||||||
|
inputs:
|
||||||
|
version: "1.13"
|
||||||
|
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||||
|
- script: mkdir -p ${HOME}/go/src/github.com/pelletier/go-toml
|
||||||
|
- script: cp -R . ${HOME}/go/src/github.com/pelletier/go-toml
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
filePath: './fuzzit.sh'
|
||||||
|
env:
|
||||||
|
TYPE: local-regression
|
||||||
|
|
||||||
|
- job: go_unit_tests
|
||||||
|
displayName: "unit tests"
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
linux 1.13:
|
||||||
|
goVersion: '1.13'
|
||||||
|
imageName: 'ubuntu-latest'
|
||||||
|
mac 1.13:
|
||||||
|
goVersion: '1.13'
|
||||||
|
imageName: 'macos-10.13'
|
||||||
|
windows 1.13:
|
||||||
|
goVersion: '1.13'
|
||||||
|
imageName: 'vs2017-win2016'
|
||||||
|
linux 1.12:
|
||||||
|
goVersion: '1.12'
|
||||||
|
imageName: 'ubuntu-latest'
|
||||||
|
mac 1.12:
|
||||||
|
goVersion: '1.12'
|
||||||
|
imageName: 'macos-10.13'
|
||||||
|
windows 1.12:
|
||||||
|
goVersion: '1.12'
|
||||||
|
imageName: 'vs2017-win2016'
|
||||||
|
pool:
|
||||||
|
vmImage: $(imageName)
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go $(goVersion)"
|
||||||
|
inputs:
|
||||||
|
version: $(goVersion)
|
||||||
|
- task: Go@0
|
||||||
|
displayName: "go test ./..."
|
||||||
|
inputs:
|
||||||
|
command: 'test'
|
||||||
|
arguments: './...'
|
||||||
|
|
||||||
|
- stage: build_docker_image
|
||||||
|
displayName: "Build Docker image"
|
||||||
|
dependsOn: run_checks
|
||||||
|
jobs:
|
||||||
|
- job: build
|
||||||
|
displayName: "Build"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: Docker@2
|
||||||
|
inputs:
|
||||||
|
command: 'build'
|
||||||
|
Dockerfile: 'Dockerfile'
|
||||||
|
buildContext: '.'
|
||||||
|
addPipelineData: false
|
||||||
|
|
||||||
|
- stage: publish_docker_image
|
||||||
|
displayName: "Publish Docker image"
|
||||||
|
dependsOn: build_docker_image
|
||||||
|
condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master'))
|
||||||
|
jobs:
|
||||||
|
- job: publish
|
||||||
|
displayName: "Publish"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: Docker@2
|
||||||
|
inputs:
|
||||||
|
containerRegistry: 'DockerHub'
|
||||||
|
repository: 'pelletier/go-toml'
|
||||||
|
command: 'buildAndPush'
|
||||||
|
Dockerfile: 'Dockerfile'
|
||||||
|
buildContext: '.'
|
||||||
|
tags: 'latest'
|
||||||
+164
@@ -0,0 +1,164 @@
|
|||||||
|
{
|
||||||
|
"array": {
|
||||||
|
"key1": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
"key2": [
|
||||||
|
"red",
|
||||||
|
"yellow",
|
||||||
|
"green"
|
||||||
|
],
|
||||||
|
"key3": [
|
||||||
|
[
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
],
|
||||||
|
[
|
||||||
|
3,
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"key4": [
|
||||||
|
[
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"a",
|
||||||
|
"b",
|
||||||
|
"c"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"key5": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
"key6": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"boolean": {
|
||||||
|
"False": false,
|
||||||
|
"True": true
|
||||||
|
},
|
||||||
|
"datetime": {
|
||||||
|
"key1": "1979-05-27T07:32:00Z",
|
||||||
|
"key2": "1979-05-27T00:32:00-07:00",
|
||||||
|
"key3": "1979-05-27T00:32:00.999999-07:00"
|
||||||
|
},
|
||||||
|
"float": {
|
||||||
|
"both": {
|
||||||
|
"key": 6.626e-34
|
||||||
|
},
|
||||||
|
"exponent": {
|
||||||
|
"key1": 5e+22,
|
||||||
|
"key2": 1000000,
|
||||||
|
"key3": -0.02
|
||||||
|
},
|
||||||
|
"fractional": {
|
||||||
|
"key1": 1,
|
||||||
|
"key2": 3.1415,
|
||||||
|
"key3": -0.01
|
||||||
|
},
|
||||||
|
"underscores": {
|
||||||
|
"key1": 9224617.445991227,
|
||||||
|
"key2": 1e+100
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fruit": [{
|
||||||
|
"name": "apple",
|
||||||
|
"physical": {
|
||||||
|
"color": "red",
|
||||||
|
"shape": "round"
|
||||||
|
},
|
||||||
|
"variety": [{
|
||||||
|
"name": "red delicious"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "granny smith"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "banana",
|
||||||
|
"variety": [{
|
||||||
|
"name": "plantain"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"integer": {
|
||||||
|
"key1": 99,
|
||||||
|
"key2": 42,
|
||||||
|
"key3": 0,
|
||||||
|
"key4": -17,
|
||||||
|
"underscores": {
|
||||||
|
"key1": 1000,
|
||||||
|
"key2": 5349221,
|
||||||
|
"key3": 12345
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"products": [{
|
||||||
|
"name": "Hammer",
|
||||||
|
"sku": 738594937
|
||||||
|
},
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
"color": "gray",
|
||||||
|
"name": "Nail",
|
||||||
|
"sku": 284758393
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"string": {
|
||||||
|
"basic": {
|
||||||
|
"basic": "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
|
||||||
|
},
|
||||||
|
"literal": {
|
||||||
|
"multiline": {
|
||||||
|
"lines": "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n",
|
||||||
|
"regex2": "I [dw]on't need \\d{2} apples"
|
||||||
|
},
|
||||||
|
"quoted": "Tom \"Dubs\" Preston-Werner",
|
||||||
|
"regex": "\u003c\\i\\c*\\s*\u003e",
|
||||||
|
"winpath": "C:\\Users\\nodejs\\templates",
|
||||||
|
"winpath2": "\\\\ServerX\\admin$\\system32\\"
|
||||||
|
},
|
||||||
|
"multiline": {
|
||||||
|
"continued": {
|
||||||
|
"key1": "The quick brown fox jumps over the lazy dog.",
|
||||||
|
"key2": "The quick brown fox jumps over the lazy dog.",
|
||||||
|
"key3": "The quick brown fox jumps over the lazy dog."
|
||||||
|
},
|
||||||
|
"key1": "One\nTwo",
|
||||||
|
"key2": "One\nTwo",
|
||||||
|
"key3": "One\nTwo"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"table": {
|
||||||
|
"inline": {
|
||||||
|
"name": {
|
||||||
|
"first": "Tom",
|
||||||
|
"last": "Preston-Werner"
|
||||||
|
},
|
||||||
|
"point": {
|
||||||
|
"x": 1,
|
||||||
|
"y": 2
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"key": "value",
|
||||||
|
"subtable": {
|
||||||
|
"key": "another value"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"x": {
|
||||||
|
"y": {
|
||||||
|
"z": {
|
||||||
|
"w": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
Executable
+31
@@ -0,0 +1,31 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -ex
|
||||||
|
|
||||||
|
reference_ref=${1:-master}
|
||||||
|
reference_git=${2:-.}
|
||||||
|
|
||||||
|
if ! `hash benchstat 2>/dev/null`; then
|
||||||
|
echo "Installing benchstat"
|
||||||
|
go get golang.org/x/perf/cmd/benchstat
|
||||||
|
fi
|
||||||
|
|
||||||
|
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
|
||||||
|
ref_tempdir="${tempdir}/ref"
|
||||||
|
ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt"
|
||||||
|
local_benchmark="`pwd`/benchmark-local.txt"
|
||||||
|
|
||||||
|
echo "=== ${reference_ref} (${ref_tempdir})"
|
||||||
|
git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
|
||||||
|
pushd ${ref_tempdir} >/dev/null
|
||||||
|
git checkout ${reference_ref} >/dev/null 2>/dev/null
|
||||||
|
go test -bench=. -benchmem | tee ${ref_benchmark}
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== local"
|
||||||
|
go test -bench=. -benchmem | tee ${local_benchmark}
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== diff"
|
||||||
|
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}
|
||||||
+244
@@ -0,0 +1,244 @@
|
|||||||
|
################################################################################
|
||||||
|
## Comment
|
||||||
|
|
||||||
|
# Speak your mind with the hash symbol. They go from the symbol to the end of
|
||||||
|
# the line.
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Table
|
||||||
|
|
||||||
|
# Tables (also known as hash tables or dictionaries) are collections of
|
||||||
|
# key/value pairs. They appear in square brackets on a line by themselves.
|
||||||
|
|
||||||
|
[table]
|
||||||
|
|
||||||
|
key = "value" # Yeah, you can do this.
|
||||||
|
|
||||||
|
# Nested tables are denoted by table names with dots in them. Name your tables
|
||||||
|
# whatever crap you please, just don't use #, ., [ or ].
|
||||||
|
|
||||||
|
[table.subtable]
|
||||||
|
|
||||||
|
key = "another value"
|
||||||
|
|
||||||
|
# You don't need to specify all the super-tables if you don't want to. TOML
|
||||||
|
# knows how to do it for you.
|
||||||
|
|
||||||
|
# [x] you
|
||||||
|
# [x.y] don't
|
||||||
|
# [x.y.z] need these
|
||||||
|
[x.y.z.w] # for this to work
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Inline Table
|
||||||
|
|
||||||
|
# Inline tables provide a more compact syntax for expressing tables. They are
|
||||||
|
# especially useful for grouped data that can otherwise quickly become verbose.
|
||||||
|
# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
|
||||||
|
# allowed between the curly braces unless they are valid within a value.
|
||||||
|
|
||||||
|
[table.inline]
|
||||||
|
|
||||||
|
name = { first = "Tom", last = "Preston-Werner" }
|
||||||
|
point = { x = 1, y = 2 }
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## String
|
||||||
|
|
||||||
|
# There are four ways to express strings: basic, multi-line basic, literal, and
|
||||||
|
# multi-line literal. All strings must contain only valid UTF-8 characters.
|
||||||
|
|
||||||
|
[string.basic]
|
||||||
|
|
||||||
|
basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
|
||||||
|
|
||||||
|
[string.multiline]
|
||||||
|
|
||||||
|
# The following strings are byte-for-byte equivalent:
|
||||||
|
key1 = "One\nTwo"
|
||||||
|
key2 = """One\nTwo"""
|
||||||
|
key3 = """
|
||||||
|
One
|
||||||
|
Two"""
|
||||||
|
|
||||||
|
[string.multiline.continued]
|
||||||
|
|
||||||
|
# The following strings are byte-for-byte equivalent:
|
||||||
|
key1 = "The quick brown fox jumps over the lazy dog."
|
||||||
|
|
||||||
|
key2 = """
|
||||||
|
The quick brown \
|
||||||
|
|
||||||
|
|
||||||
|
fox jumps over \
|
||||||
|
the lazy dog."""
|
||||||
|
|
||||||
|
key3 = """\
|
||||||
|
The quick brown \
|
||||||
|
fox jumps over \
|
||||||
|
the lazy dog.\
|
||||||
|
"""
|
||||||
|
|
||||||
|
[string.literal]
|
||||||
|
|
||||||
|
# What you see is what you get.
|
||||||
|
winpath = 'C:\Users\nodejs\templates'
|
||||||
|
winpath2 = '\\ServerX\admin$\system32\'
|
||||||
|
quoted = 'Tom "Dubs" Preston-Werner'
|
||||||
|
regex = '<\i\c*\s*>'
|
||||||
|
|
||||||
|
|
||||||
|
[string.literal.multiline]
|
||||||
|
|
||||||
|
regex2 = '''I [dw]on't need \d{2} apples'''
|
||||||
|
lines = '''
|
||||||
|
The first newline is
|
||||||
|
trimmed in raw strings.
|
||||||
|
All other whitespace
|
||||||
|
is preserved.
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Integer
|
||||||
|
|
||||||
|
# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
|
||||||
|
# Negative numbers are prefixed with a minus sign.
|
||||||
|
|
||||||
|
[integer]
|
||||||
|
|
||||||
|
key1 = +99
|
||||||
|
key2 = 42
|
||||||
|
key3 = 0
|
||||||
|
key4 = -17
|
||||||
|
|
||||||
|
[integer.underscores]
|
||||||
|
|
||||||
|
# For large numbers, you may use underscores to enhance readability. Each
|
||||||
|
# underscore must be surrounded by at least one digit.
|
||||||
|
key1 = 1_000
|
||||||
|
key2 = 5_349_221
|
||||||
|
key3 = 1_2_3_4_5 # valid but inadvisable
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Float
|
||||||
|
|
||||||
|
# A float consists of an integer part (which may be prefixed with a plus or
|
||||||
|
# minus sign) followed by a fractional part and/or an exponent part.
|
||||||
|
|
||||||
|
[float.fractional]
|
||||||
|
|
||||||
|
key1 = +1.0
|
||||||
|
key2 = 3.1415
|
||||||
|
key3 = -0.01
|
||||||
|
|
||||||
|
[float.exponent]
|
||||||
|
|
||||||
|
key1 = 5e+22
|
||||||
|
key2 = 1e6
|
||||||
|
key3 = -2E-2
|
||||||
|
|
||||||
|
[float.both]
|
||||||
|
|
||||||
|
key = 6.626e-34
|
||||||
|
|
||||||
|
[float.underscores]
|
||||||
|
|
||||||
|
key1 = 9_224_617.445_991_228_313
|
||||||
|
key2 = 1e1_00
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Boolean
|
||||||
|
|
||||||
|
# Booleans are just the tokens you're used to. Always lowercase.
|
||||||
|
|
||||||
|
[boolean]
|
||||||
|
|
||||||
|
True = true
|
||||||
|
False = false
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Datetime
|
||||||
|
|
||||||
|
# Datetimes are RFC 3339 dates.
|
||||||
|
|
||||||
|
[datetime]
|
||||||
|
|
||||||
|
key1 = 1979-05-27T07:32:00Z
|
||||||
|
key2 = 1979-05-27T00:32:00-07:00
|
||||||
|
key3 = 1979-05-27T00:32:00.999999-07:00
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Array
|
||||||
|
|
||||||
|
# Arrays are square brackets with other primitives inside. Whitespace is
|
||||||
|
# ignored. Elements are separated by commas. Data types may not be mixed.
|
||||||
|
|
||||||
|
[array]
|
||||||
|
|
||||||
|
key1 = [ 1, 2, 3 ]
|
||||||
|
key2 = [ "red", "yellow", "green" ]
|
||||||
|
key3 = [ [ 1, 2 ], [3, 4, 5] ]
|
||||||
|
#key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
|
||||||
|
|
||||||
|
# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
|
||||||
|
# also ignore newlines between the brackets. Terminating commas are ok before
|
||||||
|
# the closing bracket.
|
||||||
|
|
||||||
|
key5 = [
|
||||||
|
1, 2, 3
|
||||||
|
]
|
||||||
|
key6 = [
|
||||||
|
1,
|
||||||
|
2, # this is ok
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Array of Tables
|
||||||
|
|
||||||
|
# These can be expressed by using a table name in double brackets. Each table
|
||||||
|
# with the same double bracketed name will be an element in the array. The
|
||||||
|
# tables are inserted in the order encountered.
|
||||||
|
|
||||||
|
[[products]]
|
||||||
|
|
||||||
|
name = "Hammer"
|
||||||
|
sku = 738594937
|
||||||
|
|
||||||
|
[[products]]
|
||||||
|
|
||||||
|
[[products]]
|
||||||
|
|
||||||
|
name = "Nail"
|
||||||
|
sku = 284758393
|
||||||
|
color = "gray"
|
||||||
|
|
||||||
|
|
||||||
|
# You can create nested arrays of tables as well.
|
||||||
|
|
||||||
|
[[fruit]]
|
||||||
|
name = "apple"
|
||||||
|
|
||||||
|
[fruit.physical]
|
||||||
|
color = "red"
|
||||||
|
shape = "round"
|
||||||
|
|
||||||
|
[[fruit.variety]]
|
||||||
|
name = "red delicious"
|
||||||
|
|
||||||
|
[[fruit.variety]]
|
||||||
|
name = "granny smith"
|
||||||
|
|
||||||
|
[[fruit]]
|
||||||
|
name = "banana"
|
||||||
|
|
||||||
|
[[fruit.variety]]
|
||||||
|
name = "plantain"
|
||||||
+121
@@ -0,0 +1,121 @@
|
|||||||
|
---
|
||||||
|
array:
|
||||||
|
key1:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
key2:
|
||||||
|
- red
|
||||||
|
- yellow
|
||||||
|
- green
|
||||||
|
key3:
|
||||||
|
- - 1
|
||||||
|
- 2
|
||||||
|
- - 3
|
||||||
|
- 4
|
||||||
|
- 5
|
||||||
|
key4:
|
||||||
|
- - 1
|
||||||
|
- 2
|
||||||
|
- - a
|
||||||
|
- b
|
||||||
|
- c
|
||||||
|
key5:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
key6:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
boolean:
|
||||||
|
'False': false
|
||||||
|
'True': true
|
||||||
|
datetime:
|
||||||
|
key1: '1979-05-27T07:32:00Z'
|
||||||
|
key2: '1979-05-27T00:32:00-07:00'
|
||||||
|
key3: '1979-05-27T00:32:00.999999-07:00'
|
||||||
|
float:
|
||||||
|
both:
|
||||||
|
key: 6.626e-34
|
||||||
|
exponent:
|
||||||
|
key1: 5.0e+22
|
||||||
|
key2: 1000000
|
||||||
|
key3: -0.02
|
||||||
|
fractional:
|
||||||
|
key1: 1
|
||||||
|
key2: 3.1415
|
||||||
|
key3: -0.01
|
||||||
|
underscores:
|
||||||
|
key1: 9224617.445991227
|
||||||
|
key2: 1.0e+100
|
||||||
|
fruit:
|
||||||
|
- name: apple
|
||||||
|
physical:
|
||||||
|
color: red
|
||||||
|
shape: round
|
||||||
|
variety:
|
||||||
|
- name: red delicious
|
||||||
|
- name: granny smith
|
||||||
|
- name: banana
|
||||||
|
variety:
|
||||||
|
- name: plantain
|
||||||
|
integer:
|
||||||
|
key1: 99
|
||||||
|
key2: 42
|
||||||
|
key3: 0
|
||||||
|
key4: -17
|
||||||
|
underscores:
|
||||||
|
key1: 1000
|
||||||
|
key2: 5349221
|
||||||
|
key3: 12345
|
||||||
|
products:
|
||||||
|
- name: Hammer
|
||||||
|
sku: 738594937
|
||||||
|
- {}
|
||||||
|
- color: gray
|
||||||
|
name: Nail
|
||||||
|
sku: 284758393
|
||||||
|
string:
|
||||||
|
basic:
|
||||||
|
basic: "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
|
||||||
|
literal:
|
||||||
|
multiline:
|
||||||
|
lines: |
|
||||||
|
The first newline is
|
||||||
|
trimmed in raw strings.
|
||||||
|
All other whitespace
|
||||||
|
is preserved.
|
||||||
|
regex2: I [dw]on't need \d{2} apples
|
||||||
|
quoted: Tom "Dubs" Preston-Werner
|
||||||
|
regex: "<\\i\\c*\\s*>"
|
||||||
|
winpath: C:\Users\nodejs\templates
|
||||||
|
winpath2: "\\\\ServerX\\admin$\\system32\\"
|
||||||
|
multiline:
|
||||||
|
continued:
|
||||||
|
key1: The quick brown fox jumps over the lazy dog.
|
||||||
|
key2: The quick brown fox jumps over the lazy dog.
|
||||||
|
key3: The quick brown fox jumps over the lazy dog.
|
||||||
|
key1: |-
|
||||||
|
One
|
||||||
|
Two
|
||||||
|
key2: |-
|
||||||
|
One
|
||||||
|
Two
|
||||||
|
key3: |-
|
||||||
|
One
|
||||||
|
Two
|
||||||
|
table:
|
||||||
|
inline:
|
||||||
|
name:
|
||||||
|
first: Tom
|
||||||
|
last: Preston-Werner
|
||||||
|
point:
|
||||||
|
x: 1
|
||||||
|
y: 2
|
||||||
|
key: value
|
||||||
|
subtable:
|
||||||
|
key: another value
|
||||||
|
x:
|
||||||
|
y:
|
||||||
|
z:
|
||||||
|
w: {}
|
||||||
@@ -0,0 +1,192 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"io/ioutil"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
burntsushi "github.com/BurntSushi/toml"
|
||||||
|
yaml "gopkg.in/yaml.v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
type benchmarkDoc struct {
|
||||||
|
Table struct {
|
||||||
|
Key string
|
||||||
|
Subtable struct {
|
||||||
|
Key string
|
||||||
|
}
|
||||||
|
Inline struct {
|
||||||
|
Name struct {
|
||||||
|
First string
|
||||||
|
Last string
|
||||||
|
}
|
||||||
|
Point struct {
|
||||||
|
X int64
|
||||||
|
U int64
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
String struct {
|
||||||
|
Basic struct {
|
||||||
|
Basic string
|
||||||
|
}
|
||||||
|
Multiline struct {
|
||||||
|
Key1 string
|
||||||
|
Key2 string
|
||||||
|
Key3 string
|
||||||
|
Continued struct {
|
||||||
|
Key1 string
|
||||||
|
Key2 string
|
||||||
|
Key3 string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Literal struct {
|
||||||
|
Winpath string
|
||||||
|
Winpath2 string
|
||||||
|
Quoted string
|
||||||
|
Regex string
|
||||||
|
Multiline struct {
|
||||||
|
Regex2 string
|
||||||
|
Lines string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Integer struct {
|
||||||
|
Key1 int64
|
||||||
|
Key2 int64
|
||||||
|
Key3 int64
|
||||||
|
Key4 int64
|
||||||
|
Underscores struct {
|
||||||
|
Key1 int64
|
||||||
|
Key2 int64
|
||||||
|
Key3 int64
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Float struct {
|
||||||
|
Fractional struct {
|
||||||
|
Key1 float64
|
||||||
|
Key2 float64
|
||||||
|
Key3 float64
|
||||||
|
}
|
||||||
|
Exponent struct {
|
||||||
|
Key1 float64
|
||||||
|
Key2 float64
|
||||||
|
Key3 float64
|
||||||
|
}
|
||||||
|
Both struct {
|
||||||
|
Key float64
|
||||||
|
}
|
||||||
|
Underscores struct {
|
||||||
|
Key1 float64
|
||||||
|
Key2 float64
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Boolean struct {
|
||||||
|
True bool
|
||||||
|
False bool
|
||||||
|
}
|
||||||
|
Datetime struct {
|
||||||
|
Key1 time.Time
|
||||||
|
Key2 time.Time
|
||||||
|
Key3 time.Time
|
||||||
|
}
|
||||||
|
Array struct {
|
||||||
|
Key1 []int64
|
||||||
|
Key2 []string
|
||||||
|
Key3 [][]int64
|
||||||
|
// TODO: Key4 not supported by go-toml's Unmarshal
|
||||||
|
Key5 []int64
|
||||||
|
Key6 []int64
|
||||||
|
}
|
||||||
|
Products []struct {
|
||||||
|
Name string
|
||||||
|
Sku int64
|
||||||
|
Color string
|
||||||
|
}
|
||||||
|
Fruit []struct {
|
||||||
|
Name string
|
||||||
|
Physical struct {
|
||||||
|
Color string
|
||||||
|
Shape string
|
||||||
|
Variety []struct {
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkParseToml(b *testing.B) {
|
||||||
|
fileBytes, err := ioutil.ReadFile("benchmark.toml")
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
_, err := LoadReader(bytes.NewReader(fileBytes))
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkUnmarshalToml(b *testing.B) {
|
||||||
|
bytes, err := ioutil.ReadFile("benchmark.toml")
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
target := benchmarkDoc{}
|
||||||
|
err := Unmarshal(bytes, &target)
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkUnmarshalBurntSushiToml(b *testing.B) {
|
||||||
|
bytes, err := ioutil.ReadFile("benchmark.toml")
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
target := benchmarkDoc{}
|
||||||
|
err := burntsushi.Unmarshal(bytes, &target)
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkUnmarshalJson(b *testing.B) {
|
||||||
|
bytes, err := ioutil.ReadFile("benchmark.json")
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
target := benchmarkDoc{}
|
||||||
|
err := json.Unmarshal(bytes, &target)
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkUnmarshalYaml(b *testing.B) {
|
||||||
|
bytes, err := ioutil.ReadFile("benchmark.yml")
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
target := benchmarkDoc{}
|
||||||
|
err := yaml.Unmarshal(bytes, &target)
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# fail out of the script if anything here fails
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# clear out stuff generated by test.sh
|
|
||||||
rm -rf src test_program_bin toml-test
|
|
||||||
@@ -0,0 +1,82 @@
|
|||||||
|
// Jsontoml reads JSON and converts to TOML.
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
// cat file.toml | jsontoml > file.json
|
||||||
|
// jsontoml file1.toml > file.json
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = func() {
|
||||||
|
fmt.Fprintln(os.Stderr, "jsontoml can be used in two ways:")
|
||||||
|
fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "Reading from a file name:")
|
||||||
|
fmt.Fprintln(os.Stderr, " tomljson file.toml")
|
||||||
|
}
|
||||||
|
flag.Parse()
|
||||||
|
os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
|
||||||
|
// read from stdin and print to stdout
|
||||||
|
inputReader := defaultInput
|
||||||
|
|
||||||
|
if len(files) > 0 {
|
||||||
|
file, err := os.Open(files[0])
|
||||||
|
if err != nil {
|
||||||
|
printError(err, errorOutput)
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
inputReader = file
|
||||||
|
defer file.Close()
|
||||||
|
}
|
||||||
|
s, err := reader(inputReader)
|
||||||
|
if err != nil {
|
||||||
|
printError(err, errorOutput)
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
io.WriteString(output, s)
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func printError(err error, output io.Writer) {
|
||||||
|
io.WriteString(output, err.Error()+"\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func reader(r io.Reader) (string, error) {
|
||||||
|
jsonMap := make(map[string]interface{})
|
||||||
|
jsonBytes, err := ioutil.ReadAll(r)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
err = json.Unmarshal(jsonBytes, &jsonMap)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
tree, err := toml.TreeFromMap(jsonMap)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return mapToTOML(tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapToTOML(t *toml.Tree) (string, error) {
|
||||||
|
tomlBytes, err := t.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(tomlBytes[:]), nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
|
||||||
|
output := buffer.String()
|
||||||
|
if output != expected {
|
||||||
|
t.Errorf("incorrect %s: \n%sexpected %s: \n%s", name, output, name, expected)
|
||||||
|
t.Log([]rune(output))
|
||||||
|
t.Log([]rune(expected))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
|
||||||
|
inputReader := strings.NewReader(input)
|
||||||
|
|
||||||
|
outputBuffer := new(bytes.Buffer)
|
||||||
|
errorBuffer := new(bytes.Buffer)
|
||||||
|
|
||||||
|
returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
|
||||||
|
|
||||||
|
expectBufferEquality(t, "output", outputBuffer, expectedOutput)
|
||||||
|
expectBufferEquality(t, "error", errorBuffer, expectedError)
|
||||||
|
|
||||||
|
if returnCode != exitCode {
|
||||||
|
t.Error("incorrect return code:", returnCode, "expected", exitCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromStdin(t *testing.T) {
|
||||||
|
expectedOutput := `
|
||||||
|
[mytoml]
|
||||||
|
a = 42.0
|
||||||
|
`
|
||||||
|
input := `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
expectedError := ``
|
||||||
|
expectedExitCode := 0
|
||||||
|
|
||||||
|
expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromFile(t *testing.T) {
|
||||||
|
input := `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
tmpfile, err := ioutil.TempFile("", "example.json")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if _, err := tmpfile.Write([]byte(input)); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
defer os.Remove(tmpfile.Name())
|
||||||
|
|
||||||
|
expectedOutput := `
|
||||||
|
[mytoml]
|
||||||
|
a = 42.0
|
||||||
|
`
|
||||||
|
expectedError := ``
|
||||||
|
expectedExitCode := 0
|
||||||
|
|
||||||
|
expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromMissingFile(t *testing.T) {
|
||||||
|
var expectedError string
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
expectedError = `open /this/file/does/not/exist: The system cannot find the path specified.
|
||||||
|
`
|
||||||
|
} else {
|
||||||
|
expectedError = `open /this/file/does/not/exist: no such file or directory
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
|
||||||
|
}
|
||||||
@@ -1,87 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"github.com/pelletier/go-toml"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
bytes, err := ioutil.ReadAll(os.Stdin)
|
|
||||||
if err != nil {
|
|
||||||
os.Exit(2)
|
|
||||||
}
|
|
||||||
tree, err := toml.Load(string(bytes))
|
|
||||||
if err != nil {
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
typedTree := translate(*tree)
|
|
||||||
|
|
||||||
if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil {
|
|
||||||
log.Fatalf("Error encoding JSON: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func translate(tomlData interface{}) interface{} {
|
|
||||||
switch orig := tomlData.(type) {
|
|
||||||
case map[string]interface{}:
|
|
||||||
typed := make(map[string]interface{}, len(orig))
|
|
||||||
for k, v := range orig {
|
|
||||||
typed[k] = translate(v)
|
|
||||||
}
|
|
||||||
return typed
|
|
||||||
case *toml.TomlTree:
|
|
||||||
return translate(*orig)
|
|
||||||
case toml.TomlTree:
|
|
||||||
keys := orig.Keys()
|
|
||||||
typed := make(map[string]interface{}, len(keys))
|
|
||||||
for _, k := range keys {
|
|
||||||
typed[k] = translate(orig.GetPath([]string{k}))
|
|
||||||
}
|
|
||||||
return typed
|
|
||||||
case []*toml.TomlTree:
|
|
||||||
typed := make([]map[string]interface{}, len(orig))
|
|
||||||
for i, v := range orig {
|
|
||||||
typed[i] = translate(v).(map[string]interface{})
|
|
||||||
}
|
|
||||||
return typed
|
|
||||||
case []map[string]interface{}:
|
|
||||||
typed := make([]map[string]interface{}, len(orig))
|
|
||||||
for i, v := range orig {
|
|
||||||
typed[i] = translate(v).(map[string]interface{})
|
|
||||||
}
|
|
||||||
return typed
|
|
||||||
case []interface{}:
|
|
||||||
typed := make([]interface{}, len(orig))
|
|
||||||
for i, v := range orig {
|
|
||||||
typed[i] = translate(v)
|
|
||||||
}
|
|
||||||
return tag("array", typed)
|
|
||||||
case time.Time:
|
|
||||||
return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
|
|
||||||
case bool:
|
|
||||||
return tag("bool", fmt.Sprintf("%v", orig))
|
|
||||||
case int64:
|
|
||||||
return tag("integer", fmt.Sprintf("%d", orig))
|
|
||||||
case float64:
|
|
||||||
return tag("float", fmt.Sprintf("%v", orig))
|
|
||||||
case string:
|
|
||||||
return tag("string", orig)
|
|
||||||
}
|
|
||||||
|
|
||||||
panic(fmt.Sprintf("Unknown type: %T", tomlData))
|
|
||||||
}
|
|
||||||
|
|
||||||
func tag(typeName string, data interface{}) map[string]interface{} {
|
|
||||||
return map[string]interface{}{
|
|
||||||
"type": typeName,
|
|
||||||
"value": data,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
// Tomljson reads TOML and converts to JSON.
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
// cat file.toml | tomljson > file.json
|
||||||
|
// tomljson file1.toml > file.json
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = func() {
|
||||||
|
fmt.Fprintln(os.Stderr, "tomljson can be used in two ways:")
|
||||||
|
fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:")
|
||||||
|
fmt.Fprintln(os.Stderr, " cat file.toml | tomljson > file.json")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "Reading from a file name:")
|
||||||
|
fmt.Fprintln(os.Stderr, " tomljson file.toml")
|
||||||
|
}
|
||||||
|
flag.Parse()
|
||||||
|
os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
|
||||||
|
// read from stdin and print to stdout
|
||||||
|
inputReader := defaultInput
|
||||||
|
|
||||||
|
if len(files) > 0 {
|
||||||
|
var err error
|
||||||
|
inputReader, err = os.Open(files[0])
|
||||||
|
if err != nil {
|
||||||
|
printError(err, errorOutput)
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s, err := reader(inputReader)
|
||||||
|
if err != nil {
|
||||||
|
printError(err, errorOutput)
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
io.WriteString(output, s+"\n")
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func printError(err error, output io.Writer) {
|
||||||
|
io.WriteString(output, err.Error()+"\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func reader(r io.Reader) (string, error) {
|
||||||
|
tree, err := toml.LoadReader(r)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return mapToJSON(tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapToJSON(tree *toml.Tree) (string, error) {
|
||||||
|
treeMap := tree.ToMap()
|
||||||
|
bytes, err := json.MarshalIndent(treeMap, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(bytes[:]), nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,90 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
|
||||||
|
output := buffer.String()
|
||||||
|
if output != expected {
|
||||||
|
t.Errorf("incorrect %s:\n%s\n\nexpected %s:\n%s", name, output, name, expected)
|
||||||
|
t.Log([]rune(output))
|
||||||
|
t.Log([]rune(expected))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
|
||||||
|
inputReader := strings.NewReader(input)
|
||||||
|
outputBuffer := new(bytes.Buffer)
|
||||||
|
errorBuffer := new(bytes.Buffer)
|
||||||
|
|
||||||
|
returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
|
||||||
|
|
||||||
|
expectBufferEquality(t, "output", outputBuffer, expectedOutput)
|
||||||
|
expectBufferEquality(t, "error", errorBuffer, expectedError)
|
||||||
|
|
||||||
|
if returnCode != exitCode {
|
||||||
|
t.Error("incorrect return code:", returnCode, "expected", exitCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromStdin(t *testing.T) {
|
||||||
|
input := `
|
||||||
|
[mytoml]
|
||||||
|
a = 42`
|
||||||
|
expectedOutput := `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
expectedError := ``
|
||||||
|
expectedExitCode := 0
|
||||||
|
|
||||||
|
expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromFile(t *testing.T) {
|
||||||
|
input := `
|
||||||
|
[mytoml]
|
||||||
|
a = 42`
|
||||||
|
|
||||||
|
tmpfile, err := ioutil.TempFile("", "example.toml")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if _, err := tmpfile.Write([]byte(input)); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
defer os.Remove(tmpfile.Name())
|
||||||
|
|
||||||
|
expectedOutput := `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
expectedError := ``
|
||||||
|
expectedExitCode := 0
|
||||||
|
|
||||||
|
expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromMissingFile(t *testing.T) {
|
||||||
|
var expectedError string
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
expectedError = `open /this/file/does/not/exist: The system cannot find the path specified.
|
||||||
|
`
|
||||||
|
} else {
|
||||||
|
expectedError = `open /this/file/does/not/exist: no such file or directory
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
|
||||||
|
}
|
||||||
@@ -0,0 +1,65 @@
|
|||||||
|
// Tomll is a linter for TOML
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
// cat file.toml | tomll > file_linted.toml
|
||||||
|
// tomll file1.toml file2.toml # lint the two files in place
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = func() {
|
||||||
|
fmt.Fprintln(os.Stderr, "tomll can be used in two ways:")
|
||||||
|
fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:")
|
||||||
|
fmt.Fprintln(os.Stderr, " cat file.toml | tomll > file.toml")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "Reading and updating a list of files:")
|
||||||
|
fmt.Fprintln(os.Stderr, " tomll a.toml b.toml c.toml")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "When given a list of files, tomll will modify all files in place without asking.")
|
||||||
|
}
|
||||||
|
flag.Parse()
|
||||||
|
// read from stdin and print to stdout
|
||||||
|
if flag.NArg() == 0 {
|
||||||
|
s, err := lintReader(os.Stdin)
|
||||||
|
if err != nil {
|
||||||
|
io.WriteString(os.Stderr, err.Error())
|
||||||
|
os.Exit(-1)
|
||||||
|
}
|
||||||
|
io.WriteString(os.Stdout, s)
|
||||||
|
} else {
|
||||||
|
// otherwise modify a list of files
|
||||||
|
for _, filename := range flag.Args() {
|
||||||
|
s, err := lintFile(filename)
|
||||||
|
if err != nil {
|
||||||
|
io.WriteString(os.Stderr, err.Error())
|
||||||
|
os.Exit(-1)
|
||||||
|
}
|
||||||
|
ioutil.WriteFile(filename, []byte(s), 0644)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func lintFile(filename string) (string, error) {
|
||||||
|
tree, err := toml.LoadFile(filename)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return tree.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func lintReader(r io.Reader) (string, error) {
|
||||||
|
tree, err := toml.LoadReader(r)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return tree.String(), nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,219 @@
|
|||||||
|
// Tomltestgen is a program that retrieves a given version of
|
||||||
|
// https://github.com/BurntSushi/toml-test and generates go code for go-toml's unit tests
|
||||||
|
// based on the test files.
|
||||||
|
//
|
||||||
|
// Usage: go run github.com/pelletier/go-toml/cmd/tomltestgen > toml_testgen_test.go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/format"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// invalid describes one toml-test fixture whose input must fail to parse.
type invalid struct {
	Name  string
	Input string
}

// valid describes one toml-test fixture that must parse successfully,
// together with the reference JSON encoding of the expected value.
type valid struct {
	Name    string
	Input   string
	JsonRef string
}

// testsCollection is the data object handed to srcTemplate: the toml-test
// git ref and generation time, the gathered fixtures, and a running total.
type testsCollection struct {
	Ref       string
	Timestamp string
	Invalid   []invalid
	Valid     []valid
	Count     int
}
|
||||||
|
|
||||||
|
// srcTemplate is the text/template used to emit the generated Go test
// file. Each invalid fixture becomes a TestInvalid* function and each
// valid fixture a TestValid* function; the "gostr" template helper
// renders raw fixture text as a Go string literal. The loose indentation
// here is harmless: main() runs the output through go/format.
const srcTemplate = "// Generated by tomltestgen for toml-test ref {{.Ref}} on {{.Timestamp}}\n" +
	"package toml\n" +
	" import (\n" +
	" \"testing\"\n" +
	")\n" +

	"{{range .Invalid}}\n" +
	"func TestInvalid{{.Name}}(t *testing.T) {\n" +
	" input := {{.Input|gostr}}\n" +
	" testgenInvalid(t, input)\n" +
	"}\n" +
	"{{end}}\n" +
	"\n" +
	"{{range .Valid}}\n" +
	"func TestValid{{.Name}}(t *testing.T) {\n" +
	" input := {{.Input|gostr}}\n" +
	" jsonRef := {{.JsonRef|gostr}}\n" +
	" testgenValid(t, input, jsonRef)\n" +
	"}\n" +
	"{{end}}\n"
|
||||||
|
|
||||||
|
// downloadTmpFile fetches url and stores the response body in a fresh
// temporary zip file, returning that file's path. Any failure panics,
// which is acceptable in this one-shot code generator.
func downloadTmpFile(url string) string {
	log.Println("starting to download file from", url)
	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, err := ioutil.TempFile("", "toml-test-*.zip")
	if err != nil {
		panic(err)
	}
	defer out.Close()

	n, err := io.Copy(out, resp.Body)
	if err != nil {
		panic(err)
	}
	// Guard against a truncated download when the server advertised a length.
	if resp.ContentLength > 0 && n != resp.ContentLength {
		panic(fmt.Errorf("copied %d bytes, request body had %d", n, resp.ContentLength))
	}
	return out.Name()
}
|
||||||
|
|
||||||
|
// kebabToCamel converts a kebab-case fixture name ("array-empty") to a
// CamelCase Go identifier fragment ("ArrayEmpty"). Dashes are dropped and
// the rune after each dash (and the first rune) is upper-cased.
//
// Fix: the original built the result with string += in a loop, which is
// quadratic; a strings.Builder makes it a single linear pass.
func kebabToCamel(kebab string) string {
	var b strings.Builder
	b.Grow(len(kebab))
	nextUpper := true
	for _, c := range kebab {
		switch {
		case nextUpper:
			// Matches the original behavior: the upper-casing applies to
			// whatever rune follows, before the dash check.
			b.WriteString(strings.ToUpper(string(c)))
			nextUpper = false
		case c == '-':
			nextUpper = true
		default:
			b.WriteRune(c)
		}
	}
	return b.String()
}
|
||||||
|
|
||||||
|
func readFileFromZip(f *zip.File) string {
|
||||||
|
reader, err := f.Open()
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
defer reader.Close()
|
||||||
|
bytes, err := ioutil.ReadAll(reader)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return string(bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
func templateGoStr(input string) string {
|
||||||
|
if len(input) > 0 && input[len(input)-1] == '\n' {
|
||||||
|
input = input[0 : len(input)-1]
|
||||||
|
}
|
||||||
|
if strings.Contains(input, "`") {
|
||||||
|
lines := strings.Split(input, "\n")
|
||||||
|
for idx, line := range lines {
|
||||||
|
lines[idx] = strconv.Quote(line + "\n")
|
||||||
|
}
|
||||||
|
return strings.Join(lines, " + \n")
|
||||||
|
}
|
||||||
|
return "`" + input + "`"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Command-line flags.
var (
	// ref selects which toml-test git reference to download and generate from.
	ref = flag.String("r", "master", "git reference")
)
|
||||||
|
|
||||||
|
// usage prints a short usage banner followed by the flag defaults to stderr.
func usage() {
	fmt.Fprint(os.Stderr, "usage: tomltestgen [flags]\n")
	flag.PrintDefaults()
}
|
||||||
|
|
||||||
|
// main downloads the requested toml-test revision as a zip archive,
// extracts every valid/invalid TOML fixture (pairing valid fixtures with
// their reference JSON files), renders them through srcTemplate, gofmt's
// the result, and prints the generated Go test file to stdout.
func main() {
	flag.Usage = usage
	flag.Parse()

	// codeload serves a zip snapshot of any git ref without needing git.
	url := "https://codeload.github.com/BurntSushi/toml-test/zip/" + *ref
	resultFile := downloadTmpFile(url)
	defer os.Remove(resultFile)
	log.Println("file written to", resultFile)

	zipReader, err := zip.OpenReader(resultFile)
	if err != nil {
		panic(err)
	}
	defer zipReader.Close()

	collection := testsCollection{
		Ref:       *ref,
		Timestamp: time.Now().Format(time.RFC3339),
	}

	// Index entries by name so a .toml fixture can look up its .json twin.
	zipFilesMap := map[string]*zip.File{}

	for _, f := range zipReader.File {
		zipFilesMap[f.Name] = f
	}

	// Group 1 is the path without extension, group 2 the test type
	// (valid|invalid), group 3 the bare fixture name.
	testFileRegexp := regexp.MustCompile(`([^/]+/tests/(valid|invalid)/(.+))\.(toml)`)
	for _, f := range zipReader.File {
		groups := testFileRegexp.FindStringSubmatch(f.Name)
		if len(groups) > 0 {
			name := kebabToCamel(groups[3])
			testType := groups[2]

			log.Printf("> [%s] %s\n", testType, name)

			tomlContent := readFileFromZip(f)

			switch testType {
			case "invalid":
				collection.Invalid = append(collection.Invalid, invalid{
					Name:  name,
					Input: tomlContent,
				})
				collection.Count++
			case "valid":
				// Valid fixtures carry a sibling .json file describing the
				// expected parsed value.
				baseFilePath := groups[1]
				jsonFilePath := baseFilePath + ".json"
				jsonContent := readFileFromZip(zipFilesMap[jsonFilePath])

				collection.Valid = append(collection.Valid, valid{
					Name:    name,
					Input:   tomlContent,
					JsonRef: jsonContent,
				})
				collection.Count++
			default:
				// Unreachable given the regexp alternation; guards future edits.
				panic(fmt.Sprintf("unknown test type: %s", testType))
			}
		}
	}

	log.Printf("Collected %d tests from toml-test\n", collection.Count)

	funcMap := template.FuncMap{
		"gostr": templateGoStr,
	}
	t := template.Must(template.New("src").Funcs(funcMap).Parse(srcTemplate))
	buf := new(bytes.Buffer)
	err = t.Execute(buf, collection)
	if err != nil {
		panic(err)
	}
	// gofmt the generated source so the template can stay sloppy about
	// whitespace; this also catches any syntax error in the output.
	outputBytes, err := format.Source(buf.Bytes())
	if err != nil {
		panic(err)
	}
	fmt.Println(string(outputBytes))
}
|
||||||
@@ -1,245 +1,23 @@
|
|||||||
// Package toml is a TOML markup language parser.
|
// Package toml is a TOML parser and manipulation library.
|
||||||
//
|
//
|
||||||
// This version supports the specification as described in
|
// This version supports the specification as described in
|
||||||
// https://github.com/toml-lang/toml/blob/master/versions/toml-v0.2.0.md
|
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
|
||||||
//
|
//
|
||||||
// TOML Parsing
|
// Marshaling
|
||||||
//
|
//
|
||||||
// TOML data may be parsed in two ways: by file, or by string.
|
// Go-toml can marshal and unmarshal TOML documents from and to data
|
||||||
|
// structures.
|
||||||
//
|
//
|
||||||
// // load TOML data by filename
|
// TOML document as a tree
|
||||||
// tree, err := toml.LoadFile("filename.toml")
|
|
||||||
//
|
//
|
||||||
// // load TOML data stored in a string
|
// Go-toml can operate on a TOML document as a tree. Use one of the Load*
|
||||||
// tree, err := toml.Load(stringContainingTomlData)
|
// functions to parse TOML data and obtain a Tree instance, then one of its
|
||||||
|
// methods to manipulate the tree.
|
||||||
//
|
//
|
||||||
// Either way, the result is a TomlTree object that can be used to navigate the
|
// JSONPath-like queries
|
||||||
// structure and data within the original document.
|
|
||||||
//
|
//
|
||||||
//
|
// The package github.com/pelletier/go-toml/query implements a system
|
||||||
// Getting data from the TomlTree
|
// similar to JSONPath to quickly retrieve elements of a TOML document using a
|
||||||
//
|
// single expression. See the package documentation for more information.
|
||||||
// After parsing TOML data with Load() or LoadFile(), use the Has() and Get()
|
|
||||||
// methods on the returned TomlTree, to find your way through the document data.
|
|
||||||
//
|
|
||||||
// if tree.Has('foo') {
|
|
||||||
// fmt.Printf("foo is: %v", tree.Get('foo'))
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// Working with Paths
|
|
||||||
//
|
|
||||||
// Go-toml has support for basic dot-separated key paths on the Has(), Get(), Set()
|
|
||||||
// and GetDefault() methods. These are the same kind of key paths used within the
|
|
||||||
// TOML specification for struct names.
|
|
||||||
//
|
|
||||||
// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
|
|
||||||
// tree.Has("foo.bar.baz")
|
|
||||||
//
|
|
||||||
// // returns the key at this path, if it is there
|
|
||||||
// tree.Get("foo.bar.baz")
|
|
||||||
//
|
|
||||||
// TOML allows keys to contain '.', which can cause this syntax to be problematic
|
|
||||||
// for some documents. In such cases, use the GetPath(), HasPath(), and SetPath(),
|
|
||||||
// methods to explicitly define the path. This form is also faster, since
|
|
||||||
// it avoids having to parse the passed key for '.' delimiters.
|
|
||||||
//
|
|
||||||
// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
|
|
||||||
// tree.HasPath(string{}{"foo","bar","baz"})
|
|
||||||
//
|
|
||||||
// // returns the key at this path, if it is there
|
|
||||||
// tree.GetPath(string{}{"foo","bar","baz"})
|
|
||||||
//
|
|
||||||
// Note that this is distinct from the heavyweight query syntax supported by
|
|
||||||
// TomlTree.Query() and the Query() struct (see below).
|
|
||||||
//
|
|
||||||
// Position Support
|
|
||||||
//
|
|
||||||
// Each element within the TomlTree is stored with position metadata, which is
|
|
||||||
// invaluable for providing semantic feedback to a user. This helps in
|
|
||||||
// situations where the TOML file parses correctly, but contains data that is
|
|
||||||
// not correct for the application. In such cases, an error message can be
|
|
||||||
// generated that indicates the problem line and column number in the source
|
|
||||||
// TOML document.
|
|
||||||
//
|
|
||||||
// // load TOML data
|
|
||||||
// tree, _ := toml.Load("filename.toml")
|
|
||||||
//
|
|
||||||
// // get an entry and report an error if it's the wrong type
|
|
||||||
// element := tree.Get("foo")
|
|
||||||
// if value, ok := element.(int64); !ok {
|
|
||||||
// return fmt.Errorf("%v: Element 'foo' must be an integer", tree.GetPosition("foo"))
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// // report an error if an expected element is missing
|
|
||||||
// if !tree.Has("bar") {
|
|
||||||
// return fmt.Errorf("%v: Expected 'bar' element", tree.GetPosition(""))
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// Query Support
|
|
||||||
//
|
|
||||||
// The TOML query path implementation is based loosely on the JSONPath specification:
|
|
||||||
// http://goessner.net/articles/JsonPath/
|
|
||||||
//
|
|
||||||
// The idea behind a query path is to allow quick access to any element, or set
|
|
||||||
// of elements within TOML document, with a single expression.
|
|
||||||
//
|
|
||||||
// result := tree.Query("$.foo.bar.baz") // result is 'nil' if the path is not present
|
|
||||||
//
|
|
||||||
// This is equivalent to:
|
|
||||||
//
|
|
||||||
// next := tree.Get("foo")
|
|
||||||
// if next != nil {
|
|
||||||
// next = next.Get("bar")
|
|
||||||
// if next != nil {
|
|
||||||
// next = next.Get("baz")
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// result := next
|
|
||||||
//
|
|
||||||
// As illustrated above, the query path is much more efficient, especially since
|
|
||||||
// the structure of the TOML file can vary. Rather than making assumptions about
|
|
||||||
// a document's structure, a query allows the programmer to make structured
|
|
||||||
// requests into the document, and get zero or more values as a result.
|
|
||||||
//
|
|
||||||
// The syntax of a query begins with a root token, followed by any number
|
|
||||||
// sub-expressions:
|
|
||||||
//
|
|
||||||
// $
|
|
||||||
// Root of the TOML tree. This must always come first.
|
|
||||||
// .name
|
|
||||||
// Selects child of this node, where 'name' is a TOML key
|
|
||||||
// name.
|
|
||||||
// ['name']
|
|
||||||
// Selects child of this node, where 'name' is a string
|
|
||||||
// containing a TOML key name.
|
|
||||||
// [index]
|
|
||||||
// Selects child array element at 'index'.
|
|
||||||
// ..expr
|
|
||||||
// Recursively selects all children, filtered by a union,
|
|
||||||
// index, or slice expression.
|
|
||||||
// ..*
|
|
||||||
// Recursive selection of all nodes at this point in the
|
|
||||||
// tree.
|
|
||||||
// .*
|
|
||||||
// Selects all children of the current node.
|
|
||||||
// [expr,expr]
|
|
||||||
// Union operator - a logical 'or' grouping of two or more
|
|
||||||
// sub-expressions: index, key name, or filter.
|
|
||||||
// [start:end:step]
|
|
||||||
// Slice operator - selects array elements from start to
|
|
||||||
// end-1, at the given step. All three arguments are
|
|
||||||
// optional.
|
|
||||||
// [?(filter)]
|
|
||||||
// Named filter expression - the function 'filter' is
|
|
||||||
// used to filter children at this node.
|
|
||||||
//
|
|
||||||
// Query Indexes And Slices
|
|
||||||
//
|
|
||||||
// Index expressions perform no bounds checking, and will contribute no
|
|
||||||
// values to the result set if the provided index or index range is invalid.
|
|
||||||
// Negative indexes represent values from the end of the array, counting backwards.
|
|
||||||
//
|
|
||||||
// // select the last index of the array named 'foo'
|
|
||||||
// tree.Query("$.foo[-1]")
|
|
||||||
//
|
|
||||||
// Slice expressions are supported, by using ':' to separate a start/end index pair.
|
|
||||||
//
|
|
||||||
// // select up to the first five elements in the array
|
|
||||||
// tree.Query("$.foo[0:5]")
|
|
||||||
//
|
|
||||||
// Slice expressions also allow negative indexes for the start and stop
|
|
||||||
// arguments.
|
|
||||||
//
|
|
||||||
// // select all array elements.
|
|
||||||
// tree.Query("$.foo[0:-1]")
|
|
||||||
//
|
|
||||||
// Slice expressions may have an optional stride/step parameter:
|
|
||||||
//
|
|
||||||
// // select every other element
|
|
||||||
// tree.Query("$.foo[0:-1:2]")
|
|
||||||
//
|
|
||||||
// Slice start and end parameters are also optional:
|
|
||||||
//
|
|
||||||
// // these are all equivalent and select all the values in the array
|
|
||||||
// tree.Query("$.foo[:]")
|
|
||||||
// tree.Query("$.foo[0:]")
|
|
||||||
// tree.Query("$.foo[:-1]")
|
|
||||||
// tree.Query("$.foo[0:-1:]")
|
|
||||||
// tree.Query("$.foo[::1]")
|
|
||||||
// tree.Query("$.foo[0::1]")
|
|
||||||
// tree.Query("$.foo[:-1:1]")
|
|
||||||
// tree.Query("$.foo[0:-1:1]")
|
|
||||||
//
|
|
||||||
// Query Filters
|
|
||||||
//
|
|
||||||
// Query filters are used within a Union [,] or single Filter [] expression.
|
|
||||||
// A filter only allows nodes that qualify through to the next expression,
|
|
||||||
// and/or into the result set.
|
|
||||||
//
|
|
||||||
// // returns children of foo that are permitted by the 'bar' filter.
|
|
||||||
// tree.Query("$.foo[?(bar)]")
|
|
||||||
//
|
|
||||||
// There are several filters provided with the library:
|
|
||||||
//
|
|
||||||
// tree
|
|
||||||
// Allows nodes of type TomlTree.
|
|
||||||
// int
|
|
||||||
// Allows nodes of type int64.
|
|
||||||
// float
|
|
||||||
// Allows nodes of type float64.
|
|
||||||
// string
|
|
||||||
// Allows nodes of type string.
|
|
||||||
// time
|
|
||||||
// Allows nodes of type time.Time.
|
|
||||||
// bool
|
|
||||||
// Allows nodes of type bool.
|
|
||||||
//
|
|
||||||
// Query Results
|
|
||||||
//
|
|
||||||
// An executed query returns a QueryResult object. This contains the nodes
|
|
||||||
// in the TOML tree that qualify the query expression. Position information
|
|
||||||
// is also available for each value in the set.
|
|
||||||
//
|
|
||||||
// // display the results of a query
|
|
||||||
// results := tree.Query("$.foo.bar.baz")
|
|
||||||
// for idx, value := results.Values() {
|
|
||||||
// fmt.Println("%v: %v", results.Positions()[idx], value)
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// Compiled Queries
|
|
||||||
//
|
|
||||||
// Queries may be executed directly on a TomlTree object, or compiled ahead
|
|
||||||
// of time and executed discretely. The former is more convenient, but has the
|
|
||||||
// penalty of having to recompile the query expression each time.
|
|
||||||
//
|
|
||||||
// // basic query
|
|
||||||
// results := tree.Query("$.foo.bar.baz")
|
|
||||||
//
|
|
||||||
// // compiled query
|
|
||||||
// query := toml.CompileQuery("$.foo.bar.baz")
|
|
||||||
// results := query.Execute(tree)
|
|
||||||
//
|
|
||||||
// // run the compiled query again on a different tree
|
|
||||||
// moreResults := query.Execute(anotherTree)
|
|
||||||
//
|
|
||||||
// User Defined Query Filters
|
|
||||||
//
|
|
||||||
// Filter expressions may also be user defined by using the SetFilter()
|
|
||||||
// function on the Query object. The function must return true/false, which
|
|
||||||
// signifies if the passed node is kept or discarded, respectively.
|
|
||||||
//
|
|
||||||
// // create a query that references a user-defined filter
|
|
||||||
// query, _ := CompileQuery("$[?(bazOnly)]")
|
|
||||||
//
|
|
||||||
// // define the filter, and assign it to the query
|
|
||||||
// query.SetFilter("bazOnly", func(node interface{}) bool{
|
|
||||||
// if tree, ok := node.(*TomlTree); ok {
|
|
||||||
// return tree.Has("baz")
|
|
||||||
// }
|
|
||||||
// return false // reject all other node types
|
|
||||||
// })
|
|
||||||
//
|
|
||||||
// // run the query
|
|
||||||
// query.Execute(tree)
|
|
||||||
//
|
//
|
||||||
package toml
|
package toml
|
||||||
|
|||||||
+88
-63
@@ -1,81 +1,106 @@
|
|||||||
// code examples for godoc
|
// code examples for godoc
|
||||||
|
|
||||||
package toml
|
package toml_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log"
|
||||||
|
|
||||||
|
toml "github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
func ExampleNodeFilterFn_filterExample() {
|
func Example_tree() {
|
||||||
tree, _ := Load(`
|
config, err := toml.LoadFile("config.toml")
|
||||||
[struct_one]
|
|
||||||
foo = "foo"
|
|
||||||
bar = "bar"
|
|
||||||
|
|
||||||
[struct_two]
|
|
||||||
baz = "baz"
|
|
||||||
gorf = "gorf"
|
|
||||||
`)
|
|
||||||
|
|
||||||
// create a query that references a user-defined-filter
|
|
||||||
query, _ := CompileQuery("$[?(bazOnly)]")
|
|
||||||
|
|
||||||
// define the filter, and assign it to the query
|
|
||||||
query.SetFilter("bazOnly", func(node interface{}) bool {
|
|
||||||
if tree, ok := node.(*TomlTree); ok {
|
|
||||||
return tree.Has("baz")
|
|
||||||
}
|
|
||||||
return false // reject all other node types
|
|
||||||
})
|
|
||||||
|
|
||||||
// results contain only the 'struct_two' TomlTree
|
|
||||||
query.Execute(tree)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ExampleQuery_queryExample() {
|
|
||||||
config, _ := Load(`
|
|
||||||
[[book]]
|
|
||||||
title = "The Stand"
|
|
||||||
author = "Stephen King"
|
|
||||||
[[book]]
|
|
||||||
title = "For Whom the Bell Tolls"
|
|
||||||
author = "Ernest Hemmingway"
|
|
||||||
[[book]]
|
|
||||||
title = "Neuromancer"
|
|
||||||
author = "William Gibson"
|
|
||||||
`)
|
|
||||||
|
|
||||||
// find and print all the authors in the document
|
|
||||||
authors, _ := config.Query("$.book.author")
|
|
||||||
for _, name := range authors.Values() {
|
|
||||||
fmt.Println(name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Example_comprehensiveExample() {
|
|
||||||
config, err := LoadFile("config.toml")
|
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Println("Error ", err.Error())
|
fmt.Println("Error ", err.Error())
|
||||||
} else {
|
} else {
|
||||||
// retrieve data directly
|
// retrieve data directly
|
||||||
user := config.Get("postgres.user").(string)
|
directUser := config.Get("postgres.user").(string)
|
||||||
password := config.Get("postgres.password").(string)
|
directPassword := config.Get("postgres.password").(string)
|
||||||
|
fmt.Println("User is", directUser, " and password is", directPassword)
|
||||||
|
|
||||||
// or using an intermediate object
|
// or using an intermediate object
|
||||||
configTree := config.Get("postgres").(*TomlTree)
|
configTree := config.Get("postgres").(*toml.Tree)
|
||||||
user = configTree.Get("user").(string)
|
user := configTree.Get("user").(string)
|
||||||
password = configTree.Get("password").(string)
|
password := configTree.Get("password").(string)
|
||||||
fmt.Println("User is ", user, ". Password is ", password)
|
fmt.Println("User is", user, " and password is", password)
|
||||||
|
|
||||||
// show where elements are in the file
|
// show where elements are in the file
|
||||||
fmt.Println("User position: %v", configTree.GetPosition("user"))
|
fmt.Printf("User position: %v\n", configTree.GetPosition("user"))
|
||||||
fmt.Println("Password position: %v", configTree.GetPosition("password"))
|
fmt.Printf("Password position: %v\n", configTree.GetPosition("password"))
|
||||||
|
|
||||||
// use a query to gather elements without walking the tree
|
|
||||||
results, _ := config.Query("$..[user,password]")
|
|
||||||
for ii, item := range results.Values() {
|
|
||||||
fmt.Println("Query result %d: %v", ii, item)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func Example_unmarshal() {
|
||||||
|
type Employer struct {
|
||||||
|
Name string
|
||||||
|
Phone string
|
||||||
|
}
|
||||||
|
type Person struct {
|
||||||
|
Name string
|
||||||
|
Age int64
|
||||||
|
Employer Employer
|
||||||
|
}
|
||||||
|
|
||||||
|
document := []byte(`
|
||||||
|
name = "John"
|
||||||
|
age = 30
|
||||||
|
[employer]
|
||||||
|
name = "Company Inc."
|
||||||
|
phone = "+1 234 567 89012"
|
||||||
|
`)
|
||||||
|
|
||||||
|
person := Person{}
|
||||||
|
toml.Unmarshal(document, &person)
|
||||||
|
fmt.Println(person.Name, "is", person.Age, "and works at", person.Employer.Name)
|
||||||
|
// Output:
|
||||||
|
// John is 30 and works at Company Inc.
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleMarshal() {
|
||||||
|
type Postgres struct {
|
||||||
|
User string `toml:"user"`
|
||||||
|
Password string `toml:"password"`
|
||||||
|
Database string `toml:"db" commented:"true" comment:"not used anymore"`
|
||||||
|
}
|
||||||
|
type Config struct {
|
||||||
|
Postgres Postgres `toml:"postgres" comment:"Postgres configuration"`
|
||||||
|
}
|
||||||
|
|
||||||
|
config := Config{Postgres{User: "pelletier", Password: "mypassword", Database: "old_database"}}
|
||||||
|
b, err := toml.Marshal(config)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
fmt.Println(string(b))
|
||||||
|
// Output:
|
||||||
|
// # Postgres configuration
|
||||||
|
// [postgres]
|
||||||
|
//
|
||||||
|
// # not used anymore
|
||||||
|
// # db = "old_database"
|
||||||
|
// password = "mypassword"
|
||||||
|
// user = "pelletier"
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleUnmarshal() {
|
||||||
|
type Postgres struct {
|
||||||
|
User string
|
||||||
|
Password string
|
||||||
|
}
|
||||||
|
type Config struct {
|
||||||
|
Postgres Postgres
|
||||||
|
}
|
||||||
|
|
||||||
|
doc := []byte(`
|
||||||
|
[postgres]
|
||||||
|
user = "pelletier"
|
||||||
|
password = "mypassword"`)
|
||||||
|
|
||||||
|
config := Config{}
|
||||||
|
toml.Unmarshal(doc, &config)
|
||||||
|
fmt.Println("user=", config.Postgres.User)
|
||||||
|
// Output:
|
||||||
|
// user= pelletier
|
||||||
|
}
|
||||||
|
|||||||
@@ -0,0 +1,29 @@
|
|||||||
|
# This is a TOML document. Boom.
|
||||||
|
|
||||||
|
title = "TOML Example"
|
||||||
|
|
||||||
|
[owner]
|
||||||
|
name = "Tom Preston-Werner"
|
||||||
|
organization = "GitHub"
|
||||||
|
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||||
|
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||||
|
|
||||||
|
[database]
|
||||||
|
server = "192.168.1.1"
|
||||||
|
ports = [ 8001, 8001, 8002 ]
|
||||||
|
connection_max = 5000
|
||||||
|
enabled = true
|
||||||
|
|
||||||
|
[servers]
|
||||||
|
|
||||||
|
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||||
|
[servers.alpha]
|
||||||
|
ip = "10.0.0.1"
|
||||||
|
dc = "eqdc10"
|
||||||
|
|
||||||
|
[servers.beta]
|
||||||
|
ip = "10.0.0.2"
|
||||||
|
dc = "eqdc10"
|
||||||
|
|
||||||
|
[clients]
|
||||||
|
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||||
@@ -0,0 +1,31 @@
|
|||||||
|
// +build gofuzz
|
||||||
|
|
||||||
|
package toml
|
||||||
|
|
||||||
|
func Fuzz(data []byte) int {
|
||||||
|
tree, err := LoadBytes(data)
|
||||||
|
if err != nil {
|
||||||
|
if tree != nil {
|
||||||
|
panic("tree must be nil if there is an error")
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
str, err := tree.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
if str != "" {
|
||||||
|
panic(`str must be "" if there is an error`)
|
||||||
|
}
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
tree, err = Load(str)
|
||||||
|
if err != nil {
|
||||||
|
if tree != nil {
|
||||||
|
panic("tree must be nil if there is an error")
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
return 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
#! /bin/sh
# Build and run go-fuzz against the go-toml parser.
set -eu

# Fetch the go-fuzz driver and its instrumenting builder.
go get github.com/dvyukov/go-fuzz/go-fuzz
go get github.com/dvyukov/go-fuzz/go-fuzz-build

# Instrument the package once; reuse the archive on subsequent runs.
if [ ! -e toml-fuzz.zip ]; then
	go-fuzz-build github.com/pelletier/go-toml
fi

# Seed a fresh corpus with the repository's TOML fixtures.
rm -fr fuzz
mkdir -p fuzz/corpus
cp *.toml fuzz/corpus

go-fuzz -bin=toml-fuzz.zip -workdir=fuzz
|
||||||
@@ -0,0 +1,26 @@
|
|||||||
|
#!/bin/bash
# Build a libFuzzer binary for go-toml and submit a fuzzing job to the
# fuzzit.dev service. Expects $TYPE to be set by CI (e.g. fuzzing/local-regression).
set -xe

# go-fuzz doesn't support modules yet, so ensure we do everything
# in the old style GOPATH way
export GO111MODULE="off"

# install go-fuzz
go get -u github.com/dvyukov/go-fuzz/go-fuzz github.com/dvyukov/go-fuzz/go-fuzz-build

# target name can only contain lower-case letters (a-z), digits (0-9) and a dash (-)
# to add another target, make sure to create it with `fuzzit create target`
# before using `fuzzit create job`
TARGET=toml-fuzzer

# Instrument the package and link the archive against libFuzzer.
go-fuzz-build -libfuzzer -o ${TARGET}.a github.com/pelletier/go-toml
clang -fsanitize=fuzzer ${TARGET}.a -o ${TARGET}

# install fuzzit for talking to fuzzit.dev service
# or latest version:
# https://github.com/fuzzitdev/fuzzit/releases/latest/download/fuzzit_Linux_x86_64
wget -q -O fuzzit https://github.com/fuzzitdev/fuzzit/releases/download/v2.4.52/fuzzit_Linux_x86_64
chmod a+x fuzzit

# TODO: change kkowalczyk to go-toml and create toml-fuzzer target there
./fuzzit create job --type $TYPE go-toml/${TARGET} ${TARGET}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
module github.com/pelletier/go-toml
|
||||||
|
|
||||||
|
go 1.12
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/BurntSushi/toml v0.3.1
|
||||||
|
github.com/davecgh/go-spew v1.1.1
|
||||||
|
gopkg.in/yaml.v2 v2.2.4
|
||||||
|
)
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||||
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.3 h1:fvjTMHxHEw/mxHbtzPi3JCcKXQRAnQTBRo6YCJSVHKI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||||
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
+113
@@ -0,0 +1,113 @@
|
|||||||
|
// Parsing keys handling both bare and quoted keys.
|
||||||
|
|
||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Convert the bare key group string to an array.
// The input supports double quotation and single quotation,
// but escape sequences are not supported. Lexers must unescape them beforehand.
//
// parseKey splits a dotted TOML key ("a.b.'c d'") into its parts.
// It is a single-pass scanner over the key's runes: each loop iteration
// consumes optional whitespace, then exactly one key part (bare, single-
// quoted, or double-quoted) or one dot separator. It returns an error for
// an empty key, an unterminated quote, an invalid bare character, or a
// dot with no key part after it.
func parseKey(key string) ([]string, error) {
	// Work in runes so multi-byte characters are handled per character.
	runes := []rune(key)
	var groups []string

	if len(key) == 0 {
		return nil, errors.New("empty key")
	}

	idx := 0
	for idx < len(runes) {
		for ; idx < len(runes) && isSpace(runes[idx]); idx++ {
			// skip leading whitespace
		}
		if idx >= len(runes) {
			break
		}
		r := runes[idx]
		if isValidBareChar(r) {
			// parse bare key
			startIdx := idx
			endIdx := -1 // -1 means "ran to end of input / part boundary"
			idx++
			for idx < len(runes) {
				r = runes[idx]
				if isValidBareChar(r) {
					idx++
				} else if r == '.' {
					// Dot ends the part; the outer loop consumes the dot.
					endIdx = idx
					break
				} else if isSpace(r) {
					endIdx = idx
					for ; idx < len(runes) && isSpace(runes[idx]); idx++ {
						// skip trailing whitespace
					}
					// After whitespace, only a dot (or end) may follow a bare part.
					if idx < len(runes) && runes[idx] != '.' {
						return nil, fmt.Errorf("invalid key character after whitespace: %c", runes[idx])
					}
					break
				} else {
					return nil, fmt.Errorf("invalid bare key character: %c", r)
				}
			}
			if endIdx == -1 {
				endIdx = idx
			}
			groups = append(groups, string(runes[startIdx:endIdx]))
		} else if r == '\'' {
			// parse single quoted key
			idx++
			startIdx := idx
			for {
				if idx >= len(runes) {
					return nil, fmt.Errorf("unclosed single-quoted key")
				}
				r = runes[idx]
				if r == '\'' {
					groups = append(groups, string(runes[startIdx:idx]))
					idx++
					break
				}
				idx++
			}
		} else if r == '"' {
			// parse double quoted key (no escape handling; see doc comment)
			idx++
			startIdx := idx
			for {
				if idx >= len(runes) {
					return nil, fmt.Errorf("unclosed double-quoted key")
				}
				r = runes[idx]
				if r == '"' {
					groups = append(groups, string(runes[startIdx:idx]))
					idx++
					break
				}
				idx++
			}
		} else if r == '.' {
			// Separator: peek ahead to require that a key part follows.
			idx++
			if idx >= len(runes) {
				return nil, fmt.Errorf("unexpected end of key")
			}
			r = runes[idx]
			if !isValidBareChar(r) && r != '\'' && r != '"' && r != ' ' {
				return nil, fmt.Errorf("expecting key part after dot")
			}
		} else {
			return nil, fmt.Errorf("invalid key character: %c", r)
		}
	}
	if len(groups) == 0 {
		return nil, fmt.Errorf("empty key")
	}
	return groups, nil
}
|
||||||
|
|
||||||
|
func isValidBareChar(r rune) bool {
|
||||||
|
return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r)
|
||||||
|
}
|
||||||
@@ -0,0 +1,79 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func testResult(t *testing.T, key string, expected []string) {
|
||||||
|
parsed, err := parseKey(key)
|
||||||
|
t.Logf("key=%s expected=%s parsed=%s", key, expected, parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Unexpected error:", err)
|
||||||
|
}
|
||||||
|
if len(expected) != len(parsed) {
|
||||||
|
t.Fatal("Expected length", len(expected), "but", len(parsed), "parsed")
|
||||||
|
}
|
||||||
|
for index, expectedKey := range expected {
|
||||||
|
if expectedKey != parsed[index] {
|
||||||
|
t.Fatal("Expected", expectedKey, "at index", index, "but found", parsed[index])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func testError(t *testing.T, key string, expectedError string) {
|
||||||
|
res, err := parseKey(key)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("Expected error, but successfully parsed key %s", res)
|
||||||
|
}
|
||||||
|
if fmt.Sprintf("%s", err) != expectedError {
|
||||||
|
t.Fatalf("Expected error \"%s\", but got \"%s\".", expectedError, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBareKeyBasic(t *testing.T) {
|
||||||
|
testResult(t, "test", []string{"test"})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBareKeyDotted(t *testing.T) {
|
||||||
|
testResult(t, "this.is.a.key", []string{"this", "is", "a", "key"})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDottedKeyBasic(t *testing.T) {
|
||||||
|
testResult(t, "\"a.dotted.key\"", []string{"a.dotted.key"})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBaseKeyPound(t *testing.T) {
|
||||||
|
testError(t, "hello#world", "invalid bare key character: #")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUnclosedSingleQuotedKey(t *testing.T) {
|
||||||
|
testError(t, "'", "unclosed single-quoted key")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUnclosedDoubleQuotedKey(t *testing.T) {
|
||||||
|
testError(t, "\"", "unclosed double-quoted key")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidStartKeyCharacter(t *testing.T) {
|
||||||
|
testError(t, "/", "invalid key character: /")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidSpaceInKey(t *testing.T) {
|
||||||
|
testError(t, "invalid key", "invalid key character after whitespace: k")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuotedKeys(t *testing.T) {
|
||||||
|
testResult(t, `hello."foo".bar`, []string{"hello", "foo", "bar"})
|
||||||
|
testResult(t, `"hello!"`, []string{"hello!"})
|
||||||
|
testResult(t, `foo."ba.r".baz`, []string{"foo", "ba.r", "baz"})
|
||||||
|
|
||||||
|
// escape sequences must not be converted
|
||||||
|
testResult(t, `"hello\tworld"`, []string{`hello\tworld`})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEmptyKey(t *testing.T) {
|
||||||
|
testError(t, ``, "empty key")
|
||||||
|
testError(t, ` `, "empty key")
|
||||||
|
testResult(t, `""`, []string{""})
|
||||||
|
}
|
||||||
@@ -1,16 +1,17 @@
|
|||||||
// TOML lexer.
|
// TOML lexer.
|
||||||
//
|
//
|
||||||
// Written using the principles developped by Rob Pike in
|
// Written using the principles developed by Rob Pike in
|
||||||
// http://www.youtube.com/watch?v=HxaD_trXwRE
|
// http://www.youtube.com/watch?v=HxaD_trXwRE
|
||||||
|
|
||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"unicode/utf8"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var dateRegexp *regexp.Regexp
|
var dateRegexp *regexp.Regexp
|
||||||
@@ -20,129 +21,135 @@ type tomlLexStateFn func() tomlLexStateFn
|
|||||||
|
|
||||||
// Define lexer
|
// Define lexer
|
||||||
type tomlLexer struct {
|
type tomlLexer struct {
|
||||||
input string
|
inputIdx int
|
||||||
start int
|
input []rune // Textual source
|
||||||
pos int
|
currentTokenStart int
|
||||||
width int
|
currentTokenStop int
|
||||||
tokens chan token
|
tokens []token
|
||||||
depth int
|
depth int
|
||||||
line int
|
line int
|
||||||
col int
|
col int
|
||||||
|
endbufferLine int
|
||||||
|
endbufferCol int
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) run() {
|
// Basic read operations on input
|
||||||
for state := l.lexVoid; state != nil; {
|
|
||||||
state = state()
|
|
||||||
}
|
|
||||||
close(l.tokens)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *tomlLexer) nextStart() {
|
func (l *tomlLexer) read() rune {
|
||||||
// iterate by runes (utf8 characters)
|
r := l.peek()
|
||||||
// search for newlines and advance line/col counts
|
if r == '\n' {
|
||||||
for i := l.start; i < l.pos; {
|
l.endbufferLine++
|
||||||
r, width := utf8.DecodeRuneInString(l.input[i:])
|
l.endbufferCol = 1
|
||||||
if r == '\n' {
|
} else {
|
||||||
l.line++
|
l.endbufferCol++
|
||||||
l.col = 1
|
|
||||||
} else {
|
|
||||||
l.col++
|
|
||||||
}
|
|
||||||
i += width
|
|
||||||
}
|
}
|
||||||
// advance start position to next token
|
l.inputIdx++
|
||||||
l.start = l.pos
|
return r
|
||||||
}
|
|
||||||
|
|
||||||
func (l *tomlLexer) emit(t tokenType) {
|
|
||||||
l.tokens <- token{
|
|
||||||
Position: Position{l.line, l.col},
|
|
||||||
typ: t,
|
|
||||||
val: l.input[l.start:l.pos],
|
|
||||||
}
|
|
||||||
l.nextStart()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
|
|
||||||
l.tokens <- token{
|
|
||||||
Position: Position{l.line, l.col},
|
|
||||||
typ: t,
|
|
||||||
val: value,
|
|
||||||
}
|
|
||||||
l.nextStart()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) next() rune {
|
func (l *tomlLexer) next() rune {
|
||||||
if l.pos >= len(l.input) {
|
r := l.read()
|
||||||
l.width = 0
|
|
||||||
return eof
|
if r != eof {
|
||||||
|
l.currentTokenStop++
|
||||||
}
|
}
|
||||||
var r rune
|
|
||||||
r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
|
|
||||||
l.pos += l.width
|
|
||||||
return r
|
return r
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) ignore() {
|
func (l *tomlLexer) ignore() {
|
||||||
l.nextStart()
|
l.currentTokenStart = l.currentTokenStop
|
||||||
|
l.line = l.endbufferLine
|
||||||
|
l.col = l.endbufferCol
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) backup() {
|
func (l *tomlLexer) skip() {
|
||||||
l.pos -= l.width
|
l.next()
|
||||||
|
l.ignore()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
|
func (l *tomlLexer) fastForward(n int) {
|
||||||
l.tokens <- token{
|
for i := 0; i < n; i++ {
|
||||||
Position: Position{l.line, l.col},
|
l.next()
|
||||||
typ: tokenError,
|
|
||||||
val: fmt.Sprintf(format, args...),
|
|
||||||
}
|
}
|
||||||
return nil
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
|
||||||
|
l.tokens = append(l.tokens, token{
|
||||||
|
Position: Position{l.line, l.col},
|
||||||
|
typ: t,
|
||||||
|
val: value,
|
||||||
|
})
|
||||||
|
l.ignore()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) emit(t tokenType) {
|
||||||
|
l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop]))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) peek() rune {
|
func (l *tomlLexer) peek() rune {
|
||||||
r := l.next()
|
if l.inputIdx >= len(l.input) {
|
||||||
l.backup()
|
return eof
|
||||||
return r
|
}
|
||||||
|
return l.input[l.inputIdx]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) accept(valid string) bool {
|
func (l *tomlLexer) peekString(size int) string {
|
||||||
if strings.IndexRune(valid, l.next()) >= 0 {
|
maxIdx := len(l.input)
|
||||||
return true
|
upperIdx := l.inputIdx + size // FIXME: potential overflow
|
||||||
|
if upperIdx > maxIdx {
|
||||||
|
upperIdx = maxIdx
|
||||||
}
|
}
|
||||||
l.backup()
|
return string(l.input[l.inputIdx:upperIdx])
|
||||||
return false
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) follow(next string) bool {
|
func (l *tomlLexer) follow(next string) bool {
|
||||||
return strings.HasPrefix(l.input[l.pos:], next)
|
return next == l.peekString(len(next))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Error management
|
||||||
|
|
||||||
|
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
|
||||||
|
l.tokens = append(l.tokens, token{
|
||||||
|
Position: Position{l.line, l.col},
|
||||||
|
typ: tokenError,
|
||||||
|
val: fmt.Sprintf(format, args...),
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// State functions
|
||||||
|
|
||||||
func (l *tomlLexer) lexVoid() tomlLexStateFn {
|
func (l *tomlLexer) lexVoid() tomlLexStateFn {
|
||||||
for {
|
for {
|
||||||
next := l.peek()
|
next := l.peek()
|
||||||
switch next {
|
switch next {
|
||||||
case '[':
|
case '[':
|
||||||
return l.lexKeyGroup
|
return l.lexTableKey
|
||||||
case '#':
|
case '#':
|
||||||
return l.lexComment
|
return l.lexComment(l.lexVoid)
|
||||||
case '=':
|
case '=':
|
||||||
return l.lexEqual
|
return l.lexEqual
|
||||||
|
case '\r':
|
||||||
|
fallthrough
|
||||||
|
case '\n':
|
||||||
|
l.skip()
|
||||||
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if isSpace(next) {
|
if isSpace(next) {
|
||||||
l.ignore()
|
l.skip()
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.depth > 0 {
|
if l.depth > 0 {
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
if isKeyChar(next) {
|
if isKeyStartChar(next) {
|
||||||
return l.lexKey
|
return l.lexKey
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.next() == eof {
|
if next == eof {
|
||||||
|
l.next()
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -158,26 +165,35 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
case '.':
|
case '.':
|
||||||
return l.errorf("cannot start float with a dot")
|
return l.errorf("cannot start float with a dot")
|
||||||
case '=':
|
case '=':
|
||||||
return l.errorf("cannot have multiple equals for the same key")
|
return l.lexEqual
|
||||||
case '[':
|
case '[':
|
||||||
l.depth++
|
l.depth++
|
||||||
return l.lexLeftBracket
|
return l.lexLeftBracket
|
||||||
case ']':
|
case ']':
|
||||||
l.depth--
|
l.depth--
|
||||||
return l.lexRightBracket
|
return l.lexRightBracket
|
||||||
|
case '{':
|
||||||
|
return l.lexLeftCurlyBrace
|
||||||
|
case '}':
|
||||||
|
return l.lexRightCurlyBrace
|
||||||
case '#':
|
case '#':
|
||||||
return l.lexComment
|
return l.lexComment(l.lexRvalue)
|
||||||
case '"':
|
case '"':
|
||||||
return l.lexString
|
return l.lexString
|
||||||
|
case '\'':
|
||||||
|
return l.lexLiteralString
|
||||||
case ',':
|
case ',':
|
||||||
return l.lexComma
|
return l.lexComma
|
||||||
|
case '\r':
|
||||||
|
fallthrough
|
||||||
case '\n':
|
case '\n':
|
||||||
l.ignore()
|
l.skip()
|
||||||
l.pos++
|
|
||||||
if l.depth == 0 {
|
if l.depth == 0 {
|
||||||
return l.lexVoid
|
return l.lexVoid
|
||||||
}
|
}
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
|
case '_':
|
||||||
|
return l.errorf("cannot start number with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.follow("true") {
|
if l.follow("true") {
|
||||||
@@ -188,11 +204,28 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
return l.lexFalse
|
return l.lexFalse
|
||||||
}
|
}
|
||||||
|
|
||||||
if isAlphanumeric(next) {
|
if l.follow("inf") {
|
||||||
return l.lexKey
|
return l.lexInf
|
||||||
}
|
}
|
||||||
|
|
||||||
if dateRegexp.FindString(l.input[l.pos:]) != "" {
|
if l.follow("nan") {
|
||||||
|
return l.lexNan
|
||||||
|
}
|
||||||
|
|
||||||
|
if isSpace(next) {
|
||||||
|
l.skip()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if next == eof {
|
||||||
|
l.next()
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
possibleDate := l.peekString(35)
|
||||||
|
dateMatch := dateRegexp.FindString(possibleDate)
|
||||||
|
if dateMatch != "" {
|
||||||
|
l.fastForward(len(dateMatch))
|
||||||
return l.lexDate
|
return l.lexDate
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -200,239 +233,481 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
return l.lexNumber
|
return l.lexNumber
|
||||||
}
|
}
|
||||||
|
|
||||||
if isSpace(next) {
|
if isAlphanumeric(next) {
|
||||||
l.ignore()
|
return l.lexKey
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.next() == eof {
|
return l.errorf("no value can start with %c", next)
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
l.emit(tokenEOF)
|
l.emit(tokenEOF)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
|
||||||
|
l.next()
|
||||||
|
l.emit(tokenLeftCurlyBrace)
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
|
||||||
|
l.next()
|
||||||
|
l.emit(tokenRightCurlyBrace)
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexDate() tomlLexStateFn {
|
func (l *tomlLexer) lexDate() tomlLexStateFn {
|
||||||
l.ignore()
|
|
||||||
l.pos += 20 // Fixed size of a date in TOML
|
|
||||||
l.emit(tokenDate)
|
l.emit(tokenDate)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexTrue() tomlLexStateFn {
|
func (l *tomlLexer) lexTrue() tomlLexStateFn {
|
||||||
l.ignore()
|
l.fastForward(4)
|
||||||
l.pos += 4
|
|
||||||
l.emit(tokenTrue)
|
l.emit(tokenTrue)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexFalse() tomlLexStateFn {
|
func (l *tomlLexer) lexFalse() tomlLexStateFn {
|
||||||
l.ignore()
|
l.fastForward(5)
|
||||||
l.pos += 5
|
|
||||||
l.emit(tokenFalse)
|
l.emit(tokenFalse)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexInf() tomlLexStateFn {
|
||||||
|
l.fastForward(3)
|
||||||
|
l.emit(tokenInf)
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexNan() tomlLexStateFn {
|
||||||
|
l.fastForward(3)
|
||||||
|
l.emit(tokenNan)
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexEqual() tomlLexStateFn {
|
func (l *tomlLexer) lexEqual() tomlLexStateFn {
|
||||||
l.ignore()
|
l.next()
|
||||||
l.accept("=")
|
|
||||||
l.emit(tokenEqual)
|
l.emit(tokenEqual)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexComma() tomlLexStateFn {
|
func (l *tomlLexer) lexComma() tomlLexStateFn {
|
||||||
l.ignore()
|
l.next()
|
||||||
l.accept(",")
|
|
||||||
l.emit(tokenComma)
|
l.emit(tokenComma)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Parse the key and emits its value without escape sequences.
|
||||||
|
// bare keys, basic string keys and literal string keys are supported.
|
||||||
func (l *tomlLexer) lexKey() tomlLexStateFn {
|
func (l *tomlLexer) lexKey() tomlLexStateFn {
|
||||||
l.ignore()
|
growingString := ""
|
||||||
for isKeyChar(l.next()) {
|
|
||||||
|
for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
|
||||||
|
if r == '"' {
|
||||||
|
l.next()
|
||||||
|
str, err := l.lexStringAsString(`"`, false, true)
|
||||||
|
if err != nil {
|
||||||
|
return l.errorf(err.Error())
|
||||||
|
}
|
||||||
|
growingString += "\"" + str + "\""
|
||||||
|
l.next()
|
||||||
|
continue
|
||||||
|
} else if r == '\'' {
|
||||||
|
l.next()
|
||||||
|
str, err := l.lexLiteralStringAsString(`'`, false)
|
||||||
|
if err != nil {
|
||||||
|
return l.errorf(err.Error())
|
||||||
|
}
|
||||||
|
growingString += "'" + str + "'"
|
||||||
|
l.next()
|
||||||
|
continue
|
||||||
|
} else if r == '\n' {
|
||||||
|
return l.errorf("keys cannot contain new lines")
|
||||||
|
} else if isSpace(r) {
|
||||||
|
break
|
||||||
|
} else if r == '.' {
|
||||||
|
// skip
|
||||||
|
} else if !isValidBareChar(r) {
|
||||||
|
return l.errorf("keys cannot contain %c character", r)
|
||||||
|
}
|
||||||
|
growingString += string(r)
|
||||||
|
l.next()
|
||||||
}
|
}
|
||||||
l.backup()
|
l.emitWithValue(tokenKey, growingString)
|
||||||
l.emit(tokenKey)
|
|
||||||
return l.lexVoid
|
return l.lexVoid
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexComment() tomlLexStateFn {
|
func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
|
||||||
for {
|
return func() tomlLexStateFn {
|
||||||
next := l.next()
|
for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
|
||||||
if next == '\n' || next == eof {
|
if next == '\r' && l.follow("\r\n") {
|
||||||
break
|
break
|
||||||
|
}
|
||||||
|
l.next()
|
||||||
}
|
}
|
||||||
|
l.ignore()
|
||||||
|
return previousState
|
||||||
}
|
}
|
||||||
l.ignore()
|
|
||||||
return l.lexVoid
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
|
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
|
||||||
l.ignore()
|
l.next()
|
||||||
l.pos++
|
|
||||||
l.emit(tokenLeftBracket)
|
l.emit(tokenLeftBracket)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexString() tomlLexStateFn {
|
func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
|
||||||
l.pos++
|
|
||||||
l.ignore()
|
|
||||||
growingString := ""
|
growingString := ""
|
||||||
|
|
||||||
|
if discardLeadingNewLine {
|
||||||
|
if l.follow("\r\n") {
|
||||||
|
l.skip()
|
||||||
|
l.skip()
|
||||||
|
} else if l.peek() == '\n' {
|
||||||
|
l.skip()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// find end of string
|
||||||
for {
|
for {
|
||||||
if l.peek() == '"' {
|
if l.follow(terminator) {
|
||||||
l.emitWithValue(tokenString, growingString)
|
return growingString, nil
|
||||||
l.pos++
|
|
||||||
l.ignore()
|
|
||||||
return l.lexRvalue
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.follow("\\\"") {
|
next := l.peek()
|
||||||
l.pos++
|
if next == eof {
|
||||||
growingString += "\""
|
break
|
||||||
} else if l.follow("\\n") {
|
}
|
||||||
l.pos++
|
growingString += string(l.next())
|
||||||
growingString += "\n"
|
}
|
||||||
} else if l.follow("\\b") {
|
|
||||||
l.pos++
|
return "", errors.New("unclosed string")
|
||||||
growingString += "\b"
|
}
|
||||||
} else if l.follow("\\f") {
|
|
||||||
l.pos++
|
func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
|
||||||
growingString += "\f"
|
l.skip()
|
||||||
} else if l.follow("\\/") {
|
|
||||||
l.pos++
|
// handle special case for triple-quote
|
||||||
growingString += "/"
|
terminator := "'"
|
||||||
} else if l.follow("\\t") {
|
discardLeadingNewLine := false
|
||||||
l.pos++
|
if l.follow("''") {
|
||||||
growingString += "\t"
|
l.skip()
|
||||||
} else if l.follow("\\r") {
|
l.skip()
|
||||||
l.pos++
|
terminator = "'''"
|
||||||
growingString += "\r"
|
discardLeadingNewLine = true
|
||||||
} else if l.follow("\\\\") {
|
}
|
||||||
l.pos++
|
|
||||||
growingString += "\\"
|
str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine)
|
||||||
} else if l.follow("\\u") {
|
if err != nil {
|
||||||
l.pos += 2
|
return l.errorf(err.Error())
|
||||||
code := ""
|
}
|
||||||
for i := 0; i < 4; i++ {
|
|
||||||
c := l.peek()
|
l.emitWithValue(tokenString, str)
|
||||||
l.pos++
|
l.fastForward(len(terminator))
|
||||||
if !isHexDigit(c) {
|
l.ignore()
|
||||||
return l.errorf("unfinished unicode escape")
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lex a string and return the results as a string.
|
||||||
|
// Terminator is the substring indicating the end of the token.
|
||||||
|
// The resulting string does not include the terminator.
|
||||||
|
func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
|
||||||
|
growingString := ""
|
||||||
|
|
||||||
|
if discardLeadingNewLine {
|
||||||
|
if l.follow("\r\n") {
|
||||||
|
l.skip()
|
||||||
|
l.skip()
|
||||||
|
} else if l.peek() == '\n' {
|
||||||
|
l.skip()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
if l.follow(terminator) {
|
||||||
|
return growingString, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if l.follow("\\") {
|
||||||
|
l.next()
|
||||||
|
switch l.peek() {
|
||||||
|
case '\r':
|
||||||
|
fallthrough
|
||||||
|
case '\n':
|
||||||
|
fallthrough
|
||||||
|
case '\t':
|
||||||
|
fallthrough
|
||||||
|
case ' ':
|
||||||
|
// skip all whitespace chars following backslash
|
||||||
|
for strings.ContainsRune("\r\n\t ", l.peek()) {
|
||||||
|
l.next()
|
||||||
}
|
}
|
||||||
code = code + string(c)
|
case '"':
|
||||||
|
growingString += "\""
|
||||||
|
l.next()
|
||||||
|
case 'n':
|
||||||
|
growingString += "\n"
|
||||||
|
l.next()
|
||||||
|
case 'b':
|
||||||
|
growingString += "\b"
|
||||||
|
l.next()
|
||||||
|
case 'f':
|
||||||
|
growingString += "\f"
|
||||||
|
l.next()
|
||||||
|
case '/':
|
||||||
|
growingString += "/"
|
||||||
|
l.next()
|
||||||
|
case 't':
|
||||||
|
growingString += "\t"
|
||||||
|
l.next()
|
||||||
|
case 'r':
|
||||||
|
growingString += "\r"
|
||||||
|
l.next()
|
||||||
|
case '\\':
|
||||||
|
growingString += "\\"
|
||||||
|
l.next()
|
||||||
|
case 'u':
|
||||||
|
l.next()
|
||||||
|
code := ""
|
||||||
|
for i := 0; i < 4; i++ {
|
||||||
|
c := l.peek()
|
||||||
|
if !isHexDigit(c) {
|
||||||
|
return "", errors.New("unfinished unicode escape")
|
||||||
|
}
|
||||||
|
l.next()
|
||||||
|
code = code + string(c)
|
||||||
|
}
|
||||||
|
intcode, err := strconv.ParseInt(code, 16, 32)
|
||||||
|
if err != nil {
|
||||||
|
return "", errors.New("invalid unicode escape: \\u" + code)
|
||||||
|
}
|
||||||
|
growingString += string(rune(intcode))
|
||||||
|
case 'U':
|
||||||
|
l.next()
|
||||||
|
code := ""
|
||||||
|
for i := 0; i < 8; i++ {
|
||||||
|
c := l.peek()
|
||||||
|
if !isHexDigit(c) {
|
||||||
|
return "", errors.New("unfinished unicode escape")
|
||||||
|
}
|
||||||
|
l.next()
|
||||||
|
code = code + string(c)
|
||||||
|
}
|
||||||
|
intcode, err := strconv.ParseInt(code, 16, 64)
|
||||||
|
if err != nil {
|
||||||
|
return "", errors.New("invalid unicode escape: \\U" + code)
|
||||||
|
}
|
||||||
|
growingString += string(rune(intcode))
|
||||||
|
default:
|
||||||
|
return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
|
||||||
}
|
}
|
||||||
l.pos--
|
|
||||||
intcode, err := strconv.ParseInt(code, 16, 32)
|
|
||||||
if err != nil {
|
|
||||||
return l.errorf("invalid unicode escape: \\u" + code)
|
|
||||||
}
|
|
||||||
growingString += string(rune(intcode))
|
|
||||||
} else if l.follow("\\") {
|
|
||||||
l.pos++
|
|
||||||
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
|
|
||||||
} else {
|
} else {
|
||||||
growingString += string(l.peek())
|
r := l.peek()
|
||||||
|
|
||||||
|
if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
|
||||||
|
return "", fmt.Errorf("unescaped control character %U", r)
|
||||||
|
}
|
||||||
|
l.next()
|
||||||
|
growingString += string(r)
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.next() == eof {
|
if l.peek() == eof {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return l.errorf("unclosed string")
|
return "", errors.New("unclosed string")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexKeyGroup() tomlLexStateFn {
|
func (l *tomlLexer) lexString() tomlLexStateFn {
|
||||||
|
l.skip()
|
||||||
|
|
||||||
|
// handle special case for triple-quote
|
||||||
|
terminator := `"`
|
||||||
|
discardLeadingNewLine := false
|
||||||
|
acceptNewLines := false
|
||||||
|
if l.follow(`""`) {
|
||||||
|
l.skip()
|
||||||
|
l.skip()
|
||||||
|
terminator = `"""`
|
||||||
|
discardLeadingNewLine = true
|
||||||
|
acceptNewLines = true
|
||||||
|
}
|
||||||
|
|
||||||
|
str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return l.errorf(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
l.emitWithValue(tokenString, str)
|
||||||
|
l.fastForward(len(terminator))
|
||||||
l.ignore()
|
l.ignore()
|
||||||
l.pos++
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexTableKey() tomlLexStateFn {
|
||||||
|
l.next()
|
||||||
|
|
||||||
if l.peek() == '[' {
|
if l.peek() == '[' {
|
||||||
// token '[[' signifies an array of anonymous key groups
|
// token '[[' signifies an array of tables
|
||||||
l.pos++
|
l.next()
|
||||||
l.emit(tokenDoubleLeftBracket)
|
l.emit(tokenDoubleLeftBracket)
|
||||||
return l.lexInsideKeyGroupArray
|
return l.lexInsideTableArrayKey
|
||||||
}
|
}
|
||||||
// vanilla key group
|
// vanilla table key
|
||||||
l.emit(tokenLeftBracket)
|
l.emit(tokenLeftBracket)
|
||||||
return l.lexInsideKeyGroup
|
return l.lexInsideTableKey
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexInsideKeyGroupArray() tomlLexStateFn {
|
// Parse the key till "]]", but only bare keys are supported
|
||||||
for {
|
func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
|
||||||
if l.peek() == ']' {
|
for r := l.peek(); r != eof; r = l.peek() {
|
||||||
if l.pos > l.start {
|
switch r {
|
||||||
|
case ']':
|
||||||
|
if l.currentTokenStop > l.currentTokenStart {
|
||||||
l.emit(tokenKeyGroupArray)
|
l.emit(tokenKeyGroupArray)
|
||||||
}
|
}
|
||||||
l.ignore()
|
l.next()
|
||||||
l.pos++
|
|
||||||
if l.peek() != ']' {
|
if l.peek() != ']' {
|
||||||
break // error
|
break
|
||||||
}
|
}
|
||||||
l.pos++
|
l.next()
|
||||||
l.emit(tokenDoubleRightBracket)
|
l.emit(tokenDoubleRightBracket)
|
||||||
return l.lexVoid
|
return l.lexVoid
|
||||||
} else if l.peek() == '[' {
|
case '[':
|
||||||
return l.errorf("group name cannot contain ']'")
|
return l.errorf("table array key cannot contain ']'")
|
||||||
}
|
default:
|
||||||
|
l.next()
|
||||||
if l.next() == eof {
|
|
||||||
break
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return l.errorf("unclosed key group array")
|
return l.errorf("unclosed table array key")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexInsideKeyGroup() tomlLexStateFn {
|
// Parse the key till "]" but only bare keys are supported
|
||||||
for {
|
func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
|
||||||
if l.peek() == ']' {
|
for r := l.peek(); r != eof; r = l.peek() {
|
||||||
if l.pos > l.start {
|
switch r {
|
||||||
|
case ']':
|
||||||
|
if l.currentTokenStop > l.currentTokenStart {
|
||||||
l.emit(tokenKeyGroup)
|
l.emit(tokenKeyGroup)
|
||||||
}
|
}
|
||||||
l.ignore()
|
l.next()
|
||||||
l.pos++
|
|
||||||
l.emit(tokenRightBracket)
|
l.emit(tokenRightBracket)
|
||||||
return l.lexVoid
|
return l.lexVoid
|
||||||
} else if l.peek() == '[' {
|
case '[':
|
||||||
return l.errorf("group name cannot contain ']'")
|
return l.errorf("table key cannot contain ']'")
|
||||||
}
|
default:
|
||||||
|
l.next()
|
||||||
if l.next() == eof {
|
|
||||||
break
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return l.errorf("unclosed key group")
|
return l.errorf("unclosed table key")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
|
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
|
||||||
l.ignore()
|
l.next()
|
||||||
l.pos++
|
|
||||||
l.emit(tokenRightBracket)
|
l.emit(tokenRightBracket)
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type validRuneFn func(r rune) bool
|
||||||
|
|
||||||
|
func isValidHexRune(r rune) bool {
|
||||||
|
return r >= 'a' && r <= 'f' ||
|
||||||
|
r >= 'A' && r <= 'F' ||
|
||||||
|
r >= '0' && r <= '9' ||
|
||||||
|
r == '_'
|
||||||
|
}
|
||||||
|
|
||||||
|
func isValidOctalRune(r rune) bool {
|
||||||
|
return r >= '0' && r <= '7' || r == '_'
|
||||||
|
}
|
||||||
|
|
||||||
|
func isValidBinaryRune(r rune) bool {
|
||||||
|
return r == '0' || r == '1' || r == '_'
|
||||||
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexNumber() tomlLexStateFn {
|
func (l *tomlLexer) lexNumber() tomlLexStateFn {
|
||||||
l.ignore()
|
r := l.peek()
|
||||||
if !l.accept("+") {
|
|
||||||
l.accept("-")
|
if r == '0' {
|
||||||
|
follow := l.peekString(2)
|
||||||
|
if len(follow) == 2 {
|
||||||
|
var isValidRune validRuneFn
|
||||||
|
switch follow[1] {
|
||||||
|
case 'x':
|
||||||
|
isValidRune = isValidHexRune
|
||||||
|
case 'o':
|
||||||
|
isValidRune = isValidOctalRune
|
||||||
|
case 'b':
|
||||||
|
isValidRune = isValidBinaryRune
|
||||||
|
default:
|
||||||
|
if follow[1] >= 'a' && follow[1] <= 'z' || follow[1] >= 'A' && follow[1] <= 'Z' {
|
||||||
|
return l.errorf("unknown number base: %s. possible options are x (hex) o (octal) b (binary)", string(follow[1]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if isValidRune != nil {
|
||||||
|
l.next()
|
||||||
|
l.next()
|
||||||
|
digitSeen := false
|
||||||
|
for {
|
||||||
|
next := l.peek()
|
||||||
|
if !isValidRune(next) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
digitSeen = true
|
||||||
|
l.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
if !digitSeen {
|
||||||
|
return l.errorf("number needs at least one digit")
|
||||||
|
}
|
||||||
|
|
||||||
|
l.emit(tokenInteger)
|
||||||
|
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if r == '+' || r == '-' {
|
||||||
|
l.next()
|
||||||
|
if l.follow("inf") {
|
||||||
|
return l.lexInf
|
||||||
|
}
|
||||||
|
if l.follow("nan") {
|
||||||
|
return l.lexNan
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pointSeen := false
|
pointSeen := false
|
||||||
|
expSeen := false
|
||||||
digitSeen := false
|
digitSeen := false
|
||||||
for {
|
for {
|
||||||
next := l.next()
|
next := l.peek()
|
||||||
if next == '.' {
|
if next == '.' {
|
||||||
if pointSeen {
|
if pointSeen {
|
||||||
return l.errorf("cannot have two dots in one float")
|
return l.errorf("cannot have two dots in one float")
|
||||||
}
|
}
|
||||||
|
l.next()
|
||||||
if !isDigit(l.peek()) {
|
if !isDigit(l.peek()) {
|
||||||
return l.errorf("float cannot end with a dot")
|
return l.errorf("float cannot end with a dot")
|
||||||
}
|
}
|
||||||
pointSeen = true
|
pointSeen = true
|
||||||
|
} else if next == 'e' || next == 'E' {
|
||||||
|
expSeen = true
|
||||||
|
l.next()
|
||||||
|
r := l.peek()
|
||||||
|
if r == '+' || r == '-' {
|
||||||
|
l.next()
|
||||||
|
}
|
||||||
} else if isDigit(next) {
|
} else if isDigit(next) {
|
||||||
digitSeen = true
|
digitSeen = true
|
||||||
|
l.next()
|
||||||
|
} else if next == '_' {
|
||||||
|
l.next()
|
||||||
} else {
|
} else {
|
||||||
l.backup()
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
if pointSeen && !digitSeen {
|
if pointSeen && !digitSeen {
|
||||||
@@ -443,7 +718,7 @@ func (l *tomlLexer) lexNumber() tomlLexStateFn {
|
|||||||
if !digitSeen {
|
if !digitSeen {
|
||||||
return l.errorf("no digit in that number")
|
return l.errorf("no digit in that number")
|
||||||
}
|
}
|
||||||
if pointSeen {
|
if pointSeen || expSeen {
|
||||||
l.emit(tokenFloat)
|
l.emit(tokenFloat)
|
||||||
} else {
|
} else {
|
||||||
l.emit(tokenInteger)
|
l.emit(tokenInteger)
|
||||||
@@ -451,18 +726,27 @@ func (l *tomlLexer) lexNumber() tomlLexStateFn {
|
|||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) run() {
|
||||||
|
for state := l.lexVoid; state != nil; {
|
||||||
|
state = state()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
dateRegexp = regexp.MustCompile("^\\d{1,4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z")
|
dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Entry point
|
// Entry point
|
||||||
func lexToml(input string) chan token {
|
func lexToml(inputBytes []byte) []token {
|
||||||
|
runes := bytes.Runes(inputBytes)
|
||||||
l := &tomlLexer{
|
l := &tomlLexer{
|
||||||
input: input,
|
input: runes,
|
||||||
tokens: make(chan token),
|
tokens: make([]token, 0, 256),
|
||||||
line: 1,
|
line: 1,
|
||||||
col: 1,
|
col: 1,
|
||||||
|
endbufferLine: 1,
|
||||||
|
endbufferCol: 1,
|
||||||
}
|
}
|
||||||
go l.run()
|
l.run()
|
||||||
return l.tokens
|
return l.tokens
|
||||||
}
|
}
|
||||||
|
|||||||
+561
-221
@@ -1,419 +1,759 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import "testing"
|
import (
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
func testFlow(t *testing.T, input string, expectedFlow []token) {
|
func testFlow(t *testing.T, input string, expectedFlow []token) {
|
||||||
ch := lexToml(input)
|
tokens := lexToml([]byte(input))
|
||||||
for _, expected := range expectedFlow {
|
if !reflect.DeepEqual(tokens, expectedFlow) {
|
||||||
token := <-ch
|
t.Fatal("Different flows. Expected\n", expectedFlow, "\nGot:\n", tokens)
|
||||||
if token != expected {
|
|
||||||
t.Log("While testing: ", input)
|
|
||||||
t.Log("compared", token, "to", expected)
|
|
||||||
t.Log(token.val, "<->", expected.val)
|
|
||||||
t.Log(token.typ, "<->", expected.typ)
|
|
||||||
t.Log(token.Line, "<->", expected.Line)
|
|
||||||
t.Log(token.Col, "<->", expected.Col)
|
|
||||||
t.FailNow()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tok, ok := <-ch
|
|
||||||
if ok {
|
|
||||||
t.Log("channel is not closed!")
|
|
||||||
t.Log(len(ch)+1, "tokens remaining:")
|
|
||||||
|
|
||||||
t.Log("token ->", tok)
|
|
||||||
for token := range ch {
|
|
||||||
t.Log("token ->", token)
|
|
||||||
}
|
|
||||||
t.FailNow()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidKeyGroup(t *testing.T) {
|
func TestValidKeyGroup(t *testing.T) {
|
||||||
testFlow(t, "[hello world]", []token{
|
testFlow(t, "[hello world]", []token{
|
||||||
token{Position{1, 1}, tokenLeftBracket, "["},
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 2}, tokenKeyGroup, "hello world"},
|
{Position{1, 2}, tokenKeyGroup, "hello world"},
|
||||||
token{Position{1, 13}, tokenRightBracket, "]"},
|
{Position{1, 13}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 14}, tokenEOF, ""},
|
{Position{1, 14}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
|
||||||
|
testFlow(t, `[ j . "ʞ" . l ]`, []token{
|
||||||
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
|
{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
|
||||||
|
{Position{1, 15}, tokenRightBracket, "]"},
|
||||||
|
{Position{1, 16}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestUnclosedKeyGroup(t *testing.T) {
|
func TestUnclosedKeyGroup(t *testing.T) {
|
||||||
testFlow(t, "[hello world", []token{
|
testFlow(t, "[hello world", []token{
|
||||||
token{Position{1, 1}, tokenLeftBracket, "["},
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 2}, tokenError, "unclosed key group"},
|
{Position{1, 2}, tokenError, "unclosed table key"},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestComment(t *testing.T) {
|
func TestComment(t *testing.T) {
|
||||||
testFlow(t, "# blahblah", []token{
|
testFlow(t, "# blahblah", []token{
|
||||||
token{Position{1, 11}, tokenEOF, ""},
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyGroupComment(t *testing.T) {
|
func TestKeyGroupComment(t *testing.T) {
|
||||||
testFlow(t, "[hello world] # blahblah", []token{
|
testFlow(t, "[hello world] # blahblah", []token{
|
||||||
token{Position{1, 1}, tokenLeftBracket, "["},
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 2}, tokenKeyGroup, "hello world"},
|
{Position{1, 2}, tokenKeyGroup, "hello world"},
|
||||||
token{Position{1, 13}, tokenRightBracket, "]"},
|
{Position{1, 13}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 25}, tokenEOF, ""},
|
{Position{1, 25}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMultipleKeyGroupsComment(t *testing.T) {
|
func TestMultipleKeyGroupsComment(t *testing.T) {
|
||||||
testFlow(t, "[hello world] # blahblah\n[test]", []token{
|
testFlow(t, "[hello world] # blahblah\n[test]", []token{
|
||||||
token{Position{1, 1}, tokenLeftBracket, "["},
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 2}, tokenKeyGroup, "hello world"},
|
{Position{1, 2}, tokenKeyGroup, "hello world"},
|
||||||
token{Position{1, 13}, tokenRightBracket, "]"},
|
{Position{1, 13}, tokenRightBracket, "]"},
|
||||||
token{Position{2, 1}, tokenLeftBracket, "["},
|
{Position{2, 1}, tokenLeftBracket, "["},
|
||||||
token{Position{2, 2}, tokenKeyGroup, "test"},
|
{Position{2, 2}, tokenKeyGroup, "test"},
|
||||||
token{Position{2, 6}, tokenRightBracket, "]"},
|
{Position{2, 6}, tokenRightBracket, "]"},
|
||||||
token{Position{2, 7}, tokenEOF, ""},
|
{Position{2, 7}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSimpleWindowsCRLF(t *testing.T) {
|
||||||
|
testFlow(t, "a=4\r\nb=2", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 2}, tokenEqual, "="},
|
||||||
|
{Position{1, 3}, tokenInteger, "4"},
|
||||||
|
{Position{2, 1}, tokenKey, "b"},
|
||||||
|
{Position{2, 2}, tokenEqual, "="},
|
||||||
|
{Position{2, 3}, tokenInteger, "2"},
|
||||||
|
{Position{2, 4}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKey(t *testing.T) {
|
func TestBasicKey(t *testing.T) {
|
||||||
testFlow(t, "hello", []token{
|
testFlow(t, "hello", []token{
|
||||||
token{Position{1, 1}, tokenKey, "hello"},
|
{Position{1, 1}, tokenKey, "hello"},
|
||||||
token{Position{1, 6}, tokenEOF, ""},
|
{Position{1, 6}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKeyWithUnderscore(t *testing.T) {
|
func TestBasicKeyWithUnderscore(t *testing.T) {
|
||||||
testFlow(t, "hello_hello", []token{
|
testFlow(t, "hello_hello", []token{
|
||||||
token{Position{1, 1}, tokenKey, "hello_hello"},
|
{Position{1, 1}, tokenKey, "hello_hello"},
|
||||||
token{Position{1, 12}, tokenEOF, ""},
|
{Position{1, 12}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKeyWithDash(t *testing.T) {
|
func TestBasicKeyWithDash(t *testing.T) {
|
||||||
testFlow(t, "hello-world", []token{
|
testFlow(t, "hello-world", []token{
|
||||||
token{Position{1, 1}, tokenKey, "hello-world"},
|
{Position{1, 1}, tokenKey, "hello-world"},
|
||||||
token{Position{1, 12}, tokenEOF, ""},
|
{Position{1, 12}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKeyWithUppercaseMix(t *testing.T) {
|
func TestBasicKeyWithUppercaseMix(t *testing.T) {
|
||||||
testFlow(t, "helloHELLOHello", []token{
|
testFlow(t, "helloHELLOHello", []token{
|
||||||
token{Position{1, 1}, tokenKey, "helloHELLOHello"},
|
{Position{1, 1}, tokenKey, "helloHELLOHello"},
|
||||||
token{Position{1, 16}, tokenEOF, ""},
|
{Position{1, 16}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKeyWithInternationalCharacters(t *testing.T) {
|
func TestBasicKeyWithInternationalCharacters(t *testing.T) {
|
||||||
testFlow(t, "héllÖ", []token{
|
testFlow(t, "héllÖ", []token{
|
||||||
token{Position{1, 1}, tokenKey, "héllÖ"},
|
{Position{1, 1}, tokenKey, "héllÖ"},
|
||||||
token{Position{1, 6}, tokenEOF, ""},
|
{Position{1, 6}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKeyAndEqual(t *testing.T) {
|
func TestBasicKeyAndEqual(t *testing.T) {
|
||||||
testFlow(t, "hello =", []token{
|
testFlow(t, "hello =", []token{
|
||||||
token{Position{1, 1}, tokenKey, "hello"},
|
{Position{1, 1}, tokenKey, "hello"},
|
||||||
token{Position{1, 7}, tokenEqual, "="},
|
{Position{1, 7}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenEOF, ""},
|
{Position{1, 8}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyWithSharpAndEqual(t *testing.T) {
|
func TestKeyWithSharpAndEqual(t *testing.T) {
|
||||||
testFlow(t, "key#name = 5", []token{
|
testFlow(t, "key#name = 5", []token{
|
||||||
token{Position{1, 1}, tokenKey, "key#name"},
|
{Position{1, 1}, tokenError, "keys cannot contain # character"},
|
||||||
token{Position{1, 10}, tokenEqual, "="},
|
|
||||||
token{Position{1, 12}, tokenInteger, "5"},
|
|
||||||
token{Position{1, 13}, tokenEOF, ""},
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyWithSymbolsAndEqual(t *testing.T) {
|
func TestKeyWithSymbolsAndEqual(t *testing.T) {
|
||||||
testFlow(t, "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
|
testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
|
||||||
token{Position{1, 1}, tokenKey, "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'"},
|
{Position{1, 1}, tokenError, "keys cannot contain ~ character"},
|
||||||
token{Position{1, 39}, tokenEqual, "="},
|
|
||||||
token{Position{1, 41}, tokenInteger, "5"},
|
|
||||||
token{Position{1, 42}, tokenEOF, ""},
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualStringEscape(t *testing.T) {
|
func TestKeyEqualStringEscape(t *testing.T) {
|
||||||
testFlow(t, `foo = "hello\""`, []token{
|
testFlow(t, `foo = "hello\""`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenString, "hello\""},
|
{Position{1, 8}, tokenString, "hello\""},
|
||||||
token{Position{1, 16}, tokenEOF, ""},
|
{Position{1, 16}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualStringUnfinished(t *testing.T) {
|
func TestKeyEqualStringUnfinished(t *testing.T) {
|
||||||
testFlow(t, `foo = "bar`, []token{
|
testFlow(t, `foo = "bar`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenError, "unclosed string"},
|
{Position{1, 8}, tokenError, "unclosed string"},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualString(t *testing.T) {
|
func TestKeyEqualString(t *testing.T) {
|
||||||
testFlow(t, `foo = "bar"`, []token{
|
testFlow(t, `foo = "bar"`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenString, "bar"},
|
{Position{1, 8}, tokenString, "bar"},
|
||||||
token{Position{1, 12}, tokenEOF, ""},
|
{Position{1, 12}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualTrue(t *testing.T) {
|
func TestKeyEqualTrue(t *testing.T) {
|
||||||
testFlow(t, "foo = true", []token{
|
testFlow(t, "foo = true", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenTrue, "true"},
|
{Position{1, 7}, tokenTrue, "true"},
|
||||||
token{Position{1, 11}, tokenEOF, ""},
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualFalse(t *testing.T) {
|
func TestKeyEqualFalse(t *testing.T) {
|
||||||
testFlow(t, "foo = false", []token{
|
testFlow(t, "foo = false", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenFalse, "false"},
|
{Position{1, 7}, tokenFalse, "false"},
|
||||||
token{Position{1, 12}, tokenEOF, ""},
|
{Position{1, 12}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArrayNestedString(t *testing.T) {
|
func TestArrayNestedString(t *testing.T) {
|
||||||
testFlow(t, `a = [ ["hello", "world"] ]`, []token{
|
testFlow(t, `a = [ ["hello", "world"] ]`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "a"},
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
token{Position{1, 3}, tokenEqual, "="},
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
token{Position{1, 5}, tokenLeftBracket, "["},
|
{Position{1, 5}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 7}, tokenLeftBracket, "["},
|
{Position{1, 7}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 9}, tokenString, "hello"},
|
{Position{1, 9}, tokenString, "hello"},
|
||||||
token{Position{1, 15}, tokenComma, ","},
|
{Position{1, 15}, tokenComma, ","},
|
||||||
token{Position{1, 18}, tokenString, "world"},
|
{Position{1, 18}, tokenString, "world"},
|
||||||
token{Position{1, 24}, tokenRightBracket, "]"},
|
{Position{1, 24}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 26}, tokenRightBracket, "]"},
|
{Position{1, 26}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 27}, tokenEOF, ""},
|
{Position{1, 27}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArrayNestedInts(t *testing.T) {
|
func TestArrayNestedInts(t *testing.T) {
|
||||||
testFlow(t, "a = [ [42, 21], [10] ]", []token{
|
testFlow(t, "a = [ [42, 21], [10] ]", []token{
|
||||||
token{Position{1, 1}, tokenKey, "a"},
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
token{Position{1, 3}, tokenEqual, "="},
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
token{Position{1, 5}, tokenLeftBracket, "["},
|
{Position{1, 5}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 7}, tokenLeftBracket, "["},
|
{Position{1, 7}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 8}, tokenInteger, "42"},
|
{Position{1, 8}, tokenInteger, "42"},
|
||||||
token{Position{1, 10}, tokenComma, ","},
|
{Position{1, 10}, tokenComma, ","},
|
||||||
token{Position{1, 12}, tokenInteger, "21"},
|
{Position{1, 12}, tokenInteger, "21"},
|
||||||
token{Position{1, 14}, tokenRightBracket, "]"},
|
{Position{1, 14}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 15}, tokenComma, ","},
|
{Position{1, 15}, tokenComma, ","},
|
||||||
token{Position{1, 17}, tokenLeftBracket, "["},
|
{Position{1, 17}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 18}, tokenInteger, "10"},
|
{Position{1, 18}, tokenInteger, "10"},
|
||||||
token{Position{1, 20}, tokenRightBracket, "]"},
|
{Position{1, 20}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 22}, tokenRightBracket, "]"},
|
{Position{1, 22}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 23}, tokenEOF, ""},
|
{Position{1, 23}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArrayInts(t *testing.T) {
|
func TestArrayInts(t *testing.T) {
|
||||||
testFlow(t, "a = [ 42, 21, 10, ]", []token{
|
testFlow(t, "a = [ 42, 21, 10, ]", []token{
|
||||||
token{Position{1, 1}, tokenKey, "a"},
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
token{Position{1, 3}, tokenEqual, "="},
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
token{Position{1, 5}, tokenLeftBracket, "["},
|
{Position{1, 5}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 7}, tokenInteger, "42"},
|
{Position{1, 7}, tokenInteger, "42"},
|
||||||
token{Position{1, 9}, tokenComma, ","},
|
{Position{1, 9}, tokenComma, ","},
|
||||||
token{Position{1, 11}, tokenInteger, "21"},
|
{Position{1, 11}, tokenInteger, "21"},
|
||||||
token{Position{1, 13}, tokenComma, ","},
|
{Position{1, 13}, tokenComma, ","},
|
||||||
token{Position{1, 15}, tokenInteger, "10"},
|
{Position{1, 15}, tokenInteger, "10"},
|
||||||
token{Position{1, 17}, tokenComma, ","},
|
{Position{1, 17}, tokenComma, ","},
|
||||||
token{Position{1, 19}, tokenRightBracket, "]"},
|
{Position{1, 19}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 20}, tokenEOF, ""},
|
{Position{1, 20}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMultilineArrayComments(t *testing.T) {
|
func TestMultilineArrayComments(t *testing.T) {
|
||||||
testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
|
testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
|
||||||
token{Position{1, 1}, tokenKey, "a"},
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
token{Position{1, 3}, tokenEqual, "="},
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
token{Position{1, 5}, tokenLeftBracket, "["},
|
{Position{1, 5}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 6}, tokenInteger, "1"},
|
{Position{1, 6}, tokenInteger, "1"},
|
||||||
token{Position{1, 7}, tokenComma, ","},
|
{Position{1, 7}, tokenComma, ","},
|
||||||
token{Position{2, 1}, tokenInteger, "2"},
|
{Position{2, 1}, tokenInteger, "2"},
|
||||||
token{Position{2, 2}, tokenComma, ","},
|
{Position{2, 2}, tokenComma, ","},
|
||||||
token{Position{3, 1}, tokenInteger, "3"},
|
{Position{3, 1}, tokenInteger, "3"},
|
||||||
token{Position{3, 2}, tokenComma, ","},
|
{Position{3, 2}, tokenComma, ","},
|
||||||
token{Position{4, 1}, tokenRightBracket, "]"},
|
{Position{4, 1}, tokenRightBracket, "]"},
|
||||||
token{Position{4, 2}, tokenEOF, ""},
|
{Position{4, 2}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNestedArraysComment(t *testing.T) {
|
||||||
|
toml := `
|
||||||
|
someArray = [
|
||||||
|
# does not work
|
||||||
|
["entry1"]
|
||||||
|
]`
|
||||||
|
testFlow(t, toml, []token{
|
||||||
|
{Position{2, 1}, tokenKey, "someArray"},
|
||||||
|
{Position{2, 11}, tokenEqual, "="},
|
||||||
|
{Position{2, 13}, tokenLeftBracket, "["},
|
||||||
|
{Position{4, 1}, tokenLeftBracket, "["},
|
||||||
|
{Position{4, 3}, tokenString, "entry1"},
|
||||||
|
{Position{4, 10}, tokenRightBracket, "]"},
|
||||||
|
{Position{5, 1}, tokenRightBracket, "]"},
|
||||||
|
{Position{5, 2}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualArrayBools(t *testing.T) {
|
func TestKeyEqualArrayBools(t *testing.T) {
|
||||||
testFlow(t, "foo = [true, false, true]", []token{
|
testFlow(t, "foo = [true, false, true]", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenLeftBracket, "["},
|
{Position{1, 7}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 8}, tokenTrue, "true"},
|
{Position{1, 8}, tokenTrue, "true"},
|
||||||
token{Position{1, 12}, tokenComma, ","},
|
{Position{1, 12}, tokenComma, ","},
|
||||||
token{Position{1, 14}, tokenFalse, "false"},
|
{Position{1, 14}, tokenFalse, "false"},
|
||||||
token{Position{1, 19}, tokenComma, ","},
|
{Position{1, 19}, tokenComma, ","},
|
||||||
token{Position{1, 21}, tokenTrue, "true"},
|
{Position{1, 21}, tokenTrue, "true"},
|
||||||
token{Position{1, 25}, tokenRightBracket, "]"},
|
{Position{1, 25}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 26}, tokenEOF, ""},
|
{Position{1, 26}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
|
func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
|
||||||
testFlow(t, "foo = [true, false, true] # YEAH", []token{
|
testFlow(t, "foo = [true, false, true] # YEAH", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenLeftBracket, "["},
|
{Position{1, 7}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 8}, tokenTrue, "true"},
|
{Position{1, 8}, tokenTrue, "true"},
|
||||||
token{Position{1, 12}, tokenComma, ","},
|
{Position{1, 12}, tokenComma, ","},
|
||||||
token{Position{1, 14}, tokenFalse, "false"},
|
{Position{1, 14}, tokenFalse, "false"},
|
||||||
token{Position{1, 19}, tokenComma, ","},
|
{Position{1, 19}, tokenComma, ","},
|
||||||
token{Position{1, 21}, tokenTrue, "true"},
|
{Position{1, 21}, tokenTrue, "true"},
|
||||||
token{Position{1, 25}, tokenRightBracket, "]"},
|
{Position{1, 25}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 33}, tokenEOF, ""},
|
{Position{1, 33}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDateRegexp(t *testing.T) {
|
func TestDateRegexp(t *testing.T) {
|
||||||
if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" {
|
if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" {
|
||||||
t.Fail()
|
t.Error("basic lexing")
|
||||||
|
}
|
||||||
|
if dateRegexp.FindString("1979-05-27T00:32:00-07:00") == "" {
|
||||||
|
t.Error("offset lexing")
|
||||||
|
}
|
||||||
|
if dateRegexp.FindString("1979-05-27T00:32:00.999999-07:00") == "" {
|
||||||
|
t.Error("nano precision lexing")
|
||||||
|
}
|
||||||
|
if dateRegexp.FindString("1979-05-27 07:32:00Z") == "" {
|
||||||
|
t.Error("space delimiter lexing")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualDate(t *testing.T) {
|
func TestKeyEqualDate(t *testing.T) {
|
||||||
testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
|
testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
|
{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
|
||||||
token{Position{1, 27}, tokenEOF, ""},
|
{Position{1, 27}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
|
||||||
|
{Position{1, 32}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
|
||||||
|
{Position{1, 39}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, "foo = 1979-05-27 07:32:00Z", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenDate, "1979-05-27 07:32:00Z"},
|
||||||
|
{Position{1, 27}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFloatEndingWithDot(t *testing.T) {
|
func TestFloatEndingWithDot(t *testing.T) {
|
||||||
testFlow(t, "foo = 42.", []token{
|
testFlow(t, "foo = 42.", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenError, "float cannot end with a dot"},
|
{Position{1, 7}, tokenError, "float cannot end with a dot"},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFloatWithTwoDots(t *testing.T) {
|
func TestFloatWithTwoDots(t *testing.T) {
|
||||||
testFlow(t, "foo = 4.2.", []token{
|
testFlow(t, "foo = 4.2.", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenError, "cannot have two dots in one float"},
|
{Position{1, 7}, tokenError, "cannot have two dots in one float"},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDoubleEqualKey(t *testing.T) {
|
func TestFloatWithExponent1(t *testing.T) {
|
||||||
testFlow(t, "foo= = 2", []token{
|
testFlow(t, "a = 5e+22", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
token{Position{1, 4}, tokenEqual, "="},
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
token{Position{1, 5}, tokenError, "cannot have multiple equals for the same key"},
|
{Position{1, 5}, tokenFloat, "5e+22"},
|
||||||
|
{Position{1, 10}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFloatWithExponent2(t *testing.T) {
|
||||||
|
testFlow(t, "a = 5E+22", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
|
{Position{1, 5}, tokenFloat, "5E+22"},
|
||||||
|
{Position{1, 10}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFloatWithExponent3(t *testing.T) {
|
||||||
|
testFlow(t, "a = -5e+22", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
|
{Position{1, 5}, tokenFloat, "-5e+22"},
|
||||||
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFloatWithExponent4(t *testing.T) {
|
||||||
|
testFlow(t, "a = -5e-22", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
|
{Position{1, 5}, tokenFloat, "-5e-22"},
|
||||||
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFloatWithExponent5(t *testing.T) {
|
||||||
|
testFlow(t, "a = 6.626e-34", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
|
{Position{1, 5}, tokenFloat, "6.626e-34"},
|
||||||
|
{Position{1, 14}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestInvalidEsquapeSequence(t *testing.T) {
|
func TestInvalidEsquapeSequence(t *testing.T) {
|
||||||
testFlow(t, `foo = "\x"`, []token{
|
testFlow(t, `foo = "\x"`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
|
{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNestedArrays(t *testing.T) {
|
func TestNestedArrays(t *testing.T) {
|
||||||
testFlow(t, "foo = [[[]]]", []token{
|
testFlow(t, "foo = [[[]]]", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenLeftBracket, "["},
|
{Position{1, 7}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 8}, tokenLeftBracket, "["},
|
{Position{1, 8}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 9}, tokenLeftBracket, "["},
|
{Position{1, 9}, tokenLeftBracket, "["},
|
||||||
token{Position{1, 10}, tokenRightBracket, "]"},
|
{Position{1, 10}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 11}, tokenRightBracket, "]"},
|
{Position{1, 11}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 12}, tokenRightBracket, "]"},
|
{Position{1, 12}, tokenRightBracket, "]"},
|
||||||
token{Position{1, 13}, tokenEOF, ""},
|
{Position{1, 13}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualNumber(t *testing.T) {
|
func TestKeyEqualNumber(t *testing.T) {
|
||||||
testFlow(t, "foo = 42", []token{
|
testFlow(t, "foo = 42", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenInteger, "42"},
|
{Position{1, 7}, tokenInteger, "42"},
|
||||||
token{Position{1, 9}, tokenEOF, ""},
|
{Position{1, 9}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
|
||||||
testFlow(t, "foo = +42", []token{
|
testFlow(t, "foo = +42", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenInteger, "+42"},
|
{Position{1, 7}, tokenInteger, "+42"},
|
||||||
token{Position{1, 10}, tokenEOF, ""},
|
{Position{1, 10}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
|
||||||
testFlow(t, "foo = -42", []token{
|
testFlow(t, "foo = -42", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenInteger, "-42"},
|
{Position{1, 7}, tokenInteger, "-42"},
|
||||||
token{Position{1, 10}, tokenEOF, ""},
|
{Position{1, 10}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
|
||||||
testFlow(t, "foo = 4.2", []token{
|
testFlow(t, "foo = 4.2", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenFloat, "4.2"},
|
{Position{1, 7}, tokenFloat, "4.2"},
|
||||||
token{Position{1, 10}, tokenEOF, ""},
|
{Position{1, 10}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
|
||||||
testFlow(t, "foo = +4.2", []token{
|
testFlow(t, "foo = +4.2", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenFloat, "+4.2"},
|
{Position{1, 7}, tokenFloat, "+4.2"},
|
||||||
token{Position{1, 11}, tokenEOF, ""},
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
|
||||||
testFlow(t, "foo = -4.2", []token{
|
testFlow(t, "foo = -4.2", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenFloat, "-4.2"},
|
{Position{1, 7}, tokenFloat, "-4.2"},
|
||||||
token{Position{1, 11}, tokenEOF, ""},
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = 1_000", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenInteger, "1_000"},
|
||||||
|
{Position{1, 12}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = 5_349_221", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenInteger, "5_349_221"},
|
||||||
|
{Position{1, 16}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = 1_2_3_4_5", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
|
||||||
|
{Position{1, 16}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "flt8"},
|
||||||
|
{Position{1, 6}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
|
||||||
|
{Position{1, 33}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = +", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenError, "no digit in that number"},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMultiline(t *testing.T) {
|
func TestMultiline(t *testing.T) {
|
||||||
testFlow(t, "foo = 42\nbar=21", []token{
|
testFlow(t, "foo = 42\nbar=21", []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 7}, tokenInteger, "42"},
|
{Position{1, 7}, tokenInteger, "42"},
|
||||||
token{Position{2, 1}, tokenKey, "bar"},
|
{Position{2, 1}, tokenKey, "bar"},
|
||||||
token{Position{2, 4}, tokenEqual, "="},
|
{Position{2, 4}, tokenEqual, "="},
|
||||||
token{Position{2, 5}, tokenInteger, "21"},
|
{Position{2, 5}, tokenInteger, "21"},
|
||||||
token{Position{2, 7}, tokenEOF, ""},
|
{Position{2, 7}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyEqualStringUnicodeEscape(t *testing.T) {
|
func TestKeyEqualStringUnicodeEscape(t *testing.T) {
|
||||||
testFlow(t, `foo = "hello \u2665"`, []token{
|
testFlow(t, `foo = "hello \u2665"`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenString, "hello ♥"},
|
{Position{1, 8}, tokenString, "hello ♥"},
|
||||||
token{Position{1, 21}, tokenEOF, ""},
|
{Position{1, 21}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "hello \U000003B4"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "hello δ"},
|
||||||
|
{Position{1, 25}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "\uabcd"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "\uabcd"},
|
||||||
|
{Position{1, 15}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "\uABCD"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "\uABCD"},
|
||||||
|
{Position{1, 15}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "\U000bcdef"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "\U000bcdef"},
|
||||||
|
{Position{1, 19}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "\U000BCDEF"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "\U000BCDEF"},
|
||||||
|
{Position{1, 19}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "\u2"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenError, "unfinished unicode escape"},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = "\U2"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenError, "unfinished unicode escape"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKeyEqualStringNoEscape(t *testing.T) {
|
||||||
|
testFlow(t, "foo = \"hello \u0002\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenError, "unescaped control character U+0002"},
|
||||||
|
})
|
||||||
|
testFlow(t, "foo = \"hello \u001F\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenError, "unescaped control character U+001F"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLiteralString(t *testing.T) {
|
||||||
|
testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
|
||||||
|
{Position{1, 34}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
|
||||||
|
{Position{1, 35}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
|
||||||
|
{Position{1, 34}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = '<\i\c*\s*>'`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, `<\i\c*\s*>`},
|
||||||
|
{Position{1, 19}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenError, "unclosed string"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMultilineLiteralString(t *testing.T) {
|
||||||
|
testFlow(t, `foo = '''hello 'literal' world'''`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 10}, tokenString, `hello 'literal' world`},
|
||||||
|
{Position{1, 34}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
|
||||||
|
{Position{4, 9}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
|
||||||
|
{Position{4, 9}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMultilineString(t *testing.T) {
|
||||||
|
testFlow(t, `foo = """hello "literal" world"""`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 10}, tokenString, `hello "literal" world`},
|
||||||
|
{Position{1, 34}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{2, 1}, tokenString, "hello\"literal\"world"},
|
||||||
|
{Position{4, 9}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "foo = \"\"\"\\\n \\\n \\\n hello\\\nmultiline\\\nworld\"\"\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 10}, tokenString, "hellomultilineworld"},
|
||||||
|
{Position{6, 9}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "key2"},
|
||||||
|
{Position{1, 6}, tokenEqual, "="},
|
||||||
|
{Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
|
||||||
|
{Position{6, 21}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "key2 = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "key2"},
|
||||||
|
{Position{1, 6}, tokenEqual, "="},
|
||||||
|
{Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
|
||||||
|
{Position{5, 11}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "key2"},
|
||||||
|
{Position{1, 6}, tokenEqual, "="},
|
||||||
|
{Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
|
||||||
|
{Position{1, 41}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "key2"},
|
||||||
|
{Position{1, 6}, tokenEqual, "="},
|
||||||
|
{Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
|
||||||
|
{Position{3, 20}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestUnicodeString(t *testing.T) {
|
func TestUnicodeString(t *testing.T) {
|
||||||
testFlow(t, `foo = "hello ♥ world"`, []token{
|
testFlow(t, `foo = "hello ♥ world"`, []token{
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
token{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
token{Position{1, 8}, tokenString, "hello ♥ world"},
|
{Position{1, 8}, tokenString, "hello ♥ world"},
|
||||||
token{Position{1, 22}, tokenEOF, ""},
|
{Position{1, 22}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
func TestEscapeInString(t *testing.T) {
|
||||||
|
testFlow(t, `foo = "\b\f\/"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "\b\f/"},
|
||||||
|
{Position{1, 15}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKeyGroupArray(t *testing.T) {
|
func TestKeyGroupArray(t *testing.T) {
|
||||||
testFlow(t, "[[foo]]", []token{
|
testFlow(t, "[[foo]]", []token{
|
||||||
token{Position{1, 1}, tokenDoubleLeftBracket, "[["},
|
{Position{1, 1}, tokenDoubleLeftBracket, "[["},
|
||||||
token{Position{1, 3}, tokenKeyGroupArray, "foo"},
|
{Position{1, 3}, tokenKeyGroupArray, "foo"},
|
||||||
token{Position{1, 6}, tokenDoubleRightBracket, "]]"},
|
{Position{1, 6}, tokenDoubleRightBracket, "]]"},
|
||||||
token{Position{1, 8}, tokenEOF, ""},
|
{Position{1, 8}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestQuotedKey(t *testing.T) {
|
||||||
|
testFlow(t, "\"a b\" = 42", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "\"a b\""},
|
||||||
|
{Position{1, 7}, tokenEqual, "="},
|
||||||
|
{Position{1, 9}, tokenInteger, "42"},
|
||||||
|
{Position{1, 11}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKeyNewline(t *testing.T) {
|
||||||
|
testFlow(t, "a\n= 4", []token{
|
||||||
|
{Position{1, 1}, tokenError, "keys cannot contain new lines"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidFloat(t *testing.T) {
|
||||||
|
testFlow(t, "a=7e1_", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 2}, tokenEqual, "="},
|
||||||
|
{Position{1, 3}, tokenFloat, "7e1_"},
|
||||||
|
{Position{1, 7}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexUnknownRvalue(t *testing.T) {
|
||||||
|
testFlow(t, `a = !b`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
|
{Position{1, 5}, tokenError, "no value can start with !"},
|
||||||
|
})
|
||||||
|
|
||||||
|
testFlow(t, `a = \b`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "a"},
|
||||||
|
{Position{1, 3}, tokenEqual, "="},
|
||||||
|
{Position{1, 5}, tokenError, `no value can start with \`},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkLexer(b *testing.B) {
|
||||||
|
sample := `title = "Hugo: A Fast and Flexible Website Generator"
|
||||||
|
baseurl = "http://gohugo.io/"
|
||||||
|
MetaDataFormat = "yaml"
|
||||||
|
pluralizeListTitles = false
|
||||||
|
|
||||||
|
[params]
|
||||||
|
description = "Documentation of Hugo, a fast and flexible static site generator built with love by spf13, bep and friends in Go"
|
||||||
|
author = "Steve Francia (spf13) and friends"
|
||||||
|
release = "0.22-DEV"
|
||||||
|
|
||||||
|
[[menu.main]]
|
||||||
|
name = "Download Hugo"
|
||||||
|
pre = "<i class='fa fa-download'></i>"
|
||||||
|
url = "https://github.com/spf13/hugo/releases"
|
||||||
|
weight = -200
|
||||||
|
`
|
||||||
|
b.ResetTimer()
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
lexToml([]byte(sample))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
+837
@@ -0,0 +1,837 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
tagFieldName = "toml"
|
||||||
|
tagFieldComment = "comment"
|
||||||
|
tagCommented = "commented"
|
||||||
|
tagMultiline = "multiline"
|
||||||
|
tagDefault = "default"
|
||||||
|
)
|
||||||
|
|
||||||
|
type tomlOpts struct {
|
||||||
|
name string
|
||||||
|
comment string
|
||||||
|
commented bool
|
||||||
|
multiline bool
|
||||||
|
include bool
|
||||||
|
omitempty bool
|
||||||
|
defaultValue string
|
||||||
|
}
|
||||||
|
|
||||||
|
type encOpts struct {
|
||||||
|
quoteMapKeys bool
|
||||||
|
arraysOneElementPerLine bool
|
||||||
|
}
|
||||||
|
|
||||||
|
var encOptsDefaults = encOpts{
|
||||||
|
quoteMapKeys: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
type annotation struct {
|
||||||
|
tag string
|
||||||
|
comment string
|
||||||
|
commented string
|
||||||
|
multiline string
|
||||||
|
defaultValue string
|
||||||
|
}
|
||||||
|
|
||||||
|
var annotationDefault = annotation{
|
||||||
|
tag: tagFieldName,
|
||||||
|
comment: tagFieldComment,
|
||||||
|
commented: tagCommented,
|
||||||
|
multiline: tagMultiline,
|
||||||
|
defaultValue: tagDefault,
|
||||||
|
}
|
||||||
|
|
||||||
|
type marshalOrder int
|
||||||
|
|
||||||
|
// Orders the Encoder can write the fields to the output stream.
|
||||||
|
const (
|
||||||
|
// Sort fields alphabetically.
|
||||||
|
OrderAlphabetical marshalOrder = iota + 1
|
||||||
|
// Preserve the order the fields are encountered. For example, the order of fields in
|
||||||
|
// a struct.
|
||||||
|
OrderPreserve
|
||||||
|
)
|
||||||
|
|
||||||
|
var timeType = reflect.TypeOf(time.Time{})
|
||||||
|
var marshalerType = reflect.TypeOf(new(Marshaler)).Elem()
|
||||||
|
|
||||||
|
// Check if the given marshal type maps to a Tree primitive
|
||||||
|
func isPrimitive(mtype reflect.Type) bool {
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Ptr:
|
||||||
|
return isPrimitive(mtype.Elem())
|
||||||
|
case reflect.Bool:
|
||||||
|
return true
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
return true
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||||
|
return true
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
return true
|
||||||
|
case reflect.String:
|
||||||
|
return true
|
||||||
|
case reflect.Struct:
|
||||||
|
return mtype == timeType || isCustomMarshaler(mtype)
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the given marshal type maps to a Tree slice
|
||||||
|
func isTreeSlice(mtype reflect.Type) bool {
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Slice:
|
||||||
|
return !isOtherSlice(mtype)
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the given marshal type maps to a non-Tree slice
|
||||||
|
func isOtherSlice(mtype reflect.Type) bool {
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Ptr:
|
||||||
|
return isOtherSlice(mtype.Elem())
|
||||||
|
case reflect.Slice:
|
||||||
|
return isPrimitive(mtype.Elem()) || isOtherSlice(mtype.Elem())
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the given marshal type maps to a Tree
|
||||||
|
func isTree(mtype reflect.Type) bool {
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Map:
|
||||||
|
return true
|
||||||
|
case reflect.Struct:
|
||||||
|
return !isPrimitive(mtype)
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func isCustomMarshaler(mtype reflect.Type) bool {
|
||||||
|
return mtype.Implements(marshalerType)
|
||||||
|
}
|
||||||
|
|
||||||
|
func callCustomMarshaler(mval reflect.Value) ([]byte, error) {
|
||||||
|
return mval.Interface().(Marshaler).MarshalTOML()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marshaler is the interface implemented by types that
|
||||||
|
// can marshal themselves into valid TOML.
|
||||||
|
type Marshaler interface {
|
||||||
|
MarshalTOML() ([]byte, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
Marshal returns the TOML encoding of v. Behavior is similar to the Go json
|
||||||
|
encoder, except that there is no concept of a Marshaler interface or MarshalTOML
|
||||||
|
function for sub-structs, and currently only definite types can be marshaled
|
||||||
|
(i.e. no `interface{}`).
|
||||||
|
|
||||||
|
The following struct annotations are supported:
|
||||||
|
|
||||||
|
toml:"Field" Overrides the field's name to output.
|
||||||
|
omitempty When set, empty values and groups are not emitted.
|
||||||
|
comment:"comment" Emits a # comment on the same line. This supports new lines.
|
||||||
|
commented:"true" Emits the value as commented.
|
||||||
|
|
||||||
|
Note that pointers are automatically assigned the "omitempty" option, as TOML
|
||||||
|
explicitly does not handle null values (saying instead the label should be
|
||||||
|
dropped).
|
||||||
|
|
||||||
|
Tree structural types and corresponding marshal types:
|
||||||
|
|
||||||
|
*Tree (*)struct, (*)map[string]interface{}
|
||||||
|
[]*Tree (*)[](*)struct, (*)[](*)map[string]interface{}
|
||||||
|
[]interface{} (as interface{}) (*)[]primitive, (*)[]([]interface{})
|
||||||
|
interface{} (*)primitive
|
||||||
|
|
||||||
|
Tree primitive types and corresponding marshal types:
|
||||||
|
|
||||||
|
uint64 uint, uint8-uint64, pointers to same
|
||||||
|
int64 int, int8-uint64, pointers to same
|
||||||
|
float64 float32, float64, pointers to same
|
||||||
|
string string, pointers to same
|
||||||
|
bool bool, pointers to same
|
||||||
|
time.Time time.Time{}, pointers to same
|
||||||
|
|
||||||
|
For additional flexibility, use the Encoder API.
|
||||||
|
*/
|
||||||
|
func Marshal(v interface{}) ([]byte, error) {
|
||||||
|
return NewEncoder(nil).marshal(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encoder writes TOML values to an output stream.
|
||||||
|
type Encoder struct {
|
||||||
|
w io.Writer
|
||||||
|
encOpts
|
||||||
|
annotation
|
||||||
|
line int
|
||||||
|
col int
|
||||||
|
order marshalOrder
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEncoder returns a new encoder that writes to w.
|
||||||
|
func NewEncoder(w io.Writer) *Encoder {
|
||||||
|
return &Encoder{
|
||||||
|
w: w,
|
||||||
|
encOpts: encOptsDefaults,
|
||||||
|
annotation: annotationDefault,
|
||||||
|
line: 0,
|
||||||
|
col: 1,
|
||||||
|
order: OrderAlphabetical,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encode writes the TOML encoding of v to the stream.
|
||||||
|
//
|
||||||
|
// See the documentation for Marshal for details.
|
||||||
|
func (e *Encoder) Encode(v interface{}) error {
|
||||||
|
b, err := e.marshal(v)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := e.w.Write(b); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// QuoteMapKeys sets up the encoder to encode
|
||||||
|
// maps with string type keys with quoted TOML keys.
|
||||||
|
//
|
||||||
|
// This relieves the character limitations on map keys.
|
||||||
|
func (e *Encoder) QuoteMapKeys(v bool) *Encoder {
|
||||||
|
e.quoteMapKeys = v
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// ArraysWithOneElementPerLine sets up the encoder to encode arrays
|
||||||
|
// with more than one element on multiple lines instead of one.
|
||||||
|
//
|
||||||
|
// For example:
|
||||||
|
//
|
||||||
|
// A = [1,2,3]
|
||||||
|
//
|
||||||
|
// Becomes
|
||||||
|
//
|
||||||
|
// A = [
|
||||||
|
// 1,
|
||||||
|
// 2,
|
||||||
|
// 3,
|
||||||
|
// ]
|
||||||
|
func (e *Encoder) ArraysWithOneElementPerLine(v bool) *Encoder {
|
||||||
|
e.arraysOneElementPerLine = v
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// Order allows to change in which order fields will be written to the output stream.
|
||||||
|
func (e *Encoder) Order(ord marshalOrder) *Encoder {
|
||||||
|
e.order = ord
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTagName allows changing default tag "toml"
|
||||||
|
func (e *Encoder) SetTagName(v string) *Encoder {
|
||||||
|
e.tag = v
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTagComment allows changing default tag "comment"
|
||||||
|
func (e *Encoder) SetTagComment(v string) *Encoder {
|
||||||
|
e.comment = v
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTagCommented allows changing default tag "commented"
|
||||||
|
func (e *Encoder) SetTagCommented(v string) *Encoder {
|
||||||
|
e.commented = v
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTagMultiline allows changing default tag "multiline"
|
||||||
|
func (e *Encoder) SetTagMultiline(v string) *Encoder {
|
||||||
|
e.multiline = v
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *Encoder) marshal(v interface{}) ([]byte, error) {
|
||||||
|
mtype := reflect.TypeOf(v)
|
||||||
|
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Struct, reflect.Map:
|
||||||
|
case reflect.Ptr:
|
||||||
|
if mtype.Elem().Kind() != reflect.Struct {
|
||||||
|
return []byte{}, errors.New("Only pointer to struct can be marshaled to TOML")
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return []byte{}, errors.New("Only a struct or map can be marshaled to TOML")
|
||||||
|
}
|
||||||
|
|
||||||
|
sval := reflect.ValueOf(v)
|
||||||
|
if isCustomMarshaler(mtype) {
|
||||||
|
return callCustomMarshaler(sval)
|
||||||
|
}
|
||||||
|
t, err := e.valueToTree(mtype, sval)
|
||||||
|
if err != nil {
|
||||||
|
return []byte{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
_, err = t.writeToOrdered(&buf, "", "", 0, e.arraysOneElementPerLine, e.order)
|
||||||
|
|
||||||
|
return buf.Bytes(), err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create next tree with a position based on Encoder.line
|
||||||
|
func (e *Encoder) nextTree() *Tree {
|
||||||
|
return newTreeWithPosition(Position{Line: e.line, Col: 1})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert given marshal struct or map value to toml tree
|
||||||
|
func (e *Encoder) valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) {
|
||||||
|
if mtype.Kind() == reflect.Ptr {
|
||||||
|
return e.valueToTree(mtype.Elem(), mval.Elem())
|
||||||
|
}
|
||||||
|
tval := e.nextTree()
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Struct:
|
||||||
|
for i := 0; i < mtype.NumField(); i++ {
|
||||||
|
mtypef, mvalf := mtype.Field(i), mval.Field(i)
|
||||||
|
opts := tomlOptions(mtypef, e.annotation)
|
||||||
|
if opts.include && (!opts.omitempty || !isZero(mvalf)) {
|
||||||
|
val, err := e.valueToToml(mtypef.Type, mvalf)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
tval.SetWithOptions(opts.name, SetOptions{
|
||||||
|
Comment: opts.comment,
|
||||||
|
Commented: opts.commented,
|
||||||
|
Multiline: opts.multiline,
|
||||||
|
}, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case reflect.Map:
|
||||||
|
keys := mval.MapKeys()
|
||||||
|
if e.order == OrderPreserve && len(keys) > 0 {
|
||||||
|
// Sorting []reflect.Value is not straight forward.
|
||||||
|
//
|
||||||
|
// OrderPreserve will support deterministic results when string is used
|
||||||
|
// as the key to maps.
|
||||||
|
typ := keys[0].Type()
|
||||||
|
kind := keys[0].Kind()
|
||||||
|
if kind == reflect.String {
|
||||||
|
ikeys := make([]string, len(keys))
|
||||||
|
for i := range keys {
|
||||||
|
ikeys[i] = keys[i].Interface().(string)
|
||||||
|
}
|
||||||
|
sort.Strings(ikeys)
|
||||||
|
for i := range ikeys {
|
||||||
|
keys[i] = reflect.ValueOf(ikeys[i]).Convert(typ)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, key := range keys {
|
||||||
|
mvalf := mval.MapIndex(key)
|
||||||
|
val, err := e.valueToToml(mtype.Elem(), mvalf)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if e.quoteMapKeys {
|
||||||
|
keyStr, err := tomlValueStringRepresentation(key.String(), "", e.arraysOneElementPerLine)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
tval.SetPath([]string{keyStr}, val)
|
||||||
|
} else {
|
||||||
|
tval.Set(key.String(), val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert given marshal slice to slice of Toml trees
|
||||||
|
func (e *Encoder) valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) {
|
||||||
|
tval := make([]*Tree, mval.Len(), mval.Len())
|
||||||
|
for i := 0; i < mval.Len(); i++ {
|
||||||
|
val, err := e.valueToTree(mtype.Elem(), mval.Index(i))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
tval[i] = val
|
||||||
|
}
|
||||||
|
return tval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert given marshal slice to slice of toml values
|
||||||
|
func (e *Encoder) valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) {
|
||||||
|
tval := make([]interface{}, mval.Len(), mval.Len())
|
||||||
|
for i := 0; i < mval.Len(); i++ {
|
||||||
|
val, err := e.valueToToml(mtype.Elem(), mval.Index(i))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
tval[i] = val
|
||||||
|
}
|
||||||
|
return tval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert given marshal value to toml value
|
||||||
|
func (e *Encoder) valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) {
|
||||||
|
e.line++
|
||||||
|
if mtype.Kind() == reflect.Ptr {
|
||||||
|
return e.valueToToml(mtype.Elem(), mval.Elem())
|
||||||
|
}
|
||||||
|
switch {
|
||||||
|
case isCustomMarshaler(mtype):
|
||||||
|
return callCustomMarshaler(mval)
|
||||||
|
case isTree(mtype):
|
||||||
|
return e.valueToTree(mtype, mval)
|
||||||
|
case isTreeSlice(mtype):
|
||||||
|
return e.valueToTreeSlice(mtype, mval)
|
||||||
|
case isOtherSlice(mtype):
|
||||||
|
return e.valueToOtherSlice(mtype, mval)
|
||||||
|
default:
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Bool:
|
||||||
|
return mval.Bool(), nil
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
if mtype.Kind() == reflect.Int64 && mtype == reflect.TypeOf(time.Duration(1)) {
|
||||||
|
return fmt.Sprint(mval), nil
|
||||||
|
}
|
||||||
|
return mval.Int(), nil
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||||
|
return mval.Uint(), nil
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
return mval.Float(), nil
|
||||||
|
case reflect.String:
|
||||||
|
return mval.String(), nil
|
||||||
|
case reflect.Struct:
|
||||||
|
return mval.Interface().(time.Time), nil
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("Marshal can't handle %v(%v)", mtype, mtype.Kind())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unmarshal attempts to unmarshal the Tree into a Go struct pointed by v.
|
||||||
|
// Neither Unmarshaler interfaces nor UnmarshalTOML functions are supported for
|
||||||
|
// sub-structs, and only definite types can be unmarshaled.
|
||||||
|
func (t *Tree) Unmarshal(v interface{}) error {
|
||||||
|
d := Decoder{tval: t, tagName: tagFieldName}
|
||||||
|
return d.unmarshal(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marshal returns the TOML encoding of Tree.
|
||||||
|
// See Marshal() documentation for types mapping table.
|
||||||
|
func (t *Tree) Marshal() ([]byte, error) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
_, err := t.WriteTo(&buf)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unmarshal parses the TOML-encoded data and stores the result in the value
|
||||||
|
// pointed to by v. Behavior is similar to the Go json encoder, except that there
|
||||||
|
// is no concept of an Unmarshaler interface or UnmarshalTOML function for
|
||||||
|
// sub-structs, and currently only definite types can be unmarshaled to (i.e. no
|
||||||
|
// `interface{}`).
|
||||||
|
//
|
||||||
|
// The following struct annotations are supported:
|
||||||
|
//
|
||||||
|
// toml:"Field" Overrides the field's name to map to.
|
||||||
|
// default:"foo" Provides a default value.
|
||||||
|
//
|
||||||
|
// For default values, only fields of the following types are supported:
|
||||||
|
// * string
|
||||||
|
// * bool
|
||||||
|
// * int
|
||||||
|
// * int64
|
||||||
|
// * float64
|
||||||
|
//
|
||||||
|
// See Marshal() documentation for types mapping table.
|
||||||
|
func Unmarshal(data []byte, v interface{}) error {
|
||||||
|
t, err := LoadReader(bytes.NewReader(data))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return t.Unmarshal(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decoder reads and decodes TOML values from an input stream.
|
||||||
|
type Decoder struct {
|
||||||
|
r io.Reader
|
||||||
|
tval *Tree
|
||||||
|
encOpts
|
||||||
|
tagName string
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDecoder returns a new decoder that reads from r.
|
||||||
|
func NewDecoder(r io.Reader) *Decoder {
|
||||||
|
return &Decoder{
|
||||||
|
r: r,
|
||||||
|
encOpts: encOptsDefaults,
|
||||||
|
tagName: tagFieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode reads a TOML-encoded value from it's input
|
||||||
|
// and unmarshals it in the value pointed at by v.
|
||||||
|
//
|
||||||
|
// See the documentation for Marshal for details.
|
||||||
|
func (d *Decoder) Decode(v interface{}) error {
|
||||||
|
var err error
|
||||||
|
d.tval, err = LoadReader(d.r)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return d.unmarshal(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTagName allows changing default tag "toml"
|
||||||
|
func (d *Decoder) SetTagName(v string) *Decoder {
|
||||||
|
d.tagName = v
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Decoder) unmarshal(v interface{}) error {
|
||||||
|
mtype := reflect.TypeOf(v)
|
||||||
|
if mtype.Kind() != reflect.Ptr {
|
||||||
|
return errors.New("only a pointer to struct or map can be unmarshaled from TOML")
|
||||||
|
}
|
||||||
|
|
||||||
|
elem := mtype.Elem()
|
||||||
|
|
||||||
|
switch elem.Kind() {
|
||||||
|
case reflect.Struct, reflect.Map:
|
||||||
|
default:
|
||||||
|
return errors.New("only a pointer to struct or map can be unmarshaled from TOML")
|
||||||
|
}
|
||||||
|
|
||||||
|
vv := reflect.ValueOf(v).Elem()
|
||||||
|
|
||||||
|
sval, err := d.valueFromTree(elem, d.tval, &vv)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
reflect.ValueOf(v).Elem().Set(sval)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert toml tree to marshal struct or map, using marshal type. When mval1
|
||||||
|
// is non-nil, merge fields into the given value instead of allocating a new one.
|
||||||
|
func (d *Decoder) valueFromTree(mtype reflect.Type, tval *Tree, mval1 *reflect.Value) (reflect.Value, error) {
|
||||||
|
if mtype.Kind() == reflect.Ptr {
|
||||||
|
return d.unwrapPointer(mtype, tval, mval1)
|
||||||
|
}
|
||||||
|
var mval reflect.Value
|
||||||
|
switch mtype.Kind() {
|
||||||
|
case reflect.Struct:
|
||||||
|
if mval1 != nil {
|
||||||
|
mval = *mval1
|
||||||
|
} else {
|
||||||
|
mval = reflect.New(mtype).Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < mtype.NumField(); i++ {
|
||||||
|
mtypef := mtype.Field(i)
|
||||||
|
an := annotation{tag: d.tagName}
|
||||||
|
opts := tomlOptions(mtypef, an)
|
||||||
|
if opts.include {
|
||||||
|
baseKey := opts.name
|
||||||
|
keysToTry := []string{
|
||||||
|
baseKey,
|
||||||
|
strings.ToLower(baseKey),
|
||||||
|
strings.ToTitle(baseKey),
|
||||||
|
strings.ToLower(string(baseKey[0])) + baseKey[1:],
|
||||||
|
}
|
||||||
|
|
||||||
|
found := false
|
||||||
|
for _, key := range keysToTry {
|
||||||
|
exists := tval.Has(key)
|
||||||
|
if !exists {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
val := tval.Get(key)
|
||||||
|
fval := mval.Field(i)
|
||||||
|
mvalf, err := d.valueFromToml(mtypef.Type, val, &fval)
|
||||||
|
if err != nil {
|
||||||
|
return mval, formatError(err, tval.GetPosition(key))
|
||||||
|
}
|
||||||
|
mval.Field(i).Set(mvalf)
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if !found && opts.defaultValue != "" {
|
||||||
|
mvalf := mval.Field(i)
|
||||||
|
var val interface{}
|
||||||
|
var err error
|
||||||
|
switch mvalf.Kind() {
|
||||||
|
case reflect.Bool:
|
||||||
|
val, err = strconv.ParseBool(opts.defaultValue)
|
||||||
|
if err != nil {
|
||||||
|
return mval.Field(i), err
|
||||||
|
}
|
||||||
|
case reflect.Int:
|
||||||
|
val, err = strconv.Atoi(opts.defaultValue)
|
||||||
|
if err != nil {
|
||||||
|
return mval.Field(i), err
|
||||||
|
}
|
||||||
|
case reflect.String:
|
||||||
|
val = opts.defaultValue
|
||||||
|
case reflect.Int64:
|
||||||
|
val, err = strconv.ParseInt(opts.defaultValue, 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return mval.Field(i), err
|
||||||
|
}
|
||||||
|
case reflect.Float64:
|
||||||
|
val, err = strconv.ParseFloat(opts.defaultValue, 64)
|
||||||
|
if err != nil {
|
||||||
|
return mval.Field(i), err
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return mval.Field(i), fmt.Errorf("unsuported field type for default option")
|
||||||
|
}
|
||||||
|
mval.Field(i).Set(reflect.ValueOf(val))
|
||||||
|
}
|
||||||
|
|
||||||
|
// save the old behavior above and try to check anonymous structs
|
||||||
|
if !found && opts.defaultValue == "" && mtypef.Anonymous && mtypef.Type.Kind() == reflect.Struct {
|
||||||
|
v, err := d.valueFromTree(mtypef.Type, tval, nil)
|
||||||
|
if err != nil {
|
||||||
|
return v, err
|
||||||
|
}
|
||||||
|
mval.Field(i).Set(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case reflect.Map:
|
||||||
|
mval = reflect.MakeMap(mtype)
|
||||||
|
for _, key := range tval.Keys() {
|
||||||
|
// TODO: path splits key
|
||||||
|
val := tval.GetPath([]string{key})
|
||||||
|
mvalf, err := d.valueFromToml(mtype.Elem(), val, nil)
|
||||||
|
if err != nil {
|
||||||
|
return mval, formatError(err, tval.GetPosition(key))
|
||||||
|
}
|
||||||
|
mval.SetMapIndex(reflect.ValueOf(key).Convert(mtype.Key()), mvalf)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return mval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert toml value to marshal struct/map slice, using marshal type
|
||||||
|
func (d *Decoder) valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) {
|
||||||
|
mval := reflect.MakeSlice(mtype, len(tval), len(tval))
|
||||||
|
for i := 0; i < len(tval); i++ {
|
||||||
|
val, err := d.valueFromTree(mtype.Elem(), tval[i], nil)
|
||||||
|
if err != nil {
|
||||||
|
return mval, err
|
||||||
|
}
|
||||||
|
mval.Index(i).Set(val)
|
||||||
|
}
|
||||||
|
return mval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert toml value to marshal primitive slice, using marshal type
|
||||||
|
func (d *Decoder) valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) {
|
||||||
|
mval := reflect.MakeSlice(mtype, len(tval), len(tval))
|
||||||
|
for i := 0; i < len(tval); i++ {
|
||||||
|
val, err := d.valueFromToml(mtype.Elem(), tval[i], nil)
|
||||||
|
if err != nil {
|
||||||
|
return mval, err
|
||||||
|
}
|
||||||
|
mval.Index(i).Set(val)
|
||||||
|
}
|
||||||
|
return mval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert toml value to marshal value, using marshal type. When mval1 is non-nil
// and the given type is a struct value, merge fields into it.
func (d *Decoder) valueFromToml(mtype reflect.Type, tval interface{}, mval1 *reflect.Value) (reflect.Value, error) {
	// Pointer targets: allocate the element and recurse.
	if mtype.Kind() == reflect.Ptr {
		return d.unwrapPointer(mtype, tval, mval1)
	}

	switch t := tval.(type) {
	case *Tree:
		// Only pass the merge target down when the destination is a struct;
		// other tree-like destinations are rebuilt from scratch.
		var mval11 *reflect.Value
		if mtype.Kind() == reflect.Struct {
			mval11 = mval1
		}

		if isTree(mtype) {
			return d.valueFromTree(mtype, t, mval11)
		}
		return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a tree", tval, tval)
	case []*Tree:
		if isTreeSlice(mtype) {
			return d.valueFromTreeSlice(mtype, t)
		}
		return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to trees", tval, tval)
	case []interface{}:
		if isOtherSlice(mtype) {
			return d.valueFromOtherSlice(mtype, t)
		}
		return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a slice", tval, tval)
	default:
		// Scalar TOML values: convert by destination kind, with
		// convertibility, sign and overflow checks.
		switch mtype.Kind() {
		case reflect.Bool, reflect.Struct:
			val := reflect.ValueOf(tval)
			// if this passes for when mtype is reflect.Struct, tval is a time.Time
			if !val.Type().ConvertibleTo(mtype) {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
			}

			return val.Convert(mtype), nil
		case reflect.String:
			val := reflect.ValueOf(tval)
			// stupidly, int64 is convertible to string. So special case this.
			if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Int64 {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
			}

			return val.Convert(mtype), nil
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			val := reflect.ValueOf(tval)
			// Special case: a string value is accepted when the destination
			// is exactly time.Duration (parsed with time.ParseDuration).
			if mtype.Kind() == reflect.Int64 && mtype == reflect.TypeOf(time.Duration(1)) && val.Kind() == reflect.String {
				// NOTE(review): this local d shadows the *Decoder receiver
				// for the rest of this branch.
				d, err := time.ParseDuration(val.String())
				if err != nil {
					return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v. %s", tval, tval, mtype.String(), err)
				}
				return reflect.ValueOf(d), nil
			}
			if !val.Type().ConvertibleTo(mtype) {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
			}
			// Reject values that do not fit the destination int width.
			if reflect.Indirect(reflect.New(mtype)).OverflowInt(val.Convert(mtype).Int()) {
				return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String())
			}

			return val.Convert(mtype), nil
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
			val := reflect.ValueOf(tval)
			if !val.Type().ConvertibleTo(mtype) {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
			}

			// Negative values cannot be stored in an unsigned destination;
			// check via a signed conversion before converting to unsigned.
			if val.Convert(reflect.TypeOf(int(1))).Int() < 0 {
				return reflect.ValueOf(nil), fmt.Errorf("%v(%T) is negative so does not fit in %v", tval, tval, mtype.String())
			}
			if reflect.Indirect(reflect.New(mtype)).OverflowUint(uint64(val.Convert(mtype).Uint())) {
				return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String())
			}

			return val.Convert(mtype), nil
		case reflect.Float32, reflect.Float64:
			val := reflect.ValueOf(tval)
			if !val.Type().ConvertibleTo(mtype) {
				return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
			}
			if reflect.Indirect(reflect.New(mtype)).OverflowFloat(val.Convert(mtype).Float()) {
				return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String())
			}

			return val.Convert(mtype), nil
		default:
			return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v(%v)", tval, tval, mtype, mtype.Kind())
		}
	}
}
|
||||||
|
|
||||||
|
func (d *Decoder) unwrapPointer(mtype reflect.Type, tval interface{}, mval1 *reflect.Value) (reflect.Value, error) {
|
||||||
|
var melem *reflect.Value
|
||||||
|
|
||||||
|
if mval1 != nil && !mval1.IsNil() && mtype.Elem().Kind() == reflect.Struct {
|
||||||
|
elem := mval1.Elem()
|
||||||
|
melem = &elem
|
||||||
|
}
|
||||||
|
|
||||||
|
val, err := d.valueFromToml(mtype.Elem(), tval, melem)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.ValueOf(nil), err
|
||||||
|
}
|
||||||
|
mval := reflect.New(mtype.Elem())
|
||||||
|
mval.Elem().Set(val)
|
||||||
|
return mval, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func tomlOptions(vf reflect.StructField, an annotation) tomlOpts {
|
||||||
|
tag := vf.Tag.Get(an.tag)
|
||||||
|
parse := strings.Split(tag, ",")
|
||||||
|
var comment string
|
||||||
|
if c := vf.Tag.Get(an.comment); c != "" {
|
||||||
|
comment = c
|
||||||
|
}
|
||||||
|
commented, _ := strconv.ParseBool(vf.Tag.Get(an.commented))
|
||||||
|
multiline, _ := strconv.ParseBool(vf.Tag.Get(an.multiline))
|
||||||
|
defaultValue := vf.Tag.Get(tagDefault)
|
||||||
|
result := tomlOpts{
|
||||||
|
name: vf.Name,
|
||||||
|
comment: comment,
|
||||||
|
commented: commented,
|
||||||
|
multiline: multiline,
|
||||||
|
include: true,
|
||||||
|
omitempty: false,
|
||||||
|
defaultValue: defaultValue,
|
||||||
|
}
|
||||||
|
if parse[0] != "" {
|
||||||
|
if parse[0] == "-" && len(parse) == 1 {
|
||||||
|
result.include = false
|
||||||
|
} else {
|
||||||
|
result.name = strings.Trim(parse[0], " ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if vf.PkgPath != "" {
|
||||||
|
result.include = false
|
||||||
|
}
|
||||||
|
if len(parse) > 1 && strings.Trim(parse[1], " ") == "omitempty" {
|
||||||
|
result.omitempty = true
|
||||||
|
}
|
||||||
|
if vf.Type.Kind() == reflect.Ptr {
|
||||||
|
result.omitempty = true
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func isZero(val reflect.Value) bool {
|
||||||
|
switch val.Type().Kind() {
|
||||||
|
case reflect.Map:
|
||||||
|
fallthrough
|
||||||
|
case reflect.Array:
|
||||||
|
fallthrough
|
||||||
|
case reflect.Slice:
|
||||||
|
return val.Len() == 0
|
||||||
|
default:
|
||||||
|
return reflect.DeepEqual(val.Interface(), reflect.Zero(val.Type()).Interface())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatError(err error, pos Position) error {
|
||||||
|
if err.Error()[0] == '(' { // Error already contains position information
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return fmt.Errorf("%s: %s", pos, err)
|
||||||
|
}
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
title = "TOML Marshal Testing"
|
||||||
|
|
||||||
|
[basic_lists]
|
||||||
|
floats = [12.3,45.6,78.9]
|
||||||
|
bools = [true,false,true]
|
||||||
|
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
|
||||||
|
ints = [8001,8001,8002]
|
||||||
|
uints = [5002,5003]
|
||||||
|
strings = ["One","Two","Three"]
|
||||||
|
|
||||||
|
[[subdocptrs]]
|
||||||
|
name = "Second"
|
||||||
|
|
||||||
|
[basic_map]
|
||||||
|
one = "one"
|
||||||
|
two = "two"
|
||||||
|
|
||||||
|
[subdoc]
|
||||||
|
|
||||||
|
[subdoc.second]
|
||||||
|
name = "Second"
|
||||||
|
|
||||||
|
[subdoc.first]
|
||||||
|
name = "First"
|
||||||
|
|
||||||
|
[basic]
|
||||||
|
uint = 5001
|
||||||
|
bool = true
|
||||||
|
float = 123.4
|
||||||
|
float64 = 123.456782132399
|
||||||
|
int = 5000
|
||||||
|
string = "Bite me"
|
||||||
|
date = 1979-05-27T07:32:00Z
|
||||||
|
|
||||||
|
[[subdoclist]]
|
||||||
|
name = "List.First"
|
||||||
|
|
||||||
|
[[subdoclist]]
|
||||||
|
name = "List.Second"
|
||||||
+1701
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,39 @@
|
|||||||
|
title = "TOML Marshal Testing"
|
||||||
|
|
||||||
|
[basic]
|
||||||
|
bool = true
|
||||||
|
date = 1979-05-27T07:32:00Z
|
||||||
|
float = 123.4
|
||||||
|
float64 = 123.456782132399
|
||||||
|
int = 5000
|
||||||
|
string = "Bite me"
|
||||||
|
uint = 5001
|
||||||
|
|
||||||
|
[basic_lists]
|
||||||
|
bools = [true,false,true]
|
||||||
|
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
|
||||||
|
floats = [12.3,45.6,78.9]
|
||||||
|
ints = [8001,8001,8002]
|
||||||
|
strings = ["One","Two","Three"]
|
||||||
|
uints = [5002,5003]
|
||||||
|
|
||||||
|
[basic_map]
|
||||||
|
one = "one"
|
||||||
|
two = "two"
|
||||||
|
|
||||||
|
[subdoc]
|
||||||
|
|
||||||
|
[subdoc.first]
|
||||||
|
name = "First"
|
||||||
|
|
||||||
|
[subdoc.second]
|
||||||
|
name = "Second"
|
||||||
|
|
||||||
|
[[subdoclist]]
|
||||||
|
name = "List.First"
|
||||||
|
|
||||||
|
[[subdoclist]]
|
||||||
|
name = "List.Second"
|
||||||
|
|
||||||
|
[[subdocptrs]]
|
||||||
|
name = "Second"
|
||||||
@@ -3,19 +3,22 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"math"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
type tomlParser struct {
|
type tomlParser struct {
|
||||||
flow chan token
|
flowIdx int
|
||||||
tree *TomlTree
|
flow []token
|
||||||
tokensBuffer []token
|
tree *Tree
|
||||||
currentGroup []string
|
currentTable []string
|
||||||
seenGroupKeys []string
|
seenTableKeys []string
|
||||||
}
|
}
|
||||||
|
|
||||||
type tomlParserStateFn func() tomlParserStateFn
|
type tomlParserStateFn func() tomlParserStateFn
|
||||||
@@ -32,16 +35,10 @@ func (p *tomlParser) run() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) peek() *token {
|
func (p *tomlParser) peek() *token {
|
||||||
if len(p.tokensBuffer) != 0 {
|
if p.flowIdx >= len(p.flow) {
|
||||||
return &(p.tokensBuffer[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
tok, ok := <-p.flow
|
|
||||||
if !ok {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
p.tokensBuffer = append(p.tokensBuffer, tok)
|
return &p.flow[p.flowIdx]
|
||||||
return &tok
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) assume(typ tokenType) {
|
func (p *tomlParser) assume(typ tokenType) {
|
||||||
@@ -55,16 +52,12 @@ func (p *tomlParser) assume(typ tokenType) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) getToken() *token {
|
func (p *tomlParser) getToken() *token {
|
||||||
if len(p.tokensBuffer) != 0 {
|
tok := p.peek()
|
||||||
tok := p.tokensBuffer[0]
|
if tok == nil {
|
||||||
p.tokensBuffer = p.tokensBuffer[1:]
|
|
||||||
return &tok
|
|
||||||
}
|
|
||||||
tok, ok := <-p.flow
|
|
||||||
if !ok {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
return &tok
|
p.flowIdx++
|
||||||
|
return tok
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) parseStart() tomlParserStateFn {
|
func (p *tomlParser) parseStart() tomlParserStateFn {
|
||||||
@@ -84,8 +77,10 @@ func (p *tomlParser) parseStart() tomlParserStateFn {
|
|||||||
return p.parseAssign
|
return p.parseAssign
|
||||||
case tokenEOF:
|
case tokenEOF:
|
||||||
return nil
|
return nil
|
||||||
|
case tokenError:
|
||||||
|
p.raiseError(tok, "parsing error: %s", tok.String())
|
||||||
default:
|
default:
|
||||||
p.raiseError(tok, "unexpected token")
|
p.raiseError(tok, "unexpected token %s", tok.typ)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -94,45 +89,48 @@ func (p *tomlParser) parseGroupArray() tomlParserStateFn {
|
|||||||
startToken := p.getToken() // discard the [[
|
startToken := p.getToken() // discard the [[
|
||||||
key := p.getToken()
|
key := p.getToken()
|
||||||
if key.typ != tokenKeyGroupArray {
|
if key.typ != tokenKeyGroupArray {
|
||||||
p.raiseError(key, "unexpected token %s, was expecting a key group array", key)
|
p.raiseError(key, "unexpected token %s, was expecting a table array key", key)
|
||||||
}
|
}
|
||||||
|
|
||||||
// get or create group array element at the indicated part in the path
|
// get or create table array element at the indicated part in the path
|
||||||
keys := strings.Split(key.val, ".")
|
keys, err := parseKey(key.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(key, "invalid table array key: %s", err)
|
||||||
|
}
|
||||||
p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
|
p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
|
||||||
destTree := p.tree.GetPath(keys)
|
destTree := p.tree.GetPath(keys)
|
||||||
var array []*TomlTree
|
var array []*Tree
|
||||||
if destTree == nil {
|
if destTree == nil {
|
||||||
array = make([]*TomlTree, 0)
|
array = make([]*Tree, 0)
|
||||||
} else if destTree.([]*TomlTree) != nil {
|
} else if target, ok := destTree.([]*Tree); ok && target != nil {
|
||||||
array = destTree.([]*TomlTree)
|
array = destTree.([]*Tree)
|
||||||
} else {
|
} else {
|
||||||
p.raiseError(key, "key %s is already assigned and not of type group array", key)
|
p.raiseError(key, "key %s is already assigned and not of type table array", key)
|
||||||
}
|
}
|
||||||
p.currentGroup = keys
|
p.currentTable = keys
|
||||||
|
|
||||||
// add a new tree to the end of the group array
|
// add a new tree to the end of the table array
|
||||||
newTree := newTomlTree()
|
newTree := newTree()
|
||||||
newTree.position = startToken.Position
|
newTree.position = startToken.Position
|
||||||
array = append(array, newTree)
|
array = append(array, newTree)
|
||||||
p.tree.SetPath(p.currentGroup, array)
|
p.tree.SetPath(p.currentTable, array)
|
||||||
|
|
||||||
// remove all keys that were children of this group array
|
// remove all keys that were children of this table array
|
||||||
prefix := key.val + "."
|
prefix := key.val + "."
|
||||||
found := false
|
found := false
|
||||||
for ii := 0; ii < len(p.seenGroupKeys); {
|
for ii := 0; ii < len(p.seenTableKeys); {
|
||||||
groupKey := p.seenGroupKeys[ii]
|
tableKey := p.seenTableKeys[ii]
|
||||||
if strings.HasPrefix(groupKey, prefix) {
|
if strings.HasPrefix(tableKey, prefix) {
|
||||||
p.seenGroupKeys = append(p.seenGroupKeys[:ii], p.seenGroupKeys[ii+1:]...)
|
p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...)
|
||||||
} else {
|
} else {
|
||||||
found = (groupKey == key.val)
|
found = (tableKey == key.val)
|
||||||
ii++
|
ii++
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// keep this key name from use by other kinds of assignments
|
// keep this key name from use by other kinds of assignments
|
||||||
if !found {
|
if !found {
|
||||||
p.seenGroupKeys = append(p.seenGroupKeys, key.val)
|
p.seenTableKeys = append(p.seenTableKeys, key.val)
|
||||||
}
|
}
|
||||||
|
|
||||||
// move to next parser state
|
// move to next parser state
|
||||||
@@ -144,58 +142,107 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
|
|||||||
startToken := p.getToken() // discard the [
|
startToken := p.getToken() // discard the [
|
||||||
key := p.getToken()
|
key := p.getToken()
|
||||||
if key.typ != tokenKeyGroup {
|
if key.typ != tokenKeyGroup {
|
||||||
p.raiseError(key, "unexpected token %s, was expecting a key group", key)
|
p.raiseError(key, "unexpected token %s, was expecting a table key", key)
|
||||||
}
|
}
|
||||||
for _, item := range p.seenGroupKeys {
|
for _, item := range p.seenTableKeys {
|
||||||
if item == key.val {
|
if item == key.val {
|
||||||
p.raiseError(key, "duplicated tables")
|
p.raiseError(key, "duplicated tables")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
p.seenGroupKeys = append(p.seenGroupKeys, key.val)
|
p.seenTableKeys = append(p.seenTableKeys, key.val)
|
||||||
keys := strings.Split(key.val, ".")
|
keys, err := parseKey(key.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(key, "invalid table array key: %s", err)
|
||||||
|
}
|
||||||
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
|
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
|
||||||
p.raiseError(key, "%s", err)
|
p.raiseError(key, "%s", err)
|
||||||
}
|
}
|
||||||
p.assume(tokenRightBracket)
|
p.assume(tokenRightBracket)
|
||||||
p.currentGroup = keys
|
p.currentTable = keys
|
||||||
return p.parseStart
|
return p.parseStart
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) parseAssign() tomlParserStateFn {
|
func (p *tomlParser) parseAssign() tomlParserStateFn {
|
||||||
key := p.getToken()
|
key := p.getToken()
|
||||||
p.assume(tokenEqual)
|
p.assume(tokenEqual)
|
||||||
|
|
||||||
|
parsedKey, err := parseKey(key.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(key, "invalid key: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
value := p.parseRvalue()
|
value := p.parseRvalue()
|
||||||
var groupKey []string
|
var tableKey []string
|
||||||
if len(p.currentGroup) > 0 {
|
if len(p.currentTable) > 0 {
|
||||||
groupKey = p.currentGroup
|
tableKey = p.currentTable
|
||||||
} else {
|
} else {
|
||||||
groupKey = []string{}
|
tableKey = []string{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// find the group to assign, looking out for arrays of groups
|
prefixKey := parsedKey[0 : len(parsedKey)-1]
|
||||||
var targetNode *TomlTree
|
tableKey = append(tableKey, prefixKey...)
|
||||||
switch node := p.tree.GetPath(groupKey).(type) {
|
|
||||||
case []*TomlTree:
|
// find the table to assign, looking out for arrays of tables
|
||||||
|
var targetNode *Tree
|
||||||
|
switch node := p.tree.GetPath(tableKey).(type) {
|
||||||
|
case []*Tree:
|
||||||
targetNode = node[len(node)-1]
|
targetNode = node[len(node)-1]
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
targetNode = node
|
targetNode = node
|
||||||
|
case nil:
|
||||||
|
// create intermediate
|
||||||
|
if err := p.tree.createSubTree(tableKey, key.Position); err != nil {
|
||||||
|
p.raiseError(key, "could not create intermediate group: %s", err)
|
||||||
|
}
|
||||||
|
targetNode = p.tree.GetPath(tableKey).(*Tree)
|
||||||
default:
|
default:
|
||||||
p.raiseError(key, "Unknown group type for path: %s",
|
p.raiseError(key, "Unknown table type for path: %s",
|
||||||
strings.Join(groupKey, "."))
|
strings.Join(tableKey, "."))
|
||||||
}
|
}
|
||||||
|
|
||||||
// assign value to the found group
|
// assign value to the found table
|
||||||
localKey := []string{key.val}
|
keyVal := parsedKey[len(parsedKey)-1]
|
||||||
finalKey := append(groupKey, key.val)
|
localKey := []string{keyVal}
|
||||||
|
finalKey := append(tableKey, keyVal)
|
||||||
if targetNode.GetPath(localKey) != nil {
|
if targetNode.GetPath(localKey) != nil {
|
||||||
p.raiseError(key, "The following key was defined twice: %s",
|
p.raiseError(key, "The following key was defined twice: %s",
|
||||||
strings.Join(finalKey, "."))
|
strings.Join(finalKey, "."))
|
||||||
}
|
}
|
||||||
targetNode.values[key.val] = &tomlValue{value, key.Position}
|
var toInsert interface{}
|
||||||
|
|
||||||
|
switch value.(type) {
|
||||||
|
case *Tree, []*Tree:
|
||||||
|
toInsert = value
|
||||||
|
default:
|
||||||
|
toInsert = &tomlValue{value: value, position: key.Position}
|
||||||
|
}
|
||||||
|
targetNode.values[keyVal] = toInsert
|
||||||
return p.parseStart
|
return p.parseStart
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var numberUnderscoreInvalidRegexp *regexp.Regexp
|
||||||
|
var hexNumberUnderscoreInvalidRegexp *regexp.Regexp
|
||||||
|
|
||||||
|
func numberContainsInvalidUnderscore(value string) error {
|
||||||
|
if numberUnderscoreInvalidRegexp.MatchString(value) {
|
||||||
|
return errors.New("invalid use of _ in number")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func hexNumberContainsInvalidUnderscore(value string) error {
|
||||||
|
if hexNumberUnderscoreInvalidRegexp.MatchString(value) {
|
||||||
|
return errors.New("invalid use of _ in hex number")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func cleanupNumberToken(value string) string {
|
||||||
|
cleanedVal := strings.Replace(value, "_", "", -1)
|
||||||
|
return cleanedVal
|
||||||
|
}
|
||||||
|
|
||||||
func (p *tomlParser) parseRvalue() interface{} {
|
func (p *tomlParser) parseRvalue() interface{} {
|
||||||
tok := p.getToken()
|
tok := p.getToken()
|
||||||
if tok == nil || tok.typ == tokenEOF {
|
if tok == nil || tok.typ == tokenEOF {
|
||||||
@@ -209,26 +256,78 @@ func (p *tomlParser) parseRvalue() interface{} {
|
|||||||
return true
|
return true
|
||||||
case tokenFalse:
|
case tokenFalse:
|
||||||
return false
|
return false
|
||||||
|
case tokenInf:
|
||||||
|
if tok.val[0] == '-' {
|
||||||
|
return math.Inf(-1)
|
||||||
|
}
|
||||||
|
return math.Inf(1)
|
||||||
|
case tokenNan:
|
||||||
|
return math.NaN()
|
||||||
case tokenInteger:
|
case tokenInteger:
|
||||||
val, err := strconv.ParseInt(tok.val, 10, 64)
|
cleanedVal := cleanupNumberToken(tok.val)
|
||||||
|
var err error
|
||||||
|
var val int64
|
||||||
|
if len(cleanedVal) >= 3 && cleanedVal[0] == '0' {
|
||||||
|
switch cleanedVal[1] {
|
||||||
|
case 'x':
|
||||||
|
err = hexNumberContainsInvalidUnderscore(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
val, err = strconv.ParseInt(cleanedVal[2:], 16, 64)
|
||||||
|
case 'o':
|
||||||
|
err = numberContainsInvalidUnderscore(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
val, err = strconv.ParseInt(cleanedVal[2:], 8, 64)
|
||||||
|
case 'b':
|
||||||
|
err = numberContainsInvalidUnderscore(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
val, err = strconv.ParseInt(cleanedVal[2:], 2, 64)
|
||||||
|
default:
|
||||||
|
panic("invalid base") // the lexer should catch this first
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
err = numberContainsInvalidUnderscore(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
val, err = strconv.ParseInt(cleanedVal, 10, 64)
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.raiseError(tok, "%s", err)
|
p.raiseError(tok, "%s", err)
|
||||||
}
|
}
|
||||||
return val
|
return val
|
||||||
case tokenFloat:
|
case tokenFloat:
|
||||||
val, err := strconv.ParseFloat(tok.val, 64)
|
err := numberContainsInvalidUnderscore(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
cleanedVal := cleanupNumberToken(tok.val)
|
||||||
|
val, err := strconv.ParseFloat(cleanedVal, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.raiseError(tok, "%s", err)
|
p.raiseError(tok, "%s", err)
|
||||||
}
|
}
|
||||||
return val
|
return val
|
||||||
case tokenDate:
|
case tokenDate:
|
||||||
val, err := time.Parse(time.RFC3339, tok.val)
|
layout := time.RFC3339Nano
|
||||||
|
if !strings.Contains(tok.val, "T") {
|
||||||
|
layout = strings.Replace(layout, "T", " ", 1)
|
||||||
|
}
|
||||||
|
val, err := time.ParseInLocation(layout, tok.val, time.UTC)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.raiseError(tok, "%s", err)
|
p.raiseError(tok, "%s", err)
|
||||||
}
|
}
|
||||||
return val
|
return val
|
||||||
case tokenLeftBracket:
|
case tokenLeftBracket:
|
||||||
return p.parseArray()
|
return p.parseArray()
|
||||||
|
case tokenLeftCurlyBrace:
|
||||||
|
return p.parseInlineTable()
|
||||||
|
case tokenEqual:
|
||||||
|
p.raiseError(tok, "cannot have multiple equals for the same key")
|
||||||
case tokenError:
|
case tokenError:
|
||||||
p.raiseError(tok, "%s", tok)
|
p.raiseError(tok, "%s", tok)
|
||||||
}
|
}
|
||||||
@@ -238,7 +337,51 @@ func (p *tomlParser) parseRvalue() interface{} {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) parseArray() []interface{} {
|
func tokenIsComma(t *token) bool {
|
||||||
|
return t != nil && t.typ == tokenComma
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *tomlParser) parseInlineTable() *Tree {
|
||||||
|
tree := newTree()
|
||||||
|
var previous *token
|
||||||
|
Loop:
|
||||||
|
for {
|
||||||
|
follow := p.peek()
|
||||||
|
if follow == nil || follow.typ == tokenEOF {
|
||||||
|
p.raiseError(follow, "unterminated inline table")
|
||||||
|
}
|
||||||
|
switch follow.typ {
|
||||||
|
case tokenRightCurlyBrace:
|
||||||
|
p.getToken()
|
||||||
|
break Loop
|
||||||
|
case tokenKey, tokenInteger, tokenString:
|
||||||
|
if !tokenIsComma(previous) && previous != nil {
|
||||||
|
p.raiseError(follow, "comma expected between fields in inline table")
|
||||||
|
}
|
||||||
|
key := p.getToken()
|
||||||
|
p.assume(tokenEqual)
|
||||||
|
value := p.parseRvalue()
|
||||||
|
tree.Set(key.val, value)
|
||||||
|
case tokenComma:
|
||||||
|
if previous == nil {
|
||||||
|
p.raiseError(follow, "inline table cannot start with a comma")
|
||||||
|
}
|
||||||
|
if tokenIsComma(previous) {
|
||||||
|
p.raiseError(follow, "need field between two commas in inline table")
|
||||||
|
}
|
||||||
|
p.getToken()
|
||||||
|
default:
|
||||||
|
p.raiseError(follow, "unexpected token type in inline table: %s", follow.String())
|
||||||
|
}
|
||||||
|
previous = follow
|
||||||
|
}
|
||||||
|
if tokenIsComma(previous) {
|
||||||
|
p.raiseError(previous, "trailing comma at the end of inline table")
|
||||||
|
}
|
||||||
|
return tree
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *tomlParser) parseArray() interface{} {
|
||||||
var array []interface{}
|
var array []interface{}
|
||||||
arrayType := reflect.TypeOf(nil)
|
arrayType := reflect.TypeOf(nil)
|
||||||
for {
|
for {
|
||||||
@@ -248,7 +391,7 @@ func (p *tomlParser) parseArray() []interface{} {
|
|||||||
}
|
}
|
||||||
if follow.typ == tokenRightBracket {
|
if follow.typ == tokenRightBracket {
|
||||||
p.getToken()
|
p.getToken()
|
||||||
return array
|
break
|
||||||
}
|
}
|
||||||
val := p.parseRvalue()
|
val := p.parseRvalue()
|
||||||
if arrayType == nil {
|
if arrayType == nil {
|
||||||
@@ -259,7 +402,7 @@ func (p *tomlParser) parseArray() []interface{} {
|
|||||||
}
|
}
|
||||||
array = append(array, val)
|
array = append(array, val)
|
||||||
follow = p.peek()
|
follow = p.peek()
|
||||||
if follow == nil {
|
if follow == nil || follow.typ == tokenEOF {
|
||||||
p.raiseError(follow, "unterminated array")
|
p.raiseError(follow, "unterminated array")
|
||||||
}
|
}
|
||||||
if follow.typ != tokenRightBracket && follow.typ != tokenComma {
|
if follow.typ != tokenRightBracket && follow.typ != tokenComma {
|
||||||
@@ -269,19 +412,35 @@ func (p *tomlParser) parseArray() []interface{} {
|
|||||||
p.getToken()
|
p.getToken()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// An array of Trees is actually an array of inline
|
||||||
|
// tables, which is a shorthand for a table array. If the
|
||||||
|
// array was not converted from []interface{} to []*Tree,
|
||||||
|
// the two notations would not be equivalent.
|
||||||
|
if arrayType == reflect.TypeOf(newTree()) {
|
||||||
|
tomlArray := make([]*Tree, len(array))
|
||||||
|
for i, v := range array {
|
||||||
|
tomlArray[i] = v.(*Tree)
|
||||||
|
}
|
||||||
|
return tomlArray
|
||||||
|
}
|
||||||
return array
|
return array
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseToml(flow chan token) *TomlTree {
|
func parseToml(flow []token) *Tree {
|
||||||
result := newTomlTree()
|
result := newTree()
|
||||||
result.position = Position{1, 1}
|
result.position = Position{1, 1}
|
||||||
parser := &tomlParser{
|
parser := &tomlParser{
|
||||||
|
flowIdx: 0,
|
||||||
flow: flow,
|
flow: flow,
|
||||||
tree: result,
|
tree: result,
|
||||||
tokensBuffer: make([]token, 0),
|
currentTable: make([]string, 0),
|
||||||
currentGroup: make([]string, 0),
|
seenTableKeys: make([]string, 0),
|
||||||
seenGroupKeys: make([]string, 0),
|
|
||||||
}
|
}
|
||||||
parser.run()
|
parser.run()
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d])|_$|^_`)
|
||||||
|
hexNumberUnderscoreInvalidRegexp = regexp.MustCompile(`(^0x_)|([^\da-f]_|_[^\da-f])|_$|^_`)
|
||||||
|
}
|
||||||
|
|||||||
+555
-47
@@ -2,26 +2,35 @@ package toml
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
)
|
)
|
||||||
|
|
||||||
func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interface{}) {
|
func assertSubTree(t *testing.T, path []string, tree *Tree, err error, ref map[string]interface{}) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error("Non-nil error:", err.Error())
|
t.Error("Non-nil error:", err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
for k, v := range ref {
|
for k, v := range ref {
|
||||||
|
nextPath := append(path, k)
|
||||||
|
t.Log("asserting path", nextPath)
|
||||||
// NOTE: directly access key instead of resolve by path
|
// NOTE: directly access key instead of resolve by path
|
||||||
// NOTE: see TestSpecialKV
|
// NOTE: see TestSpecialKV
|
||||||
switch node := tree.GetPath([]string{k}).(type) {
|
switch node := tree.GetPath([]string{k}).(type) {
|
||||||
case []*TomlTree:
|
case []*Tree:
|
||||||
|
t.Log("\tcomparing key", nextPath, "by array iteration")
|
||||||
for idx, item := range node {
|
for idx, item := range node {
|
||||||
assertTree(t, item, err, v.([]map[string]interface{})[idx])
|
assertSubTree(t, nextPath, item, err, v.([]map[string]interface{})[idx])
|
||||||
}
|
}
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
assertTree(t, node, err, v.(map[string]interface{}))
|
t.Log("\tcomparing key", nextPath, "by subtree assestion")
|
||||||
|
assertSubTree(t, nextPath, node, err, v.(map[string]interface{}))
|
||||||
default:
|
default:
|
||||||
|
t.Log("\tcomparing key", nextPath, "by string representation because it's of type", reflect.TypeOf(node))
|
||||||
if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) {
|
if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) {
|
||||||
t.Errorf("was expecting %v at %v but got %v", v, k, node)
|
t.Errorf("was expecting %v at %v but got %v", v, k, node)
|
||||||
}
|
}
|
||||||
@@ -29,8 +38,14 @@ func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interfac
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func assertTree(t *testing.T, tree *Tree, err error, ref map[string]interface{}) {
|
||||||
|
t.Log("Asserting tree:\n", spew.Sdump(tree))
|
||||||
|
assertSubTree(t, []string{}, tree, err, ref)
|
||||||
|
t.Log("Finished tree assertion.")
|
||||||
|
}
|
||||||
|
|
||||||
func TestCreateSubTree(t *testing.T) {
|
func TestCreateSubTree(t *testing.T) {
|
||||||
tree := newTomlTree()
|
tree := newTree()
|
||||||
tree.createSubTree([]string{"a", "b", "c"}, Position{})
|
tree.createSubTree([]string{"a", "b", "c"}, Position{})
|
||||||
tree.Set("a.b.c", 42)
|
tree.Set("a.b.c", 42)
|
||||||
if tree.Get("a.b.c") != 42 {
|
if tree.Get("a.b.c") != 42 {
|
||||||
@@ -51,15 +66,24 @@ func TestSimpleKV(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// NOTE: from the BurntSushi test suite
|
func TestNumberInKey(t *testing.T) {
|
||||||
// NOTE: this test is pure evil due to the embedded '.'
|
tree, err := Load("hello2 = 42")
|
||||||
func TestSpecialKV(t *testing.T) {
|
|
||||||
tree, err := Load("~!@#$^&*()_+-`1234567890[]\\|/?><.,;: = 1")
|
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:": int64(1),
|
"hello2": int64(42),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestIncorrectKeyExtraSquareBracket(t *testing.T) {
|
||||||
|
_, err := Load(`[a]b]
|
||||||
|
zyx = 42`)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Error should have been returned.")
|
||||||
|
}
|
||||||
|
if err.Error() != "(1, 4): parsing error: keys cannot contain ] character" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestSimpleNumbers(t *testing.T) {
|
func TestSimpleNumbers(t *testing.T) {
|
||||||
tree, err := Load("a = +42\nb = -21\nc = +4.2\nd = -2.1")
|
tree, err := Load("a = +42\nb = -21\nc = +4.2\nd = -2.1")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -70,6 +94,116 @@ func TestSimpleNumbers(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestSpecialFloats(t *testing.T) {
|
||||||
|
tree, err := Load(`
|
||||||
|
normalinf = inf
|
||||||
|
plusinf = +inf
|
||||||
|
minusinf = -inf
|
||||||
|
normalnan = nan
|
||||||
|
plusnan = +nan
|
||||||
|
minusnan = -nan
|
||||||
|
`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"normalinf": math.Inf(1),
|
||||||
|
"plusinf": math.Inf(1),
|
||||||
|
"minusinf": math.Inf(-1),
|
||||||
|
"normalnan": math.NaN(),
|
||||||
|
"plusnan": math.NaN(),
|
||||||
|
"minusnan": math.NaN(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHexIntegers(t *testing.T) {
|
||||||
|
tree, err := Load(`a = 0xDEADBEEF`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)})
|
||||||
|
|
||||||
|
tree, err = Load(`a = 0xdeadbeef`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)})
|
||||||
|
|
||||||
|
tree, err = Load(`a = 0xdead_beef`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)})
|
||||||
|
|
||||||
|
_, err = Load(`a = 0x_1`)
|
||||||
|
if err.Error() != "(1, 5): invalid use of _ in hex number" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOctIntegers(t *testing.T) {
|
||||||
|
tree, err := Load(`a = 0o01234567`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{"a": int64(342391)})
|
||||||
|
|
||||||
|
tree, err = Load(`a = 0o755`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{"a": int64(493)})
|
||||||
|
|
||||||
|
_, err = Load(`a = 0o_1`)
|
||||||
|
if err.Error() != "(1, 5): invalid use of _ in number" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBinIntegers(t *testing.T) {
|
||||||
|
tree, err := Load(`a = 0b11010110`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{"a": int64(214)})
|
||||||
|
|
||||||
|
_, err = Load(`a = 0b_1`)
|
||||||
|
if err.Error() != "(1, 5): invalid use of _ in number" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBadIntegerBase(t *testing.T) {
|
||||||
|
_, err := Load(`a = 0k1`)
|
||||||
|
if err.Error() != "(1, 5): unknown number base: k. possible options are x (hex) o (octal) b (binary)" {
|
||||||
|
t.Error("Error should have been returned.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIntegerNoDigit(t *testing.T) {
|
||||||
|
_, err := Load(`a = 0b`)
|
||||||
|
if err.Error() != "(1, 5): number needs at least one digit" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNumbersWithUnderscores(t *testing.T) {
|
||||||
|
tree, err := Load("a = 1_000")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": int64(1000),
|
||||||
|
})
|
||||||
|
|
||||||
|
tree, err = Load("a = 5_349_221")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": int64(5349221),
|
||||||
|
})
|
||||||
|
|
||||||
|
tree, err = Load("a = 1_2_3_4_5")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": int64(12345),
|
||||||
|
})
|
||||||
|
|
||||||
|
tree, err = Load("flt8 = 9_224_617.445_991_228_313")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"flt8": float64(9224617.445991228313),
|
||||||
|
})
|
||||||
|
|
||||||
|
tree, err = Load("flt9 = 1e1_00")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"flt9": float64(1e100),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFloatsWithExponents(t *testing.T) {
|
||||||
|
tree, err := Load("a = 5e+22\nb = 5E+22\nc = -5e+22\nd = -5e-22\ne = 6.626e-34")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": float64(5e+22),
|
||||||
|
"b": float64(5E+22),
|
||||||
|
"c": float64(-5e+22),
|
||||||
|
"d": float64(-5e-22),
|
||||||
|
"e": float64(6.626e-34),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestSimpleDate(t *testing.T) {
|
func TestSimpleDate(t *testing.T) {
|
||||||
tree, err := Load("a = 1979-05-27T07:32:00Z")
|
tree, err := Load("a = 1979-05-27T07:32:00Z")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -77,6 +211,27 @@ func TestSimpleDate(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDateOffset(t *testing.T) {
|
||||||
|
tree, err := Load("a = 1979-05-27T00:32:00-07:00")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": time.Date(1979, time.May, 27, 0, 32, 0, 0, time.FixedZone("", -7*60*60)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateNano(t *testing.T) {
|
||||||
|
tree, err := Load("a = 1979-05-27T00:32:00.999999999-07:00")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": time.Date(1979, time.May, 27, 0, 32, 0, 999999999, time.FixedZone("", -7*60*60)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateSpaceDelimiter(t *testing.T) {
|
||||||
|
tree, err := Load("odt4 = 1979-05-27 07:32:00Z")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"odt4": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestSimpleString(t *testing.T) {
|
func TestSimpleString(t *testing.T) {
|
||||||
tree, err := Load("a = \"hello world\"")
|
tree, err := Load("a = \"hello world\"")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -84,6 +239,43 @@ func TestSimpleString(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestSpaceKey(t *testing.T) {
|
||||||
|
tree, err := Load("\"a b\" = \"hello world\"")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a b": "hello world",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoubleQuotedKey(t *testing.T) {
|
||||||
|
tree, err := Load(`
|
||||||
|
"key" = "a"
|
||||||
|
"\t" = "b"
|
||||||
|
"\U0001F914" = "c"
|
||||||
|
"\u2764" = "d"
|
||||||
|
`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"key": "a",
|
||||||
|
"\t": "b",
|
||||||
|
"\U0001F914": "c",
|
||||||
|
"\u2764": "d",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSingleQuotedKey(t *testing.T) {
|
||||||
|
tree, err := Load(`
|
||||||
|
'key' = "a"
|
||||||
|
'\t' = "b"
|
||||||
|
'\U0001F914' = "c"
|
||||||
|
'\u2764' = "d"
|
||||||
|
`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
`key`: "a",
|
||||||
|
`\t`: "b",
|
||||||
|
`\U0001F914`: "c",
|
||||||
|
`\u2764`: "d",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestStringEscapables(t *testing.T) {
|
func TestStringEscapables(t *testing.T) {
|
||||||
tree, err := Load("a = \"a \\n b\"")
|
tree, err := Load("a = \"a \\n b\"")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -106,6 +298,16 @@ func TestStringEscapables(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestEmptyQuotedString(t *testing.T) {
|
||||||
|
tree, err := Load(`[""]
|
||||||
|
"" = 1`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"": map[string]interface{}{
|
||||||
|
"": int64(1),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestBools(t *testing.T) {
|
func TestBools(t *testing.T) {
|
||||||
tree, err := Load("a = true\nb = false")
|
tree, err := Load("a = true\nb = false")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -127,6 +329,41 @@ func TestNestedKeys(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestNestedQuotedUnicodeKeys(t *testing.T) {
|
||||||
|
tree, err := Load("[ j . \"ʞ\" . l ]\nd = 42")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"j": map[string]interface{}{
|
||||||
|
"ʞ": map[string]interface{}{
|
||||||
|
"l": map[string]interface{}{
|
||||||
|
"d": int64(42),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
tree, err = Load("[ g . h . i ]\nd = 42")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"g": map[string]interface{}{
|
||||||
|
"h": map[string]interface{}{
|
||||||
|
"i": map[string]interface{}{
|
||||||
|
"d": int64(42),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
tree, err = Load("[ d.e.f ]\nk = 42")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"d": map[string]interface{}{
|
||||||
|
"e": map[string]interface{}{
|
||||||
|
"f": map[string]interface{}{
|
||||||
|
"k": int64(42),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestArrayOne(t *testing.T) {
|
func TestArrayOne(t *testing.T) {
|
||||||
tree, err := Load("a = [1]")
|
tree, err := Load("a = [1]")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -163,14 +400,25 @@ func TestArrayMultiline(t *testing.T) {
|
|||||||
func TestArrayNested(t *testing.T) {
|
func TestArrayNested(t *testing.T) {
|
||||||
tree, err := Load("a = [[42, 21], [10]]")
|
tree, err := Load("a = [[42, 21], [10]]")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
"a": [][]int64{[]int64{int64(42), int64(21)}, []int64{int64(10)}},
|
"a": [][]int64{{int64(42), int64(21)}, {int64(10)}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNestedArrayComment(t *testing.T) {
|
||||||
|
tree, err := Load(`
|
||||||
|
someArray = [
|
||||||
|
# does not work
|
||||||
|
["entry1"]
|
||||||
|
]`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"someArray": [][]string{{"entry1"}},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNestedEmptyArrays(t *testing.T) {
|
func TestNestedEmptyArrays(t *testing.T) {
|
||||||
tree, err := Load("a = [[[]]]")
|
tree, err := Load("a = [[[]]]")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
"a": [][][]interface{}{[][]interface{}{[]interface{}{}}},
|
"a": [][][]interface{}{{{}}},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -189,13 +437,25 @@ func TestArrayMixedTypes(t *testing.T) {
|
|||||||
func TestArrayNestedStrings(t *testing.T) {
|
func TestArrayNestedStrings(t *testing.T) {
|
||||||
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
|
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
"data": [][]string{[]string{"gamma", "delta"}, []string{"Foo"}},
|
"data": [][]string{{"gamma", "delta"}, {"Foo"}},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestParseUnknownRvalue(t *testing.T) {
|
||||||
|
_, err := Load("a = !bssss")
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expecting a parse error")
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("a = /b")
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Expecting a parse error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestMissingValue(t *testing.T) {
|
func TestMissingValue(t *testing.T) {
|
||||||
_, err := Load("a = ")
|
_, err := Load("a = ")
|
||||||
if err.Error() != "(1, 4): expecting a value" {
|
if err.Error() != "(1, 5): expecting a value" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -205,6 +465,16 @@ func TestUnterminatedArray(t *testing.T) {
|
|||||||
if err.Error() != "(1, 8): unterminated array" {
|
if err.Error() != "(1, 8): unterminated array" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
_, err = Load("a = [1")
|
||||||
|
if err.Error() != "(1, 7): unterminated array" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("a = [1 2")
|
||||||
|
if err.Error() != "(1, 8): missing comma" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNewlinesInArrays(t *testing.T) {
|
func TestNewlinesInArrays(t *testing.T) {
|
||||||
@@ -228,6 +498,80 @@ func TestArrayWithExtraCommaComment(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestSimpleInlineGroup(t *testing.T) {
|
||||||
|
tree, err := Load("key = {a = 42}")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"key": map[string]interface{}{
|
||||||
|
"a": int64(42),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoubleInlineGroup(t *testing.T) {
|
||||||
|
tree, err := Load("key = {a = 42, b = \"foo\"}")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"key": map[string]interface{}{
|
||||||
|
"a": int64(42),
|
||||||
|
"b": "foo",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExampleInlineGroup(t *testing.T) {
|
||||||
|
tree, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
|
||||||
|
point = { x = 1, y = 2 }`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"name": map[string]interface{}{
|
||||||
|
"first": "Tom",
|
||||||
|
"last": "Preston-Werner",
|
||||||
|
},
|
||||||
|
"point": map[string]interface{}{
|
||||||
|
"x": int64(1),
|
||||||
|
"y": int64(2),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestExampleInlineGroupInArray(t *testing.T) {
|
||||||
|
tree, err := Load(`points = [{ x = 1, y = 2 }]`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"points": []map[string]interface{}{
|
||||||
|
{
|
||||||
|
"x": int64(1),
|
||||||
|
"y": int64(2),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineTableUnterminated(t *testing.T) {
|
||||||
|
_, err := Load("foo = {")
|
||||||
|
if err.Error() != "(1, 8): unterminated inline table" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineTableCommaExpected(t *testing.T) {
|
||||||
|
_, err := Load("foo = {hello = 53 test = foo}")
|
||||||
|
if err.Error() != "(1, 19): comma expected between fields in inline table" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineTableCommaStart(t *testing.T) {
|
||||||
|
_, err := Load("foo = {, hello = 53}")
|
||||||
|
if err.Error() != "(1, 8): inline table cannot start with a comma" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineTableDoubleComma(t *testing.T) {
|
||||||
|
_, err := Load("foo = {hello = 53,, foo = 17}")
|
||||||
|
if err.Error() != "(1, 19): need field between two commas in inline table" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestDuplicateGroups(t *testing.T) {
|
func TestDuplicateGroups(t *testing.T) {
|
||||||
_, err := Load("[foo]\na=2\n[foo]b=3")
|
_, err := Load("[foo]\na=2\n[foo]b=3")
|
||||||
if err.Error() != "(3, 2): duplicated tables" {
|
if err.Error() != "(3, 2): duplicated tables" {
|
||||||
@@ -244,7 +588,7 @@ func TestDuplicateKeys(t *testing.T) {
|
|||||||
|
|
||||||
func TestEmptyIntermediateTable(t *testing.T) {
|
func TestEmptyIntermediateTable(t *testing.T) {
|
||||||
_, err := Load("[foo..bar]")
|
_, err := Load("[foo..bar]")
|
||||||
if err.Error() != "(1, 2): empty intermediate table" {
|
if err.Error() != "(1, 2): invalid table array key: expecting key part after dot" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -265,7 +609,7 @@ func TestImplicitDeclarationBefore(t *testing.T) {
|
|||||||
|
|
||||||
func TestFloatsWithoutLeadingZeros(t *testing.T) {
|
func TestFloatsWithoutLeadingZeros(t *testing.T) {
|
||||||
_, err := Load("a = .42")
|
_, err := Load("a = .42")
|
||||||
if err.Error() != "(1, 4): cannot start float with a dot" {
|
if err.Error() != "(1, 5): cannot start float with a dot" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -277,7 +621,8 @@ func TestFloatsWithoutLeadingZeros(t *testing.T) {
|
|||||||
|
|
||||||
func TestMissingFile(t *testing.T) {
|
func TestMissingFile(t *testing.T) {
|
||||||
_, err := LoadFile("foo.toml")
|
_, err := LoadFile("foo.toml")
|
||||||
if err.Error() != "open foo.toml: no such file or directory" {
|
if err.Error() != "open foo.toml: no such file or directory" &&
|
||||||
|
err.Error() != "open foo.toml: The system cannot find the file specified." {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -318,6 +663,42 @@ func TestParseFile(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestParseFileCRLF(t *testing.T) {
|
||||||
|
tree, err := LoadFile("example-crlf.toml")
|
||||||
|
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"title": "TOML Example",
|
||||||
|
"owner": map[string]interface{}{
|
||||||
|
"name": "Tom Preston-Werner",
|
||||||
|
"organization": "GitHub",
|
||||||
|
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
|
||||||
|
"dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
|
||||||
|
},
|
||||||
|
"database": map[string]interface{}{
|
||||||
|
"server": "192.168.1.1",
|
||||||
|
"ports": []int64{8001, 8001, 8002},
|
||||||
|
"connection_max": 5000,
|
||||||
|
"enabled": true,
|
||||||
|
},
|
||||||
|
"servers": map[string]interface{}{
|
||||||
|
"alpha": map[string]interface{}{
|
||||||
|
"ip": "10.0.0.1",
|
||||||
|
"dc": "eqdc10",
|
||||||
|
},
|
||||||
|
"beta": map[string]interface{}{
|
||||||
|
"ip": "10.0.0.2",
|
||||||
|
"dc": "eqdc10",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"clients": map[string]interface{}{
|
||||||
|
"data": []interface{}{
|
||||||
|
[]string{"gamma", "delta"},
|
||||||
|
[]int64{1, 2},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestParseKeyGroupArray(t *testing.T) {
|
func TestParseKeyGroupArray(t *testing.T) {
|
||||||
tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
|
tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -330,6 +711,40 @@ func TestParseKeyGroupArray(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestParseKeyGroupArrayUnfinished(t *testing.T) {
|
||||||
|
_, err := Load("[[foo.bar]\na = 42")
|
||||||
|
if err.Error() != "(1, 10): was expecting token [[, but got unclosed table array key instead" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("[[foo.[bar]\na = 42")
|
||||||
|
if err.Error() != "(1, 3): unexpected token table array key cannot contain ']', was expecting a table array key" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestParseKeyGroupArrayQueryExample(t *testing.T) {
|
||||||
|
tree, err := Load(`
|
||||||
|
[[book]]
|
||||||
|
title = "The Stand"
|
||||||
|
author = "Stephen King"
|
||||||
|
[[book]]
|
||||||
|
title = "For Whom the Bell Tolls"
|
||||||
|
author = "Ernest Hemmingway"
|
||||||
|
[[book]]
|
||||||
|
title = "Neuromancer"
|
||||||
|
author = "William Gibson"
|
||||||
|
`)
|
||||||
|
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"book": []map[string]interface{}{
|
||||||
|
{"title": "The Stand", "author": "Stephen King"},
|
||||||
|
{"title": "For Whom the Bell Tolls", "author": "Ernest Hemmingway"},
|
||||||
|
{"title": "Neuromancer", "author": "William Gibson"},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestParseKeyGroupArraySpec(t *testing.T) {
|
func TestParseKeyGroupArraySpec(t *testing.T) {
|
||||||
tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
|
tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -340,39 +755,44 @@ func TestParseKeyGroupArraySpec(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestToTomlValue(t *testing.T) {
|
func TestTomlValueStringRepresentation(t *testing.T) {
|
||||||
for idx, item := range []struct {
|
for idx, item := range []struct {
|
||||||
Value interface{}
|
Value interface{}
|
||||||
Expect string
|
Expect string
|
||||||
}{
|
}{
|
||||||
{int64(12345), "12345"},
|
{int64(12345), "12345"},
|
||||||
|
{uint64(50), "50"},
|
||||||
{float64(123.45), "123.45"},
|
{float64(123.45), "123.45"},
|
||||||
{bool(true), "true"},
|
{true, "true"},
|
||||||
{"hello world", "\"hello world\""},
|
{"hello world", "\"hello world\""},
|
||||||
{"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
|
{"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
|
||||||
{"\x05", "\"\\u0005\""},
|
{"\x05", "\"\\u0005\""},
|
||||||
{time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
|
{time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
|
||||||
"1979-05-27T07:32:00Z"},
|
"1979-05-27T07:32:00Z"},
|
||||||
{[]interface{}{"gamma", "delta"},
|
{[]interface{}{"gamma", "delta"},
|
||||||
"[\n \"gamma\",\n \"delta\",\n]"},
|
"[\"gamma\",\"delta\"]"},
|
||||||
|
{nil, ""},
|
||||||
} {
|
} {
|
||||||
result := toTomlValue(item.Value, 0)
|
result, err := tomlValueStringRepresentation(item.Value, "", false)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Test %d - unexpected error: %s", idx, err)
|
||||||
|
}
|
||||||
if result != item.Expect {
|
if result != item.Expect {
|
||||||
t.Errorf("Test %d - got '%s', expected '%s'", idx, result, item.Expect)
|
t.Errorf("Test %d - got '%s', expected '%s'", idx, result, item.Expect)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestToString(t *testing.T) {
|
func TestToStringMapStringString(t *testing.T) {
|
||||||
tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
|
tree, err := TreeFromMap(map[string]interface{}{"m": map[string]interface{}{"v": "abc"}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("Test failed to parse: %v", err)
|
t.Fatalf("unexpected error: %s", err)
|
||||||
return
|
|
||||||
}
|
}
|
||||||
result := tree.ToString()
|
want := "\n[m]\n v = \"abc\"\n"
|
||||||
expected := "\n[foo]\n\n [[foo.bar]]\n a = 42\n\n [[foo.bar]]\n a = 69\n"
|
got := tree.String()
|
||||||
if result != expected {
|
|
||||||
t.Errorf("Expected got '%s', expected '%s'", result, expected)
|
if got != want {
|
||||||
|
t.Errorf("want:\n%q\ngot:\n%q", want, got)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -396,10 +816,10 @@ func TestDocumentPositions(t *testing.T) {
|
|||||||
assertPosition(t,
|
assertPosition(t,
|
||||||
"[foo]\nbar=42\nbaz=69",
|
"[foo]\nbar=42\nbaz=69",
|
||||||
map[string]Position{
|
map[string]Position{
|
||||||
"": Position{1, 1},
|
"": {1, 1},
|
||||||
"foo": Position{1, 1},
|
"foo": {1, 1},
|
||||||
"foo.bar": Position{2, 1},
|
"foo.bar": {2, 1},
|
||||||
"foo.baz": Position{3, 1},
|
"foo.baz": {3, 1},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -407,10 +827,10 @@ func TestDocumentPositionsWithSpaces(t *testing.T) {
|
|||||||
assertPosition(t,
|
assertPosition(t,
|
||||||
" [foo]\n bar=42\n baz=69",
|
" [foo]\n bar=42\n baz=69",
|
||||||
map[string]Position{
|
map[string]Position{
|
||||||
"": Position{1, 1},
|
"": {1, 1},
|
||||||
"foo": Position{1, 3},
|
"foo": {1, 3},
|
||||||
"foo.bar": Position{2, 3},
|
"foo.bar": {2, 3},
|
||||||
"foo.baz": Position{3, 3},
|
"foo.baz": {3, 3},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -418,10 +838,10 @@ func TestDocumentPositionsWithGroupArray(t *testing.T) {
|
|||||||
assertPosition(t,
|
assertPosition(t,
|
||||||
"[[foo]]\nbar=42\nbaz=69",
|
"[[foo]]\nbar=42\nbaz=69",
|
||||||
map[string]Position{
|
map[string]Position{
|
||||||
"": Position{1, 1},
|
"": {1, 1},
|
||||||
"foo": Position{1, 1},
|
"foo": {1, 1},
|
||||||
"foo.bar": Position{2, 1},
|
"foo.bar": {2, 1},
|
||||||
"foo.baz": Position{3, 1},
|
"foo.baz": {3, 1},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -429,10 +849,98 @@ func TestNestedTreePosition(t *testing.T) {
|
|||||||
assertPosition(t,
|
assertPosition(t,
|
||||||
"[foo.bar]\na=42\nb=69",
|
"[foo.bar]\na=42\nb=69",
|
||||||
map[string]Position{
|
map[string]Position{
|
||||||
"": Position{1, 1},
|
"": {1, 1},
|
||||||
"foo": Position{1, 1},
|
"foo": {1, 1},
|
||||||
"foo.bar": Position{1, 1},
|
"foo.bar": {1, 1},
|
||||||
"foo.bar.a": Position{2, 1},
|
"foo.bar.a": {2, 1},
|
||||||
"foo.bar.b": Position{3, 1},
|
"foo.bar.b": {3, 1},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestInvalidGroupArray(t *testing.T) {
|
||||||
|
_, err := Load("[table#key]\nanswer = 42")
|
||||||
|
if err == nil {
|
||||||
|
t.Error("Should error")
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("[foo.[bar]\na = 42")
|
||||||
|
if err.Error() != "(1, 2): unexpected token table key cannot contain ']', was expecting a table key" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoubleEqual(t *testing.T) {
|
||||||
|
_, err := Load("foo= = 2")
|
||||||
|
if err.Error() != "(1, 6): cannot have multiple equals for the same key" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGroupArrayReassign(t *testing.T) {
|
||||||
|
_, err := Load("[hello]\n[[hello]]")
|
||||||
|
if err.Error() != "(2, 3): key \"hello\" is already assigned and not of type table array" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidFloatParsing(t *testing.T) {
|
||||||
|
_, err := Load("a=1e_2")
|
||||||
|
if err.Error() != "(1, 3): invalid use of _ in number" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("a=1e2_")
|
||||||
|
if err.Error() != "(1, 3): invalid use of _ in number" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("a=1__2")
|
||||||
|
if err.Error() != "(1, 3): invalid use of _ in number" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load("a=_1_2")
|
||||||
|
if err.Error() != "(1, 3): cannot start number with underscore" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMapKeyIsNum(t *testing.T) {
|
||||||
|
_, err := Load("table={2018=1,2019=2}")
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should be passed")
|
||||||
|
}
|
||||||
|
_, err = Load(`table={"2018"=1,"2019"=2}`)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should be passed")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDottedKeys(t *testing.T) {
|
||||||
|
tree, err := Load(`
|
||||||
|
name = "Orange"
|
||||||
|
physical.color = "orange"
|
||||||
|
physical.shape = "round"
|
||||||
|
site."google.com" = true`)
|
||||||
|
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"name": "Orange",
|
||||||
|
"physical": map[string]interface{}{
|
||||||
|
"color": "orange",
|
||||||
|
"shape": "round",
|
||||||
|
},
|
||||||
|
"site": map[string]interface{}{
|
||||||
|
"google.com": true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDottedKeyEmptyGroup(t *testing.T) {
|
||||||
|
_, err := Load(`a..b = true`)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("should return an error")
|
||||||
|
}
|
||||||
|
if err.Error() != "(1, 1): invalid key: expecting key part after dot" {
|
||||||
|
t.Fatalf("invalid error message: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
+8
-10
@@ -6,13 +6,11 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
)
|
)
|
||||||
|
|
||||||
/*
|
// Position of a document element within a TOML document.
|
||||||
Position of a document element within a TOML document.
|
//
|
||||||
|
// Line and Col are both 1-indexed positions for the element's line number and
|
||||||
Line and Col are both 1-indexed positions for the element's line number and
|
// column number, respectively. Values of zero or less will cause Invalid(),
|
||||||
column number, respectively. Values of zero or less will cause Invalid(),
|
// to return true.
|
||||||
to return true.
|
|
||||||
*/
|
|
||||||
type Position struct {
|
type Position struct {
|
||||||
Line int // line within the document
|
Line int // line within the document
|
||||||
Col int // column within the line
|
Col int // column within the line
|
||||||
@@ -20,12 +18,12 @@ type Position struct {
|
|||||||
|
|
||||||
// String representation of the position.
|
// String representation of the position.
|
||||||
// Displays 1-indexed line and column numbers.
|
// Displays 1-indexed line and column numbers.
|
||||||
func (p *Position) String() string {
|
func (p Position) String() string {
|
||||||
return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
|
return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns whether or not the position is valid (i.e. with negative or
|
// Invalid returns whether or not the position is valid (i.e. with negative or
|
||||||
// null values)
|
// null values)
|
||||||
func (p *Position) Invalid() bool {
|
func (p Position) Invalid() bool {
|
||||||
return p.Line <= 0 || p.Col <= 0
|
return p.Line <= 0 || p.Col <= 0
|
||||||
}
|
}
|
||||||
|
|||||||
+3
-3
@@ -18,9 +18,9 @@ func TestPositionString(t *testing.T) {
|
|||||||
|
|
||||||
func TestInvalid(t *testing.T) {
|
func TestInvalid(t *testing.T) {
|
||||||
for i, v := range []Position{
|
for i, v := range []Position{
|
||||||
Position{0, 1234},
|
{0, 1234},
|
||||||
Position{1234, 0},
|
{1234, 0},
|
||||||
Position{0, 0},
|
{0, 0},
|
||||||
} {
|
} {
|
||||||
if !v.Invalid() {
|
if !v.Invalid() {
|
||||||
t.Errorf("Position at %v is valid: %v", i, v)
|
t.Errorf("Position at %v is valid: %v", i, v)
|
||||||
|
|||||||
@@ -1,142 +0,0 @@
|
|||||||
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Type of a user-defined filter function, for use with Query.SetFilter().
|
|
||||||
//
|
|
||||||
// The return value of the function must indicate if 'node' is to be included
|
|
||||||
// at this stage of the TOML path. Returning true will include the node, and
|
|
||||||
// returning false will exclude it.
|
|
||||||
//
|
|
||||||
// NOTE: Care should be taken to write script callbacks such that they are safe
|
|
||||||
// to use from multiple goroutines.
|
|
||||||
type NodeFilterFn func(node interface{}) bool
|
|
||||||
|
|
||||||
// The result of Executing a Query
|
|
||||||
type QueryResult struct {
|
|
||||||
items []interface{}
|
|
||||||
positions []Position
|
|
||||||
}
|
|
||||||
|
|
||||||
// appends a value/position pair to the result set
|
|
||||||
func (r *QueryResult) appendResult(node interface{}, pos Position) {
|
|
||||||
r.items = append(r.items, node)
|
|
||||||
r.positions = append(r.positions, pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set of values within a QueryResult. The order of values is not guaranteed
|
|
||||||
// to be in document order, and may be different each time a query is executed.
|
|
||||||
func (r *QueryResult) Values() []interface{} {
|
|
||||||
return r.items
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set of positions for values within a QueryResult. Each index in Positions()
|
|
||||||
// corresponds to the entry in Value() of the same index.
|
|
||||||
func (r *QueryResult) Positions() []Position {
|
|
||||||
return r.positions
|
|
||||||
}
|
|
||||||
|
|
||||||
// runtime context for executing query paths
|
|
||||||
type queryContext struct {
|
|
||||||
result *QueryResult
|
|
||||||
filters *map[string]NodeFilterFn
|
|
||||||
lastPosition Position
|
|
||||||
}
|
|
||||||
|
|
||||||
// generic path functor interface
|
|
||||||
type pathFn interface {
|
|
||||||
setNext(next pathFn)
|
|
||||||
call(node interface{}, ctx *queryContext)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A Query is the representation of a compiled TOML path. A Query is safe
|
|
||||||
// for concurrent use by multiple goroutines.
|
|
||||||
type Query struct {
|
|
||||||
root pathFn
|
|
||||||
tail pathFn
|
|
||||||
filters *map[string]NodeFilterFn
|
|
||||||
}
|
|
||||||
|
|
||||||
func newQuery() *Query {
|
|
||||||
return &Query{
|
|
||||||
root: nil,
|
|
||||||
tail: nil,
|
|
||||||
filters: &defaultFilterFunctions,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (q *Query) appendPath(next pathFn) {
|
|
||||||
if q.root == nil {
|
|
||||||
q.root = next
|
|
||||||
} else {
|
|
||||||
q.tail.setNext(next)
|
|
||||||
}
|
|
||||||
q.tail = next
|
|
||||||
next.setNext(newTerminatingFn()) // init the next functor
|
|
||||||
}
|
|
||||||
|
|
||||||
// Compiles a TOML path expression. The returned Query can be used to match
|
|
||||||
// elements within a TomlTree and its descendants.
|
|
||||||
func CompileQuery(path string) (*Query, error) {
|
|
||||||
return parseQuery(lexQuery(path))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Executes a query against a TomlTree, and returns the result of the query.
|
|
||||||
func (q *Query) Execute(tree *TomlTree) *QueryResult {
|
|
||||||
result := &QueryResult{
|
|
||||||
items: []interface{}{},
|
|
||||||
positions: []Position{},
|
|
||||||
}
|
|
||||||
if q.root == nil {
|
|
||||||
result.appendResult(tree, tree.GetPosition(""))
|
|
||||||
} else {
|
|
||||||
ctx := &queryContext{
|
|
||||||
result: result,
|
|
||||||
filters: q.filters,
|
|
||||||
}
|
|
||||||
q.root.call(tree, ctx)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sets a user-defined filter function. These may be used inside "?(..)" query
|
|
||||||
// expressions to filter TOML document elements within a query.
|
|
||||||
func (q *Query) SetFilter(name string, fn NodeFilterFn) {
|
|
||||||
if q.filters == &defaultFilterFunctions {
|
|
||||||
// clone the static table
|
|
||||||
q.filters = &map[string]NodeFilterFn{}
|
|
||||||
for k, v := range defaultFilterFunctions {
|
|
||||||
(*q.filters)[k] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(*q.filters)[name] = fn
|
|
||||||
}
|
|
||||||
|
|
||||||
var defaultFilterFunctions = map[string]NodeFilterFn{
|
|
||||||
"tree": func(node interface{}) bool {
|
|
||||||
_, ok := node.(*TomlTree)
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
"int": func(node interface{}) bool {
|
|
||||||
_, ok := node.(int64)
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
"float": func(node interface{}) bool {
|
|
||||||
_, ok := node.(float64)
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
"string": func(node interface{}) bool {
|
|
||||||
_, ok := node.(string)
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
"time": func(node interface{}) bool {
|
|
||||||
_, ok := node.(time.Time)
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
"bool": func(node interface{}) bool {
|
|
||||||
_, ok := node.(bool)
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
}
|
|
||||||
+175
@@ -0,0 +1,175 @@
|
|||||||
|
// Package query performs JSONPath-like queries on a TOML document.
|
||||||
|
//
|
||||||
|
// The query path implementation is based loosely on the JSONPath specification:
|
||||||
|
// http://goessner.net/articles/JsonPath/.
|
||||||
|
//
|
||||||
|
// The idea behind a query path is to allow quick access to any element, or set
|
||||||
|
// of elements within TOML document, with a single expression.
|
||||||
|
//
|
||||||
|
// result, err := query.CompileAndExecute("$.foo.bar.baz", tree)
|
||||||
|
//
|
||||||
|
// This is roughly equivalent to:
|
||||||
|
//
|
||||||
|
// next := tree.Get("foo")
|
||||||
|
// if next != nil {
|
||||||
|
// next = next.Get("bar")
|
||||||
|
// if next != nil {
|
||||||
|
// next = next.Get("baz")
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// result := next
|
||||||
|
//
|
||||||
|
// err is nil if any parsing exception occurs.
|
||||||
|
//
|
||||||
|
// If no node in the tree matches the query, result will simply contain an empty list of
|
||||||
|
// items.
|
||||||
|
//
|
||||||
|
// As illustrated above, the query path is much more efficient, especially since
|
||||||
|
// the structure of the TOML file can vary. Rather than making assumptions about
|
||||||
|
// a document's structure, a query allows the programmer to make structured
|
||||||
|
// requests into the document, and get zero or more values as a result.
|
||||||
|
//
|
||||||
|
// Query syntax
|
||||||
|
//
|
||||||
|
// The syntax of a query begins with a root token, followed by any number
|
||||||
|
// sub-expressions:
|
||||||
|
//
|
||||||
|
// $
|
||||||
|
// Root of the TOML tree. This must always come first.
|
||||||
|
// .name
|
||||||
|
// Selects child of this node, where 'name' is a TOML key
|
||||||
|
// name.
|
||||||
|
// ['name']
|
||||||
|
// Selects child of this node, where 'name' is a string
|
||||||
|
// containing a TOML key name.
|
||||||
|
// [index]
|
||||||
|
// Selcts child array element at 'index'.
|
||||||
|
// ..expr
|
||||||
|
// Recursively selects all children, filtered by an a union,
|
||||||
|
// index, or slice expression.
|
||||||
|
// ..*
|
||||||
|
// Recursive selection of all nodes at this point in the
|
||||||
|
// tree.
|
||||||
|
// .*
|
||||||
|
// Selects all children of the current node.
|
||||||
|
// [expr,expr]
|
||||||
|
// Union operator - a logical 'or' grouping of two or more
|
||||||
|
// sub-expressions: index, key name, or filter.
|
||||||
|
// [start:end:step]
|
||||||
|
// Slice operator - selects array elements from start to
|
||||||
|
// end-1, at the given step. All three arguments are
|
||||||
|
// optional.
|
||||||
|
// [?(filter)]
|
||||||
|
// Named filter expression - the function 'filter' is
|
||||||
|
// used to filter children at this node.
|
||||||
|
//
|
||||||
|
// Query Indexes And Slices
|
||||||
|
//
|
||||||
|
// Index expressions perform no bounds checking, and will contribute no
|
||||||
|
// values to the result set if the provided index or index range is invalid.
|
||||||
|
// Negative indexes represent values from the end of the array, counting backwards.
|
||||||
|
//
|
||||||
|
// // select the last index of the array named 'foo'
|
||||||
|
// query.CompileAndExecute("$.foo[-1]", tree)
|
||||||
|
//
|
||||||
|
// Slice expressions are supported, by using ':' to separate a start/end index pair.
|
||||||
|
//
|
||||||
|
// // select up to the first five elements in the array
|
||||||
|
// query.CompileAndExecute("$.foo[0:5]", tree)
|
||||||
|
//
|
||||||
|
// Slice expressions also allow negative indexes for the start and stop
|
||||||
|
// arguments.
|
||||||
|
//
|
||||||
|
// // select all array elements.
|
||||||
|
// query.CompileAndExecute("$.foo[0:-1]", tree)
|
||||||
|
//
|
||||||
|
// Slice expressions may have an optional stride/step parameter:
|
||||||
|
//
|
||||||
|
// // select every other element
|
||||||
|
// query.CompileAndExecute("$.foo[0:-1:2]", tree)
|
||||||
|
//
|
||||||
|
// Slice start and end parameters are also optional:
|
||||||
|
//
|
||||||
|
// // these are all equivalent and select all the values in the array
|
||||||
|
// query.CompileAndExecute("$.foo[:]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[0:]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[:-1]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[0:-1:]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[::1]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[0::1]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[:-1:1]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[0:-1:1]", tree)
|
||||||
|
//
|
||||||
|
// Query Filters
|
||||||
|
//
|
||||||
|
// Query filters are used within a Union [,] or single Filter [] expression.
|
||||||
|
// A filter only allows nodes that qualify through to the next expression,
|
||||||
|
// and/or into the result set.
|
||||||
|
//
|
||||||
|
// // returns children of foo that are permitted by the 'bar' filter.
|
||||||
|
// query.CompileAndExecute("$.foo[?(bar)]", tree)
|
||||||
|
//
|
||||||
|
// There are several filters provided with the library:
|
||||||
|
//
|
||||||
|
// tree
|
||||||
|
// Allows nodes of type Tree.
|
||||||
|
// int
|
||||||
|
// Allows nodes of type int64.
|
||||||
|
// float
|
||||||
|
// Allows nodes of type float64.
|
||||||
|
// string
|
||||||
|
// Allows nodes of type string.
|
||||||
|
// time
|
||||||
|
// Allows nodes of type time.Time.
|
||||||
|
// bool
|
||||||
|
// Allows nodes of type bool.
|
||||||
|
//
|
||||||
|
// Query Results
|
||||||
|
//
|
||||||
|
// An executed query returns a Result object. This contains the nodes
|
||||||
|
// in the TOML tree that qualify the query expression. Position information
|
||||||
|
// is also available for each value in the set.
|
||||||
|
//
|
||||||
|
// // display the results of a query
|
||||||
|
// results := query.CompileAndExecute("$.foo.bar.baz", tree)
|
||||||
|
// for idx, value := results.Values() {
|
||||||
|
// fmt.Println("%v: %v", results.Positions()[idx], value)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Compiled Queries
|
||||||
|
//
|
||||||
|
// Queries may be executed directly on a Tree object, or compiled ahead
|
||||||
|
// of time and executed discretely. The former is more convenient, but has the
|
||||||
|
// penalty of having to recompile the query expression each time.
|
||||||
|
//
|
||||||
|
// // basic query
|
||||||
|
// results := query.CompileAndExecute("$.foo.bar.baz", tree)
|
||||||
|
//
|
||||||
|
// // compiled query
|
||||||
|
// query, err := toml.Compile("$.foo.bar.baz")
|
||||||
|
// results := query.Execute(tree)
|
||||||
|
//
|
||||||
|
// // run the compiled query again on a different tree
|
||||||
|
// moreResults := query.Execute(anotherTree)
|
||||||
|
//
|
||||||
|
// User Defined Query Filters
|
||||||
|
//
|
||||||
|
// Filter expressions may also be user defined by using the SetFilter()
|
||||||
|
// function on the Query object. The function must return true/false, which
|
||||||
|
// signifies if the passed node is kept or discarded, respectively.
|
||||||
|
//
|
||||||
|
// // create a query that references a user-defined filter
|
||||||
|
// query, _ := query.Compile("$[?(bazOnly)]")
|
||||||
|
//
|
||||||
|
// // define the filter, and assign it to the query
|
||||||
|
// query.SetFilter("bazOnly", func(node interface{}) bool{
|
||||||
|
// if tree, ok := node.(*Tree); ok {
|
||||||
|
// return tree.Has("baz")
|
||||||
|
// }
|
||||||
|
// return false // reject all other node types
|
||||||
|
// })
|
||||||
|
//
|
||||||
|
// // run the query
|
||||||
|
// query.Execute(tree)
|
||||||
|
//
|
||||||
|
package query
|
||||||
@@ -3,10 +3,11 @@
|
|||||||
// Written using the principles developed by Rob Pike in
|
// Written using the principles developed by Rob Pike in
|
||||||
// http://www.youtube.com/watch?v=HxaD_trXwRE
|
// http://www.youtube.com/watch?v=HxaD_trXwRE
|
||||||
|
|
||||||
package toml
|
package query
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
@@ -54,7 +55,7 @@ func (l *queryLexer) nextStart() {
|
|||||||
|
|
||||||
func (l *queryLexer) emit(t tokenType) {
|
func (l *queryLexer) emit(t tokenType) {
|
||||||
l.tokens <- token{
|
l.tokens <- token{
|
||||||
Position: Position{l.line, l.col},
|
Position: toml.Position{Line: l.line, Col: l.col},
|
||||||
typ: t,
|
typ: t,
|
||||||
val: l.input[l.start:l.pos],
|
val: l.input[l.start:l.pos],
|
||||||
}
|
}
|
||||||
@@ -63,7 +64,7 @@ func (l *queryLexer) emit(t tokenType) {
|
|||||||
|
|
||||||
func (l *queryLexer) emitWithValue(t tokenType, value string) {
|
func (l *queryLexer) emitWithValue(t tokenType, value string) {
|
||||||
l.tokens <- token{
|
l.tokens <- token{
|
||||||
Position: Position{l.line, l.col},
|
Position: toml.Position{Line: l.line, Col: l.col},
|
||||||
typ: t,
|
typ: t,
|
||||||
val: value,
|
val: value,
|
||||||
}
|
}
|
||||||
@@ -91,7 +92,7 @@ func (l *queryLexer) backup() {
|
|||||||
|
|
||||||
func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
|
func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
|
||||||
l.tokens <- token{
|
l.tokens <- token{
|
||||||
Position: Position{l.line, l.col},
|
Position: toml.Position{Line: l.line, Col: l.col},
|
||||||
typ: tokenError,
|
typ: tokenError,
|
||||||
val: fmt.Sprintf(format, args...),
|
val: fmt.Sprintf(format, args...),
|
||||||
}
|
}
|
||||||
@@ -105,7 +106,7 @@ func (l *queryLexer) peek() rune {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (l *queryLexer) accept(valid string) bool {
|
func (l *queryLexer) accept(valid string) bool {
|
||||||
if strings.IndexRune(valid, l.next()) >= 0 {
|
if strings.ContainsRune(valid, l.next()) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
l.backup()
|
l.backup()
|
||||||
@@ -272,6 +273,23 @@ func (l *queryLexer) lexString() queryLexStateFn {
|
|||||||
return l.errorf("invalid unicode escape: \\u" + code)
|
return l.errorf("invalid unicode escape: \\u" + code)
|
||||||
}
|
}
|
||||||
growingString += string(rune(intcode))
|
growingString += string(rune(intcode))
|
||||||
|
} else if l.follow("\\U") {
|
||||||
|
l.pos += 2
|
||||||
|
code := ""
|
||||||
|
for i := 0; i < 8; i++ {
|
||||||
|
c := l.peek()
|
||||||
|
l.pos++
|
||||||
|
if !isHexDigit(c) {
|
||||||
|
return l.errorf("unfinished unicode escape")
|
||||||
|
}
|
||||||
|
code = code + string(c)
|
||||||
|
}
|
||||||
|
l.pos--
|
||||||
|
intcode, err := strconv.ParseInt(code, 16, 32)
|
||||||
|
if err != nil {
|
||||||
|
return l.errorf("invalid unicode escape: \\u" + code)
|
||||||
|
}
|
||||||
|
growingString += string(rune(intcode))
|
||||||
} else if l.follow("\\") {
|
} else if l.follow("\\") {
|
||||||
l.pos++
|
l.pos++
|
||||||
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
|
return l.errorf("invalid escape sequence: \\" + string(l.peek()))
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
package query
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func testQLFlow(t *testing.T, input string, expectedFlow []token) {
|
||||||
|
ch := lexQuery(input)
|
||||||
|
for idx, expected := range expectedFlow {
|
||||||
|
token := <-ch
|
||||||
|
if token != expected {
|
||||||
|
t.Log("While testing #", idx, ":", input)
|
||||||
|
t.Log("compared (got)", token, "to (expected)", expected)
|
||||||
|
t.Log("\tvalue:", token.val, "<->", expected.val)
|
||||||
|
t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
|
||||||
|
t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
|
||||||
|
t.Log("\tline:", token.Line, "<->", expected.Line)
|
||||||
|
t.Log("\tcolumn:", token.Col, "<->", expected.Col)
|
||||||
|
t.Log("compared", token, "to", expected)
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tok, ok := <-ch
|
||||||
|
if ok {
|
||||||
|
t.Log("channel is not closed!")
|
||||||
|
t.Log(len(ch)+1, "tokens remaining:")
|
||||||
|
|
||||||
|
t.Log("token ->", tok)
|
||||||
|
for token := range ch {
|
||||||
|
t.Log("token ->", token)
|
||||||
|
}
|
||||||
|
t.FailNow()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexSpecialChars(t *testing.T) {
|
||||||
|
testQLFlow(t, " .$[]..()?*", []token{
|
||||||
|
{toml.Position{1, 2}, tokenDot, "."},
|
||||||
|
{toml.Position{1, 3}, tokenDollar, "$"},
|
||||||
|
{toml.Position{1, 4}, tokenLeftBracket, "["},
|
||||||
|
{toml.Position{1, 5}, tokenRightBracket, "]"},
|
||||||
|
{toml.Position{1, 6}, tokenDotDot, ".."},
|
||||||
|
{toml.Position{1, 8}, tokenLeftParen, "("},
|
||||||
|
{toml.Position{1, 9}, tokenRightParen, ")"},
|
||||||
|
{toml.Position{1, 10}, tokenQuestion, "?"},
|
||||||
|
{toml.Position{1, 11}, tokenStar, "*"},
|
||||||
|
{toml.Position{1, 12}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexString(t *testing.T) {
|
||||||
|
testQLFlow(t, "'foo\n'", []token{
|
||||||
|
{toml.Position{1, 2}, tokenString, "foo\n"},
|
||||||
|
{toml.Position{2, 2}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexDoubleString(t *testing.T) {
|
||||||
|
testQLFlow(t, `"bar"`, []token{
|
||||||
|
{toml.Position{1, 2}, tokenString, "bar"},
|
||||||
|
{toml.Position{1, 6}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexStringEscapes(t *testing.T) {
|
||||||
|
testQLFlow(t, `"foo \" \' \b \f \/ \t \r \\ \u03A9 \U00012345 \n bar"`, []token{
|
||||||
|
{toml.Position{1, 2}, tokenString, "foo \" ' \b \f / \t \r \\ \u03A9 \U00012345 \n bar"},
|
||||||
|
{toml.Position{1, 55}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexStringUnfinishedUnicode4(t *testing.T) {
|
||||||
|
testQLFlow(t, `"\u000"`, []token{
|
||||||
|
{toml.Position{1, 2}, tokenError, "unfinished unicode escape"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexStringUnfinishedUnicode8(t *testing.T) {
|
||||||
|
testQLFlow(t, `"\U0000"`, []token{
|
||||||
|
{toml.Position{1, 2}, tokenError, "unfinished unicode escape"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexStringInvalidEscape(t *testing.T) {
|
||||||
|
testQLFlow(t, `"\x"`, []token{
|
||||||
|
{toml.Position{1, 2}, tokenError, "invalid escape sequence: \\x"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexStringUnfinished(t *testing.T) {
|
||||||
|
testQLFlow(t, `"bar`, []token{
|
||||||
|
{toml.Position{1, 2}, tokenError, "unclosed string"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexKey(t *testing.T) {
|
||||||
|
testQLFlow(t, "foo", []token{
|
||||||
|
{toml.Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{toml.Position{1, 4}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexRecurse(t *testing.T) {
|
||||||
|
testQLFlow(t, "$..*", []token{
|
||||||
|
{toml.Position{1, 1}, tokenDollar, "$"},
|
||||||
|
{toml.Position{1, 2}, tokenDotDot, ".."},
|
||||||
|
{toml.Position{1, 4}, tokenStar, "*"},
|
||||||
|
{toml.Position{1, 5}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexBracketKey(t *testing.T) {
|
||||||
|
testQLFlow(t, "$[foo]", []token{
|
||||||
|
{toml.Position{1, 1}, tokenDollar, "$"},
|
||||||
|
{toml.Position{1, 2}, tokenLeftBracket, "["},
|
||||||
|
{toml.Position{1, 3}, tokenKey, "foo"},
|
||||||
|
{toml.Position{1, 6}, tokenRightBracket, "]"},
|
||||||
|
{toml.Position{1, 7}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexSpace(t *testing.T) {
|
||||||
|
testQLFlow(t, "foo bar baz", []token{
|
||||||
|
{toml.Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{toml.Position{1, 5}, tokenKey, "bar"},
|
||||||
|
{toml.Position{1, 9}, tokenKey, "baz"},
|
||||||
|
{toml.Position{1, 12}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInteger(t *testing.T) {
|
||||||
|
testQLFlow(t, "100 +200 -300", []token{
|
||||||
|
{toml.Position{1, 1}, tokenInteger, "100"},
|
||||||
|
{toml.Position{1, 5}, tokenInteger, "+200"},
|
||||||
|
{toml.Position{1, 10}, tokenInteger, "-300"},
|
||||||
|
{toml.Position{1, 14}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexFloat(t *testing.T) {
|
||||||
|
testQLFlow(t, "100.0 +200.0 -300.0", []token{
|
||||||
|
{toml.Position{1, 1}, tokenFloat, "100.0"},
|
||||||
|
{toml.Position{1, 7}, tokenFloat, "+200.0"},
|
||||||
|
{toml.Position{1, 14}, tokenFloat, "-300.0"},
|
||||||
|
{toml.Position{1, 20}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexFloatWithMultipleDots(t *testing.T) {
|
||||||
|
testQLFlow(t, "4.2.", []token{
|
||||||
|
{toml.Position{1, 1}, tokenError, "cannot have two dots in one float"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexFloatLeadingDot(t *testing.T) {
|
||||||
|
testQLFlow(t, "+.1", []token{
|
||||||
|
{toml.Position{1, 1}, tokenError, "cannot start float with a dot"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexFloatWithTrailingDot(t *testing.T) {
|
||||||
|
testQLFlow(t, "42.", []token{
|
||||||
|
{toml.Position{1, 1}, tokenError, "float cannot end with a dot"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexNumberWithoutDigit(t *testing.T) {
|
||||||
|
testQLFlow(t, "+", []token{
|
||||||
|
{toml.Position{1, 1}, tokenError, "no digit in that number"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexUnknown(t *testing.T) {
|
||||||
|
testQLFlow(t, "^", []token{
|
||||||
|
{toml.Position{1, 1}, tokenError, "unexpected char: '94'"},
|
||||||
|
})
|
||||||
|
}
|
||||||
+58
-53
@@ -1,27 +1,10 @@
|
|||||||
package toml
|
package query
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
// support function to set positions for tomlValues
|
|
||||||
// NOTE: this is done to allow ctx.lastPosition to indicate the start of any
|
|
||||||
// values returned by the query engines
|
|
||||||
func tomlValueCheck(node interface{}, ctx *queryContext) interface{} {
|
|
||||||
switch castNode := node.(type) {
|
|
||||||
case *tomlValue:
|
|
||||||
ctx.lastPosition = castNode.position
|
|
||||||
return castNode.value
|
|
||||||
case []*TomlTree:
|
|
||||||
if len(castNode) > 0 {
|
|
||||||
ctx.lastPosition = castNode[0].position
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
default:
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// base match
|
// base match
|
||||||
type matchBase struct {
|
type matchBase struct {
|
||||||
next pathFn
|
next pathFn
|
||||||
@@ -45,15 +28,7 @@ func (f *terminatingFn) setNext(next pathFn) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
|
func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
|
||||||
switch castNode := node.(type) {
|
ctx.result.appendResult(node, ctx.lastPosition)
|
||||||
case *TomlTree:
|
|
||||||
ctx.result.appendResult(node, castNode.position)
|
|
||||||
case *tomlValue:
|
|
||||||
ctx.result.appendResult(node, castNode.position)
|
|
||||||
default:
|
|
||||||
// use last position for scalars
|
|
||||||
ctx.result.appendResult(node, ctx.lastPosition)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// match single key
|
// match single key
|
||||||
@@ -67,9 +42,18 @@ func newMatchKeyFn(name string) *matchKeyFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
|
||||||
if tree, ok := node.(*TomlTree); ok {
|
if array, ok := node.([]*toml.Tree); ok {
|
||||||
item := tree.values[f.Name]
|
for _, tree := range array {
|
||||||
|
item := tree.Get(f.Name)
|
||||||
|
if item != nil {
|
||||||
|
ctx.lastPosition = tree.GetPosition(f.Name)
|
||||||
|
f.next.call(item, ctx)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if tree, ok := node.(*toml.Tree); ok {
|
||||||
|
item := tree.Get(f.Name)
|
||||||
if item != nil {
|
if item != nil {
|
||||||
|
ctx.lastPosition = tree.GetPosition(f.Name)
|
||||||
f.next.call(item, ctx)
|
f.next.call(item, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -86,8 +70,13 @@ func newMatchIndexFn(idx int) *matchIndexFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
|
||||||
if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
|
if arr, ok := node.([]interface{}); ok {
|
||||||
if f.Idx < len(arr) && f.Idx >= 0 {
|
if f.Idx < len(arr) && f.Idx >= 0 {
|
||||||
|
if treesArray, ok := node.([]*toml.Tree); ok {
|
||||||
|
if len(treesArray) > 0 {
|
||||||
|
ctx.lastPosition = treesArray[0].Position()
|
||||||
|
}
|
||||||
|
}
|
||||||
f.next.call(arr[f.Idx], ctx)
|
f.next.call(arr[f.Idx], ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -104,7 +93,7 @@ func newMatchSliceFn(start, end, step int) *matchSliceFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
|
||||||
if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
|
if arr, ok := node.([]interface{}); ok {
|
||||||
// adjust indexes for negative values, reverse ordering
|
// adjust indexes for negative values, reverse ordering
|
||||||
realStart, realEnd := f.Start, f.End
|
realStart, realEnd := f.Start, f.End
|
||||||
if realStart < 0 {
|
if realStart < 0 {
|
||||||
@@ -118,6 +107,11 @@ func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
|
|||||||
}
|
}
|
||||||
// loop and gather
|
// loop and gather
|
||||||
for idx := realStart; idx < realEnd; idx += f.Step {
|
for idx := realStart; idx < realEnd; idx += f.Step {
|
||||||
|
if treesArray, ok := node.([]*toml.Tree); ok {
|
||||||
|
if len(treesArray) > 0 {
|
||||||
|
ctx.lastPosition = treesArray[0].Position()
|
||||||
|
}
|
||||||
|
}
|
||||||
f.next.call(arr[idx], ctx)
|
f.next.call(arr[idx], ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -133,8 +127,10 @@ func newMatchAnyFn() *matchAnyFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
|
||||||
if tree, ok := node.(*TomlTree); ok {
|
if tree, ok := node.(*toml.Tree); ok {
|
||||||
for _, v := range tree.values {
|
for _, k := range tree.Keys() {
|
||||||
|
v := tree.Get(k)
|
||||||
|
ctx.lastPosition = tree.GetPosition(k)
|
||||||
f.next.call(v, ctx)
|
f.next.call(v, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -167,21 +163,25 @@ func newMatchRecursiveFn() *matchRecursiveFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
|
||||||
if tree, ok := node.(*TomlTree); ok {
|
originalPosition := ctx.lastPosition
|
||||||
var visit func(tree *TomlTree)
|
if tree, ok := node.(*toml.Tree); ok {
|
||||||
visit = func(tree *TomlTree) {
|
var visit func(tree *toml.Tree)
|
||||||
for _, v := range tree.values {
|
visit = func(tree *toml.Tree) {
|
||||||
|
for _, k := range tree.Keys() {
|
||||||
|
v := tree.Get(k)
|
||||||
|
ctx.lastPosition = tree.GetPosition(k)
|
||||||
f.next.call(v, ctx)
|
f.next.call(v, ctx)
|
||||||
switch node := v.(type) {
|
switch node := v.(type) {
|
||||||
case *TomlTree:
|
case *toml.Tree:
|
||||||
visit(node)
|
visit(node)
|
||||||
case []*TomlTree:
|
case []*toml.Tree:
|
||||||
for _, subtree := range node {
|
for _, subtree := range node {
|
||||||
visit(subtree)
|
visit(subtree)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
ctx.lastPosition = originalPosition
|
||||||
f.next.call(tree, ctx)
|
f.next.call(tree, ctx)
|
||||||
visit(tree)
|
visit(tree)
|
||||||
}
|
}
|
||||||
@@ -190,11 +190,11 @@ func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
|
|||||||
// match based on an externally provided functional filter
|
// match based on an externally provided functional filter
|
||||||
type matchFilterFn struct {
|
type matchFilterFn struct {
|
||||||
matchBase
|
matchBase
|
||||||
Pos Position
|
Pos toml.Position
|
||||||
Name string
|
Name string
|
||||||
}
|
}
|
||||||
|
|
||||||
func newMatchFilterFn(name string, pos Position) *matchFilterFn {
|
func newMatchFilterFn(name string, pos toml.Position) *matchFilterFn {
|
||||||
return &matchFilterFn{Name: name, Pos: pos}
|
return &matchFilterFn{Name: name, Pos: pos}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -202,19 +202,24 @@ func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
|
|||||||
fn, ok := (*ctx.filters)[f.Name]
|
fn, ok := (*ctx.filters)[f.Name]
|
||||||
if !ok {
|
if !ok {
|
||||||
panic(fmt.Sprintf("%s: query context does not have filter '%s'",
|
panic(fmt.Sprintf("%s: query context does not have filter '%s'",
|
||||||
f.Pos, f.Name))
|
f.Pos.String(), f.Name))
|
||||||
}
|
}
|
||||||
switch castNode := tomlValueCheck(node, ctx).(type) {
|
switch castNode := node.(type) {
|
||||||
case *TomlTree:
|
case *toml.Tree:
|
||||||
for _, v := range castNode.values {
|
for _, k := range castNode.Keys() {
|
||||||
if tv, ok := v.(*tomlValue); ok {
|
v := castNode.Get(k)
|
||||||
if fn(tv.value) {
|
if fn(v) {
|
||||||
f.next.call(v, ctx)
|
ctx.lastPosition = castNode.GetPosition(k)
|
||||||
}
|
f.next.call(v, ctx)
|
||||||
} else {
|
}
|
||||||
if fn(v) {
|
}
|
||||||
f.next.call(v, ctx)
|
case []*toml.Tree:
|
||||||
|
for _, v := range castNode {
|
||||||
|
if fn(v) {
|
||||||
|
if len(castNode) > 0 {
|
||||||
|
ctx.lastPosition = castNode[0].Position()
|
||||||
}
|
}
|
||||||
|
f.next.call(v, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case []interface{}:
|
case []interface{}:
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
package toml
|
package query
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"github.com/pelletier/go-toml"
|
||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -110,7 +110,7 @@ func TestPathSliceStart(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123:]",
|
"$[123:]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, math.MaxInt64, 1),
|
newMatchSliceFn(123, maxInt, 1),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -134,7 +134,7 @@ func TestPathSliceStartStep(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123::7]",
|
"$[123::7]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, math.MaxInt64, 7),
|
newMatchSliceFn(123, maxInt, 7),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -150,7 +150,7 @@ func TestPathSliceStep(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[::7]",
|
"$[::7]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(0, math.MaxInt64, 7),
|
newMatchSliceFn(0, maxInt, 7),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -195,8 +195,8 @@ func TestPathFilterExpr(t *testing.T) {
|
|||||||
"$[?('foo'),?(bar)]",
|
"$[?('foo'),?(bar)]",
|
||||||
buildPath(
|
buildPath(
|
||||||
&matchUnionFn{[]pathFn{
|
&matchUnionFn{[]pathFn{
|
||||||
newMatchFilterFn("foo", Position{}),
|
newMatchFilterFn("foo", toml.Position{}),
|
||||||
newMatchFilterFn("bar", Position{}),
|
newMatchFilterFn("bar", toml.Position{}),
|
||||||
}},
|
}},
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
@@ -5,13 +5,14 @@
|
|||||||
https://code.google.com/p/json-path/
|
https://code.google.com/p/json-path/
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package toml
|
package query
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const maxInt = int(^uint(0) >> 1)
|
||||||
|
|
||||||
type queryParser struct {
|
type queryParser struct {
|
||||||
flow chan token
|
flow chan token
|
||||||
tokensBuffer []token
|
tokensBuffer []token
|
||||||
@@ -137,7 +138,6 @@ func (p *queryParser) parseMatchExpr() queryParserStateFn {
|
|||||||
return nil // allow EOF at this stage
|
return nil // allow EOF at this stage
|
||||||
}
|
}
|
||||||
return p.parseError(tok, "expected match expression")
|
return p.parseError(tok, "expected match expression")
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *queryParser) parseBracketExpr() queryParserStateFn {
|
func (p *queryParser) parseBracketExpr() queryParserStateFn {
|
||||||
@@ -203,7 +203,7 @@ loop: // labeled loop for easy breaking
|
|||||||
|
|
||||||
func (p *queryParser) parseSliceExpr() queryParserStateFn {
|
func (p *queryParser) parseSliceExpr() queryParserStateFn {
|
||||||
// init slice to grab all elements
|
// init slice to grab all elements
|
||||||
start, end, step := 0, math.MaxInt64, 1
|
start, end, step := 0, maxInt, 1
|
||||||
|
|
||||||
// parse optional start
|
// parse optional start
|
||||||
tok := p.getToken()
|
tok := p.getToken()
|
||||||
@@ -253,7 +253,7 @@ func (p *queryParser) parseFilterExpr() queryParserStateFn {
|
|||||||
}
|
}
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
if tok.typ != tokenKey && tok.typ != tokenString {
|
if tok.typ != tokenKey && tok.typ != tokenString {
|
||||||
return p.parseError(tok, "expected key or string for filter funciton name")
|
return p.parseError(tok, "expected key or string for filter function name")
|
||||||
}
|
}
|
||||||
name := tok.val
|
name := tok.val
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
package toml
|
package query
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
@@ -7,19 +7,19 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
type queryTestNode struct {
|
type queryTestNode struct {
|
||||||
value interface{}
|
value interface{}
|
||||||
position Position
|
position toml.Position
|
||||||
}
|
}
|
||||||
|
|
||||||
func valueString(root interface{}) string {
|
func valueString(root interface{}) string {
|
||||||
result := "" //fmt.Sprintf("%T:", root)
|
result := "" //fmt.Sprintf("%T:", root)
|
||||||
switch node := root.(type) {
|
switch node := root.(type) {
|
||||||
case *tomlValue:
|
case *Result:
|
||||||
return valueString(node.value)
|
|
||||||
case *QueryResult:
|
|
||||||
items := []string{}
|
items := []string{}
|
||||||
for i, v := range node.Values() {
|
for i, v := range node.Values() {
|
||||||
items = append(items, fmt.Sprintf("%s:%s",
|
items = append(items, fmt.Sprintf("%s:%s",
|
||||||
@@ -37,7 +37,7 @@ func valueString(root interface{}) string {
|
|||||||
}
|
}
|
||||||
sort.Strings(items)
|
sort.Strings(items)
|
||||||
result = "[" + strings.Join(items, ", ") + "]"
|
result = "[" + strings.Join(items, ", ") + "]"
|
||||||
case *TomlTree:
|
case *toml.Tree:
|
||||||
// workaround for unreliable map key ordering
|
// workaround for unreliable map key ordering
|
||||||
items := []string{}
|
items := []string{}
|
||||||
for _, k := range node.Keys() {
|
for _, k := range node.Keys() {
|
||||||
@@ -78,13 +78,13 @@ func assertValue(t *testing.T, result, ref interface{}) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func assertQueryPositions(t *testing.T, toml, query string, ref []interface{}) {
|
func assertQueryPositions(t *testing.T, tomlDoc string, query string, ref []interface{}) {
|
||||||
tree, err := Load(toml)
|
tree, err := toml.Load(tomlDoc)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("Non-nil toml parse error: %v", err)
|
t.Errorf("Non-nil toml parse error: %v", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
q, err := CompileQuery(query)
|
q, err := Compile(query)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
return
|
return
|
||||||
@@ -101,7 +101,7 @@ func TestQueryRoot(t *testing.T) {
|
|||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(42),
|
"a": int64(42),
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -112,7 +112,7 @@ func TestQueryKey(t *testing.T) {
|
|||||||
"$.foo.a",
|
"$.foo.a",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(42), Position{2, 1},
|
int64(42), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -123,7 +123,7 @@ func TestQueryKeyString(t *testing.T) {
|
|||||||
"$.foo['a']",
|
"$.foo['a']",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(42), Position{2, 1},
|
int64(42), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -134,7 +134,7 @@ func TestQueryIndex(t *testing.T) {
|
|||||||
"$.foo.a[5]",
|
"$.foo.a[5]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(6), Position{2, 1},
|
int64(6), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -145,19 +145,19 @@ func TestQuerySliceRange(t *testing.T) {
|
|||||||
"$.foo.a[0:5]",
|
"$.foo.a[0:5]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(1), Position{2, 1},
|
int64(1), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(2), Position{2, 1},
|
int64(2), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(3), Position{2, 1},
|
int64(3), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(4), Position{2, 1},
|
int64(4), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(5), Position{2, 1},
|
int64(5), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -168,13 +168,13 @@ func TestQuerySliceStep(t *testing.T) {
|
|||||||
"$.foo.a[0:5:2]",
|
"$.foo.a[0:5:2]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(1), Position{2, 1},
|
int64(1), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(3), Position{2, 1},
|
int64(3), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(5), Position{2, 1},
|
int64(5), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -188,13 +188,13 @@ func TestQueryAny(t *testing.T) {
|
|||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(1),
|
"a": int64(1),
|
||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(3),
|
"a": int64(3),
|
||||||
"b": int64(4),
|
"b": int64(4),
|
||||||
}, Position{4, 1},
|
}, toml.Position{4, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -207,19 +207,19 @@ func TestQueryUnionSimple(t *testing.T) {
|
|||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(1),
|
"a": int64(1),
|
||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(3),
|
"a": int64(3),
|
||||||
"b": int64(4),
|
"b": int64(4),
|
||||||
}, Position{4, 1},
|
}, toml.Position{4, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(5),
|
"a": int64(5),
|
||||||
"b": int64(6),
|
"b": int64(6),
|
||||||
}, Position{7, 1},
|
}, toml.Position{7, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -249,7 +249,7 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"b": int64(6),
|
"b": int64(6),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
@@ -257,19 +257,19 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"a": int64(1),
|
"a": int64(1),
|
||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
},
|
},
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(1),
|
"a": int64(1),
|
||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(1), Position{2, 1},
|
int64(1), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(2), Position{3, 1},
|
int64(2), toml.Position{3, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
@@ -277,19 +277,19 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"a": int64(3),
|
"a": int64(3),
|
||||||
"b": int64(4),
|
"b": int64(4),
|
||||||
},
|
},
|
||||||
}, Position{4, 1},
|
}, toml.Position{4, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(3),
|
"a": int64(3),
|
||||||
"b": int64(4),
|
"b": int64(4),
|
||||||
}, Position{4, 1},
|
}, toml.Position{4, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(3), Position{5, 1},
|
int64(3), toml.Position{5, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(4), Position{6, 1},
|
int64(4), toml.Position{6, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
@@ -297,19 +297,19 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"a": int64(5),
|
"a": int64(5),
|
||||||
"b": int64(6),
|
"b": int64(6),
|
||||||
},
|
},
|
||||||
}, Position{7, 1},
|
}, toml.Position{7, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(5),
|
"a": int64(5),
|
||||||
"b": int64(6),
|
"b": int64(6),
|
||||||
}, Position{7, 1},
|
}, toml.Position{7, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(5), Position{8, 1},
|
int64(5), toml.Position{8, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(6), Position{9, 1},
|
int64(6), toml.Position{9, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -325,31 +325,31 @@ func TestQueryRecursionUnionSimple(t *testing.T) {
|
|||||||
"a": int64(1),
|
"a": int64(1),
|
||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
},
|
},
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(3),
|
"a": int64(3),
|
||||||
"b": int64(4),
|
"b": int64(4),
|
||||||
}, Position{4, 1},
|
}, toml.Position{4, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(1),
|
"a": int64(1),
|
||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
}, Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"a": int64(5),
|
"a": int64(5),
|
||||||
"b": int64(6),
|
"b": int64(6),
|
||||||
}, Position{7, 1},
|
}, toml.Position{7, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryFilterFn(t *testing.T) {
|
func TestQueryFilterFn(t *testing.T) {
|
||||||
buff, err := ioutil.ReadFile("example.toml")
|
buff, err := ioutil.ReadFile("../example.toml")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
return
|
return
|
||||||
@@ -359,16 +359,16 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"$..[?(int)]",
|
"$..[?(int)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(8001), Position{13, 1},
|
int64(8001), toml.Position{13, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(8001), Position{13, 1},
|
int64(8001), toml.Position{13, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(8002), Position{13, 1},
|
int64(8002), toml.Position{13, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(5000), Position{14, 1},
|
int64(5000), toml.Position{14, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -376,39 +376,38 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"$..[?(string)]",
|
"$..[?(string)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"TOML Example", Position{3, 1},
|
"TOML Example", toml.Position{3, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"Tom Preston-Werner", Position{6, 1},
|
"Tom Preston-Werner", toml.Position{6, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"GitHub", Position{7, 1},
|
"GitHub", toml.Position{7, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"GitHub Cofounder & CEO\nLikes tater tots and beer.",
|
"GitHub Cofounder & CEO\nLikes tater tots and beer.",
|
||||||
Position{8, 1},
|
toml.Position{8, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"192.168.1.1", Position{12, 1},
|
"192.168.1.1", toml.Position{12, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"10.0.0.1", Position{21, 3},
|
"10.0.0.1", toml.Position{21, 3},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"eqdc10", Position{22, 3},
|
"eqdc10", toml.Position{22, 3},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"10.0.0.2", Position{25, 3},
|
"10.0.0.2", toml.Position{25, 3},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
"eqdc10", Position{26, 3},
|
"eqdc10", toml.Position{26, 3},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
assertQueryPositions(t, string(buff),
|
assertQueryPositions(t, string(buff),
|
||||||
"$..[?(float)]",
|
"$..[?(float)]",
|
||||||
[]interface{}{
|
[]interface{}{ // no float values in document
|
||||||
// no float values in document
|
|
||||||
})
|
})
|
||||||
|
|
||||||
tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
|
tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
|
||||||
@@ -421,7 +420,7 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"organization": "GitHub",
|
"organization": "GitHub",
|
||||||
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
|
"bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
|
||||||
"dob": tv,
|
"dob": tv,
|
||||||
}, Position{5, 1},
|
}, toml.Position{5, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
@@ -429,7 +428,7 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"ports": []interface{}{int64(8001), int64(8001), int64(8002)},
|
"ports": []interface{}{int64(8001), int64(8001), int64(8002)},
|
||||||
"connection_max": int64(5000),
|
"connection_max": int64(5000),
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
}, Position{11, 1},
|
}, toml.Position{11, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
@@ -441,19 +440,19 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"ip": "10.0.0.2",
|
"ip": "10.0.0.2",
|
||||||
"dc": "eqdc10",
|
"dc": "eqdc10",
|
||||||
},
|
},
|
||||||
}, Position{17, 1},
|
}, toml.Position{17, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"ip": "10.0.0.1",
|
"ip": "10.0.0.1",
|
||||||
"dc": "eqdc10",
|
"dc": "eqdc10",
|
||||||
}, Position{20, 3},
|
}, toml.Position{20, 3},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"ip": "10.0.0.2",
|
"ip": "10.0.0.2",
|
||||||
"dc": "eqdc10",
|
"dc": "eqdc10",
|
||||||
}, Position{24, 3},
|
}, toml.Position{24, 3},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
@@ -461,7 +460,7 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
[]interface{}{"gamma", "delta"},
|
[]interface{}{"gamma", "delta"},
|
||||||
[]interface{}{int64(1), int64(2)},
|
[]interface{}{int64(1), int64(2)},
|
||||||
},
|
},
|
||||||
}, Position{28, 1},
|
}, toml.Position{28, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -469,7 +468,7 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"$..[?(time)]",
|
"$..[?(time)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
tv, Position{9, 1},
|
tv, toml.Position{9, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -477,7 +476,7 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
"$..[?(bool)]",
|
"$..[?(bool)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
true, Position{15, 1},
|
true, toml.Position{15, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
+158
@@ -0,0 +1,158 @@
|
|||||||
|
package query
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NodeFilterFn represents a user-defined filter function, for use with
|
||||||
|
// Query.SetFilter().
|
||||||
|
//
|
||||||
|
// The return value of the function must indicate if 'node' is to be included
|
||||||
|
// at this stage of the TOML path. Returning true will include the node, and
|
||||||
|
// returning false will exclude it.
|
||||||
|
//
|
||||||
|
// NOTE: Care should be taken to write script callbacks such that they are safe
|
||||||
|
// to use from multiple goroutines.
|
||||||
|
type NodeFilterFn func(node interface{}) bool
|
||||||
|
|
||||||
|
// Result is the result of Executing a Query.
|
||||||
|
type Result struct {
|
||||||
|
items []interface{}
|
||||||
|
positions []toml.Position
|
||||||
|
}
|
||||||
|
|
||||||
|
// appends a value/position pair to the result set.
|
||||||
|
func (r *Result) appendResult(node interface{}, pos toml.Position) {
|
||||||
|
r.items = append(r.items, node)
|
||||||
|
r.positions = append(r.positions, pos)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Values is a set of values within a Result. The order of values is not
|
||||||
|
// guaranteed to be in document order, and may be different each time a query is
|
||||||
|
// executed.
|
||||||
|
func (r Result) Values() []interface{} {
|
||||||
|
return r.items
|
||||||
|
}
|
||||||
|
|
||||||
|
// Positions is a set of positions for values within a Result. Each index
|
||||||
|
// in Positions() corresponds to the entry in Value() of the same index.
|
||||||
|
func (r Result) Positions() []toml.Position {
|
||||||
|
return r.positions
|
||||||
|
}
|
||||||
|
|
||||||
|
// runtime context for executing query paths
|
||||||
|
type queryContext struct {
|
||||||
|
result *Result
|
||||||
|
filters *map[string]NodeFilterFn
|
||||||
|
lastPosition toml.Position
|
||||||
|
}
|
||||||
|
|
||||||
|
// generic path functor interface
|
||||||
|
type pathFn interface {
|
||||||
|
setNext(next pathFn)
|
||||||
|
// it is the caller's responsibility to set the ctx.lastPosition before invoking call()
|
||||||
|
// node can be one of: *toml.Tree, []*toml.Tree, or a scalar
|
||||||
|
call(node interface{}, ctx *queryContext)
|
||||||
|
}
|
||||||
|
|
||||||
|
// A Query is the representation of a compiled TOML path. A Query is safe
|
||||||
|
// for concurrent use by multiple goroutines.
|
||||||
|
type Query struct {
|
||||||
|
root pathFn
|
||||||
|
tail pathFn
|
||||||
|
filters *map[string]NodeFilterFn
|
||||||
|
}
|
||||||
|
|
||||||
|
func newQuery() *Query {
|
||||||
|
return &Query{
|
||||||
|
root: nil,
|
||||||
|
tail: nil,
|
||||||
|
filters: &defaultFilterFunctions,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Query) appendPath(next pathFn) {
|
||||||
|
if q.root == nil {
|
||||||
|
q.root = next
|
||||||
|
} else {
|
||||||
|
q.tail.setNext(next)
|
||||||
|
}
|
||||||
|
q.tail = next
|
||||||
|
next.setNext(newTerminatingFn()) // init the next functor
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile compiles a TOML path expression. The returned Query can be used
|
||||||
|
// to match elements within a Tree and its descendants. See Execute.
|
||||||
|
func Compile(path string) (*Query, error) {
|
||||||
|
return parseQuery(lexQuery(path))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute executes a query against a Tree, and returns the result of the query.
|
||||||
|
func (q *Query) Execute(tree *toml.Tree) *Result {
|
||||||
|
result := &Result{
|
||||||
|
items: []interface{}{},
|
||||||
|
positions: []toml.Position{},
|
||||||
|
}
|
||||||
|
if q.root == nil {
|
||||||
|
result.appendResult(tree, tree.GetPosition(""))
|
||||||
|
} else {
|
||||||
|
ctx := &queryContext{
|
||||||
|
result: result,
|
||||||
|
filters: q.filters,
|
||||||
|
}
|
||||||
|
ctx.lastPosition = tree.Position()
|
||||||
|
q.root.call(tree, ctx)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompileAndExecute is a shorthand for Compile(path) followed by Execute(tree).
|
||||||
|
func CompileAndExecute(path string, tree *toml.Tree) (*Result, error) {
|
||||||
|
query, err := Compile(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return query.Execute(tree), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetFilter sets a user-defined filter function. These may be used inside
|
||||||
|
// "?(..)" query expressions to filter TOML document elements within a query.
|
||||||
|
func (q *Query) SetFilter(name string, fn NodeFilterFn) {
|
||||||
|
if q.filters == &defaultFilterFunctions {
|
||||||
|
// clone the static table
|
||||||
|
q.filters = &map[string]NodeFilterFn{}
|
||||||
|
for k, v := range defaultFilterFunctions {
|
||||||
|
(*q.filters)[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(*q.filters)[name] = fn
|
||||||
|
}
|
||||||
|
|
||||||
|
var defaultFilterFunctions = map[string]NodeFilterFn{
|
||||||
|
"tree": func(node interface{}) bool {
|
||||||
|
_, ok := node.(*toml.Tree)
|
||||||
|
return ok
|
||||||
|
},
|
||||||
|
"int": func(node interface{}) bool {
|
||||||
|
_, ok := node.(int64)
|
||||||
|
return ok
|
||||||
|
},
|
||||||
|
"float": func(node interface{}) bool {
|
||||||
|
_, ok := node.(float64)
|
||||||
|
return ok
|
||||||
|
},
|
||||||
|
"string": func(node interface{}) bool {
|
||||||
|
_, ok := node.(string)
|
||||||
|
return ok
|
||||||
|
},
|
||||||
|
"time": func(node interface{}) bool {
|
||||||
|
_, ok := node.(time.Time)
|
||||||
|
return ok
|
||||||
|
},
|
||||||
|
"bool": func(node interface{}) bool {
|
||||||
|
_, ok := node.(bool)
|
||||||
|
return ok
|
||||||
|
},
|
||||||
|
}
|
||||||
@@ -0,0 +1,157 @@
|
|||||||
|
package query
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
|
||||||
|
if len(array) != len(objects) {
|
||||||
|
t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, o := range objects {
|
||||||
|
found := false
|
||||||
|
for _, a := range array {
|
||||||
|
if a == o {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
t.Fatal(o, "not found in array", array)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryExample(t *testing.T) {
|
||||||
|
config, _ := toml.Load(`
|
||||||
|
[[book]]
|
||||||
|
title = "The Stand"
|
||||||
|
author = "Stephen King"
|
||||||
|
[[book]]
|
||||||
|
title = "For Whom the Bell Tolls"
|
||||||
|
author = "Ernest Hemmingway"
|
||||||
|
[[book]]
|
||||||
|
title = "Neuromancer"
|
||||||
|
author = "William Gibson"
|
||||||
|
`)
|
||||||
|
authors, err := CompileAndExecute("$.book.author", config)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("unexpected error:", err)
|
||||||
|
}
|
||||||
|
names := authors.Values()
|
||||||
|
if len(names) != 3 {
|
||||||
|
t.Fatalf("query should return 3 names but returned %d", len(names))
|
||||||
|
}
|
||||||
|
assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryReadmeExample(t *testing.T) {
|
||||||
|
config, _ := toml.Load(`
|
||||||
|
[postgres]
|
||||||
|
user = "pelletier"
|
||||||
|
password = "mypassword"
|
||||||
|
`)
|
||||||
|
|
||||||
|
query, err := Compile("$..[user,password]")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("unexpected error:", err)
|
||||||
|
}
|
||||||
|
results := query.Execute(config)
|
||||||
|
values := results.Values()
|
||||||
|
if len(values) != 2 {
|
||||||
|
t.Fatalf("query should return 2 values but returned %d", len(values))
|
||||||
|
}
|
||||||
|
assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryPathNotPresent(t *testing.T) {
|
||||||
|
config, _ := toml.Load(`a = "hello"`)
|
||||||
|
query, err := Compile("$.foo.bar")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("unexpected error:", err)
|
||||||
|
}
|
||||||
|
results := query.Execute(config)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("err should be nil. got %s instead", err)
|
||||||
|
}
|
||||||
|
if len(results.items) != 0 {
|
||||||
|
t.Fatalf("no items should be matched. %d matched instead", len(results.items))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleNodeFilterFn_filterExample() {
|
||||||
|
tree, _ := toml.Load(`
|
||||||
|
[struct_one]
|
||||||
|
foo = "foo"
|
||||||
|
bar = "bar"
|
||||||
|
|
||||||
|
[struct_two]
|
||||||
|
baz = "baz"
|
||||||
|
gorf = "gorf"
|
||||||
|
`)
|
||||||
|
|
||||||
|
// create a query that references a user-defined-filter
|
||||||
|
query, _ := Compile("$[?(bazOnly)]")
|
||||||
|
|
||||||
|
// define the filter, and assign it to the query
|
||||||
|
query.SetFilter("bazOnly", func(node interface{}) bool {
|
||||||
|
if tree, ok := node.(*toml.Tree); ok {
|
||||||
|
return tree.Has("baz")
|
||||||
|
}
|
||||||
|
return false // reject all other node types
|
||||||
|
})
|
||||||
|
|
||||||
|
// results contain only the 'struct_two' Tree
|
||||||
|
query.Execute(tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleQuery_queryExample() {
|
||||||
|
config, _ := toml.Load(`
|
||||||
|
[[book]]
|
||||||
|
title = "The Stand"
|
||||||
|
author = "Stephen King"
|
||||||
|
[[book]]
|
||||||
|
title = "For Whom the Bell Tolls"
|
||||||
|
author = "Ernest Hemmingway"
|
||||||
|
[[book]]
|
||||||
|
title = "Neuromancer"
|
||||||
|
author = "William Gibson"
|
||||||
|
`)
|
||||||
|
|
||||||
|
// find and print all the authors in the document
|
||||||
|
query, _ := Compile("$.book.author")
|
||||||
|
authors := query.Execute(config)
|
||||||
|
for _, name := range authors.Values() {
|
||||||
|
fmt.Println(name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlQuery(t *testing.T) {
|
||||||
|
tree, err := toml.Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
|
||||||
|
if err != nil {
|
||||||
|
t.Error(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
query, err := Compile("$.foo.bar")
|
||||||
|
if err != nil {
|
||||||
|
t.Error(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
result := query.Execute(tree)
|
||||||
|
values := result.Values()
|
||||||
|
if len(values) != 1 {
|
||||||
|
t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
|
||||||
|
}
|
||||||
|
|
||||||
|
if tt, ok := values[0].(*toml.Tree); !ok {
|
||||||
|
t.Errorf("Expected type of Tree: %T", values[0])
|
||||||
|
} else if tt.Get("a") != int64(1) {
|
||||||
|
t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
|
||||||
|
} else if tt.Get("b") != int64(2) {
|
||||||
|
t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
|
||||||
|
}
|
||||||
|
}
|
||||||
+106
@@ -0,0 +1,106 @@
|
|||||||
|
package query
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
"strconv"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Define tokens
|
||||||
|
type tokenType int
|
||||||
|
|
||||||
|
const (
|
||||||
|
eof = -(iota + 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
tokenError tokenType = iota
|
||||||
|
tokenEOF
|
||||||
|
tokenKey
|
||||||
|
tokenString
|
||||||
|
tokenInteger
|
||||||
|
tokenFloat
|
||||||
|
tokenLeftBracket
|
||||||
|
tokenRightBracket
|
||||||
|
tokenLeftParen
|
||||||
|
tokenRightParen
|
||||||
|
tokenComma
|
||||||
|
tokenColon
|
||||||
|
tokenDollar
|
||||||
|
tokenStar
|
||||||
|
tokenQuestion
|
||||||
|
tokenDot
|
||||||
|
tokenDotDot
|
||||||
|
)
|
||||||
|
|
||||||
|
var tokenTypeNames = []string{
|
||||||
|
"Error",
|
||||||
|
"EOF",
|
||||||
|
"Key",
|
||||||
|
"String",
|
||||||
|
"Integer",
|
||||||
|
"Float",
|
||||||
|
"[",
|
||||||
|
"]",
|
||||||
|
"(",
|
||||||
|
")",
|
||||||
|
",",
|
||||||
|
":",
|
||||||
|
"$",
|
||||||
|
"*",
|
||||||
|
"?",
|
||||||
|
".",
|
||||||
|
"..",
|
||||||
|
}
|
||||||
|
|
||||||
|
type token struct {
|
||||||
|
toml.Position
|
||||||
|
typ tokenType
|
||||||
|
val string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tt tokenType) String() string {
|
||||||
|
idx := int(tt)
|
||||||
|
if idx < len(tokenTypeNames) {
|
||||||
|
return tokenTypeNames[idx]
|
||||||
|
}
|
||||||
|
return "Unknown"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t token) Int() int {
|
||||||
|
if result, err := strconv.Atoi(t.val); err != nil {
|
||||||
|
panic(err)
|
||||||
|
} else {
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t token) String() string {
|
||||||
|
switch t.typ {
|
||||||
|
case tokenEOF:
|
||||||
|
return "EOF"
|
||||||
|
case tokenError:
|
||||||
|
return t.val
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf("%q", t.val)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSpace(r rune) bool {
|
||||||
|
return r == ' ' || r == '\t'
|
||||||
|
}
|
||||||
|
|
||||||
|
func isAlphanumeric(r rune) bool {
|
||||||
|
return unicode.IsLetter(r) || r == '_'
|
||||||
|
}
|
||||||
|
|
||||||
|
func isDigit(r rune) bool {
|
||||||
|
return unicode.IsNumber(r)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isHexDigit(r rune) bool {
|
||||||
|
return isDigit(r) ||
|
||||||
|
(r >= 'a' && r <= 'f') ||
|
||||||
|
(r >= 'A' && r <= 'F')
|
||||||
|
}
|
||||||
@@ -1,97 +0,0 @@
|
|||||||
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func testQLFlow(t *testing.T, input string, expectedFlow []token) {
|
|
||||||
ch := lexQuery(input)
|
|
||||||
for idx, expected := range expectedFlow {
|
|
||||||
token := <-ch
|
|
||||||
if token != expected {
|
|
||||||
t.Log("While testing #", idx, ":", input)
|
|
||||||
t.Log("compared", token, "to", expected)
|
|
||||||
t.Log(token.val, "<->", expected.val)
|
|
||||||
t.Log(token.typ, "<->", expected.typ)
|
|
||||||
t.Log(token.Line, "<->", expected.Line)
|
|
||||||
t.Log(token.Col, "<->", expected.Col)
|
|
||||||
t.FailNow()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tok, ok := <-ch
|
|
||||||
if ok {
|
|
||||||
t.Log("channel is not closed!")
|
|
||||||
t.Log(len(ch)+1, "tokens remaining:")
|
|
||||||
|
|
||||||
t.Log("token ->", tok)
|
|
||||||
for token := range ch {
|
|
||||||
t.Log("token ->", token)
|
|
||||||
}
|
|
||||||
t.FailNow()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexSpecialChars(t *testing.T) {
|
|
||||||
testQLFlow(t, " .$[]..()?*", []token{
|
|
||||||
token{Position{1, 2}, tokenDot, "."},
|
|
||||||
token{Position{1, 3}, tokenDollar, "$"},
|
|
||||||
token{Position{1, 4}, tokenLeftBracket, "["},
|
|
||||||
token{Position{1, 5}, tokenRightBracket, "]"},
|
|
||||||
token{Position{1, 6}, tokenDotDot, ".."},
|
|
||||||
token{Position{1, 8}, tokenLeftParen, "("},
|
|
||||||
token{Position{1, 9}, tokenRightParen, ")"},
|
|
||||||
token{Position{1, 10}, tokenQuestion, "?"},
|
|
||||||
token{Position{1, 11}, tokenStar, "*"},
|
|
||||||
token{Position{1, 12}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexString(t *testing.T) {
|
|
||||||
testQLFlow(t, "'foo'", []token{
|
|
||||||
token{Position{1, 2}, tokenString, "foo"},
|
|
||||||
token{Position{1, 6}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexDoubleString(t *testing.T) {
|
|
||||||
testQLFlow(t, `"bar"`, []token{
|
|
||||||
token{Position{1, 2}, tokenString, "bar"},
|
|
||||||
token{Position{1, 6}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexKey(t *testing.T) {
|
|
||||||
testQLFlow(t, "foo", []token{
|
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
|
||||||
token{Position{1, 4}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexRecurse(t *testing.T) {
|
|
||||||
testQLFlow(t, "$..*", []token{
|
|
||||||
token{Position{1, 1}, tokenDollar, "$"},
|
|
||||||
token{Position{1, 2}, tokenDotDot, ".."},
|
|
||||||
token{Position{1, 4}, tokenStar, "*"},
|
|
||||||
token{Position{1, 5}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexBracketKey(t *testing.T) {
|
|
||||||
testQLFlow(t, "$[foo]", []token{
|
|
||||||
token{Position{1, 1}, tokenDollar, "$"},
|
|
||||||
token{Position{1, 2}, tokenLeftBracket, "["},
|
|
||||||
token{Position{1, 3}, tokenKey, "foo"},
|
|
||||||
token{Position{1, 6}, tokenRightBracket, "]"},
|
|
||||||
token{Position{1, 7}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLexSpace(t *testing.T) {
|
|
||||||
testQLFlow(t, "foo bar baz", []token{
|
|
||||||
token{Position{1, 1}, tokenKey, "foo"},
|
|
||||||
token{Position{1, 5}, tokenKey, "bar"},
|
|
||||||
token{Position{1, 9}, tokenKey, "baz"},
|
|
||||||
token{Position{1, 12}, tokenEOF, ""},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# fail out of the script if anything here fails
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# set the path to the present working directory
|
|
||||||
export GOPATH=`pwd`
|
|
||||||
|
|
||||||
# Vendorize the BurntSushi test suite
|
|
||||||
# NOTE: this gets a specific release to avoid versioning issues
|
|
||||||
if [ ! -d 'src/github.com/BurntSushi/toml-test' ]; then
|
|
||||||
mkdir -p src/github.com/BurntSushi
|
|
||||||
git clone https://github.com/BurntSushi/toml-test.git src/github.com/BurntSushi/toml-test
|
|
||||||
fi
|
|
||||||
pushd src/github.com/BurntSushi/toml-test
|
|
||||||
git reset --hard '0.2.0' # use the released version, NOT tip
|
|
||||||
popd
|
|
||||||
go build -o toml-test github.com/BurntSushi/toml-test
|
|
||||||
|
|
||||||
# vendorize the current lib for testing
|
|
||||||
# NOTE: this basically mocks an install without having to go back out to github for code
|
|
||||||
mkdir -p src/github.com/pelletier/go-toml/cmd
|
|
||||||
cp *.go *.toml src/github.com/pelletier/go-toml
|
|
||||||
cp cmd/*.go src/github.com/pelletier/go-toml/cmd
|
|
||||||
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go
|
|
||||||
|
|
||||||
# Run basic unit tests and then the BurntSushi test suite
|
|
||||||
go test -v github.com/pelletier/go-toml
|
|
||||||
./toml-test ./test_program_bin | tee test_out
|
|
||||||
@@ -23,9 +23,13 @@ const (
|
|||||||
tokenTrue
|
tokenTrue
|
||||||
tokenFalse
|
tokenFalse
|
||||||
tokenFloat
|
tokenFloat
|
||||||
|
tokenInf
|
||||||
|
tokenNan
|
||||||
tokenEqual
|
tokenEqual
|
||||||
tokenLeftBracket
|
tokenLeftBracket
|
||||||
tokenRightBracket
|
tokenRightBracket
|
||||||
|
tokenLeftCurlyBrace
|
||||||
|
tokenRightCurlyBrace
|
||||||
tokenLeftParen
|
tokenLeftParen
|
||||||
tokenRightParen
|
tokenRightParen
|
||||||
tokenDoubleLeftBracket
|
tokenDoubleLeftBracket
|
||||||
@@ -44,6 +48,7 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var tokenTypeNames = []string{
|
var tokenTypeNames = []string{
|
||||||
|
"Error",
|
||||||
"EOF",
|
"EOF",
|
||||||
"Comment",
|
"Comment",
|
||||||
"Key",
|
"Key",
|
||||||
@@ -52,9 +57,13 @@ var tokenTypeNames = []string{
|
|||||||
"True",
|
"True",
|
||||||
"False",
|
"False",
|
||||||
"Float",
|
"Float",
|
||||||
|
"Inf",
|
||||||
|
"NaN",
|
||||||
"=",
|
"=",
|
||||||
"[",
|
"[",
|
||||||
"[",
|
"]",
|
||||||
|
"{",
|
||||||
|
"}",
|
||||||
"(",
|
"(",
|
||||||
")",
|
")",
|
||||||
"]]",
|
"]]",
|
||||||
@@ -102,9 +111,6 @@ func (t token) String() string {
|
|||||||
return t.val
|
return t.val
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(t.val) > 10 {
|
|
||||||
return fmt.Sprintf("%.10q...", t.val)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%q", t.val)
|
return fmt.Sprintf("%q", t.val)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -117,9 +123,14 @@ func isAlphanumeric(r rune) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func isKeyChar(r rune) bool {
|
func isKeyChar(r rune) bool {
|
||||||
// "Keys start with the first non-whitespace character and end with the last
|
// Keys start with the first character that isn't whitespace or [ and end
|
||||||
// non-whitespace character before the equals sign."
|
// with the last non-whitespace character before the equals sign. Keys
|
||||||
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '=')
|
// cannot contain a # character."
|
||||||
|
return !(r == '\r' || r == '\n' || r == eof || r == '=')
|
||||||
|
}
|
||||||
|
|
||||||
|
func isKeyStartChar(r rune) bool {
|
||||||
|
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[')
|
||||||
}
|
}
|
||||||
|
|
||||||
func isDigit(r rune) bool {
|
func isDigit(r rune) bool {
|
||||||
@@ -128,5 +139,6 @@ func isDigit(r rune) bool {
|
|||||||
|
|
||||||
func isHexDigit(r rune) bool {
|
func isHexDigit(r rune) bool {
|
||||||
return isDigit(r) ||
|
return isDigit(r) ||
|
||||||
r == 'A' || r == 'B' || r == 'C' || r == 'D' || r == 'E' || r == 'F'
|
(r >= 'a' && r <= 'f') ||
|
||||||
|
(r >= 'A' && r <= 'F')
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,67 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestTokenStringer(t *testing.T) {
|
||||||
|
var tests = []struct {
|
||||||
|
tt tokenType
|
||||||
|
expect string
|
||||||
|
}{
|
||||||
|
{tokenError, "Error"},
|
||||||
|
{tokenEOF, "EOF"},
|
||||||
|
{tokenComment, "Comment"},
|
||||||
|
{tokenKey, "Key"},
|
||||||
|
{tokenString, "String"},
|
||||||
|
{tokenInteger, "Integer"},
|
||||||
|
{tokenTrue, "True"},
|
||||||
|
{tokenFalse, "False"},
|
||||||
|
{tokenFloat, "Float"},
|
||||||
|
{tokenEqual, "="},
|
||||||
|
{tokenLeftBracket, "["},
|
||||||
|
{tokenRightBracket, "]"},
|
||||||
|
{tokenLeftCurlyBrace, "{"},
|
||||||
|
{tokenRightCurlyBrace, "}"},
|
||||||
|
{tokenLeftParen, "("},
|
||||||
|
{tokenRightParen, ")"},
|
||||||
|
{tokenDoubleLeftBracket, "]]"},
|
||||||
|
{tokenDoubleRightBracket, "[["},
|
||||||
|
{tokenDate, "Date"},
|
||||||
|
{tokenKeyGroup, "KeyGroup"},
|
||||||
|
{tokenKeyGroupArray, "KeyGroupArray"},
|
||||||
|
{tokenComma, ","},
|
||||||
|
{tokenColon, ":"},
|
||||||
|
{tokenDollar, "$"},
|
||||||
|
{tokenStar, "*"},
|
||||||
|
{tokenQuestion, "?"},
|
||||||
|
{tokenDot, "."},
|
||||||
|
{tokenDotDot, ".."},
|
||||||
|
{tokenEOL, "EOL"},
|
||||||
|
{tokenEOL + 1, "Unknown"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, test := range tests {
|
||||||
|
got := test.tt.String()
|
||||||
|
if got != test.expect {
|
||||||
|
t.Errorf("[%d] invalid string of token type; got %q, expected %q", i, got, test.expect)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTokenString(t *testing.T) {
|
||||||
|
var tests = []struct {
|
||||||
|
tok token
|
||||||
|
expect string
|
||||||
|
}{
|
||||||
|
{token{Position{1, 1}, tokenEOF, ""}, "EOF"},
|
||||||
|
{token{Position{1, 1}, tokenError, "Δt"}, "Δt"},
|
||||||
|
{token{Position{1, 1}, tokenString, "bar"}, `"bar"`},
|
||||||
|
{token{Position{1, 1}, tokenString, "123456789012345"}, `"123456789012345"`},
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, test := range tests {
|
||||||
|
got := test.tok.String()
|
||||||
|
if got != test.expect {
|
||||||
|
t.Errorf("[%d] invalid of string token; got %q, expected %q", i, got, test.expect)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,33 +3,56 @@ package toml
|
|||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
"runtime"
|
"runtime"
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type tomlValue struct {
|
type tomlValue struct {
|
||||||
value interface{}
|
value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list
|
||||||
position Position
|
comment string
|
||||||
|
commented bool
|
||||||
|
multiline bool
|
||||||
|
position Position
|
||||||
}
|
}
|
||||||
|
|
||||||
// TomlTree is the result of the parsing of a TOML file.
|
// Tree is the result of the parsing of a TOML file.
|
||||||
type TomlTree struct {
|
type Tree struct {
|
||||||
values map[string]interface{}
|
values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree
|
||||||
position Position
|
comment string
|
||||||
|
commented bool
|
||||||
|
position Position
|
||||||
}
|
}
|
||||||
|
|
||||||
func newTomlTree() *TomlTree {
|
func newTree() *Tree {
|
||||||
return &TomlTree{
|
return newTreeWithPosition(Position{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func newTreeWithPosition(pos Position) *Tree {
|
||||||
|
return &Tree{
|
||||||
values: make(map[string]interface{}),
|
values: make(map[string]interface{}),
|
||||||
position: Position{},
|
position: pos,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TreeFromMap initializes a new Tree object using the given map.
|
||||||
|
func TreeFromMap(m map[string]interface{}) (*Tree, error) {
|
||||||
|
result, err := toTree(m)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return result.(*Tree), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Position returns the position of the tree.
|
||||||
|
func (t *Tree) Position() Position {
|
||||||
|
return t.position
|
||||||
|
}
|
||||||
|
|
||||||
// Has returns a boolean indicating if the given key exists.
|
// Has returns a boolean indicating if the given key exists.
|
||||||
func (t *TomlTree) Has(key string) bool {
|
func (t *Tree) Has(key string) bool {
|
||||||
if key == "" {
|
if key == "" {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
@@ -37,25 +60,27 @@ func (t *TomlTree) Has(key string) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// HasPath returns true if the given path of keys exists, false otherwise.
|
// HasPath returns true if the given path of keys exists, false otherwise.
|
||||||
func (t *TomlTree) HasPath(keys []string) bool {
|
func (t *Tree) HasPath(keys []string) bool {
|
||||||
return t.GetPath(keys) != nil
|
return t.GetPath(keys) != nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Keys returns the keys of the toplevel tree.
|
// Keys returns the keys of the toplevel tree (does not recurse).
|
||||||
// Warning: this is a costly operation.
|
func (t *Tree) Keys() []string {
|
||||||
func (t *TomlTree) Keys() []string {
|
keys := make([]string, len(t.values))
|
||||||
var keys []string
|
i := 0
|
||||||
for k := range t.values {
|
for k := range t.values {
|
||||||
keys = append(keys, k)
|
keys[i] = k
|
||||||
|
i++
|
||||||
}
|
}
|
||||||
return keys
|
return keys
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the value at key in the TomlTree.
|
// Get the value at key in the Tree.
|
||||||
// Key is a dot-separated path (e.g. a.b.c).
|
// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings.
|
||||||
|
// If you need to retrieve non-bare keys, use GetPath.
|
||||||
// Returns nil if the path does not exist in the tree.
|
// Returns nil if the path does not exist in the tree.
|
||||||
// If keys is of length zero, the current tree is returned.
|
// If keys is of length zero, the current tree is returned.
|
||||||
func (t *TomlTree) Get(key string) interface{} {
|
func (t *Tree) Get(key string) interface{} {
|
||||||
if key == "" {
|
if key == "" {
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
@@ -64,7 +89,7 @@ func (t *TomlTree) Get(key string) interface{} {
|
|||||||
|
|
||||||
// GetPath returns the element in the tree indicated by 'keys'.
|
// GetPath returns the element in the tree indicated by 'keys'.
|
||||||
// If keys is of length zero, the current tree is returned.
|
// If keys is of length zero, the current tree is returned.
|
||||||
func (t *TomlTree) GetPath(keys []string) interface{} {
|
func (t *Tree) GetPath(keys []string) interface{} {
|
||||||
if len(keys) == 0 {
|
if len(keys) == 0 {
|
||||||
return t
|
return t
|
||||||
}
|
}
|
||||||
@@ -75,16 +100,16 @@ func (t *TomlTree) GetPath(keys []string) interface{} {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
switch node := value.(type) {
|
switch node := value.(type) {
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
subtree = node
|
subtree = node
|
||||||
case []*TomlTree:
|
case []*Tree:
|
||||||
// go to most recent element
|
// go to most recent element
|
||||||
if len(node) == 0 {
|
if len(node) == 0 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
subtree = node[len(node)-1]
|
subtree = node[len(node)-1]
|
||||||
default:
|
default:
|
||||||
return nil // cannot naigate through other node types
|
return nil // cannot navigate through other node types
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// branch based on final node type
|
// branch based on final node type
|
||||||
@@ -97,7 +122,7 @@ func (t *TomlTree) GetPath(keys []string) interface{} {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// GetPosition returns the position of the given key.
|
// GetPosition returns the position of the given key.
|
||||||
func (t *TomlTree) GetPosition(key string) Position {
|
func (t *Tree) GetPosition(key string) Position {
|
||||||
if key == "" {
|
if key == "" {
|
||||||
return t.position
|
return t.position
|
||||||
}
|
}
|
||||||
@@ -106,7 +131,7 @@ func (t *TomlTree) GetPosition(key string) Position {
|
|||||||
|
|
||||||
// GetPositionPath returns the element in the tree indicated by 'keys'.
|
// GetPositionPath returns the element in the tree indicated by 'keys'.
|
||||||
// If keys is of length zero, the current tree is returned.
|
// If keys is of length zero, the current tree is returned.
|
||||||
func (t *TomlTree) GetPositionPath(keys []string) Position {
|
func (t *Tree) GetPositionPath(keys []string) Position {
|
||||||
if len(keys) == 0 {
|
if len(keys) == 0 {
|
||||||
return t.position
|
return t.position
|
||||||
}
|
}
|
||||||
@@ -117,9 +142,9 @@ func (t *TomlTree) GetPositionPath(keys []string) Position {
|
|||||||
return Position{0, 0}
|
return Position{0, 0}
|
||||||
}
|
}
|
||||||
switch node := value.(type) {
|
switch node := value.(type) {
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
subtree = node
|
subtree = node
|
||||||
case []*TomlTree:
|
case []*Tree:
|
||||||
// go to most recent element
|
// go to most recent element
|
||||||
if len(node) == 0 {
|
if len(node) == 0 {
|
||||||
return Position{0, 0}
|
return Position{0, 0}
|
||||||
@@ -133,9 +158,9 @@ func (t *TomlTree) GetPositionPath(keys []string) Position {
|
|||||||
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||||
case *tomlValue:
|
case *tomlValue:
|
||||||
return node.position
|
return node.position
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
return node.position
|
return node.position
|
||||||
case []*TomlTree:
|
case []*Tree:
|
||||||
// go to most recent element
|
// go to most recent element
|
||||||
if len(node) == 0 {
|
if len(node) == 0 {
|
||||||
return Position{0, 0}
|
return Position{0, 0}
|
||||||
@@ -147,7 +172,7 @@ func (t *TomlTree) GetPositionPath(keys []string) Position {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// GetDefault works like Get but with a default value
|
// GetDefault works like Get but with a default value
|
||||||
func (t *TomlTree) GetDefault(key string, def interface{}) interface{} {
|
func (t *Tree) GetDefault(key string, def interface{}) interface{} {
|
||||||
val := t.Get(key)
|
val := t.Get(key)
|
||||||
if val == nil {
|
if val == nil {
|
||||||
return def
|
return def
|
||||||
@@ -155,37 +180,117 @@ func (t *TomlTree) GetDefault(key string, def interface{}) interface{} {
|
|||||||
return val
|
return val
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set an element in the tree.
|
// SetOptions arguments are supplied to the SetWithOptions and SetPathWithOptions functions to modify marshalling behaviour.
|
||||||
// Key is a dot-separated path (e.g. a.b.c).
|
// The default values within the struct are valid default options.
|
||||||
// Creates all necessary intermediates trees, if needed.
|
type SetOptions struct {
|
||||||
func (t *TomlTree) Set(key string, value interface{}) {
|
Comment string
|
||||||
t.SetPath(strings.Split(key, "."), value)
|
Commented bool
|
||||||
|
Multiline bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetPath sets an element in the tree.
|
// SetWithOptions is the same as Set, but allows you to provide formatting
|
||||||
// Keys is an array of path elements (e.g. {"a","b","c"}).
|
// instructions to the key, that will be used by Marshal().
|
||||||
// Creates all necessary intermediates trees, if needed.
|
func (t *Tree) SetWithOptions(key string, opts SetOptions, value interface{}) {
|
||||||
func (t *TomlTree) SetPath(keys []string, value interface{}) {
|
t.SetPathWithOptions(strings.Split(key, "."), opts, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetPathWithOptions is the same as SetPath, but allows you to provide
|
||||||
|
// formatting instructions to the key, that will be reused by Marshal().
|
||||||
|
func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interface{}) {
|
||||||
subtree := t
|
subtree := t
|
||||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
for i, intermediateKey := range keys[:len(keys)-1] {
|
||||||
nextTree, exists := subtree.values[intermediateKey]
|
nextTree, exists := subtree.values[intermediateKey]
|
||||||
if !exists {
|
if !exists {
|
||||||
nextTree = newTomlTree()
|
nextTree = newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||||
subtree.values[intermediateKey] = &nextTree // add new element here
|
subtree.values[intermediateKey] = nextTree // add new element here
|
||||||
}
|
}
|
||||||
switch node := nextTree.(type) {
|
switch node := nextTree.(type) {
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
subtree = node
|
subtree = node
|
||||||
case []*TomlTree:
|
case []*Tree:
|
||||||
// go to most recent element
|
// go to most recent element
|
||||||
if len(node) == 0 {
|
if len(node) == 0 {
|
||||||
// create element if it does not exist
|
// create element if it does not exist
|
||||||
subtree.values[intermediateKey] = append(node, newTomlTree())
|
subtree.values[intermediateKey] = append(node, newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}))
|
||||||
}
|
}
|
||||||
subtree = node[len(node)-1]
|
subtree = node[len(node)-1]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
subtree.values[keys[len(keys)-1]] = value
|
|
||||||
|
var toInsert interface{}
|
||||||
|
|
||||||
|
switch v := value.(type) {
|
||||||
|
case *Tree:
|
||||||
|
v.comment = opts.Comment
|
||||||
|
toInsert = value
|
||||||
|
case []*Tree:
|
||||||
|
toInsert = value
|
||||||
|
case *tomlValue:
|
||||||
|
v.comment = opts.Comment
|
||||||
|
toInsert = v
|
||||||
|
default:
|
||||||
|
toInsert = &tomlValue{value: value,
|
||||||
|
comment: opts.Comment,
|
||||||
|
commented: opts.Commented,
|
||||||
|
multiline: opts.Multiline,
|
||||||
|
position: Position{Line: subtree.position.Line + len(subtree.values) + 1, Col: subtree.position.Col}}
|
||||||
|
}
|
||||||
|
|
||||||
|
subtree.values[keys[len(keys)-1]] = toInsert
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set an element in the tree.
|
||||||
|
// Key is a dot-separated path (e.g. a.b.c).
|
||||||
|
// Creates all necessary intermediate trees, if needed.
|
||||||
|
func (t *Tree) Set(key string, value interface{}) {
|
||||||
|
t.SetWithComment(key, "", false, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetWithComment is the same as Set, but allows you to provide comment
|
||||||
|
// information to the key, that will be reused by Marshal().
|
||||||
|
func (t *Tree) SetWithComment(key string, comment string, commented bool, value interface{}) {
|
||||||
|
t.SetPathWithComment(strings.Split(key, "."), comment, commented, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetPath sets an element in the tree.
|
||||||
|
// Keys is an array of path elements (e.g. {"a","b","c"}).
|
||||||
|
// Creates all necessary intermediate trees, if needed.
|
||||||
|
func (t *Tree) SetPath(keys []string, value interface{}) {
|
||||||
|
t.SetPathWithComment(keys, "", false, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetPathWithComment is the same as SetPath, but allows you to provide comment
|
||||||
|
// information to the key, that will be reused by Marshal().
|
||||||
|
func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) {
|
||||||
|
t.SetPathWithOptions(keys, SetOptions{Comment: comment, Commented: commented}, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete removes a key from the tree.
|
||||||
|
// Key is a dot-separated path (e.g. a.b.c).
|
||||||
|
func (t *Tree) Delete(key string) error {
|
||||||
|
keys, err := parseKey(key)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return t.DeletePath(keys)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeletePath removes a key from the tree.
|
||||||
|
// Keys is an array of path elements (e.g. {"a","b","c"}).
|
||||||
|
func (t *Tree) DeletePath(keys []string) error {
|
||||||
|
keyLen := len(keys)
|
||||||
|
if keyLen == 1 {
|
||||||
|
delete(t.values, keys[0])
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
tree := t.GetPath(keys[:keyLen-1])
|
||||||
|
item := keys[keyLen-1]
|
||||||
|
switch node := tree.(type) {
|
||||||
|
case *Tree:
|
||||||
|
delete(node.values, item)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.New("no such key to delete")
|
||||||
}
|
}
|
||||||
|
|
||||||
// createSubTree takes a tree and a key and create the necessary intermediate
|
// createSubTree takes a tree and a key and create the necessary intermediate
|
||||||
@@ -195,142 +300,32 @@ func (t *TomlTree) SetPath(keys []string, value interface{}) {
|
|||||||
// and tree[a][b][c]
|
// and tree[a][b][c]
|
||||||
//
|
//
|
||||||
// Returns nil on success, error object on failure
|
// Returns nil on success, error object on failure
|
||||||
func (t *TomlTree) createSubTree(keys []string, pos Position) error {
|
func (t *Tree) createSubTree(keys []string, pos Position) error {
|
||||||
subtree := t
|
subtree := t
|
||||||
for _, intermediateKey := range keys {
|
for i, intermediateKey := range keys {
|
||||||
if intermediateKey == "" {
|
|
||||||
return fmt.Errorf("empty intermediate table")
|
|
||||||
}
|
|
||||||
nextTree, exists := subtree.values[intermediateKey]
|
nextTree, exists := subtree.values[intermediateKey]
|
||||||
if !exists {
|
if !exists {
|
||||||
tree := newTomlTree()
|
tree := newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||||
tree.position = pos
|
tree.position = pos
|
||||||
subtree.values[intermediateKey] = tree
|
subtree.values[intermediateKey] = tree
|
||||||
nextTree = tree
|
nextTree = tree
|
||||||
}
|
}
|
||||||
|
|
||||||
switch node := nextTree.(type) {
|
switch node := nextTree.(type) {
|
||||||
case []*TomlTree:
|
case []*Tree:
|
||||||
subtree = node[len(node)-1]
|
subtree = node[len(node)-1]
|
||||||
case *TomlTree:
|
case *Tree:
|
||||||
subtree = node
|
subtree = node
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("unknown type for path %s (%s)",
|
return fmt.Errorf("unknown type for path %s (%s): %T (%#v)",
|
||||||
strings.Join(keys, "."), intermediateKey)
|
strings.Join(keys, "."), intermediateKey, nextTree, nextTree)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// encodes a string to a TOML-compliant string value
|
// LoadBytes creates a Tree from a []byte.
|
||||||
func encodeTomlString(value string) string {
|
func LoadBytes(b []byte) (tree *Tree, err error) {
|
||||||
result := ""
|
|
||||||
for _, rr := range value {
|
|
||||||
intRr := uint16(rr)
|
|
||||||
switch rr {
|
|
||||||
case '\b':
|
|
||||||
result += "\\b"
|
|
||||||
case '\t':
|
|
||||||
result += "\\t"
|
|
||||||
case '\n':
|
|
||||||
result += "\\n"
|
|
||||||
case '\f':
|
|
||||||
result += "\\f"
|
|
||||||
case '\r':
|
|
||||||
result += "\\r"
|
|
||||||
case '"':
|
|
||||||
result += "\\\""
|
|
||||||
case '\\':
|
|
||||||
result += "\\\\"
|
|
||||||
default:
|
|
||||||
if intRr < 0x001F {
|
|
||||||
result += fmt.Sprintf("\\u%0.4X", intRr)
|
|
||||||
} else {
|
|
||||||
result += string(rr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Value print support function for ToString()
|
|
||||||
// Outputs the TOML compliant string representation of a value
|
|
||||||
func toTomlValue(item interface{}, indent int) string {
|
|
||||||
tab := strings.Repeat(" ", indent)
|
|
||||||
switch value := item.(type) {
|
|
||||||
case int64:
|
|
||||||
return tab + strconv.FormatInt(value, 10)
|
|
||||||
case float64:
|
|
||||||
return tab + strconv.FormatFloat(value, 'f', -1, 64)
|
|
||||||
case string:
|
|
||||||
return tab + "\"" + encodeTomlString(value) + "\""
|
|
||||||
case bool:
|
|
||||||
if value {
|
|
||||||
return "true"
|
|
||||||
}
|
|
||||||
return "false"
|
|
||||||
case time.Time:
|
|
||||||
return tab + value.Format(time.RFC3339)
|
|
||||||
case []interface{}:
|
|
||||||
result := tab + "[\n"
|
|
||||||
for _, item := range value {
|
|
||||||
result += toTomlValue(item, indent+2) + ",\n"
|
|
||||||
}
|
|
||||||
return result + tab + "]"
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("unsupported value type: %v", value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Recursive support function for ToString()
|
|
||||||
// Outputs a tree, using the provided keyspace to prefix group names
|
|
||||||
func (t *TomlTree) toToml(indent, keyspace string) string {
|
|
||||||
result := ""
|
|
||||||
for k, v := range t.values {
|
|
||||||
// figure out the keyspace
|
|
||||||
combinedKey := k
|
|
||||||
if keyspace != "" {
|
|
||||||
combinedKey = keyspace + "." + combinedKey
|
|
||||||
}
|
|
||||||
// output based on type
|
|
||||||
switch node := v.(type) {
|
|
||||||
case []*TomlTree:
|
|
||||||
for _, item := range node {
|
|
||||||
if len(item.Keys()) > 0 {
|
|
||||||
result += fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
|
|
||||||
}
|
|
||||||
result += item.toToml(indent+" ", combinedKey)
|
|
||||||
}
|
|
||||||
case *TomlTree:
|
|
||||||
if len(node.Keys()) > 0 {
|
|
||||||
result += fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
|
|
||||||
}
|
|
||||||
result += node.toToml(indent+" ", combinedKey)
|
|
||||||
case *tomlValue:
|
|
||||||
result += fmt.Sprintf("%s%s = %s\n", indent, k, toTomlValue(node.value, 0))
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("unsupported node type: %v", node))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *TomlTree) Query(query string) (*QueryResult, error) {
|
|
||||||
if q, err := CompileQuery(query); err != nil {
|
|
||||||
return nil, err
|
|
||||||
} else {
|
|
||||||
return q.Execute(t), nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToString generates a human-readable representation of the current tree.
// Output spans multiple lines, and is suitable for ingest by a TOML parser.
// It simply delegates to toToml with no initial indent and an empty
// keyspace prefix.
func (t *TomlTree) ToString() string {
	return t.toToml("", "")
}
|
|
||||||
|
|
||||||
// Load creates a TomlTree from a string.
|
|
||||||
func Load(content string) (tree *TomlTree, err error) {
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if r := recover(); r != nil {
|
if r := recover(); r != nil {
|
||||||
if _, ok := r.(runtime.Error); ok {
|
if _, ok := r.(runtime.Error); ok {
|
||||||
@@ -339,18 +334,60 @@ func Load(content string) (tree *TomlTree, err error) {
|
|||||||
err = errors.New(r.(string))
|
err = errors.New(r.(string))
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
tree = parseToml(lexToml(content))
|
|
||||||
|
if len(b) >= 4 && (hasUTF32BigEndianBOM4(b) || hasUTF32LittleEndianBOM4(b)) {
|
||||||
|
b = b[4:]
|
||||||
|
} else if len(b) >= 3 && hasUTF8BOM3(b) {
|
||||||
|
b = b[3:]
|
||||||
|
} else if len(b) >= 2 && (hasUTF16BigEndianBOM2(b) || hasUTF16LittleEndianBOM2(b)) {
|
||||||
|
b = b[2:]
|
||||||
|
}
|
||||||
|
|
||||||
|
tree = parseToml(lexToml(b))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// LoadFile creates a TomlTree from a file.
|
func hasUTF16BigEndianBOM2(b []byte) bool {
|
||||||
func LoadFile(path string) (tree *TomlTree, err error) {
|
return b[0] == 0xFE && b[1] == 0xFF
|
||||||
buff, ferr := ioutil.ReadFile(path)
|
}
|
||||||
if ferr != nil {
|
|
||||||
err = ferr
|
func hasUTF16LittleEndianBOM2(b []byte) bool {
|
||||||
} else {
|
return b[0] == 0xFF && b[1] == 0xFE
|
||||||
s := string(buff)
|
}
|
||||||
tree, err = Load(s)
|
|
||||||
|
func hasUTF8BOM3(b []byte) bool {
|
||||||
|
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
|
||||||
|
}
|
||||||
|
|
||||||
|
func hasUTF32BigEndianBOM4(b []byte) bool {
|
||||||
|
return b[0] == 0x00 && b[1] == 0x00 && b[2] == 0xFE && b[3] == 0xFF
|
||||||
|
}
|
||||||
|
|
||||||
|
func hasUTF32LittleEndianBOM4(b []byte) bool {
|
||||||
|
return b[0] == 0xFF && b[1] == 0xFE && b[2] == 0x00 && b[3] == 0x00
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadReader creates a Tree from any io.Reader.
|
||||||
|
func LoadReader(reader io.Reader) (tree *Tree, err error) {
|
||||||
|
inputBytes, err := ioutil.ReadAll(reader)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
tree, err = LoadBytes(inputBytes)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Load creates a Tree from a string.
|
||||||
|
func Load(content string) (tree *Tree, err error) {
|
||||||
|
return LoadBytes([]byte(content))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadFile creates a Tree from a file.
|
||||||
|
func LoadFile(path string) (tree *Tree, err error) {
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
return LoadReader(file)
|
||||||
|
}
|
||||||
|
|||||||
+128
-22
@@ -15,6 +15,47 @@ func TestTomlHas(t *testing.T) {
|
|||||||
if !tree.Has("test.key") {
|
if !tree.Has("test.key") {
|
||||||
t.Errorf("Has - expected test.key to exists")
|
t.Errorf("Has - expected test.key to exists")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if tree.Has("") {
|
||||||
|
t.Errorf("Should return false if the key is not provided")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlGet(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[test]
|
||||||
|
key = "value"
|
||||||
|
`)
|
||||||
|
|
||||||
|
if tree.Get("") != tree {
|
||||||
|
t.Errorf("Get should return the tree itself when given an empty path")
|
||||||
|
}
|
||||||
|
|
||||||
|
if tree.Get("test.key") != "value" {
|
||||||
|
t.Errorf("Get should return the value")
|
||||||
|
}
|
||||||
|
if tree.Get(`\`) != nil {
|
||||||
|
t.Errorf("should return nil when the key is malformed")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlGetDefault(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[test]
|
||||||
|
key = "value"
|
||||||
|
`)
|
||||||
|
|
||||||
|
if tree.GetDefault("", "hello") != tree {
|
||||||
|
t.Error("GetDefault should return the tree itself when given an empty path")
|
||||||
|
}
|
||||||
|
|
||||||
|
if tree.GetDefault("test.key", "hello") != "value" {
|
||||||
|
t.Error("Get should return the value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if tree.GetDefault("whatever", "hello") != "hello" {
|
||||||
|
t.Error("GetDefault should return the default value if the key does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTomlHasPath(t *testing.T) {
|
func TestTomlHasPath(t *testing.T) {
|
||||||
@@ -28,13 +69,67 @@ func TestTomlHasPath(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTomlDelete(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
key = "value"
|
||||||
|
`)
|
||||||
|
err := tree.Delete("key")
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Delete - unexpected error while deleting key: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
if tree.Get("key") != nil {
|
||||||
|
t.Errorf("Delete should have removed key but did not.")
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlDeleteUnparsableKey(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
key = "value"
|
||||||
|
`)
|
||||||
|
err := tree.Delete(".")
|
||||||
|
if err == nil {
|
||||||
|
t.Errorf("Delete should error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlDeleteNestedKey(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[foo]
|
||||||
|
[foo.bar]
|
||||||
|
key = "value"
|
||||||
|
`)
|
||||||
|
err := tree.Delete("foo.bar.key")
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Error while deleting nested key: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
if tree.Get("key") != nil {
|
||||||
|
t.Errorf("Delete should have removed nested key but did not.")
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlDeleteNonexistentNestedKey(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[foo]
|
||||||
|
[foo.bar]
|
||||||
|
key = "value"
|
||||||
|
`)
|
||||||
|
err := tree.Delete("foo.not.there.key")
|
||||||
|
if err == nil {
|
||||||
|
t.Errorf("Delete should have thrown an error trying to delete key in nonexistent tree")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestTomlGetPath(t *testing.T) {
|
func TestTomlGetPath(t *testing.T) {
|
||||||
node := newTomlTree()
|
node := newTree()
|
||||||
//TODO: set other node data
|
//TODO: set other node data
|
||||||
|
|
||||||
for idx, item := range []struct {
|
for idx, item := range []struct {
|
||||||
Path []string
|
Path []string
|
||||||
Expected *TomlTree
|
Expected *Tree
|
||||||
}{
|
}{
|
||||||
{ // empty path test
|
{ // empty path test
|
||||||
[]string{},
|
[]string{},
|
||||||
@@ -46,29 +141,40 @@ func TestTomlGetPath(t *testing.T) {
|
|||||||
t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result)
|
t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
tree, _ := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
|
||||||
|
if tree.GetPath([]string{"whatever"}) != nil {
|
||||||
|
t.Error("GetPath should return nil when the key does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTomlQuery(t *testing.T) {
|
func TestTomlFromMap(t *testing.T) {
|
||||||
tree, err := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
|
simpleMap := map[string]interface{}{"hello": 42}
|
||||||
|
tree, err := TreeFromMap(simpleMap)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Fatal("unexpected error:", err)
|
||||||
return
|
|
||||||
}
|
}
|
||||||
result, err := tree.Query("$.foo.bar")
|
if tree.Get("hello") != int64(42) {
|
||||||
if err != nil {
|
t.Fatal("hello should be 42, not", tree.Get("hello"))
|
||||||
t.Error(err)
|
}
|
||||||
return
|
}
|
||||||
}
|
|
||||||
values := result.Values()
|
func TestLoadBytesBOM(t *testing.T) {
|
||||||
if len(values) != 1 {
|
payloads := [][]byte{
|
||||||
t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
|
[]byte("\xFE\xFFhello=1"),
|
||||||
}
|
[]byte("\xFF\xFEhello=1"),
|
||||||
|
[]byte("\xEF\xBB\xBFhello=1"),
|
||||||
if tt, ok := values[0].(*TomlTree); !ok {
|
[]byte("\x00\x00\xFE\xFFhello=1"),
|
||||||
t.Errorf("Expected type of TomlTree: %T Tv", values[0], values[0])
|
[]byte("\xFF\xFE\x00\x00hello=1"),
|
||||||
} else if tt.Get("a") != int64(1) {
|
}
|
||||||
t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
|
for _, data := range payloads {
|
||||||
} else if tt.Get("b") != int64(2) {
|
tree, err := LoadBytes(data)
|
||||||
t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
|
if err != nil {
|
||||||
|
t.Fatal("unexpected error:", err, "for:", data)
|
||||||
|
}
|
||||||
|
v := tree.Get("hello")
|
||||||
|
if v != int64(1) {
|
||||||
|
t.Fatal("hello should be 1, not", v)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,119 @@
|
|||||||
|
// This is a support file for toml_testgen_test.go
|
||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/davecgh/go-spew/spew"
|
||||||
|
)
|
||||||
|
|
||||||
|
func testgenInvalid(t *testing.T, input string) {
|
||||||
|
t.Logf("Input TOML:\n%s", input)
|
||||||
|
tree, err := Load(input)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
typedTree := testgenTranslate(*tree)
|
||||||
|
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
if err := json.NewEncoder(buf).Encode(typedTree); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Fatalf("test did not fail. resulting tree:\n%s", buf.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
func testgenValid(t *testing.T, input string, jsonRef string) {
|
||||||
|
t.Logf("Input TOML:\n%s", input)
|
||||||
|
tree, err := Load(input)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed parsing toml: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
typedTree := testgenTranslate(*tree)
|
||||||
|
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
if err := json.NewEncoder(buf).Encode(typedTree); err != nil {
|
||||||
|
t.Fatalf("failed translating to JSON: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var jsonTest interface{}
|
||||||
|
if err := json.NewDecoder(buf).Decode(&jsonTest); err != nil {
|
||||||
|
t.Logf("translated JSON:\n%s", buf.String())
|
||||||
|
t.Fatalf("failed decoding translated JSON: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var jsonExpected interface{}
|
||||||
|
if err := json.NewDecoder(bytes.NewBufferString(jsonRef)).Decode(&jsonExpected); err != nil {
|
||||||
|
t.Logf("reference JSON:\n%s", jsonRef)
|
||||||
|
t.Fatalf("failed decoding reference JSON: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(jsonExpected, jsonTest) {
|
||||||
|
t.Logf("Diff:\n%s", spew.Sdump(jsonExpected, jsonTest))
|
||||||
|
t.Fatal("parsed TOML tree is different than expected structure")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func testgenTranslate(tomlData interface{}) interface{} {
|
||||||
|
switch orig := tomlData.(type) {
|
||||||
|
case map[string]interface{}:
|
||||||
|
typed := make(map[string]interface{}, len(orig))
|
||||||
|
for k, v := range orig {
|
||||||
|
typed[k] = testgenTranslate(v)
|
||||||
|
}
|
||||||
|
return typed
|
||||||
|
case *Tree:
|
||||||
|
return testgenTranslate(*orig)
|
||||||
|
case Tree:
|
||||||
|
keys := orig.Keys()
|
||||||
|
typed := make(map[string]interface{}, len(keys))
|
||||||
|
for _, k := range keys {
|
||||||
|
typed[k] = testgenTranslate(orig.GetPath([]string{k}))
|
||||||
|
}
|
||||||
|
return typed
|
||||||
|
case []*Tree:
|
||||||
|
typed := make([]map[string]interface{}, len(orig))
|
||||||
|
for i, v := range orig {
|
||||||
|
typed[i] = testgenTranslate(v).(map[string]interface{})
|
||||||
|
}
|
||||||
|
return typed
|
||||||
|
case []map[string]interface{}:
|
||||||
|
typed := make([]map[string]interface{}, len(orig))
|
||||||
|
for i, v := range orig {
|
||||||
|
typed[i] = testgenTranslate(v).(map[string]interface{})
|
||||||
|
}
|
||||||
|
return typed
|
||||||
|
case []interface{}:
|
||||||
|
typed := make([]interface{}, len(orig))
|
||||||
|
for i, v := range orig {
|
||||||
|
typed[i] = testgenTranslate(v)
|
||||||
|
}
|
||||||
|
return testgenTag("array", typed)
|
||||||
|
case time.Time:
|
||||||
|
return testgenTag("datetime", orig.Format("2006-01-02T15:04:05Z"))
|
||||||
|
case bool:
|
||||||
|
return testgenTag("bool", fmt.Sprintf("%v", orig))
|
||||||
|
case int64:
|
||||||
|
return testgenTag("integer", fmt.Sprintf("%d", orig))
|
||||||
|
case float64:
|
||||||
|
return testgenTag("float", fmt.Sprintf("%v", orig))
|
||||||
|
case string:
|
||||||
|
return testgenTag("string", orig)
|
||||||
|
}
|
||||||
|
|
||||||
|
panic(fmt.Sprintf("Unknown type: %T", tomlData))
|
||||||
|
}
|
||||||
|
|
||||||
|
// testgenTag wraps data in the {"type": ..., "value": ...} envelope
// used by the toml-test JSON reference format.
func testgenTag(typeName string, data interface{}) map[string]interface{} {
	tagged := make(map[string]interface{}, 2)
	tagged["type"] = typeName
	tagged["value"] = data
	return tagged
}
|
||||||
@@ -0,0 +1,943 @@
|
|||||||
|
// Generated by tomltestgen for toml-test ref 39e37e6 on 2019-03-19T23:58:45-07:00
|
||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestInvalidArrayMixedTypesArraysAndInts(t *testing.T) {
|
||||||
|
input := `arrays-and-ints = [1, ["Arrays are not integers."]]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidArrayMixedTypesIntsAndFloats(t *testing.T) {
|
||||||
|
input := `ints-and-floats = [1, 1.1]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidArrayMixedTypesStringsAndInts(t *testing.T) {
|
||||||
|
input := `strings-and-ints = ["hi", 42]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDatetimeMalformedNoLeads(t *testing.T) {
|
||||||
|
input := `no-leads = 1987-7-05T17:45:00Z`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDatetimeMalformedNoSecs(t *testing.T) {
|
||||||
|
input := `no-secs = 1987-07-05T17:45Z`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDatetimeMalformedNoT(t *testing.T) {
|
||||||
|
input := `no-t = 1987-07-0517:45:00Z`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDatetimeMalformedWithMilli(t *testing.T) {
|
||||||
|
input := `with-milli = 1987-07-5T17:45:00.12Z`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDuplicateKeyTable(t *testing.T) {
|
||||||
|
input := `[fruit]
|
||||||
|
type = "apple"
|
||||||
|
|
||||||
|
[fruit.type]
|
||||||
|
apple = "yes"`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDuplicateKeys(t *testing.T) {
|
||||||
|
input := `dupe = false
|
||||||
|
dupe = true`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidDuplicateTables(t *testing.T) {
|
||||||
|
input := `[a]
|
||||||
|
[a]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidEmptyImplicitTable(t *testing.T) {
|
||||||
|
input := `[naughty..naughty]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidEmptyTable(t *testing.T) {
|
||||||
|
input := `[]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidFloatNoLeadingZero(t *testing.T) {
|
||||||
|
input := `answer = .12345
|
||||||
|
neganswer = -.12345`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidFloatNoTrailingDigits(t *testing.T) {
|
||||||
|
input := `answer = 1.
|
||||||
|
neganswer = -1.`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyEmpty(t *testing.T) {
|
||||||
|
input := ` = 1`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyHash(t *testing.T) {
|
||||||
|
input := `a# = 1`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyNewline(t *testing.T) {
|
||||||
|
input := `a
|
||||||
|
= 1`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyOpenBracket(t *testing.T) {
|
||||||
|
input := `[abc = 1`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeySingleOpenBracket(t *testing.T) {
|
||||||
|
input := `[`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeySpace(t *testing.T) {
|
||||||
|
input := `a b = 1`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyStartBracket(t *testing.T) {
|
||||||
|
input := `[a]
|
||||||
|
[xyz = 5
|
||||||
|
[b]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyTwoEquals(t *testing.T) {
|
||||||
|
input := `key= = 1`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidStringBadByteEscape(t *testing.T) {
|
||||||
|
input := `naughty = "\xAg"`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidStringBadEscape(t *testing.T) {
|
||||||
|
input := `invalid-escape = "This string has a bad \a escape character."`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidStringByteEscapes(t *testing.T) {
|
||||||
|
input := `answer = "\x33"`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidStringNoClose(t *testing.T) {
|
||||||
|
input := `no-ending-quote = "One time, at band camp`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableArrayImplicit(t *testing.T) {
|
||||||
|
input := "# This test is a bit tricky. It should fail because the first use of\n" +
|
||||||
|
"# `[[albums.songs]]` without first declaring `albums` implies that `albums`\n" +
|
||||||
|
"# must be a table. The alternative would be quite weird. Namely, it wouldn't\n" +
|
||||||
|
"# comply with the TOML spec: \"Each double-bracketed sub-table will belong to \n" +
|
||||||
|
"# the most *recently* defined table element *above* it.\"\n" +
|
||||||
|
"#\n" +
|
||||||
|
"# This is in contrast to the *valid* test, table-array-implicit where\n" +
|
||||||
|
"# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared\n" +
|
||||||
|
"# later. (Although, `[albums]` could be.)\n" +
|
||||||
|
"[[albums.songs]]\n" +
|
||||||
|
"name = \"Glory Days\"\n" +
|
||||||
|
"\n" +
|
||||||
|
"[[albums]]\n" +
|
||||||
|
"name = \"Born in the USA\"\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableArrayMalformedBracket(t *testing.T) {
|
||||||
|
input := `[[albums]
|
||||||
|
name = "Born to Run"`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableArrayMalformedEmpty(t *testing.T) {
|
||||||
|
input := `[[]]
|
||||||
|
name = "Born to Run"`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableEmpty(t *testing.T) {
|
||||||
|
input := `[]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableNestedBracketsClose(t *testing.T) {
|
||||||
|
input := `[a]b]
|
||||||
|
zyx = 42`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableNestedBracketsOpen(t *testing.T) {
|
||||||
|
input := `[a[b]
|
||||||
|
zyx = 42`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableWhitespace(t *testing.T) {
|
||||||
|
input := `[invalid key]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTableWithPound(t *testing.T) {
|
||||||
|
input := `[key#group]
|
||||||
|
answer = 42`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTextAfterArrayEntries(t *testing.T) {
|
||||||
|
input := `array = [
|
||||||
|
"Is there life after an array separator?", No
|
||||||
|
"Entry"
|
||||||
|
]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTextAfterInteger(t *testing.T) {
|
||||||
|
input := `answer = 42 the ultimate answer?`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTextAfterString(t *testing.T) {
|
||||||
|
input := `string = "Is there life after strings?" No.`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTextAfterTable(t *testing.T) {
|
||||||
|
input := `[error] this shouldn't be here`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTextBeforeArraySeparator(t *testing.T) {
|
||||||
|
input := `array = [
|
||||||
|
"Is there life before an array separator?" No,
|
||||||
|
"Entry"
|
||||||
|
]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInvalidTextInArray(t *testing.T) {
|
||||||
|
input := `array = [
|
||||||
|
"Entry 1",
|
||||||
|
I don't belong,
|
||||||
|
"Entry 2",
|
||||||
|
]`
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidArrayEmpty(t *testing.T) {
|
||||||
|
input := `thevoid = [[[[[]]]]]`
|
||||||
|
jsonRef := `{
|
||||||
|
"thevoid": { "type": "array", "value": [
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "array", "value": []}
|
||||||
|
]}
|
||||||
|
]}
|
||||||
|
]}
|
||||||
|
]}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidArrayNospaces(t *testing.T) {
|
||||||
|
input := `ints = [1,2,3]`
|
||||||
|
jsonRef := `{
|
||||||
|
"ints": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "integer", "value": "1"},
|
||||||
|
{"type": "integer", "value": "2"},
|
||||||
|
{"type": "integer", "value": "3"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidArraysHetergeneous(t *testing.T) {
|
||||||
|
input := `mixed = [[1, 2], ["a", "b"], [1.1, 2.1]]`
|
||||||
|
jsonRef := `{
|
||||||
|
"mixed": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "integer", "value": "1"},
|
||||||
|
{"type": "integer", "value": "2"}
|
||||||
|
]},
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "string", "value": "a"},
|
||||||
|
{"type": "string", "value": "b"}
|
||||||
|
]},
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "float", "value": "1.1"},
|
||||||
|
{"type": "float", "value": "2.1"}
|
||||||
|
]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidArraysNested(t *testing.T) {
|
||||||
|
input := `nest = [["a"], ["b"]]`
|
||||||
|
jsonRef := `{
|
||||||
|
"nest": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "string", "value": "a"}
|
||||||
|
]},
|
||||||
|
{"type": "array", "value": [
|
||||||
|
{"type": "string", "value": "b"}
|
||||||
|
]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidArrays(t *testing.T) {
|
||||||
|
input := `ints = [1, 2, 3]
|
||||||
|
floats = [1.1, 2.1, 3.1]
|
||||||
|
strings = ["a", "b", "c"]
|
||||||
|
dates = [
|
||||||
|
1987-07-05T17:45:00Z,
|
||||||
|
1979-05-27T07:32:00Z,
|
||||||
|
2006-06-01T11:00:00Z,
|
||||||
|
]`
|
||||||
|
jsonRef := `{
|
||||||
|
"ints": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "integer", "value": "1"},
|
||||||
|
{"type": "integer", "value": "2"},
|
||||||
|
{"type": "integer", "value": "3"}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"floats": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "float", "value": "1.1"},
|
||||||
|
{"type": "float", "value": "2.1"},
|
||||||
|
{"type": "float", "value": "3.1"}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"strings": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "string", "value": "a"},
|
||||||
|
{"type": "string", "value": "b"},
|
||||||
|
{"type": "string", "value": "c"}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"dates": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "datetime", "value": "1987-07-05T17:45:00Z"},
|
||||||
|
{"type": "datetime", "value": "1979-05-27T07:32:00Z"},
|
||||||
|
{"type": "datetime", "value": "2006-06-01T11:00:00Z"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidBool(t *testing.T) {
|
||||||
|
input := `t = true
|
||||||
|
f = false`
|
||||||
|
jsonRef := `{
|
||||||
|
"f": {"type": "bool", "value": "false"},
|
||||||
|
"t": {"type": "bool", "value": "true"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidCommentsEverywhere(t *testing.T) {
|
||||||
|
input := `# Top comment.
|
||||||
|
# Top comment.
|
||||||
|
# Top comment.
|
||||||
|
|
||||||
|
# [no-extraneous-groups-please]
|
||||||
|
|
||||||
|
[group] # Comment
|
||||||
|
answer = 42 # Comment
|
||||||
|
# no-extraneous-keys-please = 999
|
||||||
|
# Inbetween comment.
|
||||||
|
more = [ # Comment
|
||||||
|
# What about multiple # comments?
|
||||||
|
# Can you handle it?
|
||||||
|
#
|
||||||
|
# Evil.
|
||||||
|
# Evil.
|
||||||
|
42, 42, # Comments within arrays are fun.
|
||||||
|
# What about multiple # comments?
|
||||||
|
# Can you handle it?
|
||||||
|
#
|
||||||
|
# Evil.
|
||||||
|
# Evil.
|
||||||
|
# ] Did I fool you?
|
||||||
|
] # Hopefully not.`
|
||||||
|
jsonRef := `{
|
||||||
|
"group": {
|
||||||
|
"answer": {"type": "integer", "value": "42"},
|
||||||
|
"more": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "integer", "value": "42"},
|
||||||
|
{"type": "integer", "value": "42"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidDatetime(t *testing.T) {
|
||||||
|
input := `bestdayever = 1987-07-05T17:45:00Z`
|
||||||
|
jsonRef := `{
|
||||||
|
"bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidEmpty(t *testing.T) {
|
||||||
|
input := ``
|
||||||
|
jsonRef := `{}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidExample(t *testing.T) {
|
||||||
|
input := `best-day-ever = 1987-07-05T17:45:00Z
|
||||||
|
|
||||||
|
[numtheory]
|
||||||
|
boring = false
|
||||||
|
perfection = [6, 28, 496]`
|
||||||
|
jsonRef := `{
|
||||||
|
"best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"},
|
||||||
|
"numtheory": {
|
||||||
|
"boring": {"type": "bool", "value": "false"},
|
||||||
|
"perfection": {
|
||||||
|
"type": "array",
|
||||||
|
"value": [
|
||||||
|
{"type": "integer", "value": "6"},
|
||||||
|
{"type": "integer", "value": "28"},
|
||||||
|
{"type": "integer", "value": "496"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidFloat(t *testing.T) {
|
||||||
|
input := `pi = 3.14
|
||||||
|
negpi = -3.14`
|
||||||
|
jsonRef := `{
|
||||||
|
"pi": {"type": "float", "value": "3.14"},
|
||||||
|
"negpi": {"type": "float", "value": "-3.14"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidImplicitAndExplicitAfter(t *testing.T) {
|
||||||
|
input := `[a.b.c]
|
||||||
|
answer = 42
|
||||||
|
|
||||||
|
[a]
|
||||||
|
better = 43`
|
||||||
|
jsonRef := `{
|
||||||
|
"a": {
|
||||||
|
"better": {"type": "integer", "value": "43"},
|
||||||
|
"b": {
|
||||||
|
"c": {
|
||||||
|
"answer": {"type": "integer", "value": "42"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidImplicitAndExplicitBefore(t *testing.T) {
|
||||||
|
input := `[a]
|
||||||
|
better = 43
|
||||||
|
|
||||||
|
[a.b.c]
|
||||||
|
answer = 42`
|
||||||
|
jsonRef := `{
|
||||||
|
"a": {
|
||||||
|
"better": {"type": "integer", "value": "43"},
|
||||||
|
"b": {
|
||||||
|
"c": {
|
||||||
|
"answer": {"type": "integer", "value": "42"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidImplicitGroups(t *testing.T) {
|
||||||
|
input := `[a.b.c]
|
||||||
|
answer = 42`
|
||||||
|
jsonRef := `{
|
||||||
|
"a": {
|
||||||
|
"b": {
|
||||||
|
"c": {
|
||||||
|
"answer": {"type": "integer", "value": "42"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidInteger(t *testing.T) {
|
||||||
|
input := `answer = 42
|
||||||
|
neganswer = -42`
|
||||||
|
jsonRef := `{
|
||||||
|
"answer": {"type": "integer", "value": "42"},
|
||||||
|
"neganswer": {"type": "integer", "value": "-42"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidKeyEqualsNospace(t *testing.T) {
|
||||||
|
input := `answer=42`
|
||||||
|
jsonRef := `{
|
||||||
|
"answer": {"type": "integer", "value": "42"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidKeySpace(t *testing.T) {
|
||||||
|
input := `"a b" = 1`
|
||||||
|
jsonRef := `{
|
||||||
|
"a b": {"type": "integer", "value": "1"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidKeySpecialChars(t *testing.T) {
|
||||||
|
input := "\"~!@$^&*()_+-`1234567890[]|/?><.,;:'\" = 1\n"
|
||||||
|
jsonRef := "{\n" +
|
||||||
|
" \"~!@$^&*()_+-`1234567890[]|/?><.,;:'\": {\n" +
|
||||||
|
" \"type\": \"integer\", \"value\": \"1\"\n" +
|
||||||
|
" }\n" +
|
||||||
|
"}\n"
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestValidLongFloat checks that high-precision floats round-trip without
// losing digits.
func TestValidLongFloat(t *testing.T) {
	input := `longpi = 3.141592653589793
neglongpi = -3.141592653589793`
	jsonRef := `{
    "longpi": {"type": "float", "value": "3.141592653589793"},
    "neglongpi": {"type": "float", "value": "-3.141592653589793"}
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidLongInteger checks parsing at the int64 boundaries
// (math.MaxInt64 and math.MinInt64).
func TestValidLongInteger(t *testing.T) {
	input := `answer = 9223372036854775807
neganswer = -9223372036854775808`
	jsonRef := `{
    "answer": {"type": "integer", "value": "9223372036854775807"},
    "neganswer": {"type": "integer", "value": "-9223372036854775808"}
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidMultilineString checks multi-line basic strings: empty forms,
// line-ending backslashes that trim whitespace/newlines, and that the three
// "equivalent" spellings all decode to the same sentence.
// NOTE(review): the continuation lines in the empty_four/equivalent_three
// fixtures may have carried leading whitespace in the original file; a
// line-ending backslash trims it either way, so the decoded values match.
func TestValidMultilineString(t *testing.T) {
	input := `multiline_empty_one = """"""
multiline_empty_two = """
"""
multiline_empty_three = """\
"""
multiline_empty_four = """\
\
\
"""

equivalent_one = "The quick brown fox jumps over the lazy dog."
equivalent_two = """
The quick brown \


fox jumps over \
the lazy dog."""

equivalent_three = """\
The quick brown \
fox jumps over \
the lazy dog.\
"""`
	jsonRef := `{
    "multiline_empty_one": {
        "type": "string",
        "value": ""
    },
    "multiline_empty_two": {
        "type": "string",
        "value": ""
    },
    "multiline_empty_three": {
        "type": "string",
        "value": ""
    },
    "multiline_empty_four": {
        "type": "string",
        "value": ""
    },
    "equivalent_one": {
        "type": "string",
        "value": "The quick brown fox jumps over the lazy dog."
    },
    "equivalent_two": {
        "type": "string",
        "value": "The quick brown fox jumps over the lazy dog."
    },
    "equivalent_three": {
        "type": "string",
        "value": "The quick brown fox jumps over the lazy dog."
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidRawMultilineString checks multi-line literal strings ('''...'''):
// embedded single quotes are kept, the newline immediately after the opening
// delimiter is dropped, and interior newlines are preserved.
func TestValidRawMultilineString(t *testing.T) {
	input := `oneline = '''This string has a ' quote character.'''
firstnl = '''
This string has a ' quote character.'''
multiline = '''
This string
has ' a quote character
and more than
one newline
in it.'''`
	jsonRef := `{
    "oneline": {
        "type": "string",
        "value": "This string has a ' quote character."
    },
    "firstnl": {
        "type": "string",
        "value": "This string has a ' quote character."
    },
    "multiline": {
        "type": "string",
        "value": "This string\nhas ' a quote character\nand more than\none newline\nin it."
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidRawString checks single-quoted literal strings: backslash
// sequences are NOT interpreted, so each \x stays a literal backslash
// followed by a letter (the JSON reference escapes the backslash as \\).
func TestValidRawString(t *testing.T) {
	input := `backspace = 'This string has a \b backspace character.'
tab = 'This string has a \t tab character.'
newline = 'This string has a \n new line character.'
formfeed = 'This string has a \f form feed character.'
carriage = 'This string has a \r carriage return character.'
slash = 'This string has a \/ slash character.'
backslash = 'This string has a \\ backslash character.'`
	jsonRef := `{
    "backspace": {
        "type": "string",
        "value": "This string has a \\b backspace character."
    },
    "tab": {
        "type": "string",
        "value": "This string has a \\t tab character."
    },
    "newline": {
        "type": "string",
        "value": "This string has a \\n new line character."
    },
    "formfeed": {
        "type": "string",
        "value": "This string has a \\f form feed character."
    },
    "carriage": {
        "type": "string",
        "value": "This string has a \\r carriage return character."
    },
    "slash": {
        "type": "string",
        "value": "This string has a \\/ slash character."
    },
    "backslash": {
        "type": "string",
        "value": "This string has a \\\\ backslash character."
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
func TestValidStringEmpty(t *testing.T) {
|
||||||
|
input := `answer = ""`
|
||||||
|
jsonRef := `{
|
||||||
|
"answer": {
|
||||||
|
"type": "string",
|
||||||
|
"value": ""
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestValidStringEscapes checks escape handling in basic strings: the
// standard short escapes decode to their control characters (referenced in
// JSON as \u00XX), and the "notunicode" cases verify that an escaped or
// split backslash does not start a unicode escape.
func TestValidStringEscapes(t *testing.T) {
	input := `backspace = "This string has a \b backspace character."
tab = "This string has a \t tab character."
newline = "This string has a \n new line character."
formfeed = "This string has a \f form feed character."
carriage = "This string has a \r carriage return character."
quote = "This string has a \" quote character."
backslash = "This string has a \\ backslash character."
notunicode1 = "This string does not have a unicode \\u escape."
notunicode2 = "This string does not have a unicode \u005Cu escape."
notunicode3 = "This string does not have a unicode \\u0075 escape."
notunicode4 = "This string does not have a unicode \\\u0075 escape."`
	jsonRef := `{
    "backspace": {
        "type": "string",
        "value": "This string has a \u0008 backspace character."
    },
    "tab": {
        "type": "string",
        "value": "This string has a \u0009 tab character."
    },
    "newline": {
        "type": "string",
        "value": "This string has a \u000A new line character."
    },
    "formfeed": {
        "type": "string",
        "value": "This string has a \u000C form feed character."
    },
    "carriage": {
        "type": "string",
        "value": "This string has a \u000D carriage return character."
    },
    "quote": {
        "type": "string",
        "value": "This string has a \u0022 quote character."
    },
    "backslash": {
        "type": "string",
        "value": "This string has a \u005C backslash character."
    },
    "notunicode1": {
        "type": "string",
        "value": "This string does not have a unicode \\u escape."
    },
    "notunicode2": {
        "type": "string",
        "value": "This string does not have a unicode \u005Cu escape."
    },
    "notunicode3": {
        "type": "string",
        "value": "This string does not have a unicode \\u0075 escape."
    },
    "notunicode4": {
        "type": "string",
        "value": "This string does not have a unicode \\\u0075 escape."
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
func TestValidStringSimple(t *testing.T) {
|
||||||
|
input := `answer = "You are not drinking enough whisky."`
|
||||||
|
jsonRef := `{
|
||||||
|
"answer": {
|
||||||
|
"type": "string",
|
||||||
|
"value": "You are not drinking enough whisky."
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestValidStringWithPound checks that '#' inside a quoted string is not a
// comment, while a '#' after the value starts one.
func TestValidStringWithPound(t *testing.T) {
	input := `pound = "We see no # comments here."
poundcomment = "But there are # some comments here." # Did I # mess you up?`
	jsonRef := `{
    "pound": {"type": "string", "value": "We see no # comments here."},
    "poundcomment": {
        "type": "string",
        "value": "But there are # some comments here."
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidTableArrayImplicit checks that [[albums.songs]] implicitly
// creates the parent "albums" table.
func TestValidTableArrayImplicit(t *testing.T) {
	input := `[[albums.songs]]
name = "Glory Days"`
	jsonRef := `{
    "albums": {
        "songs": [
            {"name": {"type": "string", "value": "Glory Days"}}
        ]
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidTableArrayMany checks that repeated [[people]] headers append
// elements to the same array of tables, in document order.
func TestValidTableArrayMany(t *testing.T) {
	input := `[[people]]
first_name = "Bruce"
last_name = "Springsteen"

[[people]]
first_name = "Eric"
last_name = "Clapton"

[[people]]
first_name = "Bob"
last_name = "Seger"`
	jsonRef := `{
    "people": [
        {
            "first_name": {"type": "string", "value": "Bruce"},
            "last_name": {"type": "string", "value": "Springsteen"}
        },
        {
            "first_name": {"type": "string", "value": "Eric"},
            "last_name": {"type": "string", "value": "Clapton"}
        },
        {
            "first_name": {"type": "string", "value": "Bob"},
            "last_name": {"type": "string", "value": "Seger"}
        }
    ]
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidTableArrayNest checks nested arrays of tables: each
// [[albums.songs]] attaches to the most recently opened [[albums]] element.
func TestValidTableArrayNest(t *testing.T) {
	input := `[[albums]]
name = "Born to Run"

[[albums.songs]]
name = "Jungleland"

[[albums.songs]]
name = "Meeting Across the River"

[[albums]]
name = "Born in the USA"

[[albums.songs]]
name = "Glory Days"

[[albums.songs]]
name = "Dancing in the Dark"`
	jsonRef := `{
    "albums": [
        {
            "name": {"type": "string", "value": "Born to Run"},
            "songs": [
                {"name": {"type": "string", "value": "Jungleland"}},
                {"name": {"type": "string", "value": "Meeting Across the River"}}
            ]
        },
        {
            "name": {"type": "string", "value": "Born in the USA"},
            "songs": [
                {"name": {"type": "string", "value": "Glory Days"}},
                {"name": {"type": "string", "value": "Dancing in the Dark"}}
            ]
        }
    ]
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidTableArrayOne checks that a single [[people]] header yields a
// one-element array of tables.
func TestValidTableArrayOne(t *testing.T) {
	input := `[[people]]
first_name = "Bruce"
last_name = "Springsteen"`
	jsonRef := `{
    "people": [
        {
            "first_name": {"type": "string", "value": "Bruce"},
            "last_name": {"type": "string", "value": "Springsteen"}
        }
    ]
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
func TestValidTableEmpty(t *testing.T) {
|
||||||
|
input := `[a]`
|
||||||
|
jsonRef := `{
|
||||||
|
"a": {}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidTableSubEmpty(t *testing.T) {
|
||||||
|
input := `[a]
|
||||||
|
[a.b]`
|
||||||
|
jsonRef := `{
|
||||||
|
"a": { "b": {} }
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidTableWhitespace(t *testing.T) {
|
||||||
|
input := `["valid key"]`
|
||||||
|
jsonRef := `{
|
||||||
|
"valid key": {}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestValidTableWithPound checks that '#' inside a quoted table name does
// not start a comment.
func TestValidTableWithPound(t *testing.T) {
	input := `["key#group"]
answer = 42`
	jsonRef := `{
    "key#group": {
        "answer": {"type": "integer", "value": "42"}
    }
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
// TestValidUnicodeEscape checks that both the 4-digit (\u) and 8-digit (\U)
// unicode escapes decode to the same code point (U+03B4, δ).
func TestValidUnicodeEscape(t *testing.T) {
	input := `answer4 = "\u03B4"
answer8 = "\U000003B4"`
	jsonRef := `{
    "answer4": {"type": "string", "value": "\u03B4"},
    "answer8": {"type": "string", "value": "\u03B4"}
}`
	testgenValid(t, input, jsonRef)
}
|
||||||
|
|
||||||
|
func TestValidUnicodeLiteral(t *testing.T) {
|
||||||
|
input := `answer = "δ"`
|
||||||
|
jsonRef := `{
|
||||||
|
"answer": {"type": "string", "value": "δ"}
|
||||||
|
}`
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
@@ -0,0 +1,142 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var kindToType = [reflect.String + 1]reflect.Type{
|
||||||
|
reflect.Bool: reflect.TypeOf(true),
|
||||||
|
reflect.String: reflect.TypeOf(""),
|
||||||
|
reflect.Float32: reflect.TypeOf(float64(1)),
|
||||||
|
reflect.Float64: reflect.TypeOf(float64(1)),
|
||||||
|
reflect.Int: reflect.TypeOf(int64(1)),
|
||||||
|
reflect.Int8: reflect.TypeOf(int64(1)),
|
||||||
|
reflect.Int16: reflect.TypeOf(int64(1)),
|
||||||
|
reflect.Int32: reflect.TypeOf(int64(1)),
|
||||||
|
reflect.Int64: reflect.TypeOf(int64(1)),
|
||||||
|
reflect.Uint: reflect.TypeOf(uint64(1)),
|
||||||
|
reflect.Uint8: reflect.TypeOf(uint64(1)),
|
||||||
|
reflect.Uint16: reflect.TypeOf(uint64(1)),
|
||||||
|
reflect.Uint32: reflect.TypeOf(uint64(1)),
|
||||||
|
reflect.Uint64: reflect.TypeOf(uint64(1)),
|
||||||
|
}
|
||||||
|
|
||||||
|
// typeFor returns a reflect.Type for a reflect.Kind, or nil if none is found.
|
||||||
|
// supported values:
|
||||||
|
// string, bool, int64, uint64, float64, time.Time, int, int8, int16, int32, uint, uint8, uint16, uint32, float32
|
||||||
|
func typeFor(k reflect.Kind) reflect.Type {
|
||||||
|
if k > 0 && int(k) < len(kindToType) {
|
||||||
|
return kindToType[k]
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// simpleValueCoercion normalizes a primitive Go value into one of the
// canonical types stored in a tree: string, bool, int64, uint64, float64 or
// time.Time. Narrower numeric types are widened; values implementing
// fmt.Stringer (checked last, so it never shadows the concrete cases) are
// rendered via String(). Anything else is rejected with an error.
func simpleValueCoercion(object interface{}) (interface{}, error) {
	switch value := object.(type) {
	case string, bool, int64, uint64, float64, time.Time:
		// Already canonical: pass through untouched.
		return value, nil
	case int:
		return int64(value), nil
	case int8:
		return int64(value), nil
	case int16:
		return int64(value), nil
	case int32:
		return int64(value), nil
	case uint:
		return uint64(value), nil
	case uint8:
		return uint64(value), nil
	case uint16:
		return uint64(value), nil
	case uint32:
		return uint64(value), nil
	case float32:
		return float64(value), nil
	case fmt.Stringer:
		return value.String(), nil
	}
	return nil, fmt.Errorf("cannot convert type %T to Tree", object)
}
|
||||||
|
|
||||||
|
// sliceToTree converts a slice or array into either []*Tree (when elements
// are maps, i.e. an array of tables) or a *tomlValue wrapping a typed slice
// of coerced simple values.
func sliceToTree(object interface{}) (interface{}, error) {
	// arrays are a bit tricky, since they can represent either a
	// collection of simple values, which is represented by one
	// *tomlValue, or an array of tables, which is represented by an
	// array of *Tree.

	// holding the assumption that this function is called from toTree only when value.Kind() is Array or Slice
	value := reflect.ValueOf(object)
	insideType := value.Type().Elem()
	length := value.Len()
	if length > 0 {
		// Probe the dynamic type of the first element: the declared element
		// type may be interface{}. NOTE(review): this assumes a homogeneous
		// slice — mixed element types would fail coercion below.
		insideType = reflect.ValueOf(value.Index(0).Interface()).Type()
	}
	if insideType.Kind() == reflect.Map {
		// this is considered as an array of tables
		tablesArray := make([]*Tree, 0, length)
		for i := 0; i < length; i++ {
			table := value.Index(i)
			tree, err := toTree(table.Interface())
			if err != nil {
				return nil, err
			}
			tablesArray = append(tablesArray, tree.(*Tree))
		}
		return tablesArray, nil
	}

	// Map the element kind to its canonical storage type (e.g. int -> int64);
	// fall back to the element type itself for kinds typeFor doesn't cover.
	sliceType := typeFor(insideType.Kind())
	if sliceType == nil {
		sliceType = insideType
	}

	arrayValue := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length)

	for i := 0; i < length; i++ {
		val := value.Index(i).Interface()
		simpleValue, err := simpleValueCoercion(val)
		if err != nil {
			return nil, err
		}
		arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue))
	}
	return &tomlValue{value: arrayValue.Interface(), position: Position{}}, nil
}
|
||||||
|
|
||||||
|
// toTree converts a plain Go value into the internal representation: maps
// become *Tree, slices/arrays are handled by sliceToTree, and anything else
// is coerced to a simple value wrapped in *tomlValue.
func toTree(object interface{}) (interface{}, error) {
	value := reflect.ValueOf(object)

	if value.Kind() == reflect.Map {
		values := map[string]interface{}{}
		keys := value.MapKeys()
		for _, key := range keys {
			if key.Kind() != reflect.String {
				// Reject keys whose kind is not string. (The inner assertion
				// cannot succeed when Kind() != String, so this branch always
				// returns the error; named string types pass the outer check.)
				if _, ok := key.Interface().(string); !ok {
					return nil, fmt.Errorf("map key needs to be a string, not %T (%v)", key.Interface(), key.Kind())
				}
			}

			v := value.MapIndex(key)
			newValue, err := toTree(v.Interface())
			if err != nil {
				return nil, err
			}
			values[key.String()] = newValue
		}
		return &Tree{values: values, position: Position{}}, nil
	}

	if value.Kind() == reflect.Array || value.Kind() == reflect.Slice {
		return sliceToTree(object)
	}

	simpleValue, err := simpleValueCoercion(object)
	if err != nil {
		return nil, err
	}
	return &tomlValue{value: simpleValue, position: Position{}}, nil
}
|
||||||
@@ -0,0 +1,126 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strconv"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// customString exercises map keys declared with a named string type.
type customString string

// stringer exercises values that only implement fmt.Stringer.
type stringer struct{}

// String implements fmt.Stringer; TreeFromMap should store this value as
// the string "stringer".
func (s stringer) String() string {
	return "stringer"
}
|
||||||
|
|
||||||
|
// validate recursively walks a converted tree and fails the test if any
// leaf holds a type outside the canonical set produced by TreeFromMap
// (scalars, time.Time, and slices thereof).
func validate(t *testing.T, path string, object interface{}) {
	switch o := object.(type) {
	case *Tree:
		for key, tree := range o.values {
			validate(t, path+"."+key, tree)
		}
	case []*Tree:
		for index, tree := range o {
			validate(t, path+"."+strconv.Itoa(index), tree)
		}
	case *tomlValue:
		switch o.value.(type) {
		case int64, uint64, bool, string, float64, time.Time,
			[]int64, []uint64, []bool, []string, []float64, []time.Time:
			// Canonical leaf types: nothing to do.
		default:
			t.Fatalf("tomlValue at key %s containing incorrect type %T", path, o.value)
		}
	default:
		t.Fatalf("value at key %s is of incorrect type %T", path, object)
	}
	t.Logf("validation ok %s as %T", path, object)
}
|
||||||
|
|
||||||
|
// validateTree validates a whole tree starting from an empty key path.
func validateTree(t *testing.T, tree *Tree) {
	validate(t, "", tree)
}
|
||||||
|
|
||||||
|
// TestTreeCreateToTree converts a map covering every supported input shape
// (narrow ints/uints, float32, time.Time, fmt.Stringer, nested maps, typed
// slices, arrays of tables, and a named-string map key) and validates that
// the resulting tree only stores canonical types.
func TestTreeCreateToTree(t *testing.T) {
	data := map[string]interface{}{
		"a_string": "bar",
		"an_int":   42,
		"time":     time.Now(),
		"int8":     int8(2),
		"int16":    int16(2),
		"int32":    int32(2),
		"uint8":    uint8(2),
		"uint16":   uint16(2),
		"uint32":   uint32(2),
		"float32":  float32(2),
		"a_bool":   false,
		"stringer": stringer{},
		"nested": map[string]interface{}{
			"foo": "bar",
		},
		"array":                 []string{"a", "b", "c"},
		"array_uint":            []uint{uint(1), uint(2)},
		"array_table":           []map[string]interface{}{{"sub_map": 52}},
		"array_times":           []time.Time{time.Now(), time.Now()},
		"map_times":             map[string]time.Time{"now": time.Now()},
		"custom_string_map_key": map[customString]interface{}{customString("custom"): "custom"},
	}
	tree, err := TreeFromMap(data)
	if err != nil {
		t.Fatal("unexpected error:", err)
	}
	validateTree(t, tree)
}
|
||||||
|
|
||||||
|
func TestTreeCreateToTreeInvalidLeafType(t *testing.T) {
|
||||||
|
_, err := TreeFromMap(map[string]interface{}{"foo": t})
|
||||||
|
expected := "cannot convert type *testing.T to Tree"
|
||||||
|
if err.Error() != expected {
|
||||||
|
t.Fatalf("expected error %s, got %s", expected, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeCreateToTreeInvalidMapKeyType(t *testing.T) {
|
||||||
|
_, err := TreeFromMap(map[string]interface{}{"foo": map[int]interface{}{2: 1}})
|
||||||
|
expected := "map key needs to be a string, not int (int)"
|
||||||
|
if err.Error() != expected {
|
||||||
|
t.Fatalf("expected error %s, got %s", expected, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeCreateToTreeInvalidArrayMemberType(t *testing.T) {
|
||||||
|
_, err := TreeFromMap(map[string]interface{}{"foo": []*testing.T{t}})
|
||||||
|
expected := "cannot convert type *testing.T to Tree"
|
||||||
|
if err.Error() != expected {
|
||||||
|
t.Fatalf("expected error %s, got %s", expected, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeCreateToTreeInvalidTableGroupType(t *testing.T) {
|
||||||
|
_, err := TreeFromMap(map[string]interface{}{"foo": []map[string]interface{}{{"hello": t}}})
|
||||||
|
expected := "cannot convert type *testing.T to Tree"
|
||||||
|
if err.Error() != expected {
|
||||||
|
t.Fatalf("expected error %s, got %s", expected, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestRoundTripArrayOfTables checks Load -> ToMap -> TreeFromMap -> String
// reproduces the original document (including the writer's two-space
// indentation) for an array of tables.
func TestRoundTripArrayOfTables(t *testing.T) {
	orig := "\n[[stuff]]\n  name = \"foo\"\n  things = [\"a\",\"b\"]\n"
	tree, err := Load(orig)
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}

	m := tree.ToMap()

	tree, err = TreeFromMap(m)
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	want := orig
	got := tree.String()

	if got != want {
		t.Errorf("want:\n%s\ngot:\n%s", want, got)
	}
}
|
||||||
@@ -0,0 +1,469 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"math"
|
||||||
|
"math/big"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// valueComplexity classifies a tree entry for serialization ordering:
// simple key/value pairs are written before sub-tables and table arrays.
type valueComplexity int

const (
	valueSimple valueComplexity = iota + 1
	valueComplex
)

// sortNode pairs a key with its complexity so keys can be ordered for
// output.
type sortNode struct {
	// key is the entry's name within its parent tree.
	key string
	// complexity determines which serialization pass writes the entry.
	complexity valueComplexity
}
|
||||||
|
|
||||||
|
// encodeMultilineTomlString encodes a string to a TOML-compliant multi-line
// string value. It is a clone of encodeTomlString, except that whitespace
// characters (tab, newline, carriage return) are preserved verbatim and
// quotation marks and backslashes are not escaped.
func encodeMultilineTomlString(value string) string {
	var b bytes.Buffer

	for _, rr := range value {
		switch rr {
		case '\b':
			b.WriteString(`\b`)
		case '\t':
			b.WriteString("\t")
		case '\n':
			b.WriteString("\n")
		case '\f':
			b.WriteString(`\f`)
		case '\r':
			b.WriteString("\r")
		case '"':
			b.WriteString(`"`)
		case '\\':
			b.WriteString(`\`)
		default:
			// Escape remaining control characters (U+0000..U+001F), which
			// TOML forbids raw in strings. Compare the rune itself: the old
			// uint16 truncation misclassified code points such as U+10001
			// (low 16 bits 0x0001) as control characters, and `< 0x001F`
			// left U+001F unescaped.
			if rr < 0x20 {
				b.WriteString(fmt.Sprintf("\\u%0.4X", rr))
			} else {
				b.WriteRune(rr)
			}
		}
	}
	return b.String()
}
|
||||||
|
|
||||||
|
// encodeTomlString encodes a string to a TOML-compliant basic string value:
// the short escapes (\b \t \n \f \r \" \\) are emitted for their characters,
// other control characters become \u00XX escapes, and everything else is
// copied through unchanged.
func encodeTomlString(value string) string {
	var b bytes.Buffer

	for _, rr := range value {
		switch rr {
		case '\b':
			b.WriteString(`\b`)
		case '\t':
			b.WriteString(`\t`)
		case '\n':
			b.WriteString(`\n`)
		case '\f':
			b.WriteString(`\f`)
		case '\r':
			b.WriteString(`\r`)
		case '"':
			b.WriteString(`\"`)
		case '\\':
			b.WriteString(`\\`)
		default:
			// Escape remaining control characters (U+0000..U+001F), which
			// TOML forbids raw in basic strings. Compare the rune itself:
			// the old uint16 truncation misclassified code points such as
			// U+10001 (low 16 bits 0x0001) as control characters, and
			// `< 0x001F` left U+001F unescaped.
			if rr < 0x20 {
				b.WriteString(fmt.Sprintf("\\u%0.4X", rr))
			} else {
				b.WriteRune(rr)
			}
		}
	}
	return b.String()
}
|
||||||
|
|
||||||
|
func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElementPerLine bool) (string, error) {
|
||||||
|
// this interface check is added to dereference the change made in the writeTo function.
|
||||||
|
// That change was made to allow this function to see formatting options.
|
||||||
|
tv, ok := v.(*tomlValue)
|
||||||
|
if ok {
|
||||||
|
v = tv.value
|
||||||
|
} else {
|
||||||
|
tv = &tomlValue{}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch value := v.(type) {
|
||||||
|
case uint64:
|
||||||
|
return strconv.FormatUint(value, 10), nil
|
||||||
|
case int64:
|
||||||
|
return strconv.FormatInt(value, 10), nil
|
||||||
|
case float64:
|
||||||
|
// Default bit length is full 64
|
||||||
|
bits := 64
|
||||||
|
// Float panics if nan is used
|
||||||
|
if !math.IsNaN(value) {
|
||||||
|
// if 32 bit accuracy is enough to exactly show, use 32
|
||||||
|
_, acc := big.NewFloat(value).Float32()
|
||||||
|
if acc == big.Exact {
|
||||||
|
bits = 32
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if math.Trunc(value) == value {
|
||||||
|
return strings.ToLower(strconv.FormatFloat(value, 'f', 1, bits)), nil
|
||||||
|
}
|
||||||
|
return strings.ToLower(strconv.FormatFloat(value, 'f', -1, bits)), nil
|
||||||
|
case string:
|
||||||
|
if tv.multiline {
|
||||||
|
return "\"\"\"\n" + encodeMultilineTomlString(value) + "\"\"\"", nil
|
||||||
|
}
|
||||||
|
return "\"" + encodeTomlString(value) + "\"", nil
|
||||||
|
case []byte:
|
||||||
|
b, _ := v.([]byte)
|
||||||
|
return tomlValueStringRepresentation(string(b), indent, arraysOneElementPerLine)
|
||||||
|
case bool:
|
||||||
|
if value {
|
||||||
|
return "true", nil
|
||||||
|
}
|
||||||
|
return "false", nil
|
||||||
|
case time.Time:
|
||||||
|
return value.Format(time.RFC3339), nil
|
||||||
|
case nil:
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
rv := reflect.ValueOf(v)
|
||||||
|
|
||||||
|
if rv.Kind() == reflect.Slice {
|
||||||
|
var values []string
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
item := rv.Index(i).Interface()
|
||||||
|
itemRepr, err := tomlValueStringRepresentation(item, indent, arraysOneElementPerLine)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
values = append(values, itemRepr)
|
||||||
|
}
|
||||||
|
if arraysOneElementPerLine && len(values) > 1 {
|
||||||
|
stringBuffer := bytes.Buffer{}
|
||||||
|
valueIndent := indent + ` ` // TODO: move that to a shared encoder state
|
||||||
|
|
||||||
|
stringBuffer.WriteString("[\n")
|
||||||
|
|
||||||
|
for _, value := range values {
|
||||||
|
stringBuffer.WriteString(valueIndent)
|
||||||
|
stringBuffer.WriteString(value)
|
||||||
|
stringBuffer.WriteString(`,`)
|
||||||
|
stringBuffer.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
stringBuffer.WriteString(indent + "]")
|
||||||
|
|
||||||
|
return stringBuffer.String(), nil
|
||||||
|
}
|
||||||
|
return "[" + strings.Join(values, ",") + "]", nil
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("unsupported value type %T: %v", v, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTreeArrayLine(trees []*Tree) (line int) {
|
||||||
|
// get lowest line number that is not 0
|
||||||
|
for _, tv := range trees {
|
||||||
|
if tv.position.Line < line || line == 0 {
|
||||||
|
line = tv.position.Line
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// sortByLines returns the keys of t as sortNodes ordered by the source line
// on which each value was defined, so serialization preserves the original
// document order.
// NOTE(review): entries are keyed by line number in m — this assumes no two
// values of t share a line, which holds for values produced by the parser.
func sortByLines(t *Tree) (vals []sortNode) {
	var (
		line  int
		lines []int
		tv    *Tree
		tom   *tomlValue
		node  sortNode
	)
	vals = make([]sortNode, 0)
	m := make(map[int]sortNode)

	for k := range t.values {
		v := t.values[k]
		switch v.(type) {
		case *Tree:
			tv = v.(*Tree)
			line = tv.position.Line
			node = sortNode{key: k, complexity: valueComplex}
		case []*Tree:
			// A table array's line is the first line of any of its members.
			line = getTreeArrayLine(v.([]*Tree))
			node = sortNode{key: k, complexity: valueComplex}
		default:
			tom = v.(*tomlValue)
			line = tom.position.Line
			node = sortNode{key: k, complexity: valueSimple}
		}
		lines = append(lines, line)
		vals = append(vals, node)
		m[line] = node
	}
	sort.Ints(lines)

	// Rewrite vals in ascending line order.
	for i, line := range lines {
		vals[i] = m[line]
	}

	return vals
}
|
||||||
|
|
||||||
|
// sortAlphabetical returns the keys of t as sortNodes with simple values
// first (alphabetically), followed by complex values (alphabetically), so
// key/value pairs are serialized before sub-tables.
func sortAlphabetical(t *Tree) (vals []sortNode) {
	var (
		node     sortNode
		simpVals []string
		compVals []string
	)
	vals = make([]sortNode, 0)
	m := make(map[string]sortNode)

	for k := range t.values {
		v := t.values[k]
		switch v.(type) {
		case *Tree, []*Tree:
			node = sortNode{key: k, complexity: valueComplex}
			compVals = append(compVals, node.key)
		default:
			node = sortNode{key: k, complexity: valueSimple}
			simpVals = append(simpVals, node.key)
		}
		vals = append(vals, node)
		m[node.key] = node
	}

	// Simples first to match previous implementation
	sort.Strings(simpVals)
	i := 0
	for _, key := range simpVals {
		vals[i] = m[key]
		i++
	}

	sort.Strings(compVals)
	for _, key := range compVals {
		vals[i] = m[key]
		i++
	}

	return vals
}
|
||||||
|
|
||||||
|
// writeTo serializes t to w using the default alphabetical key ordering.
func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
	return t.writeToOrdered(w, indent, keyspace, bytesCount, arraysOneElementPerLine, OrderAlphabetical)
}
|
||||||
|
|
||||||
|
// writeToOrdered serializes the tree to w as TOML.
//
//   - indent is the leading whitespace for this nesting level; each sub-table
//     level appends two more spaces.
//   - keyspace is the dotted path of the enclosing tables ("" at the root);
//     it is prepended to table names so headers like [a.b] come out fully
//     qualified.
//   - bytesCount is the running total of bytes written so far; the updated
//     total is returned even on error, so callers can report partial output.
//   - ord selects key ordering: OrderPreserve replays source order via
//     sortByLines, anything else sorts alphabetically (simple values first,
//     then tables) via sortAlphabetical.
func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool, ord marshalOrder) (int64, error) {
	var orderedVals []sortNode

	// Pick the key iteration order once, up front.
	switch ord {
	case OrderPreserve:
		orderedVals = sortByLines(t)
	default:
		orderedVals = sortAlphabetical(t)
	}

	for _, node := range orderedVals {
		switch node.complexity {
		case valueComplex:
			// Sub-table or array of sub-tables.
			k := node.key
			v := t.values[k]

			combinedKey := k
			if keyspace != "" {
				combinedKey = keyspace + "." + combinedKey
			}
			// Table-level commented flag: prefix headers with "# ".
			var commented string
			if t.commented {
				commented = "# "
			}

			// NOTE: this inner `node` deliberately shadows the loop's
			// sortNode with the concretely-typed value.
			switch node := v.(type) {
			// node has to be of those two types given how keys are sorted above
			case *Tree:
				tv, ok := t.values[k].(*Tree)
				if !ok {
					return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
				}
				if tv.comment != "" {
					// Re-prefix every comment line so multi-line comments
					// stay aligned and commented at this indent level.
					comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1)
					start := "# "
					if strings.HasPrefix(comment, "#") {
						start = ""
					}
					writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment)
					bytesCount += int64(writtenBytesCountComment)
					if errc != nil {
						return bytesCount, errc
					}
				}
				// Emit the [table] header, then recurse for its contents.
				writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")
				bytesCount += int64(writtenBytesCount)
				if err != nil {
					return bytesCount, err
				}
				bytesCount, err = node.writeToOrdered(w, indent+"  ", combinedKey, bytesCount, arraysOneElementPerLine, ord)
				if err != nil {
					return bytesCount, err
				}
			case []*Tree:
				// Array of tables: one [[header]] + body per element.
				for _, subTree := range node {
					writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
					bytesCount += int64(writtenBytesCount)
					if err != nil {
						return bytesCount, err
					}

					bytesCount, err = subTree.writeToOrdered(w, indent+"  ", combinedKey, bytesCount, arraysOneElementPerLine, ord)
					if err != nil {
						return bytesCount, err
					}
				}
			}
		default: // Simple
			// Leaf value: must be a *tomlValue given how keys are sorted.
			k := node.key
			v, ok := t.values[k].(*tomlValue)
			if !ok {
				return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
			}

			repr, err := tomlValueStringRepresentation(v, indent, arraysOneElementPerLine)
			if err != nil {
				return bytesCount, err
			}

			if v.comment != "" {
				// Same multi-line re-prefixing as for tables, but with a
				// trailing newline because the key line follows directly.
				comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1)
				start := "# "
				if strings.HasPrefix(comment, "#") {
					start = ""
				}
				writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n")
				bytesCount += int64(writtenBytesCountComment)
				if errc != nil {
					return bytesCount, errc
				}
			}

			// Per-value commented flag (distinct from the tree-level one).
			var commented string
			if v.commented {
				commented = "# "
			}
			quotedKey := quoteKeyIfNeeded(k)
			writtenBytesCount, err := writeStrings(w, indent, commented, quotedKey, " = ", repr, "\n")
			bytesCount += int64(writtenBytesCount)
			if err != nil {
				return bytesCount, err
			}
		}
	}

	return bytesCount, nil
}
|
||||||
|
|
||||||
|
// quote a key if it does not fit the bare key format (A-Za-z0-9_-)
|
||||||
|
// quoted keys use the same rules as strings
|
||||||
|
func quoteKeyIfNeeded(k string) string {
|
||||||
|
// when encoding a map with the 'quoteMapKeys' option enabled, the tree will contain
|
||||||
|
// keys that have already been quoted.
|
||||||
|
// not an ideal situation, but good enough of a stop gap.
|
||||||
|
if len(k) >= 2 && k[0] == '"' && k[len(k)-1] == '"' {
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
isBare := true
|
||||||
|
for _, r := range k {
|
||||||
|
if !isValidBareChar(r) {
|
||||||
|
isBare = false
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isBare {
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
return quoteKey(k)
|
||||||
|
}
|
||||||
|
|
||||||
|
// quoteKey wraps k in double quotes, escaping its content with the same
// rules used for TOML basic strings.
func quoteKey(k string) string {
	return "\"" + encodeTomlString(k) + "\""
}
|
||||||
|
|
||||||
|
func writeStrings(w io.Writer, s ...string) (int, error) {
|
||||||
|
var n int
|
||||||
|
for i := range s {
|
||||||
|
b, err := io.WriteString(w, s[i])
|
||||||
|
n += b
|
||||||
|
if err != nil {
|
||||||
|
return n, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return n, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteTo encode the Tree as Toml and writes it to the writer w.
// Returns the number of bytes written in case of success, or an error if anything happened.
func (t *Tree) WriteTo(w io.Writer) (int64, error) {
	// Root invocation: no indent, no parent keyspace, zero bytes written so
	// far, and one-element-per-line arrays disabled.
	return t.writeTo(w, "", "", 0, false)
}
|
||||||
|
|
||||||
|
// ToTomlString generates a human-readable representation of the current tree.
|
||||||
|
// Output spans multiple lines, and is suitable for ingest by a TOML parser.
|
||||||
|
// If the conversion cannot be performed, ToString returns a non-nil error.
|
||||||
|
func (t *Tree) ToTomlString() (string, error) {
|
||||||
|
b, err := t.Marshal()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(b), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// String generates a human-readable representation of the current tree.
|
||||||
|
// Alias of ToString. Present to implement the fmt.Stringer interface.
|
||||||
|
func (t *Tree) String() string {
|
||||||
|
result, _ := t.ToTomlString()
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToMap recursively generates a representation of the tree using Go built-in structures.
|
||||||
|
// The following types are used:
|
||||||
|
//
|
||||||
|
// * bool
|
||||||
|
// * float64
|
||||||
|
// * int64
|
||||||
|
// * string
|
||||||
|
// * uint64
|
||||||
|
// * time.Time
|
||||||
|
// * map[string]interface{} (where interface{} is any of this list)
|
||||||
|
// * []interface{} (where interface{} is any of this list)
|
||||||
|
func (t *Tree) ToMap() map[string]interface{} {
|
||||||
|
result := map[string]interface{}{}
|
||||||
|
|
||||||
|
for k, v := range t.values {
|
||||||
|
switch node := v.(type) {
|
||||||
|
case []*Tree:
|
||||||
|
var array []interface{}
|
||||||
|
for _, item := range node {
|
||||||
|
array = append(array, item.ToMap())
|
||||||
|
}
|
||||||
|
result[k] = array
|
||||||
|
case *Tree:
|
||||||
|
result[k] = node.ToMap()
|
||||||
|
case *tomlValue:
|
||||||
|
result[k] = node.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
@@ -0,0 +1,400 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// failingWriter is an io.Writer test double that accepts bytes until a total
// of failAt bytes have been written, then truncates the offending write and
// returns an error. It is used to exercise the error-propagation paths of
// Tree.WriteTo.
type failingWriter struct {
	failAt  int          // total number of bytes accepted before failing
	written int          // bytes successfully written so far
	buffer  bytes.Buffer // accumulates everything successfully written
}

// Write implements io.Writer. The whole of p is accepted while it still fits
// under the failAt budget; otherwise only the portion that fits is written
// and an error is returned, per the io.Writer contract (non-nil error when
// n < len(p)).
//
// Bug fix: the previous condition (failAt-written-len(p) > len(p)) made the
// writer fail prematurely whenever len(p) <= remaining <= 2*len(p), i.e. it
// rejected writes that actually fit before the failure point. The correct
// comparison is remaining capacity against len(p).
func (f *failingWriter) Write(p []byte) (n int, err error) {
	count := len(p)
	remaining := f.failAt - f.written
	if remaining < 0 {
		remaining = 0
	}
	if remaining >= count {
		// The whole chunk fits under the budget.
		f.written += count
		f.buffer.Write(p)
		return count, nil
	}

	// Only a prefix of p fits: write it, mark the budget exhausted, fail.
	f.buffer.Write(p[:remaining])
	f.written = f.failAt
	return remaining, fmt.Errorf("failingWriter failed after writing %d bytes", f.written)
}
|
||||||
|
|
||||||
|
func assertErrorString(t *testing.T, expected string, err error) {
|
||||||
|
expectedErr := errors.New(expected)
|
||||||
|
if err == nil || err.Error() != expectedErr.Error() {
|
||||||
|
t.Errorf("expecting error %s, but got %s instead", expected, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToEmptyTable(t *testing.T) {
|
||||||
|
doc := `[[empty-tables]]
|
||||||
|
[[empty-tables]]`
|
||||||
|
|
||||||
|
toml, err := Load(doc)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Unexpected Load error:", err)
|
||||||
|
}
|
||||||
|
tomlString, err := toml.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Unexpected ToTomlString error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
expected := `
|
||||||
|
[[empty-tables]]
|
||||||
|
|
||||||
|
[[empty-tables]]
|
||||||
|
`
|
||||||
|
|
||||||
|
if tomlString != expected {
|
||||||
|
t.Fatalf("Expected:\n%s\nGot:\n%s", expected, tomlString)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToTomlString(t *testing.T) {
|
||||||
|
toml, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
|
||||||
|
points = { x = 1, y = 2 }`)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Unexpected error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
tomlString, _ := toml.ToTomlString()
|
||||||
|
reparsedTree, err := Load(tomlString)
|
||||||
|
|
||||||
|
assertTree(t, reparsedTree, err, map[string]interface{}{
|
||||||
|
"name": map[string]interface{}{
|
||||||
|
"first": "Tom",
|
||||||
|
"last": "Preston-Werner",
|
||||||
|
},
|
||||||
|
"points": map[string]interface{}{
|
||||||
|
"x": int64(1),
|
||||||
|
"y": int64(2),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToTomlStringSimple(t *testing.T) {
|
||||||
|
tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Test failed to parse: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
result, err := tree.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Unexpected error: %s", err)
|
||||||
|
}
|
||||||
|
expected := "\n[foo]\n\n [[foo.bar]]\n a = 42\n\n [[foo.bar]]\n a = 69\n"
|
||||||
|
if result != expected {
|
||||||
|
t.Errorf("Expected got '%s', expected '%s'", result, expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToTomlStringKeysOrders(t *testing.T) {
|
||||||
|
for i := 0; i < 100; i++ {
|
||||||
|
tree, _ := Load(`
|
||||||
|
foobar = true
|
||||||
|
bar = "baz"
|
||||||
|
foo = 1
|
||||||
|
[qux]
|
||||||
|
foo = 1
|
||||||
|
bar = "baz2"`)
|
||||||
|
|
||||||
|
stringRepr, _ := tree.ToTomlString()
|
||||||
|
|
||||||
|
t.Log("Intermediate string representation:")
|
||||||
|
t.Log(stringRepr)
|
||||||
|
|
||||||
|
r := strings.NewReader(stringRepr)
|
||||||
|
toml, err := LoadReader(r)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Unexpected error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
assertTree(t, toml, err, map[string]interface{}{
|
||||||
|
"foobar": true,
|
||||||
|
"bar": "baz",
|
||||||
|
"foo": 1,
|
||||||
|
"qux": map[string]interface{}{
|
||||||
|
"foo": 1,
|
||||||
|
"bar": "baz2",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// testMaps fails the test when actual and expected are not deeply equal.
func testMaps(t *testing.T, actual, expected map[string]interface{}) {
	if reflect.DeepEqual(actual, expected) {
		return
	}
	t.Fatal("trees aren't equal.\n", "Expected:\n", expected, "\nActual:\n", actual)
}
|
||||||
|
|
||||||
|
func TestTreeWriteToMapSimple(t *testing.T) {
|
||||||
|
tree, _ := Load("a = 42\nb = 17")
|
||||||
|
|
||||||
|
expected := map[string]interface{}{
|
||||||
|
"a": int64(42),
|
||||||
|
"b": int64(17),
|
||||||
|
}
|
||||||
|
|
||||||
|
testMaps(t, tree.ToMap(), expected)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestTreeWriteToInvalidTreeSimpleValue checks that serializing a tree whose
// values map holds a raw Go value (not wrapped in *tomlValue or *Tree) fails
// with a descriptive error.
func TestTreeWriteToInvalidTreeSimpleValue(t *testing.T) {
	tree := Tree{values: map[string]interface{}{"foo": int8(1)}}
	_, err := tree.ToTomlString()
	assertErrorString(t, "invalid value type at foo: int8", err)
}
|
||||||
|
|
||||||
|
// TestTreeWriteToInvalidTreeTomlValue checks that a *tomlValue wrapping a Go
// type with no TOML representation (int8) is rejected during serialization.
func TestTreeWriteToInvalidTreeTomlValue(t *testing.T) {
	tree := Tree{values: map[string]interface{}{"foo": &tomlValue{value: int8(1), comment: "", position: Position{}}}}
	_, err := tree.ToTomlString()
	assertErrorString(t, "unsupported value type int8: 1", err)
}
|
||||||
|
|
||||||
|
// TestTreeWriteToInvalidTreeTomlValueArray checks rejection of an
// unsupported value type during serialization.
//
// NOTE(review): this test is currently byte-for-byte identical to
// TestTreeWriteToInvalidTreeTomlValue — the fixture is a scalar int8, not an
// array. Presumably it was meant to wrap the value in a slice; verify the
// intended fixture and expected error before changing it.
func TestTreeWriteToInvalidTreeTomlValueArray(t *testing.T) {
	tree := Tree{values: map[string]interface{}{"foo": &tomlValue{value: int8(1), comment: "", position: Position{}}}}
	_, err := tree.ToTomlString()
	assertErrorString(t, "unsupported value type int8: 1", err)
}
|
||||||
|
|
||||||
|
func TestTreeWriteToFailingWriterInSimpleValue(t *testing.T) {
|
||||||
|
toml, _ := Load(`a = 2`)
|
||||||
|
writer := failingWriter{failAt: 0, written: 0}
|
||||||
|
_, err := toml.WriteTo(&writer)
|
||||||
|
assertErrorString(t, "failingWriter failed after writing 0 bytes", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToFailingWriterInTable(t *testing.T) {
|
||||||
|
toml, _ := Load(`
|
||||||
|
[b]
|
||||||
|
a = 2`)
|
||||||
|
writer := failingWriter{failAt: 2, written: 0}
|
||||||
|
_, err := toml.WriteTo(&writer)
|
||||||
|
assertErrorString(t, "failingWriter failed after writing 2 bytes", err)
|
||||||
|
|
||||||
|
writer = failingWriter{failAt: 13, written: 0}
|
||||||
|
_, err = toml.WriteTo(&writer)
|
||||||
|
assertErrorString(t, "failingWriter failed after writing 13 bytes", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToFailingWriterInArray(t *testing.T) {
|
||||||
|
toml, _ := Load(`
|
||||||
|
[[b]]
|
||||||
|
a = 2`)
|
||||||
|
writer := failingWriter{failAt: 2, written: 0}
|
||||||
|
_, err := toml.WriteTo(&writer)
|
||||||
|
assertErrorString(t, "failingWriter failed after writing 2 bytes", err)
|
||||||
|
|
||||||
|
writer = failingWriter{failAt: 15, written: 0}
|
||||||
|
_, err = toml.WriteTo(&writer)
|
||||||
|
assertErrorString(t, "failingWriter failed after writing 15 bytes", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestTreeWriteToMapExampleFile loads the example.toml fixture from disk and
// checks the ToMap conversion of the whole document.
func TestTreeWriteToMapExampleFile(t *testing.T) {
	// NOTE(review): the Load error is discarded; a missing fixture would
	// surface as a confusing map mismatch rather than a clear failure.
	tree, _ := LoadFile("example.toml")
	expected := map[string]interface{}{
		"title": "TOML Example",
		"owner": map[string]interface{}{
			"name":         "Tom Preston-Werner",
			"organization": "GitHub",
			"bio":          "GitHub Cofounder & CEO\nLikes tater tots and beer.",
			"dob":          time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
		},
		"database": map[string]interface{}{
			"server": "192.168.1.1",
			// The duplicated 8001 presumably mirrors the fixture on disk —
			// confirm against example.toml before "fixing" it.
			"ports":          []interface{}{int64(8001), int64(8001), int64(8002)},
			"connection_max": int64(5000),
			"enabled":        true,
		},
		"servers": map[string]interface{}{
			"alpha": map[string]interface{}{
				"ip": "10.0.0.1",
				"dc": "eqdc10",
			},
			"beta": map[string]interface{}{
				"ip": "10.0.0.2",
				"dc": "eqdc10",
			},
		},
		"clients": map[string]interface{}{
			"data": []interface{}{
				[]interface{}{"gamma", "delta"},
				[]interface{}{int64(1), int64(2)},
			},
		},
	}
	testMaps(t, tree.ToMap(), expected)
}
|
||||||
|
|
||||||
|
func TestTreeWriteToMapWithTablesInMultipleChunks(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[[menu.main]]
|
||||||
|
a = "menu 1"
|
||||||
|
b = "menu 2"
|
||||||
|
[[menu.main]]
|
||||||
|
c = "menu 3"
|
||||||
|
d = "menu 4"`)
|
||||||
|
expected := map[string]interface{}{
|
||||||
|
"menu": map[string]interface{}{
|
||||||
|
"main": []interface{}{
|
||||||
|
map[string]interface{}{"a": "menu 1", "b": "menu 2"},
|
||||||
|
map[string]interface{}{"c": "menu 3", "d": "menu 4"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
treeMap := tree.ToMap()
|
||||||
|
|
||||||
|
testMaps(t, treeMap, expected)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[params]
|
||||||
|
language_tabs = [
|
||||||
|
{ key = "shell", name = "Shell" },
|
||||||
|
{ key = "ruby", name = "Ruby" },
|
||||||
|
{ key = "python", name = "Python" }
|
||||||
|
]`)
|
||||||
|
|
||||||
|
expected := map[string]interface{}{
|
||||||
|
"params": map[string]interface{}{
|
||||||
|
"language_tabs": []interface{}{
|
||||||
|
map[string]interface{}{
|
||||||
|
"key": "shell",
|
||||||
|
"name": "Shell",
|
||||||
|
},
|
||||||
|
map[string]interface{}{
|
||||||
|
"key": "ruby",
|
||||||
|
"name": "Ruby",
|
||||||
|
},
|
||||||
|
map[string]interface{}{
|
||||||
|
"key": "python",
|
||||||
|
"name": "Python",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
treeMap := tree.ToMap()
|
||||||
|
testMaps(t, treeMap, expected)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToFloat(t *testing.T) {
|
||||||
|
tree, err := Load(`a = 3.0`)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
str, err := tree.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
expected := `a = 3.0`
|
||||||
|
if strings.TrimSpace(str) != strings.TrimSpace(expected) {
|
||||||
|
t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToSpecialFloat(t *testing.T) {
|
||||||
|
expected := `a = +inf
|
||||||
|
b = -inf
|
||||||
|
c = nan`
|
||||||
|
|
||||||
|
tree, err := Load(expected)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
str, err := tree.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(str) != strings.TrimSpace(expected) {
|
||||||
|
t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue290(t *testing.T) {
|
||||||
|
tomlString :=
|
||||||
|
`[table]
|
||||||
|
"127.0.0.1" = "value"
|
||||||
|
"127.0.0.1:8028" = "value"
|
||||||
|
"character encoding" = "value"
|
||||||
|
"ʎǝʞ" = "value"`
|
||||||
|
|
||||||
|
t1, err := Load(tomlString)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("load err:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s, err := t1.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("ToTomlString err:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load(s)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("reload err:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkTreeToTomlString(b *testing.B) {
|
||||||
|
toml, err := Load(sampleHard)
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal("Unexpected error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
_, err := toml.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// sampleHard is a deliberately adversarial TOML document (comment-like
// characters inside strings, "#" in keys and arrays) used as the fixture for
// BenchmarkTreeToTomlString. The commented-out sections at the bottom are
// intentionally invalid TOML kept for manual experimentation.
var sampleHard = `# Test file for TOML
# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate
# This part you'll really hate

[the]
test_string = "You'll hate me after this - #" # " Annoying, isn't it?

[the.hard]
test_array = [ "] ", " # "] # ] There you go, parse this!
test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ]
# You didn't think it'd as easy as chucking out the last #, did you?
another_test_string = " Same thing, but with a string #"
harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too"
# Things will get harder

[the.hard."bit#"]
"what?" = "You don't think some user won't do that?"
multi_line_array = [
"]",
# ] Oh yes I did
]

# Each of the following keygroups/key value pairs should produce an error. Uncomment to them to test

#[error] if you didn't catch this, your parser is broken
#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this
#array = [
# "This might most likely happen in multiline arrays",
# Like here,
# "or here,
# and here"
# ] End of array comment, forgot the #
#number = 3.14 pi <--again forgot the # `
|
||||||
Reference in New Issue
Block a user