Compare commits
125 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 5dc006fb52 | |||
| fed1464066 | |||
| 1baee4630f | |||
| 352072d51a | |||
| c42c3365f3 | |||
| b8ba995eaa | |||
| 8e44986c28 | |||
| 837c1d09ee | |||
| 8410c965c2 | |||
| d083470585 | |||
| c893dbf25c | |||
| 2a1df71375 | |||
| a2f5197638 | |||
| bb65137dc4 | |||
| 99782c87cf | |||
| ce6fbd7bc0 | |||
| b59c12a70d | |||
| 6a307ac0d0 | |||
| a2e5256180 | |||
| 5163266f16 | |||
| b4f0a950bf | |||
| ef48fb2be1 | |||
| c9a09d8695 | |||
| 3430b0f086 | |||
| a713a3eccc | |||
| 652b9f8232 | |||
| ba1b12be14 | |||
| 2e01f733df | |||
| 1bd9461acb | |||
| 5b4e7e5dcc | |||
| b4905040a8 | |||
| 5c66c78bc5 | |||
| f9ba08244d | |||
| e6908614ee | |||
| a7448fe8de | |||
| 65ca806488 | |||
| 5c94d86029 | |||
| b76eb62117 | |||
| 196ce3a1f6 | |||
| 9f8f82dfe8 | |||
| 661484ae7e | |||
| 34de94e6a8 | |||
| 88263a05cc | |||
| 1dbe20e76c | |||
| 05bf3807d3 | |||
| 06838de5d2 | |||
| db62263e3e | |||
| 2d866e3fae | |||
| 100799f7b7 | |||
| ecd155a62f | |||
| bcacc71a18 | |||
| 16c9a8bdc0 | |||
| f99d6bbca1 | |||
| 8784f9c73a | |||
| a60e466129 | |||
| 44aed552fd | |||
| 1479e10663 | |||
| 9ba7363552 | |||
| 96ff402934 | |||
| 249d0eaf46 | |||
| 19eb8cf036 | |||
| c5fbd3eba6 | |||
| 9ccd9bbc7a | |||
| e7d1a179ae | |||
| 71a8bd4c61 | |||
| 34782191ba | |||
| 7fbde32684 | |||
| 82a6a1977d | |||
| cc3100c329 | |||
| f1ba6388fb | |||
| d05497900e | |||
| e29a498ed5 | |||
| 2b8e33f503 | |||
| d3c92c5999 | |||
| 71c324cf7b | |||
| 4c840f1b8b | |||
| d1e0fc37ce | |||
| 947ab3f90a | |||
| e9e8265313 | |||
| a30fd2239c | |||
| 323fe5d063 | |||
| 24d4446802 | |||
| 5060c72d94 | |||
| 0a459e938d | |||
| e872682c78 | |||
| 145b18309a | |||
| 8e8d2a6aad | |||
| 3f7178ffd6 | |||
| 9fd5922321 | |||
| 610cf85ed6 | |||
| 99f8a2a010 | |||
| 556d384d4c | |||
| eb7280e4a7 | |||
| 7ee1118b4b | |||
| a12e102214 | |||
| ad60b7e437 | |||
| 3503483c73 | |||
| d2d17bccec | |||
| 76a94674c9 | |||
| 80f8b7660b | |||
| 6f6ca41621 | |||
| c4efb7477c | |||
| 903d9455db | |||
| a89a075e1b | |||
| 5e74bb91ea | |||
| 3a4d7af89e | |||
| 8a362ad712 | |||
| 5edf9acd3e | |||
| e95df67ba3 | |||
| bef0f57967 | |||
| e87c92d4f4 | |||
| 8fe62057ea | |||
| 5f42261979 | |||
| 75654e60b8 | |||
| 091e2dc498 | |||
| 095a905e04 | |||
| ec312409d3 | |||
| 26fd12ff54 | |||
| b40204d36a | |||
| 4d5afd743f | |||
| 3ded2e09ee | |||
| 781fbae71e | |||
| 68063a447e | |||
| 84da2c4a25 | |||
| dba45d427f |
@@ -1,170 +0,0 @@
|
|||||||
version: 2.1
|
|
||||||
|
|
||||||
executors:
|
|
||||||
golang:
|
|
||||||
parameters:
|
|
||||||
version:
|
|
||||||
type: string
|
|
||||||
docker:
|
|
||||||
- image: circleci/golang:<< parameters.version >>
|
|
||||||
|
|
||||||
commands:
|
|
||||||
get_deps:
|
|
||||||
description: "Get go dependencies"
|
|
||||||
steps:
|
|
||||||
- run: go get github.com/jstemmer/go-junit-report
|
|
||||||
|
|
||||||
run_test:
|
|
||||||
description: "Run unit tests for a go module"
|
|
||||||
parameters:
|
|
||||||
test_name:
|
|
||||||
type: string
|
|
||||||
module:
|
|
||||||
type: string
|
|
||||||
coverage:
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
allow_fail:
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
steps:
|
|
||||||
- run:
|
|
||||||
name: "Run tests for <<parameters.test_name>>"
|
|
||||||
command: |
|
|
||||||
TEST_DIR="/tmp/test-results/<<parameters.test_name>>"
|
|
||||||
mkdir -p ${TEST_DIR}
|
|
||||||
trap "go-junit-report </tmp/test-results/go-test.out > ${TEST_DIR}/go-test-report.xml" EXIT
|
|
||||||
go test <<parameters.module>> -race -v \
|
|
||||||
<<# parameters.coverage >>-coverprofile=/tmp/workspace/coverage.txt -covermode=atomic<</ parameters.coverage >> \
|
|
||||||
| tee /tmp/test-results/go-test.out <<# parameters.allow_fail >>|| true<</ parameters.allow_fail >>
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
go:
|
|
||||||
parameters:
|
|
||||||
version:
|
|
||||||
type: string
|
|
||||||
allow_fail:
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
executor:
|
|
||||||
name: golang
|
|
||||||
version: "<<parameters.version>>"
|
|
||||||
working_directory: /go/src/github.com/pelletier/go-toml
|
|
||||||
environment:
|
|
||||||
GO111MODULE: "on"
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run: mkdir -p /tmp/workspace
|
|
||||||
- run: go fmt ./... <<# parameters.allow_fail >>|| true<</ parameters.allow_fail >>
|
|
||||||
- get_deps
|
|
||||||
- run_test:
|
|
||||||
test_name: "go-toml"
|
|
||||||
module: "github.com/pelletier/go-toml"
|
|
||||||
coverage: true
|
|
||||||
allow_fail: <<parameters.allow_fail>>
|
|
||||||
- run_test:
|
|
||||||
test_name: "tomljson"
|
|
||||||
module: "github.com/pelletier/go-toml/cmd/tomljson"
|
|
||||||
allow_fail: <<parameters.allow_fail>>
|
|
||||||
- run_test:
|
|
||||||
test_name: "tomll"
|
|
||||||
module: "github.com/pelletier/go-toml/cmd/tomll"
|
|
||||||
allow_fail: <<parameters.allow_fail>>
|
|
||||||
- run_test:
|
|
||||||
test_name: "query"
|
|
||||||
module: "github.com/pelletier/go-toml/query"
|
|
||||||
allow_fail: <<parameters.allow_fail>>
|
|
||||||
- store_test_results:
|
|
||||||
path: /tmp/test-results
|
|
||||||
codecov:
|
|
||||||
docker:
|
|
||||||
- image: "circleci/golang:1.12"
|
|
||||||
steps:
|
|
||||||
- attach_workspace:
|
|
||||||
at: /tmp/workspace
|
|
||||||
- run:
|
|
||||||
name: "upload to codecov"
|
|
||||||
working_directory: /tmp/workspace
|
|
||||||
command: |
|
|
||||||
curl https://codecov.io/bash > codecov.sh
|
|
||||||
bash codecov.sh -v
|
|
||||||
docker:
|
|
||||||
docker:
|
|
||||||
- image: "circleci/golang:1.12"
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- setup_remote_docker:
|
|
||||||
docker_layer_caching: true
|
|
||||||
- run: docker build -t pelletier/go-toml:$CIRCLE_SHA1 .
|
|
||||||
- run:
|
|
||||||
name: "Publish docker image"
|
|
||||||
command: |
|
|
||||||
if [ "${CIRCLE_PR_REPONAME}" == "" ]; then
|
|
||||||
IMAGE_NAME="pelletier/go-toml"
|
|
||||||
IMAGE_SHA_TAG="${IMAGE_NAME}:$CIRCLE_SHA1"
|
|
||||||
if [ "${CIRCLE_BRANCH}" = "master" ]; then
|
|
||||||
docker login -u $DOCKER_USER -p $DOCKER_PASS
|
|
||||||
docker tag ${IMAGE_SHA_TAG} ${IMAGE_NAME}:latest
|
|
||||||
docker push ${IMAGE_NAME}:latest
|
|
||||||
fi
|
|
||||||
if [ "${CIRCLE_TAG}" != "" ]; then
|
|
||||||
docker login -u $DOCKER_USER -p $DOCKER_PASS
|
|
||||||
docker tag ${IMAGE_SHA_TAG} ${IMAGE_NAME}:${CIRCLE_TAG}
|
|
||||||
docker push ${IMAGE_NAME}:${CIRCLE_TAG}
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "not pushing docker image for forked repo"
|
|
||||||
fi
|
|
||||||
|
|
||||||
workflows:
|
|
||||||
version: 2.1
|
|
||||||
build:
|
|
||||||
jobs:
|
|
||||||
- go:
|
|
||||||
name: "go1_11"
|
|
||||||
version: "1.11"
|
|
||||||
- go:
|
|
||||||
name: "go1_12"
|
|
||||||
version: "1.12"
|
|
||||||
post-steps:
|
|
||||||
- run: go tool cover -html=/tmp/workspace/coverage.txt -o coverage.html
|
|
||||||
- store_artifacts:
|
|
||||||
path: /tmp/workspace/coverage.txt
|
|
||||||
- store_artifacts:
|
|
||||||
path: coverage.html
|
|
||||||
- persist_to_workspace:
|
|
||||||
root: /tmp/workspace
|
|
||||||
paths:
|
|
||||||
- coverage.txt
|
|
||||||
- go:
|
|
||||||
name: "gotip"
|
|
||||||
version: "1.12" # use as base
|
|
||||||
allow_fail: true
|
|
||||||
pre-steps:
|
|
||||||
- restore_cache:
|
|
||||||
keys:
|
|
||||||
- go-tip-source
|
|
||||||
- run:
|
|
||||||
name: "Compile go tip"
|
|
||||||
command: |
|
|
||||||
if [ ! -d "/tmp/go" ]; then
|
|
||||||
git clone https://go.googlesource.com/go /tmp/go
|
|
||||||
fi
|
|
||||||
cd /tmp/go
|
|
||||||
git checkout master
|
|
||||||
git pull
|
|
||||||
cd src
|
|
||||||
./make.bash
|
|
||||||
echo 'export PATH="/tmp/go/bin:$PATH"' >> $BASH_ENV
|
|
||||||
- run: go version
|
|
||||||
- save_cache:
|
|
||||||
key: go-tip-source
|
|
||||||
paths:
|
|
||||||
- "/tmp/go"
|
|
||||||
- codecov:
|
|
||||||
requires:
|
|
||||||
- go1_11
|
|
||||||
- go1_12
|
|
||||||
- docker:
|
|
||||||
requires:
|
|
||||||
- codecov
|
|
||||||
@@ -1,9 +1,18 @@
|
|||||||
---
|
---
|
||||||
name: Bug report
|
name: Bug report
|
||||||
about: Create a report to help us improve
|
about: Create a report to help us improve
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
<!--
|
||||||
|
‼️ Main development focus is on the upcoming go-toml v2 ⚠️
|
||||||
|
|
||||||
|
As a result, v1.x bugs will likely not see a fix on a v1.x version.
|
||||||
|
However, reporting the bug is the best way to ensure that it will be fixed in v2.
|
||||||
|
|
||||||
|
See https://github.com/pelletier/go-toml/discussions/506.
|
||||||
|
-->
|
||||||
|
|
||||||
|
|
||||||
**Describe the bug**
|
**Describe the bug**
|
||||||
A clear and concise description of what the bug is.
|
A clear and concise description of what the bug is.
|
||||||
|
|
||||||
@@ -14,7 +23,7 @@ Steps to reproduce the behavior. Including TOML files.
|
|||||||
A clear and concise description of what you expected to happen, if other than "should work".
|
A clear and concise description of what you expected to happen, if other than "should work".
|
||||||
|
|
||||||
**Versions**
|
**Versions**
|
||||||
- go-toml: version (git sha)
|
- go-toml: version (or git sha)
|
||||||
- go: version
|
- go: version
|
||||||
- operating system: e.g. macOS, Windows, Linux
|
- operating system: e.g. macOS, Windows, Linux
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
blank_issues_enabled: false
|
||||||
|
contact_links:
|
||||||
|
- name: Questions and discussions
|
||||||
|
url: https://github.com/pelletier/go-toml/discussions
|
||||||
|
about: Please ask and answer questions here.
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature request
|
|
||||||
about: Suggest an idea for this project
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Is your feature request related to a problem? Please describe.**
|
|
||||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
|
||||||
|
|
||||||
**Describe the solution you'd like**
|
|
||||||
A clear and concise description of what you want to happen.
|
|
||||||
|
|
||||||
**Describe alternatives you've considered**
|
|
||||||
A clear and concise description of any alternative solutions or features you've considered.
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context or screenshots about the feature request here.
|
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: gomod
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
time: "13:00"
|
||||||
|
open-pull-requests-limit: 10
|
||||||
@@ -0,0 +1,67 @@
|
|||||||
|
# For most projects, this workflow file will not need changing; you simply need
|
||||||
|
# to commit it to your repository.
|
||||||
|
#
|
||||||
|
# You may wish to alter this file to override the set of languages analyzed,
|
||||||
|
# or to provide custom queries or build logic.
|
||||||
|
#
|
||||||
|
# ******** NOTE ********
|
||||||
|
# We have attempted to detect the languages in your repository. Please check
|
||||||
|
# the `language` matrix defined below to confirm you have the correct set of
|
||||||
|
# supported CodeQL languages.
|
||||||
|
#
|
||||||
|
name: "CodeQL"
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ master ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [ master ]
|
||||||
|
schedule:
|
||||||
|
- cron: '26 19 * * 0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze:
|
||||||
|
name: Analyze
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
language: [ 'go' ]
|
||||||
|
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
|
||||||
|
# Learn more:
|
||||||
|
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
|
# Initializes the CodeQL tools for scanning.
|
||||||
|
- name: Initialize CodeQL
|
||||||
|
uses: github/codeql-action/init@v1
|
||||||
|
with:
|
||||||
|
languages: ${{ matrix.language }}
|
||||||
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
# By default, queries listed here will override any specified in a config file.
|
||||||
|
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||||
|
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||||
|
|
||||||
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
|
- name: Autobuild
|
||||||
|
uses: github/codeql-action/autobuild@v1
|
||||||
|
|
||||||
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
|
# 📚 https://git.io/JvXDl
|
||||||
|
|
||||||
|
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||||
|
# and modify them (or add more) to build your code if your project
|
||||||
|
# uses a compiled language
|
||||||
|
|
||||||
|
#- run: |
|
||||||
|
# make bootstrap
|
||||||
|
# make release
|
||||||
|
|
||||||
|
- name: Perform CodeQL Analysis
|
||||||
|
uses: github/codeql-action/analyze@v1
|
||||||
-22
@@ -1,22 +0,0 @@
|
|||||||
sudo: false
|
|
||||||
language: go
|
|
||||||
go:
|
|
||||||
- 1.11.x
|
|
||||||
- 1.12.x
|
|
||||||
- tip
|
|
||||||
matrix:
|
|
||||||
allow_failures:
|
|
||||||
- go: tip
|
|
||||||
fast_finish: true
|
|
||||||
env:
|
|
||||||
- GO111MODULE=on
|
|
||||||
script:
|
|
||||||
- if [ -n "$(go fmt ./...)" ]; then exit 1; fi
|
|
||||||
- go test github.com/pelletier/go-toml -race -coverprofile=coverage.txt -covermode=atomic
|
|
||||||
- go test github.com/pelletier/go-toml/cmd/tomljson
|
|
||||||
- go test github.com/pelletier/go-toml/cmd/tomll
|
|
||||||
- go test github.com/pelletier/go-toml/query
|
|
||||||
- ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git
|
|
||||||
|
|
||||||
after_success:
|
|
||||||
- bash <(curl -s https://codecov.io/bash)
|
|
||||||
+2
-2
@@ -28,7 +28,7 @@ improve the documentation. Fix a typo, clarify an interface, add an
|
|||||||
example, anything goes!
|
example, anything goes!
|
||||||
|
|
||||||
The documentation is present in the [README][readme] and thorough the
|
The documentation is present in the [README][readme] and thorough the
|
||||||
source code. On release, it gets updated on [GoDoc][godoc]. To make a
|
source code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a
|
||||||
change to the documentation, create a pull request with your proposed
|
change to the documentation, create a pull request with your proposed
|
||||||
changes. For simple changes like that, the easiest way to go is probably
|
changes. For simple changes like that, the easiest way to go is probably
|
||||||
the "Fork this project and edit the file" button on Github, displayed at
|
the "Fork this project and edit the file" button on Github, displayed at
|
||||||
@@ -123,7 +123,7 @@ Checklist:
|
|||||||
|
|
||||||
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
||||||
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
||||||
[godoc]: https://godoc.org/github.com/pelletier/go-toml
|
[pkg.go.dev]: https://pkg.go.dev/github.com/pelletier/go-toml
|
||||||
[readme]: ./README.md
|
[readme]: ./README.md
|
||||||
[fork]: https://help.github.com/articles/fork-a-repo
|
[fork]: https://help.github.com/articles/fork-a-repo
|
||||||
[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
|
[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
|
||||||
|
|||||||
@@ -8,3 +8,4 @@ RUN go install ./...
|
|||||||
FROM scratch
|
FROM scratch
|
||||||
COPY --from=builder /go/bin/tomll /usr/bin/tomll
|
COPY --from=builder /go/bin/tomll /usr/bin/tomll
|
||||||
COPY --from=builder /go/bin/tomljson /usr/bin/tomljson
|
COPY --from=builder /go/bin/tomljson /usr/bin/tomljson
|
||||||
|
COPY --from=builder /go/bin/jsontoml /usr/bin/jsontoml
|
||||||
|
|||||||
@@ -1,6 +1,16 @@
|
|||||||
|
The bulk of github.com/pelletier/go-toml is distributed under the MIT license
|
||||||
|
(see below), with the exception of localtime.go and localtime.test.go.
|
||||||
|
Those two files have been copied over from Google's civil library at revision
|
||||||
|
ed46f5086358513cf8c25f8e3f022cb838a49d66, and are distributed under the Apache
|
||||||
|
2.0 license (see below).
|
||||||
|
|
||||||
|
|
||||||
|
github.com/pelletier/go-toml:
|
||||||
|
|
||||||
|
|
||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
|
Copyright (c) 2013 - 2021 Thomas Pelletier, Eric Anderton
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
@@ -19,3 +29,219 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
SOFTWARE.
|
SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
localtime.go, localtime_test.go:
|
||||||
|
|
||||||
|
Originals:
|
||||||
|
https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil.go
|
||||||
|
https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil_test.go
|
||||||
|
Changes:
|
||||||
|
* Renamed files from civil* to localtime*.
|
||||||
|
* Package changed from civil to toml.
|
||||||
|
* 'Local' prefix added to all structs.
|
||||||
|
License:
|
||||||
|
https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/LICENSE
|
||||||
|
|
||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|||||||
@@ -0,0 +1,29 @@
|
|||||||
|
export CGO_ENABLED=0
|
||||||
|
go := go
|
||||||
|
go.goos ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f1)
|
||||||
|
go.goarch ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f2)
|
||||||
|
|
||||||
|
out.tools := tomll tomljson jsontoml
|
||||||
|
out.dist := $(out.tools:=_$(go.goos)_$(go.goarch).tar.xz)
|
||||||
|
sources := $(wildcard **/*.go)
|
||||||
|
|
||||||
|
|
||||||
|
.PHONY:
|
||||||
|
tools: $(out.tools)
|
||||||
|
|
||||||
|
$(out.tools): $(sources)
|
||||||
|
GOOS=$(go.goos) GOARCH=$(go.goarch) $(go) build ./cmd/$@
|
||||||
|
|
||||||
|
.PHONY:
|
||||||
|
dist: $(out.dist)
|
||||||
|
|
||||||
|
$(out.dist):%_$(go.goos)_$(go.goarch).tar.xz: %
|
||||||
|
if [ "$(go.goos)" = "windows" ]; then \
|
||||||
|
tar -cJf $@ $^.exe; \
|
||||||
|
else \
|
||||||
|
tar -cJf $@ $^; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
.PHONY:
|
||||||
|
clean:
|
||||||
|
rm -rf $(out.tools) $(out.dist)
|
||||||
@@ -1,25 +1,62 @@
|
|||||||
# go-toml
|
# go-toml
|
||||||
|
|
||||||
Go library for the [TOML](https://github.com/mojombo/toml) format.
|
Go library for the [TOML](https://toml.io/) format.
|
||||||
|
|
||||||
|
|
||||||
|
⚠️ This readme is for go-toml v1. As for 2022-04-27,
|
||||||
|
[go-toml v2](https://github.com/pelletier/go-toml/tree/v2) has been released.
|
||||||
|
|
||||||
|
The new version contains tons of bug fixes, is much faster, and more
|
||||||
|
importantly maintained. You are strongly encouraged to use it instead of v1!
|
||||||
|
|
||||||
|
[👉 go-toml v2](https://github.com/pelletier/go-toml/tree/v2)
|
||||||
|
|
||||||
|
v1 will not receive any updates.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
This library supports TOML version
|
This library supports TOML version
|
||||||
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
|
[v1.0.0-rc.3](https://toml.io/en/v1.0.0-rc.3)
|
||||||
|
|
||||||
[](http://godoc.org/github.com/pelletier/go-toml)
|
[](https://pkg.go.dev/github.com/pelletier/go-toml)
|
||||||
[](https://github.com/pelletier/go-toml/blob/master/LICENSE)
|
[](https://github.com/pelletier/go-toml/blob/master/LICENSE)
|
||||||
[](https://travis-ci.org/pelletier/go-toml)
|
[](https://dev.azure.com/pelletierthomas/go-toml-ci/_build/latest?definitionId=1&branchName=master)
|
||||||
[](https://ci.appveyor.com/project/pelletier/go-toml/branch/master)
|
|
||||||
[](https://codecov.io/gh/pelletier/go-toml)
|
[](https://codecov.io/gh/pelletier/go-toml)
|
||||||
[](https://goreportcard.com/report/github.com/pelletier/go-toml)
|
[](https://goreportcard.com/report/github.com/pelletier/go-toml)
|
||||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield)
|
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield)
|
||||||
|
|
||||||
|
|
||||||
|
## Development status
|
||||||
|
|
||||||
|
**ℹ️ Consider go-toml v2!**
|
||||||
|
|
||||||
|
The next version of go-toml is in [active development][v2-dev], and
|
||||||
|
[nearing completion][v2-map].
|
||||||
|
|
||||||
|
Though technically in beta, v2 is already more tested, [fixes bugs][v1-bugs],
|
||||||
|
and [much faster][v2-bench]. If you only need reading and writing TOML documents
|
||||||
|
(majority of cases), those features are implemented and the API unlikely to
|
||||||
|
change.
|
||||||
|
|
||||||
|
The remaining features will be added shortly. While pull-requests are welcome on
|
||||||
|
v1, no active development is expected on it. When v2.0.0 is released, v1 will be
|
||||||
|
deprecated.
|
||||||
|
|
||||||
|
👉 [go-toml v2][v2]
|
||||||
|
|
||||||
|
[v2]: https://github.com/pelletier/go-toml/tree/v2
|
||||||
|
[v2-map]: https://github.com/pelletier/go-toml/discussions/506
|
||||||
|
[v2-dev]: https://github.com/pelletier/go-toml/tree/v2
|
||||||
|
[v1-bugs]: https://github.com/pelletier/go-toml/issues?q=is%3Aissue+is%3Aopen+label%3Av2-fixed
|
||||||
|
[v2-bench]: https://github.com/pelletier/go-toml/tree/v2#benchmarks
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
Go-toml provides the following features for using data parsed from TOML documents:
|
Go-toml provides the following features for using data parsed from TOML documents:
|
||||||
|
|
||||||
* Load TOML documents from files and string data
|
* Load TOML documents from files and string data
|
||||||
* Easily navigate TOML structure using Tree
|
* Easily navigate TOML structure using Tree
|
||||||
* Mashaling and unmarshaling to and from data structures
|
* Marshaling and unmarshaling to and from data structures
|
||||||
* Line & column position data for all parsed elements
|
* Line & column position data for all parsed elements
|
||||||
* [Query support similar to JSON-Path](query/)
|
* [Query support similar to JSON-Path](query/)
|
||||||
* Syntax errors contain line and column numbers
|
* Syntax errors contain line and column numbers
|
||||||
@@ -75,20 +112,20 @@ Or use a query:
|
|||||||
q, _ := query.Compile("$..[user,password]")
|
q, _ := query.Compile("$..[user,password]")
|
||||||
results := q.Execute(config)
|
results := q.Execute(config)
|
||||||
for ii, item := range results.Values() {
|
for ii, item := range results.Values() {
|
||||||
fmt.Println("Query result %d: %v", ii, item)
|
fmt.Printf("Query result %d: %v\n", ii, item)
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
The documentation and additional examples are available at
|
The documentation and additional examples are available at
|
||||||
[godoc.org](http://godoc.org/github.com/pelletier/go-toml).
|
[pkg.go.dev](https://pkg.go.dev/github.com/pelletier/go-toml).
|
||||||
|
|
||||||
## Tools
|
## Tools
|
||||||
|
|
||||||
Go-toml provides two handy command line tools:
|
Go-toml provides three handy command line tools:
|
||||||
|
|
||||||
* `tomll`: Reads TOML files and lint them.
|
* `tomll`: Reads TOML files and lints them.
|
||||||
|
|
||||||
```
|
```
|
||||||
go install github.com/pelletier/go-toml/cmd/tomll
|
go install github.com/pelletier/go-toml/cmd/tomll
|
||||||
@@ -101,9 +138,16 @@ Go-toml provides two handy command line tools:
|
|||||||
tomljson --help
|
tomljson --help
|
||||||
```
|
```
|
||||||
|
|
||||||
|
* `jsontoml`: Reads a JSON file and outputs a TOML representation.
|
||||||
|
|
||||||
|
```
|
||||||
|
go install github.com/pelletier/go-toml/cmd/jsontoml
|
||||||
|
jsontoml --help
|
||||||
|
```
|
||||||
|
|
||||||
### Docker image
|
### Docker image
|
||||||
|
|
||||||
Those tools are also availble as a Docker image from
|
Those tools are also available as a Docker image from
|
||||||
[dockerhub](https://hub.docker.com/r/pelletier/go-toml). For example, to
|
[dockerhub](https://hub.docker.com/r/pelletier/go-toml). For example, to
|
||||||
use `tomljson`:
|
use `tomljson`:
|
||||||
|
|
||||||
@@ -142,4 +186,4 @@ this document. The last two major versions of Go are supported
|
|||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
The MIT License (MIT). Read [LICENSE](LICENSE).
|
The MIT License (MIT) + Apache 2.0. Read [LICENSE](LICENSE).
|
||||||
|
|||||||
+19
@@ -0,0 +1,19 @@
|
|||||||
|
# Security Policy
|
||||||
|
|
||||||
|
## Supported Versions
|
||||||
|
|
||||||
|
Use this section to tell people about which versions of your project are
|
||||||
|
currently being supported with security updates.
|
||||||
|
|
||||||
|
| Version | Supported |
|
||||||
|
| ---------- | ------------------ |
|
||||||
|
| Latest 2.x | :white_check_mark: |
|
||||||
|
| All 1.x | :x: |
|
||||||
|
| All 0.x | :x: |
|
||||||
|
|
||||||
|
## Reporting a Vulnerability
|
||||||
|
|
||||||
|
Email a vulnerability report to `security@pelletier.codes`. Make sure to include
|
||||||
|
as many details as possible to reproduce the vulnerability. This is a
|
||||||
|
side-project: I will try to get back to you as quickly as possible, time
|
||||||
|
permitting in my personal life. Providing a working patch helps very much!
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
version: "{build}"
|
|
||||||
|
|
||||||
# Source Config
|
|
||||||
clone_folder: c:\gopath\src\github.com\pelletier\go-toml
|
|
||||||
|
|
||||||
# Build host
|
|
||||||
environment:
|
|
||||||
GOPATH: c:\gopath
|
|
||||||
DEPTESTBYPASS501: 1
|
|
||||||
GOVERSION: 1.12
|
|
||||||
GO111MODULE: on
|
|
||||||
|
|
||||||
init:
|
|
||||||
- git config --global core.autocrlf input
|
|
||||||
|
|
||||||
# Build
|
|
||||||
install:
|
|
||||||
# Install the specific Go version.
|
|
||||||
- rmdir c:\go /s /q
|
|
||||||
- appveyor DownloadFile https://storage.googleapis.com/golang/go%GOVERSION%.windows-amd64.msi
|
|
||||||
- msiexec /i go%GOVERSION%.windows-amd64.msi /q
|
|
||||||
- choco install bzr
|
|
||||||
- set Path=c:\go\bin;c:\gopath\bin;C:\Program Files (x86)\Bazaar\;C:\Program Files\Mercurial\%Path%
|
|
||||||
- go version
|
|
||||||
- go env
|
|
||||||
|
|
||||||
build: false
|
|
||||||
deploy: false
|
|
||||||
|
|
||||||
test_script:
|
|
||||||
- go test github.com/pelletier/go-toml
|
|
||||||
- go test github.com/pelletier/go-toml/cmd/tomljson
|
|
||||||
- go test github.com/pelletier/go-toml/cmd/tomll
|
|
||||||
- go test github.com/pelletier/go-toml/query
|
|
||||||
@@ -0,0 +1,188 @@
|
|||||||
|
trigger:
|
||||||
|
- master
|
||||||
|
|
||||||
|
stages:
|
||||||
|
- stage: run_checks
|
||||||
|
displayName: "Check"
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- job: fmt
|
||||||
|
displayName: "fmt"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.16"
|
||||||
|
inputs:
|
||||||
|
version: "1.16"
|
||||||
|
- task: Go@0
|
||||||
|
displayName: "go fmt ./..."
|
||||||
|
inputs:
|
||||||
|
command: 'custom'
|
||||||
|
customCommand: 'fmt'
|
||||||
|
arguments: './...'
|
||||||
|
- job: coverage
|
||||||
|
displayName: "coverage"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.16"
|
||||||
|
inputs:
|
||||||
|
version: "1.16"
|
||||||
|
- task: Go@0
|
||||||
|
displayName: "Generate coverage"
|
||||||
|
inputs:
|
||||||
|
command: 'test'
|
||||||
|
arguments: "-race -coverprofile=coverage.txt -covermode=atomic"
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
targetType: 'inline'
|
||||||
|
script: 'bash <(curl -s https://codecov.io/bash) -t ${CODECOV_TOKEN}'
|
||||||
|
env:
|
||||||
|
CODECOV_TOKEN: $(CODECOV_TOKEN)
|
||||||
|
- job: benchmark
|
||||||
|
displayName: "benchmark"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go 1.16"
|
||||||
|
inputs:
|
||||||
|
version: "1.16"
|
||||||
|
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
filePath: './benchmark.sh'
|
||||||
|
arguments: "master $(Build.Repository.Uri)"
|
||||||
|
|
||||||
|
- job: go_unit_tests
|
||||||
|
displayName: "unit tests"
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
linux 1.16:
|
||||||
|
goVersion: '1.16'
|
||||||
|
imageName: 'ubuntu-latest'
|
||||||
|
mac 1.16:
|
||||||
|
goVersion: '1.16'
|
||||||
|
imageName: 'macOS-latest'
|
||||||
|
windows 1.16:
|
||||||
|
goVersion: '1.16'
|
||||||
|
imageName: 'windows-latest'
|
||||||
|
linux 1.15:
|
||||||
|
goVersion: '1.15'
|
||||||
|
imageName: 'ubuntu-latest'
|
||||||
|
mac 1.15:
|
||||||
|
goVersion: '1.15'
|
||||||
|
imageName: 'macOS-latest'
|
||||||
|
windows 1.15:
|
||||||
|
goVersion: '1.15'
|
||||||
|
imageName: 'windows-latest'
|
||||||
|
pool:
|
||||||
|
vmImage: $(imageName)
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go $(goVersion)"
|
||||||
|
inputs:
|
||||||
|
version: $(goVersion)
|
||||||
|
- task: Go@0
|
||||||
|
displayName: "go test ./..."
|
||||||
|
inputs:
|
||||||
|
command: 'test'
|
||||||
|
arguments: './...'
|
||||||
|
- stage: build_binaries
|
||||||
|
displayName: "Build binaries"
|
||||||
|
dependsOn: run_checks
|
||||||
|
jobs:
|
||||||
|
- job: build_binary
|
||||||
|
displayName: "Build binary"
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
linux_amd64:
|
||||||
|
GOOS: linux
|
||||||
|
GOARCH: amd64
|
||||||
|
darwin_amd64:
|
||||||
|
GOOS: darwin
|
||||||
|
GOARCH: amd64
|
||||||
|
windows_amd64:
|
||||||
|
GOOS: windows
|
||||||
|
GOARCH: amd64
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: GoTool@0
|
||||||
|
displayName: "Install Go"
|
||||||
|
inputs:
|
||||||
|
version: 1.16
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
targetType: inline
|
||||||
|
script: "make dist"
|
||||||
|
env:
|
||||||
|
go.goos: $(GOOS)
|
||||||
|
go.goarch: $(GOARCH)
|
||||||
|
- task: CopyFiles@2
|
||||||
|
inputs:
|
||||||
|
sourceFolder: '$(Build.SourcesDirectory)'
|
||||||
|
contents: '*.tar.xz'
|
||||||
|
TargetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
inputs:
|
||||||
|
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||||
|
artifactName: binaries
|
||||||
|
- stage: build_binaries_manifest
|
||||||
|
displayName: "Build binaries manifest"
|
||||||
|
dependsOn: build_binaries
|
||||||
|
jobs:
|
||||||
|
- job: build_manifest
|
||||||
|
displayName: "Build binaries manifest"
|
||||||
|
steps:
|
||||||
|
- task: DownloadBuildArtifacts@0
|
||||||
|
inputs:
|
||||||
|
buildType: 'current'
|
||||||
|
downloadType: 'single'
|
||||||
|
artifactName: 'binaries'
|
||||||
|
downloadPath: '$(Build.SourcesDirectory)'
|
||||||
|
- task: Bash@3
|
||||||
|
inputs:
|
||||||
|
targetType: inline
|
||||||
|
script: "cd binaries && sha256sum --binary *.tar.xz | tee $(Build.ArtifactStagingDirectory)/sha256sums.txt"
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
inputs:
|
||||||
|
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||||
|
artifactName: manifest
|
||||||
|
|
||||||
|
- stage: build_docker_image
|
||||||
|
displayName: "Build Docker image"
|
||||||
|
dependsOn: run_checks
|
||||||
|
jobs:
|
||||||
|
- job: build
|
||||||
|
displayName: "Build"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: Docker@2
|
||||||
|
inputs:
|
||||||
|
command: 'build'
|
||||||
|
Dockerfile: 'Dockerfile'
|
||||||
|
buildContext: '.'
|
||||||
|
addPipelineData: false
|
||||||
|
|
||||||
|
- stage: publish_docker_image
|
||||||
|
displayName: "Publish Docker image"
|
||||||
|
dependsOn: build_docker_image
|
||||||
|
condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master'))
|
||||||
|
jobs:
|
||||||
|
- job: publish
|
||||||
|
displayName: "Publish"
|
||||||
|
pool:
|
||||||
|
vmImage: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- task: Docker@2
|
||||||
|
inputs:
|
||||||
|
containerRegistry: 'DockerHub'
|
||||||
|
repository: 'pelletier/go-toml'
|
||||||
|
command: 'buildAndPush'
|
||||||
|
Dockerfile: 'Dockerfile'
|
||||||
|
buildContext: '.'
|
||||||
|
tags: 'latest'
|
||||||
+5
-2
@@ -1,6 +1,6 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
set -e
|
set -ex
|
||||||
|
|
||||||
reference_ref=${1:-master}
|
reference_ref=${1:-master}
|
||||||
reference_git=${2:-.}
|
reference_git=${2:-.}
|
||||||
@@ -8,7 +8,6 @@ reference_git=${2:-.}
|
|||||||
if ! `hash benchstat 2>/dev/null`; then
|
if ! `hash benchstat 2>/dev/null`; then
|
||||||
echo "Installing benchstat"
|
echo "Installing benchstat"
|
||||||
go get golang.org/x/perf/cmd/benchstat
|
go get golang.org/x/perf/cmd/benchstat
|
||||||
go install golang.org/x/perf/cmd/benchstat
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
|
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
|
||||||
@@ -21,11 +20,15 @@ git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
|
|||||||
pushd ${ref_tempdir} >/dev/null
|
pushd ${ref_tempdir} >/dev/null
|
||||||
git checkout ${reference_ref} >/dev/null 2>/dev/null
|
git checkout ${reference_ref} >/dev/null 2>/dev/null
|
||||||
go test -bench=. -benchmem | tee ${ref_benchmark}
|
go test -bench=. -benchmem | tee ${ref_benchmark}
|
||||||
|
cd benchmark
|
||||||
|
go test -bench=. -benchmem | tee -a ${ref_benchmark}
|
||||||
popd >/dev/null
|
popd >/dev/null
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "=== local"
|
echo "=== local"
|
||||||
go test -bench=. -benchmem | tee ${local_benchmark}
|
go test -bench=. -benchmem | tee ${local_benchmark}
|
||||||
|
cd benchmark
|
||||||
|
go test -bench=. -benchmem | tee -a ${local_benchmark}
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "=== diff"
|
echo "=== diff"
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
package toml
|
package benchmark
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
@@ -8,7 +8,8 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
burntsushi "github.com/BurntSushi/toml"
|
burntsushi "github.com/BurntSushi/toml"
|
||||||
yaml "gopkg.in/yaml.v2"
|
"github.com/pelletier/go-toml"
|
||||||
|
"gopkg.in/yaml.v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
type benchmarkDoc struct {
|
type benchmarkDoc struct {
|
||||||
@@ -124,7 +125,7 @@ func BenchmarkParseToml(b *testing.B) {
|
|||||||
}
|
}
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
_, err := LoadReader(bytes.NewReader(fileBytes))
|
_, err := toml.LoadReader(bytes.NewReader(fileBytes))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
@@ -136,10 +137,11 @@ func BenchmarkUnmarshalToml(b *testing.B) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
|
b.ReportAllocs()
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
target := benchmarkDoc{}
|
target := benchmarkDoc{}
|
||||||
err := Unmarshal(bytes, &target)
|
err := toml.Unmarshal(bytes, &target)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
module github.com/pelletier/go-toml/benchmark
|
||||||
|
|
||||||
|
go 1.12
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/BurntSushi/toml v0.3.1
|
||||||
|
github.com/pelletier/go-toml v0.0.0
|
||||||
|
gopkg.in/yaml.v2 v2.3.0
|
||||||
|
)
|
||||||
|
|
||||||
|
replace github.com/pelletier/go-toml => ../
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||||
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||||
|
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
@@ -0,0 +1,82 @@
|
|||||||
|
// Jsontoml reads JSON and converts to TOML.
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
// cat file.toml | jsontoml > file.json
|
||||||
|
// jsontoml file1.toml > file.json
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = func() {
|
||||||
|
fmt.Fprintln(os.Stderr, "jsontoml can be used in two ways:")
|
||||||
|
fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "Reading from a file name:")
|
||||||
|
fmt.Fprintln(os.Stderr, " tomljson file.toml")
|
||||||
|
}
|
||||||
|
flag.Parse()
|
||||||
|
os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
|
||||||
|
// read from stdin and print to stdout
|
||||||
|
inputReader := defaultInput
|
||||||
|
|
||||||
|
if len(files) > 0 {
|
||||||
|
file, err := os.Open(files[0])
|
||||||
|
if err != nil {
|
||||||
|
printError(err, errorOutput)
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
inputReader = file
|
||||||
|
defer file.Close()
|
||||||
|
}
|
||||||
|
s, err := reader(inputReader)
|
||||||
|
if err != nil {
|
||||||
|
printError(err, errorOutput)
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
io.WriteString(output, s)
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func printError(err error, output io.Writer) {
|
||||||
|
io.WriteString(output, err.Error()+"\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func reader(r io.Reader) (string, error) {
|
||||||
|
jsonMap := make(map[string]interface{})
|
||||||
|
jsonBytes, err := ioutil.ReadAll(r)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
err = json.Unmarshal(jsonBytes, &jsonMap)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
tree, err := toml.TreeFromMap(jsonMap)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return mapToTOML(tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapToTOML(t *toml.Tree) (string, error) {
|
||||||
|
tomlBytes, err := t.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(tomlBytes[:]), nil
|
||||||
|
}
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
|
||||||
|
output := buffer.String()
|
||||||
|
if output != expected {
|
||||||
|
t.Errorf("incorrect %s: \n%sexpected %s: \n%s", name, output, name, expected)
|
||||||
|
t.Log([]rune(output))
|
||||||
|
t.Log([]rune(expected))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
|
||||||
|
inputReader := strings.NewReader(input)
|
||||||
|
|
||||||
|
outputBuffer := new(bytes.Buffer)
|
||||||
|
errorBuffer := new(bytes.Buffer)
|
||||||
|
|
||||||
|
returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
|
||||||
|
|
||||||
|
expectBufferEquality(t, "output", outputBuffer, expectedOutput)
|
||||||
|
expectBufferEquality(t, "error", errorBuffer, expectedError)
|
||||||
|
|
||||||
|
if returnCode != exitCode {
|
||||||
|
t.Error("incorrect return code:", returnCode, "expected", exitCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromStdin(t *testing.T) {
|
||||||
|
expectedOutput := `
|
||||||
|
[mytoml]
|
||||||
|
a = 42.0
|
||||||
|
`
|
||||||
|
input := `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
expectedError := ``
|
||||||
|
expectedExitCode := 0
|
||||||
|
|
||||||
|
expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromFile(t *testing.T) {
|
||||||
|
input := `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
tmpfile, err := ioutil.TempFile("", "example.json")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if _, err := tmpfile.Write([]byte(input)); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
defer os.Remove(tmpfile.Name())
|
||||||
|
|
||||||
|
expectedOutput := `
|
||||||
|
[mytoml]
|
||||||
|
a = 42.0
|
||||||
|
`
|
||||||
|
expectedError := ``
|
||||||
|
expectedExitCode := 0
|
||||||
|
|
||||||
|
expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainReadFromMissingFile(t *testing.T) {
|
||||||
|
var expectedError string
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
expectedError = `open /this/file/does/not/exist: The system cannot find the path specified.
|
||||||
|
`
|
||||||
|
} else {
|
||||||
|
expectedError = `open /this/file/does/not/exist: no such file or directory
|
||||||
|
`
|
||||||
|
}
|
||||||
|
|
||||||
|
expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
|
||||||
|
}
|
||||||
+24
-6
@@ -6,6 +6,7 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
@@ -16,6 +17,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
multiLineArray := flag.Bool("multiLineArray", false, "sets up the linter to encode arrays with more than one element on multiple lines instead of one.")
|
||||||
flag.Usage = func() {
|
flag.Usage = func() {
|
||||||
fmt.Fprintln(os.Stderr, "tomll can be used in two ways:")
|
fmt.Fprintln(os.Stderr, "tomll can be used in two ways:")
|
||||||
fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:")
|
fmt.Fprintln(os.Stderr, "Writing to STDIN and reading from STDOUT:")
|
||||||
@@ -25,11 +27,16 @@ func main() {
|
|||||||
fmt.Fprintln(os.Stderr, " tomll a.toml b.toml c.toml")
|
fmt.Fprintln(os.Stderr, " tomll a.toml b.toml c.toml")
|
||||||
fmt.Fprintln(os.Stderr, "")
|
fmt.Fprintln(os.Stderr, "")
|
||||||
fmt.Fprintln(os.Stderr, "When given a list of files, tomll will modify all files in place without asking.")
|
fmt.Fprintln(os.Stderr, "When given a list of files, tomll will modify all files in place without asking.")
|
||||||
|
fmt.Fprintln(os.Stderr, "When given a list of files, tomll will modify all files in place without asking.")
|
||||||
|
fmt.Fprintln(os.Stderr, "")
|
||||||
|
fmt.Fprintln(os.Stderr, "Flags:")
|
||||||
|
fmt.Fprintln(os.Stderr, "-multiLineArray sets up the linter to encode arrays with more than one element on multiple lines instead of one.")
|
||||||
}
|
}
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
|
|
||||||
// read from stdin and print to stdout
|
// read from stdin and print to stdout
|
||||||
if flag.NArg() == 0 {
|
if flag.NArg() == 0 {
|
||||||
s, err := lintReader(os.Stdin)
|
s, err := lintReader(os.Stdin, *multiLineArray)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
io.WriteString(os.Stderr, err.Error())
|
io.WriteString(os.Stderr, err.Error())
|
||||||
os.Exit(-1)
|
os.Exit(-1)
|
||||||
@@ -38,7 +45,7 @@ func main() {
|
|||||||
} else {
|
} else {
|
||||||
// otherwise modify a list of files
|
// otherwise modify a list of files
|
||||||
for _, filename := range flag.Args() {
|
for _, filename := range flag.Args() {
|
||||||
s, err := lintFile(filename)
|
s, err := lintFile(filename, *multiLineArray)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
io.WriteString(os.Stderr, err.Error())
|
io.WriteString(os.Stderr, err.Error())
|
||||||
os.Exit(-1)
|
os.Exit(-1)
|
||||||
@@ -48,18 +55,29 @@ func main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func lintFile(filename string) (string, error) {
|
func lintFile(filename string, multiLineArray bool) (string, error) {
|
||||||
tree, err := toml.LoadFile(filename)
|
tree, err := toml.LoadFile(filename)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return tree.String(), nil
|
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
if err := toml.NewEncoder(buf).ArraysWithOneElementPerLine(multiLineArray).Encode(tree); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.String(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func lintReader(r io.Reader) (string, error) {
|
func lintReader(r io.Reader, multiLineArray bool) (string, error) {
|
||||||
tree, err := toml.LoadReader(r)
|
tree, err := toml.LoadReader(r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return tree.String(), nil
|
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
if err := toml.NewEncoder(buf).ArraysWithOneElementPerLine(multiLineArray).Encode(tree); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return buf.String(), nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
// Package toml is a TOML parser and manipulation library.
|
// Package toml is a TOML parser and manipulation library.
|
||||||
//
|
//
|
||||||
// This version supports the specification as described in
|
// This version supports the specification as described in
|
||||||
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
|
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.5.0.md
|
||||||
//
|
//
|
||||||
// Marshaling
|
// Marshaling
|
||||||
//
|
//
|
||||||
|
|||||||
+64
@@ -5,6 +5,7 @@ package toml_test
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
"log"
|
||||||
|
"os"
|
||||||
|
|
||||||
toml "github.com/pelletier/go-toml"
|
toml "github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
@@ -104,3 +105,66 @@ func ExampleUnmarshal() {
|
|||||||
// Output:
|
// Output:
|
||||||
// user= pelletier
|
// user= pelletier
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ExampleEncoder_anonymous() {
|
||||||
|
type Credentials struct {
|
||||||
|
User string `toml:"user"`
|
||||||
|
Password string `toml:"password"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Protocol struct {
|
||||||
|
Name string `toml:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Config struct {
|
||||||
|
Version int `toml:"version"`
|
||||||
|
Credentials
|
||||||
|
Protocol `toml:"Protocol"`
|
||||||
|
}
|
||||||
|
config := Config{
|
||||||
|
Version: 2,
|
||||||
|
Credentials: Credentials{
|
||||||
|
User: "pelletier",
|
||||||
|
Password: "mypassword",
|
||||||
|
},
|
||||||
|
Protocol: Protocol{
|
||||||
|
Name: "tcp",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
fmt.Println("Default:")
|
||||||
|
fmt.Println("---------------")
|
||||||
|
|
||||||
|
def := toml.NewEncoder(os.Stdout)
|
||||||
|
if err := def.Encode(config); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("---------------")
|
||||||
|
fmt.Println("With promotion:")
|
||||||
|
fmt.Println("---------------")
|
||||||
|
|
||||||
|
prom := toml.NewEncoder(os.Stdout).PromoteAnonymous(true)
|
||||||
|
if err := prom.Encode(config); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
// Output:
|
||||||
|
// Default:
|
||||||
|
// ---------------
|
||||||
|
// password = "mypassword"
|
||||||
|
// user = "pelletier"
|
||||||
|
// version = 2
|
||||||
|
//
|
||||||
|
// [Protocol]
|
||||||
|
// name = "tcp"
|
||||||
|
// ---------------
|
||||||
|
// With promotion:
|
||||||
|
// ---------------
|
||||||
|
// version = 2
|
||||||
|
//
|
||||||
|
// [Credentials]
|
||||||
|
// password = "mypassword"
|
||||||
|
// user = "pelletier"
|
||||||
|
//
|
||||||
|
// [Protocol]
|
||||||
|
// name = "tcp"
|
||||||
|
}
|
||||||
|
|||||||
@@ -27,3 +27,4 @@ enabled = true
|
|||||||
|
|
||||||
[clients]
|
[clients]
|
||||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||||
|
score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported
|
||||||
@@ -27,3 +27,4 @@ enabled = true
|
|||||||
|
|
||||||
[clients]
|
[clients]
|
||||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||||
|
score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported
|
||||||
@@ -1,9 +1,3 @@
|
|||||||
module github.com/pelletier/go-toml
|
module github.com/pelletier/go-toml
|
||||||
|
|
||||||
go 1.12
|
go 1.12
|
||||||
|
|
||||||
require (
|
|
||||||
github.com/BurntSushi/toml v0.3.1
|
|
||||||
github.com/davecgh/go-spew v1.1.1
|
|
||||||
gopkg.in/yaml.v2 v2.2.2
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
|
||||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
||||||
+1
-2
@@ -5,7 +5,6 @@ package toml
|
|||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"unicode"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Convert the bare key group string to an array.
|
// Convert the bare key group string to an array.
|
||||||
@@ -109,5 +108,5 @@ func parseKey(key string) ([]string, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func isValidBareChar(r rune) bool {
|
func isValidBareChar(r rune) bool {
|
||||||
return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r)
|
return isAlphanumeric(r) || r == '-' || isDigit(r)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,13 +9,10 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"regexp"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
var dateRegexp *regexp.Regexp
|
|
||||||
|
|
||||||
// Define state functions
|
// Define state functions
|
||||||
type tomlLexStateFn func() tomlLexStateFn
|
type tomlLexStateFn func() tomlLexStateFn
|
||||||
|
|
||||||
@@ -26,7 +23,7 @@ type tomlLexer struct {
|
|||||||
currentTokenStart int
|
currentTokenStart int
|
||||||
currentTokenStop int
|
currentTokenStop int
|
||||||
tokens []token
|
tokens []token
|
||||||
depth int
|
brackets []rune
|
||||||
line int
|
line int
|
||||||
col int
|
col int
|
||||||
endbufferLine int
|
endbufferLine int
|
||||||
@@ -123,6 +120,8 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
|
|||||||
for {
|
for {
|
||||||
next := l.peek()
|
next := l.peek()
|
||||||
switch next {
|
switch next {
|
||||||
|
case '}': // after '{'
|
||||||
|
return l.lexRightCurlyBrace
|
||||||
case '[':
|
case '[':
|
||||||
return l.lexTableKey
|
return l.lexTableKey
|
||||||
case '#':
|
case '#':
|
||||||
@@ -140,10 +139,6 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
|
|||||||
l.skip()
|
l.skip()
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.depth > 0 {
|
|
||||||
return l.lexRvalue
|
|
||||||
}
|
|
||||||
|
|
||||||
if isKeyStartChar(next) {
|
if isKeyStartChar(next) {
|
||||||
return l.lexKey
|
return l.lexKey
|
||||||
}
|
}
|
||||||
@@ -167,10 +162,8 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
case '=':
|
case '=':
|
||||||
return l.lexEqual
|
return l.lexEqual
|
||||||
case '[':
|
case '[':
|
||||||
l.depth++
|
|
||||||
return l.lexLeftBracket
|
return l.lexLeftBracket
|
||||||
case ']':
|
case ']':
|
||||||
l.depth--
|
|
||||||
return l.lexRightBracket
|
return l.lexRightBracket
|
||||||
case '{':
|
case '{':
|
||||||
return l.lexLeftCurlyBrace
|
return l.lexLeftCurlyBrace
|
||||||
@@ -188,12 +181,10 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
fallthrough
|
fallthrough
|
||||||
case '\n':
|
case '\n':
|
||||||
l.skip()
|
l.skip()
|
||||||
if l.depth == 0 {
|
if len(l.brackets) > 0 && l.brackets[len(l.brackets)-1] == '[' {
|
||||||
return l.lexVoid
|
|
||||||
}
|
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
case '_':
|
}
|
||||||
return l.errorf("cannot start number with underscore")
|
return l.lexVoid
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.follow("true") {
|
if l.follow("true") {
|
||||||
@@ -222,19 +213,12 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
possibleDate := l.peekString(35)
|
if next == '+' || next == '-' {
|
||||||
dateMatch := dateRegexp.FindString(possibleDate)
|
|
||||||
if dateMatch != "" {
|
|
||||||
l.fastForward(len(dateMatch))
|
|
||||||
return l.lexDate
|
|
||||||
}
|
|
||||||
|
|
||||||
if next == '+' || next == '-' || isDigit(next) {
|
|
||||||
return l.lexNumber
|
return l.lexNumber
|
||||||
}
|
}
|
||||||
|
|
||||||
if isAlphanumeric(next) {
|
if isDigit(next) {
|
||||||
return l.lexKey
|
return l.lexDateTimeOrNumber
|
||||||
}
|
}
|
||||||
|
|
||||||
return l.errorf("no value can start with %c", next)
|
return l.errorf("no value can start with %c", next)
|
||||||
@@ -244,21 +228,288 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexDateTimeOrNumber() tomlLexStateFn {
|
||||||
|
// Could be either a date/time, or a digit.
|
||||||
|
// The options for date/times are:
|
||||||
|
// YYYY-... => date or date-time
|
||||||
|
// HH:... => time
|
||||||
|
// Anything else should be a number.
|
||||||
|
|
||||||
|
lookAhead := l.peekString(5)
|
||||||
|
if len(lookAhead) < 3 {
|
||||||
|
return l.lexNumber()
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx, r := range lookAhead {
|
||||||
|
if !isDigit(r) {
|
||||||
|
if idx == 2 && r == ':' {
|
||||||
|
return l.lexDateTimeOrTime()
|
||||||
|
}
|
||||||
|
if idx == 4 && r == '-' {
|
||||||
|
return l.lexDateTimeOrTime()
|
||||||
|
}
|
||||||
|
return l.lexNumber()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return l.lexNumber()
|
||||||
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
|
func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
|
||||||
l.next()
|
l.next()
|
||||||
l.emit(tokenLeftCurlyBrace)
|
l.emit(tokenLeftCurlyBrace)
|
||||||
return l.lexRvalue
|
l.brackets = append(l.brackets, '{')
|
||||||
|
return l.lexVoid
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
|
func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
|
||||||
l.next()
|
l.next()
|
||||||
l.emit(tokenRightCurlyBrace)
|
l.emit(tokenRightCurlyBrace)
|
||||||
|
if len(l.brackets) == 0 || l.brackets[len(l.brackets)-1] != '{' {
|
||||||
|
return l.errorf("cannot have '}' here")
|
||||||
|
}
|
||||||
|
l.brackets = l.brackets[:len(l.brackets)-1]
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexDate() tomlLexStateFn {
|
func (l *tomlLexer) lexDateTimeOrTime() tomlLexStateFn {
|
||||||
l.emit(tokenDate)
|
// Example matches:
|
||||||
|
// 1979-05-27T07:32:00Z
|
||||||
|
// 1979-05-27T00:32:00-07:00
|
||||||
|
// 1979-05-27T00:32:00.999999-07:00
|
||||||
|
// 1979-05-27 07:32:00Z
|
||||||
|
// 1979-05-27 00:32:00-07:00
|
||||||
|
// 1979-05-27 00:32:00.999999-07:00
|
||||||
|
// 1979-05-27T07:32:00
|
||||||
|
// 1979-05-27T00:32:00.999999
|
||||||
|
// 1979-05-27 07:32:00
|
||||||
|
// 1979-05-27 00:32:00.999999
|
||||||
|
// 1979-05-27
|
||||||
|
// 07:32:00
|
||||||
|
// 00:32:00.999999
|
||||||
|
|
||||||
|
// we already know those two are digits
|
||||||
|
l.next()
|
||||||
|
l.next()
|
||||||
|
|
||||||
|
// Got 2 digits. At that point it could be either a time or a date(-time).
|
||||||
|
|
||||||
|
r := l.next()
|
||||||
|
if r == ':' {
|
||||||
|
return l.lexTime()
|
||||||
|
}
|
||||||
|
|
||||||
|
return l.lexDateTime()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexDateTime() tomlLexStateFn {
|
||||||
|
// This state accepts an offset date-time, a local date-time, or a local date.
|
||||||
|
//
|
||||||
|
// v--- cursor
|
||||||
|
// 1979-05-27T07:32:00Z
|
||||||
|
// 1979-05-27T00:32:00-07:00
|
||||||
|
// 1979-05-27T00:32:00.999999-07:00
|
||||||
|
// 1979-05-27 07:32:00Z
|
||||||
|
// 1979-05-27 00:32:00-07:00
|
||||||
|
// 1979-05-27 00:32:00.999999-07:00
|
||||||
|
// 1979-05-27T07:32:00
|
||||||
|
// 1979-05-27T00:32:00.999999
|
||||||
|
// 1979-05-27 07:32:00
|
||||||
|
// 1979-05-27 00:32:00.999999
|
||||||
|
// 1979-05-27
|
||||||
|
|
||||||
|
// date
|
||||||
|
|
||||||
|
// already checked by lexRvalue
|
||||||
|
l.next() // digit
|
||||||
|
l.next() // -
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid month digit in date: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r := l.next()
|
||||||
|
if r != '-' {
|
||||||
|
return l.errorf("expected - to separate month of a date, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid day digit in date: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
l.emit(tokenLocalDate)
|
||||||
|
|
||||||
|
r = l.peek()
|
||||||
|
|
||||||
|
if r == eof {
|
||||||
|
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
|
if r != ' ' && r != 'T' {
|
||||||
|
return l.errorf("incorrect date/time separation character: %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
if r == ' ' {
|
||||||
|
lookAhead := l.peekString(3)[1:]
|
||||||
|
if len(lookAhead) < 2 {
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
for _, r := range lookAhead {
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
l.skip() // skip the T or ' '
|
||||||
|
|
||||||
|
// time
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid hour digit in time: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r = l.next()
|
||||||
|
if r != ':' {
|
||||||
|
return l.errorf("time hour/minute separator should be :, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid minute digit in time: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r = l.next()
|
||||||
|
if r != ':' {
|
||||||
|
return l.errorf("time minute/second separator should be :, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid second digit in time: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r = l.peek()
|
||||||
|
if r == '.' {
|
||||||
|
l.next()
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("expected at least one digit in time's fraction, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
r := l.peek()
|
||||||
|
if !isDigit(r) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
l.next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
l.emit(tokenLocalTime)
|
||||||
|
|
||||||
|
return l.lexTimeOffset
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexTimeOffset() tomlLexStateFn {
|
||||||
|
// potential offset
|
||||||
|
|
||||||
|
// Z
|
||||||
|
// -07:00
|
||||||
|
// +07:00
|
||||||
|
// nothing
|
||||||
|
|
||||||
|
r := l.peek()
|
||||||
|
|
||||||
|
if r == 'Z' {
|
||||||
|
l.next()
|
||||||
|
l.emit(tokenTimeOffset)
|
||||||
|
} else if r == '+' || r == '-' {
|
||||||
|
l.next()
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid hour digit in time offset: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r = l.next()
|
||||||
|
if r != ':' {
|
||||||
|
return l.errorf("time offset hour/minute separator should be :, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid minute digit in time offset: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
l.emit(tokenTimeOffset)
|
||||||
|
}
|
||||||
|
|
||||||
|
return l.lexRvalue
|
||||||
|
}
|
||||||
|
|
||||||
|
func (l *tomlLexer) lexTime() tomlLexStateFn {
|
||||||
|
// v--- cursor
|
||||||
|
// 07:32:00
|
||||||
|
// 00:32:00.999999
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid minute digit in time: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r := l.next()
|
||||||
|
if r != ':' {
|
||||||
|
return l.errorf("time minute/second separator should be :, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < 2; i++ {
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("invalid second digit in time: %c", r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r = l.peek()
|
||||||
|
if r == '.' {
|
||||||
|
l.next()
|
||||||
|
r := l.next()
|
||||||
|
if !isDigit(r) {
|
||||||
|
return l.errorf("expected at least one digit in time's fraction, not %c", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
r := l.peek()
|
||||||
|
if !isDigit(r) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
l.next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
l.emit(tokenLocalTime)
|
||||||
|
return l.lexRvalue
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexTrue() tomlLexStateFn {
|
func (l *tomlLexer) lexTrue() tomlLexStateFn {
|
||||||
@@ -294,13 +545,16 @@ func (l *tomlLexer) lexEqual() tomlLexStateFn {
|
|||||||
func (l *tomlLexer) lexComma() tomlLexStateFn {
|
func (l *tomlLexer) lexComma() tomlLexStateFn {
|
||||||
l.next()
|
l.next()
|
||||||
l.emit(tokenComma)
|
l.emit(tokenComma)
|
||||||
|
if len(l.brackets) > 0 && l.brackets[len(l.brackets)-1] == '{' {
|
||||||
|
return l.lexVoid
|
||||||
|
}
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse the key and emits its value without escape sequences.
|
// Parse the key and emits its value without escape sequences.
|
||||||
// bare keys, basic string keys and literal string keys are supported.
|
// bare keys, basic string keys and literal string keys are supported.
|
||||||
func (l *tomlLexer) lexKey() tomlLexStateFn {
|
func (l *tomlLexer) lexKey() tomlLexStateFn {
|
||||||
growingString := ""
|
var sb strings.Builder
|
||||||
|
|
||||||
for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
|
for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
|
||||||
if r == '"' {
|
if r == '"' {
|
||||||
@@ -309,7 +563,9 @@ func (l *tomlLexer) lexKey() tomlLexStateFn {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return l.errorf(err.Error())
|
return l.errorf(err.Error())
|
||||||
}
|
}
|
||||||
growingString += "\"" + str + "\""
|
sb.WriteString("\"")
|
||||||
|
sb.WriteString(str)
|
||||||
|
sb.WriteString("\"")
|
||||||
l.next()
|
l.next()
|
||||||
continue
|
continue
|
||||||
} else if r == '\'' {
|
} else if r == '\'' {
|
||||||
@@ -318,22 +574,45 @@ func (l *tomlLexer) lexKey() tomlLexStateFn {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return l.errorf(err.Error())
|
return l.errorf(err.Error())
|
||||||
}
|
}
|
||||||
growingString += "'" + str + "'"
|
sb.WriteString("'")
|
||||||
|
sb.WriteString(str)
|
||||||
|
sb.WriteString("'")
|
||||||
l.next()
|
l.next()
|
||||||
continue
|
continue
|
||||||
} else if r == '\n' {
|
} else if r == '\n' {
|
||||||
return l.errorf("keys cannot contain new lines")
|
return l.errorf("keys cannot contain new lines")
|
||||||
} else if isSpace(r) {
|
} else if isSpace(r) {
|
||||||
|
var str strings.Builder
|
||||||
|
str.WriteString(" ")
|
||||||
|
|
||||||
|
// skip trailing whitespace
|
||||||
|
l.next()
|
||||||
|
for r = l.peek(); isSpace(r); r = l.peek() {
|
||||||
|
str.WriteRune(r)
|
||||||
|
l.next()
|
||||||
|
}
|
||||||
|
// break loop if not a dot
|
||||||
|
if r != '.' {
|
||||||
break
|
break
|
||||||
|
}
|
||||||
|
str.WriteString(".")
|
||||||
|
// skip trailing whitespace after dot
|
||||||
|
l.next()
|
||||||
|
for r = l.peek(); isSpace(r); r = l.peek() {
|
||||||
|
str.WriteRune(r)
|
||||||
|
l.next()
|
||||||
|
}
|
||||||
|
sb.WriteString(str.String())
|
||||||
|
continue
|
||||||
} else if r == '.' {
|
} else if r == '.' {
|
||||||
// skip
|
// skip
|
||||||
} else if !isValidBareChar(r) {
|
} else if !isValidBareChar(r) {
|
||||||
return l.errorf("keys cannot contain %c character", r)
|
return l.errorf("keys cannot contain %c character", r)
|
||||||
}
|
}
|
||||||
growingString += string(r)
|
sb.WriteRune(r)
|
||||||
l.next()
|
l.next()
|
||||||
}
|
}
|
||||||
l.emitWithValue(tokenKey, growingString)
|
l.emitWithValue(tokenKey, sb.String())
|
||||||
return l.lexVoid
|
return l.lexVoid
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -353,11 +632,12 @@ func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
|
|||||||
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
|
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
|
||||||
l.next()
|
l.next()
|
||||||
l.emit(tokenLeftBracket)
|
l.emit(tokenLeftBracket)
|
||||||
|
l.brackets = append(l.brackets, '[')
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
|
func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
|
||||||
growingString := ""
|
var sb strings.Builder
|
||||||
|
|
||||||
if discardLeadingNewLine {
|
if discardLeadingNewLine {
|
||||||
if l.follow("\r\n") {
|
if l.follow("\r\n") {
|
||||||
@@ -371,14 +651,14 @@ func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNe
|
|||||||
// find end of string
|
// find end of string
|
||||||
for {
|
for {
|
||||||
if l.follow(terminator) {
|
if l.follow(terminator) {
|
||||||
return growingString, nil
|
return sb.String(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
next := l.peek()
|
next := l.peek()
|
||||||
if next == eof {
|
if next == eof {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
growingString += string(l.next())
|
sb.WriteRune(l.next())
|
||||||
}
|
}
|
||||||
|
|
||||||
return "", errors.New("unclosed string")
|
return "", errors.New("unclosed string")
|
||||||
@@ -412,7 +692,7 @@ func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
|
|||||||
// Terminator is the substring indicating the end of the token.
|
// Terminator is the substring indicating the end of the token.
|
||||||
// The resulting string does not include the terminator.
|
// The resulting string does not include the terminator.
|
||||||
func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
|
func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
|
||||||
growingString := ""
|
var sb strings.Builder
|
||||||
|
|
||||||
if discardLeadingNewLine {
|
if discardLeadingNewLine {
|
||||||
if l.follow("\r\n") {
|
if l.follow("\r\n") {
|
||||||
@@ -425,7 +705,7 @@ func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine,
|
|||||||
|
|
||||||
for {
|
for {
|
||||||
if l.follow(terminator) {
|
if l.follow(terminator) {
|
||||||
return growingString, nil
|
return sb.String(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.follow("\\") {
|
if l.follow("\\") {
|
||||||
@@ -443,72 +723,72 @@ func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine,
|
|||||||
l.next()
|
l.next()
|
||||||
}
|
}
|
||||||
case '"':
|
case '"':
|
||||||
growingString += "\""
|
sb.WriteString("\"")
|
||||||
l.next()
|
l.next()
|
||||||
case 'n':
|
case 'n':
|
||||||
growingString += "\n"
|
sb.WriteString("\n")
|
||||||
l.next()
|
l.next()
|
||||||
case 'b':
|
case 'b':
|
||||||
growingString += "\b"
|
sb.WriteString("\b")
|
||||||
l.next()
|
l.next()
|
||||||
case 'f':
|
case 'f':
|
||||||
growingString += "\f"
|
sb.WriteString("\f")
|
||||||
l.next()
|
l.next()
|
||||||
case '/':
|
case '/':
|
||||||
growingString += "/"
|
sb.WriteString("/")
|
||||||
l.next()
|
l.next()
|
||||||
case 't':
|
case 't':
|
||||||
growingString += "\t"
|
sb.WriteString("\t")
|
||||||
l.next()
|
l.next()
|
||||||
case 'r':
|
case 'r':
|
||||||
growingString += "\r"
|
sb.WriteString("\r")
|
||||||
l.next()
|
l.next()
|
||||||
case '\\':
|
case '\\':
|
||||||
growingString += "\\"
|
sb.WriteString("\\")
|
||||||
l.next()
|
l.next()
|
||||||
case 'u':
|
case 'u':
|
||||||
l.next()
|
l.next()
|
||||||
code := ""
|
var code strings.Builder
|
||||||
for i := 0; i < 4; i++ {
|
for i := 0; i < 4; i++ {
|
||||||
c := l.peek()
|
c := l.peek()
|
||||||
if !isHexDigit(c) {
|
if !isHexDigit(c) {
|
||||||
return "", errors.New("unfinished unicode escape")
|
return "", errors.New("unfinished unicode escape")
|
||||||
}
|
}
|
||||||
l.next()
|
l.next()
|
||||||
code = code + string(c)
|
code.WriteRune(c)
|
||||||
}
|
}
|
||||||
intcode, err := strconv.ParseInt(code, 16, 32)
|
intcode, err := strconv.ParseInt(code.String(), 16, 32)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", errors.New("invalid unicode escape: \\u" + code)
|
return "", errors.New("invalid unicode escape: \\u" + code.String())
|
||||||
}
|
}
|
||||||
growingString += string(rune(intcode))
|
sb.WriteRune(rune(intcode))
|
||||||
case 'U':
|
case 'U':
|
||||||
l.next()
|
l.next()
|
||||||
code := ""
|
var code strings.Builder
|
||||||
for i := 0; i < 8; i++ {
|
for i := 0; i < 8; i++ {
|
||||||
c := l.peek()
|
c := l.peek()
|
||||||
if !isHexDigit(c) {
|
if !isHexDigit(c) {
|
||||||
return "", errors.New("unfinished unicode escape")
|
return "", errors.New("unfinished unicode escape")
|
||||||
}
|
}
|
||||||
l.next()
|
l.next()
|
||||||
code = code + string(c)
|
code.WriteRune(c)
|
||||||
}
|
}
|
||||||
intcode, err := strconv.ParseInt(code, 16, 64)
|
intcode, err := strconv.ParseInt(code.String(), 16, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", errors.New("invalid unicode escape: \\U" + code)
|
return "", errors.New("invalid unicode escape: \\U" + code.String())
|
||||||
}
|
}
|
||||||
growingString += string(rune(intcode))
|
sb.WriteRune(rune(intcode))
|
||||||
default:
|
default:
|
||||||
return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
|
return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
r := l.peek()
|
r := l.peek()
|
||||||
|
|
||||||
if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
|
if 0x00 <= r && r <= 0x1F && r != '\t' && !(acceptNewLines && (r == '\n' || r == '\r')) {
|
||||||
return "", fmt.Errorf("unescaped control character %U", r)
|
return "", fmt.Errorf("unescaped control character %U", r)
|
||||||
}
|
}
|
||||||
l.next()
|
l.next()
|
||||||
growingString += string(r)
|
sb.WriteRune(r)
|
||||||
}
|
}
|
||||||
|
|
||||||
if l.peek() == eof {
|
if l.peek() == eof {
|
||||||
@@ -535,7 +815,6 @@ func (l *tomlLexer) lexString() tomlLexStateFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
|
str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return l.errorf(err.Error())
|
return l.errorf(err.Error())
|
||||||
}
|
}
|
||||||
@@ -607,6 +886,10 @@ func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
|
|||||||
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
|
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
|
||||||
l.next()
|
l.next()
|
||||||
l.emit(tokenRightBracket)
|
l.emit(tokenRightBracket)
|
||||||
|
if len(l.brackets) == 0 || l.brackets[len(l.brackets)-1] != '[' {
|
||||||
|
return l.errorf("cannot have ']' here")
|
||||||
|
}
|
||||||
|
l.brackets = l.brackets[:len(l.brackets)-1]
|
||||||
return l.lexRvalue
|
return l.lexRvalue
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -732,10 +1015,6 @@ func (l *tomlLexer) run() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
|
||||||
dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Entry point
|
// Entry point
|
||||||
func lexToml(inputBytes []byte) []token {
|
func lexToml(inputBytes []byte) []token {
|
||||||
runes := bytes.Runes(inputBytes)
|
runes := bytes.Runes(inputBytes)
|
||||||
|
|||||||
+520
-23
@@ -1,17 +1,63 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
"reflect"
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
|
"text/tabwriter"
|
||||||
)
|
)
|
||||||
|
|
||||||
func testFlow(t *testing.T, input string, expectedFlow []token) {
|
func testFlow(t *testing.T, input string, expectedFlow []token) {
|
||||||
tokens := lexToml([]byte(input))
|
tokens := lexToml([]byte(input))
|
||||||
|
|
||||||
if !reflect.DeepEqual(tokens, expectedFlow) {
|
if !reflect.DeepEqual(tokens, expectedFlow) {
|
||||||
t.Fatal("Different flows. Expected\n", expectedFlow, "\nGot:\n", tokens)
|
diffFlowsColumnsFatal(t, expectedFlow, tokens)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func diffFlowsColumnsFatal(t *testing.T, expectedFlow []token, actualFlow []token) {
|
||||||
|
max := len(expectedFlow)
|
||||||
|
if len(actualFlow) > max {
|
||||||
|
max = len(actualFlow)
|
||||||
|
}
|
||||||
|
|
||||||
|
b := &bytes.Buffer{}
|
||||||
|
w := tabwriter.NewWriter(b, 0, 0, 1, ' ', tabwriter.Debug)
|
||||||
|
|
||||||
|
fmt.Fprintln(w, "expected\tT\tP\tactual\tT\tP\tdiff")
|
||||||
|
|
||||||
|
for i := 0; i < max; i++ {
|
||||||
|
expected := ""
|
||||||
|
expectedType := ""
|
||||||
|
expectedPos := ""
|
||||||
|
if i < len(expectedFlow) {
|
||||||
|
expected = fmt.Sprintf("%s", expectedFlow[i])
|
||||||
|
expectedType = fmt.Sprintf("%s", expectedFlow[i].typ)
|
||||||
|
expectedPos = expectedFlow[i].Position.String()
|
||||||
|
}
|
||||||
|
actual := ""
|
||||||
|
actualType := ""
|
||||||
|
actualPos := ""
|
||||||
|
if i < len(actualFlow) {
|
||||||
|
actual = fmt.Sprintf("%s", actualFlow[i])
|
||||||
|
actualType = fmt.Sprintf("%s", actualFlow[i].typ)
|
||||||
|
actualPos = actualFlow[i].Position.String()
|
||||||
|
}
|
||||||
|
different := ""
|
||||||
|
if i >= len(expectedFlow) {
|
||||||
|
different = "+"
|
||||||
|
} else if i >= len(actualFlow) {
|
||||||
|
different = "-"
|
||||||
|
} else if !reflect.DeepEqual(expectedFlow[i], actualFlow[i]) {
|
||||||
|
different = "x"
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\t%s\n", expected, expectedType, expectedPos, actual, actualType, actualPos, different)
|
||||||
|
}
|
||||||
|
w.Flush()
|
||||||
|
t.Errorf("Different flows:\n%s", b.String())
|
||||||
|
}
|
||||||
|
|
||||||
func TestValidKeyGroup(t *testing.T) {
|
func TestValidKeyGroup(t *testing.T) {
|
||||||
testFlow(t, "[hello world]", []token{
|
testFlow(t, "[hello world]", []token{
|
||||||
{Position{1, 1}, tokenLeftBracket, "["},
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
@@ -22,11 +68,20 @@ func TestValidKeyGroup(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
|
func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
|
||||||
testFlow(t, `[ j . "ʞ" . l ]`, []token{
|
testFlow(t, `[ j . "ʞ" . l . 'ɯ' ]`, []token{
|
||||||
{Position{1, 1}, tokenLeftBracket, "["},
|
{Position{1, 1}, tokenLeftBracket, "["},
|
||||||
{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
|
{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l . 'ɯ' `},
|
||||||
{Position{1, 15}, tokenRightBracket, "]"},
|
{Position{1, 21}, tokenRightBracket, "]"},
|
||||||
{Position{1, 16}, tokenEOF, ""},
|
{Position{1, 22}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNestedQuotedUnicodeKeyAssign(t *testing.T) {
|
||||||
|
testFlow(t, ` j . "ʞ" . l . 'ɯ' = 3`, []token{
|
||||||
|
{Position{1, 2}, tokenKey, `j . "ʞ" . l . 'ɯ'`},
|
||||||
|
{Position{1, 20}, tokenEqual, "="},
|
||||||
|
{Position{1, 22}, tokenInteger, "3"},
|
||||||
|
{Position{1, 23}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -105,9 +160,9 @@ func TestBasicKeyWithUppercaseMix(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicKeyWithInternationalCharacters(t *testing.T) {
|
func TestBasicKeyWithInternationalCharacters(t *testing.T) {
|
||||||
testFlow(t, "héllÖ", []token{
|
testFlow(t, "'héllÖ'", []token{
|
||||||
{Position{1, 1}, tokenKey, "héllÖ"},
|
{Position{1, 1}, tokenKey, "'héllÖ'"},
|
||||||
{Position{1, 6}, tokenEOF, ""},
|
{Position{1, 8}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -289,37 +344,281 @@ func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDateRegexp(t *testing.T) {
|
|
||||||
if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" {
|
|
||||||
t.Error("basic lexing")
|
|
||||||
}
|
|
||||||
if dateRegexp.FindString("1979-05-27T00:32:00-07:00") == "" {
|
|
||||||
t.Error("offset lexing")
|
|
||||||
}
|
|
||||||
if dateRegexp.FindString("1979-05-27T00:32:00.999999-07:00") == "" {
|
|
||||||
t.Error("nano precision lexing")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestKeyEqualDate(t *testing.T) {
|
func TestKeyEqualDate(t *testing.T) {
|
||||||
|
t.Run("local date time", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27T07:32:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "07:32:00"},
|
||||||
|
{Position{1, 26}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date time space", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 07:32:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "07:32:00"},
|
||||||
|
{Position{1, 26}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date time fraction", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27T00:32:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
|
||||||
|
{Position{1, 33}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date time fraction space", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:32:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
|
||||||
|
{Position{1, 33}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time utc", func(t *testing.T) {
|
||||||
testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
|
testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
|
||||||
{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "07:32:00"},
|
||||||
|
{Position{1, 26}, tokenTimeOffset, "Z"},
|
||||||
{Position{1, 27}, tokenEOF, ""},
|
{Position{1, 27}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time -07:00", func(t *testing.T) {
|
||||||
testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
|
testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
|
||||||
{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00"},
|
||||||
|
{Position{1, 26}, tokenTimeOffset, "-07:00"},
|
||||||
{Position{1, 32}, tokenEOF, ""},
|
{Position{1, 32}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time fractions -07:00", func(t *testing.T) {
|
||||||
testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
|
testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
|
||||||
{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
{Position{1, 5}, tokenEqual, "="},
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
|
||||||
|
{Position{1, 33}, tokenTimeOffset, "-07:00"},
|
||||||
{Position{1, 39}, tokenEOF, ""},
|
{Position{1, 39}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time space separated utc", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 07:32:00Z", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "07:32:00"},
|
||||||
|
{Position{1, 26}, tokenTimeOffset, "Z"},
|
||||||
|
{Position{1, 27}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time space separated offset", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:32:00-07:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00"},
|
||||||
|
{Position{1, 26}, tokenTimeOffset, "-07:00"},
|
||||||
|
{Position{1, 32}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time space separated fraction offset", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:32:00.999999-07:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
|
||||||
|
{Position{1, 33}, tokenTimeOffset, "-07:00"},
|
||||||
|
{Position{1, 39}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 17}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local time", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 07:32:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalTime, "07:32:00"},
|
||||||
|
{Position{1, 15}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local time fraction", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 00:32:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalTime, "00:32:00.999999"},
|
||||||
|
{Position{1, 22}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local time invalid minute digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 00:3x:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenError, "invalid minute digit in time: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local time invalid minute/second digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 00:30x00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenError, "time minute/second separator should be :, not x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local time invalid second digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 00:30:x0.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenError, "invalid second digit in time: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local time invalid second digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 00:30:00.F", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenError, "expected at least one digit in time's fraction, not F"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid minute digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:3x:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenError, "invalid minute digit in time: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid hour digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27T0x:30:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenError, "invalid hour digit in time: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid hour digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27T00x30:00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenError, "time hour/minute separator should be :, not x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid minute/second digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:30x00.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenError, "time minute/second separator should be :, not x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid second digit", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:30:x0.999999", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenError, "invalid second digit in time: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid fraction", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:30:00.F", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenError, "expected at least one digit in time's fraction, not F"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time invalid month-date separator", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05X27 00:30:00.F", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenError, "expected - to separate month of a date, not X"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time extra whitespace", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 ", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 19}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("local date-time extra whitespace", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 ", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 22}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time space separated offset", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:32:00-0x:00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00"},
|
||||||
|
{Position{1, 26}, tokenError, "invalid hour digit in time offset: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time space separated offset", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:32:00-07x00", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00"},
|
||||||
|
{Position{1, 26}, tokenError, "time offset hour/minute separator should be :, not x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("offset date-time space separated offset", func(t *testing.T) {
|
||||||
|
testFlow(t, "foo = 1979-05-27 00:32:00-07:x0", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLocalDate, "1979-05-27"},
|
||||||
|
{Position{1, 18}, tokenLocalTime, "00:32:00"},
|
||||||
|
{Position{1, 26}, tokenError, "invalid minute digit in time offset: x"},
|
||||||
|
})
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFloatEndingWithDot(t *testing.T) {
|
func TestFloatEndingWithDot(t *testing.T) {
|
||||||
@@ -633,6 +932,13 @@ func TestMultilineString(t *testing.T) {
|
|||||||
{Position{6, 9}, tokenEOF, ""},
|
{Position{6, 9}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
|
|
||||||
|
testFlow(t, `foo = """hello world"""`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 10}, tokenString, "hello\tworld"},
|
||||||
|
{Position{1, 24}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
|
||||||
testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
|
testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
|
||||||
{Position{1, 1}, tokenKey, "key2"},
|
{Position{1, 1}, tokenKey, "key2"},
|
||||||
{Position{1, 6}, tokenEqual, "="},
|
{Position{1, 6}, tokenEqual, "="},
|
||||||
@@ -670,6 +976,7 @@ func TestUnicodeString(t *testing.T) {
|
|||||||
{Position{1, 22}, tokenEOF, ""},
|
{Position{1, 22}, tokenEOF, ""},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestEscapeInString(t *testing.T) {
|
func TestEscapeInString(t *testing.T) {
|
||||||
testFlow(t, `foo = "\b\f\/"`, []token{
|
testFlow(t, `foo = "\b\f\/"`, []token{
|
||||||
{Position{1, 1}, tokenKey, "foo"},
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
@@ -679,6 +986,15 @@ func TestEscapeInString(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTabInString(t *testing.T) {
|
||||||
|
testFlow(t, `foo = "hello world"`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 8}, tokenString, "hello\tworld"},
|
||||||
|
{Position{1, 20}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestKeyGroupArray(t *testing.T) {
|
func TestKeyGroupArray(t *testing.T) {
|
||||||
testFlow(t, "[[foo]]", []token{
|
testFlow(t, "[[foo]]", []token{
|
||||||
{Position{1, 1}, tokenDoubleLeftBracket, "[["},
|
{Position{1, 1}, tokenDoubleLeftBracket, "[["},
|
||||||
@@ -697,6 +1013,15 @@ func TestQuotedKey(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestQuotedKeyTab(t *testing.T) {
|
||||||
|
testFlow(t, "\"num\tber\" = 123", []token{
|
||||||
|
{Position{1, 1}, tokenKey, "\"num\tber\""},
|
||||||
|
{Position{1, 11}, tokenEqual, "="},
|
||||||
|
{Position{1, 13}, tokenInteger, "123"},
|
||||||
|
{Position{1, 16}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestKeyNewline(t *testing.T) {
|
func TestKeyNewline(t *testing.T) {
|
||||||
testFlow(t, "a\n= 4", []token{
|
testFlow(t, "a\n= 4", []token{
|
||||||
{Position{1, 1}, tokenError, "keys cannot contain new lines"},
|
{Position{1, 1}, tokenError, "keys cannot contain new lines"},
|
||||||
@@ -726,6 +1051,178 @@ func TestLexUnknownRvalue(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableEmpty(t *testing.T) {
|
||||||
|
testFlow(t, `foo = {}`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 8}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 9}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableBareKey(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { bar = "baz" }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "bar"},
|
||||||
|
{Position{1, 13}, tokenEqual, "="},
|
||||||
|
{Position{1, 16}, tokenString, "baz"},
|
||||||
|
{Position{1, 21}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 22}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableBareKeyDash(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { -bar = "baz" }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "-bar"},
|
||||||
|
{Position{1, 14}, tokenEqual, "="},
|
||||||
|
{Position{1, 17}, tokenString, "baz"},
|
||||||
|
{Position{1, 22}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 23}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableBareKeyInArray(t *testing.T) {
|
||||||
|
testFlow(t, `foo = [{ -bar_ = "baz" }]`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftBracket, "["},
|
||||||
|
{Position{1, 8}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 10}, tokenKey, "-bar_"},
|
||||||
|
{Position{1, 16}, tokenEqual, "="},
|
||||||
|
{Position{1, 19}, tokenString, "baz"},
|
||||||
|
{Position{1, 24}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 25}, tokenRightBracket, "]"},
|
||||||
|
{Position{1, 26}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableError1(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { 123 = 0 ]`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "123"},
|
||||||
|
{Position{1, 13}, tokenEqual, "="},
|
||||||
|
{Position{1, 15}, tokenInteger, "0"},
|
||||||
|
{Position{1, 17}, tokenRightBracket, "]"},
|
||||||
|
{Position{1, 18}, tokenError, "cannot have ']' here"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableError2(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { 123 = 0 }}`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "123"},
|
||||||
|
{Position{1, 13}, tokenEqual, "="},
|
||||||
|
{Position{1, 15}, tokenInteger, "0"},
|
||||||
|
{Position{1, 17}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 18}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 19}, tokenError, "cannot have '}' here"},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableDottedKey1(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { a = 0, 123.45abc = 0 }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "a"},
|
||||||
|
{Position{1, 11}, tokenEqual, "="},
|
||||||
|
{Position{1, 13}, tokenInteger, "0"},
|
||||||
|
{Position{1, 14}, tokenComma, ","},
|
||||||
|
{Position{1, 16}, tokenKey, "123.45abc"},
|
||||||
|
{Position{1, 26}, tokenEqual, "="},
|
||||||
|
{Position{1, 28}, tokenInteger, "0"},
|
||||||
|
{Position{1, 30}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 31}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableDottedKey2(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { a = 0, '123'.'45abc' = 0 }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "a"},
|
||||||
|
{Position{1, 11}, tokenEqual, "="},
|
||||||
|
{Position{1, 13}, tokenInteger, "0"},
|
||||||
|
{Position{1, 14}, tokenComma, ","},
|
||||||
|
{Position{1, 16}, tokenKey, "'123'.'45abc'"},
|
||||||
|
{Position{1, 30}, tokenEqual, "="},
|
||||||
|
{Position{1, 32}, tokenInteger, "0"},
|
||||||
|
{Position{1, 34}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 35}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableDottedKey3(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { a = 0, "123"."45ʎǝʞ" = 0 }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "a"},
|
||||||
|
{Position{1, 11}, tokenEqual, "="},
|
||||||
|
{Position{1, 13}, tokenInteger, "0"},
|
||||||
|
{Position{1, 14}, tokenComma, ","},
|
||||||
|
{Position{1, 16}, tokenKey, `"123"."45ʎǝʞ"`},
|
||||||
|
{Position{1, 30}, tokenEqual, "="},
|
||||||
|
{Position{1, 32}, tokenInteger, "0"},
|
||||||
|
{Position{1, 34}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 35}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableBareKeyWithComma(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { -bar1 = "baz", -bar_ = "baz" }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "-bar1"},
|
||||||
|
{Position{1, 15}, tokenEqual, "="},
|
||||||
|
{Position{1, 18}, tokenString, "baz"},
|
||||||
|
{Position{1, 22}, tokenComma, ","},
|
||||||
|
{Position{1, 24}, tokenKey, "-bar_"},
|
||||||
|
{Position{1, 30}, tokenEqual, "="},
|
||||||
|
{Position{1, 33}, tokenString, "baz"},
|
||||||
|
{Position{1, 38}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 39}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableBareKeyUnderscore(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { _bar = "baz" }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "_bar"},
|
||||||
|
{Position{1, 14}, tokenEqual, "="},
|
||||||
|
{Position{1, 17}, tokenString, "baz"},
|
||||||
|
{Position{1, 22}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 23}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLexInlineTableQuotedKey(t *testing.T) {
|
||||||
|
testFlow(t, `foo = { "bar" = "baz" }`, []token{
|
||||||
|
{Position{1, 1}, tokenKey, "foo"},
|
||||||
|
{Position{1, 5}, tokenEqual, "="},
|
||||||
|
{Position{1, 7}, tokenLeftCurlyBrace, "{"},
|
||||||
|
{Position{1, 9}, tokenKey, "\"bar\""},
|
||||||
|
{Position{1, 15}, tokenEqual, "="},
|
||||||
|
{Position{1, 18}, tokenString, "baz"},
|
||||||
|
{Position{1, 23}, tokenRightCurlyBrace, "}"},
|
||||||
|
{Position{1, 24}, tokenEOF, ""},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func BenchmarkLexer(b *testing.B) {
|
func BenchmarkLexer(b *testing.B) {
|
||||||
sample := `title = "Hugo: A Fast and Flexible Website Generator"
|
sample := `title = "Hugo: A Fast and Flexible Website Generator"
|
||||||
baseurl = "http://gohugo.io/"
|
baseurl = "http://gohugo.io/"
|
||||||
|
|||||||
+287
@@ -0,0 +1,287 @@
|
|||||||
|
// Implementation of TOML's local date/time.
|
||||||
|
//
|
||||||
|
// Copied over from Google's civil to avoid pulling all the Google dependencies.
|
||||||
|
// Originals:
|
||||||
|
// https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil.go
|
||||||
|
// Changes:
|
||||||
|
// * Renamed files from civil* to localtime*.
|
||||||
|
// * Package changed from civil to toml.
|
||||||
|
// * 'Local' prefix added to all structs.
|
||||||
|
//
|
||||||
|
// Copyright 2016 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
// Package civil implements types for civil time, a time-zone-independent
|
||||||
|
// representation of time that follows the rules of the proleptic
|
||||||
|
// Gregorian calendar with exactly 24-hour days, 60-minute hours, and 60-second
|
||||||
|
// minutes.
|
||||||
|
//
|
||||||
|
// Because they lack location information, these types do not represent unique
|
||||||
|
// moments or intervals of time. Use time.Time for that purpose.
|
||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// A LocalDate represents a date (year, month, day).
|
||||||
|
//
|
||||||
|
// This type does not include location information, and therefore does not
|
||||||
|
// describe a unique 24-hour timespan.
|
||||||
|
type LocalDate struct {
|
||||||
|
Year int // Year (e.g., 2014).
|
||||||
|
Month time.Month // Month of the year (January = 1, ...).
|
||||||
|
Day int // Day of the month, starting at 1.
|
||||||
|
}
|
||||||
|
|
||||||
|
// LocalDateOf returns the LocalDate in which a time occurs in that time's location.
|
||||||
|
func LocalDateOf(t time.Time) LocalDate {
|
||||||
|
var d LocalDate
|
||||||
|
d.Year, d.Month, d.Day = t.Date()
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseLocalDate parses a string in RFC3339 full-date format and returns the date value it represents.
|
||||||
|
func ParseLocalDate(s string) (LocalDate, error) {
|
||||||
|
t, err := time.Parse("2006-01-02", s)
|
||||||
|
if err != nil {
|
||||||
|
return LocalDate{}, err
|
||||||
|
}
|
||||||
|
return LocalDateOf(t), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// String returns the date in RFC3339 full-date format.
|
||||||
|
func (d LocalDate) String() string {
|
||||||
|
return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsValid reports whether the date is valid.
|
||||||
|
func (d LocalDate) IsValid() bool {
|
||||||
|
return LocalDateOf(d.In(time.UTC)) == d
|
||||||
|
}
|
||||||
|
|
||||||
|
// In returns the time corresponding to time 00:00:00 of the date in the location.
|
||||||
|
//
|
||||||
|
// In is always consistent with time.LocalDate, even when time.LocalDate returns a time
|
||||||
|
// on a different day. For example, if loc is America/Indiana/Vincennes, then both
|
||||||
|
// time.LocalDate(1955, time.May, 1, 0, 0, 0, 0, loc)
|
||||||
|
// and
|
||||||
|
// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}.In(loc)
|
||||||
|
// return 23:00:00 on April 30, 1955.
|
||||||
|
//
|
||||||
|
// In panics if loc is nil.
|
||||||
|
func (d LocalDate) In(loc *time.Location) time.Time {
|
||||||
|
return time.Date(d.Year, d.Month, d.Day, 0, 0, 0, 0, loc)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddDays returns the date that is n days in the future.
|
||||||
|
// n can also be negative to go into the past.
|
||||||
|
func (d LocalDate) AddDays(n int) LocalDate {
|
||||||
|
return LocalDateOf(d.In(time.UTC).AddDate(0, 0, n))
|
||||||
|
}
|
||||||
|
|
||||||
|
// DaysSince returns the signed number of days between the date and s, not including the end day.
|
||||||
|
// This is the inverse operation to AddDays.
|
||||||
|
func (d LocalDate) DaysSince(s LocalDate) (days int) {
|
||||||
|
// We convert to Unix time so we do not have to worry about leap seconds:
|
||||||
|
// Unix time increases by exactly 86400 seconds per day.
|
||||||
|
deltaUnix := d.In(time.UTC).Unix() - s.In(time.UTC).Unix()
|
||||||
|
return int(deltaUnix / 86400)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Before reports whether d1 occurs before d2.
|
||||||
|
func (d1 LocalDate) Before(d2 LocalDate) bool {
|
||||||
|
if d1.Year != d2.Year {
|
||||||
|
return d1.Year < d2.Year
|
||||||
|
}
|
||||||
|
if d1.Month != d2.Month {
|
||||||
|
return d1.Month < d2.Month
|
||||||
|
}
|
||||||
|
return d1.Day < d2.Day
|
||||||
|
}
|
||||||
|
|
||||||
|
// After reports whether d1 occurs after d2.
|
||||||
|
func (d1 LocalDate) After(d2 LocalDate) bool {
|
||||||
|
return d2.Before(d1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalText implements the encoding.TextMarshaler interface.
|
||||||
|
// The output is the result of d.String().
|
||||||
|
func (d LocalDate) MarshalText() ([]byte, error) {
|
||||||
|
return []byte(d.String()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||||
|
// The date is expected to be a string in a format accepted by ParseLocalDate.
|
||||||
|
func (d *LocalDate) UnmarshalText(data []byte) error {
|
||||||
|
var err error
|
||||||
|
*d, err = ParseLocalDate(string(data))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// A LocalTime represents a time with nanosecond precision.
|
||||||
|
//
|
||||||
|
// This type does not include location information, and therefore does not
|
||||||
|
// describe a unique moment in time.
|
||||||
|
//
|
||||||
|
// This type exists to represent the TIME type in storage-based APIs like BigQuery.
|
||||||
|
// Most operations on Times are unlikely to be meaningful. Prefer the LocalDateTime type.
|
||||||
|
type LocalTime struct {
|
||||||
|
Hour int // The hour of the day in 24-hour format; range [0-23]
|
||||||
|
Minute int // The minute of the hour; range [0-59]
|
||||||
|
Second int // The second of the minute; range [0-59]
|
||||||
|
Nanosecond int // The nanosecond of the second; range [0-999999999]
|
||||||
|
}
|
||||||
|
|
||||||
|
// LocalTimeOf returns the LocalTime representing the time of day in which a time occurs
|
||||||
|
// in that time's location. It ignores the date.
|
||||||
|
func LocalTimeOf(t time.Time) LocalTime {
|
||||||
|
var tm LocalTime
|
||||||
|
tm.Hour, tm.Minute, tm.Second = t.Clock()
|
||||||
|
tm.Nanosecond = t.Nanosecond()
|
||||||
|
return tm
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseLocalTime parses a string and returns the time value it represents.
|
||||||
|
// ParseLocalTime accepts an extended form of the RFC3339 partial-time format. After
|
||||||
|
// the HH:MM:SS part of the string, an optional fractional part may appear,
|
||||||
|
// consisting of a decimal point followed by one to nine decimal digits.
|
||||||
|
// (RFC3339 admits only one digit after the decimal point).
|
||||||
|
func ParseLocalTime(s string) (LocalTime, error) {
|
||||||
|
t, err := time.Parse("15:04:05.999999999", s)
|
||||||
|
if err != nil {
|
||||||
|
return LocalTime{}, err
|
||||||
|
}
|
||||||
|
return LocalTimeOf(t), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// String returns the date in the format described in ParseLocalTime. If Nanoseconds
|
||||||
|
// is zero, no fractional part will be generated. Otherwise, the result will
|
||||||
|
// end with a fractional part consisting of a decimal point and nine digits.
|
||||||
|
func (t LocalTime) String() string {
|
||||||
|
s := fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second)
|
||||||
|
if t.Nanosecond == 0 {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
return s + fmt.Sprintf(".%09d", t.Nanosecond)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsValid reports whether the time is valid.
|
||||||
|
func (t LocalTime) IsValid() bool {
|
||||||
|
// Construct a non-zero time.
|
||||||
|
tm := time.Date(2, 2, 2, t.Hour, t.Minute, t.Second, t.Nanosecond, time.UTC)
|
||||||
|
return LocalTimeOf(tm) == t
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalText implements the encoding.TextMarshaler interface.
|
||||||
|
// The output is the result of t.String().
|
||||||
|
func (t LocalTime) MarshalText() ([]byte, error) {
|
||||||
|
return []byte(t.String()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||||
|
// The time is expected to be a string in a format accepted by ParseLocalTime.
|
||||||
|
func (t *LocalTime) UnmarshalText(data []byte) error {
|
||||||
|
var err error
|
||||||
|
*t, err = ParseLocalTime(string(data))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// A LocalDateTime represents a date and time.
|
||||||
|
//
|
||||||
|
// This type does not include location information, and therefore does not
|
||||||
|
// describe a unique moment in time.
|
||||||
|
type LocalDateTime struct {
|
||||||
|
Date LocalDate
|
||||||
|
Time LocalTime
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: We deliberately do not embed LocalDate into LocalDateTime, to avoid promoting AddDays and Sub.
|
||||||
|
|
||||||
|
// LocalDateTimeOf returns the LocalDateTime in which a time occurs in that time's location.
|
||||||
|
func LocalDateTimeOf(t time.Time) LocalDateTime {
|
||||||
|
return LocalDateTime{
|
||||||
|
Date: LocalDateOf(t),
|
||||||
|
Time: LocalTimeOf(t),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseLocalDateTime parses a string and returns the LocalDateTime it represents.
|
||||||
|
// ParseLocalDateTime accepts a variant of the RFC3339 date-time format that omits
|
||||||
|
// the time offset but includes an optional fractional time, as described in
|
||||||
|
// ParseLocalTime. Informally, the accepted format is
|
||||||
|
// YYYY-MM-DDTHH:MM:SS[.FFFFFFFFF]
|
||||||
|
// where the 'T' may be a lower-case 't'.
|
||||||
|
func ParseLocalDateTime(s string) (LocalDateTime, error) {
|
||||||
|
t, err := time.Parse("2006-01-02T15:04:05.999999999", s)
|
||||||
|
if err != nil {
|
||||||
|
t, err = time.Parse("2006-01-02t15:04:05.999999999", s)
|
||||||
|
if err != nil {
|
||||||
|
return LocalDateTime{}, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return LocalDateTimeOf(t), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// String returns the date in the format described in ParseLocalDate.
|
||||||
|
func (dt LocalDateTime) String() string {
|
||||||
|
return dt.Date.String() + "T" + dt.Time.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsValid reports whether the datetime is valid.
|
||||||
|
func (dt LocalDateTime) IsValid() bool {
|
||||||
|
return dt.Date.IsValid() && dt.Time.IsValid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// In returns the time corresponding to the LocalDateTime in the given location.
|
||||||
|
//
|
||||||
|
// If the time is missing or ambigous at the location, In returns the same
|
||||||
|
// result as time.LocalDate. For example, if loc is America/Indiana/Vincennes, then
|
||||||
|
// both
|
||||||
|
// time.LocalDate(1955, time.May, 1, 0, 30, 0, 0, loc)
|
||||||
|
// and
|
||||||
|
// civil.LocalDateTime{
|
||||||
|
// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}},
|
||||||
|
// civil.LocalTime{Minute: 30}}.In(loc)
|
||||||
|
// return 23:30:00 on April 30, 1955.
|
||||||
|
//
|
||||||
|
// In panics if loc is nil.
|
||||||
|
func (dt LocalDateTime) In(loc *time.Location) time.Time {
|
||||||
|
return time.Date(dt.Date.Year, dt.Date.Month, dt.Date.Day, dt.Time.Hour, dt.Time.Minute, dt.Time.Second, dt.Time.Nanosecond, loc)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Before reports whether dt1 occurs before dt2.
|
||||||
|
func (dt1 LocalDateTime) Before(dt2 LocalDateTime) bool {
|
||||||
|
return dt1.In(time.UTC).Before(dt2.In(time.UTC))
|
||||||
|
}
|
||||||
|
|
||||||
|
// After reports whether dt1 occurs after dt2.
|
||||||
|
func (dt1 LocalDateTime) After(dt2 LocalDateTime) bool {
|
||||||
|
return dt2.Before(dt1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalText implements the encoding.TextMarshaler interface.
|
||||||
|
// The output is the result of dt.String().
|
||||||
|
func (dt LocalDateTime) MarshalText() ([]byte, error) {
|
||||||
|
return []byte(dt.String()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||||
|
// The datetime is expected to be a string in a format accepted by ParseLocalDateTime
|
||||||
|
func (dt *LocalDateTime) UnmarshalText(data []byte) error {
|
||||||
|
var err error
|
||||||
|
*dt, err = ParseLocalDateTime(string(data))
|
||||||
|
return err
|
||||||
|
}
|
||||||
@@ -0,0 +1,456 @@
|
|||||||
|
// Implementation of TOML's local date/time.
|
||||||
|
//
|
||||||
|
// Copied over from Google's civil to avoid pulling all the Google dependencies.
|
||||||
|
// Originals:
|
||||||
|
// https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil_test.go
|
||||||
|
// Changes:
|
||||||
|
// * Renamed files from civil* to localtime*.
|
||||||
|
// * Package changed from civil to toml.
|
||||||
|
// * 'Local' prefix added to all structs.
|
||||||
|
//
|
||||||
|
// Copyright 2016 Google LLC
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package toml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// cmpEqual reports whether x and y are deeply equal. It is a minimal
// stand-in for a full diffing package in these tests.
func cmpEqual(x, y interface{}) bool {
	return reflect.DeepEqual(x, y)
}
|
||||||
|
|
||||||
|
func TestDates(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
date LocalDate
|
||||||
|
loc *time.Location
|
||||||
|
wantStr string
|
||||||
|
wantTime time.Time
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
date: LocalDate{2014, 7, 29},
|
||||||
|
loc: time.Local,
|
||||||
|
wantStr: "2014-07-29",
|
||||||
|
wantTime: time.Date(2014, time.July, 29, 0, 0, 0, 0, time.Local),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
date: LocalDateOf(time.Date(2014, 8, 20, 15, 8, 43, 1, time.Local)),
|
||||||
|
loc: time.UTC,
|
||||||
|
wantStr: "2014-08-20",
|
||||||
|
wantTime: time.Date(2014, 8, 20, 0, 0, 0, 0, time.UTC),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
date: LocalDateOf(time.Date(999, time.January, 26, 0, 0, 0, 0, time.Local)),
|
||||||
|
loc: time.UTC,
|
||||||
|
wantStr: "0999-01-26",
|
||||||
|
wantTime: time.Date(999, 1, 26, 0, 0, 0, 0, time.UTC),
|
||||||
|
},
|
||||||
|
} {
|
||||||
|
if got := test.date.String(); got != test.wantStr {
|
||||||
|
t.Errorf("%#v.String() = %q, want %q", test.date, got, test.wantStr)
|
||||||
|
}
|
||||||
|
if got := test.date.In(test.loc); !got.Equal(test.wantTime) {
|
||||||
|
t.Errorf("%#v.In(%v) = %v, want %v", test.date, test.loc, got, test.wantTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateIsValid(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
date LocalDate
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalDate{2014, 7, 29}, true},
|
||||||
|
{LocalDate{2000, 2, 29}, true},
|
||||||
|
{LocalDate{10000, 12, 31}, true},
|
||||||
|
{LocalDate{1, 1, 1}, true},
|
||||||
|
{LocalDate{0, 1, 1}, true}, // year zero is OK
|
||||||
|
{LocalDate{-1, 1, 1}, true}, // negative year is OK
|
||||||
|
{LocalDate{1, 0, 1}, false},
|
||||||
|
{LocalDate{1, 1, 0}, false},
|
||||||
|
{LocalDate{2016, 1, 32}, false},
|
||||||
|
{LocalDate{2016, 13, 1}, false},
|
||||||
|
{LocalDate{1, -1, 1}, false},
|
||||||
|
{LocalDate{1, 1, -1}, false},
|
||||||
|
} {
|
||||||
|
got := test.date.IsValid()
|
||||||
|
if got != test.want {
|
||||||
|
t.Errorf("%#v: got %t, want %t", test.date, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestParseDate(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
str string
|
||||||
|
want LocalDate // if empty, expect an error
|
||||||
|
}{
|
||||||
|
{"2016-01-02", LocalDate{2016, 1, 2}},
|
||||||
|
{"2016-12-31", LocalDate{2016, 12, 31}},
|
||||||
|
{"0003-02-04", LocalDate{3, 2, 4}},
|
||||||
|
{"999-01-26", LocalDate{}},
|
||||||
|
{"", LocalDate{}},
|
||||||
|
{"2016-01-02x", LocalDate{}},
|
||||||
|
} {
|
||||||
|
got, err := ParseLocalDate(test.str)
|
||||||
|
if got != test.want {
|
||||||
|
t.Errorf("ParseLocalDate(%q) = %+v, want %+v", test.str, got, test.want)
|
||||||
|
}
|
||||||
|
if err != nil && test.want != (LocalDate{}) {
|
||||||
|
t.Errorf("Unexpected error %v from ParseLocalDate(%q)", err, test.str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateArithmetic(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
desc string
|
||||||
|
start LocalDate
|
||||||
|
end LocalDate
|
||||||
|
days int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "zero days noop",
|
||||||
|
start: LocalDate{2014, 5, 9},
|
||||||
|
end: LocalDate{2014, 5, 9},
|
||||||
|
days: 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "crossing a year boundary",
|
||||||
|
start: LocalDate{2014, 12, 31},
|
||||||
|
end: LocalDate{2015, 1, 1},
|
||||||
|
days: 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "negative number of days",
|
||||||
|
start: LocalDate{2015, 1, 1},
|
||||||
|
end: LocalDate{2014, 12, 31},
|
||||||
|
days: -1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "full leap year",
|
||||||
|
start: LocalDate{2004, 1, 1},
|
||||||
|
end: LocalDate{2005, 1, 1},
|
||||||
|
days: 366,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "full non-leap year",
|
||||||
|
start: LocalDate{2001, 1, 1},
|
||||||
|
end: LocalDate{2002, 1, 1},
|
||||||
|
days: 365,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "crossing a leap second",
|
||||||
|
start: LocalDate{1972, 6, 30},
|
||||||
|
end: LocalDate{1972, 7, 1},
|
||||||
|
days: 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "dates before the unix epoch",
|
||||||
|
start: LocalDate{101, 1, 1},
|
||||||
|
end: LocalDate{102, 1, 1},
|
||||||
|
days: 365,
|
||||||
|
},
|
||||||
|
} {
|
||||||
|
if got := test.start.AddDays(test.days); got != test.end {
|
||||||
|
t.Errorf("[%s] %#v.AddDays(%v) = %#v, want %#v", test.desc, test.start, test.days, got, test.end)
|
||||||
|
}
|
||||||
|
if got := test.end.DaysSince(test.start); got != test.days {
|
||||||
|
t.Errorf("[%s] %#v.Sub(%#v) = %v, want %v", test.desc, test.end, test.start, got, test.days)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateBefore(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
d1, d2 LocalDate
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalDate{2016, 12, 31}, LocalDate{2017, 1, 1}, true},
|
||||||
|
{LocalDate{2016, 1, 1}, LocalDate{2016, 1, 1}, false},
|
||||||
|
{LocalDate{2016, 12, 30}, LocalDate{2016, 12, 31}, true},
|
||||||
|
{LocalDate{2016, 1, 30}, LocalDate{2016, 12, 31}, true},
|
||||||
|
} {
|
||||||
|
if got := test.d1.Before(test.d2); got != test.want {
|
||||||
|
t.Errorf("%v.Before(%v): got %t, want %t", test.d1, test.d2, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateAfter(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
d1, d2 LocalDate
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalDate{2016, 12, 31}, LocalDate{2017, 1, 1}, false},
|
||||||
|
{LocalDate{2016, 1, 1}, LocalDate{2016, 1, 1}, false},
|
||||||
|
{LocalDate{2016, 12, 30}, LocalDate{2016, 12, 31}, false},
|
||||||
|
} {
|
||||||
|
if got := test.d1.After(test.d2); got != test.want {
|
||||||
|
t.Errorf("%v.After(%v): got %t, want %t", test.d1, test.d2, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTimeToString(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
str string
|
||||||
|
time LocalTime
|
||||||
|
roundTrip bool // ParseLocalTime(str).String() == str?
|
||||||
|
}{
|
||||||
|
{"13:26:33", LocalTime{13, 26, 33, 0}, true},
|
||||||
|
{"01:02:03.000023456", LocalTime{1, 2, 3, 23456}, true},
|
||||||
|
{"00:00:00.000000001", LocalTime{0, 0, 0, 1}, true},
|
||||||
|
{"13:26:03.1", LocalTime{13, 26, 3, 100000000}, false},
|
||||||
|
{"13:26:33.0000003", LocalTime{13, 26, 33, 300}, false},
|
||||||
|
} {
|
||||||
|
gotTime, err := ParseLocalTime(test.str)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("ParseLocalTime(%q): got error: %v", test.str, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if gotTime != test.time {
|
||||||
|
t.Errorf("ParseLocalTime(%q) = %+v, want %+v", test.str, gotTime, test.time)
|
||||||
|
}
|
||||||
|
if test.roundTrip {
|
||||||
|
gotStr := test.time.String()
|
||||||
|
if gotStr != test.str {
|
||||||
|
t.Errorf("%#v.String() = %q, want %q", test.time, gotStr, test.str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTimeOf(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
time time.Time
|
||||||
|
want LocalTime
|
||||||
|
}{
|
||||||
|
{time.Date(2014, 8, 20, 15, 8, 43, 1, time.Local), LocalTime{15, 8, 43, 1}},
|
||||||
|
{time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC), LocalTime{0, 0, 0, 0}},
|
||||||
|
} {
|
||||||
|
if got := LocalTimeOf(test.time); got != test.want {
|
||||||
|
t.Errorf("LocalTimeOf(%v) = %+v, want %+v", test.time, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTimeIsValid(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
time LocalTime
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalTime{0, 0, 0, 0}, true},
|
||||||
|
{LocalTime{23, 0, 0, 0}, true},
|
||||||
|
{LocalTime{23, 59, 59, 999999999}, true},
|
||||||
|
{LocalTime{24, 59, 59, 999999999}, false},
|
||||||
|
{LocalTime{23, 60, 59, 999999999}, false},
|
||||||
|
{LocalTime{23, 59, 60, 999999999}, false},
|
||||||
|
{LocalTime{23, 59, 59, 1000000000}, false},
|
||||||
|
{LocalTime{-1, 0, 0, 0}, false},
|
||||||
|
{LocalTime{0, -1, 0, 0}, false},
|
||||||
|
{LocalTime{0, 0, -1, 0}, false},
|
||||||
|
{LocalTime{0, 0, 0, -1}, false},
|
||||||
|
} {
|
||||||
|
got := test.time.IsValid()
|
||||||
|
if got != test.want {
|
||||||
|
t.Errorf("%#v: got %t, want %t", test.time, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeToString(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
str string
|
||||||
|
dateTime LocalDateTime
|
||||||
|
roundTrip bool // ParseLocalDateTime(str).String() == str?
|
||||||
|
}{
|
||||||
|
{"2016-03-22T13:26:33", LocalDateTime{LocalDate{2016, 03, 22}, LocalTime{13, 26, 33, 0}}, true},
|
||||||
|
{"2016-03-22T13:26:33.000000600", LocalDateTime{LocalDate{2016, 03, 22}, LocalTime{13, 26, 33, 600}}, true},
|
||||||
|
{"2016-03-22t13:26:33", LocalDateTime{LocalDate{2016, 03, 22}, LocalTime{13, 26, 33, 0}}, false},
|
||||||
|
} {
|
||||||
|
gotDateTime, err := ParseLocalDateTime(test.str)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("ParseLocalDateTime(%q): got error: %v", test.str, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if gotDateTime != test.dateTime {
|
||||||
|
t.Errorf("ParseLocalDateTime(%q) = %+v, want %+v", test.str, gotDateTime, test.dateTime)
|
||||||
|
}
|
||||||
|
if test.roundTrip {
|
||||||
|
gotStr := test.dateTime.String()
|
||||||
|
if gotStr != test.str {
|
||||||
|
t.Errorf("%#v.String() = %q, want %q", test.dateTime, gotStr, test.str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestParseDateTimeErrors(t *testing.T) {
|
||||||
|
for _, str := range []string{
|
||||||
|
"",
|
||||||
|
"2016-03-22", // just a date
|
||||||
|
"13:26:33", // just a time
|
||||||
|
"2016-03-22 13:26:33", // wrong separating character
|
||||||
|
"2016-03-22T13:26:33x", // extra at end
|
||||||
|
} {
|
||||||
|
if _, err := ParseLocalDateTime(str); err == nil {
|
||||||
|
t.Errorf("ParseLocalDateTime(%q) succeeded, want error", str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeOf(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
time time.Time
|
||||||
|
want LocalDateTime
|
||||||
|
}{
|
||||||
|
{time.Date(2014, 8, 20, 15, 8, 43, 1, time.Local),
|
||||||
|
LocalDateTime{LocalDate{2014, 8, 20}, LocalTime{15, 8, 43, 1}}},
|
||||||
|
{time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
|
||||||
|
LocalDateTime{LocalDate{1, 1, 1}, LocalTime{0, 0, 0, 0}}},
|
||||||
|
} {
|
||||||
|
if got := LocalDateTimeOf(test.time); got != test.want {
|
||||||
|
t.Errorf("LocalDateTimeOf(%v) = %+v, want %+v", test.time, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeIsValid(t *testing.T) {
|
||||||
|
// No need to be exhaustive here; it's just LocalDate.IsValid && LocalTime.IsValid.
|
||||||
|
for _, test := range []struct {
|
||||||
|
dt LocalDateTime
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalDateTime{LocalDate{2016, 3, 20}, LocalTime{0, 0, 0, 0}}, true},
|
||||||
|
{LocalDateTime{LocalDate{2016, -3, 20}, LocalTime{0, 0, 0, 0}}, false},
|
||||||
|
{LocalDateTime{LocalDate{2016, 3, 20}, LocalTime{24, 0, 0, 0}}, false},
|
||||||
|
} {
|
||||||
|
got := test.dt.IsValid()
|
||||||
|
if got != test.want {
|
||||||
|
t.Errorf("%#v: got %t, want %t", test.dt, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeIn(t *testing.T) {
|
||||||
|
dt := LocalDateTime{LocalDate{2016, 1, 2}, LocalTime{3, 4, 5, 6}}
|
||||||
|
got := dt.In(time.UTC)
|
||||||
|
want := time.Date(2016, 1, 2, 3, 4, 5, 6, time.UTC)
|
||||||
|
if !got.Equal(want) {
|
||||||
|
t.Errorf("got %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeBefore(t *testing.T) {
|
||||||
|
d1 := LocalDate{2016, 12, 31}
|
||||||
|
d2 := LocalDate{2017, 1, 1}
|
||||||
|
t1 := LocalTime{5, 6, 7, 8}
|
||||||
|
t2 := LocalTime{5, 6, 7, 9}
|
||||||
|
for _, test := range []struct {
|
||||||
|
dt1, dt2 LocalDateTime
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalDateTime{d1, t1}, LocalDateTime{d2, t1}, true},
|
||||||
|
{LocalDateTime{d1, t1}, LocalDateTime{d1, t2}, true},
|
||||||
|
{LocalDateTime{d2, t1}, LocalDateTime{d1, t1}, false},
|
||||||
|
{LocalDateTime{d2, t1}, LocalDateTime{d2, t1}, false},
|
||||||
|
} {
|
||||||
|
if got := test.dt1.Before(test.dt2); got != test.want {
|
||||||
|
t.Errorf("%v.Before(%v): got %t, want %t", test.dt1, test.dt2, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeAfter(t *testing.T) {
|
||||||
|
d1 := LocalDate{2016, 12, 31}
|
||||||
|
d2 := LocalDate{2017, 1, 1}
|
||||||
|
t1 := LocalTime{5, 6, 7, 8}
|
||||||
|
t2 := LocalTime{5, 6, 7, 9}
|
||||||
|
for _, test := range []struct {
|
||||||
|
dt1, dt2 LocalDateTime
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{LocalDateTime{d1, t1}, LocalDateTime{d2, t1}, false},
|
||||||
|
{LocalDateTime{d1, t1}, LocalDateTime{d1, t2}, false},
|
||||||
|
{LocalDateTime{d2, t1}, LocalDateTime{d1, t1}, true},
|
||||||
|
{LocalDateTime{d2, t1}, LocalDateTime{d2, t1}, false},
|
||||||
|
} {
|
||||||
|
if got := test.dt1.After(test.dt2); got != test.want {
|
||||||
|
t.Errorf("%v.After(%v): got %t, want %t", test.dt1, test.dt2, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMarshalJSON(t *testing.T) {
|
||||||
|
for _, test := range []struct {
|
||||||
|
value interface{}
|
||||||
|
want string
|
||||||
|
}{
|
||||||
|
{LocalDate{1987, 4, 15}, `"1987-04-15"`},
|
||||||
|
{LocalTime{18, 54, 2, 0}, `"18:54:02"`},
|
||||||
|
{LocalDateTime{LocalDate{1987, 4, 15}, LocalTime{18, 54, 2, 0}}, `"1987-04-15T18:54:02"`},
|
||||||
|
} {
|
||||||
|
bgot, err := json.Marshal(test.value)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if got := string(bgot); got != test.want {
|
||||||
|
t.Errorf("%#v: got %s, want %s", test.value, got, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUnmarshalJSON(t *testing.T) {
|
||||||
|
var d LocalDate
|
||||||
|
var tm LocalTime
|
||||||
|
var dt LocalDateTime
|
||||||
|
for _, test := range []struct {
|
||||||
|
data string
|
||||||
|
ptr interface{}
|
||||||
|
want interface{}
|
||||||
|
}{
|
||||||
|
{`"1987-04-15"`, &d, &LocalDate{1987, 4, 15}},
|
||||||
|
{`"1987-04-\u0031\u0035"`, &d, &LocalDate{1987, 4, 15}},
|
||||||
|
{`"18:54:02"`, &tm, &LocalTime{18, 54, 2, 0}},
|
||||||
|
{`"1987-04-15T18:54:02"`, &dt, &LocalDateTime{LocalDate{1987, 4, 15}, LocalTime{18, 54, 2, 0}}},
|
||||||
|
} {
|
||||||
|
if err := json.Unmarshal([]byte(test.data), test.ptr); err != nil {
|
||||||
|
t.Fatalf("%s: %v", test.data, err)
|
||||||
|
}
|
||||||
|
if !cmpEqual(test.ptr, test.want) {
|
||||||
|
t.Errorf("%s: got %#v, want %#v", test.data, test.ptr, test.want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, bad := range []string{"", `""`, `"bad"`, `"1987-04-15x"`,
|
||||||
|
`19870415`, // a JSON number
|
||||||
|
`11987-04-15x`, // not a JSON string
|
||||||
|
|
||||||
|
} {
|
||||||
|
if json.Unmarshal([]byte(bad), &d) == nil {
|
||||||
|
t.Errorf("%q, LocalDate: got nil, want error", bad)
|
||||||
|
}
|
||||||
|
if json.Unmarshal([]byte(bad), &tm) == nil {
|
||||||
|
t.Errorf("%q, LocalTime: got nil, want error", bad)
|
||||||
|
}
|
||||||
|
if json.Unmarshal([]byte(bad), &dt) == nil {
|
||||||
|
t.Errorf("%q, LocalDateTime: got nil, want error", bad)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
+594
-89
File diff suppressed because it is too large
Load Diff
@@ -1,17 +0,0 @@
|
|||||||
title = "TOML Marshal Testing"
|
|
||||||
|
|
||||||
[basic_map]
|
|
||||||
one = "one"
|
|
||||||
two = "two"
|
|
||||||
|
|
||||||
[long_map]
|
|
||||||
a7 = "1"
|
|
||||||
b3 = "2"
|
|
||||||
c8 = "3"
|
|
||||||
d4 = "4"
|
|
||||||
e6 = "5"
|
|
||||||
f5 = "6"
|
|
||||||
g10 = "7"
|
|
||||||
h1 = "8"
|
|
||||||
i2 = "9"
|
|
||||||
j9 = "10"
|
|
||||||
@@ -27,6 +27,7 @@ title = "TOML Marshal Testing"
|
|||||||
uint = 5001
|
uint = 5001
|
||||||
bool = true
|
bool = true
|
||||||
float = 123.4
|
float = 123.4
|
||||||
|
float64 = 123.456782132399
|
||||||
int = 5000
|
int = 5000
|
||||||
string = "Bite me"
|
string = "Bite me"
|
||||||
date = 1979-05-27T07:32:00Z
|
date = 1979-05-27T07:32:00Z
|
||||||
|
|||||||
+2769
-57
File diff suppressed because it is too large
Load Diff
@@ -4,6 +4,7 @@ title = "TOML Marshal Testing"
|
|||||||
bool = true
|
bool = true
|
||||||
date = 1979-05-27T07:32:00Z
|
date = 1979-05-27T07:32:00Z
|
||||||
float = 123.4
|
float = 123.4
|
||||||
|
float64 = 123.456782132399
|
||||||
int = 5000
|
int = 5000
|
||||||
string = "Bite me"
|
string = "Bite me"
|
||||||
uint = 5001
|
uint = 5001
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
"reflect"
|
"reflect"
|
||||||
"regexp"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
@@ -158,6 +157,11 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
|
|||||||
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
|
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
|
||||||
p.raiseError(key, "%s", err)
|
p.raiseError(key, "%s", err)
|
||||||
}
|
}
|
||||||
|
destTree := p.tree.GetPath(keys)
|
||||||
|
if target, ok := destTree.(*Tree); ok && target != nil && target.inline {
|
||||||
|
p.raiseError(key, "could not re-define exist inline table or its sub-table : %s",
|
||||||
|
strings.Join(keys, "."))
|
||||||
|
}
|
||||||
p.assume(tokenRightBracket)
|
p.assume(tokenRightBracket)
|
||||||
p.currentTable = keys
|
p.currentTable = keys
|
||||||
return p.parseStart
|
return p.parseStart
|
||||||
@@ -201,6 +205,11 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
|
|||||||
strings.Join(tableKey, "."))
|
strings.Join(tableKey, "."))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if targetNode.inline {
|
||||||
|
p.raiseError(key, "could not add key or sub-table to exist inline table or its sub-table : %s",
|
||||||
|
strings.Join(tableKey, "."))
|
||||||
|
}
|
||||||
|
|
||||||
// assign value to the found table
|
// assign value to the found table
|
||||||
keyVal := parsedKey[len(parsedKey)-1]
|
keyVal := parsedKey[len(parsedKey)-1]
|
||||||
localKey := []string{keyVal}
|
localKey := []string{keyVal}
|
||||||
@@ -221,19 +230,38 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
|
|||||||
return p.parseStart
|
return p.parseStart
|
||||||
}
|
}
|
||||||
|
|
||||||
var numberUnderscoreInvalidRegexp *regexp.Regexp
|
var errInvalidUnderscore = errors.New("invalid use of _ in number")
|
||||||
var hexNumberUnderscoreInvalidRegexp *regexp.Regexp
|
|
||||||
|
|
||||||
func numberContainsInvalidUnderscore(value string) error {
|
func numberContainsInvalidUnderscore(value string) error {
|
||||||
if numberUnderscoreInvalidRegexp.MatchString(value) {
|
// For large numbers, you may use underscores between digits to enhance
|
||||||
return errors.New("invalid use of _ in number")
|
// readability. Each underscore must be surrounded by at least one digit on
|
||||||
|
// each side.
|
||||||
|
|
||||||
|
hasBefore := false
|
||||||
|
for idx, r := range value {
|
||||||
|
if r == '_' {
|
||||||
|
if !hasBefore || idx+1 >= len(value) {
|
||||||
|
// can't end with an underscore
|
||||||
|
return errInvalidUnderscore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hasBefore = isDigit(r)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var errInvalidUnderscoreHex = errors.New("invalid use of _ in hex number")
|
||||||
|
|
||||||
func hexNumberContainsInvalidUnderscore(value string) error {
|
func hexNumberContainsInvalidUnderscore(value string) error {
|
||||||
if hexNumberUnderscoreInvalidRegexp.MatchString(value) {
|
hasBefore := false
|
||||||
return errors.New("invalid use of _ in hex number")
|
for idx, r := range value {
|
||||||
|
if r == '_' {
|
||||||
|
if !hasBefore || idx+1 >= len(value) {
|
||||||
|
// can't end with an underscore
|
||||||
|
return errInvalidUnderscoreHex
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hasBefore = isHexDigit(r)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -265,42 +293,41 @@ func (p *tomlParser) parseRvalue() interface{} {
|
|||||||
return math.NaN()
|
return math.NaN()
|
||||||
case tokenInteger:
|
case tokenInteger:
|
||||||
cleanedVal := cleanupNumberToken(tok.val)
|
cleanedVal := cleanupNumberToken(tok.val)
|
||||||
var err error
|
base := 10
|
||||||
var val int64
|
s := cleanedVal
|
||||||
|
checkInvalidUnderscore := numberContainsInvalidUnderscore
|
||||||
if len(cleanedVal) >= 3 && cleanedVal[0] == '0' {
|
if len(cleanedVal) >= 3 && cleanedVal[0] == '0' {
|
||||||
switch cleanedVal[1] {
|
switch cleanedVal[1] {
|
||||||
case 'x':
|
case 'x':
|
||||||
err = hexNumberContainsInvalidUnderscore(tok.val)
|
checkInvalidUnderscore = hexNumberContainsInvalidUnderscore
|
||||||
if err != nil {
|
base = 16
|
||||||
p.raiseError(tok, "%s", err)
|
|
||||||
}
|
|
||||||
val, err = strconv.ParseInt(cleanedVal[2:], 16, 64)
|
|
||||||
case 'o':
|
case 'o':
|
||||||
err = numberContainsInvalidUnderscore(tok.val)
|
base = 8
|
||||||
if err != nil {
|
|
||||||
p.raiseError(tok, "%s", err)
|
|
||||||
}
|
|
||||||
val, err = strconv.ParseInt(cleanedVal[2:], 8, 64)
|
|
||||||
case 'b':
|
case 'b':
|
||||||
err = numberContainsInvalidUnderscore(tok.val)
|
base = 2
|
||||||
if err != nil {
|
|
||||||
p.raiseError(tok, "%s", err)
|
|
||||||
}
|
|
||||||
val, err = strconv.ParseInt(cleanedVal[2:], 2, 64)
|
|
||||||
default:
|
default:
|
||||||
panic("invalid base") // the lexer should catch this first
|
panic("invalid base") // the lexer should catch this first
|
||||||
}
|
}
|
||||||
} else {
|
s = cleanedVal[2:]
|
||||||
err = numberContainsInvalidUnderscore(tok.val)
|
}
|
||||||
if err != nil {
|
|
||||||
p.raiseError(tok, "%s", err)
|
err := checkInvalidUnderscore(tok.val)
|
||||||
}
|
|
||||||
val, err = strconv.ParseInt(cleanedVal, 10, 64)
|
|
||||||
}
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.raiseError(tok, "%s", err)
|
p.raiseError(tok, "%s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var val interface{}
|
||||||
|
val, err = strconv.ParseInt(s, base, 64)
|
||||||
|
if err == nil {
|
||||||
return val
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
if s[0] != '-' {
|
||||||
|
if val, err = strconv.ParseUint(s, base, 64); err == nil {
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
}
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
case tokenFloat:
|
case tokenFloat:
|
||||||
err := numberContainsInvalidUnderscore(tok.val)
|
err := numberContainsInvalidUnderscore(tok.val)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -312,8 +339,44 @@ func (p *tomlParser) parseRvalue() interface{} {
|
|||||||
p.raiseError(tok, "%s", err)
|
p.raiseError(tok, "%s", err)
|
||||||
}
|
}
|
||||||
return val
|
return val
|
||||||
case tokenDate:
|
case tokenLocalTime:
|
||||||
val, err := time.ParseInLocation(time.RFC3339Nano, tok.val, time.UTC)
|
val, err := ParseLocalTime(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
return val
|
||||||
|
case tokenLocalDate:
|
||||||
|
// a local date may be followed by:
|
||||||
|
// * nothing: this is a local date
|
||||||
|
// * a local time: this is a local date-time
|
||||||
|
|
||||||
|
next := p.peek()
|
||||||
|
if next == nil || next.typ != tokenLocalTime {
|
||||||
|
val, err := ParseLocalDate(tok.val)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
localDate := tok
|
||||||
|
localTime := p.getToken()
|
||||||
|
|
||||||
|
next = p.peek()
|
||||||
|
if next == nil || next.typ != tokenTimeOffset {
|
||||||
|
v := localDate.val + "T" + localTime.val
|
||||||
|
val, err := ParseLocalDateTime(v)
|
||||||
|
if err != nil {
|
||||||
|
p.raiseError(tok, "%s", err)
|
||||||
|
}
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
offset := p.getToken()
|
||||||
|
|
||||||
|
layout := time.RFC3339Nano
|
||||||
|
v := localDate.val + "T" + localTime.val + offset.val
|
||||||
|
val, err := time.ParseInLocation(layout, v, time.UTC)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.raiseError(tok, "%s", err)
|
p.raiseError(tok, "%s", err)
|
||||||
}
|
}
|
||||||
@@ -326,10 +389,10 @@ func (p *tomlParser) parseRvalue() interface{} {
|
|||||||
p.raiseError(tok, "cannot have multiple equals for the same key")
|
p.raiseError(tok, "cannot have multiple equals for the same key")
|
||||||
case tokenError:
|
case tokenError:
|
||||||
p.raiseError(tok, "%s", tok)
|
p.raiseError(tok, "%s", tok)
|
||||||
|
default:
|
||||||
|
panic(fmt.Errorf("unhandled token: %v", tok))
|
||||||
}
|
}
|
||||||
|
|
||||||
p.raiseError(tok, "never reached")
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -356,12 +419,15 @@ Loop:
|
|||||||
}
|
}
|
||||||
key := p.getToken()
|
key := p.getToken()
|
||||||
p.assume(tokenEqual)
|
p.assume(tokenEqual)
|
||||||
value := p.parseRvalue()
|
|
||||||
tree.Set(key.val, value)
|
parsedKey, err := parseKey(key.val)
|
||||||
case tokenComma:
|
if err != nil {
|
||||||
if previous == nil {
|
p.raiseError(key, "invalid key: %s", err)
|
||||||
p.raiseError(follow, "inline table cannot start with a comma")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
value := p.parseRvalue()
|
||||||
|
tree.SetPath(parsedKey, value)
|
||||||
|
case tokenComma:
|
||||||
if tokenIsComma(previous) {
|
if tokenIsComma(previous) {
|
||||||
p.raiseError(follow, "need field between two commas in inline table")
|
p.raiseError(follow, "need field between two commas in inline table")
|
||||||
}
|
}
|
||||||
@@ -374,12 +440,13 @@ Loop:
|
|||||||
if tokenIsComma(previous) {
|
if tokenIsComma(previous) {
|
||||||
p.raiseError(previous, "trailing comma at the end of inline table")
|
p.raiseError(previous, "trailing comma at the end of inline table")
|
||||||
}
|
}
|
||||||
|
tree.inline = true
|
||||||
return tree
|
return tree
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *tomlParser) parseArray() interface{} {
|
func (p *tomlParser) parseArray() interface{} {
|
||||||
var array []interface{}
|
var array []interface{}
|
||||||
arrayType := reflect.TypeOf(nil)
|
arrayType := reflect.TypeOf(newTree())
|
||||||
for {
|
for {
|
||||||
follow := p.peek()
|
follow := p.peek()
|
||||||
if follow == nil || follow.typ == tokenEOF {
|
if follow == nil || follow.typ == tokenEOF {
|
||||||
@@ -390,11 +457,8 @@ func (p *tomlParser) parseArray() interface{} {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
val := p.parseRvalue()
|
val := p.parseRvalue()
|
||||||
if arrayType == nil {
|
|
||||||
arrayType = reflect.TypeOf(val)
|
|
||||||
}
|
|
||||||
if reflect.TypeOf(val) != arrayType {
|
if reflect.TypeOf(val) != arrayType {
|
||||||
p.raiseError(follow, "mixed types in array")
|
arrayType = nil
|
||||||
}
|
}
|
||||||
array = append(array, val)
|
array = append(array, val)
|
||||||
follow = p.peek()
|
follow = p.peek()
|
||||||
@@ -408,6 +472,12 @@ func (p *tomlParser) parseArray() interface{} {
|
|||||||
p.getToken()
|
p.getToken()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// if the array is a mixed-type array or its length is 0,
|
||||||
|
// don't convert it to a table array
|
||||||
|
if len(array) <= 0 {
|
||||||
|
arrayType = nil
|
||||||
|
}
|
||||||
// An array of Trees is actually an array of inline
|
// An array of Trees is actually an array of inline
|
||||||
// tables, which is a shorthand for a table array. If the
|
// tables, which is a shorthand for a table array. If the
|
||||||
// array was not converted from []interface{} to []*Tree,
|
// array was not converted from []interface{} to []*Tree,
|
||||||
@@ -435,8 +505,3 @@ func parseToml(flow []token) *Tree {
|
|||||||
parser.run()
|
parser.run()
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
|
||||||
numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d])|_$|^_`)
|
|
||||||
hexNumberUnderscoreInvalidRegexp = regexp.MustCompile(`(^0x_)|([^\da-f]_|_[^\da-f])|_$|^_`)
|
|
||||||
}
|
|
||||||
|
|||||||
+257
-25
@@ -6,8 +6,6 @@ import (
|
|||||||
"reflect"
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/davecgh/go-spew/spew"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func assertSubTree(t *testing.T, path []string, tree *Tree, err error, ref map[string]interface{}) {
|
func assertSubTree(t *testing.T, path []string, tree *Tree, err error, ref map[string]interface{}) {
|
||||||
@@ -39,7 +37,7 @@ func assertSubTree(t *testing.T, path []string, tree *Tree, err error, ref map[s
|
|||||||
}
|
}
|
||||||
|
|
||||||
func assertTree(t *testing.T, tree *Tree, err error, ref map[string]interface{}) {
|
func assertTree(t *testing.T, tree *Tree, err error, ref map[string]interface{}) {
|
||||||
t.Log("Asserting tree:\n", spew.Sdump(tree))
|
t.Logf("Asserting tree:\n (%T)(%p)(%+v)", tree, tree, tree)
|
||||||
assertSubTree(t, []string{}, tree, err, ref)
|
assertSubTree(t, []string{}, tree, err, ref)
|
||||||
t.Log("Finished tree assertion.")
|
t.Log("Finished tree assertion.")
|
||||||
}
|
}
|
||||||
@@ -197,7 +195,7 @@ func TestFloatsWithExponents(t *testing.T) {
|
|||||||
tree, err := Load("a = 5e+22\nb = 5E+22\nc = -5e+22\nd = -5e-22\ne = 6.626e-34")
|
tree, err := Load("a = 5e+22\nb = 5E+22\nc = -5e+22\nd = -5e-22\ne = 6.626e-34")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
"a": float64(5e+22),
|
"a": float64(5e+22),
|
||||||
"b": float64(5E+22),
|
"b": float64(5e+22),
|
||||||
"c": float64(-5e+22),
|
"c": float64(-5e+22),
|
||||||
"d": float64(-5e-22),
|
"d": float64(-5e-22),
|
||||||
"e": float64(6.626e-34),
|
"e": float64(6.626e-34),
|
||||||
@@ -225,6 +223,107 @@ func TestDateNano(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestLocalDateTime(t *testing.T) {
|
||||||
|
tree, err := Load("a = 1979-05-27T07:32:00")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": LocalDateTime{
|
||||||
|
Date: LocalDate{
|
||||||
|
Year: 1979,
|
||||||
|
Month: 5,
|
||||||
|
Day: 27,
|
||||||
|
},
|
||||||
|
Time: LocalTime{
|
||||||
|
Hour: 7,
|
||||||
|
Minute: 32,
|
||||||
|
Second: 0,
|
||||||
|
Nanosecond: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalDateTimeNano(t *testing.T) {
|
||||||
|
tree, err := Load("a = 1979-05-27T07:32:00.999999")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": LocalDateTime{
|
||||||
|
Date: LocalDate{
|
||||||
|
Year: 1979,
|
||||||
|
Month: 5,
|
||||||
|
Day: 27,
|
||||||
|
},
|
||||||
|
Time: LocalTime{
|
||||||
|
Hour: 7,
|
||||||
|
Minute: 32,
|
||||||
|
Second: 0,
|
||||||
|
Nanosecond: 999999000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalDate(t *testing.T) {
|
||||||
|
tree, err := Load("a = 1979-05-27")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": LocalDate{
|
||||||
|
Year: 1979,
|
||||||
|
Month: 5,
|
||||||
|
Day: 27,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalDateError(t *testing.T) {
|
||||||
|
_, err := Load("a = 2020-09-31")
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("should error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalTimeError(t *testing.T) {
|
||||||
|
_, err := Load("a = 07:99:00")
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("should error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalDateTimeError(t *testing.T) {
|
||||||
|
_, err := Load("a = 2020-09-31T07:99:00")
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("should error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDateTimeOffsetError(t *testing.T) {
|
||||||
|
_, err := Load("a = 2020-09-31T07:99:00Z")
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("should error")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalTime(t *testing.T) {
|
||||||
|
tree, err := Load("a = 07:32:00")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": LocalTime{
|
||||||
|
Hour: 7,
|
||||||
|
Minute: 32,
|
||||||
|
Second: 0,
|
||||||
|
Nanosecond: 0,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLocalTimeNano(t *testing.T) {
|
||||||
|
tree, err := Load("a = 00:32:00.999999")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"a": LocalTime{
|
||||||
|
Hour: 0,
|
||||||
|
Minute: 32,
|
||||||
|
Second: 0,
|
||||||
|
Nanosecond: 999999000,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestSimpleString(t *testing.T) {
|
func TestSimpleString(t *testing.T) {
|
||||||
tree, err := Load("a = \"hello world\"")
|
tree, err := Load("a = \"hello world\"")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -415,18 +514,6 @@ func TestNestedEmptyArrays(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArrayMixedTypes(t *testing.T) {
|
|
||||||
_, err := Load("a = [42, 16.0]")
|
|
||||||
if err.Error() != "(1, 10): mixed types in array" {
|
|
||||||
t.Error("Bad error message:", err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = Load("a = [42, \"hello\"]")
|
|
||||||
if err.Error() != "(1, 11): mixed types in array" {
|
|
||||||
t.Error("Bad error message:", err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestArrayNestedStrings(t *testing.T) {
|
func TestArrayNestedStrings(t *testing.T) {
|
||||||
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
|
tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -510,6 +597,39 @@ func TestDoubleInlineGroup(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestNestedInlineGroup(t *testing.T) {
|
||||||
|
tree, err := Load("out = {block0 = {x = 99, y = 100}, block1 = {p = \"999\", q = \"1000\"}}")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"out": map[string]interface{}{
|
||||||
|
"block0": map[string]interface{}{
|
||||||
|
"x": int64(99),
|
||||||
|
"y": int64(100),
|
||||||
|
},
|
||||||
|
"block1": map[string]interface{}{
|
||||||
|
"p": "999",
|
||||||
|
"q": "1000",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestArrayInNestedInlineGroup(t *testing.T) {
|
||||||
|
tree, err := Load(`image = {name = "xxx", palette = {id = 100, colors = ["red", "blue", "green"]}}`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"image": map[string]interface{}{
|
||||||
|
"name": "xxx",
|
||||||
|
"palette": map[string]interface{}{
|
||||||
|
"id": int64(100),
|
||||||
|
"colors": []string{
|
||||||
|
"red",
|
||||||
|
"blue",
|
||||||
|
"green",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestExampleInlineGroup(t *testing.T) {
|
func TestExampleInlineGroup(t *testing.T) {
|
||||||
tree, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
|
tree, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
|
||||||
point = { x = 1, y = 2 }`)
|
point = { x = 1, y = 2 }`)
|
||||||
@@ -525,6 +645,33 @@ point = { x = 1, y = 2 }`)
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestInlineGroupBareKeysUnderscore(t *testing.T) {
|
||||||
|
tree, err := Load(`foo = { _bar = "buz" }`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"foo": map[string]interface{}{
|
||||||
|
"_bar": "buz",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineGroupBareKeysDash(t *testing.T) {
|
||||||
|
tree, err := Load(`foo = { -bar = "buz" }`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"foo": map[string]interface{}{
|
||||||
|
"-bar": "buz",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineGroupKeyQuoted(t *testing.T) {
|
||||||
|
tree, err := Load(`foo = { "bar" = "buz" }`)
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"foo": map[string]interface{}{
|
||||||
|
"bar": "buz",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func TestExampleInlineGroupInArray(t *testing.T) {
|
func TestExampleInlineGroupInArray(t *testing.T) {
|
||||||
tree, err := Load(`points = [{ x = 1, y = 2 }]`)
|
tree, err := Load(`points = [{ x = 1, y = 2 }]`)
|
||||||
assertTree(t, tree, err, map[string]interface{}{
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
@@ -546,21 +693,56 @@ func TestInlineTableUnterminated(t *testing.T) {
|
|||||||
|
|
||||||
func TestInlineTableCommaExpected(t *testing.T) {
|
func TestInlineTableCommaExpected(t *testing.T) {
|
||||||
_, err := Load("foo = {hello = 53 test = foo}")
|
_, err := Load("foo = {hello = 53 test = foo}")
|
||||||
if err.Error() != "(1, 19): comma expected between fields in inline table" {
|
if err.Error() != "(1, 19): unexpected token type in inline table: no value can start with t" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestInlineTableCommaStart(t *testing.T) {
|
func TestInlineTableCommaStart(t *testing.T) {
|
||||||
_, err := Load("foo = {, hello = 53}")
|
_, err := Load("foo = {, hello = 53}")
|
||||||
if err.Error() != "(1, 8): inline table cannot start with a comma" {
|
if err.Error() != "(1, 8): unexpected token type in inline table: keys cannot contain , character" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestInlineTableDoubleComma(t *testing.T) {
|
func TestInlineTableDoubleComma(t *testing.T) {
|
||||||
_, err := Load("foo = {hello = 53,, foo = 17}")
|
_, err := Load("foo = {hello = 53,, foo = 17}")
|
||||||
if err.Error() != "(1, 19): need field between two commas in inline table" {
|
if err.Error() != "(1, 19): unexpected token type in inline table: keys cannot contain , character" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestInlineTableTrailingComma(t *testing.T) {
|
||||||
|
_, err := Load("foo = {hello = 53, foo = 17,}")
|
||||||
|
if err.Error() != "(1, 28): trailing comma at the end of inline table" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAddKeyToInlineTable(t *testing.T) {
|
||||||
|
_, err := Load("type = { name = \"Nail\" }\ntype.edible = false")
|
||||||
|
if err.Error() != "(2, 1): could not add key or sub-table to exist inline table or its sub-table : type" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAddSubTableToInlineTable(t *testing.T) {
|
||||||
|
_, err := Load("a = { b = \"c\" }\na.d.e = \"f\"")
|
||||||
|
if err.Error() != "(2, 1): could not add key or sub-table to exist inline table or its sub-table : a.d" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAddKeyToSubTableOfInlineTable(t *testing.T) {
|
||||||
|
_, err := Load("a = { b = { c = \"d\" } }\na.b.e = \"f\"")
|
||||||
|
if err.Error() != "(2, 1): could not add key or sub-table to exist inline table or its sub-table : a.b" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReDefineInlineTable(t *testing.T) {
|
||||||
|
_, err := Load("a = { b = \"c\" }\n[a]\n d = \"e\"")
|
||||||
|
if err.Error() != "(2, 2): could not re-define exist inline table or its sub-table : a" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -652,6 +834,7 @@ func TestParseFile(t *testing.T) {
|
|||||||
[]string{"gamma", "delta"},
|
[]string{"gamma", "delta"},
|
||||||
[]int64{1, 2},
|
[]int64{1, 2},
|
||||||
},
|
},
|
||||||
|
"score": 4e-08,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -688,6 +871,7 @@ func TestParseFileCRLF(t *testing.T) {
|
|||||||
[]string{"gamma", "delta"},
|
[]string{"gamma", "delta"},
|
||||||
[]int64{1, 2},
|
[]int64{1, 2},
|
||||||
},
|
},
|
||||||
|
"score": 4e-08,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -760,13 +944,11 @@ func TestTomlValueStringRepresentation(t *testing.T) {
|
|||||||
{"hello world", "\"hello world\""},
|
{"hello world", "\"hello world\""},
|
||||||
{"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
|
{"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
|
||||||
{"\x05", "\"\\u0005\""},
|
{"\x05", "\"\\u0005\""},
|
||||||
{time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
|
{time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC), "1979-05-27T07:32:00Z"},
|
||||||
"1979-05-27T07:32:00Z"},
|
{[]interface{}{"gamma", "delta"}, "[\"gamma\", \"delta\"]"},
|
||||||
{[]interface{}{"gamma", "delta"},
|
|
||||||
"[\"gamma\",\"delta\"]"},
|
|
||||||
{nil, ""},
|
{nil, ""},
|
||||||
} {
|
} {
|
||||||
result, err := tomlValueStringRepresentation(item.Value, "", false)
|
result, err := tomlValueStringRepresentation(item.Value, "", "", OrderAlphabetical, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Errorf("Test %d - unexpected error: %s", idx, err)
|
t.Errorf("Test %d - unexpected error: %s", idx, err)
|
||||||
}
|
}
|
||||||
@@ -893,7 +1075,7 @@ func TestInvalidFloatParsing(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
_, err = Load("a=_1_2")
|
_, err = Load("a=_1_2")
|
||||||
if err.Error() != "(1, 3): cannot start number with underscore" {
|
if err.Error() != "(1, 3): no value can start with _" {
|
||||||
t.Error("Bad error message:", err.Error())
|
t.Error("Bad error message:", err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -909,6 +1091,13 @@ func TestMapKeyIsNum(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestInvalidKeyInlineTable(t *testing.T) {
|
||||||
|
_, err := Load("table={invalid..key = 1}")
|
||||||
|
if err.Error() != "(1, 8): invalid key: expecting key part after dot" {
|
||||||
|
t.Error("Bad error message:", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestDottedKeys(t *testing.T) {
|
func TestDottedKeys(t *testing.T) {
|
||||||
tree, err := Load(`
|
tree, err := Load(`
|
||||||
name = "Orange"
|
name = "Orange"
|
||||||
@@ -937,3 +1126,46 @@ func TestInvalidDottedKeyEmptyGroup(t *testing.T) {
|
|||||||
t.Fatalf("invalid error message: %s", err)
|
t.Fatalf("invalid error message: %s", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestAccidentalNewlines(t *testing.T) {
|
||||||
|
expected := "The quick brown fox jumps over the lazy dog."
|
||||||
|
tree, err := Load(`str1 = "The quick brown fox jumps over the lazy dog."
|
||||||
|
|
||||||
|
str2 = """
|
||||||
|
The quick brown \
|
||||||
|
|
||||||
|
|
||||||
|
fox jumps over \
|
||||||
|
the lazy dog."""
|
||||||
|
|
||||||
|
str3 = """\
|
||||||
|
The quick brown \` + " " + `
|
||||||
|
fox jumps over \` + " " + `
|
||||||
|
the lazy dog.\` + " " + `
|
||||||
|
"""`)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
got := tree.Get("str1")
|
||||||
|
if got != expected {
|
||||||
|
t.Errorf("expected '%s', got '%s'", expected, got)
|
||||||
|
}
|
||||||
|
|
||||||
|
got = tree.Get("str2")
|
||||||
|
if got != expected {
|
||||||
|
t.Errorf("expected '%s', got '%s'", expected, got)
|
||||||
|
}
|
||||||
|
|
||||||
|
got = tree.Get("str3")
|
||||||
|
if got != expected {
|
||||||
|
t.Errorf("expected '%s', got '%s'", expected, got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUint(t *testing.T) {
|
||||||
|
tree, err := Load("hello = 18446744073709551615")
|
||||||
|
assertTree(t, tree, err, map[string]interface{}{
|
||||||
|
"hello": uint64(math.MaxUint64),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|||||||
+201
@@ -0,0 +1,201 @@
|
|||||||
|
# Query package
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Package query performs JSONPath-like queries on a TOML document.
|
||||||
|
|
||||||
|
The query path implementation is based loosely on the JSONPath specification:
|
||||||
|
http://goessner.net/articles/JsonPath/.
|
||||||
|
|
||||||
|
The idea behind a query path is to allow quick access to any element, or set
|
||||||
|
of elements within TOML document, with a single expression.
|
||||||
|
|
||||||
|
```go
|
||||||
|
result, err := query.CompileAndExecute("$.foo.bar.baz", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
This is roughly equivalent to:
|
||||||
|
|
||||||
|
```go
|
||||||
|
next := tree.Get("foo")
|
||||||
|
if next != nil {
|
||||||
|
next = next.Get("bar")
|
||||||
|
if next != nil {
|
||||||
|
next = next.Get("baz")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result := next
|
||||||
|
```
|
||||||
|
|
||||||
|
err is nil if any parsing exception occurs.
|
||||||
|
|
||||||
|
If no node in the tree matches the query, result will simply contain an empty list of
|
||||||
|
items.
|
||||||
|
|
||||||
|
As illustrated above, the query path is much more efficient, especially since
|
||||||
|
the structure of the TOML file can vary. Rather than making assumptions about
|
||||||
|
a document's structure, a query allows the programmer to make structured
|
||||||
|
requests into the document, and get zero or more values as a result.
|
||||||
|
|
||||||
|
## Query syntax
|
||||||
|
|
||||||
|
The syntax of a query begins with a root token, followed by any number
|
||||||
|
sub-expressions:
|
||||||
|
|
||||||
|
```
|
||||||
|
$
|
||||||
|
Root of the TOML tree. This must always come first.
|
||||||
|
.name
|
||||||
|
Selects child of this node, where 'name' is a TOML key
|
||||||
|
name.
|
||||||
|
['name']
|
||||||
|
Selects child of this node, where 'name' is a string
|
||||||
|
containing a TOML key name.
|
||||||
|
[index]
|
||||||
|
Selcts child array element at 'index'.
|
||||||
|
..expr
|
||||||
|
Recursively selects all children, filtered by an a union,
|
||||||
|
index, or slice expression.
|
||||||
|
..*
|
||||||
|
Recursive selection of all nodes at this point in the
|
||||||
|
tree.
|
||||||
|
.*
|
||||||
|
Selects all children of the current node.
|
||||||
|
[expr,expr]
|
||||||
|
Union operator - a logical 'or' grouping of two or more
|
||||||
|
sub-expressions: index, key name, or filter.
|
||||||
|
[start:end:step]
|
||||||
|
Slice operator - selects array elements from start to
|
||||||
|
end-1, at the given step. All three arguments are
|
||||||
|
optional.
|
||||||
|
[?(filter)]
|
||||||
|
Named filter expression - the function 'filter' is
|
||||||
|
used to filter children at this node.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Query Indexes And Slices
|
||||||
|
|
||||||
|
Index expressions perform no bounds checking, and will contribute no
|
||||||
|
values to the result set if the provided index or index range is invalid.
|
||||||
|
Negative indexes represent values from the end of the array, counting backwards.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// select the last index of the array named 'foo'
|
||||||
|
query.CompileAndExecute("$.foo[-1]", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
Slice expressions are supported, by using ':' to separate a start/end index pair.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// select up to the first five elements in the array
|
||||||
|
query.CompileAndExecute("$.foo[0:5]", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
Slice expressions also allow negative indexes for the start and stop
|
||||||
|
arguments.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// select all array elements except the last one.
|
||||||
|
query.CompileAndExecute("$.foo[0:-1]", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
Slice expressions may have an optional stride/step parameter:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// select every other element
|
||||||
|
query.CompileAndExecute("$.foo[0::2]", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
Slice start and end parameters are also optional:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// these are all equivalent and select all the values in the array
|
||||||
|
query.CompileAndExecute("$.foo[:]", tree)
|
||||||
|
query.CompileAndExecute("$.foo[::]", tree)
|
||||||
|
query.CompileAndExecute("$.foo[::1]", tree)
|
||||||
|
query.CompileAndExecute("$.foo[0:]", tree)
|
||||||
|
query.CompileAndExecute("$.foo[0::]", tree)
|
||||||
|
query.CompileAndExecute("$.foo[0::1]", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Query Filters
|
||||||
|
|
||||||
|
Query filters are used within a Union [,] or single Filter [] expression.
|
||||||
|
A filter only allows nodes that qualify through to the next expression,
|
||||||
|
and/or into the result set.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// returns children of foo that are permitted by the 'bar' filter.
|
||||||
|
query.CompileAndExecute("$.foo[?(bar)]", tree)
|
||||||
|
```
|
||||||
|
|
||||||
|
There are several filters provided with the library:
|
||||||
|
|
||||||
|
```
|
||||||
|
tree
|
||||||
|
Allows nodes of type Tree.
|
||||||
|
int
|
||||||
|
Allows nodes of type int64.
|
||||||
|
float
|
||||||
|
Allows nodes of type float64.
|
||||||
|
string
|
||||||
|
Allows nodes of type string.
|
||||||
|
time
|
||||||
|
Allows nodes of type time.Time.
|
||||||
|
bool
|
||||||
|
Allows nodes of type bool.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Query Results
|
||||||
|
|
||||||
|
An executed query returns a Result object. This contains the nodes
|
||||||
|
in the TOML tree that qualify the query expression. Position information
|
||||||
|
is also available for each value in the set.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// display the results of a query
|
||||||
|
results := query.CompileAndExecute("$.foo.bar.baz", tree)
|
||||||
|
for idx, value := results.Values() {
|
||||||
|
fmt.Println("%v: %v", results.Positions()[idx], value)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Compiled Queries
|
||||||
|
|
||||||
|
Queries may be executed directly on a Tree object, or compiled ahead
|
||||||
|
of time and executed discretely. The former is more convenient, but has the
|
||||||
|
penalty of having to recompile the query expression each time.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// basic query
|
||||||
|
results := query.CompileAndExecute("$.foo.bar.baz", tree)
|
||||||
|
|
||||||
|
// compiled query
|
||||||
|
query, err := toml.Compile("$.foo.bar.baz")
|
||||||
|
results := query.Execute(tree)
|
||||||
|
|
||||||
|
// run the compiled query again on a different tree
|
||||||
|
moreResults := query.Execute(anotherTree)
|
||||||
|
```
|
||||||
|
|
||||||
|
## User Defined Query Filters
|
||||||
|
|
||||||
|
Filter expressions may also be user defined by using the SetFilter()
|
||||||
|
function on the Query object. The function must return true/false, which
|
||||||
|
signifies if the passed node is kept or discarded, respectively.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// create a query that references a user-defined filter
|
||||||
|
query, _ := query.Compile("$[?(bazOnly)]")
|
||||||
|
|
||||||
|
// define the filter, and assign it to the query
|
||||||
|
query.SetFilter("bazOnly", func(node interface{}) bool{
|
||||||
|
if tree, ok := node.(*Tree); ok {
|
||||||
|
return tree.Has("baz")
|
||||||
|
}
|
||||||
|
return false // reject all other node types
|
||||||
|
})
|
||||||
|
|
||||||
|
// run the query
|
||||||
|
query.Execute(tree)
|
||||||
|
```
|
||||||
+5
-7
@@ -80,25 +80,23 @@
|
|||||||
// Slice expressions also allow negative indexes for the start and stop
|
// Slice expressions also allow negative indexes for the start and stop
|
||||||
// arguments.
|
// arguments.
|
||||||
//
|
//
|
||||||
// // select all array elements.
|
// // select all array elements except the last one.
|
||||||
// query.CompileAndExecute("$.foo[0:-1]", tree)
|
// query.CompileAndExecute("$.foo[0:-1]", tree)
|
||||||
//
|
//
|
||||||
// Slice expressions may have an optional stride/step parameter:
|
// Slice expressions may have an optional stride/step parameter:
|
||||||
//
|
//
|
||||||
// // select every other element
|
// // select every other element
|
||||||
// query.CompileAndExecute("$.foo[0:-1:2]", tree)
|
// query.CompileAndExecute("$.foo[0::2]", tree)
|
||||||
//
|
//
|
||||||
// Slice start and end parameters are also optional:
|
// Slice start and end parameters are also optional:
|
||||||
//
|
//
|
||||||
// // these are all equivalent and select all the values in the array
|
// // these are all equivalent and select all the values in the array
|
||||||
// query.CompileAndExecute("$.foo[:]", tree)
|
// query.CompileAndExecute("$.foo[:]", tree)
|
||||||
// query.CompileAndExecute("$.foo[0:]", tree)
|
// query.CompileAndExecute("$.foo[::]", tree)
|
||||||
// query.CompileAndExecute("$.foo[:-1]", tree)
|
|
||||||
// query.CompileAndExecute("$.foo[0:-1:]", tree)
|
|
||||||
// query.CompileAndExecute("$.foo[::1]", tree)
|
// query.CompileAndExecute("$.foo[::1]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[0:]", tree)
|
||||||
|
// query.CompileAndExecute("$.foo[0::]", tree)
|
||||||
// query.CompileAndExecute("$.foo[0::1]", tree)
|
// query.CompileAndExecute("$.foo[0::1]", tree)
|
||||||
// query.CompileAndExecute("$.foo[:-1:1]", tree)
|
|
||||||
// query.CompileAndExecute("$.foo[0:-1:1]", tree)
|
|
||||||
//
|
//
|
||||||
// Query Filters
|
// Query Filters
|
||||||
//
|
//
|
||||||
|
|||||||
+114
-35
@@ -2,6 +2,8 @@ package query
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml"
|
"github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -44,16 +46,16 @@ func newMatchKeyFn(name string) *matchKeyFn {
|
|||||||
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
|
||||||
if array, ok := node.([]*toml.Tree); ok {
|
if array, ok := node.([]*toml.Tree); ok {
|
||||||
for _, tree := range array {
|
for _, tree := range array {
|
||||||
item := tree.Get(f.Name)
|
item := tree.GetPath([]string{f.Name})
|
||||||
if item != nil {
|
if item != nil {
|
||||||
ctx.lastPosition = tree.GetPosition(f.Name)
|
ctx.lastPosition = tree.GetPositionPath([]string{f.Name})
|
||||||
f.next.call(item, ctx)
|
f.next.call(item, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if tree, ok := node.(*toml.Tree); ok {
|
} else if tree, ok := node.(*toml.Tree); ok {
|
||||||
item := tree.Get(f.Name)
|
item := tree.GetPath([]string{f.Name})
|
||||||
if item != nil {
|
if item != nil {
|
||||||
ctx.lastPosition = tree.GetPosition(f.Name)
|
ctx.lastPosition = tree.GetPositionPath([]string{f.Name})
|
||||||
f.next.call(item, ctx)
|
f.next.call(item, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -70,51 +72,128 @@ func newMatchIndexFn(idx int) *matchIndexFn {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
|
||||||
if arr, ok := node.([]interface{}); ok {
|
v := reflect.ValueOf(node)
|
||||||
if f.Idx < len(arr) && f.Idx >= 0 {
|
if v.Kind() == reflect.Slice {
|
||||||
|
if v.Len() == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Manage negative values
|
||||||
|
idx := f.Idx
|
||||||
|
if idx < 0 {
|
||||||
|
idx += v.Len()
|
||||||
|
}
|
||||||
|
if 0 <= idx && idx < v.Len() {
|
||||||
|
callNextIndexSlice(f.next, node, ctx, v.Index(idx).Interface())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func callNextIndexSlice(next pathFn, node interface{}, ctx *queryContext, value interface{}) {
|
||||||
if treesArray, ok := node.([]*toml.Tree); ok {
|
if treesArray, ok := node.([]*toml.Tree); ok {
|
||||||
if len(treesArray) > 0 {
|
|
||||||
ctx.lastPosition = treesArray[0].Position()
|
ctx.lastPosition = treesArray[0].Position()
|
||||||
}
|
}
|
||||||
}
|
next.call(value, ctx)
|
||||||
f.next.call(arr[f.Idx], ctx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// filter by slicing
|
// filter by slicing
|
||||||
type matchSliceFn struct {
|
type matchSliceFn struct {
|
||||||
matchBase
|
matchBase
|
||||||
Start, End, Step int
|
Start, End, Step *int
|
||||||
}
|
}
|
||||||
|
|
||||||
func newMatchSliceFn(start, end, step int) *matchSliceFn {
|
func newMatchSliceFn() *matchSliceFn {
|
||||||
return &matchSliceFn{Start: start, End: end, Step: step}
|
return &matchSliceFn{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *matchSliceFn) setStart(start int) *matchSliceFn {
|
||||||
|
f.Start = &start
|
||||||
|
return f
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *matchSliceFn) setEnd(end int) *matchSliceFn {
|
||||||
|
f.End = &end
|
||||||
|
return f
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *matchSliceFn) setStep(step int) *matchSliceFn {
|
||||||
|
f.Step = &step
|
||||||
|
return f
|
||||||
}
|
}
|
||||||
|
|
||||||
func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
|
||||||
if arr, ok := node.([]interface{}); ok {
|
v := reflect.ValueOf(node)
|
||||||
// adjust indexes for negative values, reverse ordering
|
if v.Kind() == reflect.Slice {
|
||||||
realStart, realEnd := f.Start, f.End
|
if v.Len() == 0 {
|
||||||
if realStart < 0 {
|
return
|
||||||
realStart = len(arr) + realStart
|
|
||||||
}
|
}
|
||||||
if realEnd < 0 {
|
|
||||||
realEnd = len(arr) + realEnd
|
var start, end, step int
|
||||||
|
|
||||||
|
// Initialize step
|
||||||
|
if f.Step != nil {
|
||||||
|
step = *f.Step
|
||||||
|
} else {
|
||||||
|
step = 1
|
||||||
}
|
}
|
||||||
if realEnd < realStart {
|
|
||||||
realEnd, realStart = realStart, realEnd // swap
|
// Initialize start
|
||||||
|
if f.Start != nil {
|
||||||
|
start = *f.Start
|
||||||
|
// Manage negative values
|
||||||
|
if start < 0 {
|
||||||
|
start += v.Len()
|
||||||
}
|
}
|
||||||
// loop and gather
|
// Manage out of range values
|
||||||
for idx := realStart; idx < realEnd; idx += f.Step {
|
start = max(start, 0)
|
||||||
if treesArray, ok := node.([]*toml.Tree); ok {
|
start = min(start, v.Len()-1)
|
||||||
if len(treesArray) > 0 {
|
} else if step > 0 {
|
||||||
ctx.lastPosition = treesArray[0].Position()
|
start = 0
|
||||||
|
} else {
|
||||||
|
start = v.Len() - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize end
|
||||||
|
if f.End != nil {
|
||||||
|
end = *f.End
|
||||||
|
// Manage negative values
|
||||||
|
if end < 0 {
|
||||||
|
end += v.Len()
|
||||||
|
}
|
||||||
|
// Manage out of range values
|
||||||
|
end = max(end, -1)
|
||||||
|
end = min(end, v.Len())
|
||||||
|
} else if step > 0 {
|
||||||
|
end = v.Len()
|
||||||
|
} else {
|
||||||
|
end = -1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loop on values
|
||||||
|
if step > 0 {
|
||||||
|
for idx := start; idx < end; idx += step {
|
||||||
|
callNextIndexSlice(f.next, node, ctx, v.Index(idx).Interface())
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for idx := start; idx > end; idx += step {
|
||||||
|
callNextIndexSlice(f.next, node, ctx, v.Index(idx).Interface())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
f.next.call(arr[idx], ctx)
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func min(a, b int) int {
|
||||||
|
if a < b {
|
||||||
|
return a
|
||||||
}
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func max(a, b int) int {
|
||||||
|
if a > b {
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
// match anything
|
// match anything
|
||||||
@@ -129,8 +208,8 @@ func newMatchAnyFn() *matchAnyFn {
|
|||||||
func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
|
func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
|
||||||
if tree, ok := node.(*toml.Tree); ok {
|
if tree, ok := node.(*toml.Tree); ok {
|
||||||
for _, k := range tree.Keys() {
|
for _, k := range tree.Keys() {
|
||||||
v := tree.Get(k)
|
v := tree.GetPath([]string{k})
|
||||||
ctx.lastPosition = tree.GetPosition(k)
|
ctx.lastPosition = tree.GetPositionPath([]string{k})
|
||||||
f.next.call(v, ctx)
|
f.next.call(v, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -168,8 +247,8 @@ func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
|
|||||||
var visit func(tree *toml.Tree)
|
var visit func(tree *toml.Tree)
|
||||||
visit = func(tree *toml.Tree) {
|
visit = func(tree *toml.Tree) {
|
||||||
for _, k := range tree.Keys() {
|
for _, k := range tree.Keys() {
|
||||||
v := tree.Get(k)
|
v := tree.GetPath([]string{k})
|
||||||
ctx.lastPosition = tree.GetPosition(k)
|
ctx.lastPosition = tree.GetPositionPath([]string{k})
|
||||||
f.next.call(v, ctx)
|
f.next.call(v, ctx)
|
||||||
switch node := v.(type) {
|
switch node := v.(type) {
|
||||||
case *toml.Tree:
|
case *toml.Tree:
|
||||||
@@ -207,9 +286,9 @@ func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
|
|||||||
switch castNode := node.(type) {
|
switch castNode := node.(type) {
|
||||||
case *toml.Tree:
|
case *toml.Tree:
|
||||||
for _, k := range castNode.Keys() {
|
for _, k := range castNode.Keys() {
|
||||||
v := castNode.Get(k)
|
v := castNode.GetPath([]string{k})
|
||||||
if fn(v) {
|
if fn(v) {
|
||||||
ctx.lastPosition = castNode.GetPosition(k)
|
ctx.lastPosition = castNode.GetPositionPath([]string{k})
|
||||||
f.next.call(v, ctx)
|
f.next.call(v, ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
+21
-10
@@ -2,8 +2,10 @@ package query
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/pelletier/go-toml"
|
"strconv"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
// dump path tree to a string
|
// dump path tree to a string
|
||||||
@@ -19,8 +21,17 @@ func pathString(root pathFn) string {
|
|||||||
result += fmt.Sprintf("{%d}", fn.Idx)
|
result += fmt.Sprintf("{%d}", fn.Idx)
|
||||||
result += pathString(fn.next)
|
result += pathString(fn.next)
|
||||||
case *matchSliceFn:
|
case *matchSliceFn:
|
||||||
result += fmt.Sprintf("{%d:%d:%d}",
|
startString, endString, stepString := "nil", "nil", "nil"
|
||||||
fn.Start, fn.End, fn.Step)
|
if fn.Start != nil {
|
||||||
|
startString = strconv.Itoa(*fn.Start)
|
||||||
|
}
|
||||||
|
if fn.End != nil {
|
||||||
|
endString = strconv.Itoa(*fn.End)
|
||||||
|
}
|
||||||
|
if fn.Step != nil {
|
||||||
|
stepString = strconv.Itoa(*fn.Step)
|
||||||
|
}
|
||||||
|
result += fmt.Sprintf("{%s:%s:%s}", startString, endString, stepString)
|
||||||
result += pathString(fn.next)
|
result += pathString(fn.next)
|
||||||
case *matchAnyFn:
|
case *matchAnyFn:
|
||||||
result += "{}"
|
result += "{}"
|
||||||
@@ -110,7 +121,7 @@ func TestPathSliceStart(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123:]",
|
"$[123:]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, maxInt, 1),
|
newMatchSliceFn().setStart(123),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -118,7 +129,7 @@ func TestPathSliceStartEnd(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123:456]",
|
"$[123:456]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, 456, 1),
|
newMatchSliceFn().setStart(123).setEnd(456),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -126,7 +137,7 @@ func TestPathSliceStartEndColon(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123:456:]",
|
"$[123:456:]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, 456, 1),
|
newMatchSliceFn().setStart(123).setEnd(456),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -134,7 +145,7 @@ func TestPathSliceStartStep(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123::7]",
|
"$[123::7]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, maxInt, 7),
|
newMatchSliceFn().setStart(123).setStep(7),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -142,7 +153,7 @@ func TestPathSliceEndStep(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[:456:7]",
|
"$[:456:7]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(0, 456, 7),
|
newMatchSliceFn().setEnd(456).setStep(7),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -150,7 +161,7 @@ func TestPathSliceStep(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[::7]",
|
"$[::7]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(0, maxInt, 7),
|
newMatchSliceFn().setStep(7),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -158,7 +169,7 @@ func TestPathSliceAll(t *testing.T) {
|
|||||||
assertPath(t,
|
assertPath(t,
|
||||||
"$[123:456:7]",
|
"$[123:456:7]",
|
||||||
buildPath(
|
buildPath(
|
||||||
newMatchSliceFn(123, 456, 7),
|
newMatchSliceFn().setStart(123).setEnd(456).setStep(7),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
+11
-8
@@ -203,12 +203,13 @@ loop: // labeled loop for easy breaking
|
|||||||
|
|
||||||
func (p *queryParser) parseSliceExpr() queryParserStateFn {
|
func (p *queryParser) parseSliceExpr() queryParserStateFn {
|
||||||
// init slice to grab all elements
|
// init slice to grab all elements
|
||||||
start, end, step := 0, maxInt, 1
|
var start, end, step *int = nil, nil, nil
|
||||||
|
|
||||||
// parse optional start
|
// parse optional start
|
||||||
tok := p.getToken()
|
tok := p.getToken()
|
||||||
if tok.typ == tokenInteger {
|
if tok.typ == tokenInteger {
|
||||||
start = tok.Int()
|
v := tok.Int()
|
||||||
|
start = &v
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
}
|
}
|
||||||
if tok.typ != tokenColon {
|
if tok.typ != tokenColon {
|
||||||
@@ -218,11 +219,12 @@ func (p *queryParser) parseSliceExpr() queryParserStateFn {
|
|||||||
// parse optional end
|
// parse optional end
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
if tok.typ == tokenInteger {
|
if tok.typ == tokenInteger {
|
||||||
end = tok.Int()
|
v := tok.Int()
|
||||||
|
end = &v
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
}
|
}
|
||||||
if tok.typ == tokenRightBracket {
|
if tok.typ == tokenRightBracket {
|
||||||
p.query.appendPath(newMatchSliceFn(start, end, step))
|
p.query.appendPath(&matchSliceFn{Start: start, End: end, Step: step})
|
||||||
return p.parseMatchExpr
|
return p.parseMatchExpr
|
||||||
}
|
}
|
||||||
if tok.typ != tokenColon {
|
if tok.typ != tokenColon {
|
||||||
@@ -232,17 +234,18 @@ func (p *queryParser) parseSliceExpr() queryParserStateFn {
|
|||||||
// parse optional step
|
// parse optional step
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
if tok.typ == tokenInteger {
|
if tok.typ == tokenInteger {
|
||||||
step = tok.Int()
|
v := tok.Int()
|
||||||
if step < 0 {
|
if v == 0 {
|
||||||
return p.parseError(tok, "step must be a positive value")
|
return p.parseError(tok, "step cannot be zero")
|
||||||
}
|
}
|
||||||
|
step = &v
|
||||||
tok = p.getToken()
|
tok = p.getToken()
|
||||||
}
|
}
|
||||||
if tok.typ != tokenRightBracket {
|
if tok.typ != tokenRightBracket {
|
||||||
return p.parseError(tok, "expected ']'")
|
return p.parseError(tok, "expected ']'")
|
||||||
}
|
}
|
||||||
|
|
||||||
p.query.appendPath(newMatchSliceFn(start, end, step))
|
p.query.appendPath(&matchSliceFn{Start: start, End: end, Step: step})
|
||||||
return p.parseMatchExpr
|
return p.parseMatchExpr
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
+226
-95
@@ -78,6 +78,19 @@ func assertValue(t *testing.T, result, ref interface{}) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func assertParseError(t *testing.T, query string, errString string) {
|
||||||
|
_, err := Compile(query)
|
||||||
|
if err == nil {
|
||||||
|
t.Error("error should be non-nil")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err.Error() != errString {
|
||||||
|
t.Errorf("error does not match")
|
||||||
|
t.Log("test:", err.Error())
|
||||||
|
t.Log("ref: ", errString)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func assertQueryPositions(t *testing.T, tomlDoc string, query string, ref []interface{}) {
|
func assertQueryPositions(t *testing.T, tomlDoc string, query string, ref []interface{}) {
|
||||||
tree, err := toml.Load(tomlDoc)
|
tree, err := toml.Load(tomlDoc)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -128,54 +141,213 @@ func TestQueryKeyString(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryIndex(t *testing.T) {
|
func TestQueryKeyUnicodeString(t *testing.T) {
|
||||||
assertQueryPositions(t,
|
assertQueryPositions(t,
|
||||||
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
|
"['f𝟘.o']\na = 42",
|
||||||
"$.foo.a[5]",
|
"$['f𝟘.o']['a']",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(6), toml.Position{2, 1},
|
int64(42), toml.Position{2, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestQueryIndexError1(t *testing.T) {
|
||||||
|
assertParseError(t, "$.foo.a[5", "(1, 10): expected ',' or ']', not ''")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryIndexError2(t *testing.T) {
|
||||||
|
assertParseError(t, "$.foo.a[]", "(1, 9): expected union sub expression, not ']', 0")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryIndex(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[5]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(5), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryIndexNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[-2]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(8), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryIndexWrong(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[99]",
|
||||||
|
[]interface{}{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryIndexEmpty(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = []",
|
||||||
|
"$.foo.a[5]",
|
||||||
|
[]interface{}{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryIndexTree(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[[foo]]\na = [0,1,2,3,4,5,6,7,8,9]\n[[foo]]\nb = 3",
|
||||||
|
"$.foo[1].b",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(3), toml.Position{4, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceError1(t *testing.T) {
|
||||||
|
assertParseError(t, "$.foo.a[3:?]", "(1, 11): expected ']' or ':'")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceError2(t *testing.T) {
|
||||||
|
assertParseError(t, "$.foo.a[:::]", "(1, 11): expected ']'")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceError3(t *testing.T) {
|
||||||
|
assertParseError(t, "$.foo.a[::0]", "(1, 11): step cannot be zero")
|
||||||
|
}
|
||||||
|
|
||||||
func TestQuerySliceRange(t *testing.T) {
|
func TestQuerySliceRange(t *testing.T) {
|
||||||
assertQueryPositions(t,
|
assertQueryPositions(t,
|
||||||
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
"$.foo.a[0:5]",
|
"$.foo.a[:5]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{int64(0), toml.Position{2, 1}},
|
||||||
int64(1), toml.Position{2, 1},
|
queryTestNode{int64(1), toml.Position{2, 1}},
|
||||||
},
|
queryTestNode{int64(2), toml.Position{2, 1}},
|
||||||
queryTestNode{
|
queryTestNode{int64(3), toml.Position{2, 1}},
|
||||||
int64(2), toml.Position{2, 1},
|
queryTestNode{int64(4), toml.Position{2, 1}},
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(3), toml.Position{2, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(4), toml.Position{2, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(5), toml.Position{2, 1},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQuerySliceStep(t *testing.T) {
|
func TestQuerySliceStep(t *testing.T) {
|
||||||
assertQueryPositions(t,
|
assertQueryPositions(t,
|
||||||
"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
"$.foo.a[0:5:2]",
|
"$.foo.a[0:5:2]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(0), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(2), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(4), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceStartNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[-3:]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(7), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(8), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(9), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceEndNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[:-6]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(0), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(1), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(2), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(3), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceStepNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[::-2]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(9), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(7), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(5), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(3), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(1), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceStartOverRange(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[-99:3]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(0), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(1), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(2), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceStartOverRangeNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[99:7:-1]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(9), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(8), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceEndOverRange(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[7:99]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(7), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(8), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(9), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceEndOverRangeNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[2:-99:-1]",
|
||||||
|
[]interface{}{
|
||||||
|
queryTestNode{int64(2), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(1), toml.Position{2, 1}},
|
||||||
|
queryTestNode{int64(0), toml.Position{2, 1}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceWrongRange(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[5:3]",
|
||||||
|
[]interface{}{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceWrongRangeNegative(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = [0,1,2,3,4,5,6,7,8,9]",
|
||||||
|
"$.foo.a[3:5:-1]",
|
||||||
|
[]interface{}{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceEmpty(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[foo]\na = []",
|
||||||
|
"$.foo.a[5:]",
|
||||||
|
[]interface{}{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQuerySliceTree(t *testing.T) {
|
||||||
|
assertQueryPositions(t,
|
||||||
|
"[[foo]]\na='nok'\n[[foo]]\na = [0,1,2,3,4,5,6,7,8,9]\n[[foo]]\na='ok'\nb = 3",
|
||||||
|
"$.foo[1:].a",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
int64(1), toml.Position{2, 1},
|
[]interface{}{
|
||||||
},
|
int64(0), int64(1), int64(2), int64(3), int64(4),
|
||||||
queryTestNode{
|
int64(5), int64(6), int64(7), int64(8), int64(9)},
|
||||||
int64(3), toml.Position{2, 1},
|
toml.Position{4, 1}},
|
||||||
},
|
queryTestNode{"ok", toml.Position{6, 1}},
|
||||||
queryTestNode{
|
|
||||||
int64(5), toml.Position{2, 1},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -265,12 +437,8 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"b": int64(2),
|
"b": int64(2),
|
||||||
}, toml.Position{1, 1},
|
}, toml.Position{1, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{int64(1), toml.Position{2, 1}},
|
||||||
int64(1), toml.Position{2, 1},
|
queryTestNode{int64(2), toml.Position{3, 1}},
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(2), toml.Position{3, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"foo": map[string]interface{}{
|
"foo": map[string]interface{}{
|
||||||
@@ -285,12 +453,8 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"b": int64(4),
|
"b": int64(4),
|
||||||
}, toml.Position{4, 1},
|
}, toml.Position{4, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{int64(3), toml.Position{5, 1}},
|
||||||
int64(3), toml.Position{5, 1},
|
queryTestNode{int64(4), toml.Position{6, 1}},
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(4), toml.Position{6, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
queryTestNode{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"foo": map[string]interface{}{
|
"foo": map[string]interface{}{
|
||||||
@@ -305,12 +469,8 @@ func TestQueryRecursionAll(t *testing.T) {
|
|||||||
"b": int64(6),
|
"b": int64(6),
|
||||||
}, toml.Position{7, 1},
|
}, toml.Position{7, 1},
|
||||||
},
|
},
|
||||||
queryTestNode{
|
queryTestNode{int64(5), toml.Position{8, 1}},
|
||||||
int64(5), toml.Position{8, 1},
|
queryTestNode{int64(6), toml.Position{9, 1}},
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(6), toml.Position{9, 1},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -358,56 +518,30 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
assertQueryPositions(t, string(buff),
|
assertQueryPositions(t, string(buff),
|
||||||
"$..[?(int)]",
|
"$..[?(int)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{int64(8001), toml.Position{13, 1}},
|
||||||
int64(8001), toml.Position{13, 1},
|
queryTestNode{int64(8001), toml.Position{13, 1}},
|
||||||
},
|
queryTestNode{int64(8002), toml.Position{13, 1}},
|
||||||
queryTestNode{
|
queryTestNode{int64(5000), toml.Position{14, 1}},
|
||||||
int64(8001), toml.Position{13, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(8002), toml.Position{13, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
int64(5000), toml.Position{14, 1},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
|
|
||||||
assertQueryPositions(t, string(buff),
|
assertQueryPositions(t, string(buff),
|
||||||
"$..[?(string)]",
|
"$..[?(string)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{"TOML Example", toml.Position{3, 1}},
|
||||||
"TOML Example", toml.Position{3, 1},
|
queryTestNode{"Tom Preston-Werner", toml.Position{6, 1}},
|
||||||
},
|
queryTestNode{"GitHub", toml.Position{7, 1}},
|
||||||
queryTestNode{
|
queryTestNode{"GitHub Cofounder & CEO\nLikes tater tots and beer.", toml.Position{8, 1}},
|
||||||
"Tom Preston-Werner", toml.Position{6, 1},
|
queryTestNode{"192.168.1.1", toml.Position{12, 1}},
|
||||||
},
|
queryTestNode{"10.0.0.1", toml.Position{21, 3}},
|
||||||
queryTestNode{
|
queryTestNode{"eqdc10", toml.Position{22, 3}},
|
||||||
"GitHub", toml.Position{7, 1},
|
queryTestNode{"10.0.0.2", toml.Position{25, 3}},
|
||||||
},
|
queryTestNode{"eqdc10", toml.Position{26, 3}},
|
||||||
queryTestNode{
|
|
||||||
"GitHub Cofounder & CEO\nLikes tater tots and beer.",
|
|
||||||
toml.Position{8, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
"192.168.1.1", toml.Position{12, 1},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
"10.0.0.1", toml.Position{21, 3},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
"eqdc10", toml.Position{22, 3},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
"10.0.0.2", toml.Position{25, 3},
|
|
||||||
},
|
|
||||||
queryTestNode{
|
|
||||||
"eqdc10", toml.Position{26, 3},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
|
|
||||||
assertQueryPositions(t, string(buff),
|
assertQueryPositions(t, string(buff),
|
||||||
"$..[?(float)]",
|
"$..[?(float)]",
|
||||||
[]interface{}{ // no float values in document
|
[]interface{}{
|
||||||
|
queryTestNode{4e-08, toml.Position{30, 1}},
|
||||||
})
|
})
|
||||||
|
|
||||||
tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
|
tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
|
||||||
@@ -460,6 +594,7 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
[]interface{}{"gamma", "delta"},
|
[]interface{}{"gamma", "delta"},
|
||||||
[]interface{}{int64(1), int64(2)},
|
[]interface{}{int64(1), int64(2)},
|
||||||
},
|
},
|
||||||
|
"score": 4e-08,
|
||||||
}, toml.Position{28, 1},
|
}, toml.Position{28, 1},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
@@ -467,16 +602,12 @@ func TestQueryFilterFn(t *testing.T) {
|
|||||||
assertQueryPositions(t, string(buff),
|
assertQueryPositions(t, string(buff),
|
||||||
"$..[?(time)]",
|
"$..[?(time)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{tv, toml.Position{9, 1}},
|
||||||
tv, toml.Position{9, 1},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
|
|
||||||
assertQueryPositions(t, string(buff),
|
assertQueryPositions(t, string(buff),
|
||||||
"$..[?(bool)]",
|
"$..[?(bool)]",
|
||||||
[]interface{}{
|
[]interface{}{
|
||||||
queryTestNode{
|
queryTestNode{true, toml.Position{15, 1}},
|
||||||
true, toml.Position{15, 1},
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
+23
-29
@@ -7,23 +7,24 @@ import (
|
|||||||
"github.com/pelletier/go-toml"
|
"github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
|
func assertArrayContainsInOrder(t *testing.T, array []interface{}, objects ...interface{}) {
|
||||||
if len(array) != len(objects) {
|
if len(array) != len(objects) {
|
||||||
t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
|
t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, o := range objects {
|
for i := 0; i < len(array); i++ {
|
||||||
found := false
|
if array[i] != objects[i] {
|
||||||
for _, a := range array {
|
t.Fatalf("wanted '%s', have '%s'", objects[i], array[i])
|
||||||
if a == o {
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !found {
|
}
|
||||||
t.Fatal(o, "not found in array", array)
|
|
||||||
}
|
func checkQuery(t *testing.T, tree *toml.Tree, query string, objects ...interface{}) {
|
||||||
|
results, err := CompileAndExecute(query, tree)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("unexpected error:", err)
|
||||||
}
|
}
|
||||||
|
assertArrayContainsInOrder(t, results.Values(), objects...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryExample(t *testing.T) {
|
func TestQueryExample(t *testing.T) {
|
||||||
@@ -38,15 +39,17 @@ func TestQueryExample(t *testing.T) {
|
|||||||
title = "Neuromancer"
|
title = "Neuromancer"
|
||||||
author = "William Gibson"
|
author = "William Gibson"
|
||||||
`)
|
`)
|
||||||
authors, err := CompileAndExecute("$.book.author", config)
|
|
||||||
if err != nil {
|
checkQuery(t, config, "$.book.author", "Stephen King", "Ernest Hemmingway", "William Gibson")
|
||||||
t.Fatal("unexpected error:", err)
|
|
||||||
}
|
checkQuery(t, config, "$.book[0].author", "Stephen King")
|
||||||
names := authors.Values()
|
checkQuery(t, config, "$.book[-1].author", "William Gibson")
|
||||||
if len(names) != 3 {
|
checkQuery(t, config, "$.book[1:].author", "Ernest Hemmingway", "William Gibson")
|
||||||
t.Fatalf("query should return 3 names but returned %d", len(names))
|
checkQuery(t, config, "$.book[-1:].author", "William Gibson")
|
||||||
}
|
checkQuery(t, config, "$.book[::2].author", "Stephen King", "William Gibson")
|
||||||
assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
|
checkQuery(t, config, "$.book[::-1].author", "William Gibson", "Ernest Hemmingway", "Stephen King")
|
||||||
|
checkQuery(t, config, "$.book[:].author", "Stephen King", "Ernest Hemmingway", "William Gibson")
|
||||||
|
checkQuery(t, config, "$.book[::].author", "Stephen King", "Ernest Hemmingway", "William Gibson")
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryReadmeExample(t *testing.T) {
|
func TestQueryReadmeExample(t *testing.T) {
|
||||||
@@ -56,16 +59,7 @@ user = "pelletier"
|
|||||||
password = "mypassword"
|
password = "mypassword"
|
||||||
`)
|
`)
|
||||||
|
|
||||||
query, err := Compile("$..[user,password]")
|
checkQuery(t, config, "$..[user,password]", "pelletier", "mypassword")
|
||||||
if err != nil {
|
|
||||||
t.Fatal("unexpected error:", err)
|
|
||||||
}
|
|
||||||
results := query.Execute(config)
|
|
||||||
values := results.Values()
|
|
||||||
if len(values) != 2 {
|
|
||||||
t.Fatalf("query should return 2 values but returned %d", len(values))
|
|
||||||
}
|
|
||||||
assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestQueryPathNotPresent(t *testing.T) {
|
func TestQueryPathNotPresent(t *testing.T) {
|
||||||
|
|||||||
+4
-4
@@ -2,9 +2,9 @@ package query
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/pelletier/go-toml"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"unicode"
|
|
||||||
|
"github.com/pelletier/go-toml"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Define tokens
|
// Define tokens
|
||||||
@@ -92,11 +92,11 @@ func isSpace(r rune) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func isAlphanumeric(r rune) bool {
|
func isAlphanumeric(r rune) bool {
|
||||||
return unicode.IsLetter(r) || r == '_'
|
return 'a' <= r && r <= 'z' || 'A' <= r && r <= 'Z' || r == '_'
|
||||||
}
|
}
|
||||||
|
|
||||||
func isDigit(r rune) bool {
|
func isDigit(r rune) bool {
|
||||||
return unicode.IsNumber(r)
|
return '0' <= r && r <= '9'
|
||||||
}
|
}
|
||||||
|
|
||||||
func isHexDigit(r rune) bool {
|
func isHexDigit(r rune) bool {
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import "fmt"
|
||||||
"fmt"
|
|
||||||
"strconv"
|
|
||||||
"unicode"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Define tokens
|
// Define tokens
|
||||||
type tokenType int
|
type tokenType int
|
||||||
@@ -34,7 +30,9 @@ const (
|
|||||||
tokenRightParen
|
tokenRightParen
|
||||||
tokenDoubleLeftBracket
|
tokenDoubleLeftBracket
|
||||||
tokenDoubleRightBracket
|
tokenDoubleRightBracket
|
||||||
tokenDate
|
tokenLocalDate
|
||||||
|
tokenLocalTime
|
||||||
|
tokenTimeOffset
|
||||||
tokenKeyGroup
|
tokenKeyGroup
|
||||||
tokenKeyGroupArray
|
tokenKeyGroupArray
|
||||||
tokenComma
|
tokenComma
|
||||||
@@ -68,7 +66,9 @@ var tokenTypeNames = []string{
|
|||||||
")",
|
")",
|
||||||
"]]",
|
"]]",
|
||||||
"[[",
|
"[[",
|
||||||
"Date",
|
"LocalDate",
|
||||||
|
"LocalTime",
|
||||||
|
"TimeOffset",
|
||||||
"KeyGroup",
|
"KeyGroup",
|
||||||
"KeyGroupArray",
|
"KeyGroupArray",
|
||||||
",",
|
",",
|
||||||
@@ -95,14 +95,6 @@ func (tt tokenType) String() string {
|
|||||||
return "Unknown"
|
return "Unknown"
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t token) Int() int {
|
|
||||||
if result, err := strconv.Atoi(t.val); err != nil {
|
|
||||||
panic(err)
|
|
||||||
} else {
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t token) String() string {
|
func (t token) String() string {
|
||||||
switch t.typ {
|
switch t.typ {
|
||||||
case tokenEOF:
|
case tokenEOF:
|
||||||
@@ -119,7 +111,7 @@ func isSpace(r rune) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func isAlphanumeric(r rune) bool {
|
func isAlphanumeric(r rune) bool {
|
||||||
return unicode.IsLetter(r) || r == '_'
|
return 'a' <= r && r <= 'z' || 'A' <= r && r <= 'Z' || r == '_'
|
||||||
}
|
}
|
||||||
|
|
||||||
func isKeyChar(r rune) bool {
|
func isKeyChar(r rune) bool {
|
||||||
@@ -134,7 +126,7 @@ func isKeyStartChar(r rune) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func isDigit(r rune) bool {
|
func isDigit(r rune) bool {
|
||||||
return unicode.IsNumber(r)
|
return '0' <= r && r <= '9'
|
||||||
}
|
}
|
||||||
|
|
||||||
func isHexDigit(r rune) bool {
|
func isHexDigit(r rune) bool {
|
||||||
|
|||||||
+3
-1
@@ -25,7 +25,9 @@ func TestTokenStringer(t *testing.T) {
|
|||||||
{tokenRightParen, ")"},
|
{tokenRightParen, ")"},
|
||||||
{tokenDoubleLeftBracket, "]]"},
|
{tokenDoubleLeftBracket, "]]"},
|
||||||
{tokenDoubleRightBracket, "[["},
|
{tokenDoubleRightBracket, "[["},
|
||||||
{tokenDate, "Date"},
|
{tokenLocalDate, "LocalDate"},
|
||||||
|
{tokenLocalTime, "LocalTime"},
|
||||||
|
{tokenTimeOffset, "TimeOffset"},
|
||||||
{tokenKeyGroup, "KeyGroup"},
|
{tokenKeyGroup, "KeyGroup"},
|
||||||
{tokenKeyGroupArray, "KeyGroupArray"},
|
{tokenKeyGroupArray, "KeyGroupArray"},
|
||||||
{tokenComma, ","},
|
{tokenComma, ","},
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ type tomlValue struct {
|
|||||||
comment string
|
comment string
|
||||||
commented bool
|
commented bool
|
||||||
multiline bool
|
multiline bool
|
||||||
|
literal bool
|
||||||
position Position
|
position Position
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -23,6 +24,7 @@ type Tree struct {
|
|||||||
values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree
|
values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree
|
||||||
comment string
|
comment string
|
||||||
commented bool
|
commented bool
|
||||||
|
inline bool
|
||||||
position Position
|
position Position
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -121,6 +123,89 @@ func (t *Tree) GetPath(keys []string) interface{} {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetArray returns the value at key in the Tree.
|
||||||
|
// It returns []string, []int64, etc type if key has homogeneous lists
|
||||||
|
// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings.
|
||||||
|
// Returns nil if the path does not exist in the tree.
|
||||||
|
// If keys is of length zero, the current tree is returned.
|
||||||
|
func (t *Tree) GetArray(key string) interface{} {
|
||||||
|
if key == "" {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
return t.GetArrayPath(strings.Split(key, "."))
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetArrayPath returns the element in the tree indicated by 'keys'.
|
||||||
|
// If keys is of length zero, the current tree is returned.
|
||||||
|
func (t *Tree) GetArrayPath(keys []string) interface{} {
|
||||||
|
if len(keys) == 0 {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
subtree := t
|
||||||
|
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||||
|
value, exists := subtree.values[intermediateKey]
|
||||||
|
if !exists {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch node := value.(type) {
|
||||||
|
case *Tree:
|
||||||
|
subtree = node
|
||||||
|
case []*Tree:
|
||||||
|
// go to most recent element
|
||||||
|
if len(node) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
subtree = node[len(node)-1]
|
||||||
|
default:
|
||||||
|
return nil // cannot navigate through other node types
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// branch based on final node type
|
||||||
|
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||||
|
case *tomlValue:
|
||||||
|
switch n := node.value.(type) {
|
||||||
|
case []interface{}:
|
||||||
|
return getArray(n)
|
||||||
|
default:
|
||||||
|
return node.value
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// if homogeneous array, then return slice type object over []interface{}
|
||||||
|
func getArray(n []interface{}) interface{} {
|
||||||
|
var s []string
|
||||||
|
var i64 []int64
|
||||||
|
var f64 []float64
|
||||||
|
var bl []bool
|
||||||
|
for _, value := range n {
|
||||||
|
switch v := value.(type) {
|
||||||
|
case string:
|
||||||
|
s = append(s, v)
|
||||||
|
case int64:
|
||||||
|
i64 = append(i64, v)
|
||||||
|
case float64:
|
||||||
|
f64 = append(f64, v)
|
||||||
|
case bool:
|
||||||
|
bl = append(bl, v)
|
||||||
|
default:
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(s) == len(n) {
|
||||||
|
return s
|
||||||
|
} else if len(i64) == len(n) {
|
||||||
|
return i64
|
||||||
|
} else if len(f64) == len(n) {
|
||||||
|
return f64
|
||||||
|
} else if len(bl) == len(n) {
|
||||||
|
return bl
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
// GetPosition returns the position of the given key.
|
// GetPosition returns the position of the given key.
|
||||||
func (t *Tree) GetPosition(key string) Position {
|
func (t *Tree) GetPosition(key string) Position {
|
||||||
if key == "" {
|
if key == "" {
|
||||||
@@ -129,6 +214,50 @@ func (t *Tree) GetPosition(key string) Position {
|
|||||||
return t.GetPositionPath(strings.Split(key, "."))
|
return t.GetPositionPath(strings.Split(key, "."))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SetPositionPath sets the position of element in the tree indicated by 'keys'.
|
||||||
|
// If keys is of length zero, the current tree position is set.
|
||||||
|
func (t *Tree) SetPositionPath(keys []string, pos Position) {
|
||||||
|
if len(keys) == 0 {
|
||||||
|
t.position = pos
|
||||||
|
return
|
||||||
|
}
|
||||||
|
subtree := t
|
||||||
|
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||||
|
value, exists := subtree.values[intermediateKey]
|
||||||
|
if !exists {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch node := value.(type) {
|
||||||
|
case *Tree:
|
||||||
|
subtree = node
|
||||||
|
case []*Tree:
|
||||||
|
// go to most recent element
|
||||||
|
if len(node) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
subtree = node[len(node)-1]
|
||||||
|
default:
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// branch based on final node type
|
||||||
|
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||||
|
case *tomlValue:
|
||||||
|
node.position = pos
|
||||||
|
return
|
||||||
|
case *Tree:
|
||||||
|
node.position = pos
|
||||||
|
return
|
||||||
|
case []*Tree:
|
||||||
|
// go to most recent element
|
||||||
|
if len(node) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
node[len(node)-1].position = pos
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// GetPositionPath returns the element in the tree indicated by 'keys'.
|
// GetPositionPath returns the element in the tree indicated by 'keys'.
|
||||||
// If keys is of length zero, the current tree is returned.
|
// If keys is of length zero, the current tree is returned.
|
||||||
func (t *Tree) GetPositionPath(keys []string) Position {
|
func (t *Tree) GetPositionPath(keys []string) Position {
|
||||||
@@ -186,6 +315,7 @@ type SetOptions struct {
|
|||||||
Comment string
|
Comment string
|
||||||
Commented bool
|
Commented bool
|
||||||
Multiline bool
|
Multiline bool
|
||||||
|
Literal bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetWithOptions is the same as Set, but allows you to provide formatting
|
// SetWithOptions is the same as Set, but allows you to provide formatting
|
||||||
@@ -211,7 +341,8 @@ func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interfac
|
|||||||
// go to most recent element
|
// go to most recent element
|
||||||
if len(node) == 0 {
|
if len(node) == 0 {
|
||||||
// create element if it does not exist
|
// create element if it does not exist
|
||||||
subtree.values[intermediateKey] = append(node, newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}))
|
node = append(node, newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}))
|
||||||
|
subtree.values[intermediateKey] = node
|
||||||
}
|
}
|
||||||
subtree = node[len(node)-1]
|
subtree = node[len(node)-1]
|
||||||
}
|
}
|
||||||
@@ -222,17 +353,25 @@ func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interfac
|
|||||||
switch v := value.(type) {
|
switch v := value.(type) {
|
||||||
case *Tree:
|
case *Tree:
|
||||||
v.comment = opts.Comment
|
v.comment = opts.Comment
|
||||||
|
v.commented = opts.Commented
|
||||||
toInsert = value
|
toInsert = value
|
||||||
case []*Tree:
|
case []*Tree:
|
||||||
|
for i := range v {
|
||||||
|
v[i].commented = opts.Commented
|
||||||
|
}
|
||||||
toInsert = value
|
toInsert = value
|
||||||
case *tomlValue:
|
case *tomlValue:
|
||||||
v.comment = opts.Comment
|
v.comment = opts.Comment
|
||||||
|
v.commented = opts.Commented
|
||||||
|
v.multiline = opts.Multiline
|
||||||
|
v.literal = opts.Literal
|
||||||
toInsert = v
|
toInsert = v
|
||||||
default:
|
default:
|
||||||
toInsert = &tomlValue{value: value,
|
toInsert = &tomlValue{value: value,
|
||||||
comment: opts.Comment,
|
comment: opts.Comment,
|
||||||
commented: opts.Commented,
|
commented: opts.Commented,
|
||||||
multiline: opts.Multiline,
|
multiline: opts.Multiline,
|
||||||
|
literal: opts.Literal,
|
||||||
position: Position{Line: subtree.position.Line + len(subtree.values) + 1, Col: subtree.position.Col}}
|
position: Position{Line: subtree.position.Line + len(subtree.values) + 1, Col: subtree.position.Col}}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -307,6 +446,7 @@ func (t *Tree) createSubTree(keys []string, pos Position) error {
|
|||||||
if !exists {
|
if !exists {
|
||||||
tree := newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
tree := newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||||
tree.position = pos
|
tree.position = pos
|
||||||
|
tree.inline = subtree.inline
|
||||||
subtree.values[intermediateKey] = tree
|
subtree.values[intermediateKey] = tree
|
||||||
nextTree = tree
|
nextTree = tree
|
||||||
}
|
}
|
||||||
@@ -331,7 +471,7 @@ func LoadBytes(b []byte) (tree *Tree, err error) {
|
|||||||
if _, ok := r.(runtime.Error); ok {
|
if _, ok := r.(runtime.Error); ok {
|
||||||
panic(r)
|
panic(r)
|
||||||
}
|
}
|
||||||
err = errors.New(r.(string))
|
err = fmt.Errorf("%s", r)
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -39,6 +40,41 @@ func TestTomlGet(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTomlGetArray(t *testing.T) {
|
||||||
|
tree, _ := Load(`
|
||||||
|
[test]
|
||||||
|
key = ["one", "two"]
|
||||||
|
key2 = [true, false, false]
|
||||||
|
key3 = [1.5,2.5]
|
||||||
|
`)
|
||||||
|
|
||||||
|
if tree.GetArray("") != tree {
|
||||||
|
t.Errorf("GetArray should return the tree itself when given an empty path")
|
||||||
|
}
|
||||||
|
|
||||||
|
expect := []string{"one", "two"}
|
||||||
|
actual := tree.GetArray("test.key").([]string)
|
||||||
|
if !reflect.DeepEqual(actual, expect) {
|
||||||
|
t.Errorf("GetArray should return the []string value")
|
||||||
|
}
|
||||||
|
|
||||||
|
expect2 := []bool{true, false, false}
|
||||||
|
actual2 := tree.GetArray("test.key2").([]bool)
|
||||||
|
if !reflect.DeepEqual(actual2, expect2) {
|
||||||
|
t.Errorf("GetArray should return the []bool value")
|
||||||
|
}
|
||||||
|
|
||||||
|
expect3 := []float64{1.5, 2.5}
|
||||||
|
actual3 := tree.GetArray("test.key3").([]float64)
|
||||||
|
if !reflect.DeepEqual(actual3, expect3) {
|
||||||
|
t.Errorf("GetArray should return the []float64 value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if tree.GetArray(`\`) != nil {
|
||||||
|
t.Errorf("should return nil when the key is malformed")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestTomlGetDefault(t *testing.T) {
|
func TestTomlGetDefault(t *testing.T) {
|
||||||
tree, _ := Load(`
|
tree, _ := Load(`
|
||||||
[test]
|
[test]
|
||||||
@@ -148,6 +184,51 @@ func TestTomlGetPath(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTomlGetArrayPath(t *testing.T) {
|
||||||
|
for idx, item := range []struct {
|
||||||
|
Name string
|
||||||
|
Path []string
|
||||||
|
Make func() (tree *Tree, expected interface{})
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
Name: "empty",
|
||||||
|
Path: []string{},
|
||||||
|
Make: func() (tree *Tree, expected interface{}) {
|
||||||
|
tree = newTree()
|
||||||
|
expected = tree
|
||||||
|
return
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "int64",
|
||||||
|
Path: []string{"a"},
|
||||||
|
Make: func() (tree *Tree, expected interface{}) {
|
||||||
|
var err error
|
||||||
|
tree, err = Load(`a = [1,2,3]`)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
expected = []int64{1, 2, 3}
|
||||||
|
return
|
||||||
|
},
|
||||||
|
},
|
||||||
|
} {
|
||||||
|
t.Run(item.Name, func(t *testing.T) {
|
||||||
|
tree, expected := item.Make()
|
||||||
|
result := tree.GetArrayPath(item.Path)
|
||||||
|
if !reflect.DeepEqual(result, expected) {
|
||||||
|
t.Errorf("GetArrayPath[%d] %v - expected %#v, got %#v instead.", idx, item.Path, expected, result)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
tree, _ := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
|
||||||
|
if tree.GetArrayPath([]string{"whatever"}) != nil {
|
||||||
|
t.Error("GetArrayPath should return nil when the key does not exist")
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
func TestTomlFromMap(t *testing.T) {
|
func TestTomlFromMap(t *testing.T) {
|
||||||
simpleMap := map[string]interface{}{"hello": 42}
|
simpleMap := map[string]interface{}{"hello": 42}
|
||||||
tree, err := TreeFromMap(simpleMap)
|
tree, err := TreeFromMap(simpleMap)
|
||||||
|
|||||||
@@ -8,8 +8,6 @@ import (
|
|||||||
"reflect"
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/davecgh/go-spew/spew"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func testgenInvalid(t *testing.T, input string) {
|
func testgenInvalid(t *testing.T, input string) {
|
||||||
@@ -56,7 +54,7 @@ func testgenValid(t *testing.T, input string, jsonRef string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if !reflect.DeepEqual(jsonExpected, jsonTest) {
|
if !reflect.DeepEqual(jsonExpected, jsonTest) {
|
||||||
t.Logf("Diff:\n%s", spew.Sdump(jsonExpected, jsonTest))
|
t.Logf("Diff:\n%#+v\n%#+v", jsonExpected, jsonTest)
|
||||||
t.Fatal("parsed TOML tree is different than expected structure")
|
t.Fatal("parsed TOML tree is different than expected structure")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,21 +5,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestInvalidArrayMixedTypesArraysAndInts(t *testing.T) {
|
|
||||||
input := `arrays-and-ints = [1, ["Arrays are not integers."]]`
|
|
||||||
testgenInvalid(t, input)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInvalidArrayMixedTypesIntsAndFloats(t *testing.T) {
|
|
||||||
input := `ints-and-floats = [1, 1.1]`
|
|
||||||
testgenInvalid(t, input)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInvalidArrayMixedTypesStringsAndInts(t *testing.T) {
|
|
||||||
input := `strings-and-ints = ["hi", 42]`
|
|
||||||
testgenInvalid(t, input)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInvalidDatetimeMalformedNoLeads(t *testing.T) {
|
func TestInvalidDatetimeMalformedNoLeads(t *testing.T) {
|
||||||
input := `no-leads = 1987-7-05T17:45:00Z`
|
input := `no-leads = 1987-7-05T17:45:00Z`
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
|
|||||||
+71
@@ -0,0 +1,71 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
// PubTOMLValue wrapping tomlValue in order to access all properties from outside.
|
||||||
|
type PubTOMLValue = tomlValue
|
||||||
|
|
||||||
|
func (ptv *PubTOMLValue) Value() interface{} {
|
||||||
|
return ptv.value
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) Comment() string {
|
||||||
|
return ptv.comment
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) Commented() bool {
|
||||||
|
return ptv.commented
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) Multiline() bool {
|
||||||
|
return ptv.multiline
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) Position() Position {
|
||||||
|
return ptv.position
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ptv *PubTOMLValue) SetValue(v interface{}) {
|
||||||
|
ptv.value = v
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) SetComment(s string) {
|
||||||
|
ptv.comment = s
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) SetCommented(c bool) {
|
||||||
|
ptv.commented = c
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) SetMultiline(m bool) {
|
||||||
|
ptv.multiline = m
|
||||||
|
}
|
||||||
|
func (ptv *PubTOMLValue) SetPosition(p Position) {
|
||||||
|
ptv.position = p
|
||||||
|
}
|
||||||
|
|
||||||
|
// PubTree wrapping Tree in order to access all properties from outside.
|
||||||
|
type PubTree = Tree
|
||||||
|
|
||||||
|
func (pt *PubTree) Values() map[string]interface{} {
|
||||||
|
return pt.values
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) Comment() string {
|
||||||
|
return pt.comment
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) Commented() bool {
|
||||||
|
return pt.commented
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) Inline() bool {
|
||||||
|
return pt.inline
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) SetValues(v map[string]interface{}) {
|
||||||
|
pt.values = v
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) SetComment(c string) {
|
||||||
|
pt.comment = c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) SetCommented(c bool) {
|
||||||
|
pt.commented = c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pt *PubTree) SetInline(i bool) {
|
||||||
|
pt.inline = i
|
||||||
|
}
|
||||||
@@ -57,6 +57,19 @@ func simpleValueCoercion(object interface{}) (interface{}, error) {
|
|||||||
return float64(original), nil
|
return float64(original), nil
|
||||||
case fmt.Stringer:
|
case fmt.Stringer:
|
||||||
return original.String(), nil
|
return original.String(), nil
|
||||||
|
case []interface{}:
|
||||||
|
value := reflect.ValueOf(original)
|
||||||
|
length := value.Len()
|
||||||
|
arrayValue := reflect.MakeSlice(value.Type(), 0, length)
|
||||||
|
for i := 0; i < length; i++ {
|
||||||
|
val := value.Index(i).Interface()
|
||||||
|
simpleValue, err := simpleValueCoercion(val)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue))
|
||||||
|
}
|
||||||
|
return arrayValue.Interface(), nil
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("cannot convert type %T to Tree", object)
|
return nil, fmt.Errorf("cannot convert type %T to Tree", object)
|
||||||
}
|
}
|
||||||
|
|||||||
+118
-1
@@ -1,6 +1,7 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"reflect"
|
||||||
"strconv"
|
"strconv"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
@@ -105,7 +106,7 @@ func TestTreeCreateToTreeInvalidTableGroupType(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestRoundTripArrayOfTables(t *testing.T) {
|
func TestRoundTripArrayOfTables(t *testing.T) {
|
||||||
orig := "\n[[stuff]]\n name = \"foo\"\n things = [\"a\",\"b\"]\n"
|
orig := "\n[[stuff]]\n name = \"foo\"\n things = [\"a\", \"b\"]\n"
|
||||||
tree, err := Load(orig)
|
tree, err := Load(orig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("unexpected error: %s", err)
|
t.Fatalf("unexpected error: %s", err)
|
||||||
@@ -124,3 +125,119 @@ func TestRoundTripArrayOfTables(t *testing.T) {
|
|||||||
t.Errorf("want:\n%s\ngot:\n%s", want, got)
|
t.Errorf("want:\n%s\ngot:\n%s", want, got)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTomlSliceOfSlice(t *testing.T) {
|
||||||
|
tree, err := Load(` hosts=[["10.1.0.107:9092","10.1.0.107:9093", "192.168.0.40:9094"] ] `)
|
||||||
|
m := tree.ToMap()
|
||||||
|
tree, err = TreeFromMap(m)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
type Struct struct {
|
||||||
|
Hosts [][]string
|
||||||
|
}
|
||||||
|
var actual Struct
|
||||||
|
tree.Unmarshal(&actual)
|
||||||
|
|
||||||
|
expected := Struct{Hosts: [][]string{[]string{"10.1.0.107:9092", "10.1.0.107:9093", "192.168.0.40:9094"}}}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(actual, expected) {
|
||||||
|
t.Errorf("Bad unmarshal: expected %+v, got %+v", expected, actual)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlSliceOfSliceOfSlice(t *testing.T) {
|
||||||
|
tree, err := Load(` hosts=[[["10.1.0.107:9092","10.1.0.107:9093", "192.168.0.40:9094"] ]] `)
|
||||||
|
m := tree.ToMap()
|
||||||
|
tree, err = TreeFromMap(m)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
type Struct struct {
|
||||||
|
Hosts [][][]string
|
||||||
|
}
|
||||||
|
var actual Struct
|
||||||
|
tree.Unmarshal(&actual)
|
||||||
|
|
||||||
|
expected := Struct{Hosts: [][][]string{[][]string{[]string{"10.1.0.107:9092", "10.1.0.107:9093", "192.168.0.40:9094"}}}}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(actual, expected) {
|
||||||
|
t.Errorf("Bad unmarshal: expected %+v, got %+v", expected, actual)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlSliceOfSliceInt(t *testing.T) {
|
||||||
|
tree, err := Load(` hosts=[[1,2,3],[4,5,6] ] `)
|
||||||
|
m := tree.ToMap()
|
||||||
|
tree, err = TreeFromMap(m)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
type Struct struct {
|
||||||
|
Hosts [][]int
|
||||||
|
}
|
||||||
|
var actual Struct
|
||||||
|
err = tree.Unmarshal(&actual)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
expected := Struct{Hosts: [][]int{[]int{1, 2, 3}, []int{4, 5, 6}}}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(actual, expected) {
|
||||||
|
t.Errorf("Bad unmarshal: expected %+v, got %+v", expected, actual)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
func TestTomlSliceOfSliceInt64(t *testing.T) {
|
||||||
|
tree, err := Load(` hosts=[[1,2,3],[4,5,6] ] `)
|
||||||
|
m := tree.ToMap()
|
||||||
|
tree, err = TreeFromMap(m)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
type Struct struct {
|
||||||
|
Hosts [][]int64
|
||||||
|
}
|
||||||
|
var actual Struct
|
||||||
|
err = tree.Unmarshal(&actual)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
expected := Struct{Hosts: [][]int64{[]int64{1, 2, 3}, []int64{4, 5, 6}}}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(actual, expected) {
|
||||||
|
t.Errorf("Bad unmarshal: expected %+v, got %+v", expected, actual)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTomlSliceOfSliceInt64FromMap(t *testing.T) {
|
||||||
|
tree, err := TreeFromMap(map[string]interface{}{"hosts": [][]interface{}{[]interface{}{int32(1), int8(2), 3}}})
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
type Struct struct {
|
||||||
|
Hosts [][]int64
|
||||||
|
}
|
||||||
|
var actual Struct
|
||||||
|
err = tree.Unmarshal(&actual)
|
||||||
|
if err != nil {
|
||||||
|
t.Error("should not error", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
expected := Struct{Hosts: [][]int64{[]int64{1, 2, 3}}}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(actual, expected) {
|
||||||
|
t.Errorf("Bad unmarshal: expected %+v, got %+v", expected, actual)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
func TestTomlSliceOfSliceError(t *testing.T) { // make Codecov happy
|
||||||
|
_, err := TreeFromMap(map[string]interface{}{"hosts": [][]interface{}{[]interface{}{1, 2, []struct{}{}}}})
|
||||||
|
expected := "cannot convert type []struct {} to Tree"
|
||||||
|
if err.Error() != expected {
|
||||||
|
t.Fatalf("unexpected error: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
+154
-36
@@ -5,6 +5,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"math"
|
"math"
|
||||||
|
"math/big"
|
||||||
"reflect"
|
"reflect"
|
||||||
"sort"
|
"sort"
|
||||||
"strconv"
|
"strconv"
|
||||||
@@ -27,23 +28,35 @@ type sortNode struct {
|
|||||||
// Encodes a string to a TOML-compliant multi-line string value
|
// Encodes a string to a TOML-compliant multi-line string value
|
||||||
// This function is a clone of the existing encodeTomlString function, except that whitespace characters
|
// This function is a clone of the existing encodeTomlString function, except that whitespace characters
|
||||||
// are preserved. Quotation marks and backslashes are also not escaped.
|
// are preserved. Quotation marks and backslashes are also not escaped.
|
||||||
func encodeMultilineTomlString(value string) string {
|
func encodeMultilineTomlString(value string, commented string) string {
|
||||||
var b bytes.Buffer
|
var b bytes.Buffer
|
||||||
|
adjacentQuoteCount := 0
|
||||||
|
|
||||||
for _, rr := range value {
|
b.WriteString(commented)
|
||||||
|
for i, rr := range value {
|
||||||
|
if rr != '"' {
|
||||||
|
adjacentQuoteCount = 0
|
||||||
|
} else {
|
||||||
|
adjacentQuoteCount++
|
||||||
|
}
|
||||||
switch rr {
|
switch rr {
|
||||||
case '\b':
|
case '\b':
|
||||||
b.WriteString(`\b`)
|
b.WriteString(`\b`)
|
||||||
case '\t':
|
case '\t':
|
||||||
b.WriteString("\t")
|
b.WriteString("\t")
|
||||||
case '\n':
|
case '\n':
|
||||||
b.WriteString("\n")
|
b.WriteString("\n" + commented)
|
||||||
case '\f':
|
case '\f':
|
||||||
b.WriteString(`\f`)
|
b.WriteString(`\f`)
|
||||||
case '\r':
|
case '\r':
|
||||||
b.WriteString("\r")
|
b.WriteString("\r")
|
||||||
case '"':
|
case '"':
|
||||||
|
if adjacentQuoteCount >= 3 || i == len(value)-1 {
|
||||||
|
adjacentQuoteCount = 0
|
||||||
|
b.WriteString(`\"`)
|
||||||
|
} else {
|
||||||
b.WriteString(`"`)
|
b.WriteString(`"`)
|
||||||
|
}
|
||||||
case '\\':
|
case '\\':
|
||||||
b.WriteString(`\`)
|
b.WriteString(`\`)
|
||||||
default:
|
default:
|
||||||
@@ -90,7 +103,30 @@ func encodeTomlString(value string) string {
|
|||||||
return b.String()
|
return b.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElementPerLine bool) (string, error) {
|
func tomlTreeStringRepresentation(t *Tree, ord MarshalOrder) (string, error) {
|
||||||
|
var orderedVals []sortNode
|
||||||
|
switch ord {
|
||||||
|
case OrderPreserve:
|
||||||
|
orderedVals = sortByLines(t)
|
||||||
|
default:
|
||||||
|
orderedVals = sortAlphabetical(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
var values []string
|
||||||
|
for _, node := range orderedVals {
|
||||||
|
k := node.key
|
||||||
|
v := t.values[k]
|
||||||
|
|
||||||
|
repr, err := tomlValueStringRepresentation(v, "", "", ord, false)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
values = append(values, quoteKeyIfNeeded(k)+" = "+repr)
|
||||||
|
}
|
||||||
|
return "{ " + strings.Join(values, ", ") + " }", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func tomlValueStringRepresentation(v interface{}, commented string, indent string, ord MarshalOrder, arraysOneElementPerLine bool) (string, error) {
|
||||||
// this interface check is added to dereference the change made in the writeTo function.
|
// this interface check is added to dereference the change made in the writeTo function.
|
||||||
// That change was made to allow this function to see formatting options.
|
// That change was made to allow this function to see formatting options.
|
||||||
tv, ok := v.(*tomlValue)
|
tv, ok := v.(*tomlValue)
|
||||||
@@ -106,20 +142,36 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
|
|||||||
case int64:
|
case int64:
|
||||||
return strconv.FormatInt(value, 10), nil
|
return strconv.FormatInt(value, 10), nil
|
||||||
case float64:
|
case float64:
|
||||||
// Ensure a round float does contain a decimal point. Otherwise feeding
|
// Default bit length is full 64
|
||||||
// the output back to the parser would convert to an integer.
|
bits := 64
|
||||||
if math.Trunc(value) == value {
|
// Float panics if nan is used
|
||||||
return strings.ToLower(strconv.FormatFloat(value, 'f', 1, 32)), nil
|
if !math.IsNaN(value) {
|
||||||
|
// if 32 bit accuracy is enough to exactly show, use 32
|
||||||
|
_, acc := big.NewFloat(value).Float32()
|
||||||
|
if acc == big.Exact {
|
||||||
|
bits = 32
|
||||||
}
|
}
|
||||||
return strings.ToLower(strconv.FormatFloat(value, 'f', -1, 32)), nil
|
}
|
||||||
|
if math.Trunc(value) == value {
|
||||||
|
return strings.ToLower(strconv.FormatFloat(value, 'f', 1, bits)), nil
|
||||||
|
}
|
||||||
|
return strings.ToLower(strconv.FormatFloat(value, 'f', -1, bits)), nil
|
||||||
case string:
|
case string:
|
||||||
if tv.multiline {
|
if tv.multiline {
|
||||||
return "\"\"\"\n" + encodeMultilineTomlString(value) + "\"\"\"", nil
|
if tv.literal {
|
||||||
|
b := strings.Builder{}
|
||||||
|
b.WriteString("'''\n")
|
||||||
|
b.Write([]byte(value))
|
||||||
|
b.WriteString("\n'''")
|
||||||
|
return b.String(), nil
|
||||||
|
} else {
|
||||||
|
return "\"\"\"\n" + encodeMultilineTomlString(value, commented) + "\"\"\"", nil
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return "\"" + encodeTomlString(value) + "\"", nil
|
return "\"" + encodeTomlString(value) + "\"", nil
|
||||||
case []byte:
|
case []byte:
|
||||||
b, _ := v.([]byte)
|
b, _ := v.([]byte)
|
||||||
return tomlValueStringRepresentation(string(b), indent, arraysOneElementPerLine)
|
return string(b), nil
|
||||||
case bool:
|
case bool:
|
||||||
if value {
|
if value {
|
||||||
return "true", nil
|
return "true", nil
|
||||||
@@ -127,6 +179,14 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
|
|||||||
return "false", nil
|
return "false", nil
|
||||||
case time.Time:
|
case time.Time:
|
||||||
return value.Format(time.RFC3339), nil
|
return value.Format(time.RFC3339), nil
|
||||||
|
case LocalDate:
|
||||||
|
return value.String(), nil
|
||||||
|
case LocalDateTime:
|
||||||
|
return value.String(), nil
|
||||||
|
case LocalTime:
|
||||||
|
return value.String(), nil
|
||||||
|
case *Tree:
|
||||||
|
return tomlTreeStringRepresentation(value, ord)
|
||||||
case nil:
|
case nil:
|
||||||
return "", nil
|
return "", nil
|
||||||
}
|
}
|
||||||
@@ -137,7 +197,7 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
|
|||||||
var values []string
|
var values []string
|
||||||
for i := 0; i < rv.Len(); i++ {
|
for i := 0; i < rv.Len(); i++ {
|
||||||
item := rv.Index(i).Interface()
|
item := rv.Index(i).Interface()
|
||||||
itemRepr, err := tomlValueStringRepresentation(item, indent, arraysOneElementPerLine)
|
itemRepr, err := tomlValueStringRepresentation(item, commented, indent, ord, arraysOneElementPerLine)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
@@ -151,22 +211,24 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
|
|||||||
|
|
||||||
for _, value := range values {
|
for _, value := range values {
|
||||||
stringBuffer.WriteString(valueIndent)
|
stringBuffer.WriteString(valueIndent)
|
||||||
stringBuffer.WriteString(value)
|
stringBuffer.WriteString(commented + value)
|
||||||
stringBuffer.WriteString(`,`)
|
stringBuffer.WriteString(`,`)
|
||||||
stringBuffer.WriteString("\n")
|
stringBuffer.WriteString("\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
stringBuffer.WriteString(indent + "]")
|
stringBuffer.WriteString(indent + commented + "]")
|
||||||
|
|
||||||
return stringBuffer.String(), nil
|
return stringBuffer.String(), nil
|
||||||
}
|
}
|
||||||
return "[" + strings.Join(values, ",") + "]", nil
|
return "[" + strings.Join(values, ", ") + "]", nil
|
||||||
}
|
}
|
||||||
return "", fmt.Errorf("unsupported value type %T: %v", v, v)
|
return "", fmt.Errorf("unsupported value type %T: %v", v, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func getTreeArrayLine(trees []*Tree) (line int) {
|
func getTreeArrayLine(trees []*Tree) (line int) {
|
||||||
// get lowest line number that is not 0
|
// Prevent returning 0 for empty trees
|
||||||
|
line = int(^uint(0) >> 1)
|
||||||
|
// get lowest line number >= 0
|
||||||
for _, tv := range trees {
|
for _, tv := range trees {
|
||||||
if tv.position.Line < line || line == 0 {
|
if tv.position.Line < line || line == 0 {
|
||||||
line = tv.position.Line
|
line = tv.position.Line
|
||||||
@@ -255,10 +317,10 @@ func sortAlphabetical(t *Tree) (vals []sortNode) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
|
func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
|
||||||
return t.writeToOrdered(w, indent, keyspace, bytesCount, arraysOneElementPerLine, OrderAlphabetical)
|
return t.writeToOrdered(w, indent, keyspace, bytesCount, arraysOneElementPerLine, OrderAlphabetical, " ", false, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool, ord marshalOrder) (int64, error) {
|
func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool, ord MarshalOrder, indentString string, compactComments, parentCommented bool) (int64, error) {
|
||||||
var orderedVals []sortNode
|
var orderedVals []sortNode
|
||||||
|
|
||||||
switch ord {
|
switch ord {
|
||||||
@@ -274,14 +336,10 @@ func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount i
|
|||||||
k := node.key
|
k := node.key
|
||||||
v := t.values[k]
|
v := t.values[k]
|
||||||
|
|
||||||
combinedKey := k
|
combinedKey := quoteKeyIfNeeded(k)
|
||||||
if keyspace != "" {
|
if keyspace != "" {
|
||||||
combinedKey = keyspace + "." + combinedKey
|
combinedKey = keyspace + "." + combinedKey
|
||||||
}
|
}
|
||||||
var commented string
|
|
||||||
if t.commented {
|
|
||||||
commented = "# "
|
|
||||||
}
|
|
||||||
|
|
||||||
switch node := v.(type) {
|
switch node := v.(type) {
|
||||||
// node has to be of those two types given how keys are sorted above
|
// node has to be of those two types given how keys are sorted above
|
||||||
@@ -302,24 +360,33 @@ func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount i
|
|||||||
return bytesCount, errc
|
return bytesCount, errc
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var commented string
|
||||||
|
if parentCommented || t.commented || tv.commented {
|
||||||
|
commented = "# "
|
||||||
|
}
|
||||||
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")
|
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")
|
||||||
bytesCount += int64(writtenBytesCount)
|
bytesCount += int64(writtenBytesCount)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return bytesCount, err
|
return bytesCount, err
|
||||||
}
|
}
|
||||||
bytesCount, err = node.writeToOrdered(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine, ord)
|
bytesCount, err = node.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, compactComments, parentCommented || t.commented || tv.commented)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return bytesCount, err
|
return bytesCount, err
|
||||||
}
|
}
|
||||||
case []*Tree:
|
case []*Tree:
|
||||||
for _, subTree := range node {
|
for _, subTree := range node {
|
||||||
|
var commented string
|
||||||
|
if parentCommented || t.commented || subTree.commented {
|
||||||
|
commented = "# "
|
||||||
|
}
|
||||||
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
|
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
|
||||||
bytesCount += int64(writtenBytesCount)
|
bytesCount += int64(writtenBytesCount)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return bytesCount, err
|
return bytesCount, err
|
||||||
}
|
}
|
||||||
|
|
||||||
bytesCount, err = subTree.writeToOrdered(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine, ord)
|
bytesCount, err = subTree.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, compactComments, parentCommented || t.commented || subTree.commented)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return bytesCount, err
|
return bytesCount, err
|
||||||
}
|
}
|
||||||
@@ -332,7 +399,11 @@ func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount i
|
|||||||
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
|
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
|
||||||
}
|
}
|
||||||
|
|
||||||
repr, err := tomlValueStringRepresentation(v, indent, arraysOneElementPerLine)
|
var commented string
|
||||||
|
if parentCommented || t.commented || v.commented {
|
||||||
|
commented = "# "
|
||||||
|
}
|
||||||
|
repr, err := tomlValueStringRepresentation(v, commented, indent, ord, arraysOneElementPerLine)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return bytesCount, err
|
return bytesCount, err
|
||||||
}
|
}
|
||||||
@@ -343,18 +414,22 @@ func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount i
|
|||||||
if strings.HasPrefix(comment, "#") {
|
if strings.HasPrefix(comment, "#") {
|
||||||
start = ""
|
start = ""
|
||||||
}
|
}
|
||||||
writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n")
|
if !compactComments {
|
||||||
|
writtenBytesCountComment, errc := writeStrings(w, "\n")
|
||||||
|
bytesCount += int64(writtenBytesCountComment)
|
||||||
|
if errc != nil {
|
||||||
|
return bytesCount, errc
|
||||||
|
}
|
||||||
|
}
|
||||||
|
writtenBytesCountComment, errc := writeStrings(w, indent, start, comment, "\n")
|
||||||
bytesCount += int64(writtenBytesCountComment)
|
bytesCount += int64(writtenBytesCountComment)
|
||||||
if errc != nil {
|
if errc != nil {
|
||||||
return bytesCount, errc
|
return bytesCount, errc
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var commented string
|
quotedKey := quoteKeyIfNeeded(k)
|
||||||
if v.commented {
|
writtenBytesCount, err := writeStrings(w, indent, commented, quotedKey, " = ", repr, "\n")
|
||||||
commented = "# "
|
|
||||||
}
|
|
||||||
writtenBytesCount, err := writeStrings(w, indent, commented, k, " = ", repr, "\n")
|
|
||||||
bytesCount += int64(writtenBytesCount)
|
bytesCount += int64(writtenBytesCount)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return bytesCount, err
|
return bytesCount, err
|
||||||
@@ -365,6 +440,32 @@ func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount i
|
|||||||
return bytesCount, nil
|
return bytesCount, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// quote a key if it does not fit the bare key format (A-Za-z0-9_-)
|
||||||
|
// quoted keys use the same rules as strings
|
||||||
|
func quoteKeyIfNeeded(k string) string {
|
||||||
|
// when encoding a map with the 'quoteMapKeys' option enabled, the tree will contain
|
||||||
|
// keys that have already been quoted.
|
||||||
|
// not an ideal situation, but good enough of a stop gap.
|
||||||
|
if len(k) >= 2 && k[0] == '"' && k[len(k)-1] == '"' {
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
isBare := true
|
||||||
|
for _, r := range k {
|
||||||
|
if !isValidBareChar(r) {
|
||||||
|
isBare = false
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isBare {
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
return quoteKey(k)
|
||||||
|
}
|
||||||
|
|
||||||
|
func quoteKey(k string) string {
|
||||||
|
return "\"" + encodeTomlString(k) + "\""
|
||||||
|
}
|
||||||
|
|
||||||
func writeStrings(w io.Writer, s ...string) (int, error) {
|
func writeStrings(w io.Writer, s ...string) (int, error) {
|
||||||
var n int
|
var n int
|
||||||
for i := range s {
|
for i := range s {
|
||||||
@@ -387,12 +488,11 @@ func (t *Tree) WriteTo(w io.Writer) (int64, error) {
|
|||||||
// Output spans multiple lines, and is suitable for ingest by a TOML parser.
|
// Output spans multiple lines, and is suitable for ingest by a TOML parser.
|
||||||
// If the conversion cannot be performed, ToString returns a non-nil error.
|
// If the conversion cannot be performed, ToString returns a non-nil error.
|
||||||
func (t *Tree) ToTomlString() (string, error) {
|
func (t *Tree) ToTomlString() (string, error) {
|
||||||
var buf bytes.Buffer
|
b, err := t.Marshal()
|
||||||
_, err := t.WriteTo(&buf)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return buf.String(), nil
|
return string(b), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// String generates a human-readable representation of the current tree.
|
// String generates a human-readable representation of the current tree.
|
||||||
@@ -427,8 +527,26 @@ func (t *Tree) ToMap() map[string]interface{} {
|
|||||||
case *Tree:
|
case *Tree:
|
||||||
result[k] = node.ToMap()
|
result[k] = node.ToMap()
|
||||||
case *tomlValue:
|
case *tomlValue:
|
||||||
result[k] = node.value
|
result[k] = tomlValueToGo(node.value)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func tomlValueToGo(v interface{}) interface{} {
|
||||||
|
if tree, ok := v.(*Tree); ok {
|
||||||
|
return tree.ToMap()
|
||||||
|
}
|
||||||
|
|
||||||
|
rv := reflect.ValueOf(v)
|
||||||
|
|
||||||
|
if rv.Kind() != reflect.Slice {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
values := make([]interface{}, rv.Len())
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
item := rv.Index(i).Interface()
|
||||||
|
values[i] = tomlValueToGo(item)
|
||||||
|
}
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|||||||
@@ -236,6 +236,7 @@ func TestTreeWriteToMapExampleFile(t *testing.T) {
|
|||||||
[]interface{}{"gamma", "delta"},
|
[]interface{}{"gamma", "delta"},
|
||||||
[]interface{}{int64(1), int64(2)},
|
[]interface{}{int64(1), int64(2)},
|
||||||
},
|
},
|
||||||
|
"score": 4e-08,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
testMaps(t, tree.ToMap(), expected)
|
testMaps(t, tree.ToMap(), expected)
|
||||||
@@ -294,6 +295,42 @@ func TestTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
|
|||||||
testMaps(t, treeMap, expected)
|
testMaps(t, treeMap, expected)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTreeWriteToMapWithTableInMixedArray(t *testing.T) {
|
||||||
|
tree, _ := Load(`a = [
|
||||||
|
"foo",
|
||||||
|
[
|
||||||
|
"bar",
|
||||||
|
{baz = "quux"},
|
||||||
|
],
|
||||||
|
[
|
||||||
|
{a = "b"},
|
||||||
|
{c = "d"},
|
||||||
|
],
|
||||||
|
]`)
|
||||||
|
expected := map[string]interface{}{
|
||||||
|
"a": []interface{}{
|
||||||
|
"foo",
|
||||||
|
[]interface{}{
|
||||||
|
"bar",
|
||||||
|
map[string]interface{}{
|
||||||
|
"baz": "quux",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
[]interface{}{
|
||||||
|
map[string]interface{}{
|
||||||
|
"a": "b",
|
||||||
|
},
|
||||||
|
map[string]interface{}{
|
||||||
|
"c": "d",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
treeMap := tree.ToMap()
|
||||||
|
|
||||||
|
testMaps(t, treeMap, expected)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTreeWriteToFloat(t *testing.T) {
|
func TestTreeWriteToFloat(t *testing.T) {
|
||||||
tree, err := Load(`a = 3.0`)
|
tree, err := Load(`a = 3.0`)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -327,6 +364,79 @@ c = nan`
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestOrderedEmptyTrees(t *testing.T) {
|
||||||
|
type val struct {
|
||||||
|
Key string `toml:"key"`
|
||||||
|
}
|
||||||
|
type structure struct {
|
||||||
|
First val `toml:"first"`
|
||||||
|
Empty []val `toml:"empty"`
|
||||||
|
}
|
||||||
|
input := structure{First: val{Key: "value"}}
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
err := NewEncoder(buf).Order(OrderPreserve).Encode(input)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("failed to encode input")
|
||||||
|
}
|
||||||
|
expected := `
|
||||||
|
[first]
|
||||||
|
key = "value"
|
||||||
|
`
|
||||||
|
if expected != buf.String() {
|
||||||
|
t.Fatal("expected and encoded body aren't equal: ", expected, buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOrderedNonIncreasedLine(t *testing.T) {
|
||||||
|
type NiceMap map[string]string
|
||||||
|
type Manifest struct {
|
||||||
|
NiceMap `toml:"dependencies"`
|
||||||
|
Build struct {
|
||||||
|
BuildCommand string `toml:"build-command"`
|
||||||
|
} `toml:"build"`
|
||||||
|
}
|
||||||
|
|
||||||
|
test := &Manifest{}
|
||||||
|
test.Build.BuildCommand = "test"
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
if err := NewEncoder(buf).Order(OrderPreserve).Encode(test); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
expected := `
|
||||||
|
[dependencies]
|
||||||
|
|
||||||
|
[build]
|
||||||
|
build-command = "test"
|
||||||
|
`
|
||||||
|
if expected != buf.String() {
|
||||||
|
t.Fatal("expected and encoded body aren't equal: ", expected, buf.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue290(t *testing.T) {
|
||||||
|
tomlString :=
|
||||||
|
`[table]
|
||||||
|
"127.0.0.1" = "value"
|
||||||
|
"127.0.0.1:8028" = "value"
|
||||||
|
"character encoding" = "value"
|
||||||
|
"ʎǝʞ" = "value"`
|
||||||
|
|
||||||
|
t1, err := Load(tomlString)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("load err:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s, err := t1.ToTomlString()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("ToTomlString err:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = Load(s)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("reload err:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func BenchmarkTreeToTomlString(b *testing.B) {
|
func BenchmarkTreeToTomlString(b *testing.B) {
|
||||||
toml, err := Load(sampleHard)
|
toml, err := Load(sampleHard)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
package toml
|
||||||
|
|
||||||
|
// ValueStringRepresentation transforms an interface{} value into its toml string representation.
|
||||||
|
func ValueStringRepresentation(v interface{}, commented string, indent string, ord MarshalOrder, arraysOneElementPerLine bool) (string, error) {
|
||||||
|
return tomlValueStringRepresentation(v, commented, indent, ord, arraysOneElementPerLine)
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user