Compare commits

..

2 Commits

Author SHA1 Message Date
Thomas Pelletier 8f6d0d8be7 Specialize map[string]iface when unmarshaling kvs
name                               old time/op    new time/op    delta
UnmarshalDataset/config-8            12.3ms ± 0%    10.9ms ± 0%  -11.36%  (p=0.008 n=5+5)
UnmarshalDataset/canada-8            55.2ms ± 0%    55.4ms ± 1%     ~     (p=0.690 n=5+5)
UnmarshalDataset/citm_catalog-8      16.5ms ± 1%    15.7ms ± 0%   -4.43%  (p=0.008 n=5+5)
UnmarshalDataset/twitter-8           7.01ms ± 1%    6.63ms ± 0%   -5.34%  (p=0.008 n=5+5)
UnmarshalDataset/code-8              52.0ms ± 0%    48.7ms ± 2%   -6.43%  (p=0.008 n=5+5)
UnmarshalDataset/example-8            119µs ± 0%     110µs ± 3%   -6.81%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8     432ns ± 1%     431ns ± 1%     ~     (p=0.222 n=5+5)
Unmarshal/SimpleDocument/map-8        573ns ± 1%     540ns ± 1%   -5.64%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-8     33.7µs ± 0%    33.6µs ± 0%     ~     (p=0.310 n=5+5)
Unmarshal/ReferenceFile/map-8        44.2µs ± 1%    41.7µs ± 1%   -5.63%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-8          7.28µs ± 1%    6.74µs ± 1%   -7.42%  (p=0.008 n=5+5)
[Geo mean]                            292µs          277µs        -4.91%

name                               old speed      new speed      delta
UnmarshalDataset/config-8          85.2MB/s ± 0%  96.1MB/s ± 0%  +12.82%  (p=0.008 n=5+5)
UnmarshalDataset/canada-8          39.9MB/s ± 0%  39.8MB/s ± 1%     ~     (p=0.690 n=5+5)
UnmarshalDataset/citm_catalog-8    33.9MB/s ± 1%  35.5MB/s ± 0%   +4.64%  (p=0.008 n=5+5)
UnmarshalDataset/twitter-8         63.1MB/s ± 1%  66.6MB/s ± 0%   +5.65%  (p=0.008 n=5+5)
UnmarshalDataset/code-8            51.6MB/s ± 0%  55.1MB/s ± 2%   +6.88%  (p=0.008 n=5+5)
UnmarshalDataset/example-8         68.3MB/s ± 0%  73.4MB/s ± 3%   +7.34%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8  25.4MB/s ± 1%  25.5MB/s ± 1%     ~     (p=0.246 n=5+5)
Unmarshal/SimpleDocument/map-8     19.2MB/s ± 1%  20.4MB/s ± 1%   +5.99%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-8    156MB/s ± 0%   156MB/s ± 0%     ~     (p=0.310 n=5+5)
Unmarshal/ReferenceFile/map-8       119MB/s ± 1%   126MB/s ± 1%   +5.97%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-8        75.0MB/s ± 1%  81.0MB/s ± 1%   +8.01%  (p=0.008 n=5+5)
[Geo mean]                         56.1MB/s       59.0MB/s        +5.17%

name                               old alloc/op   new alloc/op   delta
UnmarshalDataset/config-8            5.26MB ± 0%    4.75MB ± 0%   -9.66%  (p=0.008 n=5+5)
UnmarshalDataset/canada-8            83.0MB ± 0%    83.0MB ± 0%   -0.00%  (p=0.008 n=5+5)
UnmarshalDataset/citm_catalog-8      34.7MB ± 0%    34.3MB ± 0%   -1.13%  (p=0.008 n=5+5)
UnmarshalDataset/twitter-8           12.7MB ± 0%    12.5MB ± 0%   -1.44%  (p=0.008 n=5+5)
UnmarshalDataset/code-8              15.3MB ± 0%    13.9MB ± 0%   -9.27%  (p=0.008 n=5+5)
UnmarshalDataset/example-8            186kB ± 0%     182kB ± 0%   -2.20%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8      805B ± 0%      805B ± 0%     ~     (all equal)
Unmarshal/SimpleDocument/map-8       1.13kB ± 0%    1.12kB ± 0%   -1.41%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-8     20.9kB ± 0%    20.9kB ± 0%     ~     (all equal)
Unmarshal/ReferenceFile/map-8        36.4kB ± 0%    35.4kB ± 0%     ~     (p=0.079 n=4+5)
Unmarshal/HugoFrontMatter-8          7.20kB ± 0%    6.98kB ± 0%   -3.11%  (p=0.008 n=5+5)
[Geo mean]                            312kB          303kB        -2.86%

name                               old allocs/op  new allocs/op  delta
UnmarshalDataset/config-8              189k ± 0%      157k ± 0%  -16.80%  (p=0.029 n=4+4)
UnmarshalDataset/canada-8              782k ± 0%      782k ± 0%   -0.00%  (p=0.008 n=5+5)
UnmarshalDataset/citm_catalog-8        191k ± 0%      167k ± 0%  -12.75%  (p=0.000 n=4+5)
UnmarshalDataset/twitter-8            56.9k ± 0%     45.5k ± 0%  -20.02%  (p=0.016 n=5+4)
UnmarshalDataset/code-8                626k ± 0%      537k ± 0%  -14.22%  (p=0.008 n=5+5)
UnmarshalDataset/example-8            1.36k ± 0%     1.11k ± 0%  -18.53%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8      9.00 ± 0%      9.00 ± 0%     ~     (all equal)
Unmarshal/SimpleDocument/map-8         13.0 ± 0%      12.0 ± 0%   -7.69%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-8        183 ± 0%       183 ± 0%     ~     (all equal)
Unmarshal/ReferenceFile/map-8           526 ± 0%       466 ± 0%  -11.41%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-8             126 ± 0%       112 ± 0%  -11.11%  (p=0.008 n=5+5)
[Geo mean]                            3.73k          3.34k       -10.51%
2022-04-15 17:41:18 -04:00
Thomas Pelletier 3c4b709fed Specialize navigating map[string]interface{}
This is a common type. Specializing it to reduce the use of reflection
yields better performance.

Similar to https://github.com/pelletier/go-toml/pull/669, there is a lot
to explore there.

name                               old time/op    new time/op    delta
UnmarshalDataset/config-8            13.3ms ± 0%    12.3ms ± 0%   -7.45%  (p=0.008 n=5+5)
UnmarshalDataset/canada-8            55.7ms ± 0%    55.2ms ± 0%   -0.88%  (p=0.008 n=5+5)
UnmarshalDataset/citm_catalog-8      16.6ms ± 1%    16.5ms ± 1%   -0.97%  (p=0.008 n=5+5)
UnmarshalDataset/twitter-8           7.10ms ± 1%    7.01ms ± 1%   -1.28%  (p=0.016 n=5+5)
UnmarshalDataset/code-8              63.8ms ± 0%    52.0ms ± 0%  -18.45%  (p=0.008 n=5+5)
UnmarshalDataset/example-8            121µs ± 0%     119µs ± 0%   -2.13%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8     438ns ± 1%     432ns ± 1%   -1.40%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/map-8        594ns ± 2%     573ns ± 1%   -3.56%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-8     34.3µs ± 1%    33.7µs ± 0%   -1.95%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/map-8        48.6µs ± 0%    44.2µs ± 1%   -9.22%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-8          7.88µs ± 1%    7.28µs ± 1%   -7.66%  (p=0.008 n=5+5)

name                               old speed      new speed      delta
UnmarshalDataset/config-8          78.9MB/s ± 0%  85.2MB/s ± 0%   +8.05%  (p=0.008 n=5+5)
UnmarshalDataset/canada-8          39.5MB/s ± 0%  39.9MB/s ± 0%   +0.89%  (p=0.008 n=5+5)
UnmarshalDataset/citm_catalog-8    33.6MB/s ± 1%  33.9MB/s ± 1%   +0.98%  (p=0.008 n=5+5)
UnmarshalDataset/twitter-8         62.3MB/s ± 1%  63.1MB/s ± 1%   +1.30%  (p=0.016 n=5+5)
UnmarshalDataset/code-8            42.1MB/s ± 0%  51.6MB/s ± 0%  +22.62%  (p=0.008 n=5+5)
UnmarshalDataset/example-8         66.9MB/s ± 0%  68.3MB/s ± 0%   +2.18%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8  25.1MB/s ± 1%  25.4MB/s ± 1%   +1.43%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/map-8     18.5MB/s ± 2%  19.2MB/s ± 1%   +3.70%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-8    153MB/s ± 1%   156MB/s ± 0%   +1.99%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/map-8       108MB/s ± 0%   119MB/s ± 1%  +10.16%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-8        69.3MB/s ± 1%  75.0MB/s ± 1%   +8.30%  (p=0.008 n=5+5)

name                               old alloc/op   new alloc/op   delta
UnmarshalDataset/config-8            5.86MB ± 0%    5.26MB ± 0%  -10.36%  (p=0.008 n=5+5)
UnmarshalDataset/canada-8            83.0MB ± 0%    83.0MB ± 0%   -0.00%  (p=0.008 n=5+5)
UnmarshalDataset/citm_catalog-8      34.7MB ± 0%    34.7MB ± 0%   -0.04%  (p=0.008 n=5+5)
UnmarshalDataset/twitter-8           12.7MB ± 0%    12.7MB ± 0%     ~     (p=0.548 n=5+5)
UnmarshalDataset/code-8              22.2MB ± 0%    15.3MB ± 0%  -30.76%  (p=0.008 n=5+5)
UnmarshalDataset/example-8            186kB ± 0%     186kB ± 0%   -0.04%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8      805B ± 0%      805B ± 0%     ~     (all equal)
Unmarshal/SimpleDocument/map-8       1.13kB ± 0%    1.13kB ± 0%     ~     (all equal)
Unmarshal/ReferenceFile/struct-8     20.9kB ± 0%    20.9kB ± 0%     ~     (all equal)
Unmarshal/ReferenceFile/map-8        38.2kB ± 0%    36.4kB ± 0%   -4.86%  (p=0.029 n=4+4)
Unmarshal/HugoFrontMatter-8          7.44kB ± 0%    7.20kB ± 0%   -3.23%  (p=0.008 n=5+5)

name                               old allocs/op  new allocs/op  delta
UnmarshalDataset/config-8              227k ± 0%      189k ± 0%  -16.74%  (p=0.029 n=4+4)
UnmarshalDataset/canada-8              782k ± 0%      782k ± 0%   -0.00%  (p=0.008 n=5+5)
UnmarshalDataset/citm_catalog-8        192k ± 0%      191k ± 0%   -0.49%  (p=0.000 n=5+4)
UnmarshalDataset/twitter-8            56.9k ± 0%     56.9k ± 0%   -0.00%  (p=0.032 n=5+5)
UnmarshalDataset/code-8               1.05M ± 0%     0.63M ± 0%  -40.52%  (p=0.008 n=5+5)
UnmarshalDataset/example-8            1.36k ± 0%     1.36k ± 0%   -0.15%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-8      9.00 ± 0%      9.00 ± 0%     ~     (all equal)
Unmarshal/SimpleDocument/map-8         13.0 ± 0%      13.0 ± 0%     ~     (all equal)
Unmarshal/ReferenceFile/struct-8        183 ± 0%       183 ± 0%     ~     (all equal)
Unmarshal/ReferenceFile/map-8           642 ± 0%       526 ± 0%  -18.07%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-8             141 ± 0%       126 ± 0%  -10.64%  (p=0.008 n=5+5)
2022-04-15 17:20:24 -04:00
27 changed files with 370 additions and 733 deletions
+4 -4
View File
@@ -35,11 +35,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -50,7 +50,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v1
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -64,4 +64,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1
+2 -2
View File
@@ -9,12 +9,12 @@ jobs:
runs-on: "ubuntu-latest"
name: report
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@master
with:
fetch-depth: 0
- name: Setup go
uses: actions/setup-go@master
with:
go-version: 1.19
go-version: 1.18
- name: Run tests with coverage
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
+4 -4
View File
@@ -16,21 +16,21 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: 1.19
go-version: 1.18
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v3
uses: goreleaser/goreleaser-action@v2
with:
distribution: goreleaser
version: latest
+2 -2
View File
@@ -12,11 +12,11 @@ jobs:
strategy:
matrix:
os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest']
go: [ '1.18', '1.19' ]
go: [ '1.17', '1.18' ]
runs-on: ${{ matrix.os }}
name: ${{ matrix.go }}/${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@master
with:
fetch-depth: 0
- name: Setup go ${{ matrix.go }}
-12
View File
@@ -16,11 +16,7 @@ builds:
mod_timestamp: '{{ .CommitTimestamp }}'
targets:
- linux_amd64
- linux_arm64
- linux_arm
- windows_amd64
- windows_arm64
- windows_arm
- darwin_amd64
- darwin_arm64
- id: tomljson
@@ -35,11 +31,7 @@ builds:
mod_timestamp: '{{ .CommitTimestamp }}'
targets:
- linux_amd64
- linux_arm64
- linux_arm
- windows_amd64
- windows_arm64
- windows_arm
- darwin_amd64
- darwin_arm64
- id: jsontoml
@@ -54,11 +46,7 @@ builds:
mod_timestamp: '{{ .CommitTimestamp }}'
targets:
- linux_amd64
- linux_arm64
- linux_arm
- windows_amd64
- windows_arm64
- windows_arm
- darwin_amd64
- darwin_arm64
universal_binaries:
+1 -1
View File
@@ -1,6 +1,6 @@
The MIT License (MIT)
Copyright (c) 2013 - 2022 Thomas Pelletier, Eric Anderton
Copyright (c) 2013 - 2021 Thomas Pelletier, Eric Anderton
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
+12 -8
View File
@@ -4,6 +4,17 @@ Go library for the [TOML](https://toml.io/en/) format.
This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0).
## Development status
This is the upcoming major version of go-toml. It is currently in active
development. As of release v2.0.0-beta.1, the library has reached feature parity
with v1, and fixes a lot of known bugs and performance issues along the way.
If you do not need the advanced document editing features of v1, you are
encouraged to try out this version.
[👉 Roadmap for v2](https://github.com/pelletier/go-toml/discussions/506)
[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues)
[💬 Anything else](https://github.com/pelletier/go-toml/discussions)
@@ -38,7 +49,7 @@ operations should not be shockingly slow. See [benchmarks](#benchmarks).
### Strict mode
`Decoder` can be set to "strict mode", which makes it error when some parts of
the TOML document was not present in the target structure. This is a great way
the TOML document was not prevent in the target structure. This is a great way
to check for typos. [See example in the documentation][strict].
[strict]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Decoder.DisallowUnknownFields
@@ -540,13 +551,6 @@ complete solutions exist out there.
[query]: https://github.com/pelletier/go-toml/tree/f99d6bbca119636aeafcf351ee52b3d202782627/query
[dasel]: https://github.com/TomWright/dasel
## Versioning
Go-toml follows [Semantic Versioning](http://semver.org/). The supported version
of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
this document. The last two major versions of Go are supported
(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
## License
The MIT License (MIT). Read [LICENSE](LICENSE).
+5 -5
View File
@@ -1,20 +1,20 @@
// Package jsontoml is a program that converts JSON to TOML.
//
// # Usage
// Usage
//
// Reading from stdin:
//
// cat file.json | jsontoml > file.toml
// cat file.json | jsontoml > file.toml
//
// Reading from a file:
//
// jsontoml file.json > file.toml
// jsontoml file.json > file.toml
//
// # Installation
// Installation
//
// Using Go:
//
// go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
// go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
package main
import (
+1
View File
@@ -26,6 +26,7 @@ func TestConvert(t *testing.T) {
}`,
expected: `[mytoml]
a = 42.0
`,
},
{
+5 -5
View File
@@ -1,20 +1,20 @@
// Package tomljson is a program that converts TOML to JSON.
//
// # Usage
// Usage
//
// Reading from stdin:
//
// cat file.toml | tomljson > file.json
// cat file.toml | tomljson > file.json
//
// Reading from a file:
//
// tomljson file.toml > file.json
// tomljson file.toml > file.json
//
// # Installation
// Installation
//
// Using Go:
//
// go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
// go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
package main
import (
+5 -5
View File
@@ -1,20 +1,20 @@
// Package tomll is a linter program for TOML.
//
// # Usage
// Usage
//
// Reading from stdin, writing to stdout:
//
// cat file.toml | tomll
// cat file.toml | tomll
//
// Reading and updating a list of files in place:
//
// tomll a.toml b.toml c.toml
// tomll a.toml b.toml c.toml
//
// # Installation
// Installation
//
// Using Go:
//
// go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
// go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
package main
import (
+1
View File
@@ -23,6 +23,7 @@ mytoml.a = 42.0
`,
expected: `[mytoml]
a = 42.0
`,
},
{
+1 -1
View File
@@ -3,7 +3,7 @@
//
// Within the go-toml package, run `go generate`. Otherwise, use:
//
// go run github.com/pelletier/go-toml/cmd/tomltestgen -o toml_testgen_test.go
// go run github.com/pelletier/go-toml/cmd/tomltestgen -o toml_testgen_test.go
package main
import (
-1
View File
@@ -103,7 +103,6 @@ func (e *DecodeError) Key() Key {
//
// The function copies all bytes used in DecodeError, so that document and
// highlight can be freely deallocated.
//
//nolint:funlen
func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
offset := danger.SubsliceOffset(document, de.highlight)
+2 -2
View File
@@ -1,5 +1,5 @@
//go:build go1.18 || go1.19
// +build go1.18 go1.19
//go:build go1.18
// +build go1.18
package toml_test
+1 -1
View File
@@ -2,4 +2,4 @@ module github.com/pelletier/go-toml/v2
go 1.16
require github.com/stretchr/testify v1.8.0
require github.com/stretchr/testify v1.7.1
+3 -7
View File
@@ -1,15 +1,11 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+4 -4
View File
@@ -11,10 +11,10 @@ import (
//
// For example:
//
// it := n.Children()
// for it.Next() {
// it.Node()
// }
// it := n.Children()
// for it.Next() {
// it.Node()
// }
type Iterator struct {
started bool
node *Node
@@ -67,7 +67,6 @@ func TestDocMarshal(t *testing.T) {
}
marshalTestToml := `title = 'TOML Marshal Testing'
[basic_lists]
floats = [12.3, 45.6, 78.9]
bools = [true, false, true]
@@ -90,6 +89,7 @@ name = 'Second'
[subdoc.first]
name = 'First'
[basic]
uint = 5001
bool = true
@@ -101,9 +101,9 @@ date = 1979-05-27T07:32:00Z
[[subdoclist]]
name = 'List.First'
[[subdoclist]]
name = 'List.Second'
`
result, err := toml.Marshal(docData)
@@ -117,15 +117,14 @@ func TestBasicMarshalQuotedKey(t *testing.T) {
expected := `'Z.string-àéù' = 'Hello'
'Yfloat-𝟘' = 3.5
['Xsubdoc-àéù']
String2 = 'One'
[['W.sublist-𝟘']]
String2 = 'Two'
[['W.sublist-𝟘']]
String2 = 'Three'
`
require.Equal(t, string(expected), string(result))
@@ -160,8 +159,8 @@ bool = false
int = 0
string = ''
stringlist = []
[map]
`
require.Equal(t, string(expected), string(result))
@@ -151,7 +151,6 @@ type quotedKeyMarshalTestStruct struct {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
String: "Hello",
@@ -161,7 +160,6 @@ var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5
"Z.string-àéù" = "Hello"
@@ -274,7 +272,6 @@ var docData = testDoc{
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var mapTestDoc = testMapDoc{
Title: "TOML Marshal Testing",
@@ -562,12 +559,10 @@ func (c customMarshaler) MarshalTOML() ([]byte, error) {
var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var customMarshalerToml = []byte(`Sally Fields`)
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var nestedCustomMarshalerData = customMarshalerParent{
Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
@@ -575,7 +570,6 @@ var nestedCustomMarshalerData = customMarshalerParent{
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
me = "Maiku Suteda"
@@ -617,7 +611,6 @@ func TestUnmarshalTextMarshaler(t *testing.T) {
}
// TODO: Remove nolint once type and methods are used by a test
//
//nolint:unused
type precedentMarshaler struct {
FirstName string
@@ -636,7 +629,6 @@ func (m precedentMarshaler) MarshalTOML() ([]byte, error) {
}
// TODO: Remove nolint once type and method are used by a test
//
//nolint:unused
type customPointerMarshaler struct {
FirstName string
@@ -649,7 +641,6 @@ func (m *customPointerMarshaler) MarshalTOML() ([]byte, error) {
}
// TODO: Remove nolint once type and method are used by a test
//
//nolint:unused
type textPointerMarshaler struct {
FirstName string
@@ -662,7 +653,6 @@ func (m *textPointerMarshaler) MarshalText() ([]byte, error) {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var commentTestToml = []byte(`
# it's a comment on type
@@ -700,7 +690,6 @@ type mapsTestStruct struct {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var mapsTestData = mapsTestStruct{
Simple: map[string]string{
@@ -724,7 +713,6 @@ var mapsTestData = mapsTestStruct{
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var mapsTestToml = []byte(`
[Other]
@@ -747,7 +735,6 @@ var mapsTestToml = []byte(`
`)
// TODO: Remove nolint once type is used by a test
//
//nolint:deadcode,unused
type structArrayNoTag struct {
A struct {
@@ -757,7 +744,6 @@ type structArrayNoTag struct {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var customTagTestToml = []byte(`
[postgres]
@@ -772,7 +758,6 @@ var customTagTestToml = []byte(`
`)
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var customCommentTagTestToml = []byte(`
# db connection
@@ -786,7 +771,6 @@ var customCommentTagTestToml = []byte(`
`)
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var customCommentedTagTestToml = []byte(`
[postgres]
@@ -841,7 +825,6 @@ func TestUnmarshalTabInStringAndQuotedKey(t *testing.T) {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var customMultilineTagTestToml = []byte(`int_slice = [
1,
@@ -851,7 +834,6 @@ var customMultilineTagTestToml = []byte(`int_slice = [
`)
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var testDocBasicToml = []byte(`
[document]
@@ -864,14 +846,12 @@ var testDocBasicToml = []byte(`
`)
// TODO: Remove nolint once type is used by a test
//
//nolint:deadcode
type testDocCustomTag struct {
Doc testDocBasicsCustomTag `file:"document"`
}
// TODO: Remove nolint once type is used by a test
//
//nolint:deadcode
type testDocBasicsCustomTag struct {
Bool bool `file:"bool_val"`
@@ -884,7 +864,6 @@ type testDocBasicsCustomTag struct {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,varcheck
var testDocCustomTagData = testDocCustomTag{
Doc: testDocBasicsCustomTag{
@@ -987,7 +966,6 @@ func TestUnmarshalInvalidPointerKind(t *testing.T) {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused
type testDuration struct {
Nanosec time.Duration `toml:"nanosec"`
@@ -1002,7 +980,6 @@ type testDuration struct {
}
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var testDurationToml = []byte(`
nanosec = "1ns"
@@ -1017,7 +994,6 @@ a_string = "15s"
`)
// TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck
var testDurationToml2 = []byte(`a_string = "15s"
hour = "1h0m0s"
@@ -1031,7 +1007,6 @@ sec = "1s"
`)
// TODO: Remove nolint once type is used by a test
//
//nolint:deadcode,unused
type testBadDuration struct {
Val time.Duration `toml:"val"`
+1 -2
View File
@@ -11,6 +11,5 @@ func TestEntrySize(t *testing.T) {
// Validate no regression on the size of entry{}. This is a critical bit for
// performance of unmarshaling documents. Should only be increased with care
// and a very good reason.
maxExpectedEntrySize := 48
require.LessOrEqual(t, int(unsafe.Sizeof(entry{})), maxExpectedEntrySize)
require.LessOrEqual(t, 48, int(unsafe.Sizeof(entry{})))
}
+16 -106
View File
@@ -54,7 +54,7 @@ func NewEncoder(w io.Writer) *Encoder {
// This behavior can be controlled on an individual struct field basis with the
// inline tag:
//
// MyField `toml:",inline"`
// MyField `inline:"true"`
func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
enc.tablesInline = inline
return enc
@@ -65,7 +65,7 @@ func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
//
// This behavior can be controlled on an individual struct field basis with the multiline tag:
//
// MyField `multiline:"true"`
// MyField `multiline:"true"`
func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder {
enc.arraysMultiline = multiline
return enc
@@ -89,7 +89,7 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
//
// If v cannot be represented to TOML it returns an error.
//
// # Encoding rules
// Encoding rules
//
// A top level slice containing only maps or structs is encoded as [[table
// array]].
@@ -107,30 +107,10 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
// a newline character or a single quote. In that case they are emitted as
// quoted strings.
//
// Unsigned integers larger than math.MaxInt64 cannot be encoded. Doing so
// results in an error. This rule exists because the TOML specification only
// requires parsers to support at least the 64 bits integer range. Allowing
// larger numbers would create non-standard TOML documents, which may not be
// readable (at best) by other implementations. To encode such numbers, a
// solution is a custom type that implements encoding.TextMarshaler.
//
// When encoding structs, fields are encoded in order of definition, with their
// exact name.
//
// Tables and array tables are separated by empty lines. However, consecutive
// subtables definitions are not. For example:
//
// [top1]
//
// [top2]
// [top2.child1]
//
// [[array]]
//
// [[array]]
// [array.child2]
//
// # Struct tags
// Struct tags
//
// The encoding of each public struct field can be customized by the format
// string in the "toml" key of the struct field's tag. This follows
@@ -148,8 +128,7 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
//
// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit
// a TOML comment before the value being annotated. Comments are ignored inside
// inline tables. For array tables, the comment is only present before the first
// element of the array.
// inline tables.
func (enc *Encoder) Encode(v interface{}) error {
var (
b []byte
@@ -323,11 +302,7 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
b = append(b, "false"...)
}
case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint:
x := v.Uint()
if x > uint64(math.MaxInt64) {
return nil, fmt.Errorf("toml: not encoding uint (%d) greater than max int64 (%d)", x, int64(math.MaxInt64))
}
b = strconv.AppendUint(b, x, 10)
b = strconv.AppendUint(b, v.Uint(), 10)
case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
b = strconv.AppendInt(b, v.Int(), 10)
default:
@@ -346,13 +321,13 @@ func isNil(v reflect.Value) bool {
}
}
func shouldOmitEmpty(options valueOptions, v reflect.Value) bool {
return options.omitempty && isEmptyValue(v)
}
func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
var err error
if (ctx.options.omitempty || options.omitempty) && isEmptyValue(v) {
return b, nil
}
if !ctx.inline {
b = enc.encodeComment(ctx.indent, options.comment, b)
}
@@ -378,8 +353,6 @@ func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v r
func isEmptyValue(v reflect.Value) bool {
switch v.Kind() {
case reflect.Struct:
return isEmptyStruct(v)
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0
case reflect.Bool:
@@ -396,34 +369,6 @@ func isEmptyValue(v reflect.Value) bool {
return false
}
func isEmptyStruct(v reflect.Value) bool {
// TODO: merge with walkStruct and cache.
typ := v.Type()
for i := 0; i < typ.NumField(); i++ {
fieldType := typ.Field(i)
// only consider exported fields
if fieldType.PkgPath != "" {
continue
}
tag := fieldType.Tag.Get("toml")
// special field name to skip field
if tag == "-" {
continue
}
f := v.Field(i)
if !isEmptyValue(f) {
return false
}
}
return true
}
const literalQuote = '\''
func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte {
@@ -453,6 +398,7 @@ func (enc *Encoder) encodeLiteralString(b []byte, v string) []byte {
return b
}
//nolint:cyclop
func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte {
stringQuote := `"`
@@ -706,19 +652,10 @@ func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]b
}
func (enc *Encoder) encodeComment(indent int, comment string, b []byte) []byte {
for len(comment) > 0 {
var line string
idx := strings.IndexByte(comment, '\n')
if idx >= 0 {
line = comment[:idx]
comment = comment[idx+1:]
} else {
line = comment
comment = ""
}
if comment != "" {
b = enc.indent(indent, b)
b = append(b, "# "...)
b = append(b, line...)
b = append(b, comment...)
b = append(b, '\n')
}
return b
@@ -799,13 +736,7 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
}
ctx.skipTableHeader = false
hasNonEmptyKV := false
for _, kv := range t.kvs {
if shouldOmitEmpty(kv.Options, kv.Value) {
continue
}
hasNonEmptyKV = true
ctx.setKey(kv.Key)
b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value)
@@ -816,20 +747,7 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
b = append(b, '\n')
}
first := true
for _, table := range t.tables {
if shouldOmitEmpty(table.Options, table.Value) {
continue
}
if first {
first = false
if hasNonEmptyKV {
b = append(b, '\n')
}
} else {
b = append(b, "\n"...)
}
ctx.setKey(table.Key)
ctx.options = table.Options
@@ -838,6 +756,8 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
if err != nil {
return nil, err
}
b = append(b, '\n')
}
return b, nil
@@ -850,10 +770,6 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
first := true
for _, kv := range t.kvs {
if shouldOmitEmpty(kv.Options, kv.Value) {
continue
}
if first {
first = false
} else {
@@ -869,7 +785,7 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
}
if len(t.tables) > 0 {
panic("inline table cannot contain nested tables, only key-values")
panic("inline table cannot contain nested tables, online key-values")
}
b = append(b, "}"...)
@@ -965,13 +881,7 @@ func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.
scratch = append(scratch, "]]\n"...)
ctx.skipTableHeader = true
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
for i := 0; i < v.Len(); i++ {
if i != 0 {
b = append(b, "\n"...)
}
b = append(b, scratch...)
var err error
+93 -197
View File
@@ -39,21 +39,21 @@ func TestMarshal(t *testing.T) {
v: map[string]string{
"hello": "world",
},
expected: "hello = 'world'\n",
expected: "hello = 'world'",
},
{
desc: "map with new line in key",
v: map[string]string{
"hel\nlo": "world",
},
expected: "\"hel\\nlo\" = 'world'\n",
expected: `"hel\nlo" = 'world'`,
},
{
desc: `map with " in key`,
v: map[string]string{
`hel"lo`: "world",
},
expected: "'hel\"lo' = 'world'\n",
expected: `'hel"lo' = 'world'`,
},
{
desc: "map in map and string",
@@ -62,9 +62,9 @@ func TestMarshal(t *testing.T) {
"hello": "world",
},
},
expected: `[table]
hello = 'world'
`,
expected: `
[table]
hello = 'world'`,
},
{
desc: "map in map in map and string",
@@ -75,10 +75,10 @@ hello = 'world'
},
},
},
expected: `[this]
expected: `
[this]
[this.is]
a = 'test'
`,
a = 'test'`,
},
{
desc: "map in map in map and string with values",
@@ -90,20 +90,18 @@ a = 'test'
"also": "that",
},
},
expected: `[this]
expected: `
[this]
also = 'that'
[this.is]
a = 'test'
`,
a = 'test'`,
},
{
desc: "simple string array",
v: map[string][]string{
"array": {"one", "two", "three"},
},
expected: `array = ['one', 'two', 'three']
`,
expected: `array = ['one', 'two', 'three']`,
},
{
desc: "empty string array",
@@ -120,16 +118,14 @@ a = 'test'
v: map[string][][]string{
"array": {{"one", "two"}, {"three"}},
},
expected: `array = [['one', 'two'], ['three']]
`,
expected: `array = [['one', 'two'], ['three']]`,
},
{
desc: "mixed strings and nested string arrays",
v: map[string][]interface{}{
"array": {"a string", []string{"one", "two"}, "last"},
},
expected: `array = ['a string', ['one', 'two'], 'last']
`,
expected: `array = ['a string', ['one', 'two'], 'last']`,
},
{
desc: "array of maps",
@@ -139,9 +135,9 @@ a = 'test'
{"map2.1": "v2.1"},
},
},
expected: `[[top]]
expected: `
[[top]]
'map1.1' = 'v1.1'
[[top]]
'map2.1' = 'v2.1'
`,
@@ -152,9 +148,9 @@ a = 'test'
"key1": "value1",
"key2": "value2",
},
expected: `key1 = 'value1'
key2 = 'value2'
`,
expected: `
key1 = 'value1'
key2 = 'value2'`,
},
{
desc: "simple struct",
@@ -163,8 +159,7 @@ key2 = 'value2'
}{
A: "foo",
},
expected: `A = 'foo'
`,
expected: `A = 'foo'`,
},
{
desc: "one level of structs within structs",
@@ -179,7 +174,8 @@ key2 = 'value2'
K2: "v2",
},
},
expected: `[A]
expected: `
[A]
K1 = 'v1'
K2 = 'v2'
`,
@@ -194,10 +190,10 @@ K2 = 'v2'
},
},
},
expected: `[root]
expected: `
[root]
[[root.nested]]
name = 'Bob'
[[root.nested]]
name = 'Alice'
`,
@@ -207,53 +203,49 @@ name = 'Alice'
v: map[string]interface{}{
"a": "'\b\f\r\t\"\\",
},
expected: `a = "'\b\f\r\t\"\\"
`,
expected: `a = "'\b\f\r\t\"\\"`,
},
{
desc: "string utf8 low",
v: map[string]interface{}{
"a": "'Ę",
},
expected: `a = "'Ę"
`,
expected: `a = "'Ę"`,
},
{
desc: "string utf8 low 2",
v: map[string]interface{}{
"a": "'\u10A85",
},
expected: "a = \"'\u10A85\"\n",
expected: "a = \"'\u10A85\"",
},
{
desc: "string utf8 low 2",
v: map[string]interface{}{
"a": "'\u10A85",
},
expected: "a = \"'\u10A85\"\n",
expected: "a = \"'\u10A85\"",
},
{
desc: "emoji",
v: map[string]interface{}{
"a": "'😀",
},
expected: "a = \"'😀\"\n",
expected: "a = \"'😀\"",
},
{
desc: "control char",
v: map[string]interface{}{
"a": "'\u001A",
},
expected: `a = "'\u001A"
`,
expected: `a = "'\u001A"`,
},
{
desc: "multi-line string",
v: map[string]interface{}{
"a": "hello\nworld",
},
expected: `a = "hello\nworld"
`,
expected: `a = "hello\nworld"`,
},
{
desc: "multi-line forced",
@@ -264,8 +256,7 @@ name = 'Alice'
},
expected: `A = """
hello
world"""
`,
world"""`,
},
{
desc: "inline field",
@@ -280,8 +271,8 @@ world"""
"isinline": "no",
},
},
expected: `A = {isinline = 'yes'}
expected: `
A = {isinline = 'yes'}
[B]
isinline = 'no'
`,
@@ -295,7 +286,8 @@ isinline = 'no'
A: []int{1, 2, 3, 4},
B: []int{1, 2, 3, 4},
},
expected: `A = [
expected: `
A = [
1,
2,
3,
@@ -311,7 +303,8 @@ B = [1, 2, 3, 4]
}{
A: [][]int{{1, 2}, {3, 4}},
},
expected: `A = [
expected: `
A = [
[1, 2],
[3, 4]
]
@@ -336,8 +329,7 @@ B = [1, 2, 3, 4]
}{
A: []*int{nil},
},
expected: `A = [0]
`,
expected: `A = [0]`,
},
{
desc: "nil pointer in slice uses zero value",
@@ -346,8 +338,7 @@ B = [1, 2, 3, 4]
}{
A: []*int{nil},
},
expected: `A = [0]
`,
expected: `A = [0]`,
},
{
desc: "pointer in slice",
@@ -356,8 +347,7 @@ B = [1, 2, 3, 4]
}{
A: []*int{&someInt},
},
expected: `A = [42]
`,
expected: `A = [42]`,
},
{
desc: "inline table in inline table",
@@ -368,25 +358,23 @@ B = [1, 2, 3, 4]
},
},
},
expected: `A = {A = {A = 'hello'}}
`,
expected: `A = {A = {A = 'hello'}}`,
},
{
desc: "empty slice in map",
v: map[string][]string{
"a": {},
},
expected: `a = []
`,
expected: `a = []`,
},
{
desc: "map in slice",
v: map[string][]map[string]string{
"a": {{"hello": "world"}},
},
expected: `[[a]]
hello = 'world'
`,
expected: `
[[a]]
hello = 'world'`,
},
{
desc: "newline in map in slice",
@@ -394,8 +382,7 @@ hello = 'world'
"a\n": {{"hello": "world"}},
},
expected: `[["a\n"]]
hello = 'world'
`,
hello = 'world'`,
},
{
desc: "newline in map in slice",
@@ -411,8 +398,7 @@ hello = 'world'
}{
A: []struct{}{},
},
expected: `A = []
`,
expected: `A = []`,
},
{
desc: "nil field is ignored",
@@ -432,8 +418,7 @@ hello = 'world'
Public: "shown",
private: "hidden",
},
expected: `Public = 'shown'
`,
expected: `Public = 'shown'`,
},
{
desc: "fields tagged - are ignored",
@@ -457,8 +442,7 @@ hello = 'world'
v: map[string]interface{}{
"hello\nworld": 42,
},
expected: `"hello\nworld" = 42
`,
expected: `"hello\nworld" = 42`,
},
{
desc: "new line in parent of nested table key",
@@ -468,8 +452,7 @@ hello = 'world'
},
},
expected: `["hello\nworld"]
inner = 42
`,
inner = 42`,
},
{
desc: "new line in nested table key",
@@ -482,8 +465,7 @@ inner = 42
},
expected: `[parent]
[parent."in\ner"]
foo = 42
`,
foo = 42`,
},
{
desc: "invalid map key",
@@ -506,8 +488,7 @@ foo = 42
}{
T: time.Time{},
},
expected: `T = 0001-01-01T00:00:00Z
`,
expected: `T = 0001-01-01T00:00:00Z`,
},
{
desc: "time nano",
@@ -516,8 +497,7 @@ foo = 42
}{
T: time.Date(1979, time.May, 27, 0, 32, 0, 999999000, time.UTC),
},
expected: `T = 1979-05-27T00:32:00.999999Z
`,
expected: `T = 1979-05-27T00:32:00.999999Z`,
},
{
desc: "bool",
@@ -528,9 +508,9 @@ foo = 42
A: false,
B: true,
},
expected: `A = false
B = true
`,
expected: `
A = false
B = true`,
},
{
desc: "numbers",
@@ -561,7 +541,8 @@ B = true
K: 42,
L: 2.2,
},
expected: `A = 1.1
expected: `
A = 1.1
B = 42
C = 42
D = 42
@@ -572,8 +553,7 @@ H = 42
I = 42
J = 42
K = 42
L = 2.2
`,
L = 2.2`,
},
{
desc: "comments",
@@ -586,7 +566,8 @@ L = 2.2
Three: []int{1, 2, 3},
},
},
expected: `# Before table
expected: `
# Before table
[Table]
One = 1
# Before kv
@@ -608,7 +589,7 @@ Three = [1, 2, 3]
}
require.NoError(t, err)
assert.Equal(t, e.expected, string(b))
equalStringsIgnoreNewlines(t, e.expected, string(b))
// make sure the output is always valid TOML
defaultMap := map[string]interface{}{}
@@ -683,6 +664,12 @@ func testWithFlags(t *testing.T, flags int, setters flagsSetters, testfn func(t
}
}
func equalStringsIgnoreNewlines(t *testing.T, expected string, actual string) {
t.Helper()
cutset := "\n"
assert.Equal(t, strings.Trim(expected, cutset), strings.Trim(actual, cutset))
}
func TestMarshalFloats(t *testing.T) {
v := map[string]float32{
"nan": float32(math.NaN()),
@@ -722,8 +709,7 @@ func TestMarshalIndentTables(t *testing.T) {
v: map[string]interface{}{
"foo": "bar",
},
expected: `foo = 'bar'
`,
expected: `foo = 'bar'`,
},
{
desc: "one level table",
@@ -733,7 +719,8 @@ func TestMarshalIndentTables(t *testing.T) {
"two": "value2",
},
},
expected: `[foo]
expected: `
[foo]
one = 'value1'
two = 'value2'
`,
@@ -749,11 +736,10 @@ func TestMarshalIndentTables(t *testing.T) {
},
},
},
expected: `root = 'value0'
expected: `
root = 'value0'
[level1]
one = 'value1'
[level1.level2]
two = 'value2'
`,
@@ -768,7 +754,7 @@ func TestMarshalIndentTables(t *testing.T) {
enc.SetIndentTables(true)
err := enc.Encode(e.v)
require.NoError(t, err)
assert.Equal(t, e.expected, buf.String())
equalStringsIgnoreNewlines(t, e.expected, buf.String())
})
}
}
@@ -813,7 +799,7 @@ func TestMarshalTextMarshaler(t *testing.T) {
m := map[string]interface{}{"a": &customTextMarshaler{value: 2}}
r, err := toml.Marshal(m)
require.NoError(t, err)
assert.Equal(t, "a = '::2'\n", string(r))
equalStringsIgnoreNewlines(t, "a = '::2'", string(r))
}
type brokenWriter struct{}
@@ -836,10 +822,10 @@ func TestEncoderSetIndentSymbol(t *testing.T) {
enc.SetIndentSymbol(">>>")
err := enc.Encode(map[string]map[string]string{"parent": {"hello": "world"}})
require.NoError(t, err)
expected := `[parent]
>>>hello = 'world'
`
assert.Equal(t, expected, w.String())
expected := `
[parent]
>>>hello = 'world'`
equalStringsIgnoreNewlines(t, expected, w.String())
}
func TestEncoderOmitempty(t *testing.T) {
@@ -870,9 +856,9 @@ func TestEncoderOmitempty(t *testing.T) {
b, err := toml.Marshal(d)
require.NoError(t, err)
expected := ``
expected := `[Struct]`
assert.Equal(t, expected, string(b))
equalStringsIgnoreNewlines(t, expected, string(b))
}
func TestEncoderTagFieldName(t *testing.T) {
@@ -887,12 +873,13 @@ func TestEncoderTagFieldName(t *testing.T) {
b, err := toml.Marshal(d)
require.NoError(t, err)
expected := `hello = 'world'
expected := `
hello = 'world'
'#' = ''
Bad = ''
`
assert.Equal(t, expected, string(b))
equalStringsIgnoreNewlines(t, expected, string(b))
}
func TestIssue436(t *testing.T) {
@@ -906,11 +893,12 @@ func TestIssue436(t *testing.T) {
err = toml.NewEncoder(&buf).Encode(v)
require.NoError(t, err)
expected := `[[a]]
expected := `
[[a]]
[a.b]
c = 'd'
`
assert.Equal(t, expected, buf.String())
equalStringsIgnoreNewlines(t, expected, buf.String())
}
func TestIssue424(t *testing.T) {
@@ -992,7 +980,7 @@ func TestIssue678(t *testing.T) {
out, err := toml.Marshal(cfg)
require.NoError(t, err)
assert.Equal(t, "BigInt = '123'\n", string(out))
equalStringsIgnoreNewlines(t, "BigInt = '123'", string(out))
cfg2 := &Config{}
err = toml.Unmarshal(out, cfg2)
@@ -1016,85 +1004,6 @@ func TestIssue752(t *testing.T) {
require.Equal(t, "", string(out))
}
func TestIssue768(t *testing.T) {
type cfg struct {
Name string `comment:"This is a multiline comment.\nThis is line 2."`
}
out, err := toml.Marshal(&cfg{})
require.NoError(t, err)
expected := `# This is a multiline comment.
# This is line 2.
Name = ''
`
require.Equal(t, expected, string(out))
}
func TestIssue786(t *testing.T) {
type Dependencies struct {
Dependencies []string `toml:"dependencies,multiline,omitempty"`
BuildDependencies []string `toml:"buildDependencies,multiline,omitempty"`
OptionalDependencies []string `toml:"optionalDependencies,multiline,omitempty"`
}
type Test struct {
Dependencies Dependencies `toml:"dependencies,omitempty"`
}
x := Test{}
b, err := toml.Marshal(x)
require.NoError(t, err)
require.Equal(t, "", string(b))
type General struct {
From string `toml:"from,omitempty" json:"from,omitempty" comment:"from in graphite-web format, the local TZ is used"`
Randomize bool `toml:"randomize" json:"randomize" comment:"randomize starting time with [0,step)"`
}
type Custom struct {
Name string `toml:"name" json:"name,omitempty" comment:"names for generator, braces are expanded like in shell"`
Type string `toml:"type,omitempty" json:"type,omitempty" comment:"type of generator"`
General
}
type Config struct {
General
Custom []Custom `toml:"custom,omitempty" json:"custom,omitempty" comment:"generators with custom parameters can be specified separately"`
}
buf := new(bytes.Buffer)
config := &Config{General: General{From: "-2d", Randomize: true}}
config.Custom = []Custom{{Name: "omit", General: General{Randomize: false}}}
config.Custom = append(config.Custom, Custom{Name: "present", General: General{From: "-2d", Randomize: true}})
encoder := toml.NewEncoder(buf)
encoder.Encode(config)
expected := `# from in graphite-web format, the local TZ is used
from = '-2d'
# randomize starting time with [0,step)
randomize = true
# generators with custom parameters can be specified separately
[[custom]]
# names for generator, braces are expanded like in shell
name = 'omit'
# randomize starting time with [0,step)
randomize = false
[[custom]]
# names for generator, braces are expanded like in shell
name = 'present'
# from in graphite-web format, the local TZ is used
from = '-2d'
# randomize starting time with [0,step)
randomize = true
`
require.Equal(t, expected, buf.String())
}
func TestMarshalNestedAnonymousStructs(t *testing.T) {
type Embedded struct {
Value string `toml:"value" json:"value"`
@@ -1116,7 +1025,6 @@ func TestMarshalNestedAnonymousStructs(t *testing.T) {
}
expected := `value = ''
[top]
value = ''
@@ -1125,6 +1033,7 @@ value = ''
[anonymous]
value = ''
`
result, err := toml.Marshal(doc)
@@ -1148,9 +1057,9 @@ func TestMarshalNestedAnonymousStructs_DuplicateField(t *testing.T) {
doc.Value = "shadows"
expected := `value = 'shadows'
[top]
value = ''
`
result, err := toml.Marshal(doc)
@@ -1161,7 +1070,7 @@ value = ''
func TestLocalTime(t *testing.T) {
v := map[string]toml.LocalTime{
"a": {
"a": toml.LocalTime{
Hour: 1,
Minute: 2,
Second: 3,
@@ -1177,19 +1086,6 @@ func TestLocalTime(t *testing.T) {
require.Equal(t, expected, string(out))
}
func TestMarshalUint64Overflow(t *testing.T) {
// The TOML spec only requires implementation to provide support for the
// int64 range. To avoid generating TOML documents that would not be
// supported by standard-compliant parsers, uint64 > max int64 cannot be
// marshaled.
x := map[string]interface{}{
"foo": uint64(math.MaxInt64) + 1,
}
_, err := toml.Marshal(x)
require.Error(t, err)
}
func ExampleMarshal() {
type MyConfig struct {
Version int
+43 -84
View File
@@ -28,11 +28,6 @@ func (p *parser) Raw(raw ast.Range) []byte {
return p.data[raw.Offset : raw.Offset+raw.Length]
}
func (p *parser) SetRaw(ref ast.Reference, from []byte, to []byte) {
b := danger.BytesRange(from, to)
p.builder.NodeAt(ref).Raw = p.Range(b)
}
func (p *parser) Reset(b []byte) {
p.builder.Reset()
p.ref = ast.InvalidReference
@@ -157,14 +152,12 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
ref := p.builder.Push(ast.Node{
Kind: ast.ArrayTable,
})
start := b
b = b[2:]
b = p.parseWhitespace(b)
k, b, err := p.parseKey(b)
if err != nil {
p.SetRaw(ref, start, b)
return ref, nil, err
}
@@ -173,12 +166,11 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
b, err = expect(']', b)
if err != nil {
p.SetRaw(ref, start, b)
return ref, nil, err
}
b, err = expect(']', b)
p.SetRaw(ref, start, b)
return ref, b, err
}
@@ -189,14 +181,12 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
ref := p.builder.Push(ast.Node{
Kind: ast.Table,
})
start := b
b = b[1:]
b = p.parseWhitespace(b)
key, b, err := p.parseKey(b)
if err != nil {
p.SetRaw(ref, start, b)
return ref, nil, err
}
@@ -206,7 +196,6 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
b, err = expect(']', b)
p.SetRaw(ref, start, b)
return ref, b, err
}
@@ -215,12 +204,10 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
ref := p.builder.Push(ast.Node{
Kind: ast.KeyValue,
})
start := b
key, b, err := p.parseKey(b)
if err != nil {
p.SetRaw(ref, start, b)
return ast.InvalidReference, b, err
return ast.InvalidReference, nil, err
}
// keyval-sep = ws %x3D ws ; =
@@ -228,28 +215,24 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
b = p.parseWhitespace(b)
if len(b) == 0 {
p.SetRaw(ref, start, b)
return ast.InvalidReference, b, newDecodeError(b, "expected = after a key, but the document ends there")
return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there")
}
b, err = expect('=', b)
if err != nil {
p.SetRaw(ref, start, b)
return ast.InvalidReference, b, err
return ast.InvalidReference, nil, err
}
b = p.parseWhitespace(b)
valRef, b, err := p.parseVal(b)
if err != nil {
p.SetRaw(ref, start, b)
return ref, b, err
}
p.builder.Chain(valRef, key)
p.builder.AttachChild(ref, valRef)
p.SetRaw(ref, start, b)
return ref, b, err
}
@@ -259,7 +242,7 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
ref := ast.InvalidReference
if len(b) == 0 {
return ref, b, newDecodeError(b, "expected value, not eof")
return ref, nil, newDecodeError(b, "expected value, not eof")
}
var err error
@@ -304,25 +287,23 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
return ref, b, err
case 't':
if !scanFollowsTrue(b) {
return ref, b, newDecodeError(atmost(b, 4), "expected 'true'")
return ref, nil, newDecodeError(atmost(b, 4), "expected 'true'")
}
ref = p.builder.Push(ast.Node{
Kind: ast.Bool,
Data: b[:4],
Raw: p.Range(b[:4]),
})
return ref, b[4:], nil
case 'f':
if !scanFollowsFalse(b) {
return ref, b, newDecodeError(atmost(b, 5), "expected 'false'")
return ref, nil, newDecodeError(atmost(b, 5), "expected 'false'")
}
ref = p.builder.Push(ast.Node{
Kind: ast.Bool,
Data: b[:5],
Raw: p.Range(b[:5]),
})
return ref, b[5:], nil
@@ -346,7 +327,7 @@ func atmost(b []byte, n int) []byte {
func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
v, rest, err := scanLiteralString(b)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
return v, v[1 : len(v)-1], rest, nil
@@ -361,7 +342,6 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
parent := p.builder.Push(ast.Node{
Kind: ast.InlineTable,
})
start := b
first := true
@@ -376,8 +356,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
b = p.parseWhitespace(b)
if len(b) == 0 {
p.SetRaw(parent, start, b)
return parent, b, newDecodeError(previousB[:1], "inline table is incomplete")
return parent, nil, newDecodeError(previousB[:1], "inline table is incomplete")
}
if b[0] == '}' {
@@ -387,8 +366,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
if !first {
b, err = expect(',', b)
if err != nil {
p.SetRaw(parent, start, b)
return parent, b, err
return parent, nil, err
}
b = p.parseWhitespace(b)
}
@@ -397,8 +375,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
kv, b, err = p.parseKeyval(b)
if err != nil {
p.SetRaw(parent, start, b)
return parent, b, err
return parent, nil, err
}
if first {
@@ -413,7 +390,6 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
rest, err := expect('}', b)
p.SetRaw(parent, start, b)
return parent, rest, err
}
@@ -427,7 +403,6 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
// array-sep = %x2C ; , Comma
// ws-comment-newline = *( wschar / [ comment ] newline )
arrayStart := b
start := b
b = b[1:]
parent := p.builder.Push(ast.Node{
@@ -442,13 +417,11 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
for len(b) > 0 {
b, err = p.parseOptionalWhitespaceCommentNewline(b)
if err != nil {
p.SetRaw(parent, start, b)
return parent, b, err
return parent, nil, err
}
if len(b) == 0 {
p.SetRaw(parent, start, b)
return parent, b, newDecodeError(arrayStart[:1], "array is incomplete")
return parent, nil, newDecodeError(arrayStart[:1], "array is incomplete")
}
if b[0] == ']' {
@@ -457,19 +430,16 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
if b[0] == ',' {
if first {
p.SetRaw(parent, start, b)
return parent, b, newDecodeError(b[0:1], "array cannot start with comma")
return parent, nil, newDecodeError(b[0:1], "array cannot start with comma")
}
b = b[1:]
b, err = p.parseOptionalWhitespaceCommentNewline(b)
if err != nil {
p.SetRaw(parent, start, b)
return parent, b, err
return parent, nil, err
}
} else if !first {
p.SetRaw(parent, start, b)
return parent, b, newDecodeError(b[0:1], "array elements must be separated by commas")
return parent, nil, newDecodeError(b[0:1], "array elements must be separated by commas")
}
// TOML allows trailing commas in arrays.
@@ -480,8 +450,7 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
var valueRef ast.Reference
valueRef, b, err = p.parseVal(b)
if err != nil {
p.SetRaw(parent, start, b)
return parent, b, err
return parent, nil, err
}
if first {
@@ -493,16 +462,13 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
b, err = p.parseOptionalWhitespaceCommentNewline(b)
if err != nil {
p.SetRaw(parent, start, b)
return parent, b, err
return parent, nil, err
}
first = false
}
rest, err := expect(']', b)
p.SetRaw(parent, start, rest)
return parent, rest, err
}
@@ -514,7 +480,7 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
if len(b) > 0 && b[0] == '#' {
_, b, err = scanComment(b)
if err != nil {
return b, err
return nil, err
}
}
@@ -525,7 +491,7 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
if b[0] == '\n' || b[0] == '\r' {
b, err = p.parseNewline(b)
if err != nil {
return b, err
return nil, err
}
} else {
break
@@ -538,7 +504,7 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
token, rest, err := scanMultilineLiteralString(b)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
i := 3
@@ -567,7 +533,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
// mlb-escaped-nl = escape ws newline *( wschar / newline )
token, escaped, rest, err := scanMultilineBasicString(b)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
i := 3
@@ -589,7 +555,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
if verr.Zero() {
return token, str, rest, nil
}
return nil, nil, rest, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
}
var builder bytes.Buffer
@@ -656,26 +622,26 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
case 'u':
x, err := hexToRune(atmost(token[i+1:], 4), 4)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
builder.WriteRune(x)
i += 4
case 'U':
x, err := hexToRune(atmost(token[i+1:], 8), 8)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
builder.WriteRune(x)
i += 8
default:
return nil, nil, rest, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
}
i++
} else {
size := utf8ValidNext(token[i:])
if size == 0 {
return nil, nil, rest, newDecodeError(token[i:i+1], "invalid character %#U", c)
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
}
builder.Write(token[i : i+size])
i += size
@@ -696,7 +662,7 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
// dot-sep = ws %x2E ws ; . Period
raw, key, b, err := p.parseSimpleKey(b)
if err != nil {
return ast.InvalidReference, b, err
return ast.InvalidReference, nil, err
}
ref := p.builder.Push(ast.Node{
@@ -712,7 +678,7 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
raw, key, b, err = p.parseSimpleKey(b)
if err != nil {
return ref, b, err
return ref, nil, err
}
p.builder.PushAndChain(ast.Node{
@@ -730,7 +696,7 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
if len(b) == 0 {
return nil, nil, b, newDecodeError(b, "expected key but found none")
return nil, nil, nil, newDecodeError(b, "expected key but found none")
}
// simple-key = quoted-key / unquoted-key
@@ -745,7 +711,7 @@ func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
key, rest = scanUnquotedKey(b)
return key, key, rest, nil
default:
return nil, nil, b[1:], newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
}
}
@@ -767,7 +733,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
// escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
token, escaped, rest, err := scanBasicString(b)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
startIdx := len(`"`)
@@ -782,7 +748,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
if verr.Zero() {
return token, str, rest, nil
}
return nil, nil, rest, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
}
i := startIdx
@@ -815,7 +781,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
case 'u':
x, err := hexToRune(token[i+1:len(token)-1], 4)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
builder.WriteRune(x)
@@ -823,19 +789,19 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
case 'U':
x, err := hexToRune(token[i+1:len(token)-1], 8)
if err != nil {
return nil, nil, rest, err
return nil, nil, nil, err
}
builder.WriteRune(x)
i += 8
default:
return nil, nil, rest, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
}
i++
} else {
size := utf8ValidNext(token[i:])
if size == 0 {
return nil, nil, rest, newDecodeError(token[i:i+1], "invalid character %#U", c)
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
}
builder.Write(token[i : i+size])
i += size
@@ -888,23 +854,21 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
switch b[0] {
case 'i':
if !scanFollowsInf(b) {
return ast.InvalidReference, b, newDecodeError(atmost(b, 3), "expected 'inf'")
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'")
}
return p.builder.Push(ast.Node{
Kind: ast.Float,
Data: b[:3],
Raw: p.Range(b[:3]),
}), b[3:], nil
case 'n':
if !scanFollowsNan(b) {
return ast.InvalidReference, b, newDecodeError(atmost(b, 3), "expected 'nan'")
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'")
}
return p.builder.Push(ast.Node{
Kind: ast.Float,
Data: b[:3],
Raw: p.Range(b[:3]),
}), b[3:], nil
case '+', '-':
return p.scanIntOrFloat(b)
@@ -996,7 +960,6 @@ byteLoop:
return p.builder.Push(ast.Node{
Kind: kind,
Data: b[:i],
Raw: p.Range(b[:i]),
}), b[i:], nil
}
@@ -1030,7 +993,6 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{
Kind: ast.Integer,
Data: b[:i],
Raw: p.Range(b[:i]),
}), b[i:], nil
}
@@ -1054,11 +1016,10 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{
Kind: ast.Float,
Data: b[:i+3],
Raw: p.Range(b[:i+3]),
}), b[i+3:], nil
}
return ast.InvalidReference, b[i:], newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
}
if c == 'n' {
@@ -1066,11 +1027,10 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{
Kind: ast.Float,
Data: b[:i+3],
Raw: p.Range(b[:i+3]),
}), b[i+3:], nil
}
return ast.InvalidReference, b[i:], newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
}
break
@@ -1089,7 +1049,6 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{
Kind: kind,
Data: b[:i],
Raw: p.Range(b[:i]),
}), b[i:], nil
}
@@ -1116,11 +1075,11 @@ func isValidBinaryRune(r byte) bool {
func expect(x byte, b []byte) ([]byte, error) {
if len(b) == 0 {
return b, newDecodeError(b, "expected character %c but the document ended here", x)
return nil, newDecodeError(b, "expected character %c but the document ended here", x)
}
if b[0] != x {
return b, newDecodeError(b[0:1], "expected character %c", x)
return nil, newDecodeError(b[0:1], "expected character %c", x)
}
return b[1:], nil
+19 -19
View File
@@ -54,16 +54,16 @@ func scanLiteralString(b []byte) ([]byte, []byte, error) {
case '\'':
return b[:i+1], b[i+1:], nil
case '\n', '\r':
return nil, b[i+1:], newDecodeError(b[i:i+1], "literal strings cannot have new lines")
return nil, nil, newDecodeError(b[i:i+1], "literal strings cannot have new lines")
}
size := utf8ValidNext(b[i:])
if size == 0 {
return nil, b[i+1:], newDecodeError(b[i:i+1], "invalid character")
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
}
i += size
}
return nil, b[len(b):], newDecodeError(b[len(b):], "unterminated literal string")
return nil, nil, newDecodeError(b[len(b):], "unterminated literal string")
}
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
@@ -98,39 +98,39 @@ func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
i++
if i < len(b) && b[i] == '\'' {
return nil, b[i:], newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string")
return nil, nil, newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string")
}
return b[:i], b[i:], nil
}
case '\r':
if len(b) < i+2 {
return nil, b[i:], newDecodeError(b[len(b):], `need a \n after \r`)
return nil, nil, newDecodeError(b[len(b):], `need a \n after \r`)
}
if b[i+1] != '\n' {
return nil, b[i+2:], newDecodeError(b[i:i+2], `need a \n after \r`)
return nil, nil, newDecodeError(b[i:i+2], `need a \n after \r`)
}
i += 2 // skip the \n
continue
}
size := utf8ValidNext(b[i:])
if size == 0 {
return nil, b[i:], newDecodeError(b[i:i+1], "invalid character")
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
}
i += size
}
return nil, b[len(b):], newDecodeError(b[len(b):], `multiline literal string not terminated by '''`)
return nil, nil, newDecodeError(b[len(b):], `multiline literal string not terminated by '''`)
}
func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
const lenCRLF = 2
if len(b) < lenCRLF {
return nil, b, newDecodeError(b, "windows new line expected")
return nil, nil, newDecodeError(b, "windows new line expected")
}
if b[1] != '\n' {
return nil, b[2:], newDecodeError(b, `windows new line should be \r\n`)
return nil, nil, newDecodeError(b, `windows new line should be \r\n`)
}
return b[:lenCRLF], b[lenCRLF:], nil
@@ -169,7 +169,7 @@ func scanComment(b []byte) ([]byte, []byte, error) {
}
size := utf8ValidNext(b[i:])
if size == 0 {
return nil, b[i+1:], newDecodeError(b[i:i+1], "invalid character in comment")
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
}
i += size
@@ -192,17 +192,17 @@ func scanBasicString(b []byte) ([]byte, bool, []byte, error) {
case '"':
return b[:i+1], escaped, b[i+1:], nil
case '\n', '\r':
return nil, escaped, b[i+1:], newDecodeError(b[i:i+1], "basic strings cannot have new lines")
return nil, escaped, nil, newDecodeError(b[i:i+1], "basic strings cannot have new lines")
case '\\':
if len(b) < i+2 {
return nil, escaped, b[i+1:], newDecodeError(b[i:i+1], "need a character after \\")
return nil, escaped, nil, newDecodeError(b[i:i+1], "need a character after \\")
}
escaped = true
i++ // skip the next character
}
}
return nil, escaped, b[len(b):], newDecodeError(b[len(b):], `basic string not terminated by "`)
return nil, escaped, nil, newDecodeError(b[len(b):], `basic string not terminated by "`)
}
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
@@ -243,27 +243,27 @@ func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
i++
if i < len(b) && b[i] == '"' {
return nil, escaped, b[i+1:], newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`)
return nil, escaped, nil, newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`)
}
return b[:i], escaped, b[i:], nil
}
case '\\':
if len(b) < i+2 {
return nil, escaped, b[len(b):], newDecodeError(b[len(b):], "need a character after \\")
return nil, escaped, nil, newDecodeError(b[len(b):], "need a character after \\")
}
escaped = true
i++ // skip the next character
case '\r':
if len(b) < i+2 {
return nil, escaped, b[len(b):], newDecodeError(b[len(b):], `need a \n after \r`)
return nil, escaped, nil, newDecodeError(b[len(b):], `need a \n after \r`)
}
if b[i+1] != '\n' {
return nil, escaped, b[i+2:], newDecodeError(b[i:i+2], `need a \n after \r`)
return nil, escaped, nil, newDecodeError(b[i:i+2], `need a \n after \r`)
}
i++ // skip the \n
}
}
return nil, escaped, b[len(b):], newDecodeError(b[len(b):], `multiline basic string not terminated by """`)
return nil, escaped, nil, newDecodeError(b[len(b):], `multiline basic string not terminated by """`)
}
+121 -82
View File
@@ -79,22 +79,22 @@ func (d *Decoder) DisallowUnknownFields() *Decoder {
// strict mode and a field is missing, a `toml.StrictMissingError` is
// returned. In any other case, this function returns a standard Go error.
//
// # Type mapping
// Type mapping
//
// List of supported TOML types and their associated accepted Go types:
//
// String -> string
// Integer -> uint*, int*, depending on size
// Float -> float*, depending on size
// Boolean -> bool
// Offset Date-Time -> time.Time
// Local Date-time -> LocalDateTime, time.Time
// Local Date -> LocalDate, time.Time
// Local Time -> LocalTime, time.Time
// Array -> slice and array, depending on elements types
// Table -> map and struct
// Inline Table -> same as Table
// Array of Tables -> same as Array and Table
// String -> string
// Integer -> uint*, int*, depending on size
// Float -> float*, depending on size
// Boolean -> bool
// Offset Date-Time -> time.Time
// Local Date-time -> LocalDateTime, time.Time
// Local Date -> LocalDate, time.Time
// Local Time -> LocalTime, time.Time
// Array -> slice and array, depending on elements types
// Table -> map and struct
// Inline Table -> same as Table
// Array of Tables -> same as Array and Table
func (d *Decoder) Decode(v interface{}) error {
b, err := ioutil.ReadAll(d.r)
if err != nil {
@@ -123,7 +123,7 @@ type decoder struct {
stashedExpr bool
// Skip expressions until a table is found. This is set to true when a
// table could not be created (missing field in map), so all KV expressions
// table could not be create (missing field in map), so all KV expressions
// need to be skipped.
skipUntilTable bool
@@ -230,15 +230,6 @@ func (d *decoder) fromParser(root reflect.Value) error {
return d.p.Error()
}
/*
Rules for the unmarshal code:
- The stack is used to keep track of which values need to be set where.
- handle* functions <=> switch on a given ast.Kind.
- unmarshalX* functions need to unmarshal a node of kind X.
- An "object" is either a struct or a map.
*/
func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
var x reflect.Value
var err error
@@ -344,9 +335,9 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
elem := v.Index(idx)
_, err := d.handleArrayTable(key, elem)
return v, err
default:
return reflect.Value{}, fmt.Errorf("toml: cannot decode array table into a %s", v.Type())
}
return d.handleArrayTable(key, v)
}
// When parsing an array table expression, each part of the key needs to be
@@ -400,6 +391,84 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
return d.handleArrayTable(key, v)
}
func (d *decoder) handleKeyValuePartMapStringInterface(key ast.Iterator, value *ast.Node, m map[string]interface{}) (reflect.Value, error) {
k := string(key.Node().Data)
newMap := false
if m == nil {
newMap = true
m = make(map[string]interface{}, 8)
}
set := false
v, ok := m[k]
if !ok || key.IsLast() {
set = true
v = nil
}
mv := reflect.ValueOf(&v).Elem()
x, err := d.handleKeyValueInner(key, value, mv)
if err != nil {
return reflect.Value{}, err
}
if x.IsValid() {
mv = x
set = true
}
if set {
m[k] = mv.Interface()
}
if newMap {
return reflect.ValueOf(m), nil
}
return reflect.Value{}, nil
}
func (d *decoder) handleKeyPartMapStringInterface(key ast.Iterator, m map[string]interface{}, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
newMap := false
k := string(key.Node().Data)
if m == nil {
newMap = true
m = make(map[string]interface{}, 8)
}
v, ok := m[k]
set := false
if !ok || v == nil {
set = true
v = makeFn().Interface()
}
mv := reflect.ValueOf(v)
x, err := nextFn(key, mv)
if err != nil {
return reflect.Value{}, err
}
if x.IsValid() {
mv = x
set = true
}
if set {
m[k] = mv.Interface()
}
if newMap {
return reflect.ValueOf(m), nil
}
return reflect.Value{}, nil
}
func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
var rv reflect.Value
@@ -416,6 +485,11 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
case reflect.Map:
vt := v.Type()
if vt == mapStringInterfaceType {
m := v.Interface().(map[string]interface{})
return d.handleKeyPartMapStringInterface(key, m, nextFn, makeFn)
}
// Create the key for the map element. Convert to key type.
mk := reflect.ValueOf(string(key.Node().Data)).Convert(vt.Key())
@@ -483,7 +557,7 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
d.errorContext.Struct = t
d.errorContext.Field = path
f := fieldByIndex(v, path)
f := v.FieldByIndex(path)
x, err := nextFn(key, f)
if err != nil || d.skipUntilTable {
return reflect.Value{}, err
@@ -866,27 +940,12 @@ func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
return nil
}
const (
maxInt = int64(^uint(0) >> 1)
minInt = -maxInt - 1
)
// Maximum value of uint for decoding. Currently the decoder parses the integer
// into an int64. As a result, on architectures where uint is 64 bits, the
// effective maximum uint we can decode is the maximum of int64. On
// architectures where uint is 32 bits, the maximum value we can decode is
// lower: the maximum of uint32. I didn't find a way to figure out this value at
// compile time, so it is computed during initialization.
var maxUint int64 = math.MaxInt64
func init() {
m := uint64(^uint(0))
if m < uint64(maxUint) {
maxUint = int64(m)
}
}
func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
const (
maxInt = int64(^uint(0) >> 1)
minInt = -maxInt - 1
)
i, err := parseInteger(value.Data)
if err != nil {
return err
@@ -947,7 +1006,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
r = reflect.ValueOf(uint8(i))
case reflect.Uint:
if i < 0 || i > maxUint {
if i < 0 {
return fmt.Errorf("toml: negative number %d does not fit in an uint", i)
}
@@ -1014,6 +1073,11 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
case reflect.Map:
vt := v.Type()
if vt == mapStringInterfaceType {
m := v.Interface().(map[string]interface{})
return d.handleKeyValuePartMapStringInterface(key, value, m)
}
mk := reflect.ValueOf(string(key.Node().Data))
mkt := stringType
@@ -1037,12 +1101,10 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
if !mv.IsValid() {
set = true
mv = reflect.New(v.Type().Elem()).Elem()
} else {
if key.IsLast() {
var x interface{}
mv = reflect.ValueOf(&x).Elem()
set = true
}
} else if key.IsLast() {
var x interface{}
mv = reflect.ValueOf(&x).Elem()
set = true
}
nv, err := d.handleKeyValueInner(key, value, mv)
@@ -1071,7 +1133,7 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
d.errorContext.Struct = t
d.errorContext.Field = path
f := fieldByIndex(v, path)
f := v.FieldByIndex(path)
x, err := d.handleKeyValueInner(key, value, f)
if err != nil {
return reflect.Value{}, err
@@ -1135,21 +1197,6 @@ func initAndDereferencePointer(v reflect.Value) reflect.Value {
return elem
}
// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
func fieldByIndex(v reflect.Value, path []int) reflect.Value {
for i, x := range path {
v = v.Field(x)
if i < len(path)-1 && v.Kind() == reflect.Pointer {
if v.IsNil() {
v.Set(reflect.New(v.Type().Elem()))
}
v = v.Elem()
}
}
return v
}
type fieldPathsMap = map[string][]int
var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap
@@ -1197,6 +1244,11 @@ func forEachField(t reflect.Type, path []int, do func(name string, path []int))
fieldPath := append(path, i)
fieldPath = fieldPath[:len(fieldPath):len(fieldPath)]
if f.Anonymous {
forEachField(f.Type, fieldPath, do)
continue
}
name := f.Tag.Get("toml")
if name == "-" {
continue
@@ -1205,19 +1257,6 @@ func forEachField(t reflect.Type, path []int, do func(name string, path []int))
if i := strings.IndexByte(name, ','); i >= 0 {
name = name[:i]
}
if f.Anonymous && name == "" {
t2 := f.Type
if t2.Kind() == reflect.Pointer {
t2 = t2.Elem()
}
if t2.Kind() == reflect.Struct {
forEachField(t2, fieldPath, do)
}
continue
}
if name == "" {
name = f.Name
}
+19 -148
View File
@@ -1735,28 +1735,6 @@ B = "data"`,
}
},
},
{
desc: "kv that points to a slice",
input: "a.b.c = 'foo'",
gen: func() test {
doc := map[string][]string{}
return test{
target: &doc,
err: true,
}
},
},
{
desc: "kv that points to a pointer to a slice",
input: "a.b.c = 'foo'",
gen: func() test {
doc := map[string]*[]string{}
return test{
target: &doc,
err: true,
}
},
},
}
for _, e := range examples {
@@ -1898,7 +1876,8 @@ key2 = "missing2"
key3 = "missing3"
key4 = "value4"
`,
expected: `2| key1 = "value1"
expected: `
2| key1 = "value1"
3| key2 = "missing2"
| ~~~~ missing field
4| key3 = "missing3"
@@ -1908,7 +1887,8 @@ key4 = "value4"
3| key2 = "missing2"
4| key3 = "missing3"
| ~~~~ missing field
5| key4 = "value4"`,
5| key4 = "value4"
`,
target: &struct {
Key1 string
Key4 string
@@ -1917,8 +1897,10 @@ key4 = "value4"
{
desc: "multi-part key",
input: `a.short.key="foo"`,
expected: `1| a.short.key="foo"
| ~~~~~~~~~~~ missing field`,
expected: `
1| a.short.key="foo"
| ~~~~~~~~~~~ missing field
`,
},
{
desc: "missing table",
@@ -1926,19 +1908,24 @@ key4 = "value4"
[foo]
bar = 42
`,
expected: `2| [foo]
expected: `
2| [foo]
| ~~~ missing table
3| bar = 42`,
3| bar = 42
`,
},
{
desc: "missing array table",
input: `
[[foo]]
bar = 42`,
expected: `2| [[foo]]
bar = 42
`,
expected: `
2| [[foo]]
| ~~~ missing table
3| bar = 42`,
3| bar = 42
`,
},
}
@@ -1957,7 +1944,7 @@ bar = 42`,
var tsm *toml.StrictMissingError
if errors.As(err, &tsm) {
assert.Equal(t, e.expected, tsm.String())
equalStringsIgnoreNewlines(t, e.expected, tsm.String())
} else {
t.Fatalf("err should have been a *toml.StrictMissingError, but got %s (%T)", err, err)
}
@@ -2393,79 +2380,6 @@ func TestIssue714(t *testing.T) {
require.Error(t, err)
}
func TestIssue772(t *testing.T) {
type FileHandling struct {
FilePattern string `toml:"pattern"`
}
type Config struct {
FileHandling `toml:"filehandling"`
}
var defaultConfigFile = []byte(`
[filehandling]
pattern = "reach-masterdev-"`)
config := Config{}
err := toml.Unmarshal(defaultConfigFile, &config)
require.NoError(t, err)
require.Equal(t, "reach-masterdev-", config.FileHandling.FilePattern)
}
func TestIssue774(t *testing.T) {
type ScpData struct {
Host string `json:"host"`
}
type GenConfig struct {
SCP []ScpData `toml:"scp" comment:"Array of Secure Copy Configurations"`
}
c := &GenConfig{}
c.SCP = []ScpData{{Host: "main.domain.com"}}
b, err := toml.Marshal(c)
require.NoError(t, err)
expected := `# Array of Secure Copy Configurations
[[scp]]
Host = 'main.domain.com'
`
require.Equal(t, expected, string(b))
}
func TestIssue799(t *testing.T) {
const testTOML = `
# notice the double brackets
[[test]]
answer = 42
`
var s struct {
// should be []map[string]int
Test map[string]int `toml:"test"`
}
err := toml.Unmarshal([]byte(testTOML), &s)
require.Error(t, err)
}
func TestIssue807(t *testing.T) {
type A struct {
Name string `toml:"name"`
}
type M struct {
*A
}
var m M
err := toml.Unmarshal([]byte(`name = 'foo'`), &m)
require.NoError(t, err)
require.Equal(t, "foo", m.Name)
}
func TestUnmarshalDecodeErrors(t *testing.T) {
examples := []struct {
desc string
@@ -2917,36 +2831,6 @@ world'`,
}
}
func TestOmitEmpty(t *testing.T) {
type inner struct {
private string
Skip string `toml:"-"`
V string
}
type elem struct {
Foo string `toml:",omitempty"`
Bar string `toml:",omitempty"`
Inner inner `toml:",omitempty"`
}
type doc struct {
X []elem `toml:",inline"`
}
d := doc{X: []elem{elem{
Foo: "test",
Inner: inner{
V: "alue",
},
}}}
b, err := toml.Marshal(d)
require.NoError(t, err)
require.Equal(t, "X = [{Foo = 'test', Inner = {V = 'alue'}}]\n", string(b))
}
func TestUnmarshalTags(t *testing.T) {
type doc struct {
Dash string `toml:"-,"`
@@ -3288,16 +3172,3 @@ func TestUnmarshal_RecursiveTableArray(t *testing.T) {
})
}
}
func TestUnmarshalEmbedNonString(t *testing.T) {
type Foo []byte
type doc struct {
Foo
}
d := doc{}
err := toml.Unmarshal([]byte(`foo = 'bar'`), &d)
require.NoError(t, err)
require.Nil(t, d.Foo)
}