Compare commits

...

12 Commits

Author SHA1 Message Date
Thomas Pelletier b371733c67 Make all nodes contain Raw 2022-08-22 21:05:41 -04:00
Thomas Pelletier 64dcce07ea WIP 2022-08-22 23:04:44 +00:00
Thomas Pelletier 28f1efc7d3 Decode: don't break on non-struct embed field (#810) 2022-08-22 18:39:11 -04:00
Piotr Buliński 7d69e4a728 Add missing '+build' comment to fuzz_test.go (#809) 2022-08-22 14:05:37 -04:00
Thomas Pelletier e46d245c09 Decode: don't crash on embedded nil pointers (#808)
Also has the perk of reducing the overhead of FindByIndex:

```
name                                old time/op    new time/op    delta
UnmarshalDataset/config-32            17.0ms ± 1%    17.0ms ± 1%    ~     (p=1.000 n=5+5)
UnmarshalDataset/canada-32            71.6ms ± 1%    71.4ms ± 1%    ~     (p=1.000 n=5+5)
UnmarshalDataset/citm_catalog-32      24.2ms ± 3%    23.5ms ± 2%  -3.03%  (p=0.032 n=5+5)
UnmarshalDataset/twitter-32           9.37ms ± 1%    9.09ms ± 2%  -2.97%  (p=0.032 n=5+5)
UnmarshalDataset/code-32              75.4ms ± 2%    74.9ms ± 0%    ~     (p=0.222 n=5+5)
UnmarshalDataset/example-32            147µs ±10%     136µs ± 1%  -7.14%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-32     512ns ± 2%     500ns ± 0%  -2.35%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/map-32        721ns ± 2%     702ns ± 1%  -2.68%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-32     40.1µs ± 0%    39.6µs ± 0%  -1.30%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/map-32        62.3µs ± 1%    60.6µs ± 0%  -2.83%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-32          10.8µs ± 1%    10.5µs ± 1%  -2.86%  (p=0.008 n=5+5)

name                                old speed      new speed      delta
UnmarshalDataset/config-32          61.8MB/s ± 1%  61.8MB/s ± 1%    ~     (p=1.000 n=5+5)
UnmarshalDataset/canada-32          30.8MB/s ± 1%  30.8MB/s ± 1%    ~     (p=1.000 n=5+5)
UnmarshalDataset/citm_catalog-32    23.0MB/s ± 3%  23.8MB/s ± 2%  +3.09%  (p=0.032 n=5+5)
UnmarshalDataset/twitter-32         47.2MB/s ± 1%  48.6MB/s ± 2%  +3.09%  (p=0.032 n=5+5)
UnmarshalDataset/code-32            35.6MB/s ± 2%  35.9MB/s ± 0%    ~     (p=0.222 n=5+5)
UnmarshalDataset/example-32         55.3MB/s ±10%  59.4MB/s ± 1%  +7.36%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/struct-32  21.5MB/s ± 2%  22.0MB/s ± 0%  +2.41%  (p=0.008 n=5+5)
Unmarshal/SimpleDocument/map-32     15.2MB/s ± 2%  15.7MB/s ± 1%  +2.74%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/struct-32    131MB/s ± 0%   132MB/s ± 0%  +1.31%  (p=0.008 n=5+5)
Unmarshal/ReferenceFile/map-32      84.1MB/s ± 1%  86.6MB/s ± 0%  +2.91%  (p=0.008 n=5+5)
Unmarshal/HugoFrontMatter-32        50.6MB/s ± 1%  52.1MB/s ± 1%  +2.93%  (p=0.008 n=5+5)

name                                old alloc/op   new alloc/op   delta
UnmarshalDataset/config-32            5.86MB ± 0%    5.86MB ± 0%    ~     (p=0.579 n=5+5)
UnmarshalDataset/canada-32            83.0MB ± 0%    83.0MB ± 0%    ~     (p=0.651 n=5+5)
UnmarshalDataset/citm_catalog-32      34.7MB ± 0%    34.7MB ± 0%    ~     (p=0.548 n=5+5)
UnmarshalDataset/twitter-32           12.7MB ± 0%    12.7MB ± 0%    ~     (p=1.000 n=5+5)
UnmarshalDataset/code-32              22.2MB ± 0%    22.2MB ± 0%    ~     (p=0.841 n=5+5)
UnmarshalDataset/example-32            186kB ± 0%     186kB ± 0%    ~     (p=0.111 n=5+5)
Unmarshal/SimpleDocument/struct-32      805B ± 0%      805B ± 0%    ~     (all equal)
Unmarshal/SimpleDocument/map-32       1.13kB ± 0%    1.13kB ± 0%    ~     (all equal)
Unmarshal/ReferenceFile/struct-32     20.9kB ± 0%    20.9kB ± 0%    ~     (p=0.643 n=5+5)
Unmarshal/ReferenceFile/map-32        38.3kB ± 0%    38.3kB ± 0%    ~     (p=0.397 n=5+5)
Unmarshal/HugoFrontMatter-32          7.44kB ± 0%    7.44kB ± 0%    ~     (all equal)

name                                old allocs/op  new allocs/op  delta
UnmarshalDataset/config-32              227k ± 0%      227k ± 0%    ~     (p=1.000 n=5+5)
UnmarshalDataset/canada-32              782k ± 0%      782k ± 0%    ~     (all equal)
UnmarshalDataset/citm_catalog-32        192k ± 0%      192k ± 0%    ~     (p=0.968 n=4+5)
UnmarshalDataset/twitter-32            56.9k ± 0%     56.9k ± 0%    ~     (p=0.429 n=4+5)
UnmarshalDataset/code-32               1.05M ± 0%     1.05M ± 0%    ~     (p=0.556 n=4+5)
UnmarshalDataset/example-32            1.36k ± 0%     1.36k ± 0%    ~     (all equal)
Unmarshal/SimpleDocument/struct-32      9.00 ± 0%      9.00 ± 0%    ~     (all equal)
Unmarshal/SimpleDocument/map-32         13.0 ± 0%      13.0 ± 0%    ~     (all equal)
Unmarshal/ReferenceFile/struct-32        183 ± 0%       183 ± 0%    ~     (all equal)
Unmarshal/ReferenceFile/map-32           642 ± 0%       642 ± 0%    ~     (all equal)
Unmarshal/HugoFrontMatter-32             141 ± 0%       141 ± 0%    ~     (all equal)
```

Fixes #807
2022-08-20 21:24:03 -04:00
Thomas Pelletier 7baa23f493 Decode: error on array table mismatched type (#804)
Prevent the decoder from continuing if it encounters a type it cannot decode an
array table into.

Fixes #799
2022-08-15 16:38:07 -04:00
Thomas Pelletier 2d8433b69e Encode: don't inherit omitempty (#803)
Fixes #786.
2022-08-15 11:29:46 -04:00
Thomas Pelletier 67bc5422f3 Go 1.19 (#802) 2022-08-15 10:56:33 -04:00
Thomas Pelletier fb6d1d6c2b Marshal: define and fix newlines behavior when using omitempty (#798)
Ref #786
2022-07-24 15:40:20 -04:00
dependabot[bot] d017a6dc89 build(deps): bump github.com/stretchr/testify from 1.7.5 to 1.8.0 (#795) 2022-06-29 09:51:28 -04:00
dependabot[bot] d6d3196163 build(deps): bump github.com/stretchr/testify from 1.7.4 to 1.7.5 (#794) 2022-06-24 12:49:56 -04:00
dependabot[bot] 41718a6db3 build(deps): bump github.com/stretchr/testify from 1.7.2 to 1.7.4 (#793)
Bumps [github.com/stretchr/testify](https://github.com/stretchr/testify) from 1.7.2 to 1.7.4.
- [Release notes](https://github.com/stretchr/testify/releases)
- [Commits](https://github.com/stretchr/testify/compare/v1.7.2...v1.7.4)

---
updated-dependencies:
- dependency-name: github.com/stretchr/testify
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-06-21 08:32:13 -04:00
22 changed files with 561 additions and 241 deletions
+1 -1
View File
@@ -15,6 +15,6 @@ jobs:
- name: Setup go - name: Setup go
uses: actions/setup-go@master uses: actions/setup-go@master
with: with:
go-version: 1.18 go-version: 1.19
- name: Run tests with coverage - name: Run tests with coverage
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}" run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
+1 -1
View File
@@ -22,7 +22,7 @@ jobs:
- name: Set up Go - name: Set up Go
uses: actions/setup-go@v2 uses: actions/setup-go@v2
with: with:
go-version: 1.18 go-version: 1.19
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@v2 uses: docker/login-action@v2
with: with:
+1 -1
View File
@@ -12,7 +12,7 @@ jobs:
strategy: strategy:
matrix: matrix:
os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest'] os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest']
go: [ '1.17', '1.18' ] go: [ '1.18', '1.19' ]
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
name: ${{ matrix.go }}/${{ matrix.os }} name: ${{ matrix.go }}/${{ matrix.os }}
steps: steps:
+5 -5
View File
@@ -1,20 +1,20 @@
// Package jsontoml is a program that converts JSON to TOML. // Package jsontoml is a program that converts JSON to TOML.
// //
// Usage // # Usage
// //
// Reading from stdin: // Reading from stdin:
// //
// cat file.json | jsontoml > file.toml // cat file.json | jsontoml > file.toml
// //
// Reading from a file: // Reading from a file:
// //
// jsontoml file.json > file.toml // jsontoml file.json > file.toml
// //
// Installation // # Installation
// //
// Using Go: // Using Go:
// //
// go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest // go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
package main package main
import ( import (
-1
View File
@@ -26,7 +26,6 @@ func TestConvert(t *testing.T) {
}`, }`,
expected: `[mytoml] expected: `[mytoml]
a = 42.0 a = 42.0
`, `,
}, },
{ {
+5 -5
View File
@@ -1,20 +1,20 @@
// Package tomljson is a program that converts TOML to JSON. // Package tomljson is a program that converts TOML to JSON.
// //
// Usage // # Usage
// //
// Reading from stdin: // Reading from stdin:
// //
// cat file.toml | tomljson > file.json // cat file.toml | tomljson > file.json
// //
// Reading from a file: // Reading from a file:
// //
// tomljson file.toml > file.json // tomljson file.toml > file.json
// //
// Installation // # Installation
// //
// Using Go: // Using Go:
// //
// go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest // go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
package main package main
import ( import (
+5 -5
View File
@@ -1,20 +1,20 @@
// Package tomll is a linter program for TOML. // Package tomll is a linter program for TOML.
// //
// Usage // # Usage
// //
// Reading from stdin, writing to stdout: // Reading from stdin, writing to stdout:
// //
// cat file.toml | tomll // cat file.toml | tomll
// //
// Reading and updating a list of files in place: // Reading and updating a list of files in place:
// //
// tomll a.toml b.toml c.toml // tomll a.toml b.toml c.toml
// //
// Installation // # Installation
// //
// Using Go: // Using Go:
// //
// go install github.com/pelletier/go-toml/v2/cmd/tomll@latest // go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
package main package main
import ( import (
-1
View File
@@ -23,7 +23,6 @@ mytoml.a = 42.0
`, `,
expected: `[mytoml] expected: `[mytoml]
a = 42.0 a = 42.0
`, `,
}, },
{ {
+1 -1
View File
@@ -3,7 +3,7 @@
// //
// Within the go-toml package, run `go generate`. Otherwise, use: // Within the go-toml package, run `go generate`. Otherwise, use:
// //
// go run github.com/pelletier/go-toml/cmd/tomltestgen -o toml_testgen_test.go // go run github.com/pelletier/go-toml/cmd/tomltestgen -o toml_testgen_test.go
package main package main
import ( import (
+1
View File
@@ -103,6 +103,7 @@ func (e *DecodeError) Key() Key {
// //
// The function copies all bytes used in DecodeError, so that document and // The function copies all bytes used in DecodeError, so that document and
// highlight can be freely deallocated. // highlight can be freely deallocated.
//
//nolint:funlen //nolint:funlen
func wrapDecodeError(document []byte, de *decodeError) *DecodeError { func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
offset := danger.SubsliceOffset(document, de.highlight) offset := danger.SubsliceOffset(document, de.highlight)
+2 -2
View File
@@ -1,5 +1,5 @@
//go:build go1.18 //go:build go1.18 || go1.19
// +build go1.18 // +build go1.18 go1.19
package toml_test package toml_test
+1 -1
View File
@@ -2,4 +2,4 @@ module github.com/pelletier/go-toml/v2
go 1.16 go 1.16
require github.com/stretchr/testify v1.7.2 require github.com/stretchr/testify v1.8.0
+7 -3
View File
@@ -1,11 +1,15 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.2 h1:4jaiDzPyXQvSd7D0EjG45355tLlV3VOECpq10pLC+8s= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+4 -4
View File
@@ -11,10 +11,10 @@ import (
// //
// For example: // For example:
// //
// it := n.Children() // it := n.Children()
// for it.Next() { // for it.Next() {
// it.Node() // it.Node()
// } // }
type Iterator struct { type Iterator struct {
started bool started bool
node *Node node *Node
@@ -67,6 +67,7 @@ func TestDocMarshal(t *testing.T) {
} }
marshalTestToml := `title = 'TOML Marshal Testing' marshalTestToml := `title = 'TOML Marshal Testing'
[basic_lists] [basic_lists]
floats = [12.3, 45.6, 78.9] floats = [12.3, 45.6, 78.9]
bools = [true, false, true] bools = [true, false, true]
@@ -89,7 +90,6 @@ name = 'Second'
[subdoc.first] [subdoc.first]
name = 'First' name = 'First'
[basic] [basic]
uint = 5001 uint = 5001
bool = true bool = true
@@ -101,9 +101,9 @@ date = 1979-05-27T07:32:00Z
[[subdoclist]] [[subdoclist]]
name = 'List.First' name = 'List.First'
[[subdoclist]] [[subdoclist]]
name = 'List.Second' name = 'List.Second'
` `
result, err := toml.Marshal(docData) result, err := toml.Marshal(docData)
@@ -117,14 +117,15 @@ func TestBasicMarshalQuotedKey(t *testing.T) {
expected := `'Z.string-àéù' = 'Hello' expected := `'Z.string-àéù' = 'Hello'
'Yfloat-𝟘' = 3.5 'Yfloat-𝟘' = 3.5
['Xsubdoc-àéù'] ['Xsubdoc-àéù']
String2 = 'One' String2 = 'One'
[['W.sublist-𝟘']] [['W.sublist-𝟘']]
String2 = 'Two' String2 = 'Two'
[['W.sublist-𝟘']] [['W.sublist-𝟘']]
String2 = 'Three' String2 = 'Three'
` `
require.Equal(t, string(expected), string(result)) require.Equal(t, string(expected), string(result))
@@ -159,8 +160,8 @@ bool = false
int = 0 int = 0
string = '' string = ''
stringlist = [] stringlist = []
[map]
[map]
` `
require.Equal(t, string(expected), string(result)) require.Equal(t, string(expected), string(result))
@@ -151,6 +151,7 @@ type quotedKeyMarshalTestStruct struct {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{ var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
String: "Hello", String: "Hello",
@@ -160,6 +161,7 @@ var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5 var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5
"Z.string-àéù" = "Hello" "Z.string-àéù" = "Hello"
@@ -272,6 +274,7 @@ var docData = testDoc{
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var mapTestDoc = testMapDoc{ var mapTestDoc = testMapDoc{
Title: "TOML Marshal Testing", Title: "TOML Marshal Testing",
@@ -559,10 +562,12 @@ func (c customMarshaler) MarshalTOML() ([]byte, error) {
var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"} var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var customMarshalerToml = []byte(`Sally Fields`) var customMarshalerToml = []byte(`Sally Fields`)
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var nestedCustomMarshalerData = customMarshalerParent{ var nestedCustomMarshalerData = customMarshalerParent{
Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"}, Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
@@ -570,6 +575,7 @@ var nestedCustomMarshalerData = customMarshalerParent{
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"] var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
me = "Maiku Suteda" me = "Maiku Suteda"
@@ -611,6 +617,7 @@ func TestUnmarshalTextMarshaler(t *testing.T) {
} }
// TODO: Remove nolint once type and methods are used by a test // TODO: Remove nolint once type and methods are used by a test
//
//nolint:unused //nolint:unused
type precedentMarshaler struct { type precedentMarshaler struct {
FirstName string FirstName string
@@ -629,6 +636,7 @@ func (m precedentMarshaler) MarshalTOML() ([]byte, error) {
} }
// TODO: Remove nolint once type and method are used by a test // TODO: Remove nolint once type and method are used by a test
//
//nolint:unused //nolint:unused
type customPointerMarshaler struct { type customPointerMarshaler struct {
FirstName string FirstName string
@@ -641,6 +649,7 @@ func (m *customPointerMarshaler) MarshalTOML() ([]byte, error) {
} }
// TODO: Remove nolint once type and method are used by a test // TODO: Remove nolint once type and method are used by a test
//
//nolint:unused //nolint:unused
type textPointerMarshaler struct { type textPointerMarshaler struct {
FirstName string FirstName string
@@ -653,6 +662,7 @@ func (m *textPointerMarshaler) MarshalText() ([]byte, error) {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var commentTestToml = []byte(` var commentTestToml = []byte(`
# it's a comment on type # it's a comment on type
@@ -690,6 +700,7 @@ type mapsTestStruct struct {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var mapsTestData = mapsTestStruct{ var mapsTestData = mapsTestStruct{
Simple: map[string]string{ Simple: map[string]string{
@@ -713,6 +724,7 @@ var mapsTestData = mapsTestStruct{
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var mapsTestToml = []byte(` var mapsTestToml = []byte(`
[Other] [Other]
@@ -735,6 +747,7 @@ var mapsTestToml = []byte(`
`) `)
// TODO: Remove nolint once type is used by a test // TODO: Remove nolint once type is used by a test
//
//nolint:deadcode,unused //nolint:deadcode,unused
type structArrayNoTag struct { type structArrayNoTag struct {
A struct { A struct {
@@ -744,6 +757,7 @@ type structArrayNoTag struct {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var customTagTestToml = []byte(` var customTagTestToml = []byte(`
[postgres] [postgres]
@@ -758,6 +772,7 @@ var customTagTestToml = []byte(`
`) `)
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var customCommentTagTestToml = []byte(` var customCommentTagTestToml = []byte(`
# db connection # db connection
@@ -771,6 +786,7 @@ var customCommentTagTestToml = []byte(`
`) `)
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var customCommentedTagTestToml = []byte(` var customCommentedTagTestToml = []byte(`
[postgres] [postgres]
@@ -825,6 +841,7 @@ func TestUnmarshalTabInStringAndQuotedKey(t *testing.T) {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var customMultilineTagTestToml = []byte(`int_slice = [ var customMultilineTagTestToml = []byte(`int_slice = [
1, 1,
@@ -834,6 +851,7 @@ var customMultilineTagTestToml = []byte(`int_slice = [
`) `)
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var testDocBasicToml = []byte(` var testDocBasicToml = []byte(`
[document] [document]
@@ -846,12 +864,14 @@ var testDocBasicToml = []byte(`
`) `)
// TODO: Remove nolint once type is used by a test // TODO: Remove nolint once type is used by a test
//
//nolint:deadcode //nolint:deadcode
type testDocCustomTag struct { type testDocCustomTag struct {
Doc testDocBasicsCustomTag `file:"document"` Doc testDocBasicsCustomTag `file:"document"`
} }
// TODO: Remove nolint once type is used by a test // TODO: Remove nolint once type is used by a test
//
//nolint:deadcode //nolint:deadcode
type testDocBasicsCustomTag struct { type testDocBasicsCustomTag struct {
Bool bool `file:"bool_val"` Bool bool `file:"bool_val"`
@@ -864,6 +884,7 @@ type testDocBasicsCustomTag struct {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,varcheck //nolint:deadcode,varcheck
var testDocCustomTagData = testDocCustomTag{ var testDocCustomTagData = testDocCustomTag{
Doc: testDocBasicsCustomTag{ Doc: testDocBasicsCustomTag{
@@ -966,6 +987,7 @@ func TestUnmarshalInvalidPointerKind(t *testing.T) {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused //nolint:deadcode,unused
type testDuration struct { type testDuration struct {
Nanosec time.Duration `toml:"nanosec"` Nanosec time.Duration `toml:"nanosec"`
@@ -980,6 +1002,7 @@ type testDuration struct {
} }
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var testDurationToml = []byte(` var testDurationToml = []byte(`
nanosec = "1ns" nanosec = "1ns"
@@ -994,6 +1017,7 @@ a_string = "15s"
`) `)
// TODO: Remove nolint once var is used by a test // TODO: Remove nolint once var is used by a test
//
//nolint:deadcode,unused,varcheck //nolint:deadcode,unused,varcheck
var testDurationToml2 = []byte(`a_string = "15s" var testDurationToml2 = []byte(`a_string = "15s"
hour = "1h0m0s" hour = "1h0m0s"
@@ -1007,6 +1031,7 @@ sec = "1s"
`) `)
// TODO: Remove nolint once type is used by a test // TODO: Remove nolint once type is used by a test
//
//nolint:deadcode,unused //nolint:deadcode,unused
type testBadDuration struct { type testBadDuration struct {
Val time.Duration `toml:"val"` Val time.Duration `toml:"val"`
+79 -12
View File
@@ -54,7 +54,7 @@ func NewEncoder(w io.Writer) *Encoder {
// This behavior can be controlled on an individual struct field basis with the // This behavior can be controlled on an individual struct field basis with the
// inline tag: // inline tag:
// //
// MyField `inline:"true"` // MyField `toml:",inline"`
func (enc *Encoder) SetTablesInline(inline bool) *Encoder { func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
enc.tablesInline = inline enc.tablesInline = inline
return enc return enc
@@ -65,7 +65,7 @@ func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
// //
// This behavior can be controlled on an individual struct field basis with the multiline tag: // This behavior can be controlled on an individual struct field basis with the multiline tag:
// //
// MyField `multiline:"true"` // MyField `multiline:"true"`
func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder { func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder {
enc.arraysMultiline = multiline enc.arraysMultiline = multiline
return enc return enc
@@ -89,7 +89,7 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
// //
// If v cannot be represented to TOML it returns an error. // If v cannot be represented to TOML it returns an error.
// //
// Encoding rules // # Encoding rules
// //
// A top level slice containing only maps or structs is encoded as [[table // A top level slice containing only maps or structs is encoded as [[table
// array]]. // array]].
@@ -117,7 +117,20 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
// When encoding structs, fields are encoded in order of definition, with their // When encoding structs, fields are encoded in order of definition, with their
// exact name. // exact name.
// //
// Struct tags // Tables and array tables are separated by empty lines. However, consecutive
// subtables definitions are not. For example:
//
// [top1]
//
// [top2]
// [top2.child1]
//
// [[array]]
//
// [[array]]
// [array.child2]
//
// # Struct tags
// //
// The encoding of each public struct field can be customized by the format // The encoding of each public struct field can be customized by the format
// string in the "toml" key of the struct field's tag. This follows // string in the "toml" key of the struct field's tag. This follows
@@ -333,13 +346,13 @@ func isNil(v reflect.Value) bool {
} }
} }
func shouldOmitEmpty(options valueOptions, v reflect.Value) bool {
return options.omitempty && isEmptyValue(v)
}
func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) { func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
var err error var err error
if (ctx.options.omitempty || options.omitempty) && isEmptyValue(v) {
return b, nil
}
if !ctx.inline { if !ctx.inline {
b = enc.encodeComment(ctx.indent, options.comment, b) b = enc.encodeComment(ctx.indent, options.comment, b)
} }
@@ -365,6 +378,8 @@ func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v r
func isEmptyValue(v reflect.Value) bool { func isEmptyValue(v reflect.Value) bool {
switch v.Kind() { switch v.Kind() {
case reflect.Struct:
return isEmptyStruct(v)
case reflect.Array, reflect.Map, reflect.Slice, reflect.String: case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0 return v.Len() == 0
case reflect.Bool: case reflect.Bool:
@@ -381,6 +396,34 @@ func isEmptyValue(v reflect.Value) bool {
return false return false
} }
func isEmptyStruct(v reflect.Value) bool {
// TODO: merge with walkStruct and cache.
typ := v.Type()
for i := 0; i < typ.NumField(); i++ {
fieldType := typ.Field(i)
// only consider exported fields
if fieldType.PkgPath != "" {
continue
}
tag := fieldType.Tag.Get("toml")
// special field name to skip field
if tag == "-" {
continue
}
f := v.Field(i)
if !isEmptyValue(f) {
return false
}
}
return true
}
const literalQuote = '\'' const literalQuote = '\''
func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte { func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte {
@@ -410,7 +453,6 @@ func (enc *Encoder) encodeLiteralString(b []byte, v string) []byte {
return b return b
} }
//nolint:cyclop
func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte { func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte {
stringQuote := `"` stringQuote := `"`
@@ -757,7 +799,13 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
} }
ctx.skipTableHeader = false ctx.skipTableHeader = false
hasNonEmptyKV := false
for _, kv := range t.kvs { for _, kv := range t.kvs {
if shouldOmitEmpty(kv.Options, kv.Value) {
continue
}
hasNonEmptyKV = true
ctx.setKey(kv.Key) ctx.setKey(kv.Key)
b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value) b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value)
@@ -768,7 +816,20 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
b = append(b, '\n') b = append(b, '\n')
} }
first := true
for _, table := range t.tables { for _, table := range t.tables {
if shouldOmitEmpty(table.Options, table.Value) {
continue
}
if first {
first = false
if hasNonEmptyKV {
b = append(b, '\n')
}
} else {
b = append(b, "\n"...)
}
ctx.setKey(table.Key) ctx.setKey(table.Key)
ctx.options = table.Options ctx.options = table.Options
@@ -777,8 +838,6 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
if err != nil { if err != nil {
return nil, err return nil, err
} }
b = append(b, '\n')
} }
return b, nil return b, nil
@@ -791,6 +850,10 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
first := true first := true
for _, kv := range t.kvs { for _, kv := range t.kvs {
if shouldOmitEmpty(kv.Options, kv.Value) {
continue
}
if first { if first {
first = false first = false
} else { } else {
@@ -806,7 +869,7 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
} }
if len(t.tables) > 0 { if len(t.tables) > 0 {
panic("inline table cannot contain nested tables, online key-values") panic("inline table cannot contain nested tables, only key-values")
} }
b = append(b, "}"...) b = append(b, "}"...)
@@ -905,6 +968,10 @@ func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.
b = enc.encodeComment(ctx.indent, ctx.options.comment, b) b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
for i := 0; i < v.Len(); i++ { for i := 0; i < v.Len(); i++ {
if i != 0 {
b = append(b, "\n"...)
}
b = append(b, scratch...) b = append(b, scratch...)
var err error var err error
+168 -93
View File
@@ -39,21 +39,21 @@ func TestMarshal(t *testing.T) {
v: map[string]string{ v: map[string]string{
"hello": "world", "hello": "world",
}, },
expected: "hello = 'world'", expected: "hello = 'world'\n",
}, },
{ {
desc: "map with new line in key", desc: "map with new line in key",
v: map[string]string{ v: map[string]string{
"hel\nlo": "world", "hel\nlo": "world",
}, },
expected: `"hel\nlo" = 'world'`, expected: "\"hel\\nlo\" = 'world'\n",
}, },
{ {
desc: `map with " in key`, desc: `map with " in key`,
v: map[string]string{ v: map[string]string{
`hel"lo`: "world", `hel"lo`: "world",
}, },
expected: `'hel"lo' = 'world'`, expected: "'hel\"lo' = 'world'\n",
}, },
{ {
desc: "map in map and string", desc: "map in map and string",
@@ -62,9 +62,9 @@ func TestMarshal(t *testing.T) {
"hello": "world", "hello": "world",
}, },
}, },
expected: ` expected: `[table]
[table] hello = 'world'
hello = 'world'`, `,
}, },
{ {
desc: "map in map in map and string", desc: "map in map in map and string",
@@ -75,10 +75,10 @@ hello = 'world'`,
}, },
}, },
}, },
expected: ` expected: `[this]
[this]
[this.is] [this.is]
a = 'test'`, a = 'test'
`,
}, },
{ {
desc: "map in map in map and string with values", desc: "map in map in map and string with values",
@@ -90,18 +90,20 @@ a = 'test'`,
"also": "that", "also": "that",
}, },
}, },
expected: ` expected: `[this]
[this]
also = 'that' also = 'that'
[this.is] [this.is]
a = 'test'`, a = 'test'
`,
}, },
{ {
desc: "simple string array", desc: "simple string array",
v: map[string][]string{ v: map[string][]string{
"array": {"one", "two", "three"}, "array": {"one", "two", "three"},
}, },
expected: `array = ['one', 'two', 'three']`, expected: `array = ['one', 'two', 'three']
`,
}, },
{ {
desc: "empty string array", desc: "empty string array",
@@ -118,14 +120,16 @@ a = 'test'`,
v: map[string][][]string{ v: map[string][][]string{
"array": {{"one", "two"}, {"three"}}, "array": {{"one", "two"}, {"three"}},
}, },
expected: `array = [['one', 'two'], ['three']]`, expected: `array = [['one', 'two'], ['three']]
`,
}, },
{ {
desc: "mixed strings and nested string arrays", desc: "mixed strings and nested string arrays",
v: map[string][]interface{}{ v: map[string][]interface{}{
"array": {"a string", []string{"one", "two"}, "last"}, "array": {"a string", []string{"one", "two"}, "last"},
}, },
expected: `array = ['a string', ['one', 'two'], 'last']`, expected: `array = ['a string', ['one', 'two'], 'last']
`,
}, },
{ {
desc: "array of maps", desc: "array of maps",
@@ -135,9 +139,9 @@ a = 'test'`,
{"map2.1": "v2.1"}, {"map2.1": "v2.1"},
}, },
}, },
expected: ` expected: `[[top]]
[[top]]
'map1.1' = 'v1.1' 'map1.1' = 'v1.1'
[[top]] [[top]]
'map2.1' = 'v2.1' 'map2.1' = 'v2.1'
`, `,
@@ -148,9 +152,9 @@ a = 'test'`,
"key1": "value1", "key1": "value1",
"key2": "value2", "key2": "value2",
}, },
expected: ` expected: `key1 = 'value1'
key1 = 'value1' key2 = 'value2'
key2 = 'value2'`, `,
}, },
{ {
desc: "simple struct", desc: "simple struct",
@@ -159,7 +163,8 @@ key2 = 'value2'`,
}{ }{
A: "foo", A: "foo",
}, },
expected: `A = 'foo'`, expected: `A = 'foo'
`,
}, },
{ {
desc: "one level of structs within structs", desc: "one level of structs within structs",
@@ -174,8 +179,7 @@ key2 = 'value2'`,
K2: "v2", K2: "v2",
}, },
}, },
expected: ` expected: `[A]
[A]
K1 = 'v1' K1 = 'v1'
K2 = 'v2' K2 = 'v2'
`, `,
@@ -190,10 +194,10 @@ K2 = 'v2'
}, },
}, },
}, },
expected: ` expected: `[root]
[root]
[[root.nested]] [[root.nested]]
name = 'Bob' name = 'Bob'
[[root.nested]] [[root.nested]]
name = 'Alice' name = 'Alice'
`, `,
@@ -203,49 +207,53 @@ name = 'Alice'
v: map[string]interface{}{ v: map[string]interface{}{
"a": "'\b\f\r\t\"\\", "a": "'\b\f\r\t\"\\",
}, },
expected: `a = "'\b\f\r\t\"\\"`, expected: `a = "'\b\f\r\t\"\\"
`,
}, },
{ {
desc: "string utf8 low", desc: "string utf8 low",
v: map[string]interface{}{ v: map[string]interface{}{
"a": "'Ę", "a": "'Ę",
}, },
expected: `a = "'Ę"`, expected: `a = "'Ę"
`,
}, },
{ {
desc: "string utf8 low 2", desc: "string utf8 low 2",
v: map[string]interface{}{ v: map[string]interface{}{
"a": "'\u10A85", "a": "'\u10A85",
}, },
expected: "a = \"'\u10A85\"", expected: "a = \"'\u10A85\"\n",
}, },
{ {
desc: "string utf8 low 2", desc: "string utf8 low 2",
v: map[string]interface{}{ v: map[string]interface{}{
"a": "'\u10A85", "a": "'\u10A85",
}, },
expected: "a = \"'\u10A85\"", expected: "a = \"'\u10A85\"\n",
}, },
{ {
desc: "emoji", desc: "emoji",
v: map[string]interface{}{ v: map[string]interface{}{
"a": "'😀", "a": "'😀",
}, },
expected: "a = \"'😀\"", expected: "a = \"'😀\"\n",
}, },
{ {
desc: "control char", desc: "control char",
v: map[string]interface{}{ v: map[string]interface{}{
"a": "'\u001A", "a": "'\u001A",
}, },
expected: `a = "'\u001A"`, expected: `a = "'\u001A"
`,
}, },
{ {
desc: "multi-line string", desc: "multi-line string",
v: map[string]interface{}{ v: map[string]interface{}{
"a": "hello\nworld", "a": "hello\nworld",
}, },
expected: `a = "hello\nworld"`, expected: `a = "hello\nworld"
`,
}, },
{ {
desc: "multi-line forced", desc: "multi-line forced",
@@ -256,7 +264,8 @@ name = 'Alice'
}, },
expected: `A = """ expected: `A = """
hello hello
world"""`, world"""
`,
}, },
{ {
desc: "inline field", desc: "inline field",
@@ -271,8 +280,8 @@ world"""`,
"isinline": "no", "isinline": "no",
}, },
}, },
expected: ` expected: `A = {isinline = 'yes'}
A = {isinline = 'yes'}
[B] [B]
isinline = 'no' isinline = 'no'
`, `,
@@ -286,8 +295,7 @@ isinline = 'no'
A: []int{1, 2, 3, 4}, A: []int{1, 2, 3, 4},
B: []int{1, 2, 3, 4}, B: []int{1, 2, 3, 4},
}, },
expected: ` expected: `A = [
A = [
1, 1,
2, 2,
3, 3,
@@ -303,8 +311,7 @@ B = [1, 2, 3, 4]
}{ }{
A: [][]int{{1, 2}, {3, 4}}, A: [][]int{{1, 2}, {3, 4}},
}, },
expected: ` expected: `A = [
A = [
[1, 2], [1, 2],
[3, 4] [3, 4]
] ]
@@ -329,7 +336,8 @@ A = [
}{ }{
A: []*int{nil}, A: []*int{nil},
}, },
expected: `A = [0]`, expected: `A = [0]
`,
}, },
{ {
desc: "nil pointer in slice uses zero value", desc: "nil pointer in slice uses zero value",
@@ -338,7 +346,8 @@ A = [
}{ }{
A: []*int{nil}, A: []*int{nil},
}, },
expected: `A = [0]`, expected: `A = [0]
`,
}, },
{ {
desc: "pointer in slice", desc: "pointer in slice",
@@ -347,7 +356,8 @@ A = [
}{ }{
A: []*int{&someInt}, A: []*int{&someInt},
}, },
expected: `A = [42]`, expected: `A = [42]
`,
}, },
{ {
desc: "inline table in inline table", desc: "inline table in inline table",
@@ -358,23 +368,25 @@ A = [
}, },
}, },
}, },
expected: `A = {A = {A = 'hello'}}`, expected: `A = {A = {A = 'hello'}}
`,
}, },
{ {
desc: "empty slice in map", desc: "empty slice in map",
v: map[string][]string{ v: map[string][]string{
"a": {}, "a": {},
}, },
expected: `a = []`, expected: `a = []
`,
}, },
{ {
desc: "map in slice", desc: "map in slice",
v: map[string][]map[string]string{ v: map[string][]map[string]string{
"a": {{"hello": "world"}}, "a": {{"hello": "world"}},
}, },
expected: ` expected: `[[a]]
[[a]] hello = 'world'
hello = 'world'`, `,
}, },
{ {
desc: "newline in map in slice", desc: "newline in map in slice",
@@ -382,7 +394,8 @@ hello = 'world'`,
"a\n": {{"hello": "world"}}, "a\n": {{"hello": "world"}},
}, },
expected: `[["a\n"]] expected: `[["a\n"]]
hello = 'world'`, hello = 'world'
`,
}, },
{ {
desc: "newline in map in slice", desc: "newline in map in slice",
@@ -398,7 +411,8 @@ hello = 'world'`,
}{ }{
A: []struct{}{}, A: []struct{}{},
}, },
expected: `A = []`, expected: `A = []
`,
}, },
{ {
desc: "nil field is ignored", desc: "nil field is ignored",
@@ -418,7 +432,8 @@ hello = 'world'`,
Public: "shown", Public: "shown",
private: "hidden", private: "hidden",
}, },
expected: `Public = 'shown'`, expected: `Public = 'shown'
`,
}, },
{ {
desc: "fields tagged - are ignored", desc: "fields tagged - are ignored",
@@ -442,7 +457,8 @@ hello = 'world'`,
v: map[string]interface{}{ v: map[string]interface{}{
"hello\nworld": 42, "hello\nworld": 42,
}, },
expected: `"hello\nworld" = 42`, expected: `"hello\nworld" = 42
`,
}, },
{ {
desc: "new line in parent of nested table key", desc: "new line in parent of nested table key",
@@ -452,7 +468,8 @@ hello = 'world'`,
}, },
}, },
expected: `["hello\nworld"] expected: `["hello\nworld"]
inner = 42`, inner = 42
`,
}, },
{ {
desc: "new line in nested table key", desc: "new line in nested table key",
@@ -465,7 +482,8 @@ inner = 42`,
}, },
expected: `[parent] expected: `[parent]
[parent."in\ner"] [parent."in\ner"]
foo = 42`, foo = 42
`,
}, },
{ {
desc: "invalid map key", desc: "invalid map key",
@@ -488,7 +506,8 @@ foo = 42`,
}{ }{
T: time.Time{}, T: time.Time{},
}, },
expected: `T = 0001-01-01T00:00:00Z`, expected: `T = 0001-01-01T00:00:00Z
`,
}, },
{ {
desc: "time nano", desc: "time nano",
@@ -497,7 +516,8 @@ foo = 42`,
}{ }{
T: time.Date(1979, time.May, 27, 0, 32, 0, 999999000, time.UTC), T: time.Date(1979, time.May, 27, 0, 32, 0, 999999000, time.UTC),
}, },
expected: `T = 1979-05-27T00:32:00.999999Z`, expected: `T = 1979-05-27T00:32:00.999999Z
`,
}, },
{ {
desc: "bool", desc: "bool",
@@ -508,9 +528,9 @@ foo = 42`,
A: false, A: false,
B: true, B: true,
}, },
expected: ` expected: `A = false
A = false B = true
B = true`, `,
}, },
{ {
desc: "numbers", desc: "numbers",
@@ -541,8 +561,7 @@ B = true`,
K: 42, K: 42,
L: 2.2, L: 2.2,
}, },
expected: ` expected: `A = 1.1
A = 1.1
B = 42 B = 42
C = 42 C = 42
D = 42 D = 42
@@ -553,7 +572,8 @@ H = 42
I = 42 I = 42
J = 42 J = 42
K = 42 K = 42
L = 2.2`, L = 2.2
`,
}, },
{ {
desc: "comments", desc: "comments",
@@ -566,8 +586,7 @@ L = 2.2`,
Three: []int{1, 2, 3}, Three: []int{1, 2, 3},
}, },
}, },
expected: ` expected: `# Before table
# Before table
[Table] [Table]
One = 1 One = 1
# Before kv # Before kv
@@ -589,7 +608,7 @@ Three = [1, 2, 3]
} }
require.NoError(t, err) require.NoError(t, err)
equalStringsIgnoreNewlines(t, e.expected, string(b)) assert.Equal(t, e.expected, string(b))
// make sure the output is always valid TOML // make sure the output is always valid TOML
defaultMap := map[string]interface{}{} defaultMap := map[string]interface{}{}
@@ -664,12 +683,6 @@ func testWithFlags(t *testing.T, flags int, setters flagsSetters, testfn func(t
} }
} }
func equalStringsIgnoreNewlines(t *testing.T, expected string, actual string) {
t.Helper()
cutset := "\n"
assert.Equal(t, strings.Trim(expected, cutset), strings.Trim(actual, cutset))
}
func TestMarshalFloats(t *testing.T) { func TestMarshalFloats(t *testing.T) {
v := map[string]float32{ v := map[string]float32{
"nan": float32(math.NaN()), "nan": float32(math.NaN()),
@@ -709,7 +722,8 @@ func TestMarshalIndentTables(t *testing.T) {
v: map[string]interface{}{ v: map[string]interface{}{
"foo": "bar", "foo": "bar",
}, },
expected: `foo = 'bar'`, expected: `foo = 'bar'
`,
}, },
{ {
desc: "one level table", desc: "one level table",
@@ -719,8 +733,7 @@ func TestMarshalIndentTables(t *testing.T) {
"two": "value2", "two": "value2",
}, },
}, },
expected: ` expected: `[foo]
[foo]
one = 'value1' one = 'value1'
two = 'value2' two = 'value2'
`, `,
@@ -736,10 +749,11 @@ func TestMarshalIndentTables(t *testing.T) {
}, },
}, },
}, },
expected: ` expected: `root = 'value0'
root = 'value0'
[level1] [level1]
one = 'value1' one = 'value1'
[level1.level2] [level1.level2]
two = 'value2' two = 'value2'
`, `,
@@ -754,7 +768,7 @@ root = 'value0'
enc.SetIndentTables(true) enc.SetIndentTables(true)
err := enc.Encode(e.v) err := enc.Encode(e.v)
require.NoError(t, err) require.NoError(t, err)
equalStringsIgnoreNewlines(t, e.expected, buf.String()) assert.Equal(t, e.expected, buf.String())
}) })
} }
} }
@@ -799,7 +813,7 @@ func TestMarshalTextMarshaler(t *testing.T) {
m := map[string]interface{}{"a": &customTextMarshaler{value: 2}} m := map[string]interface{}{"a": &customTextMarshaler{value: 2}}
r, err := toml.Marshal(m) r, err := toml.Marshal(m)
require.NoError(t, err) require.NoError(t, err)
equalStringsIgnoreNewlines(t, "a = '::2'", string(r)) assert.Equal(t, "a = '::2'\n", string(r))
} }
type brokenWriter struct{} type brokenWriter struct{}
@@ -822,10 +836,10 @@ func TestEncoderSetIndentSymbol(t *testing.T) {
enc.SetIndentSymbol(">>>") enc.SetIndentSymbol(">>>")
err := enc.Encode(map[string]map[string]string{"parent": {"hello": "world"}}) err := enc.Encode(map[string]map[string]string{"parent": {"hello": "world"}})
require.NoError(t, err) require.NoError(t, err)
expected := ` expected := `[parent]
[parent] >>>hello = 'world'
>>>hello = 'world'` `
equalStringsIgnoreNewlines(t, expected, w.String()) assert.Equal(t, expected, w.String())
} }
func TestEncoderOmitempty(t *testing.T) { func TestEncoderOmitempty(t *testing.T) {
@@ -856,9 +870,9 @@ func TestEncoderOmitempty(t *testing.T) {
b, err := toml.Marshal(d) b, err := toml.Marshal(d)
require.NoError(t, err) require.NoError(t, err)
expected := `[Struct]` expected := ``
equalStringsIgnoreNewlines(t, expected, string(b)) assert.Equal(t, expected, string(b))
} }
func TestEncoderTagFieldName(t *testing.T) { func TestEncoderTagFieldName(t *testing.T) {
@@ -873,13 +887,12 @@ func TestEncoderTagFieldName(t *testing.T) {
b, err := toml.Marshal(d) b, err := toml.Marshal(d)
require.NoError(t, err) require.NoError(t, err)
expected := ` expected := `hello = 'world'
hello = 'world'
'#' = '' '#' = ''
Bad = '' Bad = ''
` `
equalStringsIgnoreNewlines(t, expected, string(b)) assert.Equal(t, expected, string(b))
} }
func TestIssue436(t *testing.T) { func TestIssue436(t *testing.T) {
@@ -893,12 +906,11 @@ func TestIssue436(t *testing.T) {
err = toml.NewEncoder(&buf).Encode(v) err = toml.NewEncoder(&buf).Encode(v)
require.NoError(t, err) require.NoError(t, err)
expected := ` expected := `[[a]]
[[a]]
[a.b] [a.b]
c = 'd' c = 'd'
` `
equalStringsIgnoreNewlines(t, expected, buf.String()) assert.Equal(t, expected, buf.String())
} }
func TestIssue424(t *testing.T) { func TestIssue424(t *testing.T) {
@@ -980,7 +992,7 @@ func TestIssue678(t *testing.T) {
out, err := toml.Marshal(cfg) out, err := toml.Marshal(cfg)
require.NoError(t, err) require.NoError(t, err)
equalStringsIgnoreNewlines(t, "BigInt = '123'", string(out)) assert.Equal(t, "BigInt = '123'\n", string(out))
cfg2 := &Config{} cfg2 := &Config{}
err = toml.Unmarshal(out, cfg2) err = toml.Unmarshal(out, cfg2)
@@ -1020,6 +1032,69 @@ Name = ''
require.Equal(t, expected, string(out)) require.Equal(t, expected, string(out))
} }
func TestIssue786(t *testing.T) {
type Dependencies struct {
Dependencies []string `toml:"dependencies,multiline,omitempty"`
BuildDependencies []string `toml:"buildDependencies,multiline,omitempty"`
OptionalDependencies []string `toml:"optionalDependencies,multiline,omitempty"`
}
type Test struct {
Dependencies Dependencies `toml:"dependencies,omitempty"`
}
x := Test{}
b, err := toml.Marshal(x)
require.NoError(t, err)
require.Equal(t, "", string(b))
type General struct {
From string `toml:"from,omitempty" json:"from,omitempty" comment:"from in graphite-web format, the local TZ is used"`
Randomize bool `toml:"randomize" json:"randomize" comment:"randomize starting time with [0,step)"`
}
type Custom struct {
Name string `toml:"name" json:"name,omitempty" comment:"names for generator, braces are expanded like in shell"`
Type string `toml:"type,omitempty" json:"type,omitempty" comment:"type of generator"`
General
}
type Config struct {
General
Custom []Custom `toml:"custom,omitempty" json:"custom,omitempty" comment:"generators with custom parameters can be specified separately"`
}
buf := new(bytes.Buffer)
config := &Config{General: General{From: "-2d", Randomize: true}}
config.Custom = []Custom{{Name: "omit", General: General{Randomize: false}}}
config.Custom = append(config.Custom, Custom{Name: "present", General: General{From: "-2d", Randomize: true}})
encoder := toml.NewEncoder(buf)
encoder.Encode(config)
expected := `# from in graphite-web format, the local TZ is used
from = '-2d'
# randomize starting time with [0,step)
randomize = true
# generators with custom parameters can be specified separately
[[custom]]
# names for generator, braces are expanded like in shell
name = 'omit'
# randomize starting time with [0,step)
randomize = false
[[custom]]
# names for generator, braces are expanded like in shell
name = 'present'
# from in graphite-web format, the local TZ is used
from = '-2d'
# randomize starting time with [0,step)
randomize = true
`
require.Equal(t, expected, buf.String())
}
func TestMarshalNestedAnonymousStructs(t *testing.T) { func TestMarshalNestedAnonymousStructs(t *testing.T) {
type Embedded struct { type Embedded struct {
Value string `toml:"value" json:"value"` Value string `toml:"value" json:"value"`
@@ -1041,6 +1116,7 @@ func TestMarshalNestedAnonymousStructs(t *testing.T) {
} }
expected := `value = '' expected := `value = ''
[top] [top]
value = '' value = ''
@@ -1049,7 +1125,6 @@ value = ''
[anonymous] [anonymous]
value = '' value = ''
` `
result, err := toml.Marshal(doc) result, err := toml.Marshal(doc)
@@ -1073,9 +1148,9 @@ func TestMarshalNestedAnonymousStructs_DuplicateField(t *testing.T) {
doc.Value = "shadows" doc.Value = "shadows"
expected := `value = 'shadows' expected := `value = 'shadows'
[top] [top]
value = '' value = ''
` `
result, err := toml.Marshal(doc) result, err := toml.Marshal(doc)
@@ -1086,7 +1161,7 @@ value = ''
func TestLocalTime(t *testing.T) { func TestLocalTime(t *testing.T) {
v := map[string]toml.LocalTime{ v := map[string]toml.LocalTime{
"a": toml.LocalTime{ "a": {
Hour: 1, Hour: 1,
Minute: 2, Minute: 2,
Second: 3, Second: 3,
+84 -43
View File
@@ -28,6 +28,11 @@ func (p *parser) Raw(raw ast.Range) []byte {
return p.data[raw.Offset : raw.Offset+raw.Length] return p.data[raw.Offset : raw.Offset+raw.Length]
} }
func (p *parser) SetRaw(ref ast.Reference, from []byte, to []byte) {
b := danger.BytesRange(from, to)
p.builder.NodeAt(ref).Raw = p.Range(b)
}
func (p *parser) Reset(b []byte) { func (p *parser) Reset(b []byte) {
p.builder.Reset() p.builder.Reset()
p.ref = ast.InvalidReference p.ref = ast.InvalidReference
@@ -152,12 +157,14 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
ref := p.builder.Push(ast.Node{ ref := p.builder.Push(ast.Node{
Kind: ast.ArrayTable, Kind: ast.ArrayTable,
}) })
start := b
b = b[2:] b = b[2:]
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
k, b, err := p.parseKey(b) k, b, err := p.parseKey(b)
if err != nil { if err != nil {
p.SetRaw(ref, start, b)
return ref, nil, err return ref, nil, err
} }
@@ -166,11 +173,12 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
b, err = expect(']', b) b, err = expect(']', b)
if err != nil { if err != nil {
p.SetRaw(ref, start, b)
return ref, nil, err return ref, nil, err
} }
b, err = expect(']', b) b, err = expect(']', b)
p.SetRaw(ref, start, b)
return ref, b, err return ref, b, err
} }
@@ -181,12 +189,14 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
ref := p.builder.Push(ast.Node{ ref := p.builder.Push(ast.Node{
Kind: ast.Table, Kind: ast.Table,
}) })
start := b
b = b[1:] b = b[1:]
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
key, b, err := p.parseKey(b) key, b, err := p.parseKey(b)
if err != nil { if err != nil {
p.SetRaw(ref, start, b)
return ref, nil, err return ref, nil, err
} }
@@ -196,6 +206,7 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
b, err = expect(']', b) b, err = expect(']', b)
p.SetRaw(ref, start, b)
return ref, b, err return ref, b, err
} }
@@ -204,10 +215,12 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
ref := p.builder.Push(ast.Node{ ref := p.builder.Push(ast.Node{
Kind: ast.KeyValue, Kind: ast.KeyValue,
}) })
start := b
key, b, err := p.parseKey(b) key, b, err := p.parseKey(b)
if err != nil { if err != nil {
return ast.InvalidReference, nil, err p.SetRaw(ref, start, b)
return ast.InvalidReference, b, err
} }
// keyval-sep = ws %x3D ws ; = // keyval-sep = ws %x3D ws ; =
@@ -215,24 +228,28 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
if len(b) == 0 { if len(b) == 0 {
return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there") p.SetRaw(ref, start, b)
return ast.InvalidReference, b, newDecodeError(b, "expected = after a key, but the document ends there")
} }
b, err = expect('=', b) b, err = expect('=', b)
if err != nil { if err != nil {
return ast.InvalidReference, nil, err p.SetRaw(ref, start, b)
return ast.InvalidReference, b, err
} }
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
valRef, b, err := p.parseVal(b) valRef, b, err := p.parseVal(b)
if err != nil { if err != nil {
p.SetRaw(ref, start, b)
return ref, b, err return ref, b, err
} }
p.builder.Chain(valRef, key) p.builder.Chain(valRef, key)
p.builder.AttachChild(ref, valRef) p.builder.AttachChild(ref, valRef)
p.SetRaw(ref, start, b)
return ref, b, err return ref, b, err
} }
@@ -242,7 +259,7 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
ref := ast.InvalidReference ref := ast.InvalidReference
if len(b) == 0 { if len(b) == 0 {
return ref, nil, newDecodeError(b, "expected value, not eof") return ref, b, newDecodeError(b, "expected value, not eof")
} }
var err error var err error
@@ -287,23 +304,25 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
return ref, b, err return ref, b, err
case 't': case 't':
if !scanFollowsTrue(b) { if !scanFollowsTrue(b) {
return ref, nil, newDecodeError(atmost(b, 4), "expected 'true'") return ref, b, newDecodeError(atmost(b, 4), "expected 'true'")
} }
ref = p.builder.Push(ast.Node{ ref = p.builder.Push(ast.Node{
Kind: ast.Bool, Kind: ast.Bool,
Data: b[:4], Data: b[:4],
Raw: p.Range(b[:4]),
}) })
return ref, b[4:], nil return ref, b[4:], nil
case 'f': case 'f':
if !scanFollowsFalse(b) { if !scanFollowsFalse(b) {
return ref, nil, newDecodeError(atmost(b, 5), "expected 'false'") return ref, b, newDecodeError(atmost(b, 5), "expected 'false'")
} }
ref = p.builder.Push(ast.Node{ ref = p.builder.Push(ast.Node{
Kind: ast.Bool, Kind: ast.Bool,
Data: b[:5], Data: b[:5],
Raw: p.Range(b[:5]),
}) })
return ref, b[5:], nil return ref, b[5:], nil
@@ -327,7 +346,7 @@ func atmost(b []byte, n int) []byte {
func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) { func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
v, rest, err := scanLiteralString(b) v, rest, err := scanLiteralString(b)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
return v, v[1 : len(v)-1], rest, nil return v, v[1 : len(v)-1], rest, nil
@@ -342,6 +361,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
parent := p.builder.Push(ast.Node{ parent := p.builder.Push(ast.Node{
Kind: ast.InlineTable, Kind: ast.InlineTable,
}) })
start := b
first := true first := true
@@ -356,7 +376,8 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
if len(b) == 0 { if len(b) == 0 {
return parent, nil, newDecodeError(previousB[:1], "inline table is incomplete") p.SetRaw(parent, start, b)
return parent, b, newDecodeError(previousB[:1], "inline table is incomplete")
} }
if b[0] == '}' { if b[0] == '}' {
@@ -366,7 +387,8 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
if !first { if !first {
b, err = expect(',', b) b, err = expect(',', b)
if err != nil { if err != nil {
return parent, nil, err p.SetRaw(parent, start, b)
return parent, b, err
} }
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
} }
@@ -375,7 +397,8 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
kv, b, err = p.parseKeyval(b) kv, b, err = p.parseKeyval(b)
if err != nil { if err != nil {
return parent, nil, err p.SetRaw(parent, start, b)
return parent, b, err
} }
if first { if first {
@@ -390,6 +413,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
rest, err := expect('}', b) rest, err := expect('}', b)
p.SetRaw(parent, start, b)
return parent, rest, err return parent, rest, err
} }
@@ -403,6 +427,7 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
// array-sep = %x2C ; , Comma // array-sep = %x2C ; , Comma
// ws-comment-newline = *( wschar / [ comment ] newline ) // ws-comment-newline = *( wschar / [ comment ] newline )
arrayStart := b arrayStart := b
start := b
b = b[1:] b = b[1:]
parent := p.builder.Push(ast.Node{ parent := p.builder.Push(ast.Node{
@@ -417,11 +442,13 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
for len(b) > 0 { for len(b) > 0 {
b, err = p.parseOptionalWhitespaceCommentNewline(b) b, err = p.parseOptionalWhitespaceCommentNewline(b)
if err != nil { if err != nil {
return parent, nil, err p.SetRaw(parent, start, b)
return parent, b, err
} }
if len(b) == 0 { if len(b) == 0 {
return parent, nil, newDecodeError(arrayStart[:1], "array is incomplete") p.SetRaw(parent, start, b)
return parent, b, newDecodeError(arrayStart[:1], "array is incomplete")
} }
if b[0] == ']' { if b[0] == ']' {
@@ -430,16 +457,19 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
if b[0] == ',' { if b[0] == ',' {
if first { if first {
return parent, nil, newDecodeError(b[0:1], "array cannot start with comma") p.SetRaw(parent, start, b)
return parent, b, newDecodeError(b[0:1], "array cannot start with comma")
} }
b = b[1:] b = b[1:]
b, err = p.parseOptionalWhitespaceCommentNewline(b) b, err = p.parseOptionalWhitespaceCommentNewline(b)
if err != nil { if err != nil {
return parent, nil, err p.SetRaw(parent, start, b)
return parent, b, err
} }
} else if !first { } else if !first {
return parent, nil, newDecodeError(b[0:1], "array elements must be separated by commas") p.SetRaw(parent, start, b)
return parent, b, newDecodeError(b[0:1], "array elements must be separated by commas")
} }
// TOML allows trailing commas in arrays. // TOML allows trailing commas in arrays.
@@ -450,7 +480,8 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
var valueRef ast.Reference var valueRef ast.Reference
valueRef, b, err = p.parseVal(b) valueRef, b, err = p.parseVal(b)
if err != nil { if err != nil {
return parent, nil, err p.SetRaw(parent, start, b)
return parent, b, err
} }
if first { if first {
@@ -462,13 +493,16 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
b, err = p.parseOptionalWhitespaceCommentNewline(b) b, err = p.parseOptionalWhitespaceCommentNewline(b)
if err != nil { if err != nil {
return parent, nil, err p.SetRaw(parent, start, b)
return parent, b, err
} }
first = false first = false
} }
rest, err := expect(']', b) rest, err := expect(']', b)
p.SetRaw(parent, start, rest)
return parent, rest, err return parent, rest, err
} }
@@ -480,7 +514,7 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
if len(b) > 0 && b[0] == '#' { if len(b) > 0 && b[0] == '#' {
_, b, err = scanComment(b) _, b, err = scanComment(b)
if err != nil { if err != nil {
return nil, err return b, err
} }
} }
@@ -491,7 +525,7 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
if b[0] == '\n' || b[0] == '\r' { if b[0] == '\n' || b[0] == '\r' {
b, err = p.parseNewline(b) b, err = p.parseNewline(b)
if err != nil { if err != nil {
return nil, err return b, err
} }
} else { } else {
break break
@@ -504,7 +538,7 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) { func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
token, rest, err := scanMultilineLiteralString(b) token, rest, err := scanMultilineLiteralString(b)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
i := 3 i := 3
@@ -533,7 +567,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
// mlb-escaped-nl = escape ws newline *( wschar / newline ) // mlb-escaped-nl = escape ws newline *( wschar / newline )
token, escaped, rest, err := scanMultilineBasicString(b) token, escaped, rest, err := scanMultilineBasicString(b)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
i := 3 i := 3
@@ -555,7 +589,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
if verr.Zero() { if verr.Zero() {
return token, str, rest, nil return token, str, rest, nil
} }
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8") return nil, nil, rest, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
} }
var builder bytes.Buffer var builder bytes.Buffer
@@ -622,26 +656,26 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
case 'u': case 'u':
x, err := hexToRune(atmost(token[i+1:], 4), 4) x, err := hexToRune(atmost(token[i+1:], 4), 4)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
builder.WriteRune(x) builder.WriteRune(x)
i += 4 i += 4
case 'U': case 'U':
x, err := hexToRune(atmost(token[i+1:], 8), 8) x, err := hexToRune(atmost(token[i+1:], 8), 8)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
builder.WriteRune(x) builder.WriteRune(x)
i += 8 i += 8
default: default:
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c) return nil, nil, rest, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
} }
i++ i++
} else { } else {
size := utf8ValidNext(token[i:]) size := utf8ValidNext(token[i:])
if size == 0 { if size == 0 {
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c) return nil, nil, rest, newDecodeError(token[i:i+1], "invalid character %#U", c)
} }
builder.Write(token[i : i+size]) builder.Write(token[i : i+size])
i += size i += size
@@ -662,7 +696,7 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
// dot-sep = ws %x2E ws ; . Period // dot-sep = ws %x2E ws ; . Period
raw, key, b, err := p.parseSimpleKey(b) raw, key, b, err := p.parseSimpleKey(b)
if err != nil { if err != nil {
return ast.InvalidReference, nil, err return ast.InvalidReference, b, err
} }
ref := p.builder.Push(ast.Node{ ref := p.builder.Push(ast.Node{
@@ -678,7 +712,7 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
raw, key, b, err = p.parseSimpleKey(b) raw, key, b, err = p.parseSimpleKey(b)
if err != nil { if err != nil {
return ref, nil, err return ref, b, err
} }
p.builder.PushAndChain(ast.Node{ p.builder.PushAndChain(ast.Node{
@@ -696,7 +730,7 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) { func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
if len(b) == 0 { if len(b) == 0 {
return nil, nil, nil, newDecodeError(b, "expected key but found none") return nil, nil, b, newDecodeError(b, "expected key but found none")
} }
// simple-key = quoted-key / unquoted-key // simple-key = quoted-key / unquoted-key
@@ -711,7 +745,7 @@ func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
key, rest = scanUnquotedKey(b) key, rest = scanUnquotedKey(b)
return key, key, rest, nil return key, key, rest, nil
default: default:
return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0]) return nil, nil, b[1:], newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
} }
} }
@@ -733,7 +767,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
// escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX // escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
token, escaped, rest, err := scanBasicString(b) token, escaped, rest, err := scanBasicString(b)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
startIdx := len(`"`) startIdx := len(`"`)
@@ -748,7 +782,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
if verr.Zero() { if verr.Zero() {
return token, str, rest, nil return token, str, rest, nil
} }
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8") return nil, nil, rest, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
} }
i := startIdx i := startIdx
@@ -781,7 +815,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
case 'u': case 'u':
x, err := hexToRune(token[i+1:len(token)-1], 4) x, err := hexToRune(token[i+1:len(token)-1], 4)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
builder.WriteRune(x) builder.WriteRune(x)
@@ -789,19 +823,19 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
case 'U': case 'U':
x, err := hexToRune(token[i+1:len(token)-1], 8) x, err := hexToRune(token[i+1:len(token)-1], 8)
if err != nil { if err != nil {
return nil, nil, nil, err return nil, nil, rest, err
} }
builder.WriteRune(x) builder.WriteRune(x)
i += 8 i += 8
default: default:
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c) return nil, nil, rest, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
} }
i++ i++
} else { } else {
size := utf8ValidNext(token[i:]) size := utf8ValidNext(token[i:])
if size == 0 { if size == 0 {
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c) return nil, nil, rest, newDecodeError(token[i:i+1], "invalid character %#U", c)
} }
builder.Write(token[i : i+size]) builder.Write(token[i : i+size])
i += size i += size
@@ -854,21 +888,23 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
switch b[0] { switch b[0] {
case 'i': case 'i':
if !scanFollowsInf(b) { if !scanFollowsInf(b) {
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'") return ast.InvalidReference, b, newDecodeError(atmost(b, 3), "expected 'inf'")
} }
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: ast.Float, Kind: ast.Float,
Data: b[:3], Data: b[:3],
Raw: p.Range(b[:3]),
}), b[3:], nil }), b[3:], nil
case 'n': case 'n':
if !scanFollowsNan(b) { if !scanFollowsNan(b) {
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'") return ast.InvalidReference, b, newDecodeError(atmost(b, 3), "expected 'nan'")
} }
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: ast.Float, Kind: ast.Float,
Data: b[:3], Data: b[:3],
Raw: p.Range(b[:3]),
}), b[3:], nil }), b[3:], nil
case '+', '-': case '+', '-':
return p.scanIntOrFloat(b) return p.scanIntOrFloat(b)
@@ -960,6 +996,7 @@ byteLoop:
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: kind, Kind: kind,
Data: b[:i], Data: b[:i],
Raw: p.Range(b[:i]),
}), b[i:], nil }), b[i:], nil
} }
@@ -993,6 +1030,7 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: ast.Integer, Kind: ast.Integer,
Data: b[:i], Data: b[:i],
Raw: p.Range(b[:i]),
}), b[i:], nil }), b[i:], nil
} }
@@ -1016,10 +1054,11 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: ast.Float, Kind: ast.Float,
Data: b[:i+3], Data: b[:i+3],
Raw: p.Range(b[:i+3]),
}), b[i+3:], nil }), b[i+3:], nil
} }
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number") return ast.InvalidReference, b[i:], newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
} }
if c == 'n' { if c == 'n' {
@@ -1027,10 +1066,11 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: ast.Float, Kind: ast.Float,
Data: b[:i+3], Data: b[:i+3],
Raw: p.Range(b[:i+3]),
}), b[i+3:], nil }), b[i+3:], nil
} }
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number") return ast.InvalidReference, b[i:], newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
} }
break break
@@ -1049,6 +1089,7 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
Kind: kind, Kind: kind,
Data: b[:i], Data: b[:i],
Raw: p.Range(b[:i]),
}), b[i:], nil }), b[i:], nil
} }
@@ -1075,11 +1116,11 @@ func isValidBinaryRune(r byte) bool {
func expect(x byte, b []byte) ([]byte, error) { func expect(x byte, b []byte) ([]byte, error) {
if len(b) == 0 { if len(b) == 0 {
return nil, newDecodeError(b, "expected character %c but the document ended here", x) return b, newDecodeError(b, "expected character %c but the document ended here", x)
} }
if b[0] != x { if b[0] != x {
return nil, newDecodeError(b[0:1], "expected character %c", x) return b, newDecodeError(b[0:1], "expected character %c", x)
} }
return b[1:], nil return b[1:], nil
+19 -19
View File
@@ -54,16 +54,16 @@ func scanLiteralString(b []byte) ([]byte, []byte, error) {
case '\'': case '\'':
return b[:i+1], b[i+1:], nil return b[:i+1], b[i+1:], nil
case '\n', '\r': case '\n', '\r':
return nil, nil, newDecodeError(b[i:i+1], "literal strings cannot have new lines") return nil, b[i+1:], newDecodeError(b[i:i+1], "literal strings cannot have new lines")
} }
size := utf8ValidNext(b[i:]) size := utf8ValidNext(b[i:])
if size == 0 { if size == 0 {
return nil, nil, newDecodeError(b[i:i+1], "invalid character") return nil, b[i+1:], newDecodeError(b[i:i+1], "invalid character")
} }
i += size i += size
} }
return nil, nil, newDecodeError(b[len(b):], "unterminated literal string") return nil, b[len(b):], newDecodeError(b[len(b):], "unterminated literal string")
} }
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) { func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
@@ -98,39 +98,39 @@ func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
i++ i++
if i < len(b) && b[i] == '\'' { if i < len(b) && b[i] == '\'' {
return nil, nil, newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string") return nil, b[i:], newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string")
} }
return b[:i], b[i:], nil return b[:i], b[i:], nil
} }
case '\r': case '\r':
if len(b) < i+2 { if len(b) < i+2 {
return nil, nil, newDecodeError(b[len(b):], `need a \n after \r`) return nil, b[i:], newDecodeError(b[len(b):], `need a \n after \r`)
} }
if b[i+1] != '\n' { if b[i+1] != '\n' {
return nil, nil, newDecodeError(b[i:i+2], `need a \n after \r`) return nil, b[i+2:], newDecodeError(b[i:i+2], `need a \n after \r`)
} }
i += 2 // skip the \n i += 2 // skip the \n
continue continue
} }
size := utf8ValidNext(b[i:]) size := utf8ValidNext(b[i:])
if size == 0 { if size == 0 {
return nil, nil, newDecodeError(b[i:i+1], "invalid character") return nil, b[i:], newDecodeError(b[i:i+1], "invalid character")
} }
i += size i += size
} }
return nil, nil, newDecodeError(b[len(b):], `multiline literal string not terminated by '''`) return nil, b[len(b):], newDecodeError(b[len(b):], `multiline literal string not terminated by '''`)
} }
func scanWindowsNewline(b []byte) ([]byte, []byte, error) { func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
const lenCRLF = 2 const lenCRLF = 2
if len(b) < lenCRLF { if len(b) < lenCRLF {
return nil, nil, newDecodeError(b, "windows new line expected") return nil, b, newDecodeError(b, "windows new line expected")
} }
if b[1] != '\n' { if b[1] != '\n' {
return nil, nil, newDecodeError(b, `windows new line should be \r\n`) return nil, b[2:], newDecodeError(b, `windows new line should be \r\n`)
} }
return b[:lenCRLF], b[lenCRLF:], nil return b[:lenCRLF], b[lenCRLF:], nil
@@ -169,7 +169,7 @@ func scanComment(b []byte) ([]byte, []byte, error) {
} }
size := utf8ValidNext(b[i:]) size := utf8ValidNext(b[i:])
if size == 0 { if size == 0 {
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment") return nil, b[i+1:], newDecodeError(b[i:i+1], "invalid character in comment")
} }
i += size i += size
@@ -192,17 +192,17 @@ func scanBasicString(b []byte) ([]byte, bool, []byte, error) {
case '"': case '"':
return b[:i+1], escaped, b[i+1:], nil return b[:i+1], escaped, b[i+1:], nil
case '\n', '\r': case '\n', '\r':
return nil, escaped, nil, newDecodeError(b[i:i+1], "basic strings cannot have new lines") return nil, escaped, b[i+1:], newDecodeError(b[i:i+1], "basic strings cannot have new lines")
case '\\': case '\\':
if len(b) < i+2 { if len(b) < i+2 {
return nil, escaped, nil, newDecodeError(b[i:i+1], "need a character after \\") return nil, escaped, b[i+1:], newDecodeError(b[i:i+1], "need a character after \\")
} }
escaped = true escaped = true
i++ // skip the next character i++ // skip the next character
} }
} }
return nil, escaped, nil, newDecodeError(b[len(b):], `basic string not terminated by "`) return nil, escaped, b[len(b):], newDecodeError(b[len(b):], `basic string not terminated by "`)
} }
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) { func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
@@ -243,27 +243,27 @@ func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
i++ i++
if i < len(b) && b[i] == '"' { if i < len(b) && b[i] == '"' {
return nil, escaped, nil, newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`) return nil, escaped, b[i+1:], newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`)
} }
return b[:i], escaped, b[i:], nil return b[:i], escaped, b[i:], nil
} }
case '\\': case '\\':
if len(b) < i+2 { if len(b) < i+2 {
return nil, escaped, nil, newDecodeError(b[len(b):], "need a character after \\") return nil, escaped, b[len(b):], newDecodeError(b[len(b):], "need a character after \\")
} }
escaped = true escaped = true
i++ // skip the next character i++ // skip the next character
case '\r': case '\r':
if len(b) < i+2 { if len(b) < i+2 {
return nil, escaped, nil, newDecodeError(b[len(b):], `need a \n after \r`) return nil, escaped, b[len(b):], newDecodeError(b[len(b):], `need a \n after \r`)
} }
if b[i+1] != '\n' { if b[i+1] != '\n' {
return nil, escaped, nil, newDecodeError(b[i:i+2], `need a \n after \r`) return nil, escaped, b[i+2:], newDecodeError(b[i:i+2], `need a \n after \r`)
} }
i++ // skip the \n i++ // skip the \n
} }
} }
return nil, escaped, nil, newDecodeError(b[len(b):], `multiline basic string not terminated by """`) return nil, escaped, b[len(b):], newDecodeError(b[len(b):], `multiline basic string not terminated by """`)
} }
+41 -19
View File
@@ -79,22 +79,22 @@ func (d *Decoder) DisallowUnknownFields() *Decoder {
// strict mode and a field is missing, a `toml.StrictMissingError` is // strict mode and a field is missing, a `toml.StrictMissingError` is
// returned. In any other case, this function returns a standard Go error. // returned. In any other case, this function returns a standard Go error.
// //
// Type mapping // # Type mapping
// //
// List of supported TOML types and their associated accepted Go types: // List of supported TOML types and their associated accepted Go types:
// //
// String -> string // String -> string
// Integer -> uint*, int*, depending on size // Integer -> uint*, int*, depending on size
// Float -> float*, depending on size // Float -> float*, depending on size
// Boolean -> bool // Boolean -> bool
// Offset Date-Time -> time.Time // Offset Date-Time -> time.Time
// Local Date-time -> LocalDateTime, time.Time // Local Date-time -> LocalDateTime, time.Time
// Local Date -> LocalDate, time.Time // Local Date -> LocalDate, time.Time
// Local Time -> LocalTime, time.Time // Local Time -> LocalTime, time.Time
// Array -> slice and array, depending on elements types // Array -> slice and array, depending on elements types
// Table -> map and struct // Table -> map and struct
// Inline Table -> same as Table // Inline Table -> same as Table
// Array of Tables -> same as Array and Table // Array of Tables -> same as Array and Table
func (d *Decoder) Decode(v interface{}) error { func (d *Decoder) Decode(v interface{}) error {
b, err := ioutil.ReadAll(d.r) b, err := ioutil.ReadAll(d.r)
if err != nil { if err != nil {
@@ -123,7 +123,7 @@ type decoder struct {
stashedExpr bool stashedExpr bool
// Skip expressions until a table is found. This is set to true when a // Skip expressions until a table is found. This is set to true when a
// table could not be create (missing field in map), so all KV expressions // table could not be created (missing field in map), so all KV expressions
// need to be skipped. // need to be skipped.
skipUntilTable bool skipUntilTable bool
@@ -344,9 +344,9 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
elem := v.Index(idx) elem := v.Index(idx)
_, err := d.handleArrayTable(key, elem) _, err := d.handleArrayTable(key, elem)
return v, err return v, err
default:
return reflect.Value{}, fmt.Errorf("toml: cannot decode array table into a %s", v.Type())
} }
return d.handleArrayTable(key, v)
} }
// When parsing an array table expression, each part of the key needs to be // When parsing an array table expression, each part of the key needs to be
@@ -483,7 +483,7 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
d.errorContext.Struct = t d.errorContext.Struct = t
d.errorContext.Field = path d.errorContext.Field = path
f := v.FieldByIndex(path) f := fieldByIndex(v, path)
x, err := nextFn(key, f) x, err := nextFn(key, f)
if err != nil || d.skipUntilTable { if err != nil || d.skipUntilTable {
return reflect.Value{}, err return reflect.Value{}, err
@@ -1071,7 +1071,7 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
d.errorContext.Struct = t d.errorContext.Struct = t
d.errorContext.Field = path d.errorContext.Field = path
f := v.FieldByIndex(path) f := fieldByIndex(v, path)
x, err := d.handleKeyValueInner(key, value, f) x, err := d.handleKeyValueInner(key, value, f)
if err != nil { if err != nil {
return reflect.Value{}, err return reflect.Value{}, err
@@ -1135,6 +1135,21 @@ func initAndDereferencePointer(v reflect.Value) reflect.Value {
return elem return elem
} }
// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
func fieldByIndex(v reflect.Value, path []int) reflect.Value {
for i, x := range path {
v = v.Field(x)
if i < len(path)-1 && v.Kind() == reflect.Pointer {
if v.IsNil() {
v.Set(reflect.New(v.Type().Elem()))
}
v = v.Elem()
}
}
return v
}
type fieldPathsMap = map[string][]int type fieldPathsMap = map[string][]int
var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap
@@ -1192,7 +1207,14 @@ func forEachField(t reflect.Type, path []int, do func(name string, path []int))
} }
if f.Anonymous && name == "" { if f.Anonymous && name == "" {
forEachField(f.Type, fieldPath, do) t2 := f.Type
if t2.Kind() == reflect.Pointer {
t2 = t2.Elem()
}
if t2.Kind() == reflect.Struct {
forEachField(t2, fieldPath, do)
}
continue continue
} }
+106 -20
View File
@@ -1735,6 +1735,28 @@ B = "data"`,
} }
}, },
}, },
{
desc: "kv that points to a slice",
input: "a.b.c = 'foo'",
gen: func() test {
doc := map[string][]string{}
return test{
target: &doc,
err: true,
}
},
},
{
desc: "kv that points to a pointer to a slice",
input: "a.b.c = 'foo'",
gen: func() test {
doc := map[string]*[]string{}
return test{
target: &doc,
err: true,
}
},
},
} }
for _, e := range examples { for _, e := range examples {
@@ -1876,8 +1898,7 @@ key2 = "missing2"
key3 = "missing3" key3 = "missing3"
key4 = "value4" key4 = "value4"
`, `,
expected: ` expected: `2| key1 = "value1"
2| key1 = "value1"
3| key2 = "missing2" 3| key2 = "missing2"
| ~~~~ missing field | ~~~~ missing field
4| key3 = "missing3" 4| key3 = "missing3"
@@ -1887,8 +1908,7 @@ key4 = "value4"
3| key2 = "missing2" 3| key2 = "missing2"
4| key3 = "missing3" 4| key3 = "missing3"
| ~~~~ missing field | ~~~~ missing field
5| key4 = "value4" 5| key4 = "value4"`,
`,
target: &struct { target: &struct {
Key1 string Key1 string
Key4 string Key4 string
@@ -1897,10 +1917,8 @@ key4 = "value4"
{ {
desc: "multi-part key", desc: "multi-part key",
input: `a.short.key="foo"`, input: `a.short.key="foo"`,
expected: ` expected: `1| a.short.key="foo"
1| a.short.key="foo" | ~~~~~~~~~~~ missing field`,
| ~~~~~~~~~~~ missing field
`,
}, },
{ {
desc: "missing table", desc: "missing table",
@@ -1908,24 +1926,19 @@ key4 = "value4"
[foo] [foo]
bar = 42 bar = 42
`, `,
expected: ` expected: `2| [foo]
2| [foo]
| ~~~ missing table | ~~~ missing table
3| bar = 42 3| bar = 42`,
`,
}, },
{ {
desc: "missing array table", desc: "missing array table",
input: ` input: `
[[foo]] [[foo]]
bar = 42 bar = 42`,
`, expected: `2| [[foo]]
expected: `
2| [[foo]]
| ~~~ missing table | ~~~ missing table
3| bar = 42 3| bar = 42`,
`,
}, },
} }
@@ -1944,7 +1957,7 @@ bar = 42
var tsm *toml.StrictMissingError var tsm *toml.StrictMissingError
if errors.As(err, &tsm) { if errors.As(err, &tsm) {
equalStringsIgnoreNewlines(t, e.expected, tsm.String()) assert.Equal(t, e.expected, tsm.String())
} else { } else {
t.Fatalf("err should have been a *toml.StrictMissingError, but got %s (%T)", err, err) t.Fatalf("err should have been a *toml.StrictMissingError, but got %s (%T)", err, err)
} }
@@ -2417,12 +2430,42 @@ func TestIssue774(t *testing.T) {
expected := `# Array of Secure Copy Configurations expected := `# Array of Secure Copy Configurations
[[scp]] [[scp]]
Host = 'main.domain.com' Host = 'main.domain.com'
` `
require.Equal(t, expected, string(b)) require.Equal(t, expected, string(b))
} }
func TestIssue799(t *testing.T) {
const testTOML = `
# notice the double brackets
[[test]]
answer = 42
`
var s struct {
// should be []map[string]int
Test map[string]int `toml:"test"`
}
err := toml.Unmarshal([]byte(testTOML), &s)
require.Error(t, err)
}
func TestIssue807(t *testing.T) {
type A struct {
Name string `toml:"name"`
}
type M struct {
*A
}
var m M
err := toml.Unmarshal([]byte(`name = 'foo'`), &m)
require.NoError(t, err)
require.Equal(t, "foo", m.Name)
}
func TestUnmarshalDecodeErrors(t *testing.T) { func TestUnmarshalDecodeErrors(t *testing.T) {
examples := []struct { examples := []struct {
desc string desc string
@@ -2874,6 +2917,36 @@ world'`,
} }
} }
func TestOmitEmpty(t *testing.T) {
type inner struct {
private string
Skip string `toml:"-"`
V string
}
type elem struct {
Foo string `toml:",omitempty"`
Bar string `toml:",omitempty"`
Inner inner `toml:",omitempty"`
}
type doc struct {
X []elem `toml:",inline"`
}
d := doc{X: []elem{elem{
Foo: "test",
Inner: inner{
V: "alue",
},
}}}
b, err := toml.Marshal(d)
require.NoError(t, err)
require.Equal(t, "X = [{Foo = 'test', Inner = {V = 'alue'}}]\n", string(b))
}
func TestUnmarshalTags(t *testing.T) { func TestUnmarshalTags(t *testing.T) {
type doc struct { type doc struct {
Dash string `toml:"-,"` Dash string `toml:"-,"`
@@ -3215,3 +3288,16 @@ func TestUnmarshal_RecursiveTableArray(t *testing.T) {
}) })
} }
} }
func TestUnmarshalEmbedNonString(t *testing.T) {
type Foo []byte
type doc struct {
Foo
}
d := doc{}
err := toml.Unmarshal([]byte(`foo = 'bar'`), &d)
require.NoError(t, err)
require.Nil(t, d.Foo)
}