Compare commits

..

13 Commits

Author SHA1 Message Date
Thomas Pelletier 618f0181ac AST Tweaks (#551)
* Use pointers instead of copying around ast.Node

Node is a 56B struct that is constantly in the hot path. Passing nodes
around by copy had a cost that started to add up. This change replaces
them by pointers. Using unsafe pointer arithmetic and converting
sibling/child indexes to relative offsets, it removes the need to carry
around a pointer to the root of the tree. This saves 8B per Node. This
space will be used to store an extra []byte slice to provide contextual
error handling on all nodes, including the ones whose data is different
than the raw input (for example: strings with escaped characters), while
staying under the size of a cache line.

* Remove conditional

* Add Raw to track range in data for parsed values

* Simplify reference tracking
2021-06-03 21:48:51 -04:00
Thomas Pelletier f3bb20ea79 Benchmark marshal (#550) 2021-06-02 09:29:19 -04:00
Thomas Pelletier b0d6c62255 Don't use bytes.Buffer when not necessary (#549)
When parsing strings, they can be referenced directly from the document
when they don't contain escaped characters. This avoids paying the cost
of allocating (and sometimes growing) the bytes buffer unnecessarily.
2021-06-01 09:51:59 -04:00
Thomas Pelletier b202375414 Add benchmarks results to readme (#548) 2021-06-01 09:10:17 -04:00
Thomas Pelletier 250e073408 Stack-based unmarshaler (#546)
* Benchmark script

* Rewrite unmarshaler using the stack

Instead of tracking the build chain using `target`s, use the stack
instead.

Working and most benchmarks look good, but regression on structs unmarshalling.

~60% slower on ReferenceFile/struct.

* Shortcut to check if last node of iterator

* Remove unnecessary pointer allocation

* Skip over unused keys without marking them as seen

* Add some tests

* Fix mktemp on macos
2021-05-31 12:14:13 -04:00
Thomas Pelletier 11f022ab09 Fix parser skipping over the whole file (#547)
* Add test for ReferenceFile/struct
* Stop skipping after table
* Add .gitattributes to force LF encoding
* Fix the reference file
2021-05-30 18:53:07 -04:00
Thomas Pelletier 840df4a229 seen tracker: use array for storage (#545)
name                              old time/op    new time/op    delta
UnmarshalDataset/config-32          81.2ms ± 3%    77.8ms ± 3%   -4.25%  (p=0.000 n=20+19)
UnmarshalDataset/canada-32           104ms ± 5%     105ms ± 4%     ~     (p=0.102 n=20+20)
UnmarshalDataset/citm_catalog-32    57.5ms ± 5%    59.0ms ± 5%   +2.54%  (p=0.033 n=20+20)
UnmarshalDataset/twitter-32         25.7ms ± 7%    28.1ms ± 5%   +9.33%  (p=0.000 n=20+20)
UnmarshalDataset/code-32             305ms ± 6%     292ms ± 5%   -4.29%  (p=0.000 n=20+19)
UnmarshalDataset/example-32          519µs ± 6%     522µs ± 5%     ~     (p=0.659 n=20+20)
UnmarshalSimple/struct-32           1.44µs ± 1%    1.17µs ± 6%  -18.78%  (p=0.000 n=14+20)
UnmarshalSimple/map-32              2.30µs ± 4%    1.99µs ± 4%  -13.65%  (p=0.000 n=20+19)
ReferenceFile/struct-32             44.1µs ± 4%    38.1µs ± 5%  -13.61%  (p=0.000 n=18+20)
ReferenceFile/map-32                 197µs ± 7%     189µs ± 5%   -3.91%  (p=0.000 n=20+20)
HugoFrontMatter-32                  45.9µs ± 6%    39.3µs ± 6%  -14.46%  (p=0.000 n=19+20)

name                              old speed      new speed      delta
UnmarshalDataset/config-32        12.9MB/s ± 3%  13.5MB/s ± 3%   +4.42%  (p=0.000 n=20+19)
UnmarshalDataset/canada-32        21.1MB/s ± 5%  20.9MB/s ± 4%     ~     (p=0.101 n=20+20)
UnmarshalDataset/citm_catalog-32  9.72MB/s ± 6%  9.47MB/s ± 5%   -2.53%  (p=0.031 n=20+20)
UnmarshalDataset/twitter-32       17.2MB/s ± 7%  15.8MB/s ± 5%   -8.57%  (p=0.000 n=20+20)
UnmarshalDataset/code-32          8.81MB/s ± 7%  9.20MB/s ± 5%   +4.47%  (p=0.000 n=20+19)
UnmarshalDataset/example-32       15.6MB/s ± 6%  15.5MB/s ± 5%     ~     (p=0.644 n=20+20)
UnmarshalSimple/struct-32         7.61MB/s ± 1%  9.39MB/s ± 7%  +23.33%  (p=0.000 n=15+20)
UnmarshalSimple/map-32            4.78MB/s ± 4%  5.54MB/s ± 5%  +15.85%  (p=0.000 n=20+19)
ReferenceFile/struct-32            119MB/s ± 4%   138MB/s ± 5%  +15.79%  (p=0.000 n=18+20)
ReferenceFile/map-32              26.6MB/s ± 7%  27.7MB/s ± 5%   +4.06%  (p=0.000 n=20+20)
HugoFrontMatter-32                11.9MB/s ± 6%  13.9MB/s ± 6%  +16.91%  (p=0.000 n=19+20)

name                              old alloc/op   new alloc/op   delta
UnmarshalDataset/config-32          16.9MB ± 0%    13.9MB ± 0%  -17.48%  (p=0.000 n=18+18)
UnmarshalDataset/canada-32          74.3MB ± 0%    74.3MB ± 0%   -0.00%  (p=0.000 n=20+20)
UnmarshalDataset/citm_catalog-32    37.3MB ± 0%    37.3MB ± 0%   +0.11%  (p=0.000 n=20+20)
UnmarshalDataset/twitter-32         15.6MB ± 0%    15.6MB ± 0%     ~     (p=0.211 n=19+20)
UnmarshalDataset/code-32            59.5MB ± 0%    52.4MB ± 0%  -11.96%  (p=0.000 n=19+20)
UnmarshalDataset/example-32          238kB ± 0%     239kB ± 0%   +0.02%  (p=0.000 n=18+20)
UnmarshalSimple/struct-32             981B ± 0%      709B ± 0%  -27.73%  (p=0.000 n=20+20)
UnmarshalSimple/map-32              1.45kB ± 0%    1.17kB ± 0%  -18.82%  (p=0.000 n=20+20)
ReferenceFile/struct-32             11.8kB ± 0%     9.7kB ± 0%  -17.64%  (p=0.000 n=20+20)
ReferenceFile/map-32                51.5kB ± 0%    52.2kB ± 0%   +1.30%  (p=0.000 n=20+17)
HugoFrontMatter-32                  12.1kB ± 0%    11.1kB ± 0%   -7.97%  (p=0.000 n=20+19)

name                              old allocs/op  new allocs/op  delta
UnmarshalDataset/config-32            645k ± 0%      557k ± 0%  -13.76%  (p=0.000 n=20+16)
UnmarshalDataset/canada-32            896k ± 0%      896k ± 0%   -0.00%  (p=0.000 n=20+20)
UnmarshalDataset/citm_catalog-32      380k ± 0%      377k ± 0%   -0.75%  (p=0.000 n=19+20)
UnmarshalDataset/twitter-32           158k ± 0%      158k ± 0%   -0.01%  (p=0.000 n=18+18)
UnmarshalDataset/code-32             2.92M ± 0%     2.57M ± 0%  -11.87%  (p=0.000 n=20+20)
UnmarshalDataset/example-32          3.66k ± 0%     3.64k ± 0%   -0.63%  (p=0.000 n=20+20)
UnmarshalSimple/struct-32             13.0 ± 0%      10.0 ± 0%  -23.08%  (p=0.000 n=20+20)
UnmarshalSimple/map-32                22.0 ± 0%      19.0 ± 0%  -13.64%  (p=0.000 n=20+20)
ReferenceFile/struct-32                253 ± 0%       155 ± 0%  -38.74%  (p=0.000 n=20+20)
ReferenceFile/map-32                 1.67k ± 0%     1.44k ± 0%  -14.23%  (p=0.000 n=20+20)
HugoFrontMatter-32                     357 ± 0%       313 ± 0%  -12.32%  (p=0.000 n=20+20)
2021-05-26 18:47:00 -04:00
Thomas Pelletier c2d1fd86e5 Fix timezone detection when time has fractional component (#544) 2021-05-21 09:37:43 -04:00
Thomas Pelletier 238a6fef7d Add links to proper feedback channels 2021-05-15 08:55:07 -04:00
Thomas Pelletier 67852cf007 Clarify default struct tag being unsupported (#543)
Follow up to https://github.com/pelletier/go-toml/issues/542
2021-05-15 08:49:15 -04:00
Thomas Pelletier d276c42adc Run coverage test on branches only 2021-05-10 20:22:12 -04:00
Thomas Pelletier 95c701b253 Increase test coverage (#538)
Also fix array in map bug.
2021-05-10 20:17:05 -04:00
Thomas Pelletier 3db329a512 ci: basic github action for coverage (#537) 2021-05-09 17:37:03 -04:00
37 changed files with 3645 additions and 1779 deletions
+3
View File
@@ -0,0 +1,3 @@
* text=auto
benchmark/benchmark.toml text eol=lf
+16 -2
View File
@@ -1,5 +1,19 @@
**Issue:** add link to pelletier/go-toml issue here <!--
Thank you for your pull request!
Please read the Code changes section of the CONTRIBUTING.md file,
and make sure you have followed the instructions.
https://github.com/pelletier/go-toml/blob/v2/CONTRIBUTING.md#code-changes
-->
Explanation of what this pull request does. Explanation of what this pull request does.
More detailed description of the decisions being made and the reasons why (if the patch is non-trivial). More detailed description of the decisions being made and the reasons why (if
the patch is non-trivial).
---
Paste `benchstat` results here
+20
View File
@@ -0,0 +1,20 @@
name: coverage
on:
pull_request:
branches:
- v2
jobs:
report:
runs-on: "ubuntu-latest"
name: report
steps:
- uses: actions/checkout@master
with:
fetch-depth: 0
- name: Setup go
uses: actions/setup-go@master
with:
go-version: 1.16
- name: Run tests with coverage
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
+5 -5
View File
@@ -24,7 +24,7 @@ enable = [
# "exhaustivestruct", # "exhaustivestruct",
"exportloopref", "exportloopref",
"forbidigo", "forbidigo",
"forcetypeassert", # "forcetypeassert",
"funlen", "funlen",
"gci", "gci",
# "gochecknoglobals", # "gochecknoglobals",
@@ -35,7 +35,7 @@ enable = [
"gocyclo", "gocyclo",
"godot", "godot",
"godox", "godox",
"goerr113", # "goerr113",
"gofmt", "gofmt",
"gofumpt", "gofumpt",
"goheader", "goheader",
@@ -57,10 +57,10 @@ enable = [
"nakedret", "nakedret",
"nestif", "nestif",
"nilerr", "nilerr",
"nlreturn", # "nlreturn",
"noctx", "noctx",
"nolintlint", "nolintlint",
"paralleltest", #"paralleltest",
"prealloc", "prealloc",
"predeclared", "predeclared",
"revive", "revive",
@@ -80,5 +80,5 @@ enable = [
"wastedassign", "wastedassign",
"whitespace", "whitespace",
# "wrapcheck", # "wrapcheck",
"wsl" # "wsl"
] ]
+113 -63
View File
@@ -1,74 +1,74 @@
## Contributing # Contributing
Thank you for your interest in go-toml! We appreciate you considering Thank you for your interest in go-toml! We appreciate you considering
contributing to go-toml! contributing to go-toml!
The main goal of the project is to provide an easy-to-use TOML The main goal of the project is to provide an easy-to-use and efficient TOML
implementation for Go that gets the job done and gets out of your way implementation for Go that gets the job done and gets out of your way dealing
dealing with TOML is probably not the central piece of your project. with TOML is probably not the central piece of your project.
As the single maintainer of go-toml, time is scarce. All help, big or As the single maintainer of go-toml, time is scarce. All help, big or small, is
small, is more than welcomed! more than welcomed!
### Ask questions ## Ask questions
Any question you may have, somebody else might have it too. Always feel Any question you may have, somebody else might have it too. Always feel free to
free to ask them on the [issues tracker][issues-tracker]. We will try to ask them on the [discussion board][discussions]. We will try to answer them as
answer them as clearly and quickly as possible, time permitting. clearly and quickly as possible, time permitting.
Asking questions also helps us identify areas where the documentation needs Asking questions also helps us identify areas where the documentation needs
improvement, or new features that weren't envisioned before. Sometimes, a improvement, or new features that weren't envisioned before. Sometimes, a
seemingly innocent question leads to the fix of a bug. Don't hesitate and seemingly innocent question leads to the fix of a bug. Don't hesitate and ask
ask away! away!
### Improve the documentation [discussions]: https://github.com/pelletier/go-toml/discussions
The best way to share your knowledge and experience with go-toml is to ## Improve the documentation
improve the documentation. Fix a typo, clarify an interface, add an
example, anything goes!
The documentation is present in the [README][readme] and thorough the The best way to share your knowledge and experience with go-toml is to improve
source code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a the documentation. Fix a typo, clarify an interface, add an example, anything
change to the documentation, create a pull request with your proposed goes!
changes. For simple changes like that, the easiest way to go is probably
the "Fork this project and edit the file" button on Github, displayed at
the top right of the file. Unless it's a trivial change (for example a
typo), provide a little bit of context in your pull request description or
commit message.
### Report a bug The documentation is present in the [README][readme] and thorough the source
code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a change
to the documentation, create a pull request with your proposed changes. For
simple changes like that, the easiest way to go is probably the "Fork this
project and edit the file" button on Github, displayed at the top right of the
file. Unless it's a trivial change (for example a typo), provide a little bit of
context in your pull request description or commit message.
Found a bug! Sorry to hear that :(. Help us and other track them down and ## Report a bug
fix by reporting it. [File a new bug report][bug-report] on the [issues
tracker][issues-tracker]. The template should provide enough guidance on
what to include. When in doubt: add more details! By reducing ambiguity and
providing more information, it decreases back and forth and saves everyone
time.
### Code changes Found a bug! Sorry to hear that :(. Help us and other track them down and fix by
reporting it. [File a new bug report][bug-report] on the [issues
tracker][issues-tracker]. The template should provide enough guidance on what to
include. When in doubt: add more details! By reducing ambiguity and providing
more information, it decreases back and forth and saves everyone time.
## Code changes
Want to contribute a patch? Very happy to hear that! Want to contribute a patch? Very happy to hear that!
First, some high-level rules: First, some high-level rules:
* A short proposal with some POC code is better than a lengthy piece of - A short proposal with some POC code is better than a lengthy piece of text
text with no code. Code speaks louder than words. with no code. Code speaks louder than words. That being said, bigger changes
* No backward-incompatible patch will be accepted unless discussed. should probably start with a [discussion][discussions].
Sometimes it's hard, and Go's lack of versioning by default does not - No backward-incompatible patch will be accepted unless discussed. Sometimes
help, but we try not to break people's programs unless we absolutely have it's hard, but we try not to break people's programs unless we absolutely have
to. to.
* If you are writing a new feature or extending an existing one, make sure - If you are writing a new feature or extending an existing one, make sure to
to write some documentation. write some documentation.
* Bug fixes need to be accompanied with regression tests. - Bug fixes need to be accompanied with regression tests.
* New code needs to be tested. - New code needs to be tested.
* Your commit messages need to explain why the change is needed, even if - Your commit messages need to explain why the change is needed, even if already
already included in the PR description. included in the PR description.
It does sound like a lot, but those best practices are here to save time It does sound like a lot, but those best practices are here to save time overall
overall and continuously improve the quality of the project, which is and continuously improve the quality of the project, which is something everyone
something everyone benefits from. benefits from.
#### Get started ### Get started
The fairly standard code contribution process looks like that: The fairly standard code contribution process looks like that:
@@ -76,42 +76,92 @@ The fairly standard code contribution process looks like that:
2. Make your changes, commit on any branch you like. 2. Make your changes, commit on any branch you like.
3. [Open up a pull request][pull-request] 3. [Open up a pull request][pull-request]
4. Review, potential ask for changes. 4. Review, potential ask for changes.
5. Merge. You're in! 5. Merge.
Feel free to ask for help! You can create draft pull requests to gather Feel free to ask for help! You can create draft pull requests to gather
some early feedback! some early feedback!
#### Run the tests ### Run the tests
You can run tests for go-toml using Go's test tool: `go test ./...`. You can run tests for go-toml using Go's test tool: `go test -race ./...`.
When creating a pull requests, all tests will be ran on Linux on a few Go
versions (Travis CI), and on Windows using the latest Go version
(AppVeyor).
#### Style During the pull request process, all tests will be ran on Linux, Windows, and
MacOS on the last two versions of Go.
Try to look around and follow the same format and structure as the rest of However, given GitHub's new policy to _not_ run Actions on pull requests until a
the code. We enforce using `go fmt` on the whole code base. maintainer clicks on button, it is highly recommended that you run them locally
as you make changes.
### Check coverage
We use `go tool cover` to compute test coverage. Most code editors have a way to
run and display code coverage, but at the end of the day, we do this:
```
go test -covermode=atomic -coverprofile=coverage.out
go tool cover -func=coverage.out
```
and verify that the overall percentage of tested code does not go down. This is
a requirement. As a rule of thumb, all lines of code touched by your changes
should be covered. On Unix you can use `./ci.sh coverage -d v2` to check if your
code lowers the coverage.
### Verify performance
Go-toml aims to stay efficient. We rely on a set of scenarios executed with Go's
builtin benchmark systems. Because of their noisy nature, containers provided by
Github Actions cannot be reliably used for benchmarking. As a result, you are
responsible for checking that your changes do not incur a performance penalty.
You can run their following to execute benchmarks:
```
go test ./... -bench=. -count=10
```
Benchmark results should be compared against each other with
[benchstat][benchstat]. Typical flow looks like this:
1. On the `v2` branch, run `go test ./... -bench=. -count 10` and save output to
a file (for example `old.txt`).
2. Make some code changes.
3. Run `go test ....` again, and save the output to an other file (for example
`new.txt`).
4. Run `benchstat old.txt new.txt` to check that time/op does not go up in any
test.
On Unix you can use `./ci.sh benchmark -d v2` to verify how your code impacts
performance.
It is highly encouraged to add the benchstat results to your pull request
description. Pull requests that lower performance will receive more scrutiny.
[benchstat]: https://pkg.go.dev/golang.org/x/perf/cmd/benchstat
### Style
Try to look around and follow the same format and structure as the rest of the
code. We enforce using `go fmt` on the whole code base.
--- ---
### Maintainers-only ## Maintainers-only
#### Merge pull request ### Merge pull request
Checklist: Checklist:
* Passing CI. - Passing CI.
* Does not introduce backward-incompatible changes (unless discussed). - Does not introduce backward-incompatible changes (unless discussed).
* Has relevant doc changes. - Has relevant doc changes.
* Has relevant unit tests. - Benchstat does not show performance regression.
1. Merge using "squash and merge". 1. Merge using "squash and merge".
2. Make sure to edit the commit message to keep all the useful information 2. Make sure to edit the commit message to keep all the useful information
nice and clean. nice and clean.
3. Make sure the commit title is clear and contains the PR number (#123). 3. Make sure the commit title is clear and contains the PR number (#123).
#### New release ### New release
1. Go to [releases][releases]. Click on "X commits to master since this 1. Go to [releases][releases]. Click on "X commits to master since this
release". release".
+63 -5
View File
@@ -4,7 +4,6 @@ Go library for the [TOML](https://toml.io/en/) format.
This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0). This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0).
## Development status ## Development status
This is the upcoming major version of go-toml. It is currently in active This is the upcoming major version of go-toml. It is currently in active
@@ -14,8 +13,11 @@ with v1, and fixes a lot known bugs and performance issues along the way.
If you do not need the advanced document editing features of v1, you are If you do not need the advanced document editing features of v1, you are
encouraged to try out this version. encouraged to try out this version.
👉 [Roadmap for v2](https://github.com/pelletier/go-toml/discussions/506). [👉 Roadmap for v2](https://github.com/pelletier/go-toml/discussions/506)
[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues)
[💬 Anything else](https://github.com/pelletier/go-toml/discussions)
## Documentation ## Documentation
@@ -23,7 +25,6 @@ Full API, examples, and implementation notes are available in the Go documentati
[![Go Reference](https://pkg.go.dev/badge/github.com/pelletier/go-toml/v2.svg)](https://pkg.go.dev/github.com/pelletier/go-toml/v2) [![Go Reference](https://pkg.go.dev/badge/github.com/pelletier/go-toml/v2.svg)](https://pkg.go.dev/github.com/pelletier/go-toml/v2)
## Import ## Import
```go ```go
@@ -40,7 +41,7 @@ standard library's `encoding/json`.
### Performance ### Performance
While go-toml favors usability, it is written with performance in mind. Most While go-toml favors usability, it is written with performance in mind. Most
operations should not be shockingly slow. operations should not be shockingly slow. See [benchmarks](#benchmarks).
### Strict mode ### Strict mode
@@ -146,6 +147,48 @@ fmt.Println(string(b))
[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal [marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
## Benchmarks
Execution time speedup compared to other Go TOML libraries:
<table>
<thead>
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
</thead>
<tbody>
<tr><td>Marshal/HugoFrontMatter</td><td>2.0x</td><td>2.0x</td></tr>
<tr><td>Marshal/ReferenceFile/map</td><td>1.8x</td><td>2.0x</td></tr>
<tr><td>Marshal/ReferenceFile/struct</td><td>2.7x</td><td>2.7x</td></tr>
<tr><td>Unmarshal/HugoFrontMatter</td><td>3.0x</td><td>2.6x</td></tr>
<tr><td>Unmarshal/ReferenceFile/map</td><td>3.0x</td><td>3.1x</td></tr>
<tr><td>Unmarshal/ReferenceFile/struct</td><td>5.9x</td><td>6.6x</td></tr>
</tbody>
</table>
<details><summary>See more</summary>
<p>The table above has the results of the most common use-cases. The table below
contains the results of all benchmarks, including unrealistic ones. It is
provided for completeness.</p>
<table>
<thead>
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
</thead>
<tbody>
<tr><td>Marshal/SimpleDocument/map</td><td>1.7x</td><td>2.1x</td></tr>
<tr><td>Marshal/SimpleDocument/struct</td><td>2.6x</td><td>2.9x</td></tr>
<tr><td>Unmarshal/SimpleDocument/map</td><td>4.1x</td><td>2.9x</td></tr>
<tr><td>Unmarshal/SimpleDocument/struct</td><td>6.3x</td><td>4.1x</td></tr>
<tr><td>UnmarshalDataset/example</td><td>3.5x</td><td>2.4x</td></tr>
<tr><td>UnmarshalDataset/code</td><td>2.2x</td><td>2.8x</td></tr>
<tr><td>UnmarshalDataset/twitter</td><td>2.8x</td><td>2.1x</td></tr>
<tr><td>UnmarshalDataset/citm_catalog</td><td>2.3x</td><td>1.5x</td></tr>
<tr><td>UnmarshalDataset/config</td><td>4.2x</td><td>3.2x</td></tr>
<tr><td>[Geo mean]</td><td>3.0x</td><td>2.7x</td></tr>
</tbody>
</table>
<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>
</details>
## Migrating from v1 ## Migrating from v1
This section describes the differences between v1 and v2, with some pointers on This section describes the differences between v1 and v2, with some pointers on
@@ -248,6 +291,22 @@ This method was not widely used, poorly defined, and added a lot of complexity.
A similar effect can be achieved by implementing the `encoding.TextUnmarshaler` A similar effect can be achieved by implementing the `encoding.TextUnmarshaler`
interface and use strings. interface and use strings.
#### Support for `default` struct tag has been dropped
This feature adds complexity and a poorly defined API for an effect that can be
accomplished outside of the library.
It does not seem like other format parsers in Go support that feature (the
project referenced in the original ticket #202 has not been updated since 2017).
Given that go-toml v2 should not touch values not in the document, the same
effect can be achieved by pre-filling the struct with defaults (libraries like
[go-defaults][go-defaults] can help). Also, string representation is not well
defined for all types: it creates issues like #278.
The recommended replacement is pre-filling the struct before unmarshaling.
[go-defaults]: https://github.com/mcuadros/go-defaults
### Encoding / Marshal ### Encoding / Marshal
#### Default struct fields order #### Default struct fields order
@@ -290,7 +349,6 @@ manually sort the fields alphabetically in the struct definition.
V1 automatically indents content of tables by default. V2 does not. However the V1 automatically indents content of tables by default. V2 does not. However the
same behavior can be obtained using [`Encoder.SetIndentTables`][sit]. For example: same behavior can be obtained using [`Encoder.SetIndentTables`][sit]. For example:
```go ```go
data := map[string]interface{}{ data := map[string]interface{}{
"table": map[string]string{ "table": map[string]string{
+16 -17
View File
@@ -31,13 +31,14 @@ var bench_inputs = []struct {
func TestUnmarshalDatasetCode(t *testing.T) { func TestUnmarshalDatasetCode(t *testing.T) {
for _, tc := range bench_inputs { for _, tc := range bench_inputs {
buf := fixture(t, tc.name)
t.Run(tc.name, func(t *testing.T) { t.Run(tc.name, func(t *testing.T) {
buf := fixture(t, tc.name)
var v interface{} var v interface{}
check(t, toml.Unmarshal(buf, &v)) require.NoError(t, toml.Unmarshal(buf, &v))
b, err := json.Marshal(v) b, err := json.Marshal(v)
check(t, err) require.NoError(t, err)
require.Equal(t, len(b), tc.jsonLen) require.Equal(t, len(b), tc.jsonLen)
}) })
} }
@@ -45,14 +46,14 @@ func TestUnmarshalDatasetCode(t *testing.T) {
func BenchmarkUnmarshalDataset(b *testing.B) { func BenchmarkUnmarshalDataset(b *testing.B) {
for _, tc := range bench_inputs { for _, tc := range bench_inputs {
buf := fixture(b, tc.name)
b.Run(tc.name, func(b *testing.B) { b.Run(tc.name, func(b *testing.B) {
buf := fixture(b, tc.name)
b.SetBytes(int64(len(buf))) b.SetBytes(int64(len(buf)))
b.ReportAllocs() b.ReportAllocs()
b.ResetTimer() b.ResetTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
var v interface{} var v interface{}
check(b, toml.Unmarshal(buf, &v)) require.NoError(b, toml.Unmarshal(buf, &v))
} }
}) })
} }
@@ -60,22 +61,20 @@ func BenchmarkUnmarshalDataset(b *testing.B) {
// fixture returns the uncompressed contents of path. // fixture returns the uncompressed contents of path.
func fixture(tb testing.TB, path string) []byte { func fixture(tb testing.TB, path string) []byte {
f, err := os.Open(filepath.Join("testdata", path+".toml.gz")) tb.Helper()
check(tb, err)
file := path + ".toml.gz"
f, err := os.Open(filepath.Join("testdata", file))
if os.IsNotExist(err) {
tb.Skip("benchmark fixture not found:", file)
}
require.NoError(tb, err)
defer f.Close() defer f.Close()
gz, err := gzip.NewReader(f) gz, err := gzip.NewReader(f)
check(tb, err) require.NoError(tb, err)
buf, err := ioutil.ReadAll(gz) buf, err := ioutil.ReadAll(gz)
check(tb, err) require.NoError(tb, err)
return buf return buf
} }
func check(tb testing.TB, err error) {
if err != nil {
tb.Helper()
tb.Fatal(err)
}
}
+1 -1
View File
@@ -186,7 +186,7 @@ key3 = 1979-05-27T00:32:00.999999-07:00
key1 = [ 1, 2, 3 ] key1 = [ 1, 2, 3 ]
key2 = [ "red", "yellow", "green" ] key2 = [ "red", "yellow", "green" ]
key3 = [ [ 1, 2 ], [3, 4, 5] ] key3 = [ [ 1, 2 ], [3, 4, 5] ]
#key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
# Arrays can also be multiline. So in addition to ignoring whitespace, arrays # Arrays can also be multiline. So in addition to ignoring whitespace, arrays
# also ignore newlines between the brackets. Terminating commas are ok before # also ignore newlines between the brackets. Terminating commas are ok before
+529 -29
View File
@@ -1,6 +1,7 @@
package benchmark_test package benchmark_test
import ( import (
"bytes"
"io/ioutil" "io/ioutil"
"testing" "testing"
"time" "time"
@@ -9,17 +10,230 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func BenchmarkUnmarshalSimple(b *testing.B) { func TestUnmarshalSimple(t *testing.T) {
doc := []byte(`A = "hello"`)
d := struct { d := struct {
A string A string
}{} }{}
doc := []byte(`A = "hello"`)
for i := 0; i < b.N; i++ { err := toml.Unmarshal(doc, &d)
err := toml.Unmarshal(doc, &d) if err != nil {
panic(err)
}
}
func BenchmarkUnmarshal(b *testing.B) {
b.Run("SimpleDocument", func(b *testing.B) {
doc := []byte(`A = "hello"`)
b.Run("struct", func(b *testing.B) {
b.SetBytes(int64(len(doc)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
d := struct {
A string
}{}
err := toml.Unmarshal(doc, &d)
if err != nil {
panic(err)
}
}
})
b.Run("map", func(b *testing.B) {
b.SetBytes(int64(len(doc)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
d := map[string]interface{}{}
err := toml.Unmarshal(doc, &d)
if err != nil {
panic(err)
}
}
})
})
b.Run("ReferenceFile", func(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.Run("struct", func(b *testing.B) {
b.SetBytes(int64(len(bytes)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
d := benchmarkDoc{}
err := toml.Unmarshal(bytes, &d)
if err != nil {
panic(err)
}
}
})
b.Run("map", func(b *testing.B) {
b.SetBytes(int64(len(bytes)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
d := map[string]interface{}{}
err := toml.Unmarshal(bytes, &d)
if err != nil {
panic(err)
}
}
})
})
b.Run("HugoFrontMatter", func(b *testing.B) {
b.SetBytes(int64(len(hugoFrontMatterbytes)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
d := map[string]interface{}{}
err := toml.Unmarshal(hugoFrontMatterbytes, &d)
if err != nil {
panic(err)
}
}
})
}
func marshal(v interface{}) ([]byte, error) {
var b bytes.Buffer
enc := toml.NewEncoder(&b)
err := enc.Encode(v)
return b.Bytes(), err
}
func BenchmarkMarshal(b *testing.B) {
b.Run("SimpleDocument", func(b *testing.B) {
doc := []byte(`A = "hello"`)
b.Run("struct", func(b *testing.B) {
d := struct {
A string
}{}
err := toml.Unmarshal(doc, &d)
if err != nil {
panic(err)
}
b.ReportAllocs()
b.ResetTimer()
var out []byte
for i := 0; i < b.N; i++ {
out, err = marshal(d)
if err != nil {
panic(err)
}
}
b.SetBytes(int64(len(out)))
})
b.Run("map", func(b *testing.B) {
d := map[string]interface{}{}
err := toml.Unmarshal(doc, &d)
if err != nil {
panic(err)
}
b.ReportAllocs()
b.ResetTimer()
var out []byte
for i := 0; i < b.N; i++ {
out, err = marshal(d)
if err != nil {
panic(err)
}
}
b.SetBytes(int64(len(out)))
})
})
b.Run("ReferenceFile", func(b *testing.B) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.Run("struct", func(b *testing.B) {
d := benchmarkDoc{}
err := toml.Unmarshal(bytes, &d)
if err != nil {
panic(err)
}
b.ReportAllocs()
b.ResetTimer()
var out []byte
for i := 0; i < b.N; i++ {
out, err = marshal(d)
if err != nil {
panic(err)
}
}
b.SetBytes(int64(len(out)))
})
b.Run("map", func(b *testing.B) {
d := map[string]interface{}{}
err := toml.Unmarshal(bytes, &d)
if err != nil {
panic(err)
}
b.ReportAllocs()
b.ResetTimer()
var out []byte
for i := 0; i < b.N; i++ {
out, err = marshal(d)
if err != nil {
panic(err)
}
}
b.SetBytes(int64(len(out)))
})
})
b.Run("HugoFrontMatter", func(b *testing.B) {
d := map[string]interface{}{}
err := toml.Unmarshal(hugoFrontMatterbytes, &d)
if err != nil { if err != nil {
panic(err) panic(err)
} }
}
b.ReportAllocs()
b.ResetTimer()
var out []byte
for i := 0; i < b.N; i++ {
out, err = marshal(d)
if err != nil {
panic(err)
}
}
b.SetBytes(int64(len(out)))
})
} }
type benchmarkDoc struct { type benchmarkDoc struct {
@@ -35,7 +249,7 @@ type benchmarkDoc struct {
} }
Point struct { Point struct {
X int64 X int64
U int64 Y int64
} }
} }
} }
@@ -108,6 +322,7 @@ type benchmarkDoc struct {
Key2 []string Key2 []string
Key3 [][]int64 Key3 [][]int64
// TODO: Key4 not supported by go-toml's Unmarshal // TODO: Key4 not supported by go-toml's Unmarshal
Key4 []interface{}
Key5 []int64 Key5 []int64
Key6 []int64 Key6 []int64
} }
@@ -119,36 +334,321 @@ type benchmarkDoc struct {
Fruit []struct { Fruit []struct {
Name string Name string
Physical struct { Physical struct {
Color string Color string
Shape string Shape string
Variety []struct { }
Name string Variety []struct {
} Name string
} }
} }
} }
func BenchmarkReferenceFile(b *testing.B) { func TestUnmarshalReferenceFile(t *testing.T) {
bytes, err := ioutil.ReadFile("benchmark.toml")
if err != nil {
b.Fatal(err)
}
b.SetBytes(int64(len(bytes)))
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
d := benchmarkDoc{}
err := toml.Unmarshal(bytes, &d)
if err != nil {
panic(err)
}
}
}
func TestReferenceFile(t *testing.T) {
bytes, err := ioutil.ReadFile("benchmark.toml") bytes, err := ioutil.ReadFile("benchmark.toml")
require.NoError(t, err) require.NoError(t, err)
d := benchmarkDoc{} d := benchmarkDoc{}
err = toml.Unmarshal(bytes, &d) err = toml.Unmarshal(bytes, &d)
require.NoError(t, err) require.NoError(t, err)
expected := benchmarkDoc{
Table: struct {
Key string
Subtable struct{ Key string }
Inline struct {
Name struct {
First string
Last string
}
Point struct {
X int64
Y int64
}
}
}{
Key: "value",
Subtable: struct{ Key string }{
Key: "another value",
},
// note: x.y.z.w is purposefully missing
Inline: struct {
Name struct {
First string
Last string
}
Point struct {
X int64
Y int64
}
}{
Name: struct {
First string
Last string
}{
First: "Tom",
Last: "Preston-Werner",
},
Point: struct {
X int64
Y int64
}{
X: 1,
Y: 2,
},
},
},
String: struct {
Basic struct{ Basic string }
Multiline struct {
Key1 string
Key2 string
Key3 string
Continued struct {
Key1 string
Key2 string
Key3 string
}
}
Literal struct {
Winpath string
Winpath2 string
Quoted string
Regex string
Multiline struct {
Regex2 string
Lines string
}
}
}{
Basic: struct{ Basic string }{
Basic: "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF.",
},
Multiline: struct {
Key1 string
Key2 string
Key3 string
Continued struct {
Key1 string
Key2 string
Key3 string
}
}{
Key1: "One\nTwo",
Key2: "One\nTwo",
Key3: "One\nTwo",
Continued: struct {
Key1 string
Key2 string
Key3 string
}{
Key1: `The quick brown fox jumps over the lazy dog.`,
Key2: `The quick brown fox jumps over the lazy dog.`,
Key3: `The quick brown fox jumps over the lazy dog.`,
},
},
Literal: struct {
Winpath string
Winpath2 string
Quoted string
Regex string
Multiline struct {
Regex2 string
Lines string
}
}{
Winpath: `C:\Users\nodejs\templates`,
Winpath2: `\\ServerX\admin$\system32\`,
Quoted: `Tom "Dubs" Preston-Werner`,
Regex: `<\i\c*\s*>`,
Multiline: struct {
Regex2 string
Lines string
}{
Regex2: `I [dw]on't need \d{2} apples`,
Lines: `The first newline is
trimmed in raw strings.
All other whitespace
is preserved.
`,
},
},
},
Integer: struct {
Key1 int64
Key2 int64
Key3 int64
Key4 int64
Underscores struct {
Key1 int64
Key2 int64
Key3 int64
}
}{
Key1: 99,
Key2: 42,
Key3: 0,
Key4: -17,
Underscores: struct {
Key1 int64
Key2 int64
Key3 int64
}{
Key1: 1000,
Key2: 5349221,
Key3: 12345,
},
},
Float: struct {
Fractional struct {
Key1 float64
Key2 float64
Key3 float64
}
Exponent struct {
Key1 float64
Key2 float64
Key3 float64
}
Both struct{ Key float64 }
Underscores struct {
Key1 float64
Key2 float64
}
}{
Fractional: struct {
Key1 float64
Key2 float64
Key3 float64
}{
Key1: 1.0,
Key2: 3.1415,
Key3: -0.01,
},
Exponent: struct {
Key1 float64
Key2 float64
Key3 float64
}{
Key1: 5e+22,
Key2: 1e6,
Key3: -2e-2,
},
Both: struct{ Key float64 }{
Key: 6.626e-34,
},
Underscores: struct {
Key1 float64
Key2 float64
}{
Key1: 9224617.445991228313,
Key2: 1e100,
},
},
Boolean: struct {
True bool
False bool
}{
True: true,
False: false,
},
Datetime: struct {
Key1 time.Time
Key2 time.Time
Key3 time.Time
}{
Key1: time.Date(1979, 5, 27, 7, 32, 0, 0, time.UTC),
Key2: time.Date(1979, 5, 27, 0, 32, 0, 0, time.FixedZone("", -7*3600)),
Key3: time.Date(1979, 5, 27, 0, 32, 0, 999999000, time.FixedZone("", -7*3600)),
},
Array: struct {
Key1 []int64
Key2 []string
Key3 [][]int64
Key4 []interface{}
Key5 []int64
Key6 []int64
}{
Key1: []int64{1, 2, 3},
Key2: []string{"red", "yellow", "green"},
Key3: [][]int64{{1, 2}, {3, 4, 5}},
Key4: []interface{}{
[]interface{}{int64(1), int64(2)},
[]interface{}{"a", "b", "c"},
},
Key5: []int64{1, 2, 3},
Key6: []int64{1, 2},
},
Products: []struct {
Name string
Sku int64
Color string
}{
{
Name: "Hammer",
Sku: 738594937,
},
{},
{
Name: "Nail",
Sku: 284758393,
Color: "gray",
},
},
Fruit: []struct {
Name string
Physical struct {
Color string
Shape string
}
Variety []struct{ Name string }
}{
{
Name: "apple",
Physical: struct {
Color string
Shape string
}{
Color: "red",
Shape: "round",
},
Variety: []struct{ Name string }{
{Name: "red delicious"},
{Name: "granny smith"},
},
},
{
Name: "banana",
Variety: []struct{ Name string }{
{Name: "plantain"},
},
},
},
}
require.Equal(t, expected, d)
} }
// hugoFrontMatterbytes is a representative Hugo front-matter TOML document
// (scalar keys, arrays, and [[cascade]] array tables) used as input by the
// HugoFrontMatter benchmarks. The raw string content is data: do not reformat.
var hugoFrontMatterbytes = []byte(`
categories = ["Development", "VIM"]
date = "2012-04-06"
description = "spf13-vim is a cross platform distribution of vim plugins and resources for Vim."
slug = "spf13-vim-3-0-release-and-new-website"
tags = [".vimrc", "plugins", "spf13-vim", "vim"]
title = "spf13-vim 3.0 release and new website"
include_toc = true
show_comments = false
[[cascade]]
background = "yosemite.jpg"
[cascade._target]
kind = "page"
lang = "en"
path = "/blog/**"
[[cascade]]
background = "goldenbridge.jpg"
[cascade._target]
kind = "section"
`)
Executable
+273
View File
@@ -0,0 +1,273 @@
#!/usr/bin/env bash
# stderr writes its arguments to standard error, one echo invocation.
stderr() {
    >&2 echo "$@"
}
# usage prints "<script>: ERROR: <args>" followed by the full help text to
# stderr, then terminates the script with exit status 1. The help text is a
# heredoc whose contents are emitted verbatim (with $(basename) / $b expanded).
usage() {
    b=$(basename "$0")
    echo $b: ERROR: "$@" 1>&2

    cat 1>&2 <<EOF
DESCRIPTION

    $(basename "$0") is the script to run continuous integration commands for
    go-toml on unix.

    Requires Go and Git to be available in the PATH. Expects to be ran from the
    root of go-toml's Git repository.

USAGE

    $b COMMAND [OPTIONS...]

COMMANDS

benchmark [OPTIONS...] [BRANCH]

    Run benchmarks.

    ARGUMENTS

    BRANCH Optional. Defines which Git branch to use when running
            benchmarks.

    OPTIONS

    -d      Compare benchmarks of HEAD with BRANCH using benchstats. In
            this form the BRANCH argument is required.

    -a      Compare benchmarks of HEAD against go-toml v1 and
            BurntSushi/toml.
    -html   When used with -a, emits the output as HTML, ready to be
            embedded in the README.

coverage [OPTIONS...] [BRANCH]

    Generates code coverage.

    ARGUMENTS

    BRANCH  Optional. Defines which Git branch to use when reporting
            coverage. Defaults to HEAD.

    OPTIONS

    -d      Compare coverage of HEAD with the one of BRANCH. In this form,
            the BRANCH argument is required. Exit code is non-zero when
            coverage percentage decreased.
EOF
    exit 1
}
# cover runs `go test` with atomic coverage for the given branch and prints
# the per-function coverage report to stdout.
# HEAD is benchmarked from a copy of the working tree; any other branch is
# checked out into a temporary git worktree that is removed afterwards.
cover() {
    branch="${1}"
    dir="$(mktemp -d)"

    stderr "Executing coverage for ${branch} at ${dir}"

    if [ "${branch}" = "HEAD" ]; then
        # Copy the working tree so uncommitted changes are included.
        cp -r . "${dir}/"
    else
        git worktree add "$dir" "$branch"
    fi

    pushd "$dir"
    go test -covermode=atomic -coverprofile=coverage.out ./...
    go tool cover -func=coverage.out
    popd

    if [ "${branch}" != "HEAD" ]; then
        git worktree remove --force "$dir"
    fi
}
# coverage generates code coverage for a branch (default HEAD).
# With `-d BRANCH`, it compares HEAD's total coverage percentage against
# BRANCH's and returns non-zero when coverage decreased.
coverage() {
    case "$1" in
    -d)
        shift
        target="${1?Need to provide a target branch argument}"

        output_dir="$(mktemp -d)"
        target_out="${output_dir}/target.txt"
        head_out="${output_dir}/head.txt"

        cover "${target}" > "${target_out}"
        cover "HEAD" > "${head_out}"

        cat "${target_out}"
        cat "${head_out}"
        echo ""

        # Extract the "total" percentage from each report. Quote the file
        # arguments (SC2086) and let sed read the file directly instead of
        # piping through cat.
        target_pct="$(sed -E 's/.*total.*\t([0-9.]+)%/\1/;t;d' "${target_out}")"
        head_pct="$(sed -E 's/.*total.*\t([0-9.]+)%/\1/;t;d' "${head_out}")"
        echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%"

        delta_pct=$(echo "$head_pct - $target_pct" | bc -l)
        echo "Delta: ${delta_pct}"

        if [[ $delta_pct = \-* ]]; then
            echo "Regression!";
            return 1
        fi
        return 0
        ;;
    esac

    cover "${1-HEAD}"
}
# bench runs the benchmark suite for a branch and tees the output to a file.
#   $1 branch:  git ref to benchmark; HEAD uses a copy of the working tree.
#   $2 out:     file receiving the `go test -bench` output.
#   $3 replace: optional module path; when set, rewrites the benchmark
#               imports to that module (used by `benchmark -a` to compare
#               against go-toml v1 and BurntSushi/toml).
bench() {
    branch="${1}"
    out="${2}"
    replace="${3}"
    dir="$(mktemp -d)"

    stderr "Executing benchmark for ${branch} at ${dir}"

    if [ "${branch}" = "HEAD" ]; then
        # Copy the working tree so uncommitted changes are included.
        cp -r . "${dir}/"
    else
        git worktree add "$dir" "$branch"
    fi

    pushd "$dir"

    if [ "${replace}" != "" ]; then
        # Point the benchmark imports at the replacement module.
        find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2|${replace}|g" {} \;
        go get "${replace}"
        # hack: remove canada.toml.gz because it is not supported by
        # burntsushi, and replace is only used for benchmark -a
        rm -f benchmark/testdata/canada.toml.gz
    fi

    # -count=10 so benchstat has enough samples for significance.
    go test -bench=. -count=10 ./... | tee "${out}"
    popd

    if [ "${branch}" != "HEAD" ]; then
        git worktree remove --force "$dir"
    fi
}
# fmktemp creates a temporary file whose name carries the label $1 and
# prints its path. Handles both GNU mktemp (--suffix) and BSD/macOS
# mktemp (-t). The label is quoted to avoid word splitting (SC2086).
fmktemp() {
    if mktemp --version | grep GNU >/dev/null; then
        # GNU coreutils: append the label as a suffix.
        mktemp --suffix=-"$1"
    else
        # BSD/macOS: use the label as the template prefix.
        mktemp -t "$1"
    fi
}
# benchstathtml converts a benchstat CSV report ($1) into the HTML tables
# embedded in the README: one table with the common use-cases
# (ReferenceFile / HugoFrontMatter, plus the geomean row moved to the end)
# and a collapsed <details> table with every other benchmark.
# The heredoc below is a Python program piped to python3; it is quoted
# ('EOF') so nothing inside is expanded by the shell.
benchstathtml() {
python3 - $1 <<'EOF'
import sys

lines = []
stop = False

with open(sys.argv[1]) as f:
    for line in f.readlines():
        line = line.strip()
        if line == "":
            stop = True
        if not stop:
            lines.append(line.split(','))

results = []
for line in reversed(lines[1:]):
    v2 = float(line[1])
    results.append([
        line[0].replace("-32", ""),
        "%.1fx" % (float(line[3])/v2),  # v1
        "%.1fx" % (float(line[5])/v2),  # bs
    ])
# move geomean to the end
results.append(results[0])
del results[0]

def printtable(data):
    print("""
<table>
    <thead>
        <tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
    </thead>
    <tbody>""")
    for r in data:
        print("        <tr><td>{}</td><td>{}</td><td>{}</td></tr>".format(*r))
    print("""     </tbody>
</table>""")

def match(x):
    return "ReferenceFile" in x[0] or "HugoFrontMatter" in x[0]

above = [x for x in results if match(x)]
below = [x for x in results if not match(x)]

printtable(above)
print("<details><summary>See more</summary>")
print("""<p>The table above has the results of the most common use-cases. The table below
contains the results of all benchmarks, including unrealistic ones. It is
provided for completeness.</p>""")
printtable(below)
print('<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>')
print("</details>")
EOF
}
# benchmark dispatches the benchmark sub-command:
#   -d BRANCH   run benchmarks on BRANCH and HEAD, compare with benchstat.
#   -a [-html]  run HEAD's benchmarks against go-toml v2, go-toml v1 and
#               BurntSushi/toml (via import rewriting in bench), optionally
#               emitting the README HTML tables.
#   [BRANCH]    plain benchmark run; defaults to HEAD.
benchmark() {
    case "$1" in
    -d)
        shift
        target="${1?Need to provide a target branch argument}"

        old=`fmktemp ${target}`
        bench "${target}" "${old}"

        new=`fmktemp HEAD`
        bench HEAD "${new}"

        benchstat "${old}" "${new}"
        return 0
        ;;
    -a)
        shift

        v2stats=`fmktemp go-toml-v2`
        bench HEAD "${v2stats}" "github.com/pelletier/go-toml/v2"
        v1stats=`fmktemp go-toml-v1`
        bench HEAD "${v1stats}" "github.com/pelletier/go-toml"
        bsstats=`fmktemp bs-toml`
        bench HEAD "${bsstats}" "github.com/BurntSushi/toml"

        # Copies with stable names so benchstat's column headers are readable.
        cp "${v2stats}" go-toml-v2.txt
        cp "${v1stats}" go-toml-v1.txt
        cp "${bsstats}" bs-toml.txt

        if [ "$1" = "-html" ]; then
            tmpcsv=`fmktemp csv`
            benchstat -csv -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv
            benchstathtml $tmpcsv
        else
            benchstat -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt
        fi

        rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt
        return $?
    esac

    bench "${1-HEAD}" `mktemp`
}
# Top-level command dispatch: first argument selects the sub-command, the
# rest are forwarded. "$@" is quoted so arguments containing spaces survive
# word splitting (SC2068).
case "$1" in
    coverage) shift; coverage "$@";;
    benchmark) shift; benchmark "$@";;
    *) usage "bad argument $1";;
esac
+21 -58
View File
@@ -1,6 +1,7 @@
package toml package toml
import ( import (
"fmt"
"math" "math"
"strconv" "strconv"
"time" "time"
@@ -16,7 +17,7 @@ func parseInteger(b []byte) (int64, error) {
case 'o': case 'o':
return parseIntOct(b) return parseIntOct(b)
default: default:
return 0, newDecodeError(b[1:2], "invalid base: '%c'", b[1]) panic(fmt.Errorf("invalid base '%c', should have been checked by scanIntOrFloat", b[1]))
} }
} }
@@ -34,41 +35,26 @@ func parseLocalDate(b []byte) (LocalDate, error) {
return date, newDecodeError(b, "dates are expected to have the format YYYY-MM-DD") return date, newDecodeError(b, "dates are expected to have the format YYYY-MM-DD")
} }
var err error date.Year = parseDecimalDigits(b[0:4])
date.Year, err = parseDecimalDigits(b[0:4]) v := parseDecimalDigits(b[5:7])
if err != nil {
return date, err
}
v, err := parseDecimalDigits(b[5:7])
if err != nil {
return date, err
}
date.Month = time.Month(v) date.Month = time.Month(v)
date.Day, err = parseDecimalDigits(b[8:10]) date.Day = parseDecimalDigits(b[8:10])
if err != nil {
return date, err
}
return date, nil return date, nil
} }
func parseDecimalDigits(b []byte) (int, error) { func parseDecimalDigits(b []byte) int {
v := 0 v := 0
for i, c := range b { for _, c := range b {
if !isDigit(c) {
return 0, newDecodeError(b[i:i+1], "should be a digit (0-9)")
}
v *= 10 v *= 10
v += int(c - '0') v += int(c - '0')
} }
return v, nil return v
} }
func parseDateTime(b []byte) (time.Time, error) { func parseDateTime(b []byte) (time.Time, error) {
@@ -77,8 +63,6 @@ func parseDateTime(b []byte) (time.Time, error) {
// time-offset = "Z" / time-numoffset // time-offset = "Z" / time-numoffset
// time-numoffset = ( "+" / "-" ) time-hour ":" time-minute // time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
originalBytes := b
dt, b, err := parseLocalDateTime(b) dt, b, err := parseLocalDateTime(b)
if err != nil { if err != nil {
return time.Time{}, err return time.Time{}, err
@@ -87,7 +71,8 @@ func parseDateTime(b []byte) (time.Time, error) {
var zone *time.Location var zone *time.Location
if len(b) == 0 { if len(b) == 0 {
return time.Time{}, newDecodeError(originalBytes, "date-time is missing timezone") // parser should have checked that when assigning the date time node
panic("date time should have a timezone")
} }
if b[0] == 'Z' { if b[0] == 'Z' {
@@ -99,18 +84,15 @@ func parseDateTime(b []byte) (time.Time, error) {
return time.Time{}, newDecodeError(b, "invalid date-time timezone") return time.Time{}, newDecodeError(b, "invalid date-time timezone")
} }
direction := 1 direction := 1
switch b[0] { if b[0] == '-' {
case '+':
case '-':
direction = -1 direction = -1
default:
return time.Time{}, newDecodeError(b[0:1], "invalid timezone offset character")
} }
hours := digitsToInt(b[1:3]) hours := digitsToInt(b[1:3])
minutes := digitsToInt(b[4:6]) minutes := digitsToInt(b[4:6])
seconds := direction * (hours*3600 + minutes*60) seconds := direction * (hours*3600 + minutes*60)
zone = time.FixedZone("", seconds) zone = time.FixedZone("", seconds)
b = b[dateTimeByteLen:]
} }
if len(b) > 0 { if len(b) > 0 {
@@ -161,7 +143,6 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
// parseLocalTime is a bit different because it also returns the remaining // parseLocalTime is a bit different because it also returns the remaining
// []byte that is didn't need. This is to allow parseDateTime to parse those // []byte that is didn't need. This is to allow parseDateTime to parse those
// remaining bytes as a timezone. // remaining bytes as a timezone.
//nolint:cyclop,funlen
func parseLocalTime(b []byte) (LocalTime, []byte, error) { func parseLocalTime(b []byte) (LocalTime, []byte, error) {
var ( var (
nspow = [10]int{0, 1e8, 1e7, 1e6, 1e5, 1e4, 1e3, 1e2, 1e1, 1e0} nspow = [10]int{0, 1e8, 1e7, 1e6, 1e5, 1e4, 1e3, 1e2, 1e1, 1e0}
@@ -173,36 +154,24 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
return t, nil, newDecodeError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]") return t, nil, newDecodeError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
} }
var err error t.Hour = parseDecimalDigits(b[0:2])
t.Hour, err = parseDecimalDigits(b[0:2])
if err != nil {
return t, nil, err
}
if b[2] != ':' { if b[2] != ':' {
return t, nil, newDecodeError(b[2:3], "expecting colon between hours and minutes") return t, nil, newDecodeError(b[2:3], "expecting colon between hours and minutes")
} }
t.Minute, err = parseDecimalDigits(b[3:5]) t.Minute = parseDecimalDigits(b[3:5])
if err != nil {
return t, nil, err
}
if b[5] != ':' { if b[5] != ':' {
return t, nil, newDecodeError(b[5:6], "expecting colon between minutes and seconds") return t, nil, newDecodeError(b[5:6], "expecting colon between minutes and seconds")
} }
t.Second, err = parseDecimalDigits(b[6:8]) t.Second = parseDecimalDigits(b[6:8])
if err != nil {
return t, nil, err
}
if len(b) >= 9 && b[8] == '.' { const minLengthWithFrac = 9
if len(b) >= minLengthWithFrac && b[minLengthWithFrac-1] == '.' {
frac := 0 frac := 0
digits := 0 digits := 0
for i, c := range b[9:] { for i, c := range b[minLengthWithFrac:] {
if !isDigit(c) { if !isDigit(c) {
if i == 0 { if i == 0 {
return t, nil, newDecodeError(b[i:i+1], "need at least one digit after fraction point") return t, nil, newDecodeError(b[i:i+1], "need at least one digit after fraction point")
@@ -211,8 +180,8 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
break break
} }
//nolint:gomnd const maxFracPrecision = 9
if i >= 9 { if i >= maxFracPrecision {
return t, nil, newDecodeError(b[i:i+1], "maximum precision for date time is nanosecond") return t, nil, newDecodeError(b[i:i+1], "maximum precision for date time is nanosecond")
} }
@@ -231,8 +200,6 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
//nolint:cyclop //nolint:cyclop
func parseFloat(b []byte) (float64, error) { func parseFloat(b []byte) (float64, error) {
//nolint:godox
// TODO: inefficient
if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' { if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' {
return math.NaN(), nil return math.NaN(), nil
} }
@@ -252,7 +219,7 @@ func parseFloat(b []byte) (float64, error) {
f, err := strconv.ParseFloat(string(cleaned), 64) f, err := strconv.ParseFloat(string(cleaned), 64)
if err != nil { if err != nil {
return 0, newDecodeError(b, "coudn't parse float: %w", err) return 0, newDecodeError(b, "unable to parse float: %w", err)
} }
return f, nil return f, nil
@@ -315,10 +282,6 @@ func parseIntDec(b []byte) (int64, error) {
} }
func checkAndRemoveUnderscores(b []byte) ([]byte, error) { func checkAndRemoveUnderscores(b []byte) ([]byte, error) {
if len(b) == 0 {
return b, nil
}
if b[0] == '_' { if b[0] == '_' {
return nil, newDecodeError(b[0:1], "number cannot start with underscore") return nil, newDecodeError(b[0:1], "number cannot start with underscore")
} }
+1 -3
View File
@@ -1,4 +1,2 @@
/* // Package toml is a library to read and write TOML documents.
Package toml is a library to read and write TOML documents.
*/
package toml package toml
+3 -7
View File
@@ -5,7 +5,7 @@ import (
"strconv" "strconv"
"strings" "strings"
"github.com/pelletier/go-toml/v2/internal/unsafe" "github.com/pelletier/go-toml/v2/internal/danger"
) )
// DecodeError represents an error encountered during the parsing or decoding // DecodeError represents an error encountered during the parsing or decoding
@@ -105,13 +105,9 @@ func (e *DecodeError) Key() Key {
// highlight can be freely deallocated. // highlight can be freely deallocated.
//nolint:funlen //nolint:funlen
func wrapDecodeError(document []byte, de *decodeError) *DecodeError { func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
if de == nil { offset := danger.SubsliceOffset(document, de.highlight)
return nil
}
offset := unsafe.SubsliceOffset(document, de.highlight) errMessage := de.Error()
errMessage := de.message
errLine, errColumn := positionAtEnd(document[:offset]) errLine, errColumn := positionAtEnd(document[:offset])
before, after := linesOfContext(document, de.highlight, offset, 3) before, after := linesOfContext(document, de.highlight, offset, 3)
+21 -4
View File
@@ -12,7 +12,6 @@ import (
//nolint:funlen //nolint:funlen
func TestDecodeError(t *testing.T) { func TestDecodeError(t *testing.T) {
t.Parallel()
examples := []struct { examples := []struct {
desc string desc string
@@ -154,7 +153,7 @@ line 5`,
for _, e := range examples { for _, e := range examples {
e := e e := e
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
t.Parallel()
b := bytes.Buffer{} b := bytes.Buffer{}
b.Write([]byte(e.doc[0])) b.Write([]byte(e.doc[0]))
start := b.Len() start := b.Len()
@@ -181,6 +180,23 @@ line 5`,
} }
} }
func TestDecodeError_Accessors(t *testing.T) {
e := DecodeError{
message: "foo",
line: 1,
column: 2,
key: []string{"one", "two"},
human: "bar",
}
assert.Equal(t, "toml: foo", e.Error())
r, c := e.Position()
assert.Equal(t, 1, r)
assert.Equal(t, 2, c)
assert.Equal(t, Key{"one", "two"}, e.Key())
assert.Equal(t, "bar", e.String())
}
func ExampleDecodeError() { func ExampleDecodeError() {
doc := `name = 123__456` doc := `name = 123__456`
@@ -189,14 +205,15 @@ func ExampleDecodeError() {
fmt.Println(err) fmt.Println(err)
//nolint:errorlint
de := err.(*DecodeError) de := err.(*DecodeError)
fmt.Println(de.String()) fmt.Println(de.String())
row, col := de.Position() row, col := de.Position()
fmt.Println("error occured at row", row, "column", col) fmt.Println("error occurred at row", row, "column", col)
// Output: // Output:
// toml: number must have at least one digit between underscores // toml: number must have at least one digit between underscores
// 1| name = 123__456 // 1| name = 123__456
// | ~~ number must have at least one digit between underscores // | ~~ number must have at least one digit between underscores
// error occured at row 1 column 11 // error occurred at row 1 column 11
} }
+100
View File
@@ -0,0 +1,100 @@
package toml_test
import (
"testing"
"github.com/pelletier/go-toml/v2"
"github.com/stretchr/testify/require"
)
func TestFastSimple(t *testing.T) {
m := map[string]int64{}
err := toml.Unmarshal([]byte(`a = 42`), &m)
require.NoError(t, err)
require.Equal(t, map[string]int64{"a": 42}, m)
}
func TestFastSimpleString(t *testing.T) {
m := map[string]string{}
err := toml.Unmarshal([]byte(`a = "hello"`), &m)
require.NoError(t, err)
require.Equal(t, map[string]string{"a": "hello"}, m)
}
func TestFastSimpleInterface(t *testing.T) {
m := map[string]interface{}{}
err := toml.Unmarshal([]byte(`
a = "hello"
b = 42`), &m)
require.NoError(t, err)
require.Equal(t, map[string]interface{}{
"a": "hello",
"b": int64(42),
}, m)
}
func TestFastMultipartKeyInterface(t *testing.T) {
m := map[string]interface{}{}
err := toml.Unmarshal([]byte(`
a.interim = "test"
a.b.c = "hello"
b = 42`), &m)
require.NoError(t, err)
require.Equal(t, map[string]interface{}{
"a": map[string]interface{}{
"interim": "test",
"b": map[string]interface{}{
"c": "hello",
},
},
"b": int64(42),
}, m)
}
func TestFastExistingMap(t *testing.T) {
m := map[string]interface{}{
"ints": map[string]int{},
}
err := toml.Unmarshal([]byte(`
ints.one = 1
ints.two = 2
strings.yo = "hello"`), &m)
require.NoError(t, err)
require.Equal(t, map[string]interface{}{
"ints": map[string]interface{}{
"one": int64(1),
"two": int64(2),
},
"strings": map[string]interface{}{
"yo": "hello",
},
}, m)
}
func TestFastArrayTable(t *testing.T) {
b := []byte(`
[root]
[[root.nested]]
name = 'Bob'
[[root.nested]]
name = 'Alice'
`)
m := map[string]interface{}{}
err := toml.Unmarshal(b, &m)
require.NoError(t, err)
require.Equal(t, map[string]interface{}{
"root": map[string]interface{}{
"nested": []interface{}{
map[string]interface{}{
"name": "Bob",
},
map[string]interface{}{
"name": "Alice",
},
},
},
}, m)
}
+2 -1
View File
@@ -2,4 +2,5 @@ module github.com/pelletier/go-toml/v2
go 1.15 go 1.15
require github.com/stretchr/testify v1.7.0 // latest (v1.7.0) doesn't have the fix for time.Time
require github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942
+2 -2
View File
@@ -3,8 +3,8 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+42 -28
View File
@@ -2,6 +2,9 @@ package ast
import ( import (
"fmt" "fmt"
"unsafe"
"github.com/pelletier/go-toml/v2/internal/danger"
) )
// Iterator starts uninitialized, you need to call Next() first. // Iterator starts uninitialized, you need to call Next() first.
@@ -14,7 +17,7 @@ import (
// } // }
type Iterator struct { type Iterator struct {
started bool started bool
node Node node *Node
} }
// Next moves the iterator forward and returns true if points to a node, false // Next moves the iterator forward and returns true if points to a node, false
@@ -28,8 +31,14 @@ func (c *Iterator) Next() bool {
return c.node.Valid() return c.node.Valid()
} }
// IsLast returns true if the current node of the iterator is the last one.
// Subsequent call to Next() will return false.
func (c *Iterator) IsLast() bool {
return c.node.next == 0
}
// Node returns a copy of the node pointed at by the iterator. // Node returns a copy of the node pointed at by the iterator.
func (c *Iterator) Node() Node { func (c *Iterator) Node() *Node {
return c.node return c.node
} }
@@ -44,14 +53,13 @@ type Root struct {
func (r *Root) Iterator() Iterator { func (r *Root) Iterator() Iterator {
it := Iterator{} it := Iterator{}
if len(r.nodes) > 0 { if len(r.nodes) > 0 {
it.node = r.nodes[0] it.node = &r.nodes[0]
} }
return it return it
} }
func (r *Root) at(idx int) Node { func (r *Root) at(idx Reference) *Node {
// TODO: unsafe to point to the node directly return &r.nodes[idx]
return r.nodes[idx]
} }
// Arrays have one child per element in the array. // Arrays have one child per element in the array.
@@ -63,42 +71,48 @@ func (r *Root) at(idx int) Node {
// children []Node // children []Node
type Node struct { type Node struct {
Kind Kind Kind Kind
Data []byte // Raw bytes from the input Raw Range // Raw bytes from the input.
Data []byte // Node value (could be either allocated or referencing the input).
// next idx (in the root array). 0 if last of the collection. // References to other nodes, as offsets in the backing array from this
next int // node. References can go backward, so those can be negative.
// child idx (in the root array). 0 if no child. next int // 0 if last element
child int child int // 0 if no child
// pointer to the root array }
root *Root
type Range struct {
Offset uint32
Length uint32
} }
// Next returns a copy of the next node, or an invalid Node if there is no // Next returns a copy of the next node, or an invalid Node if there is no
// next node. // next node.
func (n Node) Next() Node { func (n *Node) Next() *Node {
if n.next <= 0 { if n.next == 0 {
return noNode return nil
} }
return n.root.at(n.next) ptr := unsafe.Pointer(n)
size := unsafe.Sizeof(Node{})
return (*Node)(danger.Stride(ptr, size, n.next))
} }
// Child returns a copy of the first child node of this node. Other children // Child returns a copy of the first child node of this node. Other children
// can be accessed calling Next on the first child. // can be accessed calling Next on the first child.
// Returns an invalid Node if there is none. // Returns an invalid Node if there is none.
func (n Node) Child() Node { func (n *Node) Child() *Node {
if n.child <= 0 { if n.child == 0 {
return noNode return nil
} }
return n.root.at(n.child) ptr := unsafe.Pointer(n)
size := unsafe.Sizeof(Node{})
return (*Node)(danger.Stride(ptr, size, n.child))
} }
// Valid returns true if the node's kind is set (not to Invalid). // Valid returns true if the node's kind is set (not to Invalid).
func (n Node) Valid() bool { func (n *Node) Valid() bool {
return n.Kind != Invalid return n != nil
} }
var noNode = Node{}
// Key returns the child nodes making the Key on a supported node. Panics // Key returns the child nodes making the Key on a supported node. Panics
// otherwise. // otherwise.
// They are guaranteed to be all be of the Kind Key. A simple key would return // They are guaranteed to be all be of the Kind Key. A simple key would return
@@ -121,13 +135,13 @@ func (n *Node) Key() Iterator {
// Value returns a pointer to the value node of a KeyValue. // Value returns a pointer to the value node of a KeyValue.
// Guaranteed to be non-nil. // Guaranteed to be non-nil.
// Panics if not called on a KeyValue node, or if the Children are malformed. // Panics if not called on a KeyValue node, or if the Children are malformed.
func (n Node) Value() Node { func (n *Node) Value() *Node {
assertKind(KeyValue, n) assertKind(KeyValue, *n)
return n.Child() return n.Child()
} }
// Children returns an iterator over a node's children. // Children returns an iterator over a node's children.
func (n Node) Children() Iterator { func (n *Node) Children() Iterator {
return Iterator{node: n.Child()} return Iterator{node: n.Child()}
} }
+11 -20
View File
@@ -1,12 +1,11 @@
package ast package ast
type Reference struct { type Reference int
idx int
set bool const InvalidReference Reference = -1
}
func (r Reference) Valid() bool { func (r Reference) Valid() bool {
return r.set return r != InvalidReference
} }
type Builder struct { type Builder struct {
@@ -18,8 +17,8 @@ func (b *Builder) Tree() *Root {
return &b.tree return &b.tree
} }
func (b *Builder) NodeAt(ref Reference) Node { func (b *Builder) NodeAt(ref Reference) *Node {
return b.tree.at(ref.idx) return b.tree.at(ref)
} }
func (b *Builder) Reset() { func (b *Builder) Reset() {
@@ -28,33 +27,25 @@ func (b *Builder) Reset() {
} }
func (b *Builder) Push(n Node) Reference { func (b *Builder) Push(n Node) Reference {
n.root = &b.tree
b.lastIdx = len(b.tree.nodes) b.lastIdx = len(b.tree.nodes)
b.tree.nodes = append(b.tree.nodes, n) b.tree.nodes = append(b.tree.nodes, n)
return Reference{ return Reference(b.lastIdx)
idx: b.lastIdx,
set: true,
}
} }
func (b *Builder) PushAndChain(n Node) Reference { func (b *Builder) PushAndChain(n Node) Reference {
n.root = &b.tree
newIdx := len(b.tree.nodes) newIdx := len(b.tree.nodes)
b.tree.nodes = append(b.tree.nodes, n) b.tree.nodes = append(b.tree.nodes, n)
if b.lastIdx >= 0 { if b.lastIdx >= 0 {
b.tree.nodes[b.lastIdx].next = newIdx b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
} }
b.lastIdx = newIdx b.lastIdx = newIdx
return Reference{ return Reference(b.lastIdx)
idx: b.lastIdx,
set: true,
}
} }
func (b *Builder) AttachChild(parent Reference, child Reference) { func (b *Builder) AttachChild(parent Reference, child Reference) {
b.tree.nodes[parent.idx].child = child.idx b.tree.nodes[parent].child = int(child) - int(parent)
} }
func (b *Builder) Chain(from Reference, to Reference) { func (b *Builder) Chain(from Reference, to Reference) {
b.tree.nodes[from.idx].next = to.idx b.tree.nodes[from].next = int(to) - int(from)
} }
@@ -1,4 +1,4 @@
package unsafe package danger
import ( import (
"fmt" "fmt"
@@ -57,3 +57,9 @@ func BytesRange(start []byte, end []byte) []byte {
return start[:l] return start[:l]
} }
func Stride(ptr unsafe.Pointer, size uintptr, offset int) unsafe.Pointer {
// TODO: replace with unsafe.Add when Go 1.17 is released
// https://github.com/golang/go/issues/40481
return unsafe.Pointer(uintptr(ptr) + uintptr(int(size)*offset))
}
@@ -1,15 +1,16 @@
package unsafe_test package danger_test
import ( import (
"testing" "testing"
"unsafe"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/pelletier/go-toml/v2/internal/unsafe" "github.com/pelletier/go-toml/v2/internal/danger"
) )
func TestUnsafeSubsliceOffsetValid(t *testing.T) { func TestSubsliceOffsetValid(t *testing.T) {
examples := []struct { examples := []struct {
desc string desc string
test func() ([]byte, []byte) test func() ([]byte, []byte)
@@ -28,13 +29,13 @@ func TestUnsafeSubsliceOffsetValid(t *testing.T) {
for _, e := range examples { for _, e := range examples {
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
d, s := e.test() d, s := e.test()
offset := unsafe.SubsliceOffset(d, s) offset := danger.SubsliceOffset(d, s)
assert.Equal(t, e.offset, offset) assert.Equal(t, e.offset, offset)
}) })
} }
} }
func TestUnsafeSubsliceOffsetInvalid(t *testing.T) { func TestSubsliceOffsetInvalid(t *testing.T) {
examples := []struct { examples := []struct {
desc string desc string
test func() ([]byte, []byte) test func() ([]byte, []byte)
@@ -72,13 +73,22 @@ func TestUnsafeSubsliceOffsetInvalid(t *testing.T) {
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
d, s := e.test() d, s := e.test()
require.Panics(t, func() { require.Panics(t, func() {
unsafe.SubsliceOffset(d, s) danger.SubsliceOffset(d, s)
}) })
}) })
} }
} }
func TestUnsafeBytesRange(t *testing.T) { func TestStride(t *testing.T) {
a := []byte{1, 2, 3, 4}
x := &a[1]
n := (*byte)(danger.Stride(unsafe.Pointer(x), unsafe.Sizeof(byte(0)), 1))
require.Equal(t, &a[2], n)
n = (*byte)(danger.Stride(unsafe.Pointer(x), unsafe.Sizeof(byte(0)), -1))
require.Equal(t, &a[0], n)
}
func TestBytesRange(t *testing.T) {
type fn = func() ([]byte, []byte) type fn = func() ([]byte, []byte)
examples := []struct { examples := []struct {
desc string desc string
@@ -157,10 +167,10 @@ func TestUnsafeBytesRange(t *testing.T) {
start, end := e.test() start, end := e.test()
if e.expected == nil { if e.expected == nil {
require.Panics(t, func() { require.Panics(t, func() {
unsafe.BytesRange(start, end) danger.BytesRange(start, end)
}) })
} else { } else {
res := unsafe.BytesRange(start, end) res := danger.BytesRange(start, end)
require.Equal(t, e.expected, res) require.Equal(t, e.expected, res)
} }
}) })
@@ -223,11 +223,13 @@ type testSubDoc struct {
unexported int `toml:"shouldntBeHere"` unexported int `toml:"shouldntBeHere"`
} }
var biteMe = "Bite me" var (
var float1 float32 = 12.3 biteMe = "Bite me"
var float2 float32 = 45.6 float1 float32 = 12.3
var float3 float32 = 78.9 float2 float32 = 45.6
var subdoc = testSubDoc{"Second", 0} float3 float32 = 78.9
subdoc = testSubDoc{"Second", 0}
)
var docData = testDoc{ var docData = testDoc{
Title: "TOML Marshal Testing", Title: "TOML Marshal Testing",
@@ -382,7 +384,7 @@ var intErrTomls = []string{
} }
func TestErrUnmarshal(t *testing.T) { func TestErrUnmarshal(t *testing.T) {
var errTomls = []string{ errTomls := []string{
"bool = truly\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"", "bool = truly\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"",
"bool = true\ndate = 1979-05-27T07:3200Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"", "bool = true\ndate = 1979-05-27T07:3200Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"",
"bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123a4\nint = 5000\nstring = \"Bite me\"", "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123a4\nint = 5000\nstring = \"Bite me\"",
@@ -468,7 +470,7 @@ func TestEmptyUnmarshalOmit(t *testing.T) {
Map map[string]string `toml:"map,omitempty"` Map map[string]string `toml:"map,omitempty"`
} }
var emptyTestData2 = emptyMarshalTestStruct2{ emptyTestData2 := emptyMarshalTestStruct2{
Title: "Placeholder", Title: "Placeholder",
Bool: false, Bool: false,
Int: 0, Int: 0,
@@ -496,21 +498,23 @@ type pointerMarshalTestStruct struct {
DblPtr *[]*[]*string DblPtr *[]*[]*string
} }
var pointerStr = "Hello" var (
var pointerList = []string{"Hello back"} pointerStr = "Hello"
var pointerListPtr = []*string{&pointerStr} pointerList = []string{"Hello back"}
var pointerMap = map[string]string{"response": "Goodbye"} pointerListPtr = []*string{&pointerStr}
var pointerMapPtr = map[string]*string{"alternate": &pointerStr} pointerMap = map[string]string{"response": "Goodbye"}
var pointerTestData = pointerMarshalTestStruct{ pointerMapPtr = map[string]*string{"alternate": &pointerStr}
Str: &pointerStr, pointerTestData = pointerMarshalTestStruct{
List: &pointerList, Str: &pointerStr,
ListPtr: &pointerListPtr, List: &pointerList,
Map: &pointerMap, ListPtr: &pointerListPtr,
MapPtr: &pointerMapPtr, Map: &pointerMap,
EmptyStr: nil, MapPtr: &pointerMapPtr,
EmptyList: nil, EmptyStr: nil,
EmptyMap: nil, EmptyList: nil,
} EmptyMap: nil,
}
)
var pointerTestToml = []byte(`List = ["Hello back"] var pointerTestToml = []byte(`List = ["Hello back"]
ListPtr = ["Hello"] ListPtr = ["Hello"]
@@ -538,15 +542,17 @@ func TestUnmarshalTypeMismatch(t *testing.T) {
type nestedMarshalTestStruct struct { type nestedMarshalTestStruct struct {
String [][]string String [][]string
//Struct [][]basicMarshalTestSubStruct // Struct [][]basicMarshalTestSubStruct
StringPtr *[]*[]*string StringPtr *[]*[]*string
// StructPtr *[]*[]*basicMarshalTestSubStruct // StructPtr *[]*[]*basicMarshalTestSubStruct
} }
var str1 = "Three" var (
var str2 = "Four" str1 = "Three"
var strPtr = []*string{&str1, &str2} str2 = "Four"
var strPtr2 = []*[]*string{&strPtr} strPtr = []*string{&str1, &str2}
strPtr2 = []*[]*string{&strPtr}
)
var nestedTestData = nestedMarshalTestStruct{ var nestedTestData = nestedMarshalTestStruct{
String: [][]string{{"Five", "Six"}, {"One", "Two"}}, String: [][]string{{"Five", "Six"}, {"One", "Two"}},
@@ -597,6 +603,7 @@ var nestedCustomMarshalerData = customMarshalerParent{
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"] var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
me = "Maiku Suteda" me = "Maiku Suteda"
`) `)
var nestedCustomMarshalerTomlForUnmarshal = []byte(`[friends] var nestedCustomMarshalerTomlForUnmarshal = []byte(`[friends]
FirstName = "Sally" FirstName = "Sally"
LastName = "Fields"`) LastName = "Fields"`)
@@ -613,11 +620,11 @@ func (x *IntOrString) MarshalTOML() ([]byte, error) {
} }
func TestUnmarshalTextMarshaler(t *testing.T) { func TestUnmarshalTextMarshaler(t *testing.T) {
var nested = struct { nested := struct {
Friends textMarshaler `toml:"friends"` Friends textMarshaler `toml:"friends"`
}{} }{}
var expected = struct { expected := struct {
Friends textMarshaler `toml:"friends"` Friends textMarshaler `toml:"friends"`
}{ }{
Friends: textMarshaler{FirstName: "Sally", LastName: "Fields"}, Friends: textMarshaler{FirstName: "Sally", LastName: "Fields"},
@@ -1360,7 +1367,6 @@ func TestUnmarshalPreservesUnexportedFields(t *testing.T) {
t.Run("unexported field should not be set from toml", func(t *testing.T) { t.Run("unexported field should not be set from toml", func(t *testing.T) {
var actual unexportedFieldPreservationTest var actual unexportedFieldPreservationTest
err := toml.Unmarshal([]byte(doc), &actual) err := toml.Unmarshal([]byte(doc), &actual)
if err != nil { if err != nil {
t.Fatal("did not expect an error") t.Fatal("did not expect an error")
} }
@@ -1394,7 +1400,6 @@ func TestUnmarshalPreservesUnexportedFields(t *testing.T) {
Nested3: &unexportedFieldPreservationTestNested{"baz", "bax"}, Nested3: &unexportedFieldPreservationTestNested{"baz", "bax"},
} }
err := toml.Unmarshal([]byte(doc), &actual) err := toml.Unmarshal([]byte(doc), &actual)
if err != nil { if err != nil {
t.Fatal("did not expect an error") t.Fatal("did not expect an error")
} }
@@ -1431,7 +1436,6 @@ func TestUnmarshalLocalDate(t *testing.T) {
var obj dateStruct var obj dateStruct
err := toml.Unmarshal([]byte(doc), &obj) err := toml.Unmarshal([]byte(doc), &obj)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@@ -1457,7 +1461,6 @@ func TestUnmarshalLocalDate(t *testing.T) {
var obj dateStruct var obj dateStruct
err := toml.Unmarshal([]byte(doc), &obj) err := toml.Unmarshal([]byte(doc), &obj)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@@ -1495,7 +1498,8 @@ func TestUnmarshalLocalDateTime(t *testing.T) {
Second: 0, Second: 0,
Nanosecond: 0, Nanosecond: 0,
}, },
}}, },
},
{ {
name: "with nanoseconds", name: "with nanoseconds",
in: "1979-05-27T00:32:00.999999", in: "1979-05-27T00:32:00.999999",
@@ -1526,7 +1530,6 @@ func TestUnmarshalLocalDateTime(t *testing.T) {
var obj dateStruct var obj dateStruct
err := toml.Unmarshal([]byte(doc), &obj) err := toml.Unmarshal([]byte(doc), &obj)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@@ -1544,7 +1547,6 @@ func TestUnmarshalLocalDateTime(t *testing.T) {
var obj dateStruct var obj dateStruct
err := toml.Unmarshal([]byte(doc), &obj) err := toml.Unmarshal([]byte(doc), &obj)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@@ -1613,7 +1615,6 @@ func TestUnmarshalLocalTime(t *testing.T) {
var obj dateStruct var obj dateStruct
err := toml.Unmarshal([]byte(doc), &obj) err := toml.Unmarshal([]byte(doc), &obj)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@@ -2283,8 +2284,7 @@ func (d *durationString) UnmarshalTOML(v interface{}) error {
return nil return nil
} }
type config437Error struct { type config437Error struct{}
}
func (e *config437Error) UnmarshalTOML(v interface{}) error { func (e *config437Error) UnmarshalTOML(v interface{}) error {
return errors.New("expected") return errors.New("expected")
+4 -4
View File
@@ -11,19 +11,19 @@ type KeyTracker struct {
} }
// UpdateTable sets the state of the tracker with the AST table node. // UpdateTable sets the state of the tracker with the AST table node.
func (t *KeyTracker) UpdateTable(node ast.Node) { func (t *KeyTracker) UpdateTable(node *ast.Node) {
t.reset() t.reset()
t.Push(node) t.Push(node)
} }
// UpdateArrayTable sets the state of the tracker with the AST array table node. // UpdateArrayTable sets the state of the tracker with the AST array table node.
func (t *KeyTracker) UpdateArrayTable(node ast.Node) { func (t *KeyTracker) UpdateArrayTable(node *ast.Node) {
t.reset() t.reset()
t.Push(node) t.Push(node)
} }
// Push the given key on the stack. // Push the given key on the stack.
func (t *KeyTracker) Push(node ast.Node) { func (t *KeyTracker) Push(node *ast.Node) {
it := node.Key() it := node.Key()
for it.Next() { for it.Next() {
t.k = append(t.k, string(it.Node().Data)) t.k = append(t.k, string(it.Node().Data))
@@ -31,7 +31,7 @@ func (t *KeyTracker) Push(node ast.Node) {
} }
// Pop key from stack. // Pop key from stack.
func (t *KeyTracker) Pop(node ast.Node) { func (t *KeyTracker) Pop(node *ast.Node) {
it := node.Key() it := node.Key()
for it.Next() { for it.Next() {
t.k = t.k[:len(t.k)-1] t.k = t.k[:len(t.k)-1]
+150 -93
View File
@@ -1,6 +1,7 @@
package tracker package tracker
import ( import (
"bytes"
"fmt" "fmt"
"github.com/pelletier/go-toml/v2/internal/ast" "github.com/pelletier/go-toml/v2/internal/ast"
@@ -29,67 +30,92 @@ func (k keyKind) String() string {
panic("missing keyKind string mapping") panic("missing keyKind string mapping")
} }
// SeenTracker tracks which keys have been seen with which TOML type to flag duplicates // SeenTracker tracks which keys have been seen with which TOML type to flag
// and mismatches according to the spec. // duplicates and mismatches according to the spec.
//
// Each node in the visited tree is represented by an entry. Each entry has an
// identifier, which is provided by a counter. Entries are stored in the array
// entries. As new nodes are discovered (referenced for the first time in the
// TOML document), entries are created and appended to the array. An entry
// points to its parent using its id.
//
// To find whether a given key (sequence of []byte) has already been visited,
// the entries are linearly searched, looking for one with the right name and
// parent id.
//
// Given that all keys appear in the document after their parent, it is
// guaranteed that all descendants of a node are stored after the node, this
// speeds up the search process.
//
// When encountering [[array tables]], the descendants of that node are removed
// to allow that branch of the tree to be "rediscovered". To maintain the
// invariant above, the deletion process needs to keep the order of entries.
// This results in more copies in that case.
type SeenTracker struct { type SeenTracker struct {
root *info entries []entry
current *info currentIdx int
nextID int
} }
type info struct { type entry struct {
parent *info id int
parent int
name []byte
kind keyKind kind keyKind
children map[string]*info
explicit bool explicit bool
} }
func (i *info) Clear() { // Remove all descendent of node at position idx.
i.children = nil func (s *SeenTracker) clear(idx int) {
p := s.entries[idx].id
rest := clear(p, s.entries[idx+1:])
s.entries = s.entries[:idx+1+len(rest)]
} }
func (i *info) Has(k string) (*info, bool) { func clear(parentID int, entries []entry) []entry {
c, ok := i.children[k] for i := 0; i < len(entries); {
return c, ok if entries[i].parent == parentID {
} id := entries[i].id
copy(entries[i:], entries[i+1:])
func (i *info) SetKind(kind keyKind) { entries = entries[:len(entries)-1]
i.kind = kind rest := clear(id, entries[i:])
} entries = entries[:i+len(rest)]
} else {
func (i *info) CreateTable(k string, explicit bool) *info { i++
return i.createChild(k, tableKind, explicit) }
}
func (i *info) CreateArrayTable(k string, explicit bool) *info {
return i.createChild(k, arrayTableKind, explicit)
}
func (i *info) createChild(k string, kind keyKind, explicit bool) *info {
if i.children == nil {
i.children = make(map[string]*info, 1)
} }
return entries
}
x := &info{ func (s *SeenTracker) create(parentIdx int, name []byte, kind keyKind, explicit bool) int {
parent: i, parentID := s.id(parentIdx)
idx := len(s.entries)
s.entries = append(s.entries, entry{
id: s.nextID,
parent: parentID,
name: name,
kind: kind, kind: kind,
explicit: explicit, explicit: explicit,
} })
i.children[k] = x s.nextID++
return x return idx
} }
// CheckExpression takes a top-level node and checks that it does not contain keys // CheckExpression takes a top-level node and checks that it does not contain keys
// that have been seen in previous calls, and validates that types are consistent. // that have been seen in previous calls, and validates that types are consistent.
func (s *SeenTracker) CheckExpression(node ast.Node) error { func (s *SeenTracker) CheckExpression(node *ast.Node) error {
if s.root == nil { if s.entries == nil {
s.root = &info{ // s.entries = make([]entry, 0, 8)
kind: tableKind, // Skip ID = 0 to remove the confusion between nodes whose parent has
} // id 0 and root nodes (parent id is 0 because it's the zero value).
s.current = s.root s.nextID = 1
// Start unscoped, so idx is negative.
s.currentIdx = -1
} }
switch node.Kind { switch node.Kind {
case ast.KeyValue: case ast.KeyValue:
return s.checkKeyValue(s.current, node) return s.checkKeyValue(node)
case ast.Table: case ast.Table:
return s.checkTable(node) return s.checkTable(node)
case ast.ArrayTable: case ast.ArrayTable:
@@ -97,104 +123,135 @@ func (s *SeenTracker) CheckExpression(node ast.Node) error {
default: default:
panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind)) panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
} }
} }
func (s *SeenTracker) checkTable(node ast.Node) error {
s.current = s.root
func (s *SeenTracker) checkTable(node *ast.Node) error {
it := node.Key() it := node.Key()
// handle the first parts of the key, excluding the last one
parentIdx := -1
// This code is duplicated in checkArrayTable. This is because factoring
// it in a function requires to copy the iterator, or allocate it to the
// heap, which is not cheap.
for it.Next() { for it.Next() {
if !it.Node().Next().Valid() { if it.IsLast() {
break break
} }
k := string(it.Node().Data) k := it.Node().Data
child, found := s.current.Has(k)
if !found { idx := s.find(parentIdx, k)
child = s.current.CreateTable(k, false)
if idx < 0 {
idx = s.create(parentIdx, k, tableKind, false)
} }
s.current = child parentIdx = idx
} }
// handle the last part of the key k := it.Node().Data
k := string(it.Node().Data) idx := s.find(parentIdx, k)
i, found := s.current.Has(k) if idx >= 0 {
if found { kind := s.entries[idx].kind
if i.kind != tableKind { if kind != tableKind {
return fmt.Errorf("toml: key %s should be a table, not a %s", k, i.kind) return fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind)
} }
if i.explicit { if s.entries[idx].explicit {
return fmt.Errorf("toml: table %s already exists", k) return fmt.Errorf("toml: table %s already exists", string(k))
} }
i.explicit = true s.entries[idx].explicit = true
s.current = i
} else { } else {
s.current = s.current.CreateTable(k, true) idx = s.create(parentIdx, k, tableKind, true)
} }
s.currentIdx = idx
return nil return nil
} }
func (s *SeenTracker) checkArrayTable(node ast.Node) error { func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
s.current = s.root
it := node.Key() it := node.Key()
// handle the first parts of the key, excluding the last one parentIdx := -1
for it.Next() { for it.Next() {
if !it.Node().Next().Valid() { if it.IsLast() {
break break
} }
k := string(it.Node().Data) k := it.Node().Data
child, found := s.current.Has(k)
if !found { idx := s.find(parentIdx, k)
child = s.current.CreateTable(k, false)
if idx < 0 {
idx = s.create(parentIdx, k, tableKind, false)
} }
s.current = child parentIdx = idx
} }
// handle the last part of the key k := it.Node().Data
k := string(it.Node().Data) idx := s.find(parentIdx, k)
info, found := s.current.Has(k) if idx >= 0 {
if found { kind := s.entries[idx].kind
if info.kind != arrayTableKind { if kind != arrayTableKind {
return fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", info.kind, k) return fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", kind, string(k))
} }
info.Clear() s.clear(idx)
} else { } else {
info = s.current.CreateArrayTable(k, true) idx = s.create(parentIdx, k, arrayTableKind, true)
} }
s.current = info s.currentIdx = idx
return nil return nil
} }
func (s *SeenTracker) checkKeyValue(context *info, node ast.Node) error { func (s *SeenTracker) checkKeyValue(node *ast.Node) error {
it := node.Key() it := node.Key()
// handle the first parts of the key, excluding the last one parentIdx := s.currentIdx
for it.Next() { for it.Next() {
k := string(it.Node().Data) k := it.Node().Data
child, found := context.Has(k)
if found { idx := s.find(parentIdx, k)
if child.kind != tableKind {
return fmt.Errorf("toml: expected %s to be a table, not a %s", k, child.kind) if idx >= 0 {
if s.entries[idx].kind != tableKind {
return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), s.entries[idx].kind)
} }
} else { } else {
child = context.CreateTable(k, false) idx = s.create(parentIdx, k, tableKind, false)
} }
context = child parentIdx = idx
} }
kind := valueKind
if node.Value().Kind == ast.InlineTable { if node.Value().Kind == ast.InlineTable {
context.SetKind(tableKind) kind = tableKind
} else {
context.SetKind(valueKind)
} }
s.entries[parentIdx].kind = kind
return nil return nil
} }
func (s *SeenTracker) id(idx int) int {
if idx >= 0 {
return s.entries[idx].id
}
return 0
}
func (s *SeenTracker) find(parentIdx int, k []byte) int {
parentID := s.id(parentIdx)
for i := parentIdx + 1; i < len(s.entries); i++ {
if s.entries[i].parent == parentID && bytes.Equal(s.entries[i].name, k) {
return i
}
}
return -1
}
-18
View File
@@ -26,7 +26,6 @@ func cmpEqual(x, y interface{}) bool {
} }
func TestDates(t *testing.T) { func TestDates(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
date LocalDate date LocalDate
@@ -64,7 +63,6 @@ func TestDates(t *testing.T) {
} }
func TestDateIsValid(t *testing.T) { func TestDateIsValid(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
date LocalDate date LocalDate
@@ -91,7 +89,6 @@ func TestDateIsValid(t *testing.T) {
} }
func TestParseDate(t *testing.T) { func TestParseDate(t *testing.T) {
t.Parallel()
var emptyDate LocalDate var emptyDate LocalDate
@@ -118,7 +115,6 @@ func TestParseDate(t *testing.T) {
} }
func TestDateArithmetic(t *testing.T) { func TestDateArithmetic(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
desc string desc string
@@ -180,7 +176,6 @@ func TestDateArithmetic(t *testing.T) {
} }
func TestDateBefore(t *testing.T) { func TestDateBefore(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
d1, d2 LocalDate d1, d2 LocalDate
@@ -198,7 +193,6 @@ func TestDateBefore(t *testing.T) {
} }
func TestDateAfter(t *testing.T) { func TestDateAfter(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
d1, d2 LocalDate d1, d2 LocalDate
@@ -215,7 +209,6 @@ func TestDateAfter(t *testing.T) {
} }
func TestTimeToString(t *testing.T) { func TestTimeToString(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
str string str string
@@ -249,7 +242,6 @@ func TestTimeToString(t *testing.T) {
} }
func TestTimeOf(t *testing.T) { func TestTimeOf(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
time time.Time time time.Time
@@ -265,7 +257,6 @@ func TestTimeOf(t *testing.T) {
} }
func TestTimeIsValid(t *testing.T) { func TestTimeIsValid(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
time LocalTime time LocalTime
@@ -291,7 +282,6 @@ func TestTimeIsValid(t *testing.T) {
} }
func TestDateTimeToString(t *testing.T) { func TestDateTimeToString(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
str string str string
@@ -323,7 +313,6 @@ func TestDateTimeToString(t *testing.T) {
} }
func TestParseDateTimeErrors(t *testing.T) { func TestParseDateTimeErrors(t *testing.T) {
t.Parallel()
for _, str := range []string{ for _, str := range []string{
"", "",
@@ -339,7 +328,6 @@ func TestParseDateTimeErrors(t *testing.T) {
} }
func TestDateTimeOf(t *testing.T) { func TestDateTimeOf(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
time time.Time time time.Time
@@ -361,7 +349,6 @@ func TestDateTimeOf(t *testing.T) {
} }
func TestDateTimeIsValid(t *testing.T) { func TestDateTimeIsValid(t *testing.T) {
t.Parallel()
// No need to be exhaustive here; it's just LocalDate.IsValid && LocalTime.IsValid. // No need to be exhaustive here; it's just LocalDate.IsValid && LocalTime.IsValid.
for _, test := range []struct { for _, test := range []struct {
@@ -380,7 +367,6 @@ func TestDateTimeIsValid(t *testing.T) {
} }
func TestDateTimeIn(t *testing.T) { func TestDateTimeIn(t *testing.T) {
t.Parallel()
dt := LocalDateTime{LocalDate{2016, 1, 2}, LocalTime{3, 4, 5, 6}} dt := LocalDateTime{LocalDate{2016, 1, 2}, LocalTime{3, 4, 5, 6}}
@@ -391,7 +377,6 @@ func TestDateTimeIn(t *testing.T) {
} }
func TestDateTimeBefore(t *testing.T) { func TestDateTimeBefore(t *testing.T) {
t.Parallel()
d1 := LocalDate{2016, 12, 31} d1 := LocalDate{2016, 12, 31}
d2 := LocalDate{2017, 1, 1} d2 := LocalDate{2017, 1, 1}
@@ -414,7 +399,6 @@ func TestDateTimeBefore(t *testing.T) {
} }
func TestDateTimeAfter(t *testing.T) { func TestDateTimeAfter(t *testing.T) {
t.Parallel()
d1 := LocalDate{2016, 12, 31} d1 := LocalDate{2016, 12, 31}
d2 := LocalDate{2017, 1, 1} d2 := LocalDate{2017, 1, 1}
@@ -437,7 +421,6 @@ func TestDateTimeAfter(t *testing.T) {
} }
func TestMarshalJSON(t *testing.T) { func TestMarshalJSON(t *testing.T) {
t.Parallel()
for _, test := range []struct { for _, test := range []struct {
value interface{} value interface{}
@@ -459,7 +442,6 @@ func TestMarshalJSON(t *testing.T) {
} }
func TestUnmarshalJSON(t *testing.T) { func TestUnmarshalJSON(t *testing.T) {
t.Parallel()
var ( var (
d LocalDate d LocalDate
+27 -68
View File
@@ -127,6 +127,10 @@ func (enc *Encoder) Encode(v interface{}) error {
ctx.inline = enc.tablesInline ctx.inline = enc.tablesInline
if v == nil {
return fmt.Errorf("toml: cannot encode a nil interface")
}
b, err := enc.encode(b, ctx, reflect.ValueOf(v)) b, err := enc.encode(b, ctx, reflect.ValueOf(v))
if err != nil { if err != nil {
return err return err
@@ -193,9 +197,11 @@ func (ctx *encoderCtx) isRoot() bool {
//nolint:cyclop,funlen //nolint:cyclop,funlen
func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
i, ok := v.Interface().(time.Time) if !v.IsZero() {
if ok { i, ok := v.Interface().(time.Time)
return i.AppendFormat(b, time.RFC3339), nil if ok {
return i.AppendFormat(b, time.RFC3339), nil
}
} }
if v.Type().Implements(textMarshalerType) { if v.Type().Implements(textMarshalerType) {
@@ -273,11 +279,6 @@ func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v r
if !ctx.hasKey { if !ctx.hasKey {
panic("caller of encodeKv should have set the key in the context") panic("caller of encodeKv should have set the key in the context")
} }
if isNil(v) {
return b, nil
}
b = enc.indent(ctx.indent, b) b = enc.indent(ctx.indent, b)
b, err = enc.encodeKey(b, ctx.key) b, err = enc.encodeKey(b, ctx.key)
@@ -470,12 +471,7 @@ func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte
continue continue
} }
table, err := willConvertToTableOrArrayTable(ctx, v) if willConvertToTableOrArrayTable(ctx, v) {
if err != nil {
return nil, err
}
if table {
t.pushTable(k, v, emptyValueOptions) t.pushTable(k, v, emptyValueOptions)
} else { } else {
t.pushKV(k, v, emptyValueOptions) t.pushKV(k, v, emptyValueOptions)
@@ -543,18 +539,13 @@ func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]b
continue continue
} }
willConvert, err := willConvertToTableOrArrayTable(ctx, f)
if err != nil {
return nil, err
}
options := valueOptions{ options := valueOptions{
multiline: fieldBoolTag(fieldType, "multiline"), multiline: fieldBoolTag(fieldType, "multiline"),
} }
inline := fieldBoolTag(fieldType, "inline") inline := fieldBoolTag(fieldType, "inline")
if inline || !willConvert { if inline || !willConvertToTableOrArrayTable(ctx, f) {
t.pushKV(k, f, options) t.pushKV(k, f, options)
} else { } else {
t.pushTable(k, f, options) t.pushTable(k, f, options)
@@ -640,21 +631,8 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
} }
} }
for _, table := range t.tables { if len(t.tables) > 0 {
if first { panic("inline table cannot contain nested tables, online key-values")
first = false
} else {
b = append(b, `, `...)
}
ctx.setKey(table.Key)
b, err = enc.encode(b, ctx, table.Value)
if err != nil {
return nil, err
}
b = append(b, '\n')
} }
b = append(b, "}"...) b = append(b, "}"...)
@@ -662,63 +640,53 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
return b, nil return b, nil
} }
var textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem() func willConvertToTable(ctx encoderCtx, v reflect.Value) bool {
if !v.IsValid() {
func willConvertToTable(ctx encoderCtx, v reflect.Value) (bool, error) { return false
}
if v.Type() == timeType || v.Type().Implements(textMarshalerType) { if v.Type() == timeType || v.Type().Implements(textMarshalerType) {
return false, nil return false
} }
t := v.Type() t := v.Type()
switch t.Kind() { switch t.Kind() {
case reflect.Map, reflect.Struct: case reflect.Map, reflect.Struct:
return !ctx.inline, nil return !ctx.inline
case reflect.Interface: case reflect.Interface:
if v.IsNil() {
return false, fmt.Errorf("toml: encoding a nil interface is not supported")
}
return willConvertToTable(ctx, v.Elem()) return willConvertToTable(ctx, v.Elem())
case reflect.Ptr: case reflect.Ptr:
if v.IsNil() { if v.IsNil() {
return false, nil return false
} }
return willConvertToTable(ctx, v.Elem()) return willConvertToTable(ctx, v.Elem())
default: default:
return false, nil return false
} }
} }
func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) (bool, error) { func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool {
t := v.Type() t := v.Type()
if t.Kind() == reflect.Interface { if t.Kind() == reflect.Interface {
if v.IsNil() {
return false, fmt.Errorf("toml: encoding a nil interface is not supported")
}
return willConvertToTableOrArrayTable(ctx, v.Elem()) return willConvertToTableOrArrayTable(ctx, v.Elem())
} }
if t.Kind() == reflect.Slice { if t.Kind() == reflect.Slice {
if v.Len() == 0 { if v.Len() == 0 {
// An empty slice should be a kv = []. // An empty slice should be a kv = [].
return false, nil return false
} }
for i := 0; i < v.Len(); i++ { for i := 0; i < v.Len(); i++ {
t, err := willConvertToTable(ctx, v.Index(i)) t := willConvertToTable(ctx, v.Index(i))
if err != nil {
return false, err
}
if !t { if !t {
return false, nil return false
} }
} }
return true, nil return true
} }
return willConvertToTable(ctx, v) return willConvertToTable(ctx, v)
@@ -731,12 +699,7 @@ func (enc *Encoder) encodeSlice(b []byte, ctx encoderCtx, v reflect.Value) ([]by
return b, nil return b, nil
} }
allTables, err := willConvertToTableOrArrayTable(ctx, v) if willConvertToTableOrArrayTable(ctx, v) {
if err != nil {
return nil, err
}
if allTables {
return enc.encodeSliceAsArrayTable(b, ctx, v) return enc.encodeSliceAsArrayTable(b, ctx, v)
} }
@@ -746,10 +709,6 @@ func (enc *Encoder) encodeSlice(b []byte, ctx encoderCtx, v reflect.Value) ([]by
// caller should have checked that v is a slice that only contains values that // caller should have checked that v is a slice that only contains values that
// encode into tables. // encode into tables.
func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
if v.Len() == 0 {
return b, nil
}
ctx.shiftKey() ctx.shiftKey()
var err error var err error
+281 -11
View File
@@ -14,7 +14,11 @@ import (
//nolint:funlen //nolint:funlen
func TestMarshal(t *testing.T) { func TestMarshal(t *testing.T) {
t.Parallel() someInt := 42
type structInline struct {
A interface{} `inline:"true"`
}
examples := []struct { examples := []struct {
desc string desc string
@@ -298,13 +302,218 @@ A = [
] ]
`, `,
}, },
{
desc: "nil interface not supported at root",
v: nil,
err: true,
},
{
desc: "nil interface not supported in slice",
v: map[string]interface{}{
"a": []interface{}{"a", nil, 2},
},
err: true,
},
{
desc: "nil pointer in slice uses zero value",
v: struct {
A []*int
}{
A: []*int{nil},
},
expected: `A = [0]`,
},
{
desc: "nil pointer in slice uses zero value",
v: struct {
A []*int
}{
A: []*int{nil},
},
expected: `A = [0]`,
},
{
desc: "pointer in slice",
v: struct {
A []*int
}{
A: []*int{&someInt},
},
expected: `A = [42]`,
},
{
desc: "inline table in inline table",
v: structInline{
A: structInline{
A: structInline{
A: "hello",
},
},
},
expected: `A = {A = {A = 'hello'}}`,
},
{
desc: "empty slice in map",
v: map[string][]string{
"a": {},
},
expected: `a = []`,
},
{
desc: "map in slice",
v: map[string][]map[string]string{
"a": {{"hello": "world"}},
},
expected: `
[[a]]
hello = 'world'`,
},
{
desc: "newline in map in slice",
v: map[string][]map[string]string{
"a\n": {{"hello": "world"}},
},
err: true,
},
{
desc: "newline in map in slice",
v: map[string][]map[string]*customTextMarshaler{
"a": {{"hello": &customTextMarshaler{1}}},
},
err: true,
},
{
desc: "empty slice of empty struct",
v: struct {
A []struct{}
}{
A: []struct{}{},
},
expected: `A = []`,
},
{
desc: "nil field is ignored",
v: struct {
A interface{}
}{
A: nil,
},
expected: ``,
},
{
desc: "private fields are ignored",
v: struct {
Public string
private string
}{
Public: "shown",
private: "hidden",
},
expected: `Public = 'shown'`,
},
{
desc: "fields tagged - are ignored",
v: struct {
Public string `toml:"-"`
private string
}{
Public: "hidden",
},
expected: ``,
},
{
desc: "nil value in map is ignored",
v: map[string]interface{}{
"A": nil,
},
expected: ``,
},
{
desc: "new line in table key",
v: map[string]interface{}{
"hello\nworld": 42,
},
err: true,
},
{
desc: "new line in parent of nested table key",
v: map[string]interface{}{
"hello\nworld": map[string]interface{}{
"inner": 42,
},
},
err: true,
},
{
desc: "new line in nested table key",
v: map[string]interface{}{
"parent": map[string]interface{}{
"in\ner": map[string]interface{}{
"foo": 42,
},
},
},
err: true,
},
{
desc: "invalid map key",
v: map[int]interface{}{},
err: true,
},
{
desc: "unhandled type",
v: struct {
A chan int
}{
A: make(chan int),
},
err: true,
},
{
desc: "numbers",
v: struct {
A float32
B uint64
C uint32
D uint16
E uint8
F uint
G int64
H int32
I int16
J int8
K int
}{
A: 1.1,
B: 42,
C: 42,
D: 42,
E: 42,
F: 42,
G: 42,
H: 42,
I: 42,
J: 42,
K: 42,
},
expected: `
A = 1.1
B = 42
C = 42
D = 42
E = 42
F = 42
G = 42
H = 42
I = 42
J = 42
K = 42`,
},
} }
for _, e := range examples { for _, e := range examples {
e := e e := e
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
t.Parallel()
b, err := toml.Marshal(e.v) b, err := toml.Marshal(e.v)
if e.err { if e.err {
require.Error(t, err) require.Error(t, err)
@@ -396,8 +605,6 @@ func equalStringsIgnoreNewlines(t *testing.T, expected string, actual string) {
//nolint:funlen //nolint:funlen
func TestMarshalIndentTables(t *testing.T) { func TestMarshalIndentTables(t *testing.T) {
t.Parallel()
examples := []struct { examples := []struct {
desc string desc string
v interface{} v interface{}
@@ -448,8 +655,6 @@ root = 'value0'
for _, e := range examples { for _, e := range examples {
e := e e := e
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
t.Parallel()
var buf strings.Builder var buf strings.Builder
enc := toml.NewEncoder(&buf) enc := toml.NewEncoder(&buf)
enc.SetIndentTables(true) enc.SetIndentTables(true)
@@ -460,9 +665,76 @@ root = 'value0'
} }
} }
func TestIssue436(t *testing.T) { type customTextMarshaler struct {
t.Parallel() value int64
}
func (c *customTextMarshaler) MarshalText() ([]byte, error) {
if c.value == 1 {
return nil, fmt.Errorf("cannot represent 1 because this is a silly test")
}
return []byte(fmt.Sprintf("::%d", c.value)), nil
}
func TestMarshalTextMarshaler_NoRoot(t *testing.T) {
c := customTextMarshaler{}
_, err := toml.Marshal(&c)
require.Error(t, err)
}
func TestMarshalTextMarshaler_Error(t *testing.T) {
m := map[string]interface{}{"a": &customTextMarshaler{value: 1}}
_, err := toml.Marshal(m)
require.Error(t, err)
}
func TestMarshalTextMarshaler_ErrorInline(t *testing.T) {
type s struct {
A map[string]interface{} `inline:"true"`
}
d := s{
A: map[string]interface{}{"a": &customTextMarshaler{value: 1}},
}
_, err := toml.Marshal(d)
require.Error(t, err)
}
func TestMarshalTextMarshaler(t *testing.T) {
m := map[string]interface{}{"a": &customTextMarshaler{value: 2}}
r, err := toml.Marshal(m)
require.NoError(t, err)
equalStringsIgnoreNewlines(t, "a = '::2'", string(r))
}
type brokenWriter struct{}
func (b *brokenWriter) Write([]byte) (int, error) {
return 0, fmt.Errorf("dead")
}
func TestEncodeToBrokenWriter(t *testing.T) {
w := brokenWriter{}
enc := toml.NewEncoder(&w)
err := enc.Encode(map[string]string{"hello": "world"})
require.Error(t, err)
}
func TestEncoderSetIndentSymbol(t *testing.T) {
var w strings.Builder
enc := toml.NewEncoder(&w)
enc.SetIndentTables(true)
enc.SetIndentSymbol(">>>")
err := enc.Encode(map[string]map[string]string{"parent": {"hello": "world"}})
require.NoError(t, err)
expected := `
[parent]
>>>hello = 'world'`
equalStringsIgnoreNewlines(t, expected, w.String())
}
func TestIssue436(t *testing.T) {
data := []byte(`{"a": [ { "b": { "c": "d" } } ]}`) data := []byte(`{"a": [ { "b": { "c": "d" } } ]}`)
var v interface{} var v interface{}
@@ -482,8 +754,6 @@ c = 'd'
} }
func TestIssue424(t *testing.T) { func TestIssue424(t *testing.T) {
t.Parallel()
type Message1 struct { type Message1 struct {
Text string Text string
} }
+105 -81
View File
@@ -2,10 +2,10 @@ package toml
import ( import (
"bytes" "bytes"
"fmt"
"strconv" "strconv"
"github.com/pelletier/go-toml/v2/internal/ast" "github.com/pelletier/go-toml/v2/internal/ast"
"github.com/pelletier/go-toml/v2/internal/danger"
) )
type parser struct { type parser struct {
@@ -17,9 +17,20 @@ type parser struct {
first bool first bool
} }
func (p *parser) Range(b []byte) ast.Range {
return ast.Range{
Offset: uint32(danger.SubsliceOffset(p.data, b)),
Length: uint32(len(b)),
}
}
func (p *parser) Raw(raw ast.Range) []byte {
return p.data[raw.Offset : raw.Offset+raw.Length]
}
func (p *parser) Reset(b []byte) { func (p *parser) Reset(b []byte) {
p.builder.Reset() p.builder.Reset()
p.ref = ast.Reference{} p.ref = ast.InvalidReference
p.data = b p.data = b
p.left = b p.left = b
p.err = nil p.err = nil
@@ -33,7 +44,7 @@ func (p *parser) NextExpression() bool {
} }
p.builder.Reset() p.builder.Reset()
p.ref = ast.Reference{} p.ref = ast.InvalidReference
for { for {
if len(p.left) == 0 || p.err != nil { if len(p.left) == 0 || p.err != nil {
@@ -62,7 +73,7 @@ func (p *parser) NextExpression() bool {
} }
} }
func (p *parser) Expression() ast.Node { func (p *parser) Expression() *ast.Node {
return p.builder.NodeAt(p.ref) return p.builder.NodeAt(p.ref)
} }
@@ -77,7 +88,6 @@ func (p *parser) parseNewline(b []byte) ([]byte, error) {
if b[0] == '\r' { if b[0] == '\r' {
_, rest, err := scanWindowsNewline(b) _, rest, err := scanWindowsNewline(b)
return rest, err return rest, err
} }
@@ -88,7 +98,7 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
// expression = ws [ comment ] // expression = ws [ comment ]
// expression =/ ws keyval ws [ comment ] // expression =/ ws keyval ws [ comment ]
// expression =/ ws table ws [ comment ] // expression =/ ws table ws [ comment ]
var ref ast.Reference ref := ast.InvalidReference
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
@@ -199,16 +209,20 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
key, b, err := p.parseKey(b) key, b, err := p.parseKey(b)
if err != nil { if err != nil {
return ast.Reference{}, nil, err return ast.InvalidReference, nil, err
} }
// keyval-sep = ws %x3D ws ; = // keyval-sep = ws %x3D ws ; =
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
if len(b) == 0 {
return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there")
}
b, err = expect('=', b) b, err = expect('=', b)
if err != nil { if err != nil {
return ast.Reference{}, nil, err return ast.InvalidReference, nil, err
} }
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
@@ -227,7 +241,7 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
//nolint:cyclop,funlen //nolint:cyclop,funlen
func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) { func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
// val = string / boolean / array / inline-table / date-time / float / integer // val = string / boolean / array / inline-table / date-time / float / integer
var ref ast.Reference ref := ast.InvalidReference
if len(b) == 0 { if len(b) == 0 {
return ref, nil, newDecodeError(b, "expected value, not eof") return ref, nil, newDecodeError(b, "expected value, not eof")
@@ -238,32 +252,36 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
switch c { switch c {
case '"': case '"':
var raw []byte
var v []byte var v []byte
if scanFollowsMultilineBasicStringDelimiter(b) { if scanFollowsMultilineBasicStringDelimiter(b) {
v, b, err = p.parseMultilineBasicString(b) raw, v, b, err = p.parseMultilineBasicString(b)
} else { } else {
v, b, err = p.parseBasicString(b) raw, v, b, err = p.parseBasicString(b)
} }
if err == nil { if err == nil {
ref = p.builder.Push(ast.Node{ ref = p.builder.Push(ast.Node{
Kind: ast.String, Kind: ast.String,
Raw: p.Range(raw),
Data: v, Data: v,
}) })
} }
return ref, b, err return ref, b, err
case '\'': case '\'':
var raw []byte
var v []byte var v []byte
if scanFollowsMultilineLiteralStringDelimiter(b) { if scanFollowsMultilineLiteralStringDelimiter(b) {
v, b, err = p.parseMultilineLiteralString(b) raw, v, b, err = p.parseMultilineLiteralString(b)
} else { } else {
v, b, err = p.parseLiteralString(b) raw, v, b, err = p.parseLiteralString(b)
} }
if err == nil { if err == nil {
ref = p.builder.Push(ast.Node{ ref = p.builder.Push(ast.Node{
Kind: ast.String, Kind: ast.String,
Raw: p.Range(raw),
Data: v, Data: v,
}) })
} }
@@ -304,16 +322,17 @@ func atmost(b []byte, n int) []byte {
if n >= len(b) { if n >= len(b) {
return b return b
} }
return b[:n] return b[:n]
} }
func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, error) { func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
v, rest, err := scanLiteralString(b) v, rest, err := scanLiteralString(b)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
return v[1 : len(v)-1], rest, nil return v, v[1 : len(v)-1], rest, nil
} }
func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) { func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
@@ -397,8 +416,7 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
} }
if len(b) == 0 { if len(b) == 0 {
//nolint:godox return parent, nil, newDecodeError(b, "array is incomplete")
return parent, nil, unexpectedCharacter{b: b} // TODO: should be unexpected EOF
} }
if b[0] == ']' { if b[0] == ']' {
@@ -474,10 +492,10 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
return b, nil return b, nil
} }
func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, error) { func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
token, rest, err := scanMultilineLiteralString(b) token, rest, err := scanMultilineLiteralString(b)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
i := 3 i := 3
@@ -489,11 +507,11 @@ func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, error) {
i += 2 i += 2
} }
return token[i : len(token)-3], rest, err return token, token[i : len(token)-3], rest, err
} }
//nolint:funlen,gocognit,cyclop //nolint:funlen,gocognit,cyclop
func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, error) { func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
// ml-basic-string-delim // ml-basic-string-delim
// ml-basic-string-delim = 3quotation-mark // ml-basic-string-delim = 3quotation-mark
@@ -506,11 +524,9 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, error) {
// mlb-escaped-nl = escape ws newline *( wschar / newline ) // mlb-escaped-nl = escape ws newline *( wschar / newline )
token, rest, err := scanMultilineBasicString(b) token, rest, err := scanMultilineBasicString(b)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
var builder bytes.Buffer
i := 3 i := 3
// skip the immediate new line // skip the immediate new line
@@ -520,6 +536,21 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, error) {
i += 2 i += 2
} }
// fast path
startIdx := i
endIdx := len(token) - len(`"""`)
for ; i < endIdx; i++ {
if token[i] == '\\' {
break
}
}
if i == endIdx {
return token, token[startIdx:endIdx], rest, nil
}
var builder bytes.Buffer
builder.Write(token[startIdx:i])
// The scanner ensures that the token starts and ends with quotes and that // The scanner ensures that the token starts and ends with quotes and that
// escapes are balanced. // escapes are balanced.
for ; i < len(token)-3; i++ { for ; i < len(token)-3; i++ {
@@ -562,30 +593,30 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, error) {
case 't': case 't':
builder.WriteByte('\t') builder.WriteByte('\t')
case 'u': case 'u':
x, err := hexToString(token[i+3:len(token)-3], 4) x, err := hexToString(atmost(token[i+1:], 4), 4)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
builder.WriteString(x) builder.WriteString(x)
i += 4 i += 4
case 'U': case 'U':
x, err := hexToString(token[i+3:len(token)-3], 8) x, err := hexToString(atmost(token[i+1:], 8), 8)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
builder.WriteString(x) builder.WriteString(x)
i += 8 i += 8
default: default:
return nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c) return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
} }
} else { } else {
builder.WriteByte(c) builder.WriteByte(c)
} }
} }
return builder.Bytes(), rest, nil return token, builder.Bytes(), rest, nil
} }
func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) { func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
@@ -597,33 +628,30 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
// dotted-key = simple-key 1*( dot-sep simple-key ) // dotted-key = simple-key 1*( dot-sep simple-key )
// //
// dot-sep = ws %x2E ws ; . Period // dot-sep = ws %x2E ws ; . Period
key, b, err := p.parseSimpleKey(b) raw, key, b, err := p.parseSimpleKey(b)
if err != nil { if err != nil {
return ast.Reference{}, nil, err return ast.InvalidReference, nil, err
} }
ref := p.builder.Push(ast.Node{ ref := p.builder.Push(ast.Node{
Kind: ast.Key, Kind: ast.Key,
Raw: p.Range(raw),
Data: key, Data: key,
}) })
for { for {
b = p.parseWhitespace(b) b = p.parseWhitespace(b)
if len(b) > 0 && b[0] == '.' { if len(b) > 0 && b[0] == '.' {
b, err = expect('.', b) b = p.parseWhitespace(b[1:])
if err != nil {
return ref, nil, err
}
b = p.parseWhitespace(b) raw, key, b, err = p.parseSimpleKey(b)
key, b, err = p.parseSimpleKey(b)
if err != nil { if err != nil {
return ref, nil, err return ref, nil, err
} }
p.builder.PushAndChain(ast.Node{ p.builder.PushAndChain(ast.Node{
Kind: ast.Key, Kind: ast.Key,
Raw: p.Range(raw),
Data: key, Data: key,
}) })
} else { } else {
@@ -634,13 +662,12 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
return ref, b, nil return ref, b, nil
} }
func (p *parser) parseSimpleKey(b []byte) (key, rest []byte, err error) { func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
// simple-key = quoted-key / unquoted-key // simple-key = quoted-key / unquoted-key
// unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
// quoted-key = basic-string / literal-string // quoted-key = basic-string / literal-string
if len(b) == 0 { if len(b) == 0 {
//nolint:godox return nil, nil, nil, newDecodeError(b, "key is incomplete")
return nil, nil, unexpectedCharacter{b: b} // TODO: should be unexpected EOF
} }
switch { switch {
@@ -649,15 +676,15 @@ func (p *parser) parseSimpleKey(b []byte) (key, rest []byte, err error) {
case b[0] == '"': case b[0] == '"':
return p.parseBasicString(b) return p.parseBasicString(b)
case isUnquotedKeyChar(b[0]): case isUnquotedKeyChar(b[0]):
return scanUnquotedKey(b) key, rest = scanUnquotedKey(b)
return key, key, rest, nil
default: default:
//nolint:godox return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
return nil, nil, unexpectedCharacter{b: b} // TODO: should be unexpected EOF
} }
} }
//nolint:funlen,cyclop //nolint:funlen,cyclop
func (p *parser) parseBasicString(b []byte) ([]byte, []byte, error) { func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
// basic-string = quotation-mark *basic-char quotation-mark // basic-string = quotation-mark *basic-char quotation-mark
// quotation-mark = %x22 ; " // quotation-mark = %x22 ; "
// basic-char = basic-unescaped / escaped // basic-char = basic-unescaped / escaped
@@ -674,14 +701,28 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, error) {
// escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX // escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX
token, rest, err := scanBasicString(b) token, rest, err := scanBasicString(b)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
}
// fast path
i := len(`"`)
startIdx := i
endIdx := len(token) - len(`"`)
for ; i < endIdx; i++ {
if token[i] == '\\' {
break
}
}
if i == endIdx {
return token, token[startIdx:endIdx], rest, nil
} }
var builder bytes.Buffer var builder bytes.Buffer
builder.Write(token[startIdx:i])
// The scanner ensures that the token starts and ends with quotes and that // The scanner ensures that the token starts and ends with quotes and that
// escapes are balanced. // escapes are balanced.
for i := 1; i < len(token)-1; i++ { for ; i < len(token)-1; i++ {
c := token[i] c := token[i]
if c == '\\' { if c == '\\' {
i++ i++
@@ -703,7 +744,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, error) {
case 'u': case 'u':
x, err := hexToString(token[i+1:len(token)-1], 4) x, err := hexToString(token[i+1:len(token)-1], 4)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
builder.WriteString(x) builder.WriteString(x)
@@ -711,20 +752,20 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, error) {
case 'U': case 'U':
x, err := hexToString(token[i+1:len(token)-1], 8) x, err := hexToString(token[i+1:len(token)-1], 8)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, nil, err
} }
builder.WriteString(x) builder.WriteString(x)
i += 8 i += 8
default: default:
return nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c) return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
} }
} else { } else {
builder.WriteByte(c) builder.WriteByte(c)
} }
} }
return builder.Bytes(), rest, nil return token, builder.Bytes(), rest, nil
} }
func hexToString(b []byte, length int) (string, error) { func hexToString(b []byte, length int) (string, error) {
@@ -757,7 +798,7 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
switch b[0] { switch b[0] {
case 'i': case 'i':
if !scanFollowsInf(b) { if !scanFollowsInf(b) {
return ast.Reference{}, nil, newDecodeError(atmost(b, 3), "expected 'inf'") return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'")
} }
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
@@ -766,7 +807,7 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
}), b[3:], nil }), b[3:], nil
case 'n': case 'n':
if !scanFollowsNan(b) { if !scanFollowsNan(b) {
return ast.Reference{}, nil, newDecodeError(atmost(b, 3), "expected 'nan'") return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'")
} }
return p.builder.Push(ast.Node{ return p.builder.Push(ast.Node{
@@ -825,11 +866,14 @@ byteLoop:
c := b[i] c := b[i]
switch { switch {
case isDigit(c) || c == '-': case isDigit(c):
case c == '-':
const minOffsetOfTz = 8
if i >= minOffsetOfTz {
hasTz = true
}
case c == 'T' || c == ':' || c == '.': case c == 'T' || c == ':' || c == '.':
hasTime = true hasTime = true
continue byteLoop
case c == '+' || c == '-' || c == 'Z': case c == '+' || c == '-' || c == 'Z':
hasTz = true hasTz = true
case c == ' ': case c == ' ':
@@ -854,9 +898,6 @@ byteLoop:
kind = ast.LocalDateTime kind = ast.LocalDateTime
} }
} else { } else {
if hasTz {
return ast.Reference{}, nil, newDecodeError(b, "date-time has timezone but not time component")
}
kind = ast.LocalDate kind = ast.LocalDate
} }
@@ -922,7 +963,7 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
}), b[i+3:], nil }), b[i+3:], nil
} }
return ast.Reference{}, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number") return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
} }
if c == 'n' { if c == 'n' {
@@ -933,14 +974,14 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
}), b[i+3:], nil }), b[i+3:], nil
} }
return ast.Reference{}, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number") return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
} }
break break
} }
if i == 0 { if i == 0 {
return ast.Reference{}, b, newDecodeError(b, "incomplete number") return ast.InvalidReference, b, newDecodeError(b, "incomplete number")
} }
kind := ast.Integer kind := ast.Integer
@@ -977,26 +1018,9 @@ func isValidBinaryRune(r byte) bool {
} }
func expect(x byte, b []byte) ([]byte, error) { func expect(x byte, b []byte) ([]byte, error) {
if len(b) == 0 {
return nil, newDecodeError(b[:0], "expecting %#U", x)
}
if b[0] != x { if b[0] != x {
return nil, newDecodeError(b[0:1], "expected character %U", x) return nil, newDecodeError(b[0:1], "expected character %U", x)
} }
return b[1:], nil return b[1:], nil
} }
type unexpectedCharacter struct {
r byte
b []byte
}
func (u unexpectedCharacter) Error() string {
if len(u.b) == 0 {
return fmt.Sprintf("expected %#U, not EOF", u.r)
}
return fmt.Sprintf("expected %#U, not %#U", u.r, u.b[0])
}
+1 -7
View File
@@ -9,8 +9,6 @@ import (
//nolint:funlen //nolint:funlen
func TestParser_AST_Numbers(t *testing.T) { func TestParser_AST_Numbers(t *testing.T) {
t.Parallel()
examples := []struct { examples := []struct {
desc string desc string
input string input string
@@ -137,7 +135,6 @@ func TestParser_AST_Numbers(t *testing.T) {
for _, e := range examples { for _, e := range examples {
e := e e := e
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
t.Parallel()
p := parser{} p := parser{}
p.Reset([]byte(`A = ` + e.input)) p.Reset([]byte(`A = ` + e.input))
p.NextExpression() p.NextExpression()
@@ -168,7 +165,7 @@ type (
} }
) )
func compareNode(t *testing.T, e astNode, n ast.Node) { func compareNode(t *testing.T, e astNode, n *ast.Node) {
t.Helper() t.Helper()
require.Equal(t, e.Kind, n.Kind) require.Equal(t, e.Kind, n.Kind)
require.Equal(t, e.Data, n.Data) require.Equal(t, e.Data, n.Data)
@@ -200,8 +197,6 @@ func compareIterator(t *testing.T, expected []astNode, actual ast.Iterator) {
//nolint:funlen //nolint:funlen
func TestParser_AST(t *testing.T) { func TestParser_AST(t *testing.T) {
t.Parallel()
examples := []struct { examples := []struct {
desc string desc string
input string input string
@@ -340,7 +335,6 @@ func TestParser_AST(t *testing.T) {
for _, e := range examples { for _, e := range examples {
e := e e := e
t.Run(e.desc, func(t *testing.T) { t.Run(e.desc, func(t *testing.T) {
t.Parallel()
p := parser{} p := parser{}
p.Reset([]byte(e.input)) p.Reset([]byte(e.input))
p.NextExpression() p.NextExpression()
+3 -3
View File
@@ -30,15 +30,15 @@ func scanFollowsNan(b []byte) bool {
return scanFollows(b, `nan`) return scanFollows(b, `nan`)
} }
func scanUnquotedKey(b []byte) ([]byte, []byte, error) { func scanUnquotedKey(b []byte) ([]byte, []byte) {
// unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
for i := 0; i < len(b); i++ { for i := 0; i < len(b); i++ {
if !isUnquotedKeyChar(b[i]) { if !isUnquotedKeyChar(b[i]) {
return b[:i], b[i:], nil return b[:i], b[i:]
} }
} }
return b, b[len(b):], nil return b, b[len(b):]
} }
func isUnquotedKeyChar(r byte) bool { func isUnquotedKeyChar(r byte) bool {
+25 -6
View File
@@ -2,6 +2,7 @@ package toml
import ( import (
"github.com/pelletier/go-toml/v2/internal/ast" "github.com/pelletier/go-toml/v2/internal/ast"
"github.com/pelletier/go-toml/v2/internal/danger"
"github.com/pelletier/go-toml/v2/internal/tracker" "github.com/pelletier/go-toml/v2/internal/tracker"
) )
@@ -14,7 +15,7 @@ type strict struct {
missing []decodeError missing []decodeError
} }
func (s *strict) EnterTable(node ast.Node) { func (s *strict) EnterTable(node *ast.Node) {
if !s.Enabled { if !s.Enabled {
return return
} }
@@ -22,7 +23,7 @@ func (s *strict) EnterTable(node ast.Node) {
s.key.UpdateTable(node) s.key.UpdateTable(node)
} }
func (s *strict) EnterArrayTable(node ast.Node) { func (s *strict) EnterArrayTable(node *ast.Node) {
if !s.Enabled { if !s.Enabled {
return return
} }
@@ -30,7 +31,7 @@ func (s *strict) EnterArrayTable(node ast.Node) {
s.key.UpdateArrayTable(node) s.key.UpdateArrayTable(node)
} }
func (s *strict) EnterKeyValue(node ast.Node) { func (s *strict) EnterKeyValue(node *ast.Node) {
if !s.Enabled { if !s.Enabled {
return return
} }
@@ -38,7 +39,7 @@ func (s *strict) EnterKeyValue(node ast.Node) {
s.key.Push(node) s.key.Push(node)
} }
func (s *strict) ExitKeyValue(node ast.Node) { func (s *strict) ExitKeyValue(node *ast.Node) {
if !s.Enabled { if !s.Enabled {
return return
} }
@@ -46,7 +47,7 @@ func (s *strict) ExitKeyValue(node ast.Node) {
s.key.Pop(node) s.key.Pop(node)
} }
func (s *strict) MissingTable(node ast.Node) { func (s *strict) MissingTable(node *ast.Node) {
if !s.Enabled { if !s.Enabled {
return return
} }
@@ -58,7 +59,7 @@ func (s *strict) MissingTable(node ast.Node) {
}) })
} }
func (s *strict) MissingField(node ast.Node) { func (s *strict) MissingField(node *ast.Node) {
if !s.Enabled { if !s.Enabled {
return return
} }
@@ -86,3 +87,21 @@ func (s *strict) Error(doc []byte) error {
return err return err
} }
func keyLocation(node *ast.Node) []byte {
k := node.Key()
hasOne := k.Next()
if !hasOne {
panic("should not be called with empty key")
}
start := k.Node().Data
end := k.Node().Data
for k.Next() {
end = k.Node().Data
}
return danger.BytesRange(start, end)
}
-540
View File
@@ -1,540 +0,0 @@
package toml
import (
"fmt"
"math"
"reflect"
"strings"
"sync"
)
type target interface {
// Dereferences the target.
get() reflect.Value
// Store a string at the target.
setString(v string)
// Store a boolean at the target
setBool(v bool)
// Store an int64 at the target
setInt64(v int64)
// Store a float64 at the target
setFloat64(v float64)
// Stores any value at the target
set(v reflect.Value)
}
// valueTarget just contains a reflect.Value that can be set.
// It is used for struct fields.
type valueTarget reflect.Value
func (t valueTarget) get() reflect.Value {
return reflect.Value(t)
}
func (t valueTarget) set(v reflect.Value) {
reflect.Value(t).Set(v)
}
func (t valueTarget) setString(v string) {
t.get().SetString(v)
}
func (t valueTarget) setBool(v bool) {
t.get().SetBool(v)
}
func (t valueTarget) setInt64(v int64) {
t.get().SetInt(v)
}
func (t valueTarget) setFloat64(v float64) {
t.get().SetFloat(v)
}
// interfaceTarget wraps an other target to dereference on get.
type interfaceTarget struct {
x target
}
func (t interfaceTarget) get() reflect.Value {
return t.x.get().Elem()
}
func (t interfaceTarget) set(v reflect.Value) {
t.x.set(v)
}
func (t interfaceTarget) setString(v string) {
t.x.setString(v)
}
func (t interfaceTarget) setBool(v bool) {
t.x.setBool(v)
}
func (t interfaceTarget) setInt64(v int64) {
t.x.setInt64(v)
}
func (t interfaceTarget) setFloat64(v float64) {
t.x.setFloat64(v)
}
// mapTarget targets a specific key of a map.
type mapTarget struct {
v reflect.Value
k reflect.Value
}
func (t mapTarget) get() reflect.Value {
return t.v.MapIndex(t.k)
}
func (t mapTarget) set(v reflect.Value) {
t.v.SetMapIndex(t.k, v)
}
func (t mapTarget) setString(v string) {
t.set(reflect.ValueOf(v))
}
func (t mapTarget) setBool(v bool) {
t.set(reflect.ValueOf(v))
}
func (t mapTarget) setInt64(v int64) {
t.set(reflect.ValueOf(v))
}
func (t mapTarget) setFloat64(v float64) {
t.set(reflect.ValueOf(v))
}
//nolint:cyclop
// makes sure that the value pointed at by t is indexable (Slice, Array), or
// dereferences to an indexable (Ptr, Interface).
func ensureValueIndexable(t target) error {
f := t.get()
switch f.Type().Kind() {
case reflect.Slice:
if f.IsNil() {
t.set(reflect.MakeSlice(f.Type(), 0, 0))
return nil
}
case reflect.Interface:
if f.IsNil() || f.Elem().Type() != sliceInterfaceType {
t.set(reflect.MakeSlice(sliceInterfaceType, 0, 0))
return nil
}
case reflect.Ptr:
panic("pointer should have already been dereferenced")
case reflect.Array:
// arrays are always initialized.
default:
return fmt.Errorf("toml: cannot store array in a %s", f.Kind())
}
return nil
}
var (
sliceInterfaceType = reflect.TypeOf([]interface{}{})
mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{})
)
func ensureMapIfInterface(x target) {
v := x.get()
if v.Kind() == reflect.Interface && v.IsNil() {
newElement := reflect.MakeMap(mapStringInterfaceType)
x.set(newElement)
}
}
func setString(t target, v string) error {
f := t.get()
switch f.Kind() {
case reflect.String:
t.setString(v)
case reflect.Interface:
t.set(reflect.ValueOf(v))
default:
return fmt.Errorf("toml: cannot assign string to a %s", f.Kind())
}
return nil
}
func setBool(t target, v bool) error {
f := t.get()
switch f.Kind() {
case reflect.Bool:
t.setBool(v)
case reflect.Interface:
t.set(reflect.ValueOf(v))
default:
return fmt.Errorf("toml: cannot assign boolean to a %s", f.Kind())
}
return nil
}
const (
maxInt = int64(^uint(0) >> 1)
minInt = -maxInt - 1
)
//nolint:funlen,gocognit,cyclop,gocyclo
func setInt64(t target, v int64) error {
f := t.get()
switch f.Kind() {
case reflect.Int64:
t.setInt64(v)
case reflect.Int32:
if v < math.MinInt32 || v > math.MaxInt32 {
return fmt.Errorf("toml: number %d does not fit in an int32", v)
}
t.set(reflect.ValueOf(int32(v)))
return nil
case reflect.Int16:
if v < math.MinInt16 || v > math.MaxInt16 {
return fmt.Errorf("toml: number %d does not fit in an int16", v)
}
t.set(reflect.ValueOf(int16(v)))
case reflect.Int8:
if v < math.MinInt8 || v > math.MaxInt8 {
return fmt.Errorf("toml: number %d does not fit in an int8", v)
}
t.set(reflect.ValueOf(int8(v)))
case reflect.Int:
if v < minInt || v > maxInt {
return fmt.Errorf("toml: number %d does not fit in an int", v)
}
t.set(reflect.ValueOf(int(v)))
case reflect.Uint64:
if v < 0 {
return fmt.Errorf("toml: negative number %d does not fit in an uint64", v)
}
t.set(reflect.ValueOf(uint64(v)))
case reflect.Uint32:
if v < 0 || v > math.MaxUint32 {
return fmt.Errorf("toml: negative number %d does not fit in an uint32", v)
}
t.set(reflect.ValueOf(uint32(v)))
case reflect.Uint16:
if v < 0 || v > math.MaxUint16 {
return fmt.Errorf("toml: negative number %d does not fit in an uint16", v)
}
t.set(reflect.ValueOf(uint16(v)))
case reflect.Uint8:
if v < 0 || v > math.MaxUint8 {
return fmt.Errorf("toml: negative number %d does not fit in an uint8", v)
}
t.set(reflect.ValueOf(uint8(v)))
case reflect.Uint:
if v < 0 {
return fmt.Errorf("toml: negative number %d does not fit in an uint", v)
}
t.set(reflect.ValueOf(uint(v)))
case reflect.Interface:
t.set(reflect.ValueOf(v))
default:
return fmt.Errorf("toml: integer cannot be assigned to %s", f.Kind())
}
return nil
}
func setFloat64(t target, v float64) error {
f := t.get()
switch f.Kind() {
case reflect.Float64:
t.setFloat64(v)
case reflect.Float32:
if v > math.MaxFloat32 {
return fmt.Errorf("toml: number %f does not fit in a float32", v)
}
t.set(reflect.ValueOf(float32(v)))
case reflect.Interface:
t.set(reflect.ValueOf(v))
default:
return fmt.Errorf("toml: float cannot be assigned to %s", f.Kind())
}
return nil
}
//nolint:cyclop
// elementAt returns a target pointing at the element at index idx of the
// indexable value pointed at by t. Slices (direct or behind an interface)
// are grown by one zero element and that new element is returned,
// regardless of idx.
// If the target points to an Array and idx is out of bounds, it returns
// nil as this is not a fatal error (the unmarshaler will skip).
// Any other kind panics: ensureValueIndexable is expected to have run
// before this function.
func elementAt(t target, idx int) target {
	f := t.get()
	switch f.Kind() {
	case reflect.Slice:
		//nolint:godox
		// TODO: use the idx function argument and avoid alloc if possible.
		// NOTE: shadows the idx parameter — new elements always go at the end.
		idx := f.Len()
		t.set(reflect.Append(f, reflect.New(f.Type().Elem()).Elem()))
		return valueTarget(t.get().Index(idx))
	case reflect.Array:
		// writes past the end of a fixed-size array are skipped
		if idx >= f.Len() {
			return nil
		}
		return valueTarget(f.Index(idx))
	case reflect.Interface:
		// This function is called after ensureValueIndexable, so it's
		// guaranteed that f contains an initialized slice.
		ifaceElem := f.Elem()
		idx := ifaceElem.Len() // shadows the parameter: append at the end
		newElem := reflect.New(ifaceElem.Type().Elem()).Elem()
		newSlice := reflect.Append(ifaceElem, newElem)
		t.set(newSlice)
		return valueTarget(t.get().Elem().Index(idx))
	default:
		// Why ensureValueIndexable let it go through?
		panic(fmt.Errorf("elementAt received unhandled value type: %s", f.Kind()))
	}
}
//nolint:cyclop
// scopeTableTarget descends through the value pointed at by t until it
// reaches a struct or map, then scopes into the entry named name. It
// recurses through interfaces, pointers, slices, and arrays, initializing
// or appending as required by shouldAppend along the way.
func (d *decoder) scopeTableTarget(shouldAppend bool, t target, name string) (target, bool, error) {
	v := t.get()
	switch v.Kind() {
	// Kinds that require another level of descent.
	case reflect.Interface:
		return d.scopeTableTarget(shouldAppend, scopeInterface(shouldAppend, t), name)
	case reflect.Ptr:
		return d.scopeTableTarget(shouldAppend, scopePtr(t), name)
	case reflect.Slice:
		// Once the slice element has been selected, appending is done.
		return d.scopeTableTarget(false, scopeSlice(shouldAppend, t), name)
	case reflect.Array:
		elem, err := d.scopeArray(shouldAppend, t)
		if err != nil {
			return elem, false, err
		}
		// Same as slices: the element is selected, stop appending.
		return d.scopeTableTarget(false, elem, name)
	// Terminal kinds.
	case reflect.Struct:
		return scopeStruct(v, name)
	case reflect.Map:
		if v.IsNil() {
			t.set(reflect.MakeMap(v.Type()))
			v = t.get()
		}
		return scopeMap(v, name)
	default:
		panic(fmt.Sprintf("can't scope on a %s", v.Kind()))
	}
}
// scopeInterface makes sure the interface pointed at by t holds a generic
// container (see initInterface) and wraps t so that subsequent writes go
// through the interface value.
func scopeInterface(shouldAppend bool, t target) target {
	initInterface(shouldAppend, t)
	return interfaceTarget{t}
}
// scopePtr makes sure the pointer pointed at by t is allocated (see
// initPtr) and returns a target on its pointee.
func scopePtr(t target) target {
	initPtr(t)
	return valueTarget(t.get().Elem())
}
// initPtr allocates a zero value for the pointer pointed at by t when it
// is nil; an already-set pointer is left untouched.
func initPtr(t target) {
	if p := t.get(); p.IsNil() {
		t.set(reflect.New(p.Type().Elem()))
	}
}
// initInterface makes sure the interface pointed at by the target holds a
// container the unmarshaler can write into: a []interface{} when
// shouldAppend is set, a map[string]interface{} otherwise. It panics when
// called on a non-interface value.
func initInterface(shouldAppend bool, t target) {
	x := t.get()
	if x.Kind() != reflect.Interface {
		panic("this should only be called on interfaces")
	}
	// Leave the value alone when it already holds one of the two generic
	// containers; anything else is replaced.
	if !x.IsNil() && (x.Elem().Type() == sliceInterfaceType || x.Elem().Type() == mapStringInterfaceType) {
		return
	}
	if shouldAppend {
		t.set(reflect.MakeSlice(sliceInterfaceType, 0, 0))
	} else {
		t.set(reflect.MakeMap(mapStringInterfaceType))
	}
}
// scopeSlice returns a target on the last element of the slice pointed at
// by t. When shouldAppend is set, a zero element is appended first.
func scopeSlice(shouldAppend bool, t target) target {
	s := t.get()
	if shouldAppend {
		elem := reflect.New(s.Type().Elem()).Elem()
		t.set(reflect.Append(s, elem))
		s = t.get()
	}
	return valueTarget(s.Index(s.Len() - 1))
}
// scopeArray returns a target on the current element of the fixed-size
// array pointed at by t, as tracked by the decoder's array index. It
// errors when the index falls past the end of the array.
func (d *decoder) scopeArray(shouldAppend bool, t target) (target, error) {
	arr := t.get()
	idx := d.arrayIndex(shouldAppend, arr)
	if idx >= arr.Len() {
		return nil, fmt.Errorf("toml: impossible to insert element beyond array's size: %d", arr.Len())
	}
	return valueTarget(arr.Index(idx)), nil
}
// scopeMap returns a target for the entry of map v under key name,
// creating a zero-valued entry when none exists. The string key is
// converted to the map's key type when needed; an error is returned if no
// conversion exists.
func scopeMap(v reflect.Value, name string) (target, bool, error) {
	key := reflect.ValueOf(name)
	if keyType := v.Type().Key(); !key.Type().AssignableTo(keyType) {
		if !key.Type().ConvertibleTo(keyType) {
			return nil, false, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", key.Type(), keyType)
		}
		key = key.Convert(keyType)
	}
	if !v.MapIndex(key).IsValid() {
		// Materialize a zero value so the returned target has an entry to
		// write over.
		v.SetMapIndex(key, reflect.New(v.Type().Elem()).Elem())
	}
	return mapTarget{v: v, k: key}, true, nil
}
type fieldPathsMap = map[string][]int
type fieldPathsCache struct {
m map[reflect.Type]fieldPathsMap
l sync.RWMutex
}
func (c *fieldPathsCache) get(t reflect.Type) (fieldPathsMap, bool) {
c.l.RLock()
paths, ok := c.m[t]
c.l.RUnlock()
return paths, ok
}
func (c *fieldPathsCache) set(t reflect.Type, m fieldPathsMap) {
c.l.Lock()
c.m[t] = m
c.l.Unlock()
}
var globalFieldPathsCache = fieldPathsCache{
m: map[reflect.Type]fieldPathsMap{},
l: sync.RWMutex{},
}
// scopeStruct returns a target on the exported field of struct v that
// matches name, either through its `toml` tag or its Go name (with a
// case-insensitive fallback). The middle return is false when no field
// matches. Embedded (anonymous) structs are searched recursively, and the
// resulting field index paths are computed once per struct type and
// memoized in globalFieldPathsCache.
func scopeStruct(v reflect.Value, name string) (target, bool, error) {
	fieldPaths, ok := globalFieldPathsCache.get(v.Type())
	if !ok {
		fieldPaths = map[string][]int{}
		// path accumulates the index chain to the field currently visited.
		path := make([]int, 0, 16)
		var walk func(reflect.Value)
		walk = func(v reflect.Value) {
			t := v.Type()
			for i := 0; i < t.NumField(); i++ {
				l := len(path)
				path = append(path, i)
				f := t.Field(i)
				if f.Anonymous {
					// recurse so the embedded struct's fields are reachable
					// from this level
					walk(v.Field(i))
				} else if f.PkgPath == "" {
					// only consider exported fields
					fieldName, ok := f.Tag.Lookup("toml")
					if !ok {
						fieldName = f.Name
					}
					pathCopy := make([]int, len(path))
					copy(pathCopy, path)
					fieldPaths[fieldName] = pathCopy
					// extra copy for the case-insensitive match
					fieldPaths[strings.ToLower(fieldName)] = pathCopy
				}
				path = path[:l]
			}
		}
		walk(v)
		globalFieldPathsCache.set(v.Type(), fieldPaths)
	}
	path, ok := fieldPaths[name]
	if !ok {
		// fall back to the case-insensitive index
		path, ok = fieldPaths[strings.ToLower(name)]
	}
	if !ok {
		return nil, false, nil
	}
	return valueTarget(v.FieldByIndex(path)), true, nil
}
-207
View File
@@ -1,207 +0,0 @@
package toml
import (
"reflect"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestStructTarget_Ensure(t *testing.T) {
t.Parallel()
examples := []struct {
desc string
input reflect.Value
name string
test func(v reflect.Value, err error)
}{
{
desc: "handle a nil slice of string",
input: reflect.ValueOf(&struct{ A []string }{}).Elem(),
name: "A",
test: func(v reflect.Value, err error) {
assert.NoError(t, err)
assert.False(t, v.IsNil())
},
},
{
desc: "handle an existing slice of string",
input: reflect.ValueOf(&struct{ A []string }{A: []string{"foo"}}).Elem(),
name: "A",
test: func(v reflect.Value, err error) {
assert.NoError(t, err)
require.False(t, v.IsNil())
s, ok := v.Interface().([]string)
if !ok {
t.Errorf("interface %v should be castable into []string", s)
return
}
assert.Equal(t, []string{"foo"}, s)
},
},
}
for _, e := range examples {
e := e
t.Run(e.desc, func(t *testing.T) {
t.Parallel()
d := decoder{}
target, _, err := d.scopeTableTarget(false, valueTarget(e.input), e.name)
require.NoError(t, err)
err = ensureValueIndexable(target)
v := target.get()
e.test(v, err)
})
}
}
func TestStructTarget_SetString(t *testing.T) {
t.Parallel()
str := "value"
examples := []struct {
desc string
input reflect.Value
name string
test func(v reflect.Value, err error)
}{
{
desc: "sets a string",
input: reflect.ValueOf(&struct{ A string }{}).Elem(),
name: "A",
test: func(v reflect.Value, err error) {
assert.NoError(t, err)
assert.Equal(t, str, v.String())
},
},
{
desc: "fails on a float",
input: reflect.ValueOf(&struct{ A float64 }{}).Elem(),
name: "A",
test: func(v reflect.Value, err error) {
assert.Error(t, err)
},
},
{
desc: "fails on a slice",
input: reflect.ValueOf(&struct{ A []string }{}).Elem(),
name: "A",
test: func(v reflect.Value, err error) {
assert.Error(t, err)
},
},
}
for _, e := range examples {
e := e
t.Run(e.desc, func(t *testing.T) {
t.Parallel()
d := decoder{}
target, _, err := d.scopeTableTarget(false, valueTarget(e.input), e.name)
require.NoError(t, err)
err = setString(target, str)
v := target.get()
e.test(v, err)
})
}
}
func TestPushNew(t *testing.T) {
t.Parallel()
t.Run("slice of strings", func(t *testing.T) {
t.Parallel()
type Doc struct {
A []string
}
d := Doc{}
dec := decoder{}
x, _, err := dec.scopeTableTarget(false, valueTarget(reflect.ValueOf(&d).Elem()), "A")
require.NoError(t, err)
n := elementAt(x, 0)
n.setString("hello")
require.Equal(t, []string{"hello"}, d.A)
n = elementAt(x, 1)
n.setString("world")
require.Equal(t, []string{"hello", "world"}, d.A)
})
t.Run("slice of interfaces", func(t *testing.T) {
t.Parallel()
type Doc struct {
A []interface{}
}
d := Doc{}
dec := decoder{}
x, _, err := dec.scopeTableTarget(false, valueTarget(reflect.ValueOf(&d).Elem()), "A")
require.NoError(t, err)
n := elementAt(x, 0)
require.NoError(t, setString(n, "hello"))
require.Equal(t, []interface{}{"hello"}, d.A)
n = elementAt(x, 1)
require.NoError(t, setString(n, "world"))
require.Equal(t, []interface{}{"hello", "world"}, d.A)
})
}
func TestScope_Struct(t *testing.T) {
t.Parallel()
examples := []struct {
desc string
input reflect.Value
name string
err bool
found bool
idx []int
}{
{
desc: "simple field",
input: reflect.ValueOf(&struct{ A string }{}).Elem(),
name: "A",
idx: []int{0},
found: true,
},
{
desc: "fails not-exported field",
input: reflect.ValueOf(&struct{ a string }{}).Elem(),
name: "a",
err: false,
found: false,
},
}
for _, e := range examples {
e := e
t.Run(e.desc, func(t *testing.T) {
t.Parallel()
dec := decoder{}
x, found, err := dec.scopeTableTarget(false, valueTarget(e.input), e.name)
assert.Equal(t, e.found, found)
if e.err {
assert.Error(t, err)
}
if found {
x2, ok := x.(valueTarget)
require.True(t, ok)
x2.get()
}
})
}
}
-74
View File
@@ -6,35 +6,30 @@ import (
) )
func TestInvalidDatetimeMalformedNoLeads(t *testing.T) { func TestInvalidDatetimeMalformedNoLeads(t *testing.T) {
t.Parallel()
input := `no-leads = 1987-7-05T17:45:00Z` input := `no-leads = 1987-7-05T17:45:00Z`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidDatetimeMalformedNoSecs(t *testing.T) { func TestInvalidDatetimeMalformedNoSecs(t *testing.T) {
t.Parallel()
input := `no-secs = 1987-07-05T17:45Z` input := `no-secs = 1987-07-05T17:45Z`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidDatetimeMalformedNoT(t *testing.T) { func TestInvalidDatetimeMalformedNoT(t *testing.T) {
t.Parallel()
input := `no-t = 1987-07-0517:45:00Z` input := `no-t = 1987-07-0517:45:00Z`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidDatetimeMalformedWithMilli(t *testing.T) { func TestInvalidDatetimeMalformedWithMilli(t *testing.T) {
t.Parallel()
input := `with-milli = 1987-07-5T17:45:00.12Z` input := `with-milli = 1987-07-5T17:45:00.12Z`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidDuplicateKeyTable(t *testing.T) { func TestInvalidDuplicateKeyTable(t *testing.T) {
t.Parallel()
input := `[fruit] input := `[fruit]
type = "apple" type = "apple"
@@ -45,7 +40,6 @@ apple = "yes"`
} }
func TestInvalidDuplicateKeys(t *testing.T) { func TestInvalidDuplicateKeys(t *testing.T) {
t.Parallel()
input := `dupe = false input := `dupe = false
dupe = true` dupe = true`
@@ -53,7 +47,6 @@ dupe = true`
} }
func TestInvalidDuplicateTables(t *testing.T) { func TestInvalidDuplicateTables(t *testing.T) {
t.Parallel()
input := `[a] input := `[a]
[a]` [a]`
@@ -61,21 +54,18 @@ func TestInvalidDuplicateTables(t *testing.T) {
} }
func TestInvalidEmptyImplicitTable(t *testing.T) { func TestInvalidEmptyImplicitTable(t *testing.T) {
t.Parallel()
input := `[naughty..naughty]` input := `[naughty..naughty]`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidEmptyTable(t *testing.T) { func TestInvalidEmptyTable(t *testing.T) {
t.Parallel()
input := `[]` input := `[]`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidFloatNoLeadingZero(t *testing.T) { func TestInvalidFloatNoLeadingZero(t *testing.T) {
t.Parallel()
input := `answer = .12345 input := `answer = .12345
neganswer = -.12345` neganswer = -.12345`
@@ -83,7 +73,6 @@ neganswer = -.12345`
} }
func TestInvalidFloatNoTrailingDigits(t *testing.T) { func TestInvalidFloatNoTrailingDigits(t *testing.T) {
t.Parallel()
input := `answer = 1. input := `answer = 1.
neganswer = -1.` neganswer = -1.`
@@ -91,21 +80,18 @@ neganswer = -1.`
} }
func TestInvalidKeyEmpty(t *testing.T) { func TestInvalidKeyEmpty(t *testing.T) {
t.Parallel()
input := ` = 1` input := ` = 1`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidKeyHash(t *testing.T) { func TestInvalidKeyHash(t *testing.T) {
t.Parallel()
input := `a# = 1` input := `a# = 1`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidKeyNewline(t *testing.T) { func TestInvalidKeyNewline(t *testing.T) {
t.Parallel()
input := `a input := `a
= 1` = 1`
@@ -113,28 +99,24 @@ func TestInvalidKeyNewline(t *testing.T) {
} }
func TestInvalidKeyOpenBracket(t *testing.T) { func TestInvalidKeyOpenBracket(t *testing.T) {
t.Parallel()
input := `[abc = 1` input := `[abc = 1`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidKeySingleOpenBracket(t *testing.T) { func TestInvalidKeySingleOpenBracket(t *testing.T) {
t.Parallel()
input := `[` input := `[`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidKeySpace(t *testing.T) { func TestInvalidKeySpace(t *testing.T) {
t.Parallel()
input := `a b = 1` input := `a b = 1`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidKeyStartBracket(t *testing.T) { func TestInvalidKeyStartBracket(t *testing.T) {
t.Parallel()
input := `[a] input := `[a]
[xyz = 5 [xyz = 5
@@ -143,42 +125,36 @@ func TestInvalidKeyStartBracket(t *testing.T) {
} }
func TestInvalidKeyTwoEquals(t *testing.T) { func TestInvalidKeyTwoEquals(t *testing.T) {
t.Parallel()
input := `key= = 1` input := `key= = 1`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidStringBadByteEscape(t *testing.T) { func TestInvalidStringBadByteEscape(t *testing.T) {
t.Parallel()
input := `naughty = "\xAg"` input := `naughty = "\xAg"`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidStringBadEscape(t *testing.T) { func TestInvalidStringBadEscape(t *testing.T) {
t.Parallel()
input := `invalid-escape = "This string has a bad \a escape character."` input := `invalid-escape = "This string has a bad \a escape character."`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidStringByteEscapes(t *testing.T) { func TestInvalidStringByteEscapes(t *testing.T) {
t.Parallel()
input := `answer = "\x33"` input := `answer = "\x33"`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidStringNoClose(t *testing.T) { func TestInvalidStringNoClose(t *testing.T) {
t.Parallel()
input := `no-ending-quote = "One time, at band camp` input := `no-ending-quote = "One time, at band camp`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidTableArrayImplicit(t *testing.T) { func TestInvalidTableArrayImplicit(t *testing.T) {
t.Parallel()
input := "# This test is a bit tricky. It should fail because the first use of\n" + input := "# This test is a bit tricky. It should fail because the first use of\n" +
"# `[[albums.songs]]` without first declaring `albums` implies that `albums`\n" + "# `[[albums.songs]]` without first declaring `albums` implies that `albums`\n" +
@@ -198,7 +174,6 @@ func TestInvalidTableArrayImplicit(t *testing.T) {
} }
func TestInvalidTableArrayMalformedBracket(t *testing.T) { func TestInvalidTableArrayMalformedBracket(t *testing.T) {
t.Parallel()
input := `[[albums] input := `[[albums]
name = "Born to Run"` name = "Born to Run"`
@@ -206,7 +181,6 @@ name = "Born to Run"`
} }
func TestInvalidTableArrayMalformedEmpty(t *testing.T) { func TestInvalidTableArrayMalformedEmpty(t *testing.T) {
t.Parallel()
input := `[[]] input := `[[]]
name = "Born to Run"` name = "Born to Run"`
@@ -214,14 +188,12 @@ name = "Born to Run"`
} }
func TestInvalidTableEmpty(t *testing.T) { func TestInvalidTableEmpty(t *testing.T) {
t.Parallel()
input := `[]` input := `[]`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidTableNestedBracketsClose(t *testing.T) { func TestInvalidTableNestedBracketsClose(t *testing.T) {
t.Parallel()
input := `[a]b] input := `[a]b]
zyx = 42` zyx = 42`
@@ -229,7 +201,6 @@ zyx = 42`
} }
func TestInvalidTableNestedBracketsOpen(t *testing.T) { func TestInvalidTableNestedBracketsOpen(t *testing.T) {
t.Parallel()
input := `[a[b] input := `[a[b]
zyx = 42` zyx = 42`
@@ -237,14 +208,12 @@ zyx = 42`
} }
func TestInvalidTableWhitespace(t *testing.T) { func TestInvalidTableWhitespace(t *testing.T) {
t.Parallel()
input := `[invalid key]` input := `[invalid key]`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidTableWithPound(t *testing.T) { func TestInvalidTableWithPound(t *testing.T) {
t.Parallel()
input := `[key#group] input := `[key#group]
answer = 42` answer = 42`
@@ -252,7 +221,6 @@ answer = 42`
} }
func TestInvalidTextAfterArrayEntries(t *testing.T) { func TestInvalidTextAfterArrayEntries(t *testing.T) {
t.Parallel()
input := `array = [ input := `array = [
"Is there life after an array separator?", No "Is there life after an array separator?", No
@@ -262,28 +230,24 @@ func TestInvalidTextAfterArrayEntries(t *testing.T) {
} }
func TestInvalidTextAfterInteger(t *testing.T) { func TestInvalidTextAfterInteger(t *testing.T) {
t.Parallel()
input := `answer = 42 the ultimate answer?` input := `answer = 42 the ultimate answer?`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidTextAfterString(t *testing.T) { func TestInvalidTextAfterString(t *testing.T) {
t.Parallel()
input := `string = "Is there life after strings?" No.` input := `string = "Is there life after strings?" No.`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidTextAfterTable(t *testing.T) { func TestInvalidTextAfterTable(t *testing.T) {
t.Parallel()
input := `[error] this shouldn't be here` input := `[error] this shouldn't be here`
testgenInvalid(t, input) testgenInvalid(t, input)
} }
func TestInvalidTextBeforeArraySeparator(t *testing.T) { func TestInvalidTextBeforeArraySeparator(t *testing.T) {
t.Parallel()
input := `array = [ input := `array = [
"Is there life before an array separator?" No, "Is there life before an array separator?" No,
@@ -293,7 +257,6 @@ func TestInvalidTextBeforeArraySeparator(t *testing.T) {
} }
func TestInvalidTextInArray(t *testing.T) { func TestInvalidTextInArray(t *testing.T) {
t.Parallel()
input := `array = [ input := `array = [
"Entry 1", "Entry 1",
@@ -304,7 +267,6 @@ func TestInvalidTextInArray(t *testing.T) {
} }
func TestValidArrayEmpty(t *testing.T) { func TestValidArrayEmpty(t *testing.T) {
t.Parallel()
input := `thevoid = [[[[[]]]]]` input := `thevoid = [[[[[]]]]]`
jsonRef := `{ jsonRef := `{
@@ -322,7 +284,6 @@ func TestValidArrayEmpty(t *testing.T) {
} }
func TestValidArrayNospaces(t *testing.T) { func TestValidArrayNospaces(t *testing.T) {
t.Parallel()
input := `ints = [1,2,3]` input := `ints = [1,2,3]`
jsonRef := `{ jsonRef := `{
@@ -339,7 +300,6 @@ func TestValidArrayNospaces(t *testing.T) {
} }
func TestValidArraysHetergeneous(t *testing.T) { func TestValidArraysHetergeneous(t *testing.T) {
t.Parallel()
input := `mixed = [[1, 2], ["a", "b"], [1.1, 2.1]]` input := `mixed = [[1, 2], ["a", "b"], [1.1, 2.1]]`
jsonRef := `{ jsonRef := `{
@@ -365,7 +325,6 @@ func TestValidArraysHetergeneous(t *testing.T) {
} }
func TestValidArraysNested(t *testing.T) { func TestValidArraysNested(t *testing.T) {
t.Parallel()
input := `nest = [["a"], ["b"]]` input := `nest = [["a"], ["b"]]`
jsonRef := `{ jsonRef := `{
@@ -385,7 +344,6 @@ func TestValidArraysNested(t *testing.T) {
} }
func TestValidArrays(t *testing.T) { func TestValidArrays(t *testing.T) {
t.Parallel()
input := `ints = [1, 2, 3] input := `ints = [1, 2, 3]
floats = [1.1, 2.1, 3.1] floats = [1.1, 2.1, 3.1]
@@ -433,7 +391,6 @@ dates = [
} }
func TestValidBool(t *testing.T) { func TestValidBool(t *testing.T) {
t.Parallel()
input := `t = true input := `t = true
f = false` f = false`
@@ -445,7 +402,6 @@ f = false`
} }
func TestValidCommentsEverywhere(t *testing.T) { func TestValidCommentsEverywhere(t *testing.T) {
t.Parallel()
input := `# Top comment. input := `# Top comment.
# Top comment. # Top comment.
@@ -487,7 +443,6 @@ more = [ # Comment
} }
func TestValidDatetime(t *testing.T) { func TestValidDatetime(t *testing.T) {
t.Parallel()
input := `bestdayever = 1987-07-05T17:45:00Z` input := `bestdayever = 1987-07-05T17:45:00Z`
jsonRef := `{ jsonRef := `{
@@ -497,7 +452,6 @@ func TestValidDatetime(t *testing.T) {
} }
func TestValidEmpty(t *testing.T) { func TestValidEmpty(t *testing.T) {
t.Parallel()
input := `` input := ``
jsonRef := `{}` jsonRef := `{}`
@@ -505,7 +459,6 @@ func TestValidEmpty(t *testing.T) {
} }
func TestValidExample(t *testing.T) { func TestValidExample(t *testing.T) {
t.Parallel()
input := `best-day-ever = 1987-07-05T17:45:00Z input := `best-day-ever = 1987-07-05T17:45:00Z
@@ -530,7 +483,6 @@ perfection = [6, 28, 496]`
} }
func TestValidFloat(t *testing.T) { func TestValidFloat(t *testing.T) {
t.Parallel()
input := `pi = 3.14 input := `pi = 3.14
negpi = -3.14` negpi = -3.14`
@@ -542,7 +494,6 @@ negpi = -3.14`
} }
func TestValidImplicitAndExplicitAfter(t *testing.T) { func TestValidImplicitAndExplicitAfter(t *testing.T) {
t.Parallel()
input := `[a.b.c] input := `[a.b.c]
answer = 42 answer = 42
@@ -563,7 +514,6 @@ better = 43`
} }
func TestValidImplicitAndExplicitBefore(t *testing.T) { func TestValidImplicitAndExplicitBefore(t *testing.T) {
t.Parallel()
input := `[a] input := `[a]
better = 43 better = 43
@@ -584,7 +534,6 @@ answer = 42`
} }
func TestValidImplicitGroups(t *testing.T) { func TestValidImplicitGroups(t *testing.T) {
t.Parallel()
input := `[a.b.c] input := `[a.b.c]
answer = 42` answer = 42`
@@ -601,7 +550,6 @@ answer = 42`
} }
func TestValidInteger(t *testing.T) { func TestValidInteger(t *testing.T) {
t.Parallel()
input := `answer = 42 input := `answer = 42
neganswer = -42` neganswer = -42`
@@ -613,7 +561,6 @@ neganswer = -42`
} }
func TestValidKeyEqualsNospace(t *testing.T) { func TestValidKeyEqualsNospace(t *testing.T) {
t.Parallel()
input := `answer=42` input := `answer=42`
jsonRef := `{ jsonRef := `{
@@ -623,7 +570,6 @@ func TestValidKeyEqualsNospace(t *testing.T) {
} }
func TestValidKeySpace(t *testing.T) { func TestValidKeySpace(t *testing.T) {
t.Parallel()
input := `"a b" = 1` input := `"a b" = 1`
jsonRef := `{ jsonRef := `{
@@ -633,7 +579,6 @@ func TestValidKeySpace(t *testing.T) {
} }
func TestValidKeySpecialChars(t *testing.T) { func TestValidKeySpecialChars(t *testing.T) {
t.Parallel()
input := "\"~!@$^&*()_+-`1234567890[]|/?><.,;:'\" = 1\n" input := "\"~!@$^&*()_+-`1234567890[]|/?><.,;:'\" = 1\n"
jsonRef := "{\n" + jsonRef := "{\n" +
@@ -645,7 +590,6 @@ func TestValidKeySpecialChars(t *testing.T) {
} }
func TestValidLongFloat(t *testing.T) { func TestValidLongFloat(t *testing.T) {
t.Parallel()
input := `longpi = 3.141592653589793 input := `longpi = 3.141592653589793
neglongpi = -3.141592653589793` neglongpi = -3.141592653589793`
@@ -657,7 +601,6 @@ neglongpi = -3.141592653589793`
} }
func TestValidLongInteger(t *testing.T) { func TestValidLongInteger(t *testing.T) {
t.Parallel()
input := `answer = 9223372036854775807 input := `answer = 9223372036854775807
neganswer = -9223372036854775808` neganswer = -9223372036854775808`
@@ -669,7 +612,6 @@ neganswer = -9223372036854775808`
} }
func TestValidMultilineString(t *testing.T) { func TestValidMultilineString(t *testing.T) {
t.Parallel()
input := `multiline_empty_one = """""" input := `multiline_empty_one = """"""
multiline_empty_two = """ multiline_empty_two = """
@@ -728,7 +670,6 @@ equivalent_three = """\
} }
func TestValidRawMultilineString(t *testing.T) { func TestValidRawMultilineString(t *testing.T) {
t.Parallel()
input := `oneline = '''This string has a ' quote character.''' input := `oneline = '''This string has a ' quote character.'''
firstnl = ''' firstnl = '''
@@ -757,7 +698,6 @@ in it.'''`
} }
func TestValidRawString(t *testing.T) { func TestValidRawString(t *testing.T) {
t.Parallel()
input := `backspace = 'This string has a \b backspace character.' input := `backspace = 'This string has a \b backspace character.'
tab = 'This string has a \t tab character.' tab = 'This string has a \t tab character.'
@@ -800,7 +740,6 @@ backslash = 'This string has a \\ backslash character.'`
} }
func TestValidStringEmpty(t *testing.T) { func TestValidStringEmpty(t *testing.T) {
t.Parallel()
input := `answer = ""` input := `answer = ""`
jsonRef := `{ jsonRef := `{
@@ -813,7 +752,6 @@ func TestValidStringEmpty(t *testing.T) {
} }
func TestValidStringEscapes(t *testing.T) { func TestValidStringEscapes(t *testing.T) {
t.Parallel()
input := `backspace = "This string has a \b backspace character." input := `backspace = "This string has a \b backspace character."
tab = "This string has a \t tab character." tab = "This string has a \t tab character."
@@ -876,7 +814,6 @@ notunicode4 = "This string does not have a unicode \\\u0075 escape."`
} }
func TestValidStringSimple(t *testing.T) { func TestValidStringSimple(t *testing.T) {
t.Parallel()
input := `answer = "You are not drinking enough whisky."` input := `answer = "You are not drinking enough whisky."`
jsonRef := `{ jsonRef := `{
@@ -889,7 +826,6 @@ func TestValidStringSimple(t *testing.T) {
} }
func TestValidStringWithPound(t *testing.T) { func TestValidStringWithPound(t *testing.T) {
t.Parallel()
input := `pound = "We see no # comments here." input := `pound = "We see no # comments here."
poundcomment = "But there are # some comments here." # Did I # mess you up?` poundcomment = "But there are # some comments here." # Did I # mess you up?`
@@ -904,7 +840,6 @@ poundcomment = "But there are # some comments here." # Did I # mess you up?`
} }
func TestValidTableArrayImplicit(t *testing.T) { func TestValidTableArrayImplicit(t *testing.T) {
t.Parallel()
input := `[[albums.songs]] input := `[[albums.songs]]
name = "Glory Days"` name = "Glory Days"`
@@ -919,7 +854,6 @@ name = "Glory Days"`
} }
func TestValidTableArrayMany(t *testing.T) { func TestValidTableArrayMany(t *testing.T) {
t.Parallel()
input := `[[people]] input := `[[people]]
first_name = "Bruce" first_name = "Bruce"
@@ -952,7 +886,6 @@ last_name = "Seger"`
} }
func TestValidTableArrayNest(t *testing.T) { func TestValidTableArrayNest(t *testing.T) {
t.Parallel()
input := `[[albums]] input := `[[albums]]
name = "Born to Run" name = "Born to Run"
@@ -993,7 +926,6 @@ name = "Born in the USA"
} }
func TestValidTableArrayOne(t *testing.T) { func TestValidTableArrayOne(t *testing.T) {
t.Parallel()
input := `[[people]] input := `[[people]]
first_name = "Bruce" first_name = "Bruce"
@@ -1010,7 +942,6 @@ last_name = "Springsteen"`
} }
func TestValidTableEmpty(t *testing.T) { func TestValidTableEmpty(t *testing.T) {
t.Parallel()
input := `[a]` input := `[a]`
jsonRef := `{ jsonRef := `{
@@ -1020,7 +951,6 @@ func TestValidTableEmpty(t *testing.T) {
} }
func TestValidTableSubEmpty(t *testing.T) { func TestValidTableSubEmpty(t *testing.T) {
t.Parallel()
input := `[a] input := `[a]
[a.b]` [a.b]`
@@ -1031,7 +961,6 @@ func TestValidTableSubEmpty(t *testing.T) {
} }
func TestValidTableWhitespace(t *testing.T) { func TestValidTableWhitespace(t *testing.T) {
t.Parallel()
input := `["valid key"]` input := `["valid key"]`
jsonRef := `{ jsonRef := `{
@@ -1041,7 +970,6 @@ func TestValidTableWhitespace(t *testing.T) {
} }
func TestValidTableWithPound(t *testing.T) { func TestValidTableWithPound(t *testing.T) {
t.Parallel()
input := `["key#group"] input := `["key#group"]
answer = 42` answer = 42`
@@ -1054,7 +982,6 @@ answer = 42`
} }
func TestValidUnicodeEscape(t *testing.T) { func TestValidUnicodeEscape(t *testing.T) {
t.Parallel()
input := `answer4 = "\u03B4" input := `answer4 = "\u03B4"
answer8 = "\U000003B4"` answer8 = "\U000003B4"`
@@ -1066,7 +993,6 @@ answer8 = "\U000003B4"`
} }
func TestValidUnicodeLiteral(t *testing.T) { func TestValidUnicodeLiteral(t *testing.T) {
t.Parallel()
input := `answer = "δ"` input := `answer = "δ"`
jsonRef := `{ jsonRef := `{
+13
View File
@@ -0,0 +1,13 @@
package toml
import (
"encoding"
"reflect"
"time"
)
var timeType = reflect.TypeOf(time.Time{})
var textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem()
var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{})
var sliceInterfaceType = reflect.TypeOf([]interface{}{})
+838 -312
View File
File diff suppressed because it is too large Load Diff
+886 -56
View File
File diff suppressed because it is too large Load Diff