Compare commits
109 Commits
specialize
...
v2.0.8
| Author | SHA1 | Date | |
|---|---|---|---|
| 7b980e792b | |||
| 44c1513ccd | |||
| fcf9d37d0c | |||
| 986afffb7c | |||
| 8c2c9cc986 | |||
| 55ca4e35e4 | |||
| d34104d493 | |||
| 2aa08368fa | |||
| 654811fbc3 | |||
| 5c05d4d863 | |||
| 643c251c4b | |||
| 8a416daa69 | |||
| fcd9179b7d | |||
| 9f5726004e | |||
| c4a2eef8a4 | |||
| b58c20aa49 | |||
| 090cccf4ba | |||
| 58a592bbf8 | |||
| 94bd3ddcd6 | |||
| e195b58fd0 | |||
| c83d001c6d | |||
| b9e3b9c370 | |||
| d26887310c | |||
| 942841787a | |||
| 28f1efc7d3 | |||
| 7d69e4a728 | |||
| e46d245c09 | |||
| 7baa23f493 | |||
| 2d8433b69e | |||
| 67bc5422f3 | |||
| fb6d1d6c2b | |||
| d017a6dc89 | |||
| d6d3196163 | |||
| 41718a6db3 | |||
| 216628222f | |||
| 322e0b15d2 | |||
| 85bfc0ed51 | |||
| 295a720dfb | |||
| 0a422e3dbd | |||
| 627dade0c7 | |||
| b2e0231cc9 | |||
| ba95863cd3 | |||
| db679df765 | |||
| c5ca2c682b | |||
| ed80712cb4 | |||
| b24772942d | |||
| 9501a05ed7 | |||
| 171a592663 | |||
| 5aaf5ef13b | |||
| adacebd8c7 | |||
| 8bbb673431 | |||
| 2377ac4bc0 | |||
| f5cc8c49eb | |||
| 89d7b412d8 | |||
| 88a8aecdd4 | |||
| 9804fc57e0 | |||
| 068279f13b | |||
| b9edbeb611 | |||
| a97c9317d4 | |||
| 3229a0abfb | |||
| 3f5d8a6b06 | |||
| 146f70ea8a | |||
| e83cf535f5 | |||
| c3ba3ef97a | |||
| 7ee3c8ff25 | |||
| 1e85aa6d78 | |||
| 46fa3225e2 | |||
| 4d51831dab | |||
| 5a1a96cb2d | |||
| ea9040ae83 | |||
| 2373685f1e | |||
| f1391952d4 | |||
| 4a73a200ed | |||
| 4807229e94 | |||
| d8ddc00c61 | |||
| 82f8dad811 | |||
| 75db1016e8 | |||
| de6d715bd2 | |||
| 3ab2fc2b87 | |||
| 1b1dd3d6d5 | |||
| 128b7a8bfb | |||
| 892df5c28e | |||
| d58eb50ebf | |||
| 535fc65c5f | |||
| f158d7d278 | |||
| 5fd6e9cce0 | |||
| 8ce5c3d78f | |||
| 177b4a5e53 | |||
| 5cbdea6192 | |||
| 696dd25c17 | |||
| facb2b13e8 | |||
| 8bbb519477 | |||
| b37e11d74d | |||
| 6cd86876b8 | |||
| f53bc740c1 | |||
| 9bf9be681e | |||
| c862c344b3 | |||
| 0d20a84523 | |||
| 3990899d7e | |||
| 4c7a337083 | |||
| bbaae540ce | |||
| ede6445608 | |||
| b226db6a29 | |||
| d8997efb5a | |||
| 79e78b234c | |||
| 1b5a25c0ef | |||
| 8eae15b2ee | |||
| 2b3de620e8 | |||
| 8645d6376b |
@@ -1,3 +1,4 @@
|
|||||||
* text=auto
|
* text=auto
|
||||||
|
|
||||||
benchmark/benchmark.toml text eol=lf
|
benchmark/benchmark.toml text eol=lf
|
||||||
|
testdata/** text eol=lf
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ changelog:
|
|||||||
exclude:
|
exclude:
|
||||||
labels:
|
labels:
|
||||||
- build
|
- build
|
||||||
|
- testing
|
||||||
categories:
|
categories:
|
||||||
- title: What's new
|
- title: What's new
|
||||||
labels:
|
labels:
|
||||||
|
|||||||
@@ -0,0 +1,26 @@
|
|||||||
|
name: CIFuzz
|
||||||
|
on: [pull_request]
|
||||||
|
jobs:
|
||||||
|
Fuzzing:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Build Fuzzers
|
||||||
|
id: build
|
||||||
|
uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
|
||||||
|
with:
|
||||||
|
oss-fuzz-project-name: 'go-toml'
|
||||||
|
dry-run: false
|
||||||
|
language: go
|
||||||
|
- name: Run Fuzzers
|
||||||
|
uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
|
||||||
|
with:
|
||||||
|
oss-fuzz-project-name: 'go-toml'
|
||||||
|
fuzz-seconds: 300
|
||||||
|
dry-run: false
|
||||||
|
language: go
|
||||||
|
- name: Upload Crash
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
if: failure() && steps.build.outcome == 'success'
|
||||||
|
with:
|
||||||
|
name: artifacts
|
||||||
|
path: ./out/artifacts
|
||||||
@@ -35,11 +35,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v1
|
uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
@@ -50,7 +50,7 @@ jobs:
|
|||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v1
|
uses: github/codeql-action/autobuild@v2
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 https://git.io/JvXDl
|
||||||
@@ -64,4 +64,4 @@ jobs:
|
|||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v1
|
uses: github/codeql-action/analyze@v2
|
||||||
|
|||||||
@@ -9,12 +9,12 @@ jobs:
|
|||||||
runs-on: "ubuntu-latest"
|
runs-on: "ubuntu-latest"
|
||||||
name: report
|
name: report
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@master
|
- uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- name: Setup go
|
- name: Setup go
|
||||||
uses: actions/setup-go@master
|
uses: actions/setup-go@v4
|
||||||
with:
|
with:
|
||||||
go-version: 1.16
|
go-version: "1.20"
|
||||||
- name: Run tests with coverage
|
- name: Run tests with coverage
|
||||||
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
|
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
|
||||||
|
|||||||
@@ -0,0 +1,39 @@
|
|||||||
|
name: release
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- "v2.*"
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
args:
|
||||||
|
description: "Extra arguments to pass goreleaser"
|
||||||
|
default: ""
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Set up Go
|
||||||
|
uses: actions/setup-go@v4
|
||||||
|
with:
|
||||||
|
go-version: "1.20"
|
||||||
|
- name: Login to GitHub Container Registry
|
||||||
|
uses: docker/login-action@v2
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Run GoReleaser
|
||||||
|
uses: goreleaser/goreleaser-action@v3
|
||||||
|
with:
|
||||||
|
distribution: goreleaser
|
||||||
|
version: latest
|
||||||
|
args: release ${{ inputs.args }} --rm-dist
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
@@ -12,14 +12,21 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest']
|
os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest']
|
||||||
go: [ '1.16', '1.17' ]
|
go: [ '1.19', '1.20' ]
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
name: ${{ matrix.go }}/${{ matrix.os }}
|
name: ${{ matrix.go }}/${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@master
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
- name: Setup go ${{ matrix.go }}
|
- name: Setup go ${{ matrix.go }}
|
||||||
uses: actions/setup-go@master
|
uses: actions/setup-go@v4
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go }}
|
go-version: ${{ matrix.go }}
|
||||||
- name: Run unit tests
|
- name: Run unit tests
|
||||||
run: go test -race ./...
|
run: go test -race ./...
|
||||||
|
release-check:
|
||||||
|
if: ${{ github.ref != 'refs/heads/v2' }}
|
||||||
|
uses: pelletier/go-toml/.github/workflows/release.yml@v2
|
||||||
|
with:
|
||||||
|
args: --snapshot
|
||||||
|
|||||||
@@ -3,3 +3,4 @@ fuzz/
|
|||||||
cmd/tomll/tomll
|
cmd/tomll/tomll
|
||||||
cmd/tomljson/tomljson
|
cmd/tomljson/tomljson
|
||||||
cmd/tomltestgen/tomltestgen
|
cmd/tomltestgen/tomltestgen
|
||||||
|
dist
|
||||||
@@ -0,0 +1,123 @@
|
|||||||
|
before:
|
||||||
|
hooks:
|
||||||
|
- go mod tidy
|
||||||
|
- go fmt ./...
|
||||||
|
- go test ./...
|
||||||
|
builds:
|
||||||
|
- id: tomll
|
||||||
|
main: ./cmd/tomll
|
||||||
|
binary: tomll
|
||||||
|
env:
|
||||||
|
- CGO_ENABLED=0
|
||||||
|
flags:
|
||||||
|
- -trimpath
|
||||||
|
ldflags:
|
||||||
|
- -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}}
|
||||||
|
mod_timestamp: '{{ .CommitTimestamp }}'
|
||||||
|
targets:
|
||||||
|
- linux_amd64
|
||||||
|
- linux_arm64
|
||||||
|
- linux_arm
|
||||||
|
- windows_amd64
|
||||||
|
- windows_arm64
|
||||||
|
- windows_arm
|
||||||
|
- darwin_amd64
|
||||||
|
- darwin_arm64
|
||||||
|
- id: tomljson
|
||||||
|
main: ./cmd/tomljson
|
||||||
|
binary: tomljson
|
||||||
|
env:
|
||||||
|
- CGO_ENABLED=0
|
||||||
|
flags:
|
||||||
|
- -trimpath
|
||||||
|
ldflags:
|
||||||
|
- -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}}
|
||||||
|
mod_timestamp: '{{ .CommitTimestamp }}'
|
||||||
|
targets:
|
||||||
|
- linux_amd64
|
||||||
|
- linux_arm64
|
||||||
|
- linux_arm
|
||||||
|
- windows_amd64
|
||||||
|
- windows_arm64
|
||||||
|
- windows_arm
|
||||||
|
- darwin_amd64
|
||||||
|
- darwin_arm64
|
||||||
|
- id: jsontoml
|
||||||
|
main: ./cmd/jsontoml
|
||||||
|
binary: jsontoml
|
||||||
|
env:
|
||||||
|
- CGO_ENABLED=0
|
||||||
|
flags:
|
||||||
|
- -trimpath
|
||||||
|
ldflags:
|
||||||
|
- -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}}
|
||||||
|
mod_timestamp: '{{ .CommitTimestamp }}'
|
||||||
|
targets:
|
||||||
|
- linux_amd64
|
||||||
|
- linux_arm64
|
||||||
|
- linux_arm
|
||||||
|
- windows_amd64
|
||||||
|
- windows_arm64
|
||||||
|
- windows_arm
|
||||||
|
- darwin_amd64
|
||||||
|
- darwin_arm64
|
||||||
|
universal_binaries:
|
||||||
|
- id: tomll
|
||||||
|
replace: true
|
||||||
|
name_template: tomll
|
||||||
|
- id: tomljson
|
||||||
|
replace: true
|
||||||
|
name_template: tomljson
|
||||||
|
- id: jsontoml
|
||||||
|
replace: true
|
||||||
|
name_template: jsontoml
|
||||||
|
archives:
|
||||||
|
- id: jsontoml
|
||||||
|
format: tar.xz
|
||||||
|
builds:
|
||||||
|
- jsontoml
|
||||||
|
files:
|
||||||
|
- none*
|
||||||
|
name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}"
|
||||||
|
- id: tomljson
|
||||||
|
format: tar.xz
|
||||||
|
builds:
|
||||||
|
- tomljson
|
||||||
|
files:
|
||||||
|
- none*
|
||||||
|
name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}"
|
||||||
|
- id: tomll
|
||||||
|
format: tar.xz
|
||||||
|
builds:
|
||||||
|
- tomll
|
||||||
|
files:
|
||||||
|
- none*
|
||||||
|
name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}"
|
||||||
|
dockers:
|
||||||
|
- id: tools
|
||||||
|
goos: linux
|
||||||
|
goarch: amd64
|
||||||
|
ids:
|
||||||
|
- jsontoml
|
||||||
|
- tomljson
|
||||||
|
- tomll
|
||||||
|
image_templates:
|
||||||
|
- "ghcr.io/pelletier/go-toml:latest"
|
||||||
|
- "ghcr.io/pelletier/go-toml:{{ .Tag }}"
|
||||||
|
- "ghcr.io/pelletier/go-toml:v{{ .Major }}"
|
||||||
|
skip_push: false
|
||||||
|
checksum:
|
||||||
|
name_template: 'sha256sums.txt'
|
||||||
|
snapshot:
|
||||||
|
name_template: "{{ incpatch .Version }}-next"
|
||||||
|
release:
|
||||||
|
github:
|
||||||
|
owner: pelletier
|
||||||
|
name: go-toml
|
||||||
|
draft: true
|
||||||
|
prerelease: auto
|
||||||
|
mode: replace
|
||||||
|
changelog:
|
||||||
|
use: github-native
|
||||||
|
announce:
|
||||||
|
skip: true
|
||||||
+23
-9
@@ -155,6 +155,8 @@ Checklist:
|
|||||||
- Does not introduce backward-incompatible changes (unless discussed).
|
- Does not introduce backward-incompatible changes (unless discussed).
|
||||||
- Has relevant doc changes.
|
- Has relevant doc changes.
|
||||||
- Benchstat does not show performance regression.
|
- Benchstat does not show performance regression.
|
||||||
|
- Pull request is [labeled appropriately][pr-labels].
|
||||||
|
- Title will be understandable in the changelog.
|
||||||
|
|
||||||
1. Merge using "squash and merge".
|
1. Merge using "squash and merge".
|
||||||
2. Make sure to edit the commit message to keep all the useful information
|
2. Make sure to edit the commit message to keep all the useful information
|
||||||
@@ -163,13 +165,25 @@ Checklist:
|
|||||||
|
|
||||||
### New release
|
### New release
|
||||||
|
|
||||||
1. Go to [releases][releases]. Click on "X commits to master since this
|
1. Decide on the next version number. Use semver.
|
||||||
release".
|
2. Generate release notes using [`gh`][gh]. Example:
|
||||||
2. Make note of all the changes. Look for backward incompatible changes,
|
```
|
||||||
new features, and bug fixes.
|
$ gh api -X POST \
|
||||||
3. Pick the new version using the above and semver.
|
-F tag_name='v2.0.0-beta.5' \
|
||||||
4. Create a [new release][new-release].
|
-F target_commitish='v2' \
|
||||||
5. Follow the same format as [1.1.0][release-110].
|
-F previous_tag_name='v2.0.0-beta.4' \
|
||||||
|
--jq '.body' \
|
||||||
|
repos/pelletier/go-toml/releases/generate-notes
|
||||||
|
```
|
||||||
|
3. Look for "Other changes". That would indicate a pull request not labeled
|
||||||
|
properly. Tweak labels and pull request titles until changelog looks good for
|
||||||
|
users.
|
||||||
|
4. [Draft new release][new-release].
|
||||||
|
5. Fill tag and target with the same value used to generate the changelog.
|
||||||
|
6. Set title to the new tag value.
|
||||||
|
7. Paste the generated changelog.
|
||||||
|
8. Check "create discussion", in the "Releases" category.
|
||||||
|
9. Check pre-release if new version is an alpha or beta.
|
||||||
|
|
||||||
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
||||||
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
||||||
@@ -177,6 +191,6 @@ Checklist:
|
|||||||
[readme]: ./README.md
|
[readme]: ./README.md
|
||||||
[fork]: https://help.github.com/articles/fork-a-repo
|
[fork]: https://help.github.com/articles/fork-a-repo
|
||||||
[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
|
[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
|
||||||
[releases]: https://github.com/pelletier/go-toml/releases
|
|
||||||
[new-release]: https://github.com/pelletier/go-toml/releases/new
|
[new-release]: https://github.com/pelletier/go-toml/releases/new
|
||||||
[release-110]: https://github.com/pelletier/go-toml/releases/tag/v1.1.0
|
[gh]: https://github.com/cli/cli
|
||||||
|
[pr-labels]: https://github.com/pelletier/go-toml/blob/v2/.github/release.yml
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
FROM scratch
|
||||||
|
ENV PATH "$PATH:/bin"
|
||||||
|
COPY tomll /bin/tomll
|
||||||
|
COPY tomljson /bin/tomljson
|
||||||
|
COPY jsontoml /bin/jsontoml
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) 2013 - 2021 Thomas Pelletier, Eric Anderton
|
Copyright (c) 2013 - 2022 Thomas Pelletier, Eric Anderton
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -4,24 +4,14 @@ Go library for the [TOML](https://toml.io/en/) format.
|
|||||||
|
|
||||||
This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0).
|
This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0).
|
||||||
|
|
||||||
## Development status
|
|
||||||
|
|
||||||
This is the upcoming major version of go-toml. It is currently in active
|
|
||||||
development. As of release v2.0.0-beta.1, the library has reached feature parity
|
|
||||||
with v1, and fixes a lot known bugs and performance issues along the way.
|
|
||||||
|
|
||||||
If you do not need the advanced document editing features of v1, you are
|
|
||||||
encouraged to try out this version.
|
|
||||||
|
|
||||||
[👉 Roadmap for v2](https://github.com/pelletier/go-toml/discussions/506)
|
|
||||||
|
|
||||||
[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues)
|
[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues)
|
||||||
|
|
||||||
[💬 Anything else](https://github.com/pelletier/go-toml/discussions)
|
[💬 Anything else](https://github.com/pelletier/go-toml/discussions)
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
Full API, examples, and implementation notes are available in the Go documentation.
|
Full API, examples, and implementation notes are available in the Go
|
||||||
|
documentation.
|
||||||
|
|
||||||
[](https://pkg.go.dev/github.com/pelletier/go-toml/v2)
|
[](https://pkg.go.dev/github.com/pelletier/go-toml/v2)
|
||||||
|
|
||||||
@@ -48,15 +38,16 @@ operations should not be shockingly slow. See [benchmarks](#benchmarks).
|
|||||||
### Strict mode
|
### Strict mode
|
||||||
|
|
||||||
`Decoder` can be set to "strict mode", which makes it error when some parts of
|
`Decoder` can be set to "strict mode", which makes it error when some parts of
|
||||||
the TOML document was not prevent in the target structure. This is a great way
|
the TOML document was not present in the target structure. This is a great way
|
||||||
to check for typos. [See example in the documentation][strict].
|
to check for typos. [See example in the documentation][strict].
|
||||||
|
|
||||||
[strict]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Decoder.SetStrict
|
[strict]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Decoder.DisallowUnknownFields
|
||||||
|
|
||||||
### Contextualized errors
|
### Contextualized errors
|
||||||
|
|
||||||
When decoding errors occur, go-toml returns [`DecodeError`][decode-err]), which
|
When most decoding errors occur, go-toml returns [`DecodeError`][decode-err]),
|
||||||
contains a human readable contextualized version of the error. For example:
|
which contains a human readable contextualized version of the error. For
|
||||||
|
example:
|
||||||
|
|
||||||
```
|
```
|
||||||
2| key1 = "value1"
|
2| key1 = "value1"
|
||||||
@@ -149,6 +140,17 @@ fmt.Println(string(b))
|
|||||||
|
|
||||||
[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
|
[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
|
||||||
|
|
||||||
|
## Unstable API
|
||||||
|
|
||||||
|
This API does not yet follow the backward compatibility guarantees of this
|
||||||
|
library. They provide early access to features that may have rough edges or an
|
||||||
|
API subject to change.
|
||||||
|
|
||||||
|
### Parser
|
||||||
|
|
||||||
|
Parser is the unstable API that allows iterative parsing of a TOML document at
|
||||||
|
the AST level. See https://pkg.go.dev/github.com/pelletier/go-toml/v2/unstable.
|
||||||
|
|
||||||
## Benchmarks
|
## Benchmarks
|
||||||
|
|
||||||
Execution time speedup compared to other Go TOML libraries:
|
Execution time speedup compared to other Go TOML libraries:
|
||||||
@@ -159,11 +161,11 @@ Execution time speedup compared to other Go TOML libraries:
|
|||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr><td>Marshal/HugoFrontMatter-2</td><td>1.9x</td><td>1.9x</td></tr>
|
<tr><td>Marshal/HugoFrontMatter-2</td><td>1.9x</td><td>1.9x</td></tr>
|
||||||
<tr><td>Marshal/ReferenceFile/map-2</td><td>1.7x</td><td>1.9x</td></tr>
|
<tr><td>Marshal/ReferenceFile/map-2</td><td>1.7x</td><td>1.8x</td></tr>
|
||||||
<tr><td>Marshal/ReferenceFile/struct-2</td><td>2.4x</td><td>2.6x</td></tr>
|
<tr><td>Marshal/ReferenceFile/struct-2</td><td>2.2x</td><td>2.5x</td></tr>
|
||||||
<tr><td>Unmarshal/HugoFrontMatter-2</td><td>2.9x</td><td>2.5x</td></tr>
|
<tr><td>Unmarshal/HugoFrontMatter-2</td><td>2.9x</td><td>2.9x</td></tr>
|
||||||
<tr><td>Unmarshal/ReferenceFile/map-2</td><td>2.7x</td><td>2.6x</td></tr>
|
<tr><td>Unmarshal/ReferenceFile/map-2</td><td>2.6x</td><td>2.9x</td></tr>
|
||||||
<tr><td>Unmarshal/ReferenceFile/struct-2</td><td>4.8x</td><td>5.1x</td></tr>
|
<tr><td>Unmarshal/ReferenceFile/struct-2</td><td>4.4x</td><td>5.3x</td></tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
<details><summary>See more</summary>
|
<details><summary>See more</summary>
|
||||||
@@ -176,17 +178,17 @@ provided for completeness.</p>
|
|||||||
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
|
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr><td>Marshal/SimpleDocument/map-2</td><td>1.7x</td><td>2.1x</td></tr>
|
<tr><td>Marshal/SimpleDocument/map-2</td><td>1.8x</td><td>2.9x</td></tr>
|
||||||
<tr><td>Marshal/SimpleDocument/struct-2</td><td>2.5x</td><td>2.8x</td></tr>
|
<tr><td>Marshal/SimpleDocument/struct-2</td><td>2.7x</td><td>4.2x</td></tr>
|
||||||
<tr><td>Unmarshal/SimpleDocument/map-2</td><td>4.1x</td><td>3.1x</td></tr>
|
<tr><td>Unmarshal/SimpleDocument/map-2</td><td>4.5x</td><td>3.1x</td></tr>
|
||||||
<tr><td>Unmarshal/SimpleDocument/struct-2</td><td>6.4x</td><td>4.3x</td></tr>
|
<tr><td>Unmarshal/SimpleDocument/struct-2</td><td>6.2x</td><td>3.9x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/example-2</td><td>3.4x</td><td>3.2x</td></tr>
|
<tr><td>UnmarshalDataset/example-2</td><td>3.1x</td><td>3.5x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/code-2</td><td>2.2x</td><td>2.5x</td></tr>
|
<tr><td>UnmarshalDataset/code-2</td><td>2.3x</td><td>3.1x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/twitter-2</td><td>2.8x</td><td>2.7x</td></tr>
|
<tr><td>UnmarshalDataset/twitter-2</td><td>2.5x</td><td>2.6x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/citm_catalog-2</td><td>2.2x</td><td>2.0x</td></tr>
|
<tr><td>UnmarshalDataset/citm_catalog-2</td><td>2.1x</td><td>2.2x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/canada-2</td><td>1.8x</td><td>1.4x</td></tr>
|
<tr><td>UnmarshalDataset/canada-2</td><td>1.6x</td><td>1.3x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/config-2</td><td>4.4x</td><td>2.9x</td></tr>
|
<tr><td>UnmarshalDataset/config-2</td><td>4.3x</td><td>3.2x</td></tr>
|
||||||
<tr><td>[Geo mean]</td><td>2.8x</td><td>2.6x</td></tr>
|
<tr><td>[Geo mean]</td><td>2.7x</td><td>2.8x</td></tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>
|
<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>
|
||||||
@@ -206,6 +208,44 @@ In case of trouble: [Go Modules FAQ][mod-faq].
|
|||||||
|
|
||||||
[mod-faq]: https://github.com/golang/go/wiki/Modules#why-does-installing-a-tool-via-go-get-fail-with-error-cannot-find-main-module
|
[mod-faq]: https://github.com/golang/go/wiki/Modules#why-does-installing-a-tool-via-go-get-fail-with-error-cannot-find-main-module
|
||||||
|
|
||||||
|
## Tools
|
||||||
|
|
||||||
|
Go-toml provides three handy command line tools:
|
||||||
|
|
||||||
|
* `tomljson`: Reads a TOML file and outputs its JSON representation.
|
||||||
|
|
||||||
|
```
|
||||||
|
$ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
|
||||||
|
$ tomljson --help
|
||||||
|
```
|
||||||
|
|
||||||
|
* `jsontoml`: Reads a JSON file and outputs a TOML representation.
|
||||||
|
|
||||||
|
```
|
||||||
|
$ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
|
||||||
|
$ jsontoml --help
|
||||||
|
```
|
||||||
|
|
||||||
|
* `tomll`: Lints and reformats a TOML file.
|
||||||
|
|
||||||
|
```
|
||||||
|
$ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
|
||||||
|
$ tomll --help
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker image
|
||||||
|
|
||||||
|
Those tools are also available as a [Docker image][docker]. For example, to use
|
||||||
|
`tomljson`:
|
||||||
|
|
||||||
|
```
|
||||||
|
docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml
|
||||||
|
```
|
||||||
|
|
||||||
|
Multiple versions are availble on [ghcr.io][docker].
|
||||||
|
|
||||||
|
[docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml
|
||||||
|
|
||||||
## Migrating from v1
|
## Migrating from v1
|
||||||
|
|
||||||
This section describes the differences between v1 and v2, with some pointers on
|
This section describes the differences between v1 and v2, with some pointers on
|
||||||
@@ -324,6 +364,29 @@ The recommended replacement is pre-filling the struct before unmarshaling.
|
|||||||
|
|
||||||
[go-defaults]: https://github.com/mcuadros/go-defaults
|
[go-defaults]: https://github.com/mcuadros/go-defaults
|
||||||
|
|
||||||
|
#### `toml.Tree` replacement
|
||||||
|
|
||||||
|
This structure was the initial attempt at providing a document model for
|
||||||
|
go-toml. It allows manipulating the structure of any document, encoding and
|
||||||
|
decoding from their TOML representation. While a more robust feature was
|
||||||
|
initially planned in go-toml v2, this has been ultimately [removed from
|
||||||
|
scope][nodoc] of this library, with no plan to add it back at the moment. The
|
||||||
|
closest equivalent at the moment would be to unmarshal into an `interface{}` and
|
||||||
|
use type assertions and/or reflection to manipulate the arbitrary
|
||||||
|
structure. However this would fall short of providing all of the TOML features
|
||||||
|
such as adding comments and be specific about whitespace.
|
||||||
|
|
||||||
|
|
||||||
|
#### `toml.Position` are not retrievable anymore
|
||||||
|
|
||||||
|
The API for retrieving the position (line, column) of a specific TOML element do
|
||||||
|
not exist anymore. This was done to minimize the amount of concepts introduced
|
||||||
|
by the library (query path), and avoid the performance hit related to storing
|
||||||
|
positions in the absence of a document model, for a feature that seemed to have
|
||||||
|
little use. Errors however have gained more detailed position
|
||||||
|
information. Position retrieval seems better fitted for a document model, which
|
||||||
|
has been [removed from the scope][nodoc] of go-toml v2 at the moment.
|
||||||
|
|
||||||
### Encoding / Marshal
|
### Encoding / Marshal
|
||||||
|
|
||||||
#### Default struct fields order
|
#### Default struct fields order
|
||||||
@@ -359,7 +422,8 @@ fmt.Println("v2:\n" + string(b))
|
|||||||
```
|
```
|
||||||
|
|
||||||
There is no way to make v2 encoder behave like v1. A workaround could be to
|
There is no way to make v2 encoder behave like v1. A workaround could be to
|
||||||
manually sort the fields alphabetically in the struct definition.
|
manually sort the fields alphabetically in the struct definition, or generate
|
||||||
|
struct types using `reflect.StructOf`.
|
||||||
|
|
||||||
#### No indentation by default
|
#### No indentation by default
|
||||||
|
|
||||||
@@ -407,7 +471,9 @@ fmt.Println("v2 Encoder:\n" + string(buf.Bytes()))
|
|||||||
|
|
||||||
V1 always uses double quotes (`"`) around strings and keys that cannot be
|
V1 always uses double quotes (`"`) around strings and keys that cannot be
|
||||||
represented bare (unquoted). V2 uses single quotes instead by default (`'`),
|
represented bare (unquoted). V2 uses single quotes instead by default (`'`),
|
||||||
unless a character cannot be represented, then falls back to double quotes.
|
unless a character cannot be represented, then falls back to double quotes. As a
|
||||||
|
result of this change, `Encoder.QuoteMapKeys` has been removed, as it is not
|
||||||
|
useful anymore.
|
||||||
|
|
||||||
There is no way to make v2 encoder behave like v1.
|
There is no way to make v2 encoder behave like v1.
|
||||||
|
|
||||||
@@ -422,6 +488,76 @@ There is no way to make v2 encoder behave like v1.
|
|||||||
|
|
||||||
[tm]: https://golang.org/pkg/encoding/#TextMarshaler
|
[tm]: https://golang.org/pkg/encoding/#TextMarshaler
|
||||||
|
|
||||||
|
#### `Encoder.CompactComments` has been removed
|
||||||
|
|
||||||
|
Emitting compact comments is now the default behavior of go-toml. This option
|
||||||
|
is not necessary anymore.
|
||||||
|
|
||||||
|
#### Struct tags have been merged
|
||||||
|
|
||||||
|
V1 used to provide multiple struct tags: `comment`, `commented`, `multiline`,
|
||||||
|
`toml`, and `omitempty`. To behave more like the standard library, v2 has merged
|
||||||
|
`toml`, `multiline`, and `omitempty`. For example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type doc struct {
|
||||||
|
// v1
|
||||||
|
F string `toml:"field" multiline:"true" omitempty:"true"`
|
||||||
|
// v2
|
||||||
|
F string `toml:"field,multiline,omitempty"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Has a result, the `Encoder.SetTag*` methods have been removed, as there is just
|
||||||
|
one tag now.
|
||||||
|
|
||||||
|
|
||||||
|
#### `commented` tag has been removed
|
||||||
|
|
||||||
|
There is no replacement for the `commented` tag. This feature would be better
|
||||||
|
suited in a proper document model for go-toml v2, which has been [cut from
|
||||||
|
scope][nodoc] at the moment.
|
||||||
|
|
||||||
|
#### `Encoder.ArraysWithOneElementPerLine` has been renamed
|
||||||
|
|
||||||
|
The new name is `Encoder.SetArraysMultiline`. The behavior should be the same.
|
||||||
|
|
||||||
|
#### `Encoder.Indentation` has been renamed
|
||||||
|
|
||||||
|
The new name is `Encoder.SetIndentSymbol`. The behavior should be the same.
|
||||||
|
|
||||||
|
|
||||||
|
#### Embedded structs behave like stdlib
|
||||||
|
|
||||||
|
V1 defaults to merging embedded struct fields into the embedding struct. This
|
||||||
|
behavior was unexpected because it does not follow the standard library. To
|
||||||
|
avoid breaking backward compatibility, the `Encoder.PromoteAnonymous` method was
|
||||||
|
added to make the encoder behave correctly. Given backward compatibility is not
|
||||||
|
a problem anymore, v2 does the right thing by default: it follows the behavior
|
||||||
|
of `encoding/json`. `Encoder.PromoteAnonymous` has been removed.
|
||||||
|
|
||||||
|
[nodoc]: https://github.com/pelletier/go-toml/discussions/506#discussioncomment-1526038
|
||||||
|
|
||||||
|
### `query`
|
||||||
|
|
||||||
|
go-toml v1 provided the [`go-toml/query`][query] package. It allowed to run
|
||||||
|
JSONPath-style queries on TOML files. This feature is not available in v2. For a
|
||||||
|
replacement, check out [dasel][dasel].
|
||||||
|
|
||||||
|
This package has been removed because it was essentially not supported anymore
|
||||||
|
(last commit May 2020), increased the complexity of the code base, and more
|
||||||
|
complete solutions exist out there.
|
||||||
|
|
||||||
|
[query]: https://github.com/pelletier/go-toml/tree/f99d6bbca119636aeafcf351ee52b3d202782627/query
|
||||||
|
[dasel]: https://github.com/TomWright/dasel
|
||||||
|
|
||||||
|
## Versioning
|
||||||
|
|
||||||
|
Go-toml follows [Semantic Versioning](https://semver.org). The supported version
|
||||||
|
of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
|
||||||
|
this document. The last two major versions of Go are supported
|
||||||
|
(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
The MIT License (MIT). Read [LICENSE](LICENSE).
|
The MIT License (MIT). Read [LICENSE](LICENSE).
|
||||||
|
|||||||
@@ -76,7 +76,8 @@ cover() {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
pushd "$dir"
|
pushd "$dir"
|
||||||
go test -covermode=atomic -coverprofile=coverage.out ./...
|
go test -covermode=atomic -coverpkg=./... -coverprofile=coverage.out.tmp ./...
|
||||||
|
cat coverage.out.tmp | grep -v fuzz | grep -v testsuite | grep -v tomltestgen | grep -v gotoml-test-decoder > coverage.out
|
||||||
go tool cover -func=coverage.out
|
go tool cover -func=coverage.out
|
||||||
popd
|
popd
|
||||||
|
|
||||||
@@ -103,16 +104,23 @@ coverage() {
|
|||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
target_pct="$(cat ${target_out} |sed -E 's/.*total.*\t([0-9.]+)%/\1/;t;d')"
|
target_pct="$(tail -n2 ${target_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%.*/\1/')"
|
||||||
head_pct="$(cat ${head_out} |sed -E 's/.*total.*\t([0-9.]+)%/\1/;t;d')"
|
head_pct="$(tail -n2 ${head_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%/\1/')"
|
||||||
echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%"
|
echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%"
|
||||||
|
|
||||||
delta_pct=$(echo "$head_pct - $target_pct" | bc -l)
|
delta_pct=$(echo "$head_pct - $target_pct" | bc -l)
|
||||||
echo "Delta: ${delta_pct}"
|
echo "Delta: ${delta_pct}"
|
||||||
|
|
||||||
if [[ $delta_pct = \-* ]]; then
|
if [[ $delta_pct = \-* ]]; then
|
||||||
echo "Regression!";
|
echo "Regression!";
|
||||||
return 1
|
|
||||||
|
target_diff="${output_dir}/target.diff.txt"
|
||||||
|
head_diff="${output_dir}/head.diff.txt"
|
||||||
|
cat "${target_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${target_diff}"
|
||||||
|
cat "${head_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${head_diff}"
|
||||||
|
|
||||||
|
diff --side-by-side --suppress-common-lines "${target_diff}" "${head_diff}"
|
||||||
|
return 1
|
||||||
fi
|
fi
|
||||||
return 0
|
return 0
|
||||||
;;
|
;;
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/testsuite"
|
"github.com/pelletier/go-toml/v2/internal/testsuite"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
|||||||
@@ -0,0 +1,55 @@
|
|||||||
|
// Package jsontoml is a program that converts JSON to TOML.
|
||||||
|
//
|
||||||
|
// # Usage
|
||||||
|
//
|
||||||
|
// Reading from stdin:
|
||||||
|
//
|
||||||
|
// cat file.json | jsontoml > file.toml
|
||||||
|
//
|
||||||
|
// Reading from a file:
|
||||||
|
//
|
||||||
|
// jsontoml file.json > file.toml
|
||||||
|
//
|
||||||
|
// # Installation
|
||||||
|
//
|
||||||
|
// Using Go:
|
||||||
|
//
|
||||||
|
// go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/cli"
|
||||||
|
)
|
||||||
|
|
||||||
|
const usage = `jsontoml can be used in two ways:
|
||||||
|
Reading from stdin:
|
||||||
|
cat file.json | jsontoml > file.toml
|
||||||
|
|
||||||
|
Reading from a file:
|
||||||
|
jsontoml file.json > file.toml
|
||||||
|
`
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
p := cli.Program{
|
||||||
|
Usage: usage,
|
||||||
|
Fn: convert,
|
||||||
|
}
|
||||||
|
p.Execute()
|
||||||
|
}
|
||||||
|
|
||||||
|
func convert(r io.Reader, w io.Writer) error {
|
||||||
|
var v interface{}
|
||||||
|
|
||||||
|
d := json.NewDecoder(r)
|
||||||
|
err := d.Decode(&v)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
e := toml.NewEncoder(w)
|
||||||
|
return e.Encode(v)
|
||||||
|
}
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestConvert(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
name string
|
||||||
|
input string
|
||||||
|
expected string
|
||||||
|
errors bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid json",
|
||||||
|
input: `
|
||||||
|
{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
expected: `[mytoml]
|
||||||
|
a = 42.0
|
||||||
|
`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid json",
|
||||||
|
input: `{ foo`,
|
||||||
|
errors: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
err := convert(strings.NewReader(e.input), b)
|
||||||
|
if e.errors {
|
||||||
|
require.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, e.expected, b.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,63 @@
|
|||||||
|
// Package tomljson is a program that converts TOML to JSON.
|
||||||
|
//
|
||||||
|
// # Usage
|
||||||
|
//
|
||||||
|
// Reading from stdin:
|
||||||
|
//
|
||||||
|
// cat file.toml | tomljson > file.json
|
||||||
|
//
|
||||||
|
// Reading from a file:
|
||||||
|
//
|
||||||
|
// tomljson file.toml > file.json
|
||||||
|
//
|
||||||
|
// # Installation
|
||||||
|
//
|
||||||
|
// Using Go:
|
||||||
|
//
|
||||||
|
// go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/cli"
|
||||||
|
)
|
||||||
|
|
||||||
|
const usage = `tomljson can be used in two ways:
|
||||||
|
Reading from stdin:
|
||||||
|
cat file.toml | tomljson > file.json
|
||||||
|
|
||||||
|
Reading from a file:
|
||||||
|
tomljson file.toml > file.json
|
||||||
|
`
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
p := cli.Program{
|
||||||
|
Usage: usage,
|
||||||
|
Fn: convert,
|
||||||
|
}
|
||||||
|
p.Execute()
|
||||||
|
}
|
||||||
|
|
||||||
|
func convert(r io.Reader, w io.Writer) error {
|
||||||
|
var v interface{}
|
||||||
|
|
||||||
|
d := toml.NewDecoder(r)
|
||||||
|
err := d.Decode(&v)
|
||||||
|
if err != nil {
|
||||||
|
var derr *toml.DecodeError
|
||||||
|
if errors.As(err, &derr) {
|
||||||
|
row, col := derr.Position()
|
||||||
|
return fmt.Errorf("%s\nerror occurred at row %d column %d", derr.String(), row, col)
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
e := json.NewEncoder(w)
|
||||||
|
e.SetIndent("", " ")
|
||||||
|
return e.Encode(v)
|
||||||
|
}
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestConvert(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
name string
|
||||||
|
input io.Reader
|
||||||
|
expected string
|
||||||
|
errors bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid toml",
|
||||||
|
input: strings.NewReader(`
|
||||||
|
[mytoml]
|
||||||
|
a = 42`),
|
||||||
|
expected: `{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid toml",
|
||||||
|
input: strings.NewReader(`bad = []]`),
|
||||||
|
errors: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "bad reader",
|
||||||
|
input: &badReader{},
|
||||||
|
errors: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
err := convert(e.input, b)
|
||||||
|
if e.errors {
|
||||||
|
require.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, e.expected, b.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type badReader struct{}
|
||||||
|
|
||||||
|
func (r *badReader) Read([]byte) (int, error) {
|
||||||
|
return 0, fmt.Errorf("reader failed on purpose")
|
||||||
|
}
|
||||||
@@ -0,0 +1,58 @@
|
|||||||
|
// Package tomll is a linter program for TOML.
|
||||||
|
//
|
||||||
|
// # Usage
|
||||||
|
//
|
||||||
|
// Reading from stdin, writing to stdout:
|
||||||
|
//
|
||||||
|
// cat file.toml | tomll
|
||||||
|
//
|
||||||
|
// Reading and updating a list of files in place:
|
||||||
|
//
|
||||||
|
// tomll a.toml b.toml c.toml
|
||||||
|
//
|
||||||
|
// # Installation
|
||||||
|
//
|
||||||
|
// Using Go:
|
||||||
|
//
|
||||||
|
// go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/cli"
|
||||||
|
)
|
||||||
|
|
||||||
|
const usage = `tomll can be used in two ways:
|
||||||
|
|
||||||
|
Reading from stdin, writing to stdout:
|
||||||
|
cat file.toml | tomll > file.toml
|
||||||
|
|
||||||
|
Reading and updating a list of files in place:
|
||||||
|
tomll a.toml b.toml c.toml
|
||||||
|
|
||||||
|
When given a list of files, tomll will modify all files in place without asking.
|
||||||
|
`
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
p := cli.Program{
|
||||||
|
Usage: usage,
|
||||||
|
Fn: convert,
|
||||||
|
Inplace: true,
|
||||||
|
}
|
||||||
|
p.Execute()
|
||||||
|
}
|
||||||
|
|
||||||
|
func convert(r io.Reader, w io.Writer) error {
|
||||||
|
var v interface{}
|
||||||
|
|
||||||
|
d := toml.NewDecoder(r)
|
||||||
|
err := d.Decode(&v)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
e := toml.NewEncoder(w)
|
||||||
|
return e.Encode(v)
|
||||||
|
}
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestConvert(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
name string
|
||||||
|
input string
|
||||||
|
expected string
|
||||||
|
errors bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid toml",
|
||||||
|
input: `
|
||||||
|
mytoml.a = 42.0
|
||||||
|
`,
|
||||||
|
expected: `[mytoml]
|
||||||
|
a = 42.0
|
||||||
|
`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid toml",
|
||||||
|
input: `[what`,
|
||||||
|
errors: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
b := new(bytes.Buffer)
|
||||||
|
err := convert(strings.NewReader(e.input), b)
|
||||||
|
if e.errors {
|
||||||
|
require.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, e.expected, b.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,7 +3,7 @@
|
|||||||
//
|
//
|
||||||
// Within the go-toml package, run `go generate`. Otherwise, use:
|
// Within the go-toml package, run `go generate`. Otherwise, use:
|
||||||
//
|
//
|
||||||
// go run github.com/pelletier/go-toml/cmd/tomltestgen -o toml_testgen_test.go
|
// go run github.com/pelletier/go-toml/cmd/tomltestgen -o toml_testgen_test.go
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
|||||||
@@ -5,6 +5,8 @@ import (
|
|||||||
"math"
|
"math"
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
func parseInteger(b []byte) (int64, error) {
|
func parseInteger(b []byte) (int64, error) {
|
||||||
@@ -32,7 +34,7 @@ func parseLocalDate(b []byte) (LocalDate, error) {
|
|||||||
var date LocalDate
|
var date LocalDate
|
||||||
|
|
||||||
if len(b) != 10 || b[4] != '-' || b[7] != '-' {
|
if len(b) != 10 || b[4] != '-' || b[7] != '-' {
|
||||||
return date, newDecodeError(b, "dates are expected to have the format YYYY-MM-DD")
|
return date, unstable.NewParserError(b, "dates are expected to have the format YYYY-MM-DD")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
@@ -53,7 +55,7 @@ func parseLocalDate(b []byte) (LocalDate, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if !isValidDate(date.Year, date.Month, date.Day) {
|
if !isValidDate(date.Year, date.Month, date.Day) {
|
||||||
return LocalDate{}, newDecodeError(b, "impossible date")
|
return LocalDate{}, unstable.NewParserError(b, "impossible date")
|
||||||
}
|
}
|
||||||
|
|
||||||
return date, nil
|
return date, nil
|
||||||
@@ -64,7 +66,7 @@ func parseDecimalDigits(b []byte) (int, error) {
|
|||||||
|
|
||||||
for i, c := range b {
|
for i, c := range b {
|
||||||
if c < '0' || c > '9' {
|
if c < '0' || c > '9' {
|
||||||
return 0, newDecodeError(b[i:i+1], "expected digit (0-9)")
|
return 0, unstable.NewParserError(b[i:i+1], "expected digit (0-9)")
|
||||||
}
|
}
|
||||||
v *= 10
|
v *= 10
|
||||||
v += int(c - '0')
|
v += int(c - '0')
|
||||||
@@ -97,22 +99,49 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||||||
} else {
|
} else {
|
||||||
const dateTimeByteLen = 6
|
const dateTimeByteLen = 6
|
||||||
if len(b) != dateTimeByteLen {
|
if len(b) != dateTimeByteLen {
|
||||||
return time.Time{}, newDecodeError(b, "invalid date-time timezone")
|
return time.Time{}, unstable.NewParserError(b, "invalid date-time timezone")
|
||||||
}
|
}
|
||||||
direction := 1
|
var direction int
|
||||||
if b[0] == '-' {
|
switch b[0] {
|
||||||
|
case '-':
|
||||||
direction = -1
|
direction = -1
|
||||||
|
case '+':
|
||||||
|
direction = +1
|
||||||
|
default:
|
||||||
|
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset character")
|
||||||
|
}
|
||||||
|
|
||||||
|
if b[3] != ':' {
|
||||||
|
return time.Time{}, unstable.NewParserError(b[3:4], "expected a : separator")
|
||||||
|
}
|
||||||
|
|
||||||
|
hours, err := parseDecimalDigits(b[1:3])
|
||||||
|
if err != nil {
|
||||||
|
return time.Time{}, err
|
||||||
|
}
|
||||||
|
if hours > 23 {
|
||||||
|
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset hours")
|
||||||
|
}
|
||||||
|
|
||||||
|
minutes, err := parseDecimalDigits(b[4:6])
|
||||||
|
if err != nil {
|
||||||
|
return time.Time{}, err
|
||||||
|
}
|
||||||
|
if minutes > 59 {
|
||||||
|
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset minutes")
|
||||||
}
|
}
|
||||||
|
|
||||||
hours := digitsToInt(b[1:3])
|
|
||||||
minutes := digitsToInt(b[4:6])
|
|
||||||
seconds := direction * (hours*3600 + minutes*60)
|
seconds := direction * (hours*3600 + minutes*60)
|
||||||
zone = time.FixedZone("", seconds)
|
if seconds == 0 {
|
||||||
|
zone = time.UTC
|
||||||
|
} else {
|
||||||
|
zone = time.FixedZone("", seconds)
|
||||||
|
}
|
||||||
b = b[dateTimeByteLen:]
|
b = b[dateTimeByteLen:]
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(b) > 0 {
|
if len(b) > 0 {
|
||||||
return time.Time{}, newDecodeError(b, "extra bytes at the end of the timezone")
|
return time.Time{}, unstable.NewParserError(b, "extra bytes at the end of the timezone")
|
||||||
}
|
}
|
||||||
|
|
||||||
t := time.Date(
|
t := time.Date(
|
||||||
@@ -133,7 +162,7 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
|
|||||||
|
|
||||||
const localDateTimeByteMinLen = 11
|
const localDateTimeByteMinLen = 11
|
||||||
if len(b) < localDateTimeByteMinLen {
|
if len(b) < localDateTimeByteMinLen {
|
||||||
return dt, nil, newDecodeError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
|
return dt, nil, unstable.NewParserError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
|
||||||
}
|
}
|
||||||
|
|
||||||
date, err := parseLocalDate(b[:10])
|
date, err := parseLocalDate(b[:10])
|
||||||
@@ -144,7 +173,7 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
|
|||||||
|
|
||||||
sep := b[10]
|
sep := b[10]
|
||||||
if sep != 'T' && sep != ' ' && sep != 't' {
|
if sep != 'T' && sep != ' ' && sep != 't' {
|
||||||
return dt, nil, newDecodeError(b[10:11], "datetime separator is expected to be T or a space")
|
return dt, nil, unstable.NewParserError(b[10:11], "datetime separator is expected to be T or a space")
|
||||||
}
|
}
|
||||||
|
|
||||||
t, rest, err := parseLocalTime(b[11:])
|
t, rest, err := parseLocalTime(b[11:])
|
||||||
@@ -168,7 +197,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
// check if b matches to have expected format HH:MM:SS[.NNNNNN]
|
// check if b matches to have expected format HH:MM:SS[.NNNNNN]
|
||||||
const localTimeByteLen = 8
|
const localTimeByteLen = 8
|
||||||
if len(b) < localTimeByteLen {
|
if len(b) < localTimeByteLen {
|
||||||
return t, nil, newDecodeError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
|
return t, nil, unstable.NewParserError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
@@ -179,10 +208,10 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if t.Hour > 23 {
|
if t.Hour > 23 {
|
||||||
return t, nil, newDecodeError(b[0:2], "hour cannot be greater 23")
|
return t, nil, unstable.NewParserError(b[0:2], "hour cannot be greater 23")
|
||||||
}
|
}
|
||||||
if b[2] != ':' {
|
if b[2] != ':' {
|
||||||
return t, nil, newDecodeError(b[2:3], "expecting colon between hours and minutes")
|
return t, nil, unstable.NewParserError(b[2:3], "expecting colon between hours and minutes")
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Minute, err = parseDecimalDigits(b[3:5])
|
t.Minute, err = parseDecimalDigits(b[3:5])
|
||||||
@@ -190,10 +219,10 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
return t, nil, err
|
return t, nil, err
|
||||||
}
|
}
|
||||||
if t.Minute > 59 {
|
if t.Minute > 59 {
|
||||||
return t, nil, newDecodeError(b[3:5], "minutes cannot be greater 59")
|
return t, nil, unstable.NewParserError(b[3:5], "minutes cannot be greater 59")
|
||||||
}
|
}
|
||||||
if b[5] != ':' {
|
if b[5] != ':' {
|
||||||
return t, nil, newDecodeError(b[5:6], "expecting colon between minutes and seconds")
|
return t, nil, unstable.NewParserError(b[5:6], "expecting colon between minutes and seconds")
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Second, err = parseDecimalDigits(b[6:8])
|
t.Second, err = parseDecimalDigits(b[6:8])
|
||||||
@@ -201,45 +230,53 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
return t, nil, err
|
return t, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if t.Second > 59 {
|
if t.Second > 60 {
|
||||||
return t, nil, newDecodeError(b[3:5], "seconds cannot be greater 59")
|
return t, nil, unstable.NewParserError(b[6:8], "seconds cannot be greater 60")
|
||||||
}
|
}
|
||||||
|
|
||||||
const minLengthWithFrac = 9
|
b = b[8:]
|
||||||
if len(b) >= minLengthWithFrac && b[minLengthWithFrac-1] == '.' {
|
|
||||||
|
if len(b) >= 1 && b[0] == '.' {
|
||||||
frac := 0
|
frac := 0
|
||||||
|
precision := 0
|
||||||
digits := 0
|
digits := 0
|
||||||
|
|
||||||
for i, c := range b[minLengthWithFrac:] {
|
for i, c := range b[1:] {
|
||||||
if !isDigit(c) {
|
if !isDigit(c) {
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
return t, nil, newDecodeError(b[i:i+1], "need at least one digit after fraction point")
|
return t, nil, unstable.NewParserError(b[0:1], "need at least one digit after fraction point")
|
||||||
}
|
}
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
digits++
|
||||||
|
|
||||||
const maxFracPrecision = 9
|
const maxFracPrecision = 9
|
||||||
if i >= maxFracPrecision {
|
if i >= maxFracPrecision {
|
||||||
return t, nil, newDecodeError(b[i:i+1], "maximum precision for date time is nanosecond")
|
// go-toml allows decoding fractional seconds
|
||||||
|
// beyond the supported precision of 9
|
||||||
|
// digits. It truncates the fractional component
|
||||||
|
// to the supported precision and ignores the
|
||||||
|
// remaining digits.
|
||||||
|
//
|
||||||
|
// https://github.com/pelletier/go-toml/discussions/707
|
||||||
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
frac *= 10
|
frac *= 10
|
||||||
frac += int(c - '0')
|
frac += int(c - '0')
|
||||||
digits++
|
precision++
|
||||||
}
|
}
|
||||||
|
|
||||||
if digits == 0 {
|
if precision == 0 {
|
||||||
return t, nil, newDecodeError(b[minLengthWithFrac-1:minLengthWithFrac], "nanoseconds need at least one digit")
|
return t, nil, unstable.NewParserError(b[:1], "nanoseconds need at least one digit")
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Nanosecond = frac * nspow[digits]
|
t.Nanosecond = frac * nspow[precision]
|
||||||
t.Precision = digits
|
t.Precision = precision
|
||||||
|
|
||||||
return t, b[9+digits:], nil
|
return t, b[1+digits:], nil
|
||||||
}
|
}
|
||||||
|
return t, b, nil
|
||||||
return t, b[8:], nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
//nolint:cyclop
|
||||||
@@ -254,40 +291,40 @@ func parseFloat(b []byte) (float64, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if cleaned[0] == '.' {
|
if cleaned[0] == '.' {
|
||||||
return 0, newDecodeError(b, "float cannot start with a dot")
|
return 0, unstable.NewParserError(b, "float cannot start with a dot")
|
||||||
}
|
}
|
||||||
|
|
||||||
if cleaned[len(cleaned)-1] == '.' {
|
if cleaned[len(cleaned)-1] == '.' {
|
||||||
return 0, newDecodeError(b, "float cannot end with a dot")
|
return 0, unstable.NewParserError(b, "float cannot end with a dot")
|
||||||
}
|
}
|
||||||
|
|
||||||
dotAlreadySeen := false
|
dotAlreadySeen := false
|
||||||
for i, c := range cleaned {
|
for i, c := range cleaned {
|
||||||
if c == '.' {
|
if c == '.' {
|
||||||
if dotAlreadySeen {
|
if dotAlreadySeen {
|
||||||
return 0, newDecodeError(b[i:i+1], "float can have at most one decimal point")
|
return 0, unstable.NewParserError(b[i:i+1], "float can have at most one decimal point")
|
||||||
}
|
}
|
||||||
if !isDigit(cleaned[i-1]) {
|
if !isDigit(cleaned[i-1]) {
|
||||||
return 0, newDecodeError(b[i-1:i+1], "float decimal point must be preceded by a digit")
|
return 0, unstable.NewParserError(b[i-1:i+1], "float decimal point must be preceded by a digit")
|
||||||
}
|
}
|
||||||
if !isDigit(cleaned[i+1]) {
|
if !isDigit(cleaned[i+1]) {
|
||||||
return 0, newDecodeError(b[i:i+2], "float decimal point must be followed by a digit")
|
return 0, unstable.NewParserError(b[i:i+2], "float decimal point must be followed by a digit")
|
||||||
}
|
}
|
||||||
dotAlreadySeen = true
|
dotAlreadySeen = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
start := 0
|
start := 0
|
||||||
if b[0] == '+' || b[0] == '-' {
|
if cleaned[0] == '+' || cleaned[0] == '-' {
|
||||||
start = 1
|
start = 1
|
||||||
}
|
}
|
||||||
if b[start] == '0' && isDigit(b[start+1]) {
|
if cleaned[start] == '0' && isDigit(cleaned[start+1]) {
|
||||||
return 0, newDecodeError(b, "float integer part cannot have leading zeroes")
|
return 0, unstable.NewParserError(b, "float integer part cannot have leading zeroes")
|
||||||
}
|
}
|
||||||
|
|
||||||
f, err := strconv.ParseFloat(string(cleaned), 64)
|
f, err := strconv.ParseFloat(string(cleaned), 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "unable to parse float: %w", err)
|
return 0, unstable.NewParserError(b, "unable to parse float: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return f, nil
|
return f, nil
|
||||||
@@ -301,7 +338,7 @@ func parseIntHex(b []byte) (int64, error) {
|
|||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 16, 64)
|
i, err := strconv.ParseInt(string(cleaned), 16, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse hexadecimal number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse hexadecimal number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -315,7 +352,7 @@ func parseIntOct(b []byte) (int64, error) {
|
|||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 8, 64)
|
i, err := strconv.ParseInt(string(cleaned), 8, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse octal number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse octal number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -329,7 +366,7 @@ func parseIntBin(b []byte) (int64, error) {
|
|||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 2, 64)
|
i, err := strconv.ParseInt(string(cleaned), 2, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse binary number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse binary number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -352,24 +389,33 @@ func parseIntDec(b []byte) (int64, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' {
|
if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' {
|
||||||
return 0, newDecodeError(b, "leading zero not allowed on decimal number")
|
return 0, unstable.NewParserError(b, "leading zero not allowed on decimal number")
|
||||||
}
|
}
|
||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 10, 64)
|
i, err := strconv.ParseInt(string(cleaned), 10, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse decimal number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse decimal number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
||||||
if b[0] == '_' {
|
start := 0
|
||||||
return nil, newDecodeError(b[0:1], "number cannot start with underscore")
|
if b[start] == '+' || b[start] == '-' {
|
||||||
|
start++
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(b) == start {
|
||||||
|
return b, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if b[start] == '_' {
|
||||||
|
return nil, unstable.NewParserError(b[start:start+1], "number cannot start with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[len(b)-1] == '_' {
|
if b[len(b)-1] == '_' {
|
||||||
return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore")
|
return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
// fast path
|
// fast path
|
||||||
@@ -391,7 +437,7 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||||||
c := b[i]
|
c := b[i]
|
||||||
if c == '_' {
|
if c == '_' {
|
||||||
if !before {
|
if !before {
|
||||||
return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores")
|
return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||||
}
|
}
|
||||||
before = false
|
before = false
|
||||||
} else {
|
} else {
|
||||||
@@ -405,11 +451,11 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||||||
|
|
||||||
func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
||||||
if b[0] == '_' {
|
if b[0] == '_' {
|
||||||
return nil, newDecodeError(b[0:1], "number cannot start with underscore")
|
return nil, unstable.NewParserError(b[0:1], "number cannot start with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[len(b)-1] == '_' {
|
if b[len(b)-1] == '_' {
|
||||||
return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore")
|
return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
// fast path
|
// fast path
|
||||||
@@ -432,23 +478,27 @@ func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
|||||||
switch c {
|
switch c {
|
||||||
case '_':
|
case '_':
|
||||||
if !before {
|
if !before {
|
||||||
return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores")
|
return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||||
}
|
}
|
||||||
if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') {
|
if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') {
|
||||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore before exponent")
|
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore before exponent")
|
||||||
}
|
}
|
||||||
before = false
|
before = false
|
||||||
|
case '+', '-':
|
||||||
|
// signed exponents
|
||||||
|
cleaned = append(cleaned, c)
|
||||||
|
before = false
|
||||||
case 'e', 'E':
|
case 'e', 'E':
|
||||||
if i < len(b)-1 && b[i+1] == '_' {
|
if i < len(b)-1 && b[i+1] == '_' {
|
||||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after exponent")
|
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after exponent")
|
||||||
}
|
}
|
||||||
cleaned = append(cleaned, c)
|
cleaned = append(cleaned, c)
|
||||||
case '.':
|
case '.':
|
||||||
if i < len(b)-1 && b[i+1] == '_' {
|
if i < len(b)-1 && b[i+1] == '_' {
|
||||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after decimal point")
|
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after decimal point")
|
||||||
}
|
}
|
||||||
if i > 0 && b[i-1] == '_' {
|
if i > 0 && b[i-1] == '_' {
|
||||||
return nil, newDecodeError(b[i-1:i], "cannot have underscore before decimal point")
|
return nil, unstable.NewParserError(b[i-1:i], "cannot have underscore before decimal point")
|
||||||
}
|
}
|
||||||
cleaned = append(cleaned, c)
|
cleaned = append(cleaned, c)
|
||||||
default:
|
default:
|
||||||
@@ -462,7 +512,7 @@ func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
|||||||
|
|
||||||
// isValidDate checks if a provided date is a date that exists.
|
// isValidDate checks if a provided date is a date that exists.
|
||||||
func isValidDate(year int, month int, day int) bool {
|
func isValidDate(year int, month int, day int) bool {
|
||||||
return day <= daysIn(month, year)
|
return month > 0 && month < 13 && day > 0 && day <= daysIn(month, year)
|
||||||
}
|
}
|
||||||
|
|
||||||
// daysBefore[m] counts the number of days in a non-leap year
|
// daysBefore[m] counts the number of days in a non-leap year
|
||||||
@@ -494,3 +544,7 @@ func daysIn(m int, year int) int {
|
|||||||
func isLeap(year int) bool {
|
func isLeap(year int) bool {
|
||||||
return year%4 == 0 && (year%100 != 0 || year%400 == 0)
|
return year%4 == 0 && (year%100 != 0 || year%400 == 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isDigit(r byte) bool {
|
||||||
|
return r >= '0' && r <= '9'
|
||||||
|
}
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DecodeError represents an error encountered during the parsing or decoding
|
// DecodeError represents an error encountered during the parsing or decoding
|
||||||
@@ -27,7 +28,7 @@ type DecodeError struct {
|
|||||||
// corresponding field in the target value. It contains all the missing fields
|
// corresponding field in the target value. It contains all the missing fields
|
||||||
// in Errors.
|
// in Errors.
|
||||||
//
|
//
|
||||||
// Emitted by Decoder when SetStrict(true) was called.
|
// Emitted by Decoder when DisallowUnknownFields() was called.
|
||||||
type StrictMissingError struct {
|
type StrictMissingError struct {
|
||||||
// One error per field that could not be found.
|
// One error per field that could not be found.
|
||||||
Errors []DecodeError
|
Errors []DecodeError
|
||||||
@@ -55,25 +56,6 @@ func (s *StrictMissingError) String() string {
|
|||||||
|
|
||||||
type Key []string
|
type Key []string
|
||||||
|
|
||||||
// internal version of DecodeError that is used as the base to create a
|
|
||||||
// DecodeError with full context.
|
|
||||||
type decodeError struct {
|
|
||||||
highlight []byte
|
|
||||||
message string
|
|
||||||
key Key // optional
|
|
||||||
}
|
|
||||||
|
|
||||||
func (de *decodeError) Error() string {
|
|
||||||
return de.message
|
|
||||||
}
|
|
||||||
|
|
||||||
func newDecodeError(highlight []byte, format string, args ...interface{}) error {
|
|
||||||
return &decodeError{
|
|
||||||
highlight: highlight,
|
|
||||||
message: fmt.Errorf(format, args...).Error(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Error returns the error message contained in the DecodeError.
|
// Error returns the error message contained in the DecodeError.
|
||||||
func (e *DecodeError) Error() string {
|
func (e *DecodeError) Error() string {
|
||||||
return "toml: " + e.message
|
return "toml: " + e.message
|
||||||
@@ -103,13 +85,14 @@ func (e *DecodeError) Key() Key {
|
|||||||
//
|
//
|
||||||
// The function copies all bytes used in DecodeError, so that document and
|
// The function copies all bytes used in DecodeError, so that document and
|
||||||
// highlight can be freely deallocated.
|
// highlight can be freely deallocated.
|
||||||
|
//
|
||||||
//nolint:funlen
|
//nolint:funlen
|
||||||
func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
func wrapDecodeError(document []byte, de *unstable.ParserError) *DecodeError {
|
||||||
offset := danger.SubsliceOffset(document, de.highlight)
|
offset := danger.SubsliceOffset(document, de.Highlight)
|
||||||
|
|
||||||
errMessage := de.Error()
|
errMessage := de.Error()
|
||||||
errLine, errColumn := positionAtEnd(document[:offset])
|
errLine, errColumn := positionAtEnd(document[:offset])
|
||||||
before, after := linesOfContext(document, de.highlight, offset, 3)
|
before, after := linesOfContext(document, de.Highlight, offset, 3)
|
||||||
|
|
||||||
var buf strings.Builder
|
var buf strings.Builder
|
||||||
|
|
||||||
@@ -139,7 +122,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||||||
buf.Write(before[0])
|
buf.Write(before[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.Write(de.highlight)
|
buf.Write(de.Highlight)
|
||||||
|
|
||||||
if len(after) > 0 {
|
if len(after) > 0 {
|
||||||
buf.Write(after[0])
|
buf.Write(after[0])
|
||||||
@@ -157,7 +140,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||||||
buf.WriteString(strings.Repeat(" ", len(before[0])))
|
buf.WriteString(strings.Repeat(" ", len(before[0])))
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.WriteString(strings.Repeat("~", len(de.highlight)))
|
buf.WriteString(strings.Repeat("~", len(de.Highlight)))
|
||||||
|
|
||||||
if len(errMessage) > 0 {
|
if len(errMessage) > 0 {
|
||||||
buf.WriteString(" ")
|
buf.WriteString(" ")
|
||||||
@@ -182,7 +165,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||||||
message: errMessage,
|
message: errMessage,
|
||||||
line: errLine,
|
line: errLine,
|
||||||
column: errColumn,
|
column: errColumn,
|
||||||
key: de.key,
|
key: de.Key,
|
||||||
human: buf.String(),
|
human: buf.String(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
+10
-9
@@ -7,6 +7,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -170,9 +171,9 @@ line 5`,
|
|||||||
doc := b.Bytes()
|
doc := b.Bytes()
|
||||||
hl := doc[start:end]
|
hl := doc[start:end]
|
||||||
|
|
||||||
err := wrapDecodeError(doc, &decodeError{
|
err := wrapDecodeError(doc, &unstable.ParserError{
|
||||||
highlight: hl,
|
Highlight: hl,
|
||||||
message: e.msg,
|
Message: e.msg,
|
||||||
})
|
})
|
||||||
|
|
||||||
var derr *DecodeError
|
var derr *DecodeError
|
||||||
@@ -212,12 +213,12 @@ func ExampleDecodeError() {
|
|||||||
|
|
||||||
fmt.Println(err)
|
fmt.Println(err)
|
||||||
|
|
||||||
//nolint:errorlint
|
var derr *DecodeError
|
||||||
de := err.(*DecodeError)
|
if errors.As(err, &derr) {
|
||||||
fmt.Println(de.String())
|
fmt.Println(derr.String())
|
||||||
|
row, col := derr.Position()
|
||||||
row, col := de.Position()
|
fmt.Println("error occurred at row", row, "column", col)
|
||||||
fmt.Println("error occurred at row", row, "column", col)
|
}
|
||||||
// Output:
|
// Output:
|
||||||
// toml: number must have at least one digit between underscores
|
// toml: number must have at least one digit between underscores
|
||||||
// 1| name = 123__456
|
// 1| name = 123__456
|
||||||
|
|||||||
@@ -0,0 +1,37 @@
|
|||||||
|
package toml_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
type customInt int
|
||||||
|
|
||||||
|
func (i *customInt) UnmarshalText(b []byte) error {
|
||||||
|
x, err := strconv.ParseInt(string(b), 10, 32)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*i = customInt(x * 100)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type doc struct {
|
||||||
|
Value customInt
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleUnmarshal_textUnmarshal() {
|
||||||
|
var x doc
|
||||||
|
|
||||||
|
data := []byte(`value = "42"`)
|
||||||
|
err := toml.Unmarshal(data, &x)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
fmt.Println(x)
|
||||||
|
// Output:
|
||||||
|
// {4200}
|
||||||
|
}
|
||||||
+8
-1
@@ -7,13 +7,20 @@ import (
|
|||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestFastSimple(t *testing.T) {
|
func TestFastSimpleInt(t *testing.T) {
|
||||||
m := map[string]int64{}
|
m := map[string]int64{}
|
||||||
err := toml.Unmarshal([]byte(`a = 42`), &m)
|
err := toml.Unmarshal([]byte(`a = 42`), &m)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Equal(t, map[string]int64{"a": 42}, m)
|
require.Equal(t, map[string]int64{"a": 42}, m)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestFastSimpleFloat(t *testing.T) {
|
||||||
|
m := map[string]float64{}
|
||||||
|
err := toml.Unmarshal([]byte("a = 42\nb = 1.1\nc = 12341234123412341234123412341234"), &m)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]float64{"a": 42, "b": 1.1, "c": 1.2341234123412342e+31}, m)
|
||||||
|
}
|
||||||
|
|
||||||
func TestFastSimpleString(t *testing.T) {
|
func TestFastSimpleString(t *testing.T) {
|
||||||
m := map[string]string{}
|
m := map[string]string{}
|
||||||
err := toml.Unmarshal([]byte(`a = "hello"`), &m)
|
err := toml.Unmarshal([]byte(`a = "hello"`), &m)
|
||||||
|
|||||||
@@ -0,0 +1,56 @@
|
|||||||
|
//go:build go1.18 || go1.19 || go1.20
|
||||||
|
// +build go1.18 go1.19 go1.20
|
||||||
|
|
||||||
|
package toml_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func FuzzUnmarshal(f *testing.F) {
|
||||||
|
file, err := ioutil.ReadFile("benchmark/benchmark.toml")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
f.Add(file)
|
||||||
|
|
||||||
|
f.Fuzz(func(t *testing.T, b []byte) {
|
||||||
|
if strings.Contains(string(b), "nan") {
|
||||||
|
// Current limitation of testify.
|
||||||
|
// https://github.com/stretchr/testify/issues/624
|
||||||
|
t.Skip("can't compare NaNs")
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Log("INITIAL DOCUMENT ===========================")
|
||||||
|
t.Log(string(b))
|
||||||
|
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal(b, &v)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Log("DECODED VALUE ===========================")
|
||||||
|
t.Logf("%#+v", v)
|
||||||
|
|
||||||
|
encoded, err := toml.Marshal(v)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("cannot marshal unmarshaled document: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Log("ENCODED DOCUMENT ===========================")
|
||||||
|
t.Log(string(encoded))
|
||||||
|
|
||||||
|
var v2 interface{}
|
||||||
|
err = toml.Unmarshal(encoded, &v2)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed round trip: %s", err)
|
||||||
|
}
|
||||||
|
require.Equal(t, v, v2)
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -2,5 +2,4 @@ module github.com/pelletier/go-toml/v2
|
|||||||
|
|
||||||
go 1.16
|
go 1.16
|
||||||
|
|
||||||
// latest (v1.7.0) doesn't have the fix for time.Time
|
require github.com/stretchr/testify v1.8.3
|
||||||
require github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942
|
|
||||||
|
|||||||
@@ -1,11 +1,17 @@
|
|||||||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942 h1:t0lM6y/M5IiUZyvbBTcngso8SZEZICH7is9B6g/obVU=
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
github.com/stretchr/testify v1.7.1-0.20210427113832-6241f9ab9942/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
|
github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
|
||||||
|
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|||||||
@@ -1,145 +0,0 @@
|
|||||||
package ast
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"unsafe"
|
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Iterator starts uninitialized, you need to call Next() first.
|
|
||||||
//
|
|
||||||
// For example:
|
|
||||||
//
|
|
||||||
// it := n.Children()
|
|
||||||
// for it.Next() {
|
|
||||||
// it.Node()
|
|
||||||
// }
|
|
||||||
type Iterator struct {
|
|
||||||
started bool
|
|
||||||
node *Node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Next moves the iterator forward and returns true if points to a node, false
|
|
||||||
// otherwise.
|
|
||||||
func (c *Iterator) Next() bool {
|
|
||||||
if !c.started {
|
|
||||||
c.started = true
|
|
||||||
} else if c.node.Valid() {
|
|
||||||
c.node = c.node.Next()
|
|
||||||
}
|
|
||||||
return c.node.Valid()
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsLast returns true if the current node of the iterator is the last one.
|
|
||||||
// Subsequent call to Next() will return false.
|
|
||||||
func (c *Iterator) IsLast() bool {
|
|
||||||
return c.node.next == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// Node returns a copy of the node pointed at by the iterator.
|
|
||||||
func (c *Iterator) Node() *Node {
|
|
||||||
return c.node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Root contains a full AST.
|
|
||||||
//
|
|
||||||
// It is immutable once constructed with Builder.
|
|
||||||
type Root struct {
|
|
||||||
nodes []Node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Iterator over the top level nodes.
|
|
||||||
func (r *Root) Iterator() Iterator {
|
|
||||||
it := Iterator{}
|
|
||||||
if len(r.nodes) > 0 {
|
|
||||||
it.node = &r.nodes[0]
|
|
||||||
}
|
|
||||||
return it
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Root) at(idx Reference) *Node {
|
|
||||||
return &r.nodes[idx]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Arrays have one child per element in the array.
|
|
||||||
// InlineTables have one child per key-value pair in the table.
|
|
||||||
// KeyValues have at least two children. The first one is the value. The
|
|
||||||
// rest make a potentially dotted key.
|
|
||||||
// Table and Array table have one child per element of the key they
|
|
||||||
// represent (same as KeyValue, but without the last node being the value).
|
|
||||||
// children []Node
|
|
||||||
type Node struct {
|
|
||||||
Kind Kind
|
|
||||||
Raw Range // Raw bytes from the input.
|
|
||||||
Data []byte // Node value (could be either allocated or referencing the input).
|
|
||||||
|
|
||||||
// References to other nodes, as offsets in the backing array from this
|
|
||||||
// node. References can go backward, so those can be negative.
|
|
||||||
next int // 0 if last element
|
|
||||||
child int // 0 if no child
|
|
||||||
}
|
|
||||||
|
|
||||||
type Range struct {
|
|
||||||
Offset uint32
|
|
||||||
Length uint32
|
|
||||||
}
|
|
||||||
|
|
||||||
// Next returns a copy of the next node, or an invalid Node if there is no
|
|
||||||
// next node.
|
|
||||||
func (n *Node) Next() *Node {
|
|
||||||
if n.next == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
ptr := unsafe.Pointer(n)
|
|
||||||
size := unsafe.Sizeof(Node{})
|
|
||||||
return (*Node)(danger.Stride(ptr, size, n.next))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Child returns a copy of the first child node of this node. Other children
|
|
||||||
// can be accessed calling Next on the first child.
|
|
||||||
// Returns an invalid Node if there is none.
|
|
||||||
func (n *Node) Child() *Node {
|
|
||||||
if n.child == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
ptr := unsafe.Pointer(n)
|
|
||||||
size := unsafe.Sizeof(Node{})
|
|
||||||
return (*Node)(danger.Stride(ptr, size, n.child))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Valid returns true if the node's kind is set (not to Invalid).
|
|
||||||
func (n *Node) Valid() bool {
|
|
||||||
return n != nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Key returns the child nodes making the Key on a supported node. Panics
|
|
||||||
// otherwise.
|
|
||||||
// They are guaranteed to be all be of the Kind Key. A simple key would return
|
|
||||||
// just one element.
|
|
||||||
func (n *Node) Key() Iterator {
|
|
||||||
switch n.Kind {
|
|
||||||
case KeyValue:
|
|
||||||
value := n.Child()
|
|
||||||
if !value.Valid() {
|
|
||||||
panic(fmt.Errorf("KeyValue should have at least two children"))
|
|
||||||
}
|
|
||||||
return Iterator{node: value.Next()}
|
|
||||||
case Table, ArrayTable:
|
|
||||||
return Iterator{node: n.Child()}
|
|
||||||
default:
|
|
||||||
panic(fmt.Errorf("Key() is not supported on a %s", n.Kind))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Value returns a pointer to the value node of a KeyValue.
|
|
||||||
// Guaranteed to be non-nil.
|
|
||||||
// Panics if not called on a KeyValue node, or if the Children are malformed.
|
|
||||||
func (n *Node) Value() *Node {
|
|
||||||
return n.Child()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Children returns an iterator over a node's children.
|
|
||||||
func (n *Node) Children() Iterator {
|
|
||||||
return Iterator{node: n.Child()}
|
|
||||||
}
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
package ast
|
|
||||||
|
|
||||||
type Reference int
|
|
||||||
|
|
||||||
const InvalidReference Reference = -1
|
|
||||||
|
|
||||||
func (r Reference) Valid() bool {
|
|
||||||
return r != InvalidReference
|
|
||||||
}
|
|
||||||
|
|
||||||
type Builder struct {
|
|
||||||
tree Root
|
|
||||||
lastIdx int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Tree() *Root {
|
|
||||||
return &b.tree
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) NodeAt(ref Reference) *Node {
|
|
||||||
return b.tree.at(ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Reset() {
|
|
||||||
b.tree.nodes = b.tree.nodes[:0]
|
|
||||||
b.lastIdx = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Push(n Node) Reference {
|
|
||||||
b.lastIdx = len(b.tree.nodes)
|
|
||||||
b.tree.nodes = append(b.tree.nodes, n)
|
|
||||||
return Reference(b.lastIdx)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) PushAndChain(n Node) Reference {
|
|
||||||
newIdx := len(b.tree.nodes)
|
|
||||||
b.tree.nodes = append(b.tree.nodes, n)
|
|
||||||
if b.lastIdx >= 0 {
|
|
||||||
b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
|
|
||||||
}
|
|
||||||
b.lastIdx = newIdx
|
|
||||||
return Reference(b.lastIdx)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) AttachChild(parent Reference, child Reference) {
|
|
||||||
b.tree.nodes[parent].child = int(child) - int(parent)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Chain(from Reference, to Reference) {
|
|
||||||
b.tree.nodes[from].next = int(to) - int(from)
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
package characters
|
||||||
|
|
||||||
|
var invalidAsciiTable = [256]bool{
|
||||||
|
0x00: true,
|
||||||
|
0x01: true,
|
||||||
|
0x02: true,
|
||||||
|
0x03: true,
|
||||||
|
0x04: true,
|
||||||
|
0x05: true,
|
||||||
|
0x06: true,
|
||||||
|
0x07: true,
|
||||||
|
0x08: true,
|
||||||
|
// 0x09 TAB
|
||||||
|
// 0x0A LF
|
||||||
|
0x0B: true,
|
||||||
|
0x0C: true,
|
||||||
|
// 0x0D CR
|
||||||
|
0x0E: true,
|
||||||
|
0x0F: true,
|
||||||
|
0x10: true,
|
||||||
|
0x11: true,
|
||||||
|
0x12: true,
|
||||||
|
0x13: true,
|
||||||
|
0x14: true,
|
||||||
|
0x15: true,
|
||||||
|
0x16: true,
|
||||||
|
0x17: true,
|
||||||
|
0x18: true,
|
||||||
|
0x19: true,
|
||||||
|
0x1A: true,
|
||||||
|
0x1B: true,
|
||||||
|
0x1C: true,
|
||||||
|
0x1D: true,
|
||||||
|
0x1E: true,
|
||||||
|
0x1F: true,
|
||||||
|
// 0x20 - 0x7E Printable ASCII characters
|
||||||
|
0x7F: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func InvalidAscii(b byte) bool {
|
||||||
|
return invalidAsciiTable[b]
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
package toml
|
package characters
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
@@ -32,7 +32,7 @@ func (u utf8Err) Zero() bool {
|
|||||||
// 0x9 => tab, ok
|
// 0x9 => tab, ok
|
||||||
// 0xA - 0x1F => invalid
|
// 0xA - 0x1F => invalid
|
||||||
// 0x7F => invalid
|
// 0x7F => invalid
|
||||||
func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
func Utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
||||||
// Fast path. Check for and skip 8 bytes of ASCII characters per iteration.
|
// Fast path. Check for and skip 8 bytes of ASCII characters per iteration.
|
||||||
offset := 0
|
offset := 0
|
||||||
for len(p) >= 8 {
|
for len(p) >= 8 {
|
||||||
@@ -48,7 +48,7 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for i, b := range p[:8] {
|
for i, b := range p[:8] {
|
||||||
if invalidAscii(b) {
|
if InvalidAscii(b) {
|
||||||
err.Index = offset + i
|
err.Index = offset + i
|
||||||
err.Size = 1
|
err.Size = 1
|
||||||
return
|
return
|
||||||
@@ -62,7 +62,7 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||||||
for i := 0; i < n; {
|
for i := 0; i < n; {
|
||||||
pi := p[i]
|
pi := p[i]
|
||||||
if pi < utf8.RuneSelf {
|
if pi < utf8.RuneSelf {
|
||||||
if invalidAscii(pi) {
|
if InvalidAscii(pi) {
|
||||||
err.Index = offset + i
|
err.Index = offset + i
|
||||||
err.Size = 1
|
err.Size = 1
|
||||||
return
|
return
|
||||||
@@ -106,11 +106,11 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Return the size of the next rune if valid, 0 otherwise.
|
// Return the size of the next rune if valid, 0 otherwise.
|
||||||
func utf8ValidNext(p []byte) int {
|
func Utf8ValidNext(p []byte) int {
|
||||||
c := p[0]
|
c := p[0]
|
||||||
|
|
||||||
if c < utf8.RuneSelf {
|
if c < utf8.RuneSelf {
|
||||||
if invalidAscii(c) {
|
if InvalidAscii(c) {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
return 1
|
return 1
|
||||||
@@ -140,47 +140,6 @@ func utf8ValidNext(p []byte) int {
|
|||||||
return size
|
return size
|
||||||
}
|
}
|
||||||
|
|
||||||
var invalidAsciiTable = [256]bool{
|
|
||||||
0x00: true,
|
|
||||||
0x01: true,
|
|
||||||
0x02: true,
|
|
||||||
0x03: true,
|
|
||||||
0x04: true,
|
|
||||||
0x05: true,
|
|
||||||
0x06: true,
|
|
||||||
0x07: true,
|
|
||||||
0x08: true,
|
|
||||||
// 0x09 TAB
|
|
||||||
// 0x0A LF
|
|
||||||
0x0B: true,
|
|
||||||
0x0C: true,
|
|
||||||
// 0x0D CR
|
|
||||||
0x0E: true,
|
|
||||||
0x0F: true,
|
|
||||||
0x10: true,
|
|
||||||
0x11: true,
|
|
||||||
0x12: true,
|
|
||||||
0x13: true,
|
|
||||||
0x14: true,
|
|
||||||
0x15: true,
|
|
||||||
0x16: true,
|
|
||||||
0x17: true,
|
|
||||||
0x18: true,
|
|
||||||
0x19: true,
|
|
||||||
0x1A: true,
|
|
||||||
0x1B: true,
|
|
||||||
0x1C: true,
|
|
||||||
0x1D: true,
|
|
||||||
0x1E: true,
|
|
||||||
0x1F: true,
|
|
||||||
// 0x20 - 0x7E Printable ASCII characters
|
|
||||||
0x7F: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
func invalidAscii(b byte) bool {
|
|
||||||
return invalidAsciiTable[b]
|
|
||||||
}
|
|
||||||
|
|
||||||
// acceptRange gives the range of valid values for the second byte in a UTF-8
|
// acceptRange gives the range of valid values for the second byte in a UTF-8
|
||||||
// sequence.
|
// sequence.
|
||||||
type acceptRange struct {
|
type acceptRange struct {
|
||||||
@@ -0,0 +1,88 @@
|
|||||||
|
package cli
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ConvertFn func(r io.Reader, w io.Writer) error
|
||||||
|
|
||||||
|
type Program struct {
|
||||||
|
Usage string
|
||||||
|
Fn ConvertFn
|
||||||
|
// Inplace allows the command to take more than one file as argument and
|
||||||
|
// perform conversion in place on each provided file.
|
||||||
|
Inplace bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Program) Execute() {
|
||||||
|
flag.Usage = func() { fmt.Fprintf(os.Stderr, p.Usage) }
|
||||||
|
flag.Parse()
|
||||||
|
os.Exit(p.main(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Program) main(files []string, input io.Reader, output, error io.Writer) int {
|
||||||
|
err := p.run(files, input, output)
|
||||||
|
if err != nil {
|
||||||
|
|
||||||
|
var derr *toml.DecodeError
|
||||||
|
if errors.As(err, &derr) {
|
||||||
|
fmt.Fprintln(error, derr.String())
|
||||||
|
row, col := derr.Position()
|
||||||
|
fmt.Fprintln(error, "error occurred at row", row, "column", col)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintln(error, err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Program) run(files []string, input io.Reader, output io.Writer) error {
|
||||||
|
if len(files) > 0 {
|
||||||
|
if p.Inplace {
|
||||||
|
return p.runAllFilesInPlace(files)
|
||||||
|
}
|
||||||
|
f, err := os.Open(files[0])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
input = f
|
||||||
|
}
|
||||||
|
return p.Fn(input, output)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Program) runAllFilesInPlace(files []string) error {
|
||||||
|
for _, path := range files {
|
||||||
|
err := p.runFileInPlace(path)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Program) runFileInPlace(path string) error {
|
||||||
|
in, err := ioutil.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
out := new(bytes.Buffer)
|
||||||
|
|
||||||
|
err = p.Fn(bytes.NewReader(in), out)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return ioutil.WriteFile(path, out.Bytes(), 0600)
|
||||||
|
}
|
||||||
@@ -0,0 +1,172 @@
|
|||||||
|
package cli
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func processMain(args []string, input io.Reader, stdout, stderr io.Writer, f ConvertFn) int {
|
||||||
|
p := Program{Fn: f}
|
||||||
|
return p.main(args, input, stdout, stderr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainStdin(t *testing.T) {
|
||||||
|
stdout := new(bytes.Buffer)
|
||||||
|
stderr := new(bytes.Buffer)
|
||||||
|
input := strings.NewReader("this is the input")
|
||||||
|
|
||||||
|
exit := processMain([]string{}, input, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, 0, exit)
|
||||||
|
assert.Empty(t, stdout.String())
|
||||||
|
assert.Empty(t, stderr.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainStdinErr(t *testing.T) {
|
||||||
|
stdout := new(bytes.Buffer)
|
||||||
|
stderr := new(bytes.Buffer)
|
||||||
|
input := strings.NewReader("this is the input")
|
||||||
|
|
||||||
|
exit := processMain([]string{}, input, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
||||||
|
return fmt.Errorf("something bad")
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, -1, exit)
|
||||||
|
assert.Empty(t, stdout.String())
|
||||||
|
assert.NotEmpty(t, stderr.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainStdinDecodeErr(t *testing.T) {
|
||||||
|
stdout := new(bytes.Buffer)
|
||||||
|
stderr := new(bytes.Buffer)
|
||||||
|
input := strings.NewReader("this is the input")
|
||||||
|
|
||||||
|
exit := processMain([]string{}, input, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
||||||
|
var v interface{}
|
||||||
|
return toml.Unmarshal([]byte(`qwe = 001`), &v)
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, -1, exit)
|
||||||
|
assert.Empty(t, stdout.String())
|
||||||
|
assert.Contains(t, stderr.String(), "error occurred at")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainFileExists(t *testing.T) {
|
||||||
|
tmpfile, err := ioutil.TempFile("", "example")
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer os.Remove(tmpfile.Name())
|
||||||
|
_, err = tmpfile.Write([]byte(`some data`))
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
stdout := new(bytes.Buffer)
|
||||||
|
stderr := new(bytes.Buffer)
|
||||||
|
|
||||||
|
exit := processMain([]string{tmpfile.Name()}, nil, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, 0, exit)
|
||||||
|
assert.Empty(t, stdout.String())
|
||||||
|
assert.Empty(t, stderr.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainFileDoesNotExist(t *testing.T) {
|
||||||
|
stdout := new(bytes.Buffer)
|
||||||
|
stderr := new(bytes.Buffer)
|
||||||
|
|
||||||
|
exit := processMain([]string{"/lets/hope/this/does/not/exist"}, nil, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, -1, exit)
|
||||||
|
assert.Empty(t, stdout.String())
|
||||||
|
assert.NotEmpty(t, stderr.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainFilesInPlace(t *testing.T) {
|
||||||
|
dir, err := ioutil.TempDir("", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer os.RemoveAll(dir)
|
||||||
|
|
||||||
|
path1 := path.Join(dir, "file1")
|
||||||
|
path2 := path.Join(dir, "file2")
|
||||||
|
|
||||||
|
err = ioutil.WriteFile(path1, []byte("content 1"), 0600)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ioutil.WriteFile(path2, []byte("content 2"), 0600)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
p := Program{
|
||||||
|
Fn: dummyFileFn,
|
||||||
|
Inplace: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
exit := p.main([]string{path1, path2}, os.Stdin, os.Stdout, os.Stderr)
|
||||||
|
|
||||||
|
require.Equal(t, 0, exit)
|
||||||
|
|
||||||
|
v1, err := ioutil.ReadFile(path1)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "1", string(v1))
|
||||||
|
|
||||||
|
v2, err := ioutil.ReadFile(path2)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "2", string(v2))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainFilesInPlaceErrRead(t *testing.T) {
|
||||||
|
p := Program{
|
||||||
|
Fn: dummyFileFn,
|
||||||
|
Inplace: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
exit := p.main([]string{"/this/path/is/invalid"}, os.Stdin, os.Stdout, os.Stderr)
|
||||||
|
|
||||||
|
require.Equal(t, -1, exit)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessMainFilesInPlaceFailFn(t *testing.T) {
|
||||||
|
dir, err := ioutil.TempDir("", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer os.RemoveAll(dir)
|
||||||
|
|
||||||
|
path1 := path.Join(dir, "file1")
|
||||||
|
|
||||||
|
err = ioutil.WriteFile(path1, []byte("content 1"), 0600)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
p := Program{
|
||||||
|
Fn: func(io.Reader, io.Writer) error { return fmt.Errorf("oh no") },
|
||||||
|
Inplace: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
exit := p.main([]string{path1}, os.Stdin, os.Stdout, os.Stderr)
|
||||||
|
|
||||||
|
require.Equal(t, -1, exit)
|
||||||
|
|
||||||
|
v1, err := ioutil.ReadFile(path1)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "content 1", string(v1))
|
||||||
|
}
|
||||||
|
|
||||||
|
func dummyFileFn(r io.Reader, w io.Writer) error {
|
||||||
|
b, err := ioutil.ReadAll(r)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
v := strings.SplitN(string(b), " ", 2)[1]
|
||||||
|
_, err = w.Write([]byte(v))
|
||||||
|
return err
|
||||||
|
}
|
||||||
@@ -67,6 +67,7 @@ func TestDocMarshal(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
marshalTestToml := `title = 'TOML Marshal Testing'
|
marshalTestToml := `title = 'TOML Marshal Testing'
|
||||||
|
|
||||||
[basic_lists]
|
[basic_lists]
|
||||||
floats = [12.3, 45.6, 78.9]
|
floats = [12.3, 45.6, 78.9]
|
||||||
bools = [true, false, true]
|
bools = [true, false, true]
|
||||||
@@ -89,7 +90,6 @@ name = 'Second'
|
|||||||
[subdoc.first]
|
[subdoc.first]
|
||||||
name = 'First'
|
name = 'First'
|
||||||
|
|
||||||
|
|
||||||
[basic]
|
[basic]
|
||||||
uint = 5001
|
uint = 5001
|
||||||
bool = true
|
bool = true
|
||||||
@@ -101,9 +101,9 @@ date = 1979-05-27T07:32:00Z
|
|||||||
|
|
||||||
[[subdoclist]]
|
[[subdoclist]]
|
||||||
name = 'List.First'
|
name = 'List.First'
|
||||||
|
|
||||||
[[subdoclist]]
|
[[subdoclist]]
|
||||||
name = 'List.Second'
|
name = 'List.Second'
|
||||||
|
|
||||||
`
|
`
|
||||||
|
|
||||||
result, err := toml.Marshal(docData)
|
result, err := toml.Marshal(docData)
|
||||||
@@ -117,14 +117,15 @@ func TestBasicMarshalQuotedKey(t *testing.T) {
|
|||||||
|
|
||||||
expected := `'Z.string-àéù' = 'Hello'
|
expected := `'Z.string-àéù' = 'Hello'
|
||||||
'Yfloat-𝟘' = 3.5
|
'Yfloat-𝟘' = 3.5
|
||||||
|
|
||||||
['Xsubdoc-àéù']
|
['Xsubdoc-àéù']
|
||||||
String2 = 'One'
|
String2 = 'One'
|
||||||
|
|
||||||
[['W.sublist-𝟘']]
|
[['W.sublist-𝟘']]
|
||||||
String2 = 'Two'
|
String2 = 'Two'
|
||||||
|
|
||||||
[['W.sublist-𝟘']]
|
[['W.sublist-𝟘']]
|
||||||
String2 = 'Three'
|
String2 = 'Three'
|
||||||
|
|
||||||
`
|
`
|
||||||
|
|
||||||
require.Equal(t, string(expected), string(result))
|
require.Equal(t, string(expected), string(result))
|
||||||
@@ -159,8 +160,8 @@ bool = false
|
|||||||
int = 0
|
int = 0
|
||||||
string = ''
|
string = ''
|
||||||
stringlist = []
|
stringlist = []
|
||||||
[map]
|
|
||||||
|
|
||||||
|
[map]
|
||||||
`
|
`
|
||||||
|
|
||||||
require.Equal(t, string(expected), string(result))
|
require.Equal(t, string(expected), string(result))
|
||||||
|
|||||||
@@ -151,6 +151,7 @@ type quotedKeyMarshalTestStruct struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
|
var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
|
||||||
String: "Hello",
|
String: "Hello",
|
||||||
@@ -160,6 +161,7 @@ var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5
|
var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5
|
||||||
"Z.string-àéù" = "Hello"
|
"Z.string-àéù" = "Hello"
|
||||||
@@ -272,6 +274,7 @@ var docData = testDoc{
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var mapTestDoc = testMapDoc{
|
var mapTestDoc = testMapDoc{
|
||||||
Title: "TOML Marshal Testing",
|
Title: "TOML Marshal Testing",
|
||||||
@@ -457,35 +460,6 @@ func TestEmptytomlUnmarshal(t *testing.T) {
|
|||||||
assert.Equal(t, emptyTestData, result)
|
assert.Equal(t, emptyTestData, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestEmptyUnmarshalOmit(t *testing.T) {
|
|
||||||
t.Skipf("Have not figured yet if omitempty is a good idea")
|
|
||||||
|
|
||||||
type emptyMarshalTestStruct2 struct {
|
|
||||||
Title string `toml:"title"`
|
|
||||||
Bool bool `toml:"bool,omitempty"`
|
|
||||||
Int int `toml:"int, omitempty"`
|
|
||||||
String string `toml:"string,omitempty "`
|
|
||||||
StringList []string `toml:"stringlist,omitempty"`
|
|
||||||
Ptr *basicMarshalTestStruct `toml:"ptr,omitempty"`
|
|
||||||
Map map[string]string `toml:"map,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
emptyTestData2 := emptyMarshalTestStruct2{
|
|
||||||
Title: "Placeholder",
|
|
||||||
Bool: false,
|
|
||||||
Int: 0,
|
|
||||||
String: "",
|
|
||||||
StringList: []string{},
|
|
||||||
Ptr: nil,
|
|
||||||
Map: map[string]string{},
|
|
||||||
}
|
|
||||||
|
|
||||||
result := emptyMarshalTestStruct2{}
|
|
||||||
err := toml.Unmarshal(emptyTestToml, &result)
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, emptyTestData2, result)
|
|
||||||
}
|
|
||||||
|
|
||||||
type pointerMarshalTestStruct struct {
|
type pointerMarshalTestStruct struct {
|
||||||
Str *string
|
Str *string
|
||||||
List *[]string
|
List *[]string
|
||||||
@@ -588,10 +562,12 @@ func (c customMarshaler) MarshalTOML() ([]byte, error) {
|
|||||||
var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
|
var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var customMarshalerToml = []byte(`Sally Fields`)
|
var customMarshalerToml = []byte(`Sally Fields`)
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var nestedCustomMarshalerData = customMarshalerParent{
|
var nestedCustomMarshalerData = customMarshalerParent{
|
||||||
Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
|
Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
|
||||||
@@ -599,6 +575,7 @@ var nestedCustomMarshalerData = customMarshalerParent{
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
|
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
|
||||||
me = "Maiku Suteda"
|
me = "Maiku Suteda"
|
||||||
@@ -640,6 +617,7 @@ func TestUnmarshalTextMarshaler(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once type and methods are used by a test
|
// TODO: Remove nolint once type and methods are used by a test
|
||||||
|
//
|
||||||
//nolint:unused
|
//nolint:unused
|
||||||
type precedentMarshaler struct {
|
type precedentMarshaler struct {
|
||||||
FirstName string
|
FirstName string
|
||||||
@@ -658,6 +636,7 @@ func (m precedentMarshaler) MarshalTOML() ([]byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once type and method are used by a test
|
// TODO: Remove nolint once type and method are used by a test
|
||||||
|
//
|
||||||
//nolint:unused
|
//nolint:unused
|
||||||
type customPointerMarshaler struct {
|
type customPointerMarshaler struct {
|
||||||
FirstName string
|
FirstName string
|
||||||
@@ -670,6 +649,7 @@ func (m *customPointerMarshaler) MarshalTOML() ([]byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once type and method are used by a test
|
// TODO: Remove nolint once type and method are used by a test
|
||||||
|
//
|
||||||
//nolint:unused
|
//nolint:unused
|
||||||
type textPointerMarshaler struct {
|
type textPointerMarshaler struct {
|
||||||
FirstName string
|
FirstName string
|
||||||
@@ -682,6 +662,7 @@ func (m *textPointerMarshaler) MarshalText() ([]byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var commentTestToml = []byte(`
|
var commentTestToml = []byte(`
|
||||||
# it's a comment on type
|
# it's a comment on type
|
||||||
@@ -719,6 +700,7 @@ type mapsTestStruct struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var mapsTestData = mapsTestStruct{
|
var mapsTestData = mapsTestStruct{
|
||||||
Simple: map[string]string{
|
Simple: map[string]string{
|
||||||
@@ -742,6 +724,7 @@ var mapsTestData = mapsTestStruct{
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var mapsTestToml = []byte(`
|
var mapsTestToml = []byte(`
|
||||||
[Other]
|
[Other]
|
||||||
@@ -764,6 +747,7 @@ var mapsTestToml = []byte(`
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
// TODO: Remove nolint once type is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused
|
//nolint:deadcode,unused
|
||||||
type structArrayNoTag struct {
|
type structArrayNoTag struct {
|
||||||
A struct {
|
A struct {
|
||||||
@@ -773,6 +757,7 @@ type structArrayNoTag struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var customTagTestToml = []byte(`
|
var customTagTestToml = []byte(`
|
||||||
[postgres]
|
[postgres]
|
||||||
@@ -787,6 +772,7 @@ var customTagTestToml = []byte(`
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var customCommentTagTestToml = []byte(`
|
var customCommentTagTestToml = []byte(`
|
||||||
# db connection
|
# db connection
|
||||||
@@ -800,6 +786,7 @@ var customCommentTagTestToml = []byte(`
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var customCommentedTagTestToml = []byte(`
|
var customCommentedTagTestToml = []byte(`
|
||||||
[postgres]
|
[postgres]
|
||||||
@@ -854,6 +841,7 @@ func TestUnmarshalTabInStringAndQuotedKey(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var customMultilineTagTestToml = []byte(`int_slice = [
|
var customMultilineTagTestToml = []byte(`int_slice = [
|
||||||
1,
|
1,
|
||||||
@@ -863,6 +851,7 @@ var customMultilineTagTestToml = []byte(`int_slice = [
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var testDocBasicToml = []byte(`
|
var testDocBasicToml = []byte(`
|
||||||
[document]
|
[document]
|
||||||
@@ -875,12 +864,14 @@ var testDocBasicToml = []byte(`
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
// TODO: Remove nolint once type is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode
|
//nolint:deadcode
|
||||||
type testDocCustomTag struct {
|
type testDocCustomTag struct {
|
||||||
Doc testDocBasicsCustomTag `file:"document"`
|
Doc testDocBasicsCustomTag `file:"document"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
// TODO: Remove nolint once type is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode
|
//nolint:deadcode
|
||||||
type testDocBasicsCustomTag struct {
|
type testDocBasicsCustomTag struct {
|
||||||
Bool bool `file:"bool_val"`
|
Bool bool `file:"bool_val"`
|
||||||
@@ -893,6 +884,7 @@ type testDocBasicsCustomTag struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,varcheck
|
//nolint:deadcode,varcheck
|
||||||
var testDocCustomTagData = testDocCustomTag{
|
var testDocCustomTagData = testDocCustomTag{
|
||||||
Doc: testDocBasicsCustomTag{
|
Doc: testDocBasicsCustomTag{
|
||||||
@@ -956,6 +948,29 @@ func TestUnmarshalMapWithTypedKey(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestUnmarshalTypeTableHeader(t *testing.T) {
|
||||||
|
testToml := []byte(`
|
||||||
|
[test]
|
||||||
|
a = 1
|
||||||
|
`)
|
||||||
|
|
||||||
|
type header string
|
||||||
|
var result map[header]map[string]int
|
||||||
|
err := toml.Unmarshal(testToml, &result)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("Received unexpected error: %s", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
expected := map[header]map[string]int{
|
||||||
|
"test": map[string]int{"a": 1},
|
||||||
|
}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(result, expected) {
|
||||||
|
t.Errorf("Bad unmarshal: expected %v, got %v", expected, result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestUnmarshalNonPointer(t *testing.T) {
|
func TestUnmarshalNonPointer(t *testing.T) {
|
||||||
a := 1
|
a := 1
|
||||||
err := toml.Unmarshal([]byte{}, a)
|
err := toml.Unmarshal([]byte{}, a)
|
||||||
@@ -972,6 +987,7 @@ func TestUnmarshalInvalidPointerKind(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused
|
//nolint:deadcode,unused
|
||||||
type testDuration struct {
|
type testDuration struct {
|
||||||
Nanosec time.Duration `toml:"nanosec"`
|
Nanosec time.Duration `toml:"nanosec"`
|
||||||
@@ -986,6 +1002,7 @@ type testDuration struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var testDurationToml = []byte(`
|
var testDurationToml = []byte(`
|
||||||
nanosec = "1ns"
|
nanosec = "1ns"
|
||||||
@@ -1000,6 +1017,7 @@ a_string = "15s"
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:deadcode,unused,varcheck
|
||||||
var testDurationToml2 = []byte(`a_string = "15s"
|
var testDurationToml2 = []byte(`a_string = "15s"
|
||||||
hour = "1h0m0s"
|
hour = "1h0m0s"
|
||||||
@@ -1013,6 +1031,7 @@ sec = "1s"
|
|||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
// TODO: Remove nolint once type is used by a test
|
||||||
|
//
|
||||||
//nolint:deadcode,unused
|
//nolint:deadcode,unused
|
||||||
type testBadDuration struct {
|
type testBadDuration struct {
|
||||||
Val time.Duration `toml:"val"`
|
Val time.Duration `toml:"val"`
|
||||||
@@ -1066,10 +1085,6 @@ func TestUnmarshalCheckConversionFloatInt(t *testing.T) {
|
|||||||
desc: "int",
|
desc: "int",
|
||||||
input: `I = 1e300`,
|
input: `I = 1e300`,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
desc: "float",
|
|
||||||
input: `F = 9223372036854775806`,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range testCases {
|
for _, test := range testCases {
|
||||||
@@ -1954,7 +1969,7 @@ func decoder(doc string) *toml.Decoder {
|
|||||||
|
|
||||||
func strictDecoder(doc string) *toml.Decoder {
|
func strictDecoder(doc string) *toml.Decoder {
|
||||||
d := decoder(doc)
|
d := decoder(doc)
|
||||||
d.SetStrict(true)
|
d.DisallowUnknownFields()
|
||||||
return d
|
return d
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,6 @@
|
|||||||
package tracker
|
package tracker
|
||||||
|
|
||||||
import (
|
import "github.com/pelletier/go-toml/v2/unstable"
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
)
|
|
||||||
|
|
||||||
// KeyTracker is a tracker that keeps track of the current Key as the AST is
|
// KeyTracker is a tracker that keeps track of the current Key as the AST is
|
||||||
// walked.
|
// walked.
|
||||||
@@ -11,19 +9,19 @@ type KeyTracker struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// UpdateTable sets the state of the tracker with the AST table node.
|
// UpdateTable sets the state of the tracker with the AST table node.
|
||||||
func (t *KeyTracker) UpdateTable(node *ast.Node) {
|
func (t *KeyTracker) UpdateTable(node *unstable.Node) {
|
||||||
t.reset()
|
t.reset()
|
||||||
t.Push(node)
|
t.Push(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
// UpdateArrayTable sets the state of the tracker with the AST array table node.
|
// UpdateArrayTable sets the state of the tracker with the AST array table node.
|
||||||
func (t *KeyTracker) UpdateArrayTable(node *ast.Node) {
|
func (t *KeyTracker) UpdateArrayTable(node *unstable.Node) {
|
||||||
t.reset()
|
t.reset()
|
||||||
t.Push(node)
|
t.Push(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Push the given key on the stack.
|
// Push the given key on the stack.
|
||||||
func (t *KeyTracker) Push(node *ast.Node) {
|
func (t *KeyTracker) Push(node *unstable.Node) {
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
t.k = append(t.k, string(it.Node().Data))
|
t.k = append(t.k, string(it.Node().Data))
|
||||||
@@ -31,7 +29,7 @@ func (t *KeyTracker) Push(node *ast.Node) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Pop key from stack.
|
// Pop key from stack.
|
||||||
func (t *KeyTracker) Pop(node *ast.Node) {
|
func (t *KeyTracker) Pop(node *unstable.Node) {
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
t.k = t.k[:len(t.k)-1]
|
t.k = t.k[:len(t.k)-1]
|
||||||
|
|||||||
+138
-86
@@ -3,8 +3,9 @@ package tracker
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
type keyKind uint8
|
type keyKind uint8
|
||||||
@@ -54,82 +55,125 @@ func (k keyKind) String() string {
|
|||||||
type SeenTracker struct {
|
type SeenTracker struct {
|
||||||
entries []entry
|
entries []entry
|
||||||
currentIdx int
|
currentIdx int
|
||||||
nextID int
|
}
|
||||||
|
|
||||||
|
var pool sync.Pool
|
||||||
|
|
||||||
|
func (s *SeenTracker) reset() {
|
||||||
|
// Always contains a root element at index 0.
|
||||||
|
s.currentIdx = 0
|
||||||
|
if len(s.entries) == 0 {
|
||||||
|
s.entries = make([]entry, 1, 2)
|
||||||
|
} else {
|
||||||
|
s.entries = s.entries[:1]
|
||||||
|
}
|
||||||
|
s.entries[0].child = -1
|
||||||
|
s.entries[0].next = -1
|
||||||
}
|
}
|
||||||
|
|
||||||
type entry struct {
|
type entry struct {
|
||||||
id int
|
// Use -1 to indicate no child or no sibling.
|
||||||
parent int
|
child int
|
||||||
|
next int
|
||||||
|
|
||||||
name []byte
|
name []byte
|
||||||
kind keyKind
|
kind keyKind
|
||||||
explicit bool
|
explicit bool
|
||||||
|
kv bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the index of the child of parentIdx with key k. Returns -1 if
|
||||||
|
// it does not exist.
|
||||||
|
func (s *SeenTracker) find(parentIdx int, k []byte) int {
|
||||||
|
for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next {
|
||||||
|
if bytes.Equal(s.entries[i].name, k) {
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remove all descendants of node at position idx.
|
// Remove all descendants of node at position idx.
|
||||||
func (s *SeenTracker) clear(idx int) {
|
func (s *SeenTracker) clear(idx int) {
|
||||||
p := s.entries[idx].id
|
if idx >= len(s.entries) {
|
||||||
rest := clear(p, s.entries[idx+1:])
|
return
|
||||||
s.entries = s.entries[:idx+1+len(rest)]
|
|
||||||
}
|
|
||||||
|
|
||||||
func clear(parentID int, entries []entry) []entry {
|
|
||||||
for i := 0; i < len(entries); {
|
|
||||||
if entries[i].parent == parentID {
|
|
||||||
id := entries[i].id
|
|
||||||
copy(entries[i:], entries[i+1:])
|
|
||||||
entries = entries[:len(entries)-1]
|
|
||||||
rest := clear(id, entries[i:])
|
|
||||||
entries = entries[:i+len(rest)]
|
|
||||||
} else {
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return entries
|
|
||||||
|
for i := s.entries[idx].child; i >= 0; {
|
||||||
|
next := s.entries[i].next
|
||||||
|
n := s.entries[0].next
|
||||||
|
s.entries[0].next = i
|
||||||
|
s.entries[i].next = n
|
||||||
|
s.entries[i].name = nil
|
||||||
|
s.clear(i)
|
||||||
|
i = next
|
||||||
|
}
|
||||||
|
|
||||||
|
s.entries[idx].child = -1
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) create(parentIdx int, name []byte, kind keyKind, explicit bool) int {
|
func (s *SeenTracker) create(parentIdx int, name []byte, kind keyKind, explicit bool, kv bool) int {
|
||||||
parentID := s.id(parentIdx)
|
e := entry{
|
||||||
|
child: -1,
|
||||||
|
next: s.entries[parentIdx].child,
|
||||||
|
|
||||||
idx := len(s.entries)
|
|
||||||
s.entries = append(s.entries, entry{
|
|
||||||
id: s.nextID,
|
|
||||||
parent: parentID,
|
|
||||||
name: name,
|
name: name,
|
||||||
kind: kind,
|
kind: kind,
|
||||||
explicit: explicit,
|
explicit: explicit,
|
||||||
})
|
kv: kv,
|
||||||
s.nextID++
|
}
|
||||||
|
var idx int
|
||||||
|
if s.entries[0].next >= 0 {
|
||||||
|
idx = s.entries[0].next
|
||||||
|
s.entries[0].next = s.entries[idx].next
|
||||||
|
s.entries[idx] = e
|
||||||
|
} else {
|
||||||
|
idx = len(s.entries)
|
||||||
|
s.entries = append(s.entries, e)
|
||||||
|
}
|
||||||
|
|
||||||
|
s.entries[parentIdx].child = idx
|
||||||
|
|
||||||
return idx
|
return idx
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *SeenTracker) setExplicitFlag(parentIdx int) {
|
||||||
|
for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next {
|
||||||
|
if s.entries[i].kv {
|
||||||
|
s.entries[i].explicit = true
|
||||||
|
s.entries[i].kv = false
|
||||||
|
}
|
||||||
|
s.setExplicitFlag(i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// CheckExpression takes a top-level node and checks that it does not contain
|
// CheckExpression takes a top-level node and checks that it does not contain
|
||||||
// keys that have been seen in previous calls, and validates that types are
|
// keys that have been seen in previous calls, and validates that types are
|
||||||
// consistent.
|
// consistent.
|
||||||
func (s *SeenTracker) CheckExpression(node *ast.Node) error {
|
func (s *SeenTracker) CheckExpression(node *unstable.Node) error {
|
||||||
if s.entries == nil {
|
if s.entries == nil {
|
||||||
// Skip ID = 0 to remove the confusion between nodes whose
|
s.reset()
|
||||||
// parent has id 0 and root nodes (parent id is 0 because it's
|
|
||||||
// the zero value).
|
|
||||||
s.nextID = 1
|
|
||||||
// Start unscoped, so idx is negative.
|
|
||||||
s.currentIdx = -1
|
|
||||||
}
|
}
|
||||||
switch node.Kind {
|
switch node.Kind {
|
||||||
case ast.KeyValue:
|
case unstable.KeyValue:
|
||||||
return s.checkKeyValue(s.currentIdx, node)
|
return s.checkKeyValue(node)
|
||||||
case ast.Table:
|
case unstable.Table:
|
||||||
return s.checkTable(node)
|
return s.checkTable(node)
|
||||||
case ast.ArrayTable:
|
case unstable.ArrayTable:
|
||||||
return s.checkArrayTable(node)
|
return s.checkArrayTable(node)
|
||||||
default:
|
default:
|
||||||
panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
|
panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkTable(node *ast.Node) error {
|
func (s *SeenTracker) checkTable(node *unstable.Node) error {
|
||||||
|
if s.currentIdx >= 0 {
|
||||||
|
s.setExplicitFlag(s.currentIdx)
|
||||||
|
}
|
||||||
|
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
|
|
||||||
parentIdx := -1
|
parentIdx := 0
|
||||||
|
|
||||||
// This code is duplicated in checkArrayTable. This is because factoring
|
// This code is duplicated in checkArrayTable. This is because factoring
|
||||||
// it in a function requires to copy the iterator, or allocate it to the
|
// it in a function requires to copy the iterator, or allocate it to the
|
||||||
@@ -144,7 +188,12 @@ func (s *SeenTracker) checkTable(node *ast.Node) error {
|
|||||||
idx := s.find(parentIdx, k)
|
idx := s.find(parentIdx, k)
|
||||||
|
|
||||||
if idx < 0 {
|
if idx < 0 {
|
||||||
idx = s.create(parentIdx, k, tableKind, false)
|
idx = s.create(parentIdx, k, tableKind, false, false)
|
||||||
|
} else {
|
||||||
|
entry := s.entries[idx]
|
||||||
|
if entry.kind == valueKind {
|
||||||
|
return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
parentIdx = idx
|
parentIdx = idx
|
||||||
}
|
}
|
||||||
@@ -162,7 +211,7 @@ func (s *SeenTracker) checkTable(node *ast.Node) error {
|
|||||||
}
|
}
|
||||||
s.entries[idx].explicit = true
|
s.entries[idx].explicit = true
|
||||||
} else {
|
} else {
|
||||||
idx = s.create(parentIdx, k, tableKind, true)
|
idx = s.create(parentIdx, k, tableKind, true, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
s.currentIdx = idx
|
s.currentIdx = idx
|
||||||
@@ -170,10 +219,14 @@ func (s *SeenTracker) checkTable(node *ast.Node) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
func (s *SeenTracker) checkArrayTable(node *unstable.Node) error {
|
||||||
|
if s.currentIdx >= 0 {
|
||||||
|
s.setExplicitFlag(s.currentIdx)
|
||||||
|
}
|
||||||
|
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
|
|
||||||
parentIdx := -1
|
parentIdx := 0
|
||||||
|
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
if it.IsLast() {
|
if it.IsLast() {
|
||||||
@@ -185,8 +238,14 @@ func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
|||||||
idx := s.find(parentIdx, k)
|
idx := s.find(parentIdx, k)
|
||||||
|
|
||||||
if idx < 0 {
|
if idx < 0 {
|
||||||
idx = s.create(parentIdx, k, tableKind, false)
|
idx = s.create(parentIdx, k, tableKind, false, false)
|
||||||
|
} else {
|
||||||
|
entry := s.entries[idx]
|
||||||
|
if entry.kind == valueKind {
|
||||||
|
return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
parentIdx = idx
|
parentIdx = idx
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -200,7 +259,7 @@ func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
|||||||
}
|
}
|
||||||
s.clear(idx)
|
s.clear(idx)
|
||||||
} else {
|
} else {
|
||||||
idx = s.create(parentIdx, k, arrayTableKind, true)
|
idx = s.create(parentIdx, k, arrayTableKind, true, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
s.currentIdx = idx
|
s.currentIdx = idx
|
||||||
@@ -208,7 +267,8 @@ func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkKeyValue(parentIdx int, node *ast.Node) error {
|
func (s *SeenTracker) checkKeyValue(node *unstable.Node) error {
|
||||||
|
parentIdx := s.currentIdx
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
|
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
@@ -217,7 +277,7 @@ func (s *SeenTracker) checkKeyValue(parentIdx int, node *ast.Node) error {
|
|||||||
idx := s.find(parentIdx, k)
|
idx := s.find(parentIdx, k)
|
||||||
|
|
||||||
if idx < 0 {
|
if idx < 0 {
|
||||||
idx = s.create(parentIdx, k, tableKind, false)
|
idx = s.create(parentIdx, k, tableKind, false, true)
|
||||||
} else {
|
} else {
|
||||||
entry := s.entries[idx]
|
entry := s.entries[idx]
|
||||||
if it.IsLast() {
|
if it.IsLast() {
|
||||||
@@ -237,68 +297,60 @@ func (s *SeenTracker) checkKeyValue(parentIdx int, node *ast.Node) error {
|
|||||||
value := node.Value()
|
value := node.Value()
|
||||||
|
|
||||||
switch value.Kind {
|
switch value.Kind {
|
||||||
case ast.InlineTable:
|
case unstable.InlineTable:
|
||||||
return s.checkInlineTable(parentIdx, value)
|
return s.checkInlineTable(value)
|
||||||
case ast.Array:
|
case unstable.Array:
|
||||||
return s.checkArray(parentIdx, value)
|
return s.checkArray(value)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkArray(parentIdx int, node *ast.Node) error {
|
func (s *SeenTracker) checkArray(node *unstable.Node) error {
|
||||||
set := false
|
|
||||||
it := node.Children()
|
it := node.Children()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
if set {
|
|
||||||
s.clear(parentIdx)
|
|
||||||
}
|
|
||||||
n := it.Node()
|
n := it.Node()
|
||||||
switch n.Kind {
|
switch n.Kind {
|
||||||
case ast.InlineTable:
|
case unstable.InlineTable:
|
||||||
err := s.checkInlineTable(parentIdx, n)
|
err := s.checkInlineTable(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
set = true
|
case unstable.Array:
|
||||||
case ast.Array:
|
err := s.checkArray(n)
|
||||||
err := s.checkArray(parentIdx, n)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
set = true
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkInlineTable(parentIdx int, node *ast.Node) error {
|
func (s *SeenTracker) checkInlineTable(node *unstable.Node) error {
|
||||||
|
if pool.New == nil {
|
||||||
|
pool.New = func() interface{} {
|
||||||
|
return &SeenTracker{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
s = pool.Get().(*SeenTracker)
|
||||||
|
s.reset()
|
||||||
|
|
||||||
it := node.Children()
|
it := node.Children()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
n := it.Node()
|
n := it.Node()
|
||||||
err := s.checkKeyValue(parentIdx, n)
|
err := s.checkKeyValue(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// As inline tables are self-contained, the tracker does not
|
||||||
|
// need to retain the details of what they contain. The
|
||||||
|
// keyValue element that creates the inline table is kept to
|
||||||
|
// mark the presence of the inline table and prevent
|
||||||
|
// redefinition of its keys: check* functions cannot walk into
|
||||||
|
// a value.
|
||||||
|
pool.Put(s)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) id(idx int) int {
|
|
||||||
if idx >= 0 {
|
|
||||||
return s.entries[idx].id
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *SeenTracker) find(parentIdx int, k []byte) int {
|
|
||||||
parentID := s.id(parentIdx)
|
|
||||||
|
|
||||||
for i := parentIdx + 1; i < len(s.entries); i++ {
|
|
||||||
if s.entries[i].parent == parentID && bytes.Equal(s.entries[i].name, k) {
|
|
||||||
return i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -0,0 +1,16 @@
|
|||||||
|
package tracker
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestEntrySize(t *testing.T) {
|
||||||
|
// Validate no regression on the size of entry{}. This is a critical bit for
|
||||||
|
// performance of unmarshaling documents. Should only be increased with care
|
||||||
|
// and a very good reason.
|
||||||
|
maxExpectedEntrySize := 48
|
||||||
|
require.LessOrEqual(t, int(unsafe.Sizeof(entry{})), maxExpectedEntrySize)
|
||||||
|
}
|
||||||
+4
-2
@@ -4,6 +4,8 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
// LocalDate represents a calendar day in no specific timezone.
|
// LocalDate represents a calendar day in no specific timezone.
|
||||||
@@ -75,7 +77,7 @@ func (d LocalTime) MarshalText() ([]byte, error) {
|
|||||||
func (d *LocalTime) UnmarshalText(b []byte) error {
|
func (d *LocalTime) UnmarshalText(b []byte) error {
|
||||||
res, left, err := parseLocalTime(b)
|
res, left, err := parseLocalTime(b)
|
||||||
if err == nil && len(left) != 0 {
|
if err == nil && len(left) != 0 {
|
||||||
err = newDecodeError(left, "extra characters")
|
err = unstable.NewParserError(left, "extra characters")
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -109,7 +111,7 @@ func (d LocalDateTime) MarshalText() ([]byte, error) {
|
|||||||
func (d *LocalDateTime) UnmarshalText(data []byte) error {
|
func (d *LocalDateTime) UnmarshalText(data []byte) error {
|
||||||
res, left, err := parseLocalDateTime(data)
|
res, left, err := parseLocalDateTime(data)
|
||||||
if err == nil && len(left) != 0 {
|
if err == nil && len(left) != 0 {
|
||||||
err = newDecodeError(left, "extra characters")
|
err = unstable.NewParserError(left, "extra characters")
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|||||||
+340
-92
@@ -11,6 +11,9 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
"unicode"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/characters"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Marshal serializes a Go value as a TOML document.
|
// Marshal serializes a Go value as a TOML document.
|
||||||
@@ -53,7 +56,7 @@ func NewEncoder(w io.Writer) *Encoder {
|
|||||||
// This behavior can be controlled on an individual struct field basis with the
|
// This behavior can be controlled on an individual struct field basis with the
|
||||||
// inline tag:
|
// inline tag:
|
||||||
//
|
//
|
||||||
// MyField `inline:"true"`
|
// MyField `toml:",inline"`
|
||||||
func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
|
func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
|
||||||
enc.tablesInline = inline
|
enc.tablesInline = inline
|
||||||
return enc
|
return enc
|
||||||
@@ -64,7 +67,7 @@ func (enc *Encoder) SetTablesInline(inline bool) *Encoder {
|
|||||||
//
|
//
|
||||||
// This behavior can be controlled on an individual struct field basis with the multiline tag:
|
// This behavior can be controlled on an individual struct field basis with the multiline tag:
|
||||||
//
|
//
|
||||||
// MyField `multiline:"true"`
|
// MyField `multiline:"true"`
|
||||||
func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder {
|
func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder {
|
||||||
enc.arraysMultiline = multiline
|
enc.arraysMultiline = multiline
|
||||||
return enc
|
return enc
|
||||||
@@ -88,7 +91,7 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
|
|||||||
//
|
//
|
||||||
// If v cannot be represented to TOML it returns an error.
|
// If v cannot be represented to TOML it returns an error.
|
||||||
//
|
//
|
||||||
// Encoding rules
|
// # Encoding rules
|
||||||
//
|
//
|
||||||
// A top level slice containing only maps or structs is encoded as [[table
|
// A top level slice containing only maps or structs is encoded as [[table
|
||||||
// array]].
|
// array]].
|
||||||
@@ -103,27 +106,52 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
|
|||||||
// Intermediate tables are always printed.
|
// Intermediate tables are always printed.
|
||||||
//
|
//
|
||||||
// By default, strings are encoded as literal string, unless they contain either
|
// By default, strings are encoded as literal string, unless they contain either
|
||||||
// a newline character or a single quote. In that case they are emitted as quoted
|
// a newline character or a single quote. In that case they are emitted as
|
||||||
// strings.
|
// quoted strings.
|
||||||
|
//
|
||||||
|
// Unsigned integers larger than math.MaxInt64 cannot be encoded. Doing so
|
||||||
|
// results in an error. This rule exists because the TOML specification only
|
||||||
|
// requires parsers to support at least the 64 bits integer range. Allowing
|
||||||
|
// larger numbers would create non-standard TOML documents, which may not be
|
||||||
|
// readable (at best) by other implementations. To encode such numbers, a
|
||||||
|
// solution is a custom type that implements encoding.TextMarshaler.
|
||||||
//
|
//
|
||||||
// When encoding structs, fields are encoded in order of definition, with their
|
// When encoding structs, fields are encoded in order of definition, with their
|
||||||
// exact name.
|
// exact name.
|
||||||
//
|
//
|
||||||
// Struct tags
|
// Tables and array tables are separated by empty lines. However, consecutive
|
||||||
|
// subtables definitions are not. For example:
|
||||||
//
|
//
|
||||||
// The following struct tags are available to tweak encoding on a per-field
|
// [top1]
|
||||||
// basis:
|
|
||||||
//
|
//
|
||||||
// toml:"foo"
|
// [top2]
|
||||||
// Changes the name of the key to use for the field to foo.
|
// [top2.child1]
|
||||||
//
|
//
|
||||||
// multiline:"true"
|
// [[array]]
|
||||||
// When the field contains a string, it will be emitted as a quoted
|
|
||||||
// multi-line TOML string.
|
|
||||||
//
|
//
|
||||||
// inline:"true"
|
// [[array]]
|
||||||
// When the field would normally be encoded as a table, it is instead
|
// [array.child2]
|
||||||
// encoded as an inline table.
|
//
|
||||||
|
// # Struct tags
|
||||||
|
//
|
||||||
|
// The encoding of each public struct field can be customized by the format
|
||||||
|
// string in the "toml" key of the struct field's tag. This follows
|
||||||
|
// encoding/json's convention. The format string starts with the name of the
|
||||||
|
// field, optionally followed by a comma-separated list of options. The name may
|
||||||
|
// be empty in order to provide options without overriding the default name.
|
||||||
|
//
|
||||||
|
// The "multiline" option emits strings as quoted multi-line TOML strings. It
|
||||||
|
// has no effect on fields that would not be encoded as strings.
|
||||||
|
//
|
||||||
|
// The "inline" option turns fields that would be emitted as tables into inline
|
||||||
|
// tables instead. It has no effect on other fields.
|
||||||
|
//
|
||||||
|
// The "omitempty" option prevents empty values or groups from being emitted.
|
||||||
|
//
|
||||||
|
// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit
|
||||||
|
// a TOML comment before the value being annotated. Comments are ignored inside
|
||||||
|
// inline tables. For array tables, the comment is only present before the first
|
||||||
|
// element of the array.
|
||||||
func (enc *Encoder) Encode(v interface{}) error {
|
func (enc *Encoder) Encode(v interface{}) error {
|
||||||
var (
|
var (
|
||||||
b []byte
|
b []byte
|
||||||
@@ -151,6 +179,8 @@ func (enc *Encoder) Encode(v interface{}) error {
|
|||||||
|
|
||||||
type valueOptions struct {
|
type valueOptions struct {
|
||||||
multiline bool
|
multiline bool
|
||||||
|
omitempty bool
|
||||||
|
comment string
|
||||||
}
|
}
|
||||||
|
|
||||||
type encoderCtx struct {
|
type encoderCtx struct {
|
||||||
@@ -200,16 +230,29 @@ func (ctx *encoderCtx) isRoot() bool {
|
|||||||
return len(ctx.parentKey) == 0 && !ctx.hasKey
|
return len(ctx.parentKey) == 0 && !ctx.hasKey
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop,funlen
|
|
||||||
func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||||
if !v.IsZero() {
|
i := v.Interface()
|
||||||
i, ok := v.Interface().(time.Time)
|
|
||||||
if ok {
|
switch x := i.(type) {
|
||||||
return i.AppendFormat(b, time.RFC3339), nil
|
case time.Time:
|
||||||
|
if x.Nanosecond() > 0 {
|
||||||
|
return x.AppendFormat(b, time.RFC3339Nano), nil
|
||||||
}
|
}
|
||||||
|
return x.AppendFormat(b, time.RFC3339), nil
|
||||||
|
case LocalTime:
|
||||||
|
return append(b, x.String()...), nil
|
||||||
|
case LocalDate:
|
||||||
|
return append(b, x.String()...), nil
|
||||||
|
case LocalDateTime:
|
||||||
|
return append(b, x.String()...), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.Type().Implements(textMarshalerType) {
|
hasTextMarshaler := v.Type().Implements(textMarshalerType)
|
||||||
|
if hasTextMarshaler || (v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
|
||||||
|
if !hasTextMarshaler {
|
||||||
|
v = v.Addr()
|
||||||
|
}
|
||||||
|
|
||||||
if ctx.isRoot() {
|
if ctx.isRoot() {
|
||||||
return nil, fmt.Errorf("toml: type %s implementing the TextMarshaler interface cannot be a root element", v.Type())
|
return nil, fmt.Errorf("toml: type %s implementing the TextMarshaler interface cannot be a root element", v.Type())
|
||||||
}
|
}
|
||||||
@@ -249,16 +292,31 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
|
|||||||
case reflect.String:
|
case reflect.String:
|
||||||
b = enc.encodeString(b, v.String(), ctx.options)
|
b = enc.encodeString(b, v.String(), ctx.options)
|
||||||
case reflect.Float32:
|
case reflect.Float32:
|
||||||
if math.Trunc(v.Float()) == v.Float() {
|
f := v.Float()
|
||||||
b = strconv.AppendFloat(b, v.Float(), 'f', 1, 32)
|
|
||||||
|
if math.IsNaN(f) {
|
||||||
|
b = append(b, "nan"...)
|
||||||
|
} else if f > math.MaxFloat32 {
|
||||||
|
b = append(b, "inf"...)
|
||||||
|
} else if f < -math.MaxFloat32 {
|
||||||
|
b = append(b, "-inf"...)
|
||||||
|
} else if math.Trunc(f) == f {
|
||||||
|
b = strconv.AppendFloat(b, f, 'f', 1, 32)
|
||||||
} else {
|
} else {
|
||||||
b = strconv.AppendFloat(b, v.Float(), 'f', -1, 32)
|
b = strconv.AppendFloat(b, f, 'f', -1, 32)
|
||||||
}
|
}
|
||||||
case reflect.Float64:
|
case reflect.Float64:
|
||||||
if math.Trunc(v.Float()) == v.Float() {
|
f := v.Float()
|
||||||
b = strconv.AppendFloat(b, v.Float(), 'f', 1, 64)
|
if math.IsNaN(f) {
|
||||||
|
b = append(b, "nan"...)
|
||||||
|
} else if f > math.MaxFloat64 {
|
||||||
|
b = append(b, "inf"...)
|
||||||
|
} else if f < -math.MaxFloat64 {
|
||||||
|
b = append(b, "-inf"...)
|
||||||
|
} else if math.Trunc(f) == f {
|
||||||
|
b = strconv.AppendFloat(b, f, 'f', 1, 64)
|
||||||
} else {
|
} else {
|
||||||
b = strconv.AppendFloat(b, v.Float(), 'f', -1, 64)
|
b = strconv.AppendFloat(b, f, 'f', -1, 64)
|
||||||
}
|
}
|
||||||
case reflect.Bool:
|
case reflect.Bool:
|
||||||
if v.Bool() {
|
if v.Bool() {
|
||||||
@@ -267,7 +325,11 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
|
|||||||
b = append(b, "false"...)
|
b = append(b, "false"...)
|
||||||
}
|
}
|
||||||
case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint:
|
case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint:
|
||||||
b = strconv.AppendUint(b, v.Uint(), 10)
|
x := v.Uint()
|
||||||
|
if x > uint64(math.MaxInt64) {
|
||||||
|
return nil, fmt.Errorf("toml: not encoding uint (%d) greater than max int64 (%d)", x, int64(math.MaxInt64))
|
||||||
|
}
|
||||||
|
b = strconv.AppendUint(b, x, 10)
|
||||||
case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
|
case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
|
||||||
b = strconv.AppendInt(b, v.Int(), 10)
|
b = strconv.AppendInt(b, v.Int(), 10)
|
||||||
default:
|
default:
|
||||||
@@ -286,19 +348,19 @@ func isNil(v reflect.Value) bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func shouldOmitEmpty(options valueOptions, v reflect.Value) bool {
|
||||||
|
return options.omitempty && isEmptyValue(v)
|
||||||
|
}
|
||||||
|
|
||||||
func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
|
func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if !ctx.hasKey {
|
if !ctx.inline {
|
||||||
panic("caller of encodeKv should have set the key in the context")
|
b = enc.encodeComment(ctx.indent, options.comment, b)
|
||||||
}
|
b = enc.indent(ctx.indent, b)
|
||||||
b = enc.indent(ctx.indent, b)
|
|
||||||
|
|
||||||
b, err = enc.encodeKey(b, ctx.key)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
b = enc.encodeKey(b, ctx.key)
|
||||||
b = append(b, " = "...)
|
b = append(b, " = "...)
|
||||||
|
|
||||||
// create a copy of the context because the value of a KV shouldn't
|
// create a copy of the context because the value of a KV shouldn't
|
||||||
@@ -316,6 +378,54 @@ func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v r
|
|||||||
return b, nil
|
return b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isEmptyValue(v reflect.Value) bool {
|
||||||
|
switch v.Kind() {
|
||||||
|
case reflect.Struct:
|
||||||
|
return isEmptyStruct(v)
|
||||||
|
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
|
||||||
|
return v.Len() == 0
|
||||||
|
case reflect.Bool:
|
||||||
|
return !v.Bool()
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
return v.Int() == 0
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
return v.Uint() == 0
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
return v.Float() == 0
|
||||||
|
case reflect.Interface, reflect.Ptr:
|
||||||
|
return v.IsNil()
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func isEmptyStruct(v reflect.Value) bool {
|
||||||
|
// TODO: merge with walkStruct and cache.
|
||||||
|
typ := v.Type()
|
||||||
|
for i := 0; i < typ.NumField(); i++ {
|
||||||
|
fieldType := typ.Field(i)
|
||||||
|
|
||||||
|
// only consider exported fields
|
||||||
|
if fieldType.PkgPath != "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
tag := fieldType.Tag.Get("toml")
|
||||||
|
|
||||||
|
// special field name to skip field
|
||||||
|
if tag == "-" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
f := v.Field(i)
|
||||||
|
|
||||||
|
if !isEmptyValue(f) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
const literalQuote = '\''
|
const literalQuote = '\''
|
||||||
|
|
||||||
func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte {
|
func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte {
|
||||||
@@ -327,7 +437,13 @@ func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byt
|
|||||||
}
|
}
|
||||||
|
|
||||||
func needsQuoting(v string) bool {
|
func needsQuoting(v string) bool {
|
||||||
return strings.ContainsAny(v, "'\b\f\n\r\t")
|
// TODO: vectorize
|
||||||
|
for _, b := range []byte(v) {
|
||||||
|
if b == '\'' || b == '\r' || b == '\n' || characters.InvalidAscii(b) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// caller should have checked that the string does not contain new lines or ' .
|
// caller should have checked that the string does not contain new lines or ' .
|
||||||
@@ -339,7 +455,6 @@ func (enc *Encoder) encodeLiteralString(b []byte, v string) []byte {
|
|||||||
return b
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
|
||||||
func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte {
|
func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte {
|
||||||
stringQuote := `"`
|
stringQuote := `"`
|
||||||
|
|
||||||
@@ -399,7 +514,7 @@ func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byt
|
|||||||
return b
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
// called should have checked that the string is in A-Z / a-z / 0-9 / - / _ .
|
// caller should have checked that the string is in A-Z / a-z / 0-9 / - / _ .
|
||||||
func (enc *Encoder) encodeUnquotedKey(b []byte, v string) []byte {
|
func (enc *Encoder) encodeUnquotedKey(b []byte, v string) []byte {
|
||||||
return append(b, v...)
|
return append(b, v...)
|
||||||
}
|
}
|
||||||
@@ -409,24 +524,17 @@ func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error)
|
|||||||
return b, nil
|
return b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
|
||||||
|
|
||||||
b = enc.indent(ctx.indent, b)
|
b = enc.indent(ctx.indent, b)
|
||||||
|
|
||||||
b = append(b, '[')
|
b = append(b, '[')
|
||||||
|
|
||||||
var err error
|
b = enc.encodeKey(b, ctx.parentKey[0])
|
||||||
|
|
||||||
b, err = enc.encodeKey(b, ctx.parentKey[0])
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, k := range ctx.parentKey[1:] {
|
for _, k := range ctx.parentKey[1:] {
|
||||||
b = append(b, '.')
|
b = append(b, '.')
|
||||||
|
b = enc.encodeKey(b, k)
|
||||||
b, err = enc.encodeKey(b, k)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
b = append(b, "]\n"...)
|
b = append(b, "]\n"...)
|
||||||
@@ -435,19 +543,19 @@ func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error)
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
//nolint:cyclop
|
||||||
func (enc *Encoder) encodeKey(b []byte, k string) ([]byte, error) {
|
func (enc *Encoder) encodeKey(b []byte, k string) []byte {
|
||||||
needsQuotation := false
|
needsQuotation := false
|
||||||
cannotUseLiteral := false
|
cannotUseLiteral := false
|
||||||
|
|
||||||
|
if len(k) == 0 {
|
||||||
|
return append(b, "''"...)
|
||||||
|
}
|
||||||
|
|
||||||
for _, c := range k {
|
for _, c := range k {
|
||||||
if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-' || c == '_' {
|
if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-' || c == '_' {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if c == '\n' {
|
|
||||||
return nil, fmt.Errorf("toml: new line characters in keys are not supported")
|
|
||||||
}
|
|
||||||
|
|
||||||
if c == literalQuote {
|
if c == literalQuote {
|
||||||
cannotUseLiteral = true
|
cannotUseLiteral = true
|
||||||
}
|
}
|
||||||
@@ -455,21 +563,37 @@ func (enc *Encoder) encodeKey(b []byte, k string) ([]byte, error) {
|
|||||||
needsQuotation = true
|
needsQuotation = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if needsQuotation && needsQuoting(k) {
|
||||||
|
cannotUseLiteral = true
|
||||||
|
}
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case cannotUseLiteral:
|
case cannotUseLiteral:
|
||||||
return enc.encodeQuotedString(false, b, k), nil
|
return enc.encodeQuotedString(false, b, k)
|
||||||
case needsQuotation:
|
case needsQuotation:
|
||||||
return enc.encodeLiteralString(b, k), nil
|
return enc.encodeLiteralString(b, k)
|
||||||
default:
|
default:
|
||||||
return enc.encodeUnquotedKey(b, k), nil
|
return enc.encodeUnquotedKey(b, k)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
func (enc *Encoder) keyToString(k reflect.Value) (string, error) {
|
||||||
if v.Type().Key().Kind() != reflect.String {
|
keyType := k.Type()
|
||||||
return nil, fmt.Errorf("toml: type %s is not supported as a map key", v.Type().Key().Kind())
|
switch {
|
||||||
}
|
case keyType.Kind() == reflect.String:
|
||||||
|
return k.String(), nil
|
||||||
|
|
||||||
|
case keyType.Implements(textMarshalerType):
|
||||||
|
keyB, err := k.Interface().(encoding.TextMarshaler).MarshalText()
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("toml: error marshalling key %v from text: %w", k, err)
|
||||||
|
}
|
||||||
|
return string(keyB), nil
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("toml: type %s is not supported as a map key", keyType.Kind())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||||
var (
|
var (
|
||||||
t table
|
t table
|
||||||
emptyValueOptions valueOptions
|
emptyValueOptions valueOptions
|
||||||
@@ -477,13 +601,17 @@ func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte
|
|||||||
|
|
||||||
iter := v.MapRange()
|
iter := v.MapRange()
|
||||||
for iter.Next() {
|
for iter.Next() {
|
||||||
k := iter.Key().String()
|
|
||||||
v := iter.Value()
|
v := iter.Value()
|
||||||
|
|
||||||
if isNil(v) {
|
if isNil(v) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
k, err := enc.keyToString(iter.Key())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
if willConvertToTableOrArrayTable(ctx, v) {
|
if willConvertToTableOrArrayTable(ctx, v) {
|
||||||
t.pushTable(k, v, emptyValueOptions)
|
t.pushTable(k, v, emptyValueOptions)
|
||||||
} else {
|
} else {
|
||||||
@@ -515,18 +643,26 @@ type table struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (t *table) pushKV(k string, v reflect.Value, options valueOptions) {
|
func (t *table) pushKV(k string, v reflect.Value, options valueOptions) {
|
||||||
|
for _, e := range t.kvs {
|
||||||
|
if e.Key == k {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
t.kvs = append(t.kvs, entry{Key: k, Value: v, Options: options})
|
t.kvs = append(t.kvs, entry{Key: k, Value: v, Options: options})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *table) pushTable(k string, v reflect.Value, options valueOptions) {
|
func (t *table) pushTable(k string, v reflect.Value, options valueOptions) {
|
||||||
|
for _, e := range t.tables {
|
||||||
|
if e.Key == k {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
t.tables = append(t.tables, entry{Key: k, Value: v, Options: options})
|
t.tables = append(t.tables, entry{Key: k, Value: v, Options: options})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
func walkStruct(ctx encoderCtx, t *table, v reflect.Value) {
|
||||||
var t table
|
// TODO: cache this
|
||||||
|
|
||||||
//nolint:godox
|
|
||||||
// TODO: cache this?
|
|
||||||
typ := v.Type()
|
typ := v.Type()
|
||||||
for i := 0; i < typ.NumField(); i++ {
|
for i := 0; i < typ.NumField(); i++ {
|
||||||
fieldType := typ.Field(i)
|
fieldType := typ.Field(i)
|
||||||
@@ -536,45 +672,130 @@ func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]b
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
k, ok := fieldType.Tag.Lookup("toml")
|
tag := fieldType.Tag.Get("toml")
|
||||||
if !ok {
|
|
||||||
k = fieldType.Name
|
|
||||||
}
|
|
||||||
|
|
||||||
// special field name to skip field
|
// special field name to skip field
|
||||||
if k == "-" {
|
if tag == "-" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
k, opts := parseTag(tag)
|
||||||
|
if !isValidName(k) {
|
||||||
|
k = ""
|
||||||
|
}
|
||||||
|
|
||||||
f := v.Field(i)
|
f := v.Field(i)
|
||||||
|
|
||||||
|
if k == "" {
|
||||||
|
if fieldType.Anonymous {
|
||||||
|
if fieldType.Type.Kind() == reflect.Struct {
|
||||||
|
walkStruct(ctx, t, f)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
} else {
|
||||||
|
k = fieldType.Name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if isNil(f) {
|
if isNil(f) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
options := valueOptions{
|
options := valueOptions{
|
||||||
multiline: fieldBoolTag(fieldType, "multiline"),
|
multiline: opts.multiline,
|
||||||
|
omitempty: opts.omitempty,
|
||||||
|
comment: fieldType.Tag.Get("comment"),
|
||||||
}
|
}
|
||||||
|
|
||||||
inline := fieldBoolTag(fieldType, "inline")
|
if opts.inline || !willConvertToTableOrArrayTable(ctx, f) {
|
||||||
|
|
||||||
if inline || !willConvertToTableOrArrayTable(ctx, f) {
|
|
||||||
t.pushKV(k, f, options)
|
t.pushKV(k, f, options)
|
||||||
} else {
|
} else {
|
||||||
t.pushTable(k, f, options)
|
t.pushTable(k, f, options)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||||
|
var t table
|
||||||
|
|
||||||
|
walkStruct(ctx, &t, v)
|
||||||
|
|
||||||
return enc.encodeTable(b, ctx, t)
|
return enc.encodeTable(b, ctx, t)
|
||||||
}
|
}
|
||||||
|
|
||||||
func fieldBoolTag(field reflect.StructField, tag string) bool {
|
func (enc *Encoder) encodeComment(indent int, comment string, b []byte) []byte {
|
||||||
x, ok := field.Tag.Lookup(tag)
|
for len(comment) > 0 {
|
||||||
|
var line string
|
||||||
return ok && x == "true"
|
idx := strings.IndexByte(comment, '\n')
|
||||||
|
if idx >= 0 {
|
||||||
|
line = comment[:idx]
|
||||||
|
comment = comment[idx+1:]
|
||||||
|
} else {
|
||||||
|
line = comment
|
||||||
|
comment = ""
|
||||||
|
}
|
||||||
|
b = enc.indent(indent, b)
|
||||||
|
b = append(b, "# "...)
|
||||||
|
b = append(b, line...)
|
||||||
|
b = append(b, '\n')
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func isValidName(s string) bool {
|
||||||
|
if s == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, c := range s {
|
||||||
|
switch {
|
||||||
|
case strings.ContainsRune("!#$%&()*+-./:;<=>?@[]^_{|}~ ", c):
|
||||||
|
// Backslash and quote chars are reserved, but
|
||||||
|
// otherwise any punctuation chars are allowed
|
||||||
|
// in a tag name.
|
||||||
|
case !unicode.IsLetter(c) && !unicode.IsDigit(c):
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
type tagOptions struct {
|
||||||
|
multiline bool
|
||||||
|
inline bool
|
||||||
|
omitempty bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseTag(tag string) (string, tagOptions) {
|
||||||
|
opts := tagOptions{}
|
||||||
|
|
||||||
|
idx := strings.Index(tag, ",")
|
||||||
|
if idx == -1 {
|
||||||
|
return tag, opts
|
||||||
|
}
|
||||||
|
|
||||||
|
raw := tag[idx+1:]
|
||||||
|
tag = string(tag[:idx])
|
||||||
|
for raw != "" {
|
||||||
|
var o string
|
||||||
|
i := strings.Index(raw, ",")
|
||||||
|
if i >= 0 {
|
||||||
|
o, raw = raw[:i], raw[i+1:]
|
||||||
|
} else {
|
||||||
|
o, raw = raw, ""
|
||||||
|
}
|
||||||
|
switch o {
|
||||||
|
case "multiline":
|
||||||
|
opts.multiline = true
|
||||||
|
case "inline":
|
||||||
|
opts.inline = true
|
||||||
|
case "omitempty":
|
||||||
|
opts.omitempty = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return tag, opts
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
|
||||||
func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, error) {
|
func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, error) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
@@ -596,7 +817,13 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
}
|
}
|
||||||
ctx.skipTableHeader = false
|
ctx.skipTableHeader = false
|
||||||
|
|
||||||
|
hasNonEmptyKV := false
|
||||||
for _, kv := range t.kvs {
|
for _, kv := range t.kvs {
|
||||||
|
if shouldOmitEmpty(kv.Options, kv.Value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
hasNonEmptyKV = true
|
||||||
|
|
||||||
ctx.setKey(kv.Key)
|
ctx.setKey(kv.Key)
|
||||||
|
|
||||||
b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value)
|
b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value)
|
||||||
@@ -607,7 +834,20 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
b = append(b, '\n')
|
b = append(b, '\n')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
first := true
|
||||||
for _, table := range t.tables {
|
for _, table := range t.tables {
|
||||||
|
if shouldOmitEmpty(table.Options, table.Value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if first {
|
||||||
|
first = false
|
||||||
|
if hasNonEmptyKV {
|
||||||
|
b = append(b, '\n')
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
b = append(b, "\n"...)
|
||||||
|
}
|
||||||
|
|
||||||
ctx.setKey(table.Key)
|
ctx.setKey(table.Key)
|
||||||
|
|
||||||
ctx.options = table.Options
|
ctx.options = table.Options
|
||||||
@@ -616,8 +856,6 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
b = append(b, '\n')
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return b, nil
|
return b, nil
|
||||||
@@ -630,6 +868,10 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
|
|||||||
|
|
||||||
first := true
|
first := true
|
||||||
for _, kv := range t.kvs {
|
for _, kv := range t.kvs {
|
||||||
|
if shouldOmitEmpty(kv.Options, kv.Value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
if first {
|
if first {
|
||||||
first = false
|
first = false
|
||||||
} else {
|
} else {
|
||||||
@@ -645,7 +887,7 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(t.tables) > 0 {
|
if len(t.tables) > 0 {
|
||||||
panic("inline table cannot contain nested tables, online key-values")
|
panic("inline table cannot contain nested tables, only key-values")
|
||||||
}
|
}
|
||||||
|
|
||||||
b = append(b, "}"...)
|
b = append(b, "}"...)
|
||||||
@@ -657,7 +899,7 @@ func willConvertToTable(ctx encoderCtx, v reflect.Value) bool {
|
|||||||
if !v.IsValid() {
|
if !v.IsValid() {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if v.Type() == timeType || v.Type().Implements(textMarshalerType) {
|
if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -679,6 +921,9 @@ func willConvertToTable(ctx encoderCtx, v reflect.Value) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool {
|
func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool {
|
||||||
|
if ctx.insideKv {
|
||||||
|
return false
|
||||||
|
}
|
||||||
t := v.Type()
|
t := v.Type()
|
||||||
|
|
||||||
if t.Kind() == reflect.Interface {
|
if t.Kind() == reflect.Interface {
|
||||||
@@ -724,7 +969,6 @@ func (enc *Encoder) encodeSlice(b []byte, ctx encoderCtx, v reflect.Value) ([]by
|
|||||||
func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||||
ctx.shiftKey()
|
ctx.shiftKey()
|
||||||
|
|
||||||
var err error
|
|
||||||
scratch := make([]byte, 0, 64)
|
scratch := make([]byte, 0, 64)
|
||||||
scratch = append(scratch, "[["...)
|
scratch = append(scratch, "[["...)
|
||||||
|
|
||||||
@@ -733,18 +977,22 @@ func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.
|
|||||||
scratch = append(scratch, '.')
|
scratch = append(scratch, '.')
|
||||||
}
|
}
|
||||||
|
|
||||||
scratch, err = enc.encodeKey(scratch, k)
|
scratch = enc.encodeKey(scratch, k)
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
scratch = append(scratch, "]]\n"...)
|
scratch = append(scratch, "]]\n"...)
|
||||||
ctx.skipTableHeader = true
|
ctx.skipTableHeader = true
|
||||||
|
|
||||||
|
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
|
||||||
|
|
||||||
for i := 0; i < v.Len(); i++ {
|
for i := 0; i < v.Len(); i++ {
|
||||||
|
if i != 0 {
|
||||||
|
b = append(b, "\n"...)
|
||||||
|
}
|
||||||
|
|
||||||
b = append(b, scratch...)
|
b = append(b, scratch...)
|
||||||
|
|
||||||
|
var err error
|
||||||
b, err = enc.encode(b, ctx, v.Index(i))
|
b, err = enc.encode(b, ctx, v.Index(i))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|||||||
+563
-106
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,45 @@
|
|||||||
|
//go:build go1.18 || go1.19 || go1.20
|
||||||
|
// +build go1.18 go1.19 go1.20
|
||||||
|
|
||||||
|
package ossfuzz
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
func FuzzToml(data []byte) int {
|
||||||
|
if len(data) >= 2048 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.Contains(string(data), "nan") {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal(data, &v)
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
encoded, err := toml.Marshal(v)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed to marshal unmarshaled document: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
var v2 interface{}
|
||||||
|
err = toml.Unmarshal(encoded, &v2)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed round trip: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(v, v2) {
|
||||||
|
panic(fmt.Sprintf("not equal: %#+v %#+v", v, v2))
|
||||||
|
}
|
||||||
|
|
||||||
|
return 1
|
||||||
|
}
|
||||||
-450
@@ -1,450 +0,0 @@
|
|||||||
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
//nolint:funlen
|
|
||||||
func TestParser_AST_Numbers(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
kind ast.Kind
|
|
||||||
err bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "integer just digits",
|
|
||||||
input: `1234`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer zero",
|
|
||||||
input: `0`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer sign",
|
|
||||||
input: `+99`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer hex uppercase",
|
|
||||||
input: `0xDEADBEEF`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer hex lowercase",
|
|
||||||
input: `0xdead_beef`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer octal",
|
|
||||||
input: `0o01234567`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer binary",
|
|
||||||
input: `0b11010110`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float zero",
|
|
||||||
input: `0.0`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float positive zero",
|
|
||||||
input: `+0.0`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float negative zero",
|
|
||||||
input: `-0.0`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float pi",
|
|
||||||
input: `3.1415`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float negative",
|
|
||||||
input: `-0.01`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float signed exponent",
|
|
||||||
input: `5e+22`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float exponent lowercase",
|
|
||||||
input: `1e06`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float exponent uppercase",
|
|
||||||
input: `-2E-2`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float fractional with exponent",
|
|
||||||
input: `6.626e-34`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float underscores",
|
|
||||||
input: `224_617.445_991_228`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inf",
|
|
||||||
input: `inf`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inf negative",
|
|
||||||
input: `-inf`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inf positive",
|
|
||||||
input: `+inf`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nan",
|
|
||||||
input: `nan`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nan negative",
|
|
||||||
input: `-nan`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nan positive",
|
|
||||||
input: `+nan`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
e := e
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
p := parser{}
|
|
||||||
p.Reset([]byte(`A = ` + e.input))
|
|
||||||
p.NextExpression()
|
|
||||||
err := p.Error()
|
|
||||||
if e.err {
|
|
||||||
require.Error(t, err)
|
|
||||||
} else {
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
expected := astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: e.kind, Data: []byte(e.input)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`A`)},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
compareNode(t, expected, p.Expression())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type (
|
|
||||||
astNode struct {
|
|
||||||
Kind ast.Kind
|
|
||||||
Data []byte
|
|
||||||
Children []astNode
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
func compareNode(t *testing.T, e astNode, n *ast.Node) {
|
|
||||||
t.Helper()
|
|
||||||
require.Equal(t, e.Kind, n.Kind)
|
|
||||||
require.Equal(t, e.Data, n.Data)
|
|
||||||
|
|
||||||
compareIterator(t, e.Children, n.Children())
|
|
||||||
}
|
|
||||||
|
|
||||||
func compareIterator(t *testing.T, expected []astNode, actual ast.Iterator) {
|
|
||||||
t.Helper()
|
|
||||||
idx := 0
|
|
||||||
|
|
||||||
for actual.Next() {
|
|
||||||
n := actual.Node()
|
|
||||||
|
|
||||||
if idx >= len(expected) {
|
|
||||||
t.Fatal("extra child in actual tree")
|
|
||||||
}
|
|
||||||
e := expected[idx]
|
|
||||||
|
|
||||||
compareNode(t, e, n)
|
|
||||||
|
|
||||||
idx++
|
|
||||||
}
|
|
||||||
|
|
||||||
if idx < len(expected) {
|
|
||||||
t.Fatal("missing children in actual", "idx =", idx, "expected =", len(expected))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//nolint:funlen
|
|
||||||
func TestParser_AST(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
ast astNode
|
|
||||||
err bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "simple string assignment",
|
|
||||||
input: `A = "hello"`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`hello`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "simple bool assignment",
|
|
||||||
input: `A = true`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.Bool,
|
|
||||||
Data: []byte(`true`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "array of strings",
|
|
||||||
input: `A = ["hello", ["world", "again"]]`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.Array,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`hello`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Array,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`world`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`again`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "array of arrays of strings",
|
|
||||||
input: `A = ["hello", "world"]`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.Array,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`hello`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`world`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inline table",
|
|
||||||
input: `name = { first = "Tom", last = "Preston-Werner" }`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.InlineTable,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: ast.String, Data: []byte(`Tom`)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`first`)},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: ast.String, Data: []byte(`Preston-Werner`)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`last`)},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`name`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
e := e
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
p := parser{}
|
|
||||||
p.Reset([]byte(e.input))
|
|
||||||
p.NextExpression()
|
|
||||||
err := p.Error()
|
|
||||||
if e.err {
|
|
||||||
require.Error(t, err)
|
|
||||||
} else {
|
|
||||||
require.NoError(t, err)
|
|
||||||
compareNode(t, e.ast, p.Expression())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkParseBasicStringWithUnicode(b *testing.B) {
|
|
||||||
p := &parser{}
|
|
||||||
b.Run("4", func(b *testing.B) {
|
|
||||||
input := []byte(`"\u1234\u5678\u9ABC\u1234\u5678\u9ABC"`)
|
|
||||||
b.ReportAllocs()
|
|
||||||
b.SetBytes(int64(len(input)))
|
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
p.parseBasicString(input)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
b.Run("8", func(b *testing.B) {
|
|
||||||
input := []byte(`"\u12345678\u9ABCDEF0\u12345678\u9ABCDEF0"`)
|
|
||||||
b.ReportAllocs()
|
|
||||||
b.SetBytes(int64(len(input)))
|
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
p.parseBasicString(input)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkParseBasicStringsEasy(b *testing.B) {
|
|
||||||
p := &parser{}
|
|
||||||
|
|
||||||
for _, size := range []int{1, 4, 8, 16, 21} {
|
|
||||||
b.Run(strconv.Itoa(size), func(b *testing.B) {
|
|
||||||
input := []byte(`"` + strings.Repeat("A", size) + `"`)
|
|
||||||
|
|
||||||
b.ReportAllocs()
|
|
||||||
b.SetBytes(int64(len(input)))
|
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
p.parseBasicString(input)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestParser_AST_DateTimes(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
kind ast.Kind
|
|
||||||
err bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "offset-date-time with delim 'T' and UTC offset",
|
|
||||||
input: `2021-07-21T12:08:05Z`,
|
|
||||||
kind: ast.DateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "offset-date-time with space delim and +8hours offset",
|
|
||||||
input: `2021-07-21 12:08:05+08:00`,
|
|
||||||
kind: ast.DateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "local-date-time with nano second",
|
|
||||||
input: `2021-07-21T12:08:05.666666666`,
|
|
||||||
kind: ast.LocalDateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "local-date-time",
|
|
||||||
input: `2021-07-21T12:08:05`,
|
|
||||||
kind: ast.LocalDateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "local-date",
|
|
||||||
input: `2021-07-21`,
|
|
||||||
kind: ast.LocalDate,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
e := e
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
p := parser{}
|
|
||||||
p.Reset([]byte(`A = ` + e.input))
|
|
||||||
p.NextExpression()
|
|
||||||
err := p.Error()
|
|
||||||
if e.err {
|
|
||||||
require.Error(t, err)
|
|
||||||
} else {
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
expected := astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: e.kind, Data: []byte(e.input)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`A`)},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
compareNode(t, expected, p.Expression())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,9 +1,9 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||||
"github.com/pelletier/go-toml/v2/internal/tracker"
|
"github.com/pelletier/go-toml/v2/internal/tracker"
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
type strict struct {
|
type strict struct {
|
||||||
@@ -12,10 +12,10 @@ type strict struct {
|
|||||||
// Tracks the current key being processed.
|
// Tracks the current key being processed.
|
||||||
key tracker.KeyTracker
|
key tracker.KeyTracker
|
||||||
|
|
||||||
missing []decodeError
|
missing []unstable.ParserError
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) EnterTable(node *ast.Node) {
|
func (s *strict) EnterTable(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -23,7 +23,7 @@ func (s *strict) EnterTable(node *ast.Node) {
|
|||||||
s.key.UpdateTable(node)
|
s.key.UpdateTable(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) EnterArrayTable(node *ast.Node) {
|
func (s *strict) EnterArrayTable(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -31,7 +31,7 @@ func (s *strict) EnterArrayTable(node *ast.Node) {
|
|||||||
s.key.UpdateArrayTable(node)
|
s.key.UpdateArrayTable(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) EnterKeyValue(node *ast.Node) {
|
func (s *strict) EnterKeyValue(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -39,7 +39,7 @@ func (s *strict) EnterKeyValue(node *ast.Node) {
|
|||||||
s.key.Push(node)
|
s.key.Push(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) ExitKeyValue(node *ast.Node) {
|
func (s *strict) ExitKeyValue(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -47,27 +47,27 @@ func (s *strict) ExitKeyValue(node *ast.Node) {
|
|||||||
s.key.Pop(node)
|
s.key.Pop(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) MissingTable(node *ast.Node) {
|
func (s *strict) MissingTable(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
s.missing = append(s.missing, decodeError{
|
s.missing = append(s.missing, unstable.ParserError{
|
||||||
highlight: keyLocation(node),
|
Highlight: keyLocation(node),
|
||||||
message: "missing table",
|
Message: "missing table",
|
||||||
key: s.key.Key(),
|
Key: s.key.Key(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) MissingField(node *ast.Node) {
|
func (s *strict) MissingField(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
s.missing = append(s.missing, decodeError{
|
s.missing = append(s.missing, unstable.ParserError{
|
||||||
highlight: keyLocation(node),
|
Highlight: keyLocation(node),
|
||||||
message: "missing field",
|
Message: "missing field",
|
||||||
key: s.key.Key(),
|
Key: s.key.Key(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -88,7 +88,7 @@ func (s *strict) Error(doc []byte) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func keyLocation(node *ast.Node) []byte {
|
func keyLocation(node *unstable.Node) []byte {
|
||||||
k := node.Key()
|
k := node.Key()
|
||||||
|
|
||||||
hasOne := k.Next()
|
hasOne := k.Next()
|
||||||
|
|||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=0000-01-01 00:00:00")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("\"\\n\"=\"\"")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("''=0")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=0000-01-01")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=\"\"\"\\U00000000\"\"\"")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=[[{}]]")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("\"\\b\"=\"\"")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=inf")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=0000-01-01 00:00:00+00:00")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=[{}]")
|
||||||
Vendored
+2
@@ -0,0 +1,2 @@
|
|||||||
|
go test fuzz v1
|
||||||
|
[]byte("0=nan")
|
||||||
@@ -8,7 +8,7 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/pelletier/go-toml/v2/testsuite"
|
"github.com/pelletier/go-toml/v2/internal/testsuite"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
+215
-32
@@ -1,4 +1,4 @@
|
|||||||
// Generated by tomltestgen for toml-test ref master on 2021-11-08T22:33:24-05:00
|
// Generated by tomltestgen for toml-test ref master on 2022-04-07T20:09:42-04:00
|
||||||
package toml_test
|
package toml_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@@ -55,11 +55,51 @@ func TestTOMLTest_Invalid_Array_TextInArray(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_AlmostFalseWithExtra(t *testing.T) {
|
||||||
|
input := "a = falsify\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_AlmostFalse(t *testing.T) {
|
||||||
|
input := "a = fals\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_AlmostTrueWithExtra(t *testing.T) {
|
||||||
|
input := "a = truthy\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_AlmostTrue(t *testing.T) {
|
||||||
|
input := "a = tru\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_JustF(t *testing.T) {
|
||||||
|
input := "a = f\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_JustT(t *testing.T) {
|
||||||
|
input := "a = t\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Bool_MixedCase(t *testing.T) {
|
func TestTOMLTest_Invalid_Bool_MixedCase(t *testing.T) {
|
||||||
input := "valid = False\n"
|
input := "valid = False\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_StartingSameFalse(t *testing.T) {
|
||||||
|
input := "a = falsey\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Bool_StartingSameTrue(t *testing.T) {
|
||||||
|
input := "a = truer\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Bool_WrongCaseFalse(t *testing.T) {
|
func TestTOMLTest_Invalid_Bool_WrongCaseFalse(t *testing.T) {
|
||||||
input := "b = FALSE\n"
|
input := "b = FALSE\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -70,6 +110,31 @@ func TestTOMLTest_Invalid_Bool_WrongCaseTrue(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Control_BareCr(t *testing.T) {
|
||||||
|
input := "# The following line contains a single carriage return control character\r\n\r"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Control_BareFormfeed(t *testing.T) {
|
||||||
|
input := "bare-formfeed = \f\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Control_BareNull(t *testing.T) {
|
||||||
|
input := "bare-null = \"some value\" \x00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Control_BareVerticalTab(t *testing.T) {
|
||||||
|
input := "bare-vertical-tab = \v\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Control_CommentCr(t *testing.T) {
|
||||||
|
input := "comment-cr = \"Carriage return in comment\" # \ra=1\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Control_CommentDel(t *testing.T) {
|
func TestTOMLTest_Invalid_Control_CommentDel(t *testing.T) {
|
||||||
input := "comment-del = \"0x7f\" # \u007f\n"
|
input := "comment-del = \"0x7f\" # \u007f\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -175,33 +240,73 @@ func TestTOMLTest_Invalid_Control_StringUs(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Datetime_ImpossibleDate(t *testing.T) {
|
func TestTOMLTest_Invalid_Datetime_HourOver(t *testing.T) {
|
||||||
input := "d = 2006-01-50T00:00:00Z\n"
|
input := "# time-hour = 2DIGIT ; 00-23\nd = 2006-01-01T24:00:00-00:00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_MdayOver(t *testing.T) {
|
||||||
|
input := "# date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on\n# ; month/year\nd = 2006-01-32T00:00:00-00:00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_MdayUnder(t *testing.T) {
|
||||||
|
input := "# date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on\n# ; month/year\nd = 2006-01-00T00:00:00-00:00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_MinuteOver(t *testing.T) {
|
||||||
|
input := "# time-minute = 2DIGIT ; 00-59\nd = 2006-01-01T00:60:00-00:00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_MonthOver(t *testing.T) {
|
||||||
|
input := "# date-month = 2DIGIT ; 01-12\nd = 2006-13-01T00:00:00-00:00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_MonthUnder(t *testing.T) {
|
||||||
|
input := "# date-month = 2DIGIT ; 01-12\nd = 2007-00-01T00:00:00-00:00\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Datetime_NoLeadsWithMilli(t *testing.T) {
|
func TestTOMLTest_Invalid_Datetime_NoLeadsWithMilli(t *testing.T) {
|
||||||
input := "with-milli = 1987-07-5T17:45:00.12Z\n"
|
input := "# Day \"5\" instead of \"05\"; the leading zero is required.\nwith-milli = 1987-07-5T17:45:00.12Z\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Datetime_NoLeads(t *testing.T) {
|
func TestTOMLTest_Invalid_Datetime_NoLeads(t *testing.T) {
|
||||||
input := "no-leads = 1987-7-05T17:45:00Z\n"
|
input := "# Month \"7\" instead of \"07\"; the leading zero is required.\nno-leads = 1987-7-05T17:45:00Z\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Datetime_NoSecs(t *testing.T) {
|
func TestTOMLTest_Invalid_Datetime_NoSecs(t *testing.T) {
|
||||||
input := "no-secs = 1987-07-05T17:45Z\n"
|
input := "# No seconds in time.\nno-secs = 1987-07-05T17:45Z\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Datetime_NoT(t *testing.T) {
|
func TestTOMLTest_Invalid_Datetime_NoT(t *testing.T) {
|
||||||
input := "no-t = 1987-07-0517:45:00Z\n"
|
input := "# No \"t\" or \"T\" between the date and time.\nno-t = 1987-07-0517:45:00Z\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_SecondOver(t *testing.T) {
|
||||||
|
input := "# time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second\n# ; rules\nd = 2006-01-01T00:00:61-00:00\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_TimeNoLeads2(t *testing.T) {
|
||||||
|
input := "# Leading 0 is always required.\nd = 01:32:0\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Datetime_TimeNoLeads(t *testing.T) {
|
||||||
|
input := "# Leading 0 is always required.\nd = 1:32:00\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Datetime_TrailingT(t *testing.T) {
|
func TestTOMLTest_Invalid_Datetime_TrailingT(t *testing.T) {
|
||||||
input := "d = 2006-01-30T\n"
|
input := "# Date cannot end with trailing T\nd = 2006-01-30T\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -395,6 +500,11 @@ func TestTOMLTest_Invalid_Float_UsBeforePoint(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_InlineTable_Add(t *testing.T) {
|
||||||
|
input := "a={}\n# Inline tables are immutable and can't be extended\n[a.b]\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_InlineTable_DoubleComma(t *testing.T) {
|
func TestTOMLTest_Invalid_InlineTable_DoubleComma(t *testing.T) {
|
||||||
input := "t = {x=3,,y=4}\n"
|
input := "t = {x=3,,y=4}\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -435,6 +545,11 @@ func TestTOMLTest_Invalid_InlineTable_NoComma(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_InlineTable_Overwrite(t *testing.T) {
|
||||||
|
input := "a.b=0\n# Since table \"a\" is already defined, it can't be replaced by an inline table.\na={}\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_InlineTable_TrailingComma(t *testing.T) {
|
func TestTOMLTest_Invalid_InlineTable_TrailingComma(t *testing.T) {
|
||||||
input := "# A terminating comma (also called trailing comma) is not permitted after the\n# last key/value pair in an inline table\nabc = { abc = 123, }\n"
|
input := "# A terminating comma (also called trailing comma) is not permitted after the\n# last key/value pair in an inline table\nabc = { abc = 123, }\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -470,6 +585,21 @@ func TestTOMLTest_Invalid_Integer_DoubleUs(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Integer_IncompleteBin(t *testing.T) {
|
||||||
|
input := "incomplete-bin = 0b\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Integer_IncompleteHex(t *testing.T) {
|
||||||
|
input := "incomplete-hex = 0x\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Integer_IncompleteOct(t *testing.T) {
|
||||||
|
input := "incomplete-oct = 0o\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Integer_InvalidBin(t *testing.T) {
|
func TestTOMLTest_Invalid_Integer_InvalidBin(t *testing.T) {
|
||||||
input := "invalid-bin = 0b0012\n"
|
input := "invalid-bin = 0b0012\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -515,6 +645,11 @@ func TestTOMLTest_Invalid_Integer_LeadingZero2(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Integer_LeadingZero3(t *testing.T) {
|
||||||
|
input := "leading-zero-3 = 0_0\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Integer_LeadingZeroSign1(t *testing.T) {
|
func TestTOMLTest_Invalid_Integer_LeadingZeroSign1(t *testing.T) {
|
||||||
input := "leading-zero-sign-1 = -01\n"
|
input := "leading-zero-sign-1 = -01\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -525,6 +660,11 @@ func TestTOMLTest_Invalid_Integer_LeadingZeroSign2(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Integer_LeadingZeroSign3(t *testing.T) {
|
||||||
|
input := "leading-zero-sign-3 = +0_1\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Integer_NegativeBin(t *testing.T) {
|
func TestTOMLTest_Invalid_Integer_NegativeBin(t *testing.T) {
|
||||||
input := "negative-bin = -0b11010110\n"
|
input := "negative-bin = -0b11010110\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -730,11 +870,16 @@ func TestTOMLTest_Invalid_String_BadConcat(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_String_BadEscape(t *testing.T) {
|
func TestTOMLTest_Invalid_String_BadEscape1(t *testing.T) {
|
||||||
input := "invalid-escape = \"This string has a bad \\a escape character.\"\n"
|
input := "invalid-escape = \"This string has a bad \\a escape character.\"\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_String_BadEscape2(t *testing.T) {
|
||||||
|
input := "invalid-escape = \"This string has a bad \\ escape character.\"\n\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_String_BadMultiline(t *testing.T) {
|
func TestTOMLTest_Invalid_String_BadMultiline(t *testing.T) {
|
||||||
input := "multi = \"first line\nsecond line\"\n"
|
input := "multi = \"first line\nsecond line\"\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -805,6 +950,21 @@ func TestTOMLTest_Invalid_String_MissingQuotes(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_String_MultilineBadEscape1(t *testing.T) {
|
||||||
|
input := "k = \"\"\"t\\a\"\"\"\n\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_String_MultilineBadEscape2(t *testing.T) {
|
||||||
|
input := "# \\<Space> is not a valid escape.\nk = \"\"\"t\\ t\"\"\"\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_String_MultilineBadEscape3(t *testing.T) {
|
||||||
|
input := "# \\<Space> is not a valid escape.\nk = \"\"\"t\\ \"\"\"\n\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_String_MultilineEscapeSpace(t *testing.T) {
|
func TestTOMLTest_Invalid_String_MultilineEscapeSpace(t *testing.T) {
|
||||||
input := "a = \"\"\"\n foo \\ \\n\n bar\"\"\"\n"
|
input := "a = \"\"\"\n foo \\ \\n\n bar\"\"\"\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -825,11 +985,6 @@ func TestTOMLTest_Invalid_String_MultilineQuotes1(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_String_MultilineQuotes2(t *testing.T) {
|
|
||||||
input := "a = \"\"\"6 quotes: \"\"\"\"\"\"\n"
|
|
||||||
testgenInvalid(t, input)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_String_NoClose(t *testing.T) {
|
func TestTOMLTest_Invalid_String_NoClose(t *testing.T) {
|
||||||
input := "no-ending-quote = \"One time, at band camp\n"
|
input := "no-ending-quote = \"One time, at band camp\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -845,6 +1000,16 @@ func TestTOMLTest_Invalid_String_WrongClose(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Table_AppendWithDottedKeys1(t *testing.T) {
|
||||||
|
input := "# First a.b.c defines a table: a.b.c = {z=9}\n#\n# Then we define a.b.c.t = \"str\" to add a str to the above table, making it:\n#\n# a.b.c = {z=9, t=\"...\"}\n#\n# While this makes sense, logically, it was decided this is not valid TOML as\n# it's too confusing/convoluted.\n# \n# See: https://github.com/toml-lang/toml/issues/846\n# https://github.com/toml-lang/toml/pull/859\n\n[a.b.c]\n z = 9\n\n[a]\n b.c.t = \"Using dotted keys to add to [a.b.c] after explicitly defining it above is not allowed\"\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Table_AppendWithDottedKeys2(t *testing.T) {
|
||||||
|
input := "# This is the same issue as in injection-1.toml, except that nests one level\n# deeper. See that file for a more complete description.\n\n[a.b.c.d]\n z = 9\n\n[a]\n b.c.d.k.t = \"Using dotted keys to add to [a.b.c.d] after explicitly defining it above is not allowed\"\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Table_ArrayEmpty(t *testing.T) {
|
func TestTOMLTest_Invalid_Table_ArrayEmpty(t *testing.T) {
|
||||||
input := "[[]]\nname = \"Born to Run\"\n"
|
input := "[[]]\nname = \"Born to Run\"\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -860,6 +1025,16 @@ func TestTOMLTest_Invalid_Table_ArrayMissingBracket(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Table_DuplicateKeyDottedTable(t *testing.T) {
|
||||||
|
input := "[fruit]\napple.color = \"red\"\n\n[fruit.apple] # INVALID\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Invalid_Table_DuplicateKeyDottedTable2(t *testing.T) {
|
||||||
|
input := "[fruit]\napple.taste.sweet = true\n\n[fruit.apple.taste] # INVALID\n"
|
||||||
|
testgenInvalid(t, input)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Table_DuplicateKeyTable(t *testing.T) {
|
func TestTOMLTest_Invalid_Table_DuplicateKeyTable(t *testing.T) {
|
||||||
input := "[fruit]\ntype = \"apple\"\n\n[fruit.type]\napple = \"yes\"\n"
|
input := "[fruit]\ntype = \"apple\"\n\n[fruit.type]\napple = \"yes\"\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -895,16 +1070,6 @@ func TestTOMLTest_Invalid_Table_EqualsSign(t *testing.T) {
|
|||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Table_Injection1(t *testing.T) {
|
|
||||||
input := "[a.b.c]\n z = 9\n[a]\n b.c.t = \"Using dotted keys to add to [a.b.c] after explicitly defining it above is not allowed\"\n \n# see https://github.com/toml-lang/toml/issues/846\n"
|
|
||||||
testgenInvalid(t, input)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Table_Injection2(t *testing.T) {
|
|
||||||
input := "[a.b.c.d]\n z = 9\n[a]\n b.c.d.k.t = \"Using dotted keys to add to [a.b.c.d] after explicitly defining it above is not allowed\"\n \n# see https://github.com/toml-lang/toml/issues/846\n"
|
|
||||||
testgenInvalid(t, input)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTOMLTest_Invalid_Table_Llbrace(t *testing.T) {
|
func TestTOMLTest_Invalid_Table_Llbrace(t *testing.T) {
|
||||||
input := "[ [table]]\n"
|
input := "[ [table]]\n"
|
||||||
testgenInvalid(t, input)
|
testgenInvalid(t, input)
|
||||||
@@ -1071,8 +1236,14 @@ func TestTOMLTest_Valid_Comment_AtEof2(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Valid_Comment_Everywhere(t *testing.T) {
|
func TestTOMLTest_Valid_Comment_Everywhere(t *testing.T) {
|
||||||
input := "# Top comment.\n # Top comment.\n# Top comment.\n\n# [no-extraneous-groups-please]\n\n[group] # Comment\nanswer = 42 # Comment\n# no-extraneous-keys-please = 999\n# Inbetween comment.\nmore = [ # Comment\n # What about multiple # comments?\n # Can you handle it?\n #\n # Evil.\n# Evil.\n 42, 42, # Comments within arrays are fun.\n # What about multiple # comments?\n # Can you handle it?\n #\n # Evil.\n# Evil.\n# ] Did I fool you?\n] # Hopefully not.\n\n# Make sure the space between the datetime and \"#\" isn't lexed.\nd = 1979-05-27T07:32:12-07:00 # c\n"
|
input := "# Top comment.\n # Top comment.\n# Top comment.\n\n# [no-extraneous-groups-please]\n\n[group] # Comment\nanswer = 42 # Comment\n# no-extraneous-keys-please = 999\n# Inbetween comment.\nmore = [ # Comment\n # What about multiple # comments?\n # Can you handle it?\n #\n # Evil.\n# Evil.\n 42, 42, # Comments within arrays are fun.\n # What about multiple # comments?\n # Can you handle it?\n #\n # Evil.\n# Evil.\n# ] Did I fool you?\n] # Hopefully not.\n\n# Make sure the space between the datetime and \"#\" isn't lexed.\ndt = 1979-05-27T07:32:12-07:00 # c\nd = 1979-05-27 # Comment\n"
|
||||||
jsonRef := "{\n \"group\": {\n \"answer\": {\n \"type\": \"integer\",\n \"value\": \"42\"\n },\n \"d\": {\n \"type\": \"datetime\",\n \"value\": \"1979-05-27T07:32:12-07:00\"\n },\n \"more\": [\n {\n \"type\": \"integer\",\n \"value\": \"42\"\n },\n {\n \"type\": \"integer\",\n \"value\": \"42\"\n }\n ]\n }\n}\n"
|
jsonRef := "{\n \"group\": {\n \"answer\": {\n \"type\": \"integer\",\n \"value\": \"42\"\n },\n \"dt\": {\n \"type\": \"datetime\",\n \"value\": \"1979-05-27T07:32:12-07:00\"\n },\n \"d\": {\n \"type\": \"date-local\",\n \"value\": \"1979-05-27\"\n },\n \"more\": [\n {\n \"type\": \"integer\",\n \"value\": \"42\"\n },\n {\n \"type\": \"integer\",\n \"value\": \"42\"\n }\n ]\n }\n}\n"
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Valid_Comment_Noeol(t *testing.T) {
|
||||||
|
input := "# single comment without any eol characters"
|
||||||
|
jsonRef := "{}\n"
|
||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1107,8 +1278,8 @@ func TestTOMLTest_Valid_Datetime_Local(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Valid_Datetime_Milliseconds(t *testing.T) {
|
func TestTOMLTest_Valid_Datetime_Milliseconds(t *testing.T) {
|
||||||
input := "utc1 = 1987-07-05T17:45:56.123456Z\nutc2 = 1987-07-05T17:45:56.6Z\nwita1 = 1987-07-05T17:45:56.123456+08:00\nwita2 = 1987-07-05T17:45:56.6+08:00\n"
|
input := "utc1 = 1987-07-05T17:45:56.1234Z\nutc2 = 1987-07-05T17:45:56.6Z\nwita1 = 1987-07-05T17:45:56.1234+08:00\nwita2 = 1987-07-05T17:45:56.6+08:00\n"
|
||||||
jsonRef := "{\n \"utc1\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.123456Z\"\n },\n \"utc2\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.600000Z\"\n },\n \"wita1\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.123456+08:00\"\n },\n \"wita2\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.600000+08:00\"\n }\n}\n"
|
jsonRef := "{\n \"utc1\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.1234Z\"\n },\n \"utc2\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.6000Z\"\n },\n \"wita1\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.1234+08:00\"\n },\n \"wita2\": {\n \"type\": \"datetime\",\n \"value\": \"1987-07-05T17:45:56.6000+08:00\"\n }\n}\n"
|
||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1370,6 +1541,12 @@ func TestTOMLTest_Valid_String_Empty(t *testing.T) {
|
|||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Valid_String_EscapeEsc(t *testing.T) {
|
||||||
|
input := "esc = \"\\e There is no escape! \\e\"\n"
|
||||||
|
jsonRef := "{\n \"esc\": {\n \"type\": \"string\",\n \"value\": \"\\u001b There is no escape! \\u001b\"\n }\n}\n"
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Valid_String_EscapeTricky(t *testing.T) {
|
func TestTOMLTest_Valid_String_EscapeTricky(t *testing.T) {
|
||||||
input := "end_esc = \"String does not end here\\\" but ends here\\\\\"\nlit_end_esc = 'String ends here\\'\n\nmultiline_unicode = \"\"\"\n\\u00a0\"\"\"\n\nmultiline_not_unicode = \"\"\"\n\\\\u0041\"\"\"\n\nmultiline_end_esc = \"\"\"When will it end? \\\"\"\"...\"\"\\\" should be here\\\"\"\"\"\n\nlit_multiline_not_unicode = '''\n\\u007f'''\n\nlit_multiline_end = '''There is no escape\\'''\n"
|
input := "end_esc = \"String does not end here\\\" but ends here\\\\\"\nlit_end_esc = 'String ends here\\'\n\nmultiline_unicode = \"\"\"\n\\u00a0\"\"\"\n\nmultiline_not_unicode = \"\"\"\n\\\\u0041\"\"\"\n\nmultiline_end_esc = \"\"\"When will it end? \\\"\"\"...\"\"\\\" should be here\\\"\"\"\"\n\nlit_multiline_not_unicode = '''\n\\u007f'''\n\nlit_multiline_end = '''There is no escape\\'''\n"
|
||||||
jsonRef := "{\n \"end_esc\": {\n \"type\": \"string\",\n \"value\": \"String does not end here\\\" but ends here\\\\\"\n },\n \"lit_end_esc\": {\n \"type\": \"string\",\n \"value\": \"String ends here\\\\\"\n },\n \"lit_multiline_end\": {\n \"type\": \"string\",\n \"value\": \"There is no escape\\\\\"\n },\n \"lit_multiline_not_unicode\": {\n \"type\": \"string\",\n \"value\": \"\\\\u007f\"\n },\n \"multiline_end_esc\": {\n \"type\": \"string\",\n \"value\": \"When will it end? \\\"\\\"\\\"...\\\"\\\"\\\" should be here\\\"\"\n },\n \"multiline_not_unicode\": {\n \"type\": \"string\",\n \"value\": \"\\\\u0041\"\n },\n \"multiline_unicode\": {\n \"type\": \"string\",\n \"value\": \"\u00a0\"\n }\n}\n"
|
jsonRef := "{\n \"end_esc\": {\n \"type\": \"string\",\n \"value\": \"String does not end here\\\" but ends here\\\\\"\n },\n \"lit_end_esc\": {\n \"type\": \"string\",\n \"value\": \"String ends here\\\\\"\n },\n \"lit_multiline_end\": {\n \"type\": \"string\",\n \"value\": \"There is no escape\\\\\"\n },\n \"lit_multiline_not_unicode\": {\n \"type\": \"string\",\n \"value\": \"\\\\u007f\"\n },\n \"multiline_end_esc\": {\n \"type\": \"string\",\n \"value\": \"When will it end? \\\"\\\"\\\"...\\\"\\\"\\\" should be here\\\"\"\n },\n \"multiline_not_unicode\": {\n \"type\": \"string\",\n \"value\": \"\\\\u0041\"\n },\n \"multiline_unicode\": {\n \"type\": \"string\",\n \"value\": \"\u00a0\"\n }\n}\n"
|
||||||
@@ -1388,14 +1565,20 @@ func TestTOMLTest_Valid_String_Escapes(t *testing.T) {
|
|||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTOMLTest_Valid_String_MultilineEscapedCrlf(t *testing.T) {
|
||||||
|
input := "# The following line should be an unescaped backslash followed by a Windows\r\n# newline sequence (\"\\r\\n\")\r\n0=\"\"\"\\\r\n\"\"\"\r\n"
|
||||||
|
jsonRef := "{\n \"0\": {\n \"type\": \"string\",\n \"value\": \"\"\n }\n}\n"
|
||||||
|
testgenValid(t, input, jsonRef)
|
||||||
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Valid_String_MultilineQuotes(t *testing.T) {
|
func TestTOMLTest_Valid_String_MultilineQuotes(t *testing.T) {
|
||||||
input := "# Make sure that quotes inside multiline strings are allowed, including right\n# after the opening '''/\"\"\" and before the closing '''/\"\"\"\n\nlit_one = ''''one quote''''\nlit_two = '''''two quotes'''''\nlit_one_space = ''' 'one quote' '''\nlit_two_space = ''' ''two quotes'' '''\n\none = \"\"\"\"one quote\"\"\"\"\ntwo = \"\"\"\"\"two quotes\"\"\"\"\"\none_space = \"\"\" \"one quote\" \"\"\"\ntwo_space = \"\"\" \"\"two quotes\"\" \"\"\"\n\nmismatch1 = \"\"\"aaa'''bbb\"\"\"\nmismatch2 = '''aaa\"\"\"bbb'''\n"
|
input := "# Make sure that quotes inside multiline strings are allowed, including right\n# after the opening '''/\"\"\" and before the closing '''/\"\"\"\n\nlit_one = ''''one quote''''\nlit_two = '''''two quotes'''''\nlit_one_space = ''' 'one quote' '''\nlit_two_space = ''' ''two quotes'' '''\n\none = \"\"\"\"one quote\"\"\"\"\ntwo = \"\"\"\"\"two quotes\"\"\"\"\"\none_space = \"\"\" \"one quote\" \"\"\"\ntwo_space = \"\"\" \"\"two quotes\"\" \"\"\"\n\nmismatch1 = \"\"\"aaa'''bbb\"\"\"\nmismatch2 = '''aaa\"\"\"bbb'''\n\n# Three opening \"\"\", then one escaped \", then two \"\" (allowed), and then three\n# closing \"\"\"\nescaped = \"\"\"lol\\\"\"\"\"\"\"\n"
|
||||||
jsonRef := "{\n \"lit_one\": {\n \"type\": \"string\",\n \"value\": \"'one quote'\"\n },\n \"lit_one_space\": {\n \"type\": \"string\",\n \"value\": \" 'one quote' \"\n },\n \"lit_two\": {\n \"type\": \"string\",\n \"value\": \"''two quotes''\"\n },\n \"lit_two_space\": {\n \"type\": \"string\",\n \"value\": \" ''two quotes'' \"\n },\n \"mismatch1\": {\n \"type\": \"string\",\n \"value\": \"aaa'''bbb\"\n },\n \"mismatch2\": {\n \"type\": \"string\",\n \"value\": \"aaa\\\"\\\"\\\"bbb\"\n },\n \"one\": {\n \"type\": \"string\",\n \"value\": \"\\\"one quote\\\"\"\n },\n \"one_space\": {\n \"type\": \"string\",\n \"value\": \" \\\"one quote\\\" \"\n },\n \"two\": {\n \"type\": \"string\",\n \"value\": \"\\\"\\\"two quotes\\\"\\\"\"\n },\n \"two_space\": {\n \"type\": \"string\",\n \"value\": \" \\\"\\\"two quotes\\\"\\\" \"\n }\n}\n"
|
jsonRef := "{\n \"escaped\": {\n \"type\": \"string\",\n \"value\": \"lol\\\"\\\"\\\"\"\n },\n \"lit_one\": {\n \"type\": \"string\",\n \"value\": \"'one quote'\"\n },\n \"lit_one_space\": {\n \"type\": \"string\",\n \"value\": \" 'one quote' \"\n },\n \"lit_two\": {\n \"type\": \"string\",\n \"value\": \"''two quotes''\"\n },\n \"lit_two_space\": {\n \"type\": \"string\",\n \"value\": \" ''two quotes'' \"\n },\n \"mismatch1\": {\n \"type\": \"string\",\n \"value\": \"aaa'''bbb\"\n },\n \"mismatch2\": {\n \"type\": \"string\",\n \"value\": \"aaa\\\"\\\"\\\"bbb\"\n },\n \"one\": {\n \"type\": \"string\",\n \"value\": \"\\\"one quote\\\"\"\n },\n \"one_space\": {\n \"type\": \"string\",\n \"value\": \" \\\"one quote\\\" \"\n },\n \"two\": {\n \"type\": \"string\",\n \"value\": \"\\\"\\\"two quotes\\\"\\\"\"\n },\n \"two_space\": {\n \"type\": \"string\",\n \"value\": \" \\\"\\\"two quotes\\\"\\\" \"\n }\n}\n"
|
||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Valid_String_Multiline(t *testing.T) {
|
func TestTOMLTest_Valid_String_Multiline(t *testing.T) {
|
||||||
input := "# NOTE: this file includes some literal tab characters.\n\nmultiline_empty_one = \"\"\"\"\"\"\nmultiline_empty_two = \"\"\"\n\"\"\"\nmultiline_empty_three = \"\"\"\\\n \"\"\"\nmultiline_empty_four = \"\"\"\\\n \\\n \\ \n \"\"\"\n\nequivalent_one = \"The quick brown fox jumps over the lazy dog.\"\nequivalent_two = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"\n\nequivalent_three = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"\n\nwhitespace-after-bs = \"\"\"\\\n The quick brown \\\n fox jumps over \\ \n the lazy dog.\\\t\n \"\"\"\n\nno-space = \"\"\"a\\\n b\"\"\"\n\nkeep-ws-before = \"\"\"a \t\\\n b\"\"\"\n\nescape-bs-1 = \"\"\"a \\\\\nb\"\"\"\n\nescape-bs-2 = \"\"\"a \\\\\\\nb\"\"\"\n\nescape-bs-3 = \"\"\"a \\\\\\\\\n b\"\"\"\n"
|
input := "# NOTE: this file includes some literal tab characters.\n\nmultiline_empty_one = \"\"\"\"\"\"\n\n# A newline immediately following the opening delimiter will be trimmed.\nmultiline_empty_two = \"\"\"\n\"\"\"\n\n# \\ at the end of line trims newlines as well; note that last \\ is followed by\n# two spaces, which are ignored.\nmultiline_empty_three = \"\"\"\\\n \"\"\"\nmultiline_empty_four = \"\"\"\\\n \\\n \\ \n \"\"\"\n\nequivalent_one = \"The quick brown fox jumps over the lazy dog.\"\nequivalent_two = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"\n\nequivalent_three = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"\n\nwhitespace-after-bs = \"\"\"\\\n The quick brown \\\n fox jumps over \\ \n the lazy dog.\\\t\n \"\"\"\n\nno-space = \"\"\"a\\\n b\"\"\"\n\n# Has tab character.\nkeep-ws-before = \"\"\"a \t\\\n b\"\"\"\n\nescape-bs-1 = \"\"\"a \\\\\nb\"\"\"\n\nescape-bs-2 = \"\"\"a \\\\\\\nb\"\"\"\n\nescape-bs-3 = \"\"\"a \\\\\\\\\n b\"\"\"\n"
|
||||||
jsonRef := "{\n \"equivalent_one\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n },\n \"equivalent_three\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n },\n \"equivalent_two\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n },\n \"escape-bs-1\": {\n \"type\": \"string\",\n \"value\": \"a \\\\\\nb\"\n },\n \"escape-bs-2\": {\n \"type\": \"string\",\n \"value\": \"a \\\\b\"\n },\n \"escape-bs-3\": {\n \"type\": \"string\",\n \"value\": \"a \\\\\\\\\\n b\"\n },\n \"keep-ws-before\": {\n \"type\": \"string\",\n \"value\": \"a \\tb\"\n },\n \"multiline_empty_four\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"multiline_empty_one\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"multiline_empty_three\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"multiline_empty_two\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"no-space\": {\n \"type\": \"string\",\n \"value\": \"ab\"\n },\n \"whitespace-after-bs\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n }\n}\n"
|
jsonRef := "{\n \"equivalent_one\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n },\n \"equivalent_three\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n },\n \"equivalent_two\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n },\n \"escape-bs-1\": {\n \"type\": \"string\",\n \"value\": \"a \\\\\\nb\"\n },\n \"escape-bs-2\": {\n \"type\": \"string\",\n \"value\": \"a \\\\b\"\n },\n \"escape-bs-3\": {\n \"type\": \"string\",\n \"value\": \"a \\\\\\\\\\n b\"\n },\n \"keep-ws-before\": {\n \"type\": \"string\",\n \"value\": \"a \\tb\"\n },\n \"multiline_empty_four\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"multiline_empty_one\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"multiline_empty_three\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"multiline_empty_two\": {\n \"type\": \"string\",\n \"value\": \"\"\n },\n \"no-space\": {\n \"type\": \"string\",\n \"value\": \"ab\"\n },\n \"whitespace-after-bs\": {\n \"type\": \"string\",\n \"value\": \"The quick brown fox jumps over the lazy dog.\"\n }\n}\n"
|
||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
@@ -1407,7 +1590,7 @@ func TestTOMLTest_Valid_String_Nl(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestTOMLTest_Valid_String_RawMultiline(t *testing.T) {
|
func TestTOMLTest_Valid_String_RawMultiline(t *testing.T) {
|
||||||
input := "oneline = '''This string has a ' quote character.'''\nfirstnl = '''\nThis string has a ' quote character.'''\nmultiline = '''\nThis string\nhas ' a quote character\nand more than\none newline\nin it.'''\n"
|
input := "# Single ' should be allowed.\noneline = '''This string has a ' quote character.'''\n\n# A newline immediately following the opening delimiter will be trimmed.\nfirstnl = '''\nThis string has a ' quote character.'''\n\n# All other whitespace and newline characters remain intact.\nmultiline = '''\nThis string\nhas ' a quote character\nand more than\none newline\nin it.'''\n"
|
||||||
jsonRef := "{\n \"firstnl\": {\n \"type\": \"string\",\n \"value\": \"This string has a ' quote character.\"\n },\n \"multiline\": {\n \"type\": \"string\",\n \"value\": \"This string\\nhas ' a quote character\\nand more than\\none newline\\nin it.\"\n },\n \"oneline\": {\n \"type\": \"string\",\n \"value\": \"This string has a ' quote character.\"\n }\n}\n"
|
jsonRef := "{\n \"firstnl\": {\n \"type\": \"string\",\n \"value\": \"This string has a ' quote character.\"\n },\n \"multiline\": {\n \"type\": \"string\",\n \"value\": \"This string\\nhas ' a quote character\\nand more than\\none newline\\nin it.\"\n },\n \"oneline\": {\n \"type\": \"string\",\n \"value\": \"This string has a ' quote character.\"\n }\n}\n"
|
||||||
testgenValid(t, input, jsonRef)
|
testgenValid(t, input, jsonRef)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,9 +6,9 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
var timeType = reflect.TypeOf(time.Time{})
|
var timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
|
||||||
var textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem()
|
var textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
|
||||||
var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
|
var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||||
var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{})
|
var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}(nil))
|
||||||
var sliceInterfaceType = reflect.TypeOf([]interface{}{})
|
var sliceInterfaceType = reflect.TypeOf([]interface{}(nil))
|
||||||
var stringType = reflect.TypeOf("")
|
var stringType = reflect.TypeOf("")
|
||||||
|
|||||||
+251
-130
@@ -12,16 +12,16 @@ import (
|
|||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||||
"github.com/pelletier/go-toml/v2/internal/tracker"
|
"github.com/pelletier/go-toml/v2/internal/tracker"
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Unmarshal deserializes a TOML document into a Go value.
|
// Unmarshal deserializes a TOML document into a Go value.
|
||||||
//
|
//
|
||||||
// It is a shortcut for Decoder.Decode() with the default options.
|
// It is a shortcut for Decoder.Decode() with the default options.
|
||||||
func Unmarshal(data []byte, v interface{}) error {
|
func Unmarshal(data []byte, v interface{}) error {
|
||||||
p := parser{}
|
p := unstable.Parser{}
|
||||||
p.Reset(data)
|
p.Reset(data)
|
||||||
d := decoder{p: &p}
|
d := decoder{p: &p}
|
||||||
|
|
||||||
@@ -42,25 +42,27 @@ func NewDecoder(r io.Reader) *Decoder {
|
|||||||
return &Decoder{r: r}
|
return &Decoder{r: r}
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetStrict toggles decoding in stict mode.
|
// DisallowUnknownFields causes the Decoder to return an error when the
|
||||||
|
// destination is a struct and the input contains a key that does not match a
|
||||||
|
// non-ignored field.
|
||||||
//
|
//
|
||||||
// When the decoder is in strict mode, it will record fields from the document
|
// In that case, the Decoder returns a StrictMissingError that can be used to
|
||||||
// that could not be set on the target value. In that case, the decoder returns
|
// retrieve the individual errors as well as generate a human readable
|
||||||
// a StrictMissingError that can be used to retrieve the individual errors as
|
// description of the missing fields.
|
||||||
// well as generate a human readable description of the missing fields.
|
func (d *Decoder) DisallowUnknownFields() *Decoder {
|
||||||
func (d *Decoder) SetStrict(strict bool) *Decoder {
|
d.strict = true
|
||||||
d.strict = strict
|
|
||||||
return d
|
return d
|
||||||
}
|
}
|
||||||
|
|
||||||
// Decode the whole content of r into v.
|
// Decode the whole content of r into v.
|
||||||
//
|
//
|
||||||
// By default, values in the document that don't exist in the target Go value
|
// By default, values in the document that don't exist in the target Go value
|
||||||
// are ignored. See Decoder.SetStrict() to change this behavior.
|
// are ignored. See Decoder.DisallowUnknownFields() to change this behavior.
|
||||||
//
|
//
|
||||||
// When a TOML local date, time, or date-time is decoded into a time.Time, its
|
// When a TOML local date, time, or date-time is decoded into a time.Time, its
|
||||||
// value is represented in time.Local timezone. Otherwise the approriate Local*
|
// value is represented in time.Local timezone. Otherwise the appropriate Local*
|
||||||
// structure is used.
|
// structure is used. For time values, precision up to the nanosecond is
|
||||||
|
// supported by truncating extra digits.
|
||||||
//
|
//
|
||||||
// Empty tables decoded in an interface{} create an empty initialized
|
// Empty tables decoded in an interface{} create an empty initialized
|
||||||
// map[string]interface{}.
|
// map[string]interface{}.
|
||||||
@@ -72,29 +74,34 @@ func (d *Decoder) SetStrict(strict bool) *Decoder {
|
|||||||
// bounds for the target type (which includes negative numbers when decoding
|
// bounds for the target type (which includes negative numbers when decoding
|
||||||
// into an unsigned int).
|
// into an unsigned int).
|
||||||
//
|
//
|
||||||
// Type mapping
|
// If an error occurs while decoding the content of the document, this function
|
||||||
|
// returns a toml.DecodeError, providing context about the issue. When using
|
||||||
|
// strict mode and a field is missing, a `toml.StrictMissingError` is
|
||||||
|
// returned. In any other case, this function returns a standard Go error.
|
||||||
|
//
|
||||||
|
// # Type mapping
|
||||||
//
|
//
|
||||||
// List of supported TOML types and their associated accepted Go types:
|
// List of supported TOML types and their associated accepted Go types:
|
||||||
//
|
//
|
||||||
// String -> string
|
// String -> string
|
||||||
// Integer -> uint*, int*, depending on size
|
// Integer -> uint*, int*, depending on size
|
||||||
// Float -> float*, depending on size
|
// Float -> float*, depending on size
|
||||||
// Boolean -> bool
|
// Boolean -> bool
|
||||||
// Offset Date-Time -> time.Time
|
// Offset Date-Time -> time.Time
|
||||||
// Local Date-time -> LocalDateTime, time.Time
|
// Local Date-time -> LocalDateTime, time.Time
|
||||||
// Local Date -> LocalDate, time.Time
|
// Local Date -> LocalDate, time.Time
|
||||||
// Local Time -> LocalTime, time.Time
|
// Local Time -> LocalTime, time.Time
|
||||||
// Array -> slice and array, depending on elements types
|
// Array -> slice and array, depending on elements types
|
||||||
// Table -> map and struct
|
// Table -> map and struct
|
||||||
// Inline Table -> same as Table
|
// Inline Table -> same as Table
|
||||||
// Array of Tables -> same as Array and Table
|
// Array of Tables -> same as Array and Table
|
||||||
func (d *Decoder) Decode(v interface{}) error {
|
func (d *Decoder) Decode(v interface{}) error {
|
||||||
b, err := ioutil.ReadAll(d.r)
|
b, err := ioutil.ReadAll(d.r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("toml: %w", err)
|
return fmt.Errorf("toml: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
p := parser{}
|
p := unstable.Parser{}
|
||||||
p.Reset(b)
|
p.Reset(b)
|
||||||
dec := decoder{
|
dec := decoder{
|
||||||
p: &p,
|
p: &p,
|
||||||
@@ -108,7 +115,7 @@ func (d *Decoder) Decode(v interface{}) error {
|
|||||||
|
|
||||||
type decoder struct {
|
type decoder struct {
|
||||||
// Which parser instance in use for this decoding session.
|
// Which parser instance in use for this decoding session.
|
||||||
p *parser
|
p *unstable.Parser
|
||||||
|
|
||||||
// Flag indicating that the current expression is stashed.
|
// Flag indicating that the current expression is stashed.
|
||||||
// If set to true, calling nextExpr will not actually pull a new expression
|
// If set to true, calling nextExpr will not actually pull a new expression
|
||||||
@@ -116,7 +123,7 @@ type decoder struct {
|
|||||||
stashedExpr bool
|
stashedExpr bool
|
||||||
|
|
||||||
// Skip expressions until a table is found. This is set to true when a
|
// Skip expressions until a table is found. This is set to true when a
|
||||||
// table could not be create (missing field in map), so all KV expressions
|
// table could not be created (missing field in map), so all KV expressions
|
||||||
// need to be skipped.
|
// need to be skipped.
|
||||||
skipUntilTable bool
|
skipUntilTable bool
|
||||||
|
|
||||||
@@ -131,9 +138,26 @@ type decoder struct {
|
|||||||
|
|
||||||
// Strict mode
|
// Strict mode
|
||||||
strict strict
|
strict strict
|
||||||
|
|
||||||
|
// Current context for the error.
|
||||||
|
errorContext *errorContext
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) expr() *ast.Node {
|
type errorContext struct {
|
||||||
|
Struct reflect.Type
|
||||||
|
Field []int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) typeMismatchError(toml string, target reflect.Type) error {
|
||||||
|
if d.errorContext != nil && d.errorContext.Struct != nil {
|
||||||
|
ctx := d.errorContext
|
||||||
|
f := ctx.Struct.FieldByIndex(ctx.Field)
|
||||||
|
return fmt.Errorf("toml: cannot decode TOML %s into struct field %s.%s of type %s", toml, ctx.Struct, f.Name, f.Type)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("toml: cannot decode TOML %s into a Go value of type %s", toml, target)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) expr() *unstable.Node {
|
||||||
return d.p.Expression()
|
return d.p.Expression()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -184,12 +208,12 @@ func (d *decoder) FromParser(v interface{}) error {
|
|||||||
|
|
||||||
err := d.fromParser(r)
|
err := d.fromParser(r)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
return d.strict.Error(d.p.data)
|
return d.strict.Error(d.p.Data())
|
||||||
}
|
}
|
||||||
|
|
||||||
var e *decodeError
|
var e *unstable.ParserError
|
||||||
if errors.As(err, &e) {
|
if errors.As(err, &e) {
|
||||||
return wrapDecodeError(d.p.data, e)
|
return wrapDecodeError(d.p.Data(), e)
|
||||||
}
|
}
|
||||||
|
|
||||||
return err
|
return err
|
||||||
@@ -210,16 +234,16 @@ func (d *decoder) fromParser(root reflect.Value) error {
|
|||||||
Rules for the unmarshal code:
|
Rules for the unmarshal code:
|
||||||
|
|
||||||
- The stack is used to keep track of which values need to be set where.
|
- The stack is used to keep track of which values need to be set where.
|
||||||
- handle* functions <=> switch on a given ast.Kind.
|
- handle* functions <=> switch on a given unstable.Kind.
|
||||||
- unmarshalX* functions need to unmarshal a node of kind X.
|
- unmarshalX* functions need to unmarshal a node of kind X.
|
||||||
- An "object" is either a struct or a map.
|
- An "object" is either a struct or a map.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) error {
|
||||||
var x reflect.Value
|
var x reflect.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if !(d.skipUntilTable && expr.Kind == ast.KeyValue) {
|
if !(d.skipUntilTable && expr.Kind == unstable.KeyValue) {
|
||||||
err = d.seen.CheckExpression(expr)
|
err = d.seen.CheckExpression(expr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -227,16 +251,16 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch expr.Kind {
|
switch expr.Kind {
|
||||||
case ast.KeyValue:
|
case unstable.KeyValue:
|
||||||
if d.skipUntilTable {
|
if d.skipUntilTable {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
x, err = d.handleKeyValue(expr, v)
|
x, err = d.handleKeyValue(expr, v)
|
||||||
case ast.Table:
|
case unstable.Table:
|
||||||
d.skipUntilTable = false
|
d.skipUntilTable = false
|
||||||
d.strict.EnterTable(expr)
|
d.strict.EnterTable(expr)
|
||||||
x, err = d.handleTable(expr.Key(), v)
|
x, err = d.handleTable(expr.Key(), v)
|
||||||
case ast.ArrayTable:
|
case unstable.ArrayTable:
|
||||||
d.skipUntilTable = false
|
d.skipUntilTable = false
|
||||||
d.strict.EnterArrayTable(expr)
|
d.strict.EnterArrayTable(expr)
|
||||||
x, err = d.handleArrayTable(expr.Key(), v)
|
x, err = d.handleArrayTable(expr.Key(), v)
|
||||||
@@ -245,7 +269,7 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if d.skipUntilTable {
|
if d.skipUntilTable {
|
||||||
if expr.Kind == ast.Table || expr.Kind == ast.ArrayTable {
|
if expr.Kind == unstable.Table || expr.Kind == unstable.ArrayTable {
|
||||||
d.strict.MissingTable(expr)
|
d.strict.MissingTable(expr)
|
||||||
}
|
}
|
||||||
} else if err == nil && x.IsValid() {
|
} else if err == nil && x.IsValid() {
|
||||||
@@ -255,14 +279,14 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleArrayTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
if key.Next() {
|
if key.Next() {
|
||||||
return d.handleArrayTablePart(key, v)
|
return d.handleArrayTablePart(key, v)
|
||||||
}
|
}
|
||||||
return d.handleKeyValues(v)
|
return d.handleKeyValues(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
elem := v.Elem()
|
elem := v.Elem()
|
||||||
@@ -298,10 +322,12 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||||||
return v, nil
|
return v, nil
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
elemType := v.Type().Elem()
|
elemType := v.Type().Elem()
|
||||||
|
var elem reflect.Value
|
||||||
if elemType.Kind() == reflect.Interface {
|
if elemType.Kind() == reflect.Interface {
|
||||||
elemType = mapStringInterfaceType
|
elem = makeMapStringInterface()
|
||||||
|
} else {
|
||||||
|
elem = reflect.New(elemType).Elem()
|
||||||
}
|
}
|
||||||
elem := reflect.New(elemType).Elem()
|
|
||||||
elem2, err := d.handleArrayTable(key, elem)
|
elem2, err := d.handleArrayTable(key, elem)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
@@ -313,21 +339,21 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||||||
case reflect.Array:
|
case reflect.Array:
|
||||||
idx := d.arrayIndex(true, v)
|
idx := d.arrayIndex(true, v)
|
||||||
if idx >= v.Len() {
|
if idx >= v.Len() {
|
||||||
return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx)
|
return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx)
|
||||||
}
|
}
|
||||||
elem := v.Index(idx)
|
elem := v.Index(idx)
|
||||||
_, err := d.handleArrayTable(key, elem)
|
_, err := d.handleArrayTable(key, elem)
|
||||||
return v, err
|
return v, err
|
||||||
|
default:
|
||||||
|
return reflect.Value{}, d.typeMismatchError("array table", v.Type())
|
||||||
}
|
}
|
||||||
|
|
||||||
return d.handleArrayTable(key, v)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// When parsing an array table expression, each part of the key needs to be
|
// When parsing an array table expression, each part of the key needs to be
|
||||||
// evaluated like a normal key, but if it returns a collection, it also needs to
|
// evaluated like a normal key, but if it returns a collection, it also needs to
|
||||||
// point to the last element of the collection. Unless it is the last part of
|
// point to the last element of the collection. Unless it is the last part of
|
||||||
// the key, then it needs to create a new element at the end.
|
// the key, then it needs to create a new element at the end.
|
||||||
func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTableCollection(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
if key.IsLast() {
|
if key.IsLast() {
|
||||||
return d.handleArrayTableCollectionLast(key, v)
|
return d.handleArrayTableCollectionLast(key, v)
|
||||||
}
|
}
|
||||||
@@ -345,7 +371,9 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
}
|
}
|
||||||
v.Elem().Set(elem)
|
if elem.IsValid() {
|
||||||
|
v.Elem().Set(elem)
|
||||||
|
}
|
||||||
|
|
||||||
return v, nil
|
return v, nil
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
@@ -362,7 +390,7 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
|
|||||||
case reflect.Array:
|
case reflect.Array:
|
||||||
idx := d.arrayIndex(false, v)
|
idx := d.arrayIndex(false, v)
|
||||||
if idx >= v.Len() {
|
if idx >= v.Len() {
|
||||||
return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx)
|
return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx)
|
||||||
}
|
}
|
||||||
elem := v.Index(idx)
|
elem := v.Index(idx)
|
||||||
_, err := d.handleArrayTable(key, elem)
|
_, err := d.handleArrayTable(key, elem)
|
||||||
@@ -372,7 +400,7 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
|
|||||||
return d.handleArrayTable(key, v)
|
return d.handleArrayTable(key, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
|
func (d *decoder) handleKeyPart(key unstable.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
|
||||||
var rv reflect.Value
|
var rv reflect.Value
|
||||||
|
|
||||||
// First, dispatch over v to make sure it is a valid object.
|
// First, dispatch over v to make sure it is a valid object.
|
||||||
@@ -386,9 +414,13 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
elem = v.Elem()
|
elem = v.Elem()
|
||||||
return d.handleKeyPart(key, elem, nextFn, makeFn)
|
return d.handleKeyPart(key, elem, nextFn, makeFn)
|
||||||
case reflect.Map:
|
case reflect.Map:
|
||||||
|
vt := v.Type()
|
||||||
|
|
||||||
// Create the key for the map element. For now assume it's a string.
|
// Create the key for the map element. Convert to key type.
|
||||||
mk := reflect.ValueOf(string(key.Node().Data))
|
mk, err := d.keyFromData(vt.Key(), key.Node().Data)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
|
||||||
// If the map does not exist, create it.
|
// If the map does not exist, create it.
|
||||||
if v.IsNil() {
|
if v.IsNil() {
|
||||||
@@ -405,7 +437,6 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
// map[string]interface{} or a []interface{} depending on whether
|
// map[string]interface{} or a []interface{} depending on whether
|
||||||
// this is the last part of the array table key.
|
// this is the last part of the array table key.
|
||||||
|
|
||||||
vt := v.Type()
|
|
||||||
t := vt.Elem()
|
t := vt.Elem()
|
||||||
if t.Kind() == reflect.Interface {
|
if t.Kind() == reflect.Interface {
|
||||||
mv = makeFn()
|
mv = makeFn()
|
||||||
@@ -442,12 +473,20 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
v.SetMapIndex(mk, mv)
|
v.SetMapIndex(mk, mv)
|
||||||
}
|
}
|
||||||
case reflect.Struct:
|
case reflect.Struct:
|
||||||
f, found := structField(v, string(key.Node().Data))
|
path, found := structFieldPath(v, string(key.Node().Data))
|
||||||
if !found {
|
if !found {
|
||||||
d.skipUntilTable = true
|
d.skipUntilTable = true
|
||||||
return reflect.Value{}, nil
|
return reflect.Value{}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if d.errorContext == nil {
|
||||||
|
d.errorContext = new(errorContext)
|
||||||
|
}
|
||||||
|
t := v.Type()
|
||||||
|
d.errorContext.Struct = t
|
||||||
|
d.errorContext.Field = path
|
||||||
|
|
||||||
|
f := fieldByIndex(v, path)
|
||||||
x, err := nextFn(key, f)
|
x, err := nextFn(key, f)
|
||||||
if err != nil || d.skipUntilTable {
|
if err != nil || d.skipUntilTable {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
@@ -455,6 +494,8 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
if x.IsValid() {
|
if x.IsValid() {
|
||||||
f.Set(x)
|
f.Set(x)
|
||||||
}
|
}
|
||||||
|
d.errorContext.Field = nil
|
||||||
|
d.errorContext.Struct = nil
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
if v.Elem().IsValid() {
|
if v.Elem().IsValid() {
|
||||||
v = v.Elem()
|
v = v.Elem()
|
||||||
@@ -480,7 +521,7 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
// HandleArrayTablePart navigates the Go structure v using the key v. It is
|
// HandleArrayTablePart navigates the Go structure v using the key v. It is
|
||||||
// only used for the prefix (non-last) parts of an array-table. When
|
// only used for the prefix (non-last) parts of an array-table. When
|
||||||
// encountering a collection, it should go to the last element.
|
// encountering a collection, it should go to the last element.
|
||||||
func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
var makeFn valueMakerFn
|
var makeFn valueMakerFn
|
||||||
if key.IsLast() {
|
if key.IsLast() {
|
||||||
makeFn = makeSliceInterface
|
makeFn = makeSliceInterface
|
||||||
@@ -492,10 +533,10 @@ func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (refle
|
|||||||
|
|
||||||
// HandleTable returns a reference when it has checked the next expression but
|
// HandleTable returns a reference when it has checked the next expression but
|
||||||
// cannot handle it.
|
// cannot handle it.
|
||||||
func (d *decoder) handleTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
if v.Kind() == reflect.Slice {
|
if v.Kind() == reflect.Slice {
|
||||||
if v.Len() == 0 {
|
if v.Len() == 0 {
|
||||||
return reflect.Value{}, newDecodeError(key.Node().Data, "cannot store a table in a slice")
|
return reflect.Value{}, unstable.NewParserError(key.Node().Data, "cannot store a table in a slice")
|
||||||
}
|
}
|
||||||
elem := v.Index(v.Len() - 1)
|
elem := v.Index(v.Len() - 1)
|
||||||
x, err := d.handleTable(key, elem)
|
x, err := d.handleTable(key, elem)
|
||||||
@@ -522,7 +563,7 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
|||||||
var rv reflect.Value
|
var rv reflect.Value
|
||||||
for d.nextExpr() {
|
for d.nextExpr() {
|
||||||
expr := d.expr()
|
expr := d.expr()
|
||||||
if expr.Kind != ast.KeyValue {
|
if expr.Kind != unstable.KeyValue {
|
||||||
// Stash the expression so that fromParser can just loop and use
|
// Stash the expression so that fromParser can just loop and use
|
||||||
// the right handler.
|
// the right handler.
|
||||||
// We could just recurse ourselves here, but at least this gives a
|
// We could just recurse ourselves here, but at least this gives a
|
||||||
@@ -549,7 +590,7 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type (
|
type (
|
||||||
handlerFn func(key ast.Iterator, v reflect.Value) (reflect.Value, error)
|
handlerFn func(key unstable.Iterator, v reflect.Value) (reflect.Value, error)
|
||||||
valueMakerFn func() reflect.Value
|
valueMakerFn func() reflect.Value
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -561,11 +602,11 @@ func makeSliceInterface() reflect.Value {
|
|||||||
return reflect.MakeSlice(sliceInterfaceType, 0, 16)
|
return reflect.MakeSlice(sliceInterfaceType, 0, 16)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface)
|
return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, error) {
|
func (d *decoder) tryTextUnmarshaler(node *unstable.Node, v reflect.Value) (bool, error) {
|
||||||
// Special case for time, because we allow to unmarshal to it from
|
// Special case for time, because we allow to unmarshal to it from
|
||||||
// different kind of AST nodes.
|
// different kind of AST nodes.
|
||||||
if v.Type() == timeType {
|
if v.Type() == timeType {
|
||||||
@@ -575,7 +616,7 @@ func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, err
|
|||||||
if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) {
|
if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) {
|
||||||
err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data)
|
err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, newDecodeError(d.p.Raw(node.Raw), "error calling UnmarshalText: %w", err)
|
return false, unstable.NewParserError(d.p.Raw(node.Raw), "%w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
@@ -584,7 +625,7 @@ func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, err
|
|||||||
return false, nil
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleValue(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) handleValue(value *unstable.Node, v reflect.Value) error {
|
||||||
for v.Kind() == reflect.Ptr {
|
for v.Kind() == reflect.Ptr {
|
||||||
v = initAndDereferencePointer(v)
|
v = initAndDereferencePointer(v)
|
||||||
}
|
}
|
||||||
@@ -595,32 +636,32 @@ func (d *decoder) handleValue(value *ast.Node, v reflect.Value) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch value.Kind {
|
switch value.Kind {
|
||||||
case ast.String:
|
case unstable.String:
|
||||||
return d.unmarshalString(value, v)
|
return d.unmarshalString(value, v)
|
||||||
case ast.Integer:
|
case unstable.Integer:
|
||||||
return d.unmarshalInteger(value, v)
|
return d.unmarshalInteger(value, v)
|
||||||
case ast.Float:
|
case unstable.Float:
|
||||||
return d.unmarshalFloat(value, v)
|
return d.unmarshalFloat(value, v)
|
||||||
case ast.Bool:
|
case unstable.Bool:
|
||||||
return d.unmarshalBool(value, v)
|
return d.unmarshalBool(value, v)
|
||||||
case ast.DateTime:
|
case unstable.DateTime:
|
||||||
return d.unmarshalDateTime(value, v)
|
return d.unmarshalDateTime(value, v)
|
||||||
case ast.LocalDate:
|
case unstable.LocalDate:
|
||||||
return d.unmarshalLocalDate(value, v)
|
return d.unmarshalLocalDate(value, v)
|
||||||
case ast.LocalTime:
|
case unstable.LocalTime:
|
||||||
return d.unmarshalLocalTime(value, v)
|
return d.unmarshalLocalTime(value, v)
|
||||||
case ast.LocalDateTime:
|
case unstable.LocalDateTime:
|
||||||
return d.unmarshalLocalDateTime(value, v)
|
return d.unmarshalLocalDateTime(value, v)
|
||||||
case ast.InlineTable:
|
case unstable.InlineTable:
|
||||||
return d.unmarshalInlineTable(value, v)
|
return d.unmarshalInlineTable(value, v)
|
||||||
case ast.Array:
|
case unstable.Array:
|
||||||
return d.unmarshalArray(value, v)
|
return d.unmarshalArray(value, v)
|
||||||
default:
|
default:
|
||||||
panic(fmt.Errorf("handleValue not implemented for %s", value.Kind))
|
panic(fmt.Errorf("handleValue not implemented for %s", value.Kind))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalArray(array *unstable.Node, v reflect.Value) error {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
if v.IsNil() {
|
if v.IsNil() {
|
||||||
@@ -655,7 +696,7 @@ func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
|||||||
default:
|
default:
|
||||||
// TODO: use newDecodeError, but first the parser needs to fill
|
// TODO: use newDecodeError, but first the parser needs to fill
|
||||||
// array.Data.
|
// array.Data.
|
||||||
return fmt.Errorf("toml: cannot store array in Go type %s", v.Kind())
|
return d.typeMismatchError("array", v.Type())
|
||||||
}
|
}
|
||||||
|
|
||||||
elemType := v.Type().Elem()
|
elemType := v.Type().Elem()
|
||||||
@@ -691,7 +732,7 @@ func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalInlineTable(itable *unstable.Node, v reflect.Value) error {
|
||||||
// Make sure v is an initialized object.
|
// Make sure v is an initialized object.
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Map:
|
case reflect.Map:
|
||||||
@@ -708,7 +749,7 @@ func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error
|
|||||||
}
|
}
|
||||||
return d.unmarshalInlineTable(itable, elem)
|
return d.unmarshalInlineTable(itable, elem)
|
||||||
default:
|
default:
|
||||||
return newDecodeError(itable.Data, "cannot store inline table in Go type %s", v.Kind())
|
return unstable.NewParserError(d.p.Raw(itable.Raw), "cannot store inline table in Go type %s", v.Kind())
|
||||||
}
|
}
|
||||||
|
|
||||||
it := itable.Children()
|
it := itable.Children()
|
||||||
@@ -727,7 +768,7 @@ func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalDateTime(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalDateTime(value *unstable.Node, v reflect.Value) error {
|
||||||
dt, err := parseDateTime(value.Data)
|
dt, err := parseDateTime(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -737,7 +778,7 @@ func (d *decoder) unmarshalDateTime(value *ast.Node, v reflect.Value) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalLocalDate(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalLocalDate(value *unstable.Node, v reflect.Value) error {
|
||||||
ld, err := parseLocalDate(value.Data)
|
ld, err := parseLocalDate(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -754,28 +795,28 @@ func (d *decoder) unmarshalLocalDate(value *ast.Node, v reflect.Value) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalLocalTime(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalLocalTime(value *unstable.Node, v reflect.Value) error {
|
||||||
lt, rest, err := parseLocalTime(value.Data)
|
lt, rest, err := parseLocalTime(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(rest) > 0 {
|
if len(rest) > 0 {
|
||||||
return newDecodeError(rest, "extra characters at the end of a local time")
|
return unstable.NewParserError(rest, "extra characters at the end of a local time")
|
||||||
}
|
}
|
||||||
|
|
||||||
v.Set(reflect.ValueOf(lt))
|
v.Set(reflect.ValueOf(lt))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalLocalDateTime(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalLocalDateTime(value *unstable.Node, v reflect.Value) error {
|
||||||
ldt, rest, err := parseLocalDateTime(value.Data)
|
ldt, rest, err := parseLocalDateTime(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(rest) > 0 {
|
if len(rest) > 0 {
|
||||||
return newDecodeError(rest, "extra characters at the end of a local date time")
|
return unstable.NewParserError(rest, "extra characters at the end of a local date time")
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.Type() == timeType {
|
if v.Type() == timeType {
|
||||||
@@ -790,7 +831,7 @@ func (d *decoder) unmarshalLocalDateTime(value *ast.Node, v reflect.Value) error
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalBool(value *unstable.Node, v reflect.Value) error {
|
||||||
b := value.Data[0] == 't'
|
b := value.Data[0] == 't'
|
||||||
|
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
@@ -799,13 +840,13 @@ func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error {
|
|||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v.Set(reflect.ValueOf(b))
|
v.Set(reflect.ValueOf(b))
|
||||||
default:
|
default:
|
||||||
return newDecodeError(value.Data, "cannot assign boolean to a %t", b)
|
return unstable.NewParserError(value.Data, "cannot assign boolean to a %t", b)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalFloat(value *unstable.Node, v reflect.Value) error {
|
||||||
f, err := parseFloat(value.Data)
|
f, err := parseFloat(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -816,23 +857,43 @@ func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
|
|||||||
v.SetFloat(f)
|
v.SetFloat(f)
|
||||||
case reflect.Float32:
|
case reflect.Float32:
|
||||||
if f > math.MaxFloat32 {
|
if f > math.MaxFloat32 {
|
||||||
return newDecodeError(value.Data, "number %f does not fit in a float32", f)
|
return unstable.NewParserError(value.Data, "number %f does not fit in a float32", f)
|
||||||
}
|
}
|
||||||
v.SetFloat(f)
|
v.SetFloat(f)
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v.Set(reflect.ValueOf(f))
|
v.Set(reflect.ValueOf(f))
|
||||||
default:
|
default:
|
||||||
return newDecodeError(value.Data, "float cannot be assigned to %s", v.Kind())
|
return unstable.NewParserError(value.Data, "float cannot be assigned to %s", v.Kind())
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
const (
|
||||||
const (
|
maxInt = int64(^uint(0) >> 1)
|
||||||
maxInt = int64(^uint(0) >> 1)
|
minInt = -maxInt - 1
|
||||||
minInt = -maxInt - 1
|
)
|
||||||
)
|
|
||||||
|
// Maximum value of uint for decoding. Currently the decoder parses the integer
|
||||||
|
// into an int64. As a result, on architectures where uint is 64 bits, the
|
||||||
|
// effective maximum uint we can decode is the maximum of int64. On
|
||||||
|
// architectures where uint is 32 bits, the maximum value we can decode is
|
||||||
|
// lower: the maximum of uint32. I didn't find a way to figure out this value at
|
||||||
|
// compile time, so it is computed during initialization.
|
||||||
|
var maxUint int64 = math.MaxInt64
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
m := uint64(^uint(0))
|
||||||
|
if m < uint64(maxUint) {
|
||||||
|
maxUint = int64(m)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) unmarshalInteger(value *unstable.Node, v reflect.Value) error {
|
||||||
|
kind := v.Kind()
|
||||||
|
if kind == reflect.Float32 || kind == reflect.Float64 {
|
||||||
|
return d.unmarshalFloat(value, v)
|
||||||
|
}
|
||||||
|
|
||||||
i, err := parseInteger(value.Data)
|
i, err := parseInteger(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -841,7 +902,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
|
|
||||||
var r reflect.Value
|
var r reflect.Value
|
||||||
|
|
||||||
switch v.Kind() {
|
switch kind {
|
||||||
case reflect.Int64:
|
case reflect.Int64:
|
||||||
v.SetInt(i)
|
v.SetInt(i)
|
||||||
return nil
|
return nil
|
||||||
@@ -894,7 +955,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
|
|
||||||
r = reflect.ValueOf(uint8(i))
|
r = reflect.ValueOf(uint8(i))
|
||||||
case reflect.Uint:
|
case reflect.Uint:
|
||||||
if i < 0 {
|
if i < 0 || i > maxUint {
|
||||||
return fmt.Errorf("toml: negative number %d does not fit in an uint", i)
|
return fmt.Errorf("toml: negative number %d does not fit in an uint", i)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -902,7 +963,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
r = reflect.ValueOf(i)
|
r = reflect.ValueOf(i)
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("toml: cannot store TOML integer into a Go %s", v.Kind())
|
return d.typeMismatchError("integer", v.Type())
|
||||||
}
|
}
|
||||||
|
|
||||||
if !r.Type().AssignableTo(v.Type()) {
|
if !r.Type().AssignableTo(v.Type()) {
|
||||||
@@ -914,20 +975,20 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalString(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalString(value *unstable.Node, v reflect.Value) error {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.String:
|
case reflect.String:
|
||||||
v.SetString(string(value.Data))
|
v.SetString(string(value.Data))
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v.Set(reflect.ValueOf(string(value.Data)))
|
v.Set(reflect.ValueOf(string(value.Data)))
|
||||||
default:
|
default:
|
||||||
return newDecodeError(d.p.Raw(value.Raw), "cannot store TOML string into a Go %s", v.Kind())
|
return unstable.NewParserError(d.p.Raw(value.Raw), "cannot store TOML string into a Go %s", v.Kind())
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleKeyValue(expr *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||||
d.strict.EnterKeyValue(expr)
|
d.strict.EnterKeyValue(expr)
|
||||||
|
|
||||||
v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v)
|
v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v)
|
||||||
@@ -941,7 +1002,7 @@ func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value
|
|||||||
return v, err
|
return v, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleKeyValueInner(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||||
if key.Next() {
|
if key.Next() {
|
||||||
// Still scoping the key
|
// Still scoping the key
|
||||||
return d.handleKeyValuePart(key, value, v)
|
return d.handleKeyValuePart(key, value, v)
|
||||||
@@ -951,7 +1012,32 @@ func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v refle
|
|||||||
return reflect.Value{}, d.handleValue(value, v)
|
return reflect.Value{}, d.handleValue(value, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) keyFromData(keyType reflect.Type, data []byte) (reflect.Value, error) {
|
||||||
|
switch {
|
||||||
|
case stringType.AssignableTo(keyType):
|
||||||
|
return reflect.ValueOf(string(data)), nil
|
||||||
|
|
||||||
|
case stringType.ConvertibleTo(keyType):
|
||||||
|
return reflect.ValueOf(string(data)).Convert(keyType), nil
|
||||||
|
|
||||||
|
case keyType.Implements(textUnmarshalerType):
|
||||||
|
mk := reflect.New(keyType.Elem())
|
||||||
|
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return mk, nil
|
||||||
|
|
||||||
|
case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
|
||||||
|
mk := reflect.New(keyType)
|
||||||
|
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return mk.Elem(), nil
|
||||||
|
}
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", stringType, keyType)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) handleKeyValuePart(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||||
// contains the replacement for v
|
// contains the replacement for v
|
||||||
var rv reflect.Value
|
var rv reflect.Value
|
||||||
|
|
||||||
@@ -961,16 +1047,9 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
case reflect.Map:
|
case reflect.Map:
|
||||||
vt := v.Type()
|
vt := v.Type()
|
||||||
|
|
||||||
mk := reflect.ValueOf(string(key.Node().Data))
|
mk, err := d.keyFromData(vt.Key(), key.Node().Data)
|
||||||
mkt := stringType
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
keyType := vt.Key()
|
|
||||||
if !mkt.AssignableTo(keyType) {
|
|
||||||
if !mkt.ConvertibleTo(keyType) {
|
|
||||||
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", mkt, keyType)
|
|
||||||
}
|
|
||||||
|
|
||||||
mk = mk.Convert(keyType)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the map does not exist, create it.
|
// If the map does not exist, create it.
|
||||||
@@ -981,15 +1060,9 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
|
|
||||||
mv := v.MapIndex(mk)
|
mv := v.MapIndex(mk)
|
||||||
set := false
|
set := false
|
||||||
if !mv.IsValid() {
|
if !mv.IsValid() || key.IsLast() {
|
||||||
set = true
|
set = true
|
||||||
mv = reflect.New(v.Type().Elem()).Elem()
|
mv = reflect.New(v.Type().Elem()).Elem()
|
||||||
} else {
|
|
||||||
if key.IsLast() {
|
|
||||||
var x interface{}
|
|
||||||
mv = reflect.ValueOf(&x).Elem()
|
|
||||||
set = true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
nv, err := d.handleKeyValueInner(key, value, mv)
|
nv, err := d.handleKeyValueInner(key, value, mv)
|
||||||
@@ -1005,12 +1078,33 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
v.SetMapIndex(mk, mv)
|
v.SetMapIndex(mk, mv)
|
||||||
}
|
}
|
||||||
case reflect.Struct:
|
case reflect.Struct:
|
||||||
f, found := structField(v, string(key.Node().Data))
|
path, found := structFieldPath(v, string(key.Node().Data))
|
||||||
if !found {
|
if !found {
|
||||||
d.skipUntilTable = true
|
d.skipUntilTable = true
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if d.errorContext == nil {
|
||||||
|
d.errorContext = new(errorContext)
|
||||||
|
}
|
||||||
|
t := v.Type()
|
||||||
|
d.errorContext.Struct = t
|
||||||
|
d.errorContext.Field = path
|
||||||
|
|
||||||
|
f := fieldByIndex(v, path)
|
||||||
|
|
||||||
|
if !f.CanSet() {
|
||||||
|
// If the field is not settable, need to take a slower path and make a copy of
|
||||||
|
// the struct itself to a new location.
|
||||||
|
nvp := reflect.New(v.Type())
|
||||||
|
nvp.Elem().Set(v)
|
||||||
|
v = nvp.Elem()
|
||||||
|
_, err := d.handleKeyValuePart(key, value, v)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
return nvp.Elem(), nil
|
||||||
|
}
|
||||||
x, err := d.handleKeyValueInner(key, value, f)
|
x, err := d.handleKeyValueInner(key, value, f)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
@@ -1019,6 +1113,8 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
if x.IsValid() {
|
if x.IsValid() {
|
||||||
f.Set(x)
|
f.Set(x)
|
||||||
}
|
}
|
||||||
|
d.errorContext.Struct = nil
|
||||||
|
d.errorContext.Field = nil
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v = v.Elem()
|
v = v.Elem()
|
||||||
|
|
||||||
@@ -1072,11 +1168,26 @@ func initAndDereferencePointer(v reflect.Value) reflect.Value {
|
|||||||
return elem
|
return elem
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
|
||||||
|
func fieldByIndex(v reflect.Value, path []int) reflect.Value {
|
||||||
|
for i, x := range path {
|
||||||
|
v = v.Field(x)
|
||||||
|
|
||||||
|
if i < len(path)-1 && v.Kind() == reflect.Ptr {
|
||||||
|
if v.IsNil() {
|
||||||
|
v.Set(reflect.New(v.Type().Elem()))
|
||||||
|
}
|
||||||
|
v = v.Elem()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
type fieldPathsMap = map[string][]int
|
type fieldPathsMap = map[string][]int
|
||||||
|
|
||||||
var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap
|
var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap
|
||||||
|
|
||||||
func structField(v reflect.Value, name string) (reflect.Value, bool) {
|
func structFieldPath(v reflect.Value, name string) ([]int, bool) {
|
||||||
t := v.Type()
|
t := v.Type()
|
||||||
|
|
||||||
cache, _ := globalFieldPathsCache.Load().(map[danger.TypeID]fieldPathsMap)
|
cache, _ := globalFieldPathsCache.Load().(map[danger.TypeID]fieldPathsMap)
|
||||||
@@ -1103,12 +1214,7 @@ func structField(v reflect.Value, name string) (reflect.Value, bool) {
|
|||||||
if !ok {
|
if !ok {
|
||||||
path, ok = fieldPaths[strings.ToLower(name)]
|
path, ok = fieldPaths[strings.ToLower(name)]
|
||||||
}
|
}
|
||||||
|
return path, ok
|
||||||
if !ok {
|
|
||||||
return reflect.Value{}, false
|
|
||||||
}
|
|
||||||
|
|
||||||
return v.FieldByIndex(path), true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func forEachField(t reflect.Type, path []int, do func(name string, path []int)) {
|
func forEachField(t reflect.Type, path []int, do func(name string, path []int)) {
|
||||||
@@ -1124,13 +1230,28 @@ func forEachField(t reflect.Type, path []int, do func(name string, path []int))
|
|||||||
fieldPath := append(path, i)
|
fieldPath := append(path, i)
|
||||||
fieldPath = fieldPath[:len(fieldPath):len(fieldPath)]
|
fieldPath = fieldPath[:len(fieldPath):len(fieldPath)]
|
||||||
|
|
||||||
if f.Anonymous {
|
name := f.Tag.Get("toml")
|
||||||
forEachField(f.Type, fieldPath, do)
|
if name == "-" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
name, ok := f.Tag.Lookup("toml")
|
if i := strings.IndexByte(name, ','); i >= 0 {
|
||||||
if !ok {
|
name = name[:i]
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.Anonymous && name == "" {
|
||||||
|
t2 := f.Type
|
||||||
|
if t2.Kind() == reflect.Ptr {
|
||||||
|
t2 = t2.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
if t2.Kind() == reflect.Struct {
|
||||||
|
forEachField(t2, fieldPath, do)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if name == "" {
|
||||||
name = f.Name
|
name = f.Name
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
+663
-43
@@ -16,7 +16,28 @@ import (
|
|||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func ExampleDecoder_SetStrict() {
|
type unmarshalTextKey struct {
|
||||||
|
A string
|
||||||
|
B string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (k *unmarshalTextKey) UnmarshalText(text []byte) error {
|
||||||
|
parts := strings.Split(string(text), "-")
|
||||||
|
if len(parts) != 2 {
|
||||||
|
return fmt.Errorf("invalid text key: %s", text)
|
||||||
|
}
|
||||||
|
k.A = parts[0]
|
||||||
|
k.B = parts[1]
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type unmarshalBadTextKey struct{}
|
||||||
|
|
||||||
|
func (k *unmarshalBadTextKey) UnmarshalText(text []byte) error {
|
||||||
|
return fmt.Errorf("error")
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleDecoder_DisallowUnknownFields() {
|
||||||
type S struct {
|
type S struct {
|
||||||
Key1 string
|
Key1 string
|
||||||
Key3 string
|
Key3 string
|
||||||
@@ -28,7 +49,7 @@ key3 = "value3"
|
|||||||
`
|
`
|
||||||
r := strings.NewReader(doc)
|
r := strings.NewReader(doc)
|
||||||
d := toml.NewDecoder(r)
|
d := toml.NewDecoder(r)
|
||||||
d.SetStrict(true)
|
d.DisallowUnknownFields()
|
||||||
s := S{}
|
s := S{}
|
||||||
err := d.Decode(&s)
|
err := d.Decode(&s)
|
||||||
|
|
||||||
@@ -69,7 +90,6 @@ func ExampleUnmarshal() {
|
|||||||
fmt.Println("version:", cfg.Version)
|
fmt.Println("version:", cfg.Version)
|
||||||
fmt.Println("name:", cfg.Name)
|
fmt.Println("name:", cfg.Name)
|
||||||
fmt.Println("tags:", cfg.Tags)
|
fmt.Println("tags:", cfg.Tags)
|
||||||
|
|
||||||
// Output:
|
// Output:
|
||||||
// version: 2
|
// version: 2
|
||||||
// name: go-toml
|
// name: go-toml
|
||||||
@@ -279,6 +299,11 @@ func TestUnmarshal_Floats(t *testing.T) {
|
|||||||
input: `1.0_e2`,
|
input: `1.0_e2`,
|
||||||
err: true,
|
err: true,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "leading zero in positive float",
|
||||||
|
input: `+0_0.0`,
|
||||||
|
err: true,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
type doc struct {
|
type doc struct {
|
||||||
@@ -310,6 +335,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
target interface{}
|
target interface{}
|
||||||
expected interface{}
|
expected interface{}
|
||||||
err bool
|
err bool
|
||||||
|
assert func(t *testing.T, test test)
|
||||||
}
|
}
|
||||||
examples := []struct {
|
examples := []struct {
|
||||||
skip bool
|
skip bool
|
||||||
@@ -345,6 +371,96 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "kv text key",
|
||||||
|
input: `a-1 = "foo"`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc = map[unmarshalTextKey]string
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
expected: &doc{{A: "a", B: "1"}: "foo"},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "table text key",
|
||||||
|
input: `["a-1"]
|
||||||
|
foo = "bar"`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc = map[unmarshalTextKey]map[string]string
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
expected: &doc{{A: "a", B: "1"}: map[string]string{"foo": "bar"}},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "kv ptr text key",
|
||||||
|
input: `a-1 = "foo"`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc = map[*unmarshalTextKey]string
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
expected: &doc{{A: "a", B: "1"}: "foo"},
|
||||||
|
assert: func(t *testing.T, test test) {
|
||||||
|
// Despite the documentation:
|
||||||
|
// Pointer variable equality is determined based on the equality of the
|
||||||
|
// referenced values (as opposed to the memory addresses).
|
||||||
|
// assert.Equal does not work properly with maps with pointer keys
|
||||||
|
// https://github.com/stretchr/testify/issues/1143
|
||||||
|
expected := make(map[unmarshalTextKey]string)
|
||||||
|
for k, v := range *(test.expected.(*doc)) {
|
||||||
|
expected[*k] = v
|
||||||
|
}
|
||||||
|
got := make(map[unmarshalTextKey]string)
|
||||||
|
for k, v := range *(test.target.(*doc)) {
|
||||||
|
got[*k] = v
|
||||||
|
}
|
||||||
|
assert.Equal(t, expected, got)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "kv bad text key",
|
||||||
|
input: `a-1 = "foo"`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc = map[unmarshalBadTextKey]string
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
err: true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "kv bad ptr text key",
|
||||||
|
input: `a-1 = "foo"`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc = map[*unmarshalBadTextKey]string
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
err: true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "table bad text key",
|
||||||
|
input: `["a-1"]
|
||||||
|
foo = "bar"`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc = map[unmarshalBadTextKey]map[string]string
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
err: true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: "time.time with negative zone",
|
desc: "time.time with negative zone",
|
||||||
input: `a = 1979-05-27T00:32:00-07:00 `, // space intentional
|
input: `a = 1979-05-27T00:32:00-07:00 `, // space intentional
|
||||||
@@ -540,6 +656,35 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "issue 739 - table redefinition",
|
||||||
|
input: `
|
||||||
|
[foo.bar.baz]
|
||||||
|
wibble = 'wobble'
|
||||||
|
|
||||||
|
[foo]
|
||||||
|
|
||||||
|
[foo.bar]
|
||||||
|
huey = 'dewey'
|
||||||
|
`,
|
||||||
|
gen: func() test {
|
||||||
|
m := map[string]interface{}{}
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &m,
|
||||||
|
expected: &map[string]interface{}{
|
||||||
|
`foo`: map[string]interface{}{
|
||||||
|
"bar": map[string]interface{}{
|
||||||
|
"huey": "dewey",
|
||||||
|
"baz": map[string]interface{}{
|
||||||
|
"wibble": "wobble",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: "multiline basic string",
|
desc: "multiline basic string",
|
||||||
input: `A = """\
|
input: `A = """\
|
||||||
@@ -571,7 +716,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "multiline basic string with windows newline",
|
desc: "multiline basic string with windows newline",
|
||||||
input: "A = \"\"\"\r\nTest\"\"\"",
|
input: "A = \"\"\"\r\nTe\r\nst\"\"\"",
|
||||||
gen: func() test {
|
gen: func() test {
|
||||||
type doc struct {
|
type doc struct {
|
||||||
A string
|
A string
|
||||||
@@ -579,7 +724,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
|
|
||||||
return test{
|
return test{
|
||||||
target: &doc{},
|
target: &doc{},
|
||||||
expected: &doc{A: "Test"},
|
expected: &doc{A: "Te\r\nst"},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -664,6 +809,36 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "long string array into []string",
|
||||||
|
input: `A = ["0","1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17"]`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc struct {
|
||||||
|
A []string
|
||||||
|
}
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
expected: &doc{A: []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17"}},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "long string array into []interface{}",
|
||||||
|
input: `A = ["0","1","2","3","4","5","6","7","8","9","10","11","12","13","14",
|
||||||
|
"15","16","17"]`,
|
||||||
|
gen: func() test {
|
||||||
|
type doc struct {
|
||||||
|
A []interface{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return test{
|
||||||
|
target: &doc{},
|
||||||
|
expected: &doc{A: []interface{}{"0", "1", "2", "3", "4", "5", "6",
|
||||||
|
"7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17"}},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: "standard table",
|
desc: "standard table",
|
||||||
input: `[A]
|
input: `[A]
|
||||||
@@ -907,7 +1082,7 @@ B = "data"`,
|
|||||||
"Name": "Hammer",
|
"Name": "Hammer",
|
||||||
"Sku": int64(738594937),
|
"Sku": int64(738594937),
|
||||||
},
|
},
|
||||||
map[string]interface{}(nil),
|
map[string]interface{}{},
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"Name": "Nail",
|
"Name": "Nail",
|
||||||
"Sku": int64(284758393),
|
"Sku": int64(284758393),
|
||||||
@@ -1441,7 +1616,7 @@ B = "data"`,
|
|||||||
target: &map[string]interface{}{},
|
target: &map[string]interface{}{},
|
||||||
expected: &map[string]interface{}{
|
expected: &map[string]interface{}{
|
||||||
"products": []interface{}{
|
"products": []interface{}{
|
||||||
map[string]interface{}(nil),
|
map[string]interface{}{},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -1457,6 +1632,16 @@ B = "data"`,
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "empty map into map with invalid key type",
|
||||||
|
input: ``,
|
||||||
|
gen: func() test {
|
||||||
|
return test{
|
||||||
|
target: &map[int]string{},
|
||||||
|
expected: &map[int]string{},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: "into map with convertible key type",
|
desc: "into map with convertible key type",
|
||||||
input: `A = "hello"`,
|
input: `A = "hello"`,
|
||||||
@@ -1671,6 +1856,28 @@ B = "data"`,
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "kv that points to a slice",
|
||||||
|
input: "a.b.c = 'foo'",
|
||||||
|
gen: func() test {
|
||||||
|
doc := map[string][]string{}
|
||||||
|
return test{
|
||||||
|
target: &doc,
|
||||||
|
err: true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "kv that points to a pointer to a slice",
|
||||||
|
input: "a.b.c = 'foo'",
|
||||||
|
gen: func() test {
|
||||||
|
doc := map[string]*[]string{}
|
||||||
|
return test{
|
||||||
|
target: &doc,
|
||||||
|
err: true,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, e := range examples {
|
for _, e := range examples {
|
||||||
@@ -1691,7 +1898,11 @@ B = "data"`,
|
|||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
} else {
|
} else {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, test.expected, test.target)
|
if test.assert != nil {
|
||||||
|
test.assert(t, test)
|
||||||
|
} else {
|
||||||
|
assert.Equal(t, test.expected, test.target)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -1760,6 +1971,20 @@ func TestUnmarshalOverflows(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestUnmarshalErrors(t *testing.T) {
|
||||||
|
type mystruct struct {
|
||||||
|
Bar string
|
||||||
|
}
|
||||||
|
|
||||||
|
data := `bar = 42`
|
||||||
|
|
||||||
|
s := mystruct{}
|
||||||
|
err := toml.Unmarshal([]byte(data), &s)
|
||||||
|
require.Error(t, err)
|
||||||
|
|
||||||
|
require.Equal(t, "toml: cannot decode TOML integer into struct field toml_test.mystruct.Bar of type string", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
func TestUnmarshalInvalidTarget(t *testing.T) {
|
func TestUnmarshalInvalidTarget(t *testing.T) {
|
||||||
x := "foo"
|
x := "foo"
|
||||||
err := toml.Unmarshal([]byte{}, x)
|
err := toml.Unmarshal([]byte{}, x)
|
||||||
@@ -1798,8 +2023,7 @@ key2 = "missing2"
|
|||||||
key3 = "missing3"
|
key3 = "missing3"
|
||||||
key4 = "value4"
|
key4 = "value4"
|
||||||
`,
|
`,
|
||||||
expected: `
|
expected: `2| key1 = "value1"
|
||||||
2| key1 = "value1"
|
|
||||||
3| key2 = "missing2"
|
3| key2 = "missing2"
|
||||||
| ~~~~ missing field
|
| ~~~~ missing field
|
||||||
4| key3 = "missing3"
|
4| key3 = "missing3"
|
||||||
@@ -1809,8 +2033,7 @@ key4 = "value4"
|
|||||||
3| key2 = "missing2"
|
3| key2 = "missing2"
|
||||||
4| key3 = "missing3"
|
4| key3 = "missing3"
|
||||||
| ~~~~ missing field
|
| ~~~~ missing field
|
||||||
5| key4 = "value4"
|
5| key4 = "value4"`,
|
||||||
`,
|
|
||||||
target: &struct {
|
target: &struct {
|
||||||
Key1 string
|
Key1 string
|
||||||
Key4 string
|
Key4 string
|
||||||
@@ -1819,10 +2042,8 @@ key4 = "value4"
|
|||||||
{
|
{
|
||||||
desc: "multi-part key",
|
desc: "multi-part key",
|
||||||
input: `a.short.key="foo"`,
|
input: `a.short.key="foo"`,
|
||||||
expected: `
|
expected: `1| a.short.key="foo"
|
||||||
1| a.short.key="foo"
|
| ~~~~~~~~~~~ missing field`,
|
||||||
| ~~~~~~~~~~~ missing field
|
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "missing table",
|
desc: "missing table",
|
||||||
@@ -1830,24 +2051,19 @@ key4 = "value4"
|
|||||||
[foo]
|
[foo]
|
||||||
bar = 42
|
bar = 42
|
||||||
`,
|
`,
|
||||||
expected: `
|
expected: `2| [foo]
|
||||||
2| [foo]
|
|
||||||
| ~~~ missing table
|
| ~~~ missing table
|
||||||
3| bar = 42
|
3| bar = 42`,
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
|
|
||||||
{
|
{
|
||||||
desc: "missing array table",
|
desc: "missing array table",
|
||||||
input: `
|
input: `
|
||||||
[[foo]]
|
[[foo]]
|
||||||
bar = 42
|
bar = 42`,
|
||||||
`,
|
expected: `2| [[foo]]
|
||||||
expected: `
|
|
||||||
2| [[foo]]
|
|
||||||
| ~~~ missing table
|
| ~~~ missing table
|
||||||
3| bar = 42
|
3| bar = 42`,
|
||||||
`,
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1857,7 +2073,7 @@ bar = 42
|
|||||||
t.Run("strict", func(t *testing.T) {
|
t.Run("strict", func(t *testing.T) {
|
||||||
r := strings.NewReader(e.input)
|
r := strings.NewReader(e.input)
|
||||||
d := toml.NewDecoder(r)
|
d := toml.NewDecoder(r)
|
||||||
d.SetStrict(true)
|
d.DisallowUnknownFields()
|
||||||
x := e.target
|
x := e.target
|
||||||
if x == nil {
|
if x == nil {
|
||||||
x = &struct{}{}
|
x = &struct{}{}
|
||||||
@@ -1866,7 +2082,7 @@ bar = 42
|
|||||||
|
|
||||||
var tsm *toml.StrictMissingError
|
var tsm *toml.StrictMissingError
|
||||||
if errors.As(err, &tsm) {
|
if errors.As(err, &tsm) {
|
||||||
equalStringsIgnoreNewlines(t, e.expected, tsm.String())
|
assert.Equal(t, e.expected, tsm.String())
|
||||||
} else {
|
} else {
|
||||||
t.Fatalf("err should have been a *toml.StrictMissingError, but got %s (%T)", err, err)
|
t.Fatalf("err should have been a *toml.StrictMissingError, but got %s (%T)", err, err)
|
||||||
}
|
}
|
||||||
@@ -1875,7 +2091,6 @@ bar = 42
|
|||||||
t.Run("default", func(t *testing.T) {
|
t.Run("default", func(t *testing.T) {
|
||||||
r := strings.NewReader(e.input)
|
r := strings.NewReader(e.input)
|
||||||
d := toml.NewDecoder(r)
|
d := toml.NewDecoder(r)
|
||||||
d.SetStrict(false)
|
|
||||||
x := e.target
|
x := e.target
|
||||||
if x == nil {
|
if x == nil {
|
||||||
x = &struct{}{}
|
x = &struct{}{}
|
||||||
@@ -2085,7 +2300,7 @@ xz_hash = "1a48f723fea1f17d786ce6eadd9d00914d38062d28fd9c455ed3c3801905b388"
|
|||||||
|
|
||||||
expected := doc{
|
expected := doc{
|
||||||
Pkg: map[string]pkg{
|
Pkg: map[string]pkg{
|
||||||
"cargo": pkg{
|
"cargo": {
|
||||||
Target: map[string]target{
|
Target: map[string]target{
|
||||||
"aarch64-apple-darwin": {
|
"aarch64-apple-darwin": {
|
||||||
XZ_URL: "https://static.rust-lang.org/dist/2021-07-29/cargo-1.54.0-aarch64-apple-darwin.tar.xz",
|
XZ_URL: "https://static.rust-lang.org/dist/2021-07-29/cargo-1.54.0-aarch64-apple-darwin.tar.xz",
|
||||||
@@ -2190,7 +2405,277 @@ func TestIssue666(t *testing.T) {
|
|||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen
|
func TestIssue677(t *testing.T) {
|
||||||
|
doc := `
|
||||||
|
[Build]
|
||||||
|
Name = "publication build"
|
||||||
|
|
||||||
|
[[Build.Dependencies]]
|
||||||
|
Name = "command"
|
||||||
|
Program = "hugo"
|
||||||
|
`
|
||||||
|
|
||||||
|
type _tomlJob struct {
|
||||||
|
Dependencies []map[string]interface{}
|
||||||
|
}
|
||||||
|
|
||||||
|
type tomlParser struct {
|
||||||
|
Build *_tomlJob
|
||||||
|
}
|
||||||
|
|
||||||
|
p := tomlParser{}
|
||||||
|
|
||||||
|
err := toml.Unmarshal([]byte(doc), &p)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
expected := tomlParser{
|
||||||
|
Build: &_tomlJob{
|
||||||
|
Dependencies: []map[string]interface{}{
|
||||||
|
{
|
||||||
|
"Name": "command",
|
||||||
|
"Program": "hugo",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
require.Equal(t, expected, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue701(t *testing.T) {
|
||||||
|
// Expected behavior:
|
||||||
|
// Return an error since a cannot be modified. From the TOML spec:
|
||||||
|
//
|
||||||
|
// > Inline tables are fully self-contained and define all
|
||||||
|
// keys and sub-tables within them. Keys and sub-tables cannot
|
||||||
|
// be added outside the braces.
|
||||||
|
|
||||||
|
docs := []string{
|
||||||
|
`
|
||||||
|
a={}
|
||||||
|
[a.b]
|
||||||
|
z=0
|
||||||
|
`,
|
||||||
|
`
|
||||||
|
a={}
|
||||||
|
[[a.b]]
|
||||||
|
z=0
|
||||||
|
`,
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, doc := range docs {
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal([]byte(doc), &v)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue703(t *testing.T) {
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal([]byte("[a]\nx.y=0\n[a.x]"), &v)
|
||||||
|
require.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue708(t *testing.T) {
|
||||||
|
v := map[string]string{}
|
||||||
|
err := toml.Unmarshal([]byte("0=\"\"\"\\\r\n\"\"\""), &v)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]string{"0": ""}, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue710(t *testing.T) {
|
||||||
|
v := map[string]toml.LocalTime{}
|
||||||
|
err := toml.Unmarshal([]byte(`0=00:00:00.0000000000`), &v)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]toml.LocalTime{"0": {Precision: 9}}, v)
|
||||||
|
v1 := map[string]toml.LocalTime{}
|
||||||
|
err = toml.Unmarshal([]byte(`0=00:00:00.0000000001`), &v1)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]toml.LocalTime{"0": {Precision: 9}}, v1)
|
||||||
|
v2 := map[string]toml.LocalTime{}
|
||||||
|
err = toml.Unmarshal([]byte(`0=00:00:00.1111111119`), &v2)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]toml.LocalTime{"0": {Nanosecond: 111111111, Precision: 9}}, v2)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue715(t *testing.T) {
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal([]byte("0=+"), &v)
|
||||||
|
require.Error(t, err)
|
||||||
|
|
||||||
|
err = toml.Unmarshal([]byte("0=-"), &v)
|
||||||
|
require.Error(t, err)
|
||||||
|
|
||||||
|
err = toml.Unmarshal([]byte("0=+A"), &v)
|
||||||
|
require.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue714(t *testing.T) {
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal([]byte("0."), &v)
|
||||||
|
require.Error(t, err)
|
||||||
|
|
||||||
|
err = toml.Unmarshal([]byte("0={0=0,"), &v)
|
||||||
|
require.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue772(t *testing.T) {
|
||||||
|
type FileHandling struct {
|
||||||
|
FilePattern string `toml:"pattern"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Config struct {
|
||||||
|
FileHandling `toml:"filehandling"`
|
||||||
|
}
|
||||||
|
|
||||||
|
var defaultConfigFile = []byte(`
|
||||||
|
[filehandling]
|
||||||
|
pattern = "reach-masterdev-"`)
|
||||||
|
|
||||||
|
config := Config{}
|
||||||
|
err := toml.Unmarshal(defaultConfigFile, &config)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "reach-masterdev-", config.FileHandling.FilePattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue774(t *testing.T) {
|
||||||
|
type ScpData struct {
|
||||||
|
Host string `json:"host"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GenConfig struct {
|
||||||
|
SCP []ScpData `toml:"scp" comment:"Array of Secure Copy Configurations"`
|
||||||
|
}
|
||||||
|
|
||||||
|
c := &GenConfig{}
|
||||||
|
c.SCP = []ScpData{{Host: "main.domain.com"}}
|
||||||
|
|
||||||
|
b, err := toml.Marshal(c)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
expected := `# Array of Secure Copy Configurations
|
||||||
|
[[scp]]
|
||||||
|
Host = 'main.domain.com'
|
||||||
|
`
|
||||||
|
|
||||||
|
require.Equal(t, expected, string(b))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue799(t *testing.T) {
|
||||||
|
const testTOML = `
|
||||||
|
# notice the double brackets
|
||||||
|
[[test]]
|
||||||
|
answer = 42
|
||||||
|
`
|
||||||
|
|
||||||
|
var s struct {
|
||||||
|
// should be []map[string]int
|
||||||
|
Test map[string]int `toml:"test"`
|
||||||
|
}
|
||||||
|
|
||||||
|
err := toml.Unmarshal([]byte(testTOML), &s)
|
||||||
|
require.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue807(t *testing.T) {
|
||||||
|
type A struct {
|
||||||
|
Name string `toml:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type M struct {
|
||||||
|
*A
|
||||||
|
}
|
||||||
|
|
||||||
|
var m M
|
||||||
|
err := toml.Unmarshal([]byte(`name = 'foo'`), &m)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "foo", m.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue850(t *testing.T) {
|
||||||
|
data := make(map[string]string)
|
||||||
|
err := toml.Unmarshal([]byte("foo = {}"), &data)
|
||||||
|
require.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue851(t *testing.T) {
|
||||||
|
type Target struct {
|
||||||
|
Params map[string]string `toml:"params"`
|
||||||
|
}
|
||||||
|
|
||||||
|
content := "params = {a=\"1\",b=\"2\"}"
|
||||||
|
var target Target
|
||||||
|
err := toml.Unmarshal([]byte(content), &target)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]string{"a": "1", "b": "2"}, target.Params)
|
||||||
|
err = toml.Unmarshal([]byte(content), &target)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, map[string]string{"a": "1", "b": "2"}, target.Params)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIssue866(t *testing.T) {
|
||||||
|
type Pipeline struct {
|
||||||
|
Mapping map[string]struct {
|
||||||
|
Req [][]string `toml:"req"`
|
||||||
|
Res [][]string `toml:"res"`
|
||||||
|
} `toml:"mapping"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Pipelines struct {
|
||||||
|
PipelineMapping map[string]*Pipeline `toml:"pipelines"`
|
||||||
|
}
|
||||||
|
|
||||||
|
var badToml = `
|
||||||
|
[pipelines.register]
|
||||||
|
mapping.inst.req = [
|
||||||
|
["param1", "value1"],
|
||||||
|
]
|
||||||
|
mapping.inst.res = [
|
||||||
|
["param2", "value2"],
|
||||||
|
]
|
||||||
|
`
|
||||||
|
|
||||||
|
pipelines := new(Pipelines)
|
||||||
|
if err := toml.NewDecoder(bytes.NewBufferString(badToml)).DisallowUnknownFields().Decode(pipelines); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if pipelines.PipelineMapping["register"].Mapping["inst"].Req[0][0] != "param1" {
|
||||||
|
t.Fatal("unmarshal failed with mismatch value")
|
||||||
|
}
|
||||||
|
|
||||||
|
var goodTooToml = `
|
||||||
|
[pipelines.register]
|
||||||
|
mapping.inst.req = [
|
||||||
|
["param1", "value1"],
|
||||||
|
]
|
||||||
|
`
|
||||||
|
|
||||||
|
pipelines = new(Pipelines)
|
||||||
|
if err := toml.NewDecoder(bytes.NewBufferString(goodTooToml)).DisallowUnknownFields().Decode(pipelines); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if pipelines.PipelineMapping["register"].Mapping["inst"].Req[0][0] != "param1" {
|
||||||
|
t.Fatal("unmarshal failed with mismatch value")
|
||||||
|
}
|
||||||
|
|
||||||
|
var goodToml = `
|
||||||
|
[pipelines.register.mapping.inst]
|
||||||
|
req = [
|
||||||
|
["param1", "value1"],
|
||||||
|
]
|
||||||
|
res = [
|
||||||
|
["param2", "value2"],
|
||||||
|
]
|
||||||
|
`
|
||||||
|
|
||||||
|
pipelines = new(Pipelines)
|
||||||
|
if err := toml.NewDecoder(bytes.NewBufferString(goodToml)).DisallowUnknownFields().Decode(pipelines); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if pipelines.PipelineMapping["register"].Mapping["inst"].Req[0][0] != "param1" {
|
||||||
|
t.Fatal("unmarshal failed with mismatch value")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func TestUnmarshalDecodeErrors(t *testing.T) {
|
func TestUnmarshalDecodeErrors(t *testing.T) {
|
||||||
examples := []struct {
|
examples := []struct {
|
||||||
desc string
|
desc string
|
||||||
@@ -2205,18 +2690,10 @@ func TestUnmarshalDecodeErrors(t *testing.T) {
|
|||||||
desc: "local time with fractional",
|
desc: "local time with fractional",
|
||||||
data: `a = 11:22:33.x`,
|
data: `a = 11:22:33.x`,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
desc: "local time frac precision too large",
|
|
||||||
data: `a = 2021-05-09T11:22:33.99999999999`,
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
desc: "wrong time offset separator",
|
desc: "wrong time offset separator",
|
||||||
data: `a = 1979-05-27T00:32:00.-07:00`,
|
data: `a = 1979-05-27T00:32:00.-07:00`,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
desc: "missing fractional with tz",
|
|
||||||
data: `a = 2021-05-09T11:22:33.99999999999`,
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
desc: "wrong time offset separator",
|
desc: "wrong time offset separator",
|
||||||
data: `a = 1979-05-27T00:32:00Z07:00`,
|
data: `a = 1979-05-27T00:32:00Z07:00`,
|
||||||
@@ -2226,9 +2703,25 @@ func TestUnmarshalDecodeErrors(t *testing.T) {
|
|||||||
data: `flt8 = 224_617.445_991__228`,
|
data: `flt8 = 224_617.445_991__228`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "float with double _",
|
desc: "float with double .",
|
||||||
data: `flt8 = 1..2`,
|
data: `flt8 = 1..2`,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: "number with plus sign and leading underscore",
|
||||||
|
data: `a = +_0`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "number with negative sign and leading underscore",
|
||||||
|
data: `a = -_0`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "exponent with plus sign and leading underscore",
|
||||||
|
data: `a = 0e+_0`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "exponent with negative sign and leading underscore",
|
||||||
|
data: `a = 0e-_0`,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: "int with wrong base",
|
desc: "int with wrong base",
|
||||||
data: `a = 0f2`,
|
data: `a = 0f2`,
|
||||||
@@ -2336,7 +2829,7 @@ world'`,
|
|||||||
{
|
{
|
||||||
desc: "invalid seconds value",
|
desc: "invalid seconds value",
|
||||||
data: `a=1979-05-27T12:45:99`,
|
data: `a=1979-05-27T12:45:99`,
|
||||||
msg: `seconds cannot be greater 59`,
|
msg: `seconds cannot be greater 60`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: `binary with invalid digit`,
|
desc: `binary with invalid digit`,
|
||||||
@@ -2459,7 +2952,7 @@ world'`,
|
|||||||
data: "a = \"aaaa\xE2\x80\x00\"",
|
data: "a = \"aaaa\xE2\x80\x00\"",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "invalid 4rd byte of 4-byte utf8 character in string with no escape sequence",
|
desc: "invalid 4th byte of 4-byte utf8 character in string with no escape sequence",
|
||||||
data: "a = \"aaaa\xF2\x81\x81\x00\"",
|
data: "a = \"aaaa\xF2\x81\x81\x00\"",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -2475,7 +2968,7 @@ world'`,
|
|||||||
data: "a = 'aaaa\xE2\x80\x00'",
|
data: "a = 'aaaa\xE2\x80\x00'",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
desc: "invalid 4rd byte of 4-byte utf8 character in literal string",
|
desc: "invalid 4th byte of 4-byte utf8 character in literal string",
|
||||||
data: "a = 'aaaa\xF2\x81\x81\x00'",
|
data: "a = 'aaaa\xF2\x81\x81\x00'",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -2540,14 +3033,70 @@ world'`,
|
|||||||
desc: `invalid month`,
|
desc: `invalid month`,
|
||||||
data: `a=2021-0--29`,
|
data: `a=2021-0--29`,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: `zero is an invalid day`,
|
||||||
|
data: `a=2021-11-00`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `zero is an invalid month`,
|
||||||
|
data: `a=2021-00-11`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `invalid number of seconds digits with trailing digit`,
|
||||||
|
data: `a=0000-01-01 00:00:000000Z3`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `invalid zone offset hours`,
|
||||||
|
data: `a=0000-01-01 00:00:00+24:00`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `invalid zone offset minutes`,
|
||||||
|
data: `a=0000-01-01 00:00:00+00:60`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `invalid character in zone offset hours`,
|
||||||
|
data: `a=0000-01-01 00:00:00+0Z:00`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `invalid character in zone offset minutes`,
|
||||||
|
data: `a=0000-01-01 00:00:00+00:0Z`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `invalid number of seconds`,
|
||||||
|
data: `a=0000-01-01 00:00:00+27000`,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: `carriage return inside basic key`,
|
desc: `carriage return inside basic key`,
|
||||||
data: "\"\r\"=42",
|
data: "\"\r\"=42",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: `carriage return inside literal key`,
|
||||||
|
data: "'\r'=42",
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: `carriage return inside basic string`,
|
desc: `carriage return inside basic string`,
|
||||||
data: "A = \"\r\"",
|
data: "A = \"\r\"",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
desc: `carriage return inside basic multiline string`,
|
||||||
|
data: "a=\"\"\"\r\"\"\"",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `carriage return at the trail of basic multiline string`,
|
||||||
|
data: "a=\"\"\"\r",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `carriage return inside literal string`,
|
||||||
|
data: "A = '\r'",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `carriage return inside multiline literal string`,
|
||||||
|
data: "a='''\r'''",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: `carriage return at trail of multiline literal string`,
|
||||||
|
data: "a='''\r",
|
||||||
|
},
|
||||||
{
|
{
|
||||||
desc: `carriage return in comment`,
|
desc: `carriage return in comment`,
|
||||||
data: "# this is a test\ra=1",
|
data: "# this is a test\ra=1",
|
||||||
@@ -2578,6 +3127,64 @@ world'`,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestOmitEmpty(t *testing.T) {
|
||||||
|
type inner struct {
|
||||||
|
private string
|
||||||
|
Skip string `toml:"-"`
|
||||||
|
V string
|
||||||
|
}
|
||||||
|
|
||||||
|
type elem struct {
|
||||||
|
Foo string `toml:",omitempty"`
|
||||||
|
Bar string `toml:",omitempty"`
|
||||||
|
Inner inner `toml:",omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type doc struct {
|
||||||
|
X []elem `toml:",inline"`
|
||||||
|
}
|
||||||
|
|
||||||
|
d := doc{X: []elem{elem{
|
||||||
|
Foo: "test",
|
||||||
|
Inner: inner{
|
||||||
|
V: "alue",
|
||||||
|
},
|
||||||
|
}}}
|
||||||
|
|
||||||
|
b, err := toml.Marshal(d)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
require.Equal(t, "X = [{Foo = 'test', Inner = {V = 'alue'}}]\n", string(b))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUnmarshalTags(t *testing.T) {
|
||||||
|
type doc struct {
|
||||||
|
Dash string `toml:"-,"`
|
||||||
|
Ignore string `toml:"-"`
|
||||||
|
A string `toml:"hello"`
|
||||||
|
B string `toml:"comma,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
data := `
|
||||||
|
'-' = "dash"
|
||||||
|
Ignore = 'me'
|
||||||
|
hello = 'content'
|
||||||
|
comma = 'ok'
|
||||||
|
`
|
||||||
|
|
||||||
|
d := doc{}
|
||||||
|
expected := doc{
|
||||||
|
Dash: "dash",
|
||||||
|
Ignore: "",
|
||||||
|
A: "content",
|
||||||
|
B: "ok",
|
||||||
|
}
|
||||||
|
|
||||||
|
err := toml.Unmarshal([]byte(data), &d)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, expected, d)
|
||||||
|
}
|
||||||
|
|
||||||
func TestASCIIControlCharacters(t *testing.T) {
|
func TestASCIIControlCharacters(t *testing.T) {
|
||||||
invalidCharacters := []byte{0x7F}
|
invalidCharacters := []byte{0x7F}
|
||||||
for c := byte(0x0); c <= 0x08; c++ {
|
for c := byte(0x0); c <= 0x08; c++ {
|
||||||
@@ -2891,3 +3498,16 @@ func TestUnmarshal_RecursiveTableArray(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestUnmarshalEmbedNonString(t *testing.T) {
|
||||||
|
type Foo []byte
|
||||||
|
type doc struct {
|
||||||
|
Foo
|
||||||
|
}
|
||||||
|
|
||||||
|
d := doc{}
|
||||||
|
|
||||||
|
err := toml.Unmarshal([]byte(`foo = 'bar'`), &d)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Nil(t, d.Foo)
|
||||||
|
}
|
||||||
|
|||||||
+136
@@ -0,0 +1,136 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Iterator over a sequence of nodes.
|
||||||
|
//
|
||||||
|
// Starts uninitialized, you need to call Next() first.
|
||||||
|
//
|
||||||
|
// For example:
|
||||||
|
//
|
||||||
|
// it := n.Children()
|
||||||
|
// for it.Next() {
|
||||||
|
// n := it.Node()
|
||||||
|
// // do something with n
|
||||||
|
// }
|
||||||
|
type Iterator struct {
|
||||||
|
started bool
|
||||||
|
node *Node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next moves the iterator forward and returns true if points to a
|
||||||
|
// node, false otherwise.
|
||||||
|
func (c *Iterator) Next() bool {
|
||||||
|
if !c.started {
|
||||||
|
c.started = true
|
||||||
|
} else if c.node.Valid() {
|
||||||
|
c.node = c.node.Next()
|
||||||
|
}
|
||||||
|
return c.node.Valid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsLast returns true if the current node of the iterator is the last
|
||||||
|
// one. Subsequent calls to Next() will return false.
|
||||||
|
func (c *Iterator) IsLast() bool {
|
||||||
|
return c.node.next == 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node returns a pointer to the node pointed at by the iterator.
|
||||||
|
func (c *Iterator) Node() *Node {
|
||||||
|
return c.node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node in a TOML expression AST.
|
||||||
|
//
|
||||||
|
// Depending on Kind, its sequence of children should be interpreted
|
||||||
|
// differently.
|
||||||
|
//
|
||||||
|
// - Array have one child per element in the array.
|
||||||
|
// - InlineTable have one child per key-value in the table (each of kind
|
||||||
|
// InlineTable).
|
||||||
|
// - KeyValue have at least two children. The first one is the value. The rest
|
||||||
|
// make a potentially dotted key.
|
||||||
|
// - Table and ArrayTable's children represent a dotted key (same as
|
||||||
|
// KeyValue, but without the first node being the value).
|
||||||
|
//
|
||||||
|
// When relevant, Raw describes the range of bytes this node is referring to in
|
||||||
|
// the input document. Use Parser.Raw() to retrieve the actual bytes.
|
||||||
|
type Node struct {
|
||||||
|
Kind Kind
|
||||||
|
Raw Range // Raw bytes from the input.
|
||||||
|
Data []byte // Node value (either allocated or referencing the input).
|
||||||
|
|
||||||
|
// References to other nodes, as offsets in the backing array
|
||||||
|
// from this node. References can go backward, so those can be
|
||||||
|
// negative.
|
||||||
|
next int // 0 if last element
|
||||||
|
child int // 0 if no child
|
||||||
|
}
|
||||||
|
|
||||||
|
// Range of bytes in the document.
|
||||||
|
type Range struct {
|
||||||
|
Offset uint32
|
||||||
|
Length uint32
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns a pointer to the next node, or nil if there is no next node.
|
||||||
|
func (n *Node) Next() *Node {
|
||||||
|
if n.next == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
ptr := unsafe.Pointer(n)
|
||||||
|
size := unsafe.Sizeof(Node{})
|
||||||
|
return (*Node)(danger.Stride(ptr, size, n.next))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Child returns a pointer to the first child node of this node. Other children
|
||||||
|
// can be accessed calling Next on the first child. Returns an nil if this Node
|
||||||
|
// has no child.
|
||||||
|
func (n *Node) Child() *Node {
|
||||||
|
if n.child == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
ptr := unsafe.Pointer(n)
|
||||||
|
size := unsafe.Sizeof(Node{})
|
||||||
|
return (*Node)(danger.Stride(ptr, size, n.child))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valid returns true if the node's kind is set (not to Invalid).
|
||||||
|
func (n *Node) Valid() bool {
|
||||||
|
return n != nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Key returns the children nodes making the Key on a supported node. Panics
|
||||||
|
// otherwise. They are guaranteed to be all be of the Kind Key. A simple key
|
||||||
|
// would return just one element.
|
||||||
|
func (n *Node) Key() Iterator {
|
||||||
|
switch n.Kind {
|
||||||
|
case KeyValue:
|
||||||
|
value := n.Child()
|
||||||
|
if !value.Valid() {
|
||||||
|
panic(fmt.Errorf("KeyValue should have at least two children"))
|
||||||
|
}
|
||||||
|
return Iterator{node: value.Next()}
|
||||||
|
case Table, ArrayTable:
|
||||||
|
return Iterator{node: n.Child()}
|
||||||
|
default:
|
||||||
|
panic(fmt.Errorf("Key() is not supported on a %s", n.Kind))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Value returns a pointer to the value node of a KeyValue.
|
||||||
|
// Guaranteed to be non-nil. Panics if not called on a KeyValue node,
|
||||||
|
// or if the Children are malformed.
|
||||||
|
func (n *Node) Value() *Node {
|
||||||
|
return n.Child()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Children returns an iterator over a node's children.
|
||||||
|
func (n *Node) Children() Iterator {
|
||||||
|
return Iterator{node: n.Child()}
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
package toml
|
package unstable
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
@@ -55,7 +55,7 @@ func BenchmarkParseLiteralStringValid(b *testing.B) {
|
|||||||
|
|
||||||
for name, input := range inputs {
|
for name, input := range inputs {
|
||||||
b.Run(name, func(b *testing.B) {
|
b.Run(name, func(b *testing.B) {
|
||||||
p := parser{}
|
p := Parser{}
|
||||||
b.SetBytes(int64(len(input)))
|
b.SetBytes(int64(len(input)))
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
// root contains a full AST.
|
||||||
|
//
|
||||||
|
// It is immutable once constructed with Builder.
|
||||||
|
type root struct {
|
||||||
|
nodes []Node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Iterator over the top level nodes.
|
||||||
|
func (r *root) Iterator() Iterator {
|
||||||
|
it := Iterator{}
|
||||||
|
if len(r.nodes) > 0 {
|
||||||
|
it.node = &r.nodes[0]
|
||||||
|
}
|
||||||
|
return it
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *root) at(idx reference) *Node {
|
||||||
|
return &r.nodes[idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
type reference int
|
||||||
|
|
||||||
|
const invalidReference reference = -1
|
||||||
|
|
||||||
|
func (r reference) Valid() bool {
|
||||||
|
return r != invalidReference
|
||||||
|
}
|
||||||
|
|
||||||
|
type builder struct {
|
||||||
|
tree root
|
||||||
|
lastIdx int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Tree() *root {
|
||||||
|
return &b.tree
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) NodeAt(ref reference) *Node {
|
||||||
|
return b.tree.at(ref)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Reset() {
|
||||||
|
b.tree.nodes = b.tree.nodes[:0]
|
||||||
|
b.lastIdx = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Push(n Node) reference {
|
||||||
|
b.lastIdx = len(b.tree.nodes)
|
||||||
|
b.tree.nodes = append(b.tree.nodes, n)
|
||||||
|
return reference(b.lastIdx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) PushAndChain(n Node) reference {
|
||||||
|
newIdx := len(b.tree.nodes)
|
||||||
|
b.tree.nodes = append(b.tree.nodes, n)
|
||||||
|
if b.lastIdx >= 0 {
|
||||||
|
b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
|
||||||
|
}
|
||||||
|
b.lastIdx = newIdx
|
||||||
|
return reference(b.lastIdx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) AttachChild(parent reference, child reference) {
|
||||||
|
b.tree.nodes[parent].child = int(child) - int(parent)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Chain(from reference, to reference) {
|
||||||
|
b.tree.nodes[from].next = int(to) - int(from)
|
||||||
|
}
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// Package unstable provides APIs that do not meet the backward compatibility
|
||||||
|
// guarantees yet.
|
||||||
|
package unstable
|
||||||
@@ -1,25 +1,26 @@
|
|||||||
package ast
|
package unstable
|
||||||
|
|
||||||
import "fmt"
|
import "fmt"
|
||||||
|
|
||||||
|
// Kind represents the type of TOML structure contained in a given Node.
|
||||||
type Kind int
|
type Kind int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// meta
|
// Meta
|
||||||
Invalid Kind = iota
|
Invalid Kind = iota
|
||||||
Comment
|
Comment
|
||||||
Key
|
Key
|
||||||
|
|
||||||
// top level structures
|
// Top level structures
|
||||||
Table
|
Table
|
||||||
ArrayTable
|
ArrayTable
|
||||||
KeyValue
|
KeyValue
|
||||||
|
|
||||||
// containers values
|
// Containers values
|
||||||
Array
|
Array
|
||||||
InlineTable
|
InlineTable
|
||||||
|
|
||||||
// values
|
// Values
|
||||||
String
|
String
|
||||||
Bool
|
Bool
|
||||||
Float
|
Float
|
||||||
@@ -30,6 +31,7 @@ const (
|
|||||||
DateTime
|
DateTime
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// String implementation of fmt.Stringer.
|
||||||
func (k Kind) String() string {
|
func (k Kind) String() string {
|
||||||
switch k {
|
switch k {
|
||||||
case Invalid:
|
case Invalid:
|
||||||
+295
-143
@@ -1,50 +1,108 @@
|
|||||||
package toml
|
package unstable
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"fmt"
|
||||||
"unicode"
|
"unicode"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
"github.com/pelletier/go-toml/v2/internal/characters"
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||||
)
|
)
|
||||||
|
|
||||||
type parser struct {
|
// ParserError describes an error relative to the content of the document.
|
||||||
builder ast.Builder
|
//
|
||||||
ref ast.Reference
|
// It cannot outlive the instance of Parser it refers to, and may cause panics
|
||||||
|
// if the parser is reset.
|
||||||
|
type ParserError struct {
|
||||||
|
Highlight []byte
|
||||||
|
Message string
|
||||||
|
Key []string // optional
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error is the implementation of the error interface.
|
||||||
|
func (e *ParserError) Error() string {
|
||||||
|
return e.Message
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewParserError is a convenience function to create a ParserError
|
||||||
|
//
|
||||||
|
// Warning: Highlight needs to be a subslice of Parser.data, so only slices
|
||||||
|
// returned by Parser.Raw are valid candidates.
|
||||||
|
func NewParserError(highlight []byte, format string, args ...interface{}) error {
|
||||||
|
return &ParserError{
|
||||||
|
Highlight: highlight,
|
||||||
|
Message: fmt.Errorf(format, args...).Error(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parser scans over a TOML-encoded document and generates an iterative AST.
|
||||||
|
//
|
||||||
|
// To prime the Parser, first reset it with the contents of a TOML document.
|
||||||
|
// Then, process all top-level expressions sequentially. See Example.
|
||||||
|
//
|
||||||
|
// Don't forget to check Error() after you're done parsing.
|
||||||
|
//
|
||||||
|
// Each top-level expression needs to be fully processed before calling
|
||||||
|
// NextExpression() again. Otherwise, calls to various Node methods may panic if
|
||||||
|
// the parser has moved on the next expression.
|
||||||
|
//
|
||||||
|
// For performance reasons, go-toml doesn't make a copy of the input bytes to
|
||||||
|
// the parser. Make sure to copy all the bytes you need to outlive the slice
|
||||||
|
// given to the parser.
|
||||||
|
type Parser struct {
|
||||||
data []byte
|
data []byte
|
||||||
|
builder builder
|
||||||
|
ref reference
|
||||||
left []byte
|
left []byte
|
||||||
err error
|
err error
|
||||||
first bool
|
first bool
|
||||||
|
|
||||||
|
KeepComments bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Range(b []byte) ast.Range {
|
// Data returns the slice provided to the last call to Reset.
|
||||||
return ast.Range{
|
func (p *Parser) Data() []byte {
|
||||||
|
return p.data
|
||||||
|
}
|
||||||
|
|
||||||
|
// Range returns a range description that corresponds to a given slice of the
|
||||||
|
// input. If the argument is not a subslice of the parser input, this function
|
||||||
|
// panics.
|
||||||
|
func (p *Parser) Range(b []byte) Range {
|
||||||
|
return Range{
|
||||||
Offset: uint32(danger.SubsliceOffset(p.data, b)),
|
Offset: uint32(danger.SubsliceOffset(p.data, b)),
|
||||||
Length: uint32(len(b)),
|
Length: uint32(len(b)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Raw(raw ast.Range) []byte {
|
// Raw returns the slice corresponding to the bytes in the given range.
|
||||||
|
func (p *Parser) Raw(raw Range) []byte {
|
||||||
return p.data[raw.Offset : raw.Offset+raw.Length]
|
return p.data[raw.Offset : raw.Offset+raw.Length]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Reset(b []byte) {
|
// Reset brings the parser to its initial state for a given input. It wipes an
|
||||||
|
// reuses internal storage to reduce allocation.
|
||||||
|
func (p *Parser) Reset(b []byte) {
|
||||||
p.builder.Reset()
|
p.builder.Reset()
|
||||||
p.ref = ast.InvalidReference
|
p.ref = invalidReference
|
||||||
p.data = b
|
p.data = b
|
||||||
p.left = b
|
p.left = b
|
||||||
p.err = nil
|
p.err = nil
|
||||||
p.first = true
|
p.first = true
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
// NextExpression parses the next top-level expression. If an expression was
|
||||||
func (p *parser) NextExpression() bool {
|
// successfully parsed, it returns true. If the parser is at the end of the
|
||||||
|
// document or an error occurred, it returns false.
|
||||||
|
//
|
||||||
|
// Retrieve the parsed expression with Expression().
|
||||||
|
func (p *Parser) NextExpression() bool {
|
||||||
if len(p.left) == 0 || p.err != nil {
|
if len(p.left) == 0 || p.err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
p.builder.Reset()
|
p.builder.Reset()
|
||||||
p.ref = ast.InvalidReference
|
p.ref = invalidReference
|
||||||
|
|
||||||
for {
|
for {
|
||||||
if len(p.left) == 0 || p.err != nil {
|
if len(p.left) == 0 || p.err != nil {
|
||||||
@@ -73,15 +131,56 @@ func (p *parser) NextExpression() bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Expression() *ast.Node {
|
// Expression returns a pointer to the node representing the last successfully
|
||||||
|
// parsed expression.
|
||||||
|
func (p *Parser) Expression() *Node {
|
||||||
return p.builder.NodeAt(p.ref)
|
return p.builder.NodeAt(p.ref)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Error() error {
|
// Error returns any error that has occurred during parsing.
|
||||||
|
func (p *Parser) Error() error {
|
||||||
return p.err
|
return p.err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseNewline(b []byte) ([]byte, error) {
|
// Position describes a position in the input.
|
||||||
|
type Position struct {
|
||||||
|
// Number of bytes from the beginning of the input.
|
||||||
|
Offset int
|
||||||
|
// Line number, starting at 1.
|
||||||
|
Line int
|
||||||
|
// Column number, starting at 1.
|
||||||
|
Column int
|
||||||
|
}
|
||||||
|
|
||||||
|
// Shape describes the position of a range in the input.
|
||||||
|
type Shape struct {
|
||||||
|
Start Position
|
||||||
|
End Position
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Parser) position(b []byte) Position {
|
||||||
|
offset := danger.SubsliceOffset(p.data, b)
|
||||||
|
|
||||||
|
lead := p.data[:offset]
|
||||||
|
|
||||||
|
return Position{
|
||||||
|
Offset: offset,
|
||||||
|
Line: bytes.Count(lead, []byte{'\n'}) + 1,
|
||||||
|
Column: len(lead) - bytes.LastIndex(lead, []byte{'\n'}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Shape returns the shape of the given range in the input. Will
|
||||||
|
// panic if the range is not a subslice of the input.
|
||||||
|
func (p *Parser) Shape(r Range) Shape {
|
||||||
|
raw := p.Raw(r)
|
||||||
|
return Shape{
|
||||||
|
Start: p.position(raw),
|
||||||
|
End: p.position(raw[r.Length:]),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Parser) parseNewline(b []byte) ([]byte, error) {
|
||||||
if b[0] == '\n' {
|
if b[0] == '\n' {
|
||||||
return b[1:], nil
|
return b[1:], nil
|
||||||
}
|
}
|
||||||
@@ -91,14 +190,27 @@ func (p *parser) parseNewline(b []byte) ([]byte, error) {
|
|||||||
return rest, err
|
return rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, newDecodeError(b[0:1], "expected newline but got %#U", b[0])
|
return nil, NewParserError(b[0:1], "expected newline but got %#U", b[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseComment(b []byte) (reference, []byte, error) {
|
||||||
|
ref := invalidReference
|
||||||
|
data, rest, err := scanComment(b)
|
||||||
|
if p.KeepComments && err == nil {
|
||||||
|
ref = p.builder.Push(Node{
|
||||||
|
Kind: Comment,
|
||||||
|
Raw: p.Range(data),
|
||||||
|
Data: data,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return ref, rest, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Parser) parseExpression(b []byte) (reference, []byte, error) {
|
||||||
// expression = ws [ comment ]
|
// expression = ws [ comment ]
|
||||||
// expression =/ ws keyval ws [ comment ]
|
// expression =/ ws keyval ws [ comment ]
|
||||||
// expression =/ ws table ws [ comment ]
|
// expression =/ ws table ws [ comment ]
|
||||||
ref := ast.InvalidReference
|
ref := invalidReference
|
||||||
|
|
||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
@@ -107,7 +219,7 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if b[0] == '#' {
|
if b[0] == '#' {
|
||||||
_, rest, err := scanComment(b)
|
ref, rest, err := p.parseComment(b)
|
||||||
return ref, rest, err
|
return ref, rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -129,14 +241,17 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) > 0 && b[0] == '#' {
|
if len(b) > 0 && b[0] == '#' {
|
||||||
_, rest, err := scanComment(b)
|
cref, rest, err := p.parseComment(b)
|
||||||
|
if cref != invalidReference {
|
||||||
|
p.builder.Chain(ref, cref)
|
||||||
|
}
|
||||||
return ref, rest, err
|
return ref, rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return ref, b, nil
|
return ref, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseTable(b []byte) (reference, []byte, error) {
|
||||||
// table = std-table / array-table
|
// table = std-table / array-table
|
||||||
if len(b) > 1 && b[1] == '[' {
|
if len(b) > 1 && b[1] == '[' {
|
||||||
return p.parseArrayTable(b)
|
return p.parseArrayTable(b)
|
||||||
@@ -145,12 +260,12 @@ func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return p.parseStdTable(b)
|
return p.parseStdTable(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseArrayTable(b []byte) (reference, []byte, error) {
|
||||||
// array-table = array-table-open key array-table-close
|
// array-table = array-table-open key array-table-close
|
||||||
// array-table-open = %x5B.5B ws ; [[ Double left square bracket
|
// array-table-open = %x5B.5B ws ; [[ Double left square bracket
|
||||||
// array-table-close = ws %x5D.5D ; ]] Double right square bracket
|
// array-table-close = ws %x5D.5D ; ]] Double right square bracket
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.ArrayTable,
|
Kind: ArrayTable,
|
||||||
})
|
})
|
||||||
|
|
||||||
b = b[2:]
|
b = b[2:]
|
||||||
@@ -174,12 +289,12 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, err
|
return ref, b, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseStdTable(b []byte) (reference, []byte, error) {
|
||||||
// std-table = std-table-open key std-table-close
|
// std-table = std-table-open key std-table-close
|
||||||
// std-table-open = %x5B ws ; [ Left square bracket
|
// std-table-open = %x5B ws ; [ Left square bracket
|
||||||
// std-table-close = ws %x5D ; ] Right square bracket
|
// std-table-close = ws %x5D ; ] Right square bracket
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.Table,
|
Kind: Table,
|
||||||
})
|
})
|
||||||
|
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
@@ -199,15 +314,15 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, err
|
return ref, b, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseKeyval(b []byte) (reference, []byte, error) {
|
||||||
// keyval = key keyval-sep val
|
// keyval = key keyval-sep val
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.KeyValue,
|
Kind: KeyValue,
|
||||||
})
|
})
|
||||||
|
|
||||||
key, b, err := p.parseKey(b)
|
key, b, err := p.parseKey(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ast.InvalidReference, nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// keyval-sep = ws %x3D ws ; =
|
// keyval-sep = ws %x3D ws ; =
|
||||||
@@ -215,12 +330,12 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there")
|
return invalidReference, nil, NewParserError(b, "expected = after a key, but the document ends there")
|
||||||
}
|
}
|
||||||
|
|
||||||
b, err = expect('=', b)
|
b, err = expect('=', b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ast.InvalidReference, nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
@@ -237,12 +352,12 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop,funlen
|
//nolint:cyclop,funlen
|
||||||
func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseVal(b []byte) (reference, []byte, error) {
|
||||||
// val = string / boolean / array / inline-table / date-time / float / integer
|
// val = string / boolean / array / inline-table / date-time / float / integer
|
||||||
ref := ast.InvalidReference
|
ref := invalidReference
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return ref, nil, newDecodeError(b, "expected value, not eof")
|
return ref, nil, NewParserError(b, "expected value, not eof")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
@@ -259,8 +374,8 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err == nil {
|
if err == nil {
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.String,
|
Kind: String,
|
||||||
Raw: p.Range(raw),
|
Raw: p.Range(raw),
|
||||||
Data: v,
|
Data: v,
|
||||||
})
|
})
|
||||||
@@ -277,8 +392,8 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err == nil {
|
if err == nil {
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.String,
|
Kind: String,
|
||||||
Raw: p.Range(raw),
|
Raw: p.Range(raw),
|
||||||
Data: v,
|
Data: v,
|
||||||
})
|
})
|
||||||
@@ -287,22 +402,22 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, err
|
return ref, b, err
|
||||||
case 't':
|
case 't':
|
||||||
if !scanFollowsTrue(b) {
|
if !scanFollowsTrue(b) {
|
||||||
return ref, nil, newDecodeError(atmost(b, 4), "expected 'true'")
|
return ref, nil, NewParserError(atmost(b, 4), "expected 'true'")
|
||||||
}
|
}
|
||||||
|
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.Bool,
|
Kind: Bool,
|
||||||
Data: b[:4],
|
Data: b[:4],
|
||||||
})
|
})
|
||||||
|
|
||||||
return ref, b[4:], nil
|
return ref, b[4:], nil
|
||||||
case 'f':
|
case 'f':
|
||||||
if !scanFollowsFalse(b) {
|
if !scanFollowsFalse(b) {
|
||||||
return ref, nil, newDecodeError(atmost(b, 5), "expected 'false'")
|
return ref, nil, NewParserError(atmost(b, 5), "expected 'false'")
|
||||||
}
|
}
|
||||||
|
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.Bool,
|
Kind: Bool,
|
||||||
Data: b[:5],
|
Data: b[:5],
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -324,7 +439,7 @@ func atmost(b []byte, n int) []byte {
|
|||||||
return b[:n]
|
return b[:n]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
v, rest, err := scanLiteralString(b)
|
v, rest, err := scanLiteralString(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, err
|
return nil, nil, nil, err
|
||||||
@@ -333,19 +448,20 @@ func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
return v, v[1 : len(v)-1], rest, nil
|
return v, v[1 : len(v)-1], rest, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseInlineTable(b []byte) (reference, []byte, error) {
|
||||||
// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
|
// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
|
||||||
// inline-table-open = %x7B ws ; {
|
// inline-table-open = %x7B ws ; {
|
||||||
// inline-table-close = ws %x7D ; }
|
// inline-table-close = ws %x7D ; }
|
||||||
// inline-table-sep = ws %x2C ws ; , Comma
|
// inline-table-sep = ws %x2C ws ; , Comma
|
||||||
// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
|
// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
|
||||||
parent := p.builder.Push(ast.Node{
|
parent := p.builder.Push(Node{
|
||||||
Kind: ast.InlineTable,
|
Kind: InlineTable,
|
||||||
|
Raw: p.Range(b[:1]),
|
||||||
})
|
})
|
||||||
|
|
||||||
first := true
|
first := true
|
||||||
|
|
||||||
var child ast.Reference
|
var child reference
|
||||||
|
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
|
|
||||||
@@ -356,7 +472,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return parent, nil, newDecodeError(previousB[:1], "inline table is incomplete")
|
return parent, nil, NewParserError(previousB[:1], "inline table is incomplete")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[0] == '}' {
|
if b[0] == '}' {
|
||||||
@@ -371,7 +487,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
var kv ast.Reference
|
var kv reference
|
||||||
|
|
||||||
kv, b, err = p.parseKeyval(b)
|
kv, b, err = p.parseKeyval(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -394,7 +510,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,cyclop
|
//nolint:funlen,cyclop
|
||||||
func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseValArray(b []byte) (reference, []byte, error) {
|
||||||
// array = array-open [ array-values ] ws-comment-newline array-close
|
// array = array-open [ array-values ] ws-comment-newline array-close
|
||||||
// array-open = %x5B ; [
|
// array-open = %x5B ; [
|
||||||
// array-close = %x5D ; ]
|
// array-close = %x5D ; ]
|
||||||
@@ -405,23 +521,39 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
arrayStart := b
|
arrayStart := b
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
|
|
||||||
parent := p.builder.Push(ast.Node{
|
parent := p.builder.Push(Node{
|
||||||
Kind: ast.Array,
|
Kind: Array,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// First indicates whether the parser is looking for the first element
|
||||||
|
// (non-comment) of the array.
|
||||||
first := true
|
first := true
|
||||||
|
|
||||||
var lastChild ast.Reference
|
lastChild := invalidReference
|
||||||
|
|
||||||
|
addChild := func(valueRef reference) {
|
||||||
|
if lastChild == invalidReference {
|
||||||
|
p.builder.AttachChild(parent, valueRef)
|
||||||
|
} else {
|
||||||
|
p.builder.Chain(lastChild, valueRef)
|
||||||
|
}
|
||||||
|
lastChild = valueRef
|
||||||
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
for len(b) > 0 {
|
for len(b) > 0 {
|
||||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
cref := invalidReference
|
||||||
|
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if cref != invalidReference {
|
||||||
|
addChild(cref)
|
||||||
|
}
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return parent, nil, newDecodeError(arrayStart[:1], "array is incomplete")
|
return parent, nil, NewParserError(arrayStart[:1], "array is incomplete")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[0] == ']' {
|
if b[0] == ']' {
|
||||||
@@ -430,16 +562,19 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
|
|
||||||
if b[0] == ',' {
|
if b[0] == ',' {
|
||||||
if first {
|
if first {
|
||||||
return parent, nil, newDecodeError(b[0:1], "array cannot start with comma")
|
return parent, nil, NewParserError(b[0:1], "array cannot start with comma")
|
||||||
}
|
}
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
|
|
||||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
if cref != invalidReference {
|
||||||
|
addChild(cref)
|
||||||
|
}
|
||||||
} else if !first {
|
} else if !first {
|
||||||
return parent, nil, newDecodeError(b[0:1], "array elements must be separated by commas")
|
return parent, nil, NewParserError(b[0:1], "array elements must be separated by commas")
|
||||||
}
|
}
|
||||||
|
|
||||||
// TOML allows trailing commas in arrays.
|
// TOML allows trailing commas in arrays.
|
||||||
@@ -447,23 +582,22 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
var valueRef ast.Reference
|
var valueRef reference
|
||||||
valueRef, b, err = p.parseVal(b)
|
valueRef, b, err = p.parseVal(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if first {
|
addChild(valueRef)
|
||||||
p.builder.AttachChild(parent, valueRef)
|
|
||||||
} else {
|
|
||||||
p.builder.Chain(lastChild, valueRef)
|
|
||||||
}
|
|
||||||
lastChild = valueRef
|
|
||||||
|
|
||||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
if cref != invalidReference {
|
||||||
|
addChild(cref)
|
||||||
|
}
|
||||||
|
|
||||||
first = false
|
first = false
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -472,15 +606,34 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return parent, rest, err
|
return parent, rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error) {
|
func (p *Parser) parseOptionalWhitespaceCommentNewline(b []byte) (reference, []byte, error) {
|
||||||
|
rootCommentRef := invalidReference
|
||||||
|
latestCommentRef := invalidReference
|
||||||
|
|
||||||
|
addComment := func(ref reference) {
|
||||||
|
if rootCommentRef == invalidReference {
|
||||||
|
rootCommentRef = ref
|
||||||
|
} else if latestCommentRef == invalidReference {
|
||||||
|
p.builder.AttachChild(rootCommentRef, ref)
|
||||||
|
latestCommentRef = ref
|
||||||
|
} else {
|
||||||
|
p.builder.Chain(latestCommentRef, ref)
|
||||||
|
latestCommentRef = ref
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for len(b) > 0 {
|
for len(b) > 0 {
|
||||||
var err error
|
var err error
|
||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) > 0 && b[0] == '#' {
|
if len(b) > 0 && b[0] == '#' {
|
||||||
_, b, err = scanComment(b)
|
var ref reference
|
||||||
|
ref, b, err = p.parseComment(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return invalidReference, nil, err
|
||||||
|
}
|
||||||
|
if ref != invalidReference {
|
||||||
|
addComment(ref)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -491,17 +644,17 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
|
|||||||
if b[0] == '\n' || b[0] == '\r' {
|
if b[0] == '\n' || b[0] == '\r' {
|
||||||
b, err = p.parseNewline(b)
|
b, err = p.parseNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return b, nil
|
return rootCommentRef, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
token, rest, err := scanMultilineLiteralString(b)
|
token, rest, err := scanMultilineLiteralString(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, err
|
return nil, nil, nil, err
|
||||||
@@ -520,7 +673,7 @@ func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte,
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,gocognit,cyclop
|
//nolint:funlen,gocognit,cyclop
|
||||||
func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
|
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
|
||||||
// ml-basic-string-delim
|
// ml-basic-string-delim
|
||||||
// ml-basic-string-delim = 3quotation-mark
|
// ml-basic-string-delim = 3quotation-mark
|
||||||
@@ -551,11 +704,11 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
|
|
||||||
if !escaped {
|
if !escaped {
|
||||||
str := token[startIdx:endIdx]
|
str := token[startIdx:endIdx]
|
||||||
verr := utf8TomlValidAlreadyEscaped(str)
|
verr := characters.Utf8TomlValidAlreadyEscaped(str)
|
||||||
if verr.Zero() {
|
if verr.Zero() {
|
||||||
return token, str, rest, nil
|
return token, str, rest, nil
|
||||||
}
|
}
|
||||||
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
||||||
}
|
}
|
||||||
|
|
||||||
var builder bytes.Buffer
|
var builder bytes.Buffer
|
||||||
@@ -578,6 +731,10 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
switch token[i+j] {
|
switch token[i+j] {
|
||||||
case ' ', '\t':
|
case ' ', '\t':
|
||||||
continue
|
continue
|
||||||
|
case '\r':
|
||||||
|
if token[i+j+1] == '\n' {
|
||||||
|
continue
|
||||||
|
}
|
||||||
case '\n':
|
case '\n':
|
||||||
isLastNonWhitespaceOnLine = true
|
isLastNonWhitespaceOnLine = true
|
||||||
}
|
}
|
||||||
@@ -613,6 +770,8 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
builder.WriteByte('\r')
|
builder.WriteByte('\r')
|
||||||
case 't':
|
case 't':
|
||||||
builder.WriteByte('\t')
|
builder.WriteByte('\t')
|
||||||
|
case 'e':
|
||||||
|
builder.WriteByte(0x1B)
|
||||||
case 'u':
|
case 'u':
|
||||||
x, err := hexToRune(atmost(token[i+1:], 4), 4)
|
x, err := hexToRune(atmost(token[i+1:], 4), 4)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -629,13 +788,13 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
builder.WriteRune(x)
|
builder.WriteRune(x)
|
||||||
i += 8
|
i += 8
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
} else {
|
} else {
|
||||||
size := utf8ValidNext(token[i:])
|
size := characters.Utf8ValidNext(token[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
|
||||||
}
|
}
|
||||||
builder.Write(token[i : i+size])
|
builder.Write(token[i : i+size])
|
||||||
i += size
|
i += size
|
||||||
@@ -645,7 +804,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
return token, builder.Bytes(), rest, nil
|
return token, builder.Bytes(), rest, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseKey(b []byte) (reference, []byte, error) {
|
||||||
// key = simple-key / dotted-key
|
// key = simple-key / dotted-key
|
||||||
// simple-key = quoted-key / unquoted-key
|
// simple-key = quoted-key / unquoted-key
|
||||||
//
|
//
|
||||||
@@ -656,11 +815,11 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||||||
// dot-sep = ws %x2E ws ; . Period
|
// dot-sep = ws %x2E ws ; . Period
|
||||||
raw, key, b, err := p.parseSimpleKey(b)
|
raw, key, b, err := p.parseSimpleKey(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ast.InvalidReference, nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.Key,
|
Kind: Key,
|
||||||
Raw: p.Range(raw),
|
Raw: p.Range(raw),
|
||||||
Data: key,
|
Data: key,
|
||||||
})
|
})
|
||||||
@@ -675,8 +834,8 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, nil, err
|
return ref, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.builder.PushAndChain(ast.Node{
|
p.builder.PushAndChain(Node{
|
||||||
Kind: ast.Key,
|
Kind: Key,
|
||||||
Raw: p.Range(raw),
|
Raw: p.Range(raw),
|
||||||
Data: key,
|
Data: key,
|
||||||
})
|
})
|
||||||
@@ -688,7 +847,11 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, nil
|
return ref, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
func (p *Parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
||||||
|
if len(b) == 0 {
|
||||||
|
return nil, nil, nil, NewParserError(b, "expected key but found none")
|
||||||
|
}
|
||||||
|
|
||||||
// simple-key = quoted-key / unquoted-key
|
// simple-key = quoted-key / unquoted-key
|
||||||
// unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
|
// unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
|
||||||
// quoted-key = basic-string / literal-string
|
// quoted-key = basic-string / literal-string
|
||||||
@@ -701,12 +864,12 @@ func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
|||||||
key, rest = scanUnquotedKey(b)
|
key, rest = scanUnquotedKey(b)
|
||||||
return key, key, rest, nil
|
return key, key, rest, nil
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
|
return nil, nil, nil, NewParserError(b[0:1], "invalid character at start of key: %c", b[0])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,cyclop
|
//nolint:funlen,cyclop
|
||||||
func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
// basic-string = quotation-mark *basic-char quotation-mark
|
// basic-string = quotation-mark *basic-char quotation-mark
|
||||||
// quotation-mark = %x22 ; "
|
// quotation-mark = %x22 ; "
|
||||||
// basic-char = basic-unescaped / escaped
|
// basic-char = basic-unescaped / escaped
|
||||||
@@ -734,11 +897,11 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
// validate the string and return a direct reference to the buffer.
|
// validate the string and return a direct reference to the buffer.
|
||||||
if !escaped {
|
if !escaped {
|
||||||
str := token[startIdx:endIdx]
|
str := token[startIdx:endIdx]
|
||||||
verr := utf8TomlValidAlreadyEscaped(str)
|
verr := characters.Utf8TomlValidAlreadyEscaped(str)
|
||||||
if verr.Zero() {
|
if verr.Zero() {
|
||||||
return token, str, rest, nil
|
return token, str, rest, nil
|
||||||
}
|
}
|
||||||
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
||||||
}
|
}
|
||||||
|
|
||||||
i := startIdx
|
i := startIdx
|
||||||
@@ -766,6 +929,8 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
builder.WriteByte('\r')
|
builder.WriteByte('\r')
|
||||||
case 't':
|
case 't':
|
||||||
builder.WriteByte('\t')
|
builder.WriteByte('\t')
|
||||||
|
case 'e':
|
||||||
|
builder.WriteByte(0x1B)
|
||||||
case 'u':
|
case 'u':
|
||||||
x, err := hexToRune(token[i+1:len(token)-1], 4)
|
x, err := hexToRune(token[i+1:len(token)-1], 4)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -783,13 +948,13 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
builder.WriteRune(x)
|
builder.WriteRune(x)
|
||||||
i += 8
|
i += 8
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
} else {
|
} else {
|
||||||
size := utf8ValidNext(token[i:])
|
size := characters.Utf8ValidNext(token[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
|
||||||
}
|
}
|
||||||
builder.Write(token[i : i+size])
|
builder.Write(token[i : i+size])
|
||||||
i += size
|
i += size
|
||||||
@@ -801,7 +966,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
|
|
||||||
func hexToRune(b []byte, length int) (rune, error) {
|
func hexToRune(b []byte, length int) (rune, error) {
|
||||||
if len(b) < length {
|
if len(b) < length {
|
||||||
return -1, newDecodeError(b, "unicode point needs %d character, not %d", length, len(b))
|
return -1, NewParserError(b, "unicode point needs %d character, not %d", length, len(b))
|
||||||
}
|
}
|
||||||
b = b[:length]
|
b = b[:length]
|
||||||
|
|
||||||
@@ -816,19 +981,19 @@ func hexToRune(b []byte, length int) (rune, error) {
|
|||||||
case 'A' <= c && c <= 'F':
|
case 'A' <= c && c <= 'F':
|
||||||
d = uint32(c - 'A' + 10)
|
d = uint32(c - 'A' + 10)
|
||||||
default:
|
default:
|
||||||
return -1, newDecodeError(b[i:i+1], "non-hex character")
|
return -1, NewParserError(b[i:i+1], "non-hex character")
|
||||||
}
|
}
|
||||||
r = r*16 + d
|
r = r*16 + d
|
||||||
}
|
}
|
||||||
|
|
||||||
if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 {
|
if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 {
|
||||||
return -1, newDecodeError(b, "escape sequence is invalid Unicode code point")
|
return -1, NewParserError(b, "escape sequence is invalid Unicode code point")
|
||||||
}
|
}
|
||||||
|
|
||||||
return rune(r), nil
|
return rune(r), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseWhitespace(b []byte) []byte {
|
func (p *Parser) parseWhitespace(b []byte) []byte {
|
||||||
// ws = *wschar
|
// ws = *wschar
|
||||||
// wschar = %x20 ; Space
|
// wschar = %x20 ; Space
|
||||||
// wschar =/ %x09 ; Horizontal tab
|
// wschar =/ %x09 ; Horizontal tab
|
||||||
@@ -838,31 +1003,30 @@ func (p *parser) parseWhitespace(b []byte) []byte {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
//nolint:cyclop
|
||||||
func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseIntOrFloatOrDateTime(b []byte) (reference, []byte, error) {
|
||||||
switch b[0] {
|
switch b[0] {
|
||||||
case 'i':
|
case 'i':
|
||||||
if !scanFollowsInf(b) {
|
if !scanFollowsInf(b) {
|
||||||
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'")
|
return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'inf'")
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:3],
|
Data: b[:3],
|
||||||
}), b[3:], nil
|
}), b[3:], nil
|
||||||
case 'n':
|
case 'n':
|
||||||
if !scanFollowsNan(b) {
|
if !scanFollowsNan(b) {
|
||||||
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'")
|
return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'nan'")
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:3],
|
Data: b[:3],
|
||||||
}), b[3:], nil
|
}), b[3:], nil
|
||||||
case '+', '-':
|
case '+', '-':
|
||||||
return p.scanIntOrFloat(b)
|
return p.scanIntOrFloat(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:gomnd
|
|
||||||
if len(b) < 3 {
|
if len(b) < 3 {
|
||||||
return p.scanIntOrFloat(b)
|
return p.scanIntOrFloat(b)
|
||||||
}
|
}
|
||||||
@@ -887,19 +1051,7 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
|
|||||||
return p.scanIntOrFloat(b)
|
return p.scanIntOrFloat(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func digitsToInt(b []byte) int {
|
func (p *Parser) scanDateTime(b []byte) (reference, []byte, error) {
|
||||||
x := 0
|
|
||||||
|
|
||||||
for _, d := range b {
|
|
||||||
x *= 10
|
|
||||||
x += int(d - '0')
|
|
||||||
}
|
|
||||||
|
|
||||||
return x
|
|
||||||
}
|
|
||||||
|
|
||||||
//nolint:gocognit,cyclop
|
|
||||||
func (p *parser) scanDateTime(b []byte) (ast.Reference, []byte, error) {
|
|
||||||
// scans for contiguous characters in [0-9T:Z.+-], and up to one space if
|
// scans for contiguous characters in [0-9T:Z.+-], and up to one space if
|
||||||
// followed by a digit.
|
// followed by a digit.
|
||||||
hasDate := false
|
hasDate := false
|
||||||
@@ -942,30 +1094,30 @@ byteLoop:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var kind ast.Kind
|
var kind Kind
|
||||||
|
|
||||||
if hasTime {
|
if hasTime {
|
||||||
if hasDate {
|
if hasDate {
|
||||||
if hasTz {
|
if hasTz {
|
||||||
kind = ast.DateTime
|
kind = DateTime
|
||||||
} else {
|
} else {
|
||||||
kind = ast.LocalDateTime
|
kind = LocalDateTime
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
kind = ast.LocalTime
|
kind = LocalTime
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
kind = ast.LocalDate
|
kind = LocalDate
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: kind,
|
Kind: kind,
|
||||||
Data: b[:i],
|
Data: b[:i],
|
||||||
}), b[i:], nil
|
}), b[i:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,gocognit,cyclop
|
//nolint:funlen,gocognit,cyclop
|
||||||
func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) scanIntOrFloat(b []byte) (reference, []byte, error) {
|
||||||
i := 0
|
i := 0
|
||||||
|
|
||||||
if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' {
|
if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' {
|
||||||
@@ -991,8 +1143,8 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Integer,
|
Kind: Integer,
|
||||||
Data: b[:i],
|
Data: b[:i],
|
||||||
}), b[i:], nil
|
}), b[i:], nil
|
||||||
}
|
}
|
||||||
@@ -1014,40 +1166,40 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
|||||||
|
|
||||||
if c == 'i' {
|
if c == 'i' {
|
||||||
if scanFollowsInf(b[i:]) {
|
if scanFollowsInf(b[i:]) {
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:i+3],
|
Data: b[:i+3],
|
||||||
}), b[i+3:], nil
|
}), b[i+3:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
|
return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'i' while scanning for a number")
|
||||||
}
|
}
|
||||||
|
|
||||||
if c == 'n' {
|
if c == 'n' {
|
||||||
if scanFollowsNan(b[i:]) {
|
if scanFollowsNan(b[i:]) {
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:i+3],
|
Data: b[:i+3],
|
||||||
}), b[i+3:], nil
|
}), b[i+3:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
|
return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'n' while scanning for a number")
|
||||||
}
|
}
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
return ast.InvalidReference, b, newDecodeError(b, "incomplete number")
|
return invalidReference, b, NewParserError(b, "incomplete number")
|
||||||
}
|
}
|
||||||
|
|
||||||
kind := ast.Integer
|
kind := Integer
|
||||||
|
|
||||||
if isFloat {
|
if isFloat {
|
||||||
kind = ast.Float
|
kind = Float
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: kind,
|
Kind: kind,
|
||||||
Data: b[:i],
|
Data: b[:i],
|
||||||
}), b[i:], nil
|
}), b[i:], nil
|
||||||
@@ -1076,11 +1228,11 @@ func isValidBinaryRune(r byte) bool {
|
|||||||
|
|
||||||
func expect(x byte, b []byte) ([]byte, error) {
|
func expect(x byte, b []byte) ([]byte, error) {
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return nil, newDecodeError(b, "expected character %c but the document ended here", x)
|
return nil, NewParserError(b, "expected character %c but the document ended here", x)
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[0] != x {
|
if b[0] != x {
|
||||||
return nil, newDecodeError(b[0:1], "expected character %c", x)
|
return nil, NewParserError(b[0:1], "expected character %c", x)
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[1:], nil
|
return b[1:], nil
|
||||||
@@ -0,0 +1,629 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestParser_AST_Numbers(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
desc string
|
||||||
|
input string
|
||||||
|
kind Kind
|
||||||
|
err bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "integer just digits",
|
||||||
|
input: `1234`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer zero",
|
||||||
|
input: `0`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer sign",
|
||||||
|
input: `+99`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer hex uppercase",
|
||||||
|
input: `0xDEADBEEF`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer hex lowercase",
|
||||||
|
input: `0xdead_beef`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer octal",
|
||||||
|
input: `0o01234567`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer binary",
|
||||||
|
input: `0b11010110`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float zero",
|
||||||
|
input: `0.0`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float positive zero",
|
||||||
|
input: `+0.0`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float negative zero",
|
||||||
|
input: `-0.0`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float pi",
|
||||||
|
input: `3.1415`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float negative",
|
||||||
|
input: `-0.01`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float signed exponent",
|
||||||
|
input: `5e+22`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float exponent lowercase",
|
||||||
|
input: `1e06`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float exponent uppercase",
|
||||||
|
input: `-2E-2`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float fractional with exponent",
|
||||||
|
input: `6.626e-34`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float underscores",
|
||||||
|
input: `224_617.445_991_228`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inf",
|
||||||
|
input: `inf`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inf negative",
|
||||||
|
input: `-inf`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inf positive",
|
||||||
|
input: `+inf`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "nan",
|
||||||
|
input: `nan`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "nan negative",
|
||||||
|
input: `-nan`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "nan positive",
|
||||||
|
input: `+nan`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
e := e
|
||||||
|
t.Run(e.desc, func(t *testing.T) {
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(`A = ` + e.input))
|
||||||
|
p.NextExpression()
|
||||||
|
err := p.Error()
|
||||||
|
if e.err {
|
||||||
|
require.Error(t, err)
|
||||||
|
} else {
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
expected := astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: e.kind, Data: []byte(e.input)},
|
||||||
|
{Kind: Key, Data: []byte(`A`)},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
compareNode(t, expected, p.Expression())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type (
|
||||||
|
astNode struct {
|
||||||
|
Kind Kind
|
||||||
|
Data []byte
|
||||||
|
Children []astNode
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
func compareNode(t *testing.T, e astNode, n *Node) {
|
||||||
|
t.Helper()
|
||||||
|
require.Equal(t, e.Kind, n.Kind)
|
||||||
|
require.Equal(t, e.Data, n.Data)
|
||||||
|
|
||||||
|
compareIterator(t, e.Children, n.Children())
|
||||||
|
}
|
||||||
|
|
||||||
|
func compareIterator(t *testing.T, expected []astNode, actual Iterator) {
|
||||||
|
t.Helper()
|
||||||
|
idx := 0
|
||||||
|
|
||||||
|
for actual.Next() {
|
||||||
|
n := actual.Node()
|
||||||
|
|
||||||
|
if idx >= len(expected) {
|
||||||
|
t.Fatal("extra child in actual tree")
|
||||||
|
}
|
||||||
|
e := expected[idx]
|
||||||
|
|
||||||
|
compareNode(t, e, n)
|
||||||
|
|
||||||
|
idx++
|
||||||
|
}
|
||||||
|
|
||||||
|
if idx < len(expected) {
|
||||||
|
t.Fatal("missing children in actual", "idx =", idx, "expected =", len(expected))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//nolint:funlen
|
||||||
|
func TestParser_AST(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
desc string
|
||||||
|
input string
|
||||||
|
ast astNode
|
||||||
|
err bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "simple string assignment",
|
||||||
|
input: `A = "hello"`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`hello`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "simple bool assignment",
|
||||||
|
input: `A = true`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: Bool,
|
||||||
|
Data: []byte(`true`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "array of strings",
|
||||||
|
input: `A = ["hello", ["world", "again"]]`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: Array,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`hello`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Array,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`world`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`again`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "array of arrays of strings",
|
||||||
|
input: `A = ["hello", "world"]`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: Array,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`hello`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`world`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inline table",
|
||||||
|
input: `name = { first = "Tom", last = "Preston-Werner" }`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: InlineTable,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: String, Data: []byte(`Tom`)},
|
||||||
|
{Kind: Key, Data: []byte(`first`)},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: String, Data: []byte(`Preston-Werner`)},
|
||||||
|
{Kind: Key, Data: []byte(`last`)},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`name`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
e := e
|
||||||
|
t.Run(e.desc, func(t *testing.T) {
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(e.input))
|
||||||
|
p.NextExpression()
|
||||||
|
err := p.Error()
|
||||||
|
if e.err {
|
||||||
|
require.Error(t, err)
|
||||||
|
} else {
|
||||||
|
require.NoError(t, err)
|
||||||
|
compareNode(t, e.ast, p.Expression())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkParseBasicStringWithUnicode(b *testing.B) {
|
||||||
|
p := &Parser{}
|
||||||
|
b.Run("4", func(b *testing.B) {
|
||||||
|
input := []byte(`"\u1234\u5678\u9ABC\u1234\u5678\u9ABC"`)
|
||||||
|
b.ReportAllocs()
|
||||||
|
b.SetBytes(int64(len(input)))
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
p.parseBasicString(input)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
b.Run("8", func(b *testing.B) {
|
||||||
|
input := []byte(`"\u12345678\u9ABCDEF0\u12345678\u9ABCDEF0"`)
|
||||||
|
b.ReportAllocs()
|
||||||
|
b.SetBytes(int64(len(input)))
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
p.parseBasicString(input)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkParseBasicStringsEasy(b *testing.B) {
|
||||||
|
p := &Parser{}
|
||||||
|
|
||||||
|
for _, size := range []int{1, 4, 8, 16, 21} {
|
||||||
|
b.Run(strconv.Itoa(size), func(b *testing.B) {
|
||||||
|
input := []byte(`"` + strings.Repeat("A", size) + `"`)
|
||||||
|
|
||||||
|
b.ReportAllocs()
|
||||||
|
b.SetBytes(int64(len(input)))
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
p.parseBasicString(input)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestParser_AST_DateTimes checks that the parser assigns the expected AST
// node kind to TOML date/time values: DateTime when a UTC offset is present,
// LocalDateTime when there is a time part but no offset, and LocalDate when
// only a date is given.
func TestParser_AST_DateTimes(t *testing.T) {
	examples := []struct {
		desc  string // sub-test name
		input string // the raw date/time value, assigned to key A below
		kind  Kind   // expected kind of the value node
		err   bool   // true when parsing is expected to fail
	}{
		{
			desc:  "offset-date-time with delim 'T' and UTC offset",
			input: `2021-07-21T12:08:05Z`,
			kind:  DateTime,
		},
		{
			desc:  "offset-date-time with space delim and +8hours offset",
			input: `2021-07-21 12:08:05+08:00`,
			kind:  DateTime,
		},
		{
			desc:  "local-date-time with nano second",
			input: `2021-07-21T12:08:05.666666666`,
			kind:  LocalDateTime,
		},
		{
			desc:  "local-date-time",
			input: `2021-07-21T12:08:05`,
			kind:  LocalDateTime,
		},
		{
			desc:  "local-date",
			input: `2021-07-21`,
			kind:  LocalDate,
		},
	}

	for _, e := range examples {
		e := e // capture the range variable for the sub-test closure (pre-Go 1.22 idiom)
		t.Run(e.desc, func(t *testing.T) {
			p := Parser{}
			// Wrap the bare value in a key-value pair so it forms a valid
			// TOML expression.
			p.Reset([]byte(`A = ` + e.input))
			p.NextExpression()
			err := p.Error()
			if e.err {
				require.Error(t, err)
			} else {
				require.NoError(t, err)

				// A KeyValue node lists the value child first, then the key.
				expected := astNode{
					Kind: KeyValue,
					Children: []astNode{
						{Kind: e.kind, Data: []byte(e.input)},
						{Kind: Key, Data: []byte(`A`)},
					},
				}
				compareNode(t, expected, p.Expression())
			}
		})
	}
}
|
||||||
|
|
||||||
|
// This example demonstrates how to parse a TOML document and preserving
// comments. Comments are stored in the AST as Comment nodes. This example
// displays the structure of the full AST generated by the parser using the
// following structure:
//
// 1. Each root-level expression is separated by three dashes.
// 2. Bytes associated to a node are displayed in square brackets.
// 3. Siblings have the same indentation.
// 4. Children of a node are indented one level.
func ExampleParser_comments() {
	doc := `# Top of the document comment.
# Optional, any amount of lines.

# Above table.
[table] # Next to table.
# Above simple value.
key = "value" # Next to simple value.
# Below simple value.

# Some comment alone.

# Multiple comments, on multiple lines.

# Above inline table.
name = { first = "Tom", last = "Preston-Werner" } # Next to inline table.
# Below inline table.

# Above array.
array = [ 1, 2, 3 ] # Next to one-line array.
# Below array.

# Above multi-line array.
key5 = [ # Next to start of inline array.
  # Second line before array content.
  1, # Next to first element.
  # After first element.
  # Before second element.
  2,
  3, # Next to last element
  # After last element.
] # Next to end of array.
# Below multi-line array.

# Before array table.
[[products]] # Next to array table.
# After array table.
`

	// printGeneric walks the AST depth-first, printing one line per node:
	// the node's shape (line:column and byte offsets), its kind, and its
	// raw data. Children are indented one extra level; siblings share the
	// same indentation. Declared before assignment so it can recurse.
	var printGeneric func(*Parser, int, *Node)
	printGeneric = func(p *Parser, indent int, e *Node) {
		if e == nil {
			return
		}
		s := p.Shape(e.Raw)
		x := fmt.Sprintf("%d:%d->%d:%d (%d->%d)", s.Start.Line, s.Start.Column, s.End.Line, s.End.Column, s.Start.Offset, s.End.Offset)
		fmt.Printf("%-25s | %s%s [%s]\n", x, strings.Repeat(" ", indent), e.Kind, e.Data)
		printGeneric(p, indent+1, e.Child())
		printGeneric(p, indent, e.Next())
	}

	// printTree iterates every root-level expression, separating them with
	// "---" markers, and panics on any parse error.
	printTree := func(p *Parser) {
		for p.NextExpression() {
			e := p.Expression()
			fmt.Println("---")
			printGeneric(p, 0, e)
		}
		if err := p.Error(); err != nil {
			panic(err)
		}
	}

	// KeepComments makes the parser emit Comment nodes instead of
	// discarding them.
	p := &Parser{
		KeepComments: true,
	}
	p.Reset([]byte(doc))
	printTree(p)

	// Output:
	// ---
	// 1:1->1:31 (0->30) | Comment [# Top of the document comment.]
	// ---
	// 2:1->2:33 (31->63) | Comment [# Optional, any amount of lines.]
	// ---
	// 4:1->4:15 (65->79) | Comment [# Above table.]
	// ---
	// 1:1->1:1 (0->0) | Table []
	// 5:2->5:7 (81->86) | Key [table]
	// 5:9->5:25 (88->104) | Comment [# Next to table.]
	// ---
	// 6:1->6:22 (105->126) | Comment [# Above simple value.]
	// ---
	// 1:1->1:1 (0->0) | KeyValue []
	// 7:7->7:14 (133->140) | String [value]
	// 7:1->7:4 (127->130) | Key [key]
	// 7:15->7:38 (141->164) | Comment [# Next to simple value.]
	// ---
	// 8:1->8:22 (165->186) | Comment [# Below simple value.]
	// ---
	// 10:1->10:22 (188->209) | Comment [# Some comment alone.]
	// ---
	// 12:1->12:40 (211->250) | Comment [# Multiple comments, on multiple lines.]
	// ---
	// 14:1->14:22 (252->273) | Comment [# Above inline table.]
	// ---
	// 1:1->1:1 (0->0) | KeyValue []
	// 15:8->15:9 (281->282) | InlineTable []
	// 1:1->1:1 (0->0) | KeyValue []
	// 15:18->15:23 (291->296) | String [Tom]
	// 15:10->15:15 (283->288) | Key [first]
	// 1:1->1:1 (0->0) | KeyValue []
	// 15:32->15:48 (305->321) | String [Preston-Werner]
	// 15:25->15:29 (298->302) | Key [last]
	// 15:1->15:5 (274->278) | Key [name]
	// 15:51->15:74 (324->347) | Comment [# Next to inline table.]
	// ---
	// 16:1->16:22 (348->369) | Comment [# Below inline table.]
	// ---
	// 18:1->18:15 (371->385) | Comment [# Above array.]
	// ---
	// 1:1->1:1 (0->0) | KeyValue []
	// 1:1->1:1 (0->0) | Array []
	// 1:1->1:1 (0->0) | Integer [1]
	// 1:1->1:1 (0->0) | Integer [2]
	// 1:1->1:1 (0->0) | Integer [3]
	// 19:1->19:6 (386->391) | Key [array]
	// 19:21->19:46 (406->431) | Comment [# Next to one-line array.]
	// ---
	// 20:1->20:15 (432->446) | Comment [# Below array.]
	// ---
	// 22:1->22:26 (448->473) | Comment [# Above multi-line array.]
	// ---
	// 1:1->1:1 (0->0) | KeyValue []
	// 1:1->1:1 (0->0) | Array []
	// 23:10->23:42 (483->515) | Comment [# Next to start of inline array.]
	// 24:3->24:38 (518->553) | Comment [# Second line before array content.]
	// 1:1->1:1 (0->0) | Integer [1]
	// 25:6->25:30 (559->583) | Comment [# Next to first element.]
	// 26:3->26:25 (586->608) | Comment [# After first element.]
	// 27:3->27:27 (611->635) | Comment [# Before second element.]
	// 1:1->1:1 (0->0) | Integer [2]
	// 1:1->1:1 (0->0) | Integer [3]
	// 29:6->29:28 (646->668) | Comment [# Next to last element]
	// 30:3->30:24 (671->692) | Comment [# After last element.]
	// 23:1->23:5 (474->478) | Key [key5]
	// 31:3->31:26 (695->718) | Comment [# Next to end of array.]
	// ---
	// 32:1->32:26 (719->744) | Comment [# Below multi-line array.]
	// ---
	// 34:1->34:22 (746->767) | Comment [# Before array table.]
	// ---
	// 1:1->1:1 (0->0) | ArrayTable []
	// 35:3->35:11 (770->778) | Key [products]
	// 35:14->35:36 (781->803) | Comment [# Next to array table.]
	// ---
	// 36:1->36:21 (804->824) | Comment [# After array table.]
}
|
||||||
|
|
||||||
|
// ExampleParser shows the basic Parser loop: Reset with a document, then
// iterate with NextExpression and inspect each expression node.
func ExampleParser() {
	doc := `
hello = "world"
value = 42
`
	p := Parser{}
	p.Reset([]byte(doc))
	for p.NextExpression() {
		e := p.Expression()
		fmt.Printf("Expression: %s\n", e.Kind)
		value := e.Value()
		it := e.Key()
		k := it.Node() // shortcut: we know there is no dotted key in the example
		fmt.Printf("%s -> (%s) %s\n", k.Data, value.Kind, value.Data)
	}

	// Output:
	// Expression: KeyValue
	// hello -> (String) world
	// Expression: KeyValue
	// value -> (Integer) 42
}
|
||||||
@@ -1,4 +1,6 @@
|
|||||||
package toml
|
package unstable
|
||||||
|
|
||||||
|
import "github.com/pelletier/go-toml/v2/internal/characters"
|
||||||
|
|
||||||
func scanFollows(b []byte, pattern string) bool {
|
func scanFollows(b []byte, pattern string) bool {
|
||||||
n := len(pattern)
|
n := len(pattern)
|
||||||
@@ -53,17 +55,17 @@ func scanLiteralString(b []byte) ([]byte, []byte, error) {
|
|||||||
switch b[i] {
|
switch b[i] {
|
||||||
case '\'':
|
case '\'':
|
||||||
return b[:i+1], b[i+1:], nil
|
return b[:i+1], b[i+1:], nil
|
||||||
case '\n':
|
case '\n', '\r':
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "literal strings cannot have new lines")
|
return nil, nil, NewParserError(b[i:i+1], "literal strings cannot have new lines")
|
||||||
}
|
}
|
||||||
size := utf8ValidNext(b[i:])
|
size := characters.Utf8ValidNext(b[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character")
|
||||||
}
|
}
|
||||||
i += size
|
i += size
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, nil, newDecodeError(b[len(b):], "unterminated literal string")
|
return nil, nil, NewParserError(b[len(b):], "unterminated literal string")
|
||||||
}
|
}
|
||||||
|
|
||||||
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
||||||
@@ -76,49 +78,61 @@ func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
|||||||
// mll-char = %x09 / %x20-26 / %x28-7E / non-ascii
|
// mll-char = %x09 / %x20-26 / %x28-7E / non-ascii
|
||||||
// mll-quotes = 1*2apostrophe
|
// mll-quotes = 1*2apostrophe
|
||||||
for i := 3; i < len(b); {
|
for i := 3; i < len(b); {
|
||||||
if scanFollowsMultilineLiteralStringDelimiter(b[i:]) {
|
switch b[i] {
|
||||||
i += 3
|
case '\'':
|
||||||
|
if scanFollowsMultilineLiteralStringDelimiter(b[i:]) {
|
||||||
|
i += 3
|
||||||
|
|
||||||
// At that point we found 3 apostrophe, and i is the
|
// At that point we found 3 apostrophe, and i is the
|
||||||
// index of the byte after the third one. The scanner
|
// index of the byte after the third one. The scanner
|
||||||
// needs to be eager, because there can be an extra 2
|
// needs to be eager, because there can be an extra 2
|
||||||
// apostrophe that can be accepted at the end of the
|
// apostrophe that can be accepted at the end of the
|
||||||
// string.
|
// string.
|
||||||
|
|
||||||
|
if i >= len(b) || b[i] != '\'' {
|
||||||
|
return b[:i], b[i:], nil
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
|
||||||
|
if i >= len(b) || b[i] != '\'' {
|
||||||
|
return b[:i], b[i:], nil
|
||||||
|
}
|
||||||
|
i++
|
||||||
|
|
||||||
|
if i < len(b) && b[i] == '\'' {
|
||||||
|
return nil, nil, NewParserError(b[i-3:i+1], "''' not allowed in multiline literal string")
|
||||||
|
}
|
||||||
|
|
||||||
if i >= len(b) || b[i] != '\'' {
|
|
||||||
return b[:i], b[i:], nil
|
return b[:i], b[i:], nil
|
||||||
}
|
}
|
||||||
i++
|
case '\r':
|
||||||
|
if len(b) < i+2 {
|
||||||
if i >= len(b) || b[i] != '\'' {
|
return nil, nil, NewParserError(b[len(b):], `need a \n after \r`)
|
||||||
return b[:i], b[i:], nil
|
|
||||||
}
|
}
|
||||||
i++
|
if b[i+1] != '\n' {
|
||||||
|
return nil, nil, NewParserError(b[i:i+2], `need a \n after \r`)
|
||||||
if i < len(b) && b[i] == '\'' {
|
|
||||||
return nil, nil, newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string")
|
|
||||||
}
|
}
|
||||||
|
i += 2 // skip the \n
|
||||||
return b[:i], b[i:], nil
|
continue
|
||||||
}
|
}
|
||||||
size := utf8ValidNext(b[i:])
|
size := characters.Utf8ValidNext(b[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character")
|
||||||
}
|
}
|
||||||
i += size
|
i += size
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, nil, newDecodeError(b[len(b):], `multiline literal string not terminated by '''`)
|
return nil, nil, NewParserError(b[len(b):], `multiline literal string not terminated by '''`)
|
||||||
}
|
}
|
||||||
|
|
||||||
func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
|
func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
|
||||||
const lenCRLF = 2
|
const lenCRLF = 2
|
||||||
if len(b) < lenCRLF {
|
if len(b) < lenCRLF {
|
||||||
return nil, nil, newDecodeError(b, "windows new line expected")
|
return nil, nil, NewParserError(b, "windows new line expected")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[1] != '\n' {
|
if b[1] != '\n' {
|
||||||
return nil, nil, newDecodeError(b, `windows new line should be \r\n`)
|
return nil, nil, NewParserError(b, `windows new line should be \r\n`)
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[:lenCRLF], b[lenCRLF:], nil
|
return b[:lenCRLF], b[lenCRLF:], nil
|
||||||
@@ -137,7 +151,6 @@ func scanWhitespace(b []byte) ([]byte, []byte) {
|
|||||||
return b, b[len(b):]
|
return b, b[len(b):]
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:unparam
|
|
||||||
func scanComment(b []byte) ([]byte, []byte, error) {
|
func scanComment(b []byte) ([]byte, []byte, error) {
|
||||||
// comment-start-symbol = %x23 ; #
|
// comment-start-symbol = %x23 ; #
|
||||||
// non-ascii = %x80-D7FF / %xE000-10FFFF
|
// non-ascii = %x80-D7FF / %xE000-10FFFF
|
||||||
@@ -153,11 +166,11 @@ func scanComment(b []byte) ([]byte, []byte, error) {
|
|||||||
if i+1 < len(b) && b[i+1] == '\n' {
|
if i+1 < len(b) && b[i+1] == '\n' {
|
||||||
return b[:i+1], b[i+1:], nil
|
return b[:i+1], b[i+1:], nil
|
||||||
}
|
}
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
|
||||||
}
|
}
|
||||||
size := utf8ValidNext(b[i:])
|
size := characters.Utf8ValidNext(b[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
|
||||||
}
|
}
|
||||||
|
|
||||||
i += size
|
i += size
|
||||||
@@ -180,17 +193,17 @@ func scanBasicString(b []byte) ([]byte, bool, []byte, error) {
|
|||||||
case '"':
|
case '"':
|
||||||
return b[:i+1], escaped, b[i+1:], nil
|
return b[:i+1], escaped, b[i+1:], nil
|
||||||
case '\n', '\r':
|
case '\n', '\r':
|
||||||
return nil, escaped, nil, newDecodeError(b[i:i+1], "basic strings cannot have new lines")
|
return nil, escaped, nil, NewParserError(b[i:i+1], "basic strings cannot have new lines")
|
||||||
case '\\':
|
case '\\':
|
||||||
if len(b) < i+2 {
|
if len(b) < i+2 {
|
||||||
return nil, escaped, nil, newDecodeError(b[i:i+1], "need a character after \\")
|
return nil, escaped, nil, NewParserError(b[i:i+1], "need a character after \\")
|
||||||
}
|
}
|
||||||
escaped = true
|
escaped = true
|
||||||
i++ // skip the next character
|
i++ // skip the next character
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], `basic string not terminated by "`)
|
return nil, escaped, nil, NewParserError(b[len(b):], `basic string not terminated by "`)
|
||||||
}
|
}
|
||||||
|
|
||||||
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
||||||
@@ -231,19 +244,27 @@ func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
|||||||
i++
|
i++
|
||||||
|
|
||||||
if i < len(b) && b[i] == '"' {
|
if i < len(b) && b[i] == '"' {
|
||||||
return nil, escaped, nil, newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`)
|
return nil, escaped, nil, NewParserError(b[i-3:i+1], `""" not allowed in multiline basic string`)
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[:i], escaped, b[i:], nil
|
return b[:i], escaped, b[i:], nil
|
||||||
}
|
}
|
||||||
case '\\':
|
case '\\':
|
||||||
if len(b) < i+2 {
|
if len(b) < i+2 {
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], "need a character after \\")
|
return nil, escaped, nil, NewParserError(b[len(b):], "need a character after \\")
|
||||||
}
|
}
|
||||||
escaped = true
|
escaped = true
|
||||||
i++ // skip the next character
|
i++ // skip the next character
|
||||||
|
case '\r':
|
||||||
|
if len(b) < i+2 {
|
||||||
|
return nil, escaped, nil, NewParserError(b[len(b):], `need a \n after \r`)
|
||||||
|
}
|
||||||
|
if b[i+1] != '\n' {
|
||||||
|
return nil, escaped, nil, NewParserError(b[i:i+2], `need a \n after \r`)
|
||||||
|
}
|
||||||
|
i++ // skip the \n
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], `multiline basic string not terminated by """`)
|
return nil, escaped, nil, NewParserError(b[len(b):], `multiline basic string not terminated by """`)
|
||||||
}
|
}
|
||||||
Reference in New Issue
Block a user