Compare commits
110 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 80189ba449 | |||
| f36a3ece9e | |||
| 77f3862df4 | |||
| 16b1ef5508 | |||
| e14bde7c1d | |||
| 4b1ff01eb3 | |||
| 048a25f0f2 | |||
| b3575580f9 | |||
| a0be52f4c1 | |||
| 316bfc66a4 | |||
| 2edc61f171 | |||
| 4a1b05ca08 | |||
| 003aa0993b | |||
| 84d730b6c4 | |||
| 97bd897177 | |||
| 7924b1816f | |||
| 2a07b6d9db | |||
| 692b98560b | |||
| 99cd40b175 | |||
| 3aaf147e3e | |||
| a675c6b3e2 | |||
| 9702fae9b8 | |||
| 3cf1eb2312 | |||
| 2af3554f90 | |||
| 180c6ba2ba | |||
| dafc4173ef | |||
| f1a83be671 | |||
| 5aeb70b3f0 | |||
| 8384a5683c | |||
| 4369957cb4 | |||
| a0e8464967 | |||
| c57d0d559f | |||
| 644602b845 | |||
| 36df8eef6e | |||
| 18a2148713 | |||
| bc9958322f | |||
| 6d56ac8027 | |||
| 098464b61b | |||
| 85e2448ce5 | |||
| ee07c9203b | |||
| 014204cfb7 | |||
| 923b2ab478 | |||
| af236b689f | |||
| b730b2be5d | |||
| a437caafe5 | |||
| be6c57be30 | |||
| d55304782e | |||
| 0977c05dd5 | |||
| bccd6e48f4 | |||
| 9b890cf9c5 | |||
| a3d5a0bb53 | |||
| d00d2cca6e | |||
| 86608d7fca | |||
| 4a1877957a | |||
| 3021d6d033 | |||
| 32788f26f8 | |||
| 8ed6d131eb | |||
| 7dad87762a | |||
| 2b69615b5d | |||
| 06fb30bf2e | |||
| 2e087bdf5f | |||
| caeb9f9631 | |||
| e7223fb40e | |||
| 05bedf36d8 | |||
| f5486d590f | |||
| 2ca21fb7b4 | |||
| 34765b4a9e | |||
| 358c8d2c23 | |||
| fd8d0bf4d9 | |||
| a76e18e8c5 | |||
| dff0c128d0 | |||
| 3573ce3770 | |||
| ae933f2e2a | |||
| 3175efb395 | |||
| 9dd7f1af78 | |||
| 4a5c27c299 | |||
| 76cc96f6d8 | |||
| 4835627845 | |||
| cef80b96a4 | |||
| 4040373cfd | |||
| bb026cae89 | |||
| f7d9b9ba53 | |||
| fac33d6fa8 | |||
| e183db7e69 | |||
| 60e4af8cca | |||
| 8bb1e08dc7 | |||
| 7b980e792b | |||
| 44c1513ccd | |||
| fcf9d37d0c | |||
| 986afffb7c | |||
| 8c2c9cc986 | |||
| 55ca4e35e4 | |||
| d34104d493 | |||
| 2aa08368fa | |||
| 654811fbc3 | |||
| 5c05d4d863 | |||
| 643c251c4b | |||
| 8a416daa69 | |||
| fcd9179b7d | |||
| 9f5726004e | |||
| c4a2eef8a4 | |||
| b58c20aa49 | |||
| 090cccf4ba | |||
| 58a592bbf8 | |||
| 94bd3ddcd6 | |||
| e195b58fd0 | |||
| c83d001c6d | |||
| b9e3b9c370 | |||
| d26887310c | |||
| 942841787a |
@@ -0,0 +1,26 @@
|
|||||||
|
name: CIFuzz
|
||||||
|
on: [pull_request]
|
||||||
|
jobs:
|
||||||
|
Fuzzing:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Build Fuzzers
|
||||||
|
id: build
|
||||||
|
uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
|
||||||
|
with:
|
||||||
|
oss-fuzz-project-name: 'go-toml'
|
||||||
|
dry-run: false
|
||||||
|
language: go
|
||||||
|
- name: Run Fuzzers
|
||||||
|
uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
|
||||||
|
with:
|
||||||
|
oss-fuzz-project-name: 'go-toml'
|
||||||
|
fuzz-seconds: 300
|
||||||
|
dry-run: false
|
||||||
|
language: go
|
||||||
|
- name: Upload Crash
|
||||||
|
uses: actions/upload-artifact@v7
|
||||||
|
if: failure() && steps.build.outcome == 'success'
|
||||||
|
with:
|
||||||
|
name: artifacts
|
||||||
|
path: ./out/artifacts
|
||||||
@@ -35,11 +35,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v4
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
@@ -47,10 +47,10 @@ jobs:
|
|||||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||||
|
|
||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v2
|
uses: github/codeql-action/autobuild@v4
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 https://git.io/JvXDl
|
||||||
@@ -64,4 +64,4 @@ jobs:
|
|||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v4
|
||||||
|
|||||||
@@ -9,12 +9,12 @@ jobs:
|
|||||||
runs-on: "ubuntu-latest"
|
runs-on: "ubuntu-latest"
|
||||||
name: report
|
name: report
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- name: Setup go
|
- name: Setup go
|
||||||
uses: actions/setup-go@master
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: 1.19
|
go-version: "1.26"
|
||||||
- name: Run tests with coverage
|
- name: Run tests with coverage
|
||||||
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
|
run: ./ci.sh coverage -d "${GITHUB_BASE_REF-HEAD}"
|
||||||
|
|||||||
@@ -0,0 +1,36 @@
|
|||||||
|
name: Go Versions Compatibility Test
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
go_versions:
|
||||||
|
description: 'Go versions to test (space-separated, e.g., "1.21 1.22 1.23")'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v4
|
||||||
|
|
||||||
|
- name: Run Go versions compatibility test
|
||||||
|
run: |
|
||||||
|
VERSIONS="${{ github.event.inputs.go_versions }}"
|
||||||
|
./test-go-versions.sh --output ./test-results $VERSIONS
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v7
|
||||||
|
with:
|
||||||
|
name: go-versions-test-results
|
||||||
|
path: |
|
||||||
|
test-results/
|
||||||
|
retention-days: 30
|
||||||
@@ -0,0 +1,22 @@
|
|||||||
|
name: lint
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- v2
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
golangci:
|
||||||
|
name: lint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Setup go
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
go-version: "1.26"
|
||||||
|
- name: Run golangci-lint
|
||||||
|
uses: golangci/golangci-lint-action@v9
|
||||||
|
with:
|
||||||
|
version: v2.8.0
|
||||||
@@ -16,24 +16,24 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v2
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: 1.19
|
go-version: "1.26"
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v4
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- name: Run GoReleaser
|
- name: Run GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v3
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
distribution: goreleaser
|
distribution: goreleaser
|
||||||
version: latest
|
version: '~> v2'
|
||||||
args: release ${{ inputs.args }} --rm-dist
|
args: release ${{ inputs.args }} --clean
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
@@ -10,23 +10,24 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
strategy:
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest']
|
os: [ 'ubuntu-latest', 'windows-latest', 'macos-latest', 'macos-14' ]
|
||||||
go: [ '1.18', '1.19' ]
|
go: [ '1.25', '1.26' ]
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
name: ${{ matrix.go }}/${{ matrix.os }}
|
name: ${{ matrix.go }}/${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- name: Setup go ${{ matrix.go }}
|
- name: Setup go ${{ matrix.go }}
|
||||||
uses: actions/setup-go@master
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go }}
|
go-version: ${{ matrix.go }}
|
||||||
- name: Run unit tests
|
- name: Run unit tests
|
||||||
run: go test -race ./...
|
run: go test -race ./...
|
||||||
release-check:
|
release-check:
|
||||||
if: ${{ github.ref != 'refs/heads/v2' }}
|
if: ${{ github.ref != 'refs/heads/v2' }}
|
||||||
uses: pelletier/go-toml/.github/workflows/release.yml@v2
|
uses: ./.github/workflows/release.yml
|
||||||
with:
|
with:
|
||||||
args: --snapshot
|
args: --snapshot
|
||||||
|
|||||||
+3
-1
@@ -3,4 +3,6 @@ fuzz/
|
|||||||
cmd/tomll/tomll
|
cmd/tomll/tomll
|
||||||
cmd/tomljson/tomljson
|
cmd/tomljson/tomljson
|
||||||
cmd/tomltestgen/tomltestgen
|
cmd/tomltestgen/tomltestgen
|
||||||
dist
|
dist
|
||||||
|
tests/
|
||||||
|
test-results
|
||||||
|
|||||||
+33
-41
@@ -1,84 +1,76 @@
|
|||||||
[service]
|
version = "2"
|
||||||
golangci-lint-version = "1.39.0"
|
|
||||||
|
|
||||||
[linters-settings.wsl]
|
|
||||||
allow-assign-and-anything = true
|
|
||||||
|
|
||||||
[linters-settings.exhaustive]
|
|
||||||
default-signifies-exhaustive = true
|
|
||||||
|
|
||||||
[linters]
|
[linters]
|
||||||
disable-all = true
|
default = "none"
|
||||||
enable = [
|
enable = [
|
||||||
"asciicheck",
|
"asciicheck",
|
||||||
"bodyclose",
|
"bodyclose",
|
||||||
"cyclop",
|
|
||||||
"deadcode",
|
|
||||||
"depguard",
|
|
||||||
"dogsled",
|
"dogsled",
|
||||||
"dupl",
|
"dupl",
|
||||||
"durationcheck",
|
"durationcheck",
|
||||||
"errcheck",
|
"errcheck",
|
||||||
"errorlint",
|
"errorlint",
|
||||||
"exhaustive",
|
"exhaustive",
|
||||||
# "exhaustivestruct",
|
|
||||||
"exportloopref",
|
|
||||||
"forbidigo",
|
"forbidigo",
|
||||||
# "forcetypeassert",
|
|
||||||
"funlen",
|
|
||||||
"gci",
|
|
||||||
# "gochecknoglobals",
|
|
||||||
"gochecknoinits",
|
"gochecknoinits",
|
||||||
"gocognit",
|
|
||||||
"goconst",
|
"goconst",
|
||||||
"gocritic",
|
"gocritic",
|
||||||
"gocyclo",
|
"godoclint",
|
||||||
"godot",
|
|
||||||
"godox",
|
|
||||||
# "goerr113",
|
|
||||||
"gofmt",
|
|
||||||
"gofumpt",
|
|
||||||
"goheader",
|
"goheader",
|
||||||
"goimports",
|
|
||||||
"golint",
|
|
||||||
"gomnd",
|
|
||||||
# "gomoddirectives",
|
|
||||||
"gomodguard",
|
"gomodguard",
|
||||||
"goprintffuncname",
|
"goprintffuncname",
|
||||||
"gosec",
|
"gosec",
|
||||||
"gosimple",
|
|
||||||
"govet",
|
"govet",
|
||||||
# "ifshort",
|
|
||||||
"importas",
|
"importas",
|
||||||
"ineffassign",
|
"ineffassign",
|
||||||
"lll",
|
"lll",
|
||||||
"makezero",
|
"makezero",
|
||||||
|
"mirror",
|
||||||
"misspell",
|
"misspell",
|
||||||
"nakedret",
|
"nakedret",
|
||||||
"nestif",
|
|
||||||
"nilerr",
|
"nilerr",
|
||||||
# "nlreturn",
|
|
||||||
"noctx",
|
"noctx",
|
||||||
"nolintlint",
|
"nolintlint",
|
||||||
#"paralleltest",
|
"perfsprint",
|
||||||
"prealloc",
|
"prealloc",
|
||||||
"predeclared",
|
"predeclared",
|
||||||
"revive",
|
"revive",
|
||||||
"rowserrcheck",
|
"rowserrcheck",
|
||||||
"sqlclosecheck",
|
"sqlclosecheck",
|
||||||
"staticcheck",
|
"staticcheck",
|
||||||
"structcheck",
|
|
||||||
"stylecheck",
|
|
||||||
# "testpackage",
|
|
||||||
"thelper",
|
"thelper",
|
||||||
"tparallel",
|
"tparallel",
|
||||||
"typecheck",
|
|
||||||
"unconvert",
|
"unconvert",
|
||||||
"unparam",
|
"unparam",
|
||||||
"unused",
|
"unused",
|
||||||
"varcheck",
|
"usetesting",
|
||||||
"wastedassign",
|
"wastedassign",
|
||||||
"whitespace",
|
"whitespace",
|
||||||
# "wrapcheck",
|
]
|
||||||
# "wsl"
|
|
||||||
|
[linters.settings.exhaustive]
|
||||||
|
default-signifies-exhaustive = true
|
||||||
|
|
||||||
|
[linters.settings.lll]
|
||||||
|
line-length = 150
|
||||||
|
|
||||||
|
[[linters.exclusions.rules]]
|
||||||
|
path = ".test.go"
|
||||||
|
linters = ["goconst", "gosec"]
|
||||||
|
|
||||||
|
[[linters.exclusions.rules]]
|
||||||
|
path = "main.go"
|
||||||
|
linters = ["forbidigo"]
|
||||||
|
|
||||||
|
[[linters.exclusions.rules]]
|
||||||
|
path = "internal"
|
||||||
|
linters = ["revive"]
|
||||||
|
text = "(exported|indent-error-flow): "
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
enable = [
|
||||||
|
"gci",
|
||||||
|
"gofmt",
|
||||||
|
"gofumpt",
|
||||||
|
"goimports",
|
||||||
]
|
]
|
||||||
|
|||||||
+5
-4
@@ -1,3 +1,4 @@
|
|||||||
|
version: 2
|
||||||
before:
|
before:
|
||||||
hooks:
|
hooks:
|
||||||
- go mod tidy
|
- go mod tidy
|
||||||
@@ -18,9 +19,9 @@ builds:
|
|||||||
- linux_amd64
|
- linux_amd64
|
||||||
- linux_arm64
|
- linux_arm64
|
||||||
- linux_arm
|
- linux_arm
|
||||||
|
- linux_riscv64
|
||||||
- windows_amd64
|
- windows_amd64
|
||||||
- windows_arm64
|
- windows_arm64
|
||||||
- windows_arm
|
|
||||||
- darwin_amd64
|
- darwin_amd64
|
||||||
- darwin_arm64
|
- darwin_arm64
|
||||||
- id: tomljson
|
- id: tomljson
|
||||||
@@ -37,9 +38,9 @@ builds:
|
|||||||
- linux_amd64
|
- linux_amd64
|
||||||
- linux_arm64
|
- linux_arm64
|
||||||
- linux_arm
|
- linux_arm
|
||||||
|
- linux_riscv64
|
||||||
- windows_amd64
|
- windows_amd64
|
||||||
- windows_arm64
|
- windows_arm64
|
||||||
- windows_arm
|
|
||||||
- darwin_amd64
|
- darwin_amd64
|
||||||
- darwin_arm64
|
- darwin_arm64
|
||||||
- id: jsontoml
|
- id: jsontoml
|
||||||
@@ -55,10 +56,10 @@ builds:
|
|||||||
targets:
|
targets:
|
||||||
- linux_amd64
|
- linux_amd64
|
||||||
- linux_arm64
|
- linux_arm64
|
||||||
|
- linux_riscv64
|
||||||
- linux_arm
|
- linux_arm
|
||||||
- windows_amd64
|
- windows_amd64
|
||||||
- windows_arm64
|
- windows_arm64
|
||||||
- windows_arm
|
|
||||||
- darwin_amd64
|
- darwin_amd64
|
||||||
- darwin_arm64
|
- darwin_arm64
|
||||||
universal_binaries:
|
universal_binaries:
|
||||||
@@ -109,7 +110,7 @@ dockers:
|
|||||||
checksum:
|
checksum:
|
||||||
name_template: 'sha256sums.txt'
|
name_template: 'sha256sums.txt'
|
||||||
snapshot:
|
snapshot:
|
||||||
name_template: "{{ incpatch .Version }}-next"
|
version_template: "{{ incpatch .Version }}-next"
|
||||||
release:
|
release:
|
||||||
github:
|
github:
|
||||||
owner: pelletier
|
owner: pelletier
|
||||||
|
|||||||
@@ -0,0 +1,64 @@
|
|||||||
|
# Agent Guidelines for go-toml
|
||||||
|
|
||||||
|
This file provides guidelines for AI agents contributing to go-toml. All agents must follow these rules derived from [CONTRIBUTING.md](./CONTRIBUTING.md).
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
go-toml is a TOML library for Go. The goal is to provide an easy-to-use and efficient TOML implementation that gets the job done without getting in the way.
|
||||||
|
|
||||||
|
## Code Change Rules
|
||||||
|
|
||||||
|
### Backward Compatibility
|
||||||
|
|
||||||
|
- **No backward-incompatible changes** unless explicitly discussed and approved
|
||||||
|
- Avoid breaking people's programs unless absolutely necessary
|
||||||
|
|
||||||
|
### Testing Requirements
|
||||||
|
|
||||||
|
- **All bug fixes must include regression tests**
|
||||||
|
- **All new code must be tested**
|
||||||
|
- Run tests before submitting: `go test -race ./...`
|
||||||
|
- Test coverage must not decrease. Check with:
|
||||||
|
```bash
|
||||||
|
go test -covermode=atomic -coverprofile=coverage.out
|
||||||
|
go tool cover -func=coverage.out
|
||||||
|
```
|
||||||
|
- All lines of code touched by changes should be covered by tests
|
||||||
|
|
||||||
|
### Performance Requirements
|
||||||
|
|
||||||
|
- go-toml aims to stay efficient; avoid performance regressions
|
||||||
|
- Run benchmarks to verify: `go test ./... -bench=. -count=10`
|
||||||
|
- Compare results using [benchstat](https://pkg.go.dev/golang.org/x/perf/cmd/benchstat)
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- New features or feature extensions must include documentation
|
||||||
|
- Documentation lives in [README.md](./README.md) and throughout source code
|
||||||
|
|
||||||
|
### Code Style
|
||||||
|
|
||||||
|
- Follow existing code format and structure
|
||||||
|
- Code must pass `go fmt`
|
||||||
|
- Code must pass linting with the same golangci-lint version as CI (see version in `.github/workflows/lint.yml`):
|
||||||
|
```bash
|
||||||
|
# Install specific version (check lint.yml for current version)
|
||||||
|
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s -- -b $(go env GOPATH)/bin <version>
|
||||||
|
# Run linter
|
||||||
|
golangci-lint run ./...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Commit Messages
|
||||||
|
|
||||||
|
- Commit messages must explain **why** the change is needed
|
||||||
|
- Keep messages clear and informative even if details are in the PR description
|
||||||
|
|
||||||
|
## Pull Request Checklist
|
||||||
|
|
||||||
|
Before submitting:
|
||||||
|
|
||||||
|
1. Tests pass (`go test -race ./...`)
|
||||||
|
2. No backward-incompatible changes (unless discussed)
|
||||||
|
3. Relevant documentation added/updated
|
||||||
|
4. No performance regression (verify with benchmarks)
|
||||||
|
5. Title is clear and understandable for changelog
|
||||||
+60
-21
@@ -33,7 +33,7 @@ The documentation is present in the [README][readme] and thorough the source
|
|||||||
code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a change
|
code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a change
|
||||||
to the documentation, create a pull request with your proposed changes. For
|
to the documentation, create a pull request with your proposed changes. For
|
||||||
simple changes like that, the easiest way to go is probably the "Fork this
|
simple changes like that, the easiest way to go is probably the "Fork this
|
||||||
project and edit the file" button on Github, displayed at the top right of the
|
project and edit the file" button on GitHub, displayed at the top right of the
|
||||||
file. Unless it's a trivial change (for example a typo), provide a little bit of
|
file. Unless it's a trivial change (for example a typo), provide a little bit of
|
||||||
context in your pull request description or commit message.
|
context in your pull request description or commit message.
|
||||||
|
|
||||||
@@ -92,6 +92,48 @@ However, given GitHub's new policy to _not_ run Actions on pull requests until a
|
|||||||
maintainer clicks on button, it is highly recommended that you run them locally
|
maintainer clicks on button, it is highly recommended that you run them locally
|
||||||
as you make changes.
|
as you make changes.
|
||||||
|
|
||||||
|
### Test across Go versions
|
||||||
|
|
||||||
|
The repository includes tooling to test go-toml across multiple Go versions
|
||||||
|
(1.11 through 1.25) both locally and in GitHub Actions.
|
||||||
|
|
||||||
|
#### Local testing with Docker
|
||||||
|
|
||||||
|
Prerequisites: Docker installed and running, Bash shell, `rsync` command.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Test all Go versions in parallel (default)
|
||||||
|
./test-go-versions.sh
|
||||||
|
|
||||||
|
# Test specific versions
|
||||||
|
./test-go-versions.sh 1.21 1.22 1.23
|
||||||
|
|
||||||
|
# Test sequentially (slower but uses less resources)
|
||||||
|
./test-go-versions.sh --sequential
|
||||||
|
|
||||||
|
# Verbose output with custom results directory
|
||||||
|
./test-go-versions.sh --verbose --output ./my-results 1.24 1.25
|
||||||
|
|
||||||
|
# Show all options
|
||||||
|
./test-go-versions.sh --help
|
||||||
|
```
|
||||||
|
|
||||||
|
The script creates Docker containers for each Go version and runs the full test
|
||||||
|
suite. Results are saved to a `test-results/` directory with individual logs and
|
||||||
|
a comprehensive summary report.
|
||||||
|
|
||||||
|
The script only exits with a non-zero status code if either of the two most
|
||||||
|
recent Go versions fail.
|
||||||
|
|
||||||
|
#### GitHub Actions testing (maintainers)
|
||||||
|
|
||||||
|
1. Go to the **Actions** tab in the GitHub repository
|
||||||
|
2. Select **"Go Versions Compatibility Test"** from the workflow list
|
||||||
|
3. Click **"Run workflow"**
|
||||||
|
4. Optionally customize:
|
||||||
|
- **Go versions**: Space-separated list (e.g., `1.21 1.22 1.23`)
|
||||||
|
- **Execution mode**: Parallel (faster) or sequential (more stable)
|
||||||
|
|
||||||
### Check coverage
|
### Check coverage
|
||||||
|
|
||||||
We use `go tool cover` to compute test coverage. Most code editors have a way to
|
We use `go tool cover` to compute test coverage. Most code editors have a way to
|
||||||
@@ -111,7 +153,7 @@ code lowers the coverage.
|
|||||||
|
|
||||||
Go-toml aims to stay efficient. We rely on a set of scenarios executed with Go's
|
Go-toml aims to stay efficient. We rely on a set of scenarios executed with Go's
|
||||||
builtin benchmark systems. Because of their noisy nature, containers provided by
|
builtin benchmark systems. Because of their noisy nature, containers provided by
|
||||||
Github Actions cannot be reliably used for benchmarking. As a result, you are
|
GitHub Actions cannot be reliably used for benchmarking. As a result, you are
|
||||||
responsible for checking that your changes do not incur a performance penalty.
|
responsible for checking that your changes do not incur a performance penalty.
|
||||||
You can run their following to execute benchmarks:
|
You can run their following to execute benchmarks:
|
||||||
|
|
||||||
@@ -165,25 +207,22 @@ Checklist:
|
|||||||
|
|
||||||
### New release
|
### New release
|
||||||
|
|
||||||
1. Decide on the next version number. Use semver.
|
1. Decide on the next version number. Use semver. Review commits since last
|
||||||
2. Generate release notes using [`gh`][gh]. Example:
|
version to assess.
|
||||||
```
|
2. Tag release. For example:
|
||||||
$ gh api -X POST \
|
```
|
||||||
-F tag_name='v2.0.0-beta.5' \
|
git checkout v2
|
||||||
-F target_commitish='v2' \
|
git pull
|
||||||
-F previous_tag_name='v2.0.0-beta.4' \
|
git tag v2.2.0
|
||||||
--jq '.body' \
|
git push --tags
|
||||||
repos/pelletier/go-toml/releases/generate-notes
|
```
|
||||||
```
|
3. CI automatically builds a draft GitHub release. Review it and edit as
|
||||||
3. Look for "Other changes". That would indicate a pull request not labeled
|
necessary. Look for "Other changes". That would indicate a pull request not
|
||||||
properly. Tweak labels and pull request titles until changelog looks good for
|
labeled properly. Tweak labels and pull request titles until changelog looks
|
||||||
users.
|
good for users.
|
||||||
4. [Draft new release][new-release].
|
4. Check "create discussion" box, in the "Releases" category.
|
||||||
5. Fill tag and target with the same value used to generate the changelog.
|
5. If new version is an alpha or beta only, check pre-release box.
|
||||||
6. Set title to the new tag value.
|
|
||||||
7. Paste the generated changelog.
|
|
||||||
8. Check "create discussion", in the "Releases" category.
|
|
||||||
9. Check pre-release if new version is an alpha or beta.
|
|
||||||
|
|
||||||
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
||||||
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) 2013 - 2022 Thomas Pelletier, Eric Anderton
|
go-toml v2
|
||||||
|
Copyright (c) 2021 - 2023 Thomas Pelletier
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -45,16 +45,15 @@ to check for typos. [See example in the documentation][strict].
|
|||||||
|
|
||||||
### Contextualized errors
|
### Contextualized errors
|
||||||
|
|
||||||
When most decoding errors occur, go-toml returns [`DecodeError`][decode-err]),
|
When most decoding errors occur, go-toml returns [`DecodeError`][decode-err],
|
||||||
which contains a human readable contextualized version of the error. For
|
which contains a human readable contextualized version of the error. For
|
||||||
example:
|
example:
|
||||||
|
|
||||||
```
|
```
|
||||||
2| key1 = "value1"
|
1| [server]
|
||||||
3| key2 = "missing2"
|
2| path = 100
|
||||||
| ~~~~ missing field
|
| ~~~ cannot decode TOML integer into struct field toml_test.Server.Path of type string
|
||||||
4| key3 = "missing3"
|
3| port = 50
|
||||||
5| key4 = "value4"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
[decode-err]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#DecodeError
|
[decode-err]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#DecodeError
|
||||||
@@ -73,22 +72,46 @@ representation.
|
|||||||
[tlt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalTime
|
[tlt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalTime
|
||||||
[tldt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDateTime
|
[tldt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDateTime
|
||||||
|
|
||||||
|
### Commented config
|
||||||
|
|
||||||
|
Since TOML is often used for configuration files, go-toml can emit documents
|
||||||
|
annotated with [comments and commented-out values][comments-example]. For
|
||||||
|
example, it can generate the following file:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
# Host IP to connect to.
|
||||||
|
host = '127.0.0.1'
|
||||||
|
# Port of the remote server.
|
||||||
|
port = 4242
|
||||||
|
|
||||||
|
# Encryption parameters (optional)
|
||||||
|
# [TLS]
|
||||||
|
# cipher = 'AEAD-AES128-GCM-SHA256'
|
||||||
|
# version = 'TLS 1.3'
|
||||||
|
```
|
||||||
|
|
||||||
|
[comments-example]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Marshal-Commented
|
||||||
|
|
||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
Given the following struct, let's see how to read it and write it as TOML:
|
Given the following struct, let's see how to read it and write it as TOML:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
type MyConfig struct {
|
type MyConfig struct {
|
||||||
Version int
|
Version int
|
||||||
Name string
|
Name string
|
||||||
Tags []string
|
Tags []string
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Unmarshaling
|
### Unmarshaling
|
||||||
|
|
||||||
[`Unmarshal`][unmarshal] reads a TOML document and fills a Go structure with its
|
[`Unmarshal`][unmarshal] reads a TOML document and fills a Go structure with its
|
||||||
content. For example:
|
content.
|
||||||
|
|
||||||
|
Note that the struct variable names are _capitalized_, while the variables in the toml document are _lowercase_.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
doc := `
|
doc := `
|
||||||
@@ -100,7 +123,7 @@ tags = ["go", "toml"]
|
|||||||
var cfg MyConfig
|
var cfg MyConfig
|
||||||
err := toml.Unmarshal([]byte(doc), &cfg)
|
err := toml.Unmarshal([]byte(doc), &cfg)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
fmt.Println("version:", cfg.Version)
|
fmt.Println("version:", cfg.Version)
|
||||||
fmt.Println("name:", cfg.Name)
|
fmt.Println("name:", cfg.Name)
|
||||||
@@ -114,6 +137,62 @@ fmt.Println("tags:", cfg.Tags)
|
|||||||
|
|
||||||
[unmarshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Unmarshal
|
[unmarshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Unmarshal
|
||||||
|
|
||||||
|
|
||||||
|
Here is an example using tables with some simple nesting:
|
||||||
|
|
||||||
|
```go
|
||||||
|
doc := `
|
||||||
|
age = 45
|
||||||
|
fruits = ["apple", "pear"]
|
||||||
|
|
||||||
|
# these are very important!
|
||||||
|
[my-variables]
|
||||||
|
first = 1
|
||||||
|
second = 0.2
|
||||||
|
third = "abc"
|
||||||
|
|
||||||
|
# this is not so important.
|
||||||
|
[my-variables.b]
|
||||||
|
bfirst = 123
|
||||||
|
`
|
||||||
|
|
||||||
|
var Document struct {
|
||||||
|
Age int
|
||||||
|
Fruits []string
|
||||||
|
|
||||||
|
Myvariables struct {
|
||||||
|
First int
|
||||||
|
Second float64
|
||||||
|
Third string
|
||||||
|
|
||||||
|
B struct {
|
||||||
|
Bfirst int
|
||||||
|
}
|
||||||
|
} `toml:"my-variables"`
|
||||||
|
}
|
||||||
|
|
||||||
|
err := toml.Unmarshal([]byte(doc), &Document)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("age:", Document.Age)
|
||||||
|
fmt.Println("fruits:", Document.Fruits)
|
||||||
|
fmt.Println("my-variables.first:", Document.Myvariables.First)
|
||||||
|
fmt.Println("my-variables.second:", Document.Myvariables.Second)
|
||||||
|
fmt.Println("my-variables.third:", Document.Myvariables.Third)
|
||||||
|
fmt.Println("my-variables.B.Bfirst:", Document.Myvariables.B.Bfirst)
|
||||||
|
|
||||||
|
// Output:
|
||||||
|
// age: 45
|
||||||
|
// fruits: [apple pear]
|
||||||
|
// my-variables.first: 1
|
||||||
|
// my-variables.second: 0.2
|
||||||
|
// my-variables.third: abc
|
||||||
|
// my-variables.B.Bfirst: 123
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### Marshaling
|
### Marshaling
|
||||||
|
|
||||||
[`Marshal`][marshal] is the opposite of Unmarshal: it represents a Go structure
|
[`Marshal`][marshal] is the opposite of Unmarshal: it represents a Go structure
|
||||||
@@ -121,14 +200,14 @@ as a TOML document:
|
|||||||
|
|
||||||
```go
|
```go
|
||||||
cfg := MyConfig{
|
cfg := MyConfig{
|
||||||
Version: 2,
|
Version: 2,
|
||||||
Name: "go-toml",
|
Name: "go-toml",
|
||||||
Tags: []string{"go", "toml"},
|
Tags: []string{"go", "toml"},
|
||||||
}
|
}
|
||||||
|
|
||||||
b, err := toml.Marshal(cfg)
|
b, err := toml.Marshal(cfg)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
fmt.Println(string(b))
|
fmt.Println(string(b))
|
||||||
|
|
||||||
@@ -140,6 +219,17 @@ fmt.Println(string(b))
|
|||||||
|
|
||||||
[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
|
[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
|
||||||
|
|
||||||
|
## Unstable API
|
||||||
|
|
||||||
|
This API does not yet follow the backward compatibility guarantees of this
|
||||||
|
library. They provide early access to features that may have rough edges or an
|
||||||
|
API subject to change.
|
||||||
|
|
||||||
|
### Parser
|
||||||
|
|
||||||
|
Parser is the unstable API that allows iterative parsing of a TOML document at
|
||||||
|
the AST level. See https://pkg.go.dev/github.com/pelletier/go-toml/v2/unstable.
|
||||||
|
|
||||||
## Benchmarks
|
## Benchmarks
|
||||||
|
|
||||||
Execution time speedup compared to other Go TOML libraries:
|
Execution time speedup compared to other Go TOML libraries:
|
||||||
@@ -149,12 +239,12 @@ Execution time speedup compared to other Go TOML libraries:
|
|||||||
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
|
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr><td>Marshal/HugoFrontMatter-2</td><td>1.9x</td><td>1.9x</td></tr>
|
<tr><td>Marshal/HugoFrontMatter-2</td><td>2.1x</td><td>2.0x</td></tr>
|
||||||
<tr><td>Marshal/ReferenceFile/map-2</td><td>1.7x</td><td>1.8x</td></tr>
|
<tr><td>Marshal/ReferenceFile/map-2</td><td>2.0x</td><td>2.0x</td></tr>
|
||||||
<tr><td>Marshal/ReferenceFile/struct-2</td><td>2.2x</td><td>2.5x</td></tr>
|
<tr><td>Marshal/ReferenceFile/struct-2</td><td>2.3x</td><td>2.5x</td></tr>
|
||||||
<tr><td>Unmarshal/HugoFrontMatter-2</td><td>2.9x</td><td>2.9x</td></tr>
|
<tr><td>Unmarshal/HugoFrontMatter-2</td><td>3.3x</td><td>2.8x</td></tr>
|
||||||
<tr><td>Unmarshal/ReferenceFile/map-2</td><td>2.6x</td><td>2.9x</td></tr>
|
<tr><td>Unmarshal/ReferenceFile/map-2</td><td>2.9x</td><td>3.0x</td></tr>
|
||||||
<tr><td>Unmarshal/ReferenceFile/struct-2</td><td>4.4x</td><td>5.3x</td></tr>
|
<tr><td>Unmarshal/ReferenceFile/struct-2</td><td>4.8x</td><td>5.0x</td></tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
<details><summary>See more</summary>
|
<details><summary>See more</summary>
|
||||||
@@ -167,17 +257,17 @@ provided for completeness.</p>
|
|||||||
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
|
<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr><td>Marshal/SimpleDocument/map-2</td><td>1.8x</td><td>2.9x</td></tr>
|
<tr><td>Marshal/SimpleDocument/map-2</td><td>2.0x</td><td>2.9x</td></tr>
|
||||||
<tr><td>Marshal/SimpleDocument/struct-2</td><td>2.7x</td><td>4.2x</td></tr>
|
<tr><td>Marshal/SimpleDocument/struct-2</td><td>2.5x</td><td>3.6x</td></tr>
|
||||||
<tr><td>Unmarshal/SimpleDocument/map-2</td><td>4.5x</td><td>3.1x</td></tr>
|
<tr><td>Unmarshal/SimpleDocument/map-2</td><td>4.2x</td><td>3.4x</td></tr>
|
||||||
<tr><td>Unmarshal/SimpleDocument/struct-2</td><td>6.2x</td><td>3.9x</td></tr>
|
<tr><td>Unmarshal/SimpleDocument/struct-2</td><td>5.9x</td><td>4.4x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/example-2</td><td>3.1x</td><td>3.5x</td></tr>
|
<tr><td>UnmarshalDataset/example-2</td><td>3.2x</td><td>2.9x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/code-2</td><td>2.3x</td><td>3.1x</td></tr>
|
<tr><td>UnmarshalDataset/code-2</td><td>2.4x</td><td>2.8x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/twitter-2</td><td>2.5x</td><td>2.6x</td></tr>
|
<tr><td>UnmarshalDataset/twitter-2</td><td>2.7x</td><td>2.5x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/citm_catalog-2</td><td>2.1x</td><td>2.2x</td></tr>
|
<tr><td>UnmarshalDataset/citm_catalog-2</td><td>2.3x</td><td>2.3x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/canada-2</td><td>1.6x</td><td>1.3x</td></tr>
|
<tr><td>UnmarshalDataset/canada-2</td><td>1.9x</td><td>1.5x</td></tr>
|
||||||
<tr><td>UnmarshalDataset/config-2</td><td>4.3x</td><td>3.2x</td></tr>
|
<tr><td>UnmarshalDataset/config-2</td><td>5.4x</td><td>3.0x</td></tr>
|
||||||
<tr><td>[Geo mean]</td><td>2.7x</td><td>2.8x</td></tr>
|
<tr><td>geomean</td><td>2.9x</td><td>2.8x</td></tr>
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>
|
<p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>
|
||||||
@@ -203,24 +293,24 @@ Go-toml provides three handy command line tools:
|
|||||||
|
|
||||||
* `tomljson`: Reads a TOML file and outputs its JSON representation.
|
* `tomljson`: Reads a TOML file and outputs its JSON representation.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
|
$ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
|
||||||
$ tomljson --help
|
$ tomljson --help
|
||||||
```
|
```
|
||||||
|
|
||||||
* `jsontoml`: Reads a JSON file and outputs a TOML representation.
|
* `jsontoml`: Reads a JSON file and outputs a TOML representation.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
|
$ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
|
||||||
$ jsontoml --help
|
$ jsontoml --help
|
||||||
```
|
```
|
||||||
|
|
||||||
* `tomll`: Lints and reformats a TOML file.
|
* `tomll`: Lints and reformats a TOML file.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
|
$ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
|
||||||
$ tomll --help
|
$ tomll --help
|
||||||
```
|
```
|
||||||
|
|
||||||
### Docker image
|
### Docker image
|
||||||
|
|
||||||
@@ -231,7 +321,7 @@ Those tools are also available as a [Docker image][docker]. For example, to use
|
|||||||
docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml
|
docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml
|
||||||
```
|
```
|
||||||
|
|
||||||
Multiple versions are availble on [ghcr.io][docker].
|
Multiple versions are available on [ghcr.io][docker].
|
||||||
|
|
||||||
[docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml
|
[docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml
|
||||||
|
|
||||||
@@ -263,16 +353,16 @@ element in the interface to decode the object. For example:
|
|||||||
|
|
||||||
```go
|
```go
|
||||||
type inner struct {
|
type inner struct {
|
||||||
B interface{}
|
B interface{}
|
||||||
}
|
}
|
||||||
type doc struct {
|
type doc struct {
|
||||||
A interface{}
|
A interface{}
|
||||||
}
|
}
|
||||||
|
|
||||||
d := doc{
|
d := doc{
|
||||||
A: inner{
|
A: inner{
|
||||||
B: "Before",
|
B: "Before",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
data := `
|
data := `
|
||||||
@@ -311,7 +401,7 @@ contained in the doc is superior to the capacity of the array. For example:
|
|||||||
|
|
||||||
```go
|
```go
|
||||||
type doc struct {
|
type doc struct {
|
||||||
A [2]string
|
A [2]string
|
||||||
}
|
}
|
||||||
d := doc{}
|
d := doc{}
|
||||||
err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d)
|
err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d)
|
||||||
@@ -486,27 +576,20 @@ is not necessary anymore.
|
|||||||
|
|
||||||
V1 used to provide multiple struct tags: `comment`, `commented`, `multiline`,
|
V1 used to provide multiple struct tags: `comment`, `commented`, `multiline`,
|
||||||
`toml`, and `omitempty`. To behave more like the standard library, v2 has merged
|
`toml`, and `omitempty`. To behave more like the standard library, v2 has merged
|
||||||
`toml`, `multiline`, and `omitempty`. For example:
|
`toml`, `multiline`, `commented`, and `omitempty`. For example:
|
||||||
|
|
||||||
```go
|
```go
|
||||||
type doc struct {
|
type doc struct {
|
||||||
// v1
|
// v1
|
||||||
F string `toml:"field" multiline:"true" omitempty:"true"`
|
F string `toml:"field" multiline:"true" omitempty:"true" commented:"true"`
|
||||||
// v2
|
// v2
|
||||||
F string `toml:"field,multiline,omitempty"`
|
F string `toml:"field,multiline,omitempty,commented"`
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Has a result, the `Encoder.SetTag*` methods have been removed, as there is just
|
Has a result, the `Encoder.SetTag*` methods have been removed, as there is just
|
||||||
one tag now.
|
one tag now.
|
||||||
|
|
||||||
|
|
||||||
#### `commented` tag has been removed
|
|
||||||
|
|
||||||
There is no replacement for the `commented` tag. This feature would be better
|
|
||||||
suited in a proper document model for go-toml v2, which has been [cut from
|
|
||||||
scope][nodoc] at the moment.
|
|
||||||
|
|
||||||
#### `Encoder.ArraysWithOneElementPerLine` has been renamed
|
#### `Encoder.ArraysWithOneElementPerLine` has been renamed
|
||||||
|
|
||||||
The new name is `Encoder.SetArraysMultiline`. The behavior should be the same.
|
The new name is `Encoder.SetArraysMultiline`. The behavior should be the same.
|
||||||
@@ -542,10 +625,11 @@ complete solutions exist out there.
|
|||||||
|
|
||||||
## Versioning
|
## Versioning
|
||||||
|
|
||||||
Go-toml follows [Semantic Versioning](http://semver.org/). The supported version
|
Expect for parts explicitly marked otherwise, go-toml follows [Semantic
|
||||||
of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
|
Versioning](https://semver.org). The supported version of
|
||||||
this document. The last two major versions of Go are supported
|
[TOML](https://github.com/toml-lang/toml) is indicated at the beginning of this
|
||||||
(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
|
document. The last two major versions of Go are supported (see [Go Release
|
||||||
|
Policy](https://golang.org/doc/devel/release.html#policy)).
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
@@ -2,9 +2,6 @@
|
|||||||
|
|
||||||
## Supported Versions
|
## Supported Versions
|
||||||
|
|
||||||
Use this section to tell people about which versions of your project are
|
|
||||||
currently being supported with security updates.
|
|
||||||
|
|
||||||
| Version | Supported |
|
| Version | Supported |
|
||||||
| ---------- | ------------------ |
|
| ---------- | ------------------ |
|
||||||
| Latest 2.x | :white_check_mark: |
|
| Latest 2.x | :white_check_mark: |
|
||||||
|
|||||||
@@ -3,16 +3,16 @@ package benchmark_test
|
|||||||
import (
|
import (
|
||||||
"compress/gzip"
|
"compress/gzip"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"io/ioutil"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
var bench_inputs = []struct {
|
var benchInputs = []struct {
|
||||||
name string
|
name string
|
||||||
jsonLen int
|
jsonLen int
|
||||||
}{
|
}{
|
||||||
@@ -30,22 +30,22 @@ var bench_inputs = []struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestUnmarshalDatasetCode(t *testing.T) {
|
func TestUnmarshalDatasetCode(t *testing.T) {
|
||||||
for _, tc := range bench_inputs {
|
for _, tc := range benchInputs {
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
buf := fixture(t, tc.name)
|
buf := fixture(t, tc.name)
|
||||||
|
|
||||||
var v interface{}
|
var v interface{}
|
||||||
require.NoError(t, toml.Unmarshal(buf, &v))
|
assert.NoError(t, toml.Unmarshal(buf, &v))
|
||||||
|
|
||||||
b, err := json.Marshal(v)
|
b, err := json.Marshal(v)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, len(b), tc.jsonLen)
|
assert.Equal(t, len(b), tc.jsonLen)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func BenchmarkUnmarshalDataset(b *testing.B) {
|
func BenchmarkUnmarshalDataset(b *testing.B) {
|
||||||
for _, tc := range bench_inputs {
|
for _, tc := range benchInputs {
|
||||||
b.Run(tc.name, func(b *testing.B) {
|
b.Run(tc.name, func(b *testing.B) {
|
||||||
buf := fixture(b, tc.name)
|
buf := fixture(b, tc.name)
|
||||||
b.SetBytes(int64(len(buf)))
|
b.SetBytes(int64(len(buf)))
|
||||||
@@ -53,7 +53,7 @@ func BenchmarkUnmarshalDataset(b *testing.B) {
|
|||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
var v interface{}
|
var v interface{}
|
||||||
require.NoError(b, toml.Unmarshal(buf, &v))
|
assert.NoError(b, toml.Unmarshal(buf, &v))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -68,13 +68,13 @@ func fixture(tb testing.TB, path string) []byte {
|
|||||||
if os.IsNotExist(err) {
|
if os.IsNotExist(err) {
|
||||||
tb.Skip("benchmark fixture not found:", file)
|
tb.Skip("benchmark fixture not found:", file)
|
||||||
}
|
}
|
||||||
require.NoError(tb, err)
|
assert.NoError(tb, err)
|
||||||
defer f.Close()
|
defer func() { _ = f.Close() }()
|
||||||
|
|
||||||
gz, err := gzip.NewReader(f)
|
gz, err := gzip.NewReader(f)
|
||||||
require.NoError(tb, err)
|
assert.NoError(tb, err)
|
||||||
|
|
||||||
buf, err := ioutil.ReadAll(gz)
|
buf, err := io.ReadAll(gz)
|
||||||
require.NoError(tb, err)
|
assert.NoError(tb, err)
|
||||||
return buf
|
return buf
|
||||||
}
|
}
|
||||||
|
|||||||
+24
-24
@@ -2,12 +2,12 @@ package benchmark_test
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"io/ioutil"
|
"os"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUnmarshalSimple(t *testing.T) {
|
func TestUnmarshalSimple(t *testing.T) {
|
||||||
@@ -18,7 +18,7 @@ func TestUnmarshalSimple(t *testing.T) {
|
|||||||
|
|
||||||
err := toml.Unmarshal(doc, &d)
|
err := toml.Unmarshal(doc, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -38,7 +38,7 @@ func BenchmarkUnmarshal(b *testing.B) {
|
|||||||
|
|
||||||
err := toml.Unmarshal(doc, &d)
|
err := toml.Unmarshal(doc, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -52,14 +52,14 @@ func BenchmarkUnmarshal(b *testing.B) {
|
|||||||
d := map[string]interface{}{}
|
d := map[string]interface{}{}
|
||||||
err := toml.Unmarshal(doc, &d)
|
err := toml.Unmarshal(doc, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
b.Run("ReferenceFile", func(b *testing.B) {
|
b.Run("ReferenceFile", func(b *testing.B) {
|
||||||
bytes, err := ioutil.ReadFile("benchmark.toml")
|
bytes, err := os.ReadFile("benchmark.toml")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
@@ -72,7 +72,7 @@ func BenchmarkUnmarshal(b *testing.B) {
|
|||||||
d := benchmarkDoc{}
|
d := benchmarkDoc{}
|
||||||
err := toml.Unmarshal(bytes, &d)
|
err := toml.Unmarshal(bytes, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -85,7 +85,7 @@ func BenchmarkUnmarshal(b *testing.B) {
|
|||||||
d := map[string]interface{}{}
|
d := map[string]interface{}{}
|
||||||
err := toml.Unmarshal(bytes, &d)
|
err := toml.Unmarshal(bytes, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -99,7 +99,7 @@ func BenchmarkUnmarshal(b *testing.B) {
|
|||||||
d := map[string]interface{}{}
|
d := map[string]interface{}{}
|
||||||
err := toml.Unmarshal(hugoFrontMatterbytes, &d)
|
err := toml.Unmarshal(hugoFrontMatterbytes, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -123,7 +123,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
|
|
||||||
err := toml.Unmarshal(doc, &d)
|
err := toml.Unmarshal(doc, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
@@ -134,7 +134,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
out, err = marshal(d)
|
out, err = marshal(d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -145,7 +145,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
d := map[string]interface{}{}
|
d := map[string]interface{}{}
|
||||||
err := toml.Unmarshal(doc, &d)
|
err := toml.Unmarshal(doc, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
@@ -156,7 +156,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
out, err = marshal(d)
|
out, err = marshal(d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -165,7 +165,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
b.Run("ReferenceFile", func(b *testing.B) {
|
b.Run("ReferenceFile", func(b *testing.B) {
|
||||||
bytes, err := ioutil.ReadFile("benchmark.toml")
|
bytes, err := os.ReadFile("benchmark.toml")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
@@ -174,7 +174,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
d := benchmarkDoc{}
|
d := benchmarkDoc{}
|
||||||
err := toml.Unmarshal(bytes, &d)
|
err := toml.Unmarshal(bytes, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
@@ -184,7 +184,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
out, err = marshal(d)
|
out, err = marshal(d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -195,7 +195,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
d := map[string]interface{}{}
|
d := map[string]interface{}{}
|
||||||
err := toml.Unmarshal(bytes, &d)
|
err := toml.Unmarshal(bytes, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
@@ -205,7 +205,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
out, err = marshal(d)
|
out, err = marshal(d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -217,7 +217,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
d := map[string]interface{}{}
|
d := map[string]interface{}{}
|
||||||
err := toml.Unmarshal(hugoFrontMatterbytes, &d)
|
err := toml.Unmarshal(hugoFrontMatterbytes, &d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
@@ -228,7 +228,7 @@ func BenchmarkMarshal(b *testing.B) {
|
|||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
out, err = marshal(d)
|
out, err = marshal(d)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -344,11 +344,11 @@ type benchmarkDoc struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestUnmarshalReferenceFile(t *testing.T) {
|
func TestUnmarshalReferenceFile(t *testing.T) {
|
||||||
bytes, err := ioutil.ReadFile("benchmark.toml")
|
bytes, err := os.ReadFile("benchmark.toml")
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
d := benchmarkDoc{}
|
d := benchmarkDoc{}
|
||||||
err = toml.Unmarshal(bytes, &d)
|
err = toml.Unmarshal(bytes, &d)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
expected := benchmarkDoc{
|
expected := benchmarkDoc{
|
||||||
Table: struct {
|
Table: struct {
|
||||||
@@ -627,7 +627,7 @@ trimmed in raw strings.
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
require.Equal(t, expected, d)
|
assert.Equal(t, expected, d)
|
||||||
}
|
}
|
||||||
|
|
||||||
var hugoFrontMatterbytes = []byte(`
|
var hugoFrontMatterbytes = []byte(`
|
||||||
|
|||||||
@@ -77,8 +77,9 @@ cover() {
|
|||||||
|
|
||||||
pushd "$dir"
|
pushd "$dir"
|
||||||
go test -covermode=atomic -coverpkg=./... -coverprofile=coverage.out.tmp ./...
|
go test -covermode=atomic -coverpkg=./... -coverprofile=coverage.out.tmp ./...
|
||||||
cat coverage.out.tmp | grep -v testsuite | grep -v tomltestgen | grep -v gotoml-test-decoder > coverage.out
|
grep -Ev '(fuzz|testsuite|tomltestgen|gotoml-test-decoder|gotoml-test-encoder)' coverage.out.tmp > coverage.out
|
||||||
go tool cover -func=coverage.out
|
go tool cover -func=coverage.out
|
||||||
|
echo "Coverage profile for ${branch}: ${dir}/coverage.out" >&2
|
||||||
popd
|
popd
|
||||||
|
|
||||||
if [ "${branch}" != "HEAD" ]; then
|
if [ "${branch}" != "HEAD" ]; then
|
||||||
@@ -146,12 +147,12 @@ bench() {
|
|||||||
pushd "$dir"
|
pushd "$dir"
|
||||||
|
|
||||||
if [ "${replace}" != "" ]; then
|
if [ "${replace}" != "" ]; then
|
||||||
find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2|${replace}|g" {} \;
|
find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2\"|${replace}\"|g" {} \;
|
||||||
go get "${replace}"
|
go get "${replace}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
export GOMAXPROCS=2
|
export GOMAXPROCS=2
|
||||||
nice -n -19 taskset --cpu-list 0,1 go test '-bench=^Benchmark(Un)?[mM]arshal' -count=5 -run=Nothing ./... | tee "${out}"
|
go test '-bench=^Benchmark(Un)?[mM]arshal' -count=10 -run=Nothing ./... | tee "${out}"
|
||||||
popd
|
popd
|
||||||
|
|
||||||
if [ "${branch}" != "HEAD" ]; then
|
if [ "${branch}" != "HEAD" ]; then
|
||||||
@@ -160,10 +161,12 @@ bench() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fmktemp() {
|
fmktemp() {
|
||||||
if mktemp --version|grep GNU >/dev/null; then
|
if mktemp --version &> /dev/null; then
|
||||||
mktemp --suffix=-$1;
|
# GNU
|
||||||
|
mktemp --suffix=-$1
|
||||||
else
|
else
|
||||||
mktemp -t $1;
|
# BSD
|
||||||
|
mktemp -t $1
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -183,13 +186,20 @@ with open(sys.argv[1]) as f:
|
|||||||
lines.append(line.split(','))
|
lines.append(line.split(','))
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
for line in reversed(lines[1:]):
|
for line in reversed(lines[2:]):
|
||||||
|
if len(line) < 8 or line[0] == "":
|
||||||
|
continue
|
||||||
v2 = float(line[1])
|
v2 = float(line[1])
|
||||||
results.append([
|
results.append([
|
||||||
line[0].replace("-32", ""),
|
line[0].replace("-32", ""),
|
||||||
"%.1fx" % (float(line[3])/v2), # v1
|
"%.1fx" % (float(line[3])/v2), # v1
|
||||||
"%.1fx" % (float(line[5])/v2), # bs
|
"%.1fx" % (float(line[7])/v2), # bs
|
||||||
])
|
])
|
||||||
|
|
||||||
|
if not results:
|
||||||
|
print("No benchmark results to display.", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
# move geomean to the end
|
# move geomean to the end
|
||||||
results.append(results[0])
|
results.append(results[0])
|
||||||
del results[0]
|
del results[0]
|
||||||
@@ -259,10 +269,10 @@ benchmark() {
|
|||||||
|
|
||||||
if [ "$1" = "-html" ]; then
|
if [ "$1" = "-html" ]; then
|
||||||
tmpcsv=`fmktemp csv`
|
tmpcsv=`fmktemp csv`
|
||||||
benchstat -csv -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv
|
benchstat -format csv go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv
|
||||||
benchstathtml $tmpcsv
|
benchstathtml $tmpcsv
|
||||||
else
|
else
|
||||||
benchstat -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt
|
benchstat go-toml-v2.txt go-toml-v1.txt bs-toml.txt
|
||||||
fi
|
fi
|
||||||
|
|
||||||
rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt
|
rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
// Package gotoml-test-decoder is a minimal decoder program used to compare this library with other TOML implementations.
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
|||||||
@@ -0,0 +1,31 @@
|
|||||||
|
// Package gotoml-test-encoder is a minimal encoder program used to compare this library with other TOML implementations.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/testsuite"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
log.SetFlags(0)
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
if flag.NArg() != 0 {
|
||||||
|
flag.Usage()
|
||||||
|
}
|
||||||
|
|
||||||
|
err := testsuite.EncodeStdin()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func usage() {
|
||||||
|
log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0]))
|
||||||
|
flag.PrintDefaults()
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
+12
-1
@@ -19,6 +19,7 @@ package main
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"flag"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
@@ -33,7 +34,11 @@ Reading from a file:
|
|||||||
jsontoml file.json > file.toml
|
jsontoml file.json > file.toml
|
||||||
`
|
`
|
||||||
|
|
||||||
|
var useJSONNumber bool
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
flag.BoolVar(&useJSONNumber, "use-json-number", false, "unmarshal numbers into `json.Number` type instead of as `float64`")
|
||||||
|
|
||||||
p := cli.Program{
|
p := cli.Program{
|
||||||
Usage: usage,
|
Usage: usage,
|
||||||
Fn: convert,
|
Fn: convert,
|
||||||
@@ -45,11 +50,17 @@ func convert(r io.Reader, w io.Writer) error {
|
|||||||
var v interface{}
|
var v interface{}
|
||||||
|
|
||||||
d := json.NewDecoder(r)
|
d := json.NewDecoder(r)
|
||||||
|
e := toml.NewEncoder(w)
|
||||||
|
|
||||||
|
if useJSONNumber {
|
||||||
|
d.UseNumber()
|
||||||
|
e.SetMarshalJSONNumbers(true)
|
||||||
|
}
|
||||||
|
|
||||||
err := d.Decode(&v)
|
err := d.Decode(&v)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
e := toml.NewEncoder(w)
|
|
||||||
return e.Encode(v)
|
return e.Encode(v)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,16 +5,16 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestConvert(t *testing.T) {
|
func TestConvert(t *testing.T) {
|
||||||
examples := []struct {
|
examples := []struct {
|
||||||
name string
|
name string
|
||||||
input string
|
input string
|
||||||
expected string
|
expected string
|
||||||
errors bool
|
errors bool
|
||||||
|
useJSONNumber bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "valid json",
|
name: "valid json",
|
||||||
@@ -26,6 +26,19 @@ func TestConvert(t *testing.T) {
|
|||||||
}`,
|
}`,
|
||||||
expected: `[mytoml]
|
expected: `[mytoml]
|
||||||
a = 42.0
|
a = 42.0
|
||||||
|
`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "use json number",
|
||||||
|
useJSONNumber: true,
|
||||||
|
input: `
|
||||||
|
{
|
||||||
|
"mytoml": {
|
||||||
|
"a": 42
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
expected: `[mytoml]
|
||||||
|
a = 42
|
||||||
`,
|
`,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -37,9 +50,10 @@ a = 42.0
|
|||||||
|
|
||||||
for _, e := range examples {
|
for _, e := range examples {
|
||||||
b := new(bytes.Buffer)
|
b := new(bytes.Buffer)
|
||||||
|
useJSONNumber = e.useJSONNumber
|
||||||
err := convert(strings.NewReader(e.input), b)
|
err := convert(strings.NewReader(e.input), b)
|
||||||
if e.errors {
|
if e.errors {
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
} else {
|
} else {
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, e.expected, b.String())
|
assert.Equal(t, e.expected, b.String())
|
||||||
|
|||||||
@@ -2,13 +2,12 @@ package main
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"errors"
|
||||||
"io"
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestConvert(t *testing.T) {
|
func TestConvert(t *testing.T) {
|
||||||
@@ -46,7 +45,7 @@ a = 42`),
|
|||||||
b := new(bytes.Buffer)
|
b := new(bytes.Buffer)
|
||||||
err := convert(e.input, b)
|
err := convert(e.input, b)
|
||||||
if e.errors {
|
if e.errors {
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
} else {
|
} else {
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, e.expected, b.String())
|
assert.Equal(t, e.expected, b.String())
|
||||||
@@ -57,5 +56,5 @@ a = 42`),
|
|||||||
type badReader struct{}
|
type badReader struct{}
|
||||||
|
|
||||||
func (r *badReader) Read([]byte) (int, error) {
|
func (r *badReader) Read([]byte) (int, error) {
|
||||||
return 0, fmt.Errorf("reader failed on purpose")
|
return 0, errors.New("reader failed on purpose")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,8 +5,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestConvert(t *testing.T) {
|
func TestConvert(t *testing.T) {
|
||||||
@@ -36,7 +35,7 @@ a = 42.0
|
|||||||
b := new(bytes.Buffer)
|
b := new(bytes.Buffer)
|
||||||
err := convert(strings.NewReader(e.input), b)
|
err := convert(strings.NewReader(e.input), b)
|
||||||
if e.errors {
|
if e.errors {
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
} else {
|
} else {
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, e.expected, b.String())
|
assert.Equal(t, e.expected, b.String())
|
||||||
|
|||||||
+61
-101
@@ -7,21 +7,18 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"archive/zip"
|
|
||||||
"bytes"
|
"bytes"
|
||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/format"
|
"go/format"
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
"log"
|
||||||
"net/http"
|
|
||||||
"os"
|
"os"
|
||||||
"regexp"
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"text/template"
|
"text/template"
|
||||||
"time"
|
"time"
|
||||||
|
"unicode"
|
||||||
)
|
)
|
||||||
|
|
||||||
type invalid struct {
|
type invalid struct {
|
||||||
@@ -32,7 +29,7 @@ type invalid struct {
|
|||||||
type valid struct {
|
type valid struct {
|
||||||
Name string
|
Name string
|
||||||
Input string
|
Input string
|
||||||
JsonRef string
|
JSONRef string
|
||||||
}
|
}
|
||||||
|
|
||||||
type testsCollection struct {
|
type testsCollection struct {
|
||||||
@@ -43,12 +40,11 @@ type testsCollection struct {
|
|||||||
Count int
|
Count int
|
||||||
}
|
}
|
||||||
|
|
||||||
const srcTemplate = "// Generated by tomltestgen for toml-test ref {{.Ref}} on {{.Timestamp}}\n" +
|
const srcTemplate = "// Code generated by tomltestgen for toml-test ref {{.Ref}} on {{.Timestamp}}. DO NOT EDIT.\n" +
|
||||||
"package toml_test\n" +
|
"package toml_test\n" +
|
||||||
" import (\n" +
|
" import (\n" +
|
||||||
" \"testing\"\n" +
|
" \"testing\"\n" +
|
||||||
")\n" +
|
")\n" +
|
||||||
|
|
||||||
"{{range .Invalid}}\n" +
|
"{{range .Invalid}}\n" +
|
||||||
"func TestTOMLTest_Invalid_{{.Name}}(t *testing.T) {\n" +
|
"func TestTOMLTest_Invalid_{{.Name}}(t *testing.T) {\n" +
|
||||||
" input := {{.Input|gostr}}\n" +
|
" input := {{.Input|gostr}}\n" +
|
||||||
@@ -59,65 +55,31 @@ const srcTemplate = "// Generated by tomltestgen for toml-test ref {{.Ref}} on {
|
|||||||
"{{range .Valid}}\n" +
|
"{{range .Valid}}\n" +
|
||||||
"func TestTOMLTest_Valid_{{.Name}}(t *testing.T) {\n" +
|
"func TestTOMLTest_Valid_{{.Name}}(t *testing.T) {\n" +
|
||||||
" input := {{.Input|gostr}}\n" +
|
" input := {{.Input|gostr}}\n" +
|
||||||
" jsonRef := {{.JsonRef|gostr}}\n" +
|
" jsonRef := {{.JSONRef|gostr}}\n" +
|
||||||
" testgenValid(t, input, jsonRef)\n" +
|
" testgenValid(t, input, jsonRef)\n" +
|
||||||
"}\n" +
|
"}\n" +
|
||||||
"{{end}}\n"
|
"{{end}}\n"
|
||||||
|
|
||||||
func downloadTmpFile(url string) string {
|
|
||||||
log.Println("starting to download file from", url)
|
|
||||||
resp, err := http.Get(url)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
tmpfile, err := ioutil.TempFile("", "toml-test-*.zip")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer tmpfile.Close()
|
|
||||||
|
|
||||||
copiedLen, err := io.Copy(tmpfile, resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
if resp.ContentLength > 0 && copiedLen != resp.ContentLength {
|
|
||||||
panic(fmt.Errorf("copied %d bytes, request body had %d", copiedLen, resp.ContentLength))
|
|
||||||
}
|
|
||||||
return tmpfile.Name()
|
|
||||||
}
|
|
||||||
|
|
||||||
func kebabToCamel(kebab string) string {
|
func kebabToCamel(kebab string) string {
|
||||||
camel := ""
|
var buf strings.Builder
|
||||||
nextUpper := true
|
nextUpper := true
|
||||||
for _, c := range kebab {
|
for _, c := range kebab {
|
||||||
if nextUpper {
|
if nextUpper {
|
||||||
camel += strings.ToUpper(string(c))
|
buf.WriteRune(unicode.ToUpper(c))
|
||||||
nextUpper = false
|
nextUpper = false
|
||||||
} else if c == '-' {
|
|
||||||
nextUpper = true
|
|
||||||
} else if c == '/' {
|
|
||||||
nextUpper = true
|
|
||||||
camel += "_"
|
|
||||||
} else {
|
} else {
|
||||||
camel += string(c)
|
switch c {
|
||||||
|
case '-':
|
||||||
|
nextUpper = true
|
||||||
|
case '/':
|
||||||
|
nextUpper = true
|
||||||
|
buf.WriteByte('_')
|
||||||
|
default:
|
||||||
|
buf.WriteRune(c)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return camel
|
return buf.String()
|
||||||
}
|
|
||||||
|
|
||||||
func readFileFromZip(f *zip.File) string {
|
|
||||||
reader, err := f.Open()
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer reader.Close()
|
|
||||||
bytes, err := ioutil.ReadAll(reader)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
return string(bytes)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func templateGoStr(input string) string {
|
func templateGoStr(input string) string {
|
||||||
@@ -138,61 +100,59 @@ func main() {
|
|||||||
flag.Usage = usage
|
flag.Usage = usage
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
|
|
||||||
url := "https://codeload.github.com/BurntSushi/toml-test/zip/" + *ref
|
|
||||||
resultFile := downloadTmpFile(url)
|
|
||||||
defer os.Remove(resultFile)
|
|
||||||
log.Println("file written to", resultFile)
|
|
||||||
|
|
||||||
zipReader, err := zip.OpenReader(resultFile)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer zipReader.Close()
|
|
||||||
|
|
||||||
collection := testsCollection{
|
collection := testsCollection{
|
||||||
Ref: *ref,
|
Ref: *ref,
|
||||||
Timestamp: time.Now().Format(time.RFC3339),
|
Timestamp: time.Now().Format(time.RFC3339),
|
||||||
}
|
}
|
||||||
|
|
||||||
zipFilesMap := map[string]*zip.File{}
|
dirContent, _ := filepath.Glob("tests/invalid/**/*.toml")
|
||||||
|
for _, f := range dirContent {
|
||||||
|
filename := strings.TrimPrefix(f, "tests/valid/")
|
||||||
|
name := kebabToCamel(strings.TrimSuffix(filename, ".toml"))
|
||||||
|
name = strings.ReplaceAll(name, ".", "_")
|
||||||
|
|
||||||
for _, f := range zipReader.File {
|
log.Printf("> [%s] %s\n", "invalid", name)
|
||||||
zipFilesMap[f.Name] = f
|
|
||||||
|
tomlContent, err := os.ReadFile(f) // #nosec G304
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("failed to read test file: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
collection.Invalid = append(collection.Invalid, invalid{
|
||||||
|
Name: name,
|
||||||
|
Input: string(tomlContent),
|
||||||
|
})
|
||||||
|
collection.Count++
|
||||||
}
|
}
|
||||||
|
|
||||||
testFileRegexp := regexp.MustCompile(`([^/]+/tests/(valid|invalid)/(.+))\.(toml)`)
|
dirContent, _ = filepath.Glob("tests/valid/**/*.toml")
|
||||||
for _, f := range zipReader.File {
|
for _, f := range dirContent {
|
||||||
groups := testFileRegexp.FindStringSubmatch(f.Name)
|
filename := strings.TrimPrefix(f, "tests/valid/")
|
||||||
if len(groups) > 0 {
|
name := kebabToCamel(strings.TrimSuffix(filename, ".toml"))
|
||||||
name := kebabToCamel(groups[3])
|
name = strings.ReplaceAll(name, ".", "_")
|
||||||
testType := groups[2]
|
|
||||||
|
|
||||||
log.Printf("> [%s] %s\n", testType, name)
|
log.Printf("> [%s] %s\n", "valid", name)
|
||||||
|
|
||||||
tomlContent := readFileFromZip(f)
|
tomlContent, err := os.ReadFile(f) // #nosec G304
|
||||||
|
if err != nil {
|
||||||
switch testType {
|
fmt.Printf("failed reading test file: %s\n", err)
|
||||||
case "invalid":
|
os.Exit(1)
|
||||||
collection.Invalid = append(collection.Invalid, invalid{
|
|
||||||
Name: name,
|
|
||||||
Input: tomlContent,
|
|
||||||
})
|
|
||||||
collection.Count++
|
|
||||||
case "valid":
|
|
||||||
baseFilePath := groups[1]
|
|
||||||
jsonFilePath := baseFilePath + ".json"
|
|
||||||
jsonContent := readFileFromZip(zipFilesMap[jsonFilePath])
|
|
||||||
|
|
||||||
collection.Valid = append(collection.Valid, valid{
|
|
||||||
Name: name,
|
|
||||||
Input: tomlContent,
|
|
||||||
JsonRef: jsonContent,
|
|
||||||
})
|
|
||||||
collection.Count++
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("unknown test type: %s", testType))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
filename = strings.TrimSuffix(f, ".toml")
|
||||||
|
jsonContent, err := os.ReadFile(filename + ".json") // #nosec G304
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("failed reading validation json: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
collection.Valid = append(collection.Valid, valid{
|
||||||
|
Name: name,
|
||||||
|
Input: string(tomlContent),
|
||||||
|
JSONRef: string(jsonContent),
|
||||||
|
})
|
||||||
|
collection.Count++
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Printf("Collected %d tests from toml-test\n", collection.Count)
|
log.Printf("Collected %d tests from toml-test\n", collection.Count)
|
||||||
@@ -202,7 +162,7 @@ func main() {
|
|||||||
}
|
}
|
||||||
t := template.Must(template.New("src").Funcs(funcMap).Parse(srcTemplate))
|
t := template.Must(template.New("src").Funcs(funcMap).Parse(srcTemplate))
|
||||||
buf := new(bytes.Buffer)
|
buf := new(bytes.Buffer)
|
||||||
err = t.Execute(buf, collection)
|
err := t.Execute(buf, collection)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
@@ -216,7 +176,7 @@ func main() {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
err = os.WriteFile(*out, outputBytes, 0644)
|
err = os.WriteFile(*out, outputBytes, 0o600)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,8 @@ import (
|
|||||||
"math"
|
"math"
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
func parseInteger(b []byte) (int64, error) {
|
func parseInteger(b []byte) (int64, error) {
|
||||||
@@ -32,7 +34,7 @@ func parseLocalDate(b []byte) (LocalDate, error) {
|
|||||||
var date LocalDate
|
var date LocalDate
|
||||||
|
|
||||||
if len(b) != 10 || b[4] != '-' || b[7] != '-' {
|
if len(b) != 10 || b[4] != '-' || b[7] != '-' {
|
||||||
return date, newDecodeError(b, "dates are expected to have the format YYYY-MM-DD")
|
return date, unstable.NewParserError(b, "dates are expected to have the format YYYY-MM-DD")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
@@ -53,7 +55,7 @@ func parseLocalDate(b []byte) (LocalDate, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if !isValidDate(date.Year, date.Month, date.Day) {
|
if !isValidDate(date.Year, date.Month, date.Day) {
|
||||||
return LocalDate{}, newDecodeError(b, "impossible date")
|
return LocalDate{}, unstable.NewParserError(b, "impossible date")
|
||||||
}
|
}
|
||||||
|
|
||||||
return date, nil
|
return date, nil
|
||||||
@@ -64,7 +66,7 @@ func parseDecimalDigits(b []byte) (int, error) {
|
|||||||
|
|
||||||
for i, c := range b {
|
for i, c := range b {
|
||||||
if c < '0' || c > '9' {
|
if c < '0' || c > '9' {
|
||||||
return 0, newDecodeError(b[i:i+1], "expected digit (0-9)")
|
return 0, unstable.NewParserError(b[i:i+1], "expected digit (0-9)")
|
||||||
}
|
}
|
||||||
v *= 10
|
v *= 10
|
||||||
v += int(c - '0')
|
v += int(c - '0')
|
||||||
@@ -97,7 +99,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||||||
} else {
|
} else {
|
||||||
const dateTimeByteLen = 6
|
const dateTimeByteLen = 6
|
||||||
if len(b) != dateTimeByteLen {
|
if len(b) != dateTimeByteLen {
|
||||||
return time.Time{}, newDecodeError(b, "invalid date-time timezone")
|
return time.Time{}, unstable.NewParserError(b, "invalid date-time timezone")
|
||||||
}
|
}
|
||||||
var direction int
|
var direction int
|
||||||
switch b[0] {
|
switch b[0] {
|
||||||
@@ -106,11 +108,11 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||||||
case '+':
|
case '+':
|
||||||
direction = +1
|
direction = +1
|
||||||
default:
|
default:
|
||||||
return time.Time{}, newDecodeError(b[:1], "invalid timezone offset character")
|
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset character")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[3] != ':' {
|
if b[3] != ':' {
|
||||||
return time.Time{}, newDecodeError(b[3:4], "expected a : separator")
|
return time.Time{}, unstable.NewParserError(b[3:4], "expected a : separator")
|
||||||
}
|
}
|
||||||
|
|
||||||
hours, err := parseDecimalDigits(b[1:3])
|
hours, err := parseDecimalDigits(b[1:3])
|
||||||
@@ -118,7 +120,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||||||
return time.Time{}, err
|
return time.Time{}, err
|
||||||
}
|
}
|
||||||
if hours > 23 {
|
if hours > 23 {
|
||||||
return time.Time{}, newDecodeError(b[:1], "invalid timezone offset hours")
|
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset hours")
|
||||||
}
|
}
|
||||||
|
|
||||||
minutes, err := parseDecimalDigits(b[4:6])
|
minutes, err := parseDecimalDigits(b[4:6])
|
||||||
@@ -126,7 +128,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||||||
return time.Time{}, err
|
return time.Time{}, err
|
||||||
}
|
}
|
||||||
if minutes > 59 {
|
if minutes > 59 {
|
||||||
return time.Time{}, newDecodeError(b[:1], "invalid timezone offset minutes")
|
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset minutes")
|
||||||
}
|
}
|
||||||
|
|
||||||
seconds := direction * (hours*3600 + minutes*60)
|
seconds := direction * (hours*3600 + minutes*60)
|
||||||
@@ -139,7 +141,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(b) > 0 {
|
if len(b) > 0 {
|
||||||
return time.Time{}, newDecodeError(b, "extra bytes at the end of the timezone")
|
return time.Time{}, unstable.NewParserError(b, "extra bytes at the end of the timezone")
|
||||||
}
|
}
|
||||||
|
|
||||||
t := time.Date(
|
t := time.Date(
|
||||||
@@ -160,7 +162,7 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
|
|||||||
|
|
||||||
const localDateTimeByteMinLen = 11
|
const localDateTimeByteMinLen = 11
|
||||||
if len(b) < localDateTimeByteMinLen {
|
if len(b) < localDateTimeByteMinLen {
|
||||||
return dt, nil, newDecodeError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
|
return dt, nil, unstable.NewParserError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
|
||||||
}
|
}
|
||||||
|
|
||||||
date, err := parseLocalDate(b[:10])
|
date, err := parseLocalDate(b[:10])
|
||||||
@@ -171,7 +173,7 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
|
|||||||
|
|
||||||
sep := b[10]
|
sep := b[10]
|
||||||
if sep != 'T' && sep != ' ' && sep != 't' {
|
if sep != 'T' && sep != ' ' && sep != 't' {
|
||||||
return dt, nil, newDecodeError(b[10:11], "datetime separator is expected to be T or a space")
|
return dt, nil, unstable.NewParserError(b[10:11], "datetime separator is expected to be T or a space")
|
||||||
}
|
}
|
||||||
|
|
||||||
t, rest, err := parseLocalTime(b[11:])
|
t, rest, err := parseLocalTime(b[11:])
|
||||||
@@ -195,7 +197,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
// check if b matches to have expected format HH:MM:SS[.NNNNNN]
|
// check if b matches to have expected format HH:MM:SS[.NNNNNN]
|
||||||
const localTimeByteLen = 8
|
const localTimeByteLen = 8
|
||||||
if len(b) < localTimeByteLen {
|
if len(b) < localTimeByteLen {
|
||||||
return t, nil, newDecodeError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
|
return t, nil, unstable.NewParserError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
@@ -206,10 +208,10 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if t.Hour > 23 {
|
if t.Hour > 23 {
|
||||||
return t, nil, newDecodeError(b[0:2], "hour cannot be greater 23")
|
return t, nil, unstable.NewParserError(b[0:2], "hour cannot be greater 23")
|
||||||
}
|
}
|
||||||
if b[2] != ':' {
|
if b[2] != ':' {
|
||||||
return t, nil, newDecodeError(b[2:3], "expecting colon between hours and minutes")
|
return t, nil, unstable.NewParserError(b[2:3], "expecting colon between hours and minutes")
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Minute, err = parseDecimalDigits(b[3:5])
|
t.Minute, err = parseDecimalDigits(b[3:5])
|
||||||
@@ -217,10 +219,10 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
return t, nil, err
|
return t, nil, err
|
||||||
}
|
}
|
||||||
if t.Minute > 59 {
|
if t.Minute > 59 {
|
||||||
return t, nil, newDecodeError(b[3:5], "minutes cannot be greater 59")
|
return t, nil, unstable.NewParserError(b[3:5], "minutes cannot be greater 59")
|
||||||
}
|
}
|
||||||
if b[5] != ':' {
|
if b[5] != ':' {
|
||||||
return t, nil, newDecodeError(b[5:6], "expecting colon between minutes and seconds")
|
return t, nil, unstable.NewParserError(b[5:6], "expecting colon between minutes and seconds")
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Second, err = parseDecimalDigits(b[6:8])
|
t.Second, err = parseDecimalDigits(b[6:8])
|
||||||
@@ -228,8 +230,8 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
return t, nil, err
|
return t, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if t.Second > 60 {
|
if t.Second > 59 {
|
||||||
return t, nil, newDecodeError(b[6:8], "seconds cannot be greater 60")
|
return t, nil, unstable.NewParserError(b[6:8], "seconds cannot be greater than 59")
|
||||||
}
|
}
|
||||||
|
|
||||||
b = b[8:]
|
b = b[8:]
|
||||||
@@ -242,7 +244,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
for i, c := range b[1:] {
|
for i, c := range b[1:] {
|
||||||
if !isDigit(c) {
|
if !isDigit(c) {
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
return t, nil, newDecodeError(b[0:1], "need at least one digit after fraction point")
|
return t, nil, unstable.NewParserError(b[0:1], "need at least one digit after fraction point")
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
@@ -266,7 +268,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if precision == 0 {
|
if precision == 0 {
|
||||||
return t, nil, newDecodeError(b[:1], "nanoseconds need at least one digit")
|
return t, nil, unstable.NewParserError(b[:1], "nanoseconds need at least one digit")
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Nanosecond = frac * nspow[precision]
|
t.Nanosecond = frac * nspow[precision]
|
||||||
@@ -277,7 +279,6 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||||||
return t, b, nil
|
return t, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
|
||||||
func parseFloat(b []byte) (float64, error) {
|
func parseFloat(b []byte) (float64, error) {
|
||||||
if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' {
|
if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' {
|
||||||
return math.NaN(), nil
|
return math.NaN(), nil
|
||||||
@@ -289,24 +290,24 @@ func parseFloat(b []byte) (float64, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if cleaned[0] == '.' {
|
if cleaned[0] == '.' {
|
||||||
return 0, newDecodeError(b, "float cannot start with a dot")
|
return 0, unstable.NewParserError(b, "float cannot start with a dot")
|
||||||
}
|
}
|
||||||
|
|
||||||
if cleaned[len(cleaned)-1] == '.' {
|
if cleaned[len(cleaned)-1] == '.' {
|
||||||
return 0, newDecodeError(b, "float cannot end with a dot")
|
return 0, unstable.NewParserError(b, "float cannot end with a dot")
|
||||||
}
|
}
|
||||||
|
|
||||||
dotAlreadySeen := false
|
dotAlreadySeen := false
|
||||||
for i, c := range cleaned {
|
for i, c := range cleaned {
|
||||||
if c == '.' {
|
if c == '.' {
|
||||||
if dotAlreadySeen {
|
if dotAlreadySeen {
|
||||||
return 0, newDecodeError(b[i:i+1], "float can have at most one decimal point")
|
return 0, unstable.NewParserError(b[i:i+1], "float can have at most one decimal point")
|
||||||
}
|
}
|
||||||
if !isDigit(cleaned[i-1]) {
|
if !isDigit(cleaned[i-1]) {
|
||||||
return 0, newDecodeError(b[i-1:i+1], "float decimal point must be preceded by a digit")
|
return 0, unstable.NewParserError(b[i-1:i+1], "float decimal point must be preceded by a digit")
|
||||||
}
|
}
|
||||||
if !isDigit(cleaned[i+1]) {
|
if !isDigit(cleaned[i+1]) {
|
||||||
return 0, newDecodeError(b[i:i+2], "float decimal point must be followed by a digit")
|
return 0, unstable.NewParserError(b[i:i+2], "float decimal point must be followed by a digit")
|
||||||
}
|
}
|
||||||
dotAlreadySeen = true
|
dotAlreadySeen = true
|
||||||
}
|
}
|
||||||
@@ -316,13 +317,13 @@ func parseFloat(b []byte) (float64, error) {
|
|||||||
if cleaned[0] == '+' || cleaned[0] == '-' {
|
if cleaned[0] == '+' || cleaned[0] == '-' {
|
||||||
start = 1
|
start = 1
|
||||||
}
|
}
|
||||||
if cleaned[start] == '0' && isDigit(cleaned[start+1]) {
|
if cleaned[start] == '0' && len(cleaned) > start+1 && isDigit(cleaned[start+1]) {
|
||||||
return 0, newDecodeError(b, "float integer part cannot have leading zeroes")
|
return 0, unstable.NewParserError(b, "float integer part cannot have leading zeroes")
|
||||||
}
|
}
|
||||||
|
|
||||||
f, err := strconv.ParseFloat(string(cleaned), 64)
|
f, err := strconv.ParseFloat(string(cleaned), 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "unable to parse float: %w", err)
|
return 0, unstable.NewParserError(b, "unable to parse float: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return f, nil
|
return f, nil
|
||||||
@@ -336,7 +337,7 @@ func parseIntHex(b []byte) (int64, error) {
|
|||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 16, 64)
|
i, err := strconv.ParseInt(string(cleaned), 16, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse hexadecimal number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse hexadecimal number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -350,7 +351,7 @@ func parseIntOct(b []byte) (int64, error) {
|
|||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 8, 64)
|
i, err := strconv.ParseInt(string(cleaned), 8, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse octal number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse octal number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -364,7 +365,7 @@ func parseIntBin(b []byte) (int64, error) {
|
|||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 2, 64)
|
i, err := strconv.ParseInt(string(cleaned), 2, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse binary number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse binary number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -387,12 +388,12 @@ func parseIntDec(b []byte) (int64, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' {
|
if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' {
|
||||||
return 0, newDecodeError(b, "leading zero not allowed on decimal number")
|
return 0, unstable.NewParserError(b, "leading zero not allowed on decimal number")
|
||||||
}
|
}
|
||||||
|
|
||||||
i, err := strconv.ParseInt(string(cleaned), 10, 64)
|
i, err := strconv.ParseInt(string(cleaned), 10, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, newDecodeError(b, "couldn't parse decimal number: %w", err)
|
return 0, unstable.NewParserError(b, "couldn't parse decimal number: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return i, nil
|
return i, nil
|
||||||
@@ -409,11 +410,11 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if b[start] == '_' {
|
if b[start] == '_' {
|
||||||
return nil, newDecodeError(b[start:start+1], "number cannot start with underscore")
|
return nil, unstable.NewParserError(b[start:start+1], "number cannot start with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[len(b)-1] == '_' {
|
if b[len(b)-1] == '_' {
|
||||||
return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore")
|
return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
// fast path
|
// fast path
|
||||||
@@ -435,7 +436,7 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||||||
c := b[i]
|
c := b[i]
|
||||||
if c == '_' {
|
if c == '_' {
|
||||||
if !before {
|
if !before {
|
||||||
return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores")
|
return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||||
}
|
}
|
||||||
before = false
|
before = false
|
||||||
} else {
|
} else {
|
||||||
@@ -449,11 +450,11 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||||||
|
|
||||||
func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
||||||
if b[0] == '_' {
|
if b[0] == '_' {
|
||||||
return nil, newDecodeError(b[0:1], "number cannot start with underscore")
|
return nil, unstable.NewParserError(b[0:1], "number cannot start with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[len(b)-1] == '_' {
|
if b[len(b)-1] == '_' {
|
||||||
return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore")
|
return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
|
||||||
}
|
}
|
||||||
|
|
||||||
// fast path
|
// fast path
|
||||||
@@ -476,10 +477,10 @@ func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
|||||||
switch c {
|
switch c {
|
||||||
case '_':
|
case '_':
|
||||||
if !before {
|
if !before {
|
||||||
return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores")
|
return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||||
}
|
}
|
||||||
if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') {
|
if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') {
|
||||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore before exponent")
|
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore before exponent")
|
||||||
}
|
}
|
||||||
before = false
|
before = false
|
||||||
case '+', '-':
|
case '+', '-':
|
||||||
@@ -488,15 +489,15 @@ func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
|||||||
before = false
|
before = false
|
||||||
case 'e', 'E':
|
case 'e', 'E':
|
||||||
if i < len(b)-1 && b[i+1] == '_' {
|
if i < len(b)-1 && b[i+1] == '_' {
|
||||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after exponent")
|
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after exponent")
|
||||||
}
|
}
|
||||||
cleaned = append(cleaned, c)
|
cleaned = append(cleaned, c)
|
||||||
case '.':
|
case '.':
|
||||||
if i < len(b)-1 && b[i+1] == '_' {
|
if i < len(b)-1 && b[i+1] == '_' {
|
||||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after decimal point")
|
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after decimal point")
|
||||||
}
|
}
|
||||||
if i > 0 && b[i-1] == '_' {
|
if i > 0 && b[i-1] == '_' {
|
||||||
return nil, newDecodeError(b[i-1:i], "cannot have underscore before decimal point")
|
return nil, unstable.NewParserError(b[i-1:i], "cannot have underscore before decimal point")
|
||||||
}
|
}
|
||||||
cleaned = append(cleaned, c)
|
cleaned = append(cleaned, c)
|
||||||
default:
|
default:
|
||||||
@@ -542,3 +543,7 @@ func daysIn(m int, year int) int {
|
|||||||
func isLeap(year int) bool {
|
func isLeap(year int) bool {
|
||||||
return year%4 == 0 && (year%100 != 0 || year%400 == 0)
|
return year%4 == 0 && (year%100 != 0 || year%400 == 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isDigit(r byte) bool {
|
||||||
|
return r >= '0' && r <= '9'
|
||||||
|
}
|
||||||
|
|||||||
@@ -2,10 +2,11 @@ package toml
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"reflect"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DecodeError represents an error encountered during the parsing or decoding
|
// DecodeError represents an error encountered during the parsing or decoding
|
||||||
@@ -53,27 +54,20 @@ func (s *StrictMissingError) String() string {
|
|||||||
return buf.String()
|
return buf.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
type Key []string
|
// Unwrap returns wrapped decode errors
|
||||||
|
//
|
||||||
// internal version of DecodeError that is used as the base to create a
|
// Implements errors.Join() interface.
|
||||||
// DecodeError with full context.
|
func (s *StrictMissingError) Unwrap() []error {
|
||||||
type decodeError struct {
|
errs := make([]error, len(s.Errors))
|
||||||
highlight []byte
|
for i := range s.Errors {
|
||||||
message string
|
errs[i] = &s.Errors[i]
|
||||||
key Key // optional
|
|
||||||
}
|
|
||||||
|
|
||||||
func (de *decodeError) Error() string {
|
|
||||||
return de.message
|
|
||||||
}
|
|
||||||
|
|
||||||
func newDecodeError(highlight []byte, format string, args ...interface{}) error {
|
|
||||||
return &decodeError{
|
|
||||||
highlight: highlight,
|
|
||||||
message: fmt.Errorf(format, args...).Error(),
|
|
||||||
}
|
}
|
||||||
|
return errs
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Key represents a TOML key as a sequence of key parts.
|
||||||
|
type Key []string
|
||||||
|
|
||||||
// Error returns the error message contained in the DecodeError.
|
// Error returns the error message contained in the DecodeError.
|
||||||
func (e *DecodeError) Error() string {
|
func (e *DecodeError) Error() string {
|
||||||
return "toml: " + e.message
|
return "toml: " + e.message
|
||||||
@@ -96,7 +90,7 @@ func (e *DecodeError) Key() Key {
|
|||||||
return e.key
|
return e.key
|
||||||
}
|
}
|
||||||
|
|
||||||
// decodeErrorFromHighlight creates a DecodeError referencing a highlighted
|
// wrapDecodeError creates a DecodeError referencing a highlighted
|
||||||
// range of bytes from document.
|
// range of bytes from document.
|
||||||
//
|
//
|
||||||
// highlight needs to be a sub-slice of document, or this function panics.
|
// highlight needs to be a sub-slice of document, or this function panics.
|
||||||
@@ -105,12 +99,12 @@ func (e *DecodeError) Key() Key {
|
|||||||
// highlight can be freely deallocated.
|
// highlight can be freely deallocated.
|
||||||
//
|
//
|
||||||
//nolint:funlen
|
//nolint:funlen
|
||||||
func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
func wrapDecodeError(document []byte, de *unstable.ParserError) *DecodeError {
|
||||||
offset := danger.SubsliceOffset(document, de.highlight)
|
offset := subsliceOffset(document, de.Highlight)
|
||||||
|
|
||||||
errMessage := de.Error()
|
errMessage := de.Error()
|
||||||
errLine, errColumn := positionAtEnd(document[:offset])
|
errLine, errColumn := positionAtEnd(document[:offset])
|
||||||
before, after := linesOfContext(document, de.highlight, offset, 3)
|
before, after := linesOfContext(document, de.Highlight, offset, 3)
|
||||||
|
|
||||||
var buf strings.Builder
|
var buf strings.Builder
|
||||||
|
|
||||||
@@ -140,7 +134,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||||||
buf.Write(before[0])
|
buf.Write(before[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.Write(de.highlight)
|
buf.Write(de.Highlight)
|
||||||
|
|
||||||
if len(after) > 0 {
|
if len(after) > 0 {
|
||||||
buf.Write(after[0])
|
buf.Write(after[0])
|
||||||
@@ -158,7 +152,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||||||
buf.WriteString(strings.Repeat(" ", len(before[0])))
|
buf.WriteString(strings.Repeat(" ", len(before[0])))
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.WriteString(strings.Repeat("~", len(de.highlight)))
|
buf.WriteString(strings.Repeat("~", len(de.Highlight)))
|
||||||
|
|
||||||
if len(errMessage) > 0 {
|
if len(errMessage) > 0 {
|
||||||
buf.WriteString(" ")
|
buf.WriteString(" ")
|
||||||
@@ -183,7 +177,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||||||
message: errMessage,
|
message: errMessage,
|
||||||
line: errLine,
|
line: errLine,
|
||||||
column: errColumn,
|
column: errColumn,
|
||||||
key: de.key,
|
key: de.Key,
|
||||||
human: buf.String(),
|
human: buf.String(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -266,5 +260,24 @@ func positionAtEnd(b []byte) (row int, column int) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return
|
return row, column
|
||||||
|
}
|
||||||
|
|
||||||
|
// subsliceOffset returns the byte offset of subslice within data.
|
||||||
|
// subslice must share the same backing array as data.
|
||||||
|
func subsliceOffset(data []byte, subslice []byte) int {
|
||||||
|
if len(subslice) == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use reflect to get the data pointers of both slices.
|
||||||
|
// This is safe because we're only reading the pointer values for comparison.
|
||||||
|
dataPtr := reflect.ValueOf(data).Pointer()
|
||||||
|
subPtr := reflect.ValueOf(subslice).Pointer()
|
||||||
|
|
||||||
|
offset := int(subPtr - dataPtr)
|
||||||
|
if offset < 0 || offset > len(data) {
|
||||||
|
panic("subslice is not within data")
|
||||||
|
}
|
||||||
|
return offset
|
||||||
}
|
}
|
||||||
|
|||||||
+139
-10
@@ -7,12 +7,12 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
//nolint:funlen
|
//nolint:funlen
|
||||||
func TestDecodeError(t *testing.T) {
|
func TestDecodeError(t *testing.T) {
|
||||||
|
|
||||||
examples := []struct {
|
examples := []struct {
|
||||||
desc string
|
desc string
|
||||||
doc [3]string
|
doc [3]string
|
||||||
@@ -160,19 +160,18 @@ line 5`,
|
|||||||
for _, e := range examples {
|
for _, e := range examples {
|
||||||
e := e
|
e := e
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
t.Run(e.desc, func(t *testing.T) {
|
||||||
|
|
||||||
b := bytes.Buffer{}
|
b := bytes.Buffer{}
|
||||||
b.Write([]byte(e.doc[0]))
|
b.WriteString(e.doc[0])
|
||||||
start := b.Len()
|
start := b.Len()
|
||||||
b.Write([]byte(e.doc[1]))
|
b.WriteString(e.doc[1])
|
||||||
end := b.Len()
|
end := b.Len()
|
||||||
b.Write([]byte(e.doc[2]))
|
b.WriteString(e.doc[2])
|
||||||
doc := b.Bytes()
|
doc := b.Bytes()
|
||||||
hl := doc[start:end]
|
hl := doc[start:end]
|
||||||
|
|
||||||
err := wrapDecodeError(doc, &decodeError{
|
err := wrapDecodeError(doc, &unstable.ParserError{
|
||||||
highlight: hl,
|
Highlight: hl,
|
||||||
message: e.msg,
|
Message: e.msg,
|
||||||
})
|
})
|
||||||
|
|
||||||
var derr *DecodeError
|
var derr *DecodeError
|
||||||
@@ -188,7 +187,6 @@ line 5`,
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestDecodeError_Accessors(t *testing.T) {
|
func TestDecodeError_Accessors(t *testing.T) {
|
||||||
|
|
||||||
e := DecodeError{
|
e := DecodeError{
|
||||||
message: "foo",
|
message: "foo",
|
||||||
line: 1,
|
line: 1,
|
||||||
@@ -204,6 +202,137 @@ func TestDecodeError_Accessors(t *testing.T) {
|
|||||||
assert.Equal(t, "bar", e.String())
|
assert.Equal(t, "bar", e.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDecodeError_InvalidKeyStartAfterComment(t *testing.T) {
|
||||||
|
// Regression for https://github.com/pelletier/go-toml/issues/1047: the "="
|
||||||
|
// that starts an invalid keyval must be reported on line 2, column 1, with
|
||||||
|
// the human-readable context pointing at that byte (not the document end).
|
||||||
|
doc := "# comment\n= \"value\""
|
||||||
|
|
||||||
|
var v map[string]any
|
||||||
|
err := Unmarshal([]byte(doc), &v)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected an error")
|
||||||
|
}
|
||||||
|
|
||||||
|
var derr *DecodeError
|
||||||
|
if !errors.As(err, &derr) {
|
||||||
|
t.Fatalf("expected *DecodeError, got %T", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
row, col := derr.Position()
|
||||||
|
if row != 2 || col != 1 {
|
||||||
|
t.Errorf("Position(): got row %d col %d, want row 2 col 1", row, col)
|
||||||
|
}
|
||||||
|
|
||||||
|
human := derr.String()
|
||||||
|
if !strings.Contains(human, `2| = "value"`) {
|
||||||
|
t.Errorf("human output should show the error line; got:\n%s", human)
|
||||||
|
}
|
||||||
|
// Caret line uses line-number column width padding; only the "| ~" part is stable here.
|
||||||
|
if !strings.Contains(human, "| ~ invalid character at start of key") {
|
||||||
|
t.Errorf("human output should underline '=' and include the parser message; got:\n%s", human)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDecodeError_DuplicateContent(t *testing.T) {
|
||||||
|
// This test verifies that when the same content appears multiple times
|
||||||
|
// in the document, the error correctly points to the actual location
|
||||||
|
// of the error, not the first occurrence of the content.
|
||||||
|
//
|
||||||
|
// The document has "1__2" on line 1 and "3__4" on line 2.
|
||||||
|
// Both have "__" which is invalid, but we want to ensure errors
|
||||||
|
// on line 2 report line 2, not line 1.
|
||||||
|
|
||||||
|
doc := `a = 1
|
||||||
|
b = 3__4`
|
||||||
|
|
||||||
|
var v map[string]int
|
||||||
|
err := Unmarshal([]byte(doc), &v)
|
||||||
|
|
||||||
|
var derr *DecodeError
|
||||||
|
if !errors.As(err, &derr) {
|
||||||
|
t.Fatal("error not in expected format")
|
||||||
|
}
|
||||||
|
|
||||||
|
row, col := derr.Position()
|
||||||
|
// The error should be on line 2 where "3__4" is
|
||||||
|
if row != 2 {
|
||||||
|
t.Errorf("expected error on row 2, got row %d", row)
|
||||||
|
}
|
||||||
|
// Column should point to the "__" part (after "3")
|
||||||
|
if col < 5 {
|
||||||
|
t.Errorf("expected error at column >= 5, got column %d", col)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDecodeError_Position(t *testing.T) {
|
||||||
|
// Test that error positions are correctly reported for various error locations
|
||||||
|
examples := []struct {
|
||||||
|
name string
|
||||||
|
doc string
|
||||||
|
expectedRow int
|
||||||
|
minCol int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "error on first line",
|
||||||
|
doc: `a = 1__2`,
|
||||||
|
expectedRow: 1,
|
||||||
|
minCol: 5,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "error on second line",
|
||||||
|
doc: "a = 1\nb = 2__3",
|
||||||
|
expectedRow: 2,
|
||||||
|
minCol: 5,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "error on third line",
|
||||||
|
doc: "a = 1\nb = 2\nc = 3__4",
|
||||||
|
expectedRow: 3,
|
||||||
|
minCol: 5,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "missing equals on last line without trailing newline",
|
||||||
|
doc: "a = 1\nb = 2\nc",
|
||||||
|
expectedRow: 3,
|
||||||
|
minCol: 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
t.Run(e.name, func(t *testing.T) {
|
||||||
|
var v map[string]int
|
||||||
|
err := Unmarshal([]byte(e.doc), &v)
|
||||||
|
|
||||||
|
var derr *DecodeError
|
||||||
|
if !errors.As(err, &derr) {
|
||||||
|
t.Fatal("error not in expected format")
|
||||||
|
}
|
||||||
|
|
||||||
|
row, col := derr.Position()
|
||||||
|
assert.Equal(t, e.expectedRow, row)
|
||||||
|
if col < e.minCol {
|
||||||
|
t.Errorf("expected column >= %d, got %d", e.minCol, col)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestStrictErrorUnwrap(t *testing.T) {
|
||||||
|
fo := bytes.NewBufferString(`
|
||||||
|
Missing = 1
|
||||||
|
OtherMissing = 1
|
||||||
|
`)
|
||||||
|
var out struct{}
|
||||||
|
err := NewDecoder(fo).DisallowUnknownFields().Decode(&out)
|
||||||
|
assert.Error(t, err)
|
||||||
|
|
||||||
|
strictErr := &StrictMissingError{}
|
||||||
|
assert.True(t, errors.As(err, &strictErr))
|
||||||
|
|
||||||
|
assert.Equal(t, 2, len(strictErr.Unwrap()))
|
||||||
|
}
|
||||||
|
|
||||||
func ExampleDecodeError() {
|
func ExampleDecodeError() {
|
||||||
doc := `name = 123__456`
|
doc := `name = 123__456`
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,37 @@
|
|||||||
|
package toml_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
type customInt int
|
||||||
|
|
||||||
|
func (i *customInt) UnmarshalText(b []byte) error {
|
||||||
|
x, err := strconv.ParseInt(string(b), 10, 32)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*i = customInt(x * 100)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type doc struct {
|
||||||
|
Value customInt
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleUnmarshal_textUnmarshal() {
|
||||||
|
var x doc
|
||||||
|
|
||||||
|
data := []byte(`value = "42"`)
|
||||||
|
err := toml.Unmarshal(data, &x)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
fmt.Println(x)
|
||||||
|
// Output:
|
||||||
|
// {4200}
|
||||||
|
}
|
||||||
+21
-14
@@ -4,21 +4,28 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestFastSimple(t *testing.T) {
|
func TestFastSimpleInt(t *testing.T) {
|
||||||
m := map[string]int64{}
|
m := map[string]int64{}
|
||||||
err := toml.Unmarshal([]byte(`a = 42`), &m)
|
err := toml.Unmarshal([]byte(`a = 42`), &m)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, map[string]int64{"a": 42}, m)
|
assert.Equal(t, map[string]int64{"a": 42}, m)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFastSimpleFloat(t *testing.T) {
|
||||||
|
m := map[string]float64{}
|
||||||
|
err := toml.Unmarshal([]byte("a = 42\nb = 1.1\nc = 12341234123412341234123412341234"), &m)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, map[string]float64{"a": 42, "b": 1.1, "c": 1.2341234123412342e+31}, m)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFastSimpleString(t *testing.T) {
|
func TestFastSimpleString(t *testing.T) {
|
||||||
m := map[string]string{}
|
m := map[string]string{}
|
||||||
err := toml.Unmarshal([]byte(`a = "hello"`), &m)
|
err := toml.Unmarshal([]byte(`a = "hello"`), &m)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, map[string]string{"a": "hello"}, m)
|
assert.Equal(t, map[string]string{"a": "hello"}, m)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFastSimpleInterface(t *testing.T) {
|
func TestFastSimpleInterface(t *testing.T) {
|
||||||
@@ -26,8 +33,8 @@ func TestFastSimpleInterface(t *testing.T) {
|
|||||||
err := toml.Unmarshal([]byte(`
|
err := toml.Unmarshal([]byte(`
|
||||||
a = "hello"
|
a = "hello"
|
||||||
b = 42`), &m)
|
b = 42`), &m)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, map[string]interface{}{
|
assert.Equal(t, map[string]interface{}{
|
||||||
"a": "hello",
|
"a": "hello",
|
||||||
"b": int64(42),
|
"b": int64(42),
|
||||||
}, m)
|
}, m)
|
||||||
@@ -39,8 +46,8 @@ func TestFastMultipartKeyInterface(t *testing.T) {
|
|||||||
a.interim = "test"
|
a.interim = "test"
|
||||||
a.b.c = "hello"
|
a.b.c = "hello"
|
||||||
b = 42`), &m)
|
b = 42`), &m)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, map[string]interface{}{
|
assert.Equal(t, map[string]interface{}{
|
||||||
"a": map[string]interface{}{
|
"a": map[string]interface{}{
|
||||||
"interim": "test",
|
"interim": "test",
|
||||||
"b": map[string]interface{}{
|
"b": map[string]interface{}{
|
||||||
@@ -59,8 +66,8 @@ func TestFastExistingMap(t *testing.T) {
|
|||||||
ints.one = 1
|
ints.one = 1
|
||||||
ints.two = 2
|
ints.two = 2
|
||||||
strings.yo = "hello"`), &m)
|
strings.yo = "hello"`), &m)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, map[string]interface{}{
|
assert.Equal(t, map[string]interface{}{
|
||||||
"ints": map[string]interface{}{
|
"ints": map[string]interface{}{
|
||||||
"one": int64(1),
|
"one": int64(1),
|
||||||
"two": int64(2),
|
"two": int64(2),
|
||||||
@@ -83,9 +90,9 @@ func TestFastArrayTable(t *testing.T) {
|
|||||||
m := map[string]interface{}{}
|
m := map[string]interface{}{}
|
||||||
|
|
||||||
err := toml.Unmarshal(b, &m)
|
err := toml.Unmarshal(b, &m)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
require.Equal(t, map[string]interface{}{
|
assert.Equal(t, map[string]interface{}{
|
||||||
"root": map[string]interface{}{
|
"root": map[string]interface{}{
|
||||||
"nested": []interface{}{
|
"nested": []interface{}{
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
|
|||||||
+5
-8
@@ -1,21 +1,18 @@
|
|||||||
//go:build go1.18 || go1.19
|
|
||||||
// +build go1.18 go1.19
|
|
||||||
|
|
||||||
package toml_test
|
package toml_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"io/ioutil"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func FuzzUnmarshal(f *testing.F) {
|
func FuzzUnmarshal(f *testing.F) {
|
||||||
file, err := ioutil.ReadFile("benchmark/benchmark.toml")
|
file, err := os.ReadFile("benchmark/benchmark.toml")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
f.Error(err)
|
||||||
}
|
}
|
||||||
f.Add(file)
|
f.Add(file)
|
||||||
|
|
||||||
@@ -51,6 +48,6 @@ func FuzzUnmarshal(f *testing.F) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("failed round trip: %s", err)
|
t.Fatalf("failed round trip: %s", err)
|
||||||
}
|
}
|
||||||
require.Equal(t, v, v2)
|
assert.Equal(t, v, v2)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
module github.com/pelletier/go-toml/v2
|
module github.com/pelletier/go-toml/v2
|
||||||
|
|
||||||
go 1.16
|
go 1.21.0
|
||||||
|
|
||||||
require github.com/stretchr/testify v1.8.0
|
|
||||||
|
|||||||
@@ -1,15 +0,0 @@
|
|||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
|
||||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
|
||||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
|
||||||
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
|
|
||||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
|
|||||||
@@ -0,0 +1,141 @@
|
|||||||
|
// Package assert provides assertion functions for unit testing.
|
||||||
|
package assert
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// True asserts that an expression is true.
|
||||||
|
func True(tb testing.TB, ok bool, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
if ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tb.Fatal(formatMsgAndArgs("Expected expression to be true", msgAndArgs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// False asserts that an expression is false.
|
||||||
|
func False(tb testing.TB, ok bool, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tb.Fatal(formatMsgAndArgs("Expected expression to be false", msgAndArgs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Equal asserts that "expected" and "actual" are equal.
|
||||||
|
func Equal[T any](tb testing.TB, expected, actual T, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
if objectsAreEqual(expected, actual) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
msg := formatMsgAndArgs("Expected values to be equal:", msgAndArgs...)
|
||||||
|
tb.Fatalf("%s\n%s", msg, diff(expected, actual))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error asserts that an error is not nil.
|
||||||
|
func Error(tb testing.TB, err error, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tb.Fatal(formatMsgAndArgs("Expected an error", msgAndArgs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NoError asserts that an error is nil.
|
||||||
|
func NoError(tb testing.TB, err error, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
if err == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
msg := formatMsgAndArgs("Unexpected error:", msgAndArgs...)
|
||||||
|
tb.Fatalf("%s\n%+v", msg, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Panics asserts that the given function panics.
|
||||||
|
func Panics(tb testing.TB, fn func(), msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
defer func() {
|
||||||
|
if recover() == nil {
|
||||||
|
msg := formatMsgAndArgs("Expected function to panic", msgAndArgs...)
|
||||||
|
tb.Fatal(msg)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
fn()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Zero asserts that a value is its zero value.
|
||||||
|
func Zero[T any](tb testing.TB, value T, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
var zero T
|
||||||
|
if objectsAreEqual(value, zero) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
val := reflect.ValueOf(value)
|
||||||
|
if (val.Kind() == reflect.Slice || val.Kind() == reflect.Map || val.Kind() == reflect.Array) && val.Len() == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
msg := formatMsgAndArgs("Expected zero value but got:", msgAndArgs...)
|
||||||
|
tb.Fatalf("%s\n%v", msg, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
func NotZero[T any](tb testing.TB, value T, msgAndArgs ...any) {
|
||||||
|
tb.Helper()
|
||||||
|
var zero T
|
||||||
|
if !objectsAreEqual(value, zero) {
|
||||||
|
val := reflect.ValueOf(value)
|
||||||
|
switch val.Kind() {
|
||||||
|
case reflect.Slice, reflect.Map, reflect.Array:
|
||||||
|
if val.Len() > 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
msg := formatMsgAndArgs("Unexpected zero value:", msgAndArgs...)
|
||||||
|
tb.Fatalf("%s\n%v", msg, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatMsgAndArgs(msg string, args ...any) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
format, ok := args[0].(string)
|
||||||
|
if !ok {
|
||||||
|
panic("message argument must be a fmt string")
|
||||||
|
}
|
||||||
|
return fmt.Sprintf(format, args[1:]...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func diff(expected, actual any) string {
|
||||||
|
lines := []string{
|
||||||
|
"expected:",
|
||||||
|
fmt.Sprintf("%v", expected),
|
||||||
|
"actual:",
|
||||||
|
fmt.Sprintf("%v", actual),
|
||||||
|
}
|
||||||
|
return strings.Join(lines, "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func objectsAreEqual(expected, actual any) bool {
|
||||||
|
if expected == nil || actual == nil {
|
||||||
|
return expected == actual
|
||||||
|
}
|
||||||
|
if exp, eok := expected.([]byte); eok {
|
||||||
|
if act, aok := actual.([]byte); aok {
|
||||||
|
return bytes.Equal(exp, act)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if exp, eok := expected.(string); eok {
|
||||||
|
if act, aok := actual.(string); aok {
|
||||||
|
return exp == act
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return reflect.DeepEqual(expected, actual)
|
||||||
|
}
|
||||||
@@ -0,0 +1,217 @@
|
|||||||
|
package assert
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Data struct {
|
||||||
|
Label string
|
||||||
|
Value int64
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBadMessage(t *testing.T) {
|
||||||
|
invalidMessage := func() { True(t, false, 1234) }
|
||||||
|
assertOk(t, "Non-fmt message value", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Panics(tb, invalidMessage)
|
||||||
|
})
|
||||||
|
assertFail(t, "Non-fmt message value", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
True(tb, false, "example %s", "message")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTrue(t *testing.T) {
|
||||||
|
assertOk(t, "Succeed", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
True(tb, 1 > 0)
|
||||||
|
})
|
||||||
|
assertFail(t, "Fail", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
True(tb, 1 < 0)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFalse(t *testing.T) {
|
||||||
|
assertOk(t, "Succeed", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
False(tb, 1 < 0)
|
||||||
|
})
|
||||||
|
assertFail(t, "Fail", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
False(tb, 1 > 0)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEqual(t *testing.T) {
|
||||||
|
assertOk(t, "Nil", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, interface{}(nil), interface{}(nil))
|
||||||
|
})
|
||||||
|
assertOk(t, "Identical structs", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, Data{"expected", 1234}, Data{"expected", 1234})
|
||||||
|
})
|
||||||
|
assertFail(t, "Different structs", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, Data{"expected", 1234}, Data{"actual", 1234})
|
||||||
|
})
|
||||||
|
assertOk(t, "Identical numbers", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, 1234, 1234)
|
||||||
|
})
|
||||||
|
assertFail(t, "Identical numbers", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, 1234, 1324)
|
||||||
|
})
|
||||||
|
assertOk(t, "Zero-length byte arrays", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, []byte(nil), []byte(""))
|
||||||
|
})
|
||||||
|
assertOk(t, "Identical byte arrays", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, []byte{1, 2, 3, 4}, []byte{1, 2, 3, 4})
|
||||||
|
})
|
||||||
|
assertFail(t, "Different byte arrays", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, []byte{1, 2, 3, 4}, []byte{1, 3, 2, 4})
|
||||||
|
})
|
||||||
|
assertOk(t, "Identical strings", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, "example", "example")
|
||||||
|
})
|
||||||
|
assertFail(t, "Identical strings", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Equal(tb, "example", "elpmaxe")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestError(t *testing.T) {
|
||||||
|
assertOk(t, "Error", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Error(tb, errors.New("example"))
|
||||||
|
})
|
||||||
|
assertFail(t, "Nil", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Error(tb, nil)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNoError(t *testing.T) {
|
||||||
|
assertFail(t, "Error", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
NoError(tb, errors.New("example"))
|
||||||
|
})
|
||||||
|
assertOk(t, "Nil", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
NoError(tb, nil)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPanics(t *testing.T) {
|
||||||
|
willPanic := func() { panic("example") }
|
||||||
|
wontPanic := func() {}
|
||||||
|
assertOk(t, "Will panic", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Panics(tb, willPanic)
|
||||||
|
})
|
||||||
|
assertFail(t, "Won't panic", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Panics(tb, wontPanic)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestZero(t *testing.T) {
|
||||||
|
assertOk(t, "Empty struct", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Zero(tb, Data{})
|
||||||
|
})
|
||||||
|
assertFail(t, "Non-empty struct", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
Zero(tb, Data{Label: "example"})
|
||||||
|
})
|
||||||
|
assertOk(t, "Nil slice", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
var slice []int
|
||||||
|
Zero(tb, slice)
|
||||||
|
})
|
||||||
|
assertFail(t, "Non-empty slice", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
slice := []int{1, 2, 3, 4}
|
||||||
|
Zero(tb, slice)
|
||||||
|
})
|
||||||
|
assertOk(t, "Zero-length slice", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
slice := []int{}
|
||||||
|
Zero(tb, slice)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNotZero(t *testing.T) {
|
||||||
|
assertFail(t, "Empty struct", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
zero := Data{}
|
||||||
|
NotZero(tb, zero)
|
||||||
|
})
|
||||||
|
assertOk(t, "Non-empty struct", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
notZero := Data{Label: "example"}
|
||||||
|
NotZero(tb, notZero)
|
||||||
|
})
|
||||||
|
assertFail(t, "Nil slice", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
var slice []int
|
||||||
|
NotZero(tb, slice)
|
||||||
|
})
|
||||||
|
assertFail(t, "Zero-length slice", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
slice := []int{}
|
||||||
|
NotZero(tb, slice)
|
||||||
|
})
|
||||||
|
assertOk(t, "Non-empty slice", func(tb testing.TB) {
|
||||||
|
tb.Helper()
|
||||||
|
slice := []int{1, 2, 3, 4}
|
||||||
|
NotZero(tb, slice)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
type testCase struct {
|
||||||
|
*testing.T
|
||||||
|
failed string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *testCase) Fatal(args ...interface{}) {
|
||||||
|
t.failed = fmt.Sprint(args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *testCase) Fatalf(message string, args ...interface{}) {
|
||||||
|
t.failed = fmt.Sprintf(message, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func assertFail(t *testing.T, name string, fn func(testing.TB)) {
|
||||||
|
t.Helper()
|
||||||
|
t.Run(name, func(t *testing.T) {
|
||||||
|
t.Helper()
|
||||||
|
test := &testCase{T: t}
|
||||||
|
fn(test)
|
||||||
|
if test.failed == "" {
|
||||||
|
t.Fatal("Test expected to fail but did not")
|
||||||
|
} else {
|
||||||
|
t.Log(test.failed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func assertOk(t *testing.T, name string, fn func(testing.TB)) {
|
||||||
|
t.Helper()
|
||||||
|
t.Run(name, func(t *testing.T) {
|
||||||
|
t.Helper()
|
||||||
|
test := &testCase{T: t}
|
||||||
|
fn(test)
|
||||||
|
if test.failed != "" {
|
||||||
|
t.Fatal("Test expected to succeed but did not:\n", test.failed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -1,144 +0,0 @@
|
|||||||
package ast
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"unsafe"
|
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Iterator starts uninitialized, you need to call Next() first.
|
|
||||||
//
|
|
||||||
// For example:
|
|
||||||
//
|
|
||||||
// it := n.Children()
|
|
||||||
// for it.Next() {
|
|
||||||
// it.Node()
|
|
||||||
// }
|
|
||||||
type Iterator struct {
|
|
||||||
started bool
|
|
||||||
node *Node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Next moves the iterator forward and returns true if points to a
|
|
||||||
// node, false otherwise.
|
|
||||||
func (c *Iterator) Next() bool {
|
|
||||||
if !c.started {
|
|
||||||
c.started = true
|
|
||||||
} else if c.node.Valid() {
|
|
||||||
c.node = c.node.Next()
|
|
||||||
}
|
|
||||||
return c.node.Valid()
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsLast returns true if the current node of the iterator is the last
|
|
||||||
// one. Subsequent call to Next() will return false.
|
|
||||||
func (c *Iterator) IsLast() bool {
|
|
||||||
return c.node.next == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// Node returns a copy of the node pointed at by the iterator.
|
|
||||||
func (c *Iterator) Node() *Node {
|
|
||||||
return c.node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Root contains a full AST.
|
|
||||||
//
|
|
||||||
// It is immutable once constructed with Builder.
|
|
||||||
type Root struct {
|
|
||||||
nodes []Node
|
|
||||||
}
|
|
||||||
|
|
||||||
// Iterator over the top level nodes.
|
|
||||||
func (r *Root) Iterator() Iterator {
|
|
||||||
it := Iterator{}
|
|
||||||
if len(r.nodes) > 0 {
|
|
||||||
it.node = &r.nodes[0]
|
|
||||||
}
|
|
||||||
return it
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Root) at(idx Reference) *Node {
|
|
||||||
return &r.nodes[idx]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Arrays have one child per element in the array. InlineTables have
|
|
||||||
// one child per key-value pair in the table. KeyValues have at least
|
|
||||||
// two children. The first one is the value. The rest make a
|
|
||||||
// potentially dotted key. Table and Array table have one child per
|
|
||||||
// element of the key they represent (same as KeyValue, but without
|
|
||||||
// the last node being the value).
|
|
||||||
type Node struct {
|
|
||||||
Kind Kind
|
|
||||||
Raw Range // Raw bytes from the input.
|
|
||||||
Data []byte // Node value (either allocated or referencing the input).
|
|
||||||
|
|
||||||
// References to other nodes, as offsets in the backing array
|
|
||||||
// from this node. References can go backward, so those can be
|
|
||||||
// negative.
|
|
||||||
next int // 0 if last element
|
|
||||||
child int // 0 if no child
|
|
||||||
}
|
|
||||||
|
|
||||||
type Range struct {
|
|
||||||
Offset uint32
|
|
||||||
Length uint32
|
|
||||||
}
|
|
||||||
|
|
||||||
// Next returns a copy of the next node, or an invalid Node if there
|
|
||||||
// is no next node.
|
|
||||||
func (n *Node) Next() *Node {
|
|
||||||
if n.next == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
ptr := unsafe.Pointer(n)
|
|
||||||
size := unsafe.Sizeof(Node{})
|
|
||||||
return (*Node)(danger.Stride(ptr, size, n.next))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Child returns a copy of the first child node of this node. Other
|
|
||||||
// children can be accessed calling Next on the first child. Returns
|
|
||||||
// an invalid Node if there is none.
|
|
||||||
func (n *Node) Child() *Node {
|
|
||||||
if n.child == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
ptr := unsafe.Pointer(n)
|
|
||||||
size := unsafe.Sizeof(Node{})
|
|
||||||
return (*Node)(danger.Stride(ptr, size, n.child))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Valid returns true if the node's kind is set (not to Invalid).
|
|
||||||
func (n *Node) Valid() bool {
|
|
||||||
return n != nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Key returns the child nodes making the Key on a supported
|
|
||||||
// node. Panics otherwise. They are guaranteed to be all be of the
|
|
||||||
// Kind Key. A simple key would return just one element.
|
|
||||||
func (n *Node) Key() Iterator {
|
|
||||||
switch n.Kind {
|
|
||||||
case KeyValue:
|
|
||||||
value := n.Child()
|
|
||||||
if !value.Valid() {
|
|
||||||
panic(fmt.Errorf("KeyValue should have at least two children"))
|
|
||||||
}
|
|
||||||
return Iterator{node: value.Next()}
|
|
||||||
case Table, ArrayTable:
|
|
||||||
return Iterator{node: n.Child()}
|
|
||||||
default:
|
|
||||||
panic(fmt.Errorf("Key() is not supported on a %s", n.Kind))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Value returns a pointer to the value node of a KeyValue.
|
|
||||||
// Guaranteed to be non-nil. Panics if not called on a KeyValue node,
|
|
||||||
// or if the Children are malformed.
|
|
||||||
func (n *Node) Value() *Node {
|
|
||||||
return n.Child()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Children returns an iterator over a node's children.
|
|
||||||
func (n *Node) Children() Iterator {
|
|
||||||
return Iterator{node: n.Child()}
|
|
||||||
}
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
package ast
|
|
||||||
|
|
||||||
type Reference int
|
|
||||||
|
|
||||||
const InvalidReference Reference = -1
|
|
||||||
|
|
||||||
func (r Reference) Valid() bool {
|
|
||||||
return r != InvalidReference
|
|
||||||
}
|
|
||||||
|
|
||||||
type Builder struct {
|
|
||||||
tree Root
|
|
||||||
lastIdx int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Tree() *Root {
|
|
||||||
return &b.tree
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) NodeAt(ref Reference) *Node {
|
|
||||||
return b.tree.at(ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Reset() {
|
|
||||||
b.tree.nodes = b.tree.nodes[:0]
|
|
||||||
b.lastIdx = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Push(n Node) Reference {
|
|
||||||
b.lastIdx = len(b.tree.nodes)
|
|
||||||
b.tree.nodes = append(b.tree.nodes, n)
|
|
||||||
return Reference(b.lastIdx)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) PushAndChain(n Node) Reference {
|
|
||||||
newIdx := len(b.tree.nodes)
|
|
||||||
b.tree.nodes = append(b.tree.nodes, n)
|
|
||||||
if b.lastIdx >= 0 {
|
|
||||||
b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
|
|
||||||
}
|
|
||||||
b.lastIdx = newIdx
|
|
||||||
return Reference(b.lastIdx)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) AttachChild(parent Reference, child Reference) {
|
|
||||||
b.tree.nodes[parent].child = int(child) - int(parent)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Chain(from Reference, to Reference) {
|
|
||||||
b.tree.nodes[from].next = int(to) - int(from)
|
|
||||||
}
|
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
package characters
|
||||||
|
|
||||||
|
var invalidASCIITable = [256]bool{
|
||||||
|
0x00: true,
|
||||||
|
0x01: true,
|
||||||
|
0x02: true,
|
||||||
|
0x03: true,
|
||||||
|
0x04: true,
|
||||||
|
0x05: true,
|
||||||
|
0x06: true,
|
||||||
|
0x07: true,
|
||||||
|
0x08: true,
|
||||||
|
// 0x09 TAB
|
||||||
|
// 0x0A LF
|
||||||
|
0x0B: true,
|
||||||
|
0x0C: true,
|
||||||
|
// 0x0D CR
|
||||||
|
0x0E: true,
|
||||||
|
0x0F: true,
|
||||||
|
0x10: true,
|
||||||
|
0x11: true,
|
||||||
|
0x12: true,
|
||||||
|
0x13: true,
|
||||||
|
0x14: true,
|
||||||
|
0x15: true,
|
||||||
|
0x16: true,
|
||||||
|
0x17: true,
|
||||||
|
0x18: true,
|
||||||
|
0x19: true,
|
||||||
|
0x1A: true,
|
||||||
|
0x1B: true,
|
||||||
|
0x1C: true,
|
||||||
|
0x1D: true,
|
||||||
|
0x1E: true,
|
||||||
|
0x1F: true,
|
||||||
|
// 0x20 - 0x7E Printable ASCII characters
|
||||||
|
0x7F: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func InvalidASCII(b byte) bool {
|
||||||
|
return invalidASCIITable[b]
|
||||||
|
}
|
||||||
@@ -1,20 +1,12 @@
|
|||||||
package toml
|
// Package characters provides functions for working with string encodings.
|
||||||
|
package characters
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
)
|
)
|
||||||
|
|
||||||
type utf8Err struct {
|
// Utf8TomlValidAlreadyEscaped verifies that a given string is only made of
|
||||||
Index int
|
// valid UTF-8 characters allowed by the TOML spec:
|
||||||
Size int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (u utf8Err) Zero() bool {
|
|
||||||
return u.Size == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verified that a given string is only made of valid UTF-8 characters allowed
|
|
||||||
// by the TOML spec:
|
|
||||||
//
|
//
|
||||||
// Any Unicode character may be used except those that must be escaped:
|
// Any Unicode character may be used except those that must be escaped:
|
||||||
// quotation mark, backslash, and the control characters other than tab (U+0000
|
// quotation mark, backslash, and the control characters other than tab (U+0000
|
||||||
@@ -23,8 +15,8 @@ func (u utf8Err) Zero() bool {
|
|||||||
// It is a copy of the Go 1.17 utf8.Valid implementation, tweaked to exit early
|
// It is a copy of the Go 1.17 utf8.Valid implementation, tweaked to exit early
|
||||||
// when a character is not allowed.
|
// when a character is not allowed.
|
||||||
//
|
//
|
||||||
// The returned utf8Err is Zero() if the string is valid, or contains the byte
|
// The returned slice is empty if the string is valid, or contains the bytes
|
||||||
// index and size of the invalid character.
|
// of the invalid character.
|
||||||
//
|
//
|
||||||
// quotation mark => already checked
|
// quotation mark => already checked
|
||||||
// backslash => already checked
|
// backslash => already checked
|
||||||
@@ -32,9 +24,8 @@ func (u utf8Err) Zero() bool {
|
|||||||
// 0x9 => tab, ok
|
// 0x9 => tab, ok
|
||||||
// 0xA - 0x1F => invalid
|
// 0xA - 0x1F => invalid
|
||||||
// 0x7F => invalid
|
// 0x7F => invalid
|
||||||
func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
func Utf8TomlValidAlreadyEscaped(p []byte) []byte {
|
||||||
// Fast path. Check for and skip 8 bytes of ASCII characters per iteration.
|
// Fast path. Check for and skip 8 bytes of ASCII characters per iteration.
|
||||||
offset := 0
|
|
||||||
for len(p) >= 8 {
|
for len(p) >= 8 {
|
||||||
// Combining two 32 bit loads allows the same code to be used
|
// Combining two 32 bit loads allows the same code to be used
|
||||||
// for 32 and 64 bit platforms.
|
// for 32 and 64 bit platforms.
|
||||||
@@ -48,24 +39,19 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for i, b := range p[:8] {
|
for i, b := range p[:8] {
|
||||||
if invalidAscii(b) {
|
if InvalidASCII(b) {
|
||||||
err.Index = offset + i
|
return p[i : i+1]
|
||||||
err.Size = 1
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
p = p[8:]
|
p = p[8:]
|
||||||
offset += 8
|
|
||||||
}
|
}
|
||||||
n := len(p)
|
n := len(p)
|
||||||
for i := 0; i < n; {
|
for i := 0; i < n; {
|
||||||
pi := p[i]
|
pi := p[i]
|
||||||
if pi < utf8.RuneSelf {
|
if pi < utf8.RuneSelf {
|
||||||
if invalidAscii(pi) {
|
if InvalidASCII(pi) {
|
||||||
err.Index = offset + i
|
return p[i : i+1]
|
||||||
err.Size = 1
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
continue
|
continue
|
||||||
@@ -73,44 +59,34 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||||||
x := first[pi]
|
x := first[pi]
|
||||||
if x == xx {
|
if x == xx {
|
||||||
// Illegal starter byte.
|
// Illegal starter byte.
|
||||||
err.Index = offset + i
|
return p[i : i+1]
|
||||||
err.Size = 1
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
size := int(x & 7)
|
size := int(x & 7)
|
||||||
if i+size > n {
|
if i+size > n {
|
||||||
// Short or invalid.
|
// Short or invalid.
|
||||||
err.Index = offset + i
|
return p[i:n]
|
||||||
err.Size = n - i
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
accept := acceptRanges[x>>4]
|
accept := acceptRanges[x>>4]
|
||||||
if c := p[i+1]; c < accept.lo || accept.hi < c {
|
if c := p[i+1]; c < accept.lo || accept.hi < c {
|
||||||
err.Index = offset + i
|
return p[i : i+2]
|
||||||
err.Size = 2
|
} else if size == 2 { //revive:disable:empty-block
|
||||||
return
|
|
||||||
} else if size == 2 {
|
|
||||||
} else if c := p[i+2]; c < locb || hicb < c {
|
} else if c := p[i+2]; c < locb || hicb < c {
|
||||||
err.Index = offset + i
|
return p[i : i+3]
|
||||||
err.Size = 3
|
} else if size == 3 { //revive:disable:empty-block
|
||||||
return
|
|
||||||
} else if size == 3 {
|
|
||||||
} else if c := p[i+3]; c < locb || hicb < c {
|
} else if c := p[i+3]; c < locb || hicb < c {
|
||||||
err.Index = offset + i
|
return p[i : i+4]
|
||||||
err.Size = 4
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
i += size
|
i += size
|
||||||
}
|
}
|
||||||
return
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return the size of the next rune if valid, 0 otherwise.
|
// Utf8ValidNext returns the size of the next rune if valid, 0 otherwise.
|
||||||
func utf8ValidNext(p []byte) int {
|
func Utf8ValidNext(p []byte) int {
|
||||||
c := p[0]
|
c := p[0]
|
||||||
|
|
||||||
if c < utf8.RuneSelf {
|
if c < utf8.RuneSelf {
|
||||||
if invalidAscii(c) {
|
if InvalidASCII(c) {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
return 1
|
return 1
|
||||||
@@ -129,10 +105,10 @@ func utf8ValidNext(p []byte) int {
|
|||||||
accept := acceptRanges[x>>4]
|
accept := acceptRanges[x>>4]
|
||||||
if c := p[1]; c < accept.lo || accept.hi < c {
|
if c := p[1]; c < accept.lo || accept.hi < c {
|
||||||
return 0
|
return 0
|
||||||
} else if size == 2 {
|
} else if size == 2 { //nolint:revive
|
||||||
} else if c := p[2]; c < locb || hicb < c {
|
} else if c := p[2]; c < locb || hicb < c {
|
||||||
return 0
|
return 0
|
||||||
} else if size == 3 {
|
} else if size == 3 { //nolint:revive
|
||||||
} else if c := p[3]; c < locb || hicb < c {
|
} else if c := p[3]; c < locb || hicb < c {
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
@@ -140,47 +116,6 @@ func utf8ValidNext(p []byte) int {
|
|||||||
return size
|
return size
|
||||||
}
|
}
|
||||||
|
|
||||||
var invalidAsciiTable = [256]bool{
|
|
||||||
0x00: true,
|
|
||||||
0x01: true,
|
|
||||||
0x02: true,
|
|
||||||
0x03: true,
|
|
||||||
0x04: true,
|
|
||||||
0x05: true,
|
|
||||||
0x06: true,
|
|
||||||
0x07: true,
|
|
||||||
0x08: true,
|
|
||||||
// 0x09 TAB
|
|
||||||
// 0x0A LF
|
|
||||||
0x0B: true,
|
|
||||||
0x0C: true,
|
|
||||||
// 0x0D CR
|
|
||||||
0x0E: true,
|
|
||||||
0x0F: true,
|
|
||||||
0x10: true,
|
|
||||||
0x11: true,
|
|
||||||
0x12: true,
|
|
||||||
0x13: true,
|
|
||||||
0x14: true,
|
|
||||||
0x15: true,
|
|
||||||
0x16: true,
|
|
||||||
0x17: true,
|
|
||||||
0x18: true,
|
|
||||||
0x19: true,
|
|
||||||
0x1A: true,
|
|
||||||
0x1B: true,
|
|
||||||
0x1C: true,
|
|
||||||
0x1D: true,
|
|
||||||
0x1E: true,
|
|
||||||
0x1F: true,
|
|
||||||
// 0x20 - 0x7E Printable ASCII characters
|
|
||||||
0x7F: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
func invalidAscii(b byte) bool {
|
|
||||||
return invalidAsciiTable[b]
|
|
||||||
}
|
|
||||||
|
|
||||||
// acceptRange gives the range of valid values for the second byte in a UTF-8
|
// acceptRange gives the range of valid values for the second byte in a UTF-8
|
||||||
// sequence.
|
// sequence.
|
||||||
type acceptRange struct {
|
type acceptRange struct {
|
||||||
+10
-11
@@ -1,3 +1,4 @@
|
|||||||
|
// Package cli provides common functions for command-line programs.
|
||||||
package cli
|
package cli
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@@ -6,7 +7,6 @@ import (
|
|||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
@@ -18,27 +18,26 @@ type Program struct {
|
|||||||
Usage string
|
Usage string
|
||||||
Fn ConvertFn
|
Fn ConvertFn
|
||||||
// Inplace allows the command to take more than one file as argument and
|
// Inplace allows the command to take more than one file as argument and
|
||||||
// perform convertion in place on each provided file.
|
// perform conversion in place on each provided file.
|
||||||
Inplace bool
|
Inplace bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Program) Execute() {
|
func (p *Program) Execute() {
|
||||||
flag.Usage = func() { fmt.Fprintf(os.Stderr, p.Usage) }
|
flag.Usage = func() { fmt.Fprint(os.Stderr, p.Usage) }
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
os.Exit(p.main(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
|
os.Exit(p.main(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Program) main(files []string, input io.Reader, output, error io.Writer) int {
|
func (p *Program) main(files []string, input io.Reader, output, stderr io.Writer) int {
|
||||||
err := p.run(files, input, output)
|
err := p.run(files, input, output)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
||||||
var derr *toml.DecodeError
|
var derr *toml.DecodeError
|
||||||
if errors.As(err, &derr) {
|
if errors.As(err, &derr) {
|
||||||
fmt.Fprintln(error, derr.String())
|
_, _ = fmt.Fprintln(stderr, derr.String())
|
||||||
row, col := derr.Position()
|
row, col := derr.Position()
|
||||||
fmt.Fprintln(error, "error occurred at row", row, "column", col)
|
_, _ = fmt.Fprintln(stderr, "error occurred at row", row, "column", col)
|
||||||
} else {
|
} else {
|
||||||
fmt.Fprintln(error, err.Error())
|
_, _ = fmt.Fprintln(stderr, err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
return -1
|
return -1
|
||||||
@@ -55,7 +54,7 @@ func (p *Program) run(files []string, input io.Reader, output io.Writer) error {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer f.Close()
|
defer func() { _ = f.Close() }()
|
||||||
input = f
|
input = f
|
||||||
}
|
}
|
||||||
return p.Fn(input, output)
|
return p.Fn(input, output)
|
||||||
@@ -72,7 +71,7 @@ func (p *Program) runAllFilesInPlace(files []string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p *Program) runFileInPlace(path string) error {
|
func (p *Program) runFileInPlace(path string) error {
|
||||||
in, err := ioutil.ReadFile(path)
|
in, err := os.ReadFile(path) // #nosec G304
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -84,5 +83,5 @@ func (p *Program) runFileInPlace(path string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
return ioutil.WriteFile(path, out.Bytes(), 0600)
|
return os.WriteFile(path, out.Bytes(), 0o600)
|
||||||
}
|
}
|
||||||
|
|||||||
+45
-51
@@ -2,17 +2,15 @@ package cli
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"errors"
|
||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
|
||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func processMain(args []string, input io.Reader, stdout, stderr io.Writer, f ConvertFn) int {
|
func processMain(args []string, input io.Reader, stdout, stderr io.Writer, f ConvertFn) int {
|
||||||
@@ -25,13 +23,13 @@ func TestProcessMainStdin(t *testing.T) {
|
|||||||
stderr := new(bytes.Buffer)
|
stderr := new(bytes.Buffer)
|
||||||
input := strings.NewReader("this is the input")
|
input := strings.NewReader("this is the input")
|
||||||
|
|
||||||
exit := processMain([]string{}, input, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
exit := processMain([]string{}, input, stdout, stderr, func(io.Reader, io.Writer) error {
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
assert.Equal(t, 0, exit)
|
assert.Equal(t, 0, exit)
|
||||||
assert.Empty(t, stdout.String())
|
assert.Zero(t, stdout.String())
|
||||||
assert.Empty(t, stderr.String())
|
assert.Zero(t, stderr.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainStdinErr(t *testing.T) {
|
func TestProcessMainStdinErr(t *testing.T) {
|
||||||
@@ -39,13 +37,13 @@ func TestProcessMainStdinErr(t *testing.T) {
|
|||||||
stderr := new(bytes.Buffer)
|
stderr := new(bytes.Buffer)
|
||||||
input := strings.NewReader("this is the input")
|
input := strings.NewReader("this is the input")
|
||||||
|
|
||||||
exit := processMain([]string{}, input, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
exit := processMain([]string{}, input, stdout, stderr, func(io.Reader, io.Writer) error {
|
||||||
return fmt.Errorf("something bad")
|
return errors.New("something bad")
|
||||||
})
|
})
|
||||||
|
|
||||||
assert.Equal(t, -1, exit)
|
assert.Equal(t, -1, exit)
|
||||||
assert.Empty(t, stdout.String())
|
assert.Zero(t, stdout.String())
|
||||||
assert.NotEmpty(t, stderr.String())
|
assert.NotZero(t, stderr.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainStdinDecodeErr(t *testing.T) {
|
func TestProcessMainStdinDecodeErr(t *testing.T) {
|
||||||
@@ -53,60 +51,58 @@ func TestProcessMainStdinDecodeErr(t *testing.T) {
|
|||||||
stderr := new(bytes.Buffer)
|
stderr := new(bytes.Buffer)
|
||||||
input := strings.NewReader("this is the input")
|
input := strings.NewReader("this is the input")
|
||||||
|
|
||||||
exit := processMain([]string{}, input, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
exit := processMain([]string{}, input, stdout, stderr, func(io.Reader, io.Writer) error {
|
||||||
var v interface{}
|
var v interface{}
|
||||||
return toml.Unmarshal([]byte(`qwe = 001`), &v)
|
return toml.Unmarshal([]byte(`qwe = 001`), &v)
|
||||||
})
|
})
|
||||||
|
|
||||||
assert.Equal(t, -1, exit)
|
assert.Equal(t, -1, exit)
|
||||||
assert.Empty(t, stdout.String())
|
assert.Zero(t, stdout.String())
|
||||||
assert.Contains(t, stderr.String(), "error occurred at")
|
assert.True(t, strings.Contains(stderr.String(), "error occurred at"))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainFileExists(t *testing.T) {
|
func TestProcessMainFileExists(t *testing.T) {
|
||||||
tmpfile, err := ioutil.TempFile("", "example")
|
tmpfile, err := os.CreateTemp(t.TempDir(), "example")
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
defer os.Remove(tmpfile.Name())
|
_, err = tmpfile.WriteString(`some data`)
|
||||||
_, err = tmpfile.Write([]byte(`some data`))
|
assert.NoError(t, err)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, tmpfile.Close())
|
||||||
|
|
||||||
stdout := new(bytes.Buffer)
|
stdout := new(bytes.Buffer)
|
||||||
stderr := new(bytes.Buffer)
|
stderr := new(bytes.Buffer)
|
||||||
|
|
||||||
exit := processMain([]string{tmpfile.Name()}, nil, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
exit := processMain([]string{tmpfile.Name()}, nil, stdout, stderr, func(io.Reader, io.Writer) error {
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
assert.Equal(t, 0, exit)
|
assert.Equal(t, 0, exit)
|
||||||
assert.Empty(t, stdout.String())
|
assert.Zero(t, stdout.String())
|
||||||
assert.Empty(t, stderr.String())
|
assert.Zero(t, stderr.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainFileDoesNotExist(t *testing.T) {
|
func TestProcessMainFileDoesNotExist(t *testing.T) {
|
||||||
stdout := new(bytes.Buffer)
|
stdout := new(bytes.Buffer)
|
||||||
stderr := new(bytes.Buffer)
|
stderr := new(bytes.Buffer)
|
||||||
|
|
||||||
exit := processMain([]string{"/lets/hope/this/does/not/exist"}, nil, stdout, stderr, func(r io.Reader, w io.Writer) error {
|
exit := processMain([]string{"/lets/hope/this/does/not/exist"}, nil, stdout, stderr, func(io.Reader, io.Writer) error {
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
assert.Equal(t, -1, exit)
|
assert.Equal(t, -1, exit)
|
||||||
assert.Empty(t, stdout.String())
|
assert.Zero(t, stdout.String())
|
||||||
assert.NotEmpty(t, stderr.String())
|
assert.NotZero(t, stderr.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainFilesInPlace(t *testing.T) {
|
func TestProcessMainFilesInPlace(t *testing.T) {
|
||||||
dir, err := ioutil.TempDir("", "")
|
dir := t.TempDir()
|
||||||
require.NoError(t, err)
|
|
||||||
defer os.RemoveAll(dir)
|
|
||||||
|
|
||||||
path1 := path.Join(dir, "file1")
|
path1 := path.Join(dir, "file1")
|
||||||
path2 := path.Join(dir, "file2")
|
path2 := path.Join(dir, "file2")
|
||||||
|
|
||||||
err = ioutil.WriteFile(path1, []byte("content 1"), 0600)
|
err := os.WriteFile(path1, []byte("content 1"), 0o600)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
err = ioutil.WriteFile(path2, []byte("content 2"), 0600)
|
err = os.WriteFile(path2, []byte("content 2"), 0o600)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
p := Program{
|
p := Program{
|
||||||
Fn: dummyFileFn,
|
Fn: dummyFileFn,
|
||||||
@@ -115,15 +111,15 @@ func TestProcessMainFilesInPlace(t *testing.T) {
|
|||||||
|
|
||||||
exit := p.main([]string{path1, path2}, os.Stdin, os.Stdout, os.Stderr)
|
exit := p.main([]string{path1, path2}, os.Stdin, os.Stdout, os.Stderr)
|
||||||
|
|
||||||
require.Equal(t, 0, exit)
|
assert.Equal(t, 0, exit)
|
||||||
|
|
||||||
v1, err := ioutil.ReadFile(path1)
|
v1, err := os.ReadFile(path1)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, "1", string(v1))
|
assert.Equal(t, "1", string(v1))
|
||||||
|
|
||||||
v2, err := ioutil.ReadFile(path2)
|
v2, err := os.ReadFile(path2)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, "2", string(v2))
|
assert.Equal(t, "2", string(v2))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainFilesInPlaceErrRead(t *testing.T) {
|
func TestProcessMainFilesInPlaceErrRead(t *testing.T) {
|
||||||
@@ -134,35 +130,33 @@ func TestProcessMainFilesInPlaceErrRead(t *testing.T) {
|
|||||||
|
|
||||||
exit := p.main([]string{"/this/path/is/invalid"}, os.Stdin, os.Stdout, os.Stderr)
|
exit := p.main([]string{"/this/path/is/invalid"}, os.Stdin, os.Stdout, os.Stderr)
|
||||||
|
|
||||||
require.Equal(t, -1, exit)
|
assert.Equal(t, -1, exit)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMainFilesInPlaceFailFn(t *testing.T) {
|
func TestProcessMainFilesInPlaceFailFn(t *testing.T) {
|
||||||
dir, err := ioutil.TempDir("", "")
|
dir := t.TempDir()
|
||||||
require.NoError(t, err)
|
|
||||||
defer os.RemoveAll(dir)
|
|
||||||
|
|
||||||
path1 := path.Join(dir, "file1")
|
path1 := path.Join(dir, "file1")
|
||||||
|
|
||||||
err = ioutil.WriteFile(path1, []byte("content 1"), 0600)
|
err := os.WriteFile(path1, []byte("content 1"), 0o600)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
p := Program{
|
p := Program{
|
||||||
Fn: func(io.Reader, io.Writer) error { return fmt.Errorf("oh no") },
|
Fn: func(io.Reader, io.Writer) error { return errors.New("oh no") },
|
||||||
Inplace: true,
|
Inplace: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
exit := p.main([]string{path1}, os.Stdin, os.Stdout, os.Stderr)
|
exit := p.main([]string{path1}, os.Stdin, os.Stdout, os.Stderr)
|
||||||
|
|
||||||
require.Equal(t, -1, exit)
|
assert.Equal(t, -1, exit)
|
||||||
|
|
||||||
v1, err := ioutil.ReadFile(path1)
|
v1, err := os.ReadFile(path1)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, "content 1", string(v1))
|
assert.Equal(t, "content 1", string(v1))
|
||||||
}
|
}
|
||||||
|
|
||||||
func dummyFileFn(r io.Reader, w io.Writer) error {
|
func dummyFileFn(r io.Reader, w io.Writer) error {
|
||||||
b, err := ioutil.ReadAll(r)
|
b, err := io.ReadAll(r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,65 +0,0 @@
|
|||||||
package danger
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"reflect"
|
|
||||||
"unsafe"
|
|
||||||
)
|
|
||||||
|
|
||||||
const maxInt = uintptr(int(^uint(0) >> 1))
|
|
||||||
|
|
||||||
func SubsliceOffset(data []byte, subslice []byte) int {
|
|
||||||
datap := (*reflect.SliceHeader)(unsafe.Pointer(&data))
|
|
||||||
hlp := (*reflect.SliceHeader)(unsafe.Pointer(&subslice))
|
|
||||||
|
|
||||||
if hlp.Data < datap.Data {
|
|
||||||
panic(fmt.Errorf("subslice address (%d) is before data address (%d)", hlp.Data, datap.Data))
|
|
||||||
}
|
|
||||||
offset := hlp.Data - datap.Data
|
|
||||||
|
|
||||||
if offset > maxInt {
|
|
||||||
panic(fmt.Errorf("slice offset larger than int (%d)", offset))
|
|
||||||
}
|
|
||||||
|
|
||||||
intoffset := int(offset)
|
|
||||||
|
|
||||||
if intoffset > datap.Len {
|
|
||||||
panic(fmt.Errorf("slice offset (%d) is farther than data length (%d)", intoffset, datap.Len))
|
|
||||||
}
|
|
||||||
|
|
||||||
if intoffset+hlp.Len > datap.Len {
|
|
||||||
panic(fmt.Errorf("slice ends (%d+%d) is farther than data length (%d)", intoffset, hlp.Len, datap.Len))
|
|
||||||
}
|
|
||||||
|
|
||||||
return intoffset
|
|
||||||
}
|
|
||||||
|
|
||||||
func BytesRange(start []byte, end []byte) []byte {
|
|
||||||
if start == nil || end == nil {
|
|
||||||
panic("cannot call BytesRange with nil")
|
|
||||||
}
|
|
||||||
startp := (*reflect.SliceHeader)(unsafe.Pointer(&start))
|
|
||||||
endp := (*reflect.SliceHeader)(unsafe.Pointer(&end))
|
|
||||||
|
|
||||||
if startp.Data > endp.Data {
|
|
||||||
panic(fmt.Errorf("start pointer address (%d) is after end pointer address (%d)", startp.Data, endp.Data))
|
|
||||||
}
|
|
||||||
|
|
||||||
l := startp.Len
|
|
||||||
endLen := int(endp.Data-startp.Data) + endp.Len
|
|
||||||
if endLen > l {
|
|
||||||
l = endLen
|
|
||||||
}
|
|
||||||
|
|
||||||
if l > startp.Cap {
|
|
||||||
panic(fmt.Errorf("range length is larger than capacity"))
|
|
||||||
}
|
|
||||||
|
|
||||||
return start[:l]
|
|
||||||
}
|
|
||||||
|
|
||||||
func Stride(ptr unsafe.Pointer, size uintptr, offset int) unsafe.Pointer {
|
|
||||||
// TODO: replace with unsafe.Add when Go 1.17 is released
|
|
||||||
// https://github.com/golang/go/issues/40481
|
|
||||||
return unsafe.Pointer(uintptr(ptr) + uintptr(int(size)*offset))
|
|
||||||
}
|
|
||||||
@@ -1,178 +0,0 @@
|
|||||||
package danger_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
"unsafe"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestSubsliceOffsetValid(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
test func() ([]byte, []byte)
|
|
||||||
offset int
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "simple",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
data := []byte("hello")
|
|
||||||
return data, data[1:]
|
|
||||||
},
|
|
||||||
offset: 1,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
d, s := e.test()
|
|
||||||
offset := danger.SubsliceOffset(d, s)
|
|
||||||
assert.Equal(t, e.offset, offset)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSubsliceOffsetInvalid(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
test func() ([]byte, []byte)
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "unrelated arrays",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
return []byte("one"), []byte("two")
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "slice starts before data",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[5:], full[1:]
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "slice starts after data",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[:3], full[5:]
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "slice ends after data",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[:5], full[3:8]
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
d, s := e.test()
|
|
||||||
require.Panics(t, func() {
|
|
||||||
danger.SubsliceOffset(d, s)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestStride(t *testing.T) {
|
|
||||||
a := []byte{1, 2, 3, 4}
|
|
||||||
x := &a[1]
|
|
||||||
n := (*byte)(danger.Stride(unsafe.Pointer(x), unsafe.Sizeof(byte(0)), 1))
|
|
||||||
require.Equal(t, &a[2], n)
|
|
||||||
n = (*byte)(danger.Stride(unsafe.Pointer(x), unsafe.Sizeof(byte(0)), -1))
|
|
||||||
require.Equal(t, &a[0], n)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBytesRange(t *testing.T) {
|
|
||||||
type fn = func() ([]byte, []byte)
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
test fn
|
|
||||||
expected []byte
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "simple",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[1:3], full[6:8]
|
|
||||||
},
|
|
||||||
expected: []byte("ello wo"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "full",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[0:1], full[len(full)-1:]
|
|
||||||
},
|
|
||||||
expected: []byte("hello world"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "end before start",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[len(full)-1:], full[0:1]
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nils",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
return nil, nil
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nils start",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
return nil, []byte("foo")
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nils end",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
return []byte("foo"), nil
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "start is end",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[1:3], full[1:3]
|
|
||||||
},
|
|
||||||
expected: []byte("el"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "end contained in start",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
full := []byte("hello world")
|
|
||||||
return full[1:7], full[2:4]
|
|
||||||
},
|
|
||||||
expected: []byte("ello w"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "different backing arrays",
|
|
||||||
test: func() ([]byte, []byte) {
|
|
||||||
one := []byte("hello world")
|
|
||||||
two := []byte("hello world")
|
|
||||||
return one, two
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
start, end := e.test()
|
|
||||||
if e.expected == nil {
|
|
||||||
require.Panics(t, func() {
|
|
||||||
danger.BytesRange(start, end)
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
res := danger.BytesRange(start, end)
|
|
||||||
require.Equal(t, e.expected, res)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
package danger
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
"unsafe"
|
|
||||||
)
|
|
||||||
|
|
||||||
// typeID is used as key in encoder and decoder caches to enable using
|
|
||||||
// the optimize runtime.mapaccess2_fast64 function instead of the more
|
|
||||||
// expensive lookup if we were to use reflect.Type as map key.
|
|
||||||
//
|
|
||||||
// typeID holds the pointer to the reflect.Type value, which is unique
|
|
||||||
// in the program.
|
|
||||||
//
|
|
||||||
// https://github.com/segmentio/encoding/blob/master/json/codec.go#L59-L61
|
|
||||||
type TypeID unsafe.Pointer
|
|
||||||
|
|
||||||
func MakeTypeID(t reflect.Type) TypeID {
|
|
||||||
// reflect.Type has the fields:
|
|
||||||
// typ unsafe.Pointer
|
|
||||||
// ptr unsafe.Pointer
|
|
||||||
return TypeID((*[2]unsafe.Pointer)(unsafe.Pointer(&t))[1])
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
package imported_tests
|
package imported_tests //revive:disable:var-naming
|
||||||
|
|
||||||
// Those tests have been imported from v1, but adjust to match the new
|
// Those tests have been imported from v1, but adjust to match the new
|
||||||
// defaults of v2.
|
// defaults of v2.
|
||||||
@@ -9,7 +9,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestDocMarshal(t *testing.T) {
|
func TestDocMarshal(t *testing.T) {
|
||||||
@@ -21,12 +21,12 @@ func TestDocMarshal(t *testing.T) {
|
|||||||
Subdocs testDocSubs `toml:"subdoc"`
|
Subdocs testDocSubs `toml:"subdoc"`
|
||||||
Basics testDocBasics `toml:"basic"`
|
Basics testDocBasics `toml:"basic"`
|
||||||
SubDocList []testSubDoc `toml:"subdoclist"`
|
SubDocList []testSubDoc `toml:"subdoclist"`
|
||||||
err int `toml:"shouldntBeHere"`
|
err int `toml:"shouldntBeHere"` //nolint:unused
|
||||||
unexported int `toml:"shouldntBeHere"`
|
unexported int `toml:"shouldntBeHere"`
|
||||||
Unexported2 int `toml:"-"`
|
Unexported2 int `toml:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var docData = testDoc{
|
docData := testDoc{
|
||||||
Title: "TOML Marshal Testing",
|
Title: "TOML Marshal Testing",
|
||||||
unexported: 0,
|
unexported: 0,
|
||||||
Unexported2: 0,
|
Unexported2: 0,
|
||||||
@@ -107,13 +107,13 @@ name = 'List.Second'
|
|||||||
`
|
`
|
||||||
|
|
||||||
result, err := toml.Marshal(docData)
|
result, err := toml.Marshal(docData)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, marshalTestToml, string(result))
|
assert.Equal(t, marshalTestToml, string(result))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBasicMarshalQuotedKey(t *testing.T) {
|
func TestBasicMarshalQuotedKey(t *testing.T) {
|
||||||
result, err := toml.Marshal(quotedKeyMarshalTestData)
|
result, err := toml.Marshal(quotedKeyMarshalTestData)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
expected := `'Z.string-àéù' = 'Hello'
|
expected := `'Z.string-àéù' = 'Hello'
|
||||||
'Yfloat-𝟘' = 3.5
|
'Yfloat-𝟘' = 3.5
|
||||||
@@ -128,8 +128,7 @@ String2 = 'Two'
|
|||||||
String2 = 'Three'
|
String2 = 'Three'
|
||||||
`
|
`
|
||||||
|
|
||||||
require.Equal(t, string(expected), string(result))
|
assert.Equal(t, expected, string(result))
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestEmptyMarshal(t *testing.T) {
|
func TestEmptyMarshal(t *testing.T) {
|
||||||
@@ -153,7 +152,7 @@ func TestEmptyMarshal(t *testing.T) {
|
|||||||
Map: map[string]string{},
|
Map: map[string]string{},
|
||||||
}
|
}
|
||||||
result, err := toml.Marshal(doc)
|
result, err := toml.Marshal(doc)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
expected := `title = 'Placeholder'
|
expected := `title = 'Placeholder'
|
||||||
bool = false
|
bool = false
|
||||||
@@ -164,7 +163,7 @@ stringlist = []
|
|||||||
[map]
|
[map]
|
||||||
`
|
`
|
||||||
|
|
||||||
require.Equal(t, string(expected), string(result))
|
assert.Equal(t, expected, string(result))
|
||||||
}
|
}
|
||||||
|
|
||||||
type textMarshaler struct {
|
type textMarshaler struct {
|
||||||
@@ -187,13 +186,13 @@ func TestTextMarshaler(t *testing.T) {
|
|||||||
t.Run("at root", func(t *testing.T) {
|
t.Run("at root", func(t *testing.T) {
|
||||||
_, err := toml.Marshal(m)
|
_, err := toml.Marshal(m)
|
||||||
// in v2 we do not allow TextMarshaler at root
|
// in v2 we do not allow TextMarshaler at root
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("leaf", func(t *testing.T) {
|
t.Run("leaf", func(t *testing.T) {
|
||||||
res, err := toml.Marshal(wrap{m})
|
res, err := toml.Marshal(wrap{m})
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
require.Equal(t, "TM = 'Sally Fields'\n", string(res))
|
assert.Equal(t, "TM = 'Sally Fields'\n", string(res))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
package imported_tests
|
package imported_tests //revive:disable:var-naming
|
||||||
|
|
||||||
// Those tests were imported directly from go-toml v1
|
// Those tests were imported directly from go-toml v1
|
||||||
// https://raw.githubusercontent.com/pelletier/go-toml/a2e52561804c6cd9392ebf0048ca64fe4af67a43/marshal_test.go
|
// https://raw.githubusercontent.com/pelletier/go-toml/a2e52561804c6cd9392ebf0048ca64fe4af67a43/marshal_test.go
|
||||||
@@ -16,8 +16,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type basicMarshalTestStruct struct {
|
type basicMarshalTestStruct struct {
|
||||||
@@ -123,7 +122,7 @@ func TestInterface(t *testing.T) {
|
|||||||
var config Conf
|
var config Conf
|
||||||
config.Inter = &NestedStruct{}
|
config.Inter = &NestedStruct{}
|
||||||
err := toml.Unmarshal(doc, &config)
|
err := toml.Unmarshal(doc, &config)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
expected := Conf{
|
expected := Conf{
|
||||||
Name: "rui",
|
Name: "rui",
|
||||||
Age: 18,
|
Age: 18,
|
||||||
@@ -139,8 +138,8 @@ func TestInterface(t *testing.T) {
|
|||||||
func TestBasicUnmarshal(t *testing.T) {
|
func TestBasicUnmarshal(t *testing.T) {
|
||||||
result := basicMarshalTestStruct{}
|
result := basicMarshalTestStruct{}
|
||||||
err := toml.Unmarshal(basicTestToml, &result)
|
err := toml.Unmarshal(basicTestToml, &result)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, basicTestData, result)
|
assert.Equal(t, basicTestData, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
type quotedKeyMarshalTestStruct struct {
|
type quotedKeyMarshalTestStruct struct {
|
||||||
@@ -150,9 +149,6 @@ type quotedKeyMarshalTestStruct struct {
|
|||||||
SubList []basicMarshalTestSubStruct `toml:"W.sublist-𝟘"`
|
SubList []basicMarshalTestSubStruct `toml:"W.sublist-𝟘"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
|
||||||
//
|
|
||||||
//nolint:deadcode,unused,varcheck
|
|
||||||
var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
|
var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
|
||||||
String: "Hello",
|
String: "Hello",
|
||||||
Float: 3.5,
|
Float: 3.5,
|
||||||
@@ -162,7 +158,7 @@ var quotedKeyMarshalTestData = quotedKeyMarshalTestStruct{
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5
|
var quotedKeyMarshalTestToml = []byte(`"Yfloat-𝟘" = 3.5
|
||||||
"Z.string-àéù" = "Hello"
|
"Z.string-àéù" = "Hello"
|
||||||
|
|
||||||
@@ -184,11 +180,12 @@ type testDoc struct {
|
|||||||
Subdocs testDocSubs `toml:"subdoc"`
|
Subdocs testDocSubs `toml:"subdoc"`
|
||||||
Basics testDocBasics `toml:"basic"`
|
Basics testDocBasics `toml:"basic"`
|
||||||
SubDocList []testSubDoc `toml:"subdoclist"`
|
SubDocList []testSubDoc `toml:"subdoclist"`
|
||||||
err int `toml:"shouldntBeHere"` // nolint:structcheck,unused
|
err int `toml:"shouldntBeHere"` //nolint:unused
|
||||||
unexported int `toml:"shouldntBeHere"`
|
unexported int `toml:"shouldntBeHere"`
|
||||||
Unexported2 int `toml:"-"`
|
Unexported2 int `toml:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//nolint:unused
|
||||||
type testMapDoc struct {
|
type testMapDoc struct {
|
||||||
Title string `toml:"title"`
|
Title string `toml:"title"`
|
||||||
BasicMap map[string]string `toml:"basic_map"`
|
BasicMap map[string]string `toml:"basic_map"`
|
||||||
@@ -275,7 +272,7 @@ var docData = testDoc{
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var mapTestDoc = testMapDoc{
|
var mapTestDoc = testMapDoc{
|
||||||
Title: "TOML Marshal Testing",
|
Title: "TOML Marshal Testing",
|
||||||
BasicMap: map[string]string{
|
BasicMap: map[string]string{
|
||||||
@@ -300,7 +297,7 @@ func TestDocUnmarshal(t *testing.T) {
|
|||||||
result := testDoc{}
|
result := testDoc{}
|
||||||
err := toml.Unmarshal(marshalTestToml, &result)
|
err := toml.Unmarshal(marshalTestToml, &result)
|
||||||
expected := docData
|
expected := docData
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, expected, result)
|
assert.Equal(t, expected, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -340,7 +337,7 @@ shouldntBeHere = 2
|
|||||||
func TestUnexportedUnmarshal(t *testing.T) {
|
func TestUnexportedUnmarshal(t *testing.T) {
|
||||||
result := unexportedMarshalTestStruct{}
|
result := unexportedMarshalTestStruct{}
|
||||||
err := toml.Unmarshal(unexportedTestToml, &result)
|
err := toml.Unmarshal(unexportedTestToml, &result)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, unexportedTestData, result)
|
assert.Equal(t, unexportedTestData, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -456,7 +453,7 @@ func TestEmptytomlUnmarshal(t *testing.T) {
|
|||||||
|
|
||||||
result := emptyMarshalTestStruct{}
|
result := emptyMarshalTestStruct{}
|
||||||
err := toml.Unmarshal(emptyTestToml, &result)
|
err := toml.Unmarshal(emptyTestToml, &result)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, emptyTestData, result)
|
assert.Equal(t, emptyTestData, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -504,7 +501,7 @@ Str = "Hello"
|
|||||||
func TestPointerUnmarshal(t *testing.T) {
|
func TestPointerUnmarshal(t *testing.T) {
|
||||||
result := pointerMarshalTestStruct{}
|
result := pointerMarshalTestStruct{}
|
||||||
err := toml.Unmarshal(pointerTestToml, &result)
|
err := toml.Unmarshal(pointerTestToml, &result)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, pointerTestData, result)
|
assert.Equal(t, pointerTestData, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -540,35 +537,39 @@ StringPtr = [["Three", "Four"]]
|
|||||||
func TestNestedUnmarshal(t *testing.T) {
|
func TestNestedUnmarshal(t *testing.T) {
|
||||||
result := nestedMarshalTestStruct{}
|
result := nestedMarshalTestStruct{}
|
||||||
err := toml.Unmarshal(nestedTestToml, &result)
|
err := toml.Unmarshal(nestedTestToml, &result)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, nestedTestData, result)
|
assert.Equal(t, nestedTestData, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//nolint:unused
|
||||||
type customMarshalerParent struct {
|
type customMarshalerParent struct {
|
||||||
Self customMarshaler `toml:"me"`
|
Self customMarshaler `toml:"me"`
|
||||||
Friends []customMarshaler `toml:"friends"`
|
Friends []customMarshaler `toml:"friends"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//nolint:unused
|
||||||
type customMarshaler struct {
|
type customMarshaler struct {
|
||||||
FirstName string
|
FirstName string
|
||||||
LastName string
|
LastName string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//nolint:unused
|
||||||
func (c customMarshaler) MarshalTOML() ([]byte, error) {
|
func (c customMarshaler) MarshalTOML() ([]byte, error) {
|
||||||
fullName := fmt.Sprintf("%s %s", c.FirstName, c.LastName)
|
fullName := fmt.Sprintf("%s %s", c.FirstName, c.LastName)
|
||||||
return []byte(fullName), nil
|
return []byte(fullName), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
//nolint:unused
|
||||||
var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
|
var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var customMarshalerToml = []byte(`Sally Fields`)
|
var customMarshalerToml = []byte(`Sally Fields`)
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var nestedCustomMarshalerData = customMarshalerParent{
|
var nestedCustomMarshalerData = customMarshalerParent{
|
||||||
Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
|
Self: customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
|
||||||
Friends: []customMarshaler{customMarshalerData},
|
Friends: []customMarshaler{customMarshalerData},
|
||||||
@@ -576,7 +577,7 @@ var nestedCustomMarshalerData = customMarshalerParent{
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
|
var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
|
||||||
me = "Maiku Suteda"
|
me = "Maiku Suteda"
|
||||||
`)
|
`)
|
||||||
@@ -591,7 +592,7 @@ func (x *IntOrString) MarshalTOML() ([]byte, error) {
|
|||||||
s := *(*string)(x)
|
s := *(*string)(x)
|
||||||
_, err := strconv.Atoi(s)
|
_, err := strconv.Atoi(s)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return []byte(fmt.Sprintf(`"%s"`, s)), nil
|
return []byte(fmt.Sprintf(`"%s"`, s)), nil //nolint:nilerr
|
||||||
}
|
}
|
||||||
return []byte(s), nil
|
return []byte(s), nil
|
||||||
}
|
}
|
||||||
@@ -663,7 +664,7 @@ func (m *textPointerMarshaler) MarshalText() ([]byte, error) {
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var commentTestToml = []byte(`
|
var commentTestToml = []byte(`
|
||||||
# it's a comment on type
|
# it's a comment on type
|
||||||
[postgres]
|
[postgres]
|
||||||
@@ -688,6 +689,7 @@ var commentTestToml = []byte(`
|
|||||||
My = "Baar"
|
My = "Baar"
|
||||||
`)
|
`)
|
||||||
|
|
||||||
|
//nolint:unused
|
||||||
type mapsTestStruct struct {
|
type mapsTestStruct struct {
|
||||||
Simple map[string]string
|
Simple map[string]string
|
||||||
Paths map[string]string
|
Paths map[string]string
|
||||||
@@ -701,7 +703,7 @@ type mapsTestStruct struct {
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var mapsTestData = mapsTestStruct{
|
var mapsTestData = mapsTestStruct{
|
||||||
Simple: map[string]string{
|
Simple: map[string]string{
|
||||||
"one plus one": "two",
|
"one plus one": "two",
|
||||||
@@ -725,7 +727,7 @@ var mapsTestData = mapsTestStruct{
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var mapsTestToml = []byte(`
|
var mapsTestToml = []byte(`
|
||||||
[Other]
|
[Other]
|
||||||
"testing" = 3.9999
|
"testing" = 3.9999
|
||||||
@@ -748,7 +750,7 @@ var mapsTestToml = []byte(`
|
|||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
// TODO: Remove nolint once type is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused
|
//nolint:unused
|
||||||
type structArrayNoTag struct {
|
type structArrayNoTag struct {
|
||||||
A struct {
|
A struct {
|
||||||
B []int64
|
B []int64
|
||||||
@@ -758,7 +760,7 @@ type structArrayNoTag struct {
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var customTagTestToml = []byte(`
|
var customTagTestToml = []byte(`
|
||||||
[postgres]
|
[postgres]
|
||||||
password = "bvalue"
|
password = "bvalue"
|
||||||
@@ -773,7 +775,7 @@ var customTagTestToml = []byte(`
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var customCommentTagTestToml = []byte(`
|
var customCommentTagTestToml = []byte(`
|
||||||
# db connection
|
# db connection
|
||||||
[postgres]
|
[postgres]
|
||||||
@@ -787,7 +789,7 @@ var customCommentTagTestToml = []byte(`
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var customCommentedTagTestToml = []byte(`
|
var customCommentedTagTestToml = []byte(`
|
||||||
[postgres]
|
[postgres]
|
||||||
# password = "bvalue"
|
# password = "bvalue"
|
||||||
@@ -834,7 +836,7 @@ func TestUnmarshalTabInStringAndQuotedKey(t *testing.T) {
|
|||||||
t.Run(test.desc, func(t *testing.T) {
|
t.Run(test.desc, func(t *testing.T) {
|
||||||
result := Test{}
|
result := Test{}
|
||||||
err := toml.Unmarshal(test.input, &result)
|
err := toml.Unmarshal(test.input, &result)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, test.expected, result)
|
assert.Equal(t, test.expected, result)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -842,7 +844,7 @@ func TestUnmarshalTabInStringAndQuotedKey(t *testing.T) {
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var customMultilineTagTestToml = []byte(`int_slice = [
|
var customMultilineTagTestToml = []byte(`int_slice = [
|
||||||
1,
|
1,
|
||||||
2,
|
2,
|
||||||
@@ -852,7 +854,7 @@ var customMultilineTagTestToml = []byte(`int_slice = [
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var testDocBasicToml = []byte(`
|
var testDocBasicToml = []byte(`
|
||||||
[document]
|
[document]
|
||||||
bool_val = true
|
bool_val = true
|
||||||
@@ -863,16 +865,12 @@ var testDocBasicToml = []byte(`
|
|||||||
uint_val = 5001
|
uint_val = 5001
|
||||||
`)
|
`)
|
||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
//nolint:unused
|
||||||
//
|
|
||||||
//nolint:deadcode
|
|
||||||
type testDocCustomTag struct {
|
type testDocCustomTag struct {
|
||||||
Doc testDocBasicsCustomTag `file:"document"`
|
Doc testDocBasicsCustomTag `file:"document"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
//nolint:unused
|
||||||
//
|
|
||||||
//nolint:deadcode
|
|
||||||
type testDocBasicsCustomTag struct {
|
type testDocBasicsCustomTag struct {
|
||||||
Bool bool `file:"bool_val"`
|
Bool bool `file:"bool_val"`
|
||||||
Date time.Time `file:"date_val"`
|
Date time.Time `file:"date_val"`
|
||||||
@@ -883,9 +881,7 @@ type testDocBasicsCustomTag struct {
|
|||||||
unexported int `file:"shouldntBeHere"`
|
unexported int `file:"shouldntBeHere"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
//nolint:unused
|
||||||
//
|
|
||||||
//nolint:deadcode,varcheck
|
|
||||||
var testDocCustomTagData = testDocCustomTag{
|
var testDocCustomTagData = testDocCustomTag{
|
||||||
Doc: testDocBasicsCustomTag{
|
Doc: testDocBasicsCustomTag{
|
||||||
Bool: true,
|
Bool: true,
|
||||||
@@ -963,7 +959,7 @@ func TestUnmarshalTypeTableHeader(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
expected := map[header]map[string]int{
|
expected := map[header]map[string]int{
|
||||||
"test": map[string]int{"a": 1},
|
"test": {"a": 1},
|
||||||
}
|
}
|
||||||
|
|
||||||
if !reflect.DeepEqual(result, expected) {
|
if !reflect.DeepEqual(result, expected) {
|
||||||
@@ -988,13 +984,13 @@ func TestUnmarshalInvalidPointerKind(t *testing.T) {
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused
|
//nolint:unused
|
||||||
type testDuration struct {
|
type testDuration struct {
|
||||||
Nanosec time.Duration `toml:"nanosec"`
|
Nanosec time.Duration `toml:"nanosec"`
|
||||||
Microsec1 time.Duration `toml:"microsec1"`
|
Microsec1 time.Duration `toml:"microsec1"`
|
||||||
Microsec2 *time.Duration `toml:"microsec2"`
|
Microsec2 *time.Duration `toml:"microsec2"`
|
||||||
Millisec time.Duration `toml:"millisec"`
|
Millisec time.Duration `toml:"millisec"`
|
||||||
Sec time.Duration `toml:"sec"`
|
Sec time.Duration `toml:"sec"` //nolint:staticcheck
|
||||||
Min time.Duration `toml:"min"`
|
Min time.Duration `toml:"min"`
|
||||||
Hour time.Duration `toml:"hour"`
|
Hour time.Duration `toml:"hour"`
|
||||||
Mixed time.Duration `toml:"mixed"`
|
Mixed time.Duration `toml:"mixed"`
|
||||||
@@ -1003,7 +999,7 @@ type testDuration struct {
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var testDurationToml = []byte(`
|
var testDurationToml = []byte(`
|
||||||
nanosec = "1ns"
|
nanosec = "1ns"
|
||||||
microsec1 = "1us"
|
microsec1 = "1us"
|
||||||
@@ -1018,7 +1014,7 @@ a_string = "15s"
|
|||||||
|
|
||||||
// TODO: Remove nolint once var is used by a test
|
// TODO: Remove nolint once var is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused,varcheck
|
//nolint:unused
|
||||||
var testDurationToml2 = []byte(`a_string = "15s"
|
var testDurationToml2 = []byte(`a_string = "15s"
|
||||||
hour = "1h0m0s"
|
hour = "1h0m0s"
|
||||||
microsec1 = "1µs"
|
microsec1 = "1µs"
|
||||||
@@ -1032,15 +1028,14 @@ sec = "1s"
|
|||||||
|
|
||||||
// TODO: Remove nolint once type is used by a test
|
// TODO: Remove nolint once type is used by a test
|
||||||
//
|
//
|
||||||
//nolint:deadcode,unused
|
//nolint:unused
|
||||||
type testBadDuration struct {
|
type testBadDuration struct {
|
||||||
Val time.Duration `toml:"val"`
|
Val time.Duration `toml:"val"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: add back camelCase test
|
// TODO: add back camelCase test
|
||||||
var testCamelCaseKeyToml = []byte(`fooBar = 10`) //nolint:unused
|
var testCamelCaseKeyToml = []byte(`fooBar = 10`)
|
||||||
|
|
||||||
//nolint:unused
|
|
||||||
func TestUnmarshalCamelCaseKey(t *testing.T) {
|
func TestUnmarshalCamelCaseKey(t *testing.T) {
|
||||||
t.Skipf("don't know if it is a good idea to automatically convert like that yet")
|
t.Skipf("don't know if it is a good idea to automatically convert like that yet")
|
||||||
var x struct {
|
var x struct {
|
||||||
@@ -1059,7 +1054,7 @@ func TestUnmarshalCamelCaseKey(t *testing.T) {
|
|||||||
|
|
||||||
func TestUnmarshalNegativeUint(t *testing.T) {
|
func TestUnmarshalNegativeUint(t *testing.T) {
|
||||||
t.Skipf("not sure if we this should always error")
|
t.Skipf("not sure if we this should always error")
|
||||||
type check struct{ U uint } // nolint:unused
|
type check struct{ U uint }
|
||||||
err := toml.Unmarshal([]byte("U = -1"), &check{})
|
err := toml.Unmarshal([]byte("U = -1"), &check{})
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
}
|
}
|
||||||
@@ -1085,16 +1080,12 @@ func TestUnmarshalCheckConversionFloatInt(t *testing.T) {
|
|||||||
desc: "int",
|
desc: "int",
|
||||||
input: `I = 1e300`,
|
input: `I = 1e300`,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
desc: "float",
|
|
||||||
input: `F = 9223372036854775806`,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range testCases {
|
for _, test := range testCases {
|
||||||
t.Run(test.desc, func(t *testing.T) {
|
t.Run(test.desc, func(t *testing.T) {
|
||||||
err := toml.Unmarshal([]byte(test.input), &conversionCheck{})
|
err := toml.Unmarshal([]byte(test.input), &conversionCheck{})
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1129,7 +1120,7 @@ func TestUnmarshalOverflow(t *testing.T) {
|
|||||||
for _, test := range testCases {
|
for _, test := range testCases {
|
||||||
t.Run(test.desc, func(t *testing.T) {
|
t.Run(test.desc, func(t *testing.T) {
|
||||||
err := toml.Unmarshal([]byte(test.input), &overflow{})
|
err := toml.Unmarshal([]byte(test.input), &overflow{})
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1540,7 +1531,7 @@ func TestUnmarshalLocalDateTime(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for i, example := range examples {
|
for i, example := range examples {
|
||||||
doc := fmt.Sprintf(`date = %s`, example.in)
|
doc := "date = " + example.in
|
||||||
|
|
||||||
t.Run(fmt.Sprintf("ToLocalDateTime_%d_%s", i, example.name), func(t *testing.T) {
|
t.Run(fmt.Sprintf("ToLocalDateTime_%d_%s", i, example.name), func(t *testing.T) {
|
||||||
type dateStruct struct {
|
type dateStruct struct {
|
||||||
@@ -1626,7 +1617,7 @@ func TestUnmarshalLocalTime(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for i, example := range examples {
|
for i, example := range examples {
|
||||||
doc := fmt.Sprintf(`Time = %s`, example.in)
|
doc := "Time = " + example.in
|
||||||
|
|
||||||
t.Run(fmt.Sprintf("ToLocalTime_%d_%s", i, example.name), func(t *testing.T) {
|
t.Run(fmt.Sprintf("ToLocalTime_%d_%s", i, example.name), func(t *testing.T) {
|
||||||
type dateStruct struct {
|
type dateStruct struct {
|
||||||
@@ -1749,7 +1740,7 @@ Age = 23
|
|||||||
}
|
}
|
||||||
actual := OuterStruct{}
|
actual := OuterStruct{}
|
||||||
err := toml.Unmarshal(doc, &actual)
|
err := toml.Unmarshal(doc, &actual)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, expected, actual)
|
assert.Equal(t, expected, actual)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1834,7 +1825,7 @@ InnerField = "After4"
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := toml.Unmarshal(doc, &actual)
|
err := toml.Unmarshal(doc, &actual)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, expected, actual)
|
assert.Equal(t, expected, actual)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1883,7 +1874,7 @@ type arrayTooSmallStruct struct {
|
|||||||
func TestUnmarshalSlice(t *testing.T) {
|
func TestUnmarshalSlice(t *testing.T) {
|
||||||
var actual sliceStruct
|
var actual sliceStruct
|
||||||
err := toml.Unmarshal(sliceTomlDemo, &actual)
|
err := toml.Unmarshal(sliceTomlDemo, &actual)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
expected := sliceStruct{
|
expected := sliceStruct{
|
||||||
Slice: []string{"Howdy", "Hey There"},
|
Slice: []string{"Howdy", "Hey There"},
|
||||||
SlicePtr: &[]string{"Howdy", "Hey There"},
|
SlicePtr: &[]string{"Howdy", "Hey There"},
|
||||||
@@ -1911,19 +1902,12 @@ func TestUnmarshalMixedTypeSlice(t *testing.T) {
|
|||||||
ArrayField []interface{}
|
ArrayField []interface{}
|
||||||
}
|
}
|
||||||
|
|
||||||
//doc := []byte(`ArrayField = [3.14,100,true,"hello world",{Field = "inner1"},[{Field = "inner2"},{Field = "inner3"}]]
|
|
||||||
//`)
|
|
||||||
|
|
||||||
doc := []byte(`ArrayField = [{Field = "inner1"},[{Field = "inner2"},{Field = "inner3"}]]
|
doc := []byte(`ArrayField = [{Field = "inner1"},[{Field = "inner2"},{Field = "inner3"}]]
|
||||||
`)
|
`)
|
||||||
|
|
||||||
actual := TestStruct{}
|
actual := TestStruct{}
|
||||||
expected := TestStruct{
|
expected := TestStruct{
|
||||||
ArrayField: []interface{}{
|
ArrayField: []interface{}{
|
||||||
//3.14,
|
|
||||||
//int64(100),
|
|
||||||
//true,
|
|
||||||
//"hello world",
|
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"Field": "inner1",
|
"Field": "inner1",
|
||||||
},
|
},
|
||||||
@@ -1934,7 +1918,7 @@ func TestUnmarshalMixedTypeSlice(t *testing.T) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
err := toml.Unmarshal(doc, &actual)
|
err := toml.Unmarshal(doc, &actual)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, expected, actual)
|
assert.Equal(t, expected, actual)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1943,7 +1927,7 @@ func TestUnmarshalArray(t *testing.T) {
|
|||||||
|
|
||||||
var actual arrayStruct
|
var actual arrayStruct
|
||||||
err = toml.Unmarshal(sliceTomlDemo, &actual)
|
err = toml.Unmarshal(sliceTomlDemo, &actual)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
expected := arrayStruct{
|
expected := arrayStruct{
|
||||||
Slice: [4]string{"Howdy", "Hey There"},
|
Slice: [4]string{"Howdy", "Hey There"},
|
||||||
@@ -2002,11 +1986,17 @@ func TestDecoderStrict(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := strictDecoder(input).Decode(&doc)
|
err := strictDecoder(input).Decode(&doc)
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
require.IsType(t, &toml.StrictMissingError{}, err)
|
|
||||||
se := err.(*toml.StrictMissingError)
|
|
||||||
|
|
||||||
keys := []toml.Key{}
|
assert.Equal(t,
|
||||||
|
reflect.TypeOf(err), reflect.TypeOf(&toml.StrictMissingError{}),
|
||||||
|
"Expected a *toml.StrictMissingError, got: %v", reflect.TypeOf(err),
|
||||||
|
)
|
||||||
|
|
||||||
|
var se *toml.StrictMissingError
|
||||||
|
assert.True(t, errors.As(err, &se))
|
||||||
|
|
||||||
|
keys := make([]toml.Key, 0, len(se.Errors))
|
||||||
|
|
||||||
for _, e := range se.Errors {
|
for _, e := range se.Errors {
|
||||||
keys = append(keys, e.Key())
|
keys = append(keys, e.Key())
|
||||||
@@ -2019,13 +2009,14 @@ func TestDecoderStrict(t *testing.T) {
|
|||||||
{"undecoded", "array"},
|
{"undecoded", "array"},
|
||||||
}
|
}
|
||||||
|
|
||||||
require.Equal(t, expectedKeys, keys)
|
assert.Equal(t, expectedKeys, keys)
|
||||||
|
|
||||||
err = decoder(input).Decode(&doc)
|
err = decoder(input).Decode(&doc)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
var m map[string]interface{}
|
var m map[string]interface{}
|
||||||
err = decoder(input).Decode(&m)
|
err = decoder(input).Decode(&m)
|
||||||
|
assert.NoError(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDecoderStrictValid(t *testing.T) {
|
func TestDecoderStrictValid(t *testing.T) {
|
||||||
@@ -2040,7 +2031,7 @@ func TestDecoderStrictValid(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := strictDecoder(input).Decode(&doc)
|
err := strictDecoder(input).Decode(&doc)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
type docUnmarshalTOML struct {
|
type docUnmarshalTOML struct {
|
||||||
@@ -2062,19 +2053,6 @@ func (d *docUnmarshalTOML) UnmarshalTOML(i interface{}) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDecoderStrictCustomUnmarshal(t *testing.T) {
|
|
||||||
t.Skip()
|
|
||||||
//input := `key = "ok"`
|
|
||||||
//var doc docUnmarshalTOML
|
|
||||||
//err := NewDecoder(bytes.NewReader([]byte(input))).Strict(true).Decode(&doc)
|
|
||||||
//if err != nil {
|
|
||||||
// t.Fatal("unexpected error:", err)
|
|
||||||
//}
|
|
||||||
//if doc.Decoded.Key != "ok" {
|
|
||||||
// t.Errorf("Bad unmarshal: expected ok, got %v", doc.Decoded.Key)
|
|
||||||
//}
|
|
||||||
}
|
|
||||||
|
|
||||||
type parent struct {
|
type parent struct {
|
||||||
Doc docUnmarshalTOML
|
Doc docUnmarshalTOML
|
||||||
DocPointer *docUnmarshalTOML
|
DocPointer *docUnmarshalTOML
|
||||||
@@ -2091,7 +2069,7 @@ func TestCustomUnmarshal(t *testing.T) {
|
|||||||
|
|
||||||
var d parent
|
var d parent
|
||||||
err := toml.Unmarshal([]byte(input), &d)
|
err := toml.Unmarshal([]byte(input), &d)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, "ok1", d.Doc.Decoded.Key)
|
assert.Equal(t, "ok1", d.Doc.Decoded.Key)
|
||||||
assert.Equal(t, "ok2", d.DocPointer.Decoded.Key)
|
assert.Equal(t, "ok2", d.DocPointer.Decoded.Key)
|
||||||
}
|
}
|
||||||
@@ -2157,7 +2135,7 @@ Int = 21
|
|||||||
Float = 2.0
|
Float = 2.0
|
||||||
`
|
`
|
||||||
err := toml.Unmarshal([]byte(input), &doc)
|
err := toml.Unmarshal([]byte(input), &doc)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, 12, doc.UnixTime.Value)
|
assert.Equal(t, 12, doc.UnixTime.Value)
|
||||||
assert.Equal(t, 42, doc.Version.Value)
|
assert.Equal(t, 42, doc.Version.Value)
|
||||||
assert.Equal(t, 1, doc.Bool.Value)
|
assert.Equal(t, 1, doc.Bool.Value)
|
||||||
@@ -2227,7 +2205,10 @@ func TestUnmarshalEmptyInterface(t *testing.T) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
require.IsType(t, map[string]interface{}{}, v)
|
assert.Equal(t,
|
||||||
|
reflect.TypeOf(map[string]interface{}{}), reflect.TypeOf(v),
|
||||||
|
"Expected map[string]interface{}{} type, got: %v", reflect.TypeOf(v),
|
||||||
|
)
|
||||||
|
|
||||||
x := v.(map[string]interface{})
|
x := v.(map[string]interface{})
|
||||||
assert.Equal(t, "pelletier", x["User"])
|
assert.Equal(t, "pelletier", x["User"])
|
||||||
@@ -2275,12 +2256,12 @@ type Custom struct {
|
|||||||
v string
|
v string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Custom) UnmarshalTOML(v interface{}) error {
|
func (c *Custom) UnmarshalTOML(interface{}) error {
|
||||||
c.v = "called"
|
c.v = "called"
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGithubIssue431(t *testing.T) {
|
func TestGitHubIssue431(t *testing.T) {
|
||||||
doc := `key = "value"`
|
doc := `key = "value"`
|
||||||
var c Config
|
var c Config
|
||||||
if err := toml.Unmarshal([]byte(doc), &c); err != nil {
|
if err := toml.Unmarshal([]byte(doc), &c); err != nil {
|
||||||
@@ -2300,14 +2281,14 @@ type durationString struct {
|
|||||||
time.Duration
|
time.Duration
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *durationString) UnmarshalTOML(v interface{}) error {
|
func (d *durationString) UnmarshalTOML(interface{}) error {
|
||||||
d.Duration = 10 * time.Second
|
d.Duration = 10 * time.Second
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type config437Error struct{}
|
type config437Error struct{}
|
||||||
|
|
||||||
func (e *config437Error) UnmarshalTOML(v interface{}) error {
|
func (e *config437Error) UnmarshalTOML(interface{}) error {
|
||||||
return errors.New("expected")
|
return errors.New("expected")
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2318,7 +2299,7 @@ type config437 struct {
|
|||||||
} `toml:"HTTP"`
|
} `toml:"HTTP"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGithubIssue437(t *testing.T) {
|
func TestGitHubIssue437(t *testing.T) {
|
||||||
t.Skipf("unmarshalTOML not implemented")
|
t.Skipf("unmarshalTOML not implemented")
|
||||||
src := `
|
src := `
|
||||||
[HTTP]
|
[HTTP]
|
||||||
|
|||||||
@@ -3,17 +3,18 @@ package testsuite
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
// addTag adds JSON tags to a data structure as expected by toml-test.
|
// addTag adds JSON tags to a data structure as expected by toml-test.
|
||||||
func addTag(key string, tomlData interface{}) interface{} {
|
func addTag(tomlData interface{}) interface{} {
|
||||||
// Switch on the data type.
|
// Switch on the data type.
|
||||||
switch orig := tomlData.(type) {
|
switch orig := tomlData.(type) {
|
||||||
default:
|
default:
|
||||||
//return map[string]interface{}{}
|
// return map[string]interface{}{}
|
||||||
panic(fmt.Sprintf("Unknown type: %T", tomlData))
|
panic(fmt.Sprintf("Unknown type: %T", tomlData))
|
||||||
|
|
||||||
// A table: we don't need to add any tags, just recurse for every table
|
// A table: we don't need to add any tags, just recurse for every table
|
||||||
@@ -21,7 +22,7 @@ func addTag(key string, tomlData interface{}) interface{} {
|
|||||||
case map[string]interface{}:
|
case map[string]interface{}:
|
||||||
typed := make(map[string]interface{}, len(orig))
|
typed := make(map[string]interface{}, len(orig))
|
||||||
for k, v := range orig {
|
for k, v := range orig {
|
||||||
typed[k] = addTag(k, v)
|
typed[k] = addTag(v)
|
||||||
}
|
}
|
||||||
return typed
|
return typed
|
||||||
|
|
||||||
@@ -30,13 +31,13 @@ func addTag(key string, tomlData interface{}) interface{} {
|
|||||||
case []map[string]interface{}:
|
case []map[string]interface{}:
|
||||||
typed := make([]map[string]interface{}, len(orig))
|
typed := make([]map[string]interface{}, len(orig))
|
||||||
for i, v := range orig {
|
for i, v := range orig {
|
||||||
typed[i] = addTag("", v).(map[string]interface{})
|
typed[i] = addTag(v).(map[string]interface{})
|
||||||
}
|
}
|
||||||
return typed
|
return typed
|
||||||
case []interface{}:
|
case []interface{}:
|
||||||
typed := make([]interface{}, len(orig))
|
typed := make([]interface{}, len(orig))
|
||||||
for i, v := range orig {
|
for i, v := range orig {
|
||||||
typed[i] = addTag("", v)
|
typed[i] = addTag(v)
|
||||||
}
|
}
|
||||||
return typed
|
return typed
|
||||||
|
|
||||||
@@ -52,11 +53,11 @@ func addTag(key string, tomlData interface{}) interface{} {
|
|||||||
|
|
||||||
// Tag primitive values: bool, string, int, and float64.
|
// Tag primitive values: bool, string, int, and float64.
|
||||||
case bool:
|
case bool:
|
||||||
return tag("bool", fmt.Sprintf("%v", orig))
|
return tag("bool", strconv.FormatBool(orig))
|
||||||
case string:
|
case string:
|
||||||
return tag("string", orig)
|
return tag("string", orig)
|
||||||
case int64:
|
case int64:
|
||||||
return tag("integer", fmt.Sprintf("%d", orig))
|
return tag("integer", strconv.FormatInt(orig, 10))
|
||||||
case float64:
|
case float64:
|
||||||
// Special case for nan since NaN == NaN is false.
|
// Special case for nan since NaN == NaN is false.
|
||||||
if math.IsNaN(orig) {
|
if math.IsNaN(orig) {
|
||||||
|
|||||||
+10
-10
@@ -9,6 +9,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func CmpJSON(t *testing.T, key string, want, have interface{}) {
|
func CmpJSON(t *testing.T, key string, want, have interface{}) {
|
||||||
|
t.Helper()
|
||||||
switch w := want.(type) {
|
switch w := want.(type) {
|
||||||
case map[string]interface{}:
|
case map[string]interface{}:
|
||||||
cmpJSONMaps(t, key, w, have)
|
cmpJSONMaps(t, key, w, have)
|
||||||
@@ -22,6 +23,7 @@ func CmpJSON(t *testing.T, key string, want, have interface{}) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func cmpJSONMaps(t *testing.T, key string, want map[string]interface{}, have interface{}) {
|
func cmpJSONMaps(t *testing.T, key string, want map[string]interface{}, have interface{}) {
|
||||||
|
t.Helper()
|
||||||
haveMap, ok := have.(map[string]interface{})
|
haveMap, ok := have.(map[string]interface{})
|
||||||
if !ok {
|
if !ok {
|
||||||
mismatch(t, key, "table", want, haveMap)
|
mismatch(t, key, "table", want, haveMap)
|
||||||
@@ -61,6 +63,7 @@ func cmpJSONMaps(t *testing.T, key string, want map[string]interface{}, have int
|
|||||||
}
|
}
|
||||||
|
|
||||||
func cmpJSONArrays(t *testing.T, key string, want, have interface{}) {
|
func cmpJSONArrays(t *testing.T, key string, want, have interface{}) {
|
||||||
|
t.Helper()
|
||||||
wantSlice, ok := want.([]interface{})
|
wantSlice, ok := want.([]interface{})
|
||||||
if !ok {
|
if !ok {
|
||||||
panic(fmt.Sprintf("'value' should be a JSON array when 'type=array', but it is a %T", want))
|
panic(fmt.Sprintf("'value' should be a JSON array when 'type=array', but it is a %T", want))
|
||||||
@@ -83,6 +86,7 @@ func cmpJSONArrays(t *testing.T, key string, want, have interface{}) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func cmpJSONValues(t *testing.T, key string, want, have map[string]interface{}) {
|
func cmpJSONValues(t *testing.T, key string, want, have map[string]interface{}) {
|
||||||
|
t.Helper()
|
||||||
wantType, ok := want["type"].(string)
|
wantType, ok := want["type"].(string)
|
||||||
if !ok {
|
if !ok {
|
||||||
panic(fmt.Sprintf("'type' should be a string, but it is a %T", want["type"]))
|
panic(fmt.Sprintf("'type' should be a string, but it is a %T", want["type"]))
|
||||||
@@ -126,6 +130,7 @@ func cmpJSONValues(t *testing.T, key string, want, have map[string]interface{})
|
|||||||
}
|
}
|
||||||
|
|
||||||
func cmpAsStrings(t *testing.T, key string, want, have string) {
|
func cmpAsStrings(t *testing.T, key string, want, have string) {
|
||||||
|
t.Helper()
|
||||||
if want != have {
|
if want != have {
|
||||||
t.Fatalf("Values for key '%s' don't match:\n"+
|
t.Fatalf("Values for key '%s' don't match:\n"+
|
||||||
" Expected: %s\n"+
|
" Expected: %s\n"+
|
||||||
@@ -135,6 +140,7 @@ func cmpAsStrings(t *testing.T, key string, want, have string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func cmpFloats(t *testing.T, key string, want, have string) {
|
func cmpFloats(t *testing.T, key string, want, have string) {
|
||||||
|
t.Helper()
|
||||||
// Special case for NaN, since NaN != NaN.
|
// Special case for NaN, since NaN != NaN.
|
||||||
if strings.HasSuffix(want, "nan") || strings.HasSuffix(have, "nan") {
|
if strings.HasSuffix(want, "nan") || strings.HasSuffix(have, "nan") {
|
||||||
if want != have {
|
if want != have {
|
||||||
@@ -177,6 +183,7 @@ var layouts = map[string]string{
|
|||||||
}
|
}
|
||||||
|
|
||||||
func cmpAsDatetimes(t *testing.T, key string, kind, want, have string) {
|
func cmpAsDatetimes(t *testing.T, key string, kind, want, have string) {
|
||||||
|
t.Helper()
|
||||||
layout, ok := layouts[kind]
|
layout, ok := layouts[kind]
|
||||||
if !ok {
|
if !ok {
|
||||||
panic("should never happen")
|
panic("should never happen")
|
||||||
@@ -200,15 +207,6 @@ func cmpAsDatetimes(t *testing.T, key string, kind, want, have string) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func cmpAsDatetimesLocal(t *testing.T, key string, want, have string) {
|
|
||||||
if datetimeRepl.Replace(want) != datetimeRepl.Replace(have) {
|
|
||||||
t.Fatalf("Values for key '%s' don't match:\n"+
|
|
||||||
" Expected: %v\n"+
|
|
||||||
" Your encoder: %v",
|
|
||||||
key, want, have)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func kjoin(old, key string) string {
|
func kjoin(old, key string) string {
|
||||||
if len(old) == 0 {
|
if len(old) == 0 {
|
||||||
return key
|
return key
|
||||||
@@ -230,6 +228,7 @@ func isValue(m map[string]interface{}) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func mismatch(t *testing.T, key string, wantType string, want, have interface{}) {
|
func mismatch(t *testing.T, key string, wantType string, want, have interface{}) {
|
||||||
|
t.Helper()
|
||||||
t.Fatalf("Key '%s' is not an %s but %[4]T:\n"+
|
t.Fatalf("Key '%s' is not an %s but %[4]T:\n"+
|
||||||
" Expected: %#[3]v\n"+
|
" Expected: %#[3]v\n"+
|
||||||
" Your encoder: %#[4]v",
|
" Your encoder: %#[4]v",
|
||||||
@@ -237,8 +236,9 @@ func mismatch(t *testing.T, key string, wantType string, want, have interface{})
|
|||||||
}
|
}
|
||||||
|
|
||||||
func valMismatch(t *testing.T, key string, wantType, haveType string, want, have interface{}) {
|
func valMismatch(t *testing.T, key string, wantType, haveType string, want, have interface{}) {
|
||||||
|
t.Helper()
|
||||||
t.Fatalf("Key '%s' is not an %s but %s:\n"+
|
t.Fatalf("Key '%s' is not an %s but %s:\n"+
|
||||||
" Expected: %#[3]v\n"+
|
" Expected: %#[3]v\n"+
|
||||||
" Your encoder: %#[4]v",
|
" Your encoder: %#[4]v",
|
||||||
key, wantType, want, have)
|
key, wantType, haveType, want, have)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
package testsuite
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
type parser struct{}
|
|
||||||
|
|
||||||
func (p parser) Decode(input string) (output string, outputIsError bool, retErr error) {
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
switch rr := r.(type) {
|
|
||||||
case error:
|
|
||||||
retErr = rr
|
|
||||||
default:
|
|
||||||
retErr = fmt.Errorf("%s", rr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
var v interface{}
|
|
||||||
|
|
||||||
if err := toml.Unmarshal([]byte(input), &v); err != nil {
|
|
||||||
return err.Error(), true, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
j, err := json.MarshalIndent(addTag("", v), "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return "", false, retErr
|
|
||||||
}
|
|
||||||
|
|
||||||
return string(j), false, retErr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p parser) Encode(input string) (output string, outputIsError bool, retErr error) {
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
switch rr := r.(type) {
|
|
||||||
case error:
|
|
||||||
retErr = rr
|
|
||||||
default:
|
|
||||||
retErr = fmt.Errorf("%s", rr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
var tmp interface{}
|
|
||||||
err := json.Unmarshal([]byte(input), &tmp)
|
|
||||||
if err != nil {
|
|
||||||
return "", false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
rm, err := rmTag(tmp)
|
|
||||||
if err != nil {
|
|
||||||
return err.Error(), true, retErr
|
|
||||||
}
|
|
||||||
|
|
||||||
buf := new(bytes.Buffer)
|
|
||||||
err = toml.NewEncoder(buf).Encode(rm)
|
|
||||||
if err != nil {
|
|
||||||
return err.Error(), true, retErr
|
|
||||||
}
|
|
||||||
|
|
||||||
return buf.String(), false, retErr
|
|
||||||
}
|
|
||||||
+27
-21
@@ -4,10 +4,12 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Remove JSON tags to a data structure as returned by toml-test.
|
// Remove JSON tags to a data structure as returned by toml-test.
|
||||||
func rmTag(typedJson interface{}) (interface{}, error) {
|
func rmTag(typedJSON interface{}) (interface{}, error) {
|
||||||
// Check if key is in the table m.
|
// Check if key is in the table m.
|
||||||
in := func(key string, m map[string]interface{}) bool {
|
in := func(key string, m map[string]interface{}) bool {
|
||||||
_, ok := m[key]
|
_, ok := m[key]
|
||||||
@@ -15,8 +17,7 @@ func rmTag(typedJson interface{}) (interface{}, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Switch on the data type.
|
// Switch on the data type.
|
||||||
switch v := typedJson.(type) {
|
switch v := typedJSON.(type) {
|
||||||
|
|
||||||
// Object: this can either be a TOML table or a primitive with tags.
|
// Object: this can either be a TOML table or a primitive with tags.
|
||||||
case map[string]interface{}:
|
case map[string]interface{}:
|
||||||
// This value represents a primitive: remove the tags and return just
|
// This value represents a primitive: remove the tags and return just
|
||||||
@@ -40,7 +41,7 @@ func rmTag(typedJson interface{}) (interface{}, error) {
|
|||||||
}
|
}
|
||||||
return m, nil
|
return m, nil
|
||||||
|
|
||||||
// Array: remove tags from all itenm.
|
// Array: remove tags from all items.
|
||||||
case []interface{}:
|
case []interface{}:
|
||||||
a := make([]interface{}, len(v))
|
a := make([]interface{}, len(v))
|
||||||
for i := range v {
|
for i := range v {
|
||||||
@@ -54,7 +55,7 @@ func rmTag(typedJson interface{}) (interface{}, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// The top level must be an object or array.
|
// The top level must be an object or array.
|
||||||
return nil, fmt.Errorf("unrecognized JSON format '%T'", typedJson)
|
return nil, fmt.Errorf("unrecognized JSON format '%T'", typedJSON)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return a primitive: read the "type" and convert the "value" to that.
|
// Return a primitive: read the "type" and convert the "value" to that.
|
||||||
@@ -76,14 +77,31 @@ func untag(typed map[string]interface{}) (interface{}, error) {
|
|||||||
return nil, fmt.Errorf("untag: %w", err)
|
return nil, fmt.Errorf("untag: %w", err)
|
||||||
}
|
}
|
||||||
return f, nil
|
return f, nil
|
||||||
|
|
||||||
|
// toml.LocalDate{Year:2020, Month:12, Day:12}
|
||||||
case "datetime":
|
case "datetime":
|
||||||
return parseTime(v, "2006-01-02T15:04:05.999999999Z07:00", false)
|
return time.Parse("2006-01-02T15:04:05.999999999Z07:00", v)
|
||||||
case "datetime-local":
|
case "datetime-local":
|
||||||
return parseTime(v, "2006-01-02T15:04:05.999999999", true)
|
var t toml.LocalDateTime
|
||||||
|
err := t.UnmarshalText([]byte(v))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("untag: %w", err)
|
||||||
|
}
|
||||||
|
return t, nil
|
||||||
case "date-local":
|
case "date-local":
|
||||||
return parseTime(v, "2006-01-02", true)
|
var t toml.LocalDate
|
||||||
|
err := t.UnmarshalText([]byte(v))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("untag: %w", err)
|
||||||
|
}
|
||||||
|
return t, nil
|
||||||
case "time-local":
|
case "time-local":
|
||||||
return parseTime(v, "15:04:05.999999999", true)
|
var t toml.LocalTime
|
||||||
|
err := t.UnmarshalText([]byte(v))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("untag: %w", err)
|
||||||
|
}
|
||||||
|
return t, nil
|
||||||
case "bool":
|
case "bool":
|
||||||
switch v {
|
switch v {
|
||||||
case "true":
|
case "true":
|
||||||
@@ -96,15 +114,3 @@ func untag(typed map[string]interface{}) (interface{}, error) {
|
|||||||
|
|
||||||
return nil, fmt.Errorf("untag: unrecognized tag type %q", t)
|
return nil, fmt.Errorf("untag: unrecognized tag type %q", t)
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseTime(v, format string, local bool) (t time.Time, err error) {
|
|
||||||
if local {
|
|
||||||
t, err = time.ParseInLocation(format, v, time.Local)
|
|
||||||
} else {
|
|
||||||
t, err = time.Parse(format, v)
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return time.Time{}, fmt.Errorf("Could not parse %q as a datetime: %w", v, err)
|
|
||||||
}
|
|
||||||
return t, nil
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import (
|
|||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Marshal is a helpfer function for calling toml.Marshal
|
// Marshal is a helper function for calling toml.Marshal
|
||||||
//
|
//
|
||||||
// Only needed to avoid package import loops.
|
// Only needed to avoid package import loops.
|
||||||
func Marshal(v interface{}) ([]byte, error) {
|
func Marshal(v interface{}) ([]byte, error) {
|
||||||
@@ -27,7 +27,7 @@ func Unmarshal(data []byte, v interface{}) error {
|
|||||||
// ValueToTaggedJSON takes a data structure and returns the tagged JSON
|
// ValueToTaggedJSON takes a data structure and returns the tagged JSON
|
||||||
// representation.
|
// representation.
|
||||||
func ValueToTaggedJSON(doc interface{}) ([]byte, error) {
|
func ValueToTaggedJSON(doc interface{}) ([]byte, error) {
|
||||||
return json.MarshalIndent(addTag("", doc), "", " ")
|
return json.MarshalIndent(addTag(doc), "", " ")
|
||||||
}
|
}
|
||||||
|
|
||||||
// DecodeStdin is a helper function for the toml-test binary interface. TOML input
|
// DecodeStdin is a helper function for the toml-test binary interface. TOML input
|
||||||
@@ -37,14 +37,32 @@ func DecodeStdin() error {
|
|||||||
var decoded map[string]interface{}
|
var decoded map[string]interface{}
|
||||||
|
|
||||||
if err := toml.NewDecoder(os.Stdin).Decode(&decoded); err != nil {
|
if err := toml.NewDecoder(os.Stdin).Decode(&decoded); err != nil {
|
||||||
return fmt.Errorf("Error decoding TOML: %s", err)
|
return fmt.Errorf("error decoding TOML: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
j := json.NewEncoder(os.Stdout)
|
j := json.NewEncoder(os.Stdout)
|
||||||
j.SetIndent("", " ")
|
j.SetIndent("", " ")
|
||||||
if err := j.Encode(addTag("", decoded)); err != nil {
|
if err := j.Encode(addTag(decoded)); err != nil {
|
||||||
return fmt.Errorf("Error encoding JSON: %s", err)
|
return fmt.Errorf("error encoding JSON: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// EncodeStdin is a helper function for the toml-test binary interface. Tagged
|
||||||
|
// JSON is read from STDIN and a resulting TOML representation is written to
|
||||||
|
// STDOUT.
|
||||||
|
func EncodeStdin() error {
|
||||||
|
var j interface{}
|
||||||
|
err := json.NewDecoder(os.Stdin).Decode(&j)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
rm, err := rmTag(j)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("removing tags: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return toml.NewEncoder(os.Stdout).Encode(rm)
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,8 +1,6 @@
|
|||||||
package tracker
|
package tracker
|
||||||
|
|
||||||
import (
|
import "github.com/pelletier/go-toml/v2/unstable"
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
)
|
|
||||||
|
|
||||||
// KeyTracker is a tracker that keeps track of the current Key as the AST is
|
// KeyTracker is a tracker that keeps track of the current Key as the AST is
|
||||||
// walked.
|
// walked.
|
||||||
@@ -11,19 +9,19 @@ type KeyTracker struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// UpdateTable sets the state of the tracker with the AST table node.
|
// UpdateTable sets the state of the tracker with the AST table node.
|
||||||
func (t *KeyTracker) UpdateTable(node *ast.Node) {
|
func (t *KeyTracker) UpdateTable(node *unstable.Node) {
|
||||||
t.reset()
|
t.reset()
|
||||||
t.Push(node)
|
t.Push(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
// UpdateArrayTable sets the state of the tracker with the AST array table node.
|
// UpdateArrayTable sets the state of the tracker with the AST array table node.
|
||||||
func (t *KeyTracker) UpdateArrayTable(node *ast.Node) {
|
func (t *KeyTracker) UpdateArrayTable(node *unstable.Node) {
|
||||||
t.reset()
|
t.reset()
|
||||||
t.Push(node)
|
t.Push(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Push the given key on the stack.
|
// Push the given key on the stack.
|
||||||
func (t *KeyTracker) Push(node *ast.Node) {
|
func (t *KeyTracker) Push(node *unstable.Node) {
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
t.k = append(t.k, string(it.Node().Data))
|
t.k = append(t.k, string(it.Node().Data))
|
||||||
@@ -31,14 +29,14 @@ func (t *KeyTracker) Push(node *ast.Node) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Pop key from stack.
|
// Pop key from stack.
|
||||||
func (t *KeyTracker) Pop(node *ast.Node) {
|
func (t *KeyTracker) Pop(node *unstable.Node) {
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
t.k = t.k[:len(t.k)-1]
|
t.k = t.k[:len(t.k)-1]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Key returns the current key
|
// Key returns the current key.
|
||||||
func (t *KeyTracker) Key() []string {
|
func (t *KeyTracker) Key() []string {
|
||||||
k := make([]string, len(t.k))
|
k := make([]string, len(t.k))
|
||||||
copy(k, t.k)
|
copy(k, t.k)
|
||||||
|
|||||||
+52
-49
@@ -5,7 +5,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
type keyKind uint8
|
type keyKind uint8
|
||||||
@@ -57,7 +57,11 @@ type SeenTracker struct {
|
|||||||
currentIdx int
|
currentIdx int
|
||||||
}
|
}
|
||||||
|
|
||||||
var pool sync.Pool
|
var pool = sync.Pool{
|
||||||
|
New: func() interface{} {
|
||||||
|
return &SeenTracker{}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) reset() {
|
func (s *SeenTracker) reset() {
|
||||||
// Always contains a root element at index 0.
|
// Always contains a root element at index 0.
|
||||||
@@ -149,24 +153,25 @@ func (s *SeenTracker) setExplicitFlag(parentIdx int) {
|
|||||||
|
|
||||||
// CheckExpression takes a top-level node and checks that it does not contain
|
// CheckExpression takes a top-level node and checks that it does not contain
|
||||||
// keys that have been seen in previous calls, and validates that types are
|
// keys that have been seen in previous calls, and validates that types are
|
||||||
// consistent.
|
// consistent. It returns true if it is the first time this node's key is seen.
|
||||||
func (s *SeenTracker) CheckExpression(node *ast.Node) error {
|
// Useful to clear array tables on first use.
|
||||||
|
func (s *SeenTracker) CheckExpression(node *unstable.Node) (bool, error) {
|
||||||
if s.entries == nil {
|
if s.entries == nil {
|
||||||
s.reset()
|
s.reset()
|
||||||
}
|
}
|
||||||
switch node.Kind {
|
switch node.Kind {
|
||||||
case ast.KeyValue:
|
case unstable.KeyValue:
|
||||||
return s.checkKeyValue(node)
|
return s.checkKeyValue(node)
|
||||||
case ast.Table:
|
case unstable.Table:
|
||||||
return s.checkTable(node)
|
return s.checkTable(node)
|
||||||
case ast.ArrayTable:
|
case unstable.ArrayTable:
|
||||||
return s.checkArrayTable(node)
|
return s.checkArrayTable(node)
|
||||||
default:
|
default:
|
||||||
panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
|
panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkTable(node *ast.Node) error {
|
func (s *SeenTracker) checkTable(node *unstable.Node) (bool, error) {
|
||||||
if s.currentIdx >= 0 {
|
if s.currentIdx >= 0 {
|
||||||
s.setExplicitFlag(s.currentIdx)
|
s.setExplicitFlag(s.currentIdx)
|
||||||
}
|
}
|
||||||
@@ -192,7 +197,7 @@ func (s *SeenTracker) checkTable(node *ast.Node) error {
|
|||||||
} else {
|
} else {
|
||||||
entry := s.entries[idx]
|
entry := s.entries[idx]
|
||||||
if entry.kind == valueKind {
|
if entry.kind == valueKind {
|
||||||
return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
return false, fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
parentIdx = idx
|
parentIdx = idx
|
||||||
@@ -201,25 +206,27 @@ func (s *SeenTracker) checkTable(node *ast.Node) error {
|
|||||||
k := it.Node().Data
|
k := it.Node().Data
|
||||||
idx := s.find(parentIdx, k)
|
idx := s.find(parentIdx, k)
|
||||||
|
|
||||||
|
first := false
|
||||||
if idx >= 0 {
|
if idx >= 0 {
|
||||||
kind := s.entries[idx].kind
|
kind := s.entries[idx].kind
|
||||||
if kind != tableKind {
|
if kind != tableKind {
|
||||||
return fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind)
|
return false, fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind)
|
||||||
}
|
}
|
||||||
if s.entries[idx].explicit {
|
if s.entries[idx].explicit {
|
||||||
return fmt.Errorf("toml: table %s already exists", string(k))
|
return false, fmt.Errorf("toml: table %s already exists", string(k))
|
||||||
}
|
}
|
||||||
s.entries[idx].explicit = true
|
s.entries[idx].explicit = true
|
||||||
} else {
|
} else {
|
||||||
idx = s.create(parentIdx, k, tableKind, true, false)
|
idx = s.create(parentIdx, k, tableKind, true, false)
|
||||||
|
first = true
|
||||||
}
|
}
|
||||||
|
|
||||||
s.currentIdx = idx
|
s.currentIdx = idx
|
||||||
|
|
||||||
return nil
|
return first, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
func (s *SeenTracker) checkArrayTable(node *unstable.Node) (bool, error) {
|
||||||
if s.currentIdx >= 0 {
|
if s.currentIdx >= 0 {
|
||||||
s.setExplicitFlag(s.currentIdx)
|
s.setExplicitFlag(s.currentIdx)
|
||||||
}
|
}
|
||||||
@@ -242,7 +249,7 @@ func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
|||||||
} else {
|
} else {
|
||||||
entry := s.entries[idx]
|
entry := s.entries[idx]
|
||||||
if entry.kind == valueKind {
|
if entry.kind == valueKind {
|
||||||
return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
return false, fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -252,22 +259,23 @@ func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
|||||||
k := it.Node().Data
|
k := it.Node().Data
|
||||||
idx := s.find(parentIdx, k)
|
idx := s.find(parentIdx, k)
|
||||||
|
|
||||||
if idx >= 0 {
|
firstTime := idx < 0
|
||||||
|
if firstTime {
|
||||||
|
idx = s.create(parentIdx, k, arrayTableKind, true, false)
|
||||||
|
} else {
|
||||||
kind := s.entries[idx].kind
|
kind := s.entries[idx].kind
|
||||||
if kind != arrayTableKind {
|
if kind != arrayTableKind {
|
||||||
return fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", kind, string(k))
|
return false, fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", kind, string(k))
|
||||||
}
|
}
|
||||||
s.clear(idx)
|
s.clear(idx)
|
||||||
} else {
|
|
||||||
idx = s.create(parentIdx, k, arrayTableKind, true, false)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
s.currentIdx = idx
|
s.currentIdx = idx
|
||||||
|
|
||||||
return nil
|
return firstTime, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkKeyValue(node *ast.Node) error {
|
func (s *SeenTracker) checkKeyValue(node *unstable.Node) (bool, error) {
|
||||||
parentIdx := s.currentIdx
|
parentIdx := s.currentIdx
|
||||||
it := node.Key()
|
it := node.Key()
|
||||||
|
|
||||||
@@ -280,12 +288,13 @@ func (s *SeenTracker) checkKeyValue(node *ast.Node) error {
|
|||||||
idx = s.create(parentIdx, k, tableKind, false, true)
|
idx = s.create(parentIdx, k, tableKind, false, true)
|
||||||
} else {
|
} else {
|
||||||
entry := s.entries[idx]
|
entry := s.entries[idx]
|
||||||
if it.IsLast() {
|
switch {
|
||||||
return fmt.Errorf("toml: key %s is already defined", string(k))
|
case it.IsLast():
|
||||||
} else if entry.kind != tableKind {
|
return false, fmt.Errorf("toml: key %s is already defined", string(k))
|
||||||
return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
case entry.kind != tableKind:
|
||||||
} else if entry.explicit {
|
return false, fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
|
||||||
return fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k))
|
case entry.explicit:
|
||||||
|
return false, fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -297,51 +306,45 @@ func (s *SeenTracker) checkKeyValue(node *ast.Node) error {
|
|||||||
value := node.Value()
|
value := node.Value()
|
||||||
|
|
||||||
switch value.Kind {
|
switch value.Kind {
|
||||||
case ast.InlineTable:
|
case unstable.InlineTable:
|
||||||
return s.checkInlineTable(value)
|
return s.checkInlineTable(value)
|
||||||
case ast.Array:
|
case unstable.Array:
|
||||||
return s.checkArray(value)
|
return s.checkArray(value)
|
||||||
|
default:
|
||||||
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkArray(node *ast.Node) error {
|
func (s *SeenTracker) checkArray(node *unstable.Node) (first bool, err error) {
|
||||||
it := node.Children()
|
it := node.Children()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
n := it.Node()
|
n := it.Node()
|
||||||
switch n.Kind {
|
switch n.Kind { //nolint:exhaustive
|
||||||
case ast.InlineTable:
|
case unstable.InlineTable:
|
||||||
err := s.checkInlineTable(n)
|
first, err = s.checkInlineTable(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return false, err
|
||||||
}
|
}
|
||||||
case ast.Array:
|
case unstable.Array:
|
||||||
err := s.checkArray(n)
|
first, err = s.checkArray(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return false, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return first, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *SeenTracker) checkInlineTable(node *ast.Node) error {
|
func (s *SeenTracker) checkInlineTable(node *unstable.Node) (first bool, err error) {
|
||||||
if pool.New == nil {
|
|
||||||
pool.New = func() interface{} {
|
|
||||||
return &SeenTracker{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
s = pool.Get().(*SeenTracker)
|
s = pool.Get().(*SeenTracker)
|
||||||
s.reset()
|
s.reset()
|
||||||
|
|
||||||
it := node.Children()
|
it := node.Children()
|
||||||
for it.Next() {
|
for it.Next() {
|
||||||
n := it.Node()
|
n := it.Node()
|
||||||
err := s.checkKeyValue(n)
|
first, err = s.checkKeyValue(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return false, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -352,5 +355,5 @@ func (s *SeenTracker) checkInlineTable(node *ast.Node) error {
|
|||||||
// redefinition of its keys: check* functions cannot walk into
|
// redefinition of its keys: check* functions cannot walk into
|
||||||
// a value.
|
// a value.
|
||||||
pool.Put(s)
|
pool.Put(s)
|
||||||
return nil
|
return first, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
package tracker
|
package tracker
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
"unsafe"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestEntrySize(t *testing.T) {
|
func TestEntrySize(t *testing.T) {
|
||||||
@@ -12,5 +12,10 @@ func TestEntrySize(t *testing.T) {
|
|||||||
// performance of unmarshaling documents. Should only be increased with care
|
// performance of unmarshaling documents. Should only be increased with care
|
||||||
// and a very good reason.
|
// and a very good reason.
|
||||||
maxExpectedEntrySize := 48
|
maxExpectedEntrySize := 48
|
||||||
require.LessOrEqual(t, int(unsafe.Sizeof(entry{})), maxExpectedEntrySize)
|
entrySize := int(reflect.TypeOf(entry{}).Size())
|
||||||
|
assert.True(t,
|
||||||
|
entrySize <= maxExpectedEntrySize,
|
||||||
|
"Expected entry to be less than or equal to %d, got: %d",
|
||||||
|
maxExpectedEntrySize, entrySize,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1 +1,2 @@
|
|||||||
|
// Package tracker provides functions for keeping track of AST nodes.
|
||||||
package tracker
|
package tracker
|
||||||
|
|||||||
+5
-3
@@ -4,6 +4,8 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
// LocalDate represents a calendar day in no specific timezone.
|
// LocalDate represents a calendar day in no specific timezone.
|
||||||
@@ -43,7 +45,7 @@ func (d *LocalDate) UnmarshalText(b []byte) error {
|
|||||||
type LocalTime struct {
|
type LocalTime struct {
|
||||||
Hour int // Hour of the day: [0; 24[
|
Hour int // Hour of the day: [0; 24[
|
||||||
Minute int // Minute of the hour: [0; 60[
|
Minute int // Minute of the hour: [0; 60[
|
||||||
Second int // Second of the minute: [0; 60[
|
Second int // Second of the minute: [0; 59]
|
||||||
Nanosecond int // Nanoseconds within the second: [0, 1000000000[
|
Nanosecond int // Nanoseconds within the second: [0, 1000000000[
|
||||||
Precision int // Number of digits to display for Nanosecond.
|
Precision int // Number of digits to display for Nanosecond.
|
||||||
}
|
}
|
||||||
@@ -75,7 +77,7 @@ func (d LocalTime) MarshalText() ([]byte, error) {
|
|||||||
func (d *LocalTime) UnmarshalText(b []byte) error {
|
func (d *LocalTime) UnmarshalText(b []byte) error {
|
||||||
res, left, err := parseLocalTime(b)
|
res, left, err := parseLocalTime(b)
|
||||||
if err == nil && len(left) != 0 {
|
if err == nil && len(left) != 0 {
|
||||||
err = newDecodeError(left, "extra characters")
|
err = unstable.NewParserError(left, "extra characters")
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -109,7 +111,7 @@ func (d LocalDateTime) MarshalText() ([]byte, error) {
|
|||||||
func (d *LocalDateTime) UnmarshalText(data []byte) error {
|
func (d *LocalDateTime) UnmarshalText(data []byte) error {
|
||||||
res, left, err := parseLocalDateTime(data)
|
res, left, err := parseLocalDateTime(data)
|
||||||
if err == nil && len(left) != 0 {
|
if err == nil && len(left) != 0 {
|
||||||
err = newDecodeError(left, "extra characters")
|
err = unstable.NewParserError(left, "extra characters")
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|||||||
+28
-28
@@ -5,73 +5,73 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestLocalDate_AsTime(t *testing.T) {
|
func TestLocalDate_AsTime(t *testing.T) {
|
||||||
d := toml.LocalDate{2021, 6, 8}
|
d := toml.LocalDate{2021, 6, 8}
|
||||||
cast := d.AsTime(time.UTC)
|
cast := d.AsTime(time.UTC)
|
||||||
require.Equal(t, time.Date(2021, time.June, 8, 0, 0, 0, 0, time.UTC), cast)
|
assert.Equal(t, time.Date(2021, time.June, 8, 0, 0, 0, 0, time.UTC), cast)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDate_String(t *testing.T) {
|
func TestLocalDate_String(t *testing.T) {
|
||||||
d := toml.LocalDate{2021, 6, 8}
|
d := toml.LocalDate{2021, 6, 8}
|
||||||
require.Equal(t, "2021-06-08", d.String())
|
assert.Equal(t, "2021-06-08", d.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDate_MarshalText(t *testing.T) {
|
func TestLocalDate_MarshalText(t *testing.T) {
|
||||||
d := toml.LocalDate{2021, 6, 8}
|
d := toml.LocalDate{2021, 6, 8}
|
||||||
b, err := d.MarshalText()
|
b, err := d.MarshalText()
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, []byte("2021-06-08"), b)
|
assert.Equal(t, []byte("2021-06-08"), b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDate_UnmarshalMarshalText(t *testing.T) {
|
func TestLocalDate_UnmarshalMarshalText(t *testing.T) {
|
||||||
d := toml.LocalDate{}
|
d := toml.LocalDate{}
|
||||||
err := d.UnmarshalText([]byte("2021-06-08"))
|
err := d.UnmarshalText([]byte("2021-06-08"))
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, toml.LocalDate{2021, 6, 8}, d)
|
assert.Equal(t, toml.LocalDate{2021, 6, 8}, d)
|
||||||
|
|
||||||
err = d.UnmarshalText([]byte("what"))
|
err = d.UnmarshalText([]byte("what"))
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalTime_String(t *testing.T) {
|
func TestLocalTime_String(t *testing.T) {
|
||||||
d := toml.LocalTime{20, 12, 1, 2, 9}
|
d := toml.LocalTime{20, 12, 1, 2, 9}
|
||||||
require.Equal(t, "20:12:01.000000002", d.String())
|
assert.Equal(t, "20:12:01.000000002", d.String())
|
||||||
d = toml.LocalTime{20, 12, 1, 0, 0}
|
d = toml.LocalTime{20, 12, 1, 0, 0}
|
||||||
require.Equal(t, "20:12:01", d.String())
|
assert.Equal(t, "20:12:01", d.String())
|
||||||
d = toml.LocalTime{20, 12, 1, 0, 9}
|
d = toml.LocalTime{20, 12, 1, 0, 9}
|
||||||
require.Equal(t, "20:12:01.000000000", d.String())
|
assert.Equal(t, "20:12:01.000000000", d.String())
|
||||||
d = toml.LocalTime{20, 12, 1, 100, 0}
|
d = toml.LocalTime{20, 12, 1, 100, 0}
|
||||||
require.Equal(t, "20:12:01.0000001", d.String())
|
assert.Equal(t, "20:12:01.0000001", d.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalTime_MarshalText(t *testing.T) {
|
func TestLocalTime_MarshalText(t *testing.T) {
|
||||||
d := toml.LocalTime{20, 12, 1, 2, 9}
|
d := toml.LocalTime{20, 12, 1, 2, 9}
|
||||||
b, err := d.MarshalText()
|
b, err := d.MarshalText()
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, []byte("20:12:01.000000002"), b)
|
assert.Equal(t, []byte("20:12:01.000000002"), b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalTime_UnmarshalMarshalText(t *testing.T) {
|
func TestLocalTime_UnmarshalMarshalText(t *testing.T) {
|
||||||
d := toml.LocalTime{}
|
d := toml.LocalTime{}
|
||||||
err := d.UnmarshalText([]byte("20:12:01.000000002"))
|
err := d.UnmarshalText([]byte("20:12:01.000000002"))
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, toml.LocalTime{20, 12, 1, 2, 9}, d)
|
assert.Equal(t, toml.LocalTime{20, 12, 1, 2, 9}, d)
|
||||||
|
|
||||||
err = d.UnmarshalText([]byte("what"))
|
err = d.UnmarshalText([]byte("what"))
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
|
|
||||||
err = d.UnmarshalText([]byte("20:12:01.000000002 bad"))
|
err = d.UnmarshalText([]byte("20:12:01.000000002 bad"))
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalTime_RoundTrip(t *testing.T) {
|
func TestLocalTime_RoundTrip(t *testing.T) {
|
||||||
var d struct{ A toml.LocalTime }
|
var d struct{ A toml.LocalTime }
|
||||||
err := toml.Unmarshal([]byte("a=20:12:01.500"), &d)
|
err := toml.Unmarshal([]byte("a=20:12:01.500"), &d)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, "20:12:01.500", d.A.String())
|
assert.Equal(t, "20:12:01.500", d.A.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDateTime_AsTime(t *testing.T) {
|
func TestLocalDateTime_AsTime(t *testing.T) {
|
||||||
@@ -80,7 +80,7 @@ func TestLocalDateTime_AsTime(t *testing.T) {
|
|||||||
toml.LocalTime{20, 12, 1, 2, 9},
|
toml.LocalTime{20, 12, 1, 2, 9},
|
||||||
}
|
}
|
||||||
cast := d.AsTime(time.UTC)
|
cast := d.AsTime(time.UTC)
|
||||||
require.Equal(t, time.Date(2021, time.June, 8, 20, 12, 1, 2, time.UTC), cast)
|
assert.Equal(t, time.Date(2021, time.June, 8, 20, 12, 1, 2, time.UTC), cast)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDateTime_String(t *testing.T) {
|
func TestLocalDateTime_String(t *testing.T) {
|
||||||
@@ -88,7 +88,7 @@ func TestLocalDateTime_String(t *testing.T) {
|
|||||||
toml.LocalDate{2021, 6, 8},
|
toml.LocalDate{2021, 6, 8},
|
||||||
toml.LocalTime{20, 12, 1, 2, 9},
|
toml.LocalTime{20, 12, 1, 2, 9},
|
||||||
}
|
}
|
||||||
require.Equal(t, "2021-06-08T20:12:01.000000002", d.String())
|
assert.Equal(t, "2021-06-08T20:12:01.000000002", d.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDateTime_MarshalText(t *testing.T) {
|
func TestLocalDateTime_MarshalText(t *testing.T) {
|
||||||
@@ -97,22 +97,22 @@ func TestLocalDateTime_MarshalText(t *testing.T) {
|
|||||||
toml.LocalTime{20, 12, 1, 2, 9},
|
toml.LocalTime{20, 12, 1, 2, 9},
|
||||||
}
|
}
|
||||||
b, err := d.MarshalText()
|
b, err := d.MarshalText()
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, []byte("2021-06-08T20:12:01.000000002"), b)
|
assert.Equal(t, []byte("2021-06-08T20:12:01.000000002"), b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLocalDateTime_UnmarshalMarshalText(t *testing.T) {
|
func TestLocalDateTime_UnmarshalMarshalText(t *testing.T) {
|
||||||
d := toml.LocalDateTime{}
|
d := toml.LocalDateTime{}
|
||||||
err := d.UnmarshalText([]byte("2021-06-08 20:12:01.000000002"))
|
err := d.UnmarshalText([]byte("2021-06-08 20:12:01.000000002"))
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
require.Equal(t, toml.LocalDateTime{
|
assert.Equal(t, toml.LocalDateTime{
|
||||||
toml.LocalDate{2021, 6, 8},
|
toml.LocalDate{2021, 6, 8},
|
||||||
toml.LocalTime{20, 12, 1, 2, 9},
|
toml.LocalTime{20, 12, 1, 2, 9},
|
||||||
}, d)
|
}, d)
|
||||||
|
|
||||||
err = d.UnmarshalText([]byte("what"))
|
err = d.UnmarshalText([]byte("what"))
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
|
|
||||||
err = d.UnmarshalText([]byte("2021-06-08 20:12:01.000000002 bad"))
|
err = d.UnmarshalText([]byte("2021-06-08 20:12:01.000000002 bad"))
|
||||||
require.Error(t, err)
|
assert.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|||||||
+210
-48
@@ -3,15 +3,19 @@ package toml
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding"
|
"encoding"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"math"
|
"math"
|
||||||
"reflect"
|
"reflect"
|
||||||
"sort"
|
"slices"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
"unicode"
|
"unicode"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/characters"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Marshal serializes a Go value as a TOML document.
|
// Marshal serializes a Go value as a TOML document.
|
||||||
@@ -35,10 +39,11 @@ type Encoder struct {
|
|||||||
w io.Writer
|
w io.Writer
|
||||||
|
|
||||||
// global settings
|
// global settings
|
||||||
tablesInline bool
|
tablesInline bool
|
||||||
arraysMultiline bool
|
arraysMultiline bool
|
||||||
indentSymbol string
|
indentSymbol string
|
||||||
indentTables bool
|
indentTables bool
|
||||||
|
marshalJSONNumbers bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewEncoder returns a new Encoder that writes to w.
|
// NewEncoder returns a new Encoder that writes to w.
|
||||||
@@ -85,6 +90,17 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
|
|||||||
return enc
|
return enc
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SetMarshalJSONNumbers forces the encoder to serialize `json.Number` as a
|
||||||
|
// float or integer instead of relying on TextMarshaler to emit a string.
|
||||||
|
//
|
||||||
|
// *Unstable:* This method does not follow the compatibility guarantees of
|
||||||
|
// semver. It can be changed or removed without a new major version being
|
||||||
|
// issued.
|
||||||
|
func (enc *Encoder) SetMarshalJSONNumbers(indent bool) *Encoder {
|
||||||
|
enc.marshalJSONNumbers = indent
|
||||||
|
return enc
|
||||||
|
}
|
||||||
|
|
||||||
// Encode writes a TOML representation of v to the stream.
|
// Encode writes a TOML representation of v to the stream.
|
||||||
//
|
//
|
||||||
// If v cannot be represented to TOML it returns an error.
|
// If v cannot be represented to TOML it returns an error.
|
||||||
@@ -146,6 +162,11 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
|
|||||||
//
|
//
|
||||||
// The "omitempty" option prevents empty values or groups from being emitted.
|
// The "omitempty" option prevents empty values or groups from being emitted.
|
||||||
//
|
//
|
||||||
|
// The "omitzero" option prevents zero values or groups from being emitted.
|
||||||
|
//
|
||||||
|
// The "commented" option prefixes the value and all its children with a comment
|
||||||
|
// symbol.
|
||||||
|
//
|
||||||
// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit
|
// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit
|
||||||
// a TOML comment before the value being annotated. Comments are ignored inside
|
// a TOML comment before the value being annotated. Comments are ignored inside
|
||||||
// inline tables. For array tables, the comment is only present before the first
|
// inline tables. For array tables, the comment is only present before the first
|
||||||
@@ -159,7 +180,7 @@ func (enc *Encoder) Encode(v interface{}) error {
|
|||||||
ctx.inline = enc.tablesInline
|
ctx.inline = enc.tablesInline
|
||||||
|
|
||||||
if v == nil {
|
if v == nil {
|
||||||
return fmt.Errorf("toml: cannot encode a nil interface")
|
return errors.New("toml: cannot encode a nil interface")
|
||||||
}
|
}
|
||||||
|
|
||||||
b, err := enc.encode(b, ctx, reflect.ValueOf(v))
|
b, err := enc.encode(b, ctx, reflect.ValueOf(v))
|
||||||
@@ -178,6 +199,8 @@ func (enc *Encoder) Encode(v interface{}) error {
|
|||||||
type valueOptions struct {
|
type valueOptions struct {
|
||||||
multiline bool
|
multiline bool
|
||||||
omitempty bool
|
omitempty bool
|
||||||
|
omitzero bool
|
||||||
|
commented bool
|
||||||
comment string
|
comment string
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -203,6 +226,9 @@ type encoderCtx struct {
|
|||||||
// Indentation level
|
// Indentation level
|
||||||
indent int
|
indent int
|
||||||
|
|
||||||
|
// Prefix the current value with a comment.
|
||||||
|
commented bool
|
||||||
|
|
||||||
// Options coming from struct tags
|
// Options coming from struct tags
|
||||||
options valueOptions
|
options valueOptions
|
||||||
}
|
}
|
||||||
@@ -243,10 +269,21 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
|
|||||||
return append(b, x.String()...), nil
|
return append(b, x.String()...), nil
|
||||||
case LocalDateTime:
|
case LocalDateTime:
|
||||||
return append(b, x.String()...), nil
|
return append(b, x.String()...), nil
|
||||||
|
case json.Number:
|
||||||
|
if enc.marshalJSONNumbers {
|
||||||
|
if x == "" { /// Useful zero value.
|
||||||
|
return append(b, "0"...), nil
|
||||||
|
} else if v, err := x.Int64(); err == nil {
|
||||||
|
return enc.encode(b, ctx, reflect.ValueOf(v))
|
||||||
|
} else if f, err := x.Float64(); err == nil {
|
||||||
|
return enc.encode(b, ctx, reflect.ValueOf(f))
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("toml: unable to convert %q to int64 or float64", x)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
hasTextMarshaler := v.Type().Implements(textMarshalerType)
|
hasTextMarshaler := v.Type().Implements(textMarshalerType)
|
||||||
if hasTextMarshaler || (v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
|
if hasTextMarshaler || (v.CanAddr() && reflect.PointerTo(v.Type()).Implements(textMarshalerType)) {
|
||||||
if !hasTextMarshaler {
|
if !hasTextMarshaler {
|
||||||
v = v.Addr()
|
v = v.Addr()
|
||||||
}
|
}
|
||||||
@@ -271,11 +308,11 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
|
|||||||
return enc.encodeMap(b, ctx, v)
|
return enc.encodeMap(b, ctx, v)
|
||||||
case reflect.Struct:
|
case reflect.Struct:
|
||||||
return enc.encodeStruct(b, ctx, v)
|
return enc.encodeStruct(b, ctx, v)
|
||||||
case reflect.Slice:
|
case reflect.Slice, reflect.Array:
|
||||||
return enc.encodeSlice(b, ctx, v)
|
return enc.encodeSlice(b, ctx, v)
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
if v.IsNil() {
|
if v.IsNil() {
|
||||||
return nil, fmt.Errorf("toml: encoding a nil interface is not supported")
|
return nil, errors.New("toml: encoding a nil interface is not supported")
|
||||||
}
|
}
|
||||||
|
|
||||||
return enc.encode(b, ctx, v.Elem())
|
return enc.encode(b, ctx, v.Elem())
|
||||||
@@ -292,28 +329,30 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
|
|||||||
case reflect.Float32:
|
case reflect.Float32:
|
||||||
f := v.Float()
|
f := v.Float()
|
||||||
|
|
||||||
if math.IsNaN(f) {
|
switch {
|
||||||
|
case math.IsNaN(f):
|
||||||
b = append(b, "nan"...)
|
b = append(b, "nan"...)
|
||||||
} else if f > math.MaxFloat32 {
|
case f > math.MaxFloat32:
|
||||||
b = append(b, "inf"...)
|
b = append(b, "inf"...)
|
||||||
} else if f < -math.MaxFloat32 {
|
case f < -math.MaxFloat32:
|
||||||
b = append(b, "-inf"...)
|
b = append(b, "-inf"...)
|
||||||
} else if math.Trunc(f) == f {
|
case math.Trunc(f) == f:
|
||||||
b = strconv.AppendFloat(b, f, 'f', 1, 32)
|
b = strconv.AppendFloat(b, f, 'f', 1, 32)
|
||||||
} else {
|
default:
|
||||||
b = strconv.AppendFloat(b, f, 'f', -1, 32)
|
b = strconv.AppendFloat(b, f, 'f', -1, 32)
|
||||||
}
|
}
|
||||||
case reflect.Float64:
|
case reflect.Float64:
|
||||||
f := v.Float()
|
f := v.Float()
|
||||||
if math.IsNaN(f) {
|
switch {
|
||||||
|
case math.IsNaN(f):
|
||||||
b = append(b, "nan"...)
|
b = append(b, "nan"...)
|
||||||
} else if f > math.MaxFloat64 {
|
case f > math.MaxFloat64:
|
||||||
b = append(b, "inf"...)
|
b = append(b, "inf"...)
|
||||||
} else if f < -math.MaxFloat64 {
|
case f < -math.MaxFloat64:
|
||||||
b = append(b, "-inf"...)
|
b = append(b, "-inf"...)
|
||||||
} else if math.Trunc(f) == f {
|
case math.Trunc(f) == f:
|
||||||
b = strconv.AppendFloat(b, f, 'f', 1, 64)
|
b = strconv.AppendFloat(b, f, 'f', 1, 64)
|
||||||
} else {
|
default:
|
||||||
b = strconv.AppendFloat(b, f, 'f', -1, 64)
|
b = strconv.AppendFloat(b, f, 'f', -1, 64)
|
||||||
}
|
}
|
||||||
case reflect.Bool:
|
case reflect.Bool:
|
||||||
@@ -350,14 +389,40 @@ func shouldOmitEmpty(options valueOptions, v reflect.Value) bool {
|
|||||||
return options.omitempty && isEmptyValue(v)
|
return options.omitempty && isEmptyValue(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func shouldOmitZero(options valueOptions, v reflect.Value) bool {
|
||||||
|
if !options.omitzero {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the type implements isZeroer interface (has a custom IsZero method).
|
||||||
|
if v.Type().Implements(isZeroerType) {
|
||||||
|
return v.Interface().(isZeroer).IsZero()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if pointer type implements isZeroer.
|
||||||
|
if reflect.PointerTo(v.Type()).Implements(isZeroerType) {
|
||||||
|
if v.CanAddr() {
|
||||||
|
return v.Addr().Interface().(isZeroer).IsZero()
|
||||||
|
}
|
||||||
|
// Create a temporary addressable copy to call the pointer receiver method.
|
||||||
|
pv := reflect.New(v.Type())
|
||||||
|
pv.Elem().Set(v)
|
||||||
|
return pv.Interface().(isZeroer).IsZero()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to reflect's IsZero for types without custom IsZero method.
|
||||||
|
return v.IsZero()
|
||||||
|
}
|
||||||
|
|
||||||
func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
|
func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if !ctx.inline {
|
if !ctx.inline {
|
||||||
b = enc.encodeComment(ctx.indent, options.comment, b)
|
b = enc.encodeComment(ctx.indent, options.comment, b)
|
||||||
|
b = enc.commented(ctx.commented, b)
|
||||||
|
b = enc.indent(ctx.indent, b)
|
||||||
}
|
}
|
||||||
|
|
||||||
b = enc.indent(ctx.indent, b)
|
|
||||||
b = enc.encodeKey(b, ctx.key)
|
b = enc.encodeKey(b, ctx.key)
|
||||||
b = append(b, " = "...)
|
b = append(b, " = "...)
|
||||||
|
|
||||||
@@ -376,6 +441,13 @@ func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v r
|
|||||||
return b, nil
|
return b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (enc *Encoder) commented(commented bool, b []byte) []byte {
|
||||||
|
if commented {
|
||||||
|
return append(b, "# "...)
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
func isEmptyValue(v reflect.Value) bool {
|
func isEmptyValue(v reflect.Value) bool {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Struct:
|
case reflect.Struct:
|
||||||
@@ -392,8 +464,9 @@ func isEmptyValue(v reflect.Value) bool {
|
|||||||
return v.Float() == 0
|
return v.Float() == 0
|
||||||
case reflect.Interface, reflect.Ptr:
|
case reflect.Interface, reflect.Ptr:
|
||||||
return v.IsNil()
|
return v.IsNil()
|
||||||
|
default:
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
return false
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func isEmptyStruct(v reflect.Value) bool {
|
func isEmptyStruct(v reflect.Value) bool {
|
||||||
@@ -437,7 +510,7 @@ func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byt
|
|||||||
func needsQuoting(v string) bool {
|
func needsQuoting(v string) bool {
|
||||||
// TODO: vectorize
|
// TODO: vectorize
|
||||||
for _, b := range []byte(v) {
|
for _, b := range []byte(v) {
|
||||||
if b == '\'' || b == '\r' || b == '\n' || invalidAscii(b) {
|
if b == '\'' || b == '\r' || b == '\n' || characters.InvalidASCII(b) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -475,12 +548,26 @@ func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byt
|
|||||||
del = 0x7f
|
del = 0x7f
|
||||||
)
|
)
|
||||||
|
|
||||||
for _, r := range []byte(v) {
|
bv := []byte(v)
|
||||||
|
for i := 0; i < len(bv); i++ {
|
||||||
|
r := bv[i]
|
||||||
switch r {
|
switch r {
|
||||||
case '\\':
|
case '\\':
|
||||||
b = append(b, `\\`...)
|
b = append(b, `\\`...)
|
||||||
case '"':
|
case '"':
|
||||||
b = append(b, `\"`...)
|
if multiline {
|
||||||
|
// Quotation marks do not need to be quoted in multiline strings unless
|
||||||
|
// it contains 3 consecutive. If 3+ quotes appear, quote all of them
|
||||||
|
// because it's visually better
|
||||||
|
if i+2 > len(bv) || bv[i+1] != '"' || bv[i+2] != '"' {
|
||||||
|
b = append(b, r)
|
||||||
|
} else {
|
||||||
|
b = append(b, `\"\"\"`...)
|
||||||
|
i += 2
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
b = append(b, `\"`...)
|
||||||
|
}
|
||||||
case '\b':
|
case '\b':
|
||||||
b = append(b, `\b`...)
|
b = append(b, `\b`...)
|
||||||
case '\f':
|
case '\f':
|
||||||
@@ -517,13 +604,15 @@ func (enc *Encoder) encodeUnquotedKey(b []byte, v string) []byte {
|
|||||||
return append(b, v...)
|
return append(b, v...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error) {
|
func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) []byte {
|
||||||
if len(ctx.parentKey) == 0 {
|
if len(ctx.parentKey) == 0 {
|
||||||
return b, nil
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
|
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
|
||||||
|
|
||||||
|
b = enc.commented(ctx.commented, b)
|
||||||
|
|
||||||
b = enc.indent(ctx.indent, b)
|
b = enc.indent(ctx.indent, b)
|
||||||
|
|
||||||
b = append(b, '[')
|
b = append(b, '[')
|
||||||
@@ -537,10 +626,9 @@ func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error)
|
|||||||
|
|
||||||
b = append(b, "]\n"...)
|
b = append(b, "]\n"...)
|
||||||
|
|
||||||
return b, nil
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
|
||||||
func (enc *Encoder) encodeKey(b []byte, k string) []byte {
|
func (enc *Encoder) encodeKey(b []byte, k string) []byte {
|
||||||
needsQuotation := false
|
needsQuotation := false
|
||||||
cannotUseLiteral := false
|
cannotUseLiteral := false
|
||||||
@@ -575,11 +663,38 @@ func (enc *Encoder) encodeKey(b []byte, k string) []byte {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
func (enc *Encoder) keyToString(k reflect.Value) (string, error) {
|
||||||
if v.Type().Key().Kind() != reflect.String {
|
keyType := k.Type()
|
||||||
return nil, fmt.Errorf("toml: type %s is not supported as a map key", v.Type().Key().Kind())
|
if keyType.Implements(textMarshalerType) {
|
||||||
|
keyB, err := k.Interface().(encoding.TextMarshaler).MarshalText()
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("toml: error marshalling key %v from text: %w", k, err)
|
||||||
|
}
|
||||||
|
return string(keyB), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
switch keyType.Kind() {
|
||||||
|
case reflect.String:
|
||||||
|
return k.String(), nil
|
||||||
|
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
return strconv.FormatInt(k.Int(), 10), nil
|
||||||
|
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||||
|
return strconv.FormatUint(k.Uint(), 10), nil
|
||||||
|
|
||||||
|
case reflect.Float32:
|
||||||
|
return strconv.FormatFloat(k.Float(), 'f', -1, 32), nil
|
||||||
|
|
||||||
|
case reflect.Float64:
|
||||||
|
return strconv.FormatFloat(k.Float(), 'f', -1, 64), nil
|
||||||
|
|
||||||
|
default:
|
||||||
|
return "", fmt.Errorf("toml: type %s is not supported as a map key", keyType.Kind())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||||
var (
|
var (
|
||||||
t table
|
t table
|
||||||
emptyValueOptions valueOptions
|
emptyValueOptions valueOptions
|
||||||
@@ -587,11 +702,25 @@ func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte
|
|||||||
|
|
||||||
iter := v.MapRange()
|
iter := v.MapRange()
|
||||||
for iter.Next() {
|
for iter.Next() {
|
||||||
k := iter.Key().String()
|
|
||||||
v := iter.Value()
|
v := iter.Value()
|
||||||
|
|
||||||
if isNil(v) {
|
// Handle nil values: convert nil pointers to zero value,
|
||||||
continue
|
// skip nil interfaces and nil maps.
|
||||||
|
switch v.Kind() {
|
||||||
|
case reflect.Ptr:
|
||||||
|
if v.IsNil() {
|
||||||
|
v = reflect.Zero(v.Type().Elem())
|
||||||
|
}
|
||||||
|
case reflect.Interface, reflect.Map:
|
||||||
|
if v.IsNil() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
|
||||||
|
k, err := enc.keyToString(iter.Key())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if willConvertToTableOrArrayTable(ctx, v) {
|
if willConvertToTableOrArrayTable(ctx, v) {
|
||||||
@@ -608,8 +737,8 @@ func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte
|
|||||||
}
|
}
|
||||||
|
|
||||||
func sortEntriesByKey(e []entry) {
|
func sortEntriesByKey(e []entry) {
|
||||||
sort.Slice(e, func(i, j int) bool {
|
slices.SortFunc(e, func(a, b entry) int {
|
||||||
return e[i].Key < e[j].Key
|
return strings.Compare(a.Key, b.Key)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -672,11 +801,12 @@ func walkStruct(ctx encoderCtx, t *table, v reflect.Value) {
|
|||||||
if fieldType.Anonymous {
|
if fieldType.Anonymous {
|
||||||
if fieldType.Type.Kind() == reflect.Struct {
|
if fieldType.Type.Kind() == reflect.Struct {
|
||||||
walkStruct(ctx, t, f)
|
walkStruct(ctx, t, f)
|
||||||
|
} else if fieldType.Type.Kind() == reflect.Ptr && !f.IsNil() && f.Elem().Kind() == reflect.Struct {
|
||||||
|
walkStruct(ctx, t, f.Elem())
|
||||||
}
|
}
|
||||||
continue
|
continue
|
||||||
} else {
|
|
||||||
k = fieldType.Name
|
|
||||||
}
|
}
|
||||||
|
k = fieldType.Name
|
||||||
}
|
}
|
||||||
|
|
||||||
if isNil(f) {
|
if isNil(f) {
|
||||||
@@ -686,6 +816,8 @@ func walkStruct(ctx encoderCtx, t *table, v reflect.Value) {
|
|||||||
options := valueOptions{
|
options := valueOptions{
|
||||||
multiline: opts.multiline,
|
multiline: opts.multiline,
|
||||||
omitempty: opts.omitempty,
|
omitempty: opts.omitempty,
|
||||||
|
omitzero: opts.omitzero,
|
||||||
|
commented: opts.commented,
|
||||||
comment: fieldType.Tag.Get("comment"),
|
comment: fieldType.Tag.Get("comment"),
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -745,6 +877,8 @@ type tagOptions struct {
|
|||||||
multiline bool
|
multiline bool
|
||||||
inline bool
|
inline bool
|
||||||
omitempty bool
|
omitempty bool
|
||||||
|
omitzero bool
|
||||||
|
commented bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseTag(tag string) (string, tagOptions) {
|
func parseTag(tag string) (string, tagOptions) {
|
||||||
@@ -756,7 +890,7 @@ func parseTag(tag string) (string, tagOptions) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
raw := tag[idx+1:]
|
raw := tag[idx+1:]
|
||||||
tag = string(tag[:idx])
|
tag = tag[:idx]
|
||||||
for raw != "" {
|
for raw != "" {
|
||||||
var o string
|
var o string
|
||||||
i := strings.Index(raw, ",")
|
i := strings.Index(raw, ",")
|
||||||
@@ -772,6 +906,10 @@ func parseTag(tag string) (string, tagOptions) {
|
|||||||
opts.inline = true
|
opts.inline = true
|
||||||
case "omitempty":
|
case "omitempty":
|
||||||
opts.omitempty = true
|
opts.omitempty = true
|
||||||
|
case "omitzero":
|
||||||
|
opts.omitzero = true
|
||||||
|
case "commented":
|
||||||
|
opts.commented = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -788,10 +926,7 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
}
|
}
|
||||||
|
|
||||||
if !ctx.skipTableHeader {
|
if !ctx.skipTableHeader {
|
||||||
b, err = enc.encodeTableHeader(ctx, b)
|
b = enc.encodeTableHeader(ctx, b)
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if enc.indentTables && len(ctx.parentKey) > 0 {
|
if enc.indentTables && len(ctx.parentKey) > 0 {
|
||||||
ctx.indent++
|
ctx.indent++
|
||||||
@@ -804,11 +939,16 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
if shouldOmitEmpty(kv.Options, kv.Value) {
|
if shouldOmitEmpty(kv.Options, kv.Value) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if kv.Options.omitzero && shouldOmitZero(kv.Options, kv.Value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
hasNonEmptyKV = true
|
hasNonEmptyKV = true
|
||||||
|
|
||||||
ctx.setKey(kv.Key)
|
ctx.setKey(kv.Key)
|
||||||
|
ctx2 := ctx
|
||||||
|
ctx2.commented = kv.Options.commented || ctx2.commented
|
||||||
|
|
||||||
b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value)
|
b, err = enc.encodeKv(b, ctx2, kv.Options, kv.Value)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -821,6 +961,9 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
if shouldOmitEmpty(table.Options, table.Value) {
|
if shouldOmitEmpty(table.Options, table.Value) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if table.Options.omitzero && shouldOmitZero(table.Options, table.Value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
if first {
|
if first {
|
||||||
first = false
|
first = false
|
||||||
if hasNonEmptyKV {
|
if hasNonEmptyKV {
|
||||||
@@ -833,8 +976,10 @@ func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, erro
|
|||||||
ctx.setKey(table.Key)
|
ctx.setKey(table.Key)
|
||||||
|
|
||||||
ctx.options = table.Options
|
ctx.options = table.Options
|
||||||
|
ctx2 := ctx
|
||||||
|
ctx2.commented = ctx2.commented || ctx.options.commented
|
||||||
|
|
||||||
b, err = enc.encode(b, ctx, table.Value)
|
b, err = enc.encode(b, ctx2, table.Value)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -853,6 +998,9 @@ func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte
|
|||||||
if shouldOmitEmpty(kv.Options, kv.Value) {
|
if shouldOmitEmpty(kv.Options, kv.Value) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if kv.Options.omitzero && shouldOmitZero(kv.Options, kv.Value) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
if first {
|
if first {
|
||||||
first = false
|
first = false
|
||||||
@@ -881,11 +1029,14 @@ func willConvertToTable(ctx encoderCtx, v reflect.Value) bool {
|
|||||||
if !v.IsValid() {
|
if !v.IsValid() {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
|
t := v.Type()
|
||||||
|
if t == timeType || t.Implements(textMarshalerType) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PointerTo(t).Implements(textMarshalerType) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
t := v.Type()
|
|
||||||
switch t.Kind() {
|
switch t.Kind() {
|
||||||
case reflect.Map, reflect.Struct:
|
case reflect.Map, reflect.Struct:
|
||||||
return !ctx.inline
|
return !ctx.inline
|
||||||
@@ -912,7 +1063,7 @@ func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool {
|
|||||||
return willConvertToTableOrArrayTable(ctx, v.Elem())
|
return willConvertToTableOrArrayTable(ctx, v.Elem())
|
||||||
}
|
}
|
||||||
|
|
||||||
if t.Kind() == reflect.Slice {
|
if t.Kind() == reflect.Slice || t.Kind() == reflect.Array {
|
||||||
if v.Len() == 0 {
|
if v.Len() == 0 {
|
||||||
// An empty slice should be a kv = [].
|
// An empty slice should be a kv = [].
|
||||||
return false
|
return false
|
||||||
@@ -952,6 +1103,13 @@ func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.
|
|||||||
ctx.shiftKey()
|
ctx.shiftKey()
|
||||||
|
|
||||||
scratch := make([]byte, 0, 64)
|
scratch := make([]byte, 0, 64)
|
||||||
|
|
||||||
|
scratch = enc.commented(ctx.commented, scratch)
|
||||||
|
|
||||||
|
if enc.indentTables {
|
||||||
|
scratch = enc.indent(ctx.indent, scratch)
|
||||||
|
}
|
||||||
|
|
||||||
scratch = append(scratch, "[["...)
|
scratch = append(scratch, "[["...)
|
||||||
|
|
||||||
for i, k := range ctx.parentKey {
|
for i, k := range ctx.parentKey {
|
||||||
@@ -967,6 +1125,10 @@ func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.
|
|||||||
|
|
||||||
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
|
b = enc.encodeComment(ctx.indent, ctx.options.comment, b)
|
||||||
|
|
||||||
|
if enc.indentTables {
|
||||||
|
ctx.indent++
|
||||||
|
}
|
||||||
|
|
||||||
for i := 0; i < v.Len(); i++ {
|
for i := 0; i < v.Len(); i++ {
|
||||||
if i != 0 {
|
if i != 0 {
|
||||||
b = append(b, "\n"...)
|
b = append(b, "\n"...)
|
||||||
|
|||||||
+1087
-62
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,44 @@
|
|||||||
|
// Package ossfuzz provides a fuzzing target for OSS-Fuzz.
|
||||||
|
package ossfuzz
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FuzzToml is the fuzzing target.
|
||||||
|
func FuzzToml(data []byte) int {
|
||||||
|
if len(data) >= 2048 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.Contains(string(data), "nan") {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
var v interface{}
|
||||||
|
err := toml.Unmarshal(data, &v)
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
encoded, err := toml.Marshal(v)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed to marshal unmarshaled document: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
var v2 interface{}
|
||||||
|
err = toml.Unmarshal(encoded, &v2)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed round trip: %s", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
if !reflect.DeepEqual(v, v2) {
|
||||||
|
panic(fmt.Sprintf("not equal: %#+v %#+v", v, v2))
|
||||||
|
}
|
||||||
|
|
||||||
|
return 1
|
||||||
|
}
|
||||||
-450
@@ -1,450 +0,0 @@
|
|||||||
package toml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
//nolint:funlen
|
|
||||||
func TestParser_AST_Numbers(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
kind ast.Kind
|
|
||||||
err bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "integer just digits",
|
|
||||||
input: `1234`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer zero",
|
|
||||||
input: `0`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer sign",
|
|
||||||
input: `+99`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer hex uppercase",
|
|
||||||
input: `0xDEADBEEF`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer hex lowercase",
|
|
||||||
input: `0xdead_beef`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer octal",
|
|
||||||
input: `0o01234567`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "integer binary",
|
|
||||||
input: `0b11010110`,
|
|
||||||
kind: ast.Integer,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float zero",
|
|
||||||
input: `0.0`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float positive zero",
|
|
||||||
input: `+0.0`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float negative zero",
|
|
||||||
input: `-0.0`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float pi",
|
|
||||||
input: `3.1415`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float negative",
|
|
||||||
input: `-0.01`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float signed exponent",
|
|
||||||
input: `5e+22`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float exponent lowercase",
|
|
||||||
input: `1e06`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float exponent uppercase",
|
|
||||||
input: `-2E-2`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float fractional with exponent",
|
|
||||||
input: `6.626e-34`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "float underscores",
|
|
||||||
input: `224_617.445_991_228`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inf",
|
|
||||||
input: `inf`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inf negative",
|
|
||||||
input: `-inf`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inf positive",
|
|
||||||
input: `+inf`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nan",
|
|
||||||
input: `nan`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nan negative",
|
|
||||||
input: `-nan`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "nan positive",
|
|
||||||
input: `+nan`,
|
|
||||||
kind: ast.Float,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
e := e
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
p := parser{}
|
|
||||||
p.Reset([]byte(`A = ` + e.input))
|
|
||||||
p.NextExpression()
|
|
||||||
err := p.Error()
|
|
||||||
if e.err {
|
|
||||||
require.Error(t, err)
|
|
||||||
} else {
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
expected := astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: e.kind, Data: []byte(e.input)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`A`)},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
compareNode(t, expected, p.Expression())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type (
|
|
||||||
astNode struct {
|
|
||||||
Kind ast.Kind
|
|
||||||
Data []byte
|
|
||||||
Children []astNode
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
func compareNode(t *testing.T, e astNode, n *ast.Node) {
|
|
||||||
t.Helper()
|
|
||||||
require.Equal(t, e.Kind, n.Kind)
|
|
||||||
require.Equal(t, e.Data, n.Data)
|
|
||||||
|
|
||||||
compareIterator(t, e.Children, n.Children())
|
|
||||||
}
|
|
||||||
|
|
||||||
func compareIterator(t *testing.T, expected []astNode, actual ast.Iterator) {
|
|
||||||
t.Helper()
|
|
||||||
idx := 0
|
|
||||||
|
|
||||||
for actual.Next() {
|
|
||||||
n := actual.Node()
|
|
||||||
|
|
||||||
if idx >= len(expected) {
|
|
||||||
t.Fatal("extra child in actual tree")
|
|
||||||
}
|
|
||||||
e := expected[idx]
|
|
||||||
|
|
||||||
compareNode(t, e, n)
|
|
||||||
|
|
||||||
idx++
|
|
||||||
}
|
|
||||||
|
|
||||||
if idx < len(expected) {
|
|
||||||
t.Fatal("missing children in actual", "idx =", idx, "expected =", len(expected))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//nolint:funlen
|
|
||||||
func TestParser_AST(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
ast astNode
|
|
||||||
err bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "simple string assignment",
|
|
||||||
input: `A = "hello"`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`hello`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "simple bool assignment",
|
|
||||||
input: `A = true`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.Bool,
|
|
||||||
Data: []byte(`true`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "array of strings",
|
|
||||||
input: `A = ["hello", ["world", "again"]]`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.Array,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`hello`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Array,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`world`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`again`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "array of arrays of strings",
|
|
||||||
input: `A = ["hello", "world"]`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.Array,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`hello`),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.String,
|
|
||||||
Data: []byte(`world`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`A`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "inline table",
|
|
||||||
input: `name = { first = "Tom", last = "Preston-Werner" }`,
|
|
||||||
ast: astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.InlineTable,
|
|
||||||
Children: []astNode{
|
|
||||||
{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: ast.String, Data: []byte(`Tom`)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`first`)},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: ast.String, Data: []byte(`Preston-Werner`)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`last`)},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Kind: ast.Key,
|
|
||||||
Data: []byte(`name`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
e := e
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
p := parser{}
|
|
||||||
p.Reset([]byte(e.input))
|
|
||||||
p.NextExpression()
|
|
||||||
err := p.Error()
|
|
||||||
if e.err {
|
|
||||||
require.Error(t, err)
|
|
||||||
} else {
|
|
||||||
require.NoError(t, err)
|
|
||||||
compareNode(t, e.ast, p.Expression())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkParseBasicStringWithUnicode(b *testing.B) {
|
|
||||||
p := &parser{}
|
|
||||||
b.Run("4", func(b *testing.B) {
|
|
||||||
input := []byte(`"\u1234\u5678\u9ABC\u1234\u5678\u9ABC"`)
|
|
||||||
b.ReportAllocs()
|
|
||||||
b.SetBytes(int64(len(input)))
|
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
p.parseBasicString(input)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
b.Run("8", func(b *testing.B) {
|
|
||||||
input := []byte(`"\u12345678\u9ABCDEF0\u12345678\u9ABCDEF0"`)
|
|
||||||
b.ReportAllocs()
|
|
||||||
b.SetBytes(int64(len(input)))
|
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
p.parseBasicString(input)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkParseBasicStringsEasy(b *testing.B) {
|
|
||||||
p := &parser{}
|
|
||||||
|
|
||||||
for _, size := range []int{1, 4, 8, 16, 21} {
|
|
||||||
b.Run(strconv.Itoa(size), func(b *testing.B) {
|
|
||||||
input := []byte(`"` + strings.Repeat("A", size) + `"`)
|
|
||||||
|
|
||||||
b.ReportAllocs()
|
|
||||||
b.SetBytes(int64(len(input)))
|
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
p.parseBasicString(input)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestParser_AST_DateTimes(t *testing.T) {
|
|
||||||
examples := []struct {
|
|
||||||
desc string
|
|
||||||
input string
|
|
||||||
kind ast.Kind
|
|
||||||
err bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
desc: "offset-date-time with delim 'T' and UTC offset",
|
|
||||||
input: `2021-07-21T12:08:05Z`,
|
|
||||||
kind: ast.DateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "offset-date-time with space delim and +8hours offset",
|
|
||||||
input: `2021-07-21 12:08:05+08:00`,
|
|
||||||
kind: ast.DateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "local-date-time with nano second",
|
|
||||||
input: `2021-07-21T12:08:05.666666666`,
|
|
||||||
kind: ast.LocalDateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "local-date-time",
|
|
||||||
input: `2021-07-21T12:08:05`,
|
|
||||||
kind: ast.LocalDateTime,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
desc: "local-date",
|
|
||||||
input: `2021-07-21`,
|
|
||||||
kind: ast.LocalDate,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, e := range examples {
|
|
||||||
e := e
|
|
||||||
t.Run(e.desc, func(t *testing.T) {
|
|
||||||
p := parser{}
|
|
||||||
p.Reset([]byte(`A = ` + e.input))
|
|
||||||
p.NextExpression()
|
|
||||||
err := p.Error()
|
|
||||||
if e.err {
|
|
||||||
require.Error(t, err)
|
|
||||||
} else {
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
expected := astNode{
|
|
||||||
Kind: ast.KeyValue,
|
|
||||||
Children: []astNode{
|
|
||||||
{Kind: e.kind, Data: []byte(e.input)},
|
|
||||||
{Kind: ast.Key, Data: []byte(`A`)},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
compareNode(t, expected, p.Expression())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,9 +1,8 @@
|
|||||||
package toml
|
package toml
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/tracker"
|
"github.com/pelletier/go-toml/v2/internal/tracker"
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
type strict struct {
|
type strict struct {
|
||||||
@@ -12,10 +11,13 @@ type strict struct {
|
|||||||
// Tracks the current key being processed.
|
// Tracks the current key being processed.
|
||||||
key tracker.KeyTracker
|
key tracker.KeyTracker
|
||||||
|
|
||||||
missing []decodeError
|
missing []unstable.ParserError
|
||||||
|
|
||||||
|
// Reference to the document for computing key ranges.
|
||||||
|
doc []byte
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) EnterTable(node *ast.Node) {
|
func (s *strict) EnterTable(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -23,7 +25,7 @@ func (s *strict) EnterTable(node *ast.Node) {
|
|||||||
s.key.UpdateTable(node)
|
s.key.UpdateTable(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) EnterArrayTable(node *ast.Node) {
|
func (s *strict) EnterArrayTable(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -31,7 +33,7 @@ func (s *strict) EnterArrayTable(node *ast.Node) {
|
|||||||
s.key.UpdateArrayTable(node)
|
s.key.UpdateArrayTable(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) EnterKeyValue(node *ast.Node) {
|
func (s *strict) EnterKeyValue(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -39,7 +41,7 @@ func (s *strict) EnterKeyValue(node *ast.Node) {
|
|||||||
s.key.Push(node)
|
s.key.Push(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) ExitKeyValue(node *ast.Node) {
|
func (s *strict) ExitKeyValue(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -47,27 +49,27 @@ func (s *strict) ExitKeyValue(node *ast.Node) {
|
|||||||
s.key.Pop(node)
|
s.key.Pop(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) MissingTable(node *ast.Node) {
|
func (s *strict) MissingTable(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
s.missing = append(s.missing, decodeError{
|
s.missing = append(s.missing, unstable.ParserError{
|
||||||
highlight: keyLocation(node),
|
Highlight: s.keyLocation(node),
|
||||||
message: "missing table",
|
Message: "missing table",
|
||||||
key: s.key.Key(),
|
Key: s.key.Key(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *strict) MissingField(node *ast.Node) {
|
func (s *strict) MissingField(node *unstable.Node) {
|
||||||
if !s.Enabled {
|
if !s.Enabled {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
s.missing = append(s.missing, decodeError{
|
s.missing = append(s.missing, unstable.ParserError{
|
||||||
highlight: keyLocation(node),
|
Highlight: s.keyLocation(node),
|
||||||
message: "missing field",
|
Message: "missing field",
|
||||||
key: s.key.Key(),
|
Key: s.key.Key(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -88,7 +90,7 @@ func (s *strict) Error(doc []byte) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func keyLocation(node *ast.Node) []byte {
|
func (s *strict) keyLocation(node *unstable.Node) []byte {
|
||||||
k := node.Key()
|
k := node.Key()
|
||||||
|
|
||||||
hasOne := k.Next()
|
hasOne := k.Next()
|
||||||
@@ -96,12 +98,17 @@ func keyLocation(node *ast.Node) []byte {
|
|||||||
panic("should not be called with empty key")
|
panic("should not be called with empty key")
|
||||||
}
|
}
|
||||||
|
|
||||||
start := k.Node().Data
|
// Get the range from the first key to the last key.
|
||||||
end := k.Node().Data
|
firstRaw := k.Node().Raw
|
||||||
|
lastRaw := firstRaw
|
||||||
|
|
||||||
for k.Next() {
|
for k.Next() {
|
||||||
end = k.Node().Data
|
lastRaw = k.Node().Raw
|
||||||
}
|
}
|
||||||
|
|
||||||
return danger.BytesRange(start, end)
|
// Compute the slice from the document using the ranges.
|
||||||
|
start := firstRaw.Offset
|
||||||
|
end := lastRaw.Offset + lastRaw.Length
|
||||||
|
|
||||||
|
return s.doc[start:end]
|
||||||
}
|
}
|
||||||
|
|||||||
Executable
+597
@@ -0,0 +1,597 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -uo pipefail
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# Go versions to test (1.11 through 1.26)
|
||||||
|
GO_VERSIONS=(
|
||||||
|
"1.11"
|
||||||
|
"1.12"
|
||||||
|
"1.13"
|
||||||
|
"1.14"
|
||||||
|
"1.15"
|
||||||
|
"1.16"
|
||||||
|
"1.17"
|
||||||
|
"1.18"
|
||||||
|
"1.19"
|
||||||
|
"1.20"
|
||||||
|
"1.21"
|
||||||
|
"1.22"
|
||||||
|
"1.23"
|
||||||
|
"1.24"
|
||||||
|
"1.25"
|
||||||
|
"1.26"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Default values
|
||||||
|
PARALLEL=true
|
||||||
|
VERBOSE=false
|
||||||
|
OUTPUT_DIR="test-results"
|
||||||
|
DOCKER_TIMEOUT="10m"
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat << EOF
|
||||||
|
Usage: $0 [OPTIONS] [GO_VERSIONS...]
|
||||||
|
|
||||||
|
Test go-toml across multiple Go versions using Docker containers.
|
||||||
|
|
||||||
|
The script reports the lowest continuous supported Go version (where all subsequent
|
||||||
|
versions pass) and only exits with non-zero status if either of the two most recent
|
||||||
|
Go versions fail, indicating immediate attention is needed.
|
||||||
|
|
||||||
|
Note: For Go versions < 1.21, the script automatically updates go.mod to match the
|
||||||
|
target version, but older versions may still fail due to missing standard library
|
||||||
|
features (e.g., the 'slices' package introduced in Go 1.21).
|
||||||
|
|
||||||
|
OPTIONS:
|
||||||
|
-h, --help Show this help message
|
||||||
|
-s, --sequential Run tests sequentially instead of in parallel
|
||||||
|
-v, --verbose Enable verbose output
|
||||||
|
-o, --output DIR Output directory for test results (default: test-results)
|
||||||
|
-t, --timeout TIME Docker timeout for each test (default: 10m)
|
||||||
|
--list List available Go versions and exit
|
||||||
|
|
||||||
|
ARGUMENTS:
|
||||||
|
GO_VERSIONS Specific Go versions to test (default: all supported versions)
|
||||||
|
Examples: 1.21 1.22 1.23
|
||||||
|
|
||||||
|
EXAMPLES:
|
||||||
|
$0 # Test all Go versions in parallel
|
||||||
|
$0 --sequential # Test all Go versions sequentially
|
||||||
|
$0 1.21 1.22 1.23 # Test specific versions
|
||||||
|
$0 --verbose --output ./results 1.25 1.26 # Verbose output to custom directory
|
||||||
|
|
||||||
|
EXIT CODES:
|
||||||
|
0 Recent Go versions pass (good compatibility)
|
||||||
|
1 Recent Go versions fail (needs attention) or script error
|
||||||
|
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
log() {
|
||||||
|
echo -e "${BLUE}[$(date +'%H:%M:%S')]${NC} $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
log_success() {
|
||||||
|
echo -e "${GREEN}[$(date +'%H:%M:%S')] ✓${NC} $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
log_error() {
|
||||||
|
echo -e "${RED}[$(date +'%H:%M:%S')] ✗${NC} $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
log_warning() {
|
||||||
|
echo -e "${YELLOW}[$(date +'%H:%M:%S')] ⚠${NC} $*" >&2
|
||||||
|
}
|
||||||
|
|
||||||
|
# Parse command line arguments
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
-h|--help)
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
-s|--sequential)
|
||||||
|
PARALLEL=false
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-v|--verbose)
|
||||||
|
VERBOSE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-o|--output)
|
||||||
|
OUTPUT_DIR="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
-t|--timeout)
|
||||||
|
DOCKER_TIMEOUT="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--list)
|
||||||
|
echo "Available Go versions:"
|
||||||
|
printf '%s\n' "${GO_VERSIONS[@]}"
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
-*)
|
||||||
|
echo "Unknown option: $1" >&2
|
||||||
|
usage
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
# Remaining arguments are Go versions
|
||||||
|
break
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# If specific versions provided, use those instead of defaults
|
||||||
|
if [[ $# -gt 0 ]]; then
|
||||||
|
GO_VERSIONS=("$@")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate Go versions
|
||||||
|
for version in "${GO_VERSIONS[@]}"; do
|
||||||
|
if ! [[ "$version" =~ ^1\.(1[1-9]|2[0-6])$ ]]; then
|
||||||
|
log_error "Invalid Go version: $version. Supported versions: 1.11-1.26"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Check if Docker is available
|
||||||
|
if ! command -v docker &> /dev/null; then
|
||||||
|
log_error "Docker is required but not installed or not in PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if Docker daemon is running
|
||||||
|
if ! docker info &> /dev/null; then
|
||||||
|
log_error "Docker daemon is not running"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create output directory
|
||||||
|
mkdir -p "$OUTPUT_DIR"
|
||||||
|
|
||||||
|
# Function to test a single Go version
|
||||||
|
test_go_version() {
|
||||||
|
local go_version="$1"
|
||||||
|
local container_name="go-toml-test-${go_version}"
|
||||||
|
local result_file="${OUTPUT_DIR}/go-${go_version}.txt"
|
||||||
|
local dockerfile_content
|
||||||
|
|
||||||
|
log "Testing Go $go_version..."
|
||||||
|
|
||||||
|
# Create a temporary Dockerfile for this version
|
||||||
|
# For Go versions < 1.21, we need to update go.mod to match the Go version
|
||||||
|
local needs_go_mod_update=false
|
||||||
|
if [[ $(echo "$go_version 1.21" | tr ' ' '\n' | sort -V | head -n1) == "$go_version" && "$go_version" != "1.21" ]]; then
|
||||||
|
needs_go_mod_update=true
|
||||||
|
fi
|
||||||
|
|
||||||
|
dockerfile_content="FROM golang:${go_version}-alpine
|
||||||
|
|
||||||
|
# Install git (required for go mod)
|
||||||
|
RUN apk add --no-cache git
|
||||||
|
|
||||||
|
# Set working directory
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . ."
|
||||||
|
|
||||||
|
# Add go.mod update step for older Go versions
|
||||||
|
if [[ "$needs_go_mod_update" == true ]]; then
|
||||||
|
dockerfile_content="$dockerfile_content
|
||||||
|
|
||||||
|
# Update go.mod to match Go version (required for Go < 1.21)
|
||||||
|
RUN if [ -f go.mod ]; then sed -i 's/^go [0-9]\\+\\.[0-9]\\+\\(\\.[0-9]\\+\\)\\?/go $go_version/' go.mod; fi
|
||||||
|
|
||||||
|
# Note: Go versions < 1.21 may fail due to missing standard library packages (e.g., slices)
|
||||||
|
# This is expected for projects that use Go 1.21+ features"
|
||||||
|
fi
|
||||||
|
|
||||||
|
dockerfile_content="$dockerfile_content
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
CMD [\"sh\", \"-c\", \"go version && echo '--- Running go test ./... ---' && go test ./...\"]"
|
||||||
|
|
||||||
|
# Create temporary directory for this test
|
||||||
|
local temp_dir
|
||||||
|
temp_dir=$(mktemp -d)
|
||||||
|
|
||||||
|
# Copy source to temp directory (excluding test results and git)
|
||||||
|
rsync -a --exclude="$OUTPUT_DIR" --exclude=".git" --exclude="*.test" . "$temp_dir/"
|
||||||
|
|
||||||
|
# Create Dockerfile in temp directory
|
||||||
|
echo "$dockerfile_content" > "$temp_dir/Dockerfile"
|
||||||
|
|
||||||
|
# Build and run container
|
||||||
|
local exit_code=0
|
||||||
|
local output
|
||||||
|
|
||||||
|
if $VERBOSE; then
|
||||||
|
log "Building Docker image for Go $go_version..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Capture both stdout and stderr, and the exit code
|
||||||
|
if output=$(cd "$temp_dir" && timeout "$DOCKER_TIMEOUT" docker build -t "$container_name" . 2>&1 && \
|
||||||
|
timeout "$DOCKER_TIMEOUT" docker run --rm "$container_name" 2>&1); then
|
||||||
|
log_success "Go $go_version: PASSED"
|
||||||
|
echo "PASSED" > "${result_file}.status"
|
||||||
|
else
|
||||||
|
exit_code=$?
|
||||||
|
log_error "Go $go_version: FAILED (exit code: $exit_code)"
|
||||||
|
echo "FAILED" > "${result_file}.status"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Save full output
|
||||||
|
echo "$output" > "$result_file"
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
docker rmi "$container_name" &> /dev/null || true
|
||||||
|
rm -rf "$temp_dir"
|
||||||
|
|
||||||
|
if $VERBOSE; then
|
||||||
|
echo "--- Go $go_version output ---"
|
||||||
|
echo "$output"
|
||||||
|
echo "--- End Go $go_version output ---"
|
||||||
|
fi
|
||||||
|
|
||||||
|
return $exit_code
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to run tests in parallel
|
||||||
|
run_parallel() {
|
||||||
|
local pids=()
|
||||||
|
local failed_versions=()
|
||||||
|
|
||||||
|
log "Starting parallel tests for ${#GO_VERSIONS[@]} Go versions..."
|
||||||
|
|
||||||
|
# Start all tests in background
|
||||||
|
for version in "${GO_VERSIONS[@]}"; do
|
||||||
|
test_go_version "$version" &
|
||||||
|
pids+=($!)
|
||||||
|
done
|
||||||
|
|
||||||
|
# Wait for all tests to complete
|
||||||
|
for i in "${!pids[@]}"; do
|
||||||
|
local pid=${pids[$i]}
|
||||||
|
local version=${GO_VERSIONS[$i]}
|
||||||
|
|
||||||
|
if ! wait $pid; then
|
||||||
|
failed_versions+=("$version")
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
return ${#failed_versions[@]}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Function to run tests sequentially
|
||||||
|
run_sequential() {
|
||||||
|
local failed_versions=()
|
||||||
|
|
||||||
|
log "Starting sequential tests for ${#GO_VERSIONS[@]} Go versions..."
|
||||||
|
|
||||||
|
for version in "${GO_VERSIONS[@]}"; do
|
||||||
|
if ! test_go_version "$version"; then
|
||||||
|
failed_versions+=("$version")
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
return ${#failed_versions[@]}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main execution
|
||||||
|
main() {
|
||||||
|
local start_time
|
||||||
|
start_time=$(date +%s)
|
||||||
|
|
||||||
|
log "Starting Go version compatibility tests..."
|
||||||
|
log "Testing versions: ${GO_VERSIONS[*]}"
|
||||||
|
log "Output directory: $OUTPUT_DIR"
|
||||||
|
log "Parallel execution: $PARALLEL"
|
||||||
|
|
||||||
|
local failed_count
|
||||||
|
if $PARALLEL; then
|
||||||
|
run_parallel
|
||||||
|
failed_count=$?
|
||||||
|
else
|
||||||
|
run_sequential
|
||||||
|
failed_count=$?
|
||||||
|
fi
|
||||||
|
|
||||||
|
local end_time
|
||||||
|
end_time=$(date +%s)
|
||||||
|
local duration=$((end_time - start_time))
|
||||||
|
|
||||||
|
# Collect results for display
|
||||||
|
local passed_versions=()
|
||||||
|
local failed_versions=()
|
||||||
|
local unknown_versions=()
|
||||||
|
local passed_count=0
|
||||||
|
|
||||||
|
for version in "${GO_VERSIONS[@]}"; do
|
||||||
|
local status_file="${OUTPUT_DIR}/go-${version}.txt.status"
|
||||||
|
if [[ -f "$status_file" ]]; then
|
||||||
|
local status
|
||||||
|
status=$(cat "$status_file")
|
||||||
|
if [[ "$status" == "PASSED" ]]; then
|
||||||
|
passed_versions+=("$version")
|
||||||
|
((passed_count++))
|
||||||
|
else
|
||||||
|
failed_versions+=("$version")
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
unknown_versions+=("$version")
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Generate summary report
|
||||||
|
local summary_file="${OUTPUT_DIR}/summary.txt"
|
||||||
|
{
|
||||||
|
echo "Go Version Compatibility Test Summary"
|
||||||
|
echo "====================================="
|
||||||
|
echo "Date: $(date)"
|
||||||
|
echo "Duration: ${duration}s"
|
||||||
|
echo "Parallel: $PARALLEL"
|
||||||
|
echo ""
|
||||||
|
echo "Results:"
|
||||||
|
|
||||||
|
for version in "${GO_VERSIONS[@]}"; do
|
||||||
|
local status_file="${OUTPUT_DIR}/go-${version}.txt.status"
|
||||||
|
if [[ -f "$status_file" ]]; then
|
||||||
|
local status
|
||||||
|
status=$(cat "$status_file")
|
||||||
|
if [[ "$status" == "PASSED" ]]; then
|
||||||
|
echo " Go $version: ✓ PASSED"
|
||||||
|
else
|
||||||
|
echo " Go $version: ✗ FAILED"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo " Go $version: ? UNKNOWN (no status file)"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Summary: $passed_count/${#GO_VERSIONS[@]} versions passed"
|
||||||
|
|
||||||
|
if [[ $failed_count -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "Failed versions details:"
|
||||||
|
for version in "${failed_versions[@]}"; do
|
||||||
|
echo ""
|
||||||
|
echo "--- Go $version (FAILED) ---"
|
||||||
|
local result_file="${OUTPUT_DIR}/go-${version}.txt"
|
||||||
|
if [[ -f "$result_file" ]]; then
|
||||||
|
tail -n 30 "$result_file"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
} > "$summary_file"
|
||||||
|
|
||||||
|
# Find lowest continuous supported version and check recent versions
|
||||||
|
local lowest_continuous_version=""
|
||||||
|
local recent_versions_failed=false
|
||||||
|
|
||||||
|
# Sort versions to ensure proper order
|
||||||
|
local sorted_versions=()
|
||||||
|
for version in "${GO_VERSIONS[@]}"; do
|
||||||
|
sorted_versions+=("$version")
|
||||||
|
done
|
||||||
|
# Sort versions numerically (1.11, 1.12, ..., 1.25)
|
||||||
|
IFS=$'\n' sorted_versions=($(sort -V <<< "${sorted_versions[*]}"))
|
||||||
|
|
||||||
|
# Find lowest continuous supported version (all versions from this point onwards pass)
|
||||||
|
for version in "${sorted_versions[@]}"; do
|
||||||
|
local status_file="${OUTPUT_DIR}/go-${version}.txt.status"
|
||||||
|
local all_subsequent_pass=true
|
||||||
|
|
||||||
|
# Check if this version and all subsequent versions pass
|
||||||
|
local found_current=false
|
||||||
|
for check_version in "${sorted_versions[@]}"; do
|
||||||
|
if [[ "$check_version" == "$version" ]]; then
|
||||||
|
found_current=true
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$found_current" == true ]]; then
|
||||||
|
local check_status_file="${OUTPUT_DIR}/go-${check_version}.txt.status"
|
||||||
|
if [[ -f "$check_status_file" ]]; then
|
||||||
|
local status
|
||||||
|
status=$(cat "$check_status_file")
|
||||||
|
if [[ "$status" != "PASSED" ]]; then
|
||||||
|
all_subsequent_pass=false
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
all_subsequent_pass=false
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ "$all_subsequent_pass" == true ]]; then
|
||||||
|
lowest_continuous_version="$version"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Check if the two most recent versions failed
|
||||||
|
local num_versions=${#sorted_versions[@]}
|
||||||
|
if [[ $num_versions -ge 2 ]]; then
|
||||||
|
local second_recent="${sorted_versions[$((num_versions-2))]}"
|
||||||
|
local most_recent="${sorted_versions[$((num_versions-1))]}"
|
||||||
|
|
||||||
|
local second_recent_status_file="${OUTPUT_DIR}/go-${second_recent}.txt.status"
|
||||||
|
local most_recent_status_file="${OUTPUT_DIR}/go-${most_recent}.txt.status"
|
||||||
|
|
||||||
|
local second_recent_failed=false
|
||||||
|
local most_recent_failed=false
|
||||||
|
|
||||||
|
if [[ -f "$second_recent_status_file" ]]; then
|
||||||
|
local status
|
||||||
|
status=$(cat "$second_recent_status_file")
|
||||||
|
if [[ "$status" != "PASSED" ]]; then
|
||||||
|
second_recent_failed=true
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
second_recent_failed=true
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -f "$most_recent_status_file" ]]; then
|
||||||
|
local status
|
||||||
|
status=$(cat "$most_recent_status_file")
|
||||||
|
if [[ "$status" != "PASSED" ]]; then
|
||||||
|
most_recent_failed=true
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
most_recent_failed=true
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$second_recent_failed" == true || "$most_recent_failed" == true ]]; then
|
||||||
|
recent_versions_failed=true
|
||||||
|
fi
|
||||||
|
elif [[ $num_versions -eq 1 ]]; then
|
||||||
|
# Only one version tested, check if it's the most recent and failed
|
||||||
|
local only_version="${sorted_versions[0]}"
|
||||||
|
local only_status_file="${OUTPUT_DIR}/go-${only_version}.txt.status"
|
||||||
|
|
||||||
|
if [[ -f "$only_status_file" ]]; then
|
||||||
|
local status
|
||||||
|
status=$(cat "$only_status_file")
|
||||||
|
if [[ "$status" != "PASSED" ]]; then
|
||||||
|
recent_versions_failed=true
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
recent_versions_failed=true
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Display summary
|
||||||
|
echo ""
|
||||||
|
log "Test completed in ${duration}s"
|
||||||
|
log "Summary report: $summary_file"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "========================================"
|
||||||
|
echo " FINAL RESULTS"
|
||||||
|
echo "========================================"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Display passed versions
|
||||||
|
if [[ ${#passed_versions[@]} -gt 0 ]]; then
|
||||||
|
log_success "PASSED (${#passed_versions[@]}/${#GO_VERSIONS[@]}):"
|
||||||
|
# Sort passed versions for display
|
||||||
|
local sorted_passed=()
|
||||||
|
for version in "${sorted_versions[@]}"; do
|
||||||
|
for passed_version in "${passed_versions[@]}"; do
|
||||||
|
if [[ "$version" == "$passed_version" ]]; then
|
||||||
|
sorted_passed+=("$version")
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
done
|
||||||
|
for version in "${sorted_passed[@]}"; do
|
||||||
|
echo -e " ${GREEN}✓${NC} Go $version"
|
||||||
|
done
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Display failed versions
|
||||||
|
if [[ ${#failed_versions[@]} -gt 0 ]]; then
|
||||||
|
log_error "FAILED (${#failed_versions[@]}/${#GO_VERSIONS[@]}):"
|
||||||
|
# Sort failed versions for display
|
||||||
|
local sorted_failed=()
|
||||||
|
for version in "${sorted_versions[@]}"; do
|
||||||
|
for failed_version in "${failed_versions[@]}"; do
|
||||||
|
if [[ "$version" == "$failed_version" ]]; then
|
||||||
|
sorted_failed+=("$version")
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
done
|
||||||
|
for version in "${sorted_failed[@]}"; do
|
||||||
|
echo -e " ${RED}✗${NC} Go $version"
|
||||||
|
done
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Show failure details
|
||||||
|
echo "========================================"
|
||||||
|
echo " FAILURE DETAILS"
|
||||||
|
echo "========================================"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
for version in "${sorted_failed[@]}"; do
|
||||||
|
echo -e "${RED}--- Go $version FAILURE LOGS (last 30 lines) ---${NC}"
|
||||||
|
local result_file="${OUTPUT_DIR}/go-${version}.txt"
|
||||||
|
if [[ -f "$result_file" ]]; then
|
||||||
|
tail -n 30 "$result_file" | sed 's/^/ /'
|
||||||
|
else
|
||||||
|
echo " No log file found: $result_file"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Display unknown versions
|
||||||
|
if [[ ${#unknown_versions[@]} -gt 0 ]]; then
|
||||||
|
log_warning "UNKNOWN (${#unknown_versions[@]}/${#GO_VERSIONS[@]}):"
|
||||||
|
for version in "${unknown_versions[@]}"; do
|
||||||
|
echo -e " ${YELLOW}?${NC} Go $version (no status file)"
|
||||||
|
done
|
||||||
|
echo ""
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "========================================"
|
||||||
|
echo " COMPATIBILITY SUMMARY"
|
||||||
|
echo "========================================"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
if [[ -n "$lowest_continuous_version" ]]; then
|
||||||
|
log_success "Lowest continuous supported version: Go $lowest_continuous_version"
|
||||||
|
echo " (All versions from Go $lowest_continuous_version onwards pass)"
|
||||||
|
else
|
||||||
|
log_error "No continuous version support found"
|
||||||
|
echo " (No version has all subsequent versions passing)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "========================================"
|
||||||
|
echo "Full detailed logs available in: $OUTPUT_DIR"
|
||||||
|
echo "========================================"
|
||||||
|
|
||||||
|
# Determine exit code based on recent versions
|
||||||
|
if [[ "$recent_versions_failed" == true ]]; then
|
||||||
|
log_error "OVERALL RESULT: Recent Go versions failed - this needs attention!"
|
||||||
|
if [[ -n "$lowest_continuous_version" ]]; then
|
||||||
|
echo "Note: Continuous support starts from Go $lowest_continuous_version"
|
||||||
|
fi
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
log_success "OVERALL RESULT: Recent Go versions pass - compatibility looks good!"
|
||||||
|
if [[ -n "$lowest_continuous_version" ]]; then
|
||||||
|
echo "Continuous support starts from Go $lowest_continuous_version"
|
||||||
|
fi
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Trap to clean up on exit
#
# Best-effort teardown that runs on every exit path (see `trap cleanup EXIT`
# below): normal completion, the explicit exit calls in main, or a fatal
# signal. Every command tolerates failure (`|| true`, stderr discarded) so
# cleanup never masks the script's real exit status.
cleanup() {
    # Kill any remaining background processes
    # (parallel test jobs started by this shell). `xargs -r` skips the kill
    # entirely when `jobs -p` prints nothing.
    jobs -p | xargs -r kill 2>/dev/null || true

    # Clean up any remaining Docker containers
    # and the per-version test images built by the runs.
    # NOTE(review): `docker ps -q` lists *running* containers only; exited
    # go-toml-test-* containers are not touched here — presumably the runs
    # use `docker run --rm` so stopping is sufficient. Confirm against the
    # run_sequential/run_parallel helpers.
    docker ps -q --filter "name=go-toml-test-" | xargs -r docker stop 2>/dev/null || true
    docker images -q --filter "reference=go-toml-test-*" | xargs -r docker rmi 2>/dev/null || true
}
|
||||||
|
|
||||||
|
# Register the teardown handler before starting any work, so resources are
# released on every exit path — including the explicit `exit 0` / `exit 1`
# inside main and interruption by a signal that triggers EXIT.
trap cleanup EXIT

# Run main function
main
|
||||||
@@ -1,15 +1,16 @@
|
|||||||
//go:generate go run ./cmd/tomltestgen/main.go -o toml_testgen_test.go
|
//go:generate go run github.com/toml-lang/toml-test/cmd/toml-test@v1.6.0 -copy ./tests
|
||||||
|
//go:generate go run ./cmd/tomltestgen/main.go -r v1.6.0 -o toml_testgen_test.go
|
||||||
|
|
||||||
// This is a support file for toml_testgen_test.go
|
|
||||||
package toml_test
|
package toml_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2"
|
"github.com/pelletier/go-toml/v2"
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
"github.com/pelletier/go-toml/v2/internal/testsuite"
|
"github.com/pelletier/go-toml/v2/internal/testsuite"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func testgenInvalid(t *testing.T, input string) {
|
func testgenInvalid(t *testing.T, input string) {
|
||||||
@@ -38,21 +39,22 @@ func testgenValid(t *testing.T, input string, jsonRef string) {
|
|||||||
|
|
||||||
err := testsuite.Unmarshal([]byte(input), &doc)
|
err := testsuite.Unmarshal([]byte(input), &doc)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if de, ok := err.(*toml.DecodeError); ok {
|
de := &toml.DecodeError{}
|
||||||
|
if errors.As(err, &de) {
|
||||||
t.Logf("%s\n%s", err, de)
|
t.Logf("%s\n%s", err, de)
|
||||||
}
|
}
|
||||||
t.Fatalf("failed parsing toml: %s", err)
|
t.Fatalf("failed parsing toml: %s", err)
|
||||||
}
|
}
|
||||||
j, err := testsuite.ValueToTaggedJSON(doc)
|
j, err := testsuite.ValueToTaggedJSON(doc)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
var ref interface{}
|
var ref interface{}
|
||||||
err = json.Unmarshal([]byte(jsonRef), &ref)
|
err = json.Unmarshal([]byte(jsonRef), &ref)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
var actual interface{}
|
var actual interface{}
|
||||||
err = json.Unmarshal([]byte(j), &actual)
|
err = json.Unmarshal(j, &actual)
|
||||||
require.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
testsuite.CmpJSON(t, "", ref, actual)
|
testsuite.CmpJSON(t, "", ref, actual)
|
||||||
}
|
}
|
||||||
|
|||||||
+1634
-433
File diff suppressed because it is too large
Load Diff
@@ -6,9 +6,18 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
var timeType = reflect.TypeOf(time.Time{})
|
// isZeroer is used to check if a type has a custom IsZero method.
|
||||||
var textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem()
|
// This allows custom types to define their own zero-value semantics.
|
||||||
var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
|
type isZeroer interface {
|
||||||
var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{})
|
IsZero() bool
|
||||||
var sliceInterfaceType = reflect.TypeOf([]interface{}{})
|
}
|
||||||
var stringType = reflect.TypeOf("")
|
|
||||||
|
var (
|
||||||
|
timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
|
||||||
|
textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
|
||||||
|
textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||||
|
isZeroerType = reflect.TypeOf((*isZeroer)(nil)).Elem()
|
||||||
|
mapStringInterfaceType = reflect.TypeOf(map[string]interface{}(nil))
|
||||||
|
sliceInterfaceType = reflect.TypeOf([]interface{}(nil))
|
||||||
|
stringType = reflect.TypeOf("")
|
||||||
|
)
|
||||||
|
|||||||
+364
-133
@@ -5,26 +5,23 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
|
||||||
"math"
|
"math"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/tracker"
|
"github.com/pelletier/go-toml/v2/internal/tracker"
|
||||||
|
"github.com/pelletier/go-toml/v2/unstable"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Unmarshal deserializes a TOML document into a Go value.
|
// Unmarshal deserializes a TOML document into a Go value.
|
||||||
//
|
//
|
||||||
// It is a shortcut for Decoder.Decode() with the default options.
|
// It is a shortcut for Decoder.Decode() with the default options.
|
||||||
func Unmarshal(data []byte, v interface{}) error {
|
func Unmarshal(data []byte, v interface{}) error {
|
||||||
p := parser{}
|
d := decoder{}
|
||||||
p.Reset(data)
|
d.p.Reset(data)
|
||||||
d := decoder{p: &p}
|
|
||||||
|
|
||||||
return d.FromParser(v)
|
return d.FromParser(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -35,6 +32,9 @@ type Decoder struct {
|
|||||||
|
|
||||||
// global settings
|
// global settings
|
||||||
strict bool
|
strict bool
|
||||||
|
|
||||||
|
// toggles unmarshaler interface
|
||||||
|
unmarshalerInterface bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewDecoder creates a new Decoder that will read from r.
|
// NewDecoder creates a new Decoder that will read from r.
|
||||||
@@ -54,13 +54,36 @@ func (d *Decoder) DisallowUnknownFields() *Decoder {
|
|||||||
return d
|
return d
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// EnableUnmarshalerInterface allows to enable unmarshaler interface.
|
||||||
|
//
|
||||||
|
// With this feature enabled, types implementing the unstable.Unmarshaler
|
||||||
|
// interface can be decoded from any structure of the document. It allows types
|
||||||
|
// that don't have a straightforward TOML representation to provide their own
|
||||||
|
// decoding logic.
|
||||||
|
//
|
||||||
|
// The UnmarshalTOML method receives raw TOML bytes:
|
||||||
|
// - For single values: the raw value bytes (e.g., `"hello"` for a string)
|
||||||
|
// - For tables: all key-value lines belonging to that table
|
||||||
|
// - For inline tables/arrays: the raw bytes of the inline structure
|
||||||
|
//
|
||||||
|
// The unstable.RawMessage type can be used to capture raw TOML bytes for
|
||||||
|
// later processing, similar to json.RawMessage.
|
||||||
|
//
|
||||||
|
// *Unstable:* This method does not follow the compatibility guarantees of
|
||||||
|
// semver. It can be changed or removed without a new major version being
|
||||||
|
// issued.
|
||||||
|
func (d *Decoder) EnableUnmarshalerInterface() *Decoder {
|
||||||
|
d.unmarshalerInterface = true
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
// Decode the whole content of r into v.
|
// Decode the whole content of r into v.
|
||||||
//
|
//
|
||||||
// By default, values in the document that don't exist in the target Go value
|
// By default, values in the document that don't exist in the target Go value
|
||||||
// are ignored. See Decoder.DisallowUnknownFields() to change this behavior.
|
// are ignored. See Decoder.DisallowUnknownFields() to change this behavior.
|
||||||
//
|
//
|
||||||
// When a TOML local date, time, or date-time is decoded into a time.Time, its
|
// When a TOML local date, time, or date-time is decoded into a time.Time, its
|
||||||
// value is represented in time.Local timezone. Otherwise the approriate Local*
|
// value is represented in time.Local timezone. Otherwise the appropriate Local*
|
||||||
// structure is used. For time values, precision up to the nanosecond is
|
// structure is used. For time values, precision up to the nanosecond is
|
||||||
// supported by truncating extra digits.
|
// supported by truncating extra digits.
|
||||||
//
|
//
|
||||||
@@ -96,26 +119,26 @@ func (d *Decoder) DisallowUnknownFields() *Decoder {
|
|||||||
// Inline Table -> same as Table
|
// Inline Table -> same as Table
|
||||||
// Array of Tables -> same as Array and Table
|
// Array of Tables -> same as Array and Table
|
||||||
func (d *Decoder) Decode(v interface{}) error {
|
func (d *Decoder) Decode(v interface{}) error {
|
||||||
b, err := ioutil.ReadAll(d.r)
|
b, err := io.ReadAll(d.r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("toml: %w", err)
|
return fmt.Errorf("toml: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
p := parser{}
|
|
||||||
p.Reset(b)
|
|
||||||
dec := decoder{
|
dec := decoder{
|
||||||
p: &p,
|
|
||||||
strict: strict{
|
strict: strict{
|
||||||
Enabled: d.strict,
|
Enabled: d.strict,
|
||||||
|
doc: b,
|
||||||
},
|
},
|
||||||
|
unmarshalerInterface: d.unmarshalerInterface,
|
||||||
}
|
}
|
||||||
|
dec.p.Reset(b)
|
||||||
|
|
||||||
return dec.FromParser(v)
|
return dec.FromParser(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
type decoder struct {
|
type decoder struct {
|
||||||
// Which parser instance in use for this decoding session.
|
// Which parser instance in use for this decoding session.
|
||||||
p *parser
|
p unstable.Parser
|
||||||
|
|
||||||
// Flag indicating that the current expression is stashed.
|
// Flag indicating that the current expression is stashed.
|
||||||
// If set to true, calling nextExpr will not actually pull a new expression
|
// If set to true, calling nextExpr will not actually pull a new expression
|
||||||
@@ -127,6 +150,10 @@ type decoder struct {
|
|||||||
// need to be skipped.
|
// need to be skipped.
|
||||||
skipUntilTable bool
|
skipUntilTable bool
|
||||||
|
|
||||||
|
// Flag indicating that the current array/slice table should be cleared because
|
||||||
|
// it is the first encounter of an array table.
|
||||||
|
clearArrayTable bool
|
||||||
|
|
||||||
// Tracks position in Go arrays.
|
// Tracks position in Go arrays.
|
||||||
// This is used when decoding [[array tables]] into Go arrays. Given array
|
// This is used when decoding [[array tables]] into Go arrays. Given array
|
||||||
// tables are separate TOML expression, we need to keep track of where we
|
// tables are separate TOML expression, we need to keep track of where we
|
||||||
@@ -139,6 +166,9 @@ type decoder struct {
|
|||||||
// Strict mode
|
// Strict mode
|
||||||
strict strict
|
strict strict
|
||||||
|
|
||||||
|
// Flag that enables/disables unmarshaler interface.
|
||||||
|
unmarshalerInterface bool
|
||||||
|
|
||||||
// Current context for the error.
|
// Current context for the error.
|
||||||
errorContext *errorContext
|
errorContext *errorContext
|
||||||
}
|
}
|
||||||
@@ -149,15 +179,19 @@ type errorContext struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) typeMismatchError(toml string, target reflect.Type) error {
|
func (d *decoder) typeMismatchError(toml string, target reflect.Type) error {
|
||||||
|
return fmt.Errorf("toml: %s", d.typeMismatchString(toml, target))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) typeMismatchString(toml string, target reflect.Type) string {
|
||||||
if d.errorContext != nil && d.errorContext.Struct != nil {
|
if d.errorContext != nil && d.errorContext.Struct != nil {
|
||||||
ctx := d.errorContext
|
ctx := d.errorContext
|
||||||
f := ctx.Struct.FieldByIndex(ctx.Field)
|
f := ctx.Struct.FieldByIndex(ctx.Field)
|
||||||
return fmt.Errorf("toml: cannot decode TOML %s into struct field %s.%s of type %s", toml, ctx.Struct, f.Name, f.Type)
|
return fmt.Sprintf("cannot decode TOML %s into struct field %s.%s of type %s", toml, ctx.Struct, f.Name, f.Type)
|
||||||
}
|
}
|
||||||
return fmt.Errorf("toml: cannot decode TOML %s into a Go value of type %s", toml, target)
|
return fmt.Sprintf("cannot decode TOML %s into a Go value of type %s", toml, target)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) expr() *ast.Node {
|
func (d *decoder) expr() *unstable.Node {
|
||||||
return d.p.Expression()
|
return d.p.Expression()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -197,7 +231,7 @@ func (d *decoder) FromParser(v interface{}) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if r.IsNil() {
|
if r.IsNil() {
|
||||||
return fmt.Errorf("toml: decoding pointer target cannot be nil")
|
return errors.New("toml: decoding pointer target cannot be nil")
|
||||||
}
|
}
|
||||||
|
|
||||||
r = r.Elem()
|
r = r.Elem()
|
||||||
@@ -208,12 +242,12 @@ func (d *decoder) FromParser(v interface{}) error {
|
|||||||
|
|
||||||
err := d.fromParser(r)
|
err := d.fromParser(r)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
return d.strict.Error(d.p.data)
|
return d.strict.Error(d.p.Data())
|
||||||
}
|
}
|
||||||
|
|
||||||
var e *decodeError
|
var e *unstable.ParserError
|
||||||
if errors.As(err, &e) {
|
if errors.As(err, &e) {
|
||||||
return wrapDecodeError(d.p.data, e)
|
return wrapDecodeError(d.p.Data(), e)
|
||||||
}
|
}
|
||||||
|
|
||||||
return err
|
return err
|
||||||
@@ -234,42 +268,44 @@ func (d *decoder) fromParser(root reflect.Value) error {
|
|||||||
Rules for the unmarshal code:
|
Rules for the unmarshal code:
|
||||||
|
|
||||||
- The stack is used to keep track of which values need to be set where.
|
- The stack is used to keep track of which values need to be set where.
|
||||||
- handle* functions <=> switch on a given ast.Kind.
|
- handle* functions <=> switch on a given unstable.Kind.
|
||||||
- unmarshalX* functions need to unmarshal a node of kind X.
|
- unmarshalX* functions need to unmarshal a node of kind X.
|
||||||
- An "object" is either a struct or a map.
|
- An "object" is either a struct or a map.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) error {
|
||||||
var x reflect.Value
|
var x reflect.Value
|
||||||
var err error
|
var err error
|
||||||
|
var first bool // used for to clear array tables on first use
|
||||||
|
|
||||||
if !(d.skipUntilTable && expr.Kind == ast.KeyValue) {
|
if !d.skipUntilTable || expr.Kind != unstable.KeyValue {
|
||||||
err = d.seen.CheckExpression(expr)
|
first, err = d.seen.CheckExpression(expr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
switch expr.Kind {
|
switch expr.Kind {
|
||||||
case ast.KeyValue:
|
case unstable.KeyValue:
|
||||||
if d.skipUntilTable {
|
if d.skipUntilTable {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
x, err = d.handleKeyValue(expr, v)
|
x, err = d.handleKeyValue(expr, v)
|
||||||
case ast.Table:
|
case unstable.Table:
|
||||||
d.skipUntilTable = false
|
d.skipUntilTable = false
|
||||||
d.strict.EnterTable(expr)
|
d.strict.EnterTable(expr)
|
||||||
x, err = d.handleTable(expr.Key(), v)
|
x, err = d.handleTable(expr.Key(), v)
|
||||||
case ast.ArrayTable:
|
case unstable.ArrayTable:
|
||||||
d.skipUntilTable = false
|
d.skipUntilTable = false
|
||||||
d.strict.EnterArrayTable(expr)
|
d.strict.EnterArrayTable(expr)
|
||||||
|
d.clearArrayTable = first
|
||||||
x, err = d.handleArrayTable(expr.Key(), v)
|
x, err = d.handleArrayTable(expr.Key(), v)
|
||||||
default:
|
default:
|
||||||
panic(fmt.Errorf("parser should not permit expression of kind %s at document root", expr.Kind))
|
panic(fmt.Errorf("parser should not permit expression of kind %s at document root", expr.Kind))
|
||||||
}
|
}
|
||||||
|
|
||||||
if d.skipUntilTable {
|
if d.skipUntilTable {
|
||||||
if expr.Kind == ast.Table || expr.Kind == ast.ArrayTable {
|
if expr.Kind == unstable.Table || expr.Kind == unstable.ArrayTable {
|
||||||
d.strict.MissingTable(expr)
|
d.strict.MissingTable(expr)
|
||||||
}
|
}
|
||||||
} else if err == nil && x.IsValid() {
|
} else if err == nil && x.IsValid() {
|
||||||
@@ -279,14 +315,14 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleArrayTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
if key.Next() {
|
if key.Next() {
|
||||||
return d.handleArrayTablePart(key, v)
|
return d.handleArrayTablePart(key, v)
|
||||||
}
|
}
|
||||||
return d.handleKeyValues(v)
|
return d.handleKeyValues(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
elem := v.Elem()
|
elem := v.Elem()
|
||||||
@@ -303,6 +339,10 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||||||
reflect.Copy(nelem, elem)
|
reflect.Copy(nelem, elem)
|
||||||
elem = nelem
|
elem = nelem
|
||||||
}
|
}
|
||||||
|
if d.clearArrayTable && elem.Len() > 0 {
|
||||||
|
elem.SetLen(0)
|
||||||
|
d.clearArrayTable = false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return d.handleArrayTableCollectionLast(key, elem)
|
return d.handleArrayTableCollectionLast(key, elem)
|
||||||
case reflect.Ptr:
|
case reflect.Ptr:
|
||||||
@@ -321,6 +361,10 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||||||
|
|
||||||
return v, nil
|
return v, nil
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
|
if d.clearArrayTable && v.Len() > 0 {
|
||||||
|
v.SetLen(0)
|
||||||
|
d.clearArrayTable = false
|
||||||
|
}
|
||||||
elemType := v.Type().Elem()
|
elemType := v.Type().Elem()
|
||||||
var elem reflect.Value
|
var elem reflect.Value
|
||||||
if elemType.Kind() == reflect.Interface {
|
if elemType.Kind() == reflect.Interface {
|
||||||
@@ -339,13 +383,13 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||||||
case reflect.Array:
|
case reflect.Array:
|
||||||
idx := d.arrayIndex(true, v)
|
idx := d.arrayIndex(true, v)
|
||||||
if idx >= v.Len() {
|
if idx >= v.Len() {
|
||||||
return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx)
|
return v, fmt.Errorf("%w at position %d", d.typeMismatchError("array table", v.Type()), idx)
|
||||||
}
|
}
|
||||||
elem := v.Index(idx)
|
elem := v.Index(idx)
|
||||||
_, err := d.handleArrayTable(key, elem)
|
_, err := d.handleArrayTable(key, elem)
|
||||||
return v, err
|
return v, err
|
||||||
default:
|
default:
|
||||||
return reflect.Value{}, fmt.Errorf("toml: cannot decode array table into a %s", v.Type())
|
return reflect.Value{}, d.typeMismatchError("array table", v.Type())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -353,7 +397,7 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||||||
// evaluated like a normal key, but if it returns a collection, it also needs to
|
// evaluated like a normal key, but if it returns a collection, it also needs to
|
||||||
// point to the last element of the collection. Unless it is the last part of
|
// point to the last element of the collection. Unless it is the last part of
|
||||||
// the key, then it needs to create a new element at the end.
|
// the key, then it needs to create a new element at the end.
|
||||||
func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTableCollection(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
if key.IsLast() {
|
if key.IsLast() {
|
||||||
return d.handleArrayTableCollectionLast(key, v)
|
return d.handleArrayTableCollectionLast(key, v)
|
||||||
}
|
}
|
||||||
@@ -377,30 +421,54 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
|
|||||||
|
|
||||||
return v, nil
|
return v, nil
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
elem := v.Index(v.Len() - 1)
|
// Create a new element when the slice is empty; otherwise operate on
|
||||||
|
// the last element.
|
||||||
|
var (
|
||||||
|
elem reflect.Value
|
||||||
|
created bool
|
||||||
|
)
|
||||||
|
if v.Len() == 0 {
|
||||||
|
created = true
|
||||||
|
elemType := v.Type().Elem()
|
||||||
|
if elemType.Kind() == reflect.Interface {
|
||||||
|
elem = makeMapStringInterface()
|
||||||
|
} else {
|
||||||
|
elem = reflect.New(elemType).Elem()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
elem = v.Index(v.Len() - 1)
|
||||||
|
}
|
||||||
|
|
||||||
x, err := d.handleArrayTable(key, elem)
|
x, err := d.handleArrayTable(key, elem)
|
||||||
if err != nil || d.skipUntilTable {
|
if err != nil || d.skipUntilTable {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
}
|
}
|
||||||
if x.IsValid() {
|
if x.IsValid() {
|
||||||
elem.Set(x)
|
if created {
|
||||||
|
elem = x
|
||||||
|
} else {
|
||||||
|
elem.Set(x)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if created {
|
||||||
|
return reflect.Append(v, elem), nil
|
||||||
|
}
|
||||||
return v, err
|
return v, err
|
||||||
case reflect.Array:
|
case reflect.Array:
|
||||||
idx := d.arrayIndex(false, v)
|
idx := d.arrayIndex(false, v)
|
||||||
if idx >= v.Len() {
|
if idx >= v.Len() {
|
||||||
return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx)
|
return v, fmt.Errorf("%w at position %d", d.typeMismatchError("array table", v.Type()), idx)
|
||||||
}
|
}
|
||||||
elem := v.Index(idx)
|
elem := v.Index(idx)
|
||||||
_, err := d.handleArrayTable(key, elem)
|
_, err := d.handleArrayTable(key, elem)
|
||||||
return v, err
|
return v, err
|
||||||
|
default:
|
||||||
|
return d.handleArrayTable(key, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
return d.handleArrayTable(key, v)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
|
func (d *decoder) handleKeyPart(key unstable.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
|
||||||
var rv reflect.Value
|
var rv reflect.Value
|
||||||
|
|
||||||
// First, dispatch over v to make sure it is a valid object.
|
// First, dispatch over v to make sure it is a valid object.
|
||||||
@@ -417,7 +485,10 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
vt := v.Type()
|
vt := v.Type()
|
||||||
|
|
||||||
// Create the key for the map element. Convert to key type.
|
// Create the key for the map element. Convert to key type.
|
||||||
mk := reflect.ValueOf(string(key.Node().Data)).Convert(vt.Key())
|
mk, err := d.keyFromData(vt.Key(), key.Node().Data)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
|
||||||
// If the map does not exist, create it.
|
// If the map does not exist, create it.
|
||||||
if v.IsNil() {
|
if v.IsNil() {
|
||||||
@@ -428,7 +499,8 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
|
|
||||||
mv := v.MapIndex(mk)
|
mv := v.MapIndex(mk)
|
||||||
set := false
|
set := false
|
||||||
if !mv.IsValid() {
|
switch {
|
||||||
|
case !mv.IsValid():
|
||||||
// If there is no value in the map, create a new one according to
|
// If there is no value in the map, create a new one according to
|
||||||
// the map type. If the element type is interface, create either a
|
// the map type. If the element type is interface, create either a
|
||||||
// map[string]interface{} or a []interface{} depending on whether
|
// map[string]interface{} or a []interface{} depending on whether
|
||||||
@@ -441,13 +513,13 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
mv = reflect.New(t).Elem()
|
mv = reflect.New(t).Elem()
|
||||||
}
|
}
|
||||||
set = true
|
set = true
|
||||||
} else if mv.Kind() == reflect.Interface {
|
case mv.Kind() == reflect.Interface:
|
||||||
mv = mv.Elem()
|
mv = mv.Elem()
|
||||||
if !mv.IsValid() {
|
if !mv.IsValid() {
|
||||||
mv = makeFn()
|
mv = makeFn()
|
||||||
}
|
}
|
||||||
set = true
|
set = true
|
||||||
} else if !mv.CanAddr() {
|
case !mv.CanAddr():
|
||||||
vt := v.Type()
|
vt := v.Type()
|
||||||
t := vt.Elem()
|
t := vt.Elem()
|
||||||
oldmv := mv
|
oldmv := mv
|
||||||
@@ -518,7 +590,7 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||||||
// HandleArrayTablePart navigates the Go structure v using the key v. It is
|
// HandleArrayTablePart navigates the Go structure v using the key v. It is
|
||||||
// only used for the prefix (non-last) parts of an array-table. When
|
// only used for the prefix (non-last) parts of an array-table. When
|
||||||
// encountering a collection, it should go to the last element.
|
// encountering a collection, it should go to the last element.
|
||||||
func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleArrayTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
var makeFn valueMakerFn
|
var makeFn valueMakerFn
|
||||||
if key.IsLast() {
|
if key.IsLast() {
|
||||||
makeFn = makeSliceInterface
|
makeFn = makeSliceInterface
|
||||||
@@ -530,20 +602,30 @@ func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (refle
|
|||||||
|
|
||||||
// HandleTable returns a reference when it has checked the next expression but
|
// HandleTable returns a reference when it has checked the next expression but
|
||||||
// cannot handle it.
|
// cannot handle it.
|
||||||
func (d *decoder) handleTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
if v.Kind() == reflect.Slice {
|
if v.Kind() == reflect.Slice {
|
||||||
if v.Len() == 0 {
|
// For non-empty slices, work with the last element
|
||||||
return reflect.Value{}, newDecodeError(key.Node().Data, "cannot store a table in a slice")
|
if v.Len() > 0 {
|
||||||
|
elem := v.Index(v.Len() - 1)
|
||||||
|
x, err := d.handleTable(key, elem)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
if x.IsValid() {
|
||||||
|
elem.Set(x)
|
||||||
|
}
|
||||||
|
return reflect.Value{}, nil
|
||||||
}
|
}
|
||||||
elem := v.Index(v.Len() - 1)
|
// Empty slice - check if it implements Unmarshaler (e.g., RawMessage)
|
||||||
x, err := d.handleTable(key, elem)
|
// and we're at the end of the key path
|
||||||
if err != nil {
|
if d.unmarshalerInterface && !key.Next() {
|
||||||
return reflect.Value{}, err
|
if v.CanAddr() && v.Addr().CanInterface() {
|
||||||
|
if outi, ok := v.Addr().Interface().(unstable.Unmarshaler); ok {
|
||||||
|
return d.handleKeyValuesUnmarshaler(outi)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if x.IsValid() {
|
return reflect.Value{}, unstable.NewParserError(key.Node().Data, "cannot store a table in a slice")
|
||||||
elem.Set(x)
|
|
||||||
}
|
|
||||||
return reflect.Value{}, nil
|
|
||||||
}
|
}
|
||||||
if key.Next() {
|
if key.Next() {
|
||||||
// Still scoping the key
|
// Still scoping the key
|
||||||
@@ -557,10 +639,28 @@ func (d *decoder) handleTable(key ast.Iterator, v reflect.Value) (reflect.Value,
|
|||||||
// Handle root expressions until the end of the document or the next
|
// Handle root expressions until the end of the document or the next
|
||||||
// non-key-value.
|
// non-key-value.
|
||||||
func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
||||||
|
// Check if target implements Unmarshaler before processing key-values.
|
||||||
|
// This allows types to handle entire tables themselves.
|
||||||
|
if d.unmarshalerInterface {
|
||||||
|
vv := v
|
||||||
|
for vv.Kind() == reflect.Ptr {
|
||||||
|
if vv.IsNil() {
|
||||||
|
vv.Set(reflect.New(vv.Type().Elem()))
|
||||||
|
}
|
||||||
|
vv = vv.Elem()
|
||||||
|
}
|
||||||
|
if vv.CanAddr() && vv.Addr().CanInterface() {
|
||||||
|
if outi, ok := vv.Addr().Interface().(unstable.Unmarshaler); ok {
|
||||||
|
// Collect all key-value expressions for this table
|
||||||
|
return d.handleKeyValuesUnmarshaler(outi)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
var rv reflect.Value
|
var rv reflect.Value
|
||||||
for d.nextExpr() {
|
for d.nextExpr() {
|
||||||
expr := d.expr()
|
expr := d.expr()
|
||||||
if expr.Kind != ast.KeyValue {
|
if expr.Kind != unstable.KeyValue {
|
||||||
// Stash the expression so that fromParser can just loop and use
|
// Stash the expression so that fromParser can just loop and use
|
||||||
// the right handler.
|
// the right handler.
|
||||||
// We could just recurse ourselves here, but at least this gives a
|
// We could just recurse ourselves here, but at least this gives a
|
||||||
@@ -569,7 +669,7 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
err := d.seen.CheckExpression(expr)
|
_, err := d.seen.CheckExpression(expr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
}
|
}
|
||||||
@@ -586,8 +686,43 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
|||||||
return rv, nil
|
return rv, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// handleKeyValuesUnmarshaler collects all key-value expressions for a table
|
||||||
|
// and passes them to the Unmarshaler as raw TOML bytes.
|
||||||
|
func (d *decoder) handleKeyValuesUnmarshaler(u unstable.Unmarshaler) (reflect.Value, error) {
|
||||||
|
// Collect raw bytes from all key-value expressions for this table.
|
||||||
|
// We use the Raw field on each KeyValue expression to preserve the
|
||||||
|
// original formatting (whitespace, quoting style, etc.) from the document.
|
||||||
|
var buf []byte
|
||||||
|
|
||||||
|
for d.nextExpr() {
|
||||||
|
expr := d.expr()
|
||||||
|
if expr.Kind != unstable.KeyValue {
|
||||||
|
d.stashExpr()
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := d.seen.CheckExpression(expr)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the raw bytes from the original document to preserve formatting
|
||||||
|
if expr.Raw.Length > 0 {
|
||||||
|
raw := d.p.Raw(expr.Raw)
|
||||||
|
buf = append(buf, raw...)
|
||||||
|
}
|
||||||
|
buf = append(buf, '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := u.UnmarshalTOML(buf); err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return reflect.Value{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
type (
|
type (
|
||||||
handlerFn func(key ast.Iterator, v reflect.Value) (reflect.Value, error)
|
handlerFn func(key unstable.Iterator, v reflect.Value) (reflect.Value, error)
|
||||||
valueMakerFn func() reflect.Value
|
valueMakerFn func() reflect.Value
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -599,11 +734,11 @@ func makeSliceInterface() reflect.Value {
|
|||||||
return reflect.MakeSlice(sliceInterfaceType, 0, 16)
|
return reflect.MakeSlice(sliceInterfaceType, 0, 16)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||||
return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface)
|
return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, error) {
|
func (d *decoder) tryTextUnmarshaler(node *unstable.Node, v reflect.Value) (bool, error) {
|
||||||
// Special case for time, because we allow to unmarshal to it from
|
// Special case for time, because we allow to unmarshal to it from
|
||||||
// different kind of AST nodes.
|
// different kind of AST nodes.
|
||||||
if v.Type() == timeType {
|
if v.Type() == timeType {
|
||||||
@@ -613,7 +748,7 @@ func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, err
|
|||||||
if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) {
|
if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) {
|
||||||
err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data)
|
err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, newDecodeError(d.p.Raw(node.Raw), "%w", err)
|
return false, unstable.NewParserError(d.p.Raw(node.Raw), "%w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
@@ -622,43 +757,58 @@ func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, err
|
|||||||
return false, nil
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleValue(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) handleValue(value *unstable.Node, v reflect.Value) error {
|
||||||
for v.Kind() == reflect.Ptr {
|
for v.Kind() == reflect.Ptr {
|
||||||
v = initAndDereferencePointer(v)
|
v = initAndDereferencePointer(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
ok, err := d.tryTextUnmarshaler(value, v)
|
if d.unmarshalerInterface {
|
||||||
if ok || err != nil {
|
if v.CanAddr() && v.Addr().CanInterface() {
|
||||||
return err
|
if outi, ok := v.Addr().Interface().(unstable.Unmarshaler); ok {
|
||||||
|
// Pass raw bytes from the original document
|
||||||
|
return outi.UnmarshalTOML(d.p.Raw(value.Raw))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only try TextUnmarshaler for scalar types. For Array and InlineTable,
|
||||||
|
// fall through to struct/map unmarshaling to allow flexible unmarshaling
|
||||||
|
// where a type can implement UnmarshalText for string values but still
|
||||||
|
// be populated field-by-field from a table. See issue #974.
|
||||||
|
if value.Kind != unstable.Array && value.Kind != unstable.InlineTable {
|
||||||
|
ok, err := d.tryTextUnmarshaler(value, v)
|
||||||
|
if ok || err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
switch value.Kind {
|
switch value.Kind {
|
||||||
case ast.String:
|
case unstable.String:
|
||||||
return d.unmarshalString(value, v)
|
return d.unmarshalString(value, v)
|
||||||
case ast.Integer:
|
case unstable.Integer:
|
||||||
return d.unmarshalInteger(value, v)
|
return d.unmarshalInteger(value, v)
|
||||||
case ast.Float:
|
case unstable.Float:
|
||||||
return d.unmarshalFloat(value, v)
|
return d.unmarshalFloat(value, v)
|
||||||
case ast.Bool:
|
case unstable.Bool:
|
||||||
return d.unmarshalBool(value, v)
|
return d.unmarshalBool(value, v)
|
||||||
case ast.DateTime:
|
case unstable.DateTime:
|
||||||
return d.unmarshalDateTime(value, v)
|
return d.unmarshalDateTime(value, v)
|
||||||
case ast.LocalDate:
|
case unstable.LocalDate:
|
||||||
return d.unmarshalLocalDate(value, v)
|
return d.unmarshalLocalDate(value, v)
|
||||||
case ast.LocalTime:
|
case unstable.LocalTime:
|
||||||
return d.unmarshalLocalTime(value, v)
|
return d.unmarshalLocalTime(value, v)
|
||||||
case ast.LocalDateTime:
|
case unstable.LocalDateTime:
|
||||||
return d.unmarshalLocalDateTime(value, v)
|
return d.unmarshalLocalDateTime(value, v)
|
||||||
case ast.InlineTable:
|
case unstable.InlineTable:
|
||||||
return d.unmarshalInlineTable(value, v)
|
return d.unmarshalInlineTable(value, v)
|
||||||
case ast.Array:
|
case unstable.Array:
|
||||||
return d.unmarshalArray(value, v)
|
return d.unmarshalArray(value, v)
|
||||||
default:
|
default:
|
||||||
panic(fmt.Errorf("handleValue not implemented for %s", value.Kind))
|
panic(fmt.Errorf("handleValue not implemented for %s", value.Kind))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalArray(array *unstable.Node, v reflect.Value) error {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
if v.IsNil() {
|
if v.IsNil() {
|
||||||
@@ -729,7 +879,7 @@ func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalInlineTable(itable *unstable.Node, v reflect.Value) error {
|
||||||
// Make sure v is an initialized object.
|
// Make sure v is an initialized object.
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.Map:
|
case reflect.Map:
|
||||||
@@ -746,7 +896,7 @@ func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error
|
|||||||
}
|
}
|
||||||
return d.unmarshalInlineTable(itable, elem)
|
return d.unmarshalInlineTable(itable, elem)
|
||||||
default:
|
default:
|
||||||
return newDecodeError(itable.Data, "cannot store inline table in Go type %s", v.Kind())
|
return unstable.NewParserError(d.p.Raw(itable.Raw), "cannot store inline table in Go type %s", v.Kind())
|
||||||
}
|
}
|
||||||
|
|
||||||
it := itable.Children()
|
it := itable.Children()
|
||||||
@@ -765,70 +915,75 @@ func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalDateTime(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalDateTime(value *unstable.Node, v reflect.Value) error {
|
||||||
dt, err := parseDateTime(value.Data)
|
dt, err := parseDateTime(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Interface && v.Type() != timeType {
|
||||||
|
return unstable.NewParserError(d.p.Raw(value.Raw), "%s", d.typeMismatchString("datetime", v.Type()))
|
||||||
|
}
|
||||||
v.Set(reflect.ValueOf(dt))
|
v.Set(reflect.ValueOf(dt))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalLocalDate(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalLocalDate(value *unstable.Node, v reflect.Value) error {
|
||||||
ld, err := parseLocalDate(value.Data)
|
ld, err := parseLocalDate(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Interface && v.Type() != timeType {
|
||||||
|
return unstable.NewParserError(d.p.Raw(value.Raw), "%s", d.typeMismatchString("local date", v.Type()))
|
||||||
|
}
|
||||||
if v.Type() == timeType {
|
if v.Type() == timeType {
|
||||||
cast := ld.AsTime(time.Local)
|
v.Set(reflect.ValueOf(ld.AsTime(time.Local)))
|
||||||
v.Set(reflect.ValueOf(cast))
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
v.Set(reflect.ValueOf(ld))
|
v.Set(reflect.ValueOf(ld))
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalLocalTime(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalLocalTime(value *unstable.Node, v reflect.Value) error {
|
||||||
lt, rest, err := parseLocalTime(value.Data)
|
lt, rest, err := parseLocalTime(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(rest) > 0 {
|
if len(rest) > 0 {
|
||||||
return newDecodeError(rest, "extra characters at the end of a local time")
|
return unstable.NewParserError(rest, "extra characters at the end of a local time")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Interface {
|
||||||
|
return unstable.NewParserError(d.p.Raw(value.Raw), "%s", d.typeMismatchString("local time", v.Type()))
|
||||||
|
}
|
||||||
v.Set(reflect.ValueOf(lt))
|
v.Set(reflect.ValueOf(lt))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalLocalDateTime(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalLocalDateTime(value *unstable.Node, v reflect.Value) error {
|
||||||
ldt, rest, err := parseLocalDateTime(value.Data)
|
ldt, rest, err := parseLocalDateTime(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(rest) > 0 {
|
if len(rest) > 0 {
|
||||||
return newDecodeError(rest, "extra characters at the end of a local date time")
|
return unstable.NewParserError(rest, "extra characters at the end of a local date time")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if v.Kind() != reflect.Interface && v.Type() != timeType {
|
||||||
|
return unstable.NewParserError(d.p.Raw(value.Raw), "%s", d.typeMismatchString("local datetime", v.Type()))
|
||||||
|
}
|
||||||
if v.Type() == timeType {
|
if v.Type() == timeType {
|
||||||
cast := ldt.AsTime(time.Local)
|
v.Set(reflect.ValueOf(ldt.AsTime(time.Local)))
|
||||||
|
|
||||||
v.Set(reflect.ValueOf(cast))
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
v.Set(reflect.ValueOf(ldt))
|
v.Set(reflect.ValueOf(ldt))
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalBool(value *unstable.Node, v reflect.Value) error {
|
||||||
b := value.Data[0] == 't'
|
b := value.Data[0] == 't'
|
||||||
|
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
@@ -837,13 +992,13 @@ func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error {
|
|||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v.Set(reflect.ValueOf(b))
|
v.Set(reflect.ValueOf(b))
|
||||||
default:
|
default:
|
||||||
return newDecodeError(value.Data, "cannot assign boolean to a %t", b)
|
return unstable.NewParserError(value.Data, "cannot assign boolean to a %t", b)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalFloat(value *unstable.Node, v reflect.Value) error {
|
||||||
f, err := parseFloat(value.Data)
|
f, err := parseFloat(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -854,13 +1009,13 @@ func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
|
|||||||
v.SetFloat(f)
|
v.SetFloat(f)
|
||||||
case reflect.Float32:
|
case reflect.Float32:
|
||||||
if f > math.MaxFloat32 {
|
if f > math.MaxFloat32 {
|
||||||
return newDecodeError(value.Data, "number %f does not fit in a float32", f)
|
return unstable.NewParserError(value.Data, "number %f does not fit in a float32", f)
|
||||||
}
|
}
|
||||||
v.SetFloat(f)
|
v.SetFloat(f)
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v.Set(reflect.ValueOf(f))
|
v.Set(reflect.ValueOf(f))
|
||||||
default:
|
default:
|
||||||
return newDecodeError(value.Data, "float cannot be assigned to %s", v.Kind())
|
return unstable.NewParserError(value.Data, "float cannot be assigned to %s", v.Kind())
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
@@ -879,14 +1034,20 @@ const (
|
|||||||
// compile time, so it is computed during initialization.
|
// compile time, so it is computed during initialization.
|
||||||
var maxUint int64 = math.MaxInt64
|
var maxUint int64 = math.MaxInt64
|
||||||
|
|
||||||
func init() {
|
func init() { //nolint:gochecknoinits
|
||||||
m := uint64(^uint(0))
|
m := uint64(^uint(0))
|
||||||
|
// #nosec G115
|
||||||
if m < uint64(maxUint) {
|
if m < uint64(maxUint) {
|
||||||
maxUint = int64(m)
|
maxUint = int64(m)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalInteger(value *unstable.Node, v reflect.Value) error {
|
||||||
|
kind := v.Kind()
|
||||||
|
if kind == reflect.Float32 || kind == reflect.Float64 {
|
||||||
|
return d.unmarshalFloat(value, v)
|
||||||
|
}
|
||||||
|
|
||||||
i, err := parseInteger(value.Data)
|
i, err := parseInteger(value.Data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -894,7 +1055,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
|
|
||||||
var r reflect.Value
|
var r reflect.Value
|
||||||
|
|
||||||
switch v.Kind() {
|
switch kind {
|
||||||
case reflect.Int64:
|
case reflect.Int64:
|
||||||
v.SetInt(i)
|
v.SetInt(i)
|
||||||
return nil
|
return nil
|
||||||
@@ -955,7 +1116,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
r = reflect.ValueOf(i)
|
r = reflect.ValueOf(i)
|
||||||
default:
|
default:
|
||||||
return d.typeMismatchError("integer", v.Type())
|
return unstable.NewParserError(d.p.Raw(value.Raw), "%s", d.typeMismatchString("integer", v.Type()))
|
||||||
}
|
}
|
||||||
|
|
||||||
if !r.Type().AssignableTo(v.Type()) {
|
if !r.Type().AssignableTo(v.Type()) {
|
||||||
@@ -967,20 +1128,20 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) unmarshalString(value *ast.Node, v reflect.Value) error {
|
func (d *decoder) unmarshalString(value *unstable.Node, v reflect.Value) error {
|
||||||
switch v.Kind() {
|
switch v.Kind() {
|
||||||
case reflect.String:
|
case reflect.String:
|
||||||
v.SetString(string(value.Data))
|
v.SetString(string(value.Data))
|
||||||
case reflect.Interface:
|
case reflect.Interface:
|
||||||
v.Set(reflect.ValueOf(string(value.Data)))
|
v.Set(reflect.ValueOf(string(value.Data)))
|
||||||
default:
|
default:
|
||||||
return newDecodeError(d.p.Raw(value.Raw), "cannot store TOML string into a Go %s", v.Kind())
|
return unstable.NewParserError(d.p.Raw(value.Raw), "%s", d.typeMismatchString("string", v.Type()))
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleKeyValue(expr *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||||
d.strict.EnterKeyValue(expr)
|
d.strict.EnterKeyValue(expr)
|
||||||
|
|
||||||
v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v)
|
v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v)
|
||||||
@@ -994,7 +1155,7 @@ func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value
|
|||||||
return v, err
|
return v, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) handleKeyValueInner(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||||
if key.Next() {
|
if key.Next() {
|
||||||
// Still scoping the key
|
// Still scoping the key
|
||||||
return d.handleKeyValuePart(key, value, v)
|
return d.handleKeyValuePart(key, value, v)
|
||||||
@@ -1004,7 +1165,63 @@ func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v refle
|
|||||||
return reflect.Value{}, d.handleValue(value, v)
|
return reflect.Value{}, d.handleValue(value, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) {
|
func (d *decoder) keyFromData(keyType reflect.Type, data []byte) (reflect.Value, error) {
|
||||||
|
switch {
|
||||||
|
case stringType.AssignableTo(keyType):
|
||||||
|
return reflect.ValueOf(string(data)), nil
|
||||||
|
|
||||||
|
case stringType.ConvertibleTo(keyType):
|
||||||
|
return reflect.ValueOf(string(data)).Convert(keyType), nil
|
||||||
|
|
||||||
|
case keyType.Implements(textUnmarshalerType):
|
||||||
|
mk := reflect.New(keyType.Elem())
|
||||||
|
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return mk, nil
|
||||||
|
|
||||||
|
case reflect.PointerTo(keyType).Implements(textUnmarshalerType):
|
||||||
|
mk := reflect.New(keyType)
|
||||||
|
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return mk.Elem(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
switch keyType.Kind() {
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
key, err := strconv.ParseInt(string(data), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from integer: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return reflect.ValueOf(key).Convert(keyType), nil
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||||
|
key, err := strconv.ParseUint(string(data), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from unsigned integer: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return reflect.ValueOf(key).Convert(keyType), nil
|
||||||
|
|
||||||
|
case reflect.Float32:
|
||||||
|
key, err := strconv.ParseFloat(string(data), 32)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from float: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return reflect.ValueOf(float32(key)), nil
|
||||||
|
|
||||||
|
case reflect.Float64:
|
||||||
|
key, err := strconv.ParseFloat(string(data), 64)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from float: %w", stringType, err)
|
||||||
|
}
|
||||||
|
return reflect.ValueOf(float64(key)), nil
|
||||||
|
|
||||||
|
default:
|
||||||
|
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", stringType, keyType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *decoder) handleKeyValuePart(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||||
// contains the replacement for v
|
// contains the replacement for v
|
||||||
var rv reflect.Value
|
var rv reflect.Value
|
||||||
|
|
||||||
@@ -1014,16 +1231,9 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
case reflect.Map:
|
case reflect.Map:
|
||||||
vt := v.Type()
|
vt := v.Type()
|
||||||
|
|
||||||
mk := reflect.ValueOf(string(key.Node().Data))
|
mk, err := d.keyFromData(vt.Key(), key.Node().Data)
|
||||||
mkt := stringType
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
keyType := vt.Key()
|
|
||||||
if !mkt.AssignableTo(keyType) {
|
|
||||||
if !mkt.ConvertibleTo(keyType) {
|
|
||||||
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", mkt, keyType)
|
|
||||||
}
|
|
||||||
|
|
||||||
mk = mk.Convert(keyType)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the map does not exist, create it.
|
// If the map does not exist, create it.
|
||||||
@@ -1034,15 +1244,9 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
|
|
||||||
mv := v.MapIndex(mk)
|
mv := v.MapIndex(mk)
|
||||||
set := false
|
set := false
|
||||||
if !mv.IsValid() {
|
if !mv.IsValid() || key.IsLast() {
|
||||||
set = true
|
set = true
|
||||||
mv = reflect.New(v.Type().Elem()).Elem()
|
mv = reflect.New(v.Type().Elem()).Elem()
|
||||||
} else {
|
|
||||||
if key.IsLast() {
|
|
||||||
var x interface{}
|
|
||||||
mv = reflect.ValueOf(&x).Elem()
|
|
||||||
set = true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
nv, err := d.handleKeyValueInner(key, value, mv)
|
nv, err := d.handleKeyValueInner(key, value, mv)
|
||||||
@@ -1060,6 +1264,18 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
case reflect.Struct:
|
case reflect.Struct:
|
||||||
path, found := structFieldPath(v, string(key.Node().Data))
|
path, found := structFieldPath(v, string(key.Node().Data))
|
||||||
if !found {
|
if !found {
|
||||||
|
// If no matching struct field is found but the target implements the
|
||||||
|
// unstable.Unmarshaler interface (and it is enabled), delegate the
|
||||||
|
// decoding of this value to the custom unmarshaler.
|
||||||
|
if d.unmarshalerInterface {
|
||||||
|
if v.CanAddr() && v.Addr().CanInterface() {
|
||||||
|
if outi, ok := v.Addr().Interface().(unstable.Unmarshaler); ok {
|
||||||
|
// Pass raw bytes from the original document
|
||||||
|
return reflect.Value{}, outi.UnmarshalTOML(d.p.Raw(value.Raw))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Otherwise, keep previous behavior and skip until the next table.
|
||||||
d.skipUntilTable = true
|
d.skipUntilTable = true
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
@@ -1072,6 +1288,19 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||||||
d.errorContext.Field = path
|
d.errorContext.Field = path
|
||||||
|
|
||||||
f := fieldByIndex(v, path)
|
f := fieldByIndex(v, path)
|
||||||
|
|
||||||
|
if !f.CanAddr() {
|
||||||
|
// If the field is not addressable, need to take a slower path and
|
||||||
|
// make a copy of the struct itself to a new location.
|
||||||
|
nvp := reflect.New(v.Type())
|
||||||
|
nvp.Elem().Set(v)
|
||||||
|
v = nvp.Elem()
|
||||||
|
_, err := d.handleKeyValuePart(key, value, v)
|
||||||
|
if err != nil {
|
||||||
|
return reflect.Value{}, err
|
||||||
|
}
|
||||||
|
return nvp.Elem(), nil
|
||||||
|
}
|
||||||
x, err := d.handleKeyValueInner(key, value, f)
|
x, err := d.handleKeyValueInner(key, value, f)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return reflect.Value{}, err
|
return reflect.Value{}, err
|
||||||
@@ -1137,10 +1366,10 @@ func initAndDereferencePointer(v reflect.Value) reflect.Value {
|
|||||||
|
|
||||||
// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
|
// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
|
||||||
func fieldByIndex(v reflect.Value, path []int) reflect.Value {
|
func fieldByIndex(v reflect.Value, path []int) reflect.Value {
|
||||||
for i, x := range path {
|
for _, x := range path {
|
||||||
v = v.Field(x)
|
v = v.Field(x)
|
||||||
|
|
||||||
if i < len(path)-1 && v.Kind() == reflect.Pointer {
|
if v.Kind() == reflect.Ptr {
|
||||||
if v.IsNil() {
|
if v.IsNil() {
|
||||||
v.Set(reflect.New(v.Type().Elem()))
|
v.Set(reflect.New(v.Type().Elem()))
|
||||||
}
|
}
|
||||||
@@ -1152,13 +1381,13 @@ func fieldByIndex(v reflect.Value, path []int) reflect.Value {
|
|||||||
|
|
||||||
type fieldPathsMap = map[string][]int
|
type fieldPathsMap = map[string][]int
|
||||||
|
|
||||||
var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap
|
var globalFieldPathsCache atomic.Value // map[reflect.Type]fieldPathsMap
|
||||||
|
|
||||||
func structFieldPath(v reflect.Value, name string) ([]int, bool) {
|
func structFieldPath(v reflect.Value, name string) ([]int, bool) {
|
||||||
t := v.Type()
|
t := v.Type()
|
||||||
|
|
||||||
cache, _ := globalFieldPathsCache.Load().(map[danger.TypeID]fieldPathsMap)
|
cache, _ := globalFieldPathsCache.Load().(map[reflect.Type]fieldPathsMap)
|
||||||
fieldPaths, ok := cache[danger.MakeTypeID(t)]
|
fieldPaths, ok := cache[t]
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
fieldPaths = map[string][]int{}
|
fieldPaths = map[string][]int{}
|
||||||
@@ -1169,8 +1398,8 @@ func structFieldPath(v reflect.Value, name string) ([]int, bool) {
|
|||||||
fieldPaths[strings.ToLower(name)] = path
|
fieldPaths[strings.ToLower(name)] = path
|
||||||
})
|
})
|
||||||
|
|
||||||
newCache := make(map[danger.TypeID]fieldPathsMap, len(cache)+1)
|
newCache := make(map[reflect.Type]fieldPathsMap, len(cache)+1)
|
||||||
newCache[danger.MakeTypeID(t)] = fieldPaths
|
newCache[t] = fieldPaths
|
||||||
for k, v := range cache {
|
for k, v := range cache {
|
||||||
newCache[k] = v
|
newCache[k] = v
|
||||||
}
|
}
|
||||||
@@ -1194,7 +1423,9 @@ func forEachField(t reflect.Type, path []int, do func(name string, path []int))
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
fieldPath := append(path, i)
|
fieldPath := make([]int, 0, len(path)+1)
|
||||||
|
fieldPath = append(fieldPath, path...)
|
||||||
|
fieldPath = append(fieldPath, i)
|
||||||
fieldPath = fieldPath[:len(fieldPath):len(fieldPath)]
|
fieldPath = fieldPath[:len(fieldPath):len(fieldPath)]
|
||||||
|
|
||||||
name := f.Tag.Get("toml")
|
name := f.Tag.Get("toml")
|
||||||
@@ -1208,7 +1439,7 @@ func forEachField(t reflect.Type, path []int, do func(name string, path []int))
|
|||||||
|
|
||||||
if f.Anonymous && name == "" {
|
if f.Anonymous && name == "" {
|
||||||
t2 := f.Type
|
t2 := f.Type
|
||||||
if t2.Kind() == reflect.Pointer {
|
if t2.Kind() == reflect.Ptr {
|
||||||
t2 = t2.Elem()
|
t2 = t2.Elem()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
+1579
-106
File diff suppressed because it is too large
Load Diff
+149
@@ -0,0 +1,149 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Iterator over a sequence of nodes.
|
||||||
|
//
|
||||||
|
// Starts uninitialized, you need to call Next() first.
|
||||||
|
//
|
||||||
|
// For example:
|
||||||
|
//
|
||||||
|
// it := n.Children()
|
||||||
|
// for it.Next() {
|
||||||
|
// n := it.Node()
|
||||||
|
// // do something with n
|
||||||
|
// }
|
||||||
|
type Iterator struct {
|
||||||
|
nodes *[]Node
|
||||||
|
idx int32
|
||||||
|
started bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next moves the iterator forward and returns true if points to a
|
||||||
|
// node, false otherwise.
|
||||||
|
func (c *Iterator) Next() bool {
|
||||||
|
if c.nodes == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
nodes := *c.nodes
|
||||||
|
if !c.started {
|
||||||
|
c.started = true
|
||||||
|
} else {
|
||||||
|
idx := c.idx
|
||||||
|
if idx >= 0 && int(idx) < len(nodes) {
|
||||||
|
c.idx = nodes[idx].next
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return c.idx >= 0 && int(c.idx) < len(nodes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsLast returns true if the current node of the iterator is the last
|
||||||
|
// one. Subsequent calls to Next() will return false.
|
||||||
|
func (c *Iterator) IsLast() bool {
|
||||||
|
return c.nodes == nil || c.idx < 0 || (*c.nodes)[c.idx].next < 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node returns a pointer to the node pointed at by the iterator.
|
||||||
|
func (c *Iterator) Node() *Node {
|
||||||
|
if c.nodes == nil || c.idx < 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
n := &(*c.nodes)[c.idx]
|
||||||
|
n.nodes = c.nodes
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node in a TOML expression AST.
|
||||||
|
//
|
||||||
|
// Depending on Kind, its sequence of children should be interpreted
|
||||||
|
// differently.
|
||||||
|
//
|
||||||
|
// - Array have one child per element in the array.
|
||||||
|
// - InlineTable have one child per key-value in the table (each of kind
|
||||||
|
// InlineTable).
|
||||||
|
// - KeyValue have at least two children. The first one is the value. The rest
|
||||||
|
// make a potentially dotted key.
|
||||||
|
// - Table and ArrayTable's children represent a dotted key (same as
|
||||||
|
// KeyValue, but without the first node being the value).
|
||||||
|
//
|
||||||
|
// When relevant, Raw describes the range of bytes this node is referring to in
|
||||||
|
// the input document. Use Parser.Raw() to retrieve the actual bytes.
|
||||||
|
type Node struct {
|
||||||
|
Kind Kind
|
||||||
|
Raw Range // Raw bytes from the input.
|
||||||
|
Data []byte // Node value (either allocated or referencing the input).
|
||||||
|
|
||||||
|
// Absolute indices into the backing nodes slice. -1 means none.
|
||||||
|
next int32
|
||||||
|
child int32
|
||||||
|
|
||||||
|
// Reference to the backing nodes slice for navigation.
|
||||||
|
nodes *[]Node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Range of bytes in the document.
|
||||||
|
type Range struct {
|
||||||
|
Offset uint32
|
||||||
|
Length uint32
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns a pointer to the next node, or nil if there is no next node.
|
||||||
|
func (n *Node) Next() *Node {
|
||||||
|
if n.next < 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
next := &(*n.nodes)[n.next]
|
||||||
|
next.nodes = n.nodes
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
|
||||||
|
// Child returns a pointer to the first child node of this node. Other children
|
||||||
|
// can be accessed calling Next on the first child. Returns nil if this Node
|
||||||
|
// has no child.
|
||||||
|
func (n *Node) Child() *Node {
|
||||||
|
if n.child < 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
child := &(*n.nodes)[n.child]
|
||||||
|
child.nodes = n.nodes
|
||||||
|
return child
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valid returns true if the node's kind is set (not to Invalid).
|
||||||
|
func (n *Node) Valid() bool {
|
||||||
|
return n != nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Key returns the children nodes making the Key on a supported node. Panics
|
||||||
|
// otherwise. They are guaranteed to be all be of the Kind Key. A simple key
|
||||||
|
// would return just one element.
|
||||||
|
func (n *Node) Key() Iterator {
|
||||||
|
switch n.Kind {
|
||||||
|
case KeyValue:
|
||||||
|
child := n.child
|
||||||
|
if child < 0 {
|
||||||
|
panic(errors.New("KeyValue should have at least two children"))
|
||||||
|
}
|
||||||
|
valueNode := &(*n.nodes)[child]
|
||||||
|
return Iterator{nodes: n.nodes, idx: valueNode.next}
|
||||||
|
case Table, ArrayTable:
|
||||||
|
return Iterator{nodes: n.nodes, idx: n.child}
|
||||||
|
default:
|
||||||
|
panic(fmt.Errorf("Key() is not supported on a %s", n.Kind))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Value returns a pointer to the value node of a KeyValue.
|
||||||
|
// Guaranteed to be non-nil. Panics if not called on a KeyValue node,
|
||||||
|
// or if the Children are malformed.
|
||||||
|
func (n *Node) Value() *Node {
|
||||||
|
return n.Child()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Children returns an iterator over a node's children.
|
||||||
|
func (n *Node) Children() Iterator {
|
||||||
|
return Iterator{nodes: n.nodes, idx: n.child}
|
||||||
|
}
|
||||||
@@ -1,16 +1,18 @@
|
|||||||
package toml
|
package unstable
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
var valid10Ascii = []byte("1234567890")
|
var (
|
||||||
var valid10Utf8 = []byte("日本語a")
|
valid10ASCII = []byte("1234567890")
|
||||||
var valid1kUtf8 = bytes.Repeat([]byte("0123456789日本語日本語日本語日abcdefghijklmnopqrstuvwx"), 16)
|
valid10Utf8 = []byte("日本語a")
|
||||||
var valid1MUtf8 = bytes.Repeat(valid1kUtf8, 1024)
|
valid1kUtf8 = bytes.Repeat([]byte("0123456789日本語日本語日本語日abcdefghijklmnopqrstuvwx"), 16)
|
||||||
var valid1kAscii = bytes.Repeat([]byte("012345678998jhjklasDJKLAAdjdfjsdklfjdslkabcdefghijklmnopqrstuvwx"), 16)
|
valid1MUtf8 = bytes.Repeat(valid1kUtf8, 1024)
|
||||||
var valid1MAscii = bytes.Repeat(valid1kAscii, 1024)
|
valid1kASCII = bytes.Repeat([]byte("012345678998jhjklasDJKLAAdjdfjsdklfjdslkabcdefghijklmnopqrstuvwx"), 16)
|
||||||
|
valid1MASCII = bytes.Repeat(valid1kASCII, 1024)
|
||||||
|
)
|
||||||
|
|
||||||
func BenchmarkScanComments(b *testing.B) {
|
func BenchmarkScanComments(b *testing.B) {
|
||||||
wrap := func(x []byte) []byte {
|
wrap := func(x []byte) []byte {
|
||||||
@@ -18,9 +20,9 @@ func BenchmarkScanComments(b *testing.B) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
inputs := map[string][]byte{
|
inputs := map[string][]byte{
|
||||||
"10Valid": wrap(valid10Ascii),
|
"10Valid": wrap(valid10ASCII),
|
||||||
"1kValid": wrap(valid1kAscii),
|
"1kValid": wrap(valid1kASCII),
|
||||||
"1MValid": wrap(valid1MAscii),
|
"1MValid": wrap(valid1MASCII),
|
||||||
"10ValidUtf8": wrap(valid10Utf8),
|
"10ValidUtf8": wrap(valid10Utf8),
|
||||||
"1kValidUtf8": wrap(valid1kUtf8),
|
"1kValidUtf8": wrap(valid1kUtf8),
|
||||||
"1MValidUtf8": wrap(valid1MUtf8),
|
"1MValidUtf8": wrap(valid1MUtf8),
|
||||||
@@ -33,7 +35,7 @@ func BenchmarkScanComments(b *testing.B) {
|
|||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
|
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
scanComment(input)
|
_, _, _ = scanComment(input)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -45,9 +47,9 @@ func BenchmarkParseLiteralStringValid(b *testing.B) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
inputs := map[string][]byte{
|
inputs := map[string][]byte{
|
||||||
"10Valid": wrap(valid10Ascii),
|
"10Valid": wrap(valid10ASCII),
|
||||||
"1kValid": wrap(valid1kAscii),
|
"1kValid": wrap(valid1kASCII),
|
||||||
"1MValid": wrap(valid1MAscii),
|
"1MValid": wrap(valid1MASCII),
|
||||||
"10ValidUtf8": wrap(valid10Utf8),
|
"10ValidUtf8": wrap(valid10Utf8),
|
||||||
"1kValidUtf8": wrap(valid1kUtf8),
|
"1kValidUtf8": wrap(valid1kUtf8),
|
||||||
"1MValidUtf8": wrap(valid1MUtf8),
|
"1MValidUtf8": wrap(valid1MUtf8),
|
||||||
@@ -55,7 +57,7 @@ func BenchmarkParseLiteralStringValid(b *testing.B) {
|
|||||||
|
|
||||||
for name, input := range inputs {
|
for name, input := range inputs {
|
||||||
b.Run(name, func(b *testing.B) {
|
b.Run(name, func(b *testing.B) {
|
||||||
p := parser{}
|
p := Parser{}
|
||||||
b.SetBytes(int64(len(input)))
|
b.SetBytes(int64(len(input)))
|
||||||
b.ReportAllocs()
|
b.ReportAllocs()
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
@@ -63,7 +65,7 @@ func BenchmarkParseLiteralStringValid(b *testing.B) {
|
|||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
_, _, _, err := p.parseLiteralString(input)
|
_, _, _, err := p.parseLiteralString(input)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
b.Error(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
// root contains a full AST.
|
||||||
|
//
|
||||||
|
// It is immutable once constructed with Builder.
|
||||||
|
type root struct {
|
||||||
|
nodes []Node
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *root) at(idx reference) *Node {
|
||||||
|
return &r.nodes[idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
type reference int
|
||||||
|
|
||||||
|
const invalidReference reference = -1
|
||||||
|
|
||||||
|
func (r reference) Valid() bool {
|
||||||
|
return r != invalidReference
|
||||||
|
}
|
||||||
|
|
||||||
|
type builder struct {
|
||||||
|
tree root
|
||||||
|
lastIdx int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) NodeAt(ref reference) *Node {
|
||||||
|
n := b.tree.at(ref)
|
||||||
|
n.nodes = &b.tree.nodes
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Reset() {
|
||||||
|
b.tree.nodes = b.tree.nodes[:0]
|
||||||
|
b.lastIdx = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Push(n Node) reference {
|
||||||
|
b.lastIdx = len(b.tree.nodes)
|
||||||
|
n.next = -1
|
||||||
|
n.child = -1
|
||||||
|
b.tree.nodes = append(b.tree.nodes, n)
|
||||||
|
return reference(b.lastIdx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) PushAndChain(n Node) reference {
|
||||||
|
newIdx := len(b.tree.nodes)
|
||||||
|
n.next = -1
|
||||||
|
n.child = -1
|
||||||
|
b.tree.nodes = append(b.tree.nodes, n)
|
||||||
|
if b.lastIdx >= 0 {
|
||||||
|
b.tree.nodes[b.lastIdx].next = int32(newIdx) //nolint:gosec // TOML ASTs are small
|
||||||
|
}
|
||||||
|
b.lastIdx = newIdx
|
||||||
|
return reference(b.lastIdx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) AttachChild(parent reference, child reference) {
|
||||||
|
b.tree.nodes[parent].child = int32(child) //nolint:gosec // TOML ASTs are small
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *builder) Chain(from reference, to reference) {
|
||||||
|
b.tree.nodes[from].next = int32(to) //nolint:gosec // TOML ASTs are small
|
||||||
|
}
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
// Package unstable provides APIs that do not meet the backward compatibility
|
||||||
|
// guarantees yet.
|
||||||
|
package unstable
|
||||||
@@ -1,35 +1,49 @@
|
|||||||
package ast
|
package unstable
|
||||||
|
|
||||||
import "fmt"
|
import "fmt"
|
||||||
|
|
||||||
|
// Kind represents the type of TOML structure contained in a given Node.
|
||||||
type Kind int
|
type Kind int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// meta
|
// Invalid represents an invalid meta node.
|
||||||
Invalid Kind = iota
|
Invalid Kind = iota
|
||||||
|
// Comment represents a comment meta node.
|
||||||
Comment
|
Comment
|
||||||
|
// Key represents a key meta node.
|
||||||
Key
|
Key
|
||||||
|
|
||||||
// top level structures
|
// Table represents a top-level table.
|
||||||
Table
|
Table
|
||||||
|
// ArrayTable represents a top-level array table.
|
||||||
ArrayTable
|
ArrayTable
|
||||||
|
// KeyValue represents a top-level key value.
|
||||||
KeyValue
|
KeyValue
|
||||||
|
|
||||||
// containers values
|
// Array represents an array container value.
|
||||||
Array
|
Array
|
||||||
|
// InlineTable represents an inline table container value.
|
||||||
InlineTable
|
InlineTable
|
||||||
|
|
||||||
// values
|
// String represents a string value.
|
||||||
String
|
String
|
||||||
|
// Bool represents a boolean value.
|
||||||
Bool
|
Bool
|
||||||
|
// Float represents a floating point value.
|
||||||
Float
|
Float
|
||||||
|
// Integer represents an integer value.
|
||||||
Integer
|
Integer
|
||||||
|
// LocalDate represents a a local date value.
|
||||||
LocalDate
|
LocalDate
|
||||||
|
// LocalTime represents a local time value.
|
||||||
LocalTime
|
LocalTime
|
||||||
|
// LocalDateTime represents a local date/time value.
|
||||||
LocalDateTime
|
LocalDateTime
|
||||||
|
// DateTime represents a data/time value.
|
||||||
DateTime
|
DateTime
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// String implementation of fmt.Stringer.
|
||||||
func (k Kind) String() string {
|
func (k Kind) String() string {
|
||||||
switch k {
|
switch k {
|
||||||
case Invalid:
|
case Invalid:
|
||||||
+338
-147
@@ -1,50 +1,133 @@
|
|||||||
package toml
|
package unstable
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
"unicode"
|
"unicode"
|
||||||
|
|
||||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
"github.com/pelletier/go-toml/v2/internal/characters"
|
||||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type parser struct {
|
// ParserError describes an error relative to the content of the document.
|
||||||
builder ast.Builder
|
//
|
||||||
ref ast.Reference
|
// It cannot outlive the instance of Parser it refers to, and may cause panics
|
||||||
data []byte
|
// if the parser is reset.
|
||||||
left []byte
|
type ParserError struct {
|
||||||
err error
|
Highlight []byte
|
||||||
first bool
|
Message string
|
||||||
|
Key []string // optional
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Range(b []byte) ast.Range {
|
// Error is the implementation of the error interface.
|
||||||
return ast.Range{
|
func (e *ParserError) Error() string {
|
||||||
Offset: uint32(danger.SubsliceOffset(p.data, b)),
|
return e.Message
|
||||||
Length: uint32(len(b)),
|
}
|
||||||
|
|
||||||
|
// NewParserError is a convenience function to create a ParserError
|
||||||
|
//
|
||||||
|
// Warning: Highlight needs to be a subslice of Parser.data, so only slices
|
||||||
|
// returned by Parser.Raw are valid candidates.
|
||||||
|
func NewParserError(highlight []byte, format string, args ...interface{}) error {
|
||||||
|
return &ParserError{
|
||||||
|
Highlight: highlight,
|
||||||
|
Message: fmt.Errorf(format, args...).Error(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Raw(raw ast.Range) []byte {
|
// Parser scans over a TOML-encoded document and generates an iterative AST.
|
||||||
|
//
|
||||||
|
// To prime the Parser, first reset it with the contents of a TOML document.
|
||||||
|
// Then, process all top-level expressions sequentially. See Example.
|
||||||
|
//
|
||||||
|
// Don't forget to check Error() after you're done parsing.
|
||||||
|
//
|
||||||
|
// Each top-level expression needs to be fully processed before calling
|
||||||
|
// NextExpression() again. Otherwise, calls to various Node methods may panic if
|
||||||
|
// the parser has moved on the next expression.
|
||||||
|
//
|
||||||
|
// For performance reasons, go-toml doesn't make a copy of the input bytes to
|
||||||
|
// the parser. Make sure to copy all the bytes you need to outlive the slice
|
||||||
|
// given to the parser.
|
||||||
|
type Parser struct {
|
||||||
|
data []byte
|
||||||
|
builder builder
|
||||||
|
ref reference
|
||||||
|
left []byte
|
||||||
|
err error
|
||||||
|
first bool
|
||||||
|
|
||||||
|
KeepComments bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Data returns the slice provided to the last call to Reset.
|
||||||
|
func (p *Parser) Data() []byte {
|
||||||
|
return p.data
|
||||||
|
}
|
||||||
|
|
||||||
|
// Range returns a range description that corresponds to a given slice of the
|
||||||
|
// input. If the argument is not a subslice of the parser input, this function
|
||||||
|
// panics.
|
||||||
|
func (p *Parser) Range(b []byte) Range {
|
||||||
|
return Range{
|
||||||
|
Offset: uint32(subsliceOffset(p.data, b)), //nolint:gosec // TOML documents are small
|
||||||
|
Length: uint32(len(b)), //nolint:gosec // TOML documents are small
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// rangeOfToken computes the Range of a token given the remaining bytes after the token.
|
||||||
|
// This is used when the token was extracted from the beginning of some position,
|
||||||
|
// and 'rest' is what remains after the token.
|
||||||
|
func (p *Parser) rangeOfToken(token, rest []byte) Range {
|
||||||
|
offset := len(p.data) - len(token) - len(rest)
|
||||||
|
return Range{Offset: uint32(offset), Length: uint32(len(token))} //nolint:gosec // TOML documents are small
|
||||||
|
}
|
||||||
|
|
||||||
|
// subsliceOffset returns the byte offset of subslice b within data.
|
||||||
|
// b must share the same backing array as data (any subslice of data).
|
||||||
|
func subsliceOffset(data, b []byte) int {
|
||||||
|
if len(b) == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
dataPtr := reflect.ValueOf(data).Pointer()
|
||||||
|
bPtr := reflect.ValueOf(b).Pointer()
|
||||||
|
|
||||||
|
offset := int(bPtr - dataPtr)
|
||||||
|
if offset < 0 || offset > len(data) {
|
||||||
|
panic("subslice is not within data")
|
||||||
|
}
|
||||||
|
return offset
|
||||||
|
}
|
||||||
|
|
||||||
|
// Raw returns the slice corresponding to the bytes in the given range.
|
||||||
|
func (p *Parser) Raw(raw Range) []byte {
|
||||||
return p.data[raw.Offset : raw.Offset+raw.Length]
|
return p.data[raw.Offset : raw.Offset+raw.Length]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Reset(b []byte) {
|
// Reset brings the parser to its initial state for a given input. It wipes an
|
||||||
|
// reuses internal storage to reduce allocation.
|
||||||
|
func (p *Parser) Reset(b []byte) {
|
||||||
p.builder.Reset()
|
p.builder.Reset()
|
||||||
p.ref = ast.InvalidReference
|
p.ref = invalidReference
|
||||||
p.data = b
|
p.data = b
|
||||||
p.left = b
|
p.left = b
|
||||||
p.err = nil
|
p.err = nil
|
||||||
p.first = true
|
p.first = true
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
// NextExpression parses the next top-level expression. If an expression was
|
||||||
func (p *parser) NextExpression() bool {
|
// successfully parsed, it returns true. If the parser is at the end of the
|
||||||
|
// document or an error occurred, it returns false.
|
||||||
|
//
|
||||||
|
// Retrieve the parsed expression with Expression().
|
||||||
|
func (p *Parser) NextExpression() bool {
|
||||||
if len(p.left) == 0 || p.err != nil {
|
if len(p.left) == 0 || p.err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
p.builder.Reset()
|
p.builder.Reset()
|
||||||
p.ref = ast.InvalidReference
|
p.ref = invalidReference
|
||||||
|
|
||||||
for {
|
for {
|
||||||
if len(p.left) == 0 || p.err != nil {
|
if len(p.left) == 0 || p.err != nil {
|
||||||
@@ -73,15 +156,54 @@ func (p *parser) NextExpression() bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Expression() *ast.Node {
|
// Expression returns a pointer to the node representing the last successfully
|
||||||
|
// parsed expression.
|
||||||
|
func (p *Parser) Expression() *Node {
|
||||||
return p.builder.NodeAt(p.ref)
|
return p.builder.NodeAt(p.ref)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) Error() error {
|
// Error returns any error that has occurred during parsing.
|
||||||
|
func (p *Parser) Error() error {
|
||||||
return p.err
|
return p.err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseNewline(b []byte) ([]byte, error) {
|
// Position describes a position in the input.
|
||||||
|
type Position struct {
|
||||||
|
// Number of bytes from the beginning of the input.
|
||||||
|
Offset int
|
||||||
|
// Line number, starting at 1.
|
||||||
|
Line int
|
||||||
|
// Column number, starting at 1.
|
||||||
|
Column int
|
||||||
|
}
|
||||||
|
|
||||||
|
// Shape describes the position of a range in the input.
|
||||||
|
type Shape struct {
|
||||||
|
Start Position
|
||||||
|
End Position
|
||||||
|
}
|
||||||
|
|
||||||
|
// Shape returns the shape of the given range in the input. Will
|
||||||
|
// panic if the range is not a subslice of the input.
|
||||||
|
func (p *Parser) Shape(r Range) Shape {
|
||||||
|
return Shape{
|
||||||
|
Start: p.positionAt(int(r.Offset)),
|
||||||
|
End: p.positionAt(int(r.Offset + r.Length)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// positionAt returns the position at the given byte offset in the document.
|
||||||
|
func (p *Parser) positionAt(offset int) Position {
|
||||||
|
lead := p.data[:offset]
|
||||||
|
|
||||||
|
return Position{
|
||||||
|
Offset: offset,
|
||||||
|
Line: bytes.Count(lead, []byte{'\n'}) + 1,
|
||||||
|
Column: len(lead) - bytes.LastIndex(lead, []byte{'\n'}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Parser) parseNewline(b []byte) ([]byte, error) {
|
||||||
if b[0] == '\n' {
|
if b[0] == '\n' {
|
||||||
return b[1:], nil
|
return b[1:], nil
|
||||||
}
|
}
|
||||||
@@ -91,14 +213,27 @@ func (p *parser) parseNewline(b []byte) ([]byte, error) {
|
|||||||
return rest, err
|
return rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, newDecodeError(b[0:1], "expected newline but got %#U", b[0])
|
return nil, NewParserError(b[0:1], "expected newline but got %#U", b[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseComment(b []byte) (reference, []byte, error) {
|
||||||
|
ref := invalidReference
|
||||||
|
data, rest, err := scanComment(b)
|
||||||
|
if p.KeepComments && err == nil {
|
||||||
|
ref = p.builder.Push(Node{
|
||||||
|
Kind: Comment,
|
||||||
|
Raw: p.rangeOfToken(data, rest),
|
||||||
|
Data: data,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return ref, rest, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Parser) parseExpression(b []byte) (reference, []byte, error) {
|
||||||
// expression = ws [ comment ]
|
// expression = ws [ comment ]
|
||||||
// expression =/ ws keyval ws [ comment ]
|
// expression =/ ws keyval ws [ comment ]
|
||||||
// expression =/ ws table ws [ comment ]
|
// expression =/ ws table ws [ comment ]
|
||||||
ref := ast.InvalidReference
|
ref := invalidReference
|
||||||
|
|
||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
@@ -107,7 +242,7 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if b[0] == '#' {
|
if b[0] == '#' {
|
||||||
_, rest, err := scanComment(b)
|
ref, rest, err := p.parseComment(b)
|
||||||
return ref, rest, err
|
return ref, rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -129,14 +264,17 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) > 0 && b[0] == '#' {
|
if len(b) > 0 && b[0] == '#' {
|
||||||
_, rest, err := scanComment(b)
|
cref, rest, err := p.parseComment(b)
|
||||||
|
if cref != invalidReference {
|
||||||
|
p.builder.Chain(ref, cref)
|
||||||
|
}
|
||||||
return ref, rest, err
|
return ref, rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return ref, b, nil
|
return ref, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseTable(b []byte) (reference, []byte, error) {
|
||||||
// table = std-table / array-table
|
// table = std-table / array-table
|
||||||
if len(b) > 1 && b[1] == '[' {
|
if len(b) > 1 && b[1] == '[' {
|
||||||
return p.parseArrayTable(b)
|
return p.parseArrayTable(b)
|
||||||
@@ -145,12 +283,12 @@ func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return p.parseStdTable(b)
|
return p.parseStdTable(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseArrayTable(b []byte) (reference, []byte, error) {
|
||||||
// array-table = array-table-open key array-table-close
|
// array-table = array-table-open key array-table-close
|
||||||
// array-table-open = %x5B.5B ws ; [[ Double left square bracket
|
// array-table-open = %x5B.5B ws ; [[ Double left square bracket
|
||||||
// array-table-close = ws %x5D.5D ; ]] Double right square bracket
|
// array-table-close = ws %x5D.5D ; ]] Double right square bracket
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.ArrayTable,
|
Kind: ArrayTable,
|
||||||
})
|
})
|
||||||
|
|
||||||
b = b[2:]
|
b = b[2:]
|
||||||
@@ -174,12 +312,12 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, err
|
return ref, b, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseStdTable(b []byte) (reference, []byte, error) {
|
||||||
// std-table = std-table-open key std-table-close
|
// std-table = std-table-open key std-table-close
|
||||||
// std-table-open = %x5B ws ; [ Left square bracket
|
// std-table-open = %x5B ws ; [ Left square bracket
|
||||||
// std-table-close = ws %x5D ; ] Right square bracket
|
// std-table-close = ws %x5D ; ] Right square bracket
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.Table,
|
Kind: Table,
|
||||||
})
|
})
|
||||||
|
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
@@ -199,15 +337,18 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, err
|
return ref, b, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseKeyval(b []byte) (reference, []byte, error) {
|
||||||
// keyval = key keyval-sep val
|
// keyval = key keyval-sep val
|
||||||
ref := p.builder.Push(ast.Node{
|
// Track the start position for Raw range
|
||||||
Kind: ast.KeyValue,
|
startB := b
|
||||||
|
|
||||||
|
ref := p.builder.Push(Node{
|
||||||
|
Kind: KeyValue,
|
||||||
})
|
})
|
||||||
|
|
||||||
key, b, err := p.parseKey(b)
|
key, b, err := p.parseKey(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ast.InvalidReference, nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// keyval-sep = ws %x3D ws ; =
|
// keyval-sep = ws %x3D ws ; =
|
||||||
@@ -215,12 +356,12 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there")
|
return invalidReference, nil, NewParserError(startB[:len(startB)-len(b)], "expected = after a key, but the document ends there")
|
||||||
}
|
}
|
||||||
|
|
||||||
b, err = expect('=', b)
|
b, err = expect('=', b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ast.InvalidReference, nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
@@ -233,16 +374,21 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
|||||||
p.builder.Chain(valRef, key)
|
p.builder.Chain(valRef, key)
|
||||||
p.builder.AttachChild(ref, valRef)
|
p.builder.AttachChild(ref, valRef)
|
||||||
|
|
||||||
|
// Set Raw to span the entire key-value expression.
|
||||||
|
// Access the node directly in the slice to avoid the write barrier
|
||||||
|
// that NodeAt's nodes-pointer setup would trigger.
|
||||||
|
p.builder.tree.nodes[ref].Raw = p.rangeOfToken(startB[:len(startB)-len(b)], b)
|
||||||
|
|
||||||
return ref, b, err
|
return ref, b, err
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop,funlen
|
//nolint:cyclop,funlen
|
||||||
func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseVal(b []byte) (reference, []byte, error) {
|
||||||
// val = string / boolean / array / inline-table / date-time / float / integer
|
// val = string / boolean / array / inline-table / date-time / float / integer
|
||||||
ref := ast.InvalidReference
|
ref := invalidReference
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return ref, nil, newDecodeError(b, "expected value, not eof")
|
return ref, nil, NewParserError(b, "expected value, not eof")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
@@ -259,9 +405,9 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err == nil {
|
if err == nil {
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.String,
|
Kind: String,
|
||||||
Raw: p.Range(raw),
|
Raw: p.rangeOfToken(raw, b),
|
||||||
Data: v,
|
Data: v,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -277,9 +423,9 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if err == nil {
|
if err == nil {
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.String,
|
Kind: String,
|
||||||
Raw: p.Range(raw),
|
Raw: p.rangeOfToken(raw, b),
|
||||||
Data: v,
|
Data: v,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -287,22 +433,22 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, err
|
return ref, b, err
|
||||||
case 't':
|
case 't':
|
||||||
if !scanFollowsTrue(b) {
|
if !scanFollowsTrue(b) {
|
||||||
return ref, nil, newDecodeError(atmost(b, 4), "expected 'true'")
|
return ref, nil, NewParserError(atmost(b, 4), "expected 'true'")
|
||||||
}
|
}
|
||||||
|
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.Bool,
|
Kind: Bool,
|
||||||
Data: b[:4],
|
Data: b[:4],
|
||||||
})
|
})
|
||||||
|
|
||||||
return ref, b[4:], nil
|
return ref, b[4:], nil
|
||||||
case 'f':
|
case 'f':
|
||||||
if !scanFollowsFalse(b) {
|
if !scanFollowsFalse(b) {
|
||||||
return ref, nil, newDecodeError(atmost(b, 5), "expected 'false'")
|
return ref, nil, NewParserError(atmost(b, 5), "expected 'false'")
|
||||||
}
|
}
|
||||||
|
|
||||||
ref = p.builder.Push(ast.Node{
|
ref = p.builder.Push(Node{
|
||||||
Kind: ast.Bool,
|
Kind: Bool,
|
||||||
Data: b[:5],
|
Data: b[:5],
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -324,7 +470,7 @@ func atmost(b []byte, n int) []byte {
|
|||||||
return b[:n]
|
return b[:n]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
v, rest, err := scanLiteralString(b)
|
v, rest, err := scanLiteralString(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, err
|
return nil, nil, nil, err
|
||||||
@@ -333,19 +479,20 @@ func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
return v, v[1 : len(v)-1], rest, nil
|
return v, v[1 : len(v)-1], rest, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseInlineTable(b []byte) (reference, []byte, error) {
|
||||||
// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
|
// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
|
||||||
// inline-table-open = %x7B ws ; {
|
// inline-table-open = %x7B ws ; {
|
||||||
// inline-table-close = ws %x7D ; }
|
// inline-table-close = ws %x7D ; }
|
||||||
// inline-table-sep = ws %x2C ws ; , Comma
|
// inline-table-sep = ws %x2C ws ; , Comma
|
||||||
// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
|
// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
|
||||||
parent := p.builder.Push(ast.Node{
|
parent := p.builder.Push(Node{
|
||||||
Kind: ast.InlineTable,
|
Kind: InlineTable,
|
||||||
|
Raw: p.rangeOfToken(b[:1], b[1:]),
|
||||||
})
|
})
|
||||||
|
|
||||||
first := true
|
first := true
|
||||||
|
|
||||||
var child ast.Reference
|
var child reference
|
||||||
|
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
|
|
||||||
@@ -356,7 +503,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return parent, nil, newDecodeError(previousB[:1], "inline table is incomplete")
|
return parent, nil, NewParserError(previousB[:1], "inline table is incomplete")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[0] == '}' {
|
if b[0] == '}' {
|
||||||
@@ -371,7 +518,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
var kv ast.Reference
|
var kv reference
|
||||||
|
|
||||||
kv, b, err = p.parseKeyval(b)
|
kv, b, err = p.parseKeyval(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -394,7 +541,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,cyclop
|
//nolint:funlen,cyclop
|
||||||
func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseValArray(b []byte) (reference, []byte, error) {
|
||||||
// array = array-open [ array-values ] ws-comment-newline array-close
|
// array = array-open [ array-values ] ws-comment-newline array-close
|
||||||
// array-open = %x5B ; [
|
// array-open = %x5B ; [
|
||||||
// array-close = %x5D ; ]
|
// array-close = %x5D ; ]
|
||||||
@@ -405,23 +552,39 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
arrayStart := b
|
arrayStart := b
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
|
|
||||||
parent := p.builder.Push(ast.Node{
|
parent := p.builder.Push(Node{
|
||||||
Kind: ast.Array,
|
Kind: Array,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// First indicates whether the parser is looking for the first element
|
||||||
|
// (non-comment) of the array.
|
||||||
first := true
|
first := true
|
||||||
|
|
||||||
var lastChild ast.Reference
|
lastChild := invalidReference
|
||||||
|
|
||||||
|
addChild := func(valueRef reference) {
|
||||||
|
if lastChild == invalidReference {
|
||||||
|
p.builder.AttachChild(parent, valueRef)
|
||||||
|
} else {
|
||||||
|
p.builder.Chain(lastChild, valueRef)
|
||||||
|
}
|
||||||
|
lastChild = valueRef
|
||||||
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
for len(b) > 0 {
|
for len(b) > 0 {
|
||||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
var cref reference
|
||||||
|
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if cref != invalidReference {
|
||||||
|
addChild(cref)
|
||||||
|
}
|
||||||
|
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return parent, nil, newDecodeError(arrayStart[:1], "array is incomplete")
|
return parent, nil, NewParserError(arrayStart[:1], "array is incomplete")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[0] == ']' {
|
if b[0] == ']' {
|
||||||
@@ -430,16 +593,19 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
|
|
||||||
if b[0] == ',' {
|
if b[0] == ',' {
|
||||||
if first {
|
if first {
|
||||||
return parent, nil, newDecodeError(b[0:1], "array cannot start with comma")
|
return parent, nil, NewParserError(b[0:1], "array cannot start with comma")
|
||||||
}
|
}
|
||||||
b = b[1:]
|
b = b[1:]
|
||||||
|
|
||||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
if cref != invalidReference {
|
||||||
|
addChild(cref)
|
||||||
|
}
|
||||||
} else if !first {
|
} else if !first {
|
||||||
return parent, nil, newDecodeError(b[0:1], "array elements must be separated by commas")
|
return parent, nil, NewParserError(b[0:1], "array elements must be separated by commas")
|
||||||
}
|
}
|
||||||
|
|
||||||
// TOML allows trailing commas in arrays.
|
// TOML allows trailing commas in arrays.
|
||||||
@@ -447,23 +613,22 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
var valueRef ast.Reference
|
var valueRef reference
|
||||||
valueRef, b, err = p.parseVal(b)
|
valueRef, b, err = p.parseVal(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if first {
|
addChild(valueRef)
|
||||||
p.builder.AttachChild(parent, valueRef)
|
|
||||||
} else {
|
|
||||||
p.builder.Chain(lastChild, valueRef)
|
|
||||||
}
|
|
||||||
lastChild = valueRef
|
|
||||||
|
|
||||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return parent, nil, err
|
return parent, nil, err
|
||||||
}
|
}
|
||||||
|
if cref != invalidReference {
|
||||||
|
addChild(cref)
|
||||||
|
}
|
||||||
|
|
||||||
first = false
|
first = false
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -472,15 +637,35 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return parent, rest, err
|
return parent, rest, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error) {
|
func (p *Parser) parseOptionalWhitespaceCommentNewline(b []byte) (reference, []byte, error) {
|
||||||
|
rootCommentRef := invalidReference
|
||||||
|
latestCommentRef := invalidReference
|
||||||
|
|
||||||
|
addComment := func(ref reference) {
|
||||||
|
switch {
|
||||||
|
case rootCommentRef == invalidReference:
|
||||||
|
rootCommentRef = ref
|
||||||
|
case latestCommentRef == invalidReference:
|
||||||
|
p.builder.AttachChild(rootCommentRef, ref)
|
||||||
|
latestCommentRef = ref
|
||||||
|
default:
|
||||||
|
p.builder.Chain(latestCommentRef, ref)
|
||||||
|
latestCommentRef = ref
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for len(b) > 0 {
|
for len(b) > 0 {
|
||||||
var err error
|
var err error
|
||||||
b = p.parseWhitespace(b)
|
b = p.parseWhitespace(b)
|
||||||
|
|
||||||
if len(b) > 0 && b[0] == '#' {
|
if len(b) > 0 && b[0] == '#' {
|
||||||
_, b, err = scanComment(b)
|
var ref reference
|
||||||
|
ref, b, err = p.parseComment(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return invalidReference, nil, err
|
||||||
|
}
|
||||||
|
if ref != invalidReference {
|
||||||
|
addComment(ref)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -491,17 +676,17 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
|
|||||||
if b[0] == '\n' || b[0] == '\r' {
|
if b[0] == '\n' || b[0] == '\r' {
|
||||||
b, err = p.parseNewline(b)
|
b, err = p.parseNewline(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return b, nil
|
return rootCommentRef, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
token, rest, err := scanMultilineLiteralString(b)
|
token, rest, err := scanMultilineLiteralString(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, err
|
return nil, nil, nil, err
|
||||||
@@ -520,7 +705,7 @@ func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte,
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,gocognit,cyclop
|
//nolint:funlen,gocognit,cyclop
|
||||||
func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
|
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
|
||||||
// ml-basic-string-delim
|
// ml-basic-string-delim
|
||||||
// ml-basic-string-delim = 3quotation-mark
|
// ml-basic-string-delim = 3quotation-mark
|
||||||
@@ -551,11 +736,11 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
|
|
||||||
if !escaped {
|
if !escaped {
|
||||||
str := token[startIdx:endIdx]
|
str := token[startIdx:endIdx]
|
||||||
verr := utf8TomlValidAlreadyEscaped(str)
|
highlight := characters.Utf8TomlValidAlreadyEscaped(str)
|
||||||
if verr.Zero() {
|
if len(highlight) == 0 {
|
||||||
return token, str, rest, nil
|
return token, str, rest, nil
|
||||||
}
|
}
|
||||||
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
return nil, nil, nil, NewParserError(highlight, "invalid UTF-8")
|
||||||
}
|
}
|
||||||
|
|
||||||
var builder bytes.Buffer
|
var builder bytes.Buffer
|
||||||
@@ -591,7 +776,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
i += j
|
i += j
|
||||||
for ; i < len(token)-3; i++ {
|
for ; i < len(token)-3; i++ {
|
||||||
c := token[i]
|
c := token[i]
|
||||||
if !(c == '\n' || c == '\r' || c == ' ' || c == '\t') {
|
if c != '\n' && c != '\r' && c != ' ' && c != '\t' {
|
||||||
i--
|
i--
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
@@ -635,13 +820,13 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
builder.WriteRune(x)
|
builder.WriteRune(x)
|
||||||
i += 8
|
i += 8
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
} else {
|
} else {
|
||||||
size := utf8ValidNext(token[i:])
|
size := characters.Utf8ValidNext(token[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
|
||||||
}
|
}
|
||||||
builder.Write(token[i : i+size])
|
builder.Write(token[i : i+size])
|
||||||
i += size
|
i += size
|
||||||
@@ -651,7 +836,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||||||
return token, builder.Bytes(), rest, nil
|
return token, builder.Bytes(), rest, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseKey(b []byte) (reference, []byte, error) {
|
||||||
// key = simple-key / dotted-key
|
// key = simple-key / dotted-key
|
||||||
// simple-key = quoted-key / unquoted-key
|
// simple-key = quoted-key / unquoted-key
|
||||||
//
|
//
|
||||||
@@ -662,12 +847,12 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||||||
// dot-sep = ws %x2E ws ; . Period
|
// dot-sep = ws %x2E ws ; . Period
|
||||||
raw, key, b, err := p.parseSimpleKey(b)
|
raw, key, b, err := p.parseSimpleKey(b)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ast.InvalidReference, nil, err
|
return invalidReference, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
ref := p.builder.Push(ast.Node{
|
ref := p.builder.Push(Node{
|
||||||
Kind: ast.Key,
|
Kind: Key,
|
||||||
Raw: p.Range(raw),
|
Raw: p.rangeOfToken(raw, b),
|
||||||
Data: key,
|
Data: key,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -681,9 +866,9 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, nil, err
|
return ref, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.builder.PushAndChain(ast.Node{
|
p.builder.PushAndChain(Node{
|
||||||
Kind: ast.Key,
|
Kind: Key,
|
||||||
Raw: p.Range(raw),
|
Raw: p.rangeOfToken(raw, b),
|
||||||
Data: key,
|
Data: key,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
@@ -694,9 +879,9 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||||||
return ref, b, nil
|
return ref, b, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
func (p *Parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return nil, nil, nil, newDecodeError(b, "expected key but found none")
|
return nil, nil, nil, NewParserError(b, "expected key but found none")
|
||||||
}
|
}
|
||||||
|
|
||||||
// simple-key = quoted-key / unquoted-key
|
// simple-key = quoted-key / unquoted-key
|
||||||
@@ -711,12 +896,12 @@ func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
|||||||
key, rest = scanUnquotedKey(b)
|
key, rest = scanUnquotedKey(b)
|
||||||
return key, key, rest, nil
|
return key, key, rest, nil
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
|
return nil, nil, nil, NewParserError(b[0:1], "invalid character at start of key: %c", b[0])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,cyclop
|
//nolint:funlen,cyclop
|
||||||
func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
func (p *Parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||||
// basic-string = quotation-mark *basic-char quotation-mark
|
// basic-string = quotation-mark *basic-char quotation-mark
|
||||||
// quotation-mark = %x22 ; "
|
// quotation-mark = %x22 ; "
|
||||||
// basic-char = basic-unescaped / escaped
|
// basic-char = basic-unescaped / escaped
|
||||||
@@ -744,11 +929,11 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
// validate the string and return a direct reference to the buffer.
|
// validate the string and return a direct reference to the buffer.
|
||||||
if !escaped {
|
if !escaped {
|
||||||
str := token[startIdx:endIdx]
|
str := token[startIdx:endIdx]
|
||||||
verr := utf8TomlValidAlreadyEscaped(str)
|
highlight := characters.Utf8TomlValidAlreadyEscaped(str)
|
||||||
if verr.Zero() {
|
if len(highlight) == 0 {
|
||||||
return token, str, rest, nil
|
return token, str, rest, nil
|
||||||
}
|
}
|
||||||
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
return nil, nil, nil, NewParserError(highlight, "invalid UTF-8")
|
||||||
}
|
}
|
||||||
|
|
||||||
i := startIdx
|
i := startIdx
|
||||||
@@ -795,13 +980,13 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
builder.WriteRune(x)
|
builder.WriteRune(x)
|
||||||
i += 8
|
i += 8
|
||||||
default:
|
default:
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
|
||||||
}
|
}
|
||||||
i++
|
i++
|
||||||
} else {
|
} else {
|
||||||
size := utf8ValidNext(token[i:])
|
size := characters.Utf8ValidNext(token[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
|
return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
|
||||||
}
|
}
|
||||||
builder.Write(token[i : i+size])
|
builder.Write(token[i : i+size])
|
||||||
i += size
|
i += size
|
||||||
@@ -813,13 +998,13 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||||||
|
|
||||||
func hexToRune(b []byte, length int) (rune, error) {
|
func hexToRune(b []byte, length int) (rune, error) {
|
||||||
if len(b) < length {
|
if len(b) < length {
|
||||||
return -1, newDecodeError(b, "unicode point needs %d character, not %d", length, len(b))
|
return -1, NewParserError(b, "unicode point needs %d character, not %d", length, len(b))
|
||||||
}
|
}
|
||||||
b = b[:length]
|
b = b[:length]
|
||||||
|
|
||||||
var r uint32
|
var r uint32
|
||||||
for i, c := range b {
|
for i, c := range b {
|
||||||
d := uint32(0)
|
var d uint32
|
||||||
switch {
|
switch {
|
||||||
case '0' <= c && c <= '9':
|
case '0' <= c && c <= '9':
|
||||||
d = uint32(c - '0')
|
d = uint32(c - '0')
|
||||||
@@ -828,19 +1013,19 @@ func hexToRune(b []byte, length int) (rune, error) {
|
|||||||
case 'A' <= c && c <= 'F':
|
case 'A' <= c && c <= 'F':
|
||||||
d = uint32(c - 'A' + 10)
|
d = uint32(c - 'A' + 10)
|
||||||
default:
|
default:
|
||||||
return -1, newDecodeError(b[i:i+1], "non-hex character")
|
return -1, NewParserError(b[i:i+1], "non-hex character")
|
||||||
}
|
}
|
||||||
r = r*16 + d
|
r = r*16 + d
|
||||||
}
|
}
|
||||||
|
|
||||||
if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 {
|
if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 {
|
||||||
return -1, newDecodeError(b, "escape sequence is invalid Unicode code point")
|
return -1, NewParserError(b, "escape sequence is invalid Unicode code point")
|
||||||
}
|
}
|
||||||
|
|
||||||
return rune(r), nil
|
return rune(r), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) parseWhitespace(b []byte) []byte {
|
func (p *Parser) parseWhitespace(b []byte) []byte {
|
||||||
// ws = *wschar
|
// ws = *wschar
|
||||||
// wschar = %x20 ; Space
|
// wschar = %x20 ; Space
|
||||||
// wschar =/ %x09 ; Horizontal tab
|
// wschar =/ %x09 ; Horizontal tab
|
||||||
@@ -850,25 +1035,27 @@ func (p *parser) parseWhitespace(b []byte) []byte {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//nolint:cyclop
|
//nolint:cyclop
|
||||||
func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) parseIntOrFloatOrDateTime(b []byte) (reference, []byte, error) {
|
||||||
switch b[0] {
|
switch b[0] {
|
||||||
case 'i':
|
case 'i':
|
||||||
if !scanFollowsInf(b) {
|
if !scanFollowsInf(b) {
|
||||||
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'")
|
return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'inf'")
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:3],
|
Data: b[:3],
|
||||||
|
Raw: p.rangeOfToken(b[:3], b[3:]),
|
||||||
}), b[3:], nil
|
}), b[3:], nil
|
||||||
case 'n':
|
case 'n':
|
||||||
if !scanFollowsNan(b) {
|
if !scanFollowsNan(b) {
|
||||||
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'")
|
return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'nan'")
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:3],
|
Data: b[:3],
|
||||||
|
Raw: p.rangeOfToken(b[:3], b[3:]),
|
||||||
}), b[3:], nil
|
}), b[3:], nil
|
||||||
case '+', '-':
|
case '+', '-':
|
||||||
return p.scanIntOrFloat(b)
|
return p.scanIntOrFloat(b)
|
||||||
@@ -898,7 +1085,7 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
|
|||||||
return p.scanIntOrFloat(b)
|
return p.scanIntOrFloat(b)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *parser) scanDateTime(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) scanDateTime(b []byte) (reference, []byte, error) {
|
||||||
// scans for contiguous characters in [0-9T:Z.+-], and up to one space if
|
// scans for contiguous characters in [0-9T:Z.+-], and up to one space if
|
||||||
// followed by a digit.
|
// followed by a digit.
|
||||||
hasDate := false
|
hasDate := false
|
||||||
@@ -921,7 +1108,7 @@ byteLoop:
|
|||||||
}
|
}
|
||||||
case c == 'T' || c == 't' || c == ':' || c == '.':
|
case c == 'T' || c == 't' || c == ':' || c == '.':
|
||||||
hasTime = true
|
hasTime = true
|
||||||
case c == '+' || c == '-' || c == 'Z' || c == 'z':
|
case c == '+' || c == 'Z' || c == 'z':
|
||||||
hasTz = true
|
hasTz = true
|
||||||
case c == ' ':
|
case c == ' ':
|
||||||
if !seenSpace && i+1 < len(b) && isDigit(b[i+1]) {
|
if !seenSpace && i+1 < len(b) && isDigit(b[i+1]) {
|
||||||
@@ -941,30 +1128,30 @@ byteLoop:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var kind ast.Kind
|
var kind Kind
|
||||||
|
|
||||||
if hasTime {
|
if hasTime {
|
||||||
if hasDate {
|
if hasDate {
|
||||||
if hasTz {
|
if hasTz {
|
||||||
kind = ast.DateTime
|
kind = DateTime
|
||||||
} else {
|
} else {
|
||||||
kind = ast.LocalDateTime
|
kind = LocalDateTime
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
kind = ast.LocalTime
|
kind = LocalTime
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
kind = ast.LocalDate
|
kind = LocalDate
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: kind,
|
Kind: kind,
|
||||||
Data: b[:i],
|
Data: b[:i],
|
||||||
}), b[i:], nil
|
}), b[i:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:funlen,gocognit,cyclop
|
//nolint:funlen,gocognit,cyclop
|
||||||
func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
func (p *Parser) scanIntOrFloat(b []byte) (reference, []byte, error) {
|
||||||
i := 0
|
i := 0
|
||||||
|
|
||||||
if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' {
|
if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' {
|
||||||
@@ -990,9 +1177,10 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Integer,
|
Kind: Integer,
|
||||||
Data: b[:i],
|
Data: b[:i],
|
||||||
|
Raw: p.rangeOfToken(b[:i], b[i:]),
|
||||||
}), b[i:], nil
|
}), b[i:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1013,42 +1201,45 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
|||||||
|
|
||||||
if c == 'i' {
|
if c == 'i' {
|
||||||
if scanFollowsInf(b[i:]) {
|
if scanFollowsInf(b[i:]) {
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:i+3],
|
Data: b[:i+3],
|
||||||
|
Raw: p.rangeOfToken(b[:i+3], b[i+3:]),
|
||||||
}), b[i+3:], nil
|
}), b[i+3:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
|
return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'i' while scanning for a number")
|
||||||
}
|
}
|
||||||
|
|
||||||
if c == 'n' {
|
if c == 'n' {
|
||||||
if scanFollowsNan(b[i:]) {
|
if scanFollowsNan(b[i:]) {
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: ast.Float,
|
Kind: Float,
|
||||||
Data: b[:i+3],
|
Data: b[:i+3],
|
||||||
|
Raw: p.rangeOfToken(b[:i+3], b[i+3:]),
|
||||||
}), b[i+3:], nil
|
}), b[i+3:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
|
return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'n' while scanning for a number")
|
||||||
}
|
}
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
return ast.InvalidReference, b, newDecodeError(b, "incomplete number")
|
return invalidReference, b, NewParserError(b, "incomplete number")
|
||||||
}
|
}
|
||||||
|
|
||||||
kind := ast.Integer
|
kind := Integer
|
||||||
|
|
||||||
if isFloat {
|
if isFloat {
|
||||||
kind = ast.Float
|
kind = Float
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.builder.Push(ast.Node{
|
return p.builder.Push(Node{
|
||||||
Kind: kind,
|
Kind: kind,
|
||||||
Data: b[:i],
|
Data: b[:i],
|
||||||
|
Raw: p.rangeOfToken(b[:i], b[i:]),
|
||||||
}), b[i:], nil
|
}), b[i:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1075,11 +1266,11 @@ func isValidBinaryRune(r byte) bool {
|
|||||||
|
|
||||||
func expect(x byte, b []byte) ([]byte, error) {
|
func expect(x byte, b []byte) ([]byte, error) {
|
||||||
if len(b) == 0 {
|
if len(b) == 0 {
|
||||||
return nil, newDecodeError(b, "expected character %c but the document ended here", x)
|
return nil, NewParserError(b, "expected character %c but the document ended here", x)
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[0] != x {
|
if b[0] != x {
|
||||||
return nil, newDecodeError(b[0:1], "expected character %c", x)
|
return nil, NewParserError(b[0:1], "expected character %c", x)
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[1:], nil
|
return b[1:], nil
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Regression test for https://github.com/pelletier/go-toml/issues/1047:
|
||||||
|
// Parser.Range must use the real slice offset, not len(data)-len(slice).
|
||||||
|
func TestParser_Range_HighlightAfterComment(t *testing.T) {
|
||||||
|
input := []byte("# comment\n= \"value\"")
|
||||||
|
|
||||||
|
var p Parser
|
||||||
|
p.Reset(input)
|
||||||
|
for p.NextExpression() {
|
||||||
|
}
|
||||||
|
err := p.Error()
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("expected an error")
|
||||||
|
}
|
||||||
|
|
||||||
|
var perr *ParserError
|
||||||
|
if !errors.As(err, &perr) {
|
||||||
|
t.Fatalf("expected *ParserError, got %T", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
r := p.Range(perr.Highlight)
|
||||||
|
shape := p.Shape(r)
|
||||||
|
|
||||||
|
if r.Offset != 10 {
|
||||||
|
t.Errorf("Range offset: got %d, want 10", r.Offset)
|
||||||
|
}
|
||||||
|
if shape.Start.Line != 2 || shape.Start.Column != 1 {
|
||||||
|
t.Errorf("position: got %d:%d, want 2:1", shape.Start.Line, shape.Start.Column)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,697 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pelletier/go-toml/v2/internal/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestParser_AST_Numbers(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
desc string
|
||||||
|
input string
|
||||||
|
kind Kind
|
||||||
|
err bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "integer just digits",
|
||||||
|
input: `1234`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer zero",
|
||||||
|
input: `0`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer sign",
|
||||||
|
input: `+99`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer hex uppercase",
|
||||||
|
input: `0xDEADBEEF`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer hex lowercase",
|
||||||
|
input: `0xdead_beef`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer octal",
|
||||||
|
input: `0o01234567`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "integer binary",
|
||||||
|
input: `0b11010110`,
|
||||||
|
kind: Integer,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float zero",
|
||||||
|
input: `0.0`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float positive zero",
|
||||||
|
input: `+0.0`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float negative zero",
|
||||||
|
input: `-0.0`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float pi",
|
||||||
|
input: `3.1415`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float negative",
|
||||||
|
input: `-0.01`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float signed exponent",
|
||||||
|
input: `5e+22`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float exponent lowercase",
|
||||||
|
input: `1e06`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float exponent uppercase",
|
||||||
|
input: `-2E-2`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float fractional with exponent",
|
||||||
|
input: `6.626e-34`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "float underscores",
|
||||||
|
input: `224_617.445_991_228`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inf",
|
||||||
|
input: `inf`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inf negative",
|
||||||
|
input: `-inf`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inf positive",
|
||||||
|
input: `+inf`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "nan",
|
||||||
|
input: `nan`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "nan negative",
|
||||||
|
input: `-nan`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "nan positive",
|
||||||
|
input: `+nan`,
|
||||||
|
kind: Float,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
e := e
|
||||||
|
t.Run(e.desc, func(t *testing.T) {
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(`A = ` + e.input))
|
||||||
|
p.NextExpression()
|
||||||
|
err := p.Error()
|
||||||
|
if e.err {
|
||||||
|
assert.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
expected := astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: e.kind, Data: []byte(e.input)},
|
||||||
|
{Kind: Key, Data: []byte(`A`)},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
compareNode(t, expected, p.Expression())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type (
|
||||||
|
astNode struct {
|
||||||
|
Kind Kind
|
||||||
|
Data []byte
|
||||||
|
Children []astNode
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
func compareNode(t *testing.T, e astNode, n *Node) {
|
||||||
|
t.Helper()
|
||||||
|
assert.Equal(t, e.Kind, n.Kind)
|
||||||
|
assert.Equal(t, e.Data, n.Data)
|
||||||
|
|
||||||
|
compareIterator(t, e.Children, n.Children())
|
||||||
|
}
|
||||||
|
|
||||||
|
func compareIterator(t *testing.T, expected []astNode, actual Iterator) {
|
||||||
|
t.Helper()
|
||||||
|
idx := 0
|
||||||
|
|
||||||
|
for actual.Next() {
|
||||||
|
n := actual.Node()
|
||||||
|
|
||||||
|
if idx >= len(expected) {
|
||||||
|
t.Fatal("extra child in actual tree")
|
||||||
|
}
|
||||||
|
e := expected[idx]
|
||||||
|
|
||||||
|
compareNode(t, e, n)
|
||||||
|
|
||||||
|
idx++
|
||||||
|
}
|
||||||
|
|
||||||
|
if idx < len(expected) {
|
||||||
|
t.Fatal("missing children in actual", "idx =", idx, "expected =", len(expected))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//nolint:funlen
|
||||||
|
func TestParser_AST(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
desc string
|
||||||
|
input string
|
||||||
|
ast astNode
|
||||||
|
err bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "simple string assignment",
|
||||||
|
input: `A = "hello"`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`hello`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "simple bool assignment",
|
||||||
|
input: `A = true`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: Bool,
|
||||||
|
Data: []byte(`true`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "array of strings",
|
||||||
|
input: `A = ["hello", ["world", "again"]]`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: Array,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`hello`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Array,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`world`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`again`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "array of arrays of strings",
|
||||||
|
input: `A = ["hello", "world"]`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: Array,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`hello`),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: String,
|
||||||
|
Data: []byte(`world`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`A`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "inline table",
|
||||||
|
input: `name = { first = "Tom", last = "Preston-Werner" }`,
|
||||||
|
ast: astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: InlineTable,
|
||||||
|
Children: []astNode{
|
||||||
|
{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: String, Data: []byte(`Tom`)},
|
||||||
|
{Kind: Key, Data: []byte(`first`)},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: String, Data: []byte(`Preston-Werner`)},
|
||||||
|
{Kind: Key, Data: []byte(`last`)},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Kind: Key,
|
||||||
|
Data: []byte(`name`),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
e := e
|
||||||
|
t.Run(e.desc, func(t *testing.T) {
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(e.input))
|
||||||
|
p.NextExpression()
|
||||||
|
err := p.Error()
|
||||||
|
if e.err {
|
||||||
|
assert.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
compareNode(t, e.ast, p.Expression())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkParseBasicStringWithUnicode(b *testing.B) {
|
||||||
|
p := &Parser{}
|
||||||
|
b.Run("4", func(b *testing.B) {
|
||||||
|
input := []byte(`"\u1234\u5678\u9ABC\u1234\u5678\u9ABC"`)
|
||||||
|
b.ReportAllocs()
|
||||||
|
b.SetBytes(int64(len(input)))
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
_, _, _, _ = p.parseBasicString(input)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
b.Run("8", func(b *testing.B) {
|
||||||
|
input := []byte(`"\u12345678\u9ABCDEF0\u12345678\u9ABCDEF0"`)
|
||||||
|
b.ReportAllocs()
|
||||||
|
b.SetBytes(int64(len(input)))
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
_, _, _, _ = p.parseBasicString(input)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func BenchmarkParseBasicStringsEasy(b *testing.B) {
|
||||||
|
p := &Parser{}
|
||||||
|
|
||||||
|
for _, size := range []int{1, 4, 8, 16, 21} {
|
||||||
|
b.Run(strconv.Itoa(size), func(b *testing.B) {
|
||||||
|
input := []byte(`"` + strings.Repeat("A", size) + `"`)
|
||||||
|
|
||||||
|
b.ReportAllocs()
|
||||||
|
b.SetBytes(int64(len(input)))
|
||||||
|
|
||||||
|
for i := 0; i < b.N; i++ {
|
||||||
|
_, _, _, _ = p.parseBasicString(input)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestParser_AST_DateTimes(t *testing.T) {
|
||||||
|
examples := []struct {
|
||||||
|
desc string
|
||||||
|
input string
|
||||||
|
kind Kind
|
||||||
|
err bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
desc: "offset-date-time with delim 'T' and UTC offset",
|
||||||
|
input: `2021-07-21T12:08:05Z`,
|
||||||
|
kind: DateTime,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "offset-date-time with space delim and +8hours offset",
|
||||||
|
input: `2021-07-21 12:08:05+08:00`,
|
||||||
|
kind: DateTime,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "local-date-time with nano second",
|
||||||
|
input: `2021-07-21T12:08:05.666666666`,
|
||||||
|
kind: LocalDateTime,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "local-date-time",
|
||||||
|
input: `2021-07-21T12:08:05`,
|
||||||
|
kind: LocalDateTime,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
desc: "local-date",
|
||||||
|
input: `2021-07-21`,
|
||||||
|
kind: LocalDate,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, e := range examples {
|
||||||
|
e := e
|
||||||
|
t.Run(e.desc, func(t *testing.T) {
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(`A = ` + e.input))
|
||||||
|
p.NextExpression()
|
||||||
|
err := p.Error()
|
||||||
|
if e.err {
|
||||||
|
assert.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
expected := astNode{
|
||||||
|
Kind: KeyValue,
|
||||||
|
Children: []astNode{
|
||||||
|
{Kind: e.kind, Data: []byte(e.input)},
|
||||||
|
{Kind: Key, Data: []byte(`A`)},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
compareNode(t, expected, p.Expression())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This example demonstrates how to parse a TOML document and preserving
|
||||||
|
// comments. Comments are stored in the AST as Comment nodes. This example
|
||||||
|
// displays the structure of the full AST generated by the parser using the
|
||||||
|
// following structure:
|
||||||
|
//
|
||||||
|
// 1. Each root-level expression is separated by three dashes.
|
||||||
|
// 2. Bytes associated to a node are displayed in square brackets.
|
||||||
|
// 3. Siblings have the same indentation.
|
||||||
|
// 4. Children of a node are indented one level.
|
||||||
|
func ExampleParser_comments() {
|
||||||
|
doc := `# Top of the document comment.
|
||||||
|
# Optional, any amount of lines.
|
||||||
|
|
||||||
|
# Above table.
|
||||||
|
[table] # Next to table.
|
||||||
|
# Above simple value.
|
||||||
|
key = "value" # Next to simple value.
|
||||||
|
# Below simple value.
|
||||||
|
|
||||||
|
# Some comment alone.
|
||||||
|
|
||||||
|
# Multiple comments, on multiple lines.
|
||||||
|
|
||||||
|
# Above inline table.
|
||||||
|
name = { first = "Tom", last = "Preston-Werner" } # Next to inline table.
|
||||||
|
# Below inline table.
|
||||||
|
|
||||||
|
# Above array.
|
||||||
|
array = [ 1, 2, 3 ] # Next to one-line array.
|
||||||
|
# Below array.
|
||||||
|
|
||||||
|
# Above multi-line array.
|
||||||
|
key5 = [ # Next to start of inline array.
|
||||||
|
# Second line before array content.
|
||||||
|
1, # Next to first element.
|
||||||
|
# After first element.
|
||||||
|
# Before second element.
|
||||||
|
2,
|
||||||
|
3, # Next to last element
|
||||||
|
# After last element.
|
||||||
|
] # Next to end of array.
|
||||||
|
# Below multi-line array.
|
||||||
|
|
||||||
|
# Before array table.
|
||||||
|
[[products]] # Next to array table.
|
||||||
|
# After array table.
|
||||||
|
`
|
||||||
|
|
||||||
|
var printGeneric func(*Parser, int, *Node)
|
||||||
|
printGeneric = func(p *Parser, indent int, e *Node) {
|
||||||
|
if e == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
s := p.Shape(e.Raw)
|
||||||
|
x := fmt.Sprintf("%d:%d->%d:%d (%d->%d)", s.Start.Line, s.Start.Column, s.End.Line, s.End.Column, s.Start.Offset, s.End.Offset)
|
||||||
|
fmt.Printf("%-25s | %s%s [%s]\n", x, strings.Repeat(" ", indent), e.Kind, e.Data)
|
||||||
|
printGeneric(p, indent+1, e.Child())
|
||||||
|
printGeneric(p, indent, e.Next())
|
||||||
|
}
|
||||||
|
|
||||||
|
printTree := func(p *Parser) {
|
||||||
|
for p.NextExpression() {
|
||||||
|
e := p.Expression()
|
||||||
|
fmt.Println("---")
|
||||||
|
printGeneric(p, 0, e)
|
||||||
|
}
|
||||||
|
if err := p.Error(); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
p := &Parser{
|
||||||
|
KeepComments: true,
|
||||||
|
}
|
||||||
|
p.Reset([]byte(doc))
|
||||||
|
printTree(p)
|
||||||
|
|
||||||
|
// Output:
|
||||||
|
// ---
|
||||||
|
// 1:1->1:31 (0->30) | Comment [# Top of the document comment.]
|
||||||
|
// ---
|
||||||
|
// 2:1->2:33 (31->63) | Comment [# Optional, any amount of lines.]
|
||||||
|
// ---
|
||||||
|
// 4:1->4:15 (65->79) | Comment [# Above table.]
|
||||||
|
// ---
|
||||||
|
// 1:1->1:1 (0->0) | Table []
|
||||||
|
// 5:2->5:7 (81->86) | Key [table]
|
||||||
|
// 5:9->5:25 (88->104) | Comment [# Next to table.]
|
||||||
|
// ---
|
||||||
|
// 6:1->6:22 (105->126) | Comment [# Above simple value.]
|
||||||
|
// ---
|
||||||
|
// 7:1->7:14 (127->140) | KeyValue []
|
||||||
|
// 7:7->7:14 (133->140) | String [value]
|
||||||
|
// 7:1->7:4 (127->130) | Key [key]
|
||||||
|
// 7:15->7:38 (141->164) | Comment [# Next to simple value.]
|
||||||
|
// ---
|
||||||
|
// 8:1->8:22 (165->186) | Comment [# Below simple value.]
|
||||||
|
// ---
|
||||||
|
// 10:1->10:22 (188->209) | Comment [# Some comment alone.]
|
||||||
|
// ---
|
||||||
|
// 12:1->12:40 (211->250) | Comment [# Multiple comments, on multiple lines.]
|
||||||
|
// ---
|
||||||
|
// 14:1->14:22 (252->273) | Comment [# Above inline table.]
|
||||||
|
// ---
|
||||||
|
// 15:1->15:50 (274->323) | KeyValue []
|
||||||
|
// 15:8->15:9 (281->282) | InlineTable []
|
||||||
|
// 15:10->15:23 (283->296) | KeyValue []
|
||||||
|
// 15:18->15:23 (291->296) | String [Tom]
|
||||||
|
// 15:10->15:15 (283->288) | Key [first]
|
||||||
|
// 15:25->15:48 (298->321) | KeyValue []
|
||||||
|
// 15:32->15:48 (305->321) | String [Preston-Werner]
|
||||||
|
// 15:25->15:29 (298->302) | Key [last]
|
||||||
|
// 15:1->15:5 (274->278) | Key [name]
|
||||||
|
// 15:51->15:74 (324->347) | Comment [# Next to inline table.]
|
||||||
|
// ---
|
||||||
|
// 16:1->16:22 (348->369) | Comment [# Below inline table.]
|
||||||
|
// ---
|
||||||
|
// 18:1->18:15 (371->385) | Comment [# Above array.]
|
||||||
|
// ---
|
||||||
|
// 19:1->19:20 (386->405) | KeyValue []
|
||||||
|
// 1:1->1:1 (0->0) | Array []
|
||||||
|
// 19:11->19:12 (396->397) | Integer [1]
|
||||||
|
// 19:14->19:15 (399->400) | Integer [2]
|
||||||
|
// 19:17->19:18 (402->403) | Integer [3]
|
||||||
|
// 19:1->19:6 (386->391) | Key [array]
|
||||||
|
// 19:21->19:46 (406->431) | Comment [# Next to one-line array.]
|
||||||
|
// ---
|
||||||
|
// 20:1->20:15 (432->446) | Comment [# Below array.]
|
||||||
|
// ---
|
||||||
|
// 22:1->22:26 (448->473) | Comment [# Above multi-line array.]
|
||||||
|
// ---
|
||||||
|
// 23:1->31:2 (474->694) | KeyValue []
|
||||||
|
// 1:1->1:1 (0->0) | Array []
|
||||||
|
// 23:10->23:42 (483->515) | Comment [# Next to start of inline array.]
|
||||||
|
// 24:3->24:38 (518->553) | Comment [# Second line before array content.]
|
||||||
|
// 25:3->25:4 (556->557) | Integer [1]
|
||||||
|
// 25:6->25:30 (559->583) | Comment [# Next to first element.]
|
||||||
|
// 26:3->26:25 (586->608) | Comment [# After first element.]
|
||||||
|
// 27:3->27:27 (611->635) | Comment [# Before second element.]
|
||||||
|
// 28:3->28:4 (638->639) | Integer [2]
|
||||||
|
// 29:3->29:4 (643->644) | Integer [3]
|
||||||
|
// 29:6->29:28 (646->668) | Comment [# Next to last element]
|
||||||
|
// 30:3->30:24 (671->692) | Comment [# After last element.]
|
||||||
|
// 23:1->23:5 (474->478) | Key [key5]
|
||||||
|
// 31:3->31:26 (695->718) | Comment [# Next to end of array.]
|
||||||
|
// ---
|
||||||
|
// 32:1->32:26 (719->744) | Comment [# Below multi-line array.]
|
||||||
|
// ---
|
||||||
|
// 34:1->34:22 (746->767) | Comment [# Before array table.]
|
||||||
|
// ---
|
||||||
|
// 1:1->1:1 (0->0) | ArrayTable []
|
||||||
|
// 35:3->35:11 (770->778) | Key [products]
|
||||||
|
// 35:14->35:36 (781->803) | Comment [# Next to array table.]
|
||||||
|
// ---
|
||||||
|
// 36:1->36:21 (804->824) | Comment [# After array table.]
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIterator_IsLast(t *testing.T) {
|
||||||
|
// Test IsLast on an iterator with multiple elements using public Parser API
|
||||||
|
doc := `array = [1, 2, 3]`
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(doc))
|
||||||
|
p.NextExpression()
|
||||||
|
|
||||||
|
e := p.Expression()
|
||||||
|
arr := e.Value() // The array node
|
||||||
|
|
||||||
|
it := arr.Children()
|
||||||
|
count := 0
|
||||||
|
lastCount := 0
|
||||||
|
for it.Next() {
|
||||||
|
count++
|
||||||
|
if it.IsLast() {
|
||||||
|
lastCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, 3, count)
|
||||||
|
assert.Equal(t, 1, lastCount)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNodeChaining(t *testing.T) {
|
||||||
|
// Test that sibling nodes are correctly chained via Next()
|
||||||
|
// This exercises the internal PushAndChain functionality through public APIs
|
||||||
|
doc := `a.b.c = 1`
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(doc))
|
||||||
|
p.NextExpression()
|
||||||
|
|
||||||
|
e := p.Expression()
|
||||||
|
// KeyValue has children: value, then key parts (a, b, c)
|
||||||
|
keyIt := e.Key()
|
||||||
|
|
||||||
|
// Collect all key parts by following the iterator
|
||||||
|
var keys []string
|
||||||
|
for keyIt.Next() {
|
||||||
|
keys = append(keys, string(keyIt.Node().Data))
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, []string{"a", "b", "c"}, keys)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMultipleExpressions(t *testing.T) {
|
||||||
|
// Test parsing multiple top-level expressions
|
||||||
|
// This exercises root iteration through public APIs
|
||||||
|
doc := `
|
||||||
|
key1 = "value1"
|
||||||
|
key2 = "value2"
|
||||||
|
key3 = "value3"
|
||||||
|
`
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(doc))
|
||||||
|
|
||||||
|
var keys []string
|
||||||
|
for p.NextExpression() {
|
||||||
|
e := p.Expression()
|
||||||
|
keyIt := e.Key()
|
||||||
|
keyIt.Next()
|
||||||
|
keys = append(keys, string(keyIt.Node().Data))
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.NoError(t, p.Error())
|
||||||
|
assert.Equal(t, []string{"key1", "key2", "key3"}, keys)
|
||||||
|
}
|
||||||
|
|
||||||
|
func ExampleParser() {
|
||||||
|
doc := `
|
||||||
|
hello = "world"
|
||||||
|
value = 42
|
||||||
|
`
|
||||||
|
p := Parser{}
|
||||||
|
p.Reset([]byte(doc))
|
||||||
|
for p.NextExpression() {
|
||||||
|
e := p.Expression()
|
||||||
|
fmt.Printf("Expression: %s\n", e.Kind)
|
||||||
|
value := e.Value()
|
||||||
|
it := e.Key()
|
||||||
|
k := it.Node() // shortcut: we know there is no dotted key in the example
|
||||||
|
fmt.Printf("%s -> (%s) %s\n", k.Data, value.Kind, value.Data)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Output:
|
||||||
|
// Expression: KeyValue
|
||||||
|
// hello -> (String) world
|
||||||
|
// Expression: KeyValue
|
||||||
|
// value -> (Integer) 42
|
||||||
|
}
|
||||||
@@ -1,4 +1,6 @@
|
|||||||
package toml
|
package unstable
|
||||||
|
|
||||||
|
import "github.com/pelletier/go-toml/v2/internal/characters"
|
||||||
|
|
||||||
func scanFollows(b []byte, pattern string) bool {
|
func scanFollows(b []byte, pattern string) bool {
|
||||||
n := len(pattern)
|
n := len(pattern)
|
||||||
@@ -54,16 +56,16 @@ func scanLiteralString(b []byte) ([]byte, []byte, error) {
|
|||||||
case '\'':
|
case '\'':
|
||||||
return b[:i+1], b[i+1:], nil
|
return b[:i+1], b[i+1:], nil
|
||||||
case '\n', '\r':
|
case '\n', '\r':
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "literal strings cannot have new lines")
|
return nil, nil, NewParserError(b[i:i+1], "literal strings cannot have new lines")
|
||||||
}
|
}
|
||||||
size := utf8ValidNext(b[i:])
|
size := characters.Utf8ValidNext(b[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character")
|
||||||
}
|
}
|
||||||
i += size
|
i += size
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, nil, newDecodeError(b[len(b):], "unterminated literal string")
|
return nil, nil, NewParserError(b[len(b):], "unterminated literal string")
|
||||||
}
|
}
|
||||||
|
|
||||||
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
||||||
@@ -98,39 +100,39 @@ func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
|||||||
i++
|
i++
|
||||||
|
|
||||||
if i < len(b) && b[i] == '\'' {
|
if i < len(b) && b[i] == '\'' {
|
||||||
return nil, nil, newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string")
|
return nil, nil, NewParserError(b[i-3:i+1], "''' not allowed in multiline literal string")
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[:i], b[i:], nil
|
return b[:i], b[i:], nil
|
||||||
}
|
}
|
||||||
case '\r':
|
case '\r':
|
||||||
if len(b) < i+2 {
|
if len(b) < i+2 {
|
||||||
return nil, nil, newDecodeError(b[len(b):], `need a \n after \r`)
|
return nil, nil, NewParserError(b[len(b):], `need a \n after \r`)
|
||||||
}
|
}
|
||||||
if b[i+1] != '\n' {
|
if b[i+1] != '\n' {
|
||||||
return nil, nil, newDecodeError(b[i:i+2], `need a \n after \r`)
|
return nil, nil, NewParserError(b[i:i+2], `need a \n after \r`)
|
||||||
}
|
}
|
||||||
i += 2 // skip the \n
|
i += 2 // skip the \n
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
size := utf8ValidNext(b[i:])
|
size := characters.Utf8ValidNext(b[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character")
|
||||||
}
|
}
|
||||||
i += size
|
i += size
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, nil, newDecodeError(b[len(b):], `multiline literal string not terminated by '''`)
|
return nil, nil, NewParserError(b[len(b):], `multiline literal string not terminated by '''`)
|
||||||
}
|
}
|
||||||
|
|
||||||
func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
|
func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
|
||||||
const lenCRLF = 2
|
const lenCRLF = 2
|
||||||
if len(b) < lenCRLF {
|
if len(b) < lenCRLF {
|
||||||
return nil, nil, newDecodeError(b, "windows new line expected")
|
return nil, nil, NewParserError(b, "windows new line expected")
|
||||||
}
|
}
|
||||||
|
|
||||||
if b[1] != '\n' {
|
if b[1] != '\n' {
|
||||||
return nil, nil, newDecodeError(b, `windows new line should be \r\n`)
|
return nil, nil, NewParserError(b, `windows new line should be \r\n`)
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[:lenCRLF], b[lenCRLF:], nil
|
return b[:lenCRLF], b[lenCRLF:], nil
|
||||||
@@ -149,7 +151,6 @@ func scanWhitespace(b []byte) ([]byte, []byte) {
|
|||||||
return b, b[len(b):]
|
return b, b[len(b):]
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:unparam
|
|
||||||
func scanComment(b []byte) ([]byte, []byte, error) {
|
func scanComment(b []byte) ([]byte, []byte, error) {
|
||||||
// comment-start-symbol = %x23 ; #
|
// comment-start-symbol = %x23 ; #
|
||||||
// non-ascii = %x80-D7FF / %xE000-10FFFF
|
// non-ascii = %x80-D7FF / %xE000-10FFFF
|
||||||
@@ -165,11 +166,11 @@ func scanComment(b []byte) ([]byte, []byte, error) {
|
|||||||
if i+1 < len(b) && b[i+1] == '\n' {
|
if i+1 < len(b) && b[i+1] == '\n' {
|
||||||
return b[:i+1], b[i+1:], nil
|
return b[:i+1], b[i+1:], nil
|
||||||
}
|
}
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
|
||||||
}
|
}
|
||||||
size := utf8ValidNext(b[i:])
|
size := characters.Utf8ValidNext(b[i:])
|
||||||
if size == 0 {
|
if size == 0 {
|
||||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
|
return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
|
||||||
}
|
}
|
||||||
|
|
||||||
i += size
|
i += size
|
||||||
@@ -192,17 +193,17 @@ func scanBasicString(b []byte) ([]byte, bool, []byte, error) {
|
|||||||
case '"':
|
case '"':
|
||||||
return b[:i+1], escaped, b[i+1:], nil
|
return b[:i+1], escaped, b[i+1:], nil
|
||||||
case '\n', '\r':
|
case '\n', '\r':
|
||||||
return nil, escaped, nil, newDecodeError(b[i:i+1], "basic strings cannot have new lines")
|
return nil, escaped, nil, NewParserError(b[i:i+1], "basic strings cannot have new lines")
|
||||||
case '\\':
|
case '\\':
|
||||||
if len(b) < i+2 {
|
if len(b) < i+2 {
|
||||||
return nil, escaped, nil, newDecodeError(b[i:i+1], "need a character after \\")
|
return nil, escaped, nil, NewParserError(b[i:i+1], "need a character after \\")
|
||||||
}
|
}
|
||||||
escaped = true
|
escaped = true
|
||||||
i++ // skip the next character
|
i++ // skip the next character
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], `basic string not terminated by "`)
|
return nil, escaped, nil, NewParserError(b[len(b):], `basic string not terminated by "`)
|
||||||
}
|
}
|
||||||
|
|
||||||
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
||||||
@@ -243,27 +244,27 @@ func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
|||||||
i++
|
i++
|
||||||
|
|
||||||
if i < len(b) && b[i] == '"' {
|
if i < len(b) && b[i] == '"' {
|
||||||
return nil, escaped, nil, newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`)
|
return nil, escaped, nil, NewParserError(b[i-3:i+1], `""" not allowed in multiline basic string`)
|
||||||
}
|
}
|
||||||
|
|
||||||
return b[:i], escaped, b[i:], nil
|
return b[:i], escaped, b[i:], nil
|
||||||
}
|
}
|
||||||
case '\\':
|
case '\\':
|
||||||
if len(b) < i+2 {
|
if len(b) < i+2 {
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], "need a character after \\")
|
return nil, escaped, nil, NewParserError(b[len(b):], "need a character after \\")
|
||||||
}
|
}
|
||||||
escaped = true
|
escaped = true
|
||||||
i++ // skip the next character
|
i++ // skip the next character
|
||||||
case '\r':
|
case '\r':
|
||||||
if len(b) < i+2 {
|
if len(b) < i+2 {
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], `need a \n after \r`)
|
return nil, escaped, nil, NewParserError(b[len(b):], `need a \n after \r`)
|
||||||
}
|
}
|
||||||
if b[i+1] != '\n' {
|
if b[i+1] != '\n' {
|
||||||
return nil, escaped, nil, newDecodeError(b[i:i+2], `need a \n after \r`)
|
return nil, escaped, nil, NewParserError(b[i:i+2], `need a \n after \r`)
|
||||||
}
|
}
|
||||||
i++ // skip the \n
|
i++ // skip the \n
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, escaped, nil, newDecodeError(b[len(b):], `multiline basic string not terminated by """`)
|
return nil, escaped, nil, NewParserError(b[len(b):], `multiline basic string not terminated by """`)
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
package unstable
|
||||||
|
|
||||||
|
// Unmarshaler is implemented by types that can unmarshal a TOML
|
||||||
|
// description of themselves. The input is a valid TOML document
|
||||||
|
// containing the relevant portion of the parsed document.
|
||||||
|
//
|
||||||
|
// For tables (including split tables defined in multiple places),
|
||||||
|
// the data contains the raw key-value bytes from the original document
|
||||||
|
// with adjusted table headers to be relative to the unmarshaling target.
|
||||||
|
type Unmarshaler interface {
|
||||||
|
UnmarshalTOML(data []byte) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// RawMessage is a raw encoded TOML value. It implements Unmarshaler
|
||||||
|
// and can be used to delay TOML decoding or capture raw content.
|
||||||
|
//
|
||||||
|
// Example usage:
|
||||||
|
//
|
||||||
|
// type Config struct {
|
||||||
|
// Plugin RawMessage `toml:"plugin"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// var cfg Config
|
||||||
|
// toml.NewDecoder(r).EnableUnmarshalerInterface().Decode(&cfg)
|
||||||
|
// // cfg.Plugin now contains the raw TOML bytes for [plugin]
|
||||||
|
type RawMessage []byte
|
||||||
|
|
||||||
|
// UnmarshalTOML implements Unmarshaler.
|
||||||
|
func (m *RawMessage) UnmarshalTOML(data []byte) error {
|
||||||
|
*m = append((*m)[0:0], data...)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user