diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 699791b..0000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,56 +0,0 @@ -name: Test and Lint -on: - push: - branches-ignore: [wip/**] -jobs: - build: - strategy: - matrix: - os: - - { icon: 🐧, name: AMD64 Linux, on: ubuntu-latest } - - { icon: 🐧, name: ARM64 Linux, on: ubuntu-24.04-arm } - - { icon: 🍎, name: ARM64 macOS, on: macos-latest } - - { icon: 🍎, name: AMD64 macOS, on: macos-15-intel } - - { icon: πŸͺŸ, name: AMD64 Windows, on: windows-latest } - go: ["1.24", "1.23", "1.22"] - name: ${{ matrix.os.icon }} ${{ matrix.os.name }}, Go ${{ matrix.go }} - runs-on: ${{ matrix.os.on }} - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - - name: Setup Go - uses: actions/setup-go@v5 - with: { go-version: "${{ matrix.go }}", check-latest: true } - - name: Run Tests - run: make test - wasm: - name: πŸ•ΈοΈ Build WASM - runs-on: ubuntu-latest - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - - name: Setup TinyGo - uses: acifani/setup-tinygo@v2 - with: { tinygo-version: 0.37.0 } - - name: Test WASM - run: make wasm - lint: - name: πŸ“Š Lint and Cover - runs-on: ubuntu-latest - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - - name: Setup Go - uses: actions/setup-go@v5 - with: { go-version-file: go.mod, check-latest: true } - - name: Install Dependencies - run: make debian-lint-depends - - name: Run pre-commit - uses: pre-commit/action@v3.0.1 - - name: Run Test Coverage - run: go test -race -coverprofile coverage.txt -covermode atomic ./... - - name: Upload Coverage - uses: codecov/codecov-action@v4 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: cover.out diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..8e8995e --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,39 @@ +name: πŸ› Deploy Playground +on: + push: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Go + uses: actions/setup-go@v5 + with: { go-version: "1.24", check-latest: true } + - name: Setup TinyGo + uses: acifani/setup-tinygo@v2 + with: { tinygo-version: 0.36.0 } + - name: Generate App + run: make playground + - name: Upload Artifact + if: github.ref == 'refs/heads/playground' + uses: actions/upload-pages-artifact@v3 + with: { path: ./pub } + + deploy: + needs: build + if: github.ref == 'refs/heads/playground' + permissions: + pages: write + id-token: write + concurrency: + group: "pages" + cancel-in-progress: false + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..665aa1c --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,21 @@ +name: πŸ§ͺ Lint +on: + push: + branches-ignore: [wip/**] +jobs: + lint: + name: πŸ”Ž Lint Playground + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + - name: Setup Go + uses: actions/setup-go@v5 + with: { check-latest: true } + - name: Setup TinyGo + uses: acifani/setup-tinygo@v2 + with: { tinygo-version: 0.36.0 } + - name: Install Dependencies + run: make debian-lint-depends + - name: Run pre-commit + uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index cee6123..0000000 --- 
a/.github/workflows/release.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: πŸš€ Release -on: - push: - # Release on semantic version tag. - tags: ["v[0-9]+.[0-9]+.[0-9]+"] -jobs: - release: - name: πŸš€ Release on GitHub - runs-on: ubuntu-latest - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Generate Release Notes - id: notes - uses: theory/changelog-version-notes-action@v0 - - name: Create GitHub Release - uses: softprops/action-gh-release@v2 - with: - name: "Release ${{ github.ref }}" - body_path: "${{ steps.notes.outputs.file }}" diff --git a/.gitignore b/.gitignore index 6ad1902..f0d691c 100644 --- a/.gitignore +++ b/.gitignore @@ -21,10 +21,12 @@ vendor/ go.work # Editor files -.vscode/ +# .vscode/ # Build artifacts +pub/ _build/ +jsonpath-compliance-test-suite/ # OS Stuff .DS_Store diff --git a/.golangci.yaml b/.golangci.yaml index e289a28..daaf0f0 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -5,57 +5,17 @@ linters: # Too strict. - cyclop # redundant, covered by gocyclo - depguard - - exhaustruct - - funcorder # new, too much to move around - nestif # redundant, covered by gocyclo - nlreturn - - testpackage - - varnamelen - wsl - - wsl_v5 - - noinlineerr - settings: - errcheck: - disable-default-exclusions: false - check-type-assertions: false - check-blank: false - exclude-functions: - - fmt.Fprintf - - fmt.Fprintln - - fmt.Fprint - exhaustive: - default-signifies-exhaustive: true - funlen: - lines: 68 - ignore-comments: true - ireturn: - allow: - - anon - - error - - empty - - stdlib - - generic - - ast\.Node$ - - types\.DateTime$ - revive: - rules: - - name: var-naming - arguments: [[], [], [{ skip-package-name-checks: true }]] exclusions: generated: lax - rules: - - linters: - - err113 - - funlen - - maintidx - path: _test\.go paths: - third_party$ - builtin$ - examples$ formatters: enable: - - gci - gofmt - gofumpt - goimports diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e9fe98c..06f6669 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,14 +19,17 @@ repos: name: Go mod tidy language: system entry: go mod tidy - types: [go] pass_filenames: false - id: golangci-lint name: Go linting language: system - entry: golangci-lint run --fix --timeout=5m + entry: make golangci-lint + pass_filenames: false + - id: playground + name: playground + language: system + entry: make playground pass_filenames: false - types: [go] - repo: https://github.com/pre-commit/mirrors-prettier rev: v3.1.0 @@ -34,3 +37,4 @@ repos: - id: prettier name: JSON and YAML formatting types_or: [json, yaml] + exclude: ^.vscode/ diff --git a/.util/pg2go.pl b/.util/pg2go.pl deleted file mode 100755 index 8e6b6c0..0000000 --- a/.util/pg2go.pl +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env perl -w - -use strict; -use warnings; -use v5.30; - -# Functions to cause SQL `true` and `false` args to jsonb_path_query() to evaluate. -sub true { 1 } -sub false { 0 } -sub NULL { undef } - -my $num = 0; - -while () { - chomp; - s/^select\s+(?:[*]\s+from\s+)?//i or next; - my $comment = s/\s*--\s*(.+)// ? " // $1" : ''; - until (/;$/) { - $_ .= ; - chomp; - } - local $@; - my ($json, $path, $opts) = eval; - die $@ if $@; - $num++; - say qq/ { - name: "test_$num", - json: js(\`$json\`), - path: \`$path\`,$opts - exp: []any{},$comment - },/; -} - -# Mock jsonb_path_query that converts its arguments into the JSON, path, and -# Options to specify the test. 
-sub jsonb_path_query { - my ($json, $path, @opts) = @_; - return $json, $path, '' unless @opts; - my @options; - while (@opts) { - my $param = shift @opts; - my $val = shift @opts; - if ($param eq 'silent') { - push @options => 'WithSilent()' if $val; - } elsif ($param eq 'vars') { - push @options => "WithVars(jv(\`$val\`))" if $val; - } else { - push @options => "WithVars(jv(\`$param\`))"; - push @options => 'WithSilent()' if $val; - } - } - return $json, $path, "\n opt: []Option{" . join(',', @options) . '},'; -} - -sub jsonb_path_query_tz { - my ($json, $path, $opts) = jsonb_path_query(@_); - return $json, $path, "\n opt: []Option{WithTZ()}," unless $opts; - $opts =~ s/\}$/WithTZ()}/; - return $json, $path, $opts; -} - -sub jsonb_path_query_array { - jsonb_path_query(@_); -} - -sub jsonb_path_query_first { - jsonb_path_query(@_); -} - -sub jsonb_path_match { - jsonb_path_query(@_); -} - -# Paste tests to convert below __DATA__. -__DATA__ -SELECT jsonb_path_match('[{"a": 1}, {"a": 2}]', '$[*].a > 1'); -SELECT jsonb_path_match('[{"a": 1}]', '$undefined_var'); -SELECT jsonb_path_match('[{"a": 1}]', 'false'); diff --git a/.util/pglist.go b/.util/pglist.go deleted file mode 100644 index 48488fb..0000000 --- a/.util/pglist.go +++ /dev/null @@ -1,65 +0,0 @@ -package main - -// Utility to generate `git diff` commands for the Postgres source from -// comments that contain GitHub URLs. Use it on in a Postgres Git clone to -// compare changes since the last time comments were updated. -// -// go run .util/pglist.go - -import ( - "bufio" - "fmt" - "io" - "io/fs" - "os" - "path/filepath" - "regexp" - "slices" - "strings" -) - -func main() { - srcRegex := regexp.MustCompile(`[.](?:go|md)`) - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L52-L64 - pgRegex := regexp.MustCompile(`postgres/postgres/blob/([^/]+)/([^#]+)`) - - found := map[string][]string{} - logErr(filepath.WalkDir("path", func(path string, info fs.DirEntry, err error) error { - if err == nil && srcRegex.MatchString(info.Name()) { - file, err := os.Open(path) - logErr(err) - defer file.Close() - reader := bufio.NewReader(file) - for { - line, err := reader.ReadString('\n') - if err == io.EOF { - break - } - logErr(err) - if match := pgRegex.FindStringSubmatch(line); match != nil { - if list, ok := found[match[1]]; ok { - if !slices.Contains(list, match[2]) { - found[match[1]] = append(list, strings.TrimSpace(match[2])) - } - } else { - found[match[1]] = []string{strings.TrimSpace(match[2])} - } - } - } - } - return nil - })) - - fmt.Println("# Clone the next release tag from the Postgres repo and run these diffs:") - for tag, files := range found { - for _, f := range files { - fmt.Printf("git diff %v -- %v\n", tag, f) - } - } -} - -func logErr(err error) { - if err != nil { - panic(err) - } -} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..56f6034 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,23 @@ +{ + "gopls": { + "build.env": { + "GOOS": "js", + "GOARCH": "wasm" + } + }, + "cSpell.words": [ + "datetime", + "Goldmark", + "Inuzuka", + "keyvalue", + "lval", + "shopspring", + "sqljson", + "timetz's", + "vals", + "Yusuke" + ], + "yaml.schemas": { + "https://json.schemastore.org/github-workflow.json": "file:///Users/david/dev/go/sqljson/.github/workflows/deploy.yml" + } +} diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 3eccaa4..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,143 +0,0 @@ -# Changelog - -All notable 
changes to this project will be documented in this file. It uses the -[Keep a Changelog] format, and this project adheres to [Semantic Versioning]. - - [Keep a Changelog]: https://keepachangelog.com/en/1.1.0/ - [Semantic Versioning]: https://semver.org/spec/v2.0.0.html - "Semantic Versioning 2.0.0" - -## [v0.3.2] β€” Unreleased - -### πŸ“š Improvements - -* Updated with all changes for PostgreSQL 18, which adds no new - functionality but adds a few new tests, adjusts the formatting of error - messages, and tweaks the wording of some documentation. - -### ⬆️ Dependency Updates - -* Upgraded dependencies to the latest versions that still support Go v1.22. -* Upgraded to `golangci-lint` v2.8.0 - -### πŸͺ² Bug Fixes - -* Fixed test name scoping issues with testify objects. - - [v0.3.2]: https://github.com/theory/sqljson/compare/v0.3.1...v0.3.2 - -## [v0.3.1] β€” 2025-04-25 - -### ⚑ Improvements - -* Added support for 32-bit architectures, including [TinyGo]. - - [v0.3.1]: https://github.com/theory/sqljson/compare/v0.3.0...v0.3.1 - [TinyGo]: https://tinygo.org - -## [v0.3.0] β€” 2025-03-31 - -### πŸͺ² Bug Fixes - -* Changed the return value of the `Query` and `MustQuery` functions and - methods from `any` to `[]any`, as they always return a slice of selected - items. - -### πŸ“š Documentation - -* Fixed the example code at the top of [ast](./path/ast/ast.go) to properly - handle `nil` binary operands and the node returned by `Next()`. - -### πŸ“” Notes - -* Applied fixes suggested by golangci-lint v2. - - [v0.3.0]: https://github.com/theory/sqljson/compare/v0.2.1...v0.2.0 - -## [v0.2.1] β€” 2024-12-22 - -### πŸ“š Documentation - -* Updated the playground links in the README to the new URL. -* Added a couple of missing playground links to the README. - - [v0.2.1]: https://github.com/theory/sqljson/compare/v0.2.0...v0.2.1 - -## [v0.2.0] β€” 2024-12-15 - -### ⚑ Improvements - -* Replaced the `ToString` method from the `DateTime` interface with - `fmt.Stringer`. This complies with [a change] made just before the release - of PostgreSQL 17 in order to keep the jsonpath functions immutable. The - upshot is that time zones no longer impact the output of the `.string()` - method. - -### πŸͺ² Bug Fixes - -* Synced an [error message change] from PostgreSQL, changing `bool` to - `boolean`. - -### πŸ“š Documentation - -* PostgreSQL's implementation is also no longer impacted by the `DateStyle` - GUC, so the paragraph about the lack of support for that feature has been - removed from the `Compatibility` section of the `README`. - -### πŸ“” Notes - -* Added a utility to help examine changes in the PostgreSQL repository when - syncing changes. - - [v0.2.0]: https://github.com/theory/sqljson/compare/v0.1.1...v0.2.0 - [a change]: https://github.com/postgres/postgres/commit/cc4fdfa - [error message change]: https://github.com/postgres/postgres/commit/f2353dd - -## [v0.1.1] β€” 2024-09-19 - -### πŸ“š Documentation - -* Linked to and described the new [πŸ› Playground] in the `README` -* Fixed broken links in the `README` -* Added Playground links for all the examples in the `README` -* Moved discussion of the difference from the PostgreSQL `DateStyle` - parameter to the `Compatibility` section of the `README` -* Added notes on the variable handling of identifiers with some Unicode code - points to the `Compatibility` section of the `README` - -### πŸͺ² Bug Fixes - -* Fixed failing tests comparing IDs generated by the `.keyvalue()` method - due to differences between compile time and runtime. 
- - [v0.1.1]: https://github.com/theory/sqljson/compare/v0.1.0...v0.1.1 - [πŸ› Playground]: https://theory.github.io/sqljson/playground - -## [v0.1.0] β€” 2024-07-04 - -The theme of this release is *Full Metal PostgreSQL.* - -### ⚑ Improvements - -* First release, everything is new! -* Full [PostgreSQL 17 JSONPath] implementation as of [Beta 1] -* All [PostgreSQL jsonpath tests] replicated and passing -* Includes parser, AST, executor, and datetime data types - -### πŸ—οΈ Build Setup - -* Built with Go -* Use `go get` to add to a project - -### πŸ“š Documentation - -* Docs on [pkg.go.dev] -* Thorough language docs and list of variations from PostgreSQL in the `README` - - [v0.1.0]: https://github.com/theory/sqljson/compare/b9883eb...v0.1.0 - [PostgreSQL 17 JSONPath]: https://www.postgresql.org/docs/current/functions-json.html#FUNCTIONS-SQLJSON-PATH - "PostgreSQL Docs: The SQL/JSON Path Language" - [Beta 1]: https://www.postgresql.org/about/news/postgresql-17-beta-1-released-2865/ - "PostgreSQL 17 Beta 1 Released!" - [PostgreSQL jsonpath tests]: https://github.com/postgres/postgres/blob/REL_17_BETA1/src/test/regress/sql/jsonb_jsonpath.sql - [pkg.go.dev]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path diff --git a/Makefile b/Makefile index 66c2989..333bdf7 100644 --- a/Makefile +++ b/Makefile @@ -1,60 +1,54 @@ GO ?= go -.PHONY: test # Run the unit tests -test: - GOTOOLCHAIN=local $(GO) test ./... -count=1 +SRC_DIR := src +DST_DIR := pub +WASM_EXEC := $(shell tinygo env TINYGOROOT)/targets/wasm_exec.js +# WASM_EXEC := $(shell go env GOROOT)/lib/wasm/wasm_exec.js -.PHONY: cover # Run test coverage -cover: $(shell find . -name \*.go) - GOTOOLCHAIN=local $(GO) test -v -coverprofile=cover.out -covermode=count ./... - @$(GO) tool cover -html=cover.out +playground: $(DST_DIR)/play.wasm $(DST_DIR)/index.html $(DST_DIR)/wasm_exec.js $(DST_DIR)/play.css $(DST_DIR)/playground/index.html -.PHONY: lint # Lint the project -lint: .golangci.yaml - @pre-commit run --show-diff-on-failure --color=always --all-files +ROOT_DIR := $(dir $(realpath $(lastword $(MAKEFILE_LIST)))) +$(DST_DIR)/play.wasm: $(SRC_DIR)/main.go + @mkdir -p $(@D) + GOOS=js GOARCH=wasm tinygo build -no-debug -size short -o $@ $< +# cd $(SRC_DIR); GOOS=js GOARCH=wasm go build -o $(ROOT_DIR)/$@ $$(basename "$<") -.PHONY: clean # Remove generated files -clean: - $(GO) clean - @rm -rf cover.out _build +$(DST_DIR)/play.css: $(SRC_DIR)/play.css + mkdir -p $(@D) + cp $< $@ -# WASM -.PHONY: wasm # Build a simple app with Go and TinyGo WASM compilation. -wasm: _build/go.wasm _build/tinygo.wasm +$(DST_DIR)/index.html: $(SRC_DIR)/index.html + mkdir -p $(@D) + version=$$(grep sqljson go.mod | awk '{print $$3}'); cat $< | sed -e "s!{{version}}!$${version}!g" > $@ -_build/go.wasm: internal/wasm/wasm.go - @mkdir -p $(@D) - GOOS=js GOARCH=wasm $(GO) build -o $@ $< +$(DST_DIR)/playground/index.html: $(SRC_DIR)/playground/index.html + mkdir -p $(@D) + cp $< $@ -_build/tinygo.wasm: internal/wasm/wasm.go - @mkdir -p $(@D) - GOOS=js GOARCH=wasm tinygo build -no-debug -size short -o $@ $< +$(DST_DIR)/wasm_exec.js: $(WASM_EXEC) + mkdir -p $(@D) + cp $< $@ + +.PHONY: run +run: playground + python3 -m http.server --directory $(DST_DIR) -############################################################################ -# Utilities. 
.PHONY: brew-lint-depends # Install linting tools from Homebrew brew-lint-depends: brew install golangci-lint .PHONY: debian-lint-depends # Install linting tools on Debian debian-lint-depends: - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sudo sh -s -- -b /usr/bin v2.8.0 - -.PHONY: install-generators # Install Go code generators -install-generators: - $(GO) install golang.org/x/tools/cmd/goyacc@v0.32.0 - $(GO) install golang.org/x/tools/cmd/stringer@v0.32.0 - -.PHONY: generate # Generate Go code -generate: - @$(GO) generate ./... - @perl -i -pe 's{^//line yacc.+\n}{}g' path/parser/grammar.go - -## .git/hooks/pre-commit: Install the pre-commit hook -.git/hooks/pre-commit: - @printf "#!/bin/sh\nmake lint\n" > $@ - @chmod +x $@ - -.PHONY: pg-diff # Generage diff statements aginst the Postgres source. -pg-diff: .util/pglist.go - @go run $< + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sudo sh -s -- -b /usr/bin v2.0.2 + +.PHONY: lint # Lint the project +lint: .pre-commit-config.yaml + @GOOS=js GOARCH=wasm pre-commit run --show-diff-on-failure --color=always --all-files + +.PHONY: golangci-lint # Run golangci-lint +golangci-lint: .golangci.yaml + @GOOS=js GOARCH=wasm golangci-lint run --fix --timeout=5m + +.PHONY: clean +clean: + rm -rf $(DST_DIR) diff --git a/README.md b/README.md index f5b73ba..20c819d 100644 --- a/README.md +++ b/README.md @@ -1,73 +1,150 @@ -Go SQL/JSON -=========== - -[![License](https://img.shields.io/badge/License-PostgreSQL-blue.svg)](https://opensource.org/license/postgresql "βš–οΈ License") -[![GoDoc](https://godoc.org/github.com/theory/sqljson?status.svg)](https://pkg.go.dev/github.com/theory/sqljson "πŸ“„ Documentation") -[![Go Report Card](https://goreportcard.com/badge/github.com/theory/sqljson)](https://goreportcard.com/report/github.com/theory/sqljson "πŸ—ƒοΈ Report Card") -[![Build Status](https://github.com/theory/sqljson/actions/workflows/ci.yml/badge.svg)](https://github.com/theory/sqljson/actions/workflows/ci.yml "πŸ› οΈ Build Status") -[![Code Coverage](https://codecov.io/gh/theory/sqljson/graph/badge.svg?token=DIFED324ZY)](https://codecov.io/gh/theory/sqljson "πŸ“Š Code Coverage") - -The SQL/JSON package provides PostgreSQL-compatible SQL-standard SQL/JSON -functionality in Go. For now that means [jsonpath](path/). An example: - -``` go -func main() { - src := []byte(`{ - "track": { - "segments": [ - { - "location": [ 47.763, 13.4034 ], - "start time": "2018-10-14 10:05:14", - "HR": 73 - }, - { - "location": [ 47.706, 13.2635 ], - "start time": "2018-10-14 10:39:21", - "HR": 135 - } - ] - } - }`) - - // Parse the JSON. - var value any - if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) - } - - // Parse the SQL-standard jsonpath query. - p, err := path.Parse(`$.track.segments[*] ? (@.HR > 130)."start time"`) - if err != nil { - log.Fatal(err) - } - - // Execute the query against the JSON. - items, err := p.Query(context.Background(), value) - if err != nil { - log.Fatal(err) - } - - // Print the results. - fmt.Printf("%v\n", items) - // Output: [2018-10-14 10:39:21] -} +Go SQL/JSON Path Playground +=========================== + +The source for the [Go SQL/JSON Path Playground], a stateless single-page web +site for experimenting with the [Go SQL/JSON Path] package. Compiled via +[TinyGo] into a ca. 930 K (360 K compressed) [Wasm] file and loaded directly +into the page. 
All functionality is implemented in JavaScript and Go, based on the [Go +JSONPath Playground], [Goldmark Playground], and [serde_json_path Sandbox]. + +Usage +----- + +On load, the form will be filled with sample JSON, a randomly-selected example +query, and, in some cases, option adjustments for the query. Hit the "Run +Query" button to see the values the path query selects from the JSON appear in +the "Query Output" field. + +To try your own, paste the JSON to query into the "JSON" field and input the +jsonpath expression into the "Path" field, then hit the "Run Query" button to +see the values the path query selects from the JSON. + +That's it. + +Read on for details and additional features. + +### Docs + +The two buttons in the top-right corner provide documentation and links. + +* Hit the button with the circled question mark in the top right corner to + reveal a table summarizing the SQL/JSON Path syntax. + +* Hit the button with the circled i for information about the SQL/JSON Path + playground. + +### Mode + +Choose the mode in which to execute the jsonpath query. The options are: + +* **Query**: Use [Query] to return an array of all the JSON items returned + by the Path from the JSON. +* **First**: Like Query, but uses [First] to return only the first item, if + any. +* **Exists or Match**: Use [ExistsOrMatch] to return `true` or `false` + depending on whether the query does or does not find results or match + values, and `null` if the result is unknown. + +For the subtleties of the two behaviors of jsonpath expressions that use +`Exists` or `Match`, see [Two Types of Queries]. + +### Options + +Select options for execution and the display of results: + +* **WithSilent**: Use [WithSilent] to suppress some errors, including missing + object field or array element, unexpected JSON item type, and datetime and + numeric errors. +* **WithTZ**: Use [WithTZ] to allow comparisons of datetime values that + require timezone-aware conversions. +* **LocalTZ**: Use [ContextWithTZ] to parse times and timestamps in the + context of your browser's local time zone instead of [UTC]. + +### Permalink + +Hit this button to reload the page with a URL that contains the contents of +all the fields. Use for sharing. + +Note that the Playground is stateless; no data is stored except in the +Permalink URL itself (and whatever data collection GitHub injects; see its +[privacy statement] for details). + +### Path + +Input the jsonpath expression to execute into this field. See the [language +docs] or the [PostgreSQL docs] for details on the jsonpath language. Example: + +```jsonpath +$.a[*] ? (@ >= $min && @ <= $max) ``` -See the [path README](./path/README.md) for a complete description of the -SQL/JSON path language, and the [Go doc] for usage and examples. +### Variables -Or take the [πŸ› Playground] for a spin ([direct link for above example]). -Implemented as a single-page stateless JavaScript and [TinyGo]-compiled [Wasm] -app. +Input the variables used in the *Path* as a JSON object. For example, the +*Path* example above references two variables, `$min` and `$max`. The object +to set their values might be: -## Copyright +``` json +{ "min": 2, "max": 4 } +``` -Copyright Β© 1996-2025 The PostgreSQL Global Development Group +### JSON -Copyright Β© 2024-2025 David E. Wheeler +Input the JSON against which to execute the *Path* expression. May be any kind +of JSON value, including objects, arrays, and scalar values. 
An example that +the above Path expression successfully executes against: - [Go doc]: https://pkg.go.dev/github.com/theory/sqljson/path - [πŸ› Playground]: https://theory.github.io/sqljson/playground - [direct link for above example]: https://theory.github.io/sqljson/playground/?p=%2524.track.segments%255B*%255D%2520%253F%2520%28%2540.HR%2520%253E%2520130%29.%2522start%2520time%2522&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1&v=v0.1.0 - [TinyGo]: https://tinygo.org +```json +{ "a": [1,2,3,4,5] } +``` + +## Syntax Summary + +| Syntax Element | Description | +| ------------------ | ----------------------------------------------------------------------- | +| `$` | root node identifier | +| `@` | current node identifier (valid only within filter selectors) | +| `."name"` | name selector: selects a named child of an object | +| `.name` | shorthand for `."name"` | +| `.*` | wildcard selector: selects all children of a node | +| `.**` | recursive wildcard accessor: selects zero or more descendants of a node | +| `.**{3}` | recursive wildcard accessor: selects up to specified level of hierarchy | +| `.**{2 to 5}` | recursive wildcard accessor: selects from start to end level | +| `[]` | array selector with comma-delimited subscripts | +| `[3]` | index selector subscript: selects an indexed child of an array | +| `[3 to last]` | array slice subscript: select slice from start to end index (or `last`) | +| `[*]` | wildcard array selector: returns all array elements. | +| `$var_name` | a variable referring to a value in the Vars object | +| `strict` | raise error on a structural error | +| `lax` | suppress structural errors | +| `?()` | filter selector: selects and transforms children | +| `.size()` | method selector | + +## Copyright and License + +Copyright (c) 2024-2025 David E. Wheeler. Distributed under the [PostgreSQL License]. + +Based on [Goldmark Playground] the [serde_json_path Sandbox], with icons from +[Boxicons], all distributed under the [MIT License]. 
+ + [Go SQL/JSON Path Playground]: https://theory.github.io/sqljson/playground + [Go SQL/JSON Path]: https://pkg.go.dev/github.com/theory/sqljson/path + "pkg.go.dev: github.com/theory/sqljson/path" [Wasm]: https://webassembly.org "WebAssembly" + [TinyGo]: https://tinygo.org + [Go JSONPath Playground]: https://theory.github.io/jsonpath/playground + [Goldmark Playground]: https://yuin.github.io/goldmark/playground + [serde_json_path Sandbox]: https://serdejsonpath.live + [Query]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path#Path.Query + [First]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path#Path.First + [ExistsOrMatch]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path#Path.ExistsOrMatch + [Two Types of Queries]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path#hdr-Two_Types_of_Queries + [WithSilent]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path#example-package-WithSilent + [WithTZ]: https://pkg.go.dev/github.com/theory/sqljson@v0.1.0/path#example-package-WithTZ + [ContextWithTZ]: https://pkg.go.dev/github.com/theory/sqljson/path/types#ContextWithTZ + [UTC]: https://en.wikipedia.org/wiki/Coordinated_Universal_Time + [privacy statement]: https://docs.github.com/en/site-policy/privacy-policies/github-general-privacy-statement + [language docs]: https://github.com/theory/sqljson/blob/main/path/README.md + [PostgreSQL docs]: https://www.postgresql.org/docs/devel/functions-json.html#FUNCTIONS-SQLJSON-PATH + [PostgreSQL License]: https://www.opensource.org/licenses/postgresql + [MIT License]: https://opensource.org/license/mit diff --git a/go.mod b/go.mod index 44d694f..b8d2845 100644 --- a/go.mod +++ b/go.mod @@ -1,16 +1,13 @@ -module github.com/theory/sqljson +module main -go 1.22.0 +go 1.24.0 -require ( - github.com/smasher164/xid v0.1.2 - github.com/stretchr/testify v1.10.0 - golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac -) +toolchain go1.24.2 + +require github.com/theory/sqljson v0.3.1 require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - golang.org/x/text v0.22.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect + github.com/smasher164/xid v0.1.2 // indirect + golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect + golang.org/x/text v0.23.0 // indirect ) diff --git a/go.sum b/go.sum index 76fce3d..da91109 100644 --- a/go.sum +++ b/go.sum @@ -6,13 +6,13 @@ github.com/smasher164/xid v0.1.2 h1:erplXSdBRIIw+MrwjJ/m8sLN2XY16UGzpTA0E2Ru6HA= github.com/smasher164/xid v0.1.2/go.mod h1:tgivm8CQl19fH1c5y+8F4mA+qY6n2i6qDRBlY/6nm+I= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac h1:l5+whBCLH3iH2ZNHYLbAe58bo7yrN4mVcnkHDYz5vvs= -golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac/go.mod h1:hH+7mtFmImwwcMvScyxUhjuVHR3HGaDPMn9rMSUUbxo= +github.com/theory/sqljson v0.3.1 h1:vwPHdVzwhvkJ/y5eCqk3+ygcfwOcr/7SnfGJB6KnmQs= +github.com/theory/sqljson v0.3.1/go.mod h1:QXKj6XAV86KCuHj/+uSveiwYgc5A9jaKe3+2Pk37kdc= +golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw= +golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= -golang.org/x/text v0.22.0/go.mod 
h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/wasm/wasm.go b/internal/wasm/wasm.go deleted file mode 100644 index 7999444..0000000 --- a/internal/wasm/wasm.go +++ /dev/null @@ -1,25 +0,0 @@ -// Package main performs a basic JSONPath query in order to test WASM compilation. -package main - -import ( - "context" - "encoding/json" - "fmt" - - "github.com/theory/sqljson/path" -) - -func main() { - // Parse a jsonpath query. - p, _ := path.Parse(`$.foo`) - - // Select values from unmarshaled JSON input. - result, _ := p.Query(context.Background(), []byte(`{"foo": "bar"}`)) - - // Show the result. - //nolint:errchkjson - items, _ := json.Marshal(result) - - //nolint:forbidigo - fmt.Printf("%s\n", items) -} diff --git a/internal/wasm/wasm_test.go b/internal/wasm/wasm_test.go deleted file mode 100644 index dba06b0..0000000 --- a/internal/wasm/wasm_test.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -import ( - "testing" -) - -func TestMain(t *testing.T) { - t.Parallel() - main() -} diff --git a/path/README.md b/path/README.md deleted file mode 100644 index 2d17881..0000000 --- a/path/README.md +++ /dev/null @@ -1,1285 +0,0 @@ -Go SQL/JSON Path -================ - -The path package ports the SQL/JSON Path data type from PostgreSQL to Go. It -supports both SQL-standard path expressions and PostgreSQL-specific predicate -check expressions. - -> πŸ’‘ Use the [πŸ› Playground] links below to run the examples in this document, -> and to experiment with jsonpath execution. The Go SQL/JSON Path Playground -> is a single-page stateless JavaScript and [TinyGo]-compiled [Wasm] app that -> offers permalink generation to share examples, like [this one]. - -## The SQL/JSON Path Language - -> This section was ported from the [PostgreSQL docs]. - - - - -SQL/JSON Path is a query language for JSON values. A path expression applied -to a JSON value produces a JSON result. - -SQL/JSON path expressions specify item(s) to be retrieved from a JSON value, -similarly to XPath expressions used for access to XML content. In Go, path -expressions are implemented in the path package and can use any elements -described [below](#syntax). - -### Syntax - -The path package implements support for the SQL/JSON path language in Go to -efficiently query JSON data. It provides an abstract syntax tree of the -parsed SQL/JSON path expression that specifies the items to be retrieved by -the path engine from the JSON data for further processing with the SQL/JSON -query functions. - -The semantics of SQL/JSON path predicates and operators generally follow SQL. -At the same time, to provide a natural way of working with JSON data, SQL/JSON -path syntax uses some JavaScript conventions: - -* Dot (`.`) is used for member access. - -* Square brackets (`[]`) are used for array access. - -* SQL/JSON arrays are 0-relative, like Go slices, but unlike regular SQL - arrays, which start from 1. 
- -Numeric literals in SQL/JSON path expressions follow JavaScript rules, which -are different from Go, SQL, and JSON in some minor details. For example, -SQL/JSON path allows `.1` and `1.`, which are invalid in JSON. Non-decimal -integer literals and underscore separators are supported, for example, -`1_000_000`, `0x1EEE_FFFF`, `0o273`, `0b100101`. In SQL/JSON path (and in -JavaScript, but not in SQL or Go), there must not be an underscore separator -directly after the radix prefix. - -An SQL/JSON path expression is typically written as a Go string literal, so it -must be enclosed in back quotes or double quotes --- and with the latter any -double quotes within the value must be escaped (see [string literals]). - -Some forms of path expressions require string literals within them. These -embedded string literals follow JavaScript/ECMAScript conventions: they must -be surrounded by double quotes, and backslash escapes may be used within them -to represent otherwise-hard-to-type characters. In particular, the way to -write a double quote within a double-quoted string literal is `\"`, and to -write a backslash itself, you must write `\\`. Other special backslash -sequences include those recognized in JSON strings: `\b`, `\f`, `\n`, `\r`, -`\t`, `\v` for various ASCII control characters, and `\uNNNN` for a Unicode -character identified by its 4-hex-digit code point. The backslash syntax also -includes two cases not allowed by JSON: `\xNN` for a character code written -with only two hex digits, and `\u{N...}` for a character code written with 1 -to 6 hex digits. - -A path expression consists of a sequence of path elements, which can be any of -the following: - -* Path literals of JSON primitive types: Unicode text, numeric, `true`, - `false`, or `null` -* Path variables listed in the [Path Variables table](#path-variables) -* Accessor operators listed in the [Path Accessors table](#path-accessors) -* JSON path operators and methods listed[SQL/JSON Path Operators And - Methods](#sql-json-path-operators-and-methods) -* Parentheses, which can be used to provide filter expressions or define the - order of path evaluation - -For details on using JSON path expressions with SQL/JSON query functions, see -[Operation](#operation). - -#### Path Variables - -| Variable | Description -| ---------- | ------------------------------------------------------------------------------------------------- | -| `$` | A variable representing the JSON value being queried (the context item). | -| `$varname` | A named variable. Its value can be set by the `exec.WithVars` option of Path processing functions | -| `@` | A variable representing the result of path evaluation in filter expressions. | - -#### Path Accessors - -| Accessor Operator | Description -| --------------------- | ------------------------------------------------------------------------------------------------- | -| `.key`, `."$varname"` | Member accessor that returns an object member with the specified key. If the key name matches some named variable starting with `$` or does not meet the JavaScript rules for an identifier, it must be enclosed in double quotes to make it a string literal. -| `.*` | Wildcard member accessor that returns the values of all members located at the top level of the current object. -| `.**` | Recursive wildcard member accessor that processes all levels of the JSON hierarchy of the current object and returns all the member values, regardless of their nesting level. This is a PostgreSQL extension of the SQL/JSON standard. 
-| `.**{level}`, `.**{start_level to end_level}` | Like `.**`, but selects only the specified levels of the JSON hierarchy. Nesting levels are specified as integers. Level zero corresponds to the current object. To access the lowest nesting level, you can use the `last` keyword. This is a PostgreSQL extension of the SQL/JSON standard. -| `[subscript, ...]` | Array element accessor. `subscript` can be given in two forms: `index` or `start_index` to `end_index`. The first form returns a single array element by its index. The second form returns an array slice by the range of indexes, including the elements that correspond to the provided `start_index` and `end_index`.

The specified index can be an integer, as well as an expression returning a single numeric value, which is automatically cast to integer. Index zero corresponds to the first array element. You can also use the `last` keyword to denote the last array element, which is useful for handling arrays of unknown length. -| `[*]` | Wildcard array element accessor that returns all array elements. - -### Operation - -Path query functions pass the provided path expression to the path engine for -evaluation. If the expression matches the queried JSON data, the corresponding -set of JSON items, is returned as an `[]any` slice. If there is no match, the -result will be an empty slice, `NULL`, `false`, or an error, depending on the -function. Path expressions are written in the SQL/JSON path language and can -include arithmetic expressions and functions. - -A path expression consists of a sequence of elements allowed by the SQL/JSON -path language. The path expression is normally evaluated from left to right, -but you can use parentheses to change the order of operations. If the -evaluation is successful, a sequence of JSON items is produced, and the -evaluation result is returned to the Path query function that completes the -specified computation. - -To refer to the JSON value being queried (the context item), use the `$` -variable in the path expression. The first element of a path must always be -`$`. It can be followed by one or more accessor operators, which go down the -JSON structure level by level to retrieve sub-items of the context item. Each -accessor operator acts on the result(s) of the previous evaluation step, -producing zero, one, or more output items from each input item. - -For example, suppose you have some JSON data from a GPS tracker that you would -like to parse, such as: - -``` go -var src = []byte(`{ - "track": { - "segments": [ - { - "location": [ 47.763, 13.4034 ], - "start time": "2018-10-14 10:05:14", - "HR": 73 - }, - { - "location": [ 47.706, 13.2635 ], - "start time": "2018-10-14 10:39:21", - "HR": 135 - } - ] - } -}`) -``` - -The path package expects JSON to be decoded into a Go value, one of `string`, -`float64`, [`json.Number`], `map[string]any`, or `[]any` β€” which are the -values produced by unmarshaling data into an `any` value. For the above JSON, -unmarshal it like so: - -``` go -var value any -if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) -} -fmt.Printf("%T\n", value) -``` - -The output shows the parsed data type: - -``` go -map[string]interface {} -``` - -Note that examples below encode results as JSON for legibility using a -function like this: - -``` go -func pp(val any) { - js, err := json.Marshal(val) - if err != nil { - log.Fatal(err) - } - fmt.Println(string(js)) -} -``` - -To retrieve the available track segments, you need to use the `.key` accessor -operator to descend through surrounding JSON objects, for example: - -``` go -pp(path.MustQuery("$.track.segments", value)) -``` - -And the output (indented for legibility; [playground][play01]): - -``` json -[ - [ - { - "HR": 73, - "location": [ - 47.763, - 13.4034 - ], - "start time": "2018-10-14 10:05:14" - }, - { - "HR": 135, - "location": [ - 47.706, - 13.2635 - ], - "start time": "2018-10-14 10:39:21" - } - ] -] -``` - -To retrieve the contents of an array, you typically use the `[*]` operator. 
The -following example will return the location coordinates for all the available -track segments ([playground][play02]): - -``` go -pp(path.MustQuery("$.track.segments[*].location", value)) -``` - -``` json -[[47.763,13.4034],[47.706,13.2635]] -``` - -Here we started with the whole JSON input value (`$`), then the `.track` -accessor selected the JSON object associated with the `"track"` object key, -then the `.segments` accessor selected the JSON array associated with the -`"segments"` key within that object, then the `[*]` accessor selected each -element of that array (producing a series of items), then the `.location` -accessor selected the JSON array associated with the `"location"` key within -each of those objects. In this example, each of those objects had a -`"location"` key; but if any of them did not, the `.location` accessor would -have simply produced no output for that input item. - -To return the coordinates of the first segment only, you can specify the -corresponding subscript in the `[]` accessor operator. Recall that JSON array -indexes are 0-relative ([playground][play03]): - -```go -pp(path.MustQuery("$.track.segments[0].location", value)) -``` - -``` json -[[47.763,13.4034]] -``` - -The result of each path evaluation step can be processed by one or more of the -json path operators and methods listed [below](#sqljson-path-operators-and-methods). -Each method name must be preceded by a dot. For example, you can get the size -of an array ([playground][play04]): - -```go -pp(path.MustQuery("$.track.segments.size()", value)) -``` - -``` json -[2] -``` - -More examples of using jsonpath operators and methods within path expressions -appear [below](#sqljson-path-operators-and-methods). - -A path can also contain *filter* expressions that work similarly to the `WHERE` -clause in SQL. A filter expression begins with a question mark and provides a -condition in parentheses: - -``` -? (condition) -``` - -Filter expressions must be written just after the path evaluation step to -which they should apply. The result of that step is filtered to include only -those items that satisfy the provided condition. SQL/JSON defines three-valued -logic, so the condition can produce `true`, `false`, or `unknown`. The unknown -value plays the same role as SQL `NULL` and Go `nil` and can be tested for -with the `is unknown` predicate. Further path evaluation steps use only those -items for which the filter expression returned `true`. - -The functions and operators that can be used in filter expressions are listed -[below](#filter-expression-elements). Within a filter expression, the `@` -variable denotes the value being considered (i.e., one result of the preceding -path step). You can write accessor operators after `@` to retrieve component -items. - -For example, suppose you would like to retrieve all heart rate values higher -than 130. You can achieve this as follows ([playground][play05]): - -```go -pp(path.MustQuery("$.track.segments[*].HR ? (@ > 130)", value)) -``` - -``` json -[135] -``` - -To get the start times of segments with such values, you have to filter out -irrelevant segments before selecting the start times, so the filter expression -is applied to the previous step, and the path used in the condition is -different ([playground][play06]): - -```go -pp(path.MustQuery( - `$.track.segments[*] ? (@.HR > 130)."start time"`, - value, -)) -``` - -``` json -["2018-10-14 10:39:21"] -``` - -You can use several filter expressions in sequence, if required. 
The following -example selects start times of all segments that contain locations with -relevant coordinates and high heart rate values ([playground][play07]): - -```go -pp(path.MustQuery( - `$.track.segments[*] ? (@.location[1] < 13.4) ? (@.HR > 130)."start time"`, - value, -)) -``` - -```json -["2018-10-14 10:39:21"] -``` - -Using filter expressions at different nesting levels is also allowed. The -following example first filters all segments by location, and then returns -high heart rate values for these segments, if available ([playground][play08]): - -```go -pp(path.MustQuery( - `$.track.segments[*] ? (@.location[1] < 13.4).HR ? (@ > 130)`, - value, -)) -``` - -```json -[135] -``` - -You can also nest filter expressions within each other. This example returns -the size of the track if it contains any segments with high heart rate values, -or an empty sequence otherwise ([playground][play09]): - -```go -pp(path.MustQuery( - `$.track ? (exists(@.segments[*] ? (@.HR > 130))).segments.size()`, - value, -)) -``` - -```go -[2] -``` - -### Deviations From The SQL Standard - -PostgreSQL's implementation of the SQL/JSON path language, and therefore also -this Go implementation, has the following deviations from the SQL/JSON -standard. - -#### Boolean Predicate Check Expressions - -As an extension to the SQL standard, a PostgreSQL path expression can be a -Boolean predicate, whereas the SQL standard allows predicates only within -filters. While SQL-standard path expressions return the relevant element(s) of -the queried JSON value, predicate check expressions return the single -three-valued JSON result of the predicate: `true`, `false`, or `nil`. For -example, we could write this SQL-standard filter expression -([playground][play10]): - -```go -pp(path.MustQuery("$.track.segments ?(@[*].HR > 130)", value)) -``` - -The result: - -```json -[{"HR":135,"location":[47.706,13.2635],"start time":"2018-10-14 10:39:21"}] -``` - -The similar predicate check expression simply returns `true`, indicating that a -match exists ([playground][play11]): - -```go -pp(path.MustQuery("$.track.segments[*].HR > 130", value)) -``` - -```go -[true] -``` - -**Note:** PostgreSQL predicate check expressions require the `@@` operator, -while SQL-standard path expressions require the `@?` operator. Use the -`PgIndexOperator` method to pass the appropriate operator to PostgreSQL. - -#### Regular Expression Interpretation - -There are minor differences in the interpretation of regular expression -patterns used in `like_regex` filters, as described -[below](#sqljson-regular-expressions). - -### Strict And Lax Modes - -When you query JSON data, the path expression may not match the actual JSON -data structure. An attempt to access a non-existent member of an object or -element of an array is defined as a structural error. SQL/JSON path -expressions have two modes of handling structural errors: - -* lax (default) β€” the path engine implicitly adapts the queried data to - the specified path. Any structural errors that cannot be fixed as - described below are suppressed, producing no match. - -* strict β€” if a structural error occurs, an error is raised. - -Lax mode facilitates matching of a JSON document and path expression when the -JSON data does not conform to the expected schema. If an operand does not -match the requirements of a particular operation, it can be automatically -wrapped as an SQL/JSON array, or unwrapped by converting its elements into an -SQL/JSON sequence before performing the operation. 
Also, comparison operators -and most methods automatically unwrap their operands in lax mode, so you can -compare SQL/JSON arrays out-of-the-box. An array of size 1 is considered equal -to its sole element. Automatic unwrapping is not performed when: - -* The path expression contains `type()` or `size()` methods that return the - type and the number of elements in the array, respectively. - -* The queried JSON data contain nested arrays. In this case, only the - outermost array is unwrapped, while all the inner arrays remain unchanged. - Thus, implicit unwrapping can only go one level down within each path - evaluation step. - -For example, when querying the GPS data listed above, you can abstract from -the fact that it stores an array of segments when using lax mode -([playground][play12]): - -```go -pp(path.MustQuery("lax $.track.segments.location", value)) -``` - -``` json -[[47.763,13.4034],[47.706,13.2635]] -``` - -In strict mode, the specified path must exactly match the structure of the -queried JSON document, so using this path expression will cause an error -([playground][play13]): - -```go -pp(path.MustQuery("strict $.track.segments.location", value)) -``` - -``` text -panic: exec: jsonpath member accessor can only be applied to an object -``` - -To get the same result as in lax mode, you have to explicitly unwrap the -segments array ([playground][play14]): - -```go -pp(path.MustQuery("strict $.track.segments[*].location", value)) -``` - -``` json -[[47.763,13.4034],[47.706,13.2635]] -``` - -The unwrapping behavior of lax mode can lead to surprising results. For -instance, the following query using the `.**` accessor selects every `HR` value -twice ([playground][play15]): - -```go -pp(path.MustQuery("lax $.**.HR", value)) -``` - -``` go -[73,135,73,135] -``` - -This happens because the `.**` accessor selects both the segments array and -each of its elements, while the `.HR` accessor automatically unwraps arrays -when using lax mode. To avoid surprising results, we recommend using the `.**` -accessor only in strict mode. The following query selects each `HR` value just -once ([playground][play16]): - -```go -pp(path.MustQuery("strict $.**.HR", value)) -``` - -``` json -[73,135] -``` - -The unwrapping of arrays can also lead to unexpected results. Consider this -example, which selects all the location arrays ([playground][play17]): - -```go -pp(path.MustQuery("lax $.track.segments[*].location", value)) -``` - -``` json -[[47.763,13.4034],[47.706,13.2635]] -``` - -As expected it returns the full arrays. But applying a filter expression -causes the arrays to be unwrapped to evaluate each item, returning only the -items that match the expression ([playground][play18]): - -```go -pp(path.MustQuery( - "lax $.track.segments[*].location ?(@[*] > 15)", - value, -)) -``` - -``` json -[47.763,47.706] -``` - -This despite the fact that the full arrays are selected by the path -expression. Use strict mode to restore selecting the arrays -([playground][play19]): - -```go -pp(path.MustQuery( - "strict $.track.segments[*].location ?(@[*] > 15)", - value, -)) -``` - -``` json -[[47.763,13.4034],[47.706,13.2635]] -``` - -### SQL/JSON Path Operators And Methods - -The list of operators and methods available in JSON path expressions. Note -that while the unary operators and methods can be applied to multiple values -resulting from a preceding path step, the binary operators (addition etc.) can -only be applied to single values. 
In lax mode, methods applied to an array -will be executed for each value in the array. The exceptions are `.type()` and -`.size()`, which apply to the array itself. - -**Note:** The examples below use this utility function to marshall JSON -arguments: - -``` go -func val(src string) any { - var value any - if err := json.Unmarshal([]byte(src), &value); err != nil { - log.Fatal(err) - } - return value -} -``` - -#### `number + number β†’ number` - -Addition ([playground][play20]): - -``` go -pp(path.MustQuery("$[0] + 3", val("2"))) // β†’ [5] -``` - -#### `+ number β†’ number` - -Unary plus (no operation); unlike addition, this can iterate over multiple -values ([playground][play21]): - -``` go -pp(path.MustQuery("+ $.x", val(`{"x": [2,3,4]}`))) // β†’ [2, 3, 4] -``` - -#### `number - number β†’ number` - -Subtraction ([playground][play22]): - -``` go -pp(path.MustQuery("7 - $[0]", val("[2]"))) // β†’ [5] -``` - -#### `- number β†’ number` - -Negation; unlike subtraction, this can iterate over multiple values -([playground][play23]): - -``` go -pp(path.MustQuery("- $.x", val(`{"x": [2,3,4]}`))) // β†’ [-2,-3,-4] -``` - -#### `number * number β†’ number` - -Multiplication ([playground][play24]): - -``` go -pp(path.MustQuery("2 * $[0]", val("4"))) // β†’ [8] -``` -#### `number / number β†’ number` - -Division ([playground][play25]): - -``` go -pp(path.MustQuery("$[0] / 2", val("[8.5]"))) // β†’ [4.25] -``` - -#### `number % number β†’ number` - -Modulo (remainder) ([playground][play26]): - -``` go -pp(path.MustQuery("$[0] % 10", val("[32]"))) // β†’ [2] -``` - -#### `value . type() β†’ string` - -Type of the JSON item ([playground][play27]): - -``` go -pp(path.MustQuery("$[*].type()", val(`[1, "2", {}]`))) // β†’ ["number","string","object"] -``` - -#### `value . size() β†’ number` - -Size of the JSON item (number of array elements, or 1 if not an array; -[playground][play28]): - -``` go -pp(path.MustQuery("$.m.size()", val(`{"m": [11, 15]}`))) // β†’ [2] -``` - -#### `value . boolean() β†’ boolean` - -Boolean value converted from a JSON boolean, number, or string -([playground][play29]): - -``` go -pp(path.MustQuery("$[*].boolean()", val(`[1, "yes", false]`))) // β†’ [true,true,false] -``` - -#### `value . string() β†’ string` - -String value converted from a JSON boolean, number, string, or datetime -([playground][play30], [playground][play31]): - -``` go -pp(path.MustQuery("$[*].string()", val(`[1.23, "xyz", false]`))) // β†’ ["1.23","xyz","false"] -pp(path.MustQuery("$.timestamp().string()", "2023-08-15 12:34:56")) // β†’ ["2023-08-15T12:34:56"] -``` - -#### `value . double() β†’ number` - -Approximate floating-point number converted from a JSON number or string -([playground][play32]): - -``` go -pp(path.MustQuery(" ", val(`{"len": "1.9"}`))) // β†’ [3.8] -``` - -#### `number . ceiling() β†’ number` - -Nearest integer greater than or equal to the given number -([playground][play33]): - -``` go -pp(path.MustQuery("$.h.ceiling()", val(`{"h": 1.3}`))) // β†’ [2] -``` - -#### `number . floor() β†’ number` - -Nearest integer less than or equal to the given number ([playground][play34]): - -``` go -pp(path.MustQuery("$.h.floor()", val(`{"h": 1.7}`))) // β†’ [1] -``` - -#### `number . abs() β†’ number` - -Absolute value of the given number ([playground][play35]): - -``` go -pp(path.MustQuery("$.z.abs()", val(`{"z": -0.3}`))) // β†’ [0.3] -``` - -#### `value . 
bigint() β†’ bigint` - -Big integer value converted from a JSON number or string -([playground][play36]): - -``` go -pp(path.MustQuery("$.len.bigint()", val(`{"len": "9876543219"}`))) // β†’ [9876543219] -``` - -#### `value . decimal( [ precision [ , scale ] ] ) β†’ decimal` - -Rounded decimal value converted from a JSON number or string. Precision and -scale must be integer values ([playground][play37]): - -``` go -pp(path.MustQuery("$.decimal(6, 2)", val("1234.5678"))) // β†’ [1234.57] -``` - -#### `value . integer() β†’ integer` - -Integer value converted from a JSON number or string ([playground][play38]): - -``` go -pp(path.MustQuery("$.len.integer()", val(`{"len": "12345"}`))) // β†’ [12345] -``` - -#### `value . number() β†’ numeric` - -Numeric value converted from a JSON number or string ([playground][play39]): - -``` go -pp(path.MustQuery("$.len.number()", val(`{"len": "123.45"}`))) // β†’ [123.45] -``` - -#### `string . datetime() β†’ types.DateTime` - -Date/time value converted from a string ([playground][play40]): - -``` go -pp(path.MustQuery( - `$[*] ? (@.datetime() < "2015-08-02".datetime())`, - val(`["2015-08-01", "2015-08-12"]`), -)) // β†’ ["2015-8-01"] -``` - -#### `string . datetime(template) β†’ types.DateTime` - -Date/time value converted from a string using the specified to_timestamp -template. - -**NOTE:** Currently unimplemented, raises an error ([playground][play41]): - -``` go -pp(path.MustQuery( - `$[*].datetime("HH24:MI")`, val(`["12:30", "18:40"]`), -)) // β†’ panic: exec: .datetime(template) is not yet supported -``` - -#### `string . date() β†’ types.Date` - -Date value converted from a string ([playground][play42]): - -``` go -pp(path.MustQuery("$.date()", "2023-08-15")) // β†’ ["2023-08-15"] -``` - -#### `string . time() β†’ types.Time` - -Time without time zone value converted from a string ([playground][play43]): - -``` go -pp(path.MustQuery("$.time()", "12:34:56")) // β†’ ["12:34:56"] -``` - -#### `string . time(precision) β†’ types.Time` - -Time without time zone value converted from a string, with fractional seconds -adjusted to the given precision ([playground][play44]): - -``` go -pp(path.MustQuery("$.time(2)", "12:34:56.789")) // β†’ ["12:34:56.79"] -``` - -#### `string . time_tz() β†’ types.TimeTZ` - -Time with time zone value converted from a string ([playground][play45]): - -``` go -pp(path.MustQuery("$.time_tz()", "12:34:56+05:30")) // β†’ ["12:34:56+05:30"] -``` - -#### `string . time_tz(precision) β†’ types.TimeTZ` - -Time with time zone value converted from a string, with fractional seconds -adjusted to the given precision ([playground][play46]): - -``` go -pp(path.MustQuery("$.time_tz(2)", "12:34:56.789+05:30")) // β†’ ["12:34:56.79+05:30"] -``` - -#### `string . timestamp() β†’ types.Timestamp` - -Timestamp without time zone value converted from a string ([playground][play47]): - -``` go -pp(path.MustQuery("$.timestamp()", "2023-08-15 12:34:56")) // β†’ ["2023-08-15T12:34:56"] -``` - -#### `string . timestamp(precision) β†’ types.Timestamp` - -Timestamp without time zone value converted from a string, with fractional -seconds adjusted to the given precision ([playground][play48]): - -``` go -arg := "2023-08-15 12:34:56.789" -pp(path.MustQuery("$.timestamp(2)", arg)) // β†’ ["2023-08-15T12:34:56.79"] -``` - -#### `string . 
timestamp_tz() β†’ types.TimestampTZ` - -Timestamp with time zone value converted from a string ([playground][play49]): - -``` go -arg := "2023-08-15 12:34:56+05:30" -pp(path.MustQuery("$.timestamp_tz()", arg)) // β†’ ["2023-08-15T12:34:56+05:30"] -``` - -#### `string . timestamp_tz(precision) β†’ types.TimestampTZ` - -Timestamp with time zone value converted from a string, with fractional -seconds adjusted to the given precision ([playground][play50]): - -``` go -arg := "2023-08-15 12:34:56.789+05:30" -pp(path.MustQuery("$.timestamp_tz(2)", arg)) // β†’ ["2023-08-15T12:34:56.79+05:30"] -``` - -#### `object . keyvalue() β†’ []map[string]any` - -The object's key-value pairs, represented as an array of objects containing -three fields: "key", "value", and "id"; "id" is a unique identifier of the -object the key-value pair belongs to ([playground][play51]): - -``` go -pp(path.MustQuery("$.keyvalue()", val(`{"x": "20", "y": 32}`))) -// β†’ [{"id":0,"key":"x","value":"20"},{"id":0,"key":"y","value":32}] -``` - -### Filter Expression Elements - -The filter expression elements available in JSON path. - -#### `value == value β†’ boolean` - -Equality comparison (this, and the other comparison operators, work on all -JSON scalar values; [playground][play52], [playground][play53]): - -``` go -pp(path.MustQuery("$[*] ? (@ == 1)", val(`[1, "a", 1, 3]`))) // β†’ [1,1] -pp(path.MustQuery(`$[*] ? (@ == "a")`, val(`[1, "a", 1, 3]`))) // β†’ ["a"] -``` - -#### `value != value β†’ boolean` - -#### `value <> value β†’ boolean` - -Non-equality comparison ([playground][play54], [playground][play55]): - -``` go -pp(path.MustQuery("$[*] ? (@ != 1)", val(`[1, 2, 1, 3]`))) // β†’ [2,3] -pp(path.MustQuery(`$[*] ? (@ <> "b")`, val(`["a", "b", "c"]`))) // β†’ ["a","c"] -``` - -#### `value < value β†’ boolean` - -Less-than comparison ([playground][play56]): - -``` go -pp(path.MustQuery("$[*] ? (@ < 2)", val(`[1, 2, 3]`))) // β†’ [1] -``` - -#### `value <= value β†’ boolean` - -Less-than-or-equal-to comparison ([playground][play57]): - -``` go -pp(path.MustQuery(`$[*] ? (@ <= "b")`, val(`["a", "b", "c"]`))) // β†’ ["a","b"] -``` - -#### `value > value β†’ boolean` - -Greater-than comparison ([playground][play58]): - -``` go -pp(path.MustQuery("$[*] ? (@ > 2)", val(`[1, 2, 3]`))) // β†’ [3] -``` - -#### `value >= value β†’ boolean` - -Greater-than-or-equal-to comparison ([playground][play59]): - -``` go -pp(path.MustQuery("$[*] ? (@ >= 2)", val(`[1, 2, 3]`))) // β†’ [2,3] -``` - -#### `true β†’ boolean` - -JSON constant true ([playground][play60]): - -``` go -arg := val(`[ - {"name": "John", "parent": false}, - {"name": "Chris", "parent": true} -]`) -pp(path.MustQuery("$[*] ? (@.parent == true)", arg)) // β†’ [{"name":"Chris","parent":true}] -``` - -#### `false β†’ boolean` - -JSON constant false ([playground][play61]): - -``` go -arg := val(`[ - {"name": "John", "parent": false}, - {"name": "Chris", "parent": true} -]`) -pp(path.MustQuery("$[*] ? (@.parent == false)", arg)) // β†’ [{"name":"John","parent":false}] -``` - -#### `null β†’ value` - -JSON constant null (note that, unlike in SQL, comparison to null works -normally; [playground][play62]): - -``` go -arg := val(`[ - {"name": "Mary", "job": null}, - {"name": "Michael", "job": "driver"} -]`) -pp(path.MustQuery("$[*] ? (@.job == null) .name", arg)) // β†’ ["Mary"] -``` - -#### `boolean && boolean β†’ boolean` - -Boolean `AND` ([playground][play63]): - -``` go -pp(path.MustQuery("$[*] ? 
(@ > 1 && @ < 5)", val(`[1, 3, 7]`))) // β†’ [3] -``` - -#### `boolean || boolean β†’ boolean` - -Boolean `OR` ([playground][play64]): - -``` go -pp(path.MustQuery("$[*] ? (@ < 1 || @ > 5)", val(`[1, 3, 7]`))) // β†’ [7] -``` - -#### `! boolean β†’ boolean` - -Boolean `NOT` ([playground][play65]): - -``` go -pp(path.MustQuery("$[*] ? (!(@ < 5))", val(`[1, 3, 7]`))) // β†’ [7] -``` - -#### `boolean is unknown β†’ boolean` - -Tests whether a Boolean condition is unknown ([playground][play66]): - -``` go -pp(path.MustQuery("$[*] ? ((@ > 0) is unknown)", val(`[-1, 2, 7, "foo"]`))) // β†’ ["foo"] -``` - -#### `string like_regex string [ flag string ] β†’ boolean` - -Tests whether the first operand matches the regular expression given by the -second operand, optionally with modifications described by a string of flag -characters (see [SQL/JSON Regular Expressions](#sqljson-regular-expressions); -[playground][play67], [playground][play68]): - -``` go -arg := val(`["abc", "abd", "aBdC", "abdacb", "babc"]`) -pp(path.MustQuery(`$[*] ? (@ like_regex "^ab.*c")`, arg)) // β†’ ["abc","abdacb"] -pp(path.MustQuery(`$[*] ? (@ like_regex "^ab.*c" flag "i")`, arg)) // β†’ ["abc","aBdC","abdacb"] -``` - -#### `string starts with string β†’ boolean` - -Tests whether the second operand is an initial substring of the first operand -([playground][play69]): - -``` go -arg := val(`["John Smith", "Mary Stone", "Bob Johnson"]`) -pp(path.MustQuery(`$[*] ? (@ starts with "John")`, arg)) // β†’ ["John Smith"] -``` - -#### `exists ( path_expression ) β†’ boolean` - -Tests whether a path expression matches at least one SQL/JSON item. Returns -unknown if the path expression would result in an error; the second example -uses this to avoid a no-such-key error in strict mode -([playground][play70], [playground][play71]): - -``` go -arg := val(`{"x": [1, 2], "y": [2, 4]}`) -pp(path.MustQuery("strict $.* ? (exists (@ ? (@[*] > 2)))", arg)) // β†’ [[2,4]] -pp(path.MustQuery("strict $ ? (exists (@.name)) .name", val(`{"value": 42}`))) // β†’ [] -``` - -### SQL/JSON Regular Expressions - -SQL/JSON path expressions allow matching text to a regular expression with the -`like_regex` filter. For example, the following SQL/JSON path query would -case-insensitively match all strings in an array that start with an English -vowel: - -```jsonpath -$[*] ? (@ like_regex "^[aeiou]" flag "i") -``` - -The optional `flag` string may include one or more of the characters `i` for -case-insensitive match, `m` to allow `^` and `$` to match at newlines, `s` to -allow `.` to match a newline, and `q` to quote the whole pattern (reducing the -behavior to a simple substring match). - -The SQL/JSON standard borrows its definition for regular expressions from the -`LIKE_REGEX` operator, which in turn uses the XQuery standard. The path -package follows PostgreSQL's example in deviating from that standard, but uses -the [regexp] package to implement `like_regex`. This leads to various minor -discrepancies from standard SQL/JSON behavior, which are cataloged in -[Differences From SQL Standard And XQuery]. Note, however, that the -flag-letter incompatibilities described there do not apply to SQL/JSON, as -this package translates the XQuery flag letters to match what the [regexp] -package expects. - -There are also variations between PostgreSQL regular expression syntax and Go -regular expression syntax, cataloged [below](#compatibility). - -Keep in mind that the pattern argument of `like_regex` is a JSON path string -literal, written according to the rules given [above](#syntax). 
This means in -particular that any backslashes in the regular expression must be doubled in -double-quoted strings. For example, to match string values of the root -document that contain only digits: - -``` go -p := path.MustParse("$.* ?(@ like_regex \"^\\\\d+$\")") -pp(p.MustQuery(context.Background(), val(`{"x": "42", "y": "no"}`))) // β†’ ["42"] -``` - -This doubling upon doubling is required to escape backslashes once for Go -parsing and a second time for JSON path string parsing. - -We therefore recommend using raw [string literals] (backtick strings) to -compose path expressions with double quotes or backslashes, both of which are -common in `like_regex` expressions. Raw strings require backslashes in -regular expressions to be doubled only once, for the path string parsing -([playground][play72]): - -``` go -p := path.MustParse(`$.* ?(@ like_regex "^\\d+$")`) -pp(p.MustQuery(context.Background(), val(`{"x": "42", "y": "no"}`))) // β†’ ["42"] -``` - -## Compatibility - -As a direct port from the Postgres source, the path package strives to -maintain the highest level of compatibility. Still, there remain some -unavoidable differences and to-dos. These include: - -* Numbers. The Postgres [JSONB] type implements numbers as [arbitrary - precision numbers]. This contrasts with Go JSON parsing, which by default - parses numbers into `float64` values. Decimal numbers outside the range of - `float64` are not supported and will trigger an error. For numbers within - `float64` range, warnings about the precision of [floating point math] - apply. - - For [json.Number]s, however, the path package first attempts to treat them - as `int64` values and falls back on `float64` only if any of the values in - an expression cannot be parsed as an integer. This increases precision for - integer-only expressions. We therefore recommend parsing JSON with - [json.Decoder.UseNumber], as shown in the sketch following this list. - - This incompatibility may be addressed in the future, perhaps by using - [decimal] for all numeric operations. - -* `datetime(template)`. The `datetime()` method has been implemented, but - `datetime(template)` has not. Use of the template parameter will raise an - error. This issue will likely be addressed in a future release. - -* Date and time parsing. The path package relies on the [time] package's - [layouts] to parse values in the datetime methods (`datetime()`, - `timestamp()`, `timestamp_tz()`, etc.). These layouts are stricter about - the formats they'll parse than [Postgres date/time formatting]. - - As a result, some values parsed by the Postgres datetime methods will not - be parsed by this package. Examples include values with extra spaces - between the time and time zone, and missing leading zeros on the day and - month. - - This issue will likely be addressed when the `datetime(template)` method - is implemented, as it will require adopting the full Postgres date/time - formatting language. - -* Time zones. Postgres operates on time and timestamp values in the context - of the time zone defined by the [TimeZone GUC] or the server's system time - zone. The path package does not rely on such global configuration. It - instead uses the time zone configured in the context passed to the path - queries ([playground][play73]), and defaults to UTC if it's not set or - included in the value ([playground][play74]): - - ```go - p := path.MustParse("$.timestamp_tz()") - arg := "2023-08-15 12:34:56" - pp(p.MustQuery(context.Background(), arg, exec.WithTZ())) // β†’ ["2023-08-15T12:34:56+00:00"] - - // Add a time zone to the context. 
- tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - - // The output will now be in the custom time zone. - pp(p.MustQuery(ctx, arg, exec.WithTZ())) // β†’ ["2023-08-15T12:34:56-04:00"] - ``` - -* Regular expressions. Whereas the Postgres implementation of the `like_regex` - expression relies on its [POSIX regular expression engine], the Go version - relies on the [regexp] package. We have attempted to configure things for - full compatibility with the Postgres implementation (including the same - diversions from XQuery regular expressions), but some variation is likely. - - Notably, a number of escapes and character classes vary: - - | Escape | PostgreSQL | Go | - | ------------ | ------------------------------------- | ------------------------------------- | - | `\a` | alert (bell) character | alert (bell) character | - | `\A` | at beginning of text | at beginning of text | - | `\b` | backspace | at ASCII word boundary | - | `\B` | synonym for backslash (`\`) | not at ASCII word boundary | - | `\cX` | low-order 5 bits comparison | N/A | - | `\d` | digit | digit | - | `\D` | non-digit | non-digit | - | `\e` | `ESC` or octal `033` | N/A | - | `\f` | form feed | form feed | - | `\m` | beginning of a word | N/A | - | `\M` | end of a word | N/A | - | `\n` | newline | newline | - | `\Q...\E` | N/A | literal `...` | - | `\r` | carriage return | carriage return | - | `\s` | whitespace character | whitespace character | - | `\S` | non-whitespace character | non-whitespace character | - | `\t` | horizontal tab | horizontal tab | - | `\uwxyz` | character with hex value `0xwxyz` | N/A (see `\x{}`) | - | `\Ustuvwxyz` | character with hex value `0xstuvwxyz` | N/A (see `\x{}`) | - | `\v` | vertical tab | vertical tab | - | `\w` | word character | word character | - | `\W` | non-word character | non-word character | - | `\xhhh` | character with hex value `0xhhh` | character with hex value `0xhhh` | - | `\xy` | character with octal value `0xy` | N/A | - | `\x{10FFFF}` | N/A (see `\U`) | hex character code | - | `\y` | beginning or end of a word | N/A (see `\b`) | - | `\Y` | not the beginning or end of a word | N/A (see `\B`) | - | `\z` | N/A (see `\Z`) | end of text | - | `\Z` | end of text | N/A (see `\z`) | - | `\0` | the null byte | N/A | - | `\*` | literal punctuation character `*` | literal punctuation character `*` | - -* Identifiers. Postgres jsonpath parsing is quite liberal in what it allows - in unquoted identifiers. The characters allowed by the - [ECMAScript standard] are stricter, and this package hews closer to the - standard. - - The upshot is that expressions allowed by Postgres, such as `x.πŸŽ‰`, are - better written as `x."πŸŽ‰"` for compatibility with the standard and to work - with both this package and Postgres. - -* `keyvalue()` IDs. Postgres creates IDs for the output of the `keyvalue()` - method by comparing memory addresses between JSONB values. This works well - for JSONB because it has a highly-structured, well-ordered layout. The - path package follows this pattern. - - However, the addresses of nested `map[string]any` and `[]any` values in Go - are less stable. IDs will therefore sometimes vary between executions β€” - especially for slices. That said, the IDs determined for a single object or - array should be stable through repeated query executions and calls to - `keyvalue()`. 
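As a concrete illustration of the numbers caveat above, here is a minimal sketch. It assumes the `pp` helper used by the other examples plus the usual `encoding/json`, `strings`, and `log` imports; the JSON value and path are purely illustrative. Decoding with [json.Decoder.UseNumber] lets integer-only expressions keep full `int64` precision instead of rounding through `float64`:

``` go
// Decode the query argument with UseNumber so numbers arrive as json.Number
// values rather than float64.
dec := json.NewDecoder(strings.NewReader(`{"big": 9007199254740993}`))
dec.UseNumber()
var arg any
if err := dec.Decode(&arg); err != nil {
	log.Fatal(err)
}
// As a json.Number, 9007199254740993 should be handled as an int64 (see the
// numbers note above), so this integer-only addition avoids float64 rounding.
pp(path.MustQuery("$.big + 1", arg))
```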
- -## Copyright - -Copyright Β© 1996-2025 The PostgreSQL Global Development Group - -Copyright Β© 2024-2025 David E. Wheeler - - [πŸ› Playground]: https://theory.github.io/sqljson "Go SQL/JSON Path Playground" - [TinyGo]: https://tinygo.org - [Wasm]: https://webassembly.org "WebAssembly" - [this one]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [PostgreSQL docs]: https://www.postgresql.org/docs/devel/functions-json.html#FUNCTIONS-SQLJSON-PATH - "PostgreSQL Documentation: β€œThe SQL/JSON Path Language”" - [`json.Number`]: https://pkg.go.dev/encoding/json#Number - [string literals]: https://go.dev/ref/spec#String_literals - "Go Language Spec: String literals" - [regexp]: https://pkg.go.dev/regexp "Go Standard Library: regexp" - [Differences From SQL Standard And XQuery]: https://www.postgresql.org/docs/devel/functions-matching.html#POSIX-VS-XQUERY - "PostgreSQL Documentation: β€œDifferences From SQL Standard And XQuery”" - [JSONB]: https://www.postgresql.org/docs/current/datatype-json.html - [arbitrary precision numbers]: https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL - [floating point math]: https://en.wikipedia.org/wiki/Floating-point_arithmetic - [json.Number]: https://pkg.go.dev/encoding/json#Number - [json.Decoder.UseNumber]: https://pkg.go.dev/encoding/json#Decoder.UseNumber - [decimal]: https://pkg.go.dev/github.com/shopspring/decimal - [time]: https://pkg.go.dev/time - [layouts]: https://pkg.go.dev/time#pkg-constants - [Postgres date/time formatting]: https://www.postgresql.org/docs/current/functions-formatting.html - [ECMAScript standard]: https://262.ecma-international.org/#sec-identifier-names - [POSIX regular expression engine]: https://www.postgresql.org/docs/devel/functions-matching.html#FUNCTIONS-POSIX-REGEXP - [regexp]: https://pkg.go.dev/regexp - [backspace character]: https://en.wikipedia.org/wiki/Backspace - [TimeZone GUC]: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-TIMEZONE - [types.ContextWithTZ]: https://pkg.go.dev/github.com/theory/sqljson/path/types#ContextWithTZ - [output format]: https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME-OUTPUT - - - [play01]: 
https://theory.github.io/sqljson/?p=%2524.track.segments&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=33 - [play02]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play03]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B0%255D.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play04]: 
https://theory.github.io/sqljson/?p=%2524.track.segments.size%28%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play05]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D.HR%2520%253F%2520%28%2540%2520%253E%2520130%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play06]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D%2520%253F%2520%28%2540.HR%2520%253E%2520130%29.%2522start%2520time%2522&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play07]: 
https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D%2520%253F%2520%28%2540.location%255B1%255D%2520%253C%252013.4%29%2520%253F%2520%28%2540.HR%2520%253E%2520130%29.%2522start%2520time%2522&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play08]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D%2520%253F%2520%28%2540.location%255B1%255D%2520%253C%252013.4%29.HR%2520%253F%2520%28%2540%2520%253E%2520130%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play09]: 
https://theory.github.io/sqljson/?p=%2524.track%2520%253F%2520%28exists%28%2540.segments%255B*%255D%2520%253F%2520%28%2540.HR%2520%253E%2520130%29%29%29.segments.size%28%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play10]: https://theory.github.io/sqljson/?p=%2524.track.segments%2520%253F%28%2540%255B*%255D.HR%2520%253E%2520130%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=33 - [play11]: https://theory.github.io/sqljson/?p=%2524.track.segments%255B*%255D.HR%2520%253E%2520130&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play12]: 
https://theory.github.io/sqljson/?p=lax%2520%2524.track.segments.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play13]: https://theory.github.io/sqljson/?p=strict%2520%2524.track.segments.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play14]: https://theory.github.io/sqljson/?p=strict%2520%2524.track.segments%255B*%255D.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play15]: 
https://theory.github.io/sqljson/?p=lax%2520%2524.**.HR&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play16]: https://theory.github.io/sqljson/?p=strict%2520%2524.**.HR&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play17]: https://theory.github.io/sqljson/?p=lax%2520%2524.track.segments%255B*%255D.location&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play18]: 
https://theory.github.io/sqljson/?p=lax%2520%2524.track.segments%255B*%255D.location%2520%253F%28%2540%255B*%255D%2520%253E%252015%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play19]: https://theory.github.io/sqljson/?p=strict%2520%2524.track.segments%255B*%255D.location%2520%253F%28%2540%255B*%255D%2520%253E%252015%29&j=%257B%250A%2520%2520%2522track%2522%253A%2520%257B%250A%2520%2520%2520%2520%2522segments%2522%253A%2520%255B%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.763%252C%252013.4034%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A05%253A14%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%252073%250A%2520%2520%2520%2520%2520%2520%257D%252C%250A%2520%2520%2520%2520%2520%2520%257B%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522location%2522%253A%2520%2520%2520%255B%252047.706%252C%252013.2635%2520%255D%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522start%2520time%2522%253A%2520%25222018-10-14%252010%253A39%253A21%2522%252C%250A%2520%2520%2520%2520%2520%2520%2520%2520%2522HR%2522%253A%2520135%250A%2520%2520%2520%2520%2520%2520%257D%250A%2520%2520%2520%2520%255D%250A%2520%2520%257D%250A%257D&a=&o=1 - [play20]: https://theory.github.io/sqljson/?p=%2524%255B0%255D%2520%252B%25203&j=2&a=&o=1 - [play21]: https://theory.github.io/sqljson/?p=%252B%2520%2524.x&j=%257B%2522x%2522%253A%2520%255B2%252C3%252C4%255D%257D&a=&o=1 - [play22]: https://theory.github.io/sqljson/?p=7%2520-%2520%2524%255B0%255D&j=%255B2%255D&a=&o=1 - [play23]: https://theory.github.io/sqljson/?p=-%2520%2524.x&j=%257B%2522x%2522%253A%2520%255B2%252C3%252C4%255D%257D&a=&o=1 - [play24]: https://theory.github.io/sqljson/?p=2%2520*%2520%2524%255B0%255D&j=4&a=&o=1 - [play25]: https://theory.github.io/sqljson/?p=%2524%255B0%255D%2520%252F%25202&j=%255B8.5%255D&a=&o=1 - [play26]: https://theory.github.io/sqljson/?p=%2524%255B0%255D%2520%2525%252010&j=%255B32%255D&a=&o=1 - [play27]: https://theory.github.io/sqljson/?p=%2524%255B*%255D.type%28%29&j=%255B1%252C%2520%25222%2522%252C%2520%257B%257D%255D&a=&o=1 - [play28]: https://theory.github.io/sqljson/?p=%2524.m.size%28%29&j=%257B%2522m%2522%253A%2520%255B11%252C%252015%255D%257D&a=&o=1 - [play29]: https://theory.github.io/sqljson/?p=%2524%255B*%255D.boolean%28%29&j=%255B1%252C%2520%2522yes%2522%252C%2520false%255D&a=&o=1 - [play30]: 
https://theory.github.io/sqljson/?p=%2524%255B*%255D.string%28%29&j=%255B1.23%252C%2520%2522xyz%2522%252C%2520false%255D&a=&o=1 - [play31]: https://theory.github.io/sqljson/?p=%2524.timestamp%28%29.string%28%29&j=%25222023-08-15%252012%253A34%253A56%2522 - [play32]: https://theory.github.io/sqljson/?p=%2524.len.double%28%29%2520*%25202&j=%257B%2522len%2522%253A%2520%25221.9%2522%257D&a=&o=1 - [play33]: https://theory.github.io/sqljson/?p=%2524.h.ceiling%28%29&j=%257B%2522h%2522%253A%25201.3%257D&a=&o=1 - [play34]: https://theory.github.io/sqljson/?p=%2524.h.floor%28%29&j=%257B%2522h%2522%253A%25201.7%257D&a=&o=1 - [play35]: https://theory.github.io/sqljson/?p=%2524.z.abs%28%29&j=%257B%2522z%2522%253A%2520-0.3%257D&a=&o=1 - [play36]: https://theory.github.io/sqljson/?p=%2524.len.bigint%28%29&j=%257B%2522len%2522%253A%2520%25229876543219%2522%257D&a=&o=1 - [play37]: https://theory.github.io/sqljson/?p=%2524.decimal%286%252C%25202%29&j=%25221234.5678%2522&a=&o=1 - [play38]: https://theory.github.io/sqljson/?p=%2524.len.integer%28%29&j=%257B%2522len%2522%253A%2520%252212345%2522%257D&a=&o=1 - [play39]: https://theory.github.io/sqljson/?p=%2524.len.number%28%29&j=%257B%2522len%2522%253A%2520%2522123.45%2522%257D&a=&o=1 - [play40]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540.datetime%28%29%2520%253C%2520%25222015-08-02%2522.datetime%28%29%29&j=%255B%25222015-08-01%2522%252C%2520%25222015-08-12%2522%255D&a=&o=1 - [play41]: https://theory.github.io/sqljson/?p=%2524%255B*%255D.datetime%28%2522HH24%253AMI%2522%29&j=%255B%252212%253A30%2522%252C%2520%252218%253A40%2522%255D&a=&o=1 - [play42]: https://theory.github.io/sqljson/?p=%2524.date%28%29&j=2023-08-15&a=&o=1 - [play43]: https://theory.github.io/sqljson/?p=%2524.time%28%29&j=12%253A34%253A56&a=&o=1 - [play44]: https://theory.github.io/sqljson/?p=%2524.time%282%29&j=%252212%253A34%253A56.789%2522&a=&o=1 - [play45]: https://theory.github.io/sqljson/?p=%2524.time_tz%28%29&j=%252212%253A34%253A56%252B05%253A30%2522&a=%257B%257D&o=1 - [play46]: https://theory.github.io/sqljson/?p=%2524.time_tz%282%29&j=%252212%253A34%253A56.789%252B05%253A30%2522&a=&o=1 - [play47]: https://theory.github.io/sqljson/?p=%2524.timestamp%28%29&j=%25222023-08-15%252012%253A34%253A56%2522&a=&o=1 - [play48]: https://theory.github.io/sqljson/?p=%2524.timestamp%282%29&j=%25222023-08-15%252012%253A34%253A56.789%2522&a=&o=1 - [play49]: https://theory.github.io/sqljson/?p=%2524.timestamp_tz%28%29&j=%25222023-08-15%252012%253A34%253A56%252B05%253A30%2522&a=&o=1 - [play50]: https://theory.github.io/sqljson/?p=%2524.timestamp_tz%282%29&j=%25222023-08-15%252012%253A34%253A56.789%252B05%253A30%2522&a=&o=1 - [play51]: https://theory.github.io/sqljson/?p=%2524.keyvalue%28%29&j=%257B%2522x%2522%253A%2520%252220%2522%252C%2520%2522y%2522%253A%252032%257D&a=&o=1 - [play52]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253D%253D%25201%29&j=%255B1%252C%2520%2522a%2522%252C%25201%252C%25203%255D&a=&o=1 - [play53]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253D%253D%2520%2522a%2522%29&j=%255B1%252C%2520%2522a%2522%252C%25201%252C%25203%255D&a=&o=1 - [play54]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%21%253D%25201%29&j=%255B1%252C%25202%252C%25201%252C%25203%255D&a=&o=1 - [play55]: 
https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253C%253E%2520%2522b%2522%29&j=%255B%2522a%2522%252C%2520%2522b%2522%252C%2520%2522c%2522%255D&a=&o=1 - [play56]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253C%25202%29&j=%255B1%252C%25202%252C%25203%255D&a=&o=1 - [play57]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253C%253D%2520%2522b%2522%29&j=%255B%2522a%2522%252C%2520%2522b%2522%252C%2520%2522c%2522%255D&a=&o=1 - [play58]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253E%25202%29&j=%255B1%252C%25202%252C%25203%255D&a=&o=1 - [play59]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253E%253D%25202%29&j=%255B1%252C%25202%252C%25203%255D&a=&o=1 - [play60]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540.parent%2520%253D%253D%2520true%29&j=%255B%250A%2520%2520%257B%2522name%2522%253A%2520%2522John%2522%252C%2520%2522parent%2522%253A%2520false%257D%252C%250A%2520%2520%257B%2522name%2522%253A%2520%2522Chris%2522%252C%2520%2522parent%2522%253A%2520true%257D%250A%255D&a=&o=1 - [play61]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540.parent%2520%253D%253D%2520false%29&j=%255B%250A%2520%2520%257B%2522name%2522%253A%2520%2522John%2522%252C%2520%2522parent%2522%253A%2520false%257D%252C%250A%2520%2520%257B%2522name%2522%253A%2520%2522Chris%2522%252C%2520%2522parent%2522%253A%2520true%257D%250A%255D&a=&o=1 - [play62]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540.job%2520%253D%253D%2520null%29%2520.name&j=%255B%250A%2520%2520%257B%2522name%2522%253A%2520%2522Mary%2522%252C%2520%2522job%2522%253A%2520null%257D%252C%250A%2520%2520%257B%2522name%2522%253A%2520%2522Michael%2522%252C%2520%2522job%2522%253A%2520%2522driver%2522%257D%250A%255D&a=&o=1 - [play63]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253E%25201%2520%2526%2526%2520%2540%2520%253C%25205%29&j=%255B1%252C%25203%252C%25207%255D&a=&o=1 - [play64]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520%253C%25201%2520%257C%257C%2520%2540%2520%253E%25205%29&j=%255B1%252C%25203%252C%25207%255D&a=&o=1 - [play65]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%21%28%2540%2520%253C%25205%29%29&j=%255B1%252C%25203%252C%25207%255D&a=&o=1 - [play66]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%28%2540%2520%253E%25200%29%2520is%2520unknown%29&j=%255B-1%252C%25202%252C%25207%252C%2520%2522foo%2522%255D&a=&o=1 - [play66]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520like_regex%2520%2522%255Eab.*c%2522%29&j=%255B%2522abc%2522%252C%2520%2522abd%2522%252C%2520%2522aBdC%2522%252C%2520%2522abdacb%2522%252C%2520%2522babc%2522%255D&a=&o=1 - [play67]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520like_regex%2520%2522%255Eab.*c%2522%29&j=%255B%2522abc%2522%252C%2520%2522abd%2522%252C%2520%2522aBdC%2522%252C%2520%2522abdacb%2522%252C%2520%2522babc%2522%255D&a=&o=1 - [play68]: https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520like_regex%2520%2522%255Eab.*c%2522%2520flag%2520%2522i%2522%29&j=%255B%2522abc%2522%252C%2520%2522abd%2522%252C%2520%2522aBdC%2522%252C%2520%2522abdacb%2522%252C%2520%2522babc%2522%255D&a=&o=1 - [play69]: 
https://theory.github.io/sqljson/?p=%2524%255B*%255D%2520%253F%2520%28%2540%2520starts%2520with%2520%2522John%2522%29&j=%255B%2522John%2520Smith%2522%252C%2520%2522Mary%2520Stone%2522%252C%2520%2522Bob%2520Johnson%2522%255D&a=&o=1 - [play70]: https://theory.github.io/sqljson/?p=strict%2520%2524.*%2520%253F%2520%28exists%2520%28%2540%2520%253F%2520%28%2540%255B*%255D%2520%253E%25202%29%29%29&j=%257B%2522x%2522%253A%2520%255B1%252C%25202%255D%252C%2520%2522y%2522%253A%2520%255B2%252C%25204%255D%257D&a=&o=1 - [play71]: https://theory.github.io/sqljson/?p=strict%2520%2524%2520%253F%2520%28exists%2520%28%2540.name%29%29%2520.name&j=%257B%2522x%2522%253A%2520%255B1%252C%25202%255D%252C%2520%2522y%2522%253A%2520%255B2%252C%25204%255D%257D&a=%257B%2522value%2522%253A%252042%257D&o=1 - [play72]: https://theory.github.io/sqljson/?p=%2524.*%2520%253F%28%2540%2520like_regex%2520%2522%255E%255C%255Cd%252B%2524%2522%29&j=%257B%2522x%2522%253A%2520%252242%2522%252C%2520%2522y%2522%253A%2520%2522no%2522%257D&a=&o=1 - [play73]: https://theory.github.io/sqljson/?p=%2524.timestamp_tz%28%29&j=%25222023-08-15%252012%253A34%253A56%2522&o=49&a=%257B%257D - [play74]: https://theory.github.io/sqljson/?p=%2524.timestamp_tz%28%29&j=%25222023-08-15%252012%253A34%253A56%2522&o=17&a=%257B%257D diff --git a/path/ast/ast.go b/path/ast/ast.go deleted file mode 100644 index b1beb2a..0000000 --- a/path/ast/ast.go +++ /dev/null @@ -1,1050 +0,0 @@ -// Package ast provides an abstract syntax tree for SQL/JSON paths. -// -// Largely ported from PostgreSQL's [jsonpath.c], it provides objects for every -// node parsed from an SQL/JSON path. The [parser] constructs these nodes as it -// parses a path, and constructs an AST object from the root node. -// -// Note that errors returned by AST are not wrapped, as they're expected to be -// wrapped by parser. -// -// The complete list of types that implement Node: -// -// - [ConstNode] -// - [MethodNode] -// - [StringNode] -// - [VariableNode] -// - [KeyNode] -// - [NumericNode] -// - [IntegerNode] -// - [AnyNode] -// - [BinaryNode] -// - [UnaryNode] -// - [RegexNode] -// - [ArrayIndexNode] -// -// Here's a starter recursive function for processing nodes. -// -// func processNode(node ast.Node) { -// switch node := node.(type) { -// case *ast.ConstNode: -// case *ast.MethodNode: -// case *ast.StringNode: -// case *ast.VariableNode: -// case *ast.KeyNode: -// case *ast.NumericNode: -// case *ast.IntegerNode: -// case *ast.AnyNode: -// case *ast.BinaryNode: -// if node.Left() != nil { -// processNode(node.Left()) -// } -// if node.Right() != nil { -// processNode(node.Right()) -// } -// case *ast.UnaryNode: -// processNode(node.Operand()) -// case *ast.RegexNode: -// processNode(node.Operand()) -// case *ast.ArrayIndexNode: -// for _, n := range node.Subscripts() { -// processNode(n) -// } -// } -// if next := node.Next(); next != nil { -// processNode(next) -// } -// } -// -// [jsonpath.c]: https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath.c -package ast - -// Use golang.org/x/tools/cmd/stringer to generate the String method for enums -// for their inline comments. - -//go:generate stringer -linecomment -output ast_string.go -type Constant,BinaryOperator,UnaryOperator,MethodName - -import ( - "encoding/json" - "errors" - "fmt" - "math" - "regexp" - "strconv" - "strings" -) - -// Node represents a single node in the AST. -type Node interface { - fmt.Stringer - - // writeTo writes the string representation of a node to buf. 
inKey is true - // when the node is a key in an accessor list and withParens requires - // parentheses to be printed around the node. - writeTo(buf *strings.Builder, inKey, withParens bool) - - // priority returns the operational priority of the node relative to other - // nodes. Priority ranges from 0 for highest to 6 for lowest. - priority() uint8 - - // Next returns the next node when the node is part of a linked list of - // nodes. - Next() Node - - // setNext sets the next node in a linked list of nodes. - setNext(next Node) -} - -// lowestPriority is the lowest priority returned by priority, and the default -// for most nodes. -const lowestPriority = uint8(6) - -// Constant is a constant value parsed from the path. -type Constant int - -//revive:disable:exported -const ( - ConstRoot Constant = iota // $ - ConstCurrent // @ - ConstLast // last - ConstAnyArray // [*] - ConstAnyKey // * - ConstTrue // true - ConstFalse // false - ConstNull // null -) - -// ConstNode represents a constant node in the path. -type ConstNode struct { - kind Constant - next Node -} - -// NewConst creates a new ConstNode defined by kind. -func NewConst(kind Constant) *ConstNode { - return &ConstNode{kind: kind} -} - -// writeTo writes the string representation of n to buf. If n.kind is -// ConstAnyKey and inKey is true, it will be preceded by '.'. -func (n *ConstNode) writeTo(buf *strings.Builder, inKey, _ bool) { - if n.kind == ConstAnyKey && inKey { - buf.WriteRune('.') - } - buf.WriteString(n.kind.String()) - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// Const returns the Constant defining n. -func (n *ConstNode) Const() Constant { - return n.kind -} - -// String returns the string representation of n. -func (n *ConstNode) String() string { - return n.kind.String() -} - -// priority returns the priority of the ConstantNode, which is always 6. -func (*ConstNode) priority() uint8 { return lowestPriority } - -// setNext sets the next node when n is in a linked list. -func (n *ConstNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *ConstNode) Next() Node { - return n.next -} - -// BinaryOperator represents a binary operator. -type BinaryOperator int - -const ( - BinaryAnd BinaryOperator = iota // && - BinaryOr // || - BinaryEqual // == - BinaryNotEqual // != - BinaryLess // < - BinaryGreater // > - BinaryLessOrEqual // <= - BinaryGreaterOrEqual // >= - BinaryStartsWith // starts with - BinaryAdd // + - BinarySub // - - BinaryMul // * - BinaryDiv // / - BinaryMod // % - BinarySubscript // to - BinaryDecimal // .decimal() -) - -// Priority returns the priority of the operator. -// -//nolint:mnd -func (op BinaryOperator) priority() uint8 { - switch op { - case BinaryOr: - return 0 - case BinaryAnd: - return 1 - case BinaryEqual, BinaryNotEqual, BinaryLess, BinaryGreater, - BinaryLessOrEqual, BinaryGreaterOrEqual, BinaryStartsWith: - return 2 - case BinaryAdd, BinarySub: - return 3 - case BinaryMul, BinaryDiv, BinaryMod: - return 4 - default: - return lowestPriority - } -} - -// UnaryOperator represents a unary operator. -type UnaryOperator int - -const ( - UnaryExists UnaryOperator = iota // exists - UnaryNot // ! - UnaryIsUnknown // is unknown - UnaryPlus // + - UnaryMinus // - - UnaryFilter // ? - UnaryDateTime // .datetime - UnaryDate // .date - UnaryTime // .time - UnaryTimeTZ // .time_tz - UnaryTimestamp // .timestamp - UnaryTimestampTZ // .timestamp_tz -) - -// Priority returns the priority of the operator. 
-// -//nolint:mnd -func (op UnaryOperator) priority() uint8 { -	switch op { -	case UnaryPlus, UnaryMinus: -		return 5 -	default: -		return lowestPriority -	} -} - -// MethodName represents the name of a path method. -type MethodName int - -const ( -	MethodAbs MethodName = iota // .abs() -	MethodSize // .size() -	MethodType // .type() -	MethodFloor // .floor() -	MethodCeiling // .ceiling() -	MethodDouble // .double() -	MethodKeyValue // .keyvalue() -	MethodBigInt // .bigint() -	MethodBoolean // .boolean() -	MethodInteger // .integer() -	MethodNumber // .number() -	MethodString // .string() -) - -// MethodNode represents a path method. -type MethodNode struct { -	name MethodName -	next Node -} - -// NewMethod returns a new MethodNode with name. -func NewMethod(name MethodName) *MethodNode { -	return &MethodNode{name: name} -} - -// String returns the SQL/JSON representation of the method: A dot, the name, -// then parentheses. -func (n *MethodNode) String() string { -	return n.name.String() -} - -// Name returns the MethodName of the method. -func (n *MethodNode) Name() MethodName { -	return n.name -} - -// writeTo writes the string representation of n to buf. -func (n *MethodNode) writeTo(buf *strings.Builder, _, _ bool) { -	buf.WriteString(n.name.String()) -	if next := n.Next(); next != nil { -		next.writeTo(buf, true, true) -	} -} - -// priority returns the priority of the MethodNode, which is always 6. -func (*MethodNode) priority() uint8 { return lowestPriority } - -// setNext sets the next node when n is in a linked list. -func (n *MethodNode) setNext(next Node) { -	n.next = next -} - -// Next returns the next node, if any. -func (n *MethodNode) Next() Node { -	return n.next -} - -// quotedString represents a quoted string node, including strings, variables, -// and path keys. -type quotedString struct { -	str string -	next Node -} - -// Text returns the textual representation of the string. -func (n *quotedString) Text() string { -	return n.str -} - -// String returns the SQL/JSON path-encoded quoted string. -func (n *quotedString) String() string { -	return strconv.Quote(n.str) -} - -// writeTo writes n.String to buf. -func (n *quotedString) writeTo(buf *strings.Builder, _, _ bool) { -	buf.WriteString(n.String()) -	if next := n.Next(); next != nil { -		next.writeTo(buf, true, true) -	} -} - -// priority returns the priority of the quotedString, which is always 6. -func (*quotedString) priority() uint8 { return lowestPriority } - -// setNext sets the next node when n is in a linked list. -func (n *quotedString) setNext(next Node) { -	n.next = next -} - -// Next returns the next node, if any. -func (n *quotedString) Next() Node { -	return n.next -} - -// StringNode represents a string parsed from the path. -type StringNode struct { -	*quotedString -} - -// NewString returns a new StringNode representing str. -func NewString(str string) *StringNode { -	return &StringNode{&quotedString{str: str}} -} - -// VariableNode represents a SQL/JSON path variable name. -type VariableNode struct { -	// jpiVariable -	*quotedString -} - -// NewVariable returns a new VariableNode named name. -func NewVariable(name string) *VariableNode { -	return &VariableNode{&quotedString{str: name}} -} - -// String returns the double-quoted representation of n, preceded by '$'. -func (n *VariableNode) String() string { -	return "$" + n.quotedString.String() -} - -// writeTo writes n.String to buf. 
-func (n *VariableNode) writeTo(buf *strings.Builder, _, _ bool) { - buf.WriteString(n.String()) - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// KeyNode represents a SQL/JSON path key expression, e.g., '.foo'. -type KeyNode struct { - // jpiKey - *quotedString -} - -// NewKey returns a new KeyNode with key. -func NewKey(key string) *KeyNode { - return &KeyNode{&quotedString{str: key}} -} - -// writeTo writes the key to buf, prepended with '.' if inKey is true. -func (n *KeyNode) writeTo(buf *strings.Builder, inKey, _ bool) { - if inKey { - buf.WriteRune('.') - } - buf.WriteString(n.String()) - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -type numberNode struct { - literal string - parsed string - next Node -} - -// Literal returns the literal text string of the number as passed to the -// constructor. -func (n *numberNode) Literal() string { - return n.literal -} - -// String returns the normalized string representation of the number. -func (n *numberNode) String() string { - return n.parsed -} - -// writeTo writes n.String to buf, surrounded by parentheses if there is a -// next node in the list. -func (n *numberNode) writeTo(buf *strings.Builder, _, _ bool) { - next := n.Next() - if next != nil { - buf.WriteRune('(') - } - buf.WriteString(n.String()) - if next != nil { - buf.WriteRune(')') - next.writeTo(buf, true, true) - } -} - -// priority returns the priority of the numberNode, which is always 6. -func (*numberNode) priority() uint8 { return lowestPriority } - -// setNext sets the next node when n is in a linked list. -func (n *numberNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *numberNode) Next() Node { - return n.next -} - -// NumericNode represents a numeric (non-integer) value. -type NumericNode struct { - *numberNode -} - -// NewNumeric returns a new NumericNode representing num. Panics if num cannot -// be parsed into JSON-compatible float64. -func NewNumeric(num string) *NumericNode { - f, err := strconv.ParseFloat(num, 64) - if err != nil { - panic(err) - } - - // https://www.postgresql.org/docs/current/datatype-json.html#DATATYPE-JSONPATH: - // - // > Numeric literals in SQL/JSON path expressions follow JavaScript rules, - // > which are different from both SQL and JSON in some minor details. For - // > example, SQL/JSON path allows .1 and 1., which are invalid in JSON. - // > Non-decimal integer literals and underscore separators are supported, - // > for example, 1_000_000, 0x1EEE_FFFF, 0o273, 0b100101. In SQL/JSON path - // > (and in JavaScript, but not in SQL proper), there must not be an - // > underscore separator directly after the radix prefix. - // - // Rely on JSON semantics, a subset of the JavaScript. - str, err := json.Marshal(f) - if err != nil { - panic(err) - } - - return &NumericNode{&numberNode{literal: num, parsed: string(str)}} -} - -// Float returns the floating point number corresponding to n. -func (n *NumericNode) Float() float64 { - num, _ := strconv.ParseFloat(n.parsed, 64) - return num -} - -// IntegerNode represents an integral value. -type IntegerNode struct { - *numberNode -} - -// NewInteger returns a new IntegerNode representing num. Panics if -// integer cannot be parsed into int64.
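// Aside: an illustrative, standalone sketch of the normalization step that
// NewNumeric performs, parsing a JavaScript-style literal with
// strconv.ParseFloat and letting encoding/json render the canonical form.
// The helper name normalizeNumeric is invented for this example; only the
// standard library is used, and the expected output matches the NumericNode
// test cases later in this file.

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"strconv"
    )

    // normalizeNumeric parses a SQL/JSON numeric literal and renders its
    // canonical JSON form via the same ParseFloat + json.Marshal round trip.
    func normalizeNumeric(lit string) (string, error) {
    	f, err := strconv.ParseFloat(lit, 64)
    	if err != nil {
    		return "", err
    	}
    	b, err := json.Marshal(f)
    	if err != nil {
    		return "", err
    	}
    	return string(b), nil
    }

    func main() {
    	for _, lit := range []string{".1", "0.", "0.0010e-1", "1.2e3"} {
    		s, _ := normalizeNumeric(lit)
    		fmt.Println(lit, "=>", s)
    	}
    	// Output:
    	// .1 => 0.1
    	// 0. => 0
    	// 0.0010e-1 => 0.0001
    	// 1.2e3 => 1200
    }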
-func NewInteger(integer string) *IntegerNode { - val, err := strconv.ParseInt(integer, 0, 64) - if err != nil { - panic(err) - } - return &IntegerNode{&numberNode{ - literal: integer, - parsed: strconv.FormatInt(val, 10), - }} -} - -// Int returns the integer corresponding to n. -func (n *IntegerNode) Int() int64 { - val, _ := strconv.ParseInt(n.parsed, 0, 64) - return val -} - -// BinaryNode represents a binary operation. -type BinaryNode struct { - op BinaryOperator - left Node - right Node - next Node -} - -// NewBinary returns a new BinaryNode where op represents the binary operator -// and left and right the operands. -func NewBinary(op BinaryOperator, left, right Node) *BinaryNode { - return &BinaryNode{op: op, left: left, right: right} -} - -// String returns the SQL/JSON path string representation of the binary -// expression. -func (n *BinaryNode) String() string { - buf := new(strings.Builder) - n.writeTo(buf, false, false) - return buf.String() -} - -// writeTo writes the SQL/JSON path string representation of the binary -// expression to buf. If withParens is true and the binary operation is neither -// BinaryDecimal nor BinarySubscript, parentheses will be written around the -// expression. -func (n *BinaryNode) writeTo(buf *strings.Builder, _, withParens bool) { - switch n.op { - case BinaryDecimal: - buf.WriteString(".decimal(") - if n.left != nil { - buf.WriteString(n.left.String()) - } - if n.right != nil { - buf.WriteRune(',') - buf.WriteString(n.right.String()) - } - buf.WriteRune(')') - case BinarySubscript: - n.left.writeTo(buf, false, false) - if n.right != nil { - buf.WriteString(" " + n.op.String() + " ") - n.right.writeTo(buf, false, false) - } - case BinaryAnd, BinaryOr, BinaryEqual, BinaryNotEqual, BinaryLess, - BinaryGreater, BinaryLessOrEqual, BinaryGreaterOrEqual, - BinaryAdd, BinarySub, BinaryMul, BinaryDiv, BinaryMod, - BinaryStartsWith: - if withParens { - buf.WriteRune('(') - } - - n.left.writeTo(buf, false, n.left.priority() <= n.priority()) - buf.WriteString(" " + n.op.String() + " ") - n.right.writeTo(buf, false, n.right.priority() <= n.priority()) - - if withParens { - buf.WriteRune(')') - } - default: - panic(fmt.Sprintf("Unknown binary operator %v", n.op)) - } - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// priority returns the priority of n.op. -func (n *BinaryNode) priority() uint8 { return n.op.priority() } - -// Operator returns the BinaryNode's BinaryOperator. -func (n *BinaryNode) Operator() BinaryOperator { - return n.op -} - -// Left returns the BinaryNode's left operand. -func (n *BinaryNode) Left() Node { - return n.left -} - -// Right returns the BinaryNode's right operand. -func (n *BinaryNode) Right() Node { - return n.right -} - -// setNext sets the next node when n is in a linked list. -func (n *BinaryNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *BinaryNode) Next() Node { - return n.next -} - -// UnaryNode represents a unary operation. -type UnaryNode struct { - op UnaryOperator - operand Node - next Node -} - -// NewUnary returns a new UnaryNode where op represents the unary operator -// and node its operand. -func NewUnary(op UnaryOperator, node Node) *UnaryNode { - return &UnaryNode{op: op, operand: node} -} - -// String returns the SQL/JSON path string representation of the unary -// expression. 
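// Aside: a companion sketch for NewInteger above. strconv.ParseInt with base
// 0 accepts the underscore separators and 0x/0o/0b prefixes described in the
// Postgres documentation quoted earlier. Standalone, standard library only;
// the literals and values match the IntegerNode test cases later in this file.

    package main

    import (
    	"fmt"
    	"strconv"
    )

    func main() {
    	for _, lit := range []string{"1_000_000", "0b100101", "0o273", "0x42F"} {
    		// Base 0 lets ParseInt infer the radix from the prefix and
    		// permits underscores between digits.
    		v, err := strconv.ParseInt(lit, 0, 64)
    		fmt.Println(lit, "=>", v, err)
    	}
    	// Output:
    	// 1_000_000 => 1000000 <nil>
    	// 0b100101 => 37 <nil>
    	// 0o273 => 187 <nil>
    	// 0x42F => 1071 <nil>
    }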
-func (n *UnaryNode) String() string { - buf := new(strings.Builder) - n.writeTo(buf, false, false) - return buf.String() -} - -// priority returns the priority of n.op. -func (n *UnaryNode) priority() uint8 { return n.op.priority() } - -// writeTo writes the SQL/JSON path string representation of the unary -// expression to buf. If withParens is true and the binary operation is -// UnaryPlus or UnaryMinus, parentheses will be written around the expression. -func (n *UnaryNode) writeTo(buf *strings.Builder, _, withParens bool) { - switch n.op { - case UnaryExists: - buf.WriteString("exists (") - n.operand.writeTo(buf, false, false) - buf.WriteRune(')') - case UnaryNot, UnaryFilter: - buf.WriteString(n.op.String()) - buf.WriteRune('(') - n.operand.writeTo(buf, false, false) - buf.WriteRune(')') - case UnaryIsUnknown: - buf.WriteRune('(') - n.operand.writeTo(buf, false, false) - buf.WriteString(") is unknown") - case UnaryPlus, UnaryMinus: - if withParens { - buf.WriteRune('(') - } - - buf.WriteString(n.op.String()) - n.operand.writeTo(buf, false, n.operand.priority() <= n.priority()) - - if withParens { - buf.WriteRune(')') - } - case UnaryDateTime, UnaryDate, UnaryTime, UnaryTimeTZ, UnaryTimestamp, UnaryTimestampTZ: - if n.operand == nil { - buf.WriteString(n.op.String() + "()") - } else { - buf.WriteString(n.op.String() + "(" + n.operand.String() + ")") - } - default: - // Write nothing. - } - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// Operator returns the UnaryNode's BinaryOperator. -func (n *UnaryNode) Operator() UnaryOperator { - return n.op -} - -// Operand returns the UnaryNode's operand. -func (n *UnaryNode) Operand() Node { - return n.operand -} - -// setNext sets the next node when n is in a linked list. -func (n *UnaryNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *UnaryNode) Next() Node { - return n.next -} - -// LinkNodes assembles nodes into a linked list, where a call to Next on each -// returns the next node in the list until the last node, where Next returns -// nil. -func LinkNodes(nodes []Node) Node { - size := len(nodes) - if size == 0 { - panic("No nodes passed to LinkNodes") - } - - head := nodes[0] - if size == 1 { - // Nothing to append. - return head - } - - // Find the end of an existing list, if any, so we can append to its end. - end := head - for next := end.Next(); next != nil; next = end.Next() { - end = next - } - - // Append the remaining nodes to the list. - //nolint:gosec // disable G602 (xxx fixed in https://github.com/securego/gosec/commit/ea5b276?) - for _, next := range nodes[1:] { - end.setNext(next) - end = next - } - - // Return the head of the list. - return head -} - -// ArrayIndexNode represents the nodes in an array index expression. -type ArrayIndexNode struct { - subscripts []Node - next Node -} - -// NewArrayIndex creates a new ArrayIndexNode consisting of subscripts. -// which must be BinaryNodes using the BinarySubscript operator. -func NewArrayIndex(subscripts []Node) *ArrayIndexNode { - return &ArrayIndexNode{subscripts: subscripts} -} - -// Subscripts returns all of the subscript nodes in n. -func (n *ArrayIndexNode) Subscripts() []Node { return n.subscripts } - -// String produces JSON Path array index string representation of the nodes in -// n. -func (n *ArrayIndexNode) String() string { - buf := new(strings.Builder) - n.writeTo(buf, false, false) - return buf.String() -} - -// writeTo writes the SQL/JSON path representation of n to buf. 
-func (n *ArrayIndexNode) writeTo(buf *strings.Builder, _, _ bool) { - buf.WriteRune('[') - for i, node := range n.subscripts { - if i > 0 { - buf.WriteRune(',') - } - node.writeTo(buf, false, false) - } - buf.WriteRune(']') - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// priority returns the priority of the ArrayIndexNode, which is always 6. -func (*ArrayIndexNode) priority() uint8 { return lowestPriority } - -// setNext sets the next node when n is in a linked list. -func (n *ArrayIndexNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *ArrayIndexNode) Next() Node { - return n.next -} - -// AnyNode represents any node in a path accessor with the expression -// 'first TO last'. -type AnyNode struct { - // jpiAny - first uint32 - last uint32 - next Node -} - -// NewAny returns a new AnyNode with first as its first index and last as its -// last. If either number is negative it's considered unbounded. Numbers -// greater than [math.MaxUint32] (or [math.MaxInt] on 32-bit systems) will -// max out at that number. -func NewAny(first, last int) *AnyNode { - n := &AnyNode{first: math.MaxUint32, last: math.MaxUint32} - if first >= 0 && first < min(math.MaxUint32, math.MaxInt) { - n.first = uint32(first) - } - if last >= 0 && last < min(math.MaxUint32, math.MaxInt) { - n.last = uint32(last) - } - return n -} - -// String returns the SQL/JSON path any node expression. -func (n *AnyNode) String() string { - buf := new(strings.Builder) - n.writeTo(buf, false, false) - return buf.String() -} - -// First returns the first index. If its value math.MaxUint32 it's considered -// unbounded. -func (n *AnyNode) First() uint32 { return n.first } - -// Last returns the last index. If its value math.MaxUint32 it's considered -// unbounded. -func (n *AnyNode) Last() uint32 { return n.last } - -// writeTo writes the SQL/JSON path representation of n to buf. -// If inKey is true it will be preceded by a '.'. -func (n *AnyNode) writeTo(buf *strings.Builder, inKey, _ bool) { - if inKey { - buf.WriteRune('.') - } - switch { - case n.first == 0 && n.last == math.MaxUint32: - buf.WriteString("**") - case n.first == n.last: - if n.first == math.MaxUint32 { - buf.WriteString("**{last}") - } else { - fmt.Fprintf(buf, "**{%v}", n.first) - } - case n.first == math.MaxUint32: - fmt.Fprintf(buf, "**{last to %v}", n.last) - case n.last == math.MaxUint32: - fmt.Fprintf(buf, "**{%v to last}", n.first) - default: - fmt.Fprintf(buf, "**{%v to %v}", n.first, n.last) - } - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// priority returns the priority of the AnyNode, which is always 6. -func (*AnyNode) priority() uint8 { return lowestPriority } - -// setNext sets the next node when n is in a linked list. -func (n *AnyNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *AnyNode) Next() Node { - return n.next -} - -// RegexNode represents a regular expression. -type RegexNode struct { - // jpiLikeRegex - operand Node - pattern string - flags regexFlags - next Node -} - -// NewRegex returns anew RegexNode that compares node to the regular expression -// pattern configured by flags. 
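// Aside: a brief usage sketch of the NewAny sentinel handling above, assuming
// this package is imported under the name ast (import path omitted). Negative
// bounds become math.MaxUint32 internally and render as "last", or the braces
// are dropped entirely for the 0-to-unbounded case; the expected strings
// follow the AnyNode test cases later in this file.

    ast.NewAny(0, -1).String() // "**"
    ast.NewAny(2, 2).String()  // "**{2}"
    ast.NewAny(2, -1).String() // "**{2 to last}"
    ast.NewAny(-1, 4).String() // "**{last to 4}"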
-func NewRegex(expr Node, pattern, flags string) (*RegexNode, error) { - f, err := newRegexFlags(flags) - if err != nil { - return nil, err - } - if err := validateRegex(pattern, f); err != nil { - return nil, err - } - return &RegexNode{operand: expr, pattern: pattern, flags: f}, nil -} - -// String returns the RegexNode as a SQL/JSON path 'like_regex' expression. -func (n *RegexNode) String() string { - buf := new(strings.Builder) - n.writeTo(buf, false, false) - return buf.String() -} - -// writeTo writes the SQL/JSON path representation of n to buf. If withParens it -// will be wrapped in parentheses. -func (n *RegexNode) writeTo(buf *strings.Builder, _, withParens bool) { - if withParens { - buf.WriteRune('(') - } - - n.operand.writeTo(buf, false, n.operand.priority() <= n.priority()) - fmt.Fprintf(buf, " like_regex %q%v", n.pattern, n.flags) - - if withParens { - buf.WriteRune(')') - } - if next := n.Next(); next != nil { - next.writeTo(buf, true, true) - } -} - -// priority returns the priority of the RegexNode, which is always 6. -func (*RegexNode) priority() uint8 { return lowestPriority } - -// Regexp returns a regexp.Regexp compiled from n. -func (n *RegexNode) Regexp() *regexp.Regexp { - flags := n.flags.goFlags() - if n.flags.shouldQuoteMeta() { - return regexp.MustCompile(flags + regexp.QuoteMeta(n.pattern)) - } - return regexp.MustCompile(n.flags.goFlags() + n.pattern) -} - -// Operand returns the RegexNode's operand. -func (n *RegexNode) Operand() Node { - return n.operand -} - -// setNext sets the next node when n is in a linked list. -func (n *RegexNode) setNext(next Node) { - n.next = next -} - -// Next returns the next node, if any. -func (n *RegexNode) Next() Node { - return n.next -} - -// AST represents the complete abstract syntax tree for a parsed SQL/JSON path. -type AST struct { - root Node - lax bool - pred bool -} - -// New creates a new AST with n as its root. If lax is true it's considered a -// lax path query, and if pred is true it's considered a predicate query. -func New(lax, pred bool, n Node) (*AST, error) { - if err := validateNode(n, 0, false); err != nil { - return nil, err - } - return &AST{root: n, lax: lax, pred: pred}, nil -} - -// IsLax indicates whether the path query is lax. -func (a *AST) IsLax() bool { return a.lax } - -// IsStrict indicates whether the path query is strict. -func (a *AST) IsStrict() bool { return !a.lax } - -// String returns the SQL/JSON Path-encoded string representation of the path. -func (a *AST) String() string { - buf := new(strings.Builder) - if !a.lax { - buf.WriteString("strict ") - } - a.root.writeTo(buf, false, true) - return buf.String() -} - -// Root returns the root node of the AST. -func (a *AST) Root() Node { - return a.root -} - -// IsPredicate returns true if the AST represents a PostgreSQL-style -// "predicate check" path. -func (a *AST) IsPredicate() bool { - return a.pred -} - -// validateNode recursively validates nodes. It's based on the Postgres -// flattenJsonPathParseItem function, but does not turn the AST into a binary -// representation, just does a second pass to detect any further issues. -// -//nolint:gocognit -func validateNode(node Node, depth int, inSubscript bool) error { - argDepth := 0 - switch node := node.(type) { - case nil: - return nil - case *StringNode, *VariableNode, *KeyNode, *NumericNode, *IntegerNode: - // Nothing to do. 
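// Aside: a hedged end-to-end sketch of composing these constructors and
// rendering the result with AST.String, again assuming the package is
// imported as ast and that fmt is available. The expected strings follow
// from the writeTo implementations above and the package's own tests.

    idx := ast.NewArrayIndex([]ast.Node{
    	ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), nil),
    	ast.NewBinary(ast.BinarySubscript, ast.NewInteger("2"), ast.NewInteger("4")),
    })
    root := ast.LinkNodes([]ast.Node{
    	ast.NewConst(ast.ConstRoot),
    	ast.NewKey("foo"),
    	idx,
    	ast.NewMethod(ast.MethodSize),
    })
    tree, err := ast.New(true, false, root) // lax, not a predicate
    if err != nil {
    	panic(err)
    }
    fmt.Println(tree.String()) // $."foo"[0,2 to 4].size()

    // Priority-driven parenthesization: || (priority 0) nested under &&
    // (priority 1) is wrapped in parentheses when serialized.
    expr := ast.NewBinary(ast.BinaryAnd,
    	ast.NewBinary(ast.BinaryOr, ast.NewConst(ast.ConstCurrent), ast.NewConst(ast.ConstCurrent)),
    	ast.NewBinary(ast.BinaryOr, ast.NewConst(ast.ConstCurrent), ast.NewConst(ast.ConstCurrent)),
    )
    fmt.Println(expr.String()) // (@ || @) && (@ || @)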
- case *BinaryNode: - if err := validateNode(node.left, depth+argDepth, inSubscript); err != nil { - return err - } - if err := validateNode(node.right, depth+argDepth, inSubscript); err != nil { - return err - } - case *UnaryNode: - if node.op == UnaryFilter { - argDepth++ - } - if err := validateNode(node.operand, depth+argDepth, inSubscript); err != nil { - return err - } - case *RegexNode: - if err := validateNode(node.operand, depth, inSubscript); err != nil { - return err - } - case *ConstNode: - switch node.kind { - case ConstCurrent: - if depth <= 0 { - //nolint:err113 - return errors.New("@ is not allowed in root expressions") - } - case ConstLast: - if !inSubscript { - //nolint:err113 - return errors.New("LAST is allowed only in array subscripts") - } - default: - // Nothing to check. - } - case *ArrayIndexNode: - for _, n := range node.subscripts { - if err := validateNode(n, depth+argDepth, true); err != nil { - return err - } - } - } - if next := node.Next(); next != nil { - if err := validateNode(next, depth, inSubscript); err != nil { - return err - } - } - - return nil -} - -// NewUnaryOrNumber returns a new node for op ast.UnaryPlus or ast.UnaryMinus. -// If node is numeric and not the first item in an accessor list, it returns a -// ast.NumericNode or ast.IntegerNode, as appropriate. -func NewUnaryOrNumber(op UnaryOperator, node Node) Node { - if node.Next() == nil { - switch node := node.(type) { - case *NumericNode: - switch op { - case UnaryPlus: - // Just a positive number, return it. - return node - case UnaryMinus: - // Just a negative number, return it with the minus sign. - return NewNumeric("-" + node.literal) - default: - panic(fmt.Sprintf("Operator must be + or - but is %v", op)) - } - case *IntegerNode: - switch op { - case UnaryPlus: - // Just a positive number, return it. - return node - case UnaryMinus: - // Just a negative number, return it with the minus sign. - return NewInteger("-" + node.literal) - default: - panic(fmt.Sprintf("Operator must be + or - but is %v", op)) - } - } - } - - return NewUnary(op, node) -} diff --git a/path/ast/ast_string.go b/path/ast/ast_string.go deleted file mode 100644 index ff2b5f5..0000000 --- a/path/ast/ast_string.go +++ /dev/null @@ -1,118 +0,0 @@ -// Code generated by "stringer -linecomment -output ast_string.go -type Constant,BinaryOperator,UnaryOperator,MethodName"; DO NOT EDIT. - -package ast - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[ConstRoot-0] - _ = x[ConstCurrent-1] - _ = x[ConstLast-2] - _ = x[ConstAnyArray-3] - _ = x[ConstAnyKey-4] - _ = x[ConstTrue-5] - _ = x[ConstFalse-6] - _ = x[ConstNull-7] -} - -const _Constant_name = "$@last[*]*truefalsenull" - -var _Constant_index = [...]uint8{0, 1, 2, 6, 9, 10, 14, 19, 23} - -func (i Constant) String() string { - if i < 0 || i >= Constant(len(_Constant_index)-1) { - return "Constant(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _Constant_name[_Constant_index[i]:_Constant_index[i+1]] -} -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
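// Aside: two quick, hedged fragments illustrating the behavior defined above
// (ast import assumed, as before). New rejects paths that use @ or LAST
// outside their legal contexts, and NewUnaryOrNumber folds a leading sign
// into a bare numeric literal instead of allocating a UnaryNode.

    _, err := ast.New(true, false, ast.NewConst(ast.ConstCurrent))
    // err.Error() == "@ is not allowed in root expressions"

    _, err = ast.New(true, false, ast.NewConst(ast.ConstLast))
    // err.Error() == "LAST is allowed only in array subscripts"

    ast.NewUnaryOrNumber(ast.UnaryMinus, ast.NewInteger("42")).String()           // "-42"
    ast.NewUnaryOrNumber(ast.UnaryMinus, ast.NewConst(ast.ConstCurrent)).String() // "-@"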
- var x [1]struct{} - _ = x[BinaryAnd-0] - _ = x[BinaryOr-1] - _ = x[BinaryEqual-2] - _ = x[BinaryNotEqual-3] - _ = x[BinaryLess-4] - _ = x[BinaryGreater-5] - _ = x[BinaryLessOrEqual-6] - _ = x[BinaryGreaterOrEqual-7] - _ = x[BinaryStartsWith-8] - _ = x[BinaryAdd-9] - _ = x[BinarySub-10] - _ = x[BinaryMul-11] - _ = x[BinaryDiv-12] - _ = x[BinaryMod-13] - _ = x[BinarySubscript-14] - _ = x[BinaryDecimal-15] -} - -const _BinaryOperator_name = "&&||==!=<><=>=starts with+-*/%to.decimal()" - -var _BinaryOperator_index = [...]uint8{0, 2, 4, 6, 8, 9, 10, 12, 14, 25, 26, 27, 28, 29, 30, 32, 42} - -func (i BinaryOperator) String() string { - if i < 0 || i >= BinaryOperator(len(_BinaryOperator_index)-1) { - return "BinaryOperator(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _BinaryOperator_name[_BinaryOperator_index[i]:_BinaryOperator_index[i+1]] -} -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[UnaryExists-0] - _ = x[UnaryNot-1] - _ = x[UnaryIsUnknown-2] - _ = x[UnaryPlus-3] - _ = x[UnaryMinus-4] - _ = x[UnaryFilter-5] - _ = x[UnaryDateTime-6] - _ = x[UnaryDate-7] - _ = x[UnaryTime-8] - _ = x[UnaryTimeTZ-9] - _ = x[UnaryTimestamp-10] - _ = x[UnaryTimestampTZ-11] -} - -const _UnaryOperator_name = "exists!is unknown+-?.datetime.date.time.time_tz.timestamp.timestamp_tz" - -var _UnaryOperator_index = [...]uint8{0, 6, 7, 17, 18, 19, 20, 29, 34, 39, 47, 57, 70} - -func (i UnaryOperator) String() string { - if i < 0 || i >= UnaryOperator(len(_UnaryOperator_index)-1) { - return "UnaryOperator(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _UnaryOperator_name[_UnaryOperator_index[i]:_UnaryOperator_index[i+1]] -} -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[MethodAbs-0] - _ = x[MethodSize-1] - _ = x[MethodType-2] - _ = x[MethodFloor-3] - _ = x[MethodCeiling-4] - _ = x[MethodDouble-5] - _ = x[MethodKeyValue-6] - _ = x[MethodBigInt-7] - _ = x[MethodBoolean-8] - _ = x[MethodInteger-9] - _ = x[MethodNumber-10] - _ = x[MethodString-11] -} - -const _MethodName_name = ".abs().size().type().floor().ceiling().double().keyvalue().bigint().boolean().integer().number().string()" - -var _MethodName_index = [...]uint8{0, 6, 13, 20, 28, 38, 47, 58, 67, 77, 87, 96, 105} - -func (i MethodName) String() string { - if i < 0 || i >= MethodName(len(_MethodName_index)-1) { - return "MethodName(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _MethodName_name[_MethodName_index[i]:_MethodName_index[i+1]] -} diff --git a/path/ast/ast_test.go b/path/ast/ast_test.go deleted file mode 100644 index 8995e68..0000000 --- a/path/ast/ast_test.go +++ /dev/null @@ -1,1449 +0,0 @@ -package ast - -import ( - "fmt" - "math" - "strconv" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestConstNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - kind Constant - str string - inKeyStr string - }{ - {"root", ConstRoot, "$", ""}, - {"current", ConstCurrent, "@", ""}, - {"last", ConstLast, "last", ""}, - {"any_array", ConstAnyArray, "[*]", ""}, - {"any_key", ConstAnyKey, "*", ".*"}, - {"true", ConstTrue, "true", ""}, - {"false", ConstFalse, "false", ""}, - {"null", ConstNull, "null", ""}, - {"unknown", -1, "Constant(-1)", ""}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - node := NewConst(tc.kind) - a.Implements((*Node)(nil), node) - a.Equal(tc.str, node.String()) - a.Equal(lowestPriority, node.priority()) - a.Nil(node.Next()) - a.Equal(tc.kind, node.kind) - a.Equal(tc.kind, node.Const()) - - // Test set_next() - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. - buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - - // Test writeTo with inKey true. 
- buf.Reset() - node.writeTo(buf, true, false) - if tc.inKeyStr == "" { - tc.inKeyStr = tc.str - } - a.Equal(tc.inKeyStr+`."foo"`, buf.String()) - }) - } -} - -func TestBinaryOperator(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op BinaryOperator - str string - prior uint8 - }{ - {"and", BinaryAnd, "&&", 1}, - {"or", BinaryOr, "||", 0}, - {"equal", BinaryEqual, "==", 2}, - {"not_equal", BinaryNotEqual, "!=", 2}, - {"less", BinaryLess, "<", 2}, - {"less_equal", BinaryLessOrEqual, "<=", 2}, - {"greater", BinaryGreater, ">", 2}, - {"greater_equal", BinaryGreaterOrEqual, ">=", 2}, - {"starts_with", BinaryStartsWith, "starts with", 2}, - {"add", BinaryAdd, "+", 3}, - {"sub", BinarySub, "-", 3}, - {"mul", BinaryMul, "*", 4}, - {"div", BinaryDiv, "/", 4}, - {"mod", BinaryMod, "%", 4}, - {"subscript", BinarySubscript, "to", 6}, - {"decimal", BinaryDecimal, ".decimal()", 6}, - {"unknown", -1, "BinaryOperator(-1)", 6}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(tc.str, tc.op.String()) - a.Equal(tc.prior, tc.op.priority()) - }) - } -} - -func TestUnaryOperator(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op UnaryOperator - str string - prior uint8 - }{ - {"exists", UnaryExists, "exists", 6}, - {"not", UnaryNot, "!", 6}, - {"is_unknown", UnaryIsUnknown, "is unknown", 6}, - {"plus", UnaryPlus, "+", 5}, - {"minus", UnaryMinus, "-", 5}, - {"filter", UnaryFilter, "?", 6}, - {"datetime", UnaryDateTime, ".datetime", 6}, - {"time", UnaryTime, ".time", 6}, - {"date", UnaryDate, ".date", 6}, - {"time_tz", UnaryTimeTZ, ".time_tz", 6}, - {"timestamp", UnaryTimestamp, ".timestamp", 6}, - {"timestamp_tz", UnaryTimestampTZ, ".timestamp_tz", 6}, - {"unknown", -1, "UnaryOperator(-1)", 6}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(tc.str, tc.op.String()) - a.Equal(tc.prior, tc.op.priority()) - }) - } -} - -func TestMethodNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - meth MethodName - str string - }{ - {"abs", MethodAbs, ".abs()"}, - {"size", MethodSize, ".size()"}, - {"type", MethodType, ".type()"}, - {"floor", MethodFloor, ".floor()"}, - {"ceiling", MethodCeiling, ".ceiling()"}, - {"keyvalue", MethodKeyValue, ".keyvalue()"}, - {"bigint", MethodBigInt, ".bigint()"}, - {"boolean", MethodBoolean, ".boolean()"}, - {"integer", MethodInteger, ".integer()"}, - {"number", MethodNumber, ".number()"}, - {"string", MethodString, ".string()"}, - {"unknown", -1, "MethodName(-1)"}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - node := NewMethod(tc.meth) - a.Implements((*Node)(nil), node) - a.Equal(tc.meth, node.name) - a.Equal(tc.meth, node.Name()) - a.Equal(tc.str, node.String()) - a.Equal(lowestPriority, node.priority()) - - // Test next. - a.Nil(node.next) - a.Nil(node.Next()) - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.next) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. 
- buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - }) - } -} - -func TestStringNodes(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - expr string - val string - str string - }{ - {"word", "word", "word", `"word"`}, - {"space", "hi there", "hi there", `"hi there"`}, - {"unicode", "lΓΆl", "lΓΆl", `"lΓΆl"`}, - {"backslash", `foo\nbar`, `foo\nbar`, `"foo\\nbar"`}, - {"quote", `"foo"`, `"foo"`, `"\"foo\""`}, - {"newline", "hi\nthere", "hi\nthere", `"hi\nthere"`}, - {"tab", "hi\tthere", "hi\tthere", `"hi\tthere"`}, - {"ff", "hi\fthere", "hi\fthere", `"hi\fthere"`}, - {"return", "hi\rthere", "hi\rthere", `"hi\rthere"`}, - {"vertical_tab", "hi\vthere", "hi\vthere", `"hi\vthere"`}, - {"backspace", "hi\bthere", "hi\bthere", `"hi\bthere"`}, - {"emoji", "πŸ€˜πŸ»πŸŽ‰πŸ³", "πŸ€˜πŸ»πŸŽ‰πŸ³", `"πŸ€˜πŸ»πŸŽ‰πŸ³"`}, - {"multibyte", "\U0001D11E", "π„ž", `"π„ž"`}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - str := NewString(tc.expr) - a.Implements((*Node)(nil), str) - a.Equal(tc.str, str.String()) - a.Equal(lowestPriority, str.priority()) - buf := new(strings.Builder) - str.writeTo(buf, false, false) - a.Equal(tc.str, buf.String()) - - // Test next. - a.Nil(str.next) - a.Nil(str.Next()) - str.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), str.next) - a.Equal(NewKey("foo"), str.Next()) - - variable := NewVariable(tc.expr) - a.Implements((*Node)(nil), variable) - a.Equal(tc.val, variable.Text()) - a.Equal("$"+tc.str, variable.String()) - a.Equal(lowestPriority, variable.priority()) - buf.Reset() - variable.writeTo(buf, false, false) - a.Equal("$"+tc.str, buf.String()) - - key := NewString(tc.expr) - a.Implements((*Node)(nil), key) - a.Equal(tc.val, key.Text()) - a.Equal(tc.str, key.String()) - a.Equal(lowestPriority, key.priority()) - buf.Reset() - key.writeTo(buf, false, false) - a.Equal(tc.str, buf.String()) - }) - } -} - -//nolint:dupl -func TestNumericNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - num string - val float64 - str string - err string - }{ - {"number", "42.3", 42.3, "42.3", ""}, - {"zero_dot", "0.", 0.0, "0", ""}, - {"dot_one", ".1", 0.1, "0.1", ""}, - {"zero_dot_zero", "0.0", 0.0, "0", ""}, - {"zero_dot_000", "0.000", 0.0, "0", ""}, - {"expo", "0.0010e-1", 0.0001, "0.0001", ""}, - {"pos_expo", "0.0010e+2", 0.1, "0.1", ""}, - {"dot_001", ".001", 0.001, "0.001", ""}, - {"dot_expo", "1.e1", 10, "10", ""}, - {"one_expo_3", "1e3", 1000, "1000", ""}, - {"1_dot_2e3", "1.2e3", 1200, "1200", ""}, - { - test: "max_float", - num: fmt.Sprintf("%v", math.MaxFloat64), - val: math.MaxFloat64, - str: fmt.Sprintf("%v", math.MaxFloat64), - }, - { - test: "min_float", - num: fmt.Sprintf("%v", math.SmallestNonzeroFloat64), - val: math.SmallestNonzeroFloat64, - str: fmt.Sprintf("%v", math.SmallestNonzeroFloat64), - }, - { - test: "invalid_float", - num: "xyz.4", - val: 0, - str: "xyz.4", - err: `strconv.ParseFloat: parsing "xyz.4": invalid syntax`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - if tc.err != "" { - a.PanicsWithError(tc.err, func() { NewNumeric(tc.num) }) - return - } - - num := NewNumeric(tc.num) - a.Implements((*Node)(nil), num) - a.Equal(tc.num, num.Literal()) - a.Equal(tc.str, num.String()) - a.Equal(lowestPriority, num.priority()) - //nolint:testifylint - a.Equal(tc.val, num.Float()) - - // Test writeTo. 
- buf := new(strings.Builder) - num.writeTo(buf, false, false) - a.Equal(tc.str, buf.String()) - - // Test next. - a.Nil(num.next) - a.Nil(num.Next()) - num.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), num.next) - a.Equal(NewKey("foo"), num.Next()) - - // With a next node, should wrap the number in parens. - buf.Reset() - num.writeTo(buf, false, false) - a.Equal("("+tc.str+`)."foo"`, buf.String()) - }) - } -} - -//nolint:dupl -func TestIntegerNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - num string - val int64 - str string - err string - }{ - {"number", "42", 42, "42", ""}, - {"underscores", "1_000_000", 1_000_000, "1000000", ""}, - {"binary", "0b100101", 37, "37", ""}, - {"octal", "0o273", 187, "187", ""}, - {"hex", "0x42F", 1071, "1071", ""}, - { - test: "max_int", - num: strconv.FormatInt(math.MaxInt64, 10), - val: math.MaxInt, - str: strconv.FormatInt(math.MaxInt64, 10), - }, - { - test: "min_int", - num: strconv.Itoa(math.MinInt32), - val: math.MinInt32, - str: strconv.Itoa(math.MinInt32), - }, - { - test: "invalid_int", - num: "123x", - val: 0, - str: "123x", - err: `strconv.ParseInt: parsing "123x": invalid syntax`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - if tc.err != "" { - a.PanicsWithError(tc.err, func() { NewInteger(tc.num) }) - return - } - - num := NewInteger(tc.num) - a.Implements((*Node)(nil), num) - a.Equal(tc.num, num.Literal()) - a.Equal(tc.str, num.String()) - a.Equal(lowestPriority, num.priority()) - a.Equal(tc.val, num.Int()) - - // Test writeTo. - buf := new(strings.Builder) - num.writeTo(buf, false, false) - a.Equal(tc.str, buf.String()) - - // Test next. - a.Nil(num.next) - a.Nil(num.Next()) - num.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), num.next) - a.Equal(NewKey("foo"), num.Next()) - - // With a next node, should wrap the number in parens. 
- buf.Reset() - num.writeTo(buf, false, false) - a.Equal("("+tc.str+`)."foo"`, buf.String()) - }) - } -} - -func TestBinaryNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - left Node - op BinaryOperator - right Node - str string - err string - }{ - { - test: "equal", - left: NewInteger("42"), - op: BinaryEqual, - right: NewInteger("99"), - str: "42 == 99", - }, - { - test: "equal_string", - left: NewConst(ConstCurrent), - op: BinaryEqual, - right: NewString("xyz"), - str: `@ == "xyz"`, - }, - { - test: "not_equal", - left: NewInteger("42"), - op: BinaryNotEqual, - right: NewInteger("99"), - str: "42 != 99", - }, - { - test: "lt", - left: NewInteger("42"), - op: BinaryLess, - right: NewInteger("99"), - str: "42 < 99", - }, - { - test: "le", - left: NewInteger("42"), - op: BinaryLessOrEqual, - right: NewInteger("99"), - str: "42 <= 99", - }, - { - test: "gt", - left: NewInteger("42"), - op: BinaryGreater, - right: NewInteger("99"), - str: "42 > 99", - }, - { - test: "ge", - left: NewInteger("42"), - op: BinaryGreaterOrEqual, - right: NewInteger("99"), - str: "42 >= 99", - }, - { - test: "and", - left: NewBinary(BinaryEqual, NewConst(ConstCurrent), NewConst(ConstTrue)), - op: BinaryAnd, - right: NewBinary(BinaryEqual, NewVariable("xxx"), NewInteger("42")), - str: `@ == true && $"xxx" == 42`, - }, - { - test: "or", - left: NewBinary(BinaryEqual, NewConst(ConstCurrent), NewConst(ConstTrue)), - op: BinaryOr, - right: NewBinary(BinaryEqual, NewVariable("xxx"), NewInteger("42")), - str: `@ == true || $"xxx" == 42`, - }, - { - test: "add", - left: NewInteger("42"), - op: BinaryAdd, - right: NewNumeric("98.6"), - str: `42 + 98.6`, - }, - { - test: "subtract", - left: NewInteger("42"), - op: BinarySub, - right: NewNumeric("98.6"), - str: `42 - 98.6`, - }, - { - test: "multiply", - left: NewInteger("42"), - op: BinaryMul, - right: NewNumeric("98.6"), - str: `42 * 98.6`, - }, - { - test: "divide", - left: NewInteger("42"), - op: BinaryDiv, - right: NewNumeric("98.6"), - str: `42 / 98.6`, - }, - { - test: "modulo", - left: NewInteger("42"), - op: BinaryMod, - right: NewInteger("12"), - str: `42 % 12`, - }, - { - test: "starts_with", - left: NewString("food"), - op: BinaryStartsWith, - right: NewString("foo"), - str: `"food" starts with "foo"`, - }, - // case jpiStartsWith: - { - test: "subscript", - left: NewInteger("42"), - op: BinarySubscript, - right: NewInteger("99"), - str: "42 to 99", - }, - { - test: "left_subscript", - left: NewInteger("42"), - op: BinarySubscript, - right: nil, - str: "42", - }, - { - test: "decimal_l_r", - left: NewInteger("42"), - op: BinaryDecimal, - right: NewInteger("99"), - str: ".decimal(42,99)", - }, - { - test: "decimal_l", - left: NewInteger("42"), - op: BinaryDecimal, - str: ".decimal(42)", - }, - { - test: "decimal_r", - op: BinaryDecimal, - right: NewInteger("99"), - str: ".decimal(,99)", - }, - { - test: "decimal", - op: BinaryDecimal, - str: ".decimal()", - }, - { - test: "unknown_op", - op: BinaryOperator(-1), - err: "Unknown binary operator BinaryOperator(-1)", - }, - { - test: "priority_parens", - op: BinaryAnd, - left: NewBinary(BinaryOr, NewConst(ConstCurrent), NewConst(ConstCurrent)), - right: NewBinary(BinaryOr, NewConst(ConstCurrent), NewConst(ConstCurrent)), - str: "(@ || @) && (@ || @)", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - node := NewBinary(tc.op, tc.left, tc.right) - a.Implements((*Node)(nil), node) - a.Equal(node.op.priority(), node.priority()) - a.Equal(tc.op, 
node.Operator()) - a.Equal(tc.left, node.Left()) - a.Equal(tc.right, node.Right()) - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { _ = node.String() }) - return - } - a.Equal(tc.str, node.String()) - - // Test next. - a.Nil(node.next) - a.Nil(node.Next()) - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.next) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. - buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - - // Test writeTo withParens true - buf.Reset() - node.writeTo(buf, false, true) - - switch node.op { - case BinaryAnd, BinaryOr, BinaryEqual, BinaryNotEqual, BinaryLess, - BinaryGreater, BinaryLessOrEqual, BinaryGreaterOrEqual, - BinaryAdd, BinarySub, BinaryMul, BinaryDiv, BinaryMod, - BinaryStartsWith: - a.Equal("("+tc.str+`)."foo"`, buf.String()) - default: - a.Equal(tc.str+`."foo"`, buf.String()) - } - }) - } -} - -func TestUnaryNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op UnaryOperator - node Node - str string - }{ - { - test: "exists", - op: UnaryExists, - node: NewInteger("99"), - str: "exists (99)", - }, - { - test: "is_unknown", - op: UnaryIsUnknown, - node: NewInteger("99"), - str: "(99) is unknown", - }, - { - test: "not", - op: UnaryNot, - node: NewInteger("99"), - str: "!(99)", - }, - { - test: "plus", - op: UnaryPlus, - node: NewInteger("99"), - str: "+99", - }, - { - test: "minus", - op: UnaryMinus, - node: NewInteger("99"), - str: "-99", - }, - { - test: "filter", - op: UnaryFilter, - node: NewInteger("99"), - str: "?(99)", - }, - { - test: "datetime", - op: UnaryDateTime, - node: NewInteger("99"), - str: ".datetime(99)", - }, - { - test: "datetime_nil", - op: UnaryDateTime, - str: ".datetime()", - }, - { - test: "date", - op: UnaryDate, - str: ".date()", - }, - { - test: "time", - op: UnaryTime, - node: NewInteger("99"), - str: ".time(99)", - }, - { - test: "time_tz", - op: UnaryTimeTZ, - node: NewInteger("99"), - str: ".time_tz(99)", - }, - { - test: "timestamp", - op: UnaryTimestamp, - node: NewInteger("99"), - str: ".timestamp(99)", - }, - { - test: "timestamp_tz", - op: UnaryTimestampTZ, - node: NewInteger("99"), - str: ".timestamp_tz(99)", - }, - { - test: "unknown_op", - op: UnaryOperator(-1), - node: NewInteger("99"), - str: "", - }, - { - test: "priority_parens", - op: UnaryPlus, - node: NewBinary(BinaryOr, NewConst(ConstCurrent), NewConst(ConstCurrent)), - str: "+(@ || @)", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - node := NewUnary(tc.op, tc.node) - a.Implements((*Node)(nil), node) - a.Equal(node.op.priority(), node.priority()) - a.Equal(tc.op, node.Operator()) - a.Equal(tc.node, node.Operand()) - a.Equal(tc.str, node.String()) - - // Test next. - a.Nil(node.next) - a.Nil(node.Next()) - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.next) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. 
- buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - - // Test writeTo withParens true - buf.Reset() - node.writeTo(buf, false, true) - - switch node.op { - case UnaryPlus, UnaryMinus: - a.Equal("("+tc.str+`)."foo"`, buf.String()) - default: - a.Equal(tc.str+`."foo"`, buf.String()) - } - }) - } -} - -func TestArrayIndexNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - nodes []Node - str string - }{ - { - test: "single_subscript", - nodes: []Node{NewBinary(BinarySubscript, NewInteger("1"), NewInteger("4"))}, - str: `[1 to 4]`, - }, - { - test: "start_only", - nodes: []Node{NewBinary(BinarySubscript, NewInteger("4"), nil)}, - str: `[4]`, - }, - { - test: "two_subscripts", - nodes: []Node{ - NewBinary(BinarySubscript, NewInteger("1"), NewInteger("4")), - NewBinary(BinarySubscript, NewInteger("6"), NewInteger("7")), - }, - str: `[1 to 4,6 to 7]`, - }, - { - test: "complex_subscripts", - nodes: []Node{ - NewBinary(BinarySubscript, NewInteger("1"), NewInteger("2")), - NewBinary(BinarySubscript, NewBinary(BinaryAdd, NewConst(ConstCurrent), NewInteger("3")), nil), - NewBinary(BinarySubscript, NewInteger("6"), nil), - }, - str: `[1 to 2,@ + 3,6]`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - node := NewArrayIndex(tc.nodes) - a.Implements((*Node)(nil), node) - a.Equal(tc.nodes, node.subscripts) - a.Equal(tc.nodes, node.Subscripts()) - a.Equal(lowestPriority, node.priority()) - a.Equal(tc.str, node.String()) - - // Test next. - a.Nil(node.next) - a.Nil(node.Next()) - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.next) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. - buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - }) - } -} - -func TestAnyNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - first int - last int - str string - }{ - { - test: "first_last", - first: 0, - last: 4, - str: `**{0 to 4}`, - }, - { - test: "neg_first_last", - first: -1, - last: 4, - str: `**{last to 4}`, - }, - { - test: "first_neg_last", - first: 4, - last: -1, - str: `**{4 to last}`, - }, - { - test: "zero_neg", - first: 0, - last: -1, - str: `**`, - }, - { - test: "neg_neg", - first: -1, - last: -1, - str: `**{last}`, - }, - { - test: "equal", - first: 2, - last: 2, - str: `**{2}`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - node := NewAny(tc.first, tc.last) - a.Implements((*Node)(nil), node) - a.Equal(lowestPriority, node.priority()) - a.Equal(tc.str, node.String()) - //nolint:gosec // disable G115, we know NewAny() compensates for them. - { - a.Equal(uint32(tc.first), node.first) - a.Equal(uint32(tc.first), node.First()) - a.Equal(uint32(tc.last), node.Last()) - a.Equal(uint32(tc.last), node.last) - } - - // Test next. - a.Nil(node.next) - a.Nil(node.Next()) - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.next) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. 
- buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - - // Test writeTo with inKey true - buf.Reset() - node.writeTo(buf, true, false) - a.Equal("."+tc.str+`."foo"`, buf.String()) - }) - } -} - -func TestRegexNode(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - node Node - re string - flag string - flags regexFlags - str string - err string - match []string - noMatch []string - }{ - { - test: "dot", - node: NewString("foo"), - re: `.`, - str: `"foo" like_regex "."`, - match: []string{"x", "abc", "123"}, - noMatch: []string{"", "\n"}, - }, - { - test: "anchor", - node: NewString("foo"), - re: `^a`, - str: `"foo" like_regex "^a"`, - match: []string{"a", "abc", "a\nb\nc"}, - noMatch: []string{"", "\na\n"}, - }, - { - test: "flags", - node: NewString("fOo"), - re: `^o.`, - flag: "ims", - flags: regexFlags(regexDotAll | regexICase | regexMLine), - str: `"fOo" like_regex "^o." flag "ism"`, - match: []string{"ox", "Ox", "oO", "a\no\nc"}, - noMatch: []string{"xoxo", "a\nxo"}, - }, - { - test: "quote", - node: NewString("foo"), - re: `xa+`, - flag: "iqsm", - flags: regexFlags(regexICase | regexQuote | regexDotAll | regexMLine), - str: `"foo" like_regex "xa+" flag "ismq"`, - match: []string{"xa+", "XA+", "\nXa+", "bmXa+"}, - noMatch: []string{`xa\+`, "x"}, - }, - { - test: "bad_flags", - node: NewString("foo"), - re: `.`, - flag: "x", - err: `XQuery "x" flag (expanded regular expressions) is not implemented`, - }, - { - test: "bad_pattern", - node: NewString("foo"), - re: `.(hi`, - err: "error parsing regexp: missing closing ): `.(hi`", - }, - { - test: "priority_parens", - node: NewBinary(BinaryOr, NewConst(ConstCurrent), NewConst(ConstCurrent)), - re: `xa+`, - flag: "iqsm", - flags: regexFlags(regexICase | regexQuote | regexDotAll | regexMLine), - str: `(@ || @) like_regex "xa+" flag "ismq"`, - match: []string{"xa+", "XA+", "\nXa+", "bmXa+"}, - noMatch: []string{`xa\+`, "x"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - node, err := NewRegex(tc.node, tc.re, tc.flag) - if tc.err != "" { - r.EqualError(err, tc.err) - a.Nil(node) - return - } - - r.NoError(err) - r.NotNil(node) - a.Implements((*Node)(nil), node) - a.Equal(lowestPriority, node.priority()) - a.Equal(tc.re, node.pattern) - a.Equal(tc.flags, node.flags) - a.Equal(tc.node, node.operand) - a.Equal(tc.node, node.Operand()) - a.Equal(tc.str, node.String()) - - // Test next. - a.Nil(node.next) - a.Nil(node.Next()) - node.setNext(NewKey("foo")) - a.Equal(NewKey("foo"), node.next) - a.Equal(NewKey("foo"), node.Next()) - - // Test writeTo. - buf := new(strings.Builder) - node.writeTo(buf, false, false) - a.Equal(tc.str+`."foo"`, buf.String()) - - // Test writeTo with withParens true - buf.Reset() - node.writeTo(buf, false, true) - a.Equal("("+tc.str+`)."foo"`, buf.String()) - - // Make sure the regex matches what it should. 
- re := node.Regexp() - r.NotNil(re) - - for _, str := range tc.match { - a.True(re.MatchString(str)) - } - - for _, str := range tc.noMatch { - if !a.False(re.MatchString(str)) { - t.Logf("Unexpectedly matched %q", str) - } - } - }) - } -} - -func TestNewUnaryOrNumber(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op UnaryOperator - node Node - exp Node - err string - }{ - { - test: "plus_integer", - op: UnaryPlus, - node: NewInteger("42"), - exp: NewInteger("42"), - }, - { - test: "minus_integer", - op: UnaryMinus, - node: NewInteger("42"), - exp: NewInteger("-42"), - }, - { - test: "other_integer", - op: UnaryExists, - node: NewInteger("42"), - err: "Operator must be + or - but is exists", - }, - { - test: "plus_accessor_integer", - op: UnaryPlus, - node: LinkNodes([]Node{NewInteger("42")}), - exp: NewInteger("42"), - }, - { - test: "minus_accessor_integer", - op: UnaryMinus, - node: LinkNodes([]Node{NewInteger("42")}), - exp: NewInteger("-42"), - }, - { - test: "minus_accessor_multi", - op: UnaryMinus, - node: LinkNodes([]Node{NewInteger("42"), NewInteger("42")}), - exp: NewUnary(UnaryMinus, LinkNodes([]Node{NewInteger("42"), NewInteger("42")})), - }, - { - test: "plus_numeric", - op: UnaryPlus, - node: NewNumeric("42.0"), - exp: NewNumeric("42.0"), - }, - { - test: "minus_numeric", - op: UnaryMinus, - node: NewNumeric("42.0"), - exp: NewNumeric("-42.0"), - }, - { - test: "other_numeric", - op: UnaryNot, - node: NewNumeric("42"), - err: "Operator must be + or - but is !", - }, - { - test: "plus_accessor_numeric", - op: UnaryPlus, - node: LinkNodes([]Node{NewNumeric("42.1")}), - exp: NewNumeric("42.1"), - }, - { - test: "minus_accessor_numeric", - op: UnaryMinus, - node: LinkNodes([]Node{NewNumeric("42")}), - exp: NewNumeric("-42"), - }, - { - test: "minus_accessor_multi_numeric", - op: UnaryMinus, - node: LinkNodes([]Node{NewNumeric("42"), NewConst(ConstCurrent)}), - exp: NewUnary(UnaryMinus, LinkNodes([]Node{NewNumeric("42"), NewConst(ConstCurrent)})), - }, - { - test: "plus_other", - op: UnaryPlus, - node: NewConst(ConstCurrent), - exp: NewUnary(UnaryPlus, NewConst(ConstCurrent)), - }, - { - test: "minus_other", - op: UnaryMinus, - node: NewConst(ConstCurrent), - exp: NewUnary(UnaryMinus, NewConst(ConstCurrent)), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { NewUnaryOrNumber(tc.op, tc.node) }) - return - } - a.Equal(tc.exp, NewUnaryOrNumber(tc.op, tc.node)) - }) - } -} - -func TestWriteToNext(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - node Node - exp string - }{ - { - test: "string_string", - node: LinkNodes([]Node{NewString("hi"), NewString("there")}), - exp: `"hi""there"`, - }, - { - test: "variable_string", - node: LinkNodes([]Node{NewVariable("hi"), NewString("there")}), - exp: `$"hi""there"`, - }, - { - test: "key_key", - node: LinkNodes([]Node{NewKey("hi"), NewKey("there")}), - exp: `"hi"."there"`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - buf := new(strings.Builder) - tc.node.writeTo(buf, false, false) - a.Equal(tc.exp, buf.String()) - }) - } -} - -func TestAST(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - node Node - err string - }{ - {"string", NewString("foo"), ""}, - {"accessor", LinkNodes([]Node{NewConst(ConstRoot)}), ""}, - {"current", NewConst(ConstCurrent), "@ is not allowed in root expressions"}, - } { - t.Run(tc.test, 
func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - if tc.err != "" { - tree, err := New(true, false, tc.node) - r.EqualError(err, tc.err) - a.Nil(tree) - return - } - - tree, err := New(true, false, tc.node) - r.NoError(err) - a.True(tree.lax) - a.True(tree.IsLax()) - a.False(tree.IsStrict()) - a.Equal(tc.node, tree.root) - a.Equal(tree.root.String(), tree.String()) - a.Equal(tc.node, tree.Root()) - a.False(tree.IsPredicate()) - - tree, err = New(false, true, tc.node) - r.NoError(err) - a.False(tree.lax) - a.False(tree.IsLax()) - a.True(tree.IsStrict()) - a.Equal(tc.node, tree.root) - a.Equal("strict "+tree.root.String(), tree.String()) - a.Equal(tc.node, tree.Root()) - a.True(tree.IsPredicate()) - }) - } -} - -func TestValidateNode(t *testing.T) { - t.Parallel() - goodRegex, _ := NewRegex(NewConst(ConstRoot), ".", "") - badRegex, _ := NewRegex(NewConst(ConstCurrent), ".", "") - - for _, tc := range []struct { - test string - node Node - depth int - inSub bool - err string - }{ - { - test: "string", - node: NewString("foo"), - }, - { - test: "variable", - node: NewVariable("foo"), - }, - { - test: "key", - node: NewKey("foo"), - }, - { - test: "numeric", - node: NewNumeric("42"), - }, - { - test: "integer", - node: NewInteger("42"), - }, - { - test: "binary", - node: NewBinary(BinaryAdd, NewInteger("42"), NewInteger("99")), - }, - { - test: "binary_left_fail", - node: NewBinary(BinaryAdd, NewConst(ConstCurrent), NewConst(ConstRoot)), - err: "@ is not allowed in root expressions", - }, - { - test: "binary_right_fail", - node: NewBinary(BinaryAdd, NewConst(ConstRoot), NewConst(ConstCurrent)), - err: "@ is not allowed in root expressions", - }, - { - test: "binary_current_okay_depth", - node: NewBinary(BinaryAdd, NewConst(ConstRoot), NewConst(ConstCurrent)), - depth: 1, - }, - { - test: "unary", - node: NewUnary(UnaryNot, NewConst(ConstRoot)), - }, - { - test: "unary_fail", - node: NewUnary(UnaryNot, NewConst(ConstLast)), - err: "LAST is allowed only in array subscripts", - }, - { - test: "unary_current_okay_depth", - node: NewUnary(UnaryNot, NewConst(ConstCurrent)), - depth: 1, - }, - { - test: "regex", - node: goodRegex, - }, - { - test: "bad_regex", - node: badRegex, - err: "@ is not allowed in root expressions", - }, - { - test: "regex_current_okay_depth", - node: badRegex, - depth: 1, - }, - { - test: "current", - node: NewConst(ConstCurrent), - err: "@ is not allowed in root expressions", - }, - { - test: "current_depth", - node: NewConst(ConstCurrent), - depth: 1, - }, - { - test: "last", - node: NewConst(ConstLast), - err: "LAST is allowed only in array subscripts", - }, - { - test: "last_in_sub", - node: NewConst(ConstLast), - inSub: true, - }, - { - test: "array", - node: NewArrayIndex([]Node{NewBinary(BinarySubscript, NewConst(ConstRoot), NewConst(ConstRoot))}), - }, - { - test: "array_last", - node: NewArrayIndex([]Node{NewBinary(BinarySubscript, NewConst(ConstRoot), NewConst(ConstLast))}), - }, - { - test: "array_current", - node: NewArrayIndex([]Node{NewBinary(BinarySubscript, NewConst(ConstRoot), NewConst(ConstCurrent))}), - err: "@ is not allowed in root expressions", - }, - { - test: "accessor", - node: LinkNodes([]Node{NewConst(ConstRoot)}), - }, - { - test: "accessor_current", - node: LinkNodes([]Node{NewConst(ConstCurrent)}), - err: "@ is not allowed in root expressions", - }, - { - test: "accessor_filter_current", - node: LinkNodes([]Node{NewConst(ConstRoot), NewUnary(UnaryFilter, NewConst(ConstCurrent))}), - }, - { - test: "nil", - node: 
nil, - }, - { - test: "next_nil", - node: LinkNodes([]Node{NewConst(ConstRoot), nil}), - }, - { - test: "next_fail", - node: LinkNodes([]Node{NewConst(ConstRoot), NewBinary(BinaryAdd, NewConst(ConstRoot), NewConst(ConstCurrent))}), - err: "@ is not allowed in root expressions", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - err := validateNode(tc.node, tc.depth, tc.inSub) - if tc.err == "" { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - } - }) - } -} - -func TestNodes(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - node any - }{ - {"ConstNode", NewConst(ConstRoot)}, - {"MethodNode", NewMethod(MethodAbs)}, - {"StringNode", &StringNode{}}, - {"VariableNode", &VariableNode{}}, - {"KeyNode", &KeyNode{}}, - {"NumericNode", &NumericNode{}}, - {"IntegerNode", &IntegerNode{}}, - {"AnyNode", &AnyNode{}}, - {"BinaryNode", &BinaryNode{}}, - {"UnaryNode", &UnaryNode{}}, - {"RegexNode", &RegexNode{}}, - {"ArrayIndexNode", &ArrayIndexNode{}}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Implements((*Node)(nil), tc.node) - }) - } -} - -func TestLinkNodes(t *testing.T) { - t.Parallel() - - // Test for empty list of nodes - t.Run("empty", func(t *testing.T) { - t.Parallel() - r := require.New(t) - - r.PanicsWithValue("No nodes passed to LinkNodes", func() { LinkNodes(nil) }) - r.PanicsWithValue("No nodes passed to LinkNodes", func() { LinkNodes([]Node{}) }) - }) - - t.Run("simple", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - nodes := []Node{ - NewConst(ConstRoot), - NewMethod(MethodAbs), - NewKey("yo"), - } - - a.Equal(nodes[0], LinkNodes(nodes)) - a.Equal(nodes[1], nodes[0].Next()) - a.Equal(nodes[2], nodes[1].Next()) - a.Nil(nodes[2].Next()) - - // Test writeTo. - buf := new(strings.Builder) - nodes[0].writeTo(buf, false, false) - a.Equal(`$.abs()."yo"`, buf.String()) - }) - - t.Run("append", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - nodes := []Node{ - &ConstNode{ - kind: ConstRoot, - next: &StringNode{&quotedString{ - str: "hi", - next: &NumericNode{&numberNode{}}, - }}, - }, - NewMethod(MethodAbs), - NewString("yo"), - } - - a.Equal(nodes[0], LinkNodes(nodes)) - // MethodAbs and yo should be appended to the numeric node at the end - // of the existing list in nodes[0]. - a.Equal(&StringNode{&quotedString{ - str: "hi", - next: &NumericNode{&numberNode{ - next: &MethodNode{name: MethodAbs, next: NewString("yo")}, - }}, - }}, nodes[0].Next()) - }) -} diff --git a/path/ast/regex.go b/path/ast/regex.go deleted file mode 100644 index 077cac9..0000000 --- a/path/ast/regex.go +++ /dev/null @@ -1,206 +0,0 @@ -package ast - -import ( - "errors" - "fmt" - "regexp/syntax" - "strings" -) - -// Use golang.org/x/tools/cmd/stringer to generate the String method for the -// regexFlag enums from their inline comments. -//go:generate stringer -linecomment -output regex_string.go -type regexFlag - -// regexFlag represents a single JSON Path regex flag.
-type regexFlag uint16 - -// https://github.com/postgres/postgres/blob/REL_18_BETA2/src/include/utils/jsonpath.h#L120-L125 -// -//nolint:godot -const ( - // i flag, case insensitive - regexICase regexFlag = 0x01 // i - // s flag, dot matches newline - regexDotAll regexFlag = 0x02 // s - // m flag, ^/$ match at newlines - regexMLine regexFlag = 0x04 // m - // x flag, ignore whitespace in pattern - regexWSpace regexFlag = 0x08 // x - // q flag, no special characters - regexQuote regexFlag = 0x10 // q -) - -// regexFlags is a bit mask of regexFlag flags. -type regexFlags uint16 - -// newRegexFlags parses flags to create a new regexFlags. -func newRegexFlags(flags string) (regexFlags, error) { - bitMask := regexFlag(0) - - // Parse the flags string, convert to bit mask. Duplicate flags are OK. - for _, f := range flags { - switch f { - case 'i': - bitMask |= regexICase - case 's': - bitMask |= regexDotAll - case 'm': - bitMask |= regexMLine - case 'x': - bitMask |= regexWSpace - case 'q': - bitMask |= regexQuote - default: - //nolint:err113,staticcheck - return 0, fmt.Errorf( - `Unrecognized flag character "%c" in LIKE_REGEX predicate`, - f, - ) - } - } - - // Validate compatibility with Go flags. - reFlags := regexFlags(bitMask) - if _, err := reFlags._syntaxFlags(); err != nil { - return 0, err - } - - return reFlags, nil -} - -// String returns the flags formatted as a SQL/JSON path 'flags ""' expression. -func (f regexFlags) String() string { - if f == 0 { - return "" - } - - flags := ` flag "` - bitMask := regexFlag(f) - - var flagsSb79 strings.Builder - for _, flag := range []regexFlag{regexICase, regexDotAll, regexMLine, regexWSpace, regexQuote} { - if bitMask&flag > 0 { - flagsSb79.WriteString(flag.String()) - } - } - flags += flagsSb79.String() - - return flags + `"` -} - -// syntaxFlags converts from XQuery regex flags to those recognized by -// regexp/syntax. -func (f regexFlags) syntaxFlags() syntax.Flags { - synFlags, _ := f._syntaxFlags() - return synFlags -} - -// _syntaxFlags converts from XQuery regex flags to those recognized by -// regexp/syntax. Returns an error for unsupported use of the 'x' flag. -func (f regexFlags) _syntaxFlags() (syntax.Flags, error) { - cFlags := syntax.OneLine | syntax.ClassNL | syntax.PerlX - bitMask := regexFlag(f) - - // Ignore case. - if bitMask&regexICase != 0 { - cFlags |= syntax.FoldCase - } - - // Per XQuery spec, if 'q' is specified then 'm', 's', 'x' are ignored - // https://www.w3.org/TR/xpath-functions-3/#flags - if bitMask&regexQuote != 0 { - return cFlags | syntax.Literal, nil - } - - // From the Postgres source - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_gram.y#L669-L675 - // - // > XQuery's 'x' mode is related to Spencer's expanded mode, but it's - // > not really enough alike to justify treating JSP_REGEX_WSPACE as - // > REG_EXPANDED. For now we treat 'x' as unimplemented; perhaps in - // > future we'll modify the regex library to have an option for - // > XQuery-style ignore-whitespace mode. - // - // Go regexp doesn't appear to support 'x', so we, too, treat it as - // unimplemented.
- if bitMask&regexWSpace != 0 { - //nolint:err113 - return 0, errors.New( - `XQuery "x" flag (expanded regular expressions) is not implemented`, - ) - } - - if bitMask&regexMLine != 0 { - cFlags &= ^syntax.OneLine - } - - if bitMask&regexDotAll != 0 { - cFlags |= syntax.DotNL - } - - return cFlags, nil -} - -// shouldQuoteMeta returns true if the flags include the 'q' flag, in which case -// all characters in the regular expression are treated as representing -// themselves, not as metacharacters --- that is, if the pattern should be -// escaped with the use of [regexp.QuoteMeta]. -func (f regexFlags) shouldQuoteMeta() bool { - return regexFlag(f)&regexQuote != 0 -} - -// goFlags converts the XQuery regex flags to a Go regexp inline flag group -// such as "(?ism)". Returns an empty string when no translatable flags are set. -func (f regexFlags) goFlags() string { - // Start flags with '(?' - const maxFlagSize = 6 - const startSize = 2 - flags := make([]byte, startSize, maxFlagSize) - flags[0] = '(' - flags[1] = '?' - - // need to compare same types. - bitMask := regexFlag(f) - - // Ignore case. - if bitMask&regexICase != 0 { - flags = append(flags, 'i') - } - - // Per XQuery spec, if 'q' is specified then 'm', 's' are ignored - // https://www.w3.org/TR/xpath-functions-3/#flags - if bitMask&regexQuote == 0 { - if bitMask&regexDotAll != 0 { - flags = append(flags, 's') - } - - if bitMask&regexMLine != 0 { - flags = append(flags, 'm') - } - } - - if len(flags) == startSize { - return "" - } - - return string(append(flags, ')')) -} - -// validateRegex validates that regexp/syntax compiles pattern with flags. -func validateRegex(pattern string, flags regexFlags) error { - // Make sure it parses. - _, err := syntax.Parse(pattern, flags.syntaxFlags()) - if err != nil { - //nolint:wrapcheck - return err - } - - // (Compile never returns an error, so skip this bit.) - // Make sure it compiles. - // _, err = syntax.Compile(re.Simplify()) - // if err != nil { - // return err - // } - - return nil -} diff --git a/path/ast/regex_string.go b/path/ast/regex_string.go deleted file mode 100644 index 4b89228..0000000 --- a/path/ast/regex_string.go +++ /dev/null @@ -1,43 +0,0 @@ -// Code generated by "stringer -linecomment -output regex_string.go -type regexFlag"; DO NOT EDIT. - -package ast - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again.
- var x [1]struct{} - _ = x[regexICase-1] - _ = x[regexDotAll-2] - _ = x[regexMLine-4] - _ = x[regexWSpace-8] - _ = x[regexQuote-16] -} - -const ( - _regexFlag_name_0 = "is" - _regexFlag_name_1 = "m" - _regexFlag_name_2 = "x" - _regexFlag_name_3 = "q" -) - -var ( - _regexFlag_index_0 = [...]uint8{0, 1, 2} -) - -func (i regexFlag) String() string { - switch { - case 1 <= i && i <= 2: - i -= 1 - return _regexFlag_name_0[_regexFlag_index_0[i]:_regexFlag_index_0[i+1]] - case i == 4: - return _regexFlag_name_1 - case i == 8: - return _regexFlag_name_2 - case i == 16: - return _regexFlag_name_3 - default: - return "regexFlag(" + strconv.FormatInt(int64(i), 10) + ")" - } -} diff --git a/path/ast/regex_test.go b/path/ast/regex_test.go deleted file mode 100644 index b611816..0000000 --- a/path/ast/regex_test.go +++ /dev/null @@ -1,205 +0,0 @@ -package ast - -import ( - "regexp/syntax" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestRegexFlag(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - flag regexFlag - val uint16 - str string - }{ - {regexICase, 0x01, "i"}, - {regexDotAll, 0x02, "s"}, - {regexMLine, 0x04, "m"}, - {regexWSpace, 0x08, "x"}, - {regexQuote, 0x10, "q"}, - {regexFlag(999), 999, "regexFlag(999)"}, - } { - t.Run(tc.str+"_flag", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(regexFlag(tc.val), tc.flag) - a.Equal(tc.str, tc.flag.String()) - }) - } -} - -func TestRegexFlags(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - expr string - exp regexFlags - str string - syn syntax.Flags - ref string - err string - }{ - { - test: "empty", - exp: regexFlags(0), - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX, - }, - { - test: "i", - expr: "i", - exp: regexFlags(regexICase), - str: ` flag "i"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.FoldCase, - ref: "(?i)", - }, - { - test: "s", - expr: "s", - exp: regexFlags(regexDotAll), - str: ` flag "s"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.DotNL, - ref: "(?s)", - }, - { - test: "m", - expr: "m", - exp: regexFlags(regexMLine), - str: ` flag "m"`, - syn: syntax.ClassNL | syntax.PerlX, - ref: "(?m)", - }, - { - test: "x", - expr: "x", - err: `XQuery "x" flag (expanded regular expressions) is not implemented`, - }, - { - test: "q", - expr: "q", - exp: regexFlags(regexQuote), - str: ` flag "q"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.Literal, - }, - { - test: "q", - expr: "q", - exp: regexFlags(regexQuote), - str: ` flag "q"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.Literal, - }, - { - test: "unknown", - expr: "y", - err: `Unrecognized flag character "y" in LIKE_REGEX predicate`, - }, - { - test: "qx", - expr: "qx", - exp: regexFlags(regexQuote | regexWSpace), - str: ` flag "xq"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.Literal, - }, - { - test: "qi", - expr: "qi", - exp: regexFlags(regexQuote | regexICase), - str: ` flag "iq"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.FoldCase | syntax.Literal, - ref: "(?i)", - }, - { - test: "qmsx", - expr: "qmsx", - exp: regexFlags(regexQuote | regexMLine | regexDotAll | regexWSpace), - str: ` flag "smxq"`, - syn: syntax.OneLine | syntax.ClassNL | syntax.PerlX | syntax.Literal, - }, - { - test: "msi", - expr: "msi", - exp: regexFlags(regexICase | regexDotAll | regexMLine), - str: ` flag "ism"`, - syn: syntax.FoldCase | syntax.ClassNL | syntax.PerlX | 
syntax.DotNL, - ref: "(?ism)", - }, - { - test: "dupes_okay", - expr: "msmm", - exp: regexFlags(regexMLine | regexDotAll), - str: ` flag "sm"`, - syn: syntax.DotNL | syntax.ClassNL | syntax.PerlX, - ref: "(?sm)", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - flags, err := newRegexFlags(tc.expr) - a.Equal(tc.exp, flags) - if tc.err != "" { - r.EqualError(err, tc.err) - return - } - r.NoError(err) - a.Equal(tc.str, flags.String()) - a.Equal(tc.syn, flags.syntaxFlags()) - a.Equal(tc.ref, flags.goFlags()) - }) - } -} - -func TestValidateRegex(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - re string - flags regexFlags - str string - err string - }{ - { - test: "dot", - re: ".", - }, - { - test: "case_insensitive", - re: "[abc]", - flags: regexFlags(regexICase), - }, - { - test: "digits", - re: `\d+`, - }, - { - test: "all_flags_but_x", - re: "[abc]", - flags: regexFlags(regexICase | regexDotAll | regexMLine | regexQuote), - }, - { - test: "parse_failure", - re: "(oops", - err: "error parsing regexp: missing closing ): `(oops`", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - err := validateRegex(tc.re, tc.flags) - if tc.err == "" { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - } - }) - } -} diff --git a/path/example_test.go b/path/example_test.go deleted file mode 100644 index 3d1086f..0000000 --- a/path/example_test.go +++ /dev/null @@ -1,326 +0,0 @@ -//nolint:godot -package path_test - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "log" - "time" - - "github.com/theory/sqljson/path" - "github.com/theory/sqljson/path/exec" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -// SQL-standard path expressions hew to the SQL standard, which allows -// Boolean predicates only in ?() filter expressions, and can return -// any number of results. -// -// PostgreSQL jsonb_path_query(): -// -// => SELECT '{ -// "track": { -// "segments": [ -// { -// "location": [ 47.763, 13.4034 ], -// "start time": "2018-10-14 10:05:14", -// "HR": 73 -// }, -// { -// "location": [ 47.706, 13.2635 ], -// "start time": "2018-10-14 10:39:21", -// "HR": 135 -// } -// ] -// } -// }' AS json \gset -// -// => SELECT jsonb_path_query(:'json', '$.track.segments[*] ? (@.HR > 130)."start time"'); -// jsonb_path_query -// ----------------------- -// "2018-10-14 10:39:21" -// (1 row) -// -// [Path.Query]: -func Example_sQLStandardPath() { - src := []byte(`{ - "track": { - "segments": [ - { - "location": [ 47.763, 13.4034 ], - "start time": "2018-10-14 10:05:14", - "HR": 73 - }, - { - "location": [ 47.706, 13.2635 ], - "start time": "2018-10-14 10:39:21", - "HR": 135 - } - ] - } - }`) - - // Parse the JSON. - var value any - if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) - } - - // Parse the SQL-standard jsonpath query. - p, err := path.Parse(`$.track.segments[*] ? (@.HR > 130)."start time"`) - if err != nil { - log.Fatal(err) - } - - // Execute the query against the JSON. - items, err := p.Query(context.Background(), value) - if err != nil { - log.Fatal(err) - } - - // Print the results. - fmt.Printf("%v\n", items) - // Output: [2018-10-14 10:39:21] -} - -// Boolean predicate check expressions are a PostgreSQL extension that allow -// path expression to be a Boolean predicate, which can return only true, -// false, and null. 
-// -// PostgreSQL jsonb_path_query(): -// -// => SELECT '{ -// "track": { -// "segments": [ -// { -// "location": [ 47.763, 13.4034 ], -// "start time": "2018-10-14 10:05:14", -// "HR": 73 -// }, -// { -// "location": [ 47.706, 13.2635 ], -// "start time": "2018-10-14 10:39:21", -// "HR": 135 -// } -// ] -// } -// }' AS json \gset -// -// => SELECT jsonb_path_query(:'json', '$.track.segments[*].HR > 130'); -// jsonb_path_query -// ------------------ -// true -// (1 row) -// -// [Path.Query]: -func Example_predicateCheckPath() { - src := []byte(`{ - "track": { - "segments": [ - { - "location": [ 47.763, 13.4034 ], - "start time": "2018-10-14 10:05:14", - "HR": 73 - }, - { - "location": [ 47.706, 13.2635 ], - "start time": "2018-10-14 10:39:21", - "HR": 135 - } - ] - } - }`) - - // Parse the JSON. - var value any - if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) - } - - // Parse the Postgres predicate check jsonpath query. - p, err := path.Parse(`$.track.segments[*].HR > 130`) - if err != nil { - log.Fatal(err) - } - - // Execute the query against the JSON. - matched, err := p.Match(context.Background(), value) - if err != nil { - log.Fatal(err) - } - - // Print the results. - fmt.Printf("%v\n", matched) - // Output: true -} - -func ExampleParse() { - p, err := path.Parse("$.x [*] ? ( @ > 2 )") - if err != nil { - log.Fatal(err) - } - fmt.Printf("%v\n", p) - // Output: $."x"[*]?(@ > 2) -} - -func ExampleMustParse() { - p := path.MustParse("$.x [*] ? ( @ > 2 )") - fmt.Printf("%v\n", p) - // Output: $."x"[*]?(@ > 2) -} - -func ExampleNew() { - ast, err := parser.Parse("$.x [*] ? ( @ > 2 )") - if err != nil { - log.Fatal(err) - } - p := path.New(ast) - fmt.Printf("%v\n", p) - // Output: $."x"[*]?(@ > 2) -} - -func ExamplePath_PgIndexOperator() { - p := path.MustParse("$.x[*] ?(@ > 2)") - fmt.Printf("SQL Standard: %v\n", p.PgIndexOperator()) - p = path.MustParse("$.x[*] > 2") - fmt.Printf("Predicate Check: %v\n", p.PgIndexOperator()) - // Output: SQL Standard: @? - // Predicate Check: @@ -} - -// [exec.WithVars] provides named values to be substituted into the -// path expression. PostgreSQL jsonb_path_query() example: -// -// => SELECT jsonb_path_query('{"a":[1,2,3,4,5]}', '$.a[*] ? (@ >= $min && @ <= $max)', '{"min":2, "max":4}'); -// jsonb_path_query -// ------------------ -// 2 -// 3 -// 4 -// (3 rows) -// -// [Path.Query] using [exec.WithVars]: -func Example_withVars() { - p := path.MustParse("$.a[*] ? (@ >= $min && @ <= $max)") - var value any - if err := json.Unmarshal([]byte(`{"a":[1,2,3,4,5]}`), &value); err != nil { - log.Fatal(err) - } - - res, err := p.Query( - context.Background(), - value, - exec.WithVars(exec.Vars{"min": float64(2), "max": float64(4)}), - ) - if err != nil { - log.Fatal(err) - } - - fmt.Printf("%v\n", res) - // Output: [2 3 4] -} - -// [exec.WithTZ] allows comparisons of date and time values that require -// timezone-aware conversions. By default such conversions are made relative -// to UTC, but can be made relative to another (user-preferred) time zone by -// using [types.ContextWithTZ] to add it to the context passed to the query -// method. -// -// This is the equivalent to using the *_tz() PostgreSQL functions. For -// example, this call to jsonb_path_query_tz() converts "2015-08-02", which -// has no offset, to a timestamptz in UTC, to compare to the two values. 
It -// selects only "2015-08-02 23:00:00-05" because, once it converts to PDT, its -// value is "2015-08-02 21:00:00-07", while "2015-08-02 01:00:00-05" resolves -// to "2015-08-01 23:00:00-07", which is less than 2015-08-02: -// -// => SET time zone 'PST8PDT'; -// SET -// => SELECT jsonb_path_query_tz( -// '["2015-08-02 01:00:00-05", "2015-08-02 23:00:00-05"]', -// '$[*] ? (@.datetime() >= "2015-08-02".date())' -// ); -// jsonb_path_query_tz -// -------------------------- -// "2015-08-02 23:00:00-05" -// -// Here's the equivalent using [types.ContextWithTZ] to set the time zone -// context in which [Path.Query] operates, and where [exec.WithTZ] allows -// conversion between timestamps with and without time zones: -func Example_withTZ() { - // Configure time zone to use when casting. - loc, err := time.LoadLocation("PST8PDT") - if err != nil { - log.Fatal(err) - } - - // Query in the context of that time zone. - p := path.MustParse(`$[*] ? (@.datetime() >= "2015-08-02".date())`) - res, err := p.Query( - types.ContextWithTZ(context.Background(), loc), - []any{"2015-08-01 02:00:00-05", "2015-08-02 23:00:00-05"}, - exec.WithTZ(), - ) - if err != nil { - log.Fatal(err) - } - - fmt.Printf("%v\n", res) - // Output: [2015-08-02 23:00:00-05] -} - -// [exec.WithSilent] suppresses [exec.ErrVerbose] errors, including missing -// object field or array element, unexpected JSON item type, and datetime -// and numeric errors. This behavior might be helpful when searching JSON -// entities of varying structure. -// -// For example, this PostgreSQL jsonb_path_query() call raises an error -// because index 1 it out of bounds of the array, ["hi"], which has only one -// value, and so raises an error: -// -// => SELECT jsonb_path_query(target => '["hi"]', path => 'strict $[1]'); -// ERROR: jsonpath array subscript is out of bounds -// -// Passing the silent parameter suppresses the error: -// -// => SELECT jsonb_path_query(target => '["hi"]', path => 'strict $[1]', silent => true); -// jsonb_path_query -// ------------------ -// (0 rows) -// -// Here's the equivalent call to [Path.Query] without and then with the -// [exec.WithSilent] option: -func Example_withSilent() { - // Execute query with array index out of bounds. - p := path.MustParse("strict $[1]") - ctx := context.Background() - res, err := p.Query(ctx, []any{"hi"}) - fmt.Printf("%v: %v\n", res, err) - - // WithSilent suppresses the error. - res, err = p.Query(ctx, []any{"hi"}, exec.WithSilent()) - fmt.Printf("%v: %v\n", res, err) - // Output: []: exec: jsonpath array subscript is out of bounds - // []: -} - -func ExamplePath_Exists_nULL() { - p := path.MustParse("strict $[1]") - ctx := context.Background() - res, err := p.Exists(ctx, []any{"hi"}, exec.WithSilent()) - if err != nil { - if errors.Is(err, exec.NULL) { - // The outcome was actually unknown. - fmt.Println("result was null") - } else { - // Some other error. - log.Fatal(err) - } - } else { - // Result is known. - fmt.Printf("%v\n", res) - } - // Output: result was null -} diff --git a/path/exec/array.go b/path/exec/array.go deleted file mode 100644 index c956cf5..0000000 --- a/path/exec/array.go +++ /dev/null @@ -1,153 +0,0 @@ -package exec - -import ( - "context" - "fmt" - - "github.com/theory/sqljson/path/ast" -) - -// execSubscript executes node, which must be an a ast.BinarySubscript -// operator, against value and returns the subscript indexes. 
-func (exec *Executor) execSubscript( - ctx context.Context, - node ast.Node, - value any, - arraySize int, -) (int, int, error) { - subscript, ok := node.(*ast.BinaryNode) - if !ok || subscript.Operator() != ast.BinarySubscript { - return 0, 0, fmt.Errorf( - "%w: jsonpath array subscript is not a single numeric value", - ErrExecution, - ) - } - - indexFrom, err := exec.getArrayIndex(ctx, subscript.Left(), value) - if err != nil { - return 0, 0, err - } - - indexTo := indexFrom - if right := subscript.Right(); right != nil { - indexTo, err = exec.getArrayIndex(ctx, right, value) - if err != nil { - return 0, 0, err - } - } - - if !exec.ignoreStructuralErrors && (indexFrom < 0 || indexFrom > indexTo || indexTo >= arraySize) { - return 0, 0, fmt.Errorf( - "%w: jsonpath array subscript is out of bounds", - ErrVerbose, - ) - } - - if indexFrom < 0 { - indexFrom = 0 - } - - if indexTo >= arraySize { - indexTo = arraySize - 1 - } - - return indexFrom, indexTo, nil -} - -// execArrayIndex executes node against value and passes the values selected -// to the next node. value must be an array ([]any) unless exec.autoWrap -// returns true, in which case it is considered the sole value in an array. -func (exec *Executor) execArrayIndex( - ctx context.Context, - node *ast.ArrayIndexNode, - value any, - found *valueList, -) (resultStatus, error) { - res := statusNotFound - var resErr error - - if array, ok := value.([]any); ok || exec.autoWrap() { - if !ok { - array = []any{value} // auto wrap - } - - size := len(array) - next := node.Next() - innermostArraySize := exec.innermostArraySize - defer func() { exec.innermostArraySize = innermostArraySize }() - exec.innermostArraySize = size // for LAST evaluation - - for _, subscript := range node.Subscripts() { - indexFrom, indexTo, err := exec.execSubscript(ctx, subscript, value, size) - if err != nil { - return exec.returnError(err) - } - - for index := indexFrom; index <= indexTo; index++ { - v := array[index] - if v == nil { - continue - } - - if next == nil && found == nil { - return statusOK, nil - } - - res, resErr = exec.executeNextItem(ctx, node, next, v, found) - if res.failed() || (res == statusOK && found == nil) { - break - } - } - } - - return res, resErr - } - - // In strict mode we accept only arrays. - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath array accessor can only be applied to an array", - ErrVerbose, - )) -} - -// executeItemUnwrapTargetArray unwraps the current array item and executes -// node for each of its elements. -func (exec *Executor) executeItemUnwrapTargetArray( - ctx context.Context, - node ast.Node, - value any, - found *valueList, -) (resultStatus, error) { - array, ok := value.([]any) - if !ok { - return statusFailed, fmt.Errorf( - "%w: invalid json array value type: %T", - ErrInvalid, value, - ) - } - - return exec.executeAnyItem(ctx, node, array, found, 1, 1, 1, false, false) -} - -// getArrayIndex executes an array subscript expression and converts the -// resulting numeric item to the integer type with truncation. 
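
The subscript handling above is what separates lax from strict array access at the user level: in lax mode out-of-range bounds are clamped to the array, while strict mode reports them as errors. A hedged usage sketch, assuming the public API shown in path/example_test.go above; the output comments are expectations, not captured output:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/theory/sqljson/path"
)

func main() {
	ctx := context.Background()
	arr := []any{"a", "b", "c"}

	// Lax mode (the default): the range is clamped to the array bounds,
	// so [1 to 10] effectively becomes [1 to 2].
	lax := path.MustParse("$[1 to 10]")
	res, err := lax.Query(ctx, arr)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res) // expected: [b c]

	// The last keyword resolves against the innermost array size.
	last := path.MustParse("$[last]")
	res, err = last.Query(ctx, arr)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res) // expected: [c]

	// Strict mode: the same out-of-range subscript is an error.
	strict := path.MustParse("strict $[1 to 10]")
	if _, err := strict.Query(ctx, arr); err != nil {
		fmt.Println(err) // expected: exec: jsonpath array subscript is out of bounds
	}
}
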
-func (exec *Executor) getArrayIndex( - ctx context.Context, - node ast.Node, - value any, -) (int, error) { - found := newList() - res, err := exec.executeItem(ctx, node, value, found) - if res == statusFailed { - return 0, err - } - - if len(found.list) != 1 { - return 0, fmt.Errorf( - "%w: jsonpath array subscript is not a single numeric value", - ErrVerbose, - ) - } - - return getJSONInt32(found.list[0], "array subscript") -} diff --git a/path/exec/array_test.go b/path/exec/array_test.go deleted file mode 100644 index b4571f7..0000000 --- a/path/exec/array_test.go +++ /dev/null @@ -1,423 +0,0 @@ -package exec - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" -) - -func TestExecSubscript(t *testing.T) { - t.Parallel() - ctx := context.Background() - lax, _ := parser.Parse("$") - strict, _ := parser.Parse("strict $") - - for _, tc := range []struct { - test string - path *ast.AST - node ast.Node - size int - from int - to int - err string - errIs error - }{ - { - test: "not_binary_node", - path: lax, - node: ast.NewString("hi"), - err: `exec: jsonpath array subscript is not a single numeric value`, - errIs: ErrExecution, - }, - { - test: "not_subscript", - path: lax, - node: ast.NewBinary(ast.BinaryAdd, ast.NewInteger("1"), ast.NewInteger("2")), - err: `exec: jsonpath array subscript is not a single numeric value`, - errIs: ErrExecution, - }, - { - test: "left_not_number", - path: lax, - node: ast.NewBinary(ast.BinarySubscript, ast.NewString("1"), ast.NewInteger("2")), - err: `exec: jsonpath array subscript is not a single numeric value`, - errIs: ErrVerbose, - }, - { - test: "right_not_number", - path: lax, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), ast.NewString("2")), - err: `exec: jsonpath array subscript is not a single numeric value`, - errIs: ErrVerbose, - }, - { - test: "from_lt_0_strict", - path: strict, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("-1"), nil), - err: `exec: jsonpath array subscript is out of bounds`, - errIs: ErrVerbose, - }, - { - test: "from_gt_to_strict", - path: strict, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("2"), ast.NewInteger("1")), - err: `exec: jsonpath array subscript is out of bounds`, - errIs: ErrVerbose, - }, - { - test: "to_gt_size_strict", - path: strict, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), ast.NewInteger("4")), - size: 2, - err: `exec: jsonpath array subscript is out of bounds`, - errIs: ErrVerbose, - }, - { - test: "from_lt_0_lax", - path: lax, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("-1"), ast.NewInteger("1")), - size: 3, - from: 0, - to: 1, - }, - { - test: "from_gt_to_lax", - path: lax, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("2"), ast.NewInteger("4")), - size: 7, - from: 2, - to: 4, - }, - { - test: "to_gt_size_lax", - path: lax, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), ast.NewInteger("14")), - size: 7, - from: 1, - to: 6, - }, - { - test: "no_right_operand", - path: lax, - node: ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), nil), - size: 10, - from: 1, - to: 1, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(tc.path, nil, true, false) - from, to, err := e.execSubscript(ctx, tc.node, nil, tc.size) - a.Equal(tc.from, from) - a.Equal(tc.to, to) - - if tc.errIs == nil 
{ - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.errIs) - } - }) - } -} - -func TestExecArrayIndex(t *testing.T) { - t.Parallel() - ctx := context.Background() - lax, _ := parser.Parse("$") - strict, _ := parser.Parse("strict $") - linked, _ := ast.LinkNodes([]ast.Node{ - ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), ast.NewConst(ast.ConstLast)), - }), - ast.NewMethod(ast.MethodString), - }).(*ast.ArrayIndexNode) - nextErr, _ := ast.LinkNodes([]ast.Node{ - ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), ast.NewConst(ast.ConstLast)), - }), - ast.NewVariable("foo"), - }).(*ast.ArrayIndexNode) - - for _, tc := range []struct { - test string - path *ast.AST - node *ast.ArrayIndexNode - value any - unwrap bool - exp resultStatus - found []any - err string - errIs error - }{ - { - test: "not_array_strict", - path: strict, - value: "hi", - exp: statusFailed, - found: []any{}, - err: `exec: jsonpath array accessor can only be applied to an array`, - errIs: ErrVerbose, - }, - { - test: "not_array_lax", - path: lax, - node: ast.NewArrayIndex([]ast.Node{ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), nil)}), - value: "hi", - exp: statusOK, - found: []any{"hi"}, - }, - { - test: "not_found_lax", - path: lax, - node: ast.NewArrayIndex([]ast.Node{ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), nil)}), - value: "hi", - exp: statusNotFound, - found: []any{}, - }, - { - test: "is_array", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), nil)}), - value: []any{"hi"}, - exp: statusOK, - found: []any{"hi"}, - }, - { - test: "is_array_second_item", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), nil)}), - value: []any{"hi", "go"}, - exp: statusOK, - found: []any{"go"}, - }, - { - test: "is_array_range", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), ast.NewInteger("1")), - }), - value: []any{"hi", "go", "on"}, - exp: statusOK, - found: []any{"hi", "go"}, - }, - { - test: "is_array_sub_range", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("2"), ast.NewInteger("5")), - }), - value: []any{"hi", "go", "on", true, "12", false, "nope"}, - exp: statusOK, - found: []any{"on", true, "12", false}, - }, - { - test: "is_array_last", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), ast.NewConst(ast.ConstLast)), - }), - value: []any{"hi", "go", "on"}, - exp: statusOK, - found: []any{"hi", "go", "on"}, - }, - { - test: "not_a_subscript", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ast.NewConst(ast.ConstRoot)}), - value: []any{"hi"}, - exp: statusFailed, - found: []any{}, - err: `exec: jsonpath array subscript is not a single numeric value`, - errIs: ErrExecution, - }, - { - test: "skip_nil", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), ast.NewConst(ast.ConstLast)), - }), - value: []any{"hi", nil, "go", "on"}, - exp: statusOK, - found: []any{"hi", "go", "on"}, - }, - { - test: "no_found_param", - path: strict, - node: ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("0"), ast.NewConst(ast.ConstLast)), - }), - value: []any{"hi", "go", "on"}, - exp: statusOK, - }, - { - test: "next_item", - 
path: strict, - node: linked, - value: []any{int64(2), true}, - exp: statusOK, - found: []any{"2", "true"}, - }, - { - test: "next_item_fail", - path: strict, - node: nextErr, - value: []any{int64(2), true}, - exp: statusFailed, - found: []any{}, - err: `exec: could not find jsonpath variable "foo"`, - errIs: ErrExecution, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(tc.path, nil, true, false) - e.innermostArraySize = 12 - found := newList() - if tc.found == nil { - found = nil - } - res, err := e.execArrayIndex(ctx, tc.node, tc.value, found) - a.Equal(tc.exp, res) - a.Equal(12, e.innermostArraySize) - if tc.found == nil { - a.Nil(found) - } else { - a.Equal(tc.found, found.list) - } - if tc.errIs == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.errIs) - } - }) - } -} - -func TestExecuteItemUnwrapTargetArray(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - - for _, tc := range []struct { - test string - node ast.Node - value any - exp resultStatus - found []any - err string - errIs error - }{ - { - test: "not_array", - value: "hi", - exp: statusFailed, - found: []any{}, - err: `exec invalid: invalid json array value type: string`, - errIs: ErrInvalid, - }, - { - test: "invalid_array", - value: []string{"hi"}, - exp: statusFailed, - found: []any{}, - err: `exec invalid: invalid json array value type: []string`, - errIs: ErrInvalid, - }, - { - test: "is_array_no_node", - value: []any{float64(1), float64(2)}, - exp: statusOK, - found: []any{float64(1), float64(2)}, - }, - { - test: "exec_node", - value: []any{float64(1), float64(2)}, - node: ast.NewMethod(ast.MethodString), - exp: statusOK, - found: []any{"1", "2"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(path, nil, true, false) - found := newList() - res, err := e.executeItemUnwrapTargetArray(ctx, tc.node, tc.value, found) - a.Equal(tc.exp, res) - a.Equal(tc.found, found.list) - if tc.errIs == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.errIs) - } - }) - } -} - -func TestGetArrayIndex(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$[*]") - - for _, tc := range []struct { - test string - node ast.Node - value any - exp int - err string - errIs error - }{ - { - test: "exec_item_fail", - node: ast.NewVariable("foo"), - err: `exec: could not find jsonpath variable "foo"`, - errIs: ErrExecution, - }, - { - test: "too_many_found", - node: path.Root(), - value: []any{1, 2}, - err: `exec: jsonpath array subscript is not a single numeric value`, - errIs: ErrExecution, - }, - { - test: "success", - node: path.Root(), - value: []any{int64(1)}, - exp: 1, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - integer, err := e.getArrayIndex(ctx, tc.node, tc.value) - a.Equal(tc.exp, integer) - if tc.errIs == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.errIs) - } - }) - } -} diff --git a/path/exec/boolean.go b/path/exec/boolean.go deleted file mode 100644 index 3c9f82b..0000000 --- a/path/exec/boolean.go +++ /dev/null @@ -1,175 +0,0 @@ -package exec - -import ( - "context" - "fmt" - - "github.com/theory/sqljson/path/ast" -) - -// executeBinaryBoolItem executes node 
against value and returns the result. -func (exec *Executor) executeBinaryBoolItem( - ctx context.Context, - node *ast.BinaryNode, - value any, -) (predOutcome, error) { - switch node.Operator() { - case ast.BinaryAnd: - res, err := exec.executeBoolItem(ctx, node.Left(), value, false) - if res == predFalse || err != nil { - return res, err - } - - // SQL/JSON says that we should check second arg in case of error - res2, err2 := exec.executeBoolItem(ctx, node.Right(), value, false) - if res2 == predTrue { - return res, err2 - } - return res2, err2 - case ast.BinaryOr: - res, err := exec.executeBoolItem(ctx, node.Left(), value, false) - if res == predTrue || err != nil { - return res, err - } - res2, err2 := exec.executeBoolItem(ctx, node.Right(), value, false) - if res2 == predFalse { - return res, err - } - return res2, err2 - case ast.BinaryEqual, ast.BinaryNotEqual, ast.BinaryLess, - ast.BinaryGreater, ast.BinaryLessOrEqual, ast.BinaryGreaterOrEqual: - return exec.executePredicate(ctx, node, node.Left(), node.Right(), value, true, exec.compareItems) - case ast.BinaryStartsWith: - return exec.executePredicate(ctx, node, node.Left(), node.Right(), value, false, executeStartsWith) - default: - return predUnknown, fmt.Errorf( - "%w: invalid jsonpath boolean operator %v", - ErrInvalid, node.Operator(), - ) - } -} - -// executeUnaryBoolItem executes node, which must be a ast.UnaryNot, -// ast.UnaryIsUnknown, or ast.UnaryExists operator, against value. -func (exec *Executor) executeUnaryBoolItem( - ctx context.Context, - node *ast.UnaryNode, - value any, -) (predOutcome, error) { - switch node.Operator() { - case ast.UnaryNot: - res, err := exec.executeBoolItem(ctx, node.Operand(), value, false) - switch res { - case predUnknown: - return res, err - case predTrue: - return predFalse, nil - case predFalse: - return predTrue, nil - } - case ast.UnaryIsUnknown: - res, _ := exec.executeBoolItem(ctx, node.Operand(), value, false) - return predFrom(res == predUnknown), nil - case ast.UnaryExists: - if exec.strictAbsenceOfErrors() { - // In strict mode we must get a complete list of values to - // check that there are no errors at all. - vals := newList() - res, err := exec.executeItemOptUnwrapResultSilent(ctx, node.Operand(), value, false, vals) - if res == statusFailed { - return predUnknown, err - } - if vals.isEmpty() { - return predFalse, nil - } - return predTrue, nil - } - - res, err := exec.executeItemOptUnwrapResultSilent(ctx, node.Operand(), value, false, nil) - if res == statusFailed { - return predUnknown, err - } - if res == statusOK { - return predTrue, nil - } - return predFalse, nil - default: - // We only process boolean unary operators here. - } - - return predUnknown, fmt.Errorf( - "%w: invalid jsonpath boolean operator %v", - ErrInvalid, node.Operator(), - ) -} - -// executeBoolItem executes node, which must be a ast.BinaryNode, -// ast.UnaryNode, or ast.RegexNode, against value. 
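
The && and || handling in executeBinaryBoolItem above follows SQL/JSON three-valued logic: false dominates AND, true dominates OR, and unknown propagates otherwise. A minimal standalone sketch of those truth tables, with made-up names for illustration (the package's own predOutcome type is unexported):

package main

import "fmt"

// outcome stands in for the executor's three-valued predicate result.
type outcome int

const (
	unknown outcome = iota
	isFalse
	isTrue
)

// and3 is three-valued AND: any false wins, otherwise any unknown wins.
func and3(a, b outcome) outcome {
	switch {
	case a == isFalse || b == isFalse:
		return isFalse
	case a == unknown || b == unknown:
		return unknown
	default:
		return isTrue
	}
}

// or3 is three-valued OR: any true wins, otherwise any unknown wins.
func or3(a, b outcome) outcome {
	switch {
	case a == isTrue || b == isTrue:
		return isTrue
	case a == unknown || b == unknown:
		return unknown
	default:
		return isFalse
	}
}

func main() {
	fmt.Println(and3(isTrue, unknown) == unknown)  // true
	fmt.Println(and3(isFalse, unknown) == isFalse) // true
	fmt.Println(or3(unknown, isFalse) == unknown)  // true
	fmt.Println(or3(unknown, isTrue) == isTrue)    // true
}
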
-func (exec *Executor) executeBoolItem( - ctx context.Context, - node ast.Node, - value any, - canHaveNext bool, -) (predOutcome, error) { - if !canHaveNext && node.Next() != nil { - return predUnknown, fmt.Errorf( - "%w: boolean jsonpath item cannot have next item", ErrInvalid, - ) - } - - switch node := node.(type) { - case *ast.BinaryNode: - return exec.executeBinaryBoolItem(ctx, node, value) - case *ast.UnaryNode: - return exec.executeUnaryBoolItem(ctx, node, value) - case *ast.RegexNode: - return exec.executePredicate(ctx, node, node.Operand(), nil, value, false, exec.executeLikeRegex) - } - - return predUnknown, fmt.Errorf( - "%w: invalid boolean jsonpath item type: %v", - ErrInvalid, node, - ) -} - -// appendBoolResult convert boolean execution status res to a boolean JSON -// value and executes the next jsonpath. -func (exec *Executor) appendBoolResult( - ctx context.Context, - node ast.Node, - found *valueList, - res predOutcome, - err error, -) (resultStatus, error) { - if err != nil { - return statusFailed, err - } - - next := node.Next() - if next == nil && found == nil { - // found singleton boolean value - return statusOK, nil - } - var value any - - if res == predUnknown { - value = nil - } else { - value = res == predTrue - } - - return exec.executeNextItem(ctx, node, next, value, found) -} - -// executeNestedBoolItem executes a nested (filters etc.) boolean expression -// pushing current SQL/JSON item onto the stack. -func (exec *Executor) executeNestedBoolItem( - ctx context.Context, - node ast.Node, - value any, -) (predOutcome, error) { - prev := exec.current - defer func(e *Executor, c any) { e.current = c }(exec, prev) - exec.current = value - return exec.executeBoolItem(ctx, node, value, false) -} diff --git a/path/exec/boolean_test.go b/path/exec/boolean_test.go deleted file mode 100644 index a07110a..0000000 --- a/path/exec/boolean_test.go +++ /dev/null @@ -1,687 +0,0 @@ -package exec - -import ( - "context" - "fmt" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" -) - -//nolint:dupl -func TestExecuteBinaryBoolItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - op ast.BinaryOperator - value any - exp predOutcome - err string - isErr error - }{ - { - test: "binary_and", - path: "$ == $ && $ == $", - value: true, - op: ast.BinaryAnd, - exp: predTrue, - }, - { - test: "binary_and_unknown_left", - path: "$ == $x && $ == $", - value: true, - op: ast.BinaryAnd, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_and_unknown_right", - path: "$ == $ && $ == $x", - value: true, - op: ast.BinaryAnd, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_or", - path: "$ == $ || $ == $", - value: true, - op: ast.BinaryOr, - exp: predTrue, - }, - { - test: "binary_or_both", - path: "$ == false || $ == $", - value: true, - op: ast.BinaryOr, - exp: predTrue, - }, - { - test: "binary_or_false", - path: "$ == false || $ == false", - value: true, - op: ast.BinaryOr, - exp: predFalse, - }, - { - test: "binary_or_unknown_left", - path: "$ == $x || $ == $", - value: true, - op: ast.BinaryOr, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_or_unknown_right", - path: "$ == false || $ 
== $x", - value: true, - op: ast.BinaryOr, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_eq_true", - path: "$ == $", - value: true, - op: ast.BinaryEqual, - exp: predTrue, - }, - { - test: "binary_eq_false", - path: "$ == false", - value: true, - op: ast.BinaryEqual, - exp: predFalse, - }, - { - test: "binary_eq_unknown", - path: "$ == $x", - value: true, - op: ast.BinaryEqual, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_ne_false", - path: "$ != $", - value: true, - op: ast.BinaryNotEqual, - exp: predFalse, - }, - { - test: "binary_ne_true", - path: "$ != false", - value: true, - op: ast.BinaryNotEqual, - exp: predTrue, - }, - { - test: "binary_ne_unknown", - path: "$ != $x", - value: true, - op: ast.BinaryNotEqual, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_lt_true", - path: "$ < 3", - value: int64(1), - op: ast.BinaryLess, - exp: predTrue, - }, - { - test: "binary_lt_false", - path: "$ < 3", - value: int64(3), - op: ast.BinaryLess, - exp: predFalse, - }, - { - test: "binary_lt_unknown", - path: "$ < $x", - value: int64(3), - op: ast.BinaryLess, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_gt_true", - path: "$ > 3", - value: int64(5), - op: ast.BinaryGreater, - exp: predTrue, - }, - { - test: "binary_gt_false", - path: "$ > 3", - value: int64(3), - op: ast.BinaryGreater, - exp: predFalse, - }, - { - test: "binary_gt_unknown", - path: "$ > $x", - value: int64(3), - op: ast.BinaryGreater, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_le_true", - path: "$ <= 3", - value: int64(2), - op: ast.BinaryLessOrEqual, - exp: predTrue, - }, - { - test: "binary_le_true_2", - path: "$ <= 3", - value: int64(3), - op: ast.BinaryLessOrEqual, - exp: predTrue, - }, - { - test: "binary_le_false", - path: "$ <= 3", - value: int64(4), - op: ast.BinaryLessOrEqual, - exp: predFalse, - }, - { - test: "binary_le_unknown", - path: "$ <= $x", - value: int64(3), - op: ast.BinaryLessOrEqual, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary_ge_true", - path: "$ >= 3", - value: int64(4), - op: ast.BinaryGreaterOrEqual, - exp: predTrue, - }, - { - test: "binary_le_true_2", - path: "$ >= 3", - value: int64(3), - op: ast.BinaryGreaterOrEqual, - exp: predTrue, - }, - { - test: "binary_le_false", - path: "$ >= 3", - value: int64(2), - op: ast.BinaryGreaterOrEqual, - exp: predFalse, - }, - { - test: "binary_le_unknown", - path: "$ >= $x", - value: int64(3), - op: ast.BinaryGreaterOrEqual, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "starts_with_true", - path: `$ starts with "a"`, - value: "abc", - op: ast.BinaryStartsWith, - exp: predTrue, - }, - { - test: "starts_with_false", - path: `$ starts with "b"`, - value: "abc", - op: ast.BinaryStartsWith, - exp: predFalse, - }, - { - test: "starts_with_unknown", - path: "$ starts with $x", - value: true, - op: ast.BinaryStartsWith, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "unsupported_binary", - path: "$ + 4", - value: true, - op: ast.BinaryAdd, - exp: predUnknown, - err: `exec invalid: invalid 
jsonpath boolean operator +`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path and make sure the root node is what we expect to - // be testing. - path, err := parser.Parse(tc.path) - r.NoError(err) - node, ok := path.Root().(*ast.BinaryNode) - r.True(ok) - a.Equal(tc.op, node.Operator()) - - // Test executeBinaryBoolItem with the root node set to tc.value. - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - res, err := e.executeBinaryBoolItem(ctx, node, tc.value) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -//nolint:dupl -func TestExecuteUnaryBoolItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - op ast.UnaryOperator - value any - exp predOutcome - err string - isErr error - }{ - { - test: "unary_not_true", - path: "!($ == false)", - value: true, - op: ast.UnaryNot, - exp: predTrue, - }, - { - test: "unary_not_false", - path: "!($ == true)", - value: true, - op: ast.UnaryNot, - exp: predFalse, - }, - { - test: "unary_not_unknown", - path: "!($ == $x)", - value: true, - op: ast.UnaryNot, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "unary_is_unknown_true", - path: "($ == $x) is unknown", - value: true, - op: ast.UnaryIsUnknown, - exp: predTrue, - }, - { - test: "unary_is_unknown_false", - path: "($ == $) is unknown", - value: true, - op: ast.UnaryIsUnknown, - exp: predFalse, - }, - { - test: "unary_is_unknown_false_false", - path: "($ == $) is unknown", - value: false, - op: ast.UnaryIsUnknown, - exp: predFalse, - }, - { - test: "unary_exists_true", - path: "exists ($)", - value: true, - op: ast.UnaryExists, - exp: predTrue, - }, - { - test: "unary_exists_false", - path: "exists ($.x)", - value: true, - op: ast.UnaryExists, - exp: predFalse, - }, - { - test: "unary_exists_unknown", - path: "exists ($x)", - value: true, - op: ast.UnaryExists, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "unary_exists_strict_true", - path: "strict exists ($[*])", - value: []any{"x", "y"}, - op: ast.UnaryExists, - exp: predTrue, - }, - { - test: "unary_exists_strict_false", - path: "strict exists ($[*])", - value: []any{}, - op: ast.UnaryExists, - exp: predFalse, - }, - { - test: "unary_exists_strict_unknown", - path: "strict exists ($x[*])", - value: []any{}, - op: ast.UnaryExists, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "unary_not_boolean", - path: "-$", - op: ast.UnaryMinus, - exp: predUnknown, - err: `exec invalid: invalid jsonpath boolean operator -`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path and make sure the root node is what we expect to - // be testing. - path, err := parser.Parse(tc.path) - r.NoError(err) - node, ok := path.Root().(*ast.UnaryNode) - r.True(ok) - a.Equal(tc.op, node.Operator()) - - // Test executeUnaryBoolItem with the root node set to tc.value. 
- e := newTestExecutor(path, nil, true, false) - e.root = tc.value - res, err := e.executeUnaryBoolItem(ctx, node, tc.value) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecuteBoolItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - value any - canHaveNext bool - exp predOutcome - err string - isErr error - }{ - { - test: "no_next", - path: "$ ?($ == $).x", - value: true, - exp: predUnknown, - err: `exec invalid: boolean jsonpath item cannot have next item`, - isErr: ErrInvalid, - }, - { - test: "next_ok_true", - path: "($.x == $.x).x", - value: map[string]any{"x": true}, - canHaveNext: true, - exp: predTrue, - }, - { - test: "next_ok_false", - path: "($.x != $.x).x", - value: map[string]any{"x": true}, - canHaveNext: true, - exp: predFalse, - }, - { - test: "next_ok_unknown", - path: "($.x == $x).x", - value: map[string]any{"x": true}, - canHaveNext: true, - exp: predUnknown, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "binary", - path: "$ == $", - value: true, - exp: predTrue, - }, - { - test: "unary", - path: "exists ($)", - value: true, - exp: predTrue, - }, - { - test: "regex", - path: `$ like_regex "^a"`, - value: "abc", - exp: predTrue, - }, - { - test: "invalid_boolean", - path: `$`, - value: true, - exp: predUnknown, - err: `exec invalid: invalid boolean jsonpath item type: $`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path. - path, err := parser.Parse(tc.path) - r.NoError(err) - - // Test executeBoolItem with the root node set to tc.value. - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - res, err := e.executeBoolItem(ctx, path.Root(), tc.value, tc.canHaveNext) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestAppendBoolResult(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - found []any - passOut predOutcome - passErr error - exp resultStatus - err string - isErr error - }{ - { - test: "passed_error", - path: "$", - passErr: fmt.Errorf("%w: OOPS", ErrExecution), - exp: statusFailed, - err: `exec: OOPS`, - isErr: ErrExecution, - }, - { - test: "pass_unknown", - path: "$", - passOut: predUnknown, - exp: statusOK, - }, - { - test: "pass_unknown_found", - path: "$", - passOut: predUnknown, - found: []any{nil}, - exp: statusOK, - }, - { - test: "no_found_ok", - path: "$", - exp: statusOK, - }, - { - test: "true_no_next", - path: "$", - passOut: predTrue, - exp: statusOK, - }, - { - test: "false_no_next", - path: "$", - passOut: predFalse, - exp: statusOK, - }, - { - test: "okay_next", - path: "($ == $).x", - passOut: predTrue, - exp: statusNotFound, - }, - { - test: "add_ok", - path: "$ + $", - passOut: predTrue, - found: []any{true}, - exp: statusOK, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path. - path, err := parser.Parse(tc.path) - r.NoError(err) - - // Construct found. - var found *valueList - if tc.found != nil { - found = newList() - } - - // Execute appendBoolResult. 
- e := newTestExecutor(path, nil, true, false) - res, err := e.appendBoolResult(ctx, path.Root(), found, tc.passOut, tc.passErr) - a.Equal(tc.exp, res) - if tc.found != nil { - a.Equal(tc.found, found.list) - } - - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecuteNestedBoolItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - root any - current any - value any - exp predOutcome - err string - isErr error - }{ - { - test: "switch_current", - path: "$ == $", - root: true, - current: "foo", - value: "bar", - exp: predTrue, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path. - path, err := parser.Parse(tc.path) - r.NoError(err) - - // Execute executeNestedBoolItem. - e := newTestExecutor(path, nil, true, false) - e.root = tc.root - e.current = tc.current - res, err := e.executeNestedBoolItem(ctx, path.Root(), tc.value) - a.Equal(tc.exp, res) - a.Equal(tc.current, e.current) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} diff --git a/path/exec/compare.go b/path/exec/compare.go deleted file mode 100644 index 81bb180..0000000 --- a/path/exec/compare.go +++ /dev/null @@ -1,173 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "fmt" - "strings" - - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/types" -) - -// compareItems compares two SQL/JSON items using comparison operation 'op'. -// Implements predicateCallback. -func (exec *Executor) compareItems(ctx context.Context, node ast.Node, left, right any) (predOutcome, error) { - var cmp int - bin, ok := node.(*ast.BinaryNode) - if !ok { - return predUnknown, fmt.Errorf( - "%w: invalid node type %T passed to compareItems", ErrInvalid, node, - ) - } - op := bin.Operator() - - if (left == nil && right != nil) || (right == nil && left != nil) { - // Equality and order comparison of nulls to non-nulls returns - // always false, but inequality comparison returns true. - return predFrom(op == ast.BinaryNotEqual), nil - } - - switch left := left.(type) { - case nil: - cmp = 0 - case bool: - cmp, ok = compareBool(left, right) - if !ok { - return predUnknown, nil - } - case int64, float64, json.Number: - switch right.(type) { - case int64, float64, json.Number: - cmp = compareNumeric(left, right) - default: - return predUnknown, nil - } - case string: - right, ok := right.(string) - if !ok { - return predUnknown, nil - } - cmp = strings.Compare(left, right) - if op == ast.BinaryEqual { - return predFrom(cmp == 0), nil - } - case *types.Date, *types.Time, *types.TimeTZ, *types.Timestamp, *types.TimestampTZ: - var err error - cmp, err = compareDatetime(ctx, left, right, exec.useTZ) - if cmp < -1 || err != nil { - return predUnknown, err - } - case map[string]any, []any: - // non-scalars are not comparable - return predUnknown, nil - default: - return predUnknown, fmt.Errorf( - "%w: invalid json value type %T", ErrInvalid, left, - ) - } - - return applyCompare(op, cmp) -} - -// compareBool compares two boolean values and returns 0, 1, or -1. Returns -// false if right is not a bool. 
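
The null-handling rule at the top of compareItems (equality and ordering against null are false for non-null operands, while inequality is true) is easiest to see through the public API shown in path/example_test.go above. A hedged sketch; the output comments are expectations based on that rule and on the matching jsonb_path_query() behavior, not captured output:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/theory/sqljson/path"
)

func main() {
	ctx := context.Background()
	data := []any{int64(1), nil, "a"}

	// == null matches only JSON null values.
	eq := path.MustParse("$[*] ? (@ == null)")
	res, err := eq.Query(ctx, data)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res) // expected: [<nil>]

	// != null matches every non-null value.
	ne := path.MustParse("$[*] ? (@ != null)")
	res, err = ne.Query(ctx, data)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res) // expected: [1 a]
}
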
-func compareBool(left bool, right any) (int, bool) { - right, ok := right.(bool) - if !ok { - return 0, false - } - switch { - case left == right: - return 0, true - case left: - return 1, true - default: - return -1, true - } -} - -// applyCompare applies op relative to cmp. -func applyCompare(op ast.BinaryOperator, cmp int) (predOutcome, error) { - switch op { - case ast.BinaryEqual: - return predFrom(cmp == 0), nil - case ast.BinaryNotEqual: - return predFrom(cmp != 0), nil - case ast.BinaryLess: - return predFrom(cmp < 0), nil - case ast.BinaryGreater: - return predFrom(cmp > 0), nil - case ast.BinaryLessOrEqual: - return predFrom(cmp <= 0), nil - case ast.BinaryGreaterOrEqual: - return predFrom(cmp >= 0), nil - default: - // We only process binary comparison operators here. - return predUnknown, fmt.Errorf( - "%w: unrecognized jsonpath comparison operation %v", ErrInvalid, op, - ) - } -} - -// compareNumbers compares two numbers and returns 0, 1, or -1. -func compareNumbers[T int | int64 | float64](left, right T) int { - if left < right { - return -1 - } - if left > right { - return 1 - } - return 0 -} - -// compareBool compares two numeric values and returns 0, 1, or -1. The left -// and right params must be int64, float64, or json.Number values. -func compareNumeric(left, right any) int { - switch left := left.(type) { - case int64: - switch right := right.(type) { - case int64: - return compareNumbers(left, right) - case float64: - return compareNumbers(float64(left), right) - case json.Number: - if rightInt, err := right.Int64(); err == nil { - return compareNumbers(left, rightInt) - } - rightFloat, err := right.Float64() - if err == nil { - return compareNumbers(float64(left), rightFloat) - } - // This should not happen. - panic(err) - } - case float64: - switch right := right.(type) { - case float64: - return compareNumbers(left, right) - case int64: - return compareNumbers(left, float64(right)) - case json.Number: - rightFloat, err := right.Float64() - if err == nil { - return compareNumbers(left, rightFloat) - } - // This should not happen. - panic(err) - } - case json.Number: - if left, err := left.Int64(); err == nil { - return compareNumeric(left, right) - } - leftFloat, err := left.Float64() - if err == nil { - return compareNumeric(leftFloat, right) - } - // This should not happen. 
- panic(err) - } - - // This should not happen - panic(fmt.Sprintf("Value not numeric: %q", left)) -} diff --git a/path/exec/compare_test.go b/path/exec/compare_test.go deleted file mode 100644 index f000b4e..0000000 --- a/path/exec/compare_test.go +++ /dev/null @@ -1,669 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -func TestCompareItems(t *testing.T) { - t.Parallel() - now := time.Now() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - left any - right any - exp predOutcome - err string - isErr error - }{ - { - test: "not_binary", - path: "$", - exp: predUnknown, - err: `exec invalid: invalid node type *ast.ConstNode passed to compareItems`, - isErr: ErrInvalid, - }, - { - test: "left_eq_null", - path: "$ == $", - right: int64(6), - exp: predFalse, - }, - { - test: "left_ne_null", - path: "$ != $", - right: int64(6), - exp: predTrue, - }, - { - test: "right_eq_null", - path: "$ == $", - left: int64(6), - exp: predFalse, - }, - { - test: "right_ne_null", - path: "$ != $", - left: int64(6), - exp: predTrue, - }, - { - test: "both_null", - path: "$ == $", - exp: predTrue, - }, - { - test: "bool_true", - path: "$ == $", - left: true, - right: true, - exp: predTrue, - }, - { - test: "bool_false", - path: "$ == $", - left: true, - right: false, - exp: predFalse, - }, - { - test: "bool_unknown", - path: "$ == $", - left: true, - right: "true", - exp: predUnknown, - }, - { - test: "int_true", - path: "$ == $", - left: int64(3), - right: int64(3), - exp: predTrue, - }, - { - test: "int_false", - path: "$ == $", - left: int64(3), - right: int64(4), - exp: predFalse, - }, - { - test: "int_unknown", - path: "$ == $", - left: int64(3), - right: "4", - exp: predUnknown, - }, - { - test: "float_true", - path: "$ == $", - left: float64(3.0), - right: float64(3.0), - exp: predTrue, - }, - { - test: "float_false", - path: "$ == $", - left: float64(3.1), - right: float64(4.2), - exp: predFalse, - }, - { - test: "float_unknown", - path: "$ == $", - left: float64(3.0), - right: "3.0", - exp: predUnknown, - }, - { - test: "json_number_true", - path: "$ == $", - left: json.Number("3"), - right: json.Number("3"), - exp: predTrue, - }, - { - test: "json_number_false", - path: "$ == $", - left: json.Number("3"), - right: json.Number("3.1"), - exp: predFalse, - }, - { - test: "json_number_unknown", - path: "$ == $", - left: json.Number("3"), - right: "3", - exp: predUnknown, - }, - { - test: "string_true", - path: "$ == $", - left: "abc", - right: "abc", - exp: predTrue, - }, - { - test: "string_false", - path: "$ == $", - left: "abc", - right: "abd", - exp: predFalse, - }, - { - test: "string_unknown", - path: "$ == $", - left: "abc", - right: false, - exp: predUnknown, - }, - { - test: "string_ne_true", - path: "$ != $", - left: "abc", - right: "abd", - exp: predTrue, - }, - { - test: "datetime_true", - path: "$ == $", - left: types.NewDate(now), - right: types.NewDate(now), - exp: predTrue, - }, - { - test: "datetime_false", - path: "$ == $", - left: types.NewDate(now), - right: types.NewTimestamp(now), - exp: predFalse, - }, - { - test: "datetime_unknown", - path: "$ == $", - left: types.NewDate(now), - right: types.NewTime(now), - exp: predUnknown, - }, - { - test: "datetime_unknown_err", - path: "$ == $", - left: 
types.NewDate(now), - right: "not a date", - exp: predUnknown, - err: `exec invalid: unrecognized SQL/JSON datetime type string`, - isErr: ErrInvalid, - }, - { - test: "object_unknown", - path: "$ == $", - left: map[string]any{}, - right: false, - exp: predUnknown, - }, - { - test: "array_unknown", - path: "$ == $", - left: []any{}, - right: false, - exp: predUnknown, - }, - { - test: "anything_else", - path: "$ == $", - left: int32(3), - right: false, - exp: predUnknown, - err: `exec invalid: invalid json value type int32`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path. - path, err := parser.Parse(tc.path) - r.NoError(err) - - // Execute compareItems. - e := newTestExecutor(path, nil, true, false) - res, err := e.compareItems(ctx, path.Root(), tc.left, tc.right) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestCompareBool(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - path string - left bool - right any - exp int - ok bool - }{ - { - test: "true_true", - left: true, - right: true, - exp: 0, - ok: true, - }, - { - test: "true_false", - left: true, - right: false, - exp: 1, - ok: true, - }, - { - test: "false_true", - left: false, - right: true, - exp: -1, - ok: true, - }, - { - test: "false_false", - left: false, - right: false, - exp: 0, - ok: true, - }, - { - test: "right_not_bool", - left: false, - right: "false", - exp: 0, - ok: false, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - res, ok := compareBool(tc.left, tc.right) - a.Equal(tc.exp, res) - a.Equal(tc.ok, ok) - }) - } -} - -func TestApplyCompare(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op ast.BinaryOperator - exp []predOutcome - err bool - }{ - { - test: "equal", - op: ast.BinaryEqual, - exp: []predOutcome{predFalse, predTrue, predFalse}, - }, - { - test: "not_equal", - op: ast.BinaryNotEqual, - exp: []predOutcome{predTrue, predFalse, predTrue}, - }, - { - test: "lt", - op: ast.BinaryLess, - exp: []predOutcome{predTrue, predFalse, predFalse}, - }, - { - test: "gt", - op: ast.BinaryGreater, - exp: []predOutcome{predFalse, predFalse, predTrue}, - }, - { - test: "le", - op: ast.BinaryLessOrEqual, - exp: []predOutcome{predTrue, predTrue, predFalse}, - }, - { - test: "ge", - op: ast.BinaryGreaterOrEqual, - exp: []predOutcome{predFalse, predTrue, predTrue}, - }, - { - test: "add", - op: ast.BinaryAdd, - err: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - for i, cmp := range []int{-1, 0, 1} { - res, err := applyCompare(tc.op, cmp) - if tc.err { - r.EqualError(err, "exec invalid: unrecognized jsonpath comparison operation +") - r.ErrorIs(err, ErrInvalid) - a.Equal(predUnknown, res) - } else { - r.NoError(err) - a.Equal(tc.exp[i], res) - } - } - }) - } -} - -func TestCompareNumbers(t *testing.T) { - t.Parallel() - - t.Run("int_int", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(42, 42)) - a.Equal(-1, compareNumbers(42, 43)) - a.Equal(1, compareNumbers(42, 41)) - }) - - t.Run("int_int64", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(42, int64(42))) - a.Equal(-1, compareNumbers(42, int64(43))) - a.Equal(1, compareNumbers(42, int64(41))) - }) - - t.Run("int_float64", 
func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(42, float64(42.0))) - a.Equal(-1, compareNumbers(42, float64(42.1))) - a.Equal(1, compareNumbers(42, float64(41.9))) - }) - - t.Run("int64_int", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(int64(42), 42)) - a.Equal(-1, compareNumbers(int64(42), 43)) - a.Equal(1, compareNumbers(int64(42), 41)) - }) - - t.Run("int64_int64", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(int64(42), int64(42))) - a.Equal(-1, compareNumbers(int64(42), int64(43))) - a.Equal(1, compareNumbers(int64(42), int64(41))) - }) - - t.Run("float64_int", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(float64(42.0), 42)) - a.Equal(-1, compareNumbers(float64(41.9), 42)) - a.Equal(1, compareNumbers(float64(42.1), 42)) - }) - - t.Run("float64_float64", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(0, compareNumbers(float64(42.0), float64(42.00))) - a.Equal(-1, compareNumbers(float64(42), float64(42.1))) - a.Equal(1, compareNumbers(float64(42.0), float64(41.9))) - }) -} - -func TestCompareNumeric(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - left any - right any - exp int - panic bool - }{ - { - test: "int64_int64_eq", - left: int64(42), - right: int64(42), - exp: 0, - }, - { - test: "int64_int64_lt", - left: int64(41), - right: int64(42), - exp: -1, - }, - { - test: "int64_int64_gt", - left: int64(43), - right: int64(42), - exp: 1, - }, - { - test: "int64_float64_eq", - left: int64(42), - right: float64(42.0), - exp: 0, - }, - { - test: "int64_float64_lt", - left: int64(42), - right: float64(42.1), - exp: -1, - }, - { - test: "int64_float64_gt", - left: int64(42), - right: float64(41.9), - exp: 1, - }, - { - test: "int64_json_int_eq", - left: int64(42), - right: json.Number("42"), - exp: 0, - }, - { - test: "int64_json_int_lt", - left: int64(42), - right: json.Number("43"), - exp: -1, - }, - { - test: "int64_json_int_gt", - left: int64(42), - right: json.Number("41"), - exp: 1, - }, - { - test: "int64_json_float_eq", - left: int64(42), - right: json.Number("42.0"), - exp: 0, - }, - { - test: "int64_json_float_lt", - left: int64(42), - right: json.Number("42.1"), - exp: -1, - }, - { - test: "int64_json_float_gt", - left: int64(42), - right: json.Number("41.9"), - exp: 1, - }, - { - test: "int64_json_err", - left: int64(42), - right: json.Number("nope"), - panic: true, - }, - { - test: "float64_float64_eq", - left: float64(42), - right: float64(42.0), - exp: 0, - }, - { - test: "float64_float64_lt", - left: float64(42), - right: float64(42.1), - exp: -1, - }, - { - test: "float64_float64_gt", - left: float64(42), - right: float64(41.9), - exp: 1, - }, - { - test: "float64_int64_eq", - left: float64(42.0), - right: int64(42), - exp: 0, - }, - { - test: "float64_int64_lt", - left: float64(41.9), - right: int64(42), - exp: -1, - }, - { - test: "float64_int64_gt", - left: float64(42.1), - right: int64(42), - exp: 1, - }, - { - test: "float64_json_eq", - left: float64(42.0), - right: json.Number("42"), - exp: 0, - }, - { - test: "float64_json_lt", - left: float64(41.9), - right: json.Number("42.1"), - exp: -1, - }, - { - test: "float64_json_gt", - left: float64(42.1), - right: json.Number("41.9"), - exp: 1, - }, - { - test: "float64_json_err", - left: float64(42.1), - right: json.Number("nope"), - panic: true, - }, - { - test: "json_json_eq", - left: json.Number("42.0"), 
- right: json.Number("42"), - exp: 0, - }, - { - test: "json_json_lt", - left: json.Number("42.0"), - right: json.Number("42.1"), - exp: -1, - }, - { - test: "json_json_gt", - left: json.Number("42.1"), - right: json.Number("42"), - exp: 1, - }, - { - test: "json_int64_eq", - left: json.Number("42"), - right: int64(42), - exp: 0, - }, - { - test: "json_int64_lt", - left: json.Number("42.0"), - right: int64(43), - exp: -1, - }, - { - test: "json_int64_gt", - left: json.Number("42.1"), - right: int64(42), - exp: 1, - }, - { - test: "json_float64_eq", - left: json.Number("42.0"), - right: float64(42), - exp: 0, - }, - { - test: "json_float64_lt", - left: json.Number("41.9"), - right: float64(42), - exp: -1, - }, - { - test: "json_float64_gt", - left: json.Number("42.1"), - right: float64(42), - exp: 1, - }, - { - test: "json_err", - left: json.Number("nope"), - panic: true, - }, - { - test: "not_numeric", - left: "hi", - panic: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - if tc.panic { - a.Panics(func() { compareNumeric(tc.left, tc.right) }) - } else { - a.Equal(tc.exp, compareNumeric(tc.left, tc.right)) - } - }) - } -} diff --git a/path/exec/const.go b/path/exec/const.go deleted file mode 100644 index 2cb4145..0000000 --- a/path/exec/const.go +++ /dev/null @@ -1,149 +0,0 @@ -package exec - -import ( - "context" - "fmt" - - "github.com/theory/sqljson/path/ast" - "golang.org/x/exp/maps" // Switch to maps when go 1.22 dropped -) - -// execConstNode Executes node against value. -func (exec *Executor) execConstNode( - ctx context.Context, - node *ast.ConstNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - switch node.Const() { - case ast.ConstNull, ast.ConstTrue, ast.ConstFalse: - return exec.execLiteralConst(ctx, node, found) - case ast.ConstRoot: - defer exec.setTempBaseObject(exec.root, 0)() - return exec.executeNextItem(ctx, node, nil, exec.root, found) - case ast.ConstCurrent: - return exec.executeNextItem(ctx, node, nil, exec.current, found) - case ast.ConstAnyKey: - return exec.execAnyKey(ctx, node, value, found, unwrap) - case ast.ConstAnyArray: - return exec.execAnyArray(ctx, node, value, found) - case ast.ConstLast: - return exec.execLastConst(ctx, node, found) - } - - // Should only happen if a new constant ast.Constant is not added to the - // switch statement above. - return statusFailed, fmt.Errorf( - "%w: Unknown ConstNode %v", ErrInvalid, node.Const(), - ) -} - -// execLiteralConst handles the execution of a null or boolean node. -func (exec *Executor) execLiteralConst( - ctx context.Context, - node *ast.ConstNode, - found *valueList, -) (resultStatus, error) { - next := node.Next() - if next == nil && found == nil { - return statusOK, nil - } - - var v any - if node.Const() == ast.ConstNull { - v = nil - } else { - v = node.Const() == ast.ConstTrue - } - - return exec.executeNextItem(ctx, node, next, v, found) -} - -// execAnyKey handles execution of an ast.ConstAnyKey node. If value is an -// object, its values are passed to executeAnyItem(). If unwrap is true and -// value is an array, its values are unwrapped via -// [executeItemUnwrapTargetArray]. Otherwise it returns an error unless -// exec.ignoreStructuralErrors returns true. 
-func (exec *Executor) execAnyKey( - ctx context.Context, - node *ast.ConstNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - switch value := value.(type) { - case map[string]any: - return exec.executeAnyItem( - ctx, node.Next(), maps.Values(value), found, - 1, 1, 1, false, exec.autoUnwrap(), - ) - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - } - - if !exec.ignoreStructuralErrors { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_exec.c#L872 - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath wildcard member accessor can only be applied to an object", - ErrVerbose, - )) - } - - return statusNotFound, nil -} - -// execAnyArray executes node against value. If value's type is not []any but -// exec.autoWrap() returns true, it passes it to executeNextItem to be -// unwrapped. Otherwise it returns statusFailed and an error if -// exec.ignoreStructuralErrors is false, and statusNotFound if it is true. -func (exec *Executor) execAnyArray( - ctx context.Context, - node *ast.ConstNode, - value any, - found *valueList, -) (resultStatus, error) { - if value, ok := value.([]any); ok { - return exec.executeAnyItem(ctx, node.Next(), value, found, 1, 1, 1, false, exec.autoUnwrap()) - } - - if exec.autoWrap() { - return exec.executeNextItem(ctx, node, nil, value, found) - } - - if !exec.ignoreStructuralErrors { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_exec.c#L849 - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath wildcard array accessor can only be applied to an array", - ErrVerbose, - )) - } - - return statusNotFound, nil -} - -// execLastConst handles execution of the LAST node. Returns an error if -// execution is not currently part of an array subscript. 
-func (exec *Executor) execLastConst( - ctx context.Context, - node *ast.ConstNode, - found *valueList, -) (resultStatus, error) { - if exec.innermostArraySize < 0 { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_exec.c#L1241 - return statusFailed, fmt.Errorf( - "%w: evaluating jsonpath LAST outside of array subscript", - ErrExecution, - ) - } - - next := node.Next() - if next == nil && found == nil { - return statusOK, nil - } - - last := int64(exec.innermostArraySize - 1) - return exec.executeNextItem(ctx, node, next, last, found) -} diff --git a/path/exec/const_test.go b/path/exec/const_test.go deleted file mode 100644 index 4c70440..0000000 --- a/path/exec/const_test.go +++ /dev/null @@ -1,549 +0,0 @@ -package exec - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" -) - -func TestExecConstNode(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - base := kvBaseObject{addr: uintptr(42), id: -1} - current := []any{"hi", true} - - for _, tc := range []struct { - test string - node *ast.ConstNode - value any - find []any - unwrap bool - exp resultStatus - err string - isErr error - }{ - { - test: "null", - node: ast.NewConst(ast.ConstNull), - exp: statusOK, - find: []any{nil}, - }, - { - test: "true", - node: ast.NewConst(ast.ConstTrue), - exp: statusOK, - find: []any{true}, - }, - { - test: "false", - node: ast.NewConst(ast.ConstFalse), - exp: statusOK, - find: []any{false}, - }, - { - test: "root", - node: ast.NewConst(ast.ConstRoot), - exp: statusOK, - find: []any{path.Root()}, - }, - { - test: "current", - node: ast.NewConst(ast.ConstCurrent), - exp: statusOK, - find: []any{current}, - }, - { - test: "any_key", - node: ast.NewConst(ast.ConstAnyKey), - value: map[string]any{"hi": "x", "there": "x"}, - exp: statusOK, - find: []any{"x", "x"}, - }, - { - test: "any_key_array", - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there"}, - exp: statusNotFound, - find: []any{}, - }, - { - test: "any_key_array_unwrap", - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there"}, - unwrap: true, - exp: statusNotFound, - find: []any{}, - }, - { - test: "any_key_nested_array_unwrap", - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there", map[string]any{"x": int64(1), "y": int64(1)}}, - unwrap: true, - exp: statusOK, - find: []any{int64(1), int64(1)}, - }, - { - test: "any_array", - node: ast.NewConst(ast.ConstAnyArray), - value: []any{"hi", "there"}, - exp: statusOK, - find: []any{"hi", "there"}, - }, - { - test: "last", - node: ast.NewConst(ast.ConstLast), - exp: statusOK, - find: []any{int64(3)}, - }, - { - test: "unknown_const", - node: ast.NewConst(ast.Constant(-1)), - exp: statusFailed, - find: []any{}, - err: "exec invalid: Unknown ConstNode Constant(-1)", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Construct found. - var found *valueList - if tc.find != nil { - found = newList() - } - - // Construct executor. - e := newTestExecutor(path, nil, true, false) - e.root = path.Root() - e.baseObject = base - e.current = current - e.innermostArraySize = 4 - - // Execute execConstNode. - res, err := e.execConstNode(ctx, tc.node, tc.value, found, tc.unwrap) - a.Equal(tc.exp, res) - - // Base and current objects should be reset. 
- a.Equal(base, e.baseObject) - a.Equal(current, e.current) - - // Check found - if tc.find != nil { - a.Equal(tc.find, found.list) - } - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecLiteralConst(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - - for _, tc := range []struct { - test string - node ast.Node - find []any - exp resultStatus - err string - isErr error - }{ - { - test: "no_found", - node: ast.NewConst(ast.ConstNull), - exp: statusOK, - }, - { - test: "null", - node: ast.NewConst(ast.ConstNull), - exp: statusOK, - find: []any{nil}, - }, - { - test: "true", - node: ast.NewConst(ast.ConstTrue), - exp: statusOK, - find: []any{true}, - }, - { - test: "false", - node: ast.NewConst(ast.ConstFalse), - exp: statusOK, - find: []any{false}, - }, - { - test: "false_next", - node: ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstFalse), ast.NewMethod(ast.MethodString)}), - exp: statusOK, - find: []any{"false"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Construct found. - var found *valueList - if tc.find != nil { - found = newList() - } - - // Get the constant. - node, ok := tc.node.(*ast.ConstNode) - a.True(ok) - - // Construct executor. - e := newTestExecutor(path, nil, true, false) - - // Execute execLiteralConst. - res, err := e.execLiteralConst(ctx, node, found) - a.Equal(tc.exp, res) - - // Check found - if tc.find != nil { - a.Equal(tc.find, found.list) - } - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecAnyKey(t *testing.T) { - t.Parallel() - ctx := context.Background() - lax, _ := parser.Parse("$") - strict, _ := parser.Parse("strict $") - - for _, tc := range []struct { - test string - path *ast.AST - node *ast.ConstNode - value any - find []any - unwrap bool - strict bool - exp resultStatus - err string - isErr error - }{ - { - test: "any_key", - path: lax, - node: ast.NewConst(ast.ConstAnyKey), - value: map[string]any{"hi": "x", "there": "x"}, - exp: statusOK, - find: []any{"x", "x"}, - }, - { - test: "any_key_array", - path: lax, - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there"}, - exp: statusNotFound, - find: []any{}, - }, - { - test: "any_key_array_strict", - path: strict, - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there"}, - exp: statusFailed, - find: []any{}, - err: "exec: jsonpath wildcard member accessor can only be applied to an object", - isErr: ErrVerbose, - }, - { - test: "any_key_array_unwrap", - path: lax, - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there"}, - unwrap: true, - exp: statusNotFound, - find: []any{}, - }, - { - test: "any_key_nested_array_unwrap", - path: lax, - node: ast.NewConst(ast.ConstAnyKey), - value: []any{"hi", "there", map[string]any{"x": int64(1), "y": int64(1)}}, - unwrap: true, - exp: statusOK, - find: []any{int64(1), int64(1)}, - }, - { - test: "any_key_scalar", - path: lax, - node: ast.NewConst(ast.ConstAnyKey), - value: true, - exp: statusNotFound, - find: []any{}, - }, - { - test: "any_key_scalar_strict", - path: strict, - node: ast.NewConst(ast.ConstAnyKey), - value: true, - exp: statusFailed, - find: []any{}, - err: "exec: jsonpath wildcard member accessor can only be applied to an object", - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t 
*testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Construct found. - var found *valueList - if tc.find != nil { - found = newList() - } - - // Construct executor. - e := newTestExecutor(tc.path, nil, true, false) - - // Execute execAnyKey. - res, err := e.execAnyKey(ctx, tc.node, tc.value, found, tc.unwrap) - a.Equal(tc.exp, res) - - // Check found - if tc.find != nil { - a.Equal(tc.find, found.list) - } - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecAnyArray(t *testing.T) { - t.Parallel() - ctx := context.Background() - lax, _ := parser.Parse("$") - strict, _ := parser.Parse("strict $") - - for _, tc := range []struct { - test string - node ast.Node - path *ast.AST - ignore bool - value any - find []any - exp resultStatus - err string - isErr error - }{ - { - test: "array", - node: ast.NewConst(ast.ConstNull), - path: lax, - value: []any{true, false, nil}, - exp: statusOK, - find: []any{true, false, nil}, - }, - { - test: "array_next", - node: ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstNull), ast.NewMethod(ast.MethodString)}), - path: lax, - value: []any{true, false, float64(98.6)}, - exp: statusOK, - find: []any{"true", "false", "98.6"}, - }, - { - test: "array_next_err", - node: ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstNull), ast.NewMethod(ast.MethodString)}), - path: lax, - value: []any{true, false, nil}, - exp: statusFailed, - find: []any{"true", "false"}, - err: "exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value", - isErr: ErrVerbose, - }, - { - test: "auto_wrap", - node: ast.NewConst(ast.ConstNull), - path: lax, - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "no_auto_wrap_no_error", - node: ast.NewConst(ast.ConstNull), - path: strict, - ignore: true, - value: true, - exp: statusNotFound, - find: []any{}, - }, - { - test: "no_auto_wrap_strict", - node: ast.NewConst(ast.ConstNull), - path: strict, - value: true, - exp: statusFailed, - find: []any{}, - err: "exec: jsonpath wildcard array accessor can only be applied to an array", - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Construct found. - var found *valueList - if tc.find != nil { - found = newList() - } - - // Get the constant. - node, ok := tc.node.(*ast.ConstNode) - a.True(ok) - - // Construct executor. - e := newTestExecutor(tc.path, nil, true, false) - if tc.ignore { - e.ignoreStructuralErrors = true - } - - // Execute execAnyArray. - res, err := e.execAnyArray(ctx, node, tc.value, found) - a.Equal(tc.exp, res) - - // Check found - if tc.find != nil { - a.Equal(tc.find, found.list) - } - - // Check the error. 
- if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecLastConst(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - - for _, tc := range []struct { - test string - node ast.Node - size int - find []any - exp resultStatus - err string - isErr error - }{ - { - test: "outside_array_subscript", - node: ast.NewConst(ast.ConstLast), - size: -1, - exp: statusFailed, - err: "exec: evaluating jsonpath LAST outside of array subscript", - isErr: ErrExecution, - }, - { - test: "size_4", - node: ast.NewConst(ast.ConstLast), - size: 4, - exp: statusOK, - find: []any{int64(3)}, - }, - { - test: "no_found", - node: ast.NewConst(ast.ConstLast), - size: 4, - exp: statusOK, - }, - { - test: "size_6", - node: ast.NewConst(ast.ConstLast), - size: 6, - exp: statusOK, - find: []any{int64(5)}, - }, - { - test: "size_4_next", - node: ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstLast), ast.NewMethod(ast.MethodString)}), - size: 4, - exp: statusOK, - find: []any{"3"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Construct found. - var found *valueList - if tc.find != nil { - found = newList() - } - - // Get the constant. - node, ok := tc.node.(*ast.ConstNode) - a.True(ok) - - // Construct executor. - e := newTestExecutor(path, nil, true, false) - e.innermostArraySize = tc.size - - // Execute execLastConst. - res, err := e.execLastConst(ctx, node, found) - a.Equal(tc.exp, res) - - // Check found - if tc.find != nil { - a.Equal(tc.find, found.list) - } - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} diff --git a/path/exec/datetime.go b/path/exec/datetime.go deleted file mode 100644 index f4e7d8a..0000000 --- a/path/exec/datetime.go +++ /dev/null @@ -1,432 +0,0 @@ -package exec - -import ( - "context" - "fmt" - - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/types" -) - -// tzRequiredCast constructs an error reporting that type1 cannot be cast to -// type2 without time zone usage. -func tzRequiredCast(type1, type2 string) error { - return fmt.Errorf( - "%w: cannot convert value from %v to %v without time zone usage. HINT: Use WithTZ() option for time zone support", - ErrExecution, type1, type2, - ) -} - -// unknownDateTime returns 0 and an error reporting that val is not a known -// datetime type. -func unknownDateTime(val any) (int, error) { - return 0, fmt.Errorf( - "%w: unrecognized SQL/JSON datetime type %T", - ErrInvalid, val, - ) -} - -// compareDatetime performs a cross-type comparison of two datetime SQL/JSON -// items. Returns -2 if the items are incomparable. Returns an error if a cast -// requires a time zone but useTZ is false. -func compareDatetime(ctx context.Context, val1, val2 any, useTZ bool) (int, error) { - switch val1 := val1.(type) { - case *types.Date: - return compareDate(ctx, val1, val2, useTZ) - case *types.Time: - return compareTime(ctx, val1, val2, useTZ) - case *types.TimeTZ: - return compareTimeTZ(ctx, val1, val2, useTZ) - case *types.Timestamp: - return compareTimestamp(ctx, val1, val2, useTZ) - case *types.TimestampTZ: - return compareTimestampTZ(ctx, val1, val2, useTZ) - default: - return unknownDateTime(val1) - } -} - -// compareDate compares val1 to val2. Returns -2 if they're incomparable and -// an error if a cast requires a time zone but useTZ is false. 
-func compareDate(_ context.Context, val1 *types.Date, val2 any, useTZ bool) (int, error) { - switch val2 := val2.(type) { - case *types.Date: - return val1.Compare(val2.Time), nil - case *types.Timestamp: - return val1.Compare(val2.Time), nil - case *types.TimestampTZ: - if !useTZ { - return 0, tzRequiredCast("date", "timestamptz") - } - return val1.Compare(val2.Time), nil - case *types.Time, *types.TimeTZ: - // Incomparable types - return -2, nil - default: - return unknownDateTime(val2) - } -} - -// compareTime compares val1 to val2. Returns -2 if they're incomparable and -// an error if a cast requires a time zone but useTZ is false. -func compareTime(ctx context.Context, val1 *types.Time, val2 any, useTZ bool) (int, error) { - switch val2 := val2.(type) { - case *types.Time: - return val1.Compare(val2.Time), nil - case *types.TimeTZ: - if !useTZ { - return 0, tzRequiredCast("time", "timetz") - } - // Convert time to timetz using the context time zone. - ttz := val1.ToTimeTZ(ctx) - // There are special comparison rules for TimeTZ, so use its Compare - // function and invert the result. - return -val2.Compare(ttz.Time), nil - - case *types.Date, *types.Timestamp, *types.TimestampTZ: - // Incomparable types - return -2, nil - default: - return unknownDateTime(val2) - } -} - -// compareTimeTZ compares val1 to val2. Returns -2 if they're incomparable and -// an error if a cast requires a time zone but useTZ is false. -func compareTimeTZ(ctx context.Context, val1 *types.TimeTZ, val2 any, useTZ bool) (int, error) { - switch val2 := val2.(type) { - case *types.Time: - if !useTZ { - return 0, tzRequiredCast("time", "timetz") - } - // Convert time to timetz using the context time zone. - return val1.Compare(val2.ToTimeTZ(ctx).Time), nil - case *types.TimeTZ: - return val1.Compare(val2.Time), nil - case *types.Date, *types.Timestamp, *types.TimestampTZ: - // Incomparable types - return -2, nil - default: - return unknownDateTime(val2) - } -} - -// compareTimestamp compares val1 to val2. Returns -2 if they're incomparable -// and an error if a cast requires a time zone but useTZ is false. -func compareTimestamp(_ context.Context, val1 *types.Timestamp, val2 any, useTZ bool) (int, error) { - switch val2 := val2.(type) { - case *types.Date: - return val1.Compare(val2.Time), nil - case *types.Timestamp: - return val1.Compare(val2.Time), nil - case *types.TimestampTZ: - if !useTZ { - return 0, tzRequiredCast("timestamp", "timestamptz") - } - return val1.UTC().Compare(val2.Time), nil - case *types.Time, *types.TimeTZ: - // Incomparable types - return -2, nil - default: - return unknownDateTime(val2) - } -} - -// compareTimestampTZ compares val1 to val2. Returns -2 if they're -// incomparable and an error if a cast requires a time zone but useTZ is false. -func compareTimestampTZ(_ context.Context, val1 *types.TimestampTZ, val2 any, useTZ bool) (int, error) { - switch val2 := val2.(type) { - case *types.Date: - if !useTZ { - return 0, tzRequiredCast("date", "timestamptz") - } - return val1.Compare(val2.UTC()), nil - case *types.Timestamp: - if !useTZ { - return 0, tzRequiredCast("timestamp", "timestamptz") - } - return val1.Compare(val2.UTC()), nil - case *types.TimestampTZ: - return val1.Compare(val2.Time), nil - case *types.Time, *types.TimeTZ: - // Incomparable types - return -2, nil - default: - return unknownDateTime(val2) - } -} - -// executeDateTimeMethod implements .datetime() and related methods. -// -// Converts a string into a date/time value. The actual type is determined at -// run time. 
If an argument is provided to .datetime(), it should be used as -// the template to parse the string, but that feature is currently -// unimplemented, so it instead returns an error. -// -// In all other cases, it calls [types.ParseTime], which attempts a number of -// formats fitting ISO, and the first to succeed determines the type. -// -// .time(), .time_tz(), .timestamp(), .timestamp_tz() take an optional time -// precision. -func (exec *Executor) executeDateTimeMethod( - ctx context.Context, - node *ast.UnaryNode, - value any, - found *valueList, -) (resultStatus, error) { - op := node.Operator() - - datetime, ok := value.(string) - if !ok { - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v() can only be applied to a string", - ErrVerbose, op, - )) - } - - arg := node.Operand() - var timeVal types.DateTime - var err error - - // .datetime(template) has an argument; the rest of the methods don't have - // an argument. So we handle that separately. - if op == ast.UnaryDateTime && arg != nil { - err = exec.parseDateTimeFormat(datetime, arg) - } else { - timeVal, err = exec.parseDateTime(ctx, op, datetime, arg) - } - if err != nil { - return exec.returnError(err) - } - - // The parsing above processes the entire input string and returns the - // best-fitting datetime type. So, if this call is for a specific datatype, - // then we do the conversion here. Return an error for incompatible types. - switch op { - case ast.UnaryDateTime: - // Nothing to do for DATETIME - case ast.UnaryDate: - timeVal, err = exec.castDate(ctx, timeVal, datetime) - case ast.UnaryTime: - timeVal, err = exec.castTime(ctx, timeVal, datetime) - case ast.UnaryTimeTZ: - timeVal, err = exec.castTimeTZ(ctx, timeVal, datetime) - case ast.UnaryTimestamp: - timeVal, err = exec.castTimestamp(ctx, timeVal, datetime) - case ast.UnaryTimestampTZ: - timeVal, err = exec.castTimestampTZ(ctx, timeVal, datetime) - case ast.UnaryExists, ast.UnaryNot, ast.UnaryIsUnknown, ast.UnaryPlus, ast.UnaryMinus, ast.UnaryFilter: - return statusFailed, fmt.Errorf("%w: unrecognized jsonpath datetime method: %v", ErrInvalid, op) - } - - if err != nil { - return exec.returnError(err) - } - - next := node.Next() - if next == nil && found == nil { - return statusOK, nil - } - - return exec.executeNextItem(ctx, node, next, timeVal, found) -} - -// parseDateTimeFormat parses datetime with arg format and returns the -// resulting [types.DateTime] or an error. -// -// Or it will eventually. Currently it is unimplemented and returns an error. -func (exec *Executor) parseDateTimeFormat(_ string, _ ast.Node) error { - // func (exec *Executor) parseDateTimeFormat(datetime string, arg ast.Node) (types.DateTime, error) { - // XXX: Requires a format parser, so defer for now. - return fmt.Errorf( - "%w: .datetime(template) is not yet supported", - ErrExecution, - ) - - // var str *ast.StringNode - // str, ok := arg.(*ast.StringNode) - // if !ok { - // return nil, fmt.Errorf( - // "%w: invalid jsonpath item type for .datetime() argument", - // ErrExecution, - // ) - // } - // timeVal, ok := types.ParseDateTime(str.Text(), datetime) -} - -// parseDateTime extracts an optional precision from arg if it's not nil, then -// passes it along with datetime to [types.ParseTime] to parse datetime and -// apply precision to the resulting [types.DateTime] value. 
-func (exec *Executor) parseDateTime( - ctx context.Context, - op ast.UnaryOperator, - datetime string, - arg ast.Node, -) (types.DateTime, error) { - // Check for optional precision for methods other than .datetime() and - // .date() - precision := -1 - if op != ast.UnaryDateTime && op != ast.UnaryDate && arg != nil { - var err error - precision, err = getNodeInt32(arg, op.String()+"()", "time precision") - if err != nil { - return nil, err - } - - if precision < 0 { - return nil, fmt.Errorf( - "%w: time precision of jsonpath item method %v() is invalid", - ErrVerbose, op, - ) - } - - const maxTimestampPrecision = 6 - if precision > maxTimestampPrecision { - // pg: issues a warning - precision = maxTimestampPrecision - } - } - - // Parse the value. - timeVal, ok := types.ParseTime(ctx, datetime, precision) - if !ok { - return nil, fmt.Errorf( - `%w: %v format is not recognized: "%v"`, - ErrVerbose, op.String()[1:], datetime, - ) - } - - return timeVal, nil -} - -// notRecognized creates an error when the format of datetime is not able to -// be parsed into a [types.DateTime]. -func notRecognized(op ast.UnaryOperator, datetime string) error { - return fmt.Errorf( - `%w: %v format is not recognized: "%v"`, - ErrVerbose, op.String()[1:], datetime, - ) -} - -// castDate casts timeVal to [types.Date]. The datetime param is used in error -// messages. -func (exec *Executor) castDate(ctx context.Context, timeVal types.DateTime, datetime string) (*types.Date, error) { - // Convert result type to date - switch tv := timeVal.(type) { - case *types.Date: - // Nothing to do for DATE - return tv, nil - case *types.Time, *types.TimeTZ: - // Incompatible. - return nil, notRecognized(ast.UnaryDate, datetime) - case *types.Timestamp: - return tv.ToDate(ctx), nil - case *types.TimestampTZ: - if !exec.useTZ { - return nil, tzRequiredCast("timestamptz", "date") - } - return tv.ToDate(ctx), nil - default: - return nil, fmt.Errorf("%w: type %T not supported", ErrInvalid, tv) - } -} - -// castTime casts timeVal to [types.Time]. The datetime param is used in error -// messages. -func (exec *Executor) castTime(ctx context.Context, timeVal types.DateTime, datetime string) (*types.Time, error) { - switch tv := timeVal.(type) { - case *types.Date: - return nil, notRecognized(ast.UnaryTime, datetime) - case *types.Time: - // Nothing to do for time - return tv, nil - case *types.TimeTZ: - if !exec.useTZ { - return nil, tzRequiredCast("timetz", "time") - } - return tv.ToTime(ctx), nil - case *types.Timestamp: - return tv.ToTime(ctx), nil - case *types.TimestampTZ: - if !exec.useTZ { - return nil, tzRequiredCast("timestamptz", "time") - } - return tv.ToTime(ctx), nil - default: - return nil, fmt.Errorf("%w: type %T not supported", ErrInvalid, tv) - } -} - -// castTimeTZ casts timeVal to [types.TimeTZ]. The datetime param is used in -// error messages. -func (exec *Executor) castTimeTZ(ctx context.Context, timeVal types.DateTime, datetime string) (*types.TimeTZ, error) { - switch tv := timeVal.(type) { - case *types.Date, *types.Timestamp: - return nil, notRecognized(ast.UnaryTimeTZ, datetime) - case *types.Time: - if !exec.useTZ { - return nil, tzRequiredCast("time", "timetz") - } - return tv.ToTimeTZ(ctx), nil - case *types.TimeTZ: - // Nothing to do for TIMETZ - return tv, nil - case *types.TimestampTZ: - return tv.ToTimeTZ(ctx), nil - default: - return nil, fmt.Errorf("%w: type %T not supported", ErrInvalid, tv) - } -} - -// castTimestamp casts timeVal to [types.Timestamp]. 
The datetime param is -// used in error messages. -func (exec *Executor) castTimestamp( - ctx context.Context, - timeVal types.DateTime, - datetime string, -) (*types.Timestamp, error) { - switch tv := timeVal.(type) { - case *types.Date: - return tv.ToTimestamp(ctx), nil - case *types.Time, *types.TimeTZ: - return nil, notRecognized(ast.UnaryTimestamp, datetime) - case *types.Timestamp: - // Nothing to do for TIMESTAMP - return tv, nil - case *types.TimestampTZ: - if !exec.useTZ { - return nil, tzRequiredCast("timestamptz", "timestamp") - } - return tv.ToTimestamp(ctx), nil - default: - return nil, fmt.Errorf("%w: type %T not supported", ErrInvalid, tv) - } -} - -// castTimestampTZ casts timeVal to [types.TimestampTZ]. The datetime param is -// used in error messages. -func (exec *Executor) castTimestampTZ( - ctx context.Context, - timeVal types.DateTime, - datetime string, -) (*types.TimestampTZ, error) { - switch tv := timeVal.(type) { - case *types.Date: - if !exec.useTZ { - return nil, tzRequiredCast("date", "timestamptz") - } - return tv.ToTimestampTZ(ctx), nil - case *types.Time, *types.TimeTZ: - return nil, notRecognized(ast.UnaryTimestampTZ, datetime) - case *types.Timestamp: - if !exec.useTZ { - return nil, tzRequiredCast("timestamp", "timestamptz") - } - return tv.ToTimestampTZ(ctx), nil - case *types.TimestampTZ: - // Nothing to do for TIMESTAMPTZ - return tv, nil - default: - return nil, fmt.Errorf("%w: type %T not supported", ErrInvalid, tv) - } -} diff --git a/path/exec/datetime_test.go b/path/exec/datetime_test.go deleted file mode 100644 index ae21d54..0000000 --- a/path/exec/datetime_test.go +++ /dev/null @@ -1,1324 +0,0 @@ -package exec - -import ( - "context" - "errors" - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -func TestTZRequiredCast(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - t1 string - t2 string - }{ - { - test: "date_timestamptz", - t1: "date", - t2: "timestamptz", - }, - { - test: "time_timetz", - t1: "time", - t2: "timetz", - }, - { - test: "timestamp_timestamptz", - t1: "timestamp", - t2: "timestamptz", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - err := tzRequiredCast(tc.t1, tc.t2) - r.EqualError(err, fmt.Sprintf( - "exec: cannot convert value from %v to %v without time zone usage."+tzHint, - tc.t1, tc.t2, - )) - r.ErrorIs(err, ErrExecution) - }) - } -} - -func TestUnknownDateTime(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - val any - }{ - { - test: "string", - val: "foo", - }, - { - test: "array", - val: []any{}, - }, - { - test: "object", - val: map[string]any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - res, err := unknownDateTime(tc.val) - a.Equal(0, res) - r.EqualError( - err, - fmt.Sprintf("exec invalid: unrecognized SQL/JSON datetime type %T", tc.val), - ) - r.ErrorIs(err, ErrInvalid) - }) - } -} - -type testDatetimeCompare struct { - test string - val1 any - val2 any - useTZ bool - exp int - err error -} - -func (tc testDatetimeCompare) checkCompare(t *testing.T, res int, err error) { - t.Helper() - a := assert.New(t) - r := require.New(t) - - a.Equal(tc.exp, res) - if tc.err == nil { - a.NoError(err) - } else { - r.EqualError(err, tc.err.Error()) - if errors.Is(tc.err, 
ErrExecution) { - r.ErrorIs(err, ErrExecution) - } else { - r.ErrorIs(err, ErrInvalid) - } - } -} - -func stableTime() time.Time { - return time.Date(2024, time.June, 6, 1, 48, 22, 939932000, time.FixedZone("", 0)) -} - -func TestCompareDatetime(t *testing.T) { - t.Parallel() - moment := stableTime() - ctx := context.Background() - - for _, tc := range []testDatetimeCompare{ - { - test: "date_date", - val1: types.NewDate(moment), - val2: types.NewDate(moment), - }, - { - test: "date_timestamp", - val1: types.NewDate(moment), - val2: types.NewTimestamp(moment), - exp: -1, - }, - { - test: "date_timestamptz", - val1: types.NewDate(moment), - val2: types.NewTimestampTZ(ctx, moment), - err: tzRequiredCast("date", "timestamptz"), - }, - { - test: "date_timestamptz_cast", - val1: types.NewDate(moment), - val2: types.NewTimestampTZ(ctx, moment), - useTZ: true, - exp: -1, - }, - { - test: "time_time", - val1: types.NewTime(moment), - val2: types.NewTime(moment), - }, - { - test: "time_timetz", - val1: types.NewTime(moment), - val2: types.NewTimeTZ(moment), - err: tzRequiredCast("time", "timetz"), - }, - { - test: "time_timetz_cast", - val1: types.NewTime(moment), - val2: types.NewTimeTZ(moment), - useTZ: true, - exp: 0, - }, - { - test: "timetz_timetz", - val1: types.NewTimeTZ(moment), - val2: types.NewTimeTZ(moment), - }, - { - test: "timetz_time", - val1: types.NewTimeTZ(moment), - val2: types.NewTime(moment), - err: tzRequiredCast("time", "timetz"), - }, - { - test: "timetz_time_cast", - val1: types.NewTimeTZ(moment), - val2: types.NewTime(moment), - useTZ: true, - exp: 0, - }, - { - test: "timestamp_timestamp", - val1: types.NewTimestamp(moment), - val2: types.NewTimestamp(moment), - }, - { - test: "timestamp_date", - val1: types.NewTimestamp(moment), - val2: types.NewDate(moment), - exp: 1, - }, - { - test: "timestamp_timestamptz", - val1: types.NewTimestamp(moment), - val2: types.NewTimestampTZ(ctx, moment), - err: tzRequiredCast("timestamp", "timestamptz"), - }, - { - test: "timestamp_timestamptz_cast", - val1: types.NewTimestamp(moment), - val2: types.NewTimestampTZ(ctx, moment), - useTZ: true, - }, - { - test: "timestamptz_timestamptz", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTimestampTZ(ctx, moment), - }, - { - test: "timestamptz_time", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTime(moment), - exp: -2, - }, - { - test: "timestamptz_timestamp", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTimestamp(moment), - err: tzRequiredCast("timestamp", "timestamptz"), - }, - { - test: "timestamptz_timestamp_cast", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTimestamp(moment), - useTZ: true, - }, - { - test: "unknown_type", - val1: "not a timestamp", - err: errors.New("exec invalid: unrecognized SQL/JSON datetime type string"), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - res, err := compareDatetime(ctx, tc.val1, tc.val2, tc.useTZ) - tc.checkCompare(t, res, err) - }) - } -} - -func TestCompareDate(t *testing.T) { - t.Parallel() - moment := stableTime() - ctx := context.Background() - - for _, tc := range []testDatetimeCompare{ - { - test: "date_date", - val1: types.NewDate(moment), - val2: types.NewDate(moment), - }, - { - test: "date_timestamp", - val1: types.NewDate(moment), - val2: types.NewTimestamp(moment), - exp: -1, - }, - { - test: "date_timestamptz", - val1: types.NewDate(moment), - val2: types.NewTimestampTZ(ctx, moment), - err: tzRequiredCast("date", "timestamptz"), - }, - { - test: 
"date_timestamptz_cast", - val1: types.NewDate(moment), - val2: types.NewTimestampTZ(ctx, moment), - useTZ: true, - exp: -1, - }, - { - test: "date_time", - val1: types.NewDate(moment), - val2: types.NewTime(moment), - exp: -2, - }, - { - test: "date_timetz", - val1: types.NewDate(moment), - val2: types.NewTime(moment), - exp: -2, - }, - { - test: "unknown_type", - val1: types.NewDate(moment), - val2: "not a timestamp", - err: errors.New("exec invalid: unrecognized SQL/JSON datetime type string"), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - val1, ok := tc.val1.(*types.Date) - a.True(ok) - res, err := compareDate(ctx, val1, tc.val2, tc.useTZ) - tc.checkCompare(t, res, err) - }) - } -} - -func TestCompareTime(t *testing.T) { - t.Parallel() - - moment := stableTime() - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - for _, tc := range []testDatetimeCompare{ - { - test: "time_time", - val1: types.NewTime(moment), - val2: types.NewTime(moment), - }, - { - test: "time_timetz", - val1: types.NewTime(moment), - val2: types.NewTimeTZ(moment), - err: tzRequiredCast("time", "timetz"), - }, - { - test: "time_timetz_cast", - val1: types.NewTime(moment), - val2: types.NewTimeTZ(moment), - useTZ: true, - exp: 1, - }, - { - test: "time_date", - val1: types.NewTime(moment), - val2: types.NewDate(moment), - exp: -2, - }, - { - test: "time_timestamp", - val1: types.NewTime(moment), - val2: types.NewTimestamp(moment), - exp: -2, - }, - { - test: "time_timestamptz", - val1: types.NewTime(moment), - val2: types.NewTimestampTZ(ctx, moment), - exp: -2, - }, - { - test: "unknown_type", - val1: types.NewTime(moment), - val2: "not a timestamp", - err: errors.New("exec invalid: unrecognized SQL/JSON datetime type string"), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - val1, ok := tc.val1.(*types.Time) - a.True(ok) - res, err := compareTime(ctx, val1, tc.val2, tc.useTZ) - tc.checkCompare(t, res, err) - }) - } -} - -func TestCompareTimeTZ(t *testing.T) { - t.Parallel() - - moment := stableTime() - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - for _, tc := range []testDatetimeCompare{ - { - test: "timetz_timetz", - val1: types.NewTimeTZ(moment), - val2: types.NewTimeTZ(moment), - }, - { - test: "timetz_time", - val1: types.NewTimeTZ(moment), - val2: types.NewTime(moment), - err: tzRequiredCast("time", "timetz"), - }, - { - test: "timetz_time_cast", - val1: types.NewTimeTZ(moment), - val2: types.NewTime(moment), - useTZ: true, - exp: -1, - }, - { - test: "timetz_date", - val1: types.NewTimeTZ(moment), - val2: types.NewDate(moment), - exp: -2, - }, - { - test: "timetz_timestamp", - val1: types.NewTimeTZ(moment), - val2: types.NewTimestamp(moment), - exp: -2, - }, - { - test: "timetz_timestamptz", - val1: types.NewTimeTZ(moment), - val2: types.NewTimestampTZ(ctx, moment), - exp: -2, - }, - { - test: "unknown_type", - val1: types.NewTimeTZ(moment), - val2: "not a timestamp", - err: errors.New("exec invalid: unrecognized SQL/JSON datetime type string"), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - val1, ok := tc.val1.(*types.TimeTZ) - a.True(ok) - res, err := compareTimeTZ(ctx, val1, tc.val2, tc.useTZ) - tc.checkCompare(t, res, err) - }) - } -} - -func TestCompareTimestamp(t *testing.T) { - t.Parallel() - moment := stableTime() - ctx := 
context.Background() - - for _, tc := range []testDatetimeCompare{ - { - test: "timestamp_timestamp", - val1: types.NewTimestamp(moment), - val2: types.NewTimestamp(moment), - }, - { - test: "timestamp_date", - val1: types.NewTimestamp(moment), - val2: types.NewDate(moment), - exp: 1, - }, - { - test: "timestamp_timestamptz", - val1: types.NewTimestamp(moment), - val2: types.NewTimestampTZ(ctx, moment), - err: tzRequiredCast("timestamp", "timestamptz"), - }, - { - test: "timestamp_timestamptz_cast", - val1: types.NewTimestamp(moment), - val2: types.NewTimestampTZ(ctx, moment), - useTZ: true, - }, - { - test: "timestamp_time", - val1: types.NewTimestamp(moment), - val2: types.NewTime(moment), - exp: -2, - }, - { - test: "timestamp_timetz", - val1: types.NewTimestamp(moment), - val2: types.NewTimeTZ(moment), - exp: -2, - }, - { - test: "unknown_type", - val1: types.NewTimestamp(moment), - val2: "not a timestamp", - err: errors.New("exec invalid: unrecognized SQL/JSON datetime type string"), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - val1, ok := tc.val1.(*types.Timestamp) - a.True(ok) - res, err := compareTimestamp(ctx, val1, tc.val2, tc.useTZ) - tc.checkCompare(t, res, err) - }) - } -} - -func TestCompareTimestampTZ(t *testing.T) { - t.Parallel() - moment := stableTime() - ctx := context.Background() - - for _, tc := range []testDatetimeCompare{ - { - test: "timestamptz_timestamptz", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTimestampTZ(ctx, moment), - }, - { - test: "timestamptz_timestamp", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTimestamp(moment), - err: tzRequiredCast("timestamp", "timestamptz"), - }, - { - test: "timestamptz_timestamp_cast", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTimestamp(moment), - useTZ: true, - }, - { - test: "timestamptz_date", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewDate(moment), - err: tzRequiredCast("date", "timestamptz"), - }, - { - test: "timestamptz_date_cast", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewDate(moment), - useTZ: true, - exp: 1, - }, - { - test: "timestamptz_time", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTime(moment), - exp: -2, - }, - { - test: "timestamptz_timetz", - val1: types.NewTimestampTZ(ctx, moment), - val2: types.NewTime(moment), - exp: -2, - }, - { - test: "unknown_type", - val1: types.NewTimestampTZ(ctx, moment), - val2: "not a timestamp", - err: errors.New("exec invalid: unrecognized SQL/JSON datetime type string"), - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - val1, ok := tc.val1.(*types.TimestampTZ) - a.True(ok) - res, err := compareTimestampTZ(ctx, val1, tc.val2, tc.useTZ) - tc.checkCompare(t, res, err) - }) - } -} - -func TestExecuteDateTimeMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - - for _, tc := range []struct { - test string - node ast.Node - value any - silent bool - find []any - exp resultStatus - err string - isErr error - }{ - { - test: "not_string", - node: ast.NewUnary(ast.UnaryDateTime, nil), - value: true, - exp: statusFailed, - err: `exec: jsonpath item method .datetime() can only be applied to a string`, - isErr: ErrVerbose, - }, - { - test: "datetime_format_unsupported", - node: ast.NewUnary(ast.UnaryDateTime, ast.NewString("YYYY")), - value: "2024-06-05", - exp: statusFailed, - err: `exec: .datetime(template) is not yet supported`, - isErr: ErrExecution, - 
}, - { - test: "datetime_parse_failure", - node: ast.NewUnary(ast.UnaryDateTime, nil), - value: "nope", - exp: statusFailed, - err: `exec: datetime format is not recognized: "nope"`, - isErr: ErrExecution, - }, - { - test: "datetime_parse_failure_silent", - node: ast.NewUnary(ast.UnaryDateTime, nil), - value: "nope", - exp: statusFailed, - silent: true, - }, - { - test: "datetime_parse_success", - node: ast.NewUnary(ast.UnaryDateTime, nil), - value: "2024-06-05", - exp: statusOK, - find: []any{types.NewDate(time.Date(2024, 6, 5, 0, 0, 0, 0, time.UTC))}, - }, - { - test: "date_parse_success", - node: ast.NewUnary(ast.UnaryDate, nil), - value: "2024-06-05", - exp: statusOK, - find: []any{types.NewDate(time.Date(2024, 6, 5, 0, 0, 0, 0, time.UTC))}, - }, - { - test: "date_parse_fail", - node: ast.NewUnary(ast.UnaryDate, nil), - value: "nope", - exp: statusFailed, - err: `exec: date format is not recognized: "nope"`, - isErr: ErrExecution, - }, - { - test: "date_parse_fail_silent", - node: ast.NewUnary(ast.UnaryDate, nil), - value: "nope", - exp: statusFailed, - silent: true, - }, - { - test: "date_parse_cast", - node: ast.NewUnary(ast.UnaryDate, nil), - value: "2024-06-05T12:32:42", - exp: statusOK, - find: []any{types.NewDate(time.Date(2024, 6, 5, 0, 0, 0, 0, time.UTC))}, - }, - { - test: "time_parse_success", - node: ast.NewUnary(ast.UnaryTime, nil), - value: "12:32:43", - exp: statusOK, - find: []any{types.NewTime(time.Date(0, 1, 1, 12, 32, 43, 0, time.UTC))}, - }, - { - test: "time_parse_fail", - node: ast.NewUnary(ast.UnaryTime, nil), - value: "nope", - exp: statusFailed, - err: `exec: time format is not recognized: "nope"`, - isErr: ErrExecution, - }, - { - test: "time_parse_fail_silent", - node: ast.NewUnary(ast.UnaryTime, nil), - value: "nope", - exp: statusFailed, - silent: true, - }, - { - test: "time_parse_cast", - node: ast.NewUnary(ast.UnaryTime, nil), - value: "2024-06-05T12:32:42", - exp: statusOK, - find: []any{types.NewTime(time.Date(0, 1, 1, 12, 32, 42, 0, time.UTC))}, - }, - { - test: "timetz_parse_success", - node: ast.NewUnary(ast.UnaryTimeTZ, nil), - value: "12:32:43+01", - exp: statusOK, - find: []any{types.NewTimeTZ(time.Date(0, 1, 1, 12, 32, 43, 0, time.FixedZone("", 60*60)))}, - }, - { - test: "timetz_parse_fail", - node: ast.NewUnary(ast.UnaryTimeTZ, nil), - value: "nope", - exp: statusFailed, - err: `exec: time_tz format is not recognized: "nope"`, - isErr: ErrExecution, - }, - { - test: "timetz_parse_fail_silent", - node: ast.NewUnary(ast.UnaryTimeTZ, nil), - value: "nope", - exp: statusFailed, - silent: true, - }, - { - test: "timetz_parse_cast", - node: ast.NewUnary(ast.UnaryTimeTZ, nil), - value: "2024-06-05T12:32:42Z", - exp: statusOK, - find: []any{ - types.NewTimestampTZ( - ctx, time.Date(2024, 6, 5, 12, 32, 42, 0, time.FixedZone("", 0)), - ).ToTimeTZ(ctx), - }, - }, - { - test: "timestamp_parse_success", - node: ast.NewUnary(ast.UnaryTimestamp, nil), - value: "2024-06-05T12:32:43", - exp: statusOK, - find: []any{types.NewTimestamp(time.Date(2024, 6, 5, 12, 32, 43, 0, time.FixedZone("", 0)))}, - }, - { - test: "timestamp_parse_fail", - node: ast.NewUnary(ast.UnaryTimestamp, nil), - value: "nope", - exp: statusFailed, - err: `exec: timestamp format is not recognized: "nope"`, - isErr: ErrExecution, - }, - { - test: "timestamp_parse_fail_silent", - node: ast.NewUnary(ast.UnaryTimestamp, nil), - value: "nope", - exp: statusFailed, - silent: true, - }, - { - test: "timestamp_parse_cast", - node: ast.NewUnary(ast.UnaryTimestamp, nil), - value: "2024-06-05", - exp: 
statusOK, - find: []any{types.NewTimestamp(time.Date(2024, 6, 5, 0, 0, 0, 0, time.UTC))}, - }, - { - test: "timestamptz_parse_success", - node: ast.NewUnary(ast.UnaryTimestampTZ, nil), - value: "2024-06-05T12:32:43+01", - exp: statusOK, - find: []any{types.NewTimestampTZ(ctx, time.Date(2024, 6, 5, 12, 32, 43, 0, time.FixedZone("", 60*60)))}, - }, - { - test: "timestamptz_parse_fail", - node: ast.NewUnary(ast.UnaryTimestampTZ, nil), - value: "nope", - exp: statusFailed, - err: `exec: timestamp_tz format is not recognized: "nope"`, - isErr: ErrExecution, - }, - { - test: "timestamptz_parse_fail_silent", - node: ast.NewUnary(ast.UnaryTimestampTZ, nil), - value: "nope", - exp: statusFailed, - silent: true, - }, - { - test: "timestamptz_parse_cast_fail", - node: ast.NewUnary(ast.UnaryTimestampTZ, nil), - value: "2024-06-05T12:32:43", - exp: statusFailed, - find: []any{}, - err: "exec: cannot convert value from timestamp to timestamptz without time zone usage." + tzHint, - isErr: ErrExecution, - }, - { - test: "date_no_found", - node: ast.NewUnary(ast.UnaryDate, nil), - value: "2024-06-05", - exp: statusOK, - }, - { - test: "date_parse_with_next", - node: ast.LinkNodes([]ast.Node{ast.NewUnary(ast.UnaryDate, nil), ast.NewMethod(ast.MethodString)}), - value: "2024-06-05", - exp: statusOK, - find: []any{"2024-06-05"}, - }, - { - test: "unary_not_datetime", - node: ast.NewUnary(ast.UnaryNot, nil), - value: "2024-06-05", - exp: statusFailed, - err: `exec invalid: unrecognized jsonpath datetime method: !`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Construct found. - var found *valueList - if tc.find != nil { - found = newList() - } - - // Should have UnaryNode - node, ok := tc.node.(*ast.UnaryNode) - a.True(ok) - - // Test executeDateTimeMethod with the root node set to tc.value. - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - if tc.silent { - e.verbose = false - } - res, err := e.executeDateTimeMethod(ctx, node, tc.value, found) - a.Equal(tc.exp, res) - - // Check found - if tc.find != nil { - a.Equal(tc.find, found.list) - } - - // Check the error. 
- if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestParseDateTimeFormat(t *testing.T) { - t.Parallel() - r := require.New(t) - - e := &Executor{} - err := e.parseDateTimeFormat("", nil) - r.EqualError(err, "exec: .datetime(template) is not yet supported") - r.ErrorIs(err, ErrExecution) -} - -func TestParseDateTime(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - - for _, tc := range []struct { - test string - op ast.UnaryOperator - value string - arg ast.Node - exp types.DateTime - err string - isErr error - }{ - { - test: "invalid_precision", - op: ast.UnaryTime, - arg: ast.NewString("hi"), - err: "exec: invalid jsonpath item type for .time() time precision", - isErr: ErrExecution, - }, - { - test: "negative_precision", - op: ast.UnaryTime, - arg: ast.NewInteger("-1"), - err: "exec: time precision of jsonpath item method .time() is invalid", - isErr: ErrExecution, - }, - { - test: "max_precision_six", - op: ast.UnaryTime, - arg: ast.NewInteger("9"), - value: "14:15:31.78599685301", - exp: types.NewTime(time.Date(0, 1, 1, 14, 15, 31, 785997000, time.UTC)), - }, - { - test: "precision_three", - op: ast.UnaryTime, - arg: ast.NewInteger("3"), - value: "14:15:31.78599685301", - exp: types.NewTime(time.Date(0, 1, 1, 14, 15, 31, 786000000, time.UTC)), - }, - { - test: "format_not_recognized", - op: ast.UnaryTime, - value: "nope", - err: `exec: time format is not recognized: "nope"`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Test parseDateTime. - e := newTestExecutor(path, nil, true, false) - res, err := e.parseDateTime(ctx, tc.op, tc.value, tc.arg) - a.Equal(tc.exp, res) - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestNotRecognized(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op ast.UnaryOperator - typ string - val string - }{ - { - test: "date_nope", - op: ast.UnaryDate, - typ: "date", - val: "nope", - }, - { - test: "timestamp_time", - op: ast.UnaryTimestamp, - typ: "timestamp", - val: "12:34:21", - }, - { - test: "timestamptz_time", - op: ast.UnaryTimestampTZ, - typ: "timestamp_tz", - val: "12:34:21", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - err := notRecognized(tc.op, tc.val) - r.EqualError( - err, - fmt.Sprintf(`exec: %v format is not recognized: "%v"`, tc.typ, tc.val), - ) - r.ErrorIs(err, ErrVerbose) - }) - } -} - -type testDatetimeCast struct { - test string - val types.DateTime - str string - useTZ bool - exp types.DateTime - err string - isErr error -} - -func (tc testDatetimeCast) run(t *testing.T, cast func(*Executor) (types.DateTime, error)) { - t.Helper() - a := assert.New(t) - r := require.New(t) - - // Test castDate. - e := &Executor{} - e.useTZ = tc.useTZ - res, err := cast(e) - a.Equal(tc.exp, res) - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } -} - -// To test the handling of unknown types.DateTime types. 
-type mockDateTime struct{} - -func (mockDateTime) GoTime() time.Time { return time.Now() } -func (mockDateTime) String() string { return "" } -func TestCastDate(t *testing.T) { - t.Parallel() - moment := stableTime() - var nilDate *types.Date - ctx := context.Background() - - for _, tc := range []testDatetimeCast{ - { - test: "date", - val: types.NewDate(moment), - exp: types.NewDate(moment), - }, - { - test: "time", - val: types.NewTime(moment), - str: "a datetime string", - exp: nilDate, - err: `exec: date format is not recognized: "a datetime string"`, - isErr: ErrVerbose, - }, - { - test: "timetz", - val: types.NewTimeTZ(moment), - str: "a datetime string", - exp: nilDate, - err: `exec: date format is not recognized: "a datetime string"`, - isErr: ErrVerbose, - }, - { - test: "timestamp", - val: types.NewTimestamp(moment), - exp: types.NewDate(moment), - }, - { - test: "timestamptz", - val: types.NewTimestampTZ(ctx, moment), - exp: nilDate, - err: "exec: cannot convert value from timestamptz to date without time zone usage." + tzHint, - isErr: ErrExecution, - }, - { - test: "timestamptz_cast", - val: types.NewTimestampTZ(ctx, moment), - exp: types.NewDate(moment.UTC()), - useTZ: true, - }, - { - test: "unknown_datetime_type", - val: mockDateTime{}, - exp: nilDate, - err: "exec invalid: type exec.mockDateTime not supported", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - tc.run(t, func(e *Executor) (types.DateTime, error) { - return e.castDate(ctx, tc.val, tc.str) - }) - }) - } -} - -func TestCastTime(t *testing.T) { - t.Parallel() - moment := stableTime() - var nilTime *types.Time - ctx := context.Background() - - for _, tc := range []testDatetimeCast{ - { - test: "time", - val: types.NewTime(moment), - exp: types.NewTime(moment), - }, - { - test: "date", - val: types.NewDate(moment), - str: "hi", - exp: nilTime, - err: `exec: time format is not recognized: "hi"`, - isErr: ErrVerbose, - }, - { - test: "timetz", - val: types.NewTimeTZ(moment), - exp: nilTime, - err: "exec: cannot convert value from timetz to time without time zone usage." + tzHint, - isErr: ErrExecution, - }, - { - test: "timetz_cast", - val: types.NewTimeTZ(moment), - exp: types.NewTime(moment), - useTZ: true, - }, - { - test: "timestamp", - val: types.NewTimestamp(moment), - exp: types.NewTime(moment), - }, - { - test: "timestamptz", - val: types.NewTimestampTZ(ctx, moment), - exp: nilTime, - err: "exec: cannot convert value from timestamptz to time without time zone usage." 
+ tzHint, - isErr: ErrExecution, - }, - { - test: "timestamptz_cast", - val: types.NewTimestampTZ(ctx, moment), - exp: types.NewTime(moment.UTC()), - useTZ: true, - }, - { - test: "unknown_datetime_type", - val: mockDateTime{}, - exp: nilTime, - err: "exec invalid: type exec.mockDateTime not supported", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - tc.run(t, func(e *Executor) (types.DateTime, error) { - return e.castTime(ctx, tc.val, tc.str) - }) - }) - } -} - -func TestCastTimeTZ(t *testing.T) { - t.Parallel() - moment := stableTime() - var nilTimeTZ *types.TimeTZ - ctx := context.Background() - - for _, tc := range []testDatetimeCast{ - { - test: "timetz", - val: types.NewTimeTZ(moment), - exp: types.NewTimeTZ(moment), - }, - { - test: "date", - val: types.NewDate(moment), - str: "hi", - exp: nilTimeTZ, - err: `exec: time_tz format is not recognized: "hi"`, - isErr: ErrVerbose, - }, - { - test: "time", - val: types.NewTime(moment), - exp: nilTimeTZ, - err: "exec: cannot convert value from time to timetz without time zone usage." + tzHint, - isErr: ErrExecution, - }, - { - test: "time_cast", - val: types.NewTime(moment), - exp: types.NewTimeTZ(time.Date( - 0, 1, 1, - moment.Hour(), moment.Minute(), moment.Second(), moment.Nanosecond(), - time.UTC, - )), - useTZ: true, - }, - { - test: "timestamp", - val: types.NewTimestamp(moment), - str: "hi", - exp: nilTimeTZ, - err: `exec: time_tz format is not recognized: "hi"`, - isErr: ErrVerbose, - }, - { - test: "timestamptz", - val: types.NewTimestampTZ(ctx, moment), - exp: types.NewTimestampTZ(ctx, moment).ToTimeTZ(ctx), - }, - { - test: "unknown_datetime_type", - val: mockDateTime{}, - exp: nilTimeTZ, - err: "exec invalid: type exec.mockDateTime not supported", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - tc.run(t, func(e *Executor) (types.DateTime, error) { - return e.castTimeTZ(ctx, tc.val, tc.str) - }) - }) - } -} - -func TestCastTimestamp(t *testing.T) { - t.Parallel() - moment := stableTime() - var nilTimestamp *types.Timestamp - ctx := context.Background() - - for _, tc := range []testDatetimeCast{ - { - test: "timestamp", - val: types.NewTimestamp(moment), - exp: types.NewTimestamp(moment), - }, - { - test: "date", - val: types.NewDate(moment), - exp: types.NewTimestamp(types.NewDate(moment).GoTime()), - }, - { - test: "time", - val: types.NewTime(moment), - exp: nilTimestamp, - str: "foo", - err: `exec: timestamp format is not recognized: "foo"`, - isErr: ErrVerbose, - }, - { - test: "timetz", - val: types.NewTimeTZ(moment), - exp: nilTimestamp, - str: "bar", - err: `exec: timestamp format is not recognized: "bar"`, - isErr: ErrVerbose, - }, - { - test: "timestamptz", - val: types.NewTimestampTZ(ctx, moment), - exp: nilTimestamp, - err: "exec: cannot convert value from timestamptz to timestamp without time zone usage." 
+ tzHint, - isErr: ErrExecution, - }, - { - test: "timestamptz_cast", - val: types.NewTimestampTZ(ctx, moment), - exp: types.NewTimestamp(moment.UTC()), - useTZ: true, - }, - { - test: "unknown_datetime_type", - val: mockDateTime{}, - exp: nilTimestamp, - err: "exec invalid: type exec.mockDateTime not supported", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - tc.run(t, func(e *Executor) (types.DateTime, error) { - return e.castTimestamp(ctx, tc.val, tc.str) - }) - }) - } -} - -func TestCastTimestampTZ(t *testing.T) { - t.Parallel() - moment := stableTime() - var nilTimestampTZ *types.TimestampTZ - ctx := context.Background() - - for _, tc := range []testDatetimeCast{ - { - test: "timestamptz", - val: types.NewTimestampTZ(ctx, moment), - exp: types.NewTimestampTZ(ctx, moment), - }, - { - test: "date", - val: types.NewDate(moment), - exp: nilTimestampTZ, - err: "exec: cannot convert value from date to timestamptz without time zone usage." + tzHint, - isErr: ErrExecution, - }, - { - test: "date_cast", - val: types.NewDate(moment), - exp: types.NewDate(moment).ToTimestampTZ(ctx), - useTZ: true, - }, - { - test: "time", - val: types.NewTime(moment), - exp: nilTimestampTZ, - str: "foo", - err: `exec: timestamp_tz format is not recognized: "foo"`, - isErr: ErrVerbose, - }, - { - test: "timetz", - val: types.NewTimeTZ(moment), - exp: nilTimestampTZ, - str: "bar", - err: `exec: timestamp_tz format is not recognized: "bar"`, - isErr: ErrVerbose, - }, - { - test: "timestamp", - val: types.NewTimestamp(moment), - exp: nilTimestampTZ, - err: "exec: cannot convert value from timestamp to timestamptz without time zone usage." + tzHint, - isErr: ErrExecution, - }, - { - test: "timestamp_cast", - val: types.NewTimestamp(moment), - exp: types.NewTimestampTZ(ctx, moment.UTC()), - useTZ: true, - }, - { - test: "unknown_datetime_type", - val: mockDateTime{}, - exp: nilTimestampTZ, - err: "exec invalid: type exec.mockDateTime not supported", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - tc.run(t, func(e *Executor) (types.DateTime, error) { - return e.castTimestampTZ(ctx, tc.val, tc.str) - }) - }) - } -} diff --git a/path/exec/exec.go b/path/exec/exec.go deleted file mode 100644 index 5095c8b..0000000 --- a/path/exec/exec.go +++ /dev/null @@ -1,303 +0,0 @@ -// Package exec provides the routines for SQL/JSON path execution. -package exec - -import ( - "context" - "errors" - "fmt" - - "github.com/theory/sqljson/path/ast" -) - -// Things to improve or document as different: -// - .datetime(template) -// - Allow single-digit tz offsets, e.g., `+1` instead of `+01` -// - Allow space between seconds and offset -// - Years > 9999? -// - Improve .keyvalue() offsets for arrays? -// - Less accuracy than Postgres NUMERICs: Switch to -// github.com/shopspring/decimal? -// - Go regexp package varies from Postgres regex -// - Implement interfaces to be compatible with the SQL-standard -// json_exists(), json_query(), and json_value() functions added in -// PostgreSQL 17. - -// Vars represents JSON path variables and their values. -type Vars map[string]any - -var ( - // ErrExecution errors denote runtime execution errors. - ErrExecution = errors.New("exec") - - // ErrVerbose errors are execution errors that can be suppressed by - // [WithSilent]. - ErrVerbose = fmt.Errorf("%w", ErrExecution) - - // ErrInvalid errors denote invalid or unexpected execution. Generally - // internal-only. 
- ErrInvalid = errors.New("exec invalid") -) - -//nolint:gochecknoglobals,staticcheck -var ( - // NULL is returned when Postgres would return NULL from Match and Exists. - NULL = errors.New("NULL") -) - -// resultStatus represents the result of jsonpath expression evaluation. -type resultStatus uint8 - -const ( - statusOK resultStatus = iota - statusNotFound - statusFailed -) - -// String returns a string representation of s. -func (s resultStatus) String() string { - switch s { - case statusOK: - return "OK" - case statusNotFound: - return "NOT_FOUND" - case statusFailed: - return "FAILED" - default: - return "UNKNOWN_RESULT_STATUS" - } -} - -// failed returns true when s is statusFailed. -func (s resultStatus) failed() bool { - return s == statusFailed -} - -// valueList holds a list of jsonb values optimized for a single-value list. -type valueList struct { - list []any -} - -// newList creates a valueList with space allocated a single value. -func newList() *valueList { - return &valueList{list: make([]any, 0, 1)} -} - -// isEmpty returns true when vl is empty. -func (vl *valueList) isEmpty() bool { - return len(vl.list) == 0 -} - -// append appends val to vl, allocating more space if needed. -func (vl *valueList) append(val any) { - vl.list = append(vl.list, val) -} - -// Executor represents the context for jsonpath execution. -type Executor struct { - vars Vars // variables to substitute into jsonpath - root any // for $ evaluation - current any // for @ evaluation - baseObject kvBaseObject // "base object" for .keyvalue() evaluation - lastGeneratedObjectID int // "id" counter for .keyvalue() evaluation - innermostArraySize int // for LAST array index evaluation - path *ast.AST - - // with "true" structural errors such as absence of required json item or - // unexpected json item type are ignored - ignoreStructuralErrors bool - - // with "false" all suppressible errors are suppressed - verbose bool - // "true" enables casting between TZ and non-TZ time and timestamp types - useTZ bool -} - -// Option specifies an execution option. -type Option func(*Executor) - -// WithVars specifies variables to use during execution. -func WithVars(vars Vars) Option { return func(e *Executor) { e.vars = vars } } - -// WithTZ allows casting between TZ and non-TZ time and timestamp types. -func WithTZ() Option { return func(e *Executor) { e.useTZ = true } } - -// WithSilent suppresses the following errors: missing object field or array -// element, unexpected JSON item type, datetime and numeric errors. This -// behavior emulates the behavior of the PostgreSQL @? and @@ operators, and -// might be helpful when searching JSON document collections of varying -// structure. -func WithSilent() Option { return func(e *Executor) { e.verbose = false } } - -// newExec creates and returns a new Executor. -func newExec(path *ast.AST, opt ...Option) *Executor { - e := &Executor{ - path: path, - innermostArraySize: -1, - ignoreStructuralErrors: path.IsLax(), - lastGeneratedObjectID: 1, // Reserved for IDs from vars - verbose: true, - } - - for _, o := range opt { - o(e) - } - return e -} - -// Query returns all JSON items returned by the JSON path for the specified -// JSON value. For SQL-standard JSON path expressions it returns the JSON -// values selected from target. For predicate check expressions it returns the -// result of the predicate check: true, false, or null (false + ErrNull). The -// optional [WithVars] and [WithSilent] Options act the same as for [Exists]. 
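For orientation, here is a minimal, hypothetical caller-side sketch of how the functions defined below could be used. It assumes the module's parser and exec import paths and a document decoded with encoding/json; the path text, variable name, and values are illustrative only, not taken from this file.

package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/theory/sqljson/path/exec"
	"github.com/theory/sqljson/path/parser"
)

func main() {
	ctx := context.Background()

	// encoding/json decodes into map[string]any, []any, float64,
	// string, bool, and nil values.
	var doc any
	_ = json.Unmarshal([]byte(`{"x": [1, 2, 3]}`), &doc)

	// Parse a SQL/JSON path expression with a named variable
	// (hypothetical example).
	path, err := parser.Parse(`$.x[*] ? (@ >= $min)`)
	if err != nil {
		panic(err)
	}

	// Query returns all selected items; WithVars binds $min and
	// WithSilent suppresses suppressible (ErrVerbose) errors.
	items, err := exec.Query(ctx, path, doc,
		exec.WithVars(exec.Vars{"min": 2}),
		exec.WithSilent(),
	)
	fmt.Println(items, err)

	// First returns only the first selected item, or nil if none.
	first, _ := exec.First(ctx, path, doc, exec.WithVars(exec.Vars{"min": 2}))
	fmt.Println(first)
}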
-func Query(ctx context.Context, path *ast.AST, value any, opt ...Option) ([]any, error) { - exec := newExec(path, opt...) - // if exec.verbose && exec.path.IsPredicate() { - // return nil, fmt.Errorf( - // "%w: Query expects a SQL standard path expression", - // ErrVerbose, - // ) - // } - - vals, err := exec.execute(ctx, value) - if err != nil { - return nil, err - } - return vals.list, nil -} - -// First returns the first JSON item returned by the JSON path for the -// specified JSON value, or nil if there are no results. The parameters are -// the same as for [Query]. -func First(ctx context.Context, path *ast.AST, value any, opt ...Option) (any, error) { - exec := newExec(path, opt...) - // if exec.verbose && exec.path.IsPredicate() { - // return nil, fmt.Errorf( - // "%w: First expects a SQL standard path expression", - // ErrVerbose, - // ) - // } - - vals, err := exec.execute(ctx, value) - if err != nil { - return nil, err - } - if vals.isEmpty() { - //nolint:nilnil // nil is a valid return value, standing in for JSON null. - return nil, nil - } - return vals.list[0], nil -} - -// Exists checks whether the JSON path returns any item for the specified JSON -// value. (This is useful only with SQL-standard JSON path expressions, not -// predicate check expressions, since those always return a value.) If the -// [WithVars] Option is specified its fields provide named values to be -// substituted into the jsonpath expression. If the [WithSilent] Option is -// specified, the function suppresses some errors. If the [WithTZ] Option is -// specified, it allows comparisons of date/time values that require -// timezone-aware conversions. The example below requires interpretation of -// the date-only value 2015-08-02 as a timestamp with time zone, so the result -// depends on the current TimeZone setting: -// -// Exists( -// []any{"2015-08-01 12:00:00-05"}, -// `$[*] ? (@.datetime() < "2015-08-02".datetime())`, -// WithTZ(), -// ) β†’ true -func Exists(ctx context.Context, path *ast.AST, value any, opt ...Option) (bool, error) { - exec := newExec(path, opt...) - // if exec.verbose && exec.path.IsPredicate() { - // return false, fmt.Errorf( - // "%w: Exists expects a SQL standard path expression", - // ErrVerbose, - // ) - // } - - res, err := exec.exists(ctx, value) - if err != nil { - return false, err - } - if res.failed() { - return false, NULL - } - return res == statusOK, nil -} - -// Match returns the result of a JSON path predicate check for the specified -// JSON value. (This is useful only with predicate check expressions, not -// SQL-standard JSON path expressions, since it will either fail or return -// NULL if the path result is not a single boolean value.) The optional -// [WithVars] and [WithSilent] Options act the same as for [Exists]. -func Match(ctx context.Context, path *ast.AST, value any, opt ...Option) (bool, error) { - exec := newExec(path, opt...) 
- // if exec.verbose && !exec.path.IsPredicate() { - // return false, fmt.Errorf( - // "%w: Match expects a predicate path expression", - // ErrVerbose, - // ) - // } - - vals, err := exec.execute(ctx, value) - if err != nil { - return false, err - } - - if len(vals.list) == 1 { - switch val := vals.list[0].(type) { - case nil: - return false, NULL - case bool: - return val, nil - } - } - - if exec.verbose { - return false, fmt.Errorf( - "%w: single boolean result is expected", - ErrVerbose, - ) - } - - return false, NULL -} - -func (exec *Executor) strictAbsenceOfErrors() bool { return exec.path.IsStrict() } -func (exec *Executor) autoUnwrap() bool { return exec.path.IsLax() } -func (exec *Executor) autoWrap() bool { return exec.path.IsLax() } - -// execute executes exec.path against value, returning selected values or an error. -func (exec *Executor) execute(ctx context.Context, value any) (*valueList, error) { - exec.root = value - exec.current = value - vals := newList() - _, err := exec.query(ctx, vals, exec.path.Root(), value) - return vals, err -} - -// exists returns true if the path passed to New() returns at least one item -// for json. -func (exec *Executor) exists(ctx context.Context, json any) (resultStatus, error) { - exec.root = json - exec.current = json - return exec.query(ctx, nil, exec.path.Root(), json) -} - -// returnVerboseError returns statusFailed and, when exec.verbose is true, it -// also returns err. Otherwise it returns statusFailed and nil. err must be an -// ErrVerbose error. -func (exec *Executor) returnVerboseError(err error) (resultStatus, error) { - if exec.verbose { - return statusFailed, err - } - return statusFailed, nil -} - -// returnError returns statusFailed and, when exec.verbose is true and err is -// an ErrVerbose error, it also returns err. Otherwise it returns statusFailed -// and nil. 
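Callers can use the exported error values above to tell a SQL NULL result apart from a genuine execution failure, and to see whether an error would have been suppressed by WithSilent. A small hypothetical sketch of that handling, reusing ctx, path, and doc from the sketch above and the standard errors and fmt packages:

	ok, err := exec.Match(ctx, path, doc)
	switch {
	case err == nil:
		fmt.Println("predicate result:", ok)
	case errors.Is(err, exec.NULL):
		// The predicate evaluated to SQL NULL (unknown).
		fmt.Println("result is NULL")
	case errors.Is(err, exec.ErrVerbose):
		// Suppressible error; WithSilent() would have silenced it.
		fmt.Println("soft error:", err)
	case errors.Is(err, exec.ErrExecution):
		// Hard execution error (not suppressible).
		fmt.Println("execution error:", err)
	}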
-func (exec *Executor) returnError(err error) (resultStatus, error) { - if exec.verbose || !errors.Is(err, ErrVerbose) { - return statusFailed, err - } - return statusFailed, nil -} diff --git a/path/exec/exec_test.go b/path/exec/exec_test.go deleted file mode 100644 index 0d91bcb..0000000 --- a/path/exec/exec_test.go +++ /dev/null @@ -1,2469 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "fmt" - "math" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -func TestResultStatus(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - res resultStatus - }{ - {"OK", statusOK}, - {"NOT_FOUND", statusNotFound}, - {"FAILED", statusFailed}, - {"UNKNOWN_RESULT_STATUS", resultStatus(255)}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(tc.test, tc.res.String()) - a.Equal(tc.res == statusFailed, tc.res.failed()) - }) - } -} - -func TestValueList(t *testing.T) { - t.Parallel() - a := assert.New(t) - - list := newList() - a.NotNil(list) - a.True(list.isEmpty()) - a.Equal(1, cap(list.list)) - - list.append("foo") - a.False(list.isEmpty()) - a.Len(list.list, 1) - a.Equal(1, cap(list.list)) - - list.append(42) - a.False(list.isEmpty()) - a.Len(list.list, 2) - a.Equal(2, cap(list.list)) -} - -func TestOptions(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - opt Option - exp *Executor - }{ - { - test: "vars", - opt: WithVars(Vars{"foo": 1}), - exp: &Executor{verbose: true, vars: Vars{"foo": 1}}, - }, - { - test: "vars_nested", - opt: WithVars(Vars{"foo": 1, "bar": []any{1, 2}}), - exp: &Executor{verbose: true, vars: Vars{"foo": 1, "bar": []any{1, 2}}}, - }, - { - test: "tz", - opt: WithTZ(), - exp: &Executor{verbose: true, useTZ: true}, - }, - { - test: "silent", - opt: WithSilent(), - exp: &Executor{verbose: false}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - e := &Executor{verbose: true} - tc.opt(e) - a.Equal(tc.exp, e) - }) - } -} - -func TestNewExec(t *testing.T) { - t.Parallel() - lax, _ := parser.Parse("$") - strict, _ := parser.Parse("strict $") - - for _, tc := range []struct { - test string - path *ast.AST - opts []Option - exp *Executor - }{ - { - test: "lax_default", - path: lax, - exp: &Executor{ - path: lax, - innermostArraySize: -1, - ignoreStructuralErrors: true, - lastGeneratedObjectID: 1, - verbose: true, - }, - }, - { - test: "strict_default", - path: strict, - exp: &Executor{ - path: strict, - innermostArraySize: -1, - ignoreStructuralErrors: false, - lastGeneratedObjectID: 1, - verbose: true, - }, - }, - { - test: "lax_vars_silent", - path: lax, - opts: []Option{WithVars(Vars{"x": 1}), WithSilent()}, - exp: &Executor{ - path: lax, - innermostArraySize: -1, - ignoreStructuralErrors: true, - lastGeneratedObjectID: 1, - verbose: false, - vars: Vars{"x": 1}, - }, - }, - { - test: "strict_tz_silent", - path: strict, - opts: []Option{WithTZ(), WithSilent()}, - exp: &Executor{ - path: strict, - innermostArraySize: -1, - ignoreStructuralErrors: false, - lastGeneratedObjectID: 1, - verbose: false, - useTZ: true, - }, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - e := newExec(tc.path, tc.opts...) 
- a.Equal(tc.exp, e) - }) - } -} - -func TestQueryAndFirstAndExists(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - value any - opts []Option - exp []any - err string - isErr error - null bool - }{ - { - test: "root", - path: "$", - value: []any{1, 2}, - exp: []any{[]any{1, 2}}, - }, - { - test: "empty", - path: "$[3]", - value: []any{1, 2}, - exp: []any{}, - }, - { - test: "error", - path: "$.string()", - value: []any{1, 2}, - err: "exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value", - isErr: ErrVerbose, - }, - { - test: "silent_no_error", - path: "$.string()", - opts: []Option{WithSilent()}, - value: []any{1, 2}, - exp: []any{}, - null: true, - }, - { - test: "like_regex_object", - path: `$ like_regex "^hi"`, - value: map[string]any{"x": "HIGH"}, - exp: []any{nil}, - }, - { - test: "like_regex_object_filter", - path: `$ ?(@ like_regex "^hi")`, - value: map[string]any{"x": "HIGH"}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - // Parse the path. - path, err := parser.Parse(tc.path) - r.NoError(err) - - t.Run("query", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Run the query. - res, err := Query(ctx, path, tc.value, tc.opts...) - a.Equal(tc.exp, res) - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - - t.Run("first", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Run the query. - res, err := First(ctx, path, tc.value, tc.opts...) - if len(tc.exp) > 0 { - a.Equal(tc.exp[0], res) - } else { - a.Nil(res) - } - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - - t.Run("exists", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Run the query. - res, err := Exists(ctx, path, tc.value, tc.opts...) - a.Equal(len(tc.exp) > 0, res) - - // Check the error. - if tc.isErr == nil { - if tc.null { - r.EqualError(err, "NULL") - r.ErrorIs(err, NULL) - } else { - r.NoError(err) - } - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - }) - } -} - -func TestMatch(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - value any - opts []Option - exp bool - err string - isErr error - }{ - { - test: "root_eq", - path: "$ == 42", - value: int64(42), - exp: true, - }, - { - test: "root_ne", - path: "$ != 42", - value: int64(42), - exp: false, - }, - { - test: "null", - path: "$.string() == 12", - value: []any{1, 2}, - err: "NULL", - isErr: NULL, - }, - { - test: "strict_null", - path: "strict $.string() == 12", - value: []any{1, 2}, - err: "NULL", - isErr: NULL, - }, - { - test: "not_boolean", - path: "$", - value: []any{1, 2}, - err: "exec: single boolean result is expected", - isErr: ErrVerbose, - }, - { - test: "not_boolean_silent", - path: "$", - opts: []Option{WithSilent()}, - value: []any{1, 2}, - err: "NULL", - isErr: NULL, - }, - { - test: "single_boolean_non_predicate", - path: "$", - value: true, - exp: true, - }, - { - test: "error", - path: `strict $.a`, - value: map[string]any{}, - err: `exec: JSON object does not contain key "a"`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Parse the path. 
- path, err := parser.Parse(tc.path) - r.NoError(err) - - // Run the query. - res, err := Match(ctx, path, tc.value, tc.opts...) - a.Equal(tc.exp, res) - - // Check the error. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecAccessors(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Test lax. - lax, _ := parser.Parse("$") - e := newExec(lax) - a.False(e.strictAbsenceOfErrors()) - a.True(e.autoWrap()) - a.True(e.autoUnwrap()) - - // Test strict. - strict, _ := parser.Parse("strict $") - e = newExec(strict) - a.True(e.strictAbsenceOfErrors()) - a.False(e.autoWrap()) - a.False(e.autoUnwrap()) -} - -func TestReturnError(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Verbose. - e := &Executor{verbose: true} - res, err := e.returnVerboseError(ErrVerbose) - a.Equal(statusFailed, res) - r.ErrorIs(err, ErrVerbose) - res, err = e.returnError(ErrVerbose) - a.Equal(statusFailed, res) - r.ErrorIs(err, ErrVerbose) - res, err = e.returnError(ErrExecution) - a.Equal(statusFailed, res) - r.ErrorIs(err, ErrExecution) - - // Silent - e.verbose = false - res, err = e.returnVerboseError(ErrVerbose) - a.Equal(statusFailed, res) - r.NoError(err) - res, err = e.returnError(ErrVerbose) - a.Equal(statusFailed, res) - r.NoError(err) - res, err = e.returnError(ErrExecution) - a.Equal(statusFailed, res) - r.ErrorIs(err, ErrExecution) -} - -// The tests below are admittedly duplicate unit tests for methods in other -// files, but came first while writing the first pass at the implementation. - -type execTestCase struct { - test string - path string - vars Vars - useTZ bool - silent bool - result resultStatus - json any - exp []any - err string - rand bool -} - -func newTestExecutor(path *ast.AST, vars Vars, throwErrors, useTZ bool) *Executor { - return &Executor{ - path: path, - vars: vars, - innermostArraySize: -1, - useTZ: useTZ, - ignoreStructuralErrors: path.IsLax(), - verbose: throwErrors, - lastGeneratedObjectID: 1, - } -} - -func (tc execTestCase) run(t *testing.T) { - t.Helper() - a := assert.New(t) - r := require.New(t) - - path, err := parser.Parse(tc.path) - r.NoError(err) - exec := newTestExecutor(path, tc.vars, !tc.silent, tc.useTZ) - list, err := exec.execute(context.Background(), tc.json) - if tc.err != "" { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrExecution) - a.Empty(list.list) - } else { - r.NoError(err) - a.NotNil(list) - if tc.rand { - a.ElementsMatch(tc.exp, list.list) - } else { - a.Equal(tc.exp, list.list) - } - } - - result, err := exec.exists(context.Background(), tc.json) - if tc.err != "" { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrExecution) - a.Equal(statusFailed, result) - } else { - r.NoError(err) - exp := tc.result - if exp == statusOK && len(tc.exp) == 0 { - exp = statusNotFound - } - a.Equal(exp, result) - } -} - -func TestExecuteRoot(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "root_obj", - path: "$", - json: map[string]any{"x": 42}, - exp: []any{map[string]any{"x": 42}}, - }, - { - test: "root_num", - path: "$", - json: 42.0, - exp: []any{42.0}, - }, - { - test: "root_bool", - path: "$", - json: true, - exp: []any{true}, - }, - { - test: "root_array", - path: "$", - json: []any{42, true, "hi"}, - exp: []any{[]any{42, true, "hi"}}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteLiteral(t *testing.T) { - t.Parallel() - for _, tc := range 
[]execTestCase{ - { - test: "null_only", - path: "null", - json: `""`, - exp: []any{nil}, - }, - { - test: "true_only", - path: "true", - json: `""`, - exp: []any{true}, - }, - { - test: "false_only", - path: "false", - json: `""`, - exp: []any{false}, - }, - { - test: "string", - path: `"yes"`, - json: []any{1, 2, 3}, - exp: []any{"yes"}, - }, - { - test: "int", - path: `42`, - json: nil, - exp: []any{int64(42)}, - }, - { - test: "float", - path: `42.0`, - json: nil, - exp: []any{float64(42.0)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecutePathKeys(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "path_x", - path: "$.x", - json: map[string]any{"x": 42}, - exp: []any{42}, - }, - { - test: "path_xy", - path: "$.x.y", - json: map[string]any{"x": map[string]any{"y": "hi"}}, - exp: []any{"hi"}, - }, - { - test: "path_xyz", - path: "$.x.y.z", - json: map[string]any{"x": map[string]any{"y": map[string]any{"z": "yep"}}}, - exp: []any{"yep"}, - }, - { - test: "path_xy_obj", - path: "$.x.y", - json: map[string]any{"x": map[string]any{"y": map[string]any{"z": "yep"}}}, - exp: []any{map[string]any{"z": "yep"}}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteAny(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "any_key", - path: "$.*", - json: map[string]any{"x": "hi", "y": 42}, - exp: []any{"hi", 42}, - rand: true, // Results can be in any order - }, - { - test: "any_key_mixed", - path: "$.*", - json: map[string]any{"x": map[string]any{"y": 42}, "z": false}, - exp: []any{map[string]any{"y": 42}, false}, - rand: true, // Results can be in any order - }, - { - test: "any_array", - path: "$[*]", - json: []any{"hi", 42}, - exp: []any{"hi", 42}, - }, - { - test: "any_array_mixed", - path: "$[*]", - json: []any{"hi", 42, true, map[string]any{"x": 1}, nil}, - exp: []any{"hi", 42, true, map[string]any{"x": 1}, nil}, - }, - { - test: "path_x_any_array", - path: "$.x[*]", - json: map[string]any{"x": []any{"hi", 42}}, - exp: []any{"hi", 42}, - }, - { - test: "path_xy_any_array", - path: "$.x.y[*]", - json: map[string]any{"x": map[string]any{"y": []any{"hi", 42}}}, - exp: []any{"hi", 42}, - }, - { - test: "any", - path: "$.**", - json: map[string]any{"x": "hi", "y": 42}, - exp: []any{map[string]any{"x": "hi", "y": 42}, "hi", 42}, - rand: true, // Results can be in any order - }, - { - test: "any_nested", - path: "$.**", - json: map[string]any{"x": map[string]any{"y": 42}, "z": map[string]any{}}, - exp: []any{ - map[string]any{"x": map[string]any{"y": 42}, "z": map[string]any{}}, - map[string]any{"y": 42}, - 42, - map[string]any{}, - }, - rand: true, // Results can be in any order - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteMath(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "add_ints", - path: "$ + 1", - json: int64(2), - exp: []any{int64(3)}, - }, - { - test: "add_floats", - path: "$ + 3.2", - json: float64(98.6), - exp: []any{float64(101.8)}, - }, - { - test: "add_int_flat", - path: "$ + 3", - json: float64(98.6), - exp: []any{float64(101.6)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteAndOr(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "binary_or_ints", - path: "$.x == 3 || $.x == 4", - json: map[string]any{"x": int64(4)}, - exp: 
[]any{true}, - }, - { - test: "binary_or_int_float", - path: "$.x == 3 || $.y == 4.0", - json: map[string]any{"x": int64(4), "y": float64(4.0)}, - exp: []any{true}, - }, - { - test: "binary_and_strings", - path: `$.x == "hi" && $.y starts with "good"`, - json: map[string]any{"x": "hi", "y": "good bye"}, - exp: []any{true}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteNumberMethods(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "number_method", - path: `$.x.number()`, - json: map[string]any{"x": int64(3)}, - exp: []any{float64(3)}, - }, - { - test: "number_method_string", - path: `$.x.number()`, - json: map[string]any{"x": "3.4"}, - exp: []any{float64(3.4)}, - }, - { - test: "number_method_json_number", - path: `$.x.number()`, - json: map[string]any{"x": json.Number("3.4")}, - exp: []any{float64(3.4)}, - }, - { - test: "number_method_json_number_int", - path: `$.x.number()`, - json: map[string]any{"x": json.Number("1714004682")}, - exp: []any{float64(1714004682)}, - }, - { - test: "decimal_method", - path: `$.x.decimal()`, - json: map[string]any{"x": "12.2"}, - exp: []any{float64(12.2)}, - }, - { - test: "decimal_method_precision", - path: `$.x.decimal(4)`, - json: map[string]any{"x": "12.2"}, - exp: []any{float64(12)}, - }, - { - test: "decimal_method_precision_short", - path: `$.x.decimal(1)`, - json: map[string]any{"x": "12.233"}, - // exp: []any{float64(12)}, - err: `exec: argument "12.233" of jsonpath item method .decimal() is invalid for type numeric`, - }, - { - test: "decimal_method_precision_ok", - path: `$.x.decimal(5,3)`, - json: map[string]any{"x": "12.233"}, - exp: []any{float64(12.233)}, - }, - { - test: "decimal_method_precision_scale", - path: `$.x.decimal(4, 2)`, - json: map[string]any{"x": "12.233"}, - exp: []any{float64(12.23)}, - }, - { - test: "decimal_method_precision_scale_short", - path: `$.x.decimal(3, 2)`, - json: map[string]any{"x": "12.233"}, - err: `exec: argument "12.233" of jsonpath item method .decimal() is invalid for type numeric`, - }, - { - test: "abs_int", - path: `$.x.abs()`, - json: map[string]any{"x": int64(-42)}, - exp: []any{int64(42)}, - }, - { - test: "abs_float", - path: `$.x.abs()`, - json: map[string]any{"x": float64(-42.22)}, - exp: []any{float64(42.22)}, - }, - { - test: "abs_json_number_int", - path: `$.x.abs()`, - json: map[string]any{"x": json.Number("-99")}, - exp: []any{int64(99)}, - }, - { - test: "abs_json_number_float", - path: `$.x.abs()`, - json: map[string]any{"x": json.Number("-42.22")}, - exp: []any{float64(42.22)}, - }, - { - test: "floor_int", - path: `$.x.floor()`, - json: map[string]any{"x": int64(42)}, - exp: []any{int64(42)}, - }, - { - test: "floor_float", - path: `$.x.floor()`, - json: map[string]any{"x": float64(42.22)}, - exp: []any{float64(42)}, - }, - { - test: "floor_json_number_int", - path: `$.x.floor()`, - json: map[string]any{"x": json.Number("99")}, - exp: []any{int64(99)}, - }, - { - test: "floor_json_number_float", - path: `$.x.floor()`, - json: map[string]any{"x": json.Number("88.88")}, - exp: []any{float64(88)}, - }, - { - test: "ceiling_int", - path: `$.x.ceiling()`, - json: map[string]any{"x": int64(42)}, - exp: []any{int64(42)}, - }, - { - test: "ceiling_float", - path: `$.x.ceiling()`, - json: map[string]any{"x": float64(42.22)}, - exp: []any{float64(43)}, - }, - { - test: "ceiling_json_number_int", - path: `$.x.ceiling()`, - json: map[string]any{"x": json.Number("99")}, - exp: []any{int64(99)}, - }, - { - 
test: "ceiling_json_number_float", - path: `$.x.ceiling()`, - json: map[string]any{"x": json.Number("88.88")}, - exp: []any{float64(89)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteArraySubscripts(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "array_subscript_0", - path: `$.x[0]`, - json: map[string]any{"x": []any{"hi"}}, - exp: []any{"hi"}, - }, - { - test: "array_subscript_2", - path: `$.x[2]`, - json: map[string]any{"x": []any{"hi", "", true}}, - exp: []any{true}, - }, - { - test: "array_subscript_from_to", - path: `$.x[1 to 2]`, - json: map[string]any{"x": []any{"xx", "hi", true}}, - exp: []any{"hi", true}, - }, - { - test: "array_subscript_last", - path: `$.x[last]`, - json: map[string]any{"x": []any{"hi", "", true}}, - exp: []any{true}, - }, - { - test: "array_subscript_to_last", - path: `$.x[1 to last]`, - json: map[string]any{"x": []any{"hi", "", true}}, - exp: []any{"", true}, - }, - { - test: "array_subscript_multi", - path: `$.x[0, 3 to 4]`, - json: map[string]any{"x": []any{"hi", "", true, "x", "y"}}, - exp: []any{"hi", "x", "y"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteLikeRegex(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "like_regex", - path: `$.x like_regex "."`, - json: map[string]any{"x": "x"}, - exp: []any{true}, - }, - { - test: "like_regex_prefix", - path: `$.x like_regex "^hi"`, - json: map[string]any{"x": "hi there"}, - exp: []any{true}, - }, - { - test: "like_regex_false", - path: `$.x like_regex "^hi"`, - json: map[string]any{"x": "say hi there"}, - exp: []any{false}, - }, - { - test: "like_regex_flag", - path: `$.x like_regex "^hi" flag "i"`, - json: map[string]any{"x": "HIGH"}, - exp: []any{true}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteFilter(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "filter_true", - path: `$.x ?(@ == "hi")`, - json: map[string]any{"x": "hi"}, - exp: []any{"hi"}, - }, - { - test: "filter_false", - path: `$.x ?(@ != "hi")`, - json: map[string]any{"x": "hi"}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteTypeSizeMethods(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "type_method_string", - path: `$.x.type()`, - json: map[string]any{"x": "hi"}, - exp: []any{"string"}, - }, - { - test: "type_method_multi", - path: `$[*].type()`, - json: []any{int64(1), "2", map[string]any{}}, - exp: []any{"number", "string", "object"}, - }, - { - test: "size_method_array", - path: `$.x.size()`, - json: map[string]any{"x": []any{1, 2, 3}}, - exp: []any{int64(3)}, - }, - { - test: "size_method_other", - path: `$.x.size()`, - json: map[string]any{"x": true}, - exp: []any{int64(1)}, - }, - { - test: "size_method_error", - path: `strict $.x.size()`, - json: map[string]any{"x": true}, - err: `exec: jsonpath item method .size() can only be applied to an array`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteUnaryPlusMinus(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "unary_plus", - path: `+$.x`, - json: map[string]any{"x": int64(42)}, - exp: []any{int64(42)}, - }, - { - test: "unary_minus_pos", - path: `-$.x`, - json: map[string]any{"x": int64(42)}, - exp: 
[]any{int64(-42)}, - }, - { - test: "unary_minus_neg", - path: `-$.x`, - json: map[string]any{"x": int64(-42)}, - exp: []any{int64(42)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteDateTime(t *testing.T) { - t.Parallel() - offsetZero := time.FixedZone("", 0) - ctx := context.Background() - - for _, tc := range []execTestCase{ - { - test: "date", - path: `$.x.date()`, - json: map[string]any{"x": "2009-10-03"}, - exp: []any{types.NewDate( - time.Date(2009, 10, 3, 0, 0, 0, 0, offsetZero), - )}, - }, - { - test: "time", - path: `$.x.time()`, - json: map[string]any{"x": "20:59:19.79142"}, - exp: []any{types.NewTime( - time.Date(0, 1, 1, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "time_tz", - path: `$.x.time_tz()`, - json: map[string]any{"x": "20:59:19.79142-04"}, - exp: []any{types.NewTimeTZ( - time.Date(0, 1, 1, 20, 59, 19, 791420000, time.FixedZone("", -4*60*60)), - )}, - }, - { - test: "timestamp_T", - path: `$.x.timestamp()`, - json: map[string]any{"x": "2024-05-05T20:59:19.79142"}, - exp: []any{types.NewTimestamp( - time.Date(2024, 5, 5, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "timestamp_space", - path: `$.x.timestamp()`, - json: map[string]any{"x": "2024-05-05 20:59:19.79142"}, - exp: []any{types.NewTimestamp( - time.Date(2024, 5, 5, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "timestamp_T_tz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "2024-05-05T20:59:19.79142-05"}, - exp: []any{types.NewTimestampTZ( - ctx, - time.Date(2024, 5, 5, 20, 59, 19, 791420000, time.FixedZone("", -5*60*60)), - )}, - }, - { - test: "timestamp_space_tz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "2024-05-05 20:59:19.79142-05"}, - exp: []any{types.NewTimestampTZ( - ctx, - time.Date(2024, 5, 5, 20, 59, 19, 791420000, time.FixedZone("", -5*60*60)), - )}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - // .datetime() should also work - tc.test += "_datetime" - tc.path = `$.x.datetime()` - tc.run(t) - }) - } -} - -func TestExecuteDateTimeErrors(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "not_a_string", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": int64(42)}, - err: "exec: jsonpath item method .timestamp_tz() can only be applied to a string", - }, - { - test: "datetime_template_not_supported", - path: `$.x.datetime("HH24:MI")`, - json: map[string]any{"x": "2024-05-05 20:59:19.79142-05"}, - err: "exec: .datetime(template) is not yet supported", - }, - { - test: "invalid_precision", - path: fmt.Sprintf(`$.x.time(%v)`, int64(math.MaxInt32+1)), - json: map[string]any{"x": "2024-05-05 20:59:19.79142-05"}, - err: `exec: time precision of jsonpath item method .time() is out of integer range`, - }, - { - test: "not_a_timestamp", - path: `$.x.time()`, - json: map[string]any{"x": "NOT A TIMESTAMP"}, - err: `exec: time format is not recognized: "NOT A TIMESTAMP"`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -const tzHint = " HINT: Use WithTZ() option for time zone support" - -func TestExecuteDateTimeCast(t *testing.T) { - t.Parallel() - offsetZero := time.FixedZone("", 0) - ctx := context.Background() - - for _, tc := range []execTestCase{ - // Cast to date - { - test: "date_to_date", - path: `$.x.date()`, - json: map[string]any{"x": "2009-10-03"}, - exp: []any{types.NewDate( - time.Date(2009, 10, 3, 0, 0, 0, 0, offsetZero), - )}, - }, - { - test: 
"timestamp_to_date", - path: `$.x.date()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142"}, - exp: []any{types.NewDate( - time.Date(2009, 10, 3, 0, 0, 0, 0, offsetZero), - )}, - }, - { - test: "timestamp_tz_to_date", - path: `$.x.date()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142-01"}, - err: "exec: cannot convert value from timestamptz to date without time zone usage." + tzHint, - }, - { - test: "timestamp_with_tz_to_date", - path: `$.x.date()`, - useTZ: true, - json: map[string]any{"x": "2009-10-03 20:59:19.79142-01"}, - exp: []any{types.NewDate( - time.Date(2009, 10, 3, 0, 0, 0, 0, offsetZero), - )}, - }, - { - test: "time_to_date", - path: `$.x.date()`, - json: map[string]any{"x": "20:59:19.79142"}, - err: `exec: date format is not recognized: "20:59:19.79142"`, - }, - { - test: "time_tz_to_date", - path: `$.x.date()`, - json: map[string]any{"x": "20:59:19.79142-01"}, - err: `exec: date format is not recognized: "20:59:19.79142-01"`, - }, - // Cast to time - { - test: "date_to_time", - path: `$.x.time()`, - json: map[string]any{"x": "2009-10-03"}, - err: `exec: time format is not recognized: "2009-10-03"`, - }, - { - test: "time_to_time", - path: `$.x.time()`, - json: map[string]any{"x": "20:59:19.79142"}, - exp: []any{types.NewTime( - time.Date(0, 1, 1, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "time_tz_to_time", - path: `$.x.time()`, - json: map[string]any{"x": "20:59:19.79142-01"}, - err: "exec: cannot convert value from timetz to time without time zone usage." + tzHint, - }, - { - test: "time_with_tz_to_time", - path: `$.x.time()`, - useTZ: true, - json: map[string]any{"x": "20:59:19.79142-01"}, - exp: []any{types.NewTime( - time.Date(0, 1, 1, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "timestamp_to_time", - path: `$.x.time()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142"}, - exp: []any{types.NewTime( - time.Date(0, 1, 1, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "timestamp_tz_to_time", - path: `$.x.time()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142+01"}, - err: "exec: cannot convert value from timestamptz to time without time zone usage." + tzHint, - }, - { - test: "timestamp_with_tz_to_time", - path: `$.x.time()`, - useTZ: true, - json: map[string]any{"x": "2009-10-03 20:59:19.79142+01"}, - exp: []any{types.NewTime(types.NewTimestampTZ( - ctx, - time.Date(2009, 10, 3, 20, 59, 19, 791420000, time.FixedZone("", 3600)), - ).In(offsetZero))}, - }, - // Cast to timetz - { - test: "date_to_timetz", - path: `$.x.time_tz()`, - json: map[string]any{"x": "2009-10-03"}, - err: `exec: time_tz format is not recognized: "2009-10-03"`, - }, - { - test: "time_to_timetz", - path: `$.x.time_tz()`, - json: map[string]any{"x": "20:59:19.79142"}, - err: "exec: cannot convert value from time to timetz without time zone usage." 
+ tzHint, - }, - { - test: "time_to_time_with_tz", - path: `$.x.time_tz()`, - useTZ: true, - json: map[string]any{"x": "20:59:19.79142"}, - exp: []any{types.NewTimeTZ( - time.Date(0, 1, 1, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "timetz_to_timetz", - path: `$.x.time_tz()`, - json: map[string]any{"x": "20:59:19.79142Z"}, - exp: []any{types.NewTimeTZ(time.Date(0, 1, 1, 20, 59, 19, 791420000, offsetZero))}, - }, - { - test: "timestamp_to_timetz", - path: `$.x.time_tz()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142"}, - err: `exec: time_tz format is not recognized: "2009-10-03 20:59:19.79142"`, - }, - { - test: "timestamp_tz_to_timetz", - path: `$.x.time_tz()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142Z"}, - exp: []any{types.NewTimestampTZ( - ctx, - time.Date(2009, 10, 3, 20, 59, 19, 791420000, offsetZero), - ).ToTimeTZ(ctx)}, - }, - // Cast to timestamp - { - test: "date_to_timestamp", - path: `$.x.timestamp()`, - json: map[string]any{"x": "2009-10-03"}, - exp: []any{types.NewTimestamp(time.Date(2009, 10, 3, 0, 0, 0, 0, offsetZero))}, - }, - { - test: "time_to_timestamp", - path: `$.x.timestamp()`, - json: map[string]any{"x": "20:59:19.79142"}, - err: `exec: timestamp format is not recognized: "20:59:19.79142"`, - }, - { - test: "time_tz_to_timestamp", - path: `$.x.timestamp()`, - json: map[string]any{"x": "20:59:19.79142-01"}, - err: `exec: timestamp format is not recognized: "20:59:19.79142-01"`, - }, - { - test: "timestamp_to_timestamp", - path: `$.x.timestamp()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142"}, - exp: []any{types.NewTimestamp(time.Date(2009, 10, 3, 20, 59, 19, 791420000, offsetZero))}, - }, - { - test: "timestamp_tz_to_timestamp", - path: `$.x.timestamp()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142Z"}, - err: "exec: cannot convert value from timestamptz to timestamp without time zone usage." + tzHint, - }, - { - test: "timestamp_with_tz_to_timestamp", - path: `$.x.timestamp()`, - useTZ: true, - json: map[string]any{"x": "2009-10-03 20:59:19.79142Z"}, - exp: []any{types.NewTimestamp( - time.Date(2009, 10, 3, 20, 59, 19, 791420000, offsetZero), - )}, - }, - // Cast to timestamptz - { - test: "date_to_timestamptz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "2009-10-03"}, - err: "exec: cannot convert value from date to timestamptz without time zone usage." + tzHint, - }, - { - test: "date_to_timestamp_with_tz", - path: `$.x.timestamp_tz()`, - useTZ: true, - json: map[string]any{"x": "2009-10-03"}, - exp: []any{types.NewDate( - time.Date(2009, 10, 3, 0, 0, 0, 0, offsetZero), - ).ToTimestampTZ(ctx)}, - }, - { - test: "time_to_timestamptz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "20:59:19.79142"}, - err: `exec: timestamp_tz format is not recognized: "20:59:19.79142"`, - }, - { - test: "time_tz_to_timestamptz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "20:59:19.79142-01"}, - err: `exec: timestamp_tz format is not recognized: "20:59:19.79142-01"`, - }, - { - test: "timestamp_to_timestamptz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142"}, - err: "exec: cannot convert value from timestamp to timestamptz without time zone usage." 
+ tzHint, - }, - { - test: "timestamp_to_timestamp_with_tz", - path: `$.x.timestamp_tz()`, - useTZ: true, - json: map[string]any{"x": "2009-10-03 20:59:19.79142"}, - exp: []any{types.NewTimestampTZ( - ctx, - time.Date(2009, 10, 3, 20, 59, 19, 791420000, offsetZero), - )}, - }, - { - test: "timestamp_tz_to_timestamptz", - path: `$.x.timestamp_tz()`, - json: map[string]any{"x": "2009-10-03 20:59:19.79142Z"}, - exp: []any{types.NewTimestampTZ( - ctx, - time.Date(2009, 10, 3, 20, 59, 19, 791420000, offsetZero), - )}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteTimePrecision(t *testing.T) { - t.Parallel() - offsetZero := time.FixedZone("", 0) - - for _, tc := range []execTestCase{ - { - test: "time_precision", - path: `$.x.time(3)`, - json: map[string]any{"x": "20:59:19.79142"}, - exp: []any{types.NewTime(time.Date(0, 1, 1, 20, 59, 19, 791000000, offsetZero))}, - }, - { - test: "time_tz_precision", - path: `$.x.time_tz(4)`, - json: map[string]any{"x": "20:59:19.79142+01"}, - exp: []any{types.NewTimeTZ( - time.Date(0, 1, 1, 20, 59, 19, 791400000, time.FixedZone("", 1*60*60)), - )}, - }, - { - test: "timestamp_precision", - path: `$.x.timestamp(2)`, - json: map[string]any{"x": "2024-05-05T20:59:19.791423"}, - exp: []any{types.NewTimestamp(time.Date(2024, 5, 5, 20, 59, 19, 790000000, offsetZero))}, - }, - { - test: "timestamp_tz_precision", - path: `$.x.timestamp_tz(5)`, - json: map[string]any{"x": "2024-05-05T20:59:19.791423+02:30"}, - exp: []any{types.NewTimestampTZ( - context.Background(), - time.Date(2024, 5, 5, 20, 59, 19, 791420000, time.FixedZone("", 2*60*60+30*60)), - )}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteDateComparison(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "date_eq_date", - path: `$.date() == $.date()`, - json: "2024-05-03", - exp: []any{true}, - }, - { - test: "date_ne_date", - path: `$.date() != $.date()`, - json: "2024-05-03", - exp: []any{false}, - }, - { - test: "unequal_dates", - path: `$.x.date() == $.y.date()`, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-04"}, - exp: []any{false}, - }, - { - test: "gt_date", - path: `$.y.date() >= $.x.date()`, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-04"}, - exp: []any{true}, - }, - { - test: "same_date", - path: `$.date() == $.date()`, - json: "2024-05-03", - exp: []any{true}, - }, - { - test: "date_eq_timestamp", - path: `$.x.date() == $.y.timestamp()`, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-03 23:53:42.232"}, - exp: []any{false}, - }, - { - test: "date_lt_timestamp", - path: `$.x.date() < $.y.timestamp()`, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-03 23:53:42.232"}, - exp: []any{true}, - }, - { - test: "date_eq_timestamp_midnight", - path: `$.x.date() == $.y.timestamp()`, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-03 00:00:00"}, - exp: []any{true}, - }, - { - test: "date_eq_timestamp_tz", - path: `$.x.date() == $.y.timestamp_tz()`, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-03 23:53:42.232Z"}, - err: "exec: cannot convert value from date to timestamptz without time zone usage." 
+ tzHint, - }, - { - test: "date_eq_timestamp_with_tz", - path: `$.x.date() == $.y.timestamp_tz()`, - useTZ: true, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-03 23:53:42.232Z"}, - exp: []any{false}, - }, - { - test: "date_le_timestamp_with_tz", - path: `$.x.date() <= $.y.timestamp_tz()`, - useTZ: true, - json: map[string]any{"x": "2024-05-03", "y": "2024-05-03 23:53:42.232Z"}, - exp: []any{true}, - }, - { - test: "date_eq_time", - path: `$.x.date() == $.y.time()`, - json: map[string]any{"x": "2024-05-03", "y": "23:53:42.232"}, - exp: []any{nil}, - }, - { - test: "date_eq_time_tz", - path: `$.x.date() == $.y.time_tz()`, - json: map[string]any{"x": "2024-05-03", "y": "23:53:42.232Z"}, - exp: []any{nil}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteTimeComparison(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "time_eq_time", - path: `$.time() == $.time()`, - json: "14:32:43.123345", - exp: []any{true}, - }, - { - test: "time_ne_time", - path: `$.time() != $.time()`, - json: "14:32:43.123345", - exp: []any{false}, - }, - { - test: "time_ne_time_true", - path: `$.time(3) != $.time(4)`, - json: "14:32:43.123345", - exp: []any{true}, - }, - { - test: "time_eq_time_tz", - path: `$.x.time() == $.y.time_tz()`, - json: map[string]any{"x": "14:32:43.123345", "y": "14:32:43.123345Z"}, - err: "exec: cannot convert value from time to timetz without time zone usage." + tzHint, - }, - { - test: "time_eq_time_with_tz", - path: `$.x.time() == $.y.time_tz()`, - useTZ: true, - json: map[string]any{"x": "14:32:43.123345", "y": "14:32:43.123345Z"}, - exp: []any{true}, - }, - { - test: "time_eq_time_with_tz_conv", - path: `$.x.time() != $.y.time_tz()`, - useTZ: true, - json: map[string]any{"x": "14:32:43.123345", "y": "14:32:43.123345-01"}, - exp: []any{true}, - }, - { - test: "time_eq_date", - path: `$.x.time() == $.y.date()`, - json: map[string]any{"x": "14:32:43", "y": "2024-05-05"}, - exp: []any{nil}, - }, - { - test: "time_eq_timestamp", - path: `$.x.time() == $.y.timestamp()`, - json: map[string]any{"x": "14:32:43", "y": "2024-05-05 14:32:43"}, - exp: []any{nil}, - }, - { - test: "time_eq_timestamp_tz", - path: `$.x.time() == $.y.timestamp_tz()`, - json: map[string]any{"x": "14:32:43", "y": "2024-05-05 14:32:43Z"}, - exp: []any{nil}, - }, - { - test: "timetz_eq_timetz", - path: `$.time_tz() == $.time_tz()`, - json: "14:32:43.123345Z", - exp: []any{true}, - }, - { - test: "timetz_ne_timetz", - path: `$.time_tz() != $.time_tz()`, - json: "14:32:43.123345Z", - exp: []any{false}, - }, - { - test: "timetz_ne_timetz_true", - path: `$.time_tz(3) != $.time_tz(4)`, - json: "14:32:43.123345Z", - exp: []any{true}, - }, - { - test: "timetz_eq_time", - path: `$.y.time_tz() == $.x.time()`, - json: map[string]any{"x": "14:32:43.123345", "y": "14:32:43.123345Z"}, - err: "exec: cannot convert value from time to timetz without time zone usage." 
+ tzHint, - }, - { - test: "time_with_tz_eq_time", - path: `$.y.time_tz() == $.x.time()`, - useTZ: true, - json: map[string]any{"x": "14:32:43.123345", "y": "14:32:43.123345Z"}, - exp: []any{true}, - }, - { - test: "time_with_tz_conv_eq_time", - path: `$.y.time_tz() != $.x.time()`, - useTZ: true, - json: map[string]any{"x": "14:32:43.123345", "y": "14:32:43.123345-01"}, - exp: []any{true}, - }, - { - test: "timetz_eq_date", - path: `$.x.time_tz() == $.y.date()`, - json: map[string]any{"x": "14:32:43Z", "y": "2024-05-05"}, - exp: []any{nil}, - }, - { - test: "timetz_eq_timestamp", - path: `$.x.time_tz() == $.y.timestamp()`, - json: map[string]any{"x": "14:32:43Z", "y": "2024-05-05 14:32:43"}, - exp: []any{nil}, - }, - { - test: "timetz_eq_timestamp_tz", - path: `$.x.time_tz() == $.y.timestamp_tz()`, - json: map[string]any{"x": "14:32:43Z", "y": "2024-05-05 14:32:43Z"}, - exp: []any{nil}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteTimestampComparison(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "ts_eq_ts", - path: `$.timestamp() == $.timestamp()`, - json: "2024-05-05 14:32:43.123345", - exp: []any{true}, - }, - { - test: "ts_ne_ts", - path: `$.timestamp() != $.timestamp()`, - json: "2024-05-05 14:32:43.123345", - exp: []any{false}, - }, - { - test: "ts_eq_ts_precision", - path: `$.timestamp(3) == $.timestamp(4)`, - json: "2024-05-05 14:32:43.123345", - exp: []any{false}, - }, - { - test: "ts_ne_date", - path: `$[0].timestamp() != $[1].date()`, - json: []any{"2024-05-05 14:32:43.123345", "2024-05-05"}, - exp: []any{true}, - }, - { - test: "ts_eq_date", - path: `$[0].timestamp() == $[1].date()`, - json: []any{"2024-05-05 00:00:00", "2024-05-05"}, - exp: []any{true}, - }, - { - test: "ts_eq_ts_tz", - path: `$[0].timestamp() == $[1].timestamp_tz()`, - json: []any{"2024-05-05 00:00:00", "2024-05-05 00:00:00Z"}, - err: "exec: cannot convert value from timestamp to timestamptz without time zone usage." + tzHint, - }, - { - test: "ts_eq_ts_with_tz", - path: `$[0].timestamp() == $[1].timestamp_tz()`, - useTZ: true, - json: []any{"2024-05-05 00:00:00", "2024-05-05 00:00:00Z"}, - exp: []any{true}, - }, - { - test: "ts_eq_time", - path: `$[0].timestamp() == $[1].time()`, - json: []any{"2024-05-05 00:00:00", "00:00:00"}, - exp: []any{nil}, - }, - { - test: "ts_eq_time", - path: `$[0].timestamp() == $[1].time_tz()`, - json: []any{"2024-05-05 00:00:00", "00:00:00Z"}, - exp: []any{nil}, - }, - { - test: "ts_tz_eq_ts_tz", - path: `$.timestamp_tz() == $.timestamp_tz()`, - json: "2024-05-05 14:32:43.123345Z", - exp: []any{true}, - }, - { - test: "ts_tz_ne_ts_tz", - path: `$.timestamp_tz() != $.timestamp_tz()`, - json: "2024-05-05 14:32:43.123345Z", - exp: []any{false}, - }, - { - test: "ts_tz_eq_ts_tz_precision", - path: `$.timestamp_tz(2) == $.timestamp_tz(2)`, - json: "2024-05-05 14:32:43.123345Z", - exp: []any{true}, - }, - { - test: "ts_tz_ne_ts_tz_precision", - path: `$.timestamp_tz(2) != $.timestamp_tz(3)`, - json: "2024-05-05 14:32:43.123345Z", - exp: []any{true}, - }, - { - test: "ts_tz_eq_date", - path: `$[0].timestamp_tz() == $[1].date()`, - json: []any{"2024-05-05 14:32:43.123345Z", "2024-05-05"}, - err: "exec: cannot convert value from date to timestamptz without time zone usage." 
+ tzHint, - }, - { - test: "ts_with_tz_ne_date", - path: `$[0].timestamp_tz() != $[1].date()`, - useTZ: true, - json: []any{"2024-05-05 14:32:43.123345Z", "2024-05-05"}, - exp: []any{true}, - }, - { - test: "ts_with_tz_eq_date", - path: `$[0].timestamp_tz() == $[1].date()`, - useTZ: true, - json: []any{"2024-05-05 00:00:00Z", "2024-05-05"}, - exp: []any{true}, - }, - { - test: "ts_tz_eq_timestamp", - path: `$[0].timestamp_tz() == $[1].timestamp()`, - json: []any{"2024-05-05 14:32:43.123345Z", "2024-05-05 14:32:43.123345"}, - err: "exec: cannot convert value from timestamp to timestamptz without time zone usage." + tzHint, - }, - { - test: "ts_with_tz_eq_timestamp", - path: `$[0].timestamp_tz() == $[1].timestamp()`, - useTZ: true, - json: []any{"2024-05-05 14:32:43.123345Z", "2024-05-05 14:32:43.123345"}, - exp: []any{true}, - }, - { - test: "ts_tz_eq_time", - path: `$[0].timestamp_tz() == $[1].time()`, - json: []any{"2024-05-05 00:00:00Z", "00:00:00"}, - exp: []any{nil}, - }, - { - test: "ts_tz_eq_time", - path: `$[0].timestamp_tz() == $[1].time_tz()`, - json: []any{"2024-05-05 00:00:00Z", "00:00:00Z"}, - exp: []any{nil}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteDoubleMethod(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "double_int", - path: `$.x.double()`, - json: map[string]any{"x": int64(42)}, - exp: []any{float64(42)}, - }, - { - test: "double_float", - path: `$.x.double()`, - json: map[string]any{"x": float64(98.6)}, - exp: []any{float64(98.6)}, - }, - { - test: "double_json_number", - path: `$.x.double()`, - json: map[string]any{"x": json.Number("1024.3")}, - exp: []any{float64(1024.3)}, - }, - { - test: "double_invalid_json_number", - path: `$.x.double()`, - json: map[string]any{"x": json.Number("hi")}, - err: `exec: argument "hi" of jsonpath item method .double() is invalid for type double precision`, - }, - { - test: "double_string", - path: `$.x.double()`, - json: map[string]any{"x": "1024.3"}, - exp: []any{float64(1024.3)}, - }, - { - test: "double_invalid_string", - path: `$.x.double()`, - json: map[string]any{"x": "lol"}, - err: `exec: argument "lol" of jsonpath item method .double() is invalid for type double precision`, - }, - { - test: "double_array", - path: `$.x.double()`, - json: map[string]any{"x": []any{"1024.3", int64(42)}}, - exp: []any{float64(1024.3), float64(42)}, - }, - { - test: "strict_double_array", - path: `strict $.x.double()`, - json: map[string]any{"x": []any{"1024.3", int64(42)}}, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - { - test: "double_bool", - path: `strict $.x.double()`, - json: map[string]any{"x": true}, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - { - test: "double_infinity", - path: `strict $.x.double()`, - json: map[string]any{"x": "infinity"}, - err: "exec: NaN or Infinity is not allowed for jsonpath item method .double()", - }, - { - test: "double_nan", - path: `strict $.x.double()`, - json: map[string]any{"x": "NaN"}, - err: "exec: NaN or Infinity is not allowed for jsonpath item method .double()", - }, - { - test: "double_null", - path: `strict $.x.double()`, - json: map[string]any{"x": nil}, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteBigintMethod(t *testing.T) { - 
t.Parallel() - for _, tc := range []execTestCase{ - { - test: "int_int", - path: `$.x.bigint()`, - json: map[string]any{"x": int64(9876543219)}, - exp: []any{int64(9876543219)}, - }, - { - test: "int_float", - path: `$.x.bigint()`, - json: map[string]any{"x": float64(42.3)}, - exp: []any{int64(42)}, - }, - { - test: "int_json_number", - path: `$.x.bigint()`, - json: map[string]any{"x": json.Number("9876543219")}, - exp: []any{int64(9876543219)}, - }, - { - test: "int_json_number_float", - path: `$.x.bigint()`, - json: map[string]any{"x": json.Number("9876543219.2")}, - exp: []any{int64(9876543219)}, - }, - { - test: "int_string", - path: `$.x.bigint()`, - json: map[string]any{"x": "99"}, - exp: []any{int64(99)}, - }, - { - test: "int_array", - path: `$.x.bigint()`, - json: map[string]any{"x": []any{"99", int64(1024)}}, - exp: []any{int64(99), int64(1024)}, - }, - { - test: "int_array_strict", - path: `strict $.x.bigint()`, - json: map[string]any{"x": []any{"99", int64(1024)}}, - err: "exec: jsonpath item method .bigint() can only be applied to a string or numeric value", - }, - { - test: "int_obj", - path: `$.x.bigint()`, - json: map[string]any{"x": map[string]any{"99": int64(1024)}}, - err: "exec: jsonpath item method .bigint() can only be applied to a string or numeric value", - }, - { - test: "int_next", - path: "$.x.bigint().abs()", - json: map[string]any{"x": int64(-9876543219)}, - exp: []any{int64(9876543219)}, - }, - { - test: "int_null", - path: "$.x.bigint()", - json: map[string]any{"x": nil}, - err: "exec: jsonpath item method .bigint() can only be applied to a string or numeric value", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteIntegerMethod(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "int_int", - path: `$.x.integer()`, - json: map[string]any{"x": int64(42)}, - exp: []any{int64(42)}, - }, - { - test: "int_bigint", - path: `$.x.integer()`, - json: map[string]any{"x": int64(9876543219)}, - err: `exec: argument "9876543219" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "int_bigint_neg", - path: `$.x.integer()`, - json: map[string]any{"x": int64(-3147483648)}, - err: `exec: argument "-3147483648" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "int_float", - path: `$.x.integer()`, - json: map[string]any{"x": float64(42.3)}, - exp: []any{int64(42)}, - }, - { - test: "int_json_number", - path: `$.x.integer()`, - json: map[string]any{"x": json.Number("42")}, - exp: []any{int64(42)}, - }, - { - test: "int_json_number_float", - path: `$.x.integer()`, - json: map[string]any{"x": json.Number("42.2")}, - exp: []any{int64(42)}, - }, - { - test: "int_json_number_big", - path: `$.x.integer()`, - json: map[string]any{"x": json.Number("9876543219")}, - err: `exec: argument "9876543219" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "int_json_number_big_neg", - path: `$.x.integer()`, - json: map[string]any{"x": json.Number("-3147483648")}, - err: `exec: argument "-3147483648" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "int_string", - path: `$.x.integer()`, - json: map[string]any{"x": "99"}, - exp: []any{int64(99)}, - }, - { - test: "int_string_big", - path: `$.x.integer()`, - json: map[string]any{"x": "9876543219"}, - err: `exec: argument "9876543219" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "int_string_big_neg", - 
path: `$.x.integer()`, - json: map[string]any{"x": "-3147483648"}, - err: `exec: argument "-3147483648" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "int_array", - path: `$.x.integer()`, - json: map[string]any{"x": []any{"99", int64(1024)}}, - exp: []any{int64(99), int64(1024)}, - }, - { - test: "int_array_strict", - path: `strict $.x.integer()`, - json: map[string]any{"x": []any{"99", int64(1024)}}, - err: "exec: jsonpath item method .integer() can only be applied to a string or numeric value", - }, - { - test: "int_obj", - path: `$.x.integer()`, - json: map[string]any{"x": map[string]any{"99": int64(1024)}}, - err: "exec: jsonpath item method .integer() can only be applied to a string or numeric value", - }, - { - test: "int_next", - path: "$.x.integer().abs()", - json: map[string]any{"x": int64(-42)}, - exp: []any{int64(42)}, - }, - { - test: "int_null", - path: "$.x.integer()", - json: map[string]any{"x": nil}, - err: "exec: jsonpath item method .integer() can only be applied to a string or numeric value", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func TestExecuteStringMethod(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "string_string", - path: `$.x.string()`, - json: map[string]any{"x": "hi"}, - exp: []any{"hi"}, - }, - { - test: "datetime_string", - path: `$.x.datetime().string()`, - json: map[string]any{"x": "2024-05-05"}, - exp: []any{"2024-05-05"}, - }, - { - test: "date_string", - path: `$.x.date().string()`, - json: map[string]any{"x": "2024-05-05"}, - exp: []any{"2024-05-05"}, - }, - { - test: "time_string", - path: `$.x.time().string()`, - json: map[string]any{"x": "12:34:56"}, - exp: []any{"12:34:56"}, - }, - { - test: "time_tz_string", - path: `$.x.time_tz().string()`, - json: map[string]any{"x": "12:34:56Z"}, - exp: []any{"12:34:56+00:00"}, - }, - { - test: "timestamp_string", - path: `$.x.timestamp().string()`, - json: map[string]any{"x": "2024-05-05 12:34:56"}, - exp: []any{"2024-05-05T12:34:56"}, - }, - { - test: "timestamp_tz_string", - path: `$.x.timestamp_tz().string()`, - json: map[string]any{"x": "2024-05-05 12:34:56Z"}, - exp: []any{pt(context.Background(), "2024-05-05 12:34:56Z").String()}, - }, - { - test: "json_number_string", - path: `$.x.string()`, - json: map[string]any{"x": json.Number("142")}, - exp: []any{"142"}, - }, - { - test: "int_string", - path: `$.x.string()`, - json: map[string]any{"x": int64(42)}, - exp: []any{"42"}, - }, - { - test: "float_string", - path: `$.x.string()`, - json: map[string]any{"x": float64(42.3)}, - exp: []any{"42.3"}, - }, - { - test: "true_string", - path: `$.x.string()`, - json: map[string]any{"x": true}, - exp: []any{"true"}, - }, - { - test: "false_string", - path: `$.x.string()`, - json: map[string]any{"x": false}, - exp: []any{"false"}, - }, - { - test: "null_string", - path: `$.x.string()`, - json: map[string]any{"x": nil}, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - }, - { - test: "array_string", - path: `$.x.string()`, - json: map[string]any{"x": []any{int64(42), true}}, - exp: []any{"42", "true"}, - }, - { - test: "obj_string", - path: `$.x.string()`, - json: map[string]any{"x": map[string]any{"hi": 42}}, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} - -func 
TestExecuteBooleanMethod(t *testing.T) { - t.Parallel() - for _, tc := range []execTestCase{ - { - test: "bool_true", - path: "$.x.boolean()", - json: map[string]any{"x": true}, - exp: []any{true}, - }, - { - test: "bool_false", - path: "$.x.boolean()", - json: map[string]any{"x": false}, - exp: []any{false}, - }, - { - test: "bool_int_1", - path: "$.x.boolean()", - json: map[string]any{"x": int64(1)}, - exp: []any{true}, - }, - { - test: "bool_int_42", - path: "$.x.boolean()", - json: map[string]any{"x": int64(42)}, - exp: []any{true}, - }, - { - test: "bool_int_0", - path: "$.x.boolean()", - json: map[string]any{"x": int64(0)}, - exp: []any{false}, - }, - { - test: "bool_float", - path: "$.x.boolean()", - json: map[string]any{"x": float64(0.1)}, - err: `exec: argument "0.1" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_json_number", - path: "$.x.boolean()", - json: map[string]any{"x": json.Number("-42")}, - exp: []any{true}, - }, - { - test: "bool_json_number_float", - path: "$.x.boolean()", - json: map[string]any{"x": json.Number("-42.1")}, - err: `exec: argument "-42.1" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_t", - path: "$.x.boolean()", - json: map[string]any{"x": "t"}, - exp: []any{true}, - }, - { - test: "bool_string_T", - path: "$.x.boolean()", - json: map[string]any{"x": "T"}, - exp: []any{true}, - }, - { - test: "bool_string_true", - path: "$.x.boolean()", - json: map[string]any{"x": "true"}, - exp: []any{true}, - }, - { - test: "bool_string_TRUE", - path: "$.x.boolean()", - json: map[string]any{"x": "TRUE"}, - exp: []any{true}, - }, - { - test: "bool_string_TrUe", - path: "$.x.boolean()", - json: map[string]any{"x": "TrUe"}, - exp: []any{true}, - }, - { - test: "bool_string_trunk", - path: "$.x.boolean()", - json: map[string]any{"x": "trunk"}, - err: `exec: argument "trunk" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_f", - path: "$.x.boolean()", - json: map[string]any{"x": "f"}, - exp: []any{false}, - }, - { - test: "bool_string_F", - path: "$.x.boolean()", - json: map[string]any{"x": "F"}, - exp: []any{false}, - }, - { - test: "bool_string_false", - path: "$.x.boolean()", - json: map[string]any{"x": "false"}, - exp: []any{false}, - }, - { - test: "bool_string_FALSE", - path: "$.x.boolean()", - json: map[string]any{"x": "FALSE"}, - exp: []any{false}, - }, - { - test: "bool_string_FaLsE", - path: "$.x.boolean()", - json: map[string]any{"x": "FaLsE"}, - exp: []any{false}, - }, - { - test: "bool_string_flunk", - path: "$.x.boolean()", - json: map[string]any{"x": "flunk"}, - err: `exec: argument "flunk" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_y", - path: "$.x.boolean()", - json: map[string]any{"x": "y"}, - exp: []any{true}, - }, - { - test: "bool_string_Y", - path: "$.x.boolean()", - json: map[string]any{"x": "Y"}, - exp: []any{true}, - }, - { - test: "bool_string_yes", - path: "$.x.boolean()", - json: map[string]any{"x": "yes"}, - exp: []any{true}, - }, - { - test: "bool_string_YES", - path: "$.x.boolean()", - json: map[string]any{"x": "YES"}, - exp: []any{true}, - }, - { - test: "bool_string_YeS", - path: "$.x.boolean()", - json: map[string]any{"x": "YeS"}, - exp: []any{true}, - }, - { - test: "bool_string_yet", - path: "$.x.boolean()", - json: map[string]any{"x": "yet"}, - err: `exec: argument "yet" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: 
"bool_string_n", - path: "$.x.boolean()", - json: map[string]any{"x": "n"}, - exp: []any{false}, - }, - { - test: "bool_string_N", - path: "$.x.boolean()", - json: map[string]any{"x": "N"}, - exp: []any{false}, - }, - { - test: "bool_string_no", - path: "$.x.boolean()", - json: map[string]any{"x": "no"}, - exp: []any{false}, - }, - { - test: "bool_string_NO", - path: "$.x.boolean()", - json: map[string]any{"x": "NO"}, - exp: []any{false}, - }, - { - test: "bool_string_nO", - path: "$.x.boolean()", - json: map[string]any{"x": "nO"}, - exp: []any{false}, - }, - { - test: "bool_string_not", - path: "$.x.boolean()", - json: map[string]any{"x": "not"}, - err: `exec: argument "not" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_on", - path: "$.x.boolean()", - json: map[string]any{"x": "on"}, - exp: []any{true}, - }, - { - test: "bool_string_ON", - path: "$.x.boolean()", - json: map[string]any{"x": "ON"}, - exp: []any{true}, - }, - { - test: "bool_string_oN", - path: "$.x.boolean()", - json: map[string]any{"x": "oN"}, - exp: []any{true}, - }, - { - test: "bool_string_o", - path: "$.x.boolean()", - json: map[string]any{"x": "o"}, - err: `exec: argument "o" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_off", - path: "$.x.boolean()", - json: map[string]any{"x": "off"}, - exp: []any{false}, - }, - { - test: "bool_string_OFF", - path: "$.x.boolean()", - json: map[string]any{"x": "OFF"}, - exp: []any{false}, - }, - { - test: "bool_string_OfF", - path: "$.x.boolean()", - json: map[string]any{"x": "OfF"}, - exp: []any{false}, - }, - { - test: "bool_string_oft", - path: "$.x.boolean()", - json: map[string]any{"x": "oft"}, - err: `exec: argument "oft" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_1", - path: "$.x.boolean()", - json: map[string]any{"x": "1"}, - exp: []any{true}, - }, - { - test: "bool_string_1up", - path: "$.x.boolean()", - json: map[string]any{"x": "1up"}, - err: `exec: argument "1up" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_string_0", - path: "$.x.boolean()", - json: map[string]any{"x": "0"}, - exp: []any{false}, - }, - { - test: "bool_string_0n", - path: "$.x.boolean()", - json: map[string]any{"x": "0n"}, - err: `exec: argument "0n" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "bool_array", - path: "$.x.boolean()", - json: map[string]any{"x": []any{"0", true}}, - exp: []any{false, true}, - }, - { - test: "bool_array_strict", - path: "strict $.x.boolean()", - json: map[string]any{"x": []any{"0", true}}, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - }, - { - test: "bool_obj", - path: "strict $.x.boolean()", - json: map[string]any{"x": map[string]any{"0": true}}, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - tc.run(t) - }) - } -} diff --git a/path/exec/execution.go b/path/exec/execution.go deleted file mode 100644 index d0b3c08..0000000 --- a/path/exec/execution.go +++ /dev/null @@ -1,167 +0,0 @@ -package exec - -import ( - "context" - "fmt" - - "github.com/theory/sqljson/path/ast" -) - -// query is the main entry point for all path executions. It executes node -// against value and appends results to vals if vals is not nil. 
Returns -// statusOK if values are found, statusNotFound if none are found, and -// statusFailed on error. When statusFailed is returned, an error will also be -// returned, except when query.verbose is false and the error is ErrVerbose. -func (exec *Executor) query(ctx context.Context, vals *valueList, node ast.Node, value any) (resultStatus, error) { - if exec.strictAbsenceOfErrors() && vals == nil { - // In strict mode we must get a complete list of values to check that - // there are no errors at all. - vals := newList() - res, err := exec.executeItem(ctx, node, value, vals) - if res.failed() { - return res, err - } - - if vals.isEmpty() { - return statusNotFound, nil - } - return statusOK, nil - } - - return exec.executeItem(ctx, node, value, vals) -} - -// executeItem executes jsonpath with automatic unwrapping of current item in -// lax mode. -func (exec *Executor) executeItem( - ctx context.Context, - node ast.Node, - value any, - found *valueList, -) (resultStatus, error) { - return exec.executeItemOptUnwrapTarget(ctx, node, value, found, exec.autoUnwrap()) -} - -// executeItemOptUnwrapResult is the same as executeItem(), but when unwrap is -// true, it automatically unwraps each array item from the resulting sequence -// in lax mode. The found parameter must not be nil. -func (exec *Executor) executeItemOptUnwrapResult( - ctx context.Context, - node ast.Node, - value any, - unwrap bool, - found *valueList, -) (resultStatus, error) { - if unwrap && exec.autoUnwrap() { - seq := newList() - res, err := exec.executeItem(ctx, node, value, seq) - if res.failed() { - return res, err - } - - for _, item := range seq.list { - switch item := item.(type) { - case []any: - _, _ = exec.executeItemUnwrapTargetArray(ctx, nil, item, found) - default: - found.append(item) - } - } - return statusOK, nil - } - return exec.executeItem(ctx, node, value, found) -} - -// executeItemOptUnwrapResultSilent is the same as executeItemOptUnwrapResult, -// but with error suppression. -func (exec *Executor) executeItemOptUnwrapResultSilent( - ctx context.Context, - node ast.Node, - value any, - unwrap bool, - found *valueList, -) (resultStatus, error) { - verbose := exec.verbose - exec.verbose = false - defer func(e *Executor, te bool) { e.verbose = te }(exec, verbose) - return exec.executeItemOptUnwrapResult(ctx, node, value, unwrap, found) -} - -// executeItemOptUnwrapTarget is the main executor function: walks on jsonpath -// structure, finds relevant parts of value and evaluates expressions over -// them. When unwrap is true, the current SQL/JSON item is unwrapped if it is -// an array. Before execution it checks ctx and base with statusNotFound if it -// is done. -func (exec *Executor) executeItemOptUnwrapTarget( - ctx context.Context, - node ast.Node, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - // Check for interrupts. 
- select { - case <-ctx.Done(): - return statusFailed, fmt.Errorf("%w: %w", ErrExecution, ctx.Err()) - default: - } - - switch node := node.(type) { - case *ast.ConstNode: - return exec.execConstNode(ctx, node, value, found, unwrap) - case *ast.StringNode: - return exec.execLiteral(ctx, node, node.Text(), found) - case *ast.IntegerNode: - return exec.execLiteral(ctx, node, node.Int(), found) - case *ast.NumericNode: - return exec.execLiteral(ctx, node, node.Float(), found) - case *ast.VariableNode: - return exec.execVariable(ctx, node, found) - case *ast.KeyNode: - return exec.execKeyNode(ctx, node, value, found, unwrap) - case *ast.BinaryNode: - return exec.execBinaryNode(ctx, node, value, found, unwrap) - case *ast.UnaryNode: - return exec.execUnaryNode(ctx, node, value, found, unwrap) - case *ast.RegexNode: - return exec.execRegexNode(ctx, node, value, found) - case *ast.MethodNode: - return exec.execMethodNode(ctx, node, value, found, unwrap) - case *ast.AnyNode: - return exec.execAnyNode(ctx, node, value, found) - case *ast.ArrayIndexNode: - return exec.execArrayIndex(ctx, node, value, found) - } - - return statusFailed, fmt.Errorf("%w: Unknown node type %T", ErrInvalid, node) -} - -// executeNextItem executes the next jsonpath item if it exists. Otherwise, if -// found is not nil it appends value to found. -func (exec *Executor) executeNextItem( - ctx context.Context, - cur, next ast.Node, - value any, - found *valueList, -) (resultStatus, error) { - var hasNext bool - switch { - case cur == nil: - hasNext = next != nil - case next != nil: - hasNext = cur.Next() != nil - default: - next = cur.Next() - hasNext = next != nil - } - - if hasNext { - return exec.executeItem(ctx, next, value, found) - } - - if found != nil { - found.append(value) - } - - return statusOK, nil -} diff --git a/path/exec/execution_test.go b/path/exec/execution_test.go deleted file mode 100644 index a5f747d..0000000 --- a/path/exec/execution_test.go +++ /dev/null @@ -1,587 +0,0 @@ -package exec - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -func TestQuery(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - value any - vars Vars - throw bool - useTZ bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "lax_root", - path: "$", - value: "hi", - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "var_method", - path: "strict $x.string()", - value: "hi", - vars: Vars{"x": int64(42)}, - exp: statusOK, - find: []any{"42"}, - }, - { - test: "no_var", - path: "strict $x", - value: "hi", - exp: statusFailed, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "use_tz", - path: "$.time()", - value: "12:42:53+01", - useTZ: true, - exp: statusOK, - find: []any{types.NewTime(time.Date(0, 1, 1, 12, 42, 53, 0, time.UTC))}, - }, - { - test: "no_tz", - path: "$.time()", - value: "12:42:53+01", - useTZ: false, - exp: statusFailed, - err: `exec: cannot convert value from timetz to time without time zone usage.` + tzHint, - isErr: ErrExecution, - }, - { - test: "strict_root", - path: "strict $", - value: "hi", - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "filtered_not_found", - path: "$ ?(@ == 1)", - value: "hi", - exp: statusNotFound, - find: []any{}, - }, - { - test: "strict 
filtered_not_found", - path: "strict $ ?(@ == 1)", - value: "hi", - exp: statusNotFound, - find: []any{}, - }, - { - test: "filtered_subset", - path: "$ ?(@ >= 2)", - value: []any{int64(1), int64(3), int64(4), int64(2), int64(0), int64(99)}, - exp: statusOK, - find: []any{int64(3), int64(4), int64(2), int64(99)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Set up executor. - path, err := parser.Parse(tc.path) - r.NoError(err) - e := newTestExecutor(path, tc.vars, tc.throw, tc.useTZ) - e.root = tc.value - e.current = tc.value - - // Start with list. - vals := newList() - res, err := e.query(ctx, vals, e.path.Root(), tc.value) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, vals.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(vals.list) - } - - // Try without list (exists). - res, err = e.query(ctx, nil, e.path.Root(), tc.value) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecuteItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - value any - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "root", - path: "$", - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "strict_root", - path: "strict $", - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "unwrap", - path: "$.string()", - value: []any{int64(42), true}, - exp: statusOK, - find: []any{"42", "true"}, - }, - { - test: "strict_no_unwrap", - path: "strict $.string()", - value: []any{int64(42), true}, - exp: statusFailed, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - isErr: ErrVerbose, - }, - { - test: "filtered_subset", - path: "$ ?(@ >= 2)", - value: []any{int64(1), int64(3), int64(4), int64(2), int64(0), int64(99)}, - exp: statusOK, - find: []any{int64(3), int64(4), int64(2), int64(99)}, - }, - { - test: "filtered_not_found", - path: "$ ?(@ == 1)", - value: "hi", - exp: statusNotFound, - find: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Set up executor. - path, err := parser.Parse(tc.path) - r.NoError(err) - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - e.current = tc.value - - // Start with list. - vals := newList() - res, err := e.executeItem(ctx, e.path.Root(), tc.value, vals) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, vals.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(vals.list) - } - - // Try without list (exists). 
- res, err = e.executeItem(ctx, e.path.Root(), tc.value, nil) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecuteItemOptUnwrapResult(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - path string - value any - unwrap bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "root", - path: "$", - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "strict_root", - path: "strict $", - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "unwrap", - path: "$.string()", - value: []any{int64(42), true}, - unwrap: true, - exp: statusOK, - find: []any{"42", "true"}, - }, - { - test: "unwrap_strict", - path: "strict $.string()", - value: []any{int64(42), true}, - unwrap: true, - exp: statusFailed, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - isErr: ErrVerbose, - }, - { - test: "unwrap_error", - path: "$.integer()", - value: []any{true}, - unwrap: true, - exp: statusFailed, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "no_unwrap_lax", - path: "$.string()", - value: []any{int64(42), true}, - exp: statusOK, - find: []any{"42", "true"}, - }, - { - test: "nested_unwrap", - path: "$", - value: []any{int64(42), []any{true, float64(98.6)}}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), []any{true, float64(98.6)}}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Set up executor. - path, err := parser.Parse(tc.path) - r.NoError(err) - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - e.current = tc.value - - // Execute. - vals := newList() - res, err := e.executeItemOptUnwrapResult(ctx, e.path.Root(), tc.value, tc.unwrap, vals) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, vals.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(vals.list) - } - - // Try silent. 
- vals = newList() - verbose := e.verbose - res, err = e.executeItemOptUnwrapResultSilent(ctx, e.path.Root(), tc.value, tc.unwrap, vals) - a.Equal(tc.exp, res) - a.Equal(verbose, e.verbose) - r.NoError(err) - }) - } -} - -func TestExecuteItemOptUnwrapTarget(t *testing.T) { - t.Parallel() - ctx := context.Background() - rx, _ := ast.NewRegex(ast.NewConst(ast.ConstRoot), "x", "") - type wrapNode struct{ ast.Node } - - for _, tc := range []struct { - test string - cancel bool - node ast.Node - value any - unwrap bool - vars Vars - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "cancel", - cancel: true, - node: ast.NewConst(ast.ConstRoot), - value: true, - exp: statusFailed, - err: "exec: context canceled", - isErr: ErrExecution, - }, - { - test: "const", - node: ast.NewConst(ast.ConstRoot), - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "string", - node: ast.NewString("hi"), - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "integer", - node: ast.NewInteger("42"), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "numeric", - node: ast.NewNumeric("98.6"), - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "variable", - node: ast.NewVariable("x"), - vars: Vars{"x": "hi"}, - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "key", - node: ast.NewKey("x"), - value: map[string]any{"x": "hi"}, - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "binary", - node: ast.NewBinary(ast.BinaryAdd, ast.NewConst(ast.ConstRoot), ast.NewConst(ast.ConstRoot)), - value: int64(21), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "unary", - node: ast.NewUnary(ast.UnaryMinus, ast.NewInteger("42")), - exp: statusOK, - find: []any{int64(-42)}, - }, - { - test: "regex", - node: rx, - value: "hex", - exp: statusOK, - find: []any{true}, - }, - { - test: "method", - node: ast.NewMethod(ast.MethodString), - value: true, - exp: statusOK, - find: []any{"true"}, - }, - { - test: "any", - node: ast.NewAny(0, -1), - value: map[string]any{"x": "y"}, - exp: statusOK, - find: []any{map[string]any{"x": "y"}, "y"}, - }, - { - test: "array_index", - node: ast.NewArrayIndex([]ast.Node{ - ast.NewBinary(ast.BinarySubscript, ast.NewInteger("1"), ast.NewInteger("2")), - }), - value: []any{"x", "y", "z"}, - exp: statusOK, - find: []any{"y", "z"}, - }, - { - test: "unknown_node", - node: wrapNode{}, - exp: statusFailed, - err: `exec invalid: Unknown node type exec.wrapNode`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Set up executor. - e := newTestExecutor(laxRootPath, tc.vars, true, false) - e.root = tc.value - e.current = tc.value - - // Execute. - vals := newList() - var ( - res resultStatus - err error - ) - if tc.cancel { - canceledCtx, cancel := context.WithCancel(ctx) - cancel() - res, err = e.executeItemOptUnwrapTarget(canceledCtx, tc.node, tc.value, vals, tc.unwrap) - r.ErrorIs(err, context.Canceled) - } else { - res, err = e.executeItemOptUnwrapTarget(ctx, tc.node, tc.value, vals, tc.unwrap) - } - a.Equal(tc.exp, res) - - // Check the error and list. 
- if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, vals.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(vals.list) - } - }) - } -} - -func TestExecuteNextItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - cur ast.Node - next ast.Node - value any - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "nil_nil", - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "nil_next", - next: ast.NewMethod(ast.MethodString), - value: true, - exp: statusOK, - find: []any{"true"}, - }, - { - test: "current_next", - cur: ast.NewMethod(ast.MethodBoolean), - next: ast.NewMethod(ast.MethodString), - value: "t", - exp: statusOK, - find: []any{"t"}, - }, - { - test: "current_next_nil", - next: ast.NewConst(ast.ConstRoot), - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "current_next_method", - next: ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstRoot), ast.NewMethod(ast.MethodString)}), - value: true, - exp: statusOK, - find: []any{"true"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Set up executor. - e := newTestExecutor(laxRootPath, nil, true, false) - e.root = tc.value - e.current = tc.value - - // Execute. - vals := newList() - res, err := e.executeNextItem(ctx, tc.cur, tc.next, tc.value, vals) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, vals.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(vals.list) - } - - // Try without found list. - res, err = e.executeNextItem(ctx, tc.cur, tc.next, tc.value, nil) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} diff --git a/path/exec/keyvalue.go b/path/exec/keyvalue.go deleted file mode 100644 index e059281..0000000 --- a/path/exec/keyvalue.go +++ /dev/null @@ -1,134 +0,0 @@ -package exec - -import ( - "context" - "fmt" - "reflect" - "slices" - - "github.com/theory/sqljson/path/ast" - "golang.org/x/exp/maps" // Switch to maps when go 1.22 dropped -) - -// kvBaseObject represents the "base object" and its "id" for .keyvalue() -// evaluation. -type kvBaseObject struct { - addr uintptr - id int -} - -// addrOf returns the pointer address of obj when obj is a valid JSON -// container: one of map[string]any, []any, or Vars. Otherwise it returns 0. -// Used for .keyvalue() ID generation. -func addrOf(obj any) uintptr { - switch obj := obj.(type) { - case []any, map[string]any, Vars: - return reflect.ValueOf(obj).Pointer() - default: - return 0 - } -} - -// OffsetOf returns the offset of obj from bo. This is the difference between -// their pointer addresses. -func (bo kvBaseObject) OffsetOf(obj any) int64 { - addr := addrOf(obj) - if addr > bo.addr { - return int64(addr - bo.addr) - } - return int64(bo.addr - addr) -} - -// setTempBaseObject sets obj as exec.baseObject and returns a function that -// will reset it to the previous value. -func (exec *Executor) setTempBaseObject(obj any, id int) func() { - bo := exec.baseObject - exec.baseObject.addr = addrOf(obj) - exec.baseObject.id = id - return func() { exec.baseObject = bo } -} - -// executeKeyValueMethod implements the .keyvalue() method. -// -// .keyvalue() method returns a sequence of object's key-value pairs in the -// following format: '{ "key": key, "value": value, "id": id }'. 
-// -// "id" field is an object identifier which is constructed from the two parts: -// base object id and its binary offset from the base object: -// id = exec.baseObject.id * 10000000000 + exec.baseObject.OffsetOf(object). -// -// 10000000000 (10^10) -- is the first round decimal number greater than 2^32 -// (maximal offset in jsonb). The decimal multiplier is used here to improve -// the readability of identifiers. -// -// exec.baseObject is usually the root object of the path (context item '$') -// or path variable '$var' (literals can't produce objects for now). Objects -// generated by keyvalue() itself, they become base object for the subsequent -// .keyvalue(). -// -// - ID of '$' is 0. -// - ID of '$var' is 10000000000. -// - IDs for objects generated by .keyvalue() are assigned using global counter -// exec.lastGeneratedObjectId: 20000000000, 30000000000, 40000000000, etc. -func (exec *Executor) executeKeyValueMethod( - ctx context.Context, - node ast.Node, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - var obj map[string]any - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - return exec.returnVerboseError(fmt.Errorf( - `%w: jsonpath item method .keyvalue() can only be applied to an object`, - ErrVerbose, - )) - case map[string]any: - obj = val - default: - return exec.returnVerboseError(fmt.Errorf( - `%w: jsonpath item method .keyvalue() can only be applied to an object`, - ErrVerbose, - )) - } - - if len(obj) == 0 { - // no key-value pairs - return statusNotFound, nil - } - - next := node.Next() - if next == nil && found == nil { - return statusOK, nil - } - - id := exec.baseObject.OffsetOf(obj) - const tenTen = 10000000000 // 10^10 - id += int64(exec.baseObject.id) * tenTen - - // Process the keys in a deterministic order for consistent ID assignment. 
- keys := maps.Keys(obj) - slices.Sort(keys) - - var res resultStatus - for _, k := range keys { - obj := map[string]any{"key": k, "value": obj[k], "id": id} - exec.lastGeneratedObjectID++ - defer exec.setTempBaseObject(obj, exec.lastGeneratedObjectID)() - - var err error - res, err = exec.executeNextItem(ctx, node, next, obj, found) - if res == statusFailed { - return res, err - } - - if res == statusOK && found == nil { - break - } - } - return res, nil -} diff --git a/path/exec/keyvalue_test.go b/path/exec/keyvalue_test.go deleted file mode 100644 index f1e15bf..0000000 --- a/path/exec/keyvalue_test.go +++ /dev/null @@ -1,301 +0,0 @@ -package exec - -import ( - "context" - "reflect" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -func TestAddrOf(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - value any - noID bool - }{ - { - test: "map", - value: map[string]any{"hi": 1}, - }, - { - test: "slice", - value: []any{1, 2}, - }, - { - test: "vars", - value: Vars{"x": true}, - }, - { - test: "int", - value: int64(42), - noID: true, - }, - { - test: "bool", - value: true, - noID: true, - }, - { - test: "nil", - noID: true, - }, - { - test: "datetime", - value: types.NewDate(time.Now()), - noID: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - ptr := addrOf(tc.value) - if tc.noID { - a.Zero(ptr) - } else { - a.Equal(ptr, reflect.ValueOf(tc.value).Pointer()) - } - }) - } -} - -// deltaBetween determines the memory distance between collection and one of -// the items it contains. Used to determine keyvalue IDs at runtime because -// the memory distance can vary at runtime, but should be consistent between -// the same two literal values. -func deltaBetween(collection, item any) int64 { - delta := int64(reflect.ValueOf(item).Pointer() - reflect.ValueOf(collection).Pointer()) - if delta < 0 { - return -delta - } - return delta -} - -func TestKVBaseObject(t *testing.T) { - t.Parallel() - ctx := context.Background() - const tenTen = int64(10000000000) // 10^10 - - // The offset of an array inside a map can very by execution, so calculate - // it at runtime. - mapArray := map[string]any{"x": []any{1, 4}} - mapArrayOff := deltaBetween(mapArray, mapArray["x"]) - - for _, tc := range []struct { - test string - base any - path string - exp int64 - }{ - { - test: "sub-map", - base: map[string]any{"x": map[string]any{"y": 1}}, - path: "$.x", - }, - { - test: "sub-sub-map", - base: map[string]any{"x": map[string]any{"y": map[string]any{"z": 1}}}, - path: "$.x.y", - }, - { - test: "sub-array", - base: mapArray, - path: "$.x", - exp: mapArrayOff, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Use path to fetch the object from base - path, err := parser.Parse(tc.path) - r.NoError(err) - obj, err := First(ctx, path, tc.base) - r.NoError(err) - - kvBase := kvBaseObject{addr: addrOf(tc.base), id: 0} - off := kvBase.OffsetOf(obj) - - if tc.exp > 0 { - // We pre-calculated the id. - a.Equal(tc.exp, off) - } else { - // The ID can vary at runtime (48, 96, 144, 480 are common, but so - // are much larger numbers), so just make sure it's greater than 0 - // and less than 10000000000. 
- a.Positive(off) - a.Less(off, tenTen) - } - }) - } -} - -func TestSetTempBaseObject(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Set up a base object. - e := &Executor{baseObject: kvBaseObject{addr: uintptr(90210), id: 4}} - - // Replace it. - obj := map[string]any{"x": 1} - done := e.setTempBaseObject(obj, 2) - a.Equal(reflect.ValueOf(obj).Pointer(), e.baseObject.addr) - a.Equal(2, e.baseObject.id) - - // Restore the original. - done() - a.Equal(uintptr(90210), e.baseObject.addr) - a.Equal(4, e.baseObject.id) -} - -func TestExecuteKeyValueMethod(t *testing.T) { - t.Parallel() - // ID can vary at runtime, so figure out the value at runtime. - vars := Vars{"foo": map[string]any{"x": true, "y": 1}} - fooID := 10000000000 + deltaBetween(vars, vars["foo"]) - - for _, tc := range []execTestCase{ - { - test: "kv_single", - path: "$.keyvalue()", - json: map[string]any{"x": true}, - exp: []any{map[string]any{"key": "x", "value": true, "id": int64(0)}}, - }, - { - test: "kv_double", - path: "$.keyvalue()", - json: map[string]any{"x": true, "y": "hi"}, - exp: []any{ - map[string]any{"key": "x", "value": true, "id": int64(0)}, - map[string]any{"key": "y", "value": "hi", "id": int64(0)}, - }, - rand: true, // Results can be in any order - }, - { - test: "kv_sequence", - path: "$.keyvalue().keyvalue()", - json: map[string]any{"x": true, "y": "hi"}, - exp: []any{ - map[string]any{"id": int64(20000000000), "key": "key", "value": "x"}, - map[string]any{"id": int64(20000000000), "key": "value", "value": true}, - map[string]any{"id": int64(20000000000), "key": "id", "value": int64(0)}, - map[string]any{"id": int64(60000000000), "key": "id", "value": int64(0)}, - map[string]any{"id": int64(60000000000), "key": "key", "value": "y"}, - map[string]any{"id": int64(60000000000), "key": "value", "value": "hi"}, - }, - rand: true, // Results can be in any order - }, - { - test: "kv_nested", - path: "$.keyvalue()", - json: map[string]any{"foo": map[string]any{"x": true, "y": "hi"}}, - exp: []any{ - map[string]any{"id": int64(0), "key": "foo", "value": map[string]any{"x": true, "y": "hi"}}, - }, - rand: true, // Results can be in any order - }, - { - test: "kv_nested_sequence", - path: "$.keyvalue().keyvalue()", - json: map[string]any{"foo": map[string]any{"x": true, "y": "hi"}}, - exp: []any{ - map[string]any{"id": int64((20000000000)), "key": "id", "value": int64(0)}, - map[string]any{"id": int64(20000000000), "key": "key", "value": "foo"}, - map[string]any{"id": int64(20000000000), "key": "value", "value": map[string]any{"x": true, "y": "hi"}}, - }, - rand: true, // Results can be in any order - }, - { - test: "kv_multi_nested_sequence", - path: "$.keyvalue().keyvalue()", - json: map[string]any{"foo": map[string]any{"x": true, "y": "hi"}, "bar": 2, "baz": 1}, - exp: []any{ - map[string]any{"id": int64(20000000000), "key": "id", "value": int64(0)}, - map[string]any{"id": int64(20000000000), "key": "key", "value": "bar"}, - map[string]any{"id": int64(20000000000), "key": "value", "value": 2}, - map[string]any{"id": int64(60000000000), "key": "id", "value": int64(0)}, - map[string]any{"id": int64(60000000000), "key": "key", "value": "baz"}, - map[string]any{"id": int64(60000000000), "key": "value", "value": 1}, - map[string]any{"id": int64(100000000000), "key": "id", "value": int64(0)}, - map[string]any{"id": int64(100000000000), "key": "key", "value": "foo"}, - map[string]any{"id": int64(100000000000), "key": "value", "value": map[string]any{"x": true, "y": "hi"}}, - }, - rand: true, // Results can 
be in any order - }, - { - test: "kv_variable", - path: "$foo.keyvalue()", - vars: vars, - json: `""`, - exp: []any{ - map[string]any{"key": "x", "value": true, "id": fooID}, - map[string]any{"key": "y", "value": 1, "id": fooID}, - }, - rand: true, // Results can be in any order - }, - { - test: "kv_empty", - path: "$.keyvalue()", - json: map[string]any{}, - exp: []any{}, - }, - { - test: "kv_null", - path: "$.keyvalue()", - json: nil, - err: "exec: jsonpath item method .keyvalue() can only be applied to an object", - exp: []any{}, - }, - { - test: "array_no_unwrap", - path: "strict $.keyvalue()", - json: []any{map[string]any{"x": true}}, - err: "exec: jsonpath item method .keyvalue() can only be applied to an object", - exp: []any{}, - }, - { - test: "next_error", - path: "$.keyvalue().string()", - json: map[string]any{"x": []any{}}, - err: "exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value", - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - tc.run(t) - }) - } -} - -func TestExecuteKeyValueMethodUnwrap(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - ctx := context.Background() - - // Offset of object in a slice is non-determinate, so calculate it at runtime. - value := []any{map[string]any{"x": true, "y": "hi"}} - offset := deltaBetween(value, value[0]) - - // Run the query; lax mode will unwrap value to execute method on its items. - path, err := parser.Parse("$.keyvalue()") - r.NoError(err) - found, err := Query(ctx, path, value) - r.NoError(err) - a.Equal([]any{ - map[string]any{"id": offset, "key": "x", "value": true}, - map[string]any{"id": offset, "key": "y", "value": "hi"}, - }, found) -} diff --git a/path/exec/literal.go b/path/exec/literal.go deleted file mode 100644 index 9814f20..0000000 --- a/path/exec/literal.go +++ /dev/null @@ -1,88 +0,0 @@ -package exec - -import ( - "context" - "fmt" - - "github.com/theory/sqljson/path/ast" -) - -// execLiteral handles the execution of a literal string, integer, or float -// value. -func (exec *Executor) execLiteral( - ctx context.Context, - node ast.Node, - value any, - found *valueList, -) (resultStatus, error) { - next := node.Next() - if next == nil && found == nil { - return statusOK, nil - } - return exec.executeNextItem(ctx, node, next, value, found) -} - -// execVariable handles the execution of a node, returning an error if the -// variable is not found. -func (exec *Executor) execVariable( - ctx context.Context, - node *ast.VariableNode, - found *valueList, -) (resultStatus, error) { - if val, ok := exec.vars[node.Text()]; ok { - // keyvalue ID 1 reserved for variables. - defer exec.setTempBaseObject(exec.vars, 1)() - return exec.executeNextItem(ctx, node, node.Next(), val, found) - } - - // Return error for missing variable. - return statusFailed, fmt.Errorf( - "%w: could not find jsonpath variable %q", - ErrExecution, node.Text(), - ) -} - -// execKeyNode executes node against value, which is expected to be of type -// map[string]any. If its type is []any and unwrap is true, it passes it to -// [executeAnyItem]. Otherwise, it returns statusFailed and an error if -// exec.ignoreStructuralErrors is false and statusNotFound and no error if -// it's true. 
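For readers skimming this removal, a minimal self-contained sketch of the member-accessor behavior described above, using the exported parser.Parse and Query helpers that appear in the deleted tests; the expected results are assumptions drawn from the no_such_key_lax and no_such_key_strict cases later in this hunk:

package main

import (
	"context"
	"fmt"

	"github.com/theory/sqljson/path/exec"
	"github.com/theory/sqljson/path/parser"
)

func main() {
	doc := map[string]any{"x": "hi"}

	// Lax mode (no "strict" prefix): the missing key "y" is a structural
	// error that lax mode ignores, so the query simply finds nothing.
	p, _ := parser.Parse("$.y")
	found, err := exec.Query(context.Background(), p, doc)
	fmt.Println(found, err) // expected: [] <nil>

	// Strict mode: the same missing key is reported as an execution error
	// (assuming the default options surface execution errors).
	p, _ = parser.Parse("strict $.y")
	_, err = exec.Query(context.Background(), p, doc)
	fmt.Println(err) // expected: exec: JSON object does not contain key "y"
}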
-func (exec *Executor) execKeyNode( - ctx context.Context, - node *ast.KeyNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - key := node.Text() - switch value := value.(type) { - case map[string]any: - val, ok := value[key] - if ok { - return exec.executeNextItem(ctx, node, nil, val, found) - } - - if !exec.ignoreStructuralErrors { - if !exec.verbose { - return statusFailed, nil - } - - return statusFailed, fmt.Errorf( - `%w: JSON object does not contain key "%s"`, - ErrVerbose, key, - ) - } - case []any: - if unwrap { - return exec.executeAnyItem(ctx, node, value, found, 1, 1, 1, false, false) - } - } - if !exec.ignoreStructuralErrors { - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath member accessor can only be applied to an object", - ErrVerbose, - )) - } - - return statusNotFound, nil -} diff --git a/path/exec/literal_test.go b/path/exec/literal_test.go deleted file mode 100644 index 51cb84d..0000000 --- a/path/exec/literal_test.go +++ /dev/null @@ -1,311 +0,0 @@ -package exec - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" -) - -func TestExecLiteral(t *testing.T) { - t.Parallel() - path, _ := parser.Parse("$") - ctx := context.Background() - - for _, tc := range []struct { - test string - node ast.Node - value any - exp resultStatus - err string - isErr error - }{ - { - test: "string", - node: ast.NewString("hi"), - value: "hi", - exp: statusOK, - }, - { - test: "integer", - node: ast.NewInteger("42"), - value: int64(42), - exp: statusOK, - }, - { - test: "float", - node: ast.NewNumeric("98.6"), - value: float64(98.6), - exp: statusOK, - }, - { - test: "error", - node: ast.LinkNodes([]ast.Node{ast.NewString("hi"), ast.NewMethod(ast.MethodInteger)}), - err: "exec: jsonpath item method .integer() can only be applied to a string or numeric value", - isErr: ErrExecution, - exp: statusFailed, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(path, nil, true, false) - list := newList() - res, err := e.execLiteral(ctx, tc.node, tc.value, list) - a.Equal(tc.exp, res) - - if tc.isErr == nil { - r.NoError(err) - a.Equal([]any{tc.value}, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Test with nil found. - res, err = e.execLiteral(ctx, tc.node, tc.value, nil) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecVariable(t *testing.T) { - t.Parallel() - path, _ := parser.Parse("$") - ctx := context.Background() - - // Offset of object in a slice is non-determinate, so calculate it at runtime. 
- vars := Vars{"x": map[string]any{"y": "hi"}} - xID := 10000000000 + deltaBetween(vars, vars["x"]) - - for _, tc := range []struct { - test string - vars Vars - node ast.Node - exp resultStatus - find any - err string - isErr error - }{ - { - test: "var_exists", - vars: Vars{"x": "hi"}, - node: ast.NewVariable("x"), - exp: statusOK, - find: "hi", - }, - { - test: "var_not_exists", - vars: Vars{"x": "hi"}, - node: ast.NewVariable("y"), - err: `exec: could not find jsonpath variable "y"`, - isErr: ErrExecution, - exp: statusFailed, - }, - { - test: "var_exists_next", - vars: Vars{"x": int64(42)}, - node: ast.LinkNodes([]ast.Node{ast.NewVariable("x"), ast.NewMethod(ast.MethodString)}), - exp: statusOK, - find: "42", - }, - { - test: "var_exists_next_keyvalue", - vars: vars, - node: ast.LinkNodes([]ast.Node{ast.NewVariable("x"), ast.NewMethod(ast.MethodKeyValue)}), - exp: statusOK, - find: map[string]any{"id": xID, "key": "y", "value": "hi"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Make sure we have a variable node. - node, ok := tc.node.(*ast.VariableNode) - r.True(ok) - - // Set up an executor. - e := newTestExecutor(path, nil, true, false) - e.vars = tc.vars - - // Test execVariable with a list. - list := newList() - res, err := e.execVariable(ctx, node, list) - a.Equal(tc.exp, res) - // Root ID 0 should be restored. - a.Equal(0, e.baseObject.id) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal([]any{tc.find}, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Test with nil found. - res, err = e.execVariable(ctx, node, nil) - a.Equal(tc.exp, res) - // Root ID 0 should be restored. - a.Equal(0, e.baseObject.id) - - // Check the error and list. 
- if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecKeyNode(t *testing.T) { - t.Parallel() - ctx := context.Background() - lax, _ := parser.Parse("$") - strict, _ := parser.Parse("strict $") - - for _, tc := range []struct { - test string - path *ast.AST - node ast.Node - value any - unwrap bool - silent bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "find_key_string", - path: lax, - node: ast.NewKey("x"), - value: map[string]any{"x": "hi"}, - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "find_key_array", - path: lax, - node: ast.NewKey("y"), - value: map[string]any{"y": []any{"go"}}, - exp: statusOK, - find: []any{[]any{"go"}}, - }, - { - test: "find_key_obj", - path: lax, - node: ast.NewKey("z"), - value: map[string]any{"z": map[string]any{"a": "go"}}, - exp: statusOK, - find: []any{map[string]any{"a": "go"}}, - }, - { - test: "no_such_key_lax", - path: lax, - node: ast.NewKey("y"), - value: map[string]any{"x": "hi"}, - exp: statusNotFound, - find: []any{}, - }, - { - test: "no_such_key_strict", - path: strict, - node: ast.NewKey("y"), - value: map[string]any{"x": "hi"}, - exp: statusFailed, - err: `exec: JSON object does not contain key "y"`, - isErr: ErrVerbose, - }, - { - test: "no_such_key_strict_silent", - path: strict, - node: ast.NewKey("y"), - silent: true, - value: map[string]any{"x": "hi"}, - exp: statusFailed, - find: []any{}, - }, - { - test: "not_an_object_lax", - path: lax, - node: ast.NewKey("y"), - value: []any{"hi"}, - exp: statusNotFound, - find: []any{}, - }, - { - test: "not_an_object_strict", - path: strict, - node: ast.NewKey("y"), - value: []any{"hi"}, - exp: statusFailed, - err: `exec: jsonpath member accessor can only be applied to an object`, - isErr: ErrVerbose, - }, - { - test: "unwrap_array", - path: lax, - node: ast.NewKey("y"), - value: []any{map[string]any{"y": "arg"}}, - unwrap: true, - exp: statusOK, - find: []any{"arg"}, - }, - { - test: "find_key_with_next", - path: lax, - node: ast.LinkNodes([]ast.Node{ast.NewKey("x"), ast.NewKey("y")}), - value: map[string]any{"x": map[string]any{"y": "hi"}}, - exp: statusOK, - find: []any{"hi"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Make sure we have a key node. - node, ok := tc.node.(*ast.KeyNode) - r.True(ok) - - // Set up an executor. - e := newTestExecutor(tc.path, nil, true, false) - e.verbose = !tc.silent - - // Test execKeyNode with a list. - list := newList() - res, err := e.execKeyNode(ctx, node, tc.value, list, tc.unwrap) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - }) - } -} diff --git a/path/exec/math.go b/path/exec/math.go deleted file mode 100644 index 665a4f8..0000000 --- a/path/exec/math.go +++ /dev/null @@ -1,234 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "fmt" - "math" - - "github.com/theory/sqljson/path/ast" -) - -// executeIntegerMath compares lhs to rhs using op and returns the resulting -// value. op must be a binary math operator. Returns an error for an attempt -// to divide by zero. 
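To make the divide-by-zero contract above concrete, a small in-package sketch (the helper is unexported, so this could only live alongside the deleted math_test.go; the values mirror its add and div_zero cases below):

sum, _ := executeIntegerMath(20, 22, ast.BinaryAdd) // sum == int64(42)
if _, err := executeIntegerMath(42, 0, ast.BinaryDiv); err != nil {
	// The error wraps ErrVerbose, so non-verbose execution can suppress it.
	fmt.Println(errors.Is(err, ErrVerbose), err) // true exec: division by zero
}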
-func executeIntegerMath(lhs, rhs int64, op ast.BinaryOperator) (int64, error) { - switch op { - case ast.BinaryAdd: - return lhs + rhs, nil - case ast.BinarySub: - return lhs - rhs, nil - case ast.BinaryMul: - return lhs * rhs, nil - case ast.BinaryDiv: - if rhs == 0 { - return 0, fmt.Errorf("%w: division by zero", ErrVerbose) - } - return lhs / rhs, nil - case ast.BinaryMod: - if rhs == 0 { - return 0, fmt.Errorf("%w: division by zero", ErrVerbose) - } - return lhs % rhs, nil - default: - // We process only the binary math operators here. - return 0, fmt.Errorf("%w: %v is not a binary math operator", ErrInvalid, op) - } -} - -// executeIntegerMath compares lhs to rhs using op and returns the resulting -// value. op must be a binary math operator. Returns an error for an attempt -// to divide by zero. -func executeFloatMath(lhs, rhs float64, op ast.BinaryOperator) (float64, error) { - switch op { - case ast.BinaryAdd: - return lhs + rhs, nil - case ast.BinarySub: - return lhs - rhs, nil - case ast.BinaryMul: - return lhs * rhs, nil - case ast.BinaryDiv: - if rhs == 0 { - return 0, fmt.Errorf("%w: division by zero", ErrVerbose) - } - return lhs / rhs, nil - case ast.BinaryMod: - if rhs == 0 { - return 0, fmt.Errorf("%w: division by zero", ErrVerbose) - } - return math.Mod(lhs, rhs), nil - default: - // We process only the binary math operators here. - return 0, fmt.Errorf("%w: %v is not a binary math operator", ErrInvalid, op) - } -} - -// mathOperandErr creates an error for an invalid operand to op. pos is the -// position of the operand, either "left" or "right". -func mathOperandErr(op ast.BinaryOperator, pos string) error { - return fmt.Errorf( - "%w: %v operand of jsonpath operator %v is not a single numeric value", - ErrVerbose, pos, op, - ) -} - -// execUnaryMathExpr executes a unary arithmetic expression for each numeric -// item in its operand's sequence. An array operand is automatically unwrapped -// in lax mode. intCallback and floatCallback are responsible for executing -// the unary math operation. -func (exec *Executor) execUnaryMathExpr( - ctx context.Context, - node *ast.UnaryNode, - value any, - intCallback intCallback, - floatCallback floatCallback, - found *valueList, -) (resultStatus, error) { - seq := newList() - res, err := exec.executeItemOptUnwrapResult(ctx, node.Operand(), value, true, seq) - if res == statusFailed { - return res, err - } - - res = statusNotFound - next := node.Next() - var val any - - for _, v := range seq.list { - val = v - ok := true - switch v := v.(type) { - case int64: - if found == nil && next == nil { - return statusOK, nil - } - val = intCallback(v) - case float64: - if found == nil && next == nil { - return statusOK, nil - } - val = floatCallback(v) - case json.Number: - if found == nil && next == nil { - return statusOK, nil - } - val, ok = castJSONNumber(v, intCallback, floatCallback) - default: - ok = found == nil && next == nil - } - - if !ok { - return exec.returnVerboseError(fmt.Errorf( - "%w: operand of unary jsonpath operator %v is not a numeric value", - ErrVerbose, node.Operator(), - )) - } - - nextRes, err := exec.executeNextItem(ctx, node, next, val, found) - if nextRes.failed() { - return nextRes, err - } - if nextRes == statusOK { - if found == nil { - return statusOK, nil - } - res = nextRes - } - } - - return res, nil -} - -// execBinaryMathExpr executes a binary arithmetic expression on singleton -// numeric operands. Array operands are automatically unwrapped in lax mode. 
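A sketch of the lax-mode unwrapping mentioned above, written in the style of the deleted in-package tests (parser.Parse and Query as called in keyvalue_test.go); the expected result is an assumption that follows from the singleton-operand rule documented here:

p, _ := parser.Parse("$.x + 3")
// Lax mode unwraps the single-element array operand to its one numeric
// value, so the addition yields exactly one result.
got, err := Query(context.Background(), p, map[string]any{"x": []any{int64(2)}})
// expected: got == []any{int64(5)}, err == nil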
-func (exec *Executor) execBinaryMathExpr( - ctx context.Context, - node *ast.BinaryNode, - value any, - found *valueList, -) (resultStatus, error) { - // Get the left node. - // XXX: The standard says only operands of multiplicative expressions are - // unwrapped. We extend it to other binary arithmetic expressions too. - lSeq := newList() - res, err := exec.executeItemOptUnwrapResult(ctx, node.Left(), value, true, lSeq) - if res == statusFailed { - return res, err - } - - op := node.Operator() - if len(lSeq.list) != 1 { - return exec.returnVerboseError(mathOperandErr(op, "left")) - } - - rSeq := newList() - res, err = exec.executeItemOptUnwrapResult(ctx, node.Right(), value, true, rSeq) - if res == statusFailed { - return res, err - } - - if len(rSeq.list) != 1 { - return exec.returnVerboseError(mathOperandErr(op, "right")) - } - - val, err := execMathOp(lSeq.list[0], rSeq.list[0], op) - if err != nil { - return exec.returnVerboseError(err) - } - - next := node.Next() - if next == nil && found == nil { - return statusOK, nil - } - - return exec.executeNextItem(ctx, node, next, val, found) -} - -// execMathOp casts left and right into numbers and, if it succeeds, applies -// the binary math op to left and right. left and right must be an int64, a -// float64, or a [json.Number]. In the latter case, execMathOp tries to cast -// values to int64, and falls back on float64. -func execMathOp(left, right any, op ast.BinaryOperator) (any, error) { - switch left := left.(type) { - case int64: - switch right := right.(type) { - case int64: - return executeIntegerMath(left, right, op) - case float64: - return executeFloatMath(float64(left), right, op) - case json.Number: - if right, err := right.Int64(); err == nil { - return executeIntegerMath(left, right, op) - } - if right, err := right.Float64(); err == nil { - return executeFloatMath(float64(left), right, op) - } - return nil, mathOperandErr(op, "right") - default: - return nil, mathOperandErr(op, "right") - } - case float64: - switch right := right.(type) { - case float64: - return executeFloatMath(left, right, op) - case int64: - return executeFloatMath(left, float64(right), op) - case json.Number: - if right, err := right.Float64(); err == nil { - return executeFloatMath(left, right, op) - } - return nil, mathOperandErr(op, "right") - default: - return nil, mathOperandErr(op, "right") - } - case json.Number: - if left, err := left.Int64(); err == nil { - return execMathOp(left, right, op) - } - if left, err := left.Float64(); err == nil { - return execMathOp(left, right, op) - } - } - - return nil, mathOperandErr(op, "left") -} diff --git a/path/exec/math_test.go b/path/exec/math_test.go deleted file mode 100644 index 00bfe5c..0000000 --- a/path/exec/math_test.go +++ /dev/null @@ -1,644 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "fmt" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" -) - -func TestExecuteIntegerMath(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - left int64 - right int64 - op ast.BinaryOperator - exp int64 - err string - isErr error - }{ - { - test: "add", - left: 20, - right: 22, - op: ast.BinaryAdd, - exp: 42, - }, - { - test: "sub", - left: 20, - right: 22, - op: ast.BinarySub, - exp: -2, - }, - { - test: "mul", - left: 21, - right: 2, - op: ast.BinaryMul, - exp: 42, - }, - { - test: "div", - left: 42, - right: 2, - op: ast.BinaryDiv, - exp: 
21, - }, - { - test: "div_zero", - left: 42, - right: 0, - op: ast.BinaryDiv, - err: "exec: division by zero", - isErr: ErrVerbose, - }, - { - test: "mod", - left: 42, - right: 4, - op: ast.BinaryMod, - exp: 2, - }, - { - test: "mod_zero", - left: 42, - right: 0, - op: ast.BinaryMod, - err: "exec: division by zero", - isErr: ErrVerbose, - }, - { - test: "not_math", - op: ast.BinaryAnd, - err: "exec invalid: && is not a binary math operator", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - res, err := executeIntegerMath(tc.left, tc.right, tc.op) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecuteFloatMath(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - left float64 - right float64 - op ast.BinaryOperator - exp float64 - err string - isErr error - }{ - { - test: "add", - left: 98.6, - right: 0.5, - op: ast.BinaryAdd, - exp: 99.1, - }, - { - test: "sub", - left: 14.8, - right: 1.4, - op: ast.BinarySub, - exp: 13.4, - }, - { - test: "mul", - left: 18, - right: 2.2, - op: ast.BinaryMul, - exp: 39.6, - }, - { - test: "div", - left: 12.4, - right: 4, - op: ast.BinaryDiv, - exp: 3.1, - }, - { - test: "div_zero", - left: 42, - right: 0.0, - op: ast.BinaryDiv, - err: "exec: division by zero", - isErr: ErrVerbose, - }, - { - test: "mod", - left: 42.0, - right: 4.0, - op: ast.BinaryMod, - exp: 2.0, - }, - { - test: "mod_zero", - left: 42, - right: 0.0, - op: ast.BinaryMod, - err: "exec: division by zero", - isErr: ErrVerbose, - }, - { - test: "not_math", - op: ast.BinaryAnd, - err: "exec invalid: && is not a binary math operator", - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - res, err := executeFloatMath(tc.left, tc.right, tc.op) - //nolint:testifylint - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestMathOperandErr(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op ast.BinaryOperator - pos string - }{ - { - test: "add_left", - op: ast.BinaryAdd, - pos: "left", - }, - { - test: "sub_right", - op: ast.BinarySub, - pos: "right", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - err := mathOperandErr(tc.op, tc.pos) - r.EqualError(err, fmt.Sprintf( - "exec: %v operand of jsonpath operator %v is not a single numeric value", - tc.pos, tc.op, - )) - r.ErrorIs(err, ErrVerbose) - }) - } -} - -func TestExecUnaryMathExpr(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - icb := func(i int64) int64 { return i * 2 } - fcb := func(i float64) float64 { return i * 3 } - - for _, tc := range []struct { - test string - node ast.Node - value any - exp resultStatus - find []any - err string - isErr error - okNoList bool - }{ - { - test: "item_error", - node: ast.NewUnary(ast.UnaryPlus, ast.NewVariable("x")), - exp: statusFailed, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "int", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - value: int64(-2), - exp: statusOK, - find: []any{int64(-4)}, - }, - { - test: "ints", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - value: []any{int64(-2), int64(5)}, - exp: 
statusOK, - find: []any{int64(-4), int64(10)}, - }, - { - test: "float", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - value: []any{float64(-2), float64(5)}, - exp: statusOK, - find: []any{float64(-6), float64(15)}, - }, - { - test: "json_int", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - value: []any{json.Number("-2"), json.Number("5")}, - exp: statusOK, - find: []any{int64(-4), int64(10)}, - }, - { - test: "json_float", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - value: []any{json.Number("-2.5"), json.Number("5.5")}, - exp: statusOK, - find: []any{float64(-7.5), float64(16.5)}, - }, - { - test: "json_bad", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - value: []any{json.Number("lol")}, - exp: statusFailed, - err: `exec: operand of unary jsonpath operator + is not a numeric value`, - isErr: ErrVerbose, - okNoList: true, - }, - { - test: "nan", - node: ast.NewUnary(ast.UnaryMinus, ast.NewConst(ast.ConstRoot)), - value: []any{"foo"}, - exp: statusFailed, - err: `exec: operand of unary jsonpath operator - is not a numeric value`, - isErr: ErrVerbose, - okNoList: true, - }, - { - test: "next_item", - node: ast.LinkNodes([]ast.Node{ - ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - ast.NewMethod(ast.MethodString), - }), - value: []any{int64(21)}, - exp: statusOK, - find: []any{"42"}, - }, - { - test: "next_item_error", - node: ast.LinkNodes([]ast.Node{ - ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - ast.NewMethod(ast.MethodKeyValue), - }), - value: []any{int64(21)}, - exp: statusFailed, - err: `exec: jsonpath item method .keyvalue() can only be applied to an object`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Make sure we have a unary node. - node, ok := tc.node.(*ast.UnaryNode) - r.True(ok) - - // Set up an executor. - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - - // Test execKeyNode with a list. - list := newList() - res, err := e.execUnaryMathExpr(ctx, node, tc.value, icb, fcb, list) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Try with nil found. 
- res, err = e.execUnaryMathExpr(ctx, node, tc.value, icb, fcb, nil) - if tc.okNoList { - a.Equal(statusOK, res) - r.NoError(err) - } else { - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - } - }) - } -} - -func TestExecBinaryMathExpr(t *testing.T) { - t.Parallel() - ctx := context.Background() - path, _ := parser.Parse("$") - - for _, tc := range []struct { - test string - node ast.Node - value any - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "invalid_left_value", - node: ast.NewBinary(ast.BinaryAdd, ast.NewVariable("x"), ast.NewInteger("2")), - exp: statusFailed, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "invalid_right_value", - node: ast.NewBinary(ast.BinaryAdd, ast.NewInteger("2"), ast.NewVariable("x")), - exp: statusFailed, - err: `exec: could not find jsonpath variable "x"`, - isErr: ErrExecution, - }, - { - test: "too_many_left", - node: ast.NewBinary(ast.BinaryAdd, ast.NewConst(ast.ConstRoot), ast.NewInteger("2")), - value: []any{int64(4), int64(4)}, - exp: statusFailed, - err: `exec: left operand of jsonpath operator + is not a single numeric value`, - isErr: ErrExecution, - }, - { - test: "too_many_right", - node: ast.NewBinary(ast.BinaryAdd, ast.NewInteger("2"), ast.NewConst(ast.ConstRoot)), - value: []any{int64(4), int64(4)}, - exp: statusFailed, - err: `exec: right operand of jsonpath operator + is not a single numeric value`, - isErr: ErrExecution, - }, - { - test: "add_int", - node: ast.NewBinary(ast.BinaryAdd, ast.NewConst(ast.ConstRoot), ast.NewInteger("2")), - value: int64(4), - exp: statusOK, - find: []any{int64(6)}, - }, - { - test: "mul_float", - node: ast.NewBinary(ast.BinaryMul, ast.NewConst(ast.ConstRoot), ast.NewInteger("2")), - value: float64(2.2), - exp: statusOK, - find: []any{float64(4.4)}, - }, - { - test: "invalid_operand", - node: ast.NewBinary(ast.BinaryAdd, ast.NewConst(ast.ConstRoot), ast.NewString("hi")), - value: int64(4), - exp: statusFailed, - err: `exec: right operand of jsonpath operator + is not a single numeric value`, - isErr: ErrExecution, - }, - { - test: "add_int_next", - node: ast.LinkNodes([]ast.Node{ - ast.NewBinary(ast.BinaryAdd, ast.NewConst(ast.ConstRoot), ast.NewInteger("2")), - ast.NewMethod(ast.MethodString), - }), - value: int64(4), - exp: statusOK, - find: []any{"6"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Make sure we have a binary node. - node, ok := tc.node.(*ast.BinaryNode) - r.True(ok) - - // Set up an executor. - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - - // Test execKeyNode with a list. - list := newList() - res, err := e.execBinaryMathExpr(ctx, node, tc.value, list) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Try with nil found. 
- res, err = e.execBinaryMathExpr(ctx, node, tc.value, nil) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecMathOp(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - left any - right any - op ast.BinaryOperator - exp any - err string - isErr error - }{ - { - test: "int_int_add", - left: int64(2), - right: int64(5), - op: ast.BinaryAdd, - exp: int64(7), - }, - { - test: "int_float_sub", - left: int64(7), - right: float64(2), - op: ast.BinarySub, - exp: float64(5), - }, - { - test: "int_json_float_mul", - left: int64(2), - right: json.Number("5.2"), - op: ast.BinaryMul, - exp: float64(10.4), - }, - { - test: "int_json_int_div", - left: int64(10), - right: json.Number("5"), - op: ast.BinaryDiv, - exp: int64(2), - }, - { - test: "int_json_bad", - left: int64(10), - right: json.Number("hi"), - op: ast.BinaryDiv, - err: `exec: right operand of jsonpath operator / is not a single numeric value`, - isErr: ErrVerbose, - }, - { - test: "int_nan", - left: int64(10), - right: "hi", - op: ast.BinaryMod, - err: `exec: right operand of jsonpath operator % is not a single numeric value`, - isErr: ErrVerbose, - }, - { - test: "float_int_sub", - left: float64(7.2), - right: int64(2), - op: ast.BinarySub, - exp: float64(5.2), - }, - { - test: "float_float_add", - left: float64(7.2), - right: float64(1.6), - op: ast.BinaryAdd, - exp: float64(8.8), - }, - { - test: "float_json_int_sub", - left: float64(7.2), - right: json.Number("2"), - op: ast.BinarySub, - exp: float64(5.2), - }, - { - test: "float_json_float_add", - left: float64(7.2), - right: json.Number("1.6"), - op: ast.BinaryAdd, - exp: float64(8.8), - }, - { - test: "float_json_bad", - left: float64(10), - right: json.Number("hi"), - op: ast.BinaryMul, - err: `exec: right operand of jsonpath operator * is not a single numeric value`, - isErr: ErrVerbose, - }, - { - test: "float_nan", - left: float64(10), - right: "hi", - op: ast.BinaryMod, - err: `exec: right operand of jsonpath operator % is not a single numeric value`, - isErr: ErrVerbose, - }, - { - test: "json_int_int_add", - left: json.Number("2"), - right: int64(5), - op: ast.BinaryAdd, - exp: int64(7), - }, - { - test: "json_int_float_sub", - left: json.Number("10"), - right: float64(2.2), - op: ast.BinarySub, - exp: float64(7.8), - }, - { - test: "json_float_int_add", - left: json.Number("2.2"), - right: int64(5), - op: ast.BinaryAdd, - exp: float64(7.2), - }, - { - test: "json_float_float_sub", - left: json.Number("10.4"), - right: float64(2.2), - op: ast.BinarySub, - exp: float64(8.2), - }, - { - test: "json_bad", - left: json.Number("hi"), - op: ast.BinaryMul, - err: `exec: left operand of jsonpath operator * is not a single numeric value`, - isErr: ErrVerbose, - }, - { - test: "bad_left", - left: "hi", - op: ast.BinaryAdd, - err: `exec: left operand of jsonpath operator + is not a single numeric value`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - res, err := execMathOp(tc.left, tc.right, tc.op) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} diff --git a/path/exec/method.go b/path/exec/method.go deleted file mode 100644 index 14c4d3b..0000000 --- a/path/exec/method.go +++ /dev/null @@ -1,708 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "fmt" - 
"math" - "strconv" - "strings" - - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/types" -) - -// execMethodNode dispatches the relevant method for node. -func (exec *Executor) execMethodNode( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - switch name := node.Name(); name { - case ast.MethodNumber: - return exec.executeNumberMethod(ctx, node, value, found, unwrap, node) - case ast.MethodAbs: - return exec.executeNumericItemMethod( - ctx, node, value, unwrap, - intAbs, math.Abs, found, - ) - case ast.MethodFloor: - return exec.executeNumericItemMethod( - ctx, node, value, unwrap, - intSelf, math.Floor, found, - ) - case ast.MethodCeiling: - return exec.executeNumericItemMethod( - ctx, node, value, unwrap, - intSelf, math.Ceil, found, - ) - case ast.MethodType: - return exec.execMethodType(ctx, node, value, found) - case ast.MethodSize: - return exec.execMethodSize(ctx, node, value, found) - case ast.MethodDouble: - return exec.execMethodDouble(ctx, node, value, found, unwrap) - case ast.MethodInteger: - return exec.execMethodInteger(ctx, node, value, found, unwrap) - case ast.MethodBigInt: - return exec.execMethodBigInt(ctx, node, value, found, unwrap) - case ast.MethodString: - return exec.execMethodString(ctx, node, value, found, unwrap) - case ast.MethodBoolean: - return exec.execMethodBoolean(ctx, node, value, found, unwrap) - case ast.MethodKeyValue: - return exec.executeKeyValueMethod(ctx, node, value, found, unwrap) - default: - return statusFailed, fmt.Errorf( - "%w: unknown method %v", ErrInvalid, name, - ) - } -} - -// execMethodType handles the execution of .type() by determining the type of -// value and passing it to the next execution node. -func (exec *Executor) execMethodType( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, -) (resultStatus, error) { - var typeName string - switch value.(type) { - case map[string]any: - typeName = "object" - case []any: - typeName = "array" - case string: - typeName = "string" - case int64, float64, json.Number: - typeName = "number" - case bool: - typeName = "boolean" - case *types.Date: - typeName = "date" - case *types.Time: - typeName = "time without time zone" - case *types.TimeTZ: - typeName = "time with time zone" - case *types.Timestamp: - typeName = "timestamp without time zone" - case *types.TimestampTZ: - typeName = "timestamp with time zone" - case nil: - typeName = "null" - default: - return statusFailed, fmt.Errorf( - "%w: unsupported data type %T", ErrInvalid, value, - ) - } - - return exec.executeNextItem(ctx, node, nil, typeName, found) -} - -// execMethodSize handles the execution of .size() by determining the size of -// value and passing it to the next execution node. value's type should be -// []any, but it will be passed on if exec.autoWrap returns true and -// exec.ignoreStructuralErrors is true. 
-func (exec *Executor) execMethodSize( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, -) (resultStatus, error) { - size := 1 - switch value := value.(type) { - case []any: - size = len(value) - default: - if !exec.autoWrap() && !exec.ignoreStructuralErrors { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_exec.c#L1112 - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to an array", - ErrVerbose, node.Name(), - )) - } - } - return exec.executeNextItem(ctx, node, nil, int64(size), found) -} - -// execMethodDouble handles the execution of .double(). value must be a -// numeric value or a string that can be parsed into a float64, or an array -// ([]any) to which .double() will be applied to all of its values when unwrap -// is true. -func (exec *Executor) execMethodDouble( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - var double float64 - name := node.Name() - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a string or numeric value", - ErrVerbose, name, - )) - case int64: - double = float64(val) - case float64: - double = val - case json.Number: - var err error - double, err = val.Float64() - if err != nil { - return statusFailed, fmt.Errorf( - `%w: argument %q of jsonpath item method %v is invalid for type %v`, - ErrExecution, val, name, "double precision", - ) - } - case string: - var err error - double, err = strconv.ParseFloat(val, 64) - if err != nil { - return statusFailed, fmt.Errorf( - `%w: argument %q of jsonpath item method %v is invalid for type %v`, - ErrExecution, val, name, "double precision", - ) - } - default: - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a string or numeric value", - ErrVerbose, name, - )) - } - - if math.IsInf(double, 0) || math.IsNaN(double) { - return exec.returnVerboseError(fmt.Errorf( - "%w: NaN or Infinity is not allowed for jsonpath item method %v", - ErrVerbose, name, - )) - } - - return exec.executeNextItem(ctx, node, nil, double, found) -} - -// execMethodInteger handles the execution of .integer(). value must be a -// numeric value or a string that can be parsed into an int32, or an array -// ([]any) to which .integer() will be applied to all of its values when -// unwrap is true. The value must be within the bounds of int32; returns a -// value of int64 since to allow its processing by other parts of the -// executor, which does not handle int32. 
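// Illustrative sketch (not part of the diff above): the int32 range check
// described for .integer(), shown standalone. Floats are rounded, strings
// must parse as base-10 integers, and anything outside the int32 range is
// rejected. toInt32 and errNotInteger are hypothetical names.
package main

import (
	"errors"
	"fmt"
	"math"
	"strconv"
)

var errNotInteger = errors.New("value is not a valid integer")

func toInt32(v any) (int64, error) {
	var n int64
	switch v := v.(type) {
	case int64:
		n = v
	case float64:
		n = int64(math.Round(v))
	case string:
		var err error
		n, err = strconv.ParseInt(v, 10, 32)
		if err != nil {
			return 0, errNotInteger
		}
	default:
		return 0, errNotInteger
	}
	if n > math.MaxInt32 || n < math.MinInt32 {
		return 0, errNotInteger
	}
	return n, nil
}

func main() {
	fmt.Println(toInt32(float64(98.6)))  // 99 <nil>
	fmt.Println(toInt32("2147483648"))   // 0 value is not a valid integer
	fmt.Println(toInt32(int64(1) << 40)) // 0 value is not a valid integer
}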
-func (exec *Executor) execMethodInteger( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - var ( - integer int64 - err error - ) - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a string or numeric value", - ErrVerbose, node.Name(), - )) - case int64: - integer = val - case float64: - integer = int64(math.Round(val)) - case json.Number: - integer, err = val.Int64() - if err != nil { - var f float64 - f, err = val.Float64() - if err == nil { - integer = int64(math.Round(f)) - } - } - case string: - integer, err = strconv.ParseInt(val, 10, 32) - default: - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a string or numeric value", - ErrVerbose, node.Name(), - )) - } - - if err != nil || integer > math.MaxInt32 || integer < math.MinInt32 { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, value, node.Name(), "integer", - )) - } - - return exec.executeNextItem(ctx, node, nil, integer, found) -} - -// execMethodBigInt handles the execution of .bigint(). value must be a -// numeric value or a string that can be parsed into an int64, or an array -// ([]any) to which .bigint() will be applied to all of its values when unwrap -// is true. -func (exec *Executor) execMethodBigInt( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - var bigInt int64 - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a string or numeric value", - ErrVerbose, node.Name(), - )) - case int64: - bigInt = val - case float64: - if val > math.MaxInt64 || val < math.MinInt64 || math.IsInf(val, 0) || math.IsNaN(val) { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, val, node.Name(), "bigint", - )) - } - bigInt = int64(math.Round(val)) - case json.Number: - var err error - bigInt, err = val.Int64() - if err != nil { - var f float64 - f, err = val.Float64() - if err != nil || f > math.MaxInt64 || f < math.MinInt64 || math.IsInf(f, 0) || math.IsNaN(f) { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, val, node.Name(), "bigint", - )) - } - bigInt = int64(math.Round(f)) - } - case string: - var err error - bigInt, err = strconv.ParseInt(val, 10, 64) - if err != nil { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, val, node.Name(), "bigint", - )) - } - default: - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a string or numeric value", - ErrVerbose, node.Name(), - )) - } - - return exec.executeNextItem(ctx, node, nil, bigInt, found) -} - -// execMethodString handles the execution of .string(). value must be a -// string, number, boolean, or able to be cast to a string. 
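// Illustrative sketch (not part of the diff above): the .string() conversions
// rely on standard strconv formatting; a standalone variant covering the
// boolean, integer, float, and json.Number cases (datetime types omitted).
// stringify is a hypothetical helper name.
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strconv"
)

func stringify(v any) (string, error) {
	switch v := v.(type) {
	case string:
		return v, nil
	case json.Number:
		return v.String(), nil
	case int64:
		return strconv.FormatInt(v, 10), nil
	case float64:
		return strconv.FormatFloat(v, 'f', -1, 64), nil
	case bool:
		return strconv.FormatBool(v), nil
	default:
		return "", errors.New("value cannot be cast to string")
	}
}

func main() {
	for _, v := range []any{int64(42), 98.6, true, json.Number("1e3")} {
		s, _ := stringify(v)
		fmt.Printf("%q\n", s) // "42", "98.6", "true", "1e3" (one per line)
	}
}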
-func (exec *Executor) execMethodString( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - var str string - name := node.Name() - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - return exec.returnVerboseError(fmt.Errorf( - `%w: jsonpath item method %v can only be applied to a boolean, string, numeric, or datetime value`, - ErrVerbose, node.Name(), - )) - case string: - str = val - case types.DateTime: - str = val.String() - case json.Number: - str = val.String() - case int64: - str = strconv.FormatInt(val, 10) - case float64: - str = strconv.FormatFloat(val, 'f', -1, 64) - case bool: - if val { - str = "true" - } else { - str = "false" - } - default: - return exec.returnVerboseError(fmt.Errorf( - `%w: jsonpath item method %v can only be applied to a boolean, string, numeric, or datetime value`, - ErrVerbose, name, - )) - } - - return exec.executeNextItem(ctx, node, nil, str, found) -} - -// execMethodBoolean handles the execution of .boolean(). value must be a -// string, number, boolean, or able to be cast to a bool, int64, float64, -// [json.Number], or string β€” or an array ([]any) to which .boolean() will be -// applied to all of its values when unwrap is true. String values will be -// converted to bool by [execBooleanString]. -func (exec *Executor) execMethodBoolean( - ctx context.Context, - node *ast.MethodNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - var boolean bool - name := node.Name() - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_exec.c#L1385 - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a boolean, string, or numeric value", - ErrVerbose, name, - )) - case bool: - boolean = val - case int64: - boolean = val != 0 - case float64: - if val != math.Trunc(val) { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, val, name, "boolean", - )) - } - boolean = val != 0 - case json.Number: - num, err := val.Float64() - if err != nil || num != math.Trunc(num) { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument %q of jsonpath item method %v is invalid for type boolean`, - ErrVerbose, val, name, - )) - } - boolean = num != 0 - case string: - var err error - boolean, err = execBooleanString(val, name) - if err != nil { - return exec.returnVerboseError(err) - } - - default: - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a boolean, string, or numeric value", - ErrVerbose, name, - )) - } - - return exec.executeNextItem(ctx, node, nil, boolean, found) -} - -// execBooleanString converts val to a boolean. 
The value of val must -// case-insensitively match one of: -// - t -// - true -// - f -// - false -// - y -// - yes -// - n -// - no -// - on -// - off -// - 1 -// - 0 -func execBooleanString(val string, name ast.MethodName) (bool, error) { - size := len(val) - if size == 0 { - return false, fmt.Errorf( - `%w: argument %q of jsonpath item method %v is invalid for type %v`, - ErrVerbose, val, name, "boolean", - ) - } - - switch val[0] { - case 't', 'T': - if size == 1 || strings.EqualFold(val, "true") { - return true, nil - } - case 'f', 'F': - if size == 1 || strings.EqualFold(val, "false") { - return false, nil - } - case 'y', 'Y': - if size == 1 || strings.EqualFold(val, "yes") { - return true, nil - } - case 'n', 'N': - if size == 1 || strings.EqualFold(val, "no") { - return false, nil - } - case 'o', 'O': - if strings.EqualFold(val, "on") { - return true, nil - } else if strings.EqualFold(val, "off") { - return false, nil - } - case '1': - if size == 1 { - return true, nil - } - case '0': - if size == 1 { - return false, nil - } - } - - return false, fmt.Errorf( - `%w: argument %q of jsonpath item method %v is invalid for type %v`, - ErrVerbose, val, name, "boolean", - ) -} - -// executeNumberMethod implements the number() and decimal() methods. It -// varies somewhat from Postgres because Postgres uses its arbitrary precision -// numeric type, which can be huge and precise, while we use only float64 and -// int64 values. If we ever switch to the github.com/shopspring/decimal -// package we could make it more precise and therefore compatible, at least -// when numbers are parsed into [json.Number]. The method parameter should -// stringify to `.number()` or `.decimal()` as appropriate. -func (exec *Executor) executeNumberMethod( - ctx context.Context, - node ast.Node, - value any, - found *valueList, - unwrap bool, - method any, -) (resultStatus, error) { - var ( - num float64 - err error - ) - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, val, found) - } - return exec.returnVerboseError(fmt.Errorf( - `%w: jsonpath item method %v can only be applied to a string or numeric value`, - ErrVerbose, method, - )) - case float64: - num = val - case int64: - num = float64(val) - case json.Number: - num, err = val.Float64() - case string: - // cast string as number - num, err = strconv.ParseFloat(val, 64) - default: - return exec.returnVerboseError(fmt.Errorf( - `%w: jsonpath item method %v can only be applied to a string or numeric value`, - ErrVerbose, method, - )) - } - - if err != nil { - return exec.returnVerboseError(fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, value, method, "numeric", - )) - } - - if math.IsInf(num, 0) || math.IsNaN(num) { - return exec.returnVerboseError(fmt.Errorf( - "%w: NaN or Infinity is not allowed for jsonpath item method %v", - ErrVerbose, method, - )) - } - - if node, ok := node.(*ast.BinaryNode); ok { - num, err = exec.executeDecimalMethod(node, value, num) - if err != nil { - return exec.returnError(err) - } - } - - return exec.executeNextItem(ctx, node, nil, num, found) -} - -// https://github.com/postgres/postgres/blob/REL_18_BETA2/src/include/utils/numeric.h#L32-L35 -const ( - numericMaxPrecision = 1000 - numericMinScale = -1000 - numericMaxScale = 1000 -) - -// executeDecimalMethod processes the arguments to the .decimal() method, -// which must have the precision and optional scale. 
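// Illustrative sketch (not part of the diff above): the PostgreSQL-style
// boolean literals accepted by execBooleanString, parsed case-insensitively
// with strings.EqualFold. parseBoolLiteral is a hypothetical name.
package main

import (
	"errors"
	"fmt"
	"strings"
)

func parseBoolLiteral(s string) (bool, error) {
	switch {
	case strings.EqualFold(s, "t"), strings.EqualFold(s, "true"),
		strings.EqualFold(s, "y"), strings.EqualFold(s, "yes"),
		strings.EqualFold(s, "on"), s == "1":
		return true, nil
	case strings.EqualFold(s, "f"), strings.EqualFold(s, "false"),
		strings.EqualFold(s, "n"), strings.EqualFold(s, "no"),
		strings.EqualFold(s, "off"), s == "0":
		return false, nil
	default:
		return false, errors.New("invalid boolean literal")
	}
}

func main() {
	fmt.Println(parseBoolLiteral("Yes")) // true <nil>
	fmt.Println(parseBoolLiteral("off")) // false <nil>
	fmt.Println(parseBoolLiteral("2"))   // false invalid boolean literal
}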
It converts them to -// int32, formats the number as string and then parses back into a float, -// which it returns. -func (exec *Executor) executeDecimalMethod( - node *ast.BinaryNode, - value any, - num float64, -) (float64, error) { - op := node.Operator() - if op != ast.BinaryDecimal || node.Left() == nil { - return num, nil - } - - precision, err := getNodeInt32(node.Left(), op, "precision") - if err != nil { - return 0, err - } - - // Verify the precision - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/numeric.c#L1335-L1339 - if precision < 1 || precision > numericMaxPrecision { - return 0, fmt.Errorf( - "%w: NUMERIC precision %d must be between 1 and %d", - ErrExecution, precision, numericMaxPrecision, - ) - } - - scale := 0 - if right := node.Right(); right != nil { - var err error - scale, err = getNodeInt32(right, op, "scale") - if err != nil { - return 0, err - } - - // Verify the scale. - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/numeric.c#L1340-L1344 - if scale < numericMinScale || scale > numericMaxScale { - return 0, fmt.Errorf( - "%w: NUMERIC scale %d must be between %d and %d", - ErrExecution, scale, numericMinScale, numericMaxScale, - ) - } - } - - // Round to the scale. - ratio := math.Pow10(scale) - rounded := math.Round(num*ratio) / ratio - - // Count the digits before the decimal point. - numStr := strconv.FormatFloat(rounded, 'f', -1, 64) - count := 0 - for _, ch := range numStr { - if ch == '.' { - break - } - if '1' <= ch && ch <= '9' { - count++ - } - } - - // Make sure it's got no more than precision digits. - if count > 0 && count > precision-scale { - return 0, fmt.Errorf( - `%w: argument "%v" of jsonpath item method %v is invalid for type %v`, - ErrVerbose, value, op, "numeric", - ) - } - return rounded, nil -} - -// intCallback defines a callback to carry out an operation on an int64. -type intCallback func(int64) int64 - -// floatCallback defines a callback to carry out an operation on a float64. -type floatCallback func(float64) float64 - -// intAbs returns the absolute value of x. Implements intCallback. -func intAbs(x int64) int64 { - if x < 0 { - return -x - } - return x -} - -// intSelf returns x. Implements intCallback. -func intSelf(x int64) int64 { return x } - -// floatSelf returns x. Implements floatCallback. -func floatSelf(x float64) float64 { return x } - -// intUMinus applies unary minus to x. Implements intCallback. -func intUMinus(x int64) int64 { return -x } - -// floatUMinus applies unary minus to x. Implements floatCallback. -func floatUMinus(x float64) float64 { return -x } - -// executeNumericItemMethod executes numeric item methods (.abs(), .floor(), -// .ceil()) using the specified intCallback or floatCallback. 
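// Illustrative sketch (not part of the diff above): a simplified, standalone
// variant of the .decimal(precision, scale) rounding and range check. The
// value is rounded to the requested scale, and the digits before the decimal
// point must fit in precision - scale. roundDecimal is a hypothetical name
// and counts integer digits slightly differently than the deleted code.
package main

import (
	"fmt"
	"math"
	"strconv"
	"strings"
)

func roundDecimal(num float64, precision, scale int) (float64, error) {
	ratio := math.Pow10(scale)
	rounded := math.Round(num*ratio) / ratio

	// Count digits before the decimal point, ignoring sign and leading zeros.
	intPart := strings.TrimLeft(strconv.FormatFloat(math.Abs(rounded), 'f', -1, 64), "0")
	if i := strings.IndexByte(intPart, '.'); i >= 0 {
		intPart = intPart[:i]
	}
	if len(intPart) > precision-scale {
		return 0, fmt.Errorf("%v does not fit NUMERIC(%d,%d)", num, precision, scale)
	}
	return rounded, nil
}

func main() {
	fmt.Println(roundDecimal(123.456, 6, 2)) // 123.46 <nil>
	fmt.Println(roundDecimal(123.456, 4, 2)) // 0 123.456 does not fit NUMERIC(4,2)
}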
-func (exec *Executor) executeNumericItemMethod( - ctx context.Context, - node ast.Node, - value any, - unwrap bool, - intCallback intCallback, - floatCallback floatCallback, - found *valueList, -) (resultStatus, error) { - var num any - - switch val := value.(type) { - case []any: - if unwrap { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a numeric value", - ErrVerbose, node, - )) - case int64: - num = intCallback(val) - case float64: - num = floatCallback(val) - case json.Number: - if integer, err := val.Int64(); err == nil { - num = intCallback(integer) - } else if float, err := val.Float64(); err == nil { - num = floatCallback(float) - } else { - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a numeric value", - ErrVerbose, node, - )) - } - default: - return exec.returnVerboseError(fmt.Errorf( - "%w: jsonpath item method %v can only be applied to a numeric value", - ErrVerbose, node, - )) - } - - return exec.executeNextItem(ctx, node, node.Next(), num, found) -} diff --git a/path/exec/method_test.go b/path/exec/method_test.go deleted file mode 100644 index 3622437..0000000 --- a/path/exec/method_test.go +++ /dev/null @@ -1,2453 +0,0 @@ -package exec - -import ( - "context" - "encoding/json" - "fmt" - "math" - "strconv" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -func TestExecMethodNode(t *testing.T) { - t.Parallel() - path, _ := parser.Parse("$") - ctx := context.Background() - - // Offset of object in a slice is non-determinate, so calculate it at runtime. 
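// Illustrative sketch (not part of the diff above): the intCallback/floatCallback
// pairing used by executeNumericItemMethod, applied outside the executor. A
// json.Number operand tries int64 first and falls back to float64. The names
// applyNumeric, intFn, and floatFn only loosely mirror the deleted file.
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"math"
)

type (
	intFn   func(int64) int64
	floatFn func(float64) float64
)

func applyNumeric(v any, fi intFn, ff floatFn) (any, error) {
	switch v := v.(type) {
	case int64:
		return fi(v), nil
	case float64:
		return ff(v), nil
	case json.Number:
		if i, err := v.Int64(); err == nil {
			return fi(i), nil
		}
		if f, err := v.Float64(); err == nil {
			return ff(f), nil
		}
	}
	return nil, errors.New("not a numeric value")
}

func main() {
	abs := func(i int64) int64 {
		if i < 0 {
			return -i
		}
		return i
	}
	fmt.Println(applyNumeric(int64(-42), abs, math.Abs))            // 42 <nil>
	fmt.Println(applyNumeric(json.Number("-1.5"), abs, math.Floor)) // -2 <nil>
}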
- value := []any{map[string]any{"x": true, "y": "hi"}} - offset := deltaBetween(value, value[0]) - - for _, tc := range []struct { - test string - node ast.Node - value any - unwrap bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "number", - node: ast.NewMethod(ast.MethodNumber), - value: "42", - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "number_unwrap", - node: ast.NewMethod(ast.MethodNumber), - value: []any{"42", "98.6"}, - exp: statusOK, - unwrap: true, - find: []any{float64(42), float64(98.6)}, - }, - { - test: "number_no_unwrap", - node: ast.NewMethod(ast.MethodNumber), - value: []any{"42", "98.6"}, - exp: statusFailed, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "number_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodNumber), ast.NewMethod(ast.MethodString)}), - value: "42", - exp: statusOK, - find: []any{"42"}, - }, - { - test: "abs", - node: ast.NewMethod(ast.MethodAbs), - value: int64(-42), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "abs_unwrap", - node: ast.NewMethod(ast.MethodAbs), - value: []any{int64(-42), float64(98.6)}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), float64(98.6)}, - }, - { - test: "abs_no_unwrap", - node: ast.NewMethod(ast.MethodAbs), - value: []any{int64(-42), float64(98.6)}, - exp: statusFailed, - err: `exec: jsonpath item method .abs() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - { - test: "floor", - node: ast.NewMethod(ast.MethodFloor), - value: float64(42.8), - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "floor_unwrap", - node: ast.NewMethod(ast.MethodFloor), - value: []any{float64(42.8), float64(99.1)}, - unwrap: true, - exp: statusOK, - find: []any{float64(42), float64(99)}, - }, - { - test: "floor_no_unwrap", - node: ast.NewMethod(ast.MethodFloor), - value: []any{float64(42.8), float64(99.1)}, - exp: statusFailed, - err: `exec: jsonpath item method .floor() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - { - test: "ceiling", - node: ast.NewMethod(ast.MethodCeiling), - value: float64(41.2), - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "ceiling_unwrap", - node: ast.NewMethod(ast.MethodCeiling), - value: []any{float64(41.2), float64(98.6)}, - unwrap: true, - exp: statusOK, - find: []any{float64(42), float64(99)}, - }, - { - test: "ceiling_no_unwrap", - node: ast.NewMethod(ast.MethodCeiling), - value: []any{float64(41.2), float64(98.6)}, - exp: statusFailed, - err: `exec: jsonpath item method .ceiling() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - { - test: "type", - node: ast.NewMethod(ast.MethodType), - value: types.NewDate(time.Now()), - exp: statusOK, - find: []any{"date"}, - }, - { - test: "type_does_not_unwrap", - node: ast.NewMethod(ast.MethodType), - value: []any{"hi", types.NewDate(time.Now())}, - unwrap: true, - exp: statusOK, - find: []any{"array"}, - }, - { - test: "type_no_unwrap", - node: ast.NewMethod(ast.MethodType), - value: []any{"hi", types.NewDate(time.Now())}, - exp: statusOK, - find: []any{"array"}, - }, - { - test: "size", - node: ast.NewMethod(ast.MethodSize), - value: []any{true, false}, - exp: statusOK, - find: []any{int64(2)}, - }, - { - test: "size_not_array", - node: ast.NewMethod(ast.MethodSize), - value: "xxx", - exp: statusOK, - find: []any{int64(1)}, - }, - { - test: "double", - node: ast.NewMethod(ast.MethodDouble), - value: "42", - exp: 
statusOK, - find: []any{float64(42)}, - }, - { - test: "double_unwrap", - node: ast.NewMethod(ast.MethodDouble), - value: []any{"42", int64(2), float64(98.6)}, - unwrap: true, - exp: statusOK, - find: []any{float64(42), float64(2), float64(98.6)}, - }, - { - test: "double_no_unwrap", - node: ast.NewMethod(ast.MethodDouble), - value: []any{"42", int64(2), float64(98.6)}, - exp: statusFailed, - err: `exec: jsonpath item method .double() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "integer", - node: ast.NewMethod(ast.MethodInteger), - value: "42", - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "integer_unwrap", - node: ast.NewMethod(ast.MethodInteger), - value: []any{"42", int64(2)}, - exp: statusOK, - unwrap: true, - find: []any{int64(42), int64(2)}, - }, - { - test: "integer_no_unwrap", - node: ast.NewMethod(ast.MethodInteger), - value: []any{"42", int64(2)}, - exp: statusFailed, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "bigint", - node: ast.NewMethod(ast.MethodBigInt), - value: "42", - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "bigint_unwrap", - node: ast.NewMethod(ast.MethodBigInt), - value: []any{"42", int64(2)}, - exp: statusOK, - unwrap: true, - find: []any{int64(42), int64(2)}, - }, - { - test: "bigint_no_unwrap", - node: ast.NewMethod(ast.MethodBigInt), - value: []any{"42", int64(2)}, - exp: statusFailed, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "string", - node: ast.NewMethod(ast.MethodString), - value: true, - exp: statusOK, - find: []any{"true"}, - }, - { - // https://www.postgresql.org/message-id/A64AE04F-4410-42B7-A141-7A7349260F4D@justatheory.com - test: "string_does_not_unwrap", - node: ast.NewMethod(ast.MethodString), - value: []any{true, int64(42)}, - unwrap: true, - exp: statusOK, - find: []any{"true", "42"}, - }, - { - test: "string_no_unwrap", - node: ast.NewMethod(ast.MethodString), - value: []any{true, int64(42)}, - exp: statusFailed, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - isErr: ErrVerbose, - }, - { - test: "boolean", - node: ast.NewMethod(ast.MethodBoolean), - value: "t", - exp: statusOK, - find: []any{true}, - }, - { - test: "boolean_unwrap", - node: ast.NewMethod(ast.MethodBoolean), - value: []any{"t", "n"}, - unwrap: true, - exp: statusOK, - find: []any{true, false}, - }, - { - test: "boolean_no_unwrap", - node: ast.NewMethod(ast.MethodBoolean), - value: []any{"t", "n"}, - exp: statusFailed, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - isErr: ErrVerbose, - }, - { - test: "keyvalue", - node: ast.NewMethod(ast.MethodKeyValue), - value: map[string]any{"x": "hi"}, - exp: statusOK, - find: []any{map[string]any{"id": int64(0), "key": "x", "value": "hi"}}, - }, - { - test: "keyvalue_wrap", - node: ast.NewMethod(ast.MethodKeyValue), - value: value, - unwrap: true, - exp: statusOK, - find: []any{ - map[string]any{"id": offset, "key": "x", "value": true}, - map[string]any{"id": offset, "key": "y", "value": "hi"}, - }, - }, - { - test: "keyvalue_no_wrap", - node: ast.NewMethod(ast.MethodKeyValue), - value: value, - exp: statusFailed, - err: `exec: jsonpath item method .keyvalue() can only be applied to an object`, - isErr: ErrVerbose, - }, - { - test: "unknown_method", - node: 
ast.NewMethod(ast.MethodName(-1)), - value: struct{}{}, - exp: statusFailed, - err: `exec invalid: unknown method MethodName(-1)`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Make sure we have a method node. - node, ok := tc.node.(*ast.MethodNode) - r.True(ok) - - // Set up an executor. - e := newTestExecutor(path, nil, true, false) - e.root = tc.value - _ = e.setTempBaseObject(e.root, 0) - - // Test execKeyNode with a list. - list := newList() - res, err := e.execMethodNode(ctx, node, tc.value, list, tc.unwrap) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Try with nil found. - res, err = e.execMethodNode(ctx, node, tc.value, nil, tc.unwrap) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -type methodTestCase struct { - test string - path *ast.AST - silent bool - node ast.Node - value any - unwrap bool - exp resultStatus - find []any - err string - isErr error -} - -func (tc methodTestCase) checkResults(t *testing.T, res resultStatus, found *valueList, err error) { - t.Helper() - a := assert.New(t) - r := require.New(t) - - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, found.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(found.list) - } -} - -//nolint:gochecknoglobals -var ( - laxRootPath, _ = parser.Parse("$") - strictRootPath, _ = parser.Parse("strict $") -) - -func (tc methodTestCase) prep() (*Executor, *valueList) { - if tc.path == nil { - tc.path = laxRootPath - } - return newTestExecutor(tc.path, nil, !tc.silent, false), newList() -} - -func (tc methodTestCase) checkNode(t *testing.T, ok bool, meth *ast.MethodNode, name ast.MethodName) { - t.Helper() - assert.True(t, ok) - assert.Equal(t, name, meth.Name()) -} - -func TestExecMethodType(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodType) - - for _, tc := range []methodTestCase{ - { - test: "object", - node: meth, - value: map[string]any{}, - exp: statusOK, - find: []any{"object"}, - }, - { - test: "object_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodType), ast.NewMethod(ast.MethodSize)}), - value: map[string]any{}, - exp: statusOK, - find: []any{int64(1)}, - }, - { - test: "array", - node: meth, - value: []any{}, - exp: statusOK, - find: []any{"array"}, - }, - { - test: "string", - node: meth, - value: "hi", - exp: statusOK, - find: []any{"string"}, - }, - { - test: "int_number", - node: meth, - value: int64(1), - exp: statusOK, - find: []any{"number"}, - }, - { - test: "float_number", - node: meth, - value: float64(1), - exp: statusOK, - find: []any{"number"}, - }, - { - test: "json_number", - node: meth, - value: json.Number("1"), - exp: statusOK, - find: []any{"number"}, - }, - { - test: "bool", - node: meth, - value: true, - exp: statusOK, - find: []any{"boolean"}, - }, - { - test: "date", - node: meth, - value: types.NewDate(time.Now()), - exp: statusOK, - find: []any{"date"}, - }, - { - test: "time", - node: meth, - value: types.NewTime(time.Now()), - exp: statusOK, - find: []any{"time without time zone"}, - }, - { - test: "timetz", - node: meth, - value: types.NewTimeTZ(time.Now()), - exp: statusOK, - find: []any{"time with time 
zone"}, - }, - { - test: "timestamp", - node: meth, - value: types.NewTimestamp(time.Now()), - exp: statusOK, - find: []any{"timestamp without time zone"}, - }, - { - test: "timestampTZ", - node: meth, - value: types.NewTimestampTZ(context.Background(), time.Now()), - exp: statusOK, - find: []any{"timestamp with time zone"}, - }, - { - test: "nil", - node: meth, - value: nil, - exp: statusOK, - find: []any{"null"}, - }, - { - test: "struct", - node: meth, - value: struct{}{}, - exp: statusFailed, - err: `exec invalid: unsupported data type struct {}`, - isErr: ErrInvalid, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .type() node. - node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodType) - - // Test execMethodType - e, list := tc.prep() - res, err := e.execMethodType(ctx, node, tc.value, list) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecMethodSize(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodSize) - - for _, tc := range []methodTestCase{ - { - test: "array_size_2", - node: meth, - value: []any{1, 3}, - exp: statusOK, - find: []any{int64(2)}, - }, - { - test: "array_size_6", - node: meth, - value: []any{1, 3, 2, 4, 6, 8}, - exp: statusOK, - find: []any{int64(6)}, - }, - { - test: "bool", - node: meth, - value: true, - exp: statusOK, - find: []any{int64(1)}, - }, - { - test: "nil", - node: meth, - value: nil, - exp: statusOK, - find: []any{int64(1)}, - }, - { - test: "object", - node: meth, - value: map[string]any{"x": true, "y": false}, - exp: statusOK, - find: []any{int64(1)}, - }, - { - test: "strict_not_array", - path: strictRootPath, - node: meth, - value: true, - exp: statusFailed, - err: `exec: jsonpath item method .size() can only be applied to an array`, - isErr: ErrVerbose, - }, - { - test: "strict_not_array_silent", - node: meth, - value: true, - silent: true, - exp: statusOK, - find: []any{int64(1)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .size() node. 
- node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodSize) - - // Test execMethodSize - e, list := tc.prep() - res, err := e.execMethodSize(ctx, node, tc.value, list) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecMethodDouble(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodDouble) - - for _, tc := range []methodTestCase{ - { - test: "array_unwrap", - node: meth, - value: []any{"1", "3.2"}, - unwrap: true, - exp: statusOK, - find: []any{float64(1), float64(3.2)}, - }, - { - test: "array_no_unwrap", - node: meth, - value: []any{"1", "3.2"}, - exp: statusFailed, - err: `exec: jsonpath item method .double() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "int", - node: meth, - value: int64(42), - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "max_int", - node: meth, - value: int64(math.MaxInt64), - exp: statusOK, - find: []any{float64(math.MaxInt64)}, - }, - { - test: "min_int", - node: meth, - value: int64(math.MinInt64), - exp: statusOK, - find: []any{float64(math.MinInt64)}, - }, - { - test: "float", - node: meth, - value: float64(98.6), - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "max_float", - node: meth, - value: float64(math.MaxFloat64), - exp: statusOK, - find: []any{float64(math.MaxFloat64)}, - }, - { - test: "min_float", - node: meth, - value: float64(math.SmallestNonzeroFloat64), - exp: statusOK, - find: []any{float64(math.SmallestNonzeroFloat64)}, - }, - { - test: "json", - node: meth, - value: json.Number("98.6"), - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "json_invalid", - node: meth, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .double() is invalid for type double precision`, - isErr: ErrExecution, - }, - { - test: "string", - node: meth, - value: "98.6", - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "string_invalid", - node: meth, - value: "hi", - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .double() is invalid for type double precision`, - isErr: ErrExecution, - }, - { - test: "bool", - node: meth, - value: true, - exp: statusFailed, - err: `exec: jsonpath item method .double() can only be applied to a string or numeric value`, - isErr: ErrExecution, - }, - { - test: "inf", - node: meth, - value: "inf", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .double()`, - isErr: ErrVerbose, - }, - { - test: "neg_inf", - node: meth, - value: "-inf", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .double()`, - isErr: ErrVerbose, - }, - { - test: "nan", - node: meth, - value: "nan", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .double()`, - isErr: ErrVerbose, - }, - { - test: "json_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodDouble), ast.NewMethod(ast.MethodString)}), - value: json.Number("98.6"), - exp: statusOK, - find: []any{"98.6"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .double() node. 
- node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodDouble) - - // Test execMethodDouble - e, list := tc.prep() - res, err := e.execMethodDouble(ctx, node, tc.value, list, tc.unwrap) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecMethodInteger(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodInteger) - - for _, tc := range []methodTestCase{ - { - test: "int", - node: meth, - value: int64(42), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "max_int", - node: meth, - value: int64(math.MaxInt32), - exp: statusOK, - find: []any{int64(math.MaxInt32)}, - }, - { - test: "min_int", - node: meth, - value: int64(math.MinInt32), - exp: statusOK, - find: []any{int64(math.MinInt32)}, - }, - { - test: "over_max_int", - node: meth, - value: int64(math.MaxInt32 + 1), - exp: statusFailed, - err: fmt.Sprintf( - `exec: argument "%v" of jsonpath item method .integer() is invalid for type integer`, - int64(math.MaxInt32+1), - ), - isErr: ErrVerbose, - }, - { - test: "under_min_int", - node: meth, - value: int64(math.MinInt32 - 1), - exp: statusFailed, - err: fmt.Sprintf( - `exec: argument "%v" of jsonpath item method .integer() is invalid for type integer`, - int64(math.MinInt32-1), - ), - isErr: ErrVerbose, - }, - { - test: "float_round_up", - node: meth, - value: float64(98.6), - exp: statusOK, - find: []any{int64(99)}, - }, - { - test: "float_round_down", - node: meth, - value: float64(42.3), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "json_number_int", - node: meth, - value: json.Number("42"), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "json_number_float_down", - node: meth, - value: json.Number("42.3"), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "json_number_float_up", - node: meth, - value: json.Number("42.5"), - exp: statusOK, - find: []any{int64(43)}, - }, - { - test: "json_number_invalid", - node: meth, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .integer() is invalid for type integer`, - isErr: ErrVerbose, - }, - { - test: "string", - node: meth, - value: "42", - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "string_float", - node: meth, - value: "42.3", - exp: statusFailed, - err: `exec: argument "42.3" of jsonpath item method .integer() is invalid for type integer`, - isErr: ErrVerbose, - }, - { - test: "invalid_string", - node: meth, - value: "hi", - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .integer() is invalid for type integer`, - isErr: ErrVerbose, - }, - { - test: "inf", - node: meth, - value: "inf", - exp: statusFailed, - err: `exec: argument "inf" of jsonpath item method .integer() is invalid for type integer`, - isErr: ErrVerbose, - }, - { - test: "neg_inf", - node: meth, - value: "-inf", - exp: statusFailed, - err: `exec: argument "-inf" of jsonpath item method .integer() is invalid for type integer`, - isErr: ErrVerbose, - }, - { - test: "nan", - node: meth, - value: "nan", - exp: statusFailed, - err: `exec: argument "nan" of jsonpath item method .integer() is invalid for type integer`, - isErr: ErrVerbose, - }, - { - test: "int_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodInteger), ast.NewMethod(ast.MethodString)}), - value: int64(42), - exp: statusOK, - find: []any{"42"}, - }, - { - test: "invalid_value", - node: meth, - value: true, - exp: statusFailed, - err: `exec: jsonpath item method .integer() can only be applied 
to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "int_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodInteger), ast.NewMethod(ast.MethodString)}), - value: int64(42), - exp: statusOK, - find: []any{"42"}, - }, - { - test: "array", - node: meth, - value: []any{int64(42)}, - exp: statusFailed, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array_unwrap", - node: meth, - value: []any{float64(42.2), "88"}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), int64(88)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .Integer() node. - node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodInteger) - - // Test execMethodInteger - e, list := tc.prep() - res, err := e.execMethodInteger(ctx, node, tc.value, list, tc.unwrap) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecMethodBigInt(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodBigInt) - - for _, tc := range []methodTestCase{ - { - test: "int", - node: meth, - value: int64(42), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "max_int", - node: meth, - value: int64(math.MaxInt64), - exp: statusOK, - find: []any{int64(math.MaxInt64)}, - }, - { - test: "min_int", - node: meth, - value: int64(math.MinInt64), - exp: statusOK, - find: []any{int64(math.MinInt64)}, - }, - { - test: "float_up", - node: meth, - value: float64(98.6), - exp: statusOK, - find: []any{int64(99)}, - }, - { - test: "float_down", - node: meth, - value: float64(98.4), - exp: statusOK, - find: []any{int64(98)}, - }, - { - test: "float_upper_bound", - node: meth, - value: float64(math.MaxUint64), - exp: statusFailed, - err: fmt.Sprintf( - `exec: argument "%v" of jsonpath item method .bigint() is invalid for type bigint`, - float64(math.MaxUint64), - ), - isErr: ErrVerbose, - }, - { - test: "float_lower_bound", - node: meth, - value: float64(-math.MaxUint64), - exp: statusFailed, - err: fmt.Sprintf( - `exec: argument "%v" of jsonpath item method .bigint() is invalid for type bigint`, - float64(-math.MaxUint64), - ), - isErr: ErrVerbose, - }, - { - test: "json_int", - node: meth, - value: json.Number("42"), - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "json_float_down", - node: meth, - value: json.Number("-42.3"), - exp: statusOK, - find: []any{int64(-42)}, - }, - { - test: "json_float_up", - node: meth, - value: json.Number("98.6"), - exp: statusOK, - find: []any{int64(99)}, - }, - { - test: "json_float_upper_bound", - node: meth, - value: json.Number("18446744073709551615.123"), - exp: statusFailed, - err: `exec: argument "18446744073709551615.123" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "json_float_lower_bound", - node: meth, - value: json.Number("-18446744073709551615.123"), - exp: statusFailed, - err: `exec: argument "-18446744073709551615.123" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "invalid_json", - node: meth, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "string_int", - node: meth, - value: "42", - exp: statusOK, - find: []any{int64(42)}, - }, - { - test: "string_max_big_int", - node: meth, - value: strconv.FormatInt(math.MaxInt64, 10), - exp: 
statusOK, - find: []any{int64(math.MaxInt64)}, - }, - { - test: "string_min_big_int", - node: meth, - value: strconv.FormatInt(math.MinInt64, 10), - exp: statusOK, - find: []any{int64(math.MinInt64)}, - }, - { - test: "string_float", - node: meth, - value: "42.8", - exp: statusFailed, - err: `exec: argument "42.8" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "invalid_string", - node: meth, - value: "hi", - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "inf", - node: meth, - value: "inf", - exp: statusFailed, - err: `exec: argument "inf" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "neg_inf", - node: meth, - value: "-inf", - exp: statusFailed, - err: `exec: argument "-inf" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "nan", - node: meth, - value: "nan", - exp: statusFailed, - err: `exec: argument "nan" of jsonpath item method .bigint() is invalid for type bigint`, - isErr: ErrVerbose, - }, - { - test: "int_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodBigInt), ast.NewMethod(ast.MethodString)}), - value: int64(42), - exp: statusOK, - find: []any{"42"}, - }, - { - test: "invalid_value", - node: meth, - value: true, - exp: statusFailed, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array", - node: meth, - value: []any{int64(42)}, - exp: statusFailed, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array_unwrap", - node: meth, - value: []any{int64(42), "1024"}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), int64(1024)}, - }, - { - test: "array_unwrap_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodBigInt), ast.NewMethod(ast.MethodString)}), - value: []any{int64(42), "1024"}, - unwrap: true, - exp: statusOK, - find: []any{"42", "1024"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .BigInt() node. 
- node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodBigInt) - - // Test execMethodBigInt - e, list := tc.prep() - res, err := e.execMethodBigInt(ctx, node, tc.value, list, tc.unwrap) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecMethodString(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodString) - now := time.Now() - - for _, tc := range []methodTestCase{ - { - test: "string", - node: meth, - value: "hi", - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "date", - node: meth, - value: types.NewDate(now), - exp: statusOK, - find: []any{types.NewDate(now).String()}, - }, - { - test: "time", - node: meth, - value: types.NewTime(now), - exp: statusOK, - find: []any{types.NewTime(now).String()}, - }, - { - test: "timetz", - node: meth, - value: types.NewTimeTZ(now), - exp: statusOK, - find: []any{types.NewTimeTZ(now).String()}, - }, - { - test: "timestamp", - node: meth, - value: types.NewTimestamp(now), - exp: statusOK, - find: []any{types.NewTimestamp(now).String()}, - }, - { - test: "timestamptz", - node: meth, - value: types.NewTimestampTZ(ctx, now), - exp: statusOK, - find: []any{types.NewTimestampTZ(ctx, now).String()}, - }, - { - test: "stringer_json_number", - node: meth, - value: json.Number("188.2"), - exp: statusOK, - find: []any{"188.2"}, - }, - { - test: "int", - node: meth, - value: int64(42), - exp: statusOK, - find: []any{"42"}, - }, - { - test: "float", - node: meth, - value: float64(98.6), - exp: statusOK, - find: []any{"98.6"}, - }, - { - test: "true", - node: meth, - value: true, - exp: statusOK, - find: []any{"true"}, - }, - { - test: "false", - node: meth, - value: false, - exp: statusOK, - find: []any{"false"}, - }, - { - test: "nil", - node: meth, - value: nil, - exp: statusFailed, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - isErr: ErrVerbose, - }, - { - test: "obj", - node: meth, - value: map[string]any{}, - exp: statusFailed, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - isErr: ErrVerbose, - }, - { - test: "array", - node: meth, - value: []any{int64(42), true}, - exp: statusFailed, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - isErr: ErrVerbose, - }, - { - test: "array_unwrap", - node: meth, - value: []any{int64(42), true}, - unwrap: true, - exp: statusOK, - find: []any{"42", "true"}, - }, - { - test: "string_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodString), ast.NewMethod(ast.MethodInteger)}), - value: "42", - exp: statusOK, - find: []any{int64(42)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .String() node. 
- node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodString) - - // Test execMethodString - e, list := tc.prep() - res, err := e.execMethodString(ctx, node, tc.value, list, tc.unwrap) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecMethodBoolean(t *testing.T) { - t.Parallel() - ctx := context.Background() - meth := ast.NewMethod(ast.MethodBoolean) - - for _, tc := range []methodTestCase{ - { - test: "true", - node: meth, - value: true, - exp: statusOK, - find: []any{true}, - }, - { - test: "false", - node: meth, - value: false, - exp: statusOK, - find: []any{false}, - }, - { - test: "int1", - node: meth, - value: int64(1), - exp: statusOK, - find: []any{true}, - }, - { - test: "int1000", - node: meth, - value: int64(1000), - exp: statusOK, - find: []any{true}, - }, - { - test: "int_neg10", - node: meth, - value: int64(-10), - exp: statusOK, - find: []any{true}, - }, - { - test: "int0", - node: meth, - value: int64(0), - exp: statusOK, - find: []any{false}, - }, - { - test: "int_neg0", - node: meth, - value: int64(-0), - exp: statusOK, - find: []any{false}, - }, - { - test: "float1", - node: meth, - value: float64(1.0), - exp: statusOK, - find: []any{true}, - }, - { - test: "float1000", - node: meth, - value: float64(1000.0), - exp: statusOK, - find: []any{true}, - }, - { - test: "float_neg0", - node: meth, - value: float64(-10), - exp: statusOK, - find: []any{true}, - }, - { - test: "float0", - node: meth, - value: float64(0), - exp: statusOK, - find: []any{false}, - }, - { - test: "float_dot_one", - node: meth, - value: float64(1.1), - exp: statusFailed, - err: `exec: argument "1.1" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "float_dot_nine", - node: meth, - value: float64(1.9), - exp: statusFailed, - err: `exec: argument "1.9" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "float_neg000_dot_nine", - node: meth, - value: float64(-1000.9), - exp: statusFailed, - err: `exec: argument "-1000.9" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "json_int1", - node: meth, - value: json.Number("1"), - exp: statusOK, - find: []any{true}, - }, - { - test: "json_int0", - node: meth, - value: json.Number("0"), - exp: statusOK, - find: []any{false}, - }, - { - test: "json_int1_dot0", - node: meth, - value: json.Number("1.0"), - exp: statusOK, - find: []any{true}, - }, - { - test: "json_int0_dot0", - node: meth, - value: json.Number("0.0"), - exp: statusOK, - find: []any{false}, - }, - { - test: "json_float1000", - node: meth, - value: json.Number("1000.0"), - exp: statusOK, - find: []any{true}, - }, - { - test: "json_float_neg10", - node: meth, - value: json.Number("-10.0"), - exp: statusOK, - find: []any{true}, - }, - { - test: "json_float_0", - node: meth, - value: json.Number("0.0"), - exp: statusOK, - find: []any{false}, - }, - { - test: "json_float_neg0", - node: meth, - value: json.Number("-0.0"), - exp: statusOK, - find: []any{false}, - }, - { - test: "json_float_dot_one", - node: meth, - value: json.Number("1.1"), - exp: statusFailed, - err: `exec: argument "1.1" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "float_dot_nine", - node: meth, - value: json.Number("1.9"), - exp: statusFailed, - err: `exec: argument "1.9" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: 
"json_float_neg_1000_dot_nine", - node: meth, - value: json.Number("-1000.9"), - exp: statusFailed, - err: `exec: argument "-1000.9" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "string_t", - node: meth, - value: "t", - exp: statusOK, - find: []any{true}, - }, - { - test: "string_f", - node: meth, - value: "f", - exp: statusOK, - find: []any{false}, - }, - { - test: "string_y", - node: meth, - value: "y", - exp: statusOK, - find: []any{true}, - }, - { - test: "string_n", - node: meth, - value: "n", - exp: statusOK, - find: []any{false}, - }, - { - test: "invalid_string", - node: meth, - value: "nope", - exp: statusFailed, - err: `exec: argument "nope" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "object", - node: meth, - value: map[string]any{"x": true}, - exp: statusFailed, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array", - node: meth, - value: []any{true, false}, - exp: statusFailed, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array_unwrap", - node: meth, - value: []any{true, false}, - unwrap: true, - exp: statusOK, - find: []any{true, false}, - }, - { - test: "bool_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodBoolean), ast.NewMethod(ast.MethodString)}), - value: true, - exp: statusOK, - find: []any{"true"}, - }, - { - test: "array_unwrap_next", - node: ast.LinkNodes([]ast.Node{ast.NewMethod(ast.MethodBoolean), ast.NewMethod(ast.MethodString)}), - value: []any{"t", "f"}, - unwrap: true, - exp: statusOK, - find: []any{"true", "false"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Make sure we have a .Boolean() node. 
- node, ok := tc.node.(*ast.MethodNode) - tc.checkNode(t, ok, node, ast.MethodBoolean) - - // Test execMethodBoolean - e, list := tc.prep() - res, err := e.execMethodBoolean(ctx, node, tc.value, list, tc.unwrap) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecBooleanString(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - val string - exp bool - err string - isErr error - }{ - { - test: "empty_string", - val: "", - err: `exec: argument "" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "t", - val: "t", - exp: true, - }, - { - test: "T", - val: "T", - exp: true, - }, - { - test: "true", - val: "true", - exp: true, - }, - { - test: "TRUE", - val: "TRUE", - exp: true, - }, - { - test: "TruE", - val: "TruE", - exp: true, - }, - { - test: "tru", - val: "tru", - err: `exec: argument "tru" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "f", - val: "f", - exp: false, - }, - { - test: "F", - val: "F", - exp: false, - }, - { - test: "false", - val: "false", - exp: false, - }, - { - test: "FALSE", - val: "FALSE", - exp: false, - }, - { - test: "FalSe", - val: "FalSe", - exp: false, - }, - { - test: "fal", - val: "fal", - err: `exec: argument "fal" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "y", - val: "y", - exp: true, - }, - { - test: "Y", - val: "Y", - exp: true, - }, - { - test: "yes", - val: "yes", - exp: true, - }, - { - test: "YES", - val: "YES", - exp: true, - }, - { - test: "Yes", - val: "Yes", - exp: true, - }, - { - test: "ye", - val: "ye", - err: `exec: argument "ye" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "n", - val: "n", - exp: false, - }, - { - test: "N", - val: "N", - exp: false, - }, - { - test: "no", - val: "no", - exp: false, - }, - { - test: "NO", - val: "NO", - exp: false, - }, - { - test: "non", - val: "non", - err: `exec: argument "non" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "on", - val: "on", - exp: true, - }, - { - test: "ON", - val: "ON", - exp: true, - }, - { - test: "oN", - val: "oN", - exp: true, - }, - { - test: "off", - val: "off", - exp: false, - }, - { - test: "OFF", - val: "OFF", - exp: false, - }, - { - test: "Off", - val: "Off", - exp: false, - }, - { - test: "oof", - val: "oof", - err: `exec: argument "oof" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "1", - val: "1", - exp: true, - }, - { - test: "0", - val: "0", - exp: false, - }, - { - test: "1_space", - val: "1 ", - err: `exec: argument "1 " of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "0_space", - val: "0 ", - err: `exec: argument "0 " of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "t_space", - val: "t ", - err: `exec: argument "t " of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - { - test: "f_space", - val: " f", - err: `exec: argument " f" of jsonpath item method .boolean() is invalid for type boolean`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - res, err := execBooleanString(tc.val, ast.MethodBoolean) - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } 
else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestExecuteNumberMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - number := ast.NewMethod(ast.MethodNumber) - decimal := ast.NewBinary(ast.BinaryDecimal, nil, nil) - - for _, tc := range []methodTestCase{ - { - test: "float", - node: number, - value: float64(98.6), - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "int", - node: number, - value: int64(42), - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "max_int", - node: number, - value: int64(math.MaxInt64), - exp: statusOK, - find: []any{float64(math.MaxInt64)}, - }, - { - test: "min_int", - node: number, - value: int64(math.MinInt64), - exp: statusOK, - find: []any{float64(math.MinInt64)}, - }, - { - test: "json_int", - node: number, - value: json.Number("42"), - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "json_float", - node: number, - value: json.Number("98.6"), - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "number_invalid_json", - node: number, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .number() is invalid for type numeric`, - isErr: ErrVerbose, - }, - { - test: "invalid_json_decimal", - node: decimal, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .decimal() is invalid for type numeric`, - isErr: ErrVerbose, - }, - { - test: "string_int", - node: number, - value: "42", - exp: statusOK, - find: []any{float64(42)}, - }, - { - test: "string_float", - node: number, - value: "98.6", - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "string_max_int", - node: number, - value: strconv.FormatInt(math.MaxInt64, 10), - exp: statusOK, - find: []any{float64(math.MaxInt64)}, - }, - { - test: "string_max_float", - node: number, - value: fmt.Sprintf("%v", math.MaxFloat64), - exp: statusOK, - find: []any{float64(math.MaxFloat64)}, - }, - { - test: "object_number", - node: number, - value: map[string]any{"x": "42"}, - exp: statusFailed, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "decimal_number", - node: decimal, - value: map[string]any{"x": "42"}, - exp: statusFailed, - err: `exec: jsonpath item method .decimal() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array", - node: number, - value: []any{"42", float64(98.6)}, - exp: statusFailed, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - isErr: ErrVerbose, - }, - { - test: "array_unwrap", - node: number, - value: []any{"42", float64(98.6)}, - unwrap: true, - exp: statusOK, - find: []any{float64(42), float64(98.6)}, - }, - { - test: "inf", - node: number, - value: "inf", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - isErr: ErrVerbose, - }, - { - test: "neg_inf", - node: number, - value: "-inf", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - isErr: ErrVerbose, - }, - { - test: "nan", - node: number, - value: "nan", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - isErr: ErrVerbose, - }, - { - test: "inf_decimal", - node: decimal, - value: "inf", - exp: statusFailed, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .decimal()`, - isErr: 
ErrVerbose, - }, - { - test: "float_decimal", - node: decimal, - value: float64(98.6), - exp: statusOK, - find: []any{float64(98.6)}, - }, - { - test: "float_decimal_precision", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("4"), nil), - value: float64(12.2), - exp: statusOK, - find: []any{float64(12)}, - }, - { - test: "float_decimal_precision_scale", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("4"), ast.NewInteger("2")), - value: float64(12.233), - exp: statusOK, - find: []any{float64(12.23)}, - }, - { - test: "float_decimal_error", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("3"), ast.NewInteger("2")), - value: float64(12.233), - exp: statusFailed, - err: `exec: argument "12.233" of jsonpath item method .decimal() is invalid for type numeric`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - // Determine the method. - var meth any - meth = tc.node - if bin, ok := tc.node.(*ast.BinaryNode); ok { - meth = bin.Operator() - } - - // Test execMethodNumber - e, list := tc.prep() - res, err := e.executeNumberMethod(ctx, tc.node, tc.value, list, tc.unwrap, meth) - tc.checkResults(t, res, list, err) - }) - } -} - -func TestExecuteDecimalMethod(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - node *ast.BinaryNode - value any - num float64 - exp float64 - err string - isErr error - }{ - { - test: "not_decimal", - node: ast.NewBinary(ast.BinaryAdd, nil, nil), - num: float64(98.6), - exp: float64(98.6), - }, - { - test: "no_args", - node: ast.NewBinary(ast.BinaryDecimal, nil, nil), - num: float64(98.6), - exp: float64(98.6), - }, - { - test: "invalid_precision", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewString("hi"), nil), - err: `exec: invalid jsonpath item type for .decimal() precision`, - isErr: ErrExecution, - }, - { - test: "precision_zero", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("0"), nil), - err: `exec: NUMERIC precision 0 must be between 1 and 1000`, - isErr: ErrExecution, - }, - { - test: "precision_1001", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("1001"), nil), - err: `exec: NUMERIC precision 1001 must be between 1 and 1000`, - isErr: ErrExecution, - }, - { - test: "precision_1000", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("1000"), nil), - num: float64(98.6), - exp: float64(99), - }, - { - test: "precision_10", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("10"), nil), - num: float64(98.6), - exp: float64(99), - }, - { - test: "precision_too_small", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("1"), nil), - value: float64(98.6), - num: float64(98.6), - err: `exec: argument "98.6" of jsonpath item method .decimal() is invalid for type numeric`, - isErr: ErrExecution, - }, - { - test: "invalid_scale", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("10"), ast.NewString("hi")), - err: `exec: invalid jsonpath item type for .decimal() scale`, - isErr: ErrExecution, - }, - { - test: "scale_neg_1001", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("10"), ast.NewInteger("-1001")), - err: `exec: NUMERIC scale -1001 must be between -1000 and 1000`, - isErr: ErrExecution, - }, - { - test: "scale_1001", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("10"), ast.NewInteger("1001")), - err: `exec: NUMERIC scale 1001 must be between -1000 and 1000`, - isErr: ErrExecution, - }, - { - test: "precision_scale_ok", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("5"), ast.NewInteger("3")), - num: 
float64(12.333), - exp: float64(12.333), - }, - { - test: "scale_down", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("5"), ast.NewInteger("2")), - num: float64(12.333), - exp: float64(12.33), - }, - { - test: "scale_short", - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("3"), ast.NewInteger("2")), - value: float64(12.333), - num: float64(12.333), - err: `exec: argument "12.333" of jsonpath item method .decimal() is invalid for type numeric`, - isErr: ErrExecution, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(laxRootPath, nil, true, false) - res, err := e.executeDecimalMethod(tc.node, tc.value, tc.num) - - //nolint:testifylint - a.Equal(tc.exp, res) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestNumericCallbacks(t *testing.T) { - t.Parallel() - - t.Run("intAbs", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.IsType((intCallback)(nil), intCallback(intAbs)) - for i, n := range []int64{0, -1, 2, -3, 4, 5} { - a.Equal(int64(i), intAbs(n)) - } - }) - - t.Run("intSelf", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.IsType((intCallback)(nil), intCallback(intSelf)) - for _, n := range []int64{4, 42, -99, -100323, 4, 10030} { - a.Equal(n, intSelf(n)) - } - }) - - t.Run("floatSelf", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.IsType((floatCallback)(nil), floatCallback(floatSelf)) - for _, n := range []float64{-1, 12, 53, 98.6, 42.3, 100.99} { - //nolint:testifylint - a.Equal(n, floatSelf(n)) - } - }) - - t.Run("intUMinus", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.IsType((intCallback)(nil), intCallback(intUMinus)) - for _, n := range []int64{4, 42, -99, -100323, 4, 10030} { - a.Equal(-n, intUMinus(n)) - } - }) - - t.Run("floatUMinus", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.IsType((floatCallback)(nil), floatCallback(floatUMinus)) - for _, n := range []float64{-1, 12, 53, 98.6, 42.3, 100.99} { - //nolint:testifylint - a.Equal(-n, floatUMinus(n)) - } - }) -} - -func TestExecuteNumericItemMethod(t *testing.T) { - t.Parallel() - abs := ast.NewMethod(ast.MethodAbs) - floor := ast.NewMethod(ast.MethodFloor) - ceil := ast.NewMethod(ast.MethodCeiling) - ctx := context.Background() - - for _, tc := range []struct { - methodTestCase - - intCB intCallback - floatCB floatCallback - }{ - { - methodTestCase: methodTestCase{ - test: "int_abs", - path: laxRootPath, - node: abs, - value: int64(-42), - exp: statusOK, - find: []any{int64(42)}, - }, - intCB: intAbs, - }, - { - methodTestCase: methodTestCase{ - test: "float_abs", - path: laxRootPath, - node: abs, - value: float64(-42.2), - exp: statusOK, - find: []any{float64(42.2)}, - }, - floatCB: math.Abs, - }, - { - methodTestCase: methodTestCase{ - test: "json_int_abs", - path: laxRootPath, - node: abs, - value: json.Number("-42"), - exp: statusOK, - find: []any{int64(42)}, - }, - intCB: intAbs, - }, - { - methodTestCase: methodTestCase{ - test: "json_float_abs", - path: laxRootPath, - node: abs, - value: json.Number("-42.2"), - exp: statusOK, - find: []any{float64(42.2)}, - }, - floatCB: math.Abs, - }, - { - methodTestCase: methodTestCase{ - test: "invalid_json_number", - path: laxRootPath, - node: abs, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: jsonpath item method .abs() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - }, - { -
methodTestCase: methodTestCase{ - test: "object", - path: laxRootPath, - node: abs, - value: map[string]any{"hi": true}, - exp: statusFailed, - err: `exec: jsonpath item method .abs() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - }, - { - methodTestCase: methodTestCase{ - test: "array", - path: laxRootPath, - node: abs, - value: []any{int64(-42), float64(-42.2)}, - exp: statusFailed, - err: `exec: jsonpath item method .abs() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - }, - { - methodTestCase: methodTestCase{ - test: "abs_array_unwrap", - path: laxRootPath, - node: abs, - value: []any{int64(-42), float64(-42.2)}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), float64(42.2)}, - }, - intCB: intAbs, - floatCB: math.Abs, - }, - { - methodTestCase: methodTestCase{ - test: "int_floor", - path: laxRootPath, - node: floor, - value: int64(-42), - exp: statusOK, - find: []any{int64(-42)}, - }, - intCB: intSelf, - }, - { - methodTestCase: methodTestCase{ - test: "float_floor", - path: laxRootPath, - node: floor, - value: float64(-42.2), - exp: statusOK, - find: []any{float64(-43)}, - }, - floatCB: math.Floor, - }, - { - methodTestCase: methodTestCase{ - test: "json_int_floor", - path: laxRootPath, - node: floor, - value: json.Number("42"), - exp: statusOK, - find: []any{int64(42)}, - }, - intCB: intSelf, - }, - { - methodTestCase: methodTestCase{ - test: "json_float_floor", - path: laxRootPath, - node: floor, - value: json.Number("42.2"), - exp: statusOK, - find: []any{float64(42)}, - }, - floatCB: math.Floor, - }, - { - methodTestCase: methodTestCase{ - test: "invalid_json_number", - path: laxRootPath, - node: floor, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: jsonpath item method .floor() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - }, - { - methodTestCase: methodTestCase{ - test: "floor_array_unwrap", - path: laxRootPath, - node: floor, - value: []any{int64(42), float64(42.8)}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), float64(42)}, - }, - intCB: intSelf, - floatCB: math.Floor, - }, - - { - methodTestCase: methodTestCase{ - test: "int_ceil", - path: laxRootPath, - node: ceil, - value: int64(-42), - exp: statusOK, - find: []any{int64(-42)}, - }, - intCB: intSelf, - }, - { - methodTestCase: methodTestCase{ - test: "float_ceil", - path: laxRootPath, - node: ceil, - value: float64(-42.2), - exp: statusOK, - find: []any{float64(-42)}, - }, - floatCB: math.Ceil, - }, - { - methodTestCase: methodTestCase{ - test: "json_int_ceil", - path: laxRootPath, - node: ceil, - value: json.Number("42"), - exp: statusOK, - find: []any{int64(42)}, - }, - intCB: intSelf, - }, - { - methodTestCase: methodTestCase{ - test: "json_float_ceil", - path: laxRootPath, - node: ceil, - value: json.Number("42.2"), - exp: statusOK, - find: []any{float64(43)}, - }, - floatCB: math.Ceil, - }, - { - methodTestCase: methodTestCase{ - test: "invalid_json_number", - path: laxRootPath, - node: ceil, - value: json.Number("hi"), - exp: statusFailed, - err: `exec: jsonpath item method .ceiling() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - }, - { - methodTestCase: methodTestCase{ - test: "ceil_array_unwrap", - path: laxRootPath, - node: ceil, - value: []any{int64(42), float64(42.8)}, - unwrap: true, - exp: statusOK, - find: []any{int64(42), float64(43)}, - }, - intCB: intSelf, - floatCB: math.Ceil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - - e, list := tc.prep() - res, err := 
e.executeNumericItemMethod(ctx, tc.node, tc.value, tc.unwrap, tc.intCB, tc.floatCB, list) - tc.checkResults(t, res, list, err) - }) - } -} diff --git a/path/exec/op.go b/path/exec/op.go deleted file mode 100644 index b27aee4..0000000 --- a/path/exec/op.go +++ /dev/null @@ -1,263 +0,0 @@ -package exec - -import ( - "context" - "fmt" - "math" - "strings" - - "github.com/theory/sqljson/path/ast" - "golang.org/x/exp/maps" // Switch to stdlib maps once Go 1.22 support is dropped -) - -// execBinaryNode executes node's binary operation against value. -func (exec *Executor) execBinaryNode( - ctx context.Context, - node *ast.BinaryNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - switch node.Operator() { - case ast.BinaryAnd, ast.BinaryOr, ast.BinaryEqual, ast.BinaryNotEqual, - ast.BinaryLess, ast.BinaryLessOrEqual, ast.BinaryGreater, - ast.BinaryGreaterOrEqual, ast.BinaryStartsWith: - // Binary boolean types. - res, err := exec.executeBoolItem(ctx, node, value, true) - return exec.appendBoolResult(ctx, node, found, res, err) - case ast.BinaryAdd, ast.BinarySub, ast.BinaryMul, ast.BinaryDiv, ast.BinaryMod: - return exec.execBinaryMathExpr(ctx, node, value, found) - case ast.BinaryDecimal: - return exec.executeNumberMethod(ctx, node, value, found, unwrap, node.Operator()) - case ast.BinarySubscript: - // This should not happen because the Parser disallows it. - return statusFailed, fmt.Errorf( - "%w: evaluating jsonpath subscript expression outside of array subscript", - ErrExecution, - ) - } - - return statusNotFound, nil -} - -// execUnaryNode executes node's unary operation against value. -func (exec *Executor) execUnaryNode( - ctx context.Context, - node *ast.UnaryNode, - value any, - found *valueList, - unwrap bool, -) (resultStatus, error) { - switch node.Operator() { - case ast.UnaryNot, ast.UnaryIsUnknown, ast.UnaryExists: - // Unary boolean types. - res, err := exec.executeBoolItem(ctx, node, value, true) - return exec.appendBoolResult(ctx, node, found, res, err) - case ast.UnaryFilter: - if unwrap { - if _, ok := value.([]any); ok { - return exec.executeItemUnwrapTargetArray(ctx, node, value, found) - } - } - - st, err := exec.executeNestedBoolItem(ctx, node.Operand(), value) - if st != predTrue { - return statusNotFound, err - } - return exec.executeNextItem(ctx, node, nil, value, found) - case ast.UnaryPlus: - return exec.execUnaryMathExpr(ctx, node, value, intSelf, floatSelf, found) - case ast.UnaryMinus: - return exec.execUnaryMathExpr(ctx, node, value, intUMinus, floatUMinus, found) - case ast.UnaryDateTime, ast.UnaryDate, ast.UnaryTime, ast.UnaryTimeTZ, - ast.UnaryTimestamp, ast.UnaryTimestampTZ: - if unwrap { - if array, ok := value.([]any); ok { - return exec.executeAnyItem(ctx, node, array, found, 1, 1, 1, false, false) - } - } - return exec.executeDateTimeMethod(ctx, node, value, found) - } - - return statusNotFound, nil -} - -// execRegexNode executes regex against value. -func (exec *Executor) execRegexNode( - ctx context.Context, - regex *ast.RegexNode, - value any, - found *valueList, -) (resultStatus, error) { - // Binary boolean type. - res, err := exec.executeBoolItem(ctx, regex, value, true) - return exec.appendBoolResult(ctx, regex, found, res, err) -} - -func (exec *Executor) tempSetIgnoreStructuralErrors(val bool) func() { - savedIgnoreStructuralErrors := exec.ignoreStructuralErrors - exec.ignoreStructuralErrors = val - return func() { exec.ignoreStructuralErrors = savedIgnoreStructuralErrors } -} - -// execAnyNode handles the execution of node.
value must be either -// map[string]any or []any. If found is not nil then resultStatus should be -// ignored. -func (exec *Executor) execAnyNode( - ctx context.Context, - node *ast.AnyNode, - value any, - found *valueList, -) (resultStatus, error) { - next := node.Next() - // first try without any intermediate steps - if node.First() == 0 { - defer exec.tempSetIgnoreStructuralErrors(true)() - res, err := exec.executeNextItem(ctx, node, next, value, found) - if err != nil || (res == statusOK && found == nil) { - return res, err - } - } - - switch value := value.(type) { - case map[string]any: - return exec.executeAnyItem( - ctx, next, maps.Values(value), found, 1, - node.First(), node.Last(), true, exec.autoUnwrap(), - ) - case []any: - return exec.executeAnyItem( - ctx, next, value, found, 1, - node.First(), node.Last(), true, exec.autoUnwrap(), - ) - } - - return statusNotFound, nil -} - -// collection converts v into a slice of values if it's either a map or a -// slice. Otherwise it returns nil. -func collection(v any) []any { - switch v := v.(type) { - case map[string]any: - return maps.Values(v) // Just work with the values - case []any: - return v - } - return nil -} - -// executeAnyItem is the implementation of several jsonpath nodes: -// -// - ast.AnyNode (.** accessor) -// - ast.ConstAnyKey (.* accessor) -// - ast.ConstAnyArray ([*] accessor) -// -// The value parameter must be a slice of values; the caller must properly -// extract the values from a map. If found is not nil then resultStatus should -// be ignored. -func (exec *Executor) executeAnyItem( - ctx context.Context, - node ast.Node, - value []any, - found *valueList, - level, first, last uint32, - ignoreStructuralErrors, unwrapNext bool, -) (resultStatus, error) { - res := statusNotFound - var err error - if level > last { - return res, nil - } - - // When found is not nil, executeAnyItem can return statusNotFound even - // when items were found. This seems to be because it returns the last - // result in the list it iterates over or from a recursive call. This - // isn't super important for the top-level query functions, which pay - // attention to either the contents of found or the result, and not both. - // But to be internally consistent, look at the size of the found values - // and return statusOK below if it has grown, regardless of what the last - // result was. - size := 0 - if found != nil { - size = len(found.list) - } - - // Recursively iterate over jsonb objects/arrays - for _, v := range value { - col := collection(v) - - if level >= first || (first == math.MaxUint32 && last == math.MaxUint32 && col == nil) { - // check expression - switch { - case node != nil: - if ignoreStructuralErrors { - defer exec.tempSetIgnoreStructuralErrors(true)() - } - res, err = exec.executeItemOptUnwrapTarget(ctx, node, v, found, unwrapNext) - if res.failed() || (res == statusOK && found == nil) { - return res, err - } - case found != nil: - found.append(v) - res = statusOK - default: - return statusOK, nil - } - } - - if level < last { - res, err = exec.executeAnyItem( - ctx, node, col, found, level+1, first, last, ignoreStructuralErrors, unwrapNext, - ) - if res.failed() || (res == statusOK && found == nil) { - return res, err - } - } - } - - // Always return OK if items were found. - if found != nil && res != statusFailed && err == nil && len(found.list) > size { - res = statusOK - } - - return res, err -} - -// executeLikeRegex is the LIKE_REGEX predicate callback. -// Implements predicateCallback. 
-func (exec *Executor) executeLikeRegex(_ context.Context, node ast.Node, value, _ any) (predOutcome, error) { - rn, ok := node.(*ast.RegexNode) - if !ok { - panic(fmt.Sprintf( - "Node %T passed to executeLikeRegex is not an ast.RegexNode", - node, - )) - } - - str, ok := value.(string) - if !ok { - return predUnknown, nil - } - - if rn.Regexp().MatchString(str) { - return predTrue, nil - } - return predFalse, nil -} - -// executeStartsWith is the STARTS_WITH predicate callback. It returns -// predTrue when whole string starts with initial and predFalse if it does -// not. Returns predUnknown if either whole or initial is not a string. -// Implements predicateCallback. -func executeStartsWith(_ context.Context, _ ast.Node, whole, initial any) (predOutcome, error) { - if str, ok := whole.(string); ok { - if prefix, ok := initial.(string); ok { - if strings.HasPrefix(str, prefix) { - return predTrue, nil - } - return predFalse, nil - } - } - return predUnknown, nil -} diff --git a/path/exec/op_test.go b/path/exec/op_test.go deleted file mode 100644 index 4ac32b6..0000000 --- a/path/exec/op_test.go +++ /dev/null @@ -1,909 +0,0 @@ -package exec - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/types" -) - -func TestExecBinaryNode(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - node *ast.BinaryNode - value any - unwrap bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "and", - node: ast.NewBinary( - ast.BinaryAnd, - ast.NewBinary(ast.BinaryEqual, ast.NewConst(ast.ConstRoot), ast.NewConst(ast.ConstRoot)), - ast.NewBinary(ast.BinaryEqual, ast.NewConst(ast.ConstRoot), ast.NewConst(ast.ConstRoot)), - ), - exp: statusOK, - find: []any{true}, - }, - { - test: "or", - node: ast.NewBinary( - ast.BinaryOr, - ast.NewBinary(ast.BinaryEqual, ast.NewConst(ast.ConstRoot), ast.NewConst(ast.ConstRoot)), - ast.NewBinary(ast.BinaryEqual, ast.NewConst(ast.ConstRoot), ast.NewConst(ast.ConstRoot)), - ), - exp: statusOK, - find: []any{true}, - }, - { - test: "eq", - node: ast.NewBinary(ast.BinaryEqual, ast.NewInteger("42"), ast.NewInteger("42")), - exp: statusOK, - find: []any{true}, - }, - { - test: "ne", - node: ast.NewBinary(ast.BinaryNotEqual, ast.NewInteger("42"), ast.NewInteger("42")), - exp: statusOK, - find: []any{false}, - }, - { - test: "lt", - node: ast.NewBinary(ast.BinaryLess, ast.NewInteger("41"), ast.NewInteger("42")), - exp: statusOK, - find: []any{true}, - }, - { - test: "gt", - node: ast.NewBinary(ast.BinaryLess, ast.NewInteger("42"), ast.NewInteger("42")), - exp: statusOK, - find: []any{false}, - }, - { - test: "le", - node: ast.NewBinary(ast.BinaryLessOrEqual, ast.NewInteger("42"), ast.NewInteger("42")), - exp: statusOK, - find: []any{true}, - }, - { - test: "ge", - node: ast.NewBinary(ast.BinaryGreaterOrEqual, ast.NewInteger("42"), ast.NewInteger("42")), - exp: statusOK, - find: []any{true}, - }, - { - test: "starts_with", - node: ast.NewBinary(ast.BinaryStartsWith, ast.NewString("hi there"), ast.NewString("hi")), - exp: statusOK, - find: []any{true}, - }, - { - test: "add", - node: ast.NewBinary(ast.BinaryAdd, ast.NewInteger("12"), ast.NewInteger("38")), - exp: statusOK, - find: []any{int64(50)}, - }, - { - test: "sub", - node: ast.NewBinary(ast.BinarySub, ast.NewInteger("42"), ast.NewInteger("12")), - exp: statusOK, - find: []any{int64(30)}, - }, - { - 
test: "mul", - node: ast.NewBinary(ast.BinaryMul, ast.NewInteger("5"), ast.NewInteger("6")), - exp: statusOK, - find: []any{int64(30)}, - }, - { - test: "div", - node: ast.NewBinary(ast.BinaryDiv, ast.NewInteger("10"), ast.NewInteger("2")), - exp: statusOK, - find: []any{int64(5)}, - }, - { - test: "mod", - node: ast.NewBinary(ast.BinaryMod, ast.NewInteger("12"), ast.NewInteger("5")), - exp: statusOK, - find: []any{int64(2)}, - }, - { - test: "decimal", - value: float64(12.233), - node: ast.NewBinary(ast.BinaryDecimal, ast.NewInteger("4"), ast.NewInteger("2")), - exp: statusOK, - find: []any{float64(12.23)}, - }, - { - test: "subscript", - node: ast.NewBinary(ast.BinarySubscript, nil, nil), - exp: statusFailed, - err: `exec: evaluating jsonpath subscript expression outside of array subscript`, - isErr: ErrExecution, - }, - { - test: "unknown_op", - node: ast.NewBinary(ast.BinaryOperator(-1), nil, nil), - exp: statusNotFound, - find: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(laxRootPath, nil, true, false) - list := newList() - res, err := e.execBinaryNode(ctx, tc.node, tc.value, list, tc.unwrap) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - }) - } -} - -func TestExecUnaryNode(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - node *ast.UnaryNode - value any - unwrap bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "not", - node: ast.NewUnary( - ast.UnaryNot, - ast.NewUnary(ast.UnaryExists, ast.NewConst(ast.ConstRoot)), - ), - exp: statusOK, - find: []any{false}, - }, - { - test: "filter", - node: ast.NewUnary( - ast.UnaryFilter, - ast.NewBinary( - ast.BinaryEqual, - ast.NewConst(ast.ConstTrue), - ast.NewConst(ast.ConstTrue), - ), - ), - value: "hi", - exp: statusOK, - find: []any{"hi"}, - }, - { - test: "filter_false", - node: ast.NewUnary( - ast.UnaryFilter, - ast.NewBinary( - ast.BinaryNotEqual, - ast.NewConst(ast.ConstTrue), - ast.NewConst(ast.ConstTrue), - ), - ), - value: "hi", - exp: statusNotFound, - find: []any{}, - }, - { - test: "filter_array", - node: ast.NewUnary( - ast.UnaryFilter, - ast.NewBinary( - ast.BinaryEqual, - ast.NewConst(ast.ConstTrue), - ast.NewConst(ast.ConstTrue), - ), - ), - value: []any{"hi"}, - exp: statusOK, - find: []any{[]any{"hi"}}, - }, - { - test: "filter_array_unwrap", - node: ast.NewUnary( - ast.UnaryFilter, - ast.NewBinary( - ast.BinaryEqual, - ast.NewConst(ast.ConstTrue), - ast.NewConst(ast.ConstTrue), - ), - ), - value: []any{"hi", "there"}, - unwrap: true, - exp: statusOK, - find: []any{"hi", "there"}, - }, - { - test: "filter_array_unwrap_false", - node: ast.NewUnary( - ast.UnaryFilter, - ast.NewBinary( - ast.BinaryNotEqual, - ast.NewConst(ast.ConstTrue), - ast.NewConst(ast.ConstTrue), - ), - ), - value: []any{"hi", "there"}, - unwrap: true, - exp: statusNotFound, - find: []any{}, - }, - { - test: "plus", - node: ast.NewUnary(ast.UnaryPlus, ast.NewConst(ast.ConstRoot)), - exp: statusOK, - value: int64(-42), - find: []any{int64(-42)}, - }, - { - test: "minus", - node: ast.NewUnary(ast.UnaryMinus, ast.NewConst(ast.ConstRoot)), - exp: statusOK, - value: int64(-42), - find: []any{int64(42)}, - }, - { - test: "datetime", - node: ast.NewUnary(ast.UnaryDateTime, nil), - exp: statusOK, - value: 
"2024-06-14", - find: []any{types.NewDate(time.Date(2024, 6, 14, 0, 0, 9, 9, time.UTC))}, - }, - { - test: "date", - node: ast.NewUnary(ast.UnaryDateTime, nil), - exp: statusOK, - value: "2024-06-14", - find: []any{types.NewDate(time.Date(2024, 6, 14, 0, 0, 0, 0, time.UTC))}, - }, - { - test: "time", - node: ast.NewUnary(ast.UnaryTime, nil), - exp: statusOK, - value: "14:23:54", - find: []any{types.NewTime(time.Date(0, 1, 1, 14, 23, 54, 0, time.UTC))}, - }, - { - test: "timetz", - node: ast.NewUnary(ast.UnaryTimeTZ, nil), - exp: statusOK, - value: "14:23:54+01", - find: []any{types.NewTimeTZ(time.Date(0, 1, 1, 14, 23, 54, 0, time.FixedZone("", 60*60)))}, - }, - { - test: "timestamp", - node: ast.NewUnary(ast.UnaryTimestamp, nil), - exp: statusOK, - value: "2024-06-14T14:23:54", - find: []any{types.NewTimestamp(time.Date(2024, 6, 14, 14, 23, 54, 0, time.UTC))}, - }, - { - test: "timestamptz", - node: ast.NewUnary(ast.UnaryTimestampTZ, nil), - exp: statusOK, - value: "2024-06-14T14:23:54+01", - find: []any{types.NewTimestampTZ( - ctx, - time.Date(2024, 6, 14, 14, 23, 54, 0, time.FixedZone("", 60*60)), - )}, - }, - { - test: "datetime_array", - node: ast.NewUnary(ast.UnaryDateTime, nil), - value: []any{"2024-06-14", "2024-06-14T14:23:54+01"}, - exp: statusFailed, - err: `exec: jsonpath item method .datetime() can only be applied to a string`, - isErr: ErrVerbose, - }, - { - test: "datetime_array_unwrap", - node: ast.NewUnary(ast.UnaryDateTime, nil), - exp: statusOK, - value: []any{"2024-06-14", "2024-06-14T14:23:54+01"}, - unwrap: true, - find: []any{ - types.NewDate(time.Date(2024, 6, 14, 0, 0, 0, 0, time.FixedZone("", 0))), - types.NewTimestampTZ( - ctx, - time.Date(2024, 6, 14, 14, 23, 54, 0, time.FixedZone("", 60*60)), - ), - }, - }, - { - test: "unknown_op", - node: ast.NewUnary(ast.UnaryOperator(-1), nil), - exp: statusNotFound, - find: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(laxRootPath, nil, true, false) - e.root = tc.value - list := newList() - res, err := e.execUnaryNode(ctx, tc.node, tc.value, list, tc.unwrap) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - }) - } -} - -func TestExecRegexNode(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - regex string - value any - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "regex_match", - regex: "^hi", - value: "hi there", - exp: statusOK, - find: []any{true}, - }, - { - test: "regex_no_match", - regex: "^hi", - value: "say hi there", - exp: statusOK, - find: []any{false}, - }, - { - test: "regex_not_string", - regex: "^hi", - value: map[string]any{"x": "hi"}, - exp: statusOK, - find: []any{nil}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - rx, err := ast.NewRegex(ast.NewConst(ast.ConstRoot), tc.regex, "") - r.NoError(err) - - e := newTestExecutor(laxRootPath, nil, true, false) - e.root = tc.value - list := newList() - res, err := e.execRegexNode(ctx, rx, tc.value, list) - a.Equal(tc.exp, res) - - // Check the error and list. 
- if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - }) - } -} - -func TestExecAnyNode(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - node ast.Node - value any - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "map_unbounded", - node: ast.NewAny(0, -1), - value: map[string]any{"x": true, "y": true}, - exp: statusOK, - find: []any{map[string]any{"x": true, "y": true}, true, true}, - }, - { - test: "map_2_levels", - node: ast.NewAny(0, 1), - value: map[string]any{"x": map[string]any{"y": map[string]any{"z": "hi"}}}, - exp: statusOK, - find: []any{ - map[string]any{"x": map[string]any{"y": map[string]any{"z": "hi"}}}, - map[string]any{"y": map[string]any{"z": "hi"}}, - }, - }, - { - test: "map_3_levels", - node: ast.NewAny(0, 2), - value: map[string]any{"x": map[string]any{"y": map[string]any{"z": "hi"}}}, - exp: statusOK, - find: []any{ - map[string]any{"x": map[string]any{"y": map[string]any{"z": "hi"}}}, - map[string]any{"y": map[string]any{"z": "hi"}}, - map[string]any{"z": "hi"}, - }, - }, - { - test: "map_1_2_levels", - node: ast.NewAny(1, 2), - value: map[string]any{"x": map[string]any{"y": map[string]any{"z": "hi"}}}, - exp: statusOK, - find: []any{ - map[string]any{"y": map[string]any{"z": "hi"}}, - map[string]any{"z": "hi"}, - }, - }, - { - test: "array_unbounded", - node: ast.NewAny(0, -1), - value: []any{"x", "y", map[string]any{"x": "hi"}}, - exp: statusOK, - find: []any{ - []any{"x", "y", map[string]any{"x": "hi"}}, - "x", "y", - map[string]any{"x": "hi"}, - "hi", - }, - }, - { - test: "array_2_levels", - node: ast.NewAny(0, 1), - value: []any{"x", "y", map[string]any{"x": "hi"}}, - exp: statusOK, - find: []any{ - []any{"x", "y", map[string]any{"x": "hi"}}, - "x", "y", - map[string]any{"x": "hi"}, - }, - }, - { - test: "array_1_levels", - node: ast.NewAny(1, 1), - value: []any{"x", "y", map[string]any{"x": "hi"}}, - exp: statusOK, - find: []any{ - "x", "y", - map[string]any{"x": "hi"}, - }, - }, - { - test: "not_object_or_array", - node: ast.NewAny(1, -1), - value: true, - exp: statusNotFound, - find: []any{}, - }, - { - test: "map_next", - node: ast.LinkNodes([]ast.Node{ast.NewAny(1, 1), ast.NewMethod(ast.MethodString)}), - value: map[string]any{"x": true, "y": true}, - exp: statusOK, - find: []any{"true", "true"}, - }, - { - test: "map_next_error", - node: ast.LinkNodes([]ast.Node{ast.NewAny(1, 1), ast.NewMethod(ast.MethodFloor)}), - value: map[string]any{"x": "hi"}, - exp: statusFailed, - err: `exec: jsonpath item method .floor() can only be applied to a numeric value`, - isErr: ErrVerbose, - }, - { - test: "nested_array", - node: ast.NewAny(1, -1), - value: []any{[]any{"hi", true}}, - exp: statusOK, - find: []any{[]any{"hi", true}, "hi", true}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Should have an AnyNode. - node, ok := tc.node.(*ast.AnyNode) - a.True(ok) - - e := newTestExecutor(laxRootPath, nil, true, false) - e.ignoreStructuralErrors = false - e.root = tc.value - - // Test with found first and ignore the result. - list := newList() - _, err := e.execAnyNode(ctx, node, tc.value, list) - a.False(e.ignoreStructuralErrors) - - // Check the error and list. 
- if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Test without found, pay attention to the result. - res, err := e.execAnyNode(ctx, node, tc.value, nil) - a.False(e.ignoreStructuralErrors) - a.Equal(tc.exp, res) - - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestCollection(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - value any - exp []any - }{ - { - test: "slice", - value: []any{"hi", "yo"}, - exp: []any{"hi", "yo"}, - }, - { - test: "map", - value: map[string]any{"x": "hi", "y": "hi"}, - exp: []any{"hi", "hi"}, - }, - { - test: "int", - value: int64(42), - }, - { - test: "bool", - value: true, - }, - { - test: "nil", - value: nil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(tc.exp, collection(tc.value)) - }) - } -} - -func TestExecuteAnyItem(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - node ast.Node - value []any - ignore bool - unwrap bool - exp resultStatus - find []any - err string - isErr error - }{ - { - test: "flat_all", - node: ast.NewAny(0, -1), - value: []any{"hi", true}, - exp: statusOK, - find: []any{"hi", true}, - }, - { - test: "nest_map_all", - node: ast.NewAny(0, -1), - value: []any{"hi", map[string]any{"x": map[string]any{"y": map[string]any{"z": "yo"}}}}, - exp: statusOK, - find: []any{ - "hi", - map[string]any{"x": map[string]any{"y": map[string]any{"z": "yo"}}}, - map[string]any{"y": map[string]any{"z": "yo"}}, - map[string]any{"z": "yo"}, - "yo", - map[string]any{"y": map[string]any{"z": "yo"}}, - map[string]any{"z": "yo"}, - "yo", - map[string]any{"z": "yo"}, - "yo", - "yo", - }, - }, - { - test: "nest_map_0_2", - node: ast.NewAny(0, 2), - value: []any{"hi", map[string]any{"x": map[string]any{"y": map[string]any{"z": "yo"}}}}, - exp: statusOK, - find: []any{ - "hi", - map[string]any{"x": map[string]any{"y": map[string]any{"z": "yo"}}}, - map[string]any{"y": map[string]any{"z": "yo"}}, - map[string]any{"z": "yo"}, - map[string]any{"y": map[string]any{"z": "yo"}}, - map[string]any{"z": "yo"}, - "yo", - }, - }, - { - test: "nest_map_1_2", - node: ast.NewAny(1, 2), - value: []any{"hi", map[string]any{"x": map[string]any{"y": map[string]any{"z": "yo"}}}}, - exp: statusOK, - find: []any{ - map[string]any{"y": map[string]any{"z": "yo"}}, - map[string]any{"z": "yo"}, - map[string]any{"z": "yo"}, - "yo", - }, - }, - { - test: "nest_array_all", - node: ast.NewAny(0, -1), - value: []any{"hi", []any{"yo", []any{"x", []any{true}}}}, - exp: statusOK, - find: []any{ - "hi", - []any{"yo", []any{"x", []any{true}}}, - "yo", - []any{"x", []any{true}}, - "x", - []any{true}, - true, - "yo", - []any{"x", []any{true}}, - "x", - []any{true}, - true, - "x", - []any{true}, - true, - true, - }, - }, - { - test: "nest_array_0_2", - node: ast.NewAny(0, 2), - value: []any{"hi", []any{"yo", []any{"x", []any{true}}}}, - exp: statusOK, - find: []any{ - "hi", - []any{"yo", []any{"x", []any{true}}}, - "yo", - []any{"x", []any{true}}, - "x", - []any{true}, - "yo", - []any{"x", []any{true}}, - "x", - []any{true}, - true, - }, - }, - { - test: "nest_array_1_2", - node: ast.NewAny(1, 2), - value: []any{"hi", []any{"yo", []any{"x", []any{true}}}}, - exp: statusOK, - find: []any{ - "yo", - []any{"x", []any{true}}, - "x", - []any{true}, - "x", - []any{true}, 
- true, - }, - }, - { - test: "level_gt_last", - node: ast.NewAny(0, 0), - value: []any{"hi", true}, - exp: statusNotFound, - find: []any{}, - }, - { - test: "next_item", - node: ast.LinkNodes([]ast.Node{ast.NewAny(0, -1), ast.NewMethod(ast.MethodString)}), - value: []any{"hi", true}, - exp: statusOK, - find: []any{"hi", "true"}, - }, - { - test: "next_item_level", - node: ast.LinkNodes([]ast.Node{ast.NewAny(0, -1), ast.NewMethod(ast.MethodString)}), - value: []any{[]any{"hi", true}}, - exp: statusOK, - find: []any{"hi", "true", "hi", "true", "hi", "true"}, - }, - { - test: "next_item_error", - node: ast.LinkNodes([]ast.Node{ast.NewAny(0, -1), ast.NewMethod(ast.MethodNumber)}), - value: []any{"hi", true}, - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .number() is invalid for type numeric`, - isErr: ErrVerbose, - }, - { - test: "next_item_level_error", - node: ast.LinkNodes([]ast.Node{ast.NewAny(0, -1), ast.NewMethod(ast.MethodNumber)}), - value: []any{"hi", []any{"hi", true}}, - exp: statusFailed, - err: `exec: argument "hi" of jsonpath item method .number() is invalid for type numeric`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Should have an AnyNode. - node, ok := tc.node.(*ast.AnyNode) - a.True(ok) - - e := newTestExecutor(laxRootPath, nil, true, false) - e.ignoreStructuralErrors = false - e.root = tc.value - - // Test with found first and ignore the result. - list := newList() - res, err := e.executeAnyItem(ctx, node, tc.value, list, 1, node.First(), node.Last(), tc.ignore, tc.unwrap) - a.Equal(tc.exp, res) - a.False(e.ignoreStructuralErrors) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - a.Equal(tc.find, list.list) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - a.Empty(list.list) - } - - // Test without found, pay attention to the result. - res, err = e.executeAnyItem(ctx, node, tc.value, nil, 1, node.First(), node.Last(), tc.ignore, tc.unwrap) - a.False(e.ignoreStructuralErrors) - a.Equal(tc.exp, res) - - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -// TestExecuteLikeRegex in exec_test.go tests happy paths. 
-func TestExecuteLikeRegexErrors(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - ctx := context.Background() - - e := newTestExecutor(laxRootPath, nil, true, false) - r.PanicsWithValue( - "Node *ast.ConstNode passed to executeLikeRegex is not an ast.RegexNode", - func() { _, _ = e.executeLikeRegex(ctx, ast.NewConst(ast.ConstRoot), nil, nil) }, - ) - - rx, err := ast.NewRegex(ast.NewConst(ast.ConstRoot), ".", "") - r.NoError(err) - - res, err := e.executeLikeRegex(ctx, rx, true, nil) - a.Equal(predUnknown, res) - a.NoError(err) -} - -func TestExecuteStartsWith(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - str any - prefix any - exp predOutcome - }{ - { - test: "full_string", - str: "hi there", - prefix: "hi there", - exp: predTrue, - }, - { - test: "prefix", - str: "hi there", - prefix: "hi ", - exp: predTrue, - }, - { - test: "not_prefix", - str: "hi there", - prefix: " hi", - exp: predFalse, - }, - { - test: "not_string", - str: true, - exp: predUnknown, - }, - { - test: "not_string_prefix", - str: "hi", - prefix: int64(42), - exp: predUnknown, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - res, err := executeStartsWith(ctx, nil, tc.str, tc.prefix) - a.Equal(tc.exp, res) - r.NoError(err) - }) - } -} diff --git a/path/exec/pg_test.go b/path/exec/pg_test.go deleted file mode 100644 index 8890fa5..0000000 --- a/path/exec/pg_test.go +++ /dev/null @@ -1,6963 +0,0 @@ -//nolint:lll // Ignore long lines copied from Postgres. -package exec - -// Tests from https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql -// Results from https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/expected/jsonb_jsonpath.out -// Test cases scaffolded by pasting each block of tests under __DATA__ in -// .util/pg2go.pl and running `./.util/pg2go.pl | pbcopy`. - -import ( - "context" - "encoding/json" - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -// Convenience function to marshal a string into JSON. -func js(js string) any { - var ret any - if err := json.Unmarshal([]byte(js), &ret); err != nil { - panic(err) - } - return ret -} - -// Convenience function to marshal a string into a JSON object. -func jv(js string) map[string]any { - var ret map[string]any - if err := json.Unmarshal([]byte(js), &ret); err != nil { - panic(err) - } - return ret -} - -// Test cases for Exists(). -type existsTestCase struct { - test string - path string - json any - exp any - err string - opt []Option -} - -func (tc existsTestCase) run(ctx context.Context, a *assert.Assertions, r *require.Assertions) { - path, err := parser.Parse(tc.path) - r.NoError(err) - - res, err := Exists(ctx, path, tc.json, tc.opt...) - switch { - case tc.err != "": - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrExecution) - a.False(res) - case tc.exp == nil: - // When Postgres returns NULL, we return false + ErrNull - r.EqualError(err, "NULL") - r.ErrorIs(err, NULL) - a.False(res) - default: - r.NoError(err) - a.Equal(tc.exp, res) - } -} - -// Mimic the Postgres @? operator. -func (tc existsTestCase) runAtQuestion(ctx context.Context, a *assert.Assertions, r *require.Assertions) { - tc.opt = append(tc.opt, WithSilent()) - tc.run(ctx, a, r) -} - -// Test cases for Match(). 
-type matchTestCase existsTestCase - -func (tc matchTestCase) run(ctx context.Context, a *assert.Assertions, r *require.Assertions) { - path, err := parser.Parse(tc.path) - r.NoError(err) - - res, err := Match(ctx, path, tc.json, tc.opt...) - switch { - case tc.err != "": - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrExecution) - a.False(res) - case tc.exp == nil: - // When Postgres returns NULL, we return false + ErrNull - r.EqualError(err, "NULL") - r.ErrorIs(err, NULL) - a.False(res) - default: - r.NoError(err) - a.Equal(tc.exp, res) - } -} - -// Mimic the Postgres @@ operator. -func (tc matchTestCase) runAtAt(ctx context.Context, a *assert.Assertions, r *require.Assertions) { - tc.opt = append(tc.opt, WithSilent()) - tc.run(ctx, a, r) -} - -// Test cases for Query(). -type queryTestCase struct { - test string - path string - json any - exp []any - err string - opt []Option - rand bool -} - -func (tc queryTestCase) run(ctx context.Context, a *assert.Assertions, r *require.Assertions) { - path, err := parser.Parse(tc.path) - r.NoError(err) - res, err := Query(ctx, path, tc.json, tc.opt...) - - if tc.err != "" { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrExecution) - a.Nil(res) - } else { - r.NoError(err) - if tc.rand { - a.ElementsMatch(tc.exp, res) - } else { - a.Equal(tc.exp, res) - } - } -} - -// Test cases for First(). -type firstTestCase struct { - test string - path string - json any - exp any - err string - opt []Option - rand bool -} - -func (tc firstTestCase) run(ctx context.Context, a *assert.Assertions, r *require.Assertions) { - path, err := parser.Parse(tc.path) - r.NoError(err) - res, err := First(ctx, path, tc.json, tc.opt...) - - if tc.err != "" { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrExecution) - a.Nil(res) - } else { - r.NoError(err) - if tc.rand { - a.ElementsMatch(tc.exp, res) - } else { - a.Equal(tc.exp, res) - } - } -} - -func TestPgAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1-L40 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`{"a": 12}`), - path: "$", - exp: true, - }, - { - test: "test_2", - json: js(`{"a": 12}`), - path: "1", - exp: true, - }, - { - test: "test_3", - json: js(`{"a": 12}`), - path: "$.a.b", - exp: false, - }, - { - test: "test_4", - json: js(`{"a": 12}`), - path: "$.b", - exp: false, - }, - { - test: "test_5", - json: js(`{"a": 12}`), - path: "$.a + 2", - exp: true, - }, - { - test: "test_6", - json: js(`{"a": 12}`), - path: "$.b + 2", - exp: nil, - }, - { - test: "test_7", - json: js(`{"a": {"a": 12}}`), - path: "$.a.a", - exp: true, - }, - { - test: "test_8", - json: js(`{"a": {"a": 12}}`), - path: "$.*.a", - exp: true, - }, - { - test: "test_9", - json: js(`{"b": {"a": 12}}`), - path: "$.*.a", - exp: true, - }, - { - test: "test_10", - json: js(`{"b": {"a": 12}}`), - path: "$.*.b", - exp: false, - }, - { - test: "test_11", - json: js(`{"b": {"a": 12}}`), - path: "strict $.*.b", - exp: nil, - }, - { - test: "test_12", - json: js(`{}`), - path: "$.*", - exp: false, - }, - { - test: "test_13", - json: js(`{"a": 1}`), - path: "$.*", - exp: true, - }, - { - test: "test_14", - json: js(`{"a": {"b": 1}}`), - path: "lax $.**{1}", - exp: true, - }, - { - test: "test_15", - json: js(`{"a": {"b": 1}}`), - path: "lax $.**{2}", - exp: true, - }, - { - test: "test_16", - json: js(`{"a": {"b": 1}}`), - path: "lax $.**{3}", - exp: false, - }, - { - test: "test_17", - json: js(`[]`), - 
path: "$[*]", - exp: false, - }, - { - test: "test_18", - json: js(`[1]`), - path: "$[*]", - exp: true, - }, - { - test: "test_19", - json: js(`[1]`), - path: "$[1]", - exp: false, - }, - { - test: "test_20", - json: js(`[1]`), - path: "strict $[1]", - exp: nil, - }, - // 21-22 in TestPgQueryCompareAtQuestion - { - test: "test_23", - json: js(`[1]`), - path: "lax $[10000000000000000]", - exp: nil, - }, - { - test: "test_24", - json: js(`[1]`), - path: "strict $[10000000000000000]", - exp: nil, - }, - // 25-26 in TestPgQueryCompareAtQuestion - { - test: "test_27", - json: js(`[1]`), - path: "$[0]", - exp: true, - }, - { - test: "test_28", - json: js(`[1]`), - path: "$[0.3]", - exp: true, - }, - { - test: "test_29", - json: js(`[1]`), - path: "$[0.5]", - exp: true, - }, - { - test: "test_30", - json: js(`[1]`), - path: "$[0.9]", - exp: true, - }, - { - test: "test_31", - json: js(`[1]`), - path: "$[1.2]", - exp: false, - }, - { - test: "test_32", - json: js(`[1]`), - path: "strict $[1.2]", - exp: nil, - }, - { - test: "test_33", - json: js(`{"a": [1,2,3], "b": [3,4,5]}`), - path: "$ ? (@.a[*] > @.b[*])", - exp: false, - }, - { - test: "test_34", - json: js(`{"a": [1,2,3], "b": [3,4,5]}`), - path: "$ ? (@.a[*] >= @.b[*])", - exp: true, - }, - { - test: "test_35", - json: js(`{"a": [1,2,3], "b": [3,4,"5"]}`), - path: "$ ? (@.a[*] >= @.b[*])", - exp: true, - }, - { - test: "test_36", - json: js(`{"a": [1,2,3], "b": [3,4,"5"]}`), - path: "strict $ ? (@.a[*] >= @.b[*])", - exp: false, - }, - { - test: "test_37", - json: js(`{"a": [1,2,3], "b": [3,4,null]}`), - path: "$ ? (@.a[*] >= @.b[*])", - exp: true, - }, - { - test: "test_38", - json: js(`1`), - path: `$ ? ((@ == "1") is unknown)`, - exp: true, - }, - { - test: "test_39", - json: js(`1`), - path: `$ ? ((@ == 1) is unknown)`, - exp: false, - }, - { - test: "test_40", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[0 to 1] ? 
(@.a > 1)`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryCompareAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L21-L26 - for _, tc := range []queryTestCase{ - { - test: "test_21", - json: js(`[1]`), - path: "strict $[1]", - err: "exec: jsonpath array subscript is out of bounds", - }, - { - test: "test_22", - json: js(`[1]`), - path: "strict $[1]", - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_25", - json: js(`[1]`), - path: "lax $[10000000000000000]", - err: "exec: jsonpath array subscript is out of integer range", - }, - { - test: "test_26", - json: js(`[1]`), - path: "strict $[10000000000000000]", - err: "exec: jsonpath array subscript is out of integer range", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgExists(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L42-L45 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: "lax $[*].a", - exp: true, - }, - { - test: "test_2", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: "lax $[*].a", - opt: []Option{WithSilent()}, - exp: true, - }, - { - test: "test_3", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: "strict $[*].a", - err: "exec: jsonpath member accessor can only be applied to an object", - }, - { - test: "test_4", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: "strict $[*].a", - opt: []Option{WithSilent()}, - exp: nil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryModes(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L47-L57 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`1`), - path: `lax $.a`, - exp: []any{}, - }, - { - test: "test_2", - json: js(`1`), - path: `strict $.a`, - err: "exec: jsonpath member accessor can only be applied to an object", - }, - { - test: "test_3", - json: js(`1`), - path: `strict $.*`, - err: "exec: jsonpath wildcard member accessor can only be applied to an object", - }, - { - test: "test_4", - json: js(`1`), - path: `strict $.a`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`1`), - path: `strict $.*`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_6", - json: js(`[]`), - path: `lax $.a`, - exp: []any{}, - }, - { - test: "test_7", - json: js(`[]`), - path: `strict $.a`, - err: "exec: jsonpath member accessor can only be applied to an object", - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $.a`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{}`), - path: `lax $.a`, - exp: []any{}, - }, - { - test: "test_10", - json: js(`{}`), - path: `strict $.a`, - err: `exec: JSON object does not contain key "a"`, - }, - { - test: "test_11", - json: js(`{}`), - path: `strict $.a`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := 
assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryStrict(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L59-L66 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`1`), - path: `strict $[1]`, - err: "exec: jsonpath array accessor can only be applied to an array", - }, - { - test: "test_2", - json: js(`1`), - path: `strict $[*]`, - err: "exec: jsonpath wildcard array accessor can only be applied to an array", - }, - { - test: "test_3", - json: js(`[]`), - path: `strict $[1]`, - err: "exec: jsonpath array subscript is out of bounds", - }, - { - test: "test_4", - json: js(`[]`), - path: `strict $["a"]`, - err: "exec: jsonpath array subscript is not a single numeric value", - }, - { - test: "test_5", - json: js(`1`), - path: `strict $[1]`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_6", - json: js(`1`), - path: `strict $[*]`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_7", - json: js(`[]`), - path: `strict $[1]`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $["a"]`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryBasics(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L68-L97 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"a": 12, "b": {"a": 13}}`), - path: `$.a`, - exp: []any{float64(12)}, - }, - - { - test: "test_2", - json: js(`{"a": 12, "b": {"a": 13}}`), - path: `$.b`, - exp: []any{js(`{"a": 13}`)}, - }, - { - test: "test_3", - json: js(`{"a": 12, "b": {"a": 13}}`), - path: `$.*`, - exp: []any{float64(12), js(`{"a": 13}`)}, - rand: true, - }, - { - test: "test_4", - json: js(`{"a": 12, "b": {"a": 13}}`), - path: `lax $.*.a`, - exp: []any{float64(13)}, - }, - { - test: "test_5", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[*].a`, - exp: []any{float64(13)}, - }, - { - test: "test_6", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[*].*`, - exp: []any{float64(13), float64(14)}, - rand: true, - }, - { - test: "test_7", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[0].a`, - exp: []any{}, - }, - { - test: "test_8", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[1].a`, - exp: []any{float64(13)}, - }, - { - test: "test_9", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[2].a`, - exp: []any{}, - }, - { - test: "test_10", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[0,1].a`, - exp: []any{float64(13)}, - }, - { - test: "test_11", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[0 to 10].a`, - exp: []any{float64(13)}, - }, - { - test: "test_12", - json: js(`[12, {"a": 13}, {"b": 14}]`), - path: `lax $[0 to 10 / 0].a`, - err: "exec: division by zero", - }, - { - test: "test_13", - json: js(`[12, {"a": 13}, {"b": 14}, "ccc", true]`), - path: `$[2.5 - 1 to $.size() - 2]`, - exp: []any{js(`{"a": 13}`), js(`{"b": 14}`), "ccc"}, - }, - { - test: "test_14", - json: js(`1`), - path: `lax $[0]`, - exp: []any{float64(1)}, - }, - { - test: "test_15", - json: js(`1`), - path: `lax $[*]`, - exp: []any{float64(1)}, - }, - { - test: "test_16", - json: js(`[1]`), - 
path: `lax $[0]`, - exp: []any{float64(1)}, - }, - { - test: "test_17", - json: js(`[1]`), - path: `lax $[*]`, - exp: []any{float64(1)}, - }, - { - test: "test_18", - json: js(`[1,2,3]`), - path: `lax $[*]`, - exp: []any{float64(1), float64(2), float64(3)}, - }, - { - test: "test_19", - json: js(`[1,2,3]`), - path: `strict $[*].a`, - err: "exec: jsonpath member accessor can only be applied to an object", - }, - { - test: "test_20", - json: js(`[1,2,3]`), - path: `strict $[*].a`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_21", - json: js(`[]`), - path: `$[last]`, - exp: []any{}, - }, - { - test: "test_22", - json: js(`[]`), - path: `$[last ? (exists(last))]`, - exp: []any{}, - }, - { - test: "test_23", - json: js(`[]`), - path: `strict $[last]`, - err: "exec: jsonpath array subscript is out of bounds", - }, - { - test: "test_24", - json: js(`[]`), - path: `strict $[last]`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_25", - json: js(`[1]`), - path: `$[last]`, - exp: []any{float64(1)}, - }, - { - test: "test_26", - json: js(`[1,2,3]`), - path: `$[last]`, - exp: []any{float64(3)}, - }, - { - test: "test_27", - json: js(`[1,2,3]`), - path: `$[last - 1]`, - exp: []any{float64(2)}, - }, - { - test: "test_28", - json: js(`[1,2,3]`), - path: `$[last ? (@.type() == "number")]`, - exp: []any{float64(3)}, - }, - { - test: "test_29", - json: js(`[1,2,3]`), - path: `$[last ? (@.type() == "string")]`, - err: "exec: jsonpath array subscript is not a single numeric value", - }, - { - test: "test_30", - json: js(`[1,2,3]`), - path: `$[last ? (@.type() == "string")]`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryBinaryOps(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L99-L115 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"a": 10}`), - path: `$`, - exp: []any{js(`{"a": 10}`)}, - }, - { - test: "test_2", - json: js(`{"a": 10}`), - path: `$ ? (@.a < $value)`, - err: `exec: could not find jsonpath variable "value"`, - }, - // We have more control than Postgres here, where the requirement that - // vars be a map is enforced at compile time. - // { - // name: "test_3", - // json: js(`{"a": 10}`), - // path: `$ ? (@.a < $value)`, - // opt: []Option{WithVars(int64(1))}, - // err: `exec: "vars" argument is not an object`, - // }, - // { - // name: "test_4", - // json: js(`{"a": 10}`), - // path: `$ ? (@.a < $value)`, - // opt: []Option{WithVars(jv(`[{"value" : 13}]`))}, - // err: `exec: "vars" argument is not an object`, - // }, - { - test: "test_5", - json: js(`{"a": 10}`), - path: `$ ? (@.a < $value)`, - opt: []Option{WithVars(jv(`{"value" : 13}`))}, - exp: []any{js(`{"a": 10}`)}, - }, - { - test: "test_6", - json: js(`{"a": 10}`), - path: `$ ? (@.a < $value)`, - opt: []Option{WithVars(jv(`{"value" : 8}`))}, - exp: []any{}, - }, - { - test: "test_7", - json: js(`{"a": 10}`), - path: `$.a ? (@ < $value)`, - opt: []Option{WithVars(jv(`{"value" : 13}`))}, - exp: []any{float64(10)}, - }, - { - test: "test_8", - json: js(`[10,11,12,13,14,15]`), - path: `$[*] ? (@ < $value)`, - opt: []Option{WithVars(jv(`{"value" : 13}`))}, - exp: []any{float64(10), float64(11), float64(12)}, - }, - { - test: "test_9", - json: js(`[10,11,12,13,14,15]`), - path: `$[0,1] ? 
(@ < $x.value)`, - opt: []Option{WithVars(jv(`{"x": {"value" : 13}}`))}, - exp: []any{float64(10), float64(11)}, - }, - { - test: "test_10", - json: js(`[10,11,12,13,14,15]`), - path: `$[0 to 2] ? (@ < $value)`, - opt: []Option{WithVars(jv(`{"value" : 15}`))}, - exp: []any{float64(10), float64(11), float64(12)}, - }, - { - test: "test_11", - json: js(`[1,"1",2,"2",null]`), - path: `$[*] ? (@ == "1")`, - exp: []any{"1"}, - }, - { - test: "test_12", - json: js(`[1,"1",2,"2",null]`), - path: `$[*] ? (@ == $value)`, - opt: []Option{WithVars(jv(`{"value" : "1"}`))}, - exp: []any{"1"}, - }, - { - test: "test_13", - json: js(`[1,"1",2,"2",null]`), - path: `$[*] ? (@ == $value)`, - opt: []Option{WithVars(jv(`{"value" : null}`))}, - exp: []any{nil}, - }, - { - test: "test_14", - json: js(`[1, "2", null]`), - path: `$[*] ? (@ != null)`, - exp: []any{float64(1), "2"}, - }, - { - test: "test_15", - json: js(`[1, "2", null]`), - path: `$[*] ? (@ == null)`, - exp: []any{nil}, - }, - { - test: "test_16", - json: js(`{}`), - path: `$ ? (@ == @)`, - exp: []any{}, - }, - { - test: "test_17", - json: js(`[]`), - path: `strict $ ? (@ == @)`, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryAny(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L117-L138 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**`, - exp: []any{js(`{"a": {"b": 1}}`), js(`{"b": 1}`), float64(1)}, - }, - { - test: "test_2", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{0}`, - exp: []any{js(`{"a": {"b": 1}}`)}, - }, - { - test: "test_3", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{0 to last}`, - exp: []any{js(`{"a": {"b": 1}}`), js(`{"b": 1}`), float64(1)}, - }, - { - test: "test_4", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{1}`, - exp: []any{js(`{"b": 1}`)}, - }, - { - test: "test_5", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{1 to last}`, - exp: []any{js(`{"b": 1}`), float64(1)}, - }, - { - test: "test_6", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{2}`, - exp: []any{float64(1)}, - }, - { - test: "test_7", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{2 to last}`, - exp: []any{float64(1)}, - }, - { - test: "test_8", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{3 to last}`, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{last}`, - exp: []any{float64(1)}, - }, - { - test: "test_10", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_11", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{0}.b ? (@ > 0)`, - exp: []any{}, - }, - { - test: "test_12", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{1}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_13", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{0 to last}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_14", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{1 to last}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_15", - json: js(`{"a": {"b": 1}}`), - path: `lax $.**{1 to 2}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_16", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**.b ? 
(@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_17", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**{0}.b ? (@ > 0)`, - exp: []any{}, - }, - { - test: "test_18", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**{1}.b ? (@ > 0)`, - exp: []any{}, - }, - { - test: "test_19", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**{0 to last}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_20", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**{1 to last}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_21", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**{1 to 2}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - { - test: "test_22", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `lax $.**{2 to 3}.b ? (@ > 0)`, - exp: []any{float64(1)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtQuestionAny(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L140-L152 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`{"a": {"b": 1}}`), - path: `$.**.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_2", - json: js(`{"a": {"b": 1}}`), - path: `$.**{0}.b ? ( @ > 0)`, - exp: false, - }, - { - test: "test_3", - json: js(`{"a": {"b": 1}}`), - path: `$.**{1}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_4", - json: js(`{"a": {"b": 1}}`), - path: `$.**{0 to last}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_5", - json: js(`{"a": {"b": 1}}`), - path: `$.**{1 to last}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_6", - json: js(`{"a": {"b": 1}}`), - path: `$.**{1 to 2}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_7", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_8", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**{0}.b ? ( @ > 0)`, - exp: false, - }, - { - test: "test_9", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**{1}.b ? ( @ > 0)`, - exp: false, - }, - { - test: "test_10", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**{0 to last}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_11", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**{1 to last}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_12", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**{1 to 2}.b ? ( @ > 0)`, - exp: true, - }, - { - test: "test_13", - json: js(`{"a": {"c": {"b": 1}}}`), - path: `$.**{2 to 3}.b ? ( @ > 0)`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryExists(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L154-L163 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"g": {"x": 2}}`), - path: `$.g ? (exists (@.x))`, - exp: []any{js(`{"x": 2}`)}, - }, - { - test: "test_2", - json: js(`{"g": {"x": 2}}`), - path: `$.g ? (exists (@.y))`, - exp: []any{}, - }, - { - test: "test_3", - json: js(`{"g": {"x": 2}}`), - path: `$.g ? (exists (@.x ? (@ >= 2) ))`, - exp: []any{js(`{"x": 2}`)}, - }, - { - test: "test_4", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `lax $.g ? 
(exists (@.x))`, - exp: []any{js(`{"x": 2}`)}, - }, - { - test: "test_5", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `lax $.g ? (exists (@.x + "3"))`, - exp: []any{}, - }, - { - test: "test_6", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `lax $.g ? ((exists (@.x + "3")) is unknown)`, - exp: []any{js(`{"x": 2}`), js(`{"y": 3}`)}, - }, - { - test: "test_7", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `strict $.g[*] ? (exists (@.x))`, - exp: []any{js(`{"x": 2}`)}, - }, - { - test: "test_8", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `strict $.g[*] ? ((exists (@.x)) is unknown)`, - exp: []any{js(`{"y": 3}`)}, - }, - { - test: "test_9", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `strict $.g ? (exists (@[*].x))`, - exp: []any{}, - }, - { - test: "test_10", - json: js(`{"g": [{"x": 2}, {"y": 3}]}`), - path: `strict $.g ? ((exists (@[*].x)) is unknown)`, - exp: []any{[]any{js(`{"x": 2}`), js(`{"y": 3}`)}}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTernaryLogic(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L166-L190 - path1 := `$[*] ? (@ == true && ($x == true && $y == true) || - @ == false && !($x == true && $y == true) || - @ == null && ($x == true && $y == true) is unknown)` - path2 := `$[*] ? (@ == true && ($x == true || $y == true) || - @ == false && !($x == true || $y == true) || - @ == null && ($x == true || $y == true) is unknown)` - json := []any{true, false, nil} - - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": true, "y": true}`))}, - exp: []any{true}, - }, - { - test: "test_2", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": true, "y": false}`))}, - exp: []any{false}, - }, - { - test: "test_3", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": true, "y": "null"}`))}, - exp: []any{nil}, - }, - { - test: "test_4", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": false, "y": true}`))}, - exp: []any{false}, - }, - { - test: "test_5", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": false, "y": false}`))}, - exp: []any{false}, - }, - { - test: "test_6", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": false, "y": "null"}`))}, - exp: []any{false}, - }, - { - test: "test_7", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": "null", "y": true}`))}, - exp: []any{nil}, - }, - { - test: "test_8", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": "null", "y": false}`))}, - exp: []any{false}, - }, - { - test: "test_9", - json: json, - path: path1, - opt: []Option{WithVars(jv(`{"x": "null", "y": "null"}`))}, - exp: []any{nil}, - }, - { - test: "test_10", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": true, "y": true}`))}, - exp: []any{true}, - }, - { - test: "test_11", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": true, "y": false}`))}, - exp: []any{true}, - }, - { - test: "test_12", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": true, "y": "null"}`))}, - exp: []any{true}, - }, - { - test: "test_13", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": false, "y": true}`))}, - exp: []any{true}, - }, - { - test: "test_14", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": false, 
"y": false}`))}, - exp: []any{false}, - }, - { - test: "test_15", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": false, "y": "null"}`))}, - exp: []any{nil}, - }, - { - test: "test_16", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": "null", "y": true}`))}, - exp: []any{true}, - }, - { - test: "test_17", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": "null", "y": false}`))}, - exp: []any{nil}, - }, - { - test: "test_18", - json: json, - path: path2, - opt: []Option{WithVars(jv(`{"x": "null", "y": "null"}`))}, - exp: []any{nil}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtQuestionFilter(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L192-L198 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`{"a": 1, "b":1}`), - path: "$ ? (@.a == @.b)", - exp: true, - }, - { - test: "test_2", - json: js(`{"c": {"a": 1, "b":1}}`), - path: "$ ? (@.a == @.b)", - exp: false, - }, - { - test: "test_3", - json: js(`{"c": {"a": 1, "b":1}}`), - path: "$.c ? (@.a == @.b)", - exp: true, - }, - { - test: "test_4", - json: js(`{"c": {"a": 1, "b":1}}`), - path: "$.c ? ($.c.a == @.b)", - exp: true, - }, - { - test: "test_5", - json: js(`{"c": {"a": 1, "b":1}}`), - path: "$.* ? (@.a == @.b)", - exp: true, - }, - { - test: "test_6", - json: js(`{"a": 1, "b":1}`), - path: "$.** ? (@.a == @.b)", - exp: true, - }, - { - test: "test_7", - json: js(`{"c": {"a": 1, "b":1}}`), - path: "$.** ? (@.a == @.b)", - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryAnyMath(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L200-L203 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"c": {"a": 2, "b":1}}`), - path: `$.** ? (@.a == 1 + 1)`, - exp: []any{js(`{"a": 2, "b": 1}`)}, - }, - { - test: "test_2", - json: js(`{"c": {"a": 2, "b":1}}`), - path: `$.** ? (@.a == (1 + 1))`, - exp: []any{js(`{"a": 2, "b": 1}`)}, - }, - { - test: "test_3", - json: js(`{"c": {"a": 2, "b":1}}`), - path: `$.** ? (@.a == @.b + 1)`, - exp: []any{js(`{"a": 2, "b": 1}`)}, - }, - { - test: "test_4", - json: js(`{"c": {"a": 2, "b":1}}`), - path: `$.** ? (@.a == (@.b + 1))`, - exp: []any{js(`{"a": 2, "b": 1}`)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtQuestionAnyMath(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L204-L215 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`{"c": {"a": -1, "b":1}}`), - path: "$.** ? (@.a == - 1)", - exp: true, - }, - { - test: "test_2", - json: js(`{"c": {"a": -1, "b":1}}`), - path: "$.** ? (@.a == -1)", - exp: true, - }, - { - test: "test_3", - json: js(`{"c": {"a": -1, "b":1}}`), - path: "$.** ? (@.a == -@.b)", - exp: true, - }, - { - test: "test_4", - json: js(`{"c": {"a": -1, "b":1}}`), - path: "$.** ? (@.a == - @.b)", - exp: true, - }, - { - test: "test_5", - json: js(`{"c": {"a": 0, "b":1}}`), - path: "$.** ? 
(@.a == 1 - @.b)", - exp: true, - }, - { - test: "test_6", - json: js(`{"c": {"a": 2, "b":1}}`), - path: "$.** ? (@.a == 1 - - @.b)", - exp: true, - }, - { - test: "test_7", - json: js(`{"c": {"a": 0, "b":1}}`), - path: "$.** ? (@.a == 1 - +@.b)", - exp: true, - }, - { - test: "test_8", - json: js(`[1,2,3]`), - path: "$ ? (+@[*] > +2)", - exp: true, - }, - { - test: "test_9", - json: js(`[1,2,3]`), - path: "$ ? (+@[*] > +3)", - exp: false, - }, - { - test: "test_10", - json: js(`[1,2,3]`), - path: "$ ? (-@[*] < -2)", - exp: true, - }, - { - test: "test_11", - json: js(`[1,2,3]`), - path: "$ ? (-@[*] < -3)", - exp: false, - }, - { - test: "test_12", - json: js(`1`), - path: "$ ? ($ > 0)", - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryMathErrors(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L218-L230 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[1,2,0,3]`), - path: `$[*] ? (2 / @ > 0)`, - exp: []any{float64(1), float64(2), float64(3)}, - }, - { - test: "test_2", - json: js(`[1,2,0,3]`), - path: `$[*] ? ((2 / @ > 0) is unknown)`, - exp: []any{float64(0)}, - }, - { - test: "test_3", - json: js(`0`), - path: `1 / $`, - err: "exec: division by zero", - }, - { - test: "test_4", - json: js(`0`), - path: `1 / $ + 2`, - err: "exec: division by zero", - }, - { - test: "test_5", - json: js(`0`), - path: `-(3 + 1 % $)`, - err: "exec: division by zero", - }, - { - test: "test_6", - json: js(`1`), - path: `$ + "2"`, - err: "exec: right operand of jsonpath operator + is not a single numeric value", - }, - { - test: "test_7", - json: js(`[1, 2]`), - path: `3 * $`, - err: "exec: right operand of jsonpath operator * is not a single numeric value", - }, - { - test: "test_8", - json: js(`"a"`), - path: `-$`, - err: "exec: operand of unary jsonpath operator - is not a numeric value", - }, - { - test: "test_9", - json: js(`[1,"2",3]`), - path: `+$`, - err: "exec: operand of unary jsonpath operator + is not a numeric value", - }, - { - test: "test_10", - json: js(`1`), - path: `$ + "2"`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_11", - json: js(`[1, 2]`), - path: `3 * $`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_12", - json: js(`"a"`), - path: `-$`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_13", - json: js(`[1,"2",3]`), - path: `+$`, - opt: []Option{WithSilent()}, - exp: []any{float64(1)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtQuestionMathErrors(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L231-L234 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`["1",2,0,3]`), - path: "-$[*]", - exp: true, - }, - { - test: "test_2", - json: js(`[1,"2",0,3]`), - path: "-$[*]", - exp: true, - }, - { - test: "test_3", - json: js(`["1",2,0,3]`), - path: "strict -$[*]", - exp: nil, - }, - { - test: "test_4", - json: js(`[1,"2",0,3]`), - path: "strict -$[*]", - exp: nil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} 
- -func TestPgQueryUnwrapping(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L236-L242 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"a": [2]}`), - path: `lax $.a * 3`, - exp: []any{float64(6)}, - }, - { - test: "test_2", - json: js(`{"a": [2]}`), - path: `lax $.a + 3`, - exp: []any{float64(5)}, - }, - { - test: "test_3", - json: js(`{"a": [2, 3, 4]}`), - path: `lax -$.a`, - exp: []any{float64(-2), float64(-3), float64(-4)}, - }, - // should fail - { - test: "test_4", - json: js(`{"a": [1, 2]}`), - path: `lax $.a * 3`, - err: "exec: left operand of jsonpath operator * is not a single numeric value", - }, - { - test: "test_5", - json: js(`{"a": [1, 2]}`), - path: `lax $.a * 3`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -// https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L244-L253 -func TestPgArrayWildcardUnwrapping(t *testing.T) { - t.Parallel() - ctx := context.Background() - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"a": [1,2,3], "b": [3,4,5]}`), - path: `$.*`, - rand: true, - exp: []any{ - []any{float64(1), float64(2), float64(3)}, - []any{float64(3), float64(4), float64(5)}, - }, - }, - { - test: "test_2", - json: js(`[1,2,3]`), - path: `$.*`, - exp: []any{}, - }, - { - test: "test_3", - json: js(`[1,2,3,{"b": [3,4,5]}]`), - path: `lax $.*`, - exp: []any{[]any{float64(3), float64(4), float64(5)}}, - }, - { - test: "test_4", - json: js(`[1,2,3,{"b": [3,4,5]}]`), - path: `strict $.*`, - err: "exec: jsonpath wildcard member accessor can only be applied to an object", - }, - { - test: "test_5", - json: js(`[1,2,3,{"b": [3,4,5]}]`), - path: `strict $.*`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } - for _, tc := range []existsTestCase{ - { - test: "test_6", - json: js(`{"a": [1,2,3], "b": [3,4,5]}`), - path: `$.*`, - opt: []Option{WithSilent()}, - exp: true, - }, - { - test: "test_7", - json: js(`[1,2,3]`), - path: `$.*`, - opt: []Option{WithSilent()}, - exp: false, - }, - { - test: "test_8", - json: js(`[1,2,3,{"b": [3,4,5]}]`), - path: `lax $.*`, - opt: []Option{WithSilent()}, - exp: true, - }, - { - test: "test_9", - json: js(`[1,2,3,{"b": [3,4,5]}]`), - path: `strict $.*`, - opt: []Option{WithSilent()}, - exp: nil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -// Insert https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L255-L264 -func TestPgQueryBoolean(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L256-L258 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`2`), - path: `$ > 1`, - exp: []any{true}, - }, - { - test: "test_2", - json: js(`2`), - path: `$ <= 1`, - exp: []any{false}, - }, - { - test: "test_3", - json: js(`2`), - path: `$ == "2"`, - exp: []any{nil}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func 
TestPgAtQuestionBoolean(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L259 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`2`), - path: `$ == "2"`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgAtAtBoolean(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L261-L268 - for _, tc := range []matchTestCase{ - { - test: "test_1", - json: js(`2`), - path: `$ > 1`, - exp: true, - }, - { - test: "test_2", - json: js(`2`), - path: `$ <= 1`, - exp: false, - }, - { - test: "test_3", - json: js(`2`), - path: `$ == "2"`, - exp: nil, - }, - { - test: "test_4", - json: js(`2`), - path: `1`, - exp: nil, - }, - { - test: "test_5", - json: js(`{}`), - path: `$`, - exp: nil, - }, - { - test: "test_6", - json: js(`[]`), - path: `$`, - exp: nil, - }, - { - test: "test_7", - json: js(`[1,2,3]`), - path: `$[*]`, - exp: nil, - }, - { - test: "test_8", - json: js(`[]`), - path: `$[*]`, - exp: nil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtAt(ctx, a, r) - }) - } -} - -func TestPgMatch(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L269-L275 - for _, tc := range []matchTestCase{ - { - test: "test_1", - json: js(`[[1, true], [2, false]]`), - path: `strict $[*] ? (@[0] > $x) [1]`, - opt: []Option{WithVars(jv(`{"x": 1}`))}, - exp: false, - }, - { - test: "test_2", - json: js(`[[1, true], [2, false]]`), - path: `strict $[*] ? 
(@[0] < $x) [1]`, - opt: []Option{WithVars(jv(`{"x": 2}`))}, - exp: true, - }, - { - test: "test_3", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: `lax exists($[*].a)`, - exp: true, - }, - { - test: "test_4", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: `lax exists($[*].a)`, - opt: []Option{WithSilent()}, - exp: true, - }, - { - test: "test_5", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - path: `strict exists($[*].a)`, - exp: nil, - }, - { - test: "test_6", - json: js(`[{"a": 1}, {"a": 2}, 3]`), - opt: []Option{WithSilent()}, - path: `strict exists($[*].a)`, - exp: nil, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTypeMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L278-L284 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[null,1,true,"a",[],{}]`), - path: `$.type()`, - exp: []any{"array"}, - }, - { - test: "test_2", - json: js(`[null,1,true,"a",[],{}]`), - path: `lax $.type()`, - exp: []any{"array"}, - }, - { - test: "test_3", - json: js(`[null,1,true,"a",[],{}]`), - path: `$[*].type()`, - exp: []any{"null", "number", "boolean", "string", "array", "object"}, - }, - { - test: "test_4", - json: js(`null`), - path: `null.type()`, - exp: []any{"null"}, - }, - { - test: "test_5", - json: js(`null`), - path: `true.type()`, - exp: []any{"boolean"}, - }, - { - test: "test_6", - json: js(`null`), - path: `(123).type()`, - exp: []any{"number"}, - }, - { - test: "test_7", - json: js(`null`), - path: `"123".type()`, - exp: []any{"string"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryAbsFloorType(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L286-L291 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{"a": 2}`), - path: `($.a - 5).abs() + 10`, - exp: []any{float64(13)}, - }, - { - test: "test_2", - json: js(`{"a": 2.5}`), - path: `-($.a * $.a).floor() % 4.3`, - exp: []any{float64(-1.7000000000000002)}, // pg:1.7 - }, - { - test: "test_3", - json: js(`[1, 2, 3]`), - path: `($[*] > 2) ? 
(@ == true)`, - exp: []any{true}, - }, - { - test: "test_4", - json: js(`[1, 2, 3]`), - path: `($[*] > 3).type()`, - exp: []any{"boolean"}, - }, - { - test: "test_5", - json: js(`[1, 2, 3]`), - path: `($[*].a > 3).type()`, - exp: []any{"boolean"}, - }, - { - test: "test_6", - json: js(`[1, 2, 3]`), - path: `strict ($[*].a > 3).type()`, - exp: []any{"null"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQuerySizeMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L293-L295 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]`), - path: `strict $[*].size()`, - err: "exec: jsonpath item method .size() can only be applied to an array", - }, - { - test: "test_2", - json: js(`[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]`), - path: `strict $[*].size()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_3", - json: js(`[1,null,true,"11",[],[1],[1,2,3],{},{"a":1,"b":2}]`), - path: `lax $[*].size()`, - exp: []any{ - int64(1), int64(1), int64(1), int64(1), int64(0), - int64(1), int64(3), int64(1), int64(1), - }, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryMethodChain(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L297-L301 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[0, 1, -2, -3.4, 5.6]`), - path: `$[*].abs()`, - exp: []any{float64(0), float64(1), float64(2), float64(3.4), float64(5.6)}, - }, - { - test: "test_2", - json: js(`[0, 1, -2, -3.4, 5.6]`), - path: `$[*].floor()`, - exp: []any{float64(0), float64(1), float64(-2), float64(-4), float64(5)}, - }, - { - test: "test_3", - json: js(`[0, 1, -2, -3.4, 5.6]`), - path: `$[*].ceiling()`, - exp: []any{float64(0), float64(1), float64(-2), float64(-3), float64(6)}, - }, - { - test: "test_4", - json: js(`[0, 1, -2, -3.4, 5.6]`), - path: `$[*].ceiling().abs()`, - exp: []any{float64(0), float64(1), float64(2), float64(3), float64(6)}, - }, - { - test: "test_5", - json: js(`[0, 1, -2, -3.4, 5.6]`), - path: `$[*].ceiling().abs().type()`, - exp: []any{"number", "number", "number", "number", "number"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func offset(a, b any) int64 { - x := addrOf(a) - y := addrOf(b) - if x > y { - return int64(x - y) - } - return int64(y - x) -} - -func TestPgQueryKeyValue(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // Go can have different array offsets when executing stuff in parallel, - // so create the data here so we can calculate the correct IDs in tests 5 - // and 7 below. 
- array, ok := js(`[{"a": 1, "b": [1, 2]}, {"c": {"a": "bbb"}}]`).([]any) - require.True(t, ok) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L303-L310 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[{},1]`), - path: `$[*].keyvalue()`, - err: "exec: jsonpath item method .keyvalue() can only be applied to an object", - }, - { - test: "test_2", - json: js(`[{},1]`), - path: `$[*].keyvalue()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_3", - json: js(`{}`), - path: `$.keyvalue()`, - exp: []any{}, - }, - { - test: "test_4", - json: js(`{"a": 1, "b": [1, 2], "c": {"a": "bbb"}}`), - path: `$.keyvalue()`, - exp: []any{ - map[string]any{"id": int64(0), "key": "a", "value": float64(1)}, - map[string]any{"id": int64(0), "key": "b", "value": []any{float64(1), float64(2)}}, - map[string]any{"id": int64(0), "key": "c", "value": map[string]any{"a": "bbb"}}, - }, - }, - { - test: "test_5", - json: array, - path: `$[*].keyvalue()`, - // pg: IDs vary because jsonb binary layout is more consistent than Go slices. - exp: []any{ - map[string]any{"id": offset(array[0], array), "key": "a", "value": float64(1)}, - map[string]any{"id": offset(array[0], array), "key": "b", "value": []any{float64(1), float64(2)}}, - map[string]any{"id": offset(array[1], array), "key": "c", "value": map[string]any{"a": "bbb"}}, - }, - }, - { - test: "test_6", - json: array, - path: `strict $.keyvalue()`, - err: "exec: jsonpath item method .keyvalue() can only be applied to an object", - }, - { - test: "test_7", - json: array, - path: `lax $.keyvalue()`, - // pg: IDs vary because jsonb binary layout is more consistent than Go slices. - exp: []any{ - map[string]any{"id": offset(array[0], array), "key": "a", "value": float64(1)}, - map[string]any{"id": offset(array[0], array), "key": "b", "value": []any{float64(1), float64(2)}}, - map[string]any{"id": offset(array[1], array), "key": "c", "value": map[string]any{"a": "bbb"}}, - }, - }, - { - test: "test_8", - json: array, - path: `strict $.keyvalue().a`, - err: "exec: jsonpath item method .keyvalue() can only be applied to an object", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtQuestionKeyValue(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L311-L312 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`{"a": 1, "b": [1, 2]}`), - path: `lax $.keyvalue()`, - exp: true, - }, - { - test: "test_2", - json: js(`{"a": 1, "b": [1, 2]}`), - path: `lax $.keyvalue().key`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryDoubleMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L314-L332 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.double()`, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - { - test: "test_2", - json: js(`true`), - path: `$.double()`, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - { - test: "test_3", - json: js(`null`), - path: 
`$.double()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_4", - json: js(`true`), - path: `$.double()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `$.double()`, - exp: []any{}, - }, - { - test: "test_6", - json: js(`[]`), - path: `strict $.double()`, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - { - test: "test_7", - json: js(`{}`), - path: `$.double()`, - err: "exec: jsonpath item method .double() can only be applied to a string or numeric value", - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $.double()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{}`), - path: `$.double()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_10", - json: js(`1.23`), - path: `$.double()`, - exp: []any{float64(1.23)}, - }, - { - test: "test_11", - json: js(`"1.23"`), - path: `$.double()`, - exp: []any{float64(1.23)}, - }, - { - test: "test_12", - json: js(`"1.23aaa"`), - path: `$.double()`, - err: `exec: argument "1.23aaa" of jsonpath item method .double() is invalid for type double precision`, - }, - // Go cannot parse 1e1000 into a float because it's too big. - // Postgres JSONB accepts arbitrary numeric sizes. - // { - // name: "test_13", - // json: js(`1e1000`), - // path: `$.double()`, - // err: `exec: argument "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" of jsonpath item method .double() is invalid for type double precision`, - // }, - { - test: "test_14", - json: js(`"nan"`), - path: `$.double()`, - err: "exec: NaN or Infinity is not allowed for jsonpath item method .double()", - }, - { - test: "test_15", - json: js(`"NaN"`), - path: `$.double()`, - err: "exec: NaN or Infinity is not allowed for jsonpath item method .double()", - }, - { - test: "test_16", - json: js(`"inf"`), - path: `$.double()`, - err: "exec: NaN or Infinity is not allowed for jsonpath item method .double()", - }, - { - test: "test_17", - json: js(`"-inf"`), - path: `$.double()`, - err: "exec: NaN or Infinity is not allowed for jsonpath item method .double()", - }, - { - test: "test_18", - json: js(`"inf"`), - path: `$.double()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_19", - json: js(`"-inf"`), - path: `$.double()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryAbsFloorCeilErr(t *testing.T) 
{ - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L334-L339 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`{}`), - path: `$.abs()`, - err: "exec: jsonpath item method .abs() can only be applied to a numeric value", - }, - { - test: "test_2", - json: js(`true`), - path: `$.floor()`, - err: "exec: jsonpath item method .floor() can only be applied to a numeric value", - }, - { - test: "test_3", - json: js(`"1.2"`), - path: `$.ceiling()`, - err: "exec: jsonpath item method .ceiling() can only be applied to a numeric value", - }, - { - test: "test_4", - json: js(`{}`), - path: `$.abs()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`true`), - path: `$.floor()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_6", - json: js(`"1.2"`), - path: `$.ceiling()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryStartsWith(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L341-L348 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`["", "a", "abc", "abcabc"]`), - path: `$[*] ? (@ starts with "abc")`, - exp: []any{"abc", "abcabc"}, - }, - { - test: "test_2", - json: js(`["", "a", "abc", "abcabc"]`), - path: `strict $ ? (@[*] starts with "abc")`, - exp: []any{[]any{"", "a", "abc", "abcabc"}}, - }, - { - test: "test_3", - json: js(`["", "a", "abd", "abdabc"]`), - path: `strict $ ? (@[*] starts with "abc")`, - exp: []any{}, - }, - { - test: "test_4", - json: js(`["abc", "abcabc", null, 1]`), - path: `strict $ ? (@[*] starts with "abc")`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`["abc", "abcabc", null, 1]`), - path: `strict $ ? ((@[*] starts with "abc") is unknown)`, - exp: []any{[]any{"abc", "abcabc", nil, float64(1)}}, - }, - { - test: "test_6", - json: js(`[[null, 1, "abc", "abcabc"]]`), - path: `lax $ ? (@[*] starts with "abc")`, - exp: []any{[]any{nil, float64(1), "abc", "abcabc"}}, - }, - { - test: "test_7", - json: js(`[[null, 1, "abd", "abdabc"]]`), - path: `lax $ ? ((@[*] starts with "abc") is unknown)`, - exp: []any{[]any{nil, float64(1), "abd", "abdabc"}}, - }, - { - test: "test_8", - json: js(`[null, 1, "abd", "abdabc"]`), - path: `lax $[*] ? ((@ starts with "abc") is unknown)`, - exp: []any{nil, float64(1)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryLikeRegex(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L350-L359 - // pg: Using \t instead of \b, because \b is word boundary only in Go, while - // in Postgres it's bell. Using \t gets the original intent of the tests. - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[null, 1, "abc", "abd", "aBdC", "abdacb", "babc", "adc\nabc", "ab\nadc"]`), - path: `lax $[*] ? (@ like_regex "^ab.*c")`, - exp: []any{"abc", "abdacb"}, - }, - { - test: "test_2", - json: js(`[null, 1, "abc", "abd", "aBdC", "abdacb", "babc", "adc\nabc", "ab\nadc"]`), - path: `lax $[*] ? 
(@ like_regex "^ab.*c" flag "i")`, - exp: []any{"abc", "aBdC", "abdacb"}, - }, - { - test: "test_3", - json: js(`[null, 1, "abc", "abd", "aBdC", "abdacb", "babc", "adc\nabc", "ab\nadc"]`), - path: `lax $[*] ? (@ like_regex "^ab.*c" flag "m")`, - exp: []any{"abc", "abdacb", "adc\nabc"}, - }, - { - test: "test_4", - json: js(`[null, 1, "abc", "abd", "aBdC", "abdacb", "babc", "adc\nabc", "ab\nadc"]`), - path: `lax $[*] ? (@ like_regex "^ab.*c" flag "s")`, - exp: []any{"abc", "abdacb", "ab\nadc"}, - }, - { - test: "test_5", - json: js(`[null, 1, "a\t", "a\\t", "^a\\t$"]`), - path: `lax $[*] ? (@ like_regex "a\\t" flag "q")`, - exp: []any{"a\\t", "^a\\t$"}, - }, - { - test: "test_6", - json: js(`[null, 1, "a\t", "a\\t", "^a\\t$"]`), - path: `lax $[*] ? (@ like_regex "a\\t" flag "")`, - exp: []any{"a\t"}, - }, - { - test: "test_7", - json: js(`[null, 1, "a\t", "a\\t", "^a\\t$"]`), - path: `lax $[*] ? (@ like_regex "^a\\t$" flag "q")`, - exp: []any{"^a\\t$"}, - }, - { - test: "test_8", - json: js(`[null, 1, "a\t", "a\\t", "^a\\t$"]`), - path: `lax $[*] ? (@ like_regex "^a\\T$" flag "q")`, - exp: []any{}, - }, - { - test: "test_9", - json: js(`[null, 1, "a\t", "a\\t", "^a\\t$"]`), - path: `lax $[*] ? (@ like_regex "^a\\T$" flag "iq")`, - exp: []any{"^a\\t$"}, - }, - { - test: "test_10", - json: js(`[null, 1, "a\t", "a\\t", "^a\\t$"]`), - path: `lax $[*] ? (@ like_regex "^a\\t$" flag "")`, - exp: []any{"a\t"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateTimeErr(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L361-L369 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.datetime()`, - err: "exec: jsonpath item method .datetime() can only be applied to a string", - }, - { - test: "test_2", - json: js(`true`), - path: `$.datetime()`, - err: "exec: jsonpath item method .datetime() can only be applied to a string", - }, - { - test: "test_3", - json: js(`1`), - path: `$.datetime()`, - err: "exec: jsonpath item method .datetime() can only be applied to a string", - }, - { - test: "test_4", - json: js(`[]`), - path: `$.datetime()`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `strict $.datetime()`, - err: "exec: jsonpath item method .datetime() can only be applied to a string", - }, - { - test: "test_6", - json: js(`{}`), - path: `$.datetime()`, - err: "exec: jsonpath item method .datetime() can only be applied to a string", - }, - { - test: "test_7", - json: js(`"bogus"`), - path: `$.datetime()`, - err: `exec: datetime format is not recognized: "bogus"`, - }, - { - test: "test_8", - json: js(`"12:34"`), - path: `$.datetime("aaa")`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: invalid datetime format separator: "a"`, - }, - { - test: "test_9", - json: js(`"aaaa"`), - path: `$.datetime("HH24")`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: invalid value "aa" for "HH24"`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateTimeAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L371 - for _, tc := range []existsTestCase{ - { - 
test: "test_1", - json: js(`"10-03-2017"`), - path: `$.datetime("dd-mm-yyyy")`, - err: `exec: .datetime(template) is not yet supported`, - // exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func pt(ctx context.Context, ts string) types.DateTime { - val, ok := types.ParseTime(ctx, ts, -1) - if !ok { - panic("Failed to parse " + ts) - } - return val -} - -func TestPgQueryDateTimeFormat(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L372-L384 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`"10-03-2017"`), - path: `$.datetime("dd-mm-yyyy")`, - // exp: []any{pt(ctx, "2017-03-10")}, - }, - { - test: "test_2", - json: js(`"10-03-2017"`), - path: `$.datetime("dd-mm-yyyy").type()`, - // exp: []any{"date"}, - }, - { - test: "test_3", - json: js(`"10-03-2017 12:34"`), - path: `$.datetime("dd-mm-yyyy")`, - // err:"exec: trailing characters remain in input string after datetime format", - }, - { - test: "test_4", - json: js(`"10-03-2017 12:34"`), - path: `$.datetime("dd-mm-yyyy").type()`, - // err:"exec: trailing characters remain in input string after datetime format", - }, - { - test: "test_5", - json: js(`"10-03-2017 12:34"`), - path: ` $.datetime("dd-mm-yyyy HH24:MI").type()`, - // exp: []any{"timestamp without time zone"}, - }, - { - test: "test_6", - json: js(`"10-03-2017 12:34 +05:20"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH:TZM").type()`, - // exp: []any{"timestamp with time zone"}, - }, - { - test: "test_7", - json: js(`"12:34:56"`), - path: `$.datetime("HH24:MI:SS").type()`, - // exp: []any{"time without time zone"}, - }, - { - test: "test_8", - json: js(`"12:34:56 +05:20"`), - path: `$.datetime("HH24:MI:SS TZH:TZM").type()`, - // exp: []any{"time with time zone"}, - }, - { - test: "test_9", - json: js(`"10-03-2017T12:34:56"`), - path: `$.datetime("dd-mm-yyyy\"T\"HH24:MI:SS")`, - // exp: []any{pt(ctx, "2017-03-10T12:34:56")}, - }, - { - test: "test_10", - json: js(`"10-03-2017t12:34:56"`), - path: `$.datetime("dd-mm-yyyy\"T\"HH24:MI:SS")`, - // err:`exec: unmatched format character "T"`, - }, - { - test: "test_11", - json: js(`"10-03-2017 12:34:56"`), - path: `$.datetime("dd-mm-yyyy\"T\"HH24:MI:SS")`, - // err:`exec: unmatched format character "T"`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.err = `exec: .datetime(template) is not yet supported` - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryBigInt(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L386-L416 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.bigint()`, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.bigint()`, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - }, - { - test: "test_3", - json: js(`null`), - path: `$.bigint()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_4", - json: js(`true`), - path: `$.bigint()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `$.bigint()`, - exp: []any{}, - }, - { - test: "test_6", - 
json: js(`[]`), - path: `strict $.bigint()`, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - }, - { - test: "test_7", - json: js(`{}`), - path: `$.bigint()`, - err: `exec: jsonpath item method .bigint() can only be applied to a string or numeric value`, - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $.bigint()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{}`), - path: `$.bigint()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "`test_10`", - json: js(`"1.23"`), - path: `$.bigint()`, - err: `exec: argument "1.23" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_11", - json: js(`"1.23aaa"`), - path: `$.bigint()`, - err: `exec: argument "1.23aaa" of jsonpath item method .bigint() is invalid for type bigint`, - }, - // Go cannot parse 1e1000 into a float because it's too big. - // Postgres JSONB accepts arbitrary numeric sizes. - // { - // name: "test_12", - // json: js(`1e1000`), - // path: `$.bigint()`, - // err: `exec: argument "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" of jsonpath item method .bigint() is invalid for type bigint`, - // }, - { - test: "test_13", - json: js(`"nan"`), - path: `$.bigint()`, - err: `exec: argument "nan" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_14", - json: js(`"NaN"`), - path: `$.bigint()`, - err: `exec: argument "NaN" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_15", - json: js(`"inf"`), - path: `$.bigint()`, - err: `exec: argument "inf" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_16", - json: js(`"-inf"`), - path: `$.bigint()`, - err: `exec: argument "-inf" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_17", - json: js(`"inf"`), - path: `$.bigint()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_18", - json: js(`"-inf"`), - path: `$.bigint()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_19", - json: js(`123`), - path: `$.bigint()`, - exp: []any{int64(123)}, - }, - { - test: "test_20", - json: js(`"123"`), - path: `$.bigint()`, - exp: []any{int64(123)}, - }, - { - test: "test_21", - json: js(`1.23`), - path: `$.bigint()`, - exp: []any{int64(1)}, - }, - { - test: "test_22", - json: js(`1.83`), - path: `$.bigint()`, - exp: []any{int64(2)}, - }, - { - test: "test_23", - json: js(`1234567890123`), - path: `$.bigint()`, - exp: 
[]any{int64(1234567890123)}, - }, - { - test: "test_24", - json: js(`"1234567890123"`), - path: `$.bigint()`, - exp: []any{int64(1234567890123)}, - }, - { - test: "test_25", - json: js(`12345678901234567890`), - path: `$.bigint()`, - // pg: shows `"12345678901234567890"` in the error - err: `exec: argument "1.2345678901234567e+19" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_26", - json: js(`"12345678901234567890"`), - path: `$.bigint()`, - err: `exec: argument "12345678901234567890" of jsonpath item method .bigint() is invalid for type bigint`, - }, - { - test: "test_27", - json: js(`"+123"`), - path: `$.bigint()`, - exp: []any{int64(123)}, - }, - { - test: "test_28", - json: js(`-123`), - path: `$.bigint()`, - exp: []any{int64(-123)}, - }, - { - test: "test_29", - json: js(`"-123"`), - path: `$.bigint()`, - exp: []any{int64(-123)}, - }, - { - test: "test_30", - json: js(`123`), - path: `$.bigint() * 2`, - exp: []any{int64(246)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryBooleanMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L418-L458 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.boolean()`, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - }, - { - test: "test_2", - json: js(`null`), - path: `$.boolean()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_3", - json: js(`[]`), - path: `$.boolean()`, - exp: []any{}, - }, - { - test: "test_4", - json: js(`[]`), - path: `strict $.boolean()`, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - }, - { - test: "test_5", - json: js(`{}`), - path: `$.boolean()`, - err: `exec: jsonpath item method .boolean() can only be applied to a boolean, string, or numeric value`, - }, - { - test: "test_6", - json: js(`[]`), - path: `strict $.boolean()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_7", - json: js(`{}`), - path: `$.boolean()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_8", - json: js(`1.23`), - path: `$.boolean()`, - err: `exec: argument "1.23" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_9", - json: js(`"1.23"`), - path: `$.boolean()`, - err: `exec: argument "1.23" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_10", - json: js(`"1.23aaa"`), - path: `$.boolean()`, - err: `exec: argument "1.23aaa" of jsonpath item method .boolean() is invalid for type boolean`, - }, - // Go cannot parse 1e1000 into a float because it's too big. - // Postgres JSONB accepts arbitrary numeric sizes. 
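- // Illustrative aside, not part of the ported Postgres tests: a minimal
- // sketch of why the case above is skipped, assuming the implementation
- // parses numeric literals with strconv:
- //
- //	_, err := strconv.ParseFloat("1e1000", 64)
- //	// err wraps strconv.ErrRange: 1e1000 exceeds math.MaxFloat64 (~1.8e308),
- //	// while Postgres numeric accepts arbitrarily large values.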
- // { - // name: "test_11", - // json: js(`1e1000`), - // path: `$.boolean()`, - // err: `exec: argument "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" of jsonpath item method .boolean() is invalid for type boolean`, - // }, - { - test: "test_12", - json: js(`"nan"`), - path: `$.boolean()`, - err: `exec: argument "nan" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_13", - json: js(`"NaN"`), - path: `$.boolean()`, - err: `exec: argument "NaN" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_14", - json: js(`"inf"`), - path: `$.boolean()`, - err: `exec: argument "inf" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_15", - json: js(`"-inf"`), - path: `$.boolean()`, - err: `exec: argument "-inf" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_16", - json: js(`"inf"`), - path: `$.boolean()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_17", - json: js(`"-inf"`), - path: `$.boolean()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_18", - json: js(`"100"`), - path: `$.boolean()`, - err: `exec: argument "100" of jsonpath item method .boolean() is invalid for type boolean`, - }, - { - test: "test_19", - json: js(`true`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_20", - json: js(`false`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_21", - json: js(`1`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_22", - json: js(`0`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_23", - json: js(`-1`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_24", - json: js(`100`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_25", - json: js(`"1"`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_26", - json: js(`"0"`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_27", - json: js(`"true"`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_28", - json: js(`"false"`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_29", - json: js(`"TRUE"`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_30", - json: js(`"FALSE"`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_31", - json: js(`"yes"`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_32", - json: js(`"NO"`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: 
"test_33", - json: js(`"T"`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_34", - json: js(`"f"`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_35", - json: js(`"y"`), - path: `$.boolean()`, - exp: []any{true}, - }, - { - test: "test_36", - json: js(`"N"`), - path: `$.boolean()`, - exp: []any{false}, - }, - { - test: "test_37", - json: js(`true`), - path: `$.boolean().type()`, - exp: []any{"boolean"}, - }, - { - test: "test_38", - json: js(`123`), - path: `$.boolean().type()`, - exp: []any{"boolean"}, - }, - { - test: "test_39", - json: js(`"Yes"`), - path: `$.boolean().type()`, - exp: []any{"boolean"}, - }, - // pg: tests jsonb_path_query_array but our Query() always returns a - // slice. - { - test: "test_40", - json: js(`[1, "yes", false]`), - path: `$[*].boolean()`, - exp: []any{true, true, false}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateMethod(t *testing.T) { - t.Parallel() - - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L460-L477 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.date()`, - err: `exec: jsonpath item method .date() can only be applied to a string`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.date()`, - err: `exec: jsonpath item method .date() can only be applied to a string`, - }, - { - test: "test_3", - json: js(`1`), - path: `$.date()`, - err: `exec: jsonpath item method .date() can only be applied to a string`, - }, - { - test: "test_4", - json: js(`[]`), - path: `$.date()`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `strict $.date()`, - err: `exec: jsonpath item method .date() can only be applied to a string`, - }, - { - test: "test_6", - json: js(`{}`), - path: `$.date()`, - err: `exec: jsonpath item method .date() can only be applied to a string`, - }, - { - test: "test_7", - json: js(`"bogus"`), - path: `$.date()`, - err: `exec: date format is not recognized: "bogus"`, - }, - // Test 8 in TestPgQueryDateAtQuestion below - { - test: "test_9", - json: js(`"2023-08-15"`), - path: `$.date()`, - exp: []any{pt(ctx, "2023-08-15")}, - }, - { - test: "test_10", - json: js(`"2023-08-15"`), - path: `$.date().type()`, - exp: []any{"date"}, - }, - { - test: "test_11", - json: js(`"12:34:56"`), - path: `$.date()`, - err: `exec: date format is not recognized: "12:34:56"`, - }, - { - test: "test_12", - json: js(`"12:34:56 +05:30"`), - path: `$.date()`, - err: `exec: date format is not recognized: "12:34:56 +05:30"`, - }, - { - test: "test_13", - json: js(`"2023-08-15 12:34:56"`), - path: `$.date()`, - exp: []any{pt(ctx, "2023-08-15")}, - }, - { - test: "test_14", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.date()`, - err: `exec: cannot convert value from timestamptz to date without time zone usage.` + tzHint, - }, - { - test: "test_15", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.date()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15")}, // should work - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateAtQuestion(t *testing.T) { - t.Parallel() - 
ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L469 - for _, tc := range []existsTestCase{ - { - test: "test_8", - json: js(`"2023-08-15"`), - path: `$.date()`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryDateMethodSyntaxError(t *testing.T) { - t.Parallel() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L479 - t.Run("test_16", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - path, err := parser.Parse("$.date(2)") - r.EqualError(err, `parser: syntax error at 1:9`) - r.ErrorIs(err, parser.ErrParse) - a.Nil(path) - }) -} - -func TestPgQueryDecimalMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L481-L525 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.decimal()`, - err: `exec: jsonpath item method .decimal() can only be applied to a string or numeric value`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.decimal()`, - err: `exec: jsonpath item method .decimal() can only be applied to a string or numeric value`, - }, - { - test: "test_3", - json: js(`null`), - path: `$.decimal()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_4", - json: js(`true`), - path: `$.decimal()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `$.decimal()`, - exp: []any{}, - }, - { - test: "test_6", - json: js(`[]`), - path: `strict $.decimal()`, - err: `exec: jsonpath item method .decimal() can only be applied to a string or numeric value`, - }, - { - test: "test_7", - json: js(`{}`), - path: `$.decimal()`, - err: `exec: jsonpath item method .decimal() can only be applied to a string or numeric value`, - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $.decimal()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{}`), - path: `$.decimal()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_10", - json: js(`1.23`), - path: `$.decimal()`, - exp: []any{float64(1.23)}, - }, - { - test: "test_11", - json: js(`"1.23"`), - path: `$.decimal()`, - exp: []any{float64(1.23)}, - }, - { - test: "test_12", - json: js(`"1.23aaa"`), - path: `$.decimal()`, - err: `exec: argument "1.23aaa" of jsonpath item method .decimal() is invalid for type numeric`, - }, - // Go cannot parse 1e1000 into a float because it's too big. - // Postgres JSONB accepts arbitrary numeric sizes. 
- // { - // name: "test_13", - // json: js(`1e1000`), - // path: `$.decimal()`, - // exp: []any{"10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, - // }, - { - test: "test_14", - json: js(`"nan"`), - path: `$.decimal()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .decimal()`, - }, - { - test: "test_15", - json: js(`"NaN"`), - path: `$.decimal()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .decimal()`, - }, - { - test: "test_16", - json: js(`"inf"`), - path: `$.decimal()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .decimal()`, - }, - { - test: "test_17", - json: js(`"-inf"`), - path: `$.decimal()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .decimal()`, - }, - { - test: "test_18", - json: js(`"inf"`), - path: `$.decimal()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_19", - json: js(`"-inf"`), - path: `$.decimal()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_20", - json: js(`123`), - path: `$.decimal()`, - exp: []any{float64(123)}, - }, - { - test: "test_21", - json: js(`"123"`), - path: `$.decimal()`, - exp: []any{float64(123)}, - }, - { - test: "test_22", - json: js(`12345678901234567890`), - path: `$.decimal()`, - exp: []any{float64(12345678901234567890)}, - }, - { - test: "test_23", - json: js(`"12345678901234567890"`), - path: `$.decimal()`, - exp: []any{float64(12345678901234567890)}, - }, - { - test: "test_24", - json: js(`"+12.3"`), - path: `$.decimal()`, - exp: []any{float64(12.3)}, - }, - { - test: "test_25", - json: js(`-12.3`), - path: `$.decimal()`, - exp: []any{float64(-12.3)}, - }, - { - test: "test_26", - json: js(`"-12.3"`), - path: `$.decimal()`, - exp: []any{float64(-12.3)}, - }, - { - test: "test_27", - json: js(`12.3`), - path: `$.decimal() * 2`, - exp: []any{float64(24.6)}, - }, - { - test: "test_28", - json: js(`12345.678`), - path: `$.decimal(6, 1)`, - exp: []any{float64(12345.7)}, - }, - { - test: "test_29", - json: js(`12345.678`), - path: `$.decimal(6, 2)`, - err: `exec: argument "12345.678" of jsonpath item method .decimal() is invalid for type numeric`, - }, - { - test: "test_30", - json: js(`1234.5678`), - path: `$.decimal(6, 2)`, - exp: []any{float64(1234.57)}, - }, - { - test: "test_31", - json: js(`12345.678`), - path: `$.decimal(4, 6)`, - err: `exec: argument "12345.678" of jsonpath item method .decimal() is invalid for type numeric`, - }, - { - test: "test_32", - json: js(`12345.678`), - path: `$.decimal(0, 6)`, - err: `exec: NUMERIC precision 0 must be 
between 1 and 1000`, - }, - { - test: "test_33", - json: js(`12345.678`), - path: `$.decimal(1001, 6)`, - err: `exec: NUMERIC precision 1001 must be between 1 and 1000`, - }, - { - test: "test_34", - json: js(`1234.5678`), - path: `$.decimal(+6, +2)`, - exp: []any{float64(1234.57)}, - }, - { - test: "test_35", - json: js(`1234.5678`), - path: `$.decimal(+6, -2)`, - exp: []any{float64(1200)}, - }, - { - test: "test_36", - json: js(`1234.5678`), - path: `$.decimal(-6, +2)`, - err: `exec: NUMERIC precision -6 must be between 1 and 1000`, - }, - { - test: "test_37", - json: js(`1234.5678`), - path: `$.decimal(6, -1001)`, - err: `exec: NUMERIC scale -1001 must be between -1000 and 1000`, - }, - { - test: "test_38", - json: js(`1234.5678`), - path: `$.decimal(6, 1001)`, - err: `exec: NUMERIC scale 1001 must be between -1000 and 1000`, - }, - { - test: "test_39", - json: js(`-1234.5678`), - path: `$.decimal(+6, -2)`, - exp: []any{float64(-1200)}, - }, - { - test: "test_40", - json: js(`0.0123456`), - path: `$.decimal(1,2)`, - exp: []any{float64(0.01)}, - }, - { - test: "test_41", - json: js(`0.0012345`), - path: `$.decimal(2,4)`, - exp: []any{float64(0.0012)}, - }, - { - test: "test_42", - json: js(`-0.00123456`), - path: `$.decimal(2,-4)`, - exp: []any{float64(0)}, - }, - { - test: "test_43", - json: js(`12.3`), - path: `$.decimal(12345678901,1)`, - err: `exec: precision of jsonpath item method .decimal() is out of integer range`, - }, - { - test: "test_44", - json: js(`12.3`), - path: `$.decimal(1,12345678901)`, - err: `exec: scale of jsonpath item method .decimal() is out of integer range`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryIntegerMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L527-L555 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.integer()`, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.integer()`, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - }, - { - test: "test_3", - json: js(`null`), - path: `$.integer()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_4", - json: js(`true`), - path: `$.integer()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `$.integer()`, - exp: []any{}, - }, - { - test: "test_6", - json: js(`[]`), - path: `strict $.integer()`, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - }, - { - test: "test_7", - json: js(`{}`), - path: `$.integer()`, - err: `exec: jsonpath item method .integer() can only be applied to a string or numeric value`, - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $.integer()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{}`), - path: `$.integer()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_10", - json: js(`"1.23"`), - path: `$.integer()`, - err: `exec: argument "1.23" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_11", - json: js(`"1.23aaa"`), - path: `$.integer()`, - err: `exec: argument "1.23aaa" of jsonpath item method .integer() is invalid for type 
integer`, - }, - // Go cannot parse 1e1000 into a float because it's too big. - // Postgres JSONB accepts arbitrary numeric sizes. - // { - // name: "test_12", - // json: js(`1e1000`), - // path: `$.integer()`, - // err: `exec: argument "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" of jsonpath item method .integer() is invalid for type integer`, - // }, - { - test: "test_13", - json: js(`"nan"`), - path: `$.integer()`, - err: `exec: argument "nan" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_14", - json: js(`"NaN"`), - path: `$.integer()`, - err: `exec: argument "NaN" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_15", - json: js(`"inf"`), - path: `$.integer()`, - err: `exec: argument "inf" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_16", - json: js(`"-inf"`), - path: `$.integer()`, - err: `exec: argument "-inf" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_17", - json: js(`"inf"`), - path: `$.integer()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_18", - json: js(`"-inf"`), - path: `$.integer()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_19", - json: js(`123`), - path: `$.integer()`, - exp: []any{int64(123)}, - }, - { - test: "test_20", - json: js(`"123"`), - path: `$.integer()`, - exp: []any{int64(123)}, - }, - { - test: "test_21", - json: js(`1.23`), - path: `$.integer()`, - exp: []any{int64(1)}, - }, - { - test: "test_22", - json: js(`1.83`), - path: `$.integer()`, - exp: []any{int64(2)}, - }, - { - test: "test_23", - json: js(`12345678901`), - path: `$.integer()`, - // pg: shows `"12345678901"` in the error - err: `exec: argument "1.2345678901e+10" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_24", - json: js(`"12345678901"`), - path: `$.integer()`, - err: `exec: argument "12345678901" of jsonpath item method .integer() is invalid for type integer`, - }, - { - test: "test_25", - json: js(`"+123"`), - path: `$.integer()`, - exp: []any{int64(123)}, - }, - { - test: "test_26", - json: js(`-123`), - path: `$.integer()`, - exp: []any{int64(-123)}, - }, - { - test: "test_27", - json: js(`"-123"`), - path: `$.integer()`, - exp: []any{int64(-123)}, - }, - { - test: "test_28", - json: js(`123`), - path: `$.integer() * 2`, - exp: []any{int64(246)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func 
TestPgQueryNumberMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L557-L584 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.number()`, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.number()`, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - }, - { - test: "test_3", - json: js(`null`), - path: `$.number()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_4", - json: js(`true`), - path: `$.number()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `$.number()`, - exp: []any{}, - }, - { - test: "test_6", - json: js(`[]`), - path: `strict $.number()`, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - }, - { - test: "test_7", - json: js(`{}`), - path: `$.number()`, - err: `exec: jsonpath item method .number() can only be applied to a string or numeric value`, - }, - { - test: "test_8", - json: js(`[]`), - path: `strict $.number()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_9", - json: js(`{}`), - path: `$.number()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_10", - json: js(`1.23`), - path: `$.number()`, - exp: []any{float64(1.23)}, - }, - { - test: "test_11", - json: js(`"1.23"`), - path: `$.number()`, - exp: []any{float64(1.23)}, - }, - { - test: "test_12", - json: js(`"1.23aaa"`), - path: `$.number()`, - err: `exec: argument "1.23aaa" of jsonpath item method .number() is invalid for type numeric`, - }, - // Go cannot parse 1e1000 into a float because it's too big. - // Postgres JSONB accepts arbitrary numeric sizes. 
- // { - // name: "test_13", - // json: js(`1e1000`), - // path: `$.number()`, - // exp: []any{"10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, - // }, - { - test: "test_14", - json: js(`"nan"`), - path: `$.number()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - }, - { - test: "test_15", - json: js(`"NaN"`), - path: `$.number()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - }, - { - test: "test_16", - json: js(`"inf"`), - path: `$.number()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - }, - { - test: "test_17", - json: js(`"-inf"`), - path: `$.number()`, - err: `exec: NaN or Infinity is not allowed for jsonpath item method .number()`, - }, - { - test: "test_18", - json: js(`"inf"`), - path: `$.number()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_19", - json: js(`"-inf"`), - path: `$.number()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_20", - json: js(`123`), - path: `$.number()`, - exp: []any{float64(123)}, - }, - { - test: "test_21", - json: js(`"123"`), - path: `$.number()`, - exp: []any{float64(123)}, - }, - { - test: "test_22", - json: js(`12345678901234567890`), - path: `$.number()`, - exp: []any{float64(12345678901234567890)}, - }, - { - test: "test_23", - json: js(`"12345678901234567890"`), - path: `$.number()`, - exp: []any{float64(12345678901234567890)}, - }, - { - test: "test_24", - json: js(`"+12.3"`), - path: `$.number()`, - exp: []any{float64(12.3)}, - }, - { - test: "test_25", - json: js(`-12.3`), - path: `$.number()`, - exp: []any{float64(-12.3)}, - }, - { - test: "test_26", - json: js(`"-12.3"`), - path: `$.number()`, - exp: []any{float64(-12.3)}, - }, - { - test: "test_27", - json: js(`12.3`), - path: `$.number() * 2`, - exp: []any{float64(24.6)}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryStringMethod(t *testing.T) { - t.Parallel() - - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L586-L616 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.string()`, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - }, - { - test: "test_2", - json: js(`null`), - path: `$.string()`, - 
opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_3", - json: js(`[]`), - path: `$.string()`, - exp: []any{}, - }, - { - test: "test_4", - json: js(`[]`), - path: `strict $.string()`, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - }, - { - test: "test_5", - json: js(`{}`), - path: `$.string()`, - err: `exec: jsonpath item method .string() can only be applied to a boolean, string, numeric, or datetime value`, - }, - { - test: "test_6", - json: js(`[]`), - path: `strict $.string()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_7", - json: js(`{}`), - path: `$.string()`, - opt: []Option{WithSilent()}, - exp: []any{}, - }, - { - test: "test_8", - json: js(`1.23`), - path: `$.string()`, - exp: []any{"1.23"}, - }, - { - test: "test_9", - json: js(`"1.23"`), - path: `$.string()`, - exp: []any{"1.23"}, - }, - { - test: "test_10", - json: js(`"1.23aaa"`), - path: `$.string()`, - exp: []any{"1.23aaa"}, - }, - { - test: "test_11", - json: js(`1234`), - path: `$.string()`, - exp: []any{"1234"}, - }, - { - test: "test_12", - json: js(`true`), - path: `$.string()`, - exp: []any{"true"}, - }, - { - test: "test_13", - json: js(`1234`), - path: `$.string().type()`, - exp: []any{"string"}, - }, - { - test: "test_14", - json: js(`[2, true]`), - path: `$.string()`, - exp: []any{"2", "true"}, - }, - // pg: tests 15 & 16 use jsonb_path_query_array but our Query() always - // returns a slice. - { - test: "test_15", - json: js(`[1.23, "yes", false]`), - path: `$[*].string()`, - exp: []any{"1.23", "yes", "false"}, - }, - { - test: "test_16", - json: js(`[1.23, "yes", false]`), - path: `$[*].string().type()`, - exp: []any{"string", "string", "string"}, - }, - { - test: "test_17", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +5:30 - path: `$.timestamp().string()`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_18", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +5:30 - path: `$.timestamp().string()`, - opt: []Option{WithTZ()}, - exp: []any{"2023-08-15T00:04:56"}, // should work - }, - { - test: "test_19", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp_tz().string()`, - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_20", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp_tz().string()`, - opt: []Option{WithTZ()}, - exp: []any{"2023-08-15T12:34:56-07:00"}, // should work - }, - { - test: "test_21", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +5:30 - path: `$.timestamp_tz().string()`, - exp: []any{"2023-08-15T12:34:56+05:30"}, - }, - { - test: "test_22", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp().string()`, - exp: []any{"2023-08-15T12:34:56"}, - }, - { - test: "test_23", - json: js(`"12:34:56+05:30"`), // pg: 12:34:56 +5:30 - path: `$.time_tz().string()`, - exp: []any{"12:34:56+05:30"}, - }, - // test_24 in TestPgQueryStringMethodTZ10 below - { - test: "test_25", - json: js(`"12:34:56"`), - path: `$.time().string()`, - exp: []any{"12:34:56"}, - }, - { - test: "test_26", - json: js(`"2023-08-15"`), - path: `$.date().string()`, - exp: []any{"2023-08-15"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryStringMethodTZ10(t *testing.T) { - 
t.Parallel() - // We use +10 here, because the POSIX syntax used in the test, UTC-10, - // uses the opposite meaning for +/-. - // https://www.postgresql.org/docs/current/datetime-posix-timezone-specs.html - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 60*60*10)) // UTC-10 - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L611-L614 - for _, tc := range []queryTestCase{ - { - test: "test_24", - json: js(`"12:34:56"`), - path: `$.time_tz().string()`, - opt: []Option{WithTZ()}, - exp: []any{"12:34:56+10:00"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryNoDateStyle(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.UTC) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L618-L624 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +5:30 - path: `$.timestamp_tz().string()`, - exp: []any{"2023-08-15T12:34:56+05:30"}, - }, - { - test: "test_2", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp().string()`, - exp: []any{"2023-08-15T12:34:56"}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimeMethod(t *testing.T) { - t.Parallel() - - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L626-L651 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.time()`, - err: `exec: jsonpath item method .time() can only be applied to a string`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.time()`, - err: `exec: jsonpath item method .time() can only be applied to a string`, - }, - { - test: "test_3", - json: js(`1`), - path: `$.time()`, - err: `exec: jsonpath item method .time() can only be applied to a string`, - }, - { - test: "test_4", - json: js(`[]`), - path: `$.time()`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `strict $.time()`, - err: `exec: jsonpath item method .time() can only be applied to a string`, - }, - { - test: "test_6", - json: js(`{}`), - path: `$.time()`, - err: `exec: jsonpath item method .time() can only be applied to a string`, - }, - { - test: "test_7", - json: js(`"bogus"`), - path: `$.time()`, - err: `exec: time format is not recognized: "bogus"`, - }, - // Test 8 in TestPgQueryTimeAtQuestion below - { - test: "test_9", - json: js(`"12:34:56"`), - path: `$.time()`, - exp: []any{pt(ctx, "12:34:56")}, - }, - { - test: "test_10", - json: js(`"12:34:56"`), - path: `$.time().type()`, - exp: []any{"time without time zone"}, - }, - { - test: "test_11", - json: js(`"2023-08-15"`), - path: `$.time()`, - err: `exec: time format is not recognized: "2023-08-15"`, - }, - { - test: "test_12", - json: js(`"12:34:56+05:30"`), // pg: uses 12:34:56 +05:30 - path: `$.time()`, - err: `exec: cannot convert value from timetz to time without time zone usage.` + tzHint, - }, - { - test: "test_13", - json: js(`"12:34:56+05:30"`), // pg: uses 12:34:56 +05:30 - path: `$.time()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:56")}, // should work - }, - { - test: "test_14", - json: 
js(`"2023-08-15 12:34:56"`), - path: `$.time()`, - exp: []any{pt(ctx, "12:34:56")}, - }, - // Tests 15 & 16 in TestPgQueryTimeMethodSyntaxError below. - { - test: "test_17", - json: js(`"12:34:56.789"`), - path: `$.time(12345678901)`, - err: `exec: time precision of jsonpath item method .time() is out of integer range`, - }, - { - test: "test_18", - json: js(`"12:34:56.789"`), - path: `$.time(0)`, - exp: []any{pt(ctx, "12:34:57")}, - }, - { - test: "test_19", - json: js(`"12:34:56.789"`), - path: `$.time(2)`, - exp: []any{pt(ctx, "12:34:56.79")}, - }, - { - test: "test_20", - json: js(`"12:34:56.789"`), - path: `$.time(5)`, - exp: []any{pt(ctx, "12:34:56.789")}, - }, - { - test: "test_21", - json: js(`"12:34:56.789"`), - path: `$.time(10)`, - exp: []any{pt(ctx, "12:34:56.789")}, - }, - { - test: "test_22", - json: js(`"12:34:56.789012"`), - path: `$.time(8)`, - exp: []any{pt(ctx, "12:34:56.789012")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimeAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L635 - for _, tc := range []existsTestCase{ - { - test: "test_8", - json: js(`"12:34:56"`), - path: `$.time()`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryTimeMethodSyntaxError(t *testing.T) { - t.Parallel() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L644-L645 - for _, tc := range []queryTestCase{ - { - test: "test_15", - json: js(`"12:34:56.789"`), - path: `$.time(-1)`, - err: `parser: syntax error at 1:9`, - }, - { - test: "test_16", - json: js(`"12:34:56.789"`), - path: `$.time(2.0)`, - err: `parser: syntax error at 1:11`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - path, err := parser.Parse(tc.path) - r.EqualError(err, tc.err) - r.ErrorIs(err, parser.ErrParse) - a.Nil(path) - }) - } -} - -func TestPgQueryTimeTZMethod(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L653-L676 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.time_tz()`, - err: `exec: jsonpath item method .time_tz() can only be applied to a string`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.time_tz()`, - err: `exec: jsonpath item method .time_tz() can only be applied to a string`, - }, - { - test: "test_3", - json: js(`1`), - path: `$.time_tz()`, - err: `exec: jsonpath item method .time_tz() can only be applied to a string`, - }, - { - test: "test_4", - json: js(`[]`), - path: `$.time_tz()`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `strict $.time_tz()`, - err: `exec: jsonpath item method .time_tz() can only be applied to a string`, - }, - { - test: "test_6", - json: js(`{}`), - path: `$.time_tz()`, - err: `exec: jsonpath item method .time_tz() can only be applied to a string`, - }, - { - test: "test_7", - json: js(`"bogus"`), - path: `$.time_tz()`, - err: `exec: time_tz format is not recognized: "bogus"`, - }, - // Test 8 in TestPgQueryTimeTZAtQuestion below - { - test: "test_9", - json: js(`"12:34:56+05:30"`), // pg: 12:34:56 +05:30 - 
path: `$.time_tz()`, - exp: []any{pt(ctx, "12:34:56+05:30")}, - }, - { - test: "test_10", - json: js(`"12:34:56+05:30"`), // pg: 12:34:56 +05:30 - path: `$.time_tz().type()`, - exp: []any{"time with time zone"}, - }, - { - test: "test_11", - json: js(`"2023-08-15"`), - path: `$.time_tz()`, - err: `exec: time_tz format is not recognized: "2023-08-15"`, - }, - { - test: "test_12", - json: js(`"2023-08-15 12:34:56"`), - path: `$.time_tz()`, - err: `exec: time_tz format is not recognized: "2023-08-15 12:34:56"`, - }, - // Tests 13 & 14 in TestPgQueryTimeTZMethodSyntaxError below. - { - test: "test_15", - json: js(`"12:34:56.789+05:30"`), // pg: 12:34:56.789 +05:30 - path: `$.time_tz(12345678901)`, - err: `exec: time precision of jsonpath item method .time_tz() is out of integer range`, - }, - { - test: "test_16", - json: js(`"12:34:56.789+05:30"`), // pg: 12:34:56.789 +05:30 - path: `$.time_tz(0)`, - exp: []any{pt(ctx, "12:34:57+05:30")}, - }, - { - test: "test_17", - json: js(`"12:34:56.789+05:30"`), // pg: 12:34:56.789 +05:30 - path: `$.time_tz(2)`, - exp: []any{pt(ctx, "12:34:56.79+05:30")}, - }, - { - test: "test_18", - json: js(`"12:34:56.789+05:30"`), // pg: 12:34:56.789 +05:30 - path: `$.time_tz(5)`, - exp: []any{pt(ctx, "12:34:56.789+05:30")}, - }, - { - test: "test_19", - json: js(`"12:34:56.789+05:30"`), // pg: 12:34:56.789 +05:30 - path: `$.time_tz(10)`, - exp: []any{pt(ctx, "12:34:56.789+05:30")}, - }, - { - test: "test_20", - json: js(`"12:34:56.789012+05:30"`), // pg: 12:34:56.789012 +05:30 - path: `$.time_tz(8)`, - exp: []any{pt(ctx, "12:34:56.789012+05:30")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimeTZAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L662 - for _, tc := range []existsTestCase{ - { - test: "test_8", - json: js(`"12:34:56+05:30"`), // pg: 12:34:56 +05:30 - path: `$.time_tz()`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryTimeTZMethodSyntaxError(t *testing.T) { - t.Parallel() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L669-L670 - for _, tc := range []queryTestCase{ - { - test: "test_13", - json: js(`"12:34:56.789 +05:30"`), - path: `$.time_tz(-1)`, - err: `parser: syntax error at 1:12`, - }, - { - test: "test_14", - json: js(`"12:34:56.789 +05:30"`), - path: `$.time_tz(2.0)`, - err: `parser: syntax error at 1:14`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - path, err := parser.Parse(tc.path) - r.EqualError(err, tc.err) - r.ErrorIs(err, parser.ErrParse) - a.Nil(path) - }) - } -} - -func TestPgQueryTimestampMethod(t *testing.T) { - t.Parallel() - - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L678-L702 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.timestamp()`, - err: `exec: jsonpath item method .timestamp() can only be applied to a string`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.timestamp()`, - err: `exec: jsonpath item 
method .timestamp() can only be applied to a string`, - }, - { - test: "test_3", - json: js(`1`), - path: `$.timestamp()`, - err: `exec: jsonpath item method .timestamp() can only be applied to a string`, - }, - { - test: "test_4", - json: js(`[]`), - path: `$.timestamp()`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `strict $.timestamp()`, - err: `exec: jsonpath item method .timestamp() can only be applied to a string`, - }, - { - test: "test_6", - json: js(`{}`), - path: `$.timestamp()`, - err: `exec: jsonpath item method .timestamp() can only be applied to a string`, - }, - { - test: "test_7", - json: js(`"bogus"`), - path: `$.timestamp()`, - err: `exec: timestamp format is not recognized: "bogus"`, - }, - // Test 8 in TestPgQueryTimestampAtQuestion below - { - test: "test_9", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp()`, - exp: []any{pt(ctx, "2023-08-15T12:34:56")}, - }, - { - test: "test_10", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp().type()`, - exp: []any{"timestamp without time zone"}, - }, - { - test: "test_11", - json: js(`"2023-08-15"`), - path: `$.timestamp()`, - exp: []any{pt(ctx, "2023-08-15T00:00:00")}, - }, - { - test: "test_12", - json: js(`"12:34:56"`), - path: `$.timestamp()`, - err: `exec: timestamp format is not recognized: "12:34:56"`, - }, - { - test: "test_13", - json: js(`"12:34:56+05:30"`), // pg: 12:34:56 +05:30 - path: `$.timestamp()`, - err: `exec: timestamp format is not recognized: "12:34:56+05:30"`, - }, - // Tests 14 & 15 in TestPgQueryTimestampMethodSyntaxError below. - { - test: "test_16", - json: js(`"2023-08-15 12:34:56.789"`), - path: `$.timestamp(12345678901)`, - err: `exec: time precision of jsonpath item method .timestamp() is out of integer range`, - }, - { - test: "test_17", - json: js(`"2023-08-15 12:34:56.789"`), - path: `$.timestamp(0)`, - exp: []any{pt(ctx, "2023-08-15T12:34:57")}, - }, - { - test: "test_18", - json: js(`"2023-08-15 12:34:56.789"`), - path: `$.timestamp(2)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.79")}, - }, - { - test: "test_19", - json: js(`"2023-08-15 12:34:56.789"`), - path: `$.timestamp(5)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.789")}, - }, - { - test: "test_20", - json: js(`"2023-08-15 12:34:56.789"`), - path: `$.timestamp(10)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.789")}, - }, - { - test: "test_21", - json: js(`"2023-08-15 12:34:56.789012"`), - path: `$.timestamp(8)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.789012")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimestampAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L687 - for _, tc := range []existsTestCase{ - { - test: "test_8", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp()`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryTimestampMethodSyntaxError(t *testing.T) { - t.Parallel() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L695-L696 - for _, tc := range []queryTestCase{ - { - test: "test_14", - json: js(`"2023-08-15 12:34:56.789"`), - path: `$.timestamp(-1)`, - err: `parser: syntax error at 1:14`, - }, - { - test: "test_15", - json: js(`"2023-08-15 
12:34:56.789"`), - path: `$.timestamp(2.0)`, - err: `parser: syntax error at 1:16`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - path, err := parser.Parse(tc.path) - r.EqualError(err, tc.err) - r.ErrorIs(err, parser.ErrParse) - a.Nil(path) - }) - } -} - -func TestPgQueryTimestampTZMethod(t *testing.T) { - t.Parallel() - - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L704-L729 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`null`), - path: `$.timestamp_tz()`, - err: `exec: jsonpath item method .timestamp_tz() can only be applied to a string`, - }, - { - test: "test_2", - json: js(`true`), - path: `$.timestamp_tz()`, - err: `exec: jsonpath item method .timestamp_tz() can only be applied to a string`, - }, - { - test: "test_3", - json: js(`1`), - path: `$.timestamp_tz()`, - err: `exec: jsonpath item method .timestamp_tz() can only be applied to a string`, - }, - { - test: "test_4", - json: js(`[]`), - path: `$.timestamp_tz()`, - exp: []any{}, - }, - { - test: "test_5", - json: js(`[]`), - path: `strict $.timestamp_tz()`, - err: `exec: jsonpath item method .timestamp_tz() can only be applied to a string`, - }, - { - test: "test_6", - json: js(`{}`), - path: `$.timestamp_tz()`, - err: `exec: jsonpath item method .timestamp_tz() can only be applied to a string`, - }, - { - test: "test_7", - json: js(`"bogus"`), - path: `$.timestamp_tz()`, - err: `exec: timestamp_tz format is not recognized: "bogus"`, - }, - // Test 8 in TestPgQueryTimestampTZAtQuestion below - { - test: "test_9", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp_tz()`, - exp: []any{pt(ctx, "2023-08-15T12:34:56+05:30")}, - }, - { - test: "test_10", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp_tz().type()`, - exp: []any{"timestamp with time zone"}, - }, - { - test: "test_11", - json: js(`"2023-08-15"`), - path: `$.timestamp_tz()`, - err: `exec: cannot convert value from date to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_12", - json: js(`"2023-08-15"`), - path: `$.timestamp_tz()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15T00:00:00-07:00")}, // should work - // pg: Difference in cast value formatting thread: - // https://www.postgresql.org/message-id/flat/7DE080CE-6D8C-4794-9BD1-7D9699172FAB%40justatheory.com - }, - { - test: "test_13", - json: js(`"12:34:56"`), - path: `$.timestamp_tz()`, - err: `exec: timestamp_tz format is not recognized: "12:34:56"`, - }, - { - test: "test_14", - json: js(`"12:34:56+05:30"`), // pg: 12:34:56 +05:30 - path: `$.timestamp_tz()`, - err: `exec: timestamp_tz format is not recognized: "12:34:56+05:30"`, - }, - // Tests 15 & 16 in TestPgQueryTimestampTZMethodSyntaxError below. 
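- // Tests 18-22 below round fractional seconds rather than truncating them
- // (12:34:56.789 at precision 0 becomes 12:34:57). A rough sketch of that
- // rounding, assuming a time.Time value and a precision of 0-9 digits:
- //
- //	step := time.Duration(math.Pow10(9 - precision)) // one unit of the last kept digit, in nanoseconds
- //	rounded := ts.Round(step)                         // time.Time.Round rounds half up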
- { - test: "test_17", - json: js(`"2023-08-15 12:34:56.789 +05:30"`), // pg: 2023-08-15 12:34:56.789 +05:30 - path: `$.timestamp_tz(12345678901)`, - err: `exec: time precision of jsonpath item method .timestamp_tz() is out of integer range`, - }, - { - test: "test_18", - json: js(`"2023-08-15 12:34:56.789+05:30"`), // pg: 2023-08-15 12:34:56.789 +05:30 - path: `$.timestamp_tz(0)`, - exp: []any{pt(ctx, "2023-08-15T12:34:57+05:30")}, - }, - { - test: "test_19", - json: js(`"2023-08-15 12:34:56.789+05:30"`), // pg: 2023-08-15 12:34:56.789 +05:30 - path: `$.timestamp_tz(2)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.79+05:30")}, - }, - { - test: "test_20", - json: js(`"2023-08-15 12:34:56.789+05:30"`), // pg: 2023-08-15 12:34:56.789 +05:30 - path: `$.timestamp_tz(5)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.789+05:30")}, - }, - { - test: "test_21", - json: js(`"2023-08-15 12:34:56.789+05:30"`), // pg: 2023-08-15 12:34:56.789 +05:30 - path: `$.timestamp_tz(10)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.789+05:30")}, - }, - { - test: "test_22", - json: js(`"2023-08-15 12:34:56.789012+05:30"`), // pg: 2023-08-15 12:34:56.789012 +05:30 - path: `$.timestamp_tz(8)`, - exp: []any{pt(ctx, "2023-08-15T12:34:56.789012+05:30")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimestampTZAtQuestion(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L713 - for _, tc := range []existsTestCase{ - { - test: "test_8", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp_tz()`, - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgQueryTimestampTZMethodSyntaxError(t *testing.T) { - t.Parallel() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L722-L723 - for _, tc := range []queryTestCase{ - { - test: "test_15", - json: js(`"2023-08-15 12:34:56.789+05:30"`), // pg: "2023-08-15 12:34:56.789 +05:30" - path: `$.timestamp_tz(-1)`, - err: `parser: syntax error at 1:17`, - }, - { - test: "test_16", - json: js(`"2023-08-15 12:34:56.789+05:30"`), // pg: "2023-08-15 12:34:56.789 +05:30" - path: `$.timestamp_tz(2.0)`, - err: `parser: syntax error at 1:19`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - path, err := parser.Parse(tc.path) - r.EqualError(err, tc.err) - r.ErrorIs(err, parser.ErrParse) - a.Nil(path) - }) - } -} - -func TestPgQueryDateTimeMethodsUTC(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 0)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L732-L755 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time()`, - err: `exec: cannot convert value from timestamptz to time without time zone usage.` + tzHint, - }, - { - test: "test_2", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "07:04:56")}, // should work - }, - { - test: "test_3", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 
+05:30 - path: `$.time_tz()`, - exp: []any{pt(ctx, "07:04:56+00:00")}, - }, - { - test: "test_4", - json: js(`"12:34:56"`), - path: `$.time_tz()`, - err: `exec: cannot convert value from time to timetz without time zone usage.` + tzHint, - }, - { - test: "test_5", - json: js(`"12:34:56"`), - path: `$.time_tz()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:56Z")}, // should work - }, - { - test: "test_6", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp()`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_7", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15T07:04:56")}, // should work - }, - { - test: "test_8", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp_tz()`, - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_9", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp_tz()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15T12:34:56+00:00")}, // should work - }, - // Remove err field from remaining tests once .datetime(template) implemented - { - test: "test_10", - json: js(`"10-03-2017 12:34"`), - path: `$.datetime("dd-mm-yyyy HH24:MI")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_11", - json: js(`"10-03-2017 12:34"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH")`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: input string is too short for datetime format`, - }, - { - test: "test_12", - json: js(`"10-03-2017 12:34 +05"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00+05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_13", - json: js(`"10-03-2017 12:34 -05"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00-05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_14", - json: js(`"10-03-2017 12:34 +05:20"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00+05:20")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_15", - json: js(`"10-03-2017 12:34 -05:20"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00-05:20")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_16", - json: js(`"12:34"`), - path: `$.datetime("HH24:MI")`, - exp: []any{pt(ctx, "12:34:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_17", - json: js(`"12:34"`), - path: `$.datetime("HH24:MI TZH")`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: input string is too short for datetime format`, - }, - { - test: "test_18", - json: js(`"12:34 +05"`), - path: `$.datetime("HH24:MI TZH")`, - exp: []any{pt(ctx, "12:34:00+05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_19", - json: js(`"12:34 -05"`), - path: `$.datetime("HH24:MI TZH")`, - err: `exec: .datetime(template) is not yet supported`, - exp: []any{pt(ctx, "12:34:00-05:00")}, - }, - { - test: "test_20", - json: js(`"12:34 +05:20"`), - path: `$.datetime("HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "12:34:00+05:20")}, - 
err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_21", - json: js(`"12:34 -05:20"`), - path: `$.datetime("HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "12:34:00-05:20")}, - err: `exec: .datetime(template) is not yet supported`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateTimeMethodsPlus10(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 10*3600)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L757-L779 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time()`, - err: `exec: cannot convert value from timestamptz to time without time zone usage.` + tzHint, - }, - { - test: "test_2", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "17:04:56")}, // should work - }, - { - test: "test_3", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time_tz()`, - exp: []any{pt(ctx, "17:04:56+10:00")}, - }, - { - test: "test_4", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp()`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_5", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15T17:04:56")}, // should work - }, - { - test: "test_6", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp_tz()`, - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_7", - json: js(`"2023-08-15 12:34:56"`), - path: `$.timestamp_tz()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15T12:34:56+10:00")}, // should work - // pg: Difference in cast value formatting thread: - // https://www.postgresql.org/message-id/flat/7DE080CE-6D8C-4794-9BD1-7D9699172FAB%40justatheory.com - }, - { - test: "test_8", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp_tz()`, - exp: []any{pt(ctx, "2023-08-15T12:34:56+05:30")}, - }, - { - test: "test_9", - json: js(`"10-03-2017 12:34"`), - path: `$.datetime("dd-mm-yyyy HH24:MI")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_10", - json: js(`"10-03-2017 12:34"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH")`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: input string is too short for datetime format`, - }, - { - test: "test_11", - json: js(`"10-03-2017 12:34 +05"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00+05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_12", - json: js(`"10-03-2017 12:34 -05"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00-05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_13", - json: js(`"10-03-2017 12:34 +05:20"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00+05:20")}, - err: `exec: .datetime(template) is not yet 
supported`, - }, - { - test: "test_14", - json: js(`"10-03-2017 12:34 -05:20"`), - path: `$.datetime("dd-mm-yyyy HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "2017-03-10T12:34:00-05:20")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_15", - json: js(`"12:34"`), - path: `$.datetime("HH24:MI")`, - exp: []any{pt(ctx, "12:34:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_16", - json: js(`"12:34"`), - path: `$.datetime("HH24:MI TZH")`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: input string is too short for datetime format`, - }, - { - test: "test_17", - json: js(`"12:34 +05"`), - path: `$.datetime("HH24:MI TZH")`, - exp: []any{pt(ctx, "12:34:00+05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_18", - json: js(`"12:34 -05"`), - path: `$.datetime("HH24:MI TZH")`, - exp: []any{pt(ctx, "12:34:00-05:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_19", - json: js(`"12:34 +05:20"`), - path: `$.datetime("HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "12:34:00+05:20")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_20", - json: js(`"12:34 -05:20"`), - path: `$.datetime("HH24:MI TZH:TZM")`, - exp: []any{pt(ctx, "12:34:00-05:20")}, - err: `exec: .datetime(template) is not yet supported`, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateTimeMethodsDefaultTZ(t *testing.T) { - t.Parallel() - - loc, err := time.LoadLocation("PST8PDT") - require.NoError(t, err) - ctx := types.ContextWithTZ(context.Background(), loc) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L781-L810 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time()`, - err: `exec: cannot convert value from timestamptz to time without time zone usage.` + tzHint, - }, - { - test: "test_2", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "00:04:56")}, // should work - }, - { - test: "test_3", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.time_tz()`, - exp: []any{pt(ctx, "00:04:56-07:00")}, - }, - { - test: "test_4", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp()`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_5", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp()`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2023-08-15T00:04:56")}, // should work - }, - { - test: "test_6", - json: js(`"2023-08-15 12:34:56+05:30"`), // pg: 2023-08-15 12:34:56 +05:30 - path: `$.timestamp_tz()`, - exp: []any{pt(ctx, "2023-08-15T12:34:56+05:30")}, - }, - { - test: "test_7", - json: js(`"2017-03-10"`), - path: `$.datetime().type()`, - exp: []any{"date"}, - }, - { - test: "test_8", - json: js(`"2017-03-10"`), - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10")}, - }, - { - test: "test_9", - json: js(`"2017-03-10 12:34:56"`), - path: `$.datetime().type()`, - exp: []any{"timestamp without time zone"}, - }, - { - test: "test_10", - json: js(`"2017-03-10 12:34:56"`), - path: 
`$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56")}, - }, - { - test: "test_11", - json: js(`"2017-03-10 12:34:56+03"`), // pg: 2017-03-10 12:34:56+3 - path: `$.datetime().type()`, - exp: []any{"timestamp with time zone"}, - }, - { - test: "test_12", - json: js(`"2017-03-10 12:34:56+03"`), // pg: 2017-03-10 12:34:56+3 - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56+03:00")}, - }, - { - test: "test_13", - json: js(`"2017-03-10 12:34:56+03:10"`), // pg: 2017-03-10 12:34:56+3:10 - path: `$.datetime().type()`, - exp: []any{"timestamp with time zone"}, - }, - { - test: "test_14", - json: js(`"2017-03-10 12:34:56+03:10"`), // pg: 2017-03-10 12:34:56+3:10 - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56+03:10")}, - }, - { - test: "test_15", - json: js(`"2017-03-10T12:34:56+03:10"`), // pg: 2017-03-10T12:34:56+3:10 - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56+03:10")}, - }, - { - test: "test_16", - json: js(`"2017-03-10t12:34:56+03:10"`), // pg: 2017-03-10t12:34:56+3:10 - path: `$.datetime()`, - err: `exec: datetime format is not recognized: "2017-03-10t12:34:56+03:10"`, - }, - { - test: "test_17", - json: js(`"2017-03-10 12:34:56.789+03:10"`), // pg: 2017-03-10 12:34:56.789+3:10 - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56.789+03:10")}, - }, - { - test: "test_18", - json: js(`"2017-03-10T12:34:56.789+03:10"`), // pg: 2017-03-10T12:34:56.789+3:10 - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56.789+03:10")}, - }, - { - test: "test_19", - json: js(`"2017-03-10t12:34:56.789+03:10"`), // pg: 2017-03-10t12:34:56.789+3:10 - path: `$.datetime()`, - err: `exec: datetime format is not recognized: "2017-03-10t12:34:56.789+03:10"`, - }, - { - test: "test_20", - json: js(`"2017-03-10T12:34:56.789-05:00"`), // pg: 2017-03-10T12:34:56.789EST - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56.789-05:00")}, - }, - { - test: "test_21", - json: js(`"2017-03-10T12:34:56.789Z"`), - path: `$.datetime()`, - exp: []any{pt(ctx, "2017-03-10T12:34:56.789+00:00")}, - }, - { - test: "test_22", - json: js(`"12:34:56"`), - path: `$.datetime().type()`, - exp: []any{"time without time zone"}, - }, - { - test: "test_23", - json: js(`"12:34:56"`), - path: `$.datetime()`, - exp: []any{pt(ctx, "12:34:56")}, - }, - { - test: "test_24", - json: js(`"12:34:56+03"`), // pg: 12:34:56+3 - path: `$.datetime().type()`, - exp: []any{"time with time zone"}, - }, - { - test: "test_25", - json: js(`"12:34:56+03"`), // pg: 12:34:56+3 - path: `$.datetime()`, - exp: []any{pt(ctx, "12:34:56+03:00")}, - }, - { - test: "test_26", - json: js(`"12:34:56+03:10"`), // pg: 12:34:56+3:10 - path: `$.datetime().type()`, - exp: []any{"time with time zone"}, - }, - { - test: "test_27", - json: js(`"12:34:56+03:10"`), // pg: 12:34:56+3:10 - path: `$.datetime()`, - exp: []any{pt(ctx, "12:34:56+03:10")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryDateComparison(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 0)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L814-L860 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03+04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: 
`$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from date to timestamptz without time zone usage.`+hint, - }, - { - test: "test_2", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03+04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from date to timestamptz without time zone usage.`+hint, - }, - { - test: "test_3", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03+04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from date to timestamptz without time zone usage.`+hint, - }, - { - test: "test_4", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03+04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ == "10.03.2017".datetime("dd.mm.yyyy"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10T00:00:00"), pt(ctx, "2017-03-10T03:00:00+03:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_5", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03+04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ >= "10.03.2017".datetime("dd.mm.yyyy"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10"), pt(ctx, "2017-03-11"), pt(ctx, "2017-03-10T00:00:00"), pt(ctx, "2017-03-10T12:34:56"), pt(ctx, "2017-03-10T03:00:00+03:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_6", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "12:34:56", "01:02:03+04", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ < "10.03.2017".datetime("dd.mm.yyyy"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-09"), pt(ctx, "2017-03-10T01:02:03+04:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_7", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ == "2017-03-10".date())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10T00:00:00"), pt(ctx, "2017-03-10T03:00:00+03:00")}, - }, - { - test: "test_8", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? (@ >= "2017-03-10".date())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10"), pt(ctx, "2017-03-11"), pt(ctx, "2017-03-10T00:00:00"), pt(ctx, "2017-03-10T12:34:56"), pt(ctx, "2017-03-10T03:00:00+03:00")}, - }, - { - test: "test_9", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].datetime() ? 
(@ < "2017-03-10".date())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-09"), pt(ctx, "2017-03-10T01:02:03+04:00")}, - }, - { - test: "test_10", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].date() ? (@ == "2017-03-10".date())`, - err: `exec: cannot convert value from timestamptz to date without time zone usage.` + tzHint, - }, - { - test: "test_11", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].date() ? (@ >= "2017-03-10".date())`, - err: `exec: cannot convert value from timestamptz to date without time zone usage.` + tzHint, - }, - { - test: "test_12", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].date() ? (@ < "2017-03-10".date())`, - err: `exec: cannot convert value from timestamptz to date without time zone usage.` + tzHint, - }, - { - test: "test_13", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].date() ? (@ == "2017-03-10".date())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10")}, - }, - { - test: "test_14", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].date() ? (@ >= "2017-03-10".date())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10"), pt(ctx, "2017-03-11"), pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10"), pt(ctx, "2017-03-10")}, - }, - { - test: "test_15", - json: js(`["2017-03-10", "2017-03-11", "2017-03-09", "2017-03-10 00:00:00", "2017-03-10 12:34:56", "2017-03-10 01:02:03+04", "2017-03-10 03:00:00+03"]`), - path: `$[*].date() ? (@ < "2017-03-10".date())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-09"), pt(ctx, "2017-03-09")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimeComparison(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 0)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L862-L914 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from time to timetz without time zone usage.` + hint, - }, - { - test: "test_2", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? 
(@ >= "12:35".datetime("HH24:MI"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from time to timetz without time zone usage.` + hint, - }, - { - test: "test_3", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from time to timetz without time zone usage.` + hint, - }, - { - test: "test_4", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ == "12:35".datetime("HH24:MI"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00"), pt(ctx, "12:35:00+00:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_5", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ >= "12:35".datetime("HH24:MI"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00"), pt(ctx, "12:36:00"), pt(ctx, "12:35:00+00:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_6", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ < "12:35".datetime("HH24:MI"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:00"), pt(ctx, "12:35:00+01:00"), pt(ctx, "13:35:00+01:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_7", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ == "12:35:00".time())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00"), pt(ctx, "12:35:00+00:00")}, - }, - { - test: "test_8", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ >= "12:35:00".time())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00"), pt(ctx, "12:36:00"), pt(ctx, "12:35:00+00:00")}, - }, - { - test: "test_9", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ < "12:35:00".time())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:00"), pt(ctx, "12:35:00+01:00"), pt(ctx, "13:35:00+01:00")}, - }, - { - test: "test_10", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time() ? (@ == "12:35:00".time())`, - err: `exec: cannot convert value from timetz to time without time zone usage.` + tzHint, - }, - { - test: "test_11", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time() ? 
(@ >= "12:35:00".time())`, - err: `exec: cannot convert value from timetz to time without time zone usage.` + tzHint, - }, - { - test: "test_12", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time() ? (@ < "12:35:00".time())`, - err: `exec: cannot convert value from timetz to time without time zone usage.` + tzHint, - }, - { - test: "test_13", - json: js(`["12:34:00.123", "12:35:00.123", "12:36:00.1123", "12:35:00.1123+00", "12:35:00.123+01", "13:35:00.123+01", "2017-03-10 12:35:00.1", "2017-03-10 12:35:00.123+01"]`), - path: `$[*].time(2) ? (@ >= "12:35:00.123".time(2))`, - err: `exec: cannot convert value from timetz to time without time zone usage.` + tzHint, - }, - { - test: "test_14", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time() ? (@ == "12:35:00".time())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00"), pt(ctx, "12:35:00"), pt(ctx, "12:35:00"), pt(ctx, "12:35:00")}, - }, - { - test: "test_15", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time() ? (@ >= "12:35:00".time())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00"), pt(ctx, "12:36:00"), pt(ctx, "12:35:00"), pt(ctx, "12:35:00"), pt(ctx, "13:35:00"), pt(ctx, "12:35:00")}, - }, - { - test: "test_16", - json: js(`["12:34:00", "12:35:00", "12:36:00", "12:35:00+00", "12:35:00+01", "13:35:00+01", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time() ? (@ < "12:35:00".time())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:00"), pt(ctx, "11:35:00")}, - }, - { - test: "test_17", - json: js(`["12:34:00.123", "12:35:00.123", "12:36:00.1123", "12:35:00.1123+00", "12:35:00.123+01", "13:35:00.123+01", "2017-03-10 12:35:00.1", "2017-03-10 12:35:00.123+01"]`), - path: `$[*].time(2) ? (@ >= "12:35:00.123".time(2))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00.12"), pt(ctx, "12:36:00.11"), pt(ctx, "12:35:00.12"), pt(ctx, "13:35:00.12")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimeTZComparison(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 0)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L917-L969 - // All ` +1`s replaced with `+01`. - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from time to timetz without time zone usage.` + hint, - }, - { - test: "test_2", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? 
(@ >= "12:35 +1".datetime("HH24:MI TZH"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from time to timetz without time zone usage.` + hint, - }, - { - test: "test_3", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from time to timetz without time zone usage.` + hint, - }, - { - test: "test_4", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ == "12:35 +1".datetime("HH24:MI TZH"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00+01:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_5", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ >= "12:35 +1".datetime("HH24:MI TZH"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00+01:00"), pt(ctx, "12:36:00+01:00"), pt(ctx, "12:35:00-02:00"), pt(ctx, "11:35:00"), pt(ctx, "12:35:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_6", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10", "2017-03-10 12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ < "12:35 +1".datetime("HH24:MI TZH"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:00+01:00"), pt(ctx, "12:35:00+02:00"), pt(ctx, "10:35:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_7", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ == "12:35:00+01".time_tz())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00+01:00")}, - }, - { - test: "test_8", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ >= "12:35:00+01".time_tz())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00+01:00"), pt(ctx, "12:36:00+01:00"), pt(ctx, "12:35:00-02:00"), pt(ctx, "11:35:00"), pt(ctx, "12:35:00")}, - }, - { - test: "test_9", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].datetime() ? (@ < "12:35:00+01".time_tz())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:00+01:00"), pt(ctx, "12:35:00+02:00"), pt(ctx, "10:35:00")}, - }, - { - test: "test_10", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time_tz() ? 
(@ == "12:35:00+01".time_tz())`, - err: `exec: cannot convert value from time to timetz without time zone usage.` + tzHint, - }, - { - test: "test_11", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time_tz() ? (@ >= "12:35:00+01".time_tz())`, - err: `exec: cannot convert value from time to timetz without time zone usage.` + tzHint, - }, - { - test: "test_12", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time_tz() ? (@ < "12:35:00+01".time_tz())`, - err: `exec: cannot convert value from time to timetz without time zone usage.` + tzHint, - }, - { - test: "test_13", - json: js(`["12:34:00.123+01", "12:35:00.123+01", "12:36:00.1123+01", "12:35:00.1123+02", "12:35:00.123-02", "10:35:00.123", "11:35:00.1", "12:35:00.123", "2017-03-10 12:35:00.123 +1"]`), - path: `$[*].time_tz(2) ? (@ >= "12:35:00.123 +1".time_tz(2))`, - err: `exec: cannot convert value from time to timetz without time zone usage.` + tzHint, - }, - { - test: "test_14", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time_tz() ? (@ == "12:35:00+01".time_tz())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00+01:00")}, - }, - { - test: "test_15", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time_tz() ? (@ >= "12:35:00+01".time_tz())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00+01:00"), pt(ctx, "12:36:00+01:00"), pt(ctx, "12:35:00-02:00"), pt(ctx, "11:35:00+00:00"), pt(ctx, "12:35:00+00:00"), pt(ctx, "11:35:00+00:00")}, - }, - { - test: "test_16", - json: js(`["12:34:00+01", "12:35:00+01", "12:36:00+01", "12:35:00+02", "12:35:00-02", "10:35:00", "11:35:00", "12:35:00", "2017-03-10 12:35:00+01"]`), - path: `$[*].time_tz() ? (@ < "12:35:00+01".time_tz())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:34:00+01:00"), pt(ctx, "12:35:00+02:00"), pt(ctx, "10:35:00+00:00")}, - }, - { - test: "test_17", - json: js(`["12:34:00.123+01", "12:35:00.123+01", "12:36:00.1123+01", "12:35:00.1123+02", "12:35:00.123-02", "10:35:00.123", "11:35:00.1", "12:35:00.123", "2017-03-10 12:35:00.123+01"]`), - path: `$[*].time_tz(2) ? (@ >= "12:35:00.123+01".time_tz(2))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "12:35:00.12+01:00"), pt(ctx, "12:36:00.11+01:00"), pt(ctx, "12:35:00.12-02:00"), pt(ctx, "12:35:00.12+00:00"), pt(ctx, "11:35:00.12+00:00")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimestampComparison(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 0)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L917-L1023 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? 
(@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + hint, - }, - { - test: "test_2", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + hint, - }, - { - test: "test_3", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + hint, - }, - { - test: "test_4", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ == "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T13:35:00+01:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_5", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ >= "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T12:36:00"), pt(ctx, "2017-03-10T13:35:00+01:00"), pt(ctx, "2017-03-10T12:35:00-01:00"), pt(ctx, "2017-03-11")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_6", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ < "10.03.2017 12:35".datetime("dd.mm.yyyy HH24:MI"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:34:00"), pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_7", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].datetime() ? (@ == "2017-03-10 12:35:00".timestamp())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T13:35:00+01:00")}, - }, - { - test: "test_8", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].datetime() ? 
(@ >= "2017-03-10 12:35:00".timestamp())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T12:36:00"), pt(ctx, "2017-03-10T13:35:00+01:00"), pt(ctx, "2017-03-10T12:35:00-01:00"), pt(ctx, "2017-03-11")}, - }, - { - test: "test_9", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].datetime() ? (@ < "2017-03-10 12:35:00".timestamp())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:34:00"), pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10")}, - }, - { - test: "test_10", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp() ? (@ == "2017-03-10 12:35:00".timestamp())`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_11", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp() ? (@ >= "2017-03-10 12:35:00".timestamp())`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_12", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp() ? (@ < "2017-03-10 12:35:00".timestamp())`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_13", - json: js(`["2017-03-10 12:34:00.123", "2017-03-10 12:35:00.123", "2017-03-10 12:36:00.1123", "2017-03-10 12:35:00.1123+01", "2017-03-10 13:35:00.123+01", "2017-03-10 12:35:00.1-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp(2) ? (@ >= "2017-03-10 12:35:00.123".timestamp(2))`, - err: `exec: cannot convert value from timestamptz to timestamp without time zone usage.` + tzHint, - }, - { - test: "test_14", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp() ? (@ == "2017-03-10 12:35:00".timestamp())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T12:35:00")}, - }, - { - test: "test_15", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp() ? (@ >= "2017-03-10 12:35:00".timestamp())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T12:36:00"), pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-10T13:35:00"), pt(ctx, "2017-03-11T00:00:00")}, - }, - { - test: "test_16", - json: js(`["2017-03-10 12:34:00", "2017-03-10 12:35:00", "2017-03-10 12:36:00", "2017-03-10 12:35:00+01", "2017-03-10 13:35:00+01", "2017-03-10 12:35:00-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp() ? 
(@ < "2017-03-10 12:35:00".timestamp())`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:34:00"), pt(ctx, "2017-03-10T11:35:00"), pt(ctx, "2017-03-10T00:00:00")}, - }, - { - test: "test_17", - json: js(`["2017-03-10 12:34:00.123", "2017-03-10 12:35:00.123", "2017-03-10 12:36:00.1123", "2017-03-10 12:35:00.1123+01", "2017-03-10 13:35:00.123+01", "2017-03-10 12:35:00.1-01", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp(2) ? (@ >= "2017-03-10 12:35:00.123".timestamp(2))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00.12"), pt(ctx, "2017-03-10T12:36:00.11"), pt(ctx, "2017-03-10T12:35:00.12"), pt(ctx, "2017-03-10T13:35:00.1"), pt(ctx, "2017-03-11T00:00:00")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryTimestampTZComparison(t *testing.T) { - t.Parallel() - ctx := types.ContextWithTZ(context.Background(), time.FixedZone("", 0)) - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1025-L1077 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + hint, - }, - { - test: "test_2", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + hint, - }, - { - test: "test_3", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))`, - err: `exec: .datetime(template) is not yet supported`, - // err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + hint, - }, - { - test: "test_4", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? 
(@ == "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10T11:35:00")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_5", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ >= "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10T12:36:00+01:00"), pt(ctx, "2017-03-10T12:35:00-02:00"), pt(ctx, "2017-03-10T11:35:00"), pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-11")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_6", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11", "12:34:56", "12:34:56+01"]`), - path: `$[*].datetime() ? (@ < "10.03.2017 12:35 +1".datetime("dd.mm.yyyy HH24:MI TZH"))`, - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:34:00+01:00"), pt(ctx, "2017-03-10T12:35:00+02:00"), pt(ctx, "2017-03-10T10:35:00"), pt(ctx, "2017-03-10")}, - err: `exec: .datetime(template) is not yet supported`, - }, - { - test: "test_7", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].datetime() ? (@ == "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10T11:35:00")}, - }, - { - test: "test_8", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].datetime() ? (@ >= "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10T12:36:00+01:00"), pt(ctx, "2017-03-10T12:35:00-02:00"), pt(ctx, "2017-03-10T11:35:00"), pt(ctx, "2017-03-10T12:35:00"), pt(ctx, "2017-03-11")}, - }, - { - test: "test_9", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].datetime() ? (@ < "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:34:00+01:00"), pt(ctx, "2017-03-10T12:35:00+02:00"), pt(ctx, "2017-03-10T10:35:00"), pt(ctx, "2017-03-10")}, - }, - { - test: "test_10", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz() ? 
(@ == "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_11", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz() ? (@ >= "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_12", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz() ? (@ < "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_13", - json: js(`["2017-03-10 12:34:00.123+01", "2017-03-10 12:35:00.123+01", "2017-03-10 12:36:00.1123+01", "2017-03-10 12:35:00.1123+02", "2017-03-10 12:35:00.123-02", "2017-03-10 10:35:00.123", "2017-03-10 11:35:00.1", "2017-03-10 12:35:00.123", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz(2) ? (@ >= "2017-03-10 12:35:00.123+01".timestamp_tz(2))`, // pg" 2017-03-10 12:35:00.123 +1 - err: `exec: cannot convert value from timestamp to timestamptz without time zone usage.` + tzHint, - }, - { - test: "test_14", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz() ? (@ == "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10T11:35:00+00:00")}, - }, - { - test: "test_15", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz() ? (@ >= "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00+01:00"), pt(ctx, "2017-03-10T12:36:00+01:00"), pt(ctx, "2017-03-10T12:35:00-02:00"), pt(ctx, "2017-03-10T11:35:00+00:00"), pt(ctx, "2017-03-10T12:35:00+00:00"), pt(ctx, "2017-03-11T00:00:00+00:00")}, - }, - { - test: "test_16", - json: js(`["2017-03-10 12:34:00+01", "2017-03-10 12:35:00+01", "2017-03-10 12:36:00+01", "2017-03-10 12:35:00+02", "2017-03-10 12:35:00-02", "2017-03-10 10:35:00", "2017-03-10 11:35:00", "2017-03-10 12:35:00", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz() ? 
(@ < "2017-03-10 12:35:00+01".timestamp_tz())`, // pg: 2017-03-10 12:35:00 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:34:00+01:00"), pt(ctx, "2017-03-10T12:35:00+02:00"), pt(ctx, "2017-03-10T10:35:00+00:00"), pt(ctx, "2017-03-10T00:00:00+00:00")}, - }, - { - test: "test_17", - json: js(`["2017-03-10 12:34:00.123+01", "2017-03-10 12:35:00.123+01", "2017-03-10 12:36:00.1123+01", "2017-03-10 12:35:00.1123+02", "2017-03-10 12:35:00.123-02", "2017-03-10 10:35:00.123", "2017-03-10 11:35:00.1", "2017-03-10 12:35:00.123", "2017-03-10", "2017-03-11"]`), - path: `$[*].timestamp_tz(2) ? (@ >= "2017-03-10 12:35:00.123+01".timestamp_tz(2))`, // pg: 2017-03-10 12:35:00.123 +1 - opt: []Option{WithTZ()}, - exp: []any{pt(ctx, "2017-03-10T12:35:00.12+01:00"), pt(ctx, "2017-03-10T12:36:00.11+01:00"), pt(ctx, "2017-03-10T12:35:00.12-02:00"), pt(ctx, "2017-03-10T12:35:00.12+00:00"), pt(ctx, "2017-03-11T00:00:00+00:00")}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryComparisonOverflow(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1080-L1081 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`"1000000-01-01"`), - path: `$.datetime() > "2020-01-01 12:00:00".datetime()`, - exp: []any{nil}, // pg: returns true, because it handles years 9999 but Go does not - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgQueryOperators(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1087-L1097 - for _, tc := range []queryTestCase{ - { - test: "test_1", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*]`, - exp: []any{js(`{"a": 1}`), js(`{"a": 2}`)}, - }, - { - test: "test_2", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*] ? (@.a > 10)`, - exp: []any{}, - }, - { - test: "test_3", - json: js(`[{"a": 1}]`), - path: `$undefined_var`, - err: `exec: could not find jsonpath variable "undefined_var"`, - }, - // pg: tests 4-10 use jsonb_path_query_array but our Query() always - // returns a slice. - { - test: "test_4", - json: js(`[{"a": 1}]`), - path: `false`, - exp: []any{false}, - }, - { - test: "test_5", - json: js(`[{"a": 1}, {"a": 2}, {}]`), - path: `strict $[*].a`, - err: `exec: JSON object does not contain key "a"`, - }, - { - test: "test_6", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a`, - exp: []any{float64(1), float64(2)}, - }, - { - test: "test_7", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a ? (@ == 1)`, - exp: []any{float64(1)}, - }, - { - test: "test_8", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a ? (@ > 10)`, - exp: []any{}, - }, - { - test: "test_9", - json: js(`[{"a": 1}, {"a": 2}, {"a": 3}, {"a": 5}]`), - path: `$[*].a ? (@ > $min && @ < $max)`, - opt: []Option{WithVars(jv(`{"min": 1, "max": 4}`))}, - exp: []any{float64(2), float64(3)}, - }, - { - test: "test_10", - json: js(`[{"a": 1}, {"a": 2}, {"a": 3}, {"a": 5}]`), - path: `$[*].a ? 
(@ > $min && @ < $max)`, - opt: []Option{WithVars(jv(`{"min": 3, "max": 4}`))}, - exp: []any{}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgFirst(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1099-L1107 - for _, tc := range []firstTestCase{ - { - test: "test_1", - json: js(`[{"a": 1}, {"a": 2}, {}]`), - path: `strict $[*].a`, - err: `exec: JSON object does not contain key "a"`, - }, - { - test: "test_2", - json: js(`[{"a": 1}, {"a": 2}, {}]`), - path: `strict $[*].a`, - opt: []Option{WithSilent()}, - exp: float64(1), - }, - { - test: "test_3", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a`, - exp: float64(1), - }, - { - test: "test_4", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a ? (@ == 1)`, - exp: float64(1), - }, - { - test: "test_5", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a ? (@ > 10)`, - exp: nil, - }, - { - test: "test_6", - json: js(`[{"a": 1}, {"a": 2}, {"a": 3}, {"a": 5}]`), - path: `$[*].a ? (@ > $min && @ < $max)`, - opt: []Option{WithVars(jv(`{"min": 1, "max": 4}`))}, - exp: float64(2), - }, - { - test: "test_7", - json: js(`[{"a": 1}, {"a": 2}, {"a": 3}, {"a": 5}]`), - path: `$[*].a ? (@ > $min && @ < $max)`, - opt: []Option{WithVars(jv(`{"min": 3, "max": 4}`))}, - exp: nil, - }, - { - test: "test_8", - json: js(`[{"a": 1}]`), - path: `$undefined_var`, - err: `exec: could not find jsonpath variable "undefined_var"`, - }, - { - test: "test_9", - json: js(`[{"a": 1}]`), - path: `false`, - exp: false, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtQuestionOperators(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1109-L1110 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`[{"a": 1}, {"a": 2}]`), - path: "$[*].a ? (@ > 1)", - exp: true, - }, - { - test: "test_2", - json: js(`[{"a": 1}, {"a": 2}]`), - path: "$[*] ? (@.a > 2)", - exp: false, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtQuestion(ctx, a, r) - }) - } -} - -func TestPgExistsOperators(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1111-L1115 - for _, tc := range []existsTestCase{ - { - test: "test_1", - json: js(`[{"a": 1}, {"a": 2}]`), - path: "$[*].a ? (@ > 1)", - exp: true, - }, - { - test: "test_2", - json: js(`[{"a": 1}, {"a": 2}, {"a": 3}, {"a": 5}]`), - path: "$[*] ? (@.a > $min && @.a < $max)", - opt: []Option{WithVars(jv(`{"min": 1, "max": 4}`))}, - exp: true, - }, - { - test: "test_3", - json: js(`[{"a": 1}, {"a": 2}, {"a": 3}, {"a": 5}]`), - path: "$[*] ? 
(@.a > $min && @.a < $max)", - opt: []Option{WithVars(jv(`{"min": 3, "max": 4}`))}, - exp: false, - }, - { - test: "test_4", - json: js(`[{"a": 1}]`), - path: "$undefined_var", - err: `exec: could not find jsonpath variable "undefined_var"`, - }, - { - test: "test_5", - json: js(`[{"a": 1}]`), - path: "false", - exp: true, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgMatchOperators(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1117-L1133 - for _, tc := range []matchTestCase{ - { - test: "test_1", - json: js(`true`), - path: `$`, - opt: []Option{}, - exp: true, - }, - { - test: "test_2", - json: js(`false`), - path: `$`, - opt: []Option{}, - exp: false, - }, - { - test: "test_3", - json: js(`null`), - path: `$`, - opt: []Option{}, - exp: nil, - }, - { - test: "test_4", - json: js(`1`), - path: `$`, - opt: []Option{WithSilent()}, - exp: nil, - }, - { - test: "test_5", - json: js(`1`), - path: `$`, - opt: []Option{}, - err: `exec: single boolean result is expected`, - }, - { - test: "test_6", - json: js(`"a"`), - path: `$`, - opt: []Option{}, - err: `exec: single boolean result is expected`, - }, - { - test: "test_7", - json: js(`{}`), - path: `$`, - opt: []Option{}, - err: `exec: single boolean result is expected`, - }, - { - test: "test_8", - json: js(`[true]`), - path: `$`, - opt: []Option{}, - err: `exec: single boolean result is expected`, - }, - { - test: "test_9", - json: js(`{}`), - path: `lax $.a`, - opt: []Option{}, - err: `exec: single boolean result is expected`, - }, - { - test: "test_10", - json: js(`{}`), - path: `strict $.a`, - opt: []Option{}, - err: `exec: JSON object does not contain key "a"`, - }, - { - test: "test_11", - json: js(`{}`), - path: `strict $.a`, - opt: []Option{WithSilent()}, - exp: nil, - }, - { - test: "test_12", - json: js(`[true, true]`), - path: `$[*]`, - opt: []Option{}, - err: `exec: single boolean result is expected`, - }, - // Tests 13 & 14 in TestPgAtAtOperators below. 
- { - test: "test_15", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a > 1`, - exp: true, - }, - { - test: "test_16", - json: js(`[{"a": 1}]`), - path: `$undefined_var`, - err: `exec: could not find jsonpath variable "undefined_var"`, - }, - { - test: "test_17", - json: js(`[{"a": 1}]`), - path: `false`, - exp: false, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.run(ctx, a, r) - }) - } -} - -func TestPgAtAtOperators(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1129-L1130 - for _, tc := range []matchTestCase{ - { - test: "test_13", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a > 1`, - exp: true, - }, - { - test: "test_14", - json: js(`[{"a": 1}, {"a": 2}]`), - path: `$[*].a > 2`, - exp: false, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - tc.runAtAt(ctx, a, r) - }) - } -} - -func TestPgFirstStringComparison(t *testing.T) { - t.Parallel() - ctx := context.Background() - - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonb_jsonpath.sql#L1135-L1149 - i := 0 - for _, tc := range []struct { - obj1 map[string]any - obj2 map[string]any - lt bool - le bool - eq bool - ge bool - gt bool - }{ - // Table copied from https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/expected/jsonb_jsonpath.out#L4367-L4510 - {jv(`{"s": ""}`), jv(`{"s": ""}`), false, true, true, true, false}, - {jv(`{"s": ""}`), jv(`{"s": "a"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "A"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "AB"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "ABC"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "ABc"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "ABcD"}`), true, true, false, false, false}, - {jv(`{"s": ""}`), jv(`{"s": "B"}`), true, true, false, false, false}, - {jv(`{"s": "a"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "a"}`), jv(`{"s": "a"}`), false, true, true, true, false}, - {jv(`{"s": "a"}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": "a"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "a"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "a"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "a"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "a"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "a"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "a"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "a"}`), jv(`{"s": "ABcD"}`), false, false, false, true, true}, - {jv(`{"s": "a"}`), jv(`{"s": "B"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "a"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "ab"}`), false, true, true, true, false}, - {jv(`{"s": "ab"}`), jv(`{"s": 
"abc"}`), true, true, false, false, false}, - {jv(`{"s": "ab"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "ab"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "ab"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "ABcD"}`), false, false, false, true, true}, - {jv(`{"s": "ab"}`), jv(`{"s": "B"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "a"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "ab"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "abc"}`), false, true, true, true, false}, - {jv(`{"s": "abc"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "abc"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "abc"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "ABcD"}`), false, false, false, true, true}, - {jv(`{"s": "abc"}`), jv(`{"s": "B"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "a"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "ab"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "abc"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "abcd"}`), false, true, true, true, false}, - {jv(`{"s": "abcd"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "abcd"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "ABcD"}`), false, false, false, true, true}, - {jv(`{"s": "abcd"}`), jv(`{"s": "B"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "a"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "ab"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "abc"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "abcd"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "b"}`), false, true, true, true, false}, - {jv(`{"s": "b"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "ABcD"}`), false, false, false, true, true}, - {jv(`{"s": "b"}`), jv(`{"s": "B"}`), false, false, false, true, true}, - {jv(`{"s": "A"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "A"}`), jv(`{"s": "a"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": 
"ab"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "A"}`), false, true, true, true, false}, - {jv(`{"s": "A"}`), jv(`{"s": "AB"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "ABC"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "ABc"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "ABcD"}`), true, true, false, false, false}, - {jv(`{"s": "A"}`), jv(`{"s": "B"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "AB"}`), jv(`{"s": "a"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "AB"}`), jv(`{"s": "AB"}`), false, true, true, true, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "ABC"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "ABc"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "ABcD"}`), true, true, false, false, false}, - {jv(`{"s": "AB"}`), jv(`{"s": "B"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "ABC"}`), jv(`{"s": "a"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "ABC"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "ABC"}`), jv(`{"s": "ABC"}`), false, true, true, true, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "ABc"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "ABcD"}`), true, true, false, false, false}, - {jv(`{"s": "ABC"}`), jv(`{"s": "B"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "ABc"}`), jv(`{"s": "a"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "ABc"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "ABc"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "ABc"}`), jv(`{"s": "ABc"}`), false, true, true, true, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "ABcD"}`), true, true, false, false, false}, - {jv(`{"s": "ABc"}`), jv(`{"s": "B"}`), true, true, false, false, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "a"}`), 
true, true, false, false, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "ABcD"}`), false, true, true, true, false}, - {jv(`{"s": "ABcD"}`), jv(`{"s": "B"}`), true, true, false, false, false}, - {jv(`{"s": "B"}`), jv(`{"s": ""}`), false, false, false, true, true}, - {jv(`{"s": "B"}`), jv(`{"s": "a"}`), true, true, false, false, false}, - {jv(`{"s": "B"}`), jv(`{"s": "ab"}`), true, true, false, false, false}, - {jv(`{"s": "B"}`), jv(`{"s": "abc"}`), true, true, false, false, false}, - {jv(`{"s": "B"}`), jv(`{"s": "abcd"}`), true, true, false, false, false}, - {jv(`{"s": "B"}`), jv(`{"s": "b"}`), true, true, false, false, false}, - {jv(`{"s": "B"}`), jv(`{"s": "A"}`), false, false, false, true, true}, - {jv(`{"s": "B"}`), jv(`{"s": "AB"}`), false, false, false, true, true}, - {jv(`{"s": "B"}`), jv(`{"s": "ABC"}`), false, false, false, true, true}, - {jv(`{"s": "B"}`), jv(`{"s": "ABc"}`), false, false, false, true, true}, - {jv(`{"s": "B"}`), jv(`{"s": "ABcD"}`), false, false, false, true, true}, - {jv(`{"s": "B"}`), jv(`{"s": "B"}`), false, true, true, true, false}, - } { - for _, opCase := range []struct { - op string - exp bool - }{ - {"<", tc.lt}, - {"<=", tc.le}, - {"==", tc.eq}, - {">", tc.gt}, - {">=", tc.ge}, - } { - i++ - t.Run(fmt.Sprintf("test_%v", i), func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - firstTestCase{ - json: tc.obj1, - path: "$.s" + opCase.op + " $s", - opt: []Option{WithVars(Vars(tc.obj2))}, - exp: opCase.exp, - }.run(ctx, a, r) - }) - } - } -} diff --git a/path/exec/predicate.go b/path/exec/predicate.go deleted file mode 100644 index 82db1df..0000000 --- a/path/exec/predicate.go +++ /dev/null @@ -1,115 +0,0 @@ -package exec - -import ( - "context" - - "github.com/theory/sqljson/path/ast" -) - -// predOutcome represents the result of jsonpath predicate evaluation. -type predOutcome uint8 - -const ( - predFalse predOutcome = iota - predTrue - predUnknown -) - -// String prints a string representation of p. Used for debugging. -func (p predOutcome) String() string { - switch p { - case predFalse: - return "FALSE" - case predTrue: - return "TRUE" - case predUnknown: - return "UNKNOWN" - default: - return "UNKNOWN_PREDICATE_OUTCOME" - } -} - -// predFrom converts book to a predOutcome, returning predTrue if ok is true -// and predFalse if ok is false. -func predFrom(ok bool) predOutcome { - if ok { - return predTrue - } - return predFalse -} - -// predicateCallback defines the interface to carry out a specific type of -// predicate comparison. -type predicateCallback func(ctx context.Context, node ast.Node, left, right any) (predOutcome, error) - -// executePredicate executes a unary or binary predicate. -// -// Predicates have existence semantics, because their operands are item -// sequences. Pairs of items from the left and right operand's sequences are -// checked. Returns true only if any pair satisfying the condition is found. 
-// In strict mode, even if the desired pair has already been found, all pairs - still need to be examined to check the absence of errors. Returns - predUnknown (analogous to SQL NULL) if any error occurs. -func (exec *Executor) executePredicate( - ctx context.Context, - pred, left, right ast.Node, - value any, - unwrapRightArg bool, - callback predicateCallback, -) (predOutcome, error) { - hasErr := false - found := false - - // Left argument is always auto-unwrapped. - lSeq := newList() - res, err := exec.executeItemOptUnwrapResultSilent(ctx, left, value, true, lSeq) - if res == statusFailed { - return predUnknown, err - } - - rSeq := newList() - if right != nil { - // Right argument is conditionally auto-unwrapped. - res, err := exec.executeItemOptUnwrapResultSilent(ctx, right, value, unwrapRightArg, rSeq) - if res == statusFailed { - return predUnknown, err - } - } else { - // Right arg is nil. - rSeq.append(nil) - } - - for _, lVal := range lSeq.list { - // Loop over right arg sequence. - for _, rVal := range rSeq.list { - res, err := callback(ctx, pred, lVal, rVal) - if err != nil { - return predUnknown, err - } - switch res { - case predUnknown: - if exec.strictAbsenceOfErrors() { - return predUnknown, nil - } - hasErr = true - case predTrue: - if !exec.strictAbsenceOfErrors() { - return predTrue, nil - } - found = true - case predFalse: - // Do nothing - } - } - } - - if found { // possible only in strict mode - return predTrue, nil - } - - if hasErr { // possible only in lax mode - return predUnknown, nil - } - - return predFalse, nil -} diff --git a/path/exec/predicate_test.go b/path/exec/predicate_test.go deleted file mode 100644 index 0587b12..0000000 --- a/path/exec/predicate_test.go +++ /dev/null @@ -1,166 +0,0 @@ -package exec - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" -) - -func TestPredOutcome(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - out predOutcome - }{ - {"FALSE", predFalse}, - {"TRUE", predTrue}, - {"UNKNOWN", predUnknown}, - {"UNKNOWN_PREDICATE_OUTCOME", predOutcome(255)}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(tc.test, tc.out.String()) - }) - } - - t.Run("predFrom", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(predTrue, predFrom(true)) - a.Equal(predFalse, predFrom(false)) - }) -} - -func TestPredicateCallback(t *testing.T) { - t.Parallel() - a := assert.New(t) - - e := newTestExecutor(laxRootPath, nil, true, false) - a.IsType((predicateCallback)(nil), predicateCallback(e.compareItems)) - a.IsType((predicateCallback)(nil), predicateCallback(executeStartsWith)) - a.IsType((predicateCallback)(nil), predicateCallback(e.executeLikeRegex)) -} - -func TestExecutePredicate(t *testing.T) { - t.Parallel() - ctx := context.Background() - rx, _ := ast.NewRegex(ast.NewConst(ast.ConstRoot), ".", "") - - for _, tc := range []struct { - test string - path *ast.AST - pred ast.Node - left ast.Node - right ast.Node - value any - unwrap bool - callback func(e *Executor) predicateCallback - exp predOutcome - err string - isErr error - }{ - { - test: "left_unknown", - path: laxRootPath, - left: ast.NewMethod(ast.MethodBigInt), - value: "hi", - callback: func(_ *Executor) predicateCallback { return executeStartsWith }, - exp: predUnknown, - }, - { - test: "right_unknown", - path: laxRootPath, - left: ast.NewInteger("42"), - right: 
ast.NewMethod(ast.MethodBigInt), - value: "hi", - callback: func(_ *Executor) predicateCallback { return executeStartsWith }, - exp: predUnknown, - }, - { - test: "left_and_right_compare", - path: laxRootPath, - pred: ast.NewBinary(ast.BinaryEqual, nil, nil), - left: ast.NewInteger("42"), - right: ast.NewInteger("42"), - callback: func(e *Executor) predicateCallback { return e.compareItems }, - exp: predTrue, - }, - { - test: "left_and_right_no_compare", - path: laxRootPath, - pred: ast.NewBinary(ast.BinaryEqual, nil, nil), - left: ast.NewInteger("42"), - right: ast.NewInteger("43"), - callback: func(e *Executor) predicateCallback { return e.compareItems }, - exp: predFalse, - }, - { - test: "left_only_regex", - path: laxRootPath, - pred: rx, - left: ast.NewString("hi"), - callback: func(e *Executor) predicateCallback { return e.executeLikeRegex }, - exp: predTrue, - }, - { - test: "compare_error", - path: laxRootPath, - pred: rx, - left: ast.NewString("hi"), - callback: func(e *Executor) predicateCallback { return e.compareItems }, - exp: predUnknown, - err: `exec invalid: invalid node type *ast.RegexNode passed to compareItems`, - isErr: ErrInvalid, - }, - { - test: "unknown_strict", - path: strictRootPath, - pred: rx, - left: ast.NewInteger("42"), - callback: func(e *Executor) predicateCallback { return e.executeLikeRegex }, - exp: predUnknown, - }, - { - test: "unknown_lax", - path: laxRootPath, - pred: rx, - left: ast.NewInteger("42"), - callback: func(e *Executor) predicateCallback { return e.executeLikeRegex }, - exp: predUnknown, - }, - { - test: "found_strict", - path: strictRootPath, - pred: rx, - left: ast.NewString("hi"), - callback: func(e *Executor) predicateCallback { return e.executeLikeRegex }, - exp: predTrue, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - e := newTestExecutor(tc.path, nil, true, false) - cb := tc.callback(e) - res, err := e.executePredicate(ctx, tc.pred, tc.left, tc.right, tc.value, tc.unwrap, cb) - a.Equal(tc.exp, res) - - // Check the error and list. - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} diff --git a/path/exec/util.go b/path/exec/util.go deleted file mode 100644 index a4a1142..0000000 --- a/path/exec/util.go +++ /dev/null @@ -1,98 +0,0 @@ -package exec - -import ( - "encoding/json" - "fmt" - "math" - - "github.com/theory/sqljson/path/ast" -) - -// castJSONNumber casts num to an int64 (preferably) or to a float64, -// passing the result through intCallback or floatCallback, respectively. -// Returns false if num cannot be parsed into an int64 or float64. -func castJSONNumber(num json.Number, intCallback intCallback, floatCallback floatCallback) (any, bool) { - if integer, err := num.Int64(); err == nil { - return intCallback(integer), true - } else if float, err := num.Float64(); err == nil { - return floatCallback(float), true - } - - return nil, false -} - -// getNodeInt32 extracts an int32 from node and returns it. Returns an error -// if node is not an *ast.IntegerNode or its value is out of int32 range. The -// meth and field params are used in error messages. 
-func getNodeInt32(node ast.Node, meth any, field string) (int, error) { - var num int64 - switch node := node.(type) { - case *ast.IntegerNode: - num = node.Int() - default: - return 0, fmt.Errorf( - "%w: invalid jsonpath item type for %v %v", - ErrExecution, meth, field, - ) - } - - if num > math.MaxInt32 || num < math.MinInt32 { - return 0, fmt.Errorf( - "%w: %v of jsonpath item method %v is out of integer range", - ErrVerbose, field, meth, - ) - } - - return int(num), nil -} - -// getJSONInt32 casts val to int32 and returns it. If val is a float, its -// value will be truncated, not rounded. The op param is used in error -// messages. -func getJSONInt32(val any, op string) (int, error) { - var num int64 - switch val := val.(type) { - case int64: - num = val - case float64: - if math.IsInf(val, 0) || math.IsNaN(val) { - return 0, fmt.Errorf( - "%w: NaN or Infinity is not allowed for jsonpath %v", - ErrVerbose, op, - ) - } - num = int64(val) - case json.Number: - if integer, err := val.Int64(); err == nil { - num = integer - } else if float, err := val.Float64(); err == nil { - if math.IsInf(float, 0) || math.IsNaN(float) { - return 0, fmt.Errorf( - "%w: NaN or Infinity is not allowed for jsonpath %v", - ErrVerbose, op, - ) - } - num = int64(float) - } else { - // json.Number should never be invalid. - return 0, fmt.Errorf( - "%w: jsonpath %v is not a single numeric value", - ErrInvalid, op, - ) - } - default: - return 0, fmt.Errorf( - "%w: jsonpath %v is not a single numeric value", - ErrVerbose, op, - ) - } - - if num > math.MaxInt32 || num < math.MinInt32 { - return 0, fmt.Errorf( - "%w: jsonpath %v is out of integer range", - ErrVerbose, op, - ) - } - - return int(num), nil -} diff --git a/path/exec/util_test.go b/path/exec/util_test.go deleted file mode 100644 index 0d0c65c..0000000 --- a/path/exec/util_test.go +++ /dev/null @@ -1,255 +0,0 @@ -package exec - -import ( - "encoding/json" - "math" - "strconv" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" -) - -func TestCastJSONNumber(t *testing.T) { - t.Parallel() - - doubleInt := func(i int64) int64 { return i * 2 } - doubleFloat := func(i float64) float64 { return i * 2 } - - for _, tc := range []struct { - test string - num json.Number - exp any - ok bool - }{ - { - test: "int", - num: json.Number("42"), - exp: doubleInt(42), - ok: true, - }, - { - test: "float", - num: json.Number("98.6"), - exp: doubleFloat(98.6), - ok: true, - }, - { - test: "nan", - num: json.Number("foo"), - ok: false, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - val, ok := castJSONNumber(tc.num, doubleInt, doubleFloat) - a.Equal(tc.exp, val) - a.Equal(ok, tc.ok) - }) - } -} - -func TestGetNodeInt32(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - node ast.Node - meth string - field string - exp int - err string - isErr error - }{ - { - test: "int", - node: ast.NewInteger("42"), - exp: 42, - }, - { - test: "numeric", - node: ast.NewNumeric("98.6"), - meth: ".hi()", - field: "xxx", - err: `exec: invalid jsonpath item type for .hi() xxx`, - isErr: ErrExecution, - }, - { - test: "string", - node: ast.NewString("foo"), - meth: ".hi()", - field: "xxx", - err: `exec: invalid jsonpath item type for .hi() xxx`, - isErr: ErrExecution, - }, - { - test: "too_big", - node: ast.NewInteger(strconv.FormatInt(int64(math.MaxInt32+1), 10)), - meth: ".go()", - field: "aaa", - err: `exec: aaa of jsonpath item method .go() 
is out of integer range`, - isErr: ErrExecution, - }, - { - test: "too_small", - node: ast.NewInteger(strconv.FormatInt(int64(math.MinInt32-1), 10)), - meth: ".go()", - field: "aaa", - err: `exec: aaa of jsonpath item method .go() is out of integer range`, - isErr: ErrExecution, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - val, err := getNodeInt32(tc.node, tc.meth, tc.field) - a.Equal(tc.exp, val) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} - -func TestGetJSONInt32(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - val any - op string - exp int - err string - isErr error - }{ - { - test: "int", - val: int64(42), - exp: 42, - }, - { - test: "float", - val: float64(42), - exp: 42, - }, - { - test: "float_trunc_2", - val: float64(42.2), - exp: 42, - }, - { - test: "float_trunc_5", - val: float64(42.5), - exp: 42, - }, - { - test: "float_trunc_9", - val: float64(42.9), - exp: 42, - }, - { - test: "json_num_int", - val: json.Number("99"), - exp: 99, - }, - { - test: "json_num_float", - val: json.Number("99.0"), - exp: 99, - }, - { - test: "json_num_float_trunc_2", - val: json.Number("99.2"), - exp: 99, - }, - { - test: "json_num_float_trunc_5", - val: json.Number("99.5"), - exp: 99, - }, - { - test: "json_num_float_trunc_9", - val: json.Number("99.999"), - exp: 99, - }, - { - test: "float_nan", - val: math.NaN(), - op: "myThing", - err: `exec: NaN or Infinity is not allowed for jsonpath myThing`, - isErr: ErrVerbose, - }, - { - test: "float_inf", - val: math.Inf(1), - op: "myThing", - err: `exec: NaN or Infinity is not allowed for jsonpath myThing`, - isErr: ErrVerbose, - }, - { - test: "json_invalid", - val: json.Number("oof"), - op: "oof", - err: `exec invalid: jsonpath oof is not a single numeric value`, - isErr: ErrInvalid, - }, - { - test: "json_nan", - val: json.Number("nan"), - op: "xyz", - err: `exec: NaN or Infinity is not allowed for jsonpath xyz`, - isErr: ErrVerbose, - }, - { - test: "json_inf", - val: json.Number("-inf"), - op: "xyz", - err: `exec: NaN or Infinity is not allowed for jsonpath xyz`, - isErr: ErrVerbose, - }, - { - test: "string", - val: "hi", - op: "xxx", - err: `exec: jsonpath xxx is not a single numeric value`, - isErr: ErrVerbose, - }, - { - test: "too_big", - val: int64(math.MaxInt32 + 1), - op: "max", - err: `exec: jsonpath max is out of integer range`, - isErr: ErrVerbose, - }, - { - test: "too_small", - val: int64(math.MinInt32 - 1), - op: "max", - err: `exec: jsonpath max is out of integer range`, - isErr: ErrVerbose, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - val, err := getJSONInt32(tc.val, tc.op) - a.Equal(tc.exp, val) - if tc.isErr == nil { - r.NoError(err) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, tc.isErr) - } - }) - } -} diff --git a/path/parser/grammar.go b/path/parser/grammar.go deleted file mode 100644 index ec8020b..0000000 --- a/path/parser/grammar.go +++ /dev/null @@ -1,1333 +0,0 @@ -// Code generated by goyacc -v -o grammar.go -p path grammar.y. DO NOT EDIT. - -//line grammar.y:2 -/*------------------------------------------------------------------------- - * - * grammar.y - * Grammar definitions for jsonpath datatype - * - * Transforms tokenized jsonpath into tree of JsonPathParseItem structs. 
- * - * Copyright (c) 2019-2024, PostgreSQL Global Development Group - * - * IDENTIFICATION - * https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/adt/jsonpath_gram.y;hb=HEAD - * - *------------------------------------------------------------------------- - */ - -package parser - -import __yyfmt__ "fmt" - -//line grammar.y:17 - -import ( - "strconv" - - "github.com/theory/sqljson/path/ast" -) - -//line grammar.y:26 -type pathSymType struct { - yys int - str string - elems []ast.Node - indexs []ast.Node - value ast.Node - optype ast.BinaryOperator - method *ast.MethodNode - boolean bool - integer int -} - -const TO_P = 57346 -const NULL_P = 57347 -const TRUE_P = 57348 -const FALSE_P = 57349 -const IS_P = 57350 -const UNKNOWN_P = 57351 -const EXISTS_P = 57352 -const IDENT_P = 57353 -const STRING_P = 57354 -const NUMERIC_P = 57355 -const INT_P = 57356 -const VARIABLE_P = 57357 -const OR_P = 57358 -const AND_P = 57359 -const NOT_P = 57360 -const LESS_P = 57361 -const LESSEQUAL_P = 57362 -const EQUAL_P = 57363 -const NOTEQUAL_P = 57364 -const GREATEREQUAL_P = 57365 -const GREATER_P = 57366 -const ANY_P = 57367 -const STRICT_P = 57368 -const LAX_P = 57369 -const LAST_P = 57370 -const STARTS_P = 57371 -const WITH_P = 57372 -const LIKE_REGEX_P = 57373 -const FLAG_P = 57374 -const ABS_P = 57375 -const SIZE_P = 57376 -const TYPE_P = 57377 -const FLOOR_P = 57378 -const DOUBLE_P = 57379 -const CEILING_P = 57380 -const KEYVALUE_P = 57381 -const DATETIME_P = 57382 -const BIGINT_P = 57383 -const BOOLEAN_P = 57384 -const DATE_P = 57385 -const DECIMAL_P = 57386 -const INTEGER_P = 57387 -const NUMBER_P = 57388 -const STRINGFUNC_P = 57389 -const TIME_P = 57390 -const TIME_TZ_P = 57391 -const TIMESTAMP_P = 57392 -const TIMESTAMP_TZ_P = 57393 -const UMINUS = 57394 - -var pathToknames = [...]string{ - "$end", - "error", - "$unk", - "TO_P", - "NULL_P", - "TRUE_P", - "FALSE_P", - "IS_P", - "UNKNOWN_P", - "EXISTS_P", - "IDENT_P", - "STRING_P", - "NUMERIC_P", - "INT_P", - "VARIABLE_P", - "OR_P", - "AND_P", - "NOT_P", - "LESS_P", - "LESSEQUAL_P", - "EQUAL_P", - "NOTEQUAL_P", - "GREATEREQUAL_P", - "GREATER_P", - "ANY_P", - "STRICT_P", - "LAX_P", - "LAST_P", - "STARTS_P", - "WITH_P", - "LIKE_REGEX_P", - "FLAG_P", - "ABS_P", - "SIZE_P", - "TYPE_P", - "FLOOR_P", - "DOUBLE_P", - "CEILING_P", - "KEYVALUE_P", - "DATETIME_P", - "BIGINT_P", - "BOOLEAN_P", - "DATE_P", - "DECIMAL_P", - "INTEGER_P", - "NUMBER_P", - "STRINGFUNC_P", - "TIME_P", - "TIME_TZ_P", - "TIMESTAMP_P", - "TIMESTAMP_TZ_P", - "'+'", - "'-'", - "'*'", - "'/'", - "'%'", - "UMINUS", - "'('", - "')'", - "'$'", - "'@'", - "','", - "'['", - "']'", - "'{'", - "'}'", - "'.'", - "'?'", -} - -var pathStatenames = [...]string{} - -const pathEofCode = 1 -const pathErrCode = 2 -const pathInitialStackSize = 16 - -//line grammar.y:331 - -var pathExca = [...]int16{ - -1, 1, - 1, -1, - -2, 0, - -1, 80, - 58, 123, - -2, 99, - -1, 81, - 58, 124, - -2, 100, - -1, 82, - 58, 125, - -2, 101, - -1, 83, - 58, 126, - -2, 102, - -1, 84, - 58, 127, - -2, 103, - -1, 85, - 58, 128, - -2, 104, - -1, 86, - 58, 129, - -2, 106, - -1, 87, - 58, 130, - -2, 112, - -1, 88, - 58, 131, - -2, 113, - -1, 89, - 58, 132, - -2, 116, - -1, 90, - 58, 133, - -2, 117, - -1, 91, - 58, 134, - -2, 118, -} - -const pathPrivate = 57344 - -const pathLast = 251 - -var pathAct = [...]uint8{ - 160, 146, 65, 111, 154, 6, 137, 180, 133, 7, - 134, 132, 49, 50, 52, 43, 47, 130, 168, 48, - 44, 46, 177, 175, 30, 31, 32, 33, 34, 174, - 56, 141, 173, 59, 60, 61, 62, 63, 42, 41, - 172, 171, 167, 
37, 39, 35, 36, 40, 38, 150, - 112, 64, 66, 28, 49, 29, 143, 129, 117, 128, - 127, 115, 126, 125, 116, 94, 95, 96, 97, 98, - 99, 100, 92, 93, 42, 41, 30, 31, 32, 33, - 34, 163, 121, 114, 176, 124, 79, 101, 102, 103, - 104, 105, 106, 107, 80, 81, 82, 83, 84, 85, - 86, 73, 87, 88, 72, 71, 89, 90, 91, 74, - 75, 76, 77, 15, 123, 68, 147, 140, 131, 57, - 122, 138, 32, 33, 34, 136, 41, 108, 42, 41, - 42, 41, 157, 158, 159, 55, 112, 164, 165, 21, - 22, 23, 3, 4, 15, 161, 20, 24, 25, 26, - 119, 170, 13, 120, 148, 149, 21, 22, 23, 162, - 169, 54, 19, 20, 24, 25, 26, 21, 22, 23, - 178, 139, 156, 113, 20, 24, 25, 26, 179, 19, - 47, 42, 41, 153, 44, 46, 10, 11, 135, 166, - 19, 142, 9, 12, 17, 18, 37, 39, 35, 36, - 40, 38, 58, 10, 11, 109, 28, 53, 29, 51, - 78, 17, 18, 2, 10, 11, 70, 27, 110, 144, - 51, 145, 17, 18, 30, 31, 32, 33, 34, 30, - 31, 32, 33, 34, 8, 155, 30, 31, 32, 33, - 34, 151, 152, 5, 118, 67, 69, 45, 14, 16, - 1, -} - -var pathPact = [...]int16{ - 116, -1000, 134, -1000, -1000, -1000, 177, 165, -47, 134, - 162, 162, -1000, 103, -1000, 77, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, 162, 89, 190, - 162, 162, 162, 162, 162, -1000, -1000, -1000, -1000, -1000, - -1000, 134, 134, -1000, 61, -1000, 69, 151, 114, 24, - -1000, 134, -1000, -1000, 134, 162, 172, 138, 50, 68, - 68, -1000, -1000, -1000, -1000, 177, 109, -1000, -1000, -1000, - 62, 56, 27, 5, 4, 2, 1, -1, -1000, -48, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 134, -53, - -54, -1000, 184, 117, -47, 112, 58, -28, -1000, -1000, - -1000, 179, -3, 102, -10, 171, 158, 158, 158, 158, - 131, 22, -1000, 162, -1000, 162, 180, -1000, -1000, -47, - -1000, -1000, -1000, -1000, -17, -44, -1000, -1000, 146, 137, - -1000, -18, -1000, -1000, -19, -1000, -1000, -27, -30, -36, - 18, -1000, -1000, -1000, -1000, 172, -1000, -1000, 102, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, 131, -1000, -59, - -1000, -} - -var pathPgo = [...]uint8{ - 0, 250, 249, 248, 2, 247, 246, 6, 245, 9, - 193, 3, 244, 243, 242, 241, 1, 235, 4, 234, - 221, 219, 218, 217, 216, 213, 210, 0, -} - -var pathR1 = [...]int8{ - 0, 1, 13, 13, 25, 25, 25, 2, 2, 2, - 2, 2, 2, 2, 23, 23, 23, 23, 23, 23, - 10, 10, 9, 9, 9, 9, 9, 9, 9, 9, - 9, 12, 12, 3, 3, 3, 3, 19, 19, 19, - 19, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 11, 11, 22, 22, 5, 5, 27, 27, 6, 6, - 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 16, 16, 16, 20, 20, 21, - 21, 17, 18, 18, 14, 15, 15, 8, 26, 26, - 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, - 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, - 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, - 26, 26, 26, 24, 24, 24, 24, 24, 24, 24, - 24, 24, 24, 24, 24, -} - -var pathR2 = [...]int8{ - 0, 2, 1, 1, 1, 1, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 3, 4, 1, 3, 3, 3, 2, 5, 4, 3, - 5, 1, 1, 1, 1, 1, 1, 1, 4, 4, - 2, 1, 3, 2, 2, 3, 3, 3, 3, 3, - 1, 3, 1, 3, 3, 3, 1, 1, 1, 4, - 6, 2, 2, 1, 2, 4, 4, 5, 4, 5, - 5, 5, 5, 5, 1, 2, 2, 1, 3, 1, - 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, -} - -var pathChk = [...]int16{ - -1000, -1, -25, 26, 27, -13, -4, -9, -19, 58, - 52, 53, -10, 18, -3, 10, -2, 60, 61, 28, - 12, 5, 6, 7, 13, 14, 15, -23, 29, 31, - 52, 53, 54, 55, 56, 21, 22, 19, 24, 20, - 23, 17, 16, -7, 67, -5, 68, 63, -9, -4, - -4, 58, -4, -10, 58, 58, -4, 30, 12, -4, - -4, -4, 
-4, -4, -9, -4, -9, -8, 54, -6, - -24, 44, 43, 40, 48, 49, 50, 51, -26, 25, - 33, 34, 35, 36, 37, 38, 39, 41, 42, 45, - 46, 47, 11, 12, 4, 5, 6, 7, 8, 9, - 10, 26, 27, 28, 29, 30, 31, 32, 58, 54, - -22, -11, -4, 59, 59, -9, -9, -4, -12, 12, - 15, 32, 58, 58, 58, 58, 58, 58, 58, 58, - 65, -9, 64, 62, 64, 4, 8, -7, -7, 59, - 59, 59, 12, 59, -21, -20, -16, 14, 52, 53, - 59, -15, -14, 12, -18, -17, 14, -18, -18, -18, - -27, 14, 28, 59, -11, -4, 9, 59, 62, 14, - 14, 59, 59, 59, 59, 59, 66, 4, -16, -27, - 66, -} - -var pathDef = [...]int8{ - 6, -2, 0, 4, 5, 1, 2, 3, 41, 0, - 0, 0, 22, 0, 37, 0, 33, 34, 35, 36, - 7, 8, 9, 10, 11, 12, 13, 0, 0, 0, - 0, 0, 0, 0, 0, 14, 15, 16, 17, 18, - 19, 0, 0, 40, 0, 63, 0, 0, 0, 0, - 43, 0, 44, 26, 0, 0, 23, 0, 29, 45, - 46, 47, 48, 49, 24, 0, 25, 61, 62, 64, - 0, 115, 114, 105, 119, 120, 121, 122, 87, 58, - -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 88, 89, 90, 91, 92, 93, 94, 95, - 96, 97, 98, 107, 108, 109, 110, 111, 0, 0, - 0, 52, 50, 20, 42, 0, 0, 0, 28, 31, - 32, 0, 0, 80, 0, 86, 83, 83, 83, 83, - 0, 0, 54, 0, 55, 0, 0, 39, 38, 0, - 20, 21, 30, 65, 0, 79, 77, 74, 0, 0, - 68, 0, 85, 84, 0, 82, 81, 0, 0, 0, - 0, 56, 57, 66, 53, 51, 27, 67, 0, 75, - 76, 69, 70, 71, 72, 73, 59, 0, 78, 0, - 60, -} - -var pathTok1 = [...]int8{ - 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 60, 56, 3, 3, - 58, 59, 54, 52, 62, 53, 67, 55, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 68, 61, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 63, 3, 64, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 65, 3, 66, -} - -var pathTok2 = [...]int8{ - 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, - 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 57, -} - -var pathTok3 = [...]int8{ - 0, -} - -var pathErrorMessages = [...]struct { - state int - token int - msg string -}{} - - -/* parser for yacc output */ - -var ( - pathDebug = 0 - pathErrorVerbose = false -) - -type pathLexer interface { - Lex(lval *pathSymType) int - Error(s string) -} - -type pathParser interface { - Parse(pathLexer) int - Lookahead() int -} - -type pathParserImpl struct { - lval pathSymType - stack [pathInitialStackSize]pathSymType - char int -} - -func (p *pathParserImpl) Lookahead() int { - return p.char -} - -func pathNewParser() pathParser { - return &pathParserImpl{} -} - -const pathFlag = -1000 - -func pathTokname(c int) string { - if c >= 1 && c-1 < len(pathToknames) { - if pathToknames[c-1] != "" { - return pathToknames[c-1] - } - } - return __yyfmt__.Sprintf("tok-%v", c) -} - -func pathStatname(s int) string { - if s >= 0 && s < len(pathStatenames) { - if pathStatenames[s] != "" { - return pathStatenames[s] - } - } - return __yyfmt__.Sprintf("state-%v", s) -} - -func pathErrorMessage(state, lookAhead int) string { - const TOKSTART = 4 - - if !pathErrorVerbose { - return "syntax error" - } - - for _, e := range pathErrorMessages { - if e.state == state && e.token == lookAhead { - return "syntax error: " + e.msg - } - } - - res := "syntax error: unexpected " + pathTokname(lookAhead) - - // To match Bison, suggest at most four expected tokens. - expected := make([]int, 0, 4) - - // Look for shiftable tokens. 
- base := int(pathPact[state]) - for tok := TOKSTART; tok-1 < len(pathToknames); tok++ { - if n := base + tok; n >= 0 && n < pathLast && int(pathChk[int(pathAct[n])]) == tok { - if len(expected) == cap(expected) { - return res - } - expected = append(expected, tok) - } - } - - if pathDef[state] == -2 { - i := 0 - for pathExca[i] != -1 || int(pathExca[i+1]) != state { - i += 2 - } - - // Look for tokens that we accept or reduce. - for i += 2; pathExca[i] >= 0; i += 2 { - tok := int(pathExca[i]) - if tok < TOKSTART || pathExca[i+1] == 0 { - continue - } - if len(expected) == cap(expected) { - return res - } - expected = append(expected, tok) - } - - // If the default action is to accept or reduce, give up. - if pathExca[i+1] != 0 { - return res - } - } - - for i, tok := range expected { - if i == 0 { - res += ", expecting " - } else { - res += " or " - } - res += pathTokname(tok) - } - return res -} - -func pathlex1(lex pathLexer, lval *pathSymType) (char, token int) { - token = 0 - char = lex.Lex(lval) - if char <= 0 { - token = int(pathTok1[0]) - goto out - } - if char < len(pathTok1) { - token = int(pathTok1[char]) - goto out - } - if char >= pathPrivate { - if char < pathPrivate+len(pathTok2) { - token = int(pathTok2[char-pathPrivate]) - goto out - } - } - for i := 0; i < len(pathTok3); i += 2 { - token = int(pathTok3[i+0]) - if token == char { - token = int(pathTok3[i+1]) - goto out - } - } - -out: - if token == 0 { - token = int(pathTok2[1]) /* unknown char */ - } - if pathDebug >= 3 { - __yyfmt__.Printf("lex %s(%d)\n", pathTokname(token), uint(char)) - } - return char, token -} - -func pathParse(pathlex pathLexer) int { - return pathNewParser().Parse(pathlex) -} - -func (pathrcvr *pathParserImpl) Parse(pathlex pathLexer) int { - var pathn int - var pathVAL pathSymType - var pathDollar []pathSymType - _ = pathDollar // silence set and not used - pathS := pathrcvr.stack[:] - - Nerrs := 0 /* number of errors */ - Errflag := 0 /* error recovery flag */ - pathstate := 0 - pathrcvr.char = -1 - pathtoken := -1 // pathrcvr.char translated into internal numbering - defer func() { - // Make sure we report no lookahead when not parsing. 
- pathstate = -1 - pathrcvr.char = -1 - pathtoken = -1 - }() - pathp := -1 - goto pathstack - -ret0: - return 0 - -ret1: - return 1 - -pathstack: - /* put a state and value onto the stack */ - if pathDebug >= 4 { - __yyfmt__.Printf("char %v in %v\n", pathTokname(pathtoken), pathStatname(pathstate)) - } - - pathp++ - if pathp >= len(pathS) { - nyys := make([]pathSymType, len(pathS)*2) - copy(nyys, pathS) - pathS = nyys - } - pathS[pathp] = pathVAL - pathS[pathp].yys = pathstate - -pathnewstate: - pathn = int(pathPact[pathstate]) - if pathn <= pathFlag { - goto pathdefault /* simple state */ - } - if pathrcvr.char < 0 { - pathrcvr.char, pathtoken = pathlex1(pathlex, &pathrcvr.lval) - } - pathn += pathtoken - if pathn < 0 || pathn >= pathLast { - goto pathdefault - } - pathn = int(pathAct[pathn]) - if int(pathChk[pathn]) == pathtoken { /* valid shift */ - pathrcvr.char = -1 - pathtoken = -1 - pathVAL = pathrcvr.lval - pathstate = pathn - if Errflag > 0 { - Errflag-- - } - goto pathstack - } - -pathdefault: - /* default state action */ - pathn = int(pathDef[pathstate]) - if pathn == -2 { - if pathrcvr.char < 0 { - pathrcvr.char, pathtoken = pathlex1(pathlex, &pathrcvr.lval) - } - - /* look through exception table */ - xi := 0 - for { - if pathExca[xi+0] == -1 && int(pathExca[xi+1]) == pathstate { - break - } - xi += 2 - } - for xi += 2; ; xi += 2 { - pathn = int(pathExca[xi+0]) - if pathn < 0 || pathn == pathtoken { - break - } - } - pathn = int(pathExca[xi+1]) - if pathn < 0 { - goto ret0 - } - } - if pathn == 0 { - /* error ... attempt to resume parsing */ - switch Errflag { - case 0: /* brand new error */ - pathlex.Error(pathErrorMessage(pathstate, pathtoken)) - Nerrs++ - if pathDebug >= 1 { - __yyfmt__.Printf("%s", pathStatname(pathstate)) - __yyfmt__.Printf(" saw %s\n", pathTokname(pathtoken)) - } - fallthrough - - case 1, 2: /* incompletely recovered error ... try again */ - Errflag = 3 - - /* find a state where "error" is a legal shift action */ - for pathp >= 0 { - pathn = int(pathPact[pathS[pathp].yys]) + pathErrCode - if pathn >= 0 && pathn < pathLast { - pathstate = int(pathAct[pathn]) /* simulate a shift of "error" */ - if int(pathChk[pathstate]) == pathErrCode { - goto pathstack - } - } - - /* the current p has no shift on "error", pop stack */ - if pathDebug >= 2 { - __yyfmt__.Printf("error recovery pops state %d\n", pathS[pathp].yys) - } - pathp-- - } - /* there is no state on the stack with an error shift ... abort */ - goto ret1 - - case 3: /* no shift yet; clobber input char */ - if pathDebug >= 2 { - __yyfmt__.Printf("error recovery discards %s\n", pathTokname(pathtoken)) - } - if pathtoken == pathEofCode { - goto ret1 - } - pathrcvr.char = -1 - pathtoken = -1 - goto pathnewstate /* try again in the same state */ - } - } - - /* reduction by production pathn */ - if pathDebug >= 2 { - __yyfmt__.Printf("reduce %v in:\n\t%v\n", pathn, pathStatname(pathstate)) - } - - pathnt := pathn - pathpt := pathp - _ = pathpt // guard against "declared and not used" - - pathp -= int(pathR2[pathn]) - // pathp is now the index of $0. Perform the default action. Iff the - // reduced production is Ξ΅, $1 is possibly out of range. 
- if pathp+1 >= len(pathS) { - nyys := make([]pathSymType, len(pathS)*2) - copy(nyys, pathS) - pathS = nyys - } - pathVAL = pathS[pathp+1] - - /* consult goto table to find next state */ - pathn = int(pathR1[pathn]) - pathg := int(pathPgo[pathn]) - pathj := pathg + pathS[pathp].yys + 1 - - if pathj >= pathLast { - pathstate = int(pathAct[pathg]) - } else { - pathstate = int(pathAct[pathj]) - if int(pathChk[pathstate]) != -pathn { - pathstate = int(pathAct[pathg]) - } - } - // dummy call; replaced with literal code - switch pathnt { - - case 1: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:81 - { - pathlex.(*lexer).setResult(pathDollar[1].boolean, pathDollar[2].value) - } - case 2: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:85 - { - pathVAL.value = pathDollar[1].value - } - case 3: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:86 - { - pathVAL.value = pathDollar[1].value - pathlex.(*lexer).setPred() - } - case 4: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:90 - { - pathVAL.boolean = false - } - case 5: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:91 - { - pathVAL.boolean = true - } - case 6: - pathDollar = pathS[pathpt-0 : pathpt+1] -//line grammar.y:92 - { - pathVAL.boolean = true - } - case 7: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:96 - { - pathVAL.value = ast.NewString(pathDollar[1].str) - } - case 8: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:97 - { - pathVAL.value = ast.NewConst(ast.ConstNull) - } - case 9: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:98 - { - pathVAL.value = ast.NewConst(ast.ConstTrue) - } - case 10: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:99 - { - pathVAL.value = ast.NewConst(ast.ConstFalse) - } - case 11: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:100 - { - pathVAL.value = ast.NewNumeric(pathDollar[1].str) - } - case 12: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:101 - { - pathVAL.value = ast.NewInteger(pathDollar[1].str) - } - case 13: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:102 - { - pathVAL.value = ast.NewVariable(pathDollar[1].str) - } - case 14: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:106 - { - pathVAL.optype = ast.BinaryEqual - } - case 15: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:107 - { - pathVAL.optype = ast.BinaryNotEqual - } - case 16: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:108 - { - pathVAL.optype = ast.BinaryLess - } - case 17: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:109 - { - pathVAL.optype = ast.BinaryGreater - } - case 18: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:110 - { - pathVAL.optype = ast.BinaryLessOrEqual - } - case 19: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:111 - { - pathVAL.optype = ast.BinaryGreaterOrEqual - } - case 20: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:115 - { - pathVAL.value = pathDollar[2].value - } - case 21: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:116 - { - pathVAL.value = ast.NewUnary(ast.UnaryExists, pathDollar[3].value) - } - case 22: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:120 - { - pathVAL.value = pathDollar[1].value - } - case 23: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:121 - { - pathVAL.value = ast.NewBinary(pathDollar[2].optype, pathDollar[1].value, pathDollar[3].value) - } - case 24: - pathDollar = pathS[pathpt-3 : pathpt+1] 
-//line grammar.y:122 - { - pathVAL.value = ast.NewBinary(ast.BinaryAnd, pathDollar[1].value, pathDollar[3].value) - } - case 25: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:123 - { - pathVAL.value = ast.NewBinary(ast.BinaryOr, pathDollar[1].value, pathDollar[3].value) - } - case 26: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:124 - { - pathVAL.value = ast.NewUnary(ast.UnaryNot, pathDollar[2].value) - } - case 27: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:126 - { - pathVAL.value = ast.NewUnary(ast.UnaryIsUnknown, pathDollar[2].value) - } - case 28: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:128 - { - pathVAL.value = ast.NewBinary(ast.BinaryStartsWith, pathDollar[1].value, pathDollar[4].value) - } - case 29: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:130 - { - var err error - pathVAL.value, err = ast.NewRegex(pathDollar[1].value, pathDollar[3].str, "") - if err != nil { - pathlex.Error(err.Error()) - } - } - case 30: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:138 - { - var err error - pathVAL.value, err = ast.NewRegex(pathDollar[1].value, pathDollar[3].str, pathDollar[5].str) - if err != nil { - pathlex.Error(err.Error()) - } - } - case 31: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:148 - { - pathVAL.value = ast.NewString(pathDollar[1].str) - } - case 32: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:149 - { - pathVAL.value = ast.NewVariable(pathDollar[1].str) - } - case 33: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:153 - { - pathVAL.value = pathDollar[1].value - } - case 34: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:154 - { - pathVAL.value = ast.NewConst(ast.ConstRoot) - } - case 35: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:155 - { - pathVAL.value = ast.NewConst(ast.ConstCurrent) - } - case 36: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:156 - { - pathVAL.value = ast.NewConst(ast.ConstLast) - } - case 37: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:160 - { - pathVAL.elems = []ast.Node{pathDollar[1].value} - } - case 38: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:161 - { - pathVAL.elems = []ast.Node{pathDollar[2].value, pathDollar[4].value} - } - case 39: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:162 - { - pathVAL.elems = []ast.Node{pathDollar[2].value, pathDollar[4].value} - } - case 40: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:163 - { - pathVAL.elems = append(pathVAL.elems, pathDollar[2].value) - } - case 41: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:167 - { - pathVAL.value = ast.LinkNodes(pathDollar[1].elems) - } - case 42: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:168 - { - pathVAL.value = pathDollar[2].value - } - case 43: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:169 - { - pathVAL.value = ast.NewUnaryOrNumber(ast.UnaryPlus, pathDollar[2].value) - } - case 44: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:170 - { - pathVAL.value = ast.NewUnaryOrNumber(ast.UnaryMinus, pathDollar[2].value) - } - case 45: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:171 - { - pathVAL.value = ast.NewBinary(ast.BinaryAdd, pathDollar[1].value, pathDollar[3].value) - } - case 46: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:172 - { - pathVAL.value = ast.NewBinary(ast.BinarySub, pathDollar[1].value, pathDollar[3].value) - } - case 47: - 
pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:173 - { - pathVAL.value = ast.NewBinary(ast.BinaryMul, pathDollar[1].value, pathDollar[3].value) - } - case 48: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:174 - { - pathVAL.value = ast.NewBinary(ast.BinaryDiv, pathDollar[1].value, pathDollar[3].value) - } - case 49: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:175 - { - pathVAL.value = ast.NewBinary(ast.BinaryMod, pathDollar[1].value, pathDollar[3].value) - } - case 50: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:179 - { - pathVAL.value = ast.NewBinary(ast.BinarySubscript, pathDollar[1].value, nil) - } - case 51: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:180 - { - pathVAL.value = ast.NewBinary(ast.BinarySubscript, pathDollar[1].value, pathDollar[3].value) - } - case 52: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:184 - { - pathVAL.indexs = []ast.Node{pathDollar[1].value} - } - case 53: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:185 - { - pathVAL.indexs = append(pathVAL.indexs, pathDollar[3].value) - } - case 54: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:189 - { - pathVAL.value = ast.NewConst(ast.ConstAnyArray) - } - case 55: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:190 - { - pathVAL.value = ast.NewArrayIndex(pathDollar[2].indexs) - } - case 56: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:194 - { - pathVAL.integer, _ = strconv.Atoi(pathDollar[1].str) - } - case 57: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:195 - { - pathVAL.integer = -1 - } - case 58: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:199 - { - pathVAL.value = ast.NewAny(0, -1) - } - case 59: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:200 - { - pathVAL.value = ast.NewAny(pathDollar[3].integer, pathDollar[3].integer) - } - case 60: - pathDollar = pathS[pathpt-6 : pathpt+1] -//line grammar.y:202 - { - pathVAL.value = ast.NewAny(pathDollar[3].integer, pathDollar[5].integer) - } - case 61: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:206 - { - pathVAL.value = pathDollar[2].value - } - case 62: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:207 - { - pathVAL.value = ast.NewConst(ast.ConstAnyKey) - } - case 63: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:208 - { - pathVAL.value = pathDollar[1].value - } - case 64: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:209 - { - pathVAL.value = pathDollar[2].value - } - case 65: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:210 - { - pathVAL.value = pathDollar[2].method - } - case 66: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:211 - { - pathVAL.value = ast.NewUnary(ast.UnaryFilter, pathDollar[3].value) - } - case 67: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:213 - { - switch len(pathDollar[4].elems) { - case 0: - pathVAL.value = ast.NewBinary(ast.BinaryDecimal, nil, nil) - case 1: - pathVAL.value = ast.NewBinary(ast.BinaryDecimal, pathDollar[4].elems[0], nil) - case 2: - pathVAL.value = ast.NewBinary(ast.BinaryDecimal, pathDollar[4].elems[0], pathDollar[4].elems[1]) - default: - pathlex.Error("invalid input syntax: .decimal() can only have an optional precision[,scale]") - } - } - case 68: - pathDollar = pathS[pathpt-4 : pathpt+1] -//line grammar.y:225 - { - pathVAL.value = ast.NewUnary(ast.UnaryDate, nil) - } - case 69: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:227 - 
{ - pathVAL.value = ast.NewUnary(ast.UnaryDateTime, pathDollar[4].value) - } - case 70: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:229 - { - pathVAL.value = ast.NewUnary(ast.UnaryTime, pathDollar[4].value) - } - case 71: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:231 - { - pathVAL.value = ast.NewUnary(ast.UnaryTimeTZ, pathDollar[4].value) - } - case 72: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:233 - { - pathVAL.value = ast.NewUnary(ast.UnaryTimestamp, pathDollar[4].value) - } - case 73: - pathDollar = pathS[pathpt-5 : pathpt+1] -//line grammar.y:235 - { - pathVAL.value = ast.NewUnary(ast.UnaryTimestampTZ, pathDollar[4].value) - } - case 74: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:240 - { - pathVAL.value = ast.NewInteger(pathDollar[1].str) - } - case 75: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:242 - { - pathVAL.value = ast.NewUnaryOrNumber(ast.UnaryPlus, ast.NewInteger(pathDollar[2].str)) - } - case 76: - pathDollar = pathS[pathpt-2 : pathpt+1] -//line grammar.y:244 - { - pathVAL.value = ast.NewUnaryOrNumber(ast.UnaryMinus, ast.NewInteger(pathDollar[2].str)) - } - case 77: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:248 - { - pathVAL.elems = []ast.Node{pathDollar[1].value} - } - case 78: - pathDollar = pathS[pathpt-3 : pathpt+1] -//line grammar.y:249 - { - pathVAL.elems = append(pathVAL.elems, pathDollar[3].value) - } - case 79: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:253 - { - pathVAL.elems = pathDollar[1].elems - } - case 80: - pathDollar = pathS[pathpt-0 : pathpt+1] -//line grammar.y:254 - { - pathVAL.elems = nil - } - case 81: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:258 - { - pathVAL.value = ast.NewInteger(pathDollar[1].str) - } - case 82: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:262 - { - pathVAL.value = pathDollar[1].value - } - case 83: - pathDollar = pathS[pathpt-0 : pathpt+1] -//line grammar.y:263 - { - pathVAL.value = nil - } - case 84: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:267 - { - pathVAL.value = ast.NewString(pathDollar[1].str) - } - case 85: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:271 - { - pathVAL.value = pathDollar[1].value - } - case 86: - pathDollar = pathS[pathpt-0 : pathpt+1] -//line grammar.y:272 - { - pathVAL.value = nil - } - case 87: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:276 - { - pathVAL.value = ast.NewKey(pathDollar[1].str) - } - case 123: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:318 - { - pathVAL.method = ast.NewMethod(ast.MethodAbs) - } - case 124: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:319 - { - pathVAL.method = ast.NewMethod(ast.MethodSize) - } - case 125: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:320 - { - pathVAL.method = ast.NewMethod(ast.MethodType) - } - case 126: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:321 - { - pathVAL.method = ast.NewMethod(ast.MethodFloor) - } - case 127: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:322 - { - pathVAL.method = ast.NewMethod(ast.MethodDouble) - } - case 128: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:323 - { - pathVAL.method = ast.NewMethod(ast.MethodCeiling) - } - case 129: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:324 - { - pathVAL.method = ast.NewMethod(ast.MethodKeyValue) - } - case 130: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:325 - { - 
pathVAL.method = ast.NewMethod(ast.MethodBigInt) - } - case 131: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:326 - { - pathVAL.method = ast.NewMethod(ast.MethodBoolean) - } - case 132: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:327 - { - pathVAL.method = ast.NewMethod(ast.MethodInteger) - } - case 133: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:328 - { - pathVAL.method = ast.NewMethod(ast.MethodNumber) - } - case 134: - pathDollar = pathS[pathpt-1 : pathpt+1] -//line grammar.y:329 - { - pathVAL.method = ast.NewMethod(ast.MethodString) - } - } - goto pathstack /* stack new state and value */ -} diff --git a/path/parser/grammar.y b/path/parser/grammar.y deleted file mode 100644 index 1bc8527..0000000 --- a/path/parser/grammar.y +++ /dev/null @@ -1,331 +0,0 @@ -%{ -/*------------------------------------------------------------------------- - * - * grammar.y - * Grammar definitions for jsonpath datatype - * - * Transforms tokenized jsonpath into tree of JsonPathParseItem structs. - * - * Copyright (c) 2019-2024, PostgreSQL Global Development Group - * - * IDENTIFICATION - * https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/adt/jsonpath_gram.y;hb=HEAD - * - *------------------------------------------------------------------------- - */ - -package parser - -import ( - "strconv" - - "github.com/theory/sqljson/path/ast" -) -%} - -%union{ - str string - elems []ast.Node - indexs []ast.Node - value ast.Node - optype ast.BinaryOperator - method *ast.MethodNode - boolean bool - integer int -} - -%token TO_P NULL_P TRUE_P FALSE_P IS_P UNKNOWN_P EXISTS_P -%token IDENT_P STRING_P NUMERIC_P INT_P VARIABLE_P -%token OR_P AND_P NOT_P -%token LESS_P LESSEQUAL_P EQUAL_P NOTEQUAL_P GREATEREQUAL_P GREATER_P -%token ANY_P STRICT_P LAX_P LAST_P STARTS_P WITH_P LIKE_REGEX_P FLAG_P -%token ABS_P SIZE_P TYPE_P FLOOR_P DOUBLE_P CEILING_P KEYVALUE_P -%token DATETIME_P -%token BIGINT_P BOOLEAN_P DATE_P DECIMAL_P INTEGER_P NUMBER_P -%token STRINGFUNC_P TIME_P TIME_TZ_P TIMESTAMP_P TIMESTAMP_TZ_P - -%type result - -%type scalar_value path_primary expr array_accessor - any_path accessor_op key predicate delimited_predicate - index_elem starts_with_initial expr_or_predicate - datetime_template opt_datetime_template csv_elem - datetime_precision opt_datetime_precision - -%type accessor_expr csv_list opt_csv_list - -%type index_list - -%type comp_op - -%type method - -%type mode - -%type key_name - -%type any_level - -%left OR_P -%left AND_P -%right NOT_P -%left '+' '-' -%left '*' '/' '%' -%left UMINUS -%nonassoc '(' ')' - -/* Grammar follows */ -%% - -result: - mode expr_or_predicate { pathlex.(*lexer).setResult($1, $2) } - ; - -expr_or_predicate: - expr { $$ = $1 } - | predicate { $$ = $1; pathlex.(*lexer).setPred() } - ; - -mode: - STRICT_P { $$ = false } - | LAX_P { $$ = true } - | /* EMPTY */ { $$ = true } - ; - -scalar_value: - STRING_P { $$ = ast.NewString($1) } - | NULL_P { $$ = ast.NewConst(ast.ConstNull) } - | TRUE_P { $$ = ast.NewConst(ast.ConstTrue) } - | FALSE_P { $$ = ast.NewConst(ast.ConstFalse) } - | NUMERIC_P { $$ = ast.NewNumeric($1) } - | INT_P { $$ = ast.NewInteger($1) } - | VARIABLE_P { $$ = ast.NewVariable($1) } - ; - -comp_op: - EQUAL_P { $$ = ast.BinaryEqual } - | NOTEQUAL_P { $$ = ast.BinaryNotEqual } - | LESS_P { $$ = ast.BinaryLess } - | GREATER_P { $$ = ast.BinaryGreater } - | LESSEQUAL_P { $$ = ast.BinaryLessOrEqual } - | GREATEREQUAL_P { $$ = ast.BinaryGreaterOrEqual } - ; - -delimited_predicate: - '(' predicate ')' { 
$$ = $2 } - | EXISTS_P '(' expr ')' { $$ = ast.NewUnary(ast.UnaryExists, $3) } - ; - -predicate: - delimited_predicate { $$ = $1 } - | expr comp_op expr { $$ = ast.NewBinary($2, $1, $3) } - | predicate AND_P predicate { $$ = ast.NewBinary(ast.BinaryAnd, $1, $3) } - | predicate OR_P predicate { $$ = ast.NewBinary(ast.BinaryOr, $1, $3) } - | NOT_P delimited_predicate { $$ = ast.NewUnary(ast.UnaryNot, $2) } - | '(' predicate ')' IS_P UNKNOWN_P - { $$ = ast.NewUnary(ast.UnaryIsUnknown, $2) } - | expr STARTS_P WITH_P starts_with_initial - { $$ = ast.NewBinary(ast.BinaryStartsWith, $1, $4) } - | expr LIKE_REGEX_P STRING_P - { - var err error - $$, err = ast.NewRegex($1, $3, "") - if err != nil { - pathlex.Error(err.Error()) - } - } - | expr LIKE_REGEX_P STRING_P FLAG_P STRING_P - { - var err error - $$, err = ast.NewRegex($1, $3, $5) - if err != nil { - pathlex.Error(err.Error()) - } - } - ; - -starts_with_initial: - STRING_P { $$ = ast.NewString($1) } - | VARIABLE_P { $$ = ast.NewVariable($1) } - ; - -path_primary: - scalar_value { $$ = $1 } - | '$' { $$ = ast.NewConst(ast.ConstRoot) } - | '@' { $$ = ast.NewConst(ast.ConstCurrent) } - | LAST_P { $$ = ast.NewConst(ast.ConstLast) } - ; - -accessor_expr: - path_primary { $$ = []ast.Node{$1} } - | '(' expr ')' accessor_op { $$ = []ast.Node{$2, $4} } - | '(' predicate ')' accessor_op { $$ = []ast.Node{$2, $4} } - | accessor_expr accessor_op { $$ = append($$, $2) } - ; - -expr: - accessor_expr { $$ = ast.LinkNodes($1) } - | '(' expr ')' { $$ = $2 } - | '+' expr %prec UMINUS { $$ = ast.NewUnaryOrNumber(ast.UnaryPlus, $2) } - | '-' expr %prec UMINUS { $$ = ast.NewUnaryOrNumber(ast.UnaryMinus, $2) } - | expr '+' expr { $$ = ast.NewBinary(ast.BinaryAdd, $1, $3) } - | expr '-' expr { $$ = ast.NewBinary(ast.BinarySub, $1, $3) } - | expr '*' expr { $$ = ast.NewBinary(ast.BinaryMul, $1, $3) } - | expr '/' expr { $$ = ast.NewBinary(ast.BinaryDiv, $1, $3) } - | expr '%' expr { $$ = ast.NewBinary(ast.BinaryMod, $1, $3) } - ; - -index_elem: - expr { $$ = ast.NewBinary(ast.BinarySubscript, $1, nil) } - | expr TO_P expr { $$ = ast.NewBinary(ast.BinarySubscript, $1, $3) } - ; - -index_list: - index_elem { $$ = []ast.Node{$1} } - | index_list ',' index_elem { $$ = append($$, $3) } - ; - -array_accessor: - '[' '*' ']' { $$ = ast.NewConst(ast.ConstAnyArray) } - | '[' index_list ']' { $$ = ast.NewArrayIndex($2) } - ; - -any_level: - INT_P { $$, _ = strconv.Atoi($1) } - | LAST_P { $$ = -1 } - ; - -any_path: - ANY_P { $$ = ast.NewAny(0, -1) } - | ANY_P '{' any_level '}' { $$ = ast.NewAny($3, $3) } - | ANY_P '{' any_level TO_P any_level '}' - { $$ = ast.NewAny($3, $5) } - ; - -accessor_op: - '.' key { $$ = $2 } - | '.' '*' { $$ = ast.NewConst(ast.ConstAnyKey) } - | array_accessor { $$ = $1 } - | '.' any_path { $$ = $2 } - | '.' method '(' ')' { $$ = $2 } - | '?' '(' predicate ')' { $$ = ast.NewUnary(ast.UnaryFilter, $3) } - | '.' DECIMAL_P '(' opt_csv_list ')' - { - switch len($4) { - case 0: - $$ = ast.NewBinary(ast.BinaryDecimal, nil, nil) - case 1: - $$ = ast.NewBinary(ast.BinaryDecimal, $4[0], nil) - case 2: - $$ = ast.NewBinary(ast.BinaryDecimal, $4[0], $4[1]) - default: - pathlex.Error("invalid input syntax: .decimal() can only have an optional precision[,scale]") - } - } - | '.' DATE_P '(' ')' { $$ = ast.NewUnary(ast.UnaryDate, nil) } - | '.' DATETIME_P '(' opt_datetime_template ')' - { $$ = ast.NewUnary(ast.UnaryDateTime, $4) } - | '.' TIME_P '(' opt_datetime_precision ')' - { $$ = ast.NewUnary(ast.UnaryTime, $4) } - | '.' 
TIME_TZ_P '(' opt_datetime_precision ')' - { $$ = ast.NewUnary(ast.UnaryTimeTZ, $4) } - | '.' TIMESTAMP_P '(' opt_datetime_precision ')' - { $$ = ast.NewUnary(ast.UnaryTimestamp, $4) } - | '.' TIMESTAMP_TZ_P '(' opt_datetime_precision ')' - { $$ = ast.NewUnary(ast.UnaryTimestampTZ, $4) } - ; - -csv_elem: - INT_P - { $$ = ast.NewInteger($1) } - | '+' INT_P %prec UMINUS - { $$ = ast.NewUnaryOrNumber(ast.UnaryPlus, ast.NewInteger($2)) } - | '-' INT_P %prec UMINUS - { $$ = ast.NewUnaryOrNumber(ast.UnaryMinus, ast.NewInteger($2)) } - ; - -csv_list: - csv_elem { $$ = []ast.Node{$1} } - | csv_list ',' csv_elem { $$ = append($$, $3) } - ; - -opt_csv_list: - csv_list { $$ = $1 } - | /* EMPTY */ { $$ = nil } - ; - -datetime_precision: - INT_P { $$ = ast.NewInteger($1) } - ; - -opt_datetime_precision: - datetime_precision { $$ = $1 } - | /* EMPTY */ { $$ = nil } - ; - -datetime_template: - STRING_P { $$ = ast.NewString($1) } - ; - -opt_datetime_template: - datetime_template { $$ = $1 } - | /* EMPTY */ { $$ = nil } - ; - -key: - key_name { $$ = ast.NewKey($1) } - ; - -key_name: - IDENT_P - | STRING_P - | TO_P - | NULL_P - | TRUE_P - | FALSE_P - | IS_P - | UNKNOWN_P - | EXISTS_P - | STRICT_P - | LAX_P - | ABS_P - | SIZE_P - | TYPE_P - | FLOOR_P - | DOUBLE_P - | CEILING_P - | DATETIME_P - | KEYVALUE_P - | LAST_P - | STARTS_P - | WITH_P - | LIKE_REGEX_P - | FLAG_P - | BIGINT_P - | BOOLEAN_P - | DATE_P - | DECIMAL_P - | INTEGER_P - | NUMBER_P - | STRINGFUNC_P - | TIME_P - | TIME_TZ_P - | TIMESTAMP_P - | TIMESTAMP_TZ_P - ; - -method: - ABS_P { $$ = ast.NewMethod(ast.MethodAbs) } - | SIZE_P { $$ = ast.NewMethod(ast.MethodSize) } - | TYPE_P { $$ = ast.NewMethod(ast.MethodType) } - | FLOOR_P { $$ = ast.NewMethod(ast.MethodFloor) } - | DOUBLE_P { $$ = ast.NewMethod(ast.MethodDouble) } - | CEILING_P { $$ = ast.NewMethod(ast.MethodCeiling) } - | KEYVALUE_P { $$ = ast.NewMethod(ast.MethodKeyValue) } - | BIGINT_P { $$ = ast.NewMethod(ast.MethodBigInt) } - | BOOLEAN_P { $$ = ast.NewMethod(ast.MethodBoolean) } - | INTEGER_P { $$ = ast.NewMethod(ast.MethodInteger) } - | NUMBER_P { $$ = ast.NewMethod(ast.MethodNumber) } - | STRINGFUNC_P { $$ = ast.NewMethod(ast.MethodString) } - ; -%% diff --git a/path/parser/grammar_test.go b/path/parser/grammar_test.go deleted file mode 100644 index 98e9563..0000000 --- a/path/parser/grammar_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package parser - -import ( - "regexp" - "testing" - - "github.com/stretchr/testify/assert" -) - -//nolint:paralleltest // Setting a global so cannot run in parallel. 
-func TestGrammarStuff(t *testing.T) { - a := assert.New(t) - - pathErrorVerbose = true - t.Cleanup(func() { pathErrorVerbose = false }) - - p := &pathParserImpl{char: 42} - a.Equal(42, p.Lookahead()) - a.Equal("tok-57386", pathTokname(DECIMAL_P)) - a.Equal("TO_P", pathTokname(4)) - a.Equal("state-42", pathStatname(42)) - - a.Equal("syntax error: unexpected TO_P", pathErrorMessage(4, 4)) - a.Equal("syntax error: unexpected TO_P", pathErrorMessage(1, 4)) - a.Equal( - "syntax error: unexpected TO_P, expecting OR_P or AND_P or ')'", - pathErrorMessage(int(pathPact[0]), 4), - ) - - rx := regexp.MustCompile(`^syntax error: unexpected (?:\w+|'.'|\$[a-z]+|tok-\d+)(?:, expecting .+)?$`) - for tok := range pathToknames[3:] { - for state := range pathPact { - a.Regexp(rx, pathErrorMessage(state, tok)) - } - } -} diff --git a/path/parser/lex.go b/path/parser/lex.go deleted file mode 100644 index 5b398a3..0000000 --- a/path/parser/lex.go +++ /dev/null @@ -1,1062 +0,0 @@ -package parser - -// https://www.postgresql.org/docs/current/datatype-json.html#DATATYPE-JSONPATH: -// Numeric literals in SQL/JSON path expressions follow JavaScript rules, which -// are different from both SQL and JSON in some minor details. For example, -// SQL/JSON path allows .1 and 1., which are invalid in JSON. Non-decimal -// integer literals and underscore separators are supported, for example, -// 1_000_000, 0x1EEE_FFFF, 0o273, 0b100101. In SQL/JSON path (and in JavaScript, -// but not in SQL proper), there must not be an underscore separator directly -// after the radix prefix. -// -// An SQL/JSON path expression is typically written in an SQL query as an SQL -// character string literal, so it must be enclosed in single quotes, and any -// single quotes desired within the value must be doubled (see Section 4.1.2.1). -// Some forms of path expressions require string literals within them. These -// embedded string literals follow JavaScript/ECMAScript conventions: they must -// be surrounded by double quotes, and backslash escapes may be used within them -// to represent otherwise-hard-to-type characters. In particular, the way to -// write a double quote within an embedded string literal is \", and to write a -// backslash itself, you must write \\. Other special backslash sequences -// include those recognized in JavaScript strings: \b, \f, \n, \r, \t, \v for -// various ASCII control characters, and \uNNNN for a Unicode character -// identified by its 4-hex-digit code point and \u{N...} for a character code -// written with 1 to 6 hex digits. -// -// https://go.dev/ref/spec#Integer_literals -// An integer literal is a sequence of digits representing an integer constant. -// An optional prefix sets a non-decimal base: 0b or 0B for binary, 0, 0o, or 0O -// for octal, and 0x or 0X for hexadecimal [Go 1.13]. A single 0 is considered a -// decimal zero. In hexadecimal literals, letters a through f and A through F -// represent values 10 through 15. - -import ( - "fmt" - "strings" - "unicode" - "unicode/utf16" - "unicode/utf8" - - "github.com/smasher164/xid" - "github.com/theory/sqljson/path/ast" -) - -// position is a value that represents a source position. -type position struct { - Offset int // byte offset, starting at 0 - Line int // line number, starting at 1 - Column int // column number, starting at 1 (character count per line) -} - -// String returns the string representation of the position. 
-func (pos position) String() string { - return fmt.Sprintf("%d:%d", pos.Line, pos.Column) -} - -const ( - whitespace = uint64(1<<'\t' | 1<<'\n' | 1<<'\r' | 1<<' ') - - // Stop lexing: EOF or error. - stopTok = -1 - - // no char read yet, not EOF. - noChar = -1 - - // Literal tokens. - quote = '"' - newline = '\n' - backslash = '\\' - slash = '/' - dollar = '$' - null = rune(0) - - // Numeric bases. - decimal = 10 - hex = 16 - octal = 8 - binary = 2 -) - -// lexer lexes a path. -type lexer struct { - // Start position of most recently scanned token; set by Scan. - // Calling Init or Next invalidates the position (Line == 0). - // The Filename field is always left untouched by the Scanner. - // If an error is reported (via Error) and position is invalid, - // the scanner is not inside a token. Call Pos to obtain an error - // position in that case, or to obtain the position immediately - // after the most recently scanned token. - position - - // Collects errors while lexing. - errors []string - - // The parser stores the parsed result here, using setResult() and - // setPred(). - result *ast.AST - pred bool - - // Buffer to hold normalized string while parsing JavaScript string. - strBuf strings.Builder - - // True if a string was parsed into gotString. - gotString bool - - // Remaining fields borrowed from text/scanner. - - // Source buffer - srcBuf []byte // Source buffer - srcPos int // reading position (srcBuf index) - srcEnd int // source end (srcBuf index) - - // Source position - // srcBufOffset int // byte offset of srcBuf[0] in source - line int // line count - column int // character count - lastLineLen int // length of last line in characters (for correct column reporting) - lastCharLen int // length of last character in bytes - - // Token position - tokPos int // token text tail position (srcBuf index); valid if >= 0 - tokEnd int // token text tail end (srcBuf index) - - // One character look-ahead - ch rune // character before current srcPos -} - -// newLexer creates a new lexer configured to lex path. -func newLexer(path string) *lexer { - return &lexer{ - // initialize errors - errors: []string{}, - - // initialize source buffer - srcBuf: []byte(path), - srcEnd: len(path), - - // initialize source position - line: 1, - // initialize token text buffer(required for first call to next()) - tokPos: noChar, - // initialize one character look-ahead - ch: noChar, // no char read yet, not EOF - } -} - -func (l *lexer) resetStrBuf() { - l.strBuf.Reset() - l.gotString = false -} - -// next reads and returns the next Unicode character. It is designed such -// that only a minimal amount of work needs to be done in the common ASCII -// case (one test to check for both ASCII and end-of-buffer, and one test -// to check for newlines). 
-func (l *lexer) next() rune { - if l.srcPos == l.srcEnd { - if l.lastCharLen > 0 { - // previous character was not EOF - l.column++ - } - l.lastCharLen = 0 - return stopTok - } - - ch, width := rune(l.srcBuf[l.srcPos]), 1 - if ch >= utf8.RuneSelf { - // uncommon case: not ASCII - ch, width = utf8.DecodeRune(l.srcBuf[l.srcPos:l.srcEnd]) - if ch == utf8.RuneError && width == 1 { - // advance for correct error position - l.srcPos += width - l.lastCharLen = width - l.column++ - l.Error("invalid UTF-8 encoding") - return stopTok - } - } - - // advance - l.srcPos += width - l.lastCharLen = width - l.column++ - - // special situations - switch ch { - case 0: - l.Error("invalid character NULL") - ch = stopTok - case '\n': - l.line++ - l.lastLineLen = l.column - l.column = 0 - } - - return ch -} - -// peek returns the next Unicode character in the source without advancing -// the scanner. It returns EOF if the scanner's position is at the last -// character of the source. -func (l *lexer) peek() rune { - if l.ch == noChar { - // this code is only run for the very first character - l.ch = l.next() - } - return l.ch -} - -// Error implements the Error function required by the pathLexer interface -// generated by the parser grammar. It appends msg and the current position to -// l.errors. -func (l *lexer) Error(msg string) { - l.tokEnd = l.srcPos - l.lastCharLen // make sure token text is terminated - l.errors = append(l.errors, fmt.Sprintf("%v at %v", msg, l.pos())) -} - -// errorf provides a fmt-compatible interface sending an error to [Error]. -func (l *lexer) errorf(format string, args ...any) { - l.Error(fmt.Sprintf(format, args...)) -} - -// pos returns the position of the character immediately after the character -// or token returned by the last call to Next or Scan. Use l.Position for the -// start position of the most recently scanned token. -// -//nolint:nonamedreturns -func (l *lexer) pos() (pos position) { - pos.Offset = l.srcPos - l.lastCharLen - switch { - case l.column > 0: - // common case: last character was not a '\n' - pos.Line = l.line - pos.Column = l.column - case l.lastLineLen > 0: - // last character was a '\n' - pos.Line = l.line - 1 - pos.Column = l.lastLineLen - default: - // at the beginning of the source - pos.Line = 1 - pos.Column = 1 - } - return -} - -// Lex implements the Lex function required by the pathLexer interface -// generated by the parser grammar. It lexes the path, returning the next -// token or Unicode character from the path. The text representation of the -// token will be stored in lval.str. It reports scanning errors (read -// and token errors) by calling l.Error. 
-func (l *lexer) Lex(lval *pathSymType) int { - ch := l.peek() - - // reset token text position - l.tokPos = -1 - l.Line = 0 - -redo: - // skip white space - for whitespace&(1< 0 { - // common case: last character was not a '\n' - l.Line = l.line - l.Column = l.column - } else { - // last character was a '\n' - // (we cannot be at the beginning of the source - // since we have called next() at least once) - l.Line = l.line - 1 - l.Column = l.lastLineLen - } - - // determine token value - tok := ch - switch { - case isIdentRune(ch, 0): - tok, ch = l.scanIdent(ch) - case isDecimal(ch): - tok, ch = l.scanNumber(ch, false) - default: - switch ch { - case stopTok: - break - case '"': - tok, ch = l.scanString(STRING_P) - case '$': - tok, ch = l.scanVariable() - case '/': - ch = l.next() - if ch == '*' { - l.tokPos = -1 // don't collect token text - ch = l.scanComment(ch) - goto redo - } - case '.': - ch = l.next() - if isDecimal(ch) { - tok, ch = l.scanNumber(ch, true) - } - default: - tok, ch = l.scanOperator(ch) - } - } - - l.tokEnd = l.srcPos - l.lastCharLen - - l.ch = ch - lval.str = l.tokenText() - return int(tok) -} - -func lower(ch rune) rune { return ('a' - 'A') | ch } // returns lower-case ch iff ch is ASCII letter -func isDecimal(ch rune) bool { return '0' <= ch && ch <= '9' } -func isHex(ch rune) bool { return '0' <= ch && ch <= '9' || 'a' <= lower(ch) && lower(ch) <= 'f' } - -// digits accepts the sequence { digit | '_' } starting with ch0. -// If base <= 10, digits accepts any decimal digit but records -// the first invalid digit >= base in *invalid if *invalid == 0. -// digits returns the first rune that is not part of the sequence -// anymore, and a bitset describing whether the sequence contained -// digits (bit 0 is set), or separators '_' (bit 1 is set). -// -//nolint:nonamedreturns -func (l *lexer) digits(ch0 rune, base int, invalid *rune) (ch rune, digSep int) { - ch = ch0 - if base <= decimal { - maxCh := rune('0' + base) - for isDecimal(ch) || ch == '_' { - ds := 1 - if ch == '_' { - ds = 2 - } else if ch >= maxCh && *invalid == 0 { - *invalid = ch - } - digSep |= ds - ch = l.next() - } - } else { - for isHex(ch) || ch == '_' { - ds := 1 - if ch == '_' { - ds = 2 - } - digSep |= ds - ch = l.next() - } - } - return -} - -//nolint:funlen,gocognit -func (l *lexer) scanNumber(ch rune, seenDot bool) (rune, rune) { - base := decimal // number base - prefix := rune(0) // one of 0 (decimal), '0' (0-octal), 'x', 'o', or 'b' - digSep := 0 // bit 0: digit present, bit 1: '_' present - invalid := rune(0) // invalid digit in literal, or 0 - - // integer part - var tok rune - var ds int - - if !seenDot { - tok = INT_P - if ch == '0' { - ch = l.next() - switch lower(ch) { - case 'x': - ch = l.next() - base, prefix = hex, 'x' - case 'o': - ch = l.next() - base, prefix = octal, 'o' - case 'b': - ch = l.next() - base, prefix = binary, 'b' - case '.': - base, prefix = octal, '0' - digSep = 1 // leading 0 - default: - switch { - case ch == '_': - l.Error("underscore disallowed at start of numeric literal") - return stopTok, stopTok - case isDecimal(ch): - l.Error("trailing junk after numeric literal") - return stopTok, stopTok - default: - base, prefix = octal, '0' - digSep = 1 // leading 0 - } - } - } - - if ch == '_' { - l.Error("underscore disallowed at start of numeric literal") - return stopTok, stopTok - } - - ch, ds = l.digits(ch, base, &invalid) - digSep |= ds - if digSep&1 == 0 { - // No digits found, invalid. 
- l.Error("trailing junk after numeric literal") - return stopTok, stopTok - } - - if ch == '.' { - // May be numeric, though prefixes are integer-only. - if prefix != 0 && prefix != '0' { - // Digits found, 0x, 0o, or 0b integer looks valid, halt. - return tok, '.' - } - - ch = l.next() - seenDot = true - } - } - - // fractional part - if seenDot { - tok = NUMERIC_P - ch, ds = l.digits(ch, base, &invalid) - digSep |= ds - } - - // exponent - if e := lower(ch); e == 'e' { - if prefix != 0 && prefix != '0' { - l.errorf("%q exponent requires decimal mantissa", ch) - return stopTok, stopTok - } - - ch = l.next() - tok = NUMERIC_P - if ch == '+' || ch == '-' { - ch = l.next() - } - ch, ds = l.digits(ch, decimal, nil) - digSep |= ds - if ds&1 == 0 { - l.Error("exponent has no digits") - return stopTok, stopTok - } - } else if isIdentRune(e, 0) { - l.Error("trailing junk after numeric literal") - return stopTok, stopTok - } - - if tok == INT_P && invalid != 0 { - l.errorf("invalid digit %q in %s", invalid, litName(prefix)) - return stopTok, stopTok - } - - if digSep&2 != 0 { - l.tokEnd = l.srcPos - l.lastCharLen // make sure token text is terminated - if i := invalidSep(l.tokenText()); i >= 0 { - l.Error("'_' must separate successive digits") - return stopTok, stopTok - } - } - - if isIdentRune(ch, 0) { - l.Error("trailing junk after numeric literal") - return stopTok, stopTok - } - - return tok, ch -} - -// tokenText returns the string corresponding to the most recently scanned token. -// Valid after calling Scan and in calls of Scanner.Error. -func (l *lexer) tokenText() string { - if l.tokPos < 0 { - // no token text - return "" - } - - if l.tokEnd < l.tokPos { - // if EOF was reached, s.tokEnd is set to -1 (s.srcPos == 0) - l.tokEnd = l.tokPos - } - - if l.gotString { - // A string was parsed, return it. - return l.strBuf.String() - } - - return string(l.srcBuf[l.tokPos:l.tokEnd]) -} - -// invalidSep returns the index of the first invalid separator in x, or -1. -func invalidSep(x string) int { - x1 := ' ' // prefix char, we only care if it's 'x' - d := '.' // digit, one of '_', '0' (a digit), or '.' (anything else) - i := 0 - - // a prefix counts as a digit - if len(x) >= 2 && x[0] == '0' { - x1 = lower(rune(x[1])) - if x1 == 'x' || x1 == 'o' || x1 == 'b' { - d = '0' - i = 2 - } - } - - // mantissa and exponent - for ; i < len(x); i++ { - p := d // previous digit - d = rune(x[i]) - switch { - case d == '_': - if p != '0' { - return i - } - case isDecimal(d) || x1 == 'x' && isHex(d): - d = '0' - default: - if p == '_' { - return i - 1 - } - d = '.' - } - } - if d == '_' { - return len(x) - 1 - } - - return -1 -} - -func litName(prefix rune) string { - switch prefix { - default: - return "decimal literal" - case 'x': - return "hexadecimal literal" - case 'o', '0': - return "octal literal" - case 'b': - return "binary literal" - } -} - -// setResult creates an ast.AST and assigns it to l.result unless -// there are parser or ast.New errors. -func (l *lexer) setResult(lax bool, node ast.Node) { - if l.hasError() { - return - } - ast, err := ast.New(lax, l.pred, node) - if err != nil { - l.errors = append(l.errors, err.Error()) - } - l.result = ast -} - -// setPred indicates that the path being lexed is a predicate path query. -// Called by the parser grammar. -func (l *lexer) setPred() { - l.pred = true -} - -// scanVariable scans a variable name from l.scanner, assigns the resulting -// string to lval.str, and returns VARIABLE_P. 
-func (l *lexer) scanVariable() (rune, rune) { - ch := l.next() - switch { - case ch == '"': - // $"xyz" - return l.scanString(VARIABLE_P) - case isVariableRune(ch): - // $xyz - l.strBuf.WriteRune(ch) - ch = l.next() - for ; isVariableRune(ch); ch = l.next() { - l.strBuf.WriteRune(ch) - } - - l.gotString = true - - return VARIABLE_P, ch - default: - // Not a variable. - return '$', ch - } -} - -// hasError returns true if any errors have been recorded by the lexer. -func (l *lexer) hasError() bool { - return len(l.errors) > 0 -} - -// scanComment scans and discards a c-style /* */ comment. Returns Comment for -// a complete comment and 0 for an error. -func (l *lexer) scanComment(ch rune) rune { - if ch != '*' { - return '/' - } - - ch = l.next() // read character after "/*" - for { - if ch < null { - l.Error("unexpected end of comment") - break - } - ch0 := ch - ch = l.next() - if ch0 == '*' && ch == '/' { - ch = l.next() - break - } - } - return ch -} - -// scanOperator scans an operator from l.scanner if there is one, or else -// returns tok. Operators scanned: -// -// - == -// - > -// - >= -// - < -// - <= -// - <>, != -// - ! -// - && -// - || -// - ** -// -// Which all mean what you'd expect mathematically and in SQL, except for -// '**', which represents the Postgres-specific '.**' any path selector. -func (l *lexer) scanOperator(ch rune) (rune, rune) { - next := l.next() // Read the next character - - switch ch { - case '=': - if next == '=' { - return EQUAL_P, l.next() - } - case '>': - if next == '=' { - return GREATEREQUAL_P, l.next() - } - return GREATER_P, next - case '<': - switch next { - case '=': - return LESSEQUAL_P, l.next() - case '>': - return NOTEQUAL_P, l.next() - default: - return LESS_P, next - } - case '!': - if next == '=' { - return NOTEQUAL_P, l.next() - } - return NOT_P, next - case '&': - if next == ch { - return AND_P, l.next() - } - case '|': - if next == ch { - return OR_P, l.next() - } - case '*': - if next == ch { - return ANY_P, l.next() - } - default: - return ch, next - } - - return ch, next -} - -// scanIdent scans an identifier, the first character of which is ch; remaining -// characters are scanned. Identifiers are subject to the same escapes as -// strings. -func (l *lexer) scanIdent(ch rune) (rune, rune) { - // we know the zero'th rune is OK - switch ch { - case backslash: - // An escape sequence. - ch = l.scanEscape() - default: - l.strBuf.WriteRune(ch) - ch = l.next() - } - - // Scan the identifier as long as we have legit identifier runes. - for isIdentRune(ch, 1) { - switch ch { - case backslash: - // An escape sequence. 
- ch = l.scanEscape() - default: - l.strBuf.WriteRune(ch) - ch = l.next() - } - } - - if l.hasError() { - return stopTok, ch - } - - l.gotString = true - return identToken(l.strBuf.String()), ch -} - -func (l *lexer) scanString(ret rune) (rune, rune) { - ch := l.next() // read character after quote - for ch != quote { - if ch == newline || ch < 0 { - if !l.hasError() { - l.Error("literal not terminated") - } - l.resetStrBuf() - return stopTok, ch - } - if ch == backslash { - ch = l.scanEscape() - } else { - l.strBuf.WriteRune(ch) - ch = l.next() - } - } - - l.gotString = true - return ret, l.next() -} - -func (l *lexer) scanEscape() rune { - ch := l.next() // read character after '\' - switch ch { - case 'b': - l.strBuf.WriteRune('\b') - ch = l.next() - case 'f': - l.strBuf.WriteRune('\f') - ch = l.next() - case 'n': - l.strBuf.WriteRune('\n') - ch = l.next() - case 'r': - l.strBuf.WriteRune('\r') - ch = l.next() - case 't': - l.strBuf.WriteRune('\t') - ch = l.next() - case 'v': - l.strBuf.WriteRune('\v') - ch = l.next() - case 'x': - ch = l.scanHex() - case 'u': - ch = l.scanUnicode() - case stopTok: - l.Error("unexpected end after backslash") - ch = stopTok - default: - // Everything else is literal. - l.strBuf.WriteRune(ch) - ch = l.next() - } - - if ch == stopTok { - // Reset the string. - l.resetStrBuf() - } - - return ch -} - -// scanUnicode decodes \uNNNN and \u{NN...} UTF-16 code points into UTF-8. -func (l *lexer) scanUnicode() rune { - // Parsing borrowed from Postgres: - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_scan.l#L646-L696 - // and from encoding/json: - // https://cs.opensource.google/go/go/+/refs/tags/go1.22.1:src/encoding/json/decode.go;l=1253-1272 - rr := l.decodeUnicode() - if rr <= null { - return rr - } - - if utf16.IsSurrogate(rr) { - // Should be followed by another escape. - if l.next() != '\\' { - l.Error("Unicode low surrogate must follow a high surrogate") - return stopTok - } - - if l.next() != 'u' { - // Invalid surrogate. Backtrack to \ and return an error. - l.srcPos -= l.lastCharLen - l.lastCharLen = 1 - l.Error("Unicode low surrogate must follow a high surrogate") - return stopTok - } - rr1 := l.decodeUnicode() - if rr1 <= null { - return rr1 - } - - if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { - // A valid pair; encode it as UTF-8. - l.writeUnicode(dec) - return l.next() - } - - // Invalid surrogate, return an error - l.Error("Unicode low surrogate must follow a high surrogate") - return stopTok - } - - // \u escapes are UTF-16; convert to UTF-8 - l.writeUnicode(rr) - return l.next() -} - -// isIdentRune is a predicate controlling the characters accepted as the ith -// rune in an identifier. These follow JavaScript [identifier syntax], including -// support for \u0000 and \u{000000} unicode escapes: -// -// > In JavaScript, identifiers are commonly made of alphanumeric characters, -// > underscores (_), and dollar signs ($). Identifiers are not allowed to -// > start with numbers. However, JavaScript identifiers are not only limited -// > to ASCII β€” many Unicode code points are allowed as well. Namely, any -// > character in the [ID_Start] category can start an identifier, while any -// > character in the [ID_Continue] category can appear after the first -// > character. -// > -// > In addition, JavaScript allows using Unicode escape sequences in the -// > form of \u0000 or \u{000000} in identifiers, which encode the same -// > string value as the actual Unicode characters. 
-
- // Variations from the spec:
- //
- // - Postgres does not support literal [dollar signs], and so neither do we.
- // They can still be specified via '\$' or '\u0024'.
- //
- // Variations from Postgres:
- //
- // - Postgres allows a much wider range of Unicode characters than the
- // JavaScript spec requires, including Emoji, but this function follows
- // the spec.
- //
- // [identifier syntax]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar#identifiers
- // [ID_Start]: https://util.unicode.org/UnicodeJsps/list-unicodeset.jsp?a=%5Cp%7BID_Start%7D
- // [ID_Continue]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar#identifiers
- // [dollar signs]: https://www.postgresql.org/message-id/9F84036F-007A-432D-8DCD-1D5C3F51F76E%40justatheory.com
-func isIdentRune(ch rune, i int) bool {
- return ch == '_' || ch == '\\' || (i == 0 && xid.Start(ch)) || (i > 0 && xid.Continue(ch))
-}
-
- // isVariableRune is a predicate controlling the characters accepted as a rune
- // in a variable name. It follows the same conventions as isIdentRune, except
- // that the first character is not treated differently, because in SQL/JSON paths,
- // variables always start with '$'.
-func isVariableRune(ch rune) bool {
- return xid.Continue(ch)
-}
-
- // writeUnicode UTF-8 encodes r and writes it to l.strBuf. Required for UTF-16
- // code points expressed with \u escapes.
-func (l *lexer) writeUnicode(r rune) {
- // Should never need more than 4 max size UTF-8 characters (16 bytes) for a
- // UTF-16 code point.
- // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/include/mb/pg_wchar.h#L345
- const maxUnicodeEquivalentString = utf8.UTFMax * 4
- b := make([]byte, maxUnicodeEquivalentString)
- n := utf8.EncodeRune(b, r)
- l.strBuf.Write(b[:n])
-}
-
- // merge merges two runes. This operation appears inline in both the Postgres
- // and encoding/json sources. Likely to be inlined by Go.
-func merge(r1, r2 rune) rune {
- const four = 4
- return (r1 << four) | r2
-}
-
-func (l *lexer) scanHex() rune {
- // Parsing borrowed from the Postgres JSON Path scanner:
- // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_scan.l#L698-L714
- if c1 := hexChar(l.next()); c1 >= 0 {
- if c2 := hexChar(l.next()); c2 >= 0 {
- decoded := merge(c1, c2)
- if decoded > null {
- l.strBuf.WriteRune(decoded)
- return l.next()
- }
- }
- }
-
- l.Error("invalid hexadecimal character sequence")
- return stopTok
-}
-
- // decodeUnicode decodes \uNNNN or \u{NN...} from the source, returning the rune
- // or stopTok on error.
-func (l *lexer) decodeUnicode() rune {
- var rr rune
-
- if ch := l.next(); ch == '{' {
- // parse '\u{NN...}'
- c := l.next()
-
- // Consume up to six hexadecimal characters and combine them into a
- // single rune.
- for i := 0; i < 6 && c != '}'; i, c = i+1, l.next() {
- si := hexChar(c)
- if si < null {
- l.Error("invalid Unicode escape sequence")
- return stopTok
- }
-
- rr = merge(rr, si)
- }
-
- if c != '}' {
- l.Error("invalid Unicode escape sequence")
- return stopTok
- }
- } else {
- // parse '\uNNNN'
- // Get the next four bytes.
- // l.tokPos--
- rr = hexChar(ch)
- if rr < null {
- l.Error("invalid Unicode escape sequence")
- return stopTok
- }
- for range 3 {
- c := hexChar(l.next())
- if c < null {
- l.Error("invalid Unicode escape sequence")
- return stopTok
- }
-
- rr = rr*hex + c
- }
- }
-
- if rr == null {
- // \u0000, null, not supported.
- l.Error(`\u0000 cannot be converted to text`)
- return stopTok
- }
-
- return rr
-}
-
- // hexChar turns a hex character into a rune. Returns -1 for an invalid hex code.
- // Adapted from the Postgres hexval function and encoding/json's getu4 function:
- // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/jsonpath_scan.l#L552-L573
- // https://cs.opensource.google/go/go/+/refs/tags/go1.22.0:src/encoding/json/decode.go;l=1149-1170
-func hexChar(c rune) rune {
- switch {
- case '0' <= c && c <= '9':
- return c - '0'
- case 'a' <= c && c <= 'f':
- return c - 'a' + decimal
- case 'A' <= c && c <= 'F':
- return c - 'A' + decimal
- default:
- return -1
- }
-}
-
- // identToken examines ident and returns the appropriate token value. If ident
- // is not a jsonpath reserved word, it returns IDENT_P.
- //
- //nolint:funlen,gocyclo
-func identToken(ident string) rune {
- // Start with keywords required to be lowercase.
- switch ident {
- case "null":
- return NULL_P
- case "true":
- return TRUE_P
- case "false":
- return FALSE_P
- }
-
- // Now try case-insensitive keywords.
- switch strings.ToLower(ident) {
- case "is":
- return IS_P
- case "to":
- return TO_P
- case "abs":
- return ABS_P
- case "lax":
- return LAX_P
- case "date":
- return DATE_P
- case "flag":
- return FLAG_P
- case "last":
- return LAST_P
- case "size":
- return SIZE_P
- case "time":
- return TIME_P
- case "type":
- return TYPE_P
- case "with":
- return WITH_P
- case "floor":
- return FLOOR_P
- case "bigint":
- return BIGINT_P
- case "double":
- return DOUBLE_P
- case "exists":
- return EXISTS_P
- case "number":
- return NUMBER_P
- case "starts":
- return STARTS_P
- case "strict":
- return STRICT_P
- case "string":
- return STRINGFUNC_P
- case "boolean":
- return BOOLEAN_P
- case "ceiling":
- return CEILING_P
- case "decimal":
- return DECIMAL_P
- case "integer":
- return INTEGER_P
- case "time_tz":
- return TIME_TZ_P
- case "unknown":
- return UNKNOWN_P
- case "datetime":
- return DATETIME_P
- case "keyvalue":
- return KEYVALUE_P
- case "timestamp":
- return TIMESTAMP_P
- case "like_regex":
- return LIKE_REGEX_P
- case "timestamp_tz":
- return TIMESTAMP_TZ_P
- default:
- return IDENT_P
- }
-}
diff --git a/path/parser/lex_test.go b/path/parser/lex_test.go
deleted file mode 100644
index a07c725..0000000
--- a/path/parser/lex_test.go
+++ /dev/null
@@ -1,1091 +0,0 @@
-package parser
-
-import (
- "strings"
- "testing"
-
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "github.com/theory/sqljson/path/ast"
-)
-
-func TestNewLexer(t *testing.T) {
- t.Parallel()
- a := assert.New(t)
- path := "$.foo?(@==1)"
-
- l := newLexer(path)
- a.NotNil(l)
- a.Equal(path, string(l.srcBuf))
- a.Equal(0, l.srcPos)
- a.Equal(len(path), l.srcEnd)
- a.Equal(1, l.line)
- a.Equal(0, l.column)
- a.Equal(0, l.lastLineLen)
- a.Equal(0, l.lastCharLen)
- a.Equal(noChar, l.tokPos)
- a.Equal(rune(noChar), l.ch)
- a.Equal(1, l.line)
- a.Empty(l.tokenText())
-
- // Make sure path was loaded into the scanner.
- buf := new(strings.Builder) - - lval := &pathSymType{} - for tok := l.Lex(lval); tok != stopTok; tok = l.Lex(lval) { - buf.WriteString(lval.str) - } - - a.Equal(path, buf.String()) - a.Empty(l.tokenText()) - - // tokenText should be correct even when tokEnd < tokPos - l.tokEnd = l.tokPos - 1 - a.Empty(l.tokenText()) - a.Equal(l.tokPos, l.tokEnd) -} - -func TestIsIdentRune(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - val rune - char int - exp bool - }{ - {"null_first", 0, 0, false}, - {"null_second", 0, 1, false}, - {"underscore_first", '_', 0, true}, - {"underscore_second", '_', 1, true}, - {"dollar_first", '$', 0, false}, - {"dollar_second", '$', 1, false}, - {"char_first", 'a', 0, true}, - {"char_second", 'a', 1, true}, - {"alpha_first", 'a', 0, true}, - {"alpha_second", 'a', 1, true}, - {"letter_first", 'ΰͺ“', 0, true}, - {"letter_second", 'ΰͺ“', 1, true}, - {"digit_first", '9', 0, false}, - {"digit_second", '9', 1, true}, - {"emoji_first", 'πŸŽ‰', 0, false}, - {"emoji_second", 'πŸŽ‰', 1, false}, - {"backslash_first", '\\', 0, true}, - {"backslash_second", '\\', 1, true}, - {"slash_first", '/', 0, false}, - {"slash_second", '/', 1, false}, - {"space_first", ' ', 0, false}, - {"space_second", ' ', 1, false}, - {"eof", stopTok, 0, false}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Equal(tc.exp, isIdentRune(tc.val, tc.char)) - }) - } -} - -func TestScanError(t *testing.T) { - t.Parallel() - a := assert.New(t) - - l := newLexer("$.x == $y") - a.NotNil(l) - a.Equal([]string{}, l.errors) - - l.Error("oops") - a.Equal([]string{"oops at 1:1"}, l.errors) - a.Empty(l.tokenText()) - - a.Equal(int('$'), l.Lex(&pathSymType{})) - l.Error("yikes") - a.Equal([]string{"oops at 1:1", "yikes at 1:2"}, l.errors) - a.Equal("$", l.tokenText()) - - l.Error("hello") - a.Equal( - []string{"oops at 1:1", "yikes at 1:2", "hello at 1:2"}, - l.errors, - ) - a.Equal("$", l.tokenText()) -} - -func TestScanIdent(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - word string - exp string - tok int - err string - }{ - {"xxx", "xxx", "xxx", IDENT_P, ""}, - // Case-sensitive identifiers. - {"null", "null", "null", NULL_P, ""}, - {"NULL", "NULL", "NULL", IDENT_P, ""}, - {"true", "true", "true", TRUE_P, ""}, - {"True", "True", "True", IDENT_P, ""}, - {"TRUE", "TRUE", "TRUE", IDENT_P, ""}, - {"false", "false", "false", FALSE_P, ""}, - {"False", "False", "False", IDENT_P, ""}, - {"FALSE", "FALSE", "FALSE", IDENT_P, ""}, - {"TRUE", "TRUE", "TRUE", IDENT_P, ""}, - - // Case-insensitive identifiers. 
- {"is", "is", "is", IS_P, ""}, - {"Is", "Is", "Is", IS_P, ""}, - {"IS", "IS", "IS", IS_P, ""}, - {"to", "to", "to", TO_P, ""}, - {"To", "To", "To", TO_P, ""}, - {"TO", "TO", "TO", TO_P, ""}, - {"abs", "abs", "abs", ABS_P, ""}, - {"Abs", "Abs", "Abs", ABS_P, ""}, - {"ABS", "ABS", "ABS", ABS_P, ""}, - {"lax", "lax", "lax", LAX_P, ""}, - {"Lax", "Lax", "Lax", LAX_P, ""}, - {"LAX", "LAX", "LAX", LAX_P, ""}, - {"date", "date", "date", DATE_P, ""}, - {"Date", "Date", "Date", DATE_P, ""}, - {"DATE", "DATE", "DATE", DATE_P, ""}, - {"flag", "flag", "flag", FLAG_P, ""}, - {"Flag", "Flag", "Flag", FLAG_P, ""}, - {"FLAG", "FLAG", "FLAG", FLAG_P, ""}, - {"last", "last", "last", LAST_P, ""}, - {"Last", "Last", "Last", LAST_P, ""}, - {"LAST", "LAST", "LAST", LAST_P, ""}, - {"size", "size", "size", SIZE_P, ""}, - {"Size", "Size", "Size", SIZE_P, ""}, - {"SIZE", "SIZE", "SIZE", SIZE_P, ""}, - {"time", "time", "time", TIME_P, ""}, - {"Time", "Time", "Time", TIME_P, ""}, - {"TIME", "TIME", "TIME", TIME_P, ""}, - {"type", "type", "type", TYPE_P, ""}, - {"Type", "Type", "Type", TYPE_P, ""}, - {"TYPE", "TYPE", "TYPE", TYPE_P, ""}, - {"with", "with", "with", WITH_P, ""}, - {"With", "With", "With", WITH_P, ""}, - {"WITH", "WITH", "WITH", WITH_P, ""}, - {"floor", "floor", "floor", FLOOR_P, ""}, - {"Floor", "Floor", "Floor", FLOOR_P, ""}, - {"FLOOR", "FLOOR", "FLOOR", FLOOR_P, ""}, - {"bigint", "bigint", "bigint", BIGINT_P, ""}, - {"Bigint", "Bigint", "Bigint", BIGINT_P, ""}, - {"BIGINT", "BIGINT", "BIGINT", BIGINT_P, ""}, - {"double", "double", "double", DOUBLE_P, ""}, - {"Double", "Double", "Double", DOUBLE_P, ""}, - {"DOUBLE", "DOUBLE", "DOUBLE", DOUBLE_P, ""}, - {"exists", "exists", "exists", EXISTS_P, ""}, - {"Exists", "Exists", "Exists", EXISTS_P, ""}, - {"EXISTS", "EXISTS", "EXISTS", EXISTS_P, ""}, - {"number", "number", "number", NUMBER_P, ""}, - {"Number", "Number", "Number", NUMBER_P, ""}, - {"NUMBER", "NUMBER", "NUMBER", NUMBER_P, ""}, - {"starts", "starts", "starts", STARTS_P, ""}, - {"Starts", "Starts", "Starts", STARTS_P, ""}, - {"STARTS", "STARTS", "STARTS", STARTS_P, ""}, - {"strict", "strict", "strict", STRICT_P, ""}, - {"Strict", "Strict", "Strict", STRICT_P, ""}, - {"STRICT", "STRICT", "STRICT", STRICT_P, ""}, - {"string", "string", "string", STRINGFUNC_P, ""}, - {"String", "String", "String", STRINGFUNC_P, ""}, - {"STRING", "STRING", "STRING", STRINGFUNC_P, ""}, - {"boolean", "boolean", "boolean", BOOLEAN_P, ""}, - {"Boolean", "Boolean", "Boolean", BOOLEAN_P, ""}, - {"BOOLEAN", "BOOLEAN", "BOOLEAN", BOOLEAN_P, ""}, - {"ceiling", "ceiling", "ceiling", CEILING_P, ""}, - {"Ceiling", "Ceiling", "Ceiling", CEILING_P, ""}, - {"CEILING", "CEILING", "CEILING", CEILING_P, ""}, - {"decimal", "decimal", "decimal", DECIMAL_P, ""}, - {"Decimal", "Decimal", "Decimal", DECIMAL_P, ""}, - {"DECIMAL", "DECIMAL", "DECIMAL", DECIMAL_P, ""}, - {"integer", "integer", "integer", INTEGER_P, ""}, - {"Integer", "Integer", "Integer", INTEGER_P, ""}, - {"INTEGER", "INTEGER", "INTEGER", INTEGER_P, ""}, - {"time_tz", "time_tz", "time_tz", TIME_TZ_P, ""}, - {"Time_tz", "Time_tz", "Time_tz", TIME_TZ_P, ""}, - {"TIME_TZ", "TIME_TZ", "TIME_TZ", TIME_TZ_P, ""}, - {"unknown", "unknown", "unknown", UNKNOWN_P, ""}, - {"Unknown", "Unknown", "Unknown", UNKNOWN_P, ""}, - {"UNKNOWN", "UNKNOWN", "UNKNOWN", UNKNOWN_P, ""}, - {"datetime", "datetime", "datetime", DATETIME_P, ""}, - {"Datetime", "Datetime", "Datetime", DATETIME_P, ""}, - {"DATETIME", "DATETIME", "DATETIME", DATETIME_P, ""}, - {"keyvalue", "keyvalue", "keyvalue", KEYVALUE_P, 
""}, - {"Keyvalue", "Keyvalue", "Keyvalue", KEYVALUE_P, ""}, - {"KEYVALUE", "KEYVALUE", "KEYVALUE", KEYVALUE_P, ""}, - {"timestamp", "timestamp", "timestamp", TIMESTAMP_P, ""}, - {"Timestamp", "Timestamp", "Timestamp", TIMESTAMP_P, ""}, - {"TIMESTAMP", "TIMESTAMP", "TIMESTAMP", TIMESTAMP_P, ""}, - {"like_regex", "like_regex", "like_regex", LIKE_REGEX_P, ""}, - {"Like_regex", "Like_regex", "Like_regex", LIKE_REGEX_P, ""}, - {"LIKE_REGEX", "LIKE_REGEX", "LIKE_REGEX", LIKE_REGEX_P, ""}, - {"timestamp_tz", "timestamp_tz", "timestamp_tz", TIMESTAMP_TZ_P, ""}, - {"Timestamp_tz", "Timestamp_tz", "Timestamp_tz", TIMESTAMP_TZ_P, ""}, - {"TIMESTAMP_TZ", "TIMESTAMP_TZ", "TIMESTAMP_TZ", TIMESTAMP_TZ_P, ""}, - - // Basic identifiers. - {"underscore", "x_y_z", "x_y_z", IDENT_P, ""}, - {"mixed_case", "XoX", "XoX", IDENT_P, ""}, - {"unicode", "XΓΆX", "XΓΆX", IDENT_P, ""}, - - // Identifiers with escapes. - {"escaped_dot", `X\.X`, "X.X", IDENT_P, ""}, - {"hex", `\x22hi\x22`, `"hi"`, IDENT_P, ""}, - {"hex", `\x22hi\x22`, `"hi"`, IDENT_P, ""}, - {"bell", `x\by`, "x\by", IDENT_P, ""}, - {"form_feed", `x\fy`, "x\fy", IDENT_P, ""}, - {"new_line", `x\ny`, "x\ny", IDENT_P, ""}, - {"return", `x\ry`, "x\ry", IDENT_P, ""}, - {"return_form_feed", `x\r\ny`, "x\r\ny", IDENT_P, ""}, - {"tab", `x\ty`, "x\ty", IDENT_P, ""}, - {"vertical_tab", `x\vy`, "x\vy", IDENT_P, ""}, - {"quote", `x\"y`, `x"y`, IDENT_P, ""}, - {"slash", `x\/y`, `x/y`, IDENT_P, ""}, - {"backslash", `x\\y`, `x\y`, IDENT_P, ""}, - {"unknown_escape", `x\zy`, `xzy`, IDENT_P, ""}, - {"unicode", `fo\u00f8`, "foΓΈ", IDENT_P, ""}, - {"brace_unicode_two", `p\u{67}`, "pg", IDENT_P, ""}, - {"brace_unicode_four", `fo\u{00f8}`, "foΓΈ", IDENT_P, ""}, - {"brace_unicode_six", `LO\u{00004C}`, "LOL", IDENT_P, ""}, - { - "ridiculous", - `foo\x50\u0067\u{53}\u{051}\u{00004C}\t\"bar`, - "fooPgSQL\t\"bar", - IDENT_P, - "", - }, - - // Errors. - { - "invalid_hex", - `LO\xzz`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:5", - }, - { - "brace_unicode_eight", - `LO\u{00004C00}`, - "", - stopTok, - "invalid Unicode escape sequence at 1:12", - }, - { - "missing_brace", - `LO\u{0067`, - "", - stopTok, - "invalid Unicode escape sequence at 1:10", - }, - { - "bad_unicode_brace_hex", - `LO\u{zzzz}`, - "", - stopTok, - "invalid Unicode escape sequence at 1:6", - }, - { - "bad_unicode_hex", - `LO\uzzzz`, - "", - stopTok, - "invalid Unicode escape sequence at 1:5", - }, - { - "bad_lead_backslash", - `\xyy`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:3", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Append a '.' to check that scanIdent doesn't slurp it up. - l := newLexer(tc.word + ".") - a.Equal(l.Lex(&pathSymType{}), tc.tok) - a.Equal(tc.exp, l.strBuf.String()) - - if tc.err == "" { - // Should have no errors and the trailing '.' should be teed up. - a.Empty(l.errors) - a.Equal('.', l.peek()) - } else { - a.Equal([]string{tc.err}, l.errors) - } - }) - } -} - -func TestScanString(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - str string - exp string - tok int - err string - }{ - {"xxx", `"xxx"`, "xxx", STRING_P, ""}, - {"empty_string", `""`, "", STRING_P, ""}, - {"with_spaces", `"hi there"`, "hi there", STRING_P, ""}, - {"with_unicode", `"Go on πŸŽ‰"`, "Go on πŸŽ‰", STRING_P, ""}, - {"surrogate_pair", `"\uD834\uDD1E"`, "\U0001D11E", STRING_P, ""}, - - // Identifiers with escapes. 
- {"hex", `"\x22hi\x22"`, `"hi"`, STRING_P, ""}, - {"bell", `"x\by"`, "x\by", STRING_P, ""}, - {"form_feed", `"x\fy"`, "x\fy", STRING_P, ""}, - {"new_line", `"x\ny"`, "x\ny", STRING_P, ""}, - {"return", `"x\ry"`, "x\ry", STRING_P, ""}, - {"return_form_feed", `"x\r\ny"`, "x\r\ny", STRING_P, ""}, - {"tab", `"x\ty"`, "x\ty", STRING_P, ""}, - {"vertical_tab", `"x\vy"`, "x\vy", STRING_P, ""}, - {"quote", `"x\"y"`, `x"y`, STRING_P, ""}, - {"slash", `"x\/y"`, `x/y`, STRING_P, ""}, - {"backslash", `"x\\y"`, `x\y`, STRING_P, ""}, - {"unknown_escape", `"x\zy"`, `xzy`, STRING_P, ""}, - {"unicode", `"fo\u00f8"`, "foΓΈ", STRING_P, ""}, - {"brace_unicode_two", `"p\u{67}"`, "pg", STRING_P, ""}, - {"brace_unicode_four", `"fo\u{00f8}"`, "foΓΈ", STRING_P, ""}, - {"brace_unicode_six", `"LO\u{00004C}"`, "LOL", STRING_P, ""}, - { - "ridiculous", - `"foo\x50\u0067\u{53}\u{051}\u{00004C}\t\"bar"`, - "fooPgSQL\t\"bar", - STRING_P, - "", - }, - - // Errors. - { - "invalid_surrogate_pair", - `"\uD834\ufffd"`, - "", - stopTok, - "Unicode low surrogate must follow a high surrogate at 1:13", - }, - { - "missing_surrogate_pair", - `"\uD834lol"`, - "", - stopTok, - "Unicode low surrogate must follow a high surrogate at 1:8", - }, - { - "bad_surrogate_pair", - `"\uD834\uzzzz`, - "", - stopTok, - "invalid Unicode escape sequence at 1:10", - }, - { - "wrong_surrogate_pair", - `"\uD834\x34"`, - "", - stopTok, - "Unicode low surrogate must follow a high surrogate at 1:9", - }, - { - "hex_null_byte", - `"go \x00"`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:8", - }, - { - "invalid_hex", - `"LO\xzz"`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:6", - }, - { - "null_hex", - `"LO\x00"`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:7", - }, - { - "null_unicode", - `"LO\u0000"`, - "", - stopTok, - "\\u0000 cannot be converted to text at 1:9", - }, - { - "null_unicode_brace", - `"LO\u{000000}"`, - "", - stopTok, - "\\u0000 cannot be converted to text at 1:13", - }, - { - "brace_unicode_eight", - `"LO\u{00004C00}"`, - "", - stopTok, - "invalid Unicode escape sequence at 1:13", - }, - { - "missing_brace", - `"LO\u{0067"`, - "", - stopTok, - "invalid Unicode escape sequence at 1:11", - }, - { - "bad_unicode_brace_hex", - `"LO\u{zzzz}"`, - "", - stopTok, - "invalid Unicode escape sequence at 1:7", - }, - { - "bad_unicode_hex", - `"LO\uzzzz"`, - "", - stopTok, - "invalid Unicode escape sequence at 1:6", - }, - { - "unclosed_string", - `"go`, - "", - stopTok, - "literal not terminated at 1:4", - }, - { - "string_with_newline", - "\"go\nhome\"", - "", - stopTok, - "literal not terminated at 1:4", - }, - { - "unterminated_backslash", - `"go \`, - "", - stopTok, - "unexpected end after backslash at 1:6", - }, - { - "invalid_utf8", - string([]byte{0xD8, 0x34, 0xff, 0xfd}), - "", - stopTok, - "invalid UTF-8 encoding at 1:1", - }, - { - "null_byte", - string([]byte{0x1f, 0x00}), - "", - 0x1f, - "invalid character NULL at 1:2", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - l := newLexer(tc.str) - a.Equal(tc.tok, l.Lex(&pathSymType{})) - a.Equal(tc.exp, l.strBuf.String()) - - if tc.err == "" { - a.Empty(l.errors) - } else { - a.Equal([]string{tc.err}, l.errors) - } - }) - } -} - -func TestScanNumbers(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - num string - exp string - tok int - err string - }{ - {"one", "1", "1", INT_P, ""}, - {"zero", "0", "0", INT_P, ""}, - {"max_int", "9223372036854775807", 
"9223372036854775807", INT_P, ""}, - {"min_int", "9223372036854775808", "9223372036854775808", INT_P, ""}, // without - - {"max_uint", "18446744073709551615", "18446744073709551615", INT_P, ""}, - {"underscores", "1_000_000", "1_000_000", INT_P, ""}, - {"hex", "0x1EEE_FFFF", "0x1EEE_FFFF", INT_P, ""}, - {"HEX", "0X1EEE_FFFF", "0X1EEE_FFFF", INT_P, ""}, - {"octal", "0o273", "0o273", INT_P, ""}, - {"underscore_octal", "0o27_3", "0o27_3", INT_P, ""}, - {"OCTAL", "0O273", "0O273", INT_P, ""}, - { - "zero_prefix", - "02", // Postgres: trailing junk after numeric literal at or near "02" - "0", stopTok, - "trailing junk after numeric literal at 1:2", - }, - { - "zero_prefix_more", - "0273", // Postgres: syntax error at end of jsonpath input - "0", stopTok, - "trailing junk after numeric literal at 1:2", - }, - { - "empty_octal", - "0o", // Postgres: trailing junk after numeric literal at or near "0o" - "0o", stopTok, - "trailing junk after numeric literal at 1:3", - }, - {"binary", "0b100101", "0b100101", INT_P, ""}, - {"underscore_binary", "0b10_0101", "0b10_0101", INT_P, ""}, - {"BINARY", "0B100101", "0B100101", INT_P, ""}, - {"float", "0.42", "0.42", NUMERIC_P, ""}, - { - "max_float", - "1.79769313486231570814527423731704356798070e+308", - "1.79769313486231570814527423731704356798070e+308", - NUMERIC_P, - "", - }, - { - "min_float", // without - - "4.9406564584124654417656879286822137236505980e-324", - "4.9406564584124654417656879286822137236505980e-324", - NUMERIC_P, - "", - }, - - // https://go.dev/ref/spec#Integer_literals - {"go_int_example_01", "42", "42", INT_P, ""}, - {"go_int_example_02", "4_2", "4_2", INT_P, ""}, - { - "go_int_example_03", - "0600", // Postgres: syntax error at end of jsonpath input - "0", - stopTok, - "trailing junk after numeric literal at 1:2", - }, - { - "go_int_example_04", - "0_600", // Postgres: syntax error at end of jsonpath input - "0", - stopTok, - "underscore disallowed at start of numeric literal at 1:2", - }, - {"go_int_example_05", "0o600", "0o600", INT_P, ""}, - {"go_int_example_06", "0O600", "0O600", INT_P, ""}, - {"go_int_example_07", "0xBadFace", "0xBadFace", INT_P, ""}, - {"go_int_example_08", "0xBad_Face", "0xBad_Face", INT_P, ""}, - { - "go_int_example_09", - "0x_67_7a_2f_cc_40_c6", // Postgres: syntax error at end of jsonpath input - "0x", - stopTok, - "underscore disallowed at start of numeric literal at 1:3", - }, - { - "go_int_example_10", - "170141183460469231731687303715884105727", - "170141183460469231731687303715884105727", - INT_P, - "", - }, - { - "go_int_example_11", - "170_141183_460469_231731_687303_715884_105727", - "170_141183_460469_231731_687303_715884_105727", - INT_P, - "", - }, - {"go_int_example_12", "_42", "_42", IDENT_P, ""}, - { - "go_int_example_13", - "42_", // Postgres: trailing junk after numeric literal at or near "42_" - "42_", - stopTok, - "'_' must separate successive digits at 1:4", - }, - { - "go_int_example_14", - "4__2", // Postgres: syntax error at end of jsonpath input - "4__2", - stopTok, - "'_' must separate successive digits at 1:5", - }, - { - "go_int_example_15", - "0_xBadFace", // Postgres: syntax error at end of jsonpath input - "0", - stopTok, - "underscore disallowed at start of numeric literal at 1:2", - }, - - // https://go.dev/ref/spec#Floating-point_literals - {"go_float_example_01", "0.", "0.", NUMERIC_P, ""}, - {"go_float_example_02", "72.40", "72.40", NUMERIC_P, ""}, - { - "go_float_example_03", - "072.40", // Postgres: syntax error at end of jsonpath input - "0", - stopTok, - "trailing junk 
after numeric literal at 1:2", - }, - {"go_float_example_04", "2.71828", "2.71828", NUMERIC_P, ""}, - {"go_float_example_05", "1.e+0", "1.e+0", NUMERIC_P, ""}, - {"go_float_example_06", "6.67428e-11", "6.67428e-11", NUMERIC_P, ""}, - {"go_float_example_06", "1E6", "1E6", NUMERIC_P, ""}, - {"go_float_example_07", ".25", ".25", NUMERIC_P, ""}, - {"go_float_example_08", ".12345E+5", ".12345E+5", NUMERIC_P, ""}, - {"go_float_example_09", "1_5.", "1_5.", NUMERIC_P, ""}, - {"go_float_example_10", "0.15e+0_2", "0.15e+0_2", NUMERIC_P, ""}, - { - "go_float_example_11", - "0x1p-2", // Postgres: syntax error at end of jsonpath input - "0x1", - stopTok, - "trailing junk after numeric literal at 1:4", - }, - { - "go_float_example_12", - "0x2.p10", // Postgres: (2)."p10" - "0x2", - INT_P, - "", - }, - { - "go_float_example_13", - "0x1.Fp+0", // Postgres: ((1)."Fp" + 0) - "0x1", - INT_P, - "", - }, - { - "go_float_example_14", - "0X.8p-0", // Postgres: trailing junk after numeric literal at or near "01" - "0X", - stopTok, - "trailing junk after numeric literal at 1:3", - }, - { - "go_float_example_15", - "0X_1FFFP-16", // Postgres: syntax error at end of jsonpath input - "0X", - stopTok, - "underscore disallowed at start of numeric literal at 1:3", - }, - { - "go_float_example_16", - "0x15e-2", // Postgres: (350 - 2) - "0x15e", // Halts at - - INT_P, - "", - }, - { - "go_float_example_17", - "0x.p1", // Postgres: trailing junk after numeric literal at or near "0x" - "0x", - stopTok, - "trailing junk after numeric literal at 1:3", - }, - { - "go_float_example_18", - "1p-2", // Postgres: trailing junk after numeric literal at or near "1p" - "1", - stopTok, - "trailing junk after numeric literal at 1:2", - }, - { - "go_float_example_19", - "0x1.5e-2", // Postgres: syntax error at or near ".5e-2" of jsonpath input - "0x1", // Lex halts at '.', 0x1 is valid integer - INT_P, - "", - }, - { - "go_float_example_20", - "1_.5", // Postgres: trailing junk after numeric literal at or near "1_" - "1_.5", - stopTok, - "'_' must separate successive digits at 1:5", - }, - { - "go_float_example_21", - "1._5", // Postgres: trailing junk after numeric literal at or near "1._" - "1._5", - stopTok, - "'_' must separate successive digits at 1:5", - }, - { - "go_float_example_22", - "1.5_e1", // Postgres: trailing junk after numeric literal at or near "1.5_" - "1.5_e1", - stopTok, - "'_' must separate successive digits at 1:7", - }, - { - "go_float_example_23", - "1.5e_1", // Postgres: trailing junk after numeric literal at or near "1.5e" - "1.5e_1", - stopTok, - "'_' must separate successive digits at 1:7", - }, - { - "go_float_example_24", - "1.5e1_", // Postgres: trailing junk after numeric literal at or near "1.5e1_" - "1.5e1_", - stopTok, - "'_' must separate successive digits at 1:7", - }, - - // Errors - { - "underscore_hex_early", - "0x_1EEEFFFF", // Postgres: syntax error at end of jsonpath input - "0x", - stopTok, - "underscore disallowed at start of numeric literal at 1:3", - }, - { - "underscore_octal_early", - "0o_273", // Postgres: syntax error at end of jsonpath input - "0o", - stopTok, - "underscore disallowed at start of numeric literal at 1:3", - }, - { - "underscore_binary_early", - "0b_100101", // Postgres: syntax error at end of jsonpath input - "0b", - stopTok, - "underscore disallowed at start of numeric literal at 1:3", - }, - { - "hex_dot_path_utf8", - `0x2."πŸ˜€"`, // Postgres: (2)."πŸ˜€" - "0x2", - INT_P, - "", - }, - { - "no_decimal_mantissa", - `0o14e4`, // Postgres: syntax error at end of jsonpath 
input - "0o14", - stopTok, - "'e' exponent requires decimal mantissa at 1:5", - }, - { - "invalid_octal", - `0o9`, // Postgres: syntax error at end of jsonpath input - "0o9", - stopTok, - "invalid digit '9' in octal literal at 1:4", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - sym := &pathSymType{} - l := newLexer(tc.num) - - // // To-do tests. - // if tc.test == "go_float_example_12" || tc.test == "go_float_example_13" { - // a.NotEqual(l.Lex(sym), tc.tok) - // a.NotEqual([]string{tc.err}, l.errors) - // return - // } - - a.Equal(l.Lex(sym), tc.tok) - a.Equal(tc.exp, sym.str) - - if tc.err == "" { - a.Empty(l.errors) - } else { - a.Equal([]string{tc.err}, l.errors) - } - }) - } -} - -func TestScanVariable(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - variable string - exp string - tok int - err string - }{ - {"xxx", "$xxx", "xxx", VARIABLE_P, ""}, - {"num_prefix", "$42x", "42x", VARIABLE_P, ""}, - {"numeric", "$999", "999", VARIABLE_P, ""}, - {"mixed_case", "$XoX", "XoX", VARIABLE_P, ""}, - {"underscore", "$x_y_z", "x_y_z", VARIABLE_P, ""}, - {"mixed_case", "$XoX", "XoX", VARIABLE_P, ""}, - {"unicode", "$XΓΆX", "XΓΆX", VARIABLE_P, ""}, - {"emoji", "$🀘🏻🀘🏻", "", '$', ""}, - {"quoted", `$"xxx"`, "xxx", VARIABLE_P, ""}, - {"with_spaces", `$"hi there"`, "hi there", VARIABLE_P, ""}, - {"with_unicode", `$"Go on πŸŽ‰"`, "Go on πŸŽ‰", VARIABLE_P, ""}, - {"surrogate_pair", `$"\uD834\uDD1E"`, "\U0001D11E", VARIABLE_P, ""}, - {"root", "$", "", '$', ""}, - {"root_path", "$.x.y", "", '$', ""}, - {"root_path", "$.x.y", "", '$', ""}, - { - "null_byte", - `$"go \x00"`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:9", - }, - { - "invalid_hex", - `$"LO\xzz"`, - "", - stopTok, - "invalid hexadecimal character sequence at 1:7", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - l := newLexer(tc.variable) - a.Equal(l.Lex(&pathSymType{}), tc.tok) - a.Equal(tc.exp, l.strBuf.String()) - - if tc.err == "" { - a.Empty(l.errors) - } else { - a.Equal([]string{tc.err}, l.errors) - } - }) - } -} - -func TestScanComment(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - path string - tok rune - err string - }{ - {"simple", "/* foo */", stopTok, ""}, - {"stars", "/*foo****/", stopTok, ""}, - {"escape_star", "/*foo \\**/", stopTok, ""}, - {"escape_other", "/*foo \\! 
*/", stopTok, ""}, - {"multi_word", "/* foo bar baz */", stopTok, ""}, - {"multi_line", "/* foo bar\nbaz */", stopTok, ""}, - {"multi_line_prefix", "/* foo bar\n * baz */", stopTok, ""}, - {"EOF", "/* foo ", stopTok, "unexpected end of comment at 1:8"}, - {"not_a_comment", "/", '/', ""}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - l := newLexer(tc.path) - a.Equal('/', l.next()) - a.Equal(l.scanComment(l.next()), tc.tok) - a.Equal(len(tc.path), l.pos().Offset) - - if tc.err == "" { - a.Empty(l.errors) - } else { - a.Equal([]string{tc.err}, l.errors) - } - }) - } -} - -func TestScanOperator(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - op string - tok int - exp string - }{ - {"equal_to", "==", EQUAL_P, "=="}, - {"equal_sign_eof", "=", '=', "="}, - {"equal_sign_stop", "=[xyz]", '=', "="}, - {"ge", ">=", GREATEREQUAL_P, ">="}, - {"ge_stop", ">=x", GREATEREQUAL_P, ">="}, - {"gt", ">", GREATER_P, ">"}, - {"gt_stop", ">{x}", GREATER_P, ">"}, - {"le", "<=", LESSEQUAL_P, "<="}, - {"le_stop", "<=x", LESSEQUAL_P, "<="}, - {"le_ne", "<>", NOTEQUAL_P, "<>"}, - {"le_ne_stop", "<>x", NOTEQUAL_P, "<>"}, - {"lt", "<", LESS_P, "<"}, - {"lt_stop", "<{x}", LESS_P, "<"}, - {"not", "!", NOT_P, "!"}, - {"not_stop", "!x", NOT_P, "!"}, - {"not_equal", "!=", NOTEQUAL_P, "!="}, - {"not_equal_stop", "!=!", NOTEQUAL_P, "!="}, - {"and", "&&", AND_P, "&&"}, - {"and_stop", "&&.", AND_P, "&&"}, - {"ampersand", "&", '&', "&"}, - {"ampersand_stop", "&=", '&', "&"}, - {"or", "||", OR_P, "||"}, - {"or_stop", "||.", OR_P, "||"}, - {"pipe", "|", '|', "|"}, - {"pipe_stop", "|=", '|', "|"}, - {"any", "**", ANY_P, "**"}, - {"any_stop", "**.", ANY_P, "**"}, - {"star", "*", '*', "*"}, - {"star_stop", "*=", '*', "*"}, - {"something_else", "^^", '^', "^"}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - l := newLexer(tc.op) - tok := l.Lex(&pathSymType{}) - a.Equal(tc.tok, tok) - a.Equal(tc.exp, l.tokenText()) - a.Empty(l.errors) - }) - } -} - -func TestLexer(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - path string - exp string - tok int - err string - }{ - {"root", "$", "$", '$', ""}, - {"plus", "+", "+", '+', ""}, - {"percent", "%", "%", '%', ""}, - {"ident", "hello", "hello", IDENT_P, ""}, - {"boolean", "true", "true", TRUE_P, ""}, - {"keyword", "is", "is", IS_P, ""}, - {"integer", "42", "42", INT_P, ""}, - {"float", "42.0", "42.0", NUMERIC_P, ""}, - {"string", `"xxx"`, "xxx", STRING_P, ""}, - {"string_with_spaces", `"hi there"`, "hi there", STRING_P, ""}, - {"string_with_unicode", `"Go on πŸŽ‰"`, "Go on πŸŽ‰", STRING_P, ""}, - {"variable", `$xxx`, "xxx", VARIABLE_P, ""}, - {"quoted_variable", `$"xxx"`, "xxx", VARIABLE_P, ""}, - {"variable_with_spaces", `$"hi there"`, "hi there", VARIABLE_P, ""}, - {"variable_with_unicode", `$"Go on πŸŽ‰"`, "Go on πŸŽ‰", VARIABLE_P, ""}, - {"comment", "/* foo */", "", stopTok, ""}, - {"comment_token", "/* foo */ $", "$", '$', ""}, - {"comment", "/* foo */", "", stopTok, ""}, - {"not_comment", "/ foo", "/", '/', ""}, - {"op", "==foo", "==", EQUAL_P, ""}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - sym := &pathSymType{} - l := newLexer(tc.path) - a.Equal(l.Lex(sym), tc.tok) - a.Equal(tc.exp, sym.str) - - if tc.err == "" { - a.Empty(l.errors) - } else { - a.Equal([]string{tc.err}, l.errors) - } - }) - } -} - -func TestSetResult(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - lex *lexer 
- lax bool - pred bool - node ast.Node - err string - }{ - { - test: "legit_lax", - lex: &lexer{}, - lax: true, - pred: true, - node: ast.NewConst(ast.ConstNull), - }, - { - test: "no_lax", - lex: &lexer{}, - node: ast.NewConst(ast.ConstNull), - }, - { - test: "prev_err", - lex: &lexer{errors: []string{"oops"}}, - node: ast.NewConst(ast.ConstNull), - err: "oops", - }, - { - test: "ast_err", - lex: &lexer{}, - node: ast.NewConst(ast.ConstLast), - err: "LAST is allowed only in array subscripts", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - if tc.pred { - tc.lex.setPred() - } - tc.lex.setResult(tc.lax, tc.node) - if tc.err == "" { - ast, err := ast.New(tc.lax, tc.pred, tc.node) - r.NoError(err) - a.Equal(ast, tc.lex.result) - a.Empty(tc.lex.errors) - a.Equal(tc.pred, tc.lex.result.IsPredicate()) - } else { - a.Nil(tc.lex.result) - a.Equal(tc.err, tc.lex.errors[0]) - } - }) - } -} - -func TestLitName(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - prefix rune - }{ - {"decimal", 0}, - {"octal", '0'}, - {"octal", 'o'}, - {"hexadecimal", 'x'}, - {"binary", 'b'}, - } { - assert.Equal(t, tc.test+" literal", litName(tc.prefix)) - } -} diff --git a/path/parser/parser.go b/path/parser/parser.go deleted file mode 100644 index 067a135..0000000 --- a/path/parser/parser.go +++ /dev/null @@ -1,28 +0,0 @@ -// Package parser parses SQL/JSON paths. It uses the same grammar as Postgres -// to support the same syntax and capabilities, with a few minor exceptions. -// The lexer use patterns borrowed PostgreSQL and from text/scanner. -package parser - -import ( - "errors" - "fmt" - - "github.com/theory/sqljson/path/ast" -) - -//go:generate goyacc -v "" -o grammar.go -p path grammar.y - -// ErrParse errors are returned by the parser. -var ErrParse = errors.New("parser") - -// Parse parses path. 
-func Parse(path string) (*ast.AST, error) { - lexer := newLexer(path) - _ = pathParse(lexer) - - if len(lexer.errors) > 0 { - return nil, fmt.Errorf("%w: %v", ErrParse, lexer.errors[0]) - } - - return lexer.result, nil -} diff --git a/path/parser/parser_test.go b/path/parser/parser_test.go deleted file mode 100644 index f5d69df..0000000 --- a/path/parser/parser_test.go +++ /dev/null @@ -1,1670 +0,0 @@ -package parser - -import ( - "bytes" - "fmt" - "io" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/ast" -) - -func mkAST(t *testing.T, lax, pred bool, node ast.Node) *ast.AST { - t.Helper() - ast, err := ast.New(lax, pred, node) - require.NoError(t, err) - return ast -} - -func TestParser(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - path string - ast *ast.AST - err string - }{ - { - test: "root", - path: "$", - ast: mkAST(t, true, false, ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstRoot)})), - }, - { - test: "strict_root", - path: "strict $", - ast: mkAST(t, false, false, ast.LinkNodes([]ast.Node{ast.NewConst(ast.ConstRoot)})), - }, - { - test: "predicate", - path: "$ == 1", - ast: mkAST(t, true, true, ast.NewBinary(ast.BinaryEqual, ast.NewConst(ast.ConstRoot), ast.NewInteger("1"))), - }, - { - test: "error", - path: "$()", - err: "parser: syntax error at 1:3", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - ast, err := Parse(tc.path) - if tc.err == "" { - r.NoError(err) - a.Equal(tc.ast, ast) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrParse) - a.Nil(ast) - } - }) - } -} - -type testCase struct { - test string - path string - exp string - err string -} - -func (tc testCase) run(t *testing.T) { - t.Parallel() - ast, err := Parse(tc.path) - if tc.err == "" { - require.NoError(t, err) - assert.Equal(t, tc.exp, ast.String()) - } else { - require.EqualError(t, err, tc.err) - require.ErrorIs(t, err, ErrParse) - assert.Nil(t, ast) - } -} - -func TestJSONPathString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L3-L30 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "empty", - err: `parser: syntax error at 1:1`, - }, - { - test: "root", - path: "$", - exp: "$", - }, - { - test: "strict", - path: "strict $", - exp: "strict $", - }, - { - test: "lax", - path: "lax $", - exp: "$", - }, - { - test: "a", - path: "$.a", - exp: `$."a"`, - }, - { - test: "a_v", - path: "$.a.v", - exp: `$."a"."v"`, - }, - { - test: "a_star", - path: "$.a.*", - exp: `$."a".*`, - }, - { - test: "star_any_array", - path: "$.*[*]", - exp: "$.*[*]", - }, - { - test: "a_any_array", - path: "$.a[*]", - exp: `$."a"[*]`, - }, - { - test: "a_any_array_x2", - path: "$.a[*][*]", - exp: `$."a"[*][*]`, - }, - { - test: "root_any_array", - path: "$[*]", - exp: "$[*]", - }, - { - test: "root_array_index", - path: "$[0]", - exp: "$[0]", - }, - { - test: "root_any_array_index", - path: "$[*][0]", - exp: "$[*][0]", - }, - { - test: "any_array_a", - path: "$[*].a", - exp: `$[*]."a"`, - }, - { - test: "any_array_index_a_b", - path: "$[*][0].a.b", - exp: `$[*][0]."a"."b"`, - }, - { - test: "a_any_b", - path: "$.a.**.b", - exp: `$."a".**."b"`, - }, - { - test: "a_any2_b", - path: "$.a.**{2}.b", - exp: `$."a".**{2}."b"`, - }, - { - test: "a_any2_2_b", - path: "$.a.**{2 to 2}.b", - exp: `$."a".**{2}."b"`, - }, - { - test: "a_any2_5_b", - path: 
"$.a.**{2 to 5}.b", - exp: `$."a".**{2 to 5}."b"`, - }, - { - test: "a_any0_5_b", - path: "$.a.**{0 to 5}.b", - exp: `$."a".**{0 to 5}."b"`, - }, - { - test: "a_any5_last_b", - path: "$.a.**{5 to last}.b", - exp: `$."a".**{5 to last}."b"`, - }, - { - test: "a_any_last_b", - path: "$.a.**{last}.b", - exp: `$."a".**{last}."b"`, - }, - { - test: "a_any_last_5_b", - path: "$.a.**{last to 5}.b", - exp: `$."a".**{last to 5}."b"`, - }, - { - test: "plus_one", - path: "$+1", - exp: "($ + 1)", - }, - { - test: "minus_one", - path: "$-1", - exp: "($ - 1)", - }, - { - test: "minus_plus_one", - path: "$--+1", - exp: "($ - -1)", - }, - { - test: "a_div_plus_minus_one", - path: "$.a/+-1", - exp: `($."a" / -1)`, - }, - { - test: "math", - path: "1 * 2 + 4 % -3 != false", - exp: "(1 * 2 + 4 % -3 != false)", - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathEscapesString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L32-L35 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "js_escapes", - path: `"\b\f\r\n\t\v\"\'\\"`, - exp: `"\b\f\r\n\t\v\"'\\"`, - }, - { - test: "hex_and_unicode_escapes", - path: `"\x50\u0067\u{53}\u{051}\u{00004C}"`, - exp: `"PgSQL"`, - }, - { - test: "more_unicode", - path: `$.foo\x50\u0067\u{53}\u{051}\u{00004C}\t\"bar`, - exp: `$."fooPgSQL\t\"bar"`, - }, - { - test: "literal", - path: `"\z"`, // unrecognized escape is just the literal char - exp: `"z"`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathFilterString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L37-L50 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "g_a_1", - path: `$.g ? ($.a == 1)`, - exp: `$."g"?($."a" == 1)`, - }, - { - test: "g_current_1", - path: `$.g ? (@ == 1)`, - exp: `$."g"?(@ == 1)`, - }, - { - test: "g_a_current_1", - path: `$.g ? (@.a == 1)`, - exp: `$."g"?(@."a" == 1)`, - }, - { - test: "g_a_or_current", - path: `$.g ? (@.a == 1 || @.a == 4)`, - exp: `$."g"?(@."a" == 1 || @."a" == 4)`, - }, - { - test: "g_a_or_current_4", - path: `$.g ? (@.a == 1 && @.a == 4)`, - exp: `$."g"?(@."a" == 1 && @."a" == 4)`, - }, - { - test: "g_a_4_7", - path: `$.g ? (@.a == 1 || @.a == 4 && @.b == 7)`, - exp: `$."g"?(@."a" == 1 || @."a" == 4 && @."b" == 7)`, - }, - { - test: "g_a_4_b_7", - path: `$.g ? (@.a == 1 || !(@.a == 4) && @.b == 7)`, - exp: `$."g"?(@."a" == 1 || !(@."a" == 4) && @."b" == 7)`, - }, - { - test: "g_a_x_a_b", - path: `$.g ? (@.a == 1 || !(@.x >= 123 || @.a == 4) && @.b == 7)`, - exp: `$."g"?(@."a" == 1 || !(@."x" >= 123 || @."a" == 4) && @."b" == 7)`, - }, - { - test: "g_a_gt_abc", - path: `$.g ? (@.x >= @[*]?(@.a > "abc"))`, - exp: `$."g"?(@."x" >= @[*]?(@."a" > "abc"))`, - }, - { - test: "g_x_a_is_unknown", - path: `$.g ? ((@.x >= 123 || @.a == 4) is unknown)`, - exp: `$."g"?((@."x" >= 123 || @."a" == 4) is unknown)`, - }, - { - test: "g_exists_x", - path: `$.g ? (exists (@.x))`, - exp: `$."g"?(exists (@."x"))`, - }, - { - test: "g_exists_x_or_14", - path: `$.g ? (exists (@.x ? (@ == 14)))`, - exp: `$."g"?(exists (@."x"?(@ == 14)))`, - }, - { - test: "g_x_124_or_exists", - path: `$.g ? ((@.x >= 123 || @.a == 4) && exists (@.x ? (@ == 14)))`, - exp: `$."g"?((@."x" >= 123 || @."a" == 4) && exists (@."x"?(@ == 14)))`, - }, - { - test: "g_x_gt_a", - path: `$.g ? 
(+@.x >= +-(+@.a + 2))`, - exp: `$."g"?(+@."x" >= +(-(+@."a" + 2)))`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathArrayStuffString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L52-L64 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "a", - path: `$a`, - exp: `$"a"`, - }, - { - test: "a_b", - path: `$a.b`, - exp: `$"a"."b"`, - }, - { - test: "a_array", - path: `$a[*]`, - exp: `$"a"[*]`, - }, - { - test: "g_filter", - path: `$.g ? (@.zip == $zip)`, - exp: `$."g"?(@."zip" == $"zip")`, - }, - { - test: "a_array_multi", - path: `$.a[1,2, 3 to 16]`, - exp: `$."a"[1,2,3 to 16]`, - }, - { - test: "a_array_math", - path: `$.a[$a + 1, ($b[*]) to -($[0] * 2)]`, - exp: `$."a"[$"a" + 1,$"b"[*] to -($[0] * 2)]`, - }, - { - test: "a_array_method", - path: `$.a[$.a.size() - 3]`, - exp: `$."a"[$."a".size() - 3]`, - }, - { - test: "last", - path: `last`, - err: "parser: LAST is allowed only in array subscripts", - }, - { - test: "last_string", - path: `"last"`, - exp: `"last"`, - }, - { - test: "last_ident", - path: `$.last`, - exp: `$."last"`, - }, - { - test: "last_operand", - path: `$ ? (last > 0)`, - err: "parser: LAST is allowed only in array subscripts", - }, - { - test: "array_last", - path: `$[last]`, - exp: `$[last]`, - }, - { - test: "filter_array_last", - path: `$[$[0] ? (last > 0)]`, - exp: `$[$[0]?(last > 0)]`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathMethodString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L66-L88 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "null_type", - path: `null.type()`, - exp: `null.type()`, - }, - { - test: "one_type", - path: `1.type()`, - err: `parser: trailing junk after numeric literal at 1:3`, - }, - { - test: "parentheses_one_type", - path: `(1).type()`, - exp: `(1).type()`, - }, - { - test: "numeric_type", - path: `1.2.type()`, - exp: `(1.2).type()`, - }, - { - test: "string_type", - path: `"aaa".type()`, - exp: `"aaa".type()`, - }, - { - test: "bool_typ", - path: `true.type()`, - exp: `true.type()`, - }, - { - test: "four_meths", - path: `$.double().floor().ceiling().abs()`, - exp: `$.double().floor().ceiling().abs()`, - }, - { - test: "keyvalue_key", - path: `$.keyvalue().key`, - exp: `$.keyvalue()."key"`, - }, - { - test: "datetime", - path: `$.datetime()`, - exp: `$.datetime()`, - }, - { - test: "datetime_template", - path: `$.datetime("datetime template")`, - exp: `$.datetime("datetime template")`, - }, - { - test: "four_numeric_meths", - path: `$.bigint().integer().number().decimal()`, - exp: `$.bigint().integer().number().decimal()`, - }, - { - test: "boolean", - path: `$.boolean()`, - exp: `$.boolean()`, - }, - { - test: "date", - path: `$.date()`, - exp: `$.date()`, - }, - { - test: "decimal", - path: `$.decimal(4,2)`, - exp: `$.decimal(4,2)`, - }, - { - test: "string", - path: `$.string()`, - exp: `$.string()`, - }, - { - test: "time", - path: `$.time()`, - exp: `$.time()`, - }, - { - test: "time_arg", - path: `$.time(6)`, - exp: `$.time(6)`, - }, - { - test: "time_tz", - path: `$.time_tz()`, - exp: `$.time_tz()`, - }, - { - test: "time_tz_arg", - path: `$.time_tz(4)`, - exp: `$.time_tz(4)`, - }, - { - test: "timestamp", - path: `$.timestamp()`, - exp: `$.timestamp()`, - }, - { - test: "timestamp_arg", - path: `$.timestamp(2)`, - exp: `$.timestamp(2)`, - }, - { - test: "timestamp_tz", - path: 
`$.timestamp_tz()`, - exp: `$.timestamp_tz()`, - }, - { - test: "timestamp_tz_arg", - path: `$.timestamp_tz(0)`, - exp: `$.timestamp_tz(0)`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathDecimal(t *testing.T) { - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "decimal", - path: `$.decimal()`, - exp: `$.decimal()`, - }, - { - test: "decimal_p", - path: `$.decimal(4)`, - exp: `$.decimal(4)`, - }, - { - test: "decimal_plus_p", - path: `$.decimal(+4)`, - exp: `$.decimal(4)`, - }, - { - test: "decimal_minus_p", - path: `$.decimal(-4)`, - exp: `$.decimal(-4)`, - }, - { - test: "decimal_p_s", - path: `$.decimal(4,2)`, - exp: `$.decimal(4,2)`, - }, - { - test: "decimal_p_s_err", - path: `$.decimal(4,2,1)`, - err: "parser: invalid input syntax: .decimal() can only have an optional precision[,scale] at 1:17", - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathStartsWithString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L90-L91 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "starts_with_string", - path: `$ ? (@ starts with "abc")`, - exp: `$?(@ starts with "abc")`, - }, - { - test: "starts_with_variable", - path: `$ ? (@ starts with $var)`, - exp: `$?(@ starts with $"var")`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathRegexString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L93-L103 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "invalid_pattern", - path: `$ ? (@ like_regex "(invalid pattern")`, - err: "parser: error parsing regexp: missing closing ): `(invalid pattern` at 1:38", - }, - { - test: "valid_pattern", - path: `$ ? (@ like_regex "pattern")`, - exp: `$?(@ like_regex "pattern")`, - }, - { - test: "empty_flag", - path: `$ ? (@ like_regex "pattern" flag "")`, - exp: `$?(@ like_regex "pattern")`, - }, - { - test: "flag_i", - path: `$ ? (@ like_regex "pattern" flag "i")`, - exp: `$?(@ like_regex "pattern" flag "i")`, - }, - { - test: "flag_is", - path: `$ ? (@ like_regex "pattern" flag "is")`, - exp: `$?(@ like_regex "pattern" flag "is")`, - }, - { - test: "flag_isim", - path: `$ ? (@ like_regex "pattern" flag "isim")`, - exp: `$?(@ like_regex "pattern" flag "ism")`, - }, - { - test: "flag_xsms", - path: `$ ? (@ like_regex "pattern" flag "xsms")`, - err: `parser: XQuery "x" flag (expanded regular expressions) is not implemented at 1:40`, - }, - { - test: "flag_q", - path: `$ ? (@ like_regex "pattern" flag "q")`, - exp: `$?(@ like_regex "pattern" flag "q")`, - }, - { - test: "flag_iq", - path: `$ ? (@ like_regex "pattern" flag "iq")`, - exp: `$?(@ like_regex "pattern" flag "iq")`, - }, - { - test: "flag_smixq", - path: `$ ? (@ like_regex "pattern" flag "smixq")`, - exp: `$?(@ like_regex "pattern" flag "ismxq")`, - }, - { - test: "flag_a", - path: `$ ? 
(@ like_regex "pattern" flag "a")`, - err: `parser: Unrecognized flag character "a" in LIKE_REGEX predicate at 1:37`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathMathsString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L105-107 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "lt", - path: `$ < 1`, - exp: `($ < 1)`, - }, - { - test: "lt_or_le", - path: `($ < 1) || $.a.b <= $x`, - exp: `($ < 1 || $."a"."b" <= $"x")`, - }, - { - test: "plus", - path: `@ + 1`, - err: `parser: @ is not allowed in root expressions`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathNumericString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L37-L50 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "root_a_b", - path: `($).a.b`, - exp: `$."a"."b"`, - }, - { - test: "root_a_b_c_d", - path: `($.a.b).c.d`, - exp: `$."a"."b"."c"."d"`, - }, - { - test: "ab_xy_cd", - path: `($.a.b + -$.x.y).c.d`, - exp: `($."a"."b" + -$."x"."y")."c"."d"`, - }, - { - test: "ab_cd", - path: `(-+$.a.b).c.d`, - exp: `(-(+$."a"."b"))."c"."d"`, - }, - { - test: "1_ab_plus_cd", - path: `1 + ($.a.b + 2).c.d`, - exp: `(1 + ($."a"."b" + 2)."c"."d")`, - }, - { - test: "1_ab_gt_cd", - path: `1 + ($.a.b > 2).c.d`, - exp: `(1 + ($."a"."b" > 2)."c"."d")`, - }, - { - test: "parentheses_root", - path: `($)`, - exp: `$`, - }, - { - test: "2parentheses_root", - path: `(($))`, - exp: `$`, - }, - { - test: "extreme_parentheses", - path: `((($ + 1)).a + ((2)).b ? ((((@ > 1)) || (exists(@.c)))))`, - exp: `(($ + 1)."a" + (2)."b"?(@ > 1 || exists (@."c")))`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathCompareNumbersString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L37-L50 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "a_lt_1", - path: `$ ? (@.a < 1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_neg_1", - path: `$ ? (@.a < -1)`, - exp: `$?(@."a" < -1)`, - }, - { - test: "a_lt_pos_1", - path: `$ ? (@.a < +1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_dot_1", - path: `$ ? (@.a < .1)`, - exp: `$?(@."a" < 0.1)`, - }, - { - test: "a_lt_neg_dot_1", - path: `$ ? (@.a < -.1)`, - exp: `$?(@."a" < -0.1)`, - }, - { - test: "a_lt_pos_dot_1", - path: `$ ? (@.a < +.1)`, - exp: `$?(@."a" < 0.1)`, - }, - { - test: "a_lt_0_dot_1", - path: `$ ? (@.a < 0.1)`, - exp: `$?(@."a" < 0.1)`, - }, - { - test: "a_lt_neg_0_dot_1", - path: `$ ? (@.a < -0.1)`, - exp: `$?(@."a" < -0.1)`, - }, - { - test: "a_lt_pos_0_dot_1", - path: `$ ? (@.a < +0.1)`, - exp: `$?(@."a" < 0.1)`, - }, - { - test: "a_lt_10_dot_1", - path: `$ ? (@.a < 10.1)`, - exp: `$?(@."a" < 10.1)`, - }, - { - test: "a_lt_neg_10_dot_1", - path: `$ ? (@.a < -10.1)`, - exp: `$?(@."a" < -10.1)`, - }, - { - test: "a_lt_pos_10_dot_1", - path: `$ ? (@.a < +10.1)`, - exp: `$?(@."a" < 10.1)`, - }, - { - test: "a_lt_expo", - path: `$ ? (@.a < 1e1)`, - exp: `$?(@."a" < 10)`, - }, - { - test: "a_lt_neg_expo", - path: `$ ? (@.a < -1e1)`, - exp: `$?(@."a" < -10)`, - }, - { - test: "a_lt_pos_expo", - path: `$ ? (@.a < +1e1)`, - exp: `$?(@."a" < 10)`, - }, - { - test: "a_lt_dot_expo", - path: `$ ? (@.a < .1e1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_neg_dot_expo", - path: `$ ? (@.a < -.1e1)`, - exp: `$?(@."a" < -1)`, - }, - { - test: "a_lt_pos_dot_expo", - path: `$ ? 
(@.a < +.1e1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_0_dot_expo", - path: `$ ? (@.a < 0.1e1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_neg_0_dot_expo", - path: `$ ? (@.a < -0.1e1)`, - exp: `$?(@."a" < -1)`, - }, - { - test: "a_lt_0_pos_expo", - path: `$ ? (@.a < +0.1e1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_10_dot_expo", - path: `$ ? (@.a < 10.1e1)`, - exp: `$?(@."a" < 101)`, - }, - { - test: "a_lt_neg_10_dot_expo", - path: `$ ? (@.a < -10.1e1)`, - exp: `$?(@."a" < -101)`, - }, - { - test: "a_lt_pos_10_dot_expo", - path: `$ ? (@.a < +10.1e1)`, - exp: `$?(@."a" < 101)`, - }, - { - test: "a_lt_1_neg_expo", - path: `$ ? (@.a < 1e-1)`, - exp: `$?(@."a" < 0.1)`, - }, - { - test: "a_lt_neg_1_neg_expo", - path: `$ ? (@.a < -1e-1)`, - exp: `$?(@."a" < -0.1)`, - }, - { - test: "a_lt_pos_1_neg_expo", - path: `$ ? (@.a < +1e-1)`, - exp: `$?(@."a" < 0.1)`, - }, - { - test: "a_lt_dot_1_expo", - path: `$ ? (@.a < .1e-1)`, - exp: `$?(@."a" < 0.01)`, - }, - { - test: "a_lt_neg_dot_1_expo", - path: `$ ? (@.a < -.1e-1)`, - exp: `$?(@."a" < -0.01)`, - }, - { - test: "a_lt_pos_dot_1_expo", - path: `$ ? (@.a < +.1e-1)`, - exp: `$?(@."a" < 0.01)`, - }, - { - test: "a_lt_0_dot_1_neg_expo", - path: `$ ? (@.a < 0.1e-1)`, - exp: `$?(@."a" < 0.01)`, - }, - { - test: "a_lt_neg_0_dot_1_neg_expo", - path: `$ ? (@.a < -0.1e-1)`, - exp: `$?(@."a" < -0.01)`, - }, - { - test: "a_lt_pos_0_dot_1_neg_expo", - path: `$ ? (@.a < +0.1e-1)`, - exp: `$?(@."a" < 0.01)`, - }, - { - test: "a_lt_10_dot_1_neg_expo", - path: `$ ? (@.a < 10.1e-1)`, - exp: `$?(@."a" < 1.01)`, - }, - { - test: "a_lt_neg_10_dot_1_neg_expo", - path: `$ ? (@.a < -10.1e-1)`, - exp: `$?(@."a" < -1.01)`, - }, - { - test: "a_lt_pos_10_dot_1_neg_expo", - path: `$ ? (@.a < +10.1e-1)`, - exp: `$?(@."a" < 1.01)`, - }, - { - test: "a_lt_1_pos_expo", - path: `$ ? (@.a < 1e+1)`, - exp: `$?(@."a" < 10)`, - }, - { - test: "a_lt_neg_1_pos_expo", - path: `$ ? (@.a < -1e+1)`, - exp: `$?(@."a" < -10)`, - }, - { - test: "a_lt_pos_1_pos_expo", - path: `$ ? (@.a < +1e+1)`, - exp: `$?(@."a" < 10)`, - }, - { - test: "a_lt_dot_1_pos_expo", - path: `$ ? (@.a < .1e+1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_neg_dot_1_pos_expo", - path: `$ ? (@.a < -.1e+1)`, - exp: `$?(@."a" < -1)`, - }, - { - test: "a_lt_pos_dot_1_pos_expo", - path: `$ ? (@.a < +.1e+1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_0_dot_1_pos_expo", - path: `$ ? (@.a < 0.1e+1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_neg_0_dot_1_pos_expo", - path: `$ ? (@.a < -0.1e+1)`, - exp: `$?(@."a" < -1)`, - }, - { - test: "a_lt_pos_0_dot_1_pos_expo", - path: `$ ? (@.a < +0.1e+1)`, - exp: `$?(@."a" < 1)`, - }, - { - test: "a_lt_10_dot_1_pos_expo", - path: `$ ? (@.a < 10.1e+1)`, - exp: `$?(@."a" < 101)`, - }, - { - test: "a_lt_neg_10_dot_1_pos_expo", - path: `$ ? (@.a < -10.1e+1)`, - exp: `$?(@."a" < -101)`, - }, - { - test: "a_lt_pos_10_dot_1_pos_expo", - path: `$ ? 
(@.a < +10.1e+1)`, - exp: `$?(@."a" < 101)`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathNumericLiteralsString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L170-205 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "zero", - path: `0`, - exp: `0`, - }, - { - test: "zero_zero", - path: `00`, - err: `parser: trailing junk after numeric literal at 1:2`, - }, - { - test: "leading_zero", - path: `0755`, - err: `parser: trailing junk after numeric literal at 1:2`, - }, - { - test: "zero_dot_zero", - path: `0.0`, - exp: `0`, // postgres: 0.00 - }, - { - test: "zero_dot_000", - path: `0.000`, - exp: `0`, // postgres: 0.00 - }, - { - test: "float_expo_1", - path: `0.000e1`, - exp: `0`, // postgres: 0.00 - }, - { - test: "float_expo_2", - path: `0.000e2`, - exp: `0`, // postgres: 0.00 - }, - { - test: "float_expo_3", - path: `0.000e3`, - exp: `0`, - }, - { - test: "0_dot_0010", - path: `0.0010`, - exp: `0.001`, // postgres: 0.0010 - }, - { - test: "float_neg_expo_1", - path: `0.0010e-1`, - exp: `0.0001`, // postgres: 0.00010 - }, - { - test: "float_pos_expo_1", - path: `0.0010e+1`, - exp: `0.01`, // postgres: 0.010 - }, - { - test: "float_pos_expo_2", - path: `0.0010e+2`, - exp: `0.1`, // postgres: 0.10 - }, - { - test: "dot_001", - path: `.001`, - exp: `0.001`, - }, - { - test: "dot_001e1", - path: `.001e1`, - exp: `0.01`, - }, - { - test: "one_dot", - path: `1.`, - exp: `1`, - }, - { - test: "done_dot_expo_1", - path: `1.e1`, - exp: `10`, - }, - { - test: "1a", - path: `1a`, - err: `parser: trailing junk after numeric literal at 1:2`, - }, - { - test: "1e", - path: `1e`, - err: `parser: exponent has no digits at 1:3`, - }, - { - test: "1_dot_e", - path: `1.e`, - err: `parser: exponent has no digits at 1:4`, - }, - { - test: "1_dot_2a", - path: `1.2a`, - err: `parser: trailing junk after numeric literal at 1:4`, - }, - { - test: "one_dot_2e", - path: `1.2e`, - err: `parser: exponent has no digits at 1:5`, - }, - { - test: "one_dot_2_dot_e", - path: `1.2.e`, - exp: `(1.2)."e"`, - }, - { - test: "parens_one_dot_two_then_e", - path: `(1.2).e`, - exp: `(1.2)."e"`, - }, - { - test: "1e3", - path: `1e3`, - exp: `1000`, - }, - { - test: "1_dot_e3", - path: `1.e3`, - exp: `1000`, - }, - { - test: "1_dot_e3_dot_e", - path: `1.e3.e`, - exp: `(1000)."e"`, - }, - { - test: "1_dot_e3_dot_e4", - path: `1.e3.e4`, - exp: `(1000)."e4"`, - }, - { - test: "1_dot_2e3", - path: `1.2e3`, - exp: `1200`, - }, - { - test: "1_dot_2e3a", - path: `1.2e3a`, - err: `parser: trailing junk after numeric literal at 1:6`, - }, - { - test: "1_dot_2_dot_e3", - path: `1.2.e3`, - exp: `(1.2)."e3"`, - }, - { - test: "parens_1_dot_2_then_dot_e3", - path: `(1.2).e3`, - exp: `(1.2)."e3"`, - }, - { - test: "1_2dot_3", - path: `1..e`, - exp: `(1)."e"`, - }, - { - test: "1_2dot_e3", - path: `1..e3`, - exp: `(1)."e3"`, - }, - { - test: "parens_1_dot_then_dot_3", - path: `(1.).e`, - exp: `(1)."e"`, - }, - { - test: "parens_1_dot_then_dot_e3", - path: `(1.).e3`, - exp: `(1)."e3"`, - }, - { - test: "1_filter_2_gt_3", - path: `1?(2>3)`, - exp: `(1)?(2 > 3)`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathNonDecimalString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L207-L223 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "binary", - path: `0b100101`, - exp: `37`, - }, - { - test: "octal", - path: `0o273`, - exp: 
`187`, - }, - { - test: "hex", - path: `0x42F`, - exp: `1071`, - }, - // error cases - { - test: "empty_binary", - path: `0b`, - err: `parser: trailing junk after numeric literal at 1:3`, - }, - { - test: "1b", - path: `1b`, - err: `parser: trailing junk after numeric literal at 1:2`, - }, - { - test: "0b0x", - path: `0b0x`, - err: `parser: trailing junk after numeric literal at 1:4`, - }, - - { - test: "empty_octal", - path: `0o`, - err: `parser: trailing junk after numeric literal at 1:3`, - }, - { - test: "1o", - path: `1o`, - err: `parser: trailing junk after numeric literal at 1:2`, - }, - { - test: "0o0x", - path: `0o0x`, - err: `parser: trailing junk after numeric literal at 1:4`, - }, - - { - test: "empty_hex", - path: `0x`, - err: `parser: trailing junk after numeric literal at 1:3`, - }, - { - test: "1x", - path: `1x`, - err: `parser: trailing junk after numeric literal at 1:2`, - }, - { - test: "0x0y", - path: `0x0y`, - err: `parser: trailing junk after numeric literal at 1:4`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathUnderscoreNumberString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath.sql#L225-L251 - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - { - test: "1_000_000", - path: `1_000_000`, - exp: `1000000`, - }, - { - test: "1_2_3", - path: `1_2_3`, - exp: `123`, - }, - { - test: "0x1EEE_FFFF", - path: `0x1EEE_FFFF`, - exp: `518979583`, - }, - { - test: "0o2_73", - path: `0o2_73`, - exp: `187`, - }, - { - test: "0b10_0101", - path: `0b10_0101`, - exp: `37`, - }, - - { - test: "1_000_dot_000_005", - path: `1_000.000_005`, - exp: `1000.000005`, - }, - { - test: "1_000_dot", - path: `1_000.`, - exp: `1000`, - }, - { - test: "dot_000_005", - path: `.000_005`, - exp: `0.000005`, - }, - { - test: "1_000_dot_5e0_1", - path: `1_000.5e0_1`, - exp: `10005`, - }, - // error cases - { - test: "_100", - path: `_100`, - err: `parser: syntax error at 1:5`, - }, - { - test: "100_", - path: `100_`, - err: `parser: '_' must separate successive digits at 1:5`, - }, - { - test: "100__000", - path: `100__000`, - err: `parser: '_' must separate successive digits at 1:9`, - }, - - { - test: "_1_000dot5", - path: `_1_000.5`, - err: `parser: syntax error at 1:7`, - }, - { - test: "1_000_dot_5", - path: `1_000_.5`, - err: `parser: '_' must separate successive digits at 1:9`, - }, - { - test: "1_000dot__5", - path: `1_000._5`, - err: `parser: '_' must separate successive digits at 1:9`, - }, - { - test: "1_000dot5_", - path: `1_000.5_`, - err: `parser: '_' must separate successive digits at 1:9`, - }, - { - test: "1_000dot5e_1", - path: `1_000.5e_1`, - err: `parser: '_' must separate successive digits at 1:11`, - }, - - // underscore after prefix not allowed in JavaScript (but allowed in SQL) - { - test: "0b_10_0101", - path: `0b_10_0101`, - err: `parser: underscore disallowed at start of numeric literal at 1:3`, - }, - { - test: "0o_273", - path: `0o_273`, - err: `parser: underscore disallowed at start of numeric literal at 1:3`, - }, - { - test: "0x_42F", - path: `0x_42F`, - err: `parser: underscore disallowed at start of numeric literal at 1:3`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestJSONPathEncodingString(t *testing.T) { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/test/regress/sql/jsonpath_encoding.sql - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - // checks for double-quoted values - // basic unicode input - { - test: 
"empty_unicode", - path: `"\u"`, // ERROR, incomplete escape - err: `parser: invalid Unicode escape sequence at 1:4`, - }, - { - test: "unicode_00", - path: `"\u00"`, // ERROR, incomplete escape - err: `parser: invalid Unicode escape sequence at 1:6`, - }, - { - test: "unicode_invalid_hex", - path: `"\u000g"`, // ERROR, g is not a hex digit - err: `parser: invalid Unicode escape sequence at 1:7`, - }, - { - test: "unicode_0000", - path: `"\u0000"`, // OK, legal escape [but Postgres doesn't support null bytes in strings] - err: `parser: \u0000 cannot be converted to text at 1:7`, - }, - { - test: "unicode_aBcD", - path: `"\uaBcD"`, // OK, uppercase and lower case both OK - exp: `"ꯍ"`, - }, - - // handling of unicode surrogate pairs - { - test: "smiley_dog", - path: `"\ud83d\ude04\ud83d\udc36"`, // correct in utf8 - exp: `"πŸ˜„πŸΆ"`, - }, - { - test: "two_highs", - path: `"\ud83d\ud83d"`, // 2 high surrogates in a row - err: `parser: Unicode low surrogate must follow a high surrogate at 1:13`, - }, - { - test: "wrong_order", - path: `"\ude04\ud83d"`, // surrogates in wrong order - err: `parser: Unicode low surrogate must follow a high surrogate at 1:13`, - }, - { - test: "orphan_high", - path: `"\ud83dX"`, // orphan high surrogate - err: `parser: Unicode low surrogate must follow a high surrogate at 1:8`, - }, - { - test: "orphan_low", - path: `"\ude04X"`, // orphan low surrogate - err: `parser: Unicode low surrogate must follow a high surrogate at 1:8`, - }, - - // handling of simple unicode escapes - { - test: "copyright_sign", - path: `"the Copyright \u00a9 sign"`, // correct in utf8 - exp: `"the Copyright Β© sign"`, - }, - { - test: "dollar_character", - path: `"dollar \u0024 character"`, // correct everywhere - exp: `"dollar $ character"`, - }, - { - test: "not_escape", - path: `"dollar \\u0024 character"`, // not an escape - exp: `"dollar \\u0024 character"`, - }, - { - test: "unescaped_null", - path: `"null \u0000 escape"`, // not escaped - err: `parser: \u0000 cannot be converted to text at 1:12`, - }, - { - test: "escaped_null", - path: `"null \\u0000 escape"`, // not an escape - exp: `"null \\u0000 escape"`, - }, - - // checks for quoted key names - // basic unicode input - { - test: "incomplete_escape", - path: `$."\u"`, // ERROR, incomplete escape - err: `parser: invalid Unicode escape sequence at 1:6`, - }, - { - test: "incomplete_escape_null", - path: `$."\u00"`, // ERROR, incomplete escape - err: `parser: invalid Unicode escape sequence at 1:8`, - }, - { - test: "invalid_hex_digit", - path: `$."\u000g"`, // ERROR, g is not a hex digit - err: `parser: invalid Unicode escape sequence at 1:9`, - }, - { - test: "null_byte_in_string", - path: `$."\u0000"`, // OK, legal escape [but Postgres doesn't support null bytes in strings] - err: `parser: \u0000 cannot be converted to text at 1:9`, - }, - { - test: "mixed_case_ok", - path: `$."\uaBcD"`, // OK, uppercase and lower case both OK - exp: `$."ꯍ"`, - }, - - // handling of unicode surrogate pairs - { - test: "smiley_dog_key", - path: `$."\ud83d\ude04\ud83d\udc36"`, // correct in utf8 - exp: `$."πŸ˜„πŸΆ"`, - }, - { - test: "two_highs_key", - path: `$."\ud83d\ud83d"`, // 2 high surrogates in a row - err: `parser: Unicode low surrogate must follow a high surrogate at 1:15`, - }, - { - test: "wrong_order_key", - path: `$."\ude04\ud83d"`, // surrogates in wrong order - err: `parser: Unicode low surrogate must follow a high surrogate at 1:15`, - }, - { - test: "orphan_high_key", - path: `$."\ud83dX"`, // orphan high surrogate - err: 
`parser: Unicode low surrogate must follow a high surrogate at 1:10`, - }, - { - test: "orphan_low_key", - path: `$."\ude04X"`, // orphan low surrogate - err: `parser: Unicode low surrogate must follow a high surrogate at 1:10`, - }, - - // handling of simple unicode escapes - { - test: "copyright_sign_key", - path: `$."the Copyright \u00a9 sign"`, // correct in utf8 - exp: `$."the Copyright Β© sign"`, - }, - { - test: "dollar_sign_key", - path: `$."dollar \u0024 character"`, // correct everywhere - exp: `$."dollar $ character"`, - }, - { - test: "not_escape_key", - path: `$."dollar \\u0024 character"`, // not an escape - exp: `$."dollar \\u0024 character"`, - }, - { - test: "unescaped_null_key", - path: `$."null \u0000 escape"`, // not unescaped - err: `parser: \u0000 cannot be converted to text at 1:14`, - }, - { - test: "escaped_null_key", - path: `$."null \\u0000 escape"`, // not an escape - exp: `$."null \\u0000 escape"`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestNumericEdgeCases(t *testing.T) { - t.Parallel() - - //nolint:paralleltest - for _, tc := range []testCase{ - // https://www.postgresql.org/message-id/flat/2F757EB8-AEB9-49E8-A2C6-613E06BA05D4%40justatheory.com - { - test: "hex_then_path_key", - path: `0x2.p10`, - exp: `(2)."p10"`, - }, - { - test: "float_then_path_key", - path: `3.14.p10`, - exp: `(3.14)."p10"`, - }, - { - test: "whitespace_disambiguation", - path: `2 .p10`, - exp: `(2)."p10"`, - }, - { - test: "go_float_example_12", - path: "0x2.p10", - exp: `(2)."p10"`, - }, - { - test: "go_float_example_13", - path: "0x1.Fp+0", - exp: `((1)."Fp" + 0)`, - }, - { - test: "go_float_example_16", - path: "0x15e-2", - exp: "(350 - 2)", - }, - { - test: "go_float_example_19", - path: "0x1.5e-2", - err: "parser: syntax error at 1:9", - }, - { - test: "hex_dot_path_utf8", - path: `0x2."πŸ˜€"`, - exp: `(2)."πŸ˜€"`, - }, - } { - t.Run(tc.test, tc.run) - } -} - -func TestDebugOutput(t *testing.T) { - t.Parallel() - node, _ := Parse("$.x + 2") - buf := new(bytes.Buffer) - printNode(buf, node.Root(), 0, "") - assert.Equal(t, `BinaryNode( - $ - "x" - + - 2 -) -`, buf.String()) -} - -// Placeholder function to generate output to describe an AST. Move to ast -// package? 
-func printNode(w io.Writer, node ast.Node, depth int, prefix string) { - indent := strings.Repeat(" ", depth*2) - switch node := node.(type) { - case nil: - return - case *ast.ConstNode, *ast.MethodNode, *ast.StringNode, *ast.VariableNode, - *ast.KeyNode, *ast.NumericNode, *ast.IntegerNode, *ast.AnyNode: - fmt.Fprintf(w, "%v%v%v\n", indent, prefix, node.String()) - case *ast.BinaryNode: - fmt.Fprintf(w, "%v%vBinaryNode(\n", indent, prefix) - printNode(w, node.Left(), depth+1, "") - fmt.Fprintf(w, "%v %v\n", indent, node.Operator()) - printNode(w, node.Right(), depth+1, "") - fmt.Fprintf(w, "%v)\n", indent) - case *ast.UnaryNode: - fmt.Fprintf(w, "%v%vUnaryNode(\n%v%v\n", indent, prefix, indent, node.Operator()) - printNode(w, node.Operand(), depth+1, "") - fmt.Fprintf(w, "%v)\n", indent) - case *ast.RegexNode: - fmt.Fprintf(w, "%v%vRegexNode(\n", indent, prefix) - printNode(w, node.Operand(), depth+1, "") - fmt.Fprintf(w, "%v%v\n", indent, node.String()) - fmt.Fprintf(w, "%v)\n", indent) - case *ast.ArrayIndexNode: - fmt.Fprintf(w, "%v%vArrayIndexNode(\n", indent, prefix) - for _, n := range node.Subscripts() { - printNode(w, n, depth+1, "β€’ ") - } - fmt.Fprintf(w, "%v)\n", indent) - } - - if next := node.Next(); next != nil { - printNode(w, next, depth+1, "") - } -} diff --git a/path/path.go b/path/path.go deleted file mode 100644 index d91fee6..0000000 --- a/path/path.go +++ /dev/null @@ -1,328 +0,0 @@ -/* -Package path provides PostgreSQL 18-compatible SQL/JSON path parsing and -execution. It supports both SQL-standard path expressions and -PostgreSQL-specific predicate check expressions. See the README for a -description of the SQL/JSON Path language. - -# Postgres Equivalents - -List of the PostgreSQL jsonpath functions and their path Package equivalents: - - - @? Operator: Use [Path.Exists] with [exec.WithSilent] - - @@ Operator: Use [Path.Match] with [exec.Silent] - - jsonb_path_exists(): Use [Path.Exists] - - jsonb_path_match(): Use [Path.Match] - - jsonb_path_query() and jsonb_path_query_array(): Use [Path.Query] - - jsonb_path_query_first(): Use [Path.First] - - jsonb_path_exists_tz(): Use [Path.Exists] with [exec.WithTZ] - - jsonb_path_match_tz(): Use [Path.Match] with [exec.WithTZ] - - jsonb_path_query_tz() and jsonb_path_query_array_tz(): Use [Path.Query] - with [exec.WithTZ] - - jsonb_path_query_first_tz(): Use [Path.First] with [exec.WithTZ] - -# Options - -The path query methods take an optional list of [exec.Option] arguments. - - - [exec.WithVars] provides named values to be substituted into the - path expression. See the WithVars example for a demonstration. - - - [exec.WithSilent] suppresses [exec.ErrVerbose] errors, including missing - object field or array element, unexpected JSON item type, and datetime - and numeric errors. This behavior might be helpful when searching JSON - entities of varying structure. See the WithSilent example for a - demonstration. - - - [exec.WithTZ] allows comparisons of date and time values that require - timezone-aware conversions. By default such conversions are made relative - to UTC, but can be made relative to another (user-preferred) time zone by - using [types.ContextWithTZ] to add it to the context passed to the query - method. See the WithTZ example for a demonstration, and [types] for more - comprehensive examples. 
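A minimal sketch of these options in use, assuming only the Parse, Query, and Exists methods and the exec.WithSilent option documented in this package (the exact items returned depend on how the JSON was decoded):

    package main

    import (
        "context"
        "encoding/json"
        "fmt"
        "log"

        "github.com/theory/sqljson/path"
        "github.com/theory/sqljson/path/exec"
    )

    func main() {
        // Decode a JSON document into Go values.
        var value any
        if err := json.Unmarshal([]byte(`{"a": [1, 2, 3, 4, 5]}`), &value); err != nil {
            log.Fatal(err)
        }

        // Compile a SQL-standard path expression once and reuse it.
        p, err := path.Parse("$.a[*] ? (@ >= 3)")
        if err != nil {
            log.Fatal(err)
        }

        ctx := context.Background()

        // Query returns every item selected by the path; WithSilent suppresses
        // the suppressible (ErrVerbose) execution errors described above.
        items, err := p.Query(ctx, value, exec.WithSilent())
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(items) // e.g. [3 4 5]

        // Exists reports whether the path selects anything at all.
        ok, err := p.Exists(ctx, value, exec.WithSilent())
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(ok) // true
    }

WithVars and WithTZ are passed the same way, as additional exec.Option arguments to any of the query methods.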
- -# Two Types of Queries - -PostgreSQL supports two flavors of path expressions, and this package follows -suit: - - - SQL-standard path expressions hew to the SQL standard, which allows - Boolean predicates only in ?() filter expressions, and can return - any number of results. - - Boolean predicate check expressions are a PostgreSQL extension that allow - path expression to be a Boolean predicate, which can return only true, - false, and null. - -This duality can sometimes cause confusion, especially when using -[Path.Exists] and the Postgres @? operator, which only work with SQL standard -expressions, and [Path.Match] and the Postgres @@ operator, which only work -with predicate check expressions. - -The path package provides a couple of additional features to help navigate -this duality: - - - [Path.IsPredicate] returns true if a Path is a predicate check expression - - [Path.PgIndexOperator] returns a string representing the appropriate - Postgres operator to use when sending queries to the database: @? for - SQL-standard expressions and @@ for predicate check expressions. - - [Path.ExistsOrMatch] dispatches to the appropriate function, [Path.Exists] - or [Path.Match], depending on whether the path is a SQL standard or - predicate check expression. - -# Errors - -The path query methods return four types of errors: - - - [exec.ErrExecution]: Errors executing the query, such as array index out - of bounds and division by zero. - - [exec.ErrVerbose] Execution errors that can be suppressed by - [exec.WithSilent]. Wraps [exec.ErrExecution]. - - [exec.ErrInvalid]: Usage errors due to flaws in the implementation, - indicating a bug that needs fixing. Should be rare. - - [exec.NULL]: Special error value returned by [Path.Exists] and [Path.Match] - when the result is unknown. - -In addition, when [context.Context.Done] is closed in the context passed to a -query function, the query will cease operation and return an -[exec.ErrExecution] that wraps the [context.Canceled] and -[context.DeadlineExceeded] error returned from [context.Context.Err]. - -# Examples -*/ -package path - -import ( - "context" - "database/sql/driver" - "errors" - "fmt" - - "github.com/theory/sqljson/path/ast" - "github.com/theory/sqljson/path/exec" - "github.com/theory/sqljson/path/parser" - "github.com/theory/sqljson/path/types" -) - -// Path provides SQL/JSON Path operations. -type Path struct { - *ast.AST -} - -// This is only here so we can import types and the documentation links work -// properly. -var _ types.DateTime = (*types.Time)(nil) - -var ( - // ErrPath wraps parsing and execution errors. - ErrPath = errors.New("path") - - // ErrScan wraps scanning errors. - ErrScan = errors.New("scan") -) - -// Parse parses path and returns the resulting Path. Returns an error on parse -// failure. Returns an [ErrPath] error on parse failure (wraps -// [parser.ErrParse]). -func Parse(path string) (*Path, error) { - ast, err := parser.Parse(path) - if err != nil { - return nil, fmt.Errorf("%w: %w", ErrPath, err) - } - return &Path{ast}, nil -} - -// MustParse is like Parse but panics on parse failure. -func MustParse(path string) *Path { - ast, err := parser.Parse(path) - if err != nil { - panic(err) - } - return &Path{ast} -} - -// MustQuery is syntax sugar for -// MustParse(path).MustQuery(context.Background(), json). Provided mainly for -// use in documentation examples. -func MustQuery(path string, json any, opt ...exec.Option) []any { - return MustParse(path).MustQuery(context.Background(), json, opt...) 
-} - -// New creates and returns a new Path query defined by ast. Use [parser.Parse] -// to create ast. -func New(ast *ast.AST) *Path { - return &Path{ast} -} - -// String returns the normalized string representation of path. -func (path *Path) String() string { - return path.AST.String() -} - -// PgIndexOperator returns the indexable PostgreSQL operator used to compare a -// path to a JSON value. Returns "@?" for a SQL-standard paths and "@@" for a -// predicate check expressions. -func (path *Path) PgIndexOperator() string { - if path.IsPredicate() { - return "@@" - } - return "@?" -} - -// IsPredicate returns true if path represents a PostgreSQL-style "predicate -// check" expression, and false if it's a SQL-standard path. -func (path *Path) IsPredicate() bool { - return path.AST.IsPredicate() -} - -// Exists checks whether the path returns any item for json. (This is useful -// only with SQL-standard JSON path expressions (when [Path.IsPredicate] -// returns false), not predicate check expressions (when [Path.IsPredicate] -// returns true), which always return a value.) -// -// While the PostgreSQL jsonb_path_exists() function can return true, false, -// or null (thanks to SQL's [three-valued logic]), Exists cannot return NULL -// when the result is unknown. In such cases, Exists returns false and also -// the [exec.NULL] error value. It's a good idea to check for this error -// explicitly when the result is likely to be unknown. -// -// See the Options section for details on the optional [exec.WithVars], -// [exec.WithTZ], and [exec.WithSilent] options. -// -// [three-valued logic]: https://en.wikipedia.org/wiki/Three-valued_logic -func (path *Path) Exists(ctx context.Context, json any, opt ...exec.Option) (bool, error) { - //nolint:wrapcheck // Okay to return unwrapped error - return exec.Exists(ctx, path.AST, json, opt...) -} - -// Match returns the result of predicate check for json. (This is useful only -// with predicate check expressions, not SQL-standard JSON path expressions -// (when [Path.IsPredicate] returns false), since it will either fail or -// return nil if the path result is not a single boolean value.) -// -// While the PostgreSQL jsonb_path_match() function can return true, false, or -// null (thanks to SQL's [three-valued logic]), Match cannot return NULL when -// the result is unknown. In such cases, Match returns false and also the -// [exec.NULL] error value. It's a good idea to check for this error -// explicitly when the result is likely to be unknown. -// -// See the Options section for details on the optional [exec.WithVars], -// [exec.WithTZ], and [exec.WithSilent] options. -func (path *Path) Match(ctx context.Context, json any, opt ...exec.Option) (bool, error) { - //nolint:wrapcheck // Okay to return unwrapped error - return exec.Match(ctx, path.AST, json, opt...) -} - -// ExistsOrMatch dispatches SQL standard path expressions to [Exists] and -// predicate check expressions to [Match], reducing the need to know which to -// call. Results and options are the same as for those methods. -func (path *Path) ExistsOrMatch(ctx context.Context, json any, opt ...exec.Option) (bool, error) { - //nolint:wrapcheck // Okay to return unwrapped error - if path.IsPredicate() { - return exec.Match(ctx, path.AST, json, opt...) - } - //nolint:wrapcheck // Okay to return unwrapped error - return exec.Exists(ctx, path.AST, json, opt...) -} - -// Query returns all JSON items returned by path for json. 
For SQL-standard -// JSON path expressions (when [Path.IsPredicate] returns false) it returns -// the values selected from json. For predicate check expressions (when -// [Path.IsPredicate] returns true) it returns the result of the predicate -// check: true, false, or nil (for an unknown result). -// -// See the Options section for details on the optional [exec.WithVars], -// [exec.WithTZ], and [exec.WithSilent] options. -func (path *Path) Query(ctx context.Context, json any, opt ...exec.Option) ([]any, error) { - //nolint:wrapcheck // Okay to return unwrapped error - return exec.Query(ctx, path.AST, json, opt...) -} - -// MustQuery is like [Query], but panics on error. Mostly provided mainly for -// use in documentation examples. -func (path *Path) MustQuery(ctx context.Context, json any, opt ...exec.Option) []any { - res, err := exec.Query(ctx, path.AST, json, opt...) - if err != nil { - panic(err) - } - return res -} - -// First is like [Query], but returns the first JSON item returned by path for -// json, or nil if there are no results. See the Options section for details -// on the optional [exec.WithVars], [exec.WithTZ], and [exec.WithSilent] -// options. -func (path *Path) First(ctx context.Context, json any, opt ...exec.Option) (any, error) { - //nolint:wrapcheck // Okay to return unwrapped error - return exec.First(ctx, path.AST, json, opt...) -} - -// Scan implements sql.Scanner so Paths can be read from databases -// transparently. Currently, database types that map to string and []byte are -// supported. Please consult database-specific driver documentation for -// matching types. Returns [ErrScan] on scan failure (and may wrap -// [parser.ErrParse]). -func (path *Path) Scan(src any) error { - switch src := src.(type) { - case nil: - return nil - case string: - // if an empty Path comes from a table, we return a null Path - if src == "" { - return nil - } - - // see Parse for required string format - ast, err := parser.Parse(src) - if err != nil { - return fmt.Errorf("%w: %w", ErrScan, err) - } - - *path = Path{ast} - - case []byte: - // if an empty Path comes from a table, we return a null Path - if len(src) == 0 { - return nil - } - - // Parse as a string. - return path.Scan(string(src)) - - default: - return fmt.Errorf("%w: unable to scan type %T into Path", ErrScan, src) - } - - return nil -} - -// Value implements [driver.Valuer] so that Paths can be written to databases -// transparently. Currently, Paths map to strings. Please consult -// database-specific driver documentation for matching types. -func (path *Path) Value() (driver.Value, error) { - return path.String(), nil -} - -// MarshalText implements encoding.TextMarshaler. -func (path *Path) MarshalText() ([]byte, error) { - return path.MarshalBinary() -} - -// UnmarshalText implements encoding.TextUnmarshaler. -func (path *Path) UnmarshalText(data []byte) error { - return path.UnmarshalBinary(data) -} - -// MarshalBinary implements encoding.BinaryMarshaler. -func (path *Path) MarshalBinary() ([]byte, error) { - return []byte(path.String()), nil -} - -// UnmarshalBinary implements encoding.BinaryUnmarshaler. Returns [ErrScan] on -// scan failure (wraps [parser.ErrParse]). 
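// A brief sketch of the marshaling round trip, assuming only the methods
// described in this file (the path text is illustrative):
//
//	var p Path
//	if err := p.UnmarshalBinary([]byte(`$.a[*] ? (@ > 2)`)); err != nil {
//		// parse failures come back wrapped in ErrScan
//	}
//	data, _ := p.MarshalBinary() // the normalized path text, same as p.String()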
-func (path *Path) UnmarshalBinary(data []byte) error { - ast, err := parser.Parse(string(data)) - if err != nil { - return fmt.Errorf("%w: %w", ErrScan, err) - } - *path = Path{ast} - return nil -} diff --git a/path/path_test.go b/path/path_test.go deleted file mode 100644 index 0255011..0000000 --- a/path/path_test.go +++ /dev/null @@ -1,318 +0,0 @@ -package path - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/sqljson/path/exec" - "github.com/theory/sqljson/path/parser" -) - -func TestPath(t *testing.T) { - t.Parallel() - jMap := map[string]any{"foo": int64(1)} - ctx := context.Background() - - type testCase struct { - test string - path string - op string - json any - exp []any - } - - checkPath := func(tc testCase, path *Path) { - a := assert.New(t) - r := require.New(t) - - a.NotNil(path) - a.NotNil(path.AST) - a.Equal(path.AST.String(), path.String()) - a.Equal(path.AST.IsPredicate(), path.IsPredicate()) - a.Equal(tc.op, path.PgIndexOperator()) - - // Test Query - res, err := path.Query(ctx, tc.json) - r.NoError(err) - a.Equal(tc.exp, res) - - // Test MustQuery. - a.NotPanics(func() { res = path.MustQuery(ctx, tc.json) }) - a.Equal(tc.exp, res) - - // Test static MustQuery. - //nolint:contextcheck - a.NotPanics(func() { res = MustQuery(tc.path, tc.json) }) - a.Equal(tc.exp, res) - - // Test First. - item, err := path.First(ctx, tc.json) - r.NoError(err) - a.Equal(tc.exp[0], item) - - // Tests Exists. - ok, err := path.Exists(ctx, tc.json, exec.WithSilent()) - r.NoError(err) - a.True(ok) - - if _, ok := tc.exp[0].(bool); ok { - // Tests Match. - res, err := path.Match(ctx, tc.json) - r.NoError(err) - a.True(res) - } - - // Tests ExistsOrMatch. - ok, err = path.ExistsOrMatch(ctx, tc.json, exec.WithSilent()) - r.NoError(err) - a.True(ok) - } - - for _, tc := range []testCase{ - { - test: "root", - path: "$", - op: "@?", - json: jMap, - exp: []any{jMap}, - }, - { - test: "predicate", - path: "$ == 1", - op: "@@", - json: int64(1), - exp: []any{true}, - }, - { - test: "filter", - path: "$.a.b ?(@.x >= 42)", - op: "@?", - json: map[string]any{"a": map[string]any{"b": map[string]any{"x": int64(42)}}}, - exp: []any{map[string]any{"x": int64(42)}}, - }, - { - test: "exists", - path: "exists($.a.b ?(@.x >= 42))", - op: "@@", - json: map[string]any{"a": map[string]any{"b": map[string]any{"x": int64(42)}}}, - exp: []any{true}, - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Test Parse - path, err := Parse(tc.path) - r.NoError(err) - checkPath(tc, path) - - // Test MustParse - r.NotPanics(func() { path = MustParse(tc.path) }) - checkPath(tc, path) - - // Test New - checkPath(tc, New(path.AST)) - - // Test text Marshaling - text, err := path.MarshalText() - r.NoError(err) - a.Equal(text, []byte(path.AST.String())) - var txtPath Path - r.NoError(txtPath.UnmarshalText(text)) - checkPath(tc, &txtPath) - - // Test binary marshaling - bin, err := path.MarshalBinary() - r.NoError(err) - a.Equal(bin, []byte(path.AST.String())) - var binPath Path - r.NoError(binPath.UnmarshalBinary(bin)) - checkPath(tc, &binPath) - - // Test SQL marshaling - val, err := path.Value() - r.NoError(err) - a.IsType("", val) - a.Equal(path.String(), val) - sqlPath := new(Path) - r.NoError(sqlPath.Scan(val)) - checkPath(tc, sqlPath) - - // Test SQL binary unmarshaling - str, ok := val.(string) - r.True(ok) - sqlPath = new(Path) - r.NoError(sqlPath.Scan([]byte(str))) - 
checkPath(tc, sqlPath) - }) - } -} - -func TestQueryErrors(t *testing.T) { - t.Parallel() - for _, tc := range []struct { - test string - path string - json any - err string - }{ - { - test: "out_of_bounds", - path: "strict $[1]", - json: []any{true}, - err: "exec: jsonpath array subscript is out of bounds", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - path, err := Parse(tc.path) - r.NoError(err) - - // Test Query - res, err := path.Query(context.Background(), tc.json) - r.EqualError(err, tc.err) - r.ErrorIs(err, exec.ErrExecution) - a.Nil(res) - - // Test First - first, err := path.First(context.Background(), tc.json) - r.EqualError(err, tc.err) - r.ErrorIs(err, exec.ErrExecution) - a.Nil(first) - - // Test MustQuery - a.PanicsWithError(tc.err, func() { - path.MustQuery(context.Background(), tc.json) - }) - - // Test static MustQuery - a.PanicsWithError(tc.err, func() { - MustQuery(tc.path, tc.json) - }) - - // Test Match - ok, err := path.Match(context.Background(), tc.json) - r.EqualError(err, tc.err) - r.ErrorIs(err, exec.ErrExecution) - a.False(ok) - - // Test Exists - ok, err = path.Exists(context.Background(), tc.json) - r.EqualError(err, tc.err) - r.ErrorIs(err, exec.ErrExecution) - a.False(ok) - - // Test ExistsOrMatch - ok, err = path.Exists(context.Background(), tc.json) - r.EqualError(err, tc.err) - r.ErrorIs(err, exec.ErrExecution) - a.False(ok) - }) - } -} - -func TestPathParseErrors(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - path string - err string - }{ - { - test: "parse_error", - path: "(.)", - err: "parser: syntax error at 1:3", - }, - { - test: "validation_error", - path: "@ == 1", - err: "parser: @ is not allowed in root expressions", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Test Parse - path, err := Parse(tc.path) - r.EqualError(err, "path: "+tc.err) - r.ErrorIs(err, ErrPath) - a.Nil(path) - - // Test MustParse - a.PanicsWithError(tc.err, func() { MustParse(tc.path) }) - - // Test UnmarshalBinary - scanErr := "scan: " + tc.err - newPath := &Path{} - err = newPath.UnmarshalBinary([]byte(tc.path)) - r.EqualError(err, scanErr) - r.ErrorIs(err, ErrScan) - r.ErrorIs(err, parser.ErrParse) - a.Nil(newPath.AST) - - // Test UnmarshalText - err = newPath.UnmarshalText([]byte(tc.path)) - r.EqualError(err, scanErr) - r.ErrorIs(err, ErrScan) - r.ErrorIs(err, parser.ErrParse) - a.Nil(newPath.AST) - - // Test Scan Text - err = newPath.Scan(tc.path) - r.EqualError(err, scanErr) - r.ErrorIs(err, ErrScan) - r.ErrorIs(err, parser.ErrParse) - a.Nil(newPath.AST) - - // Test Scan Binary - err = newPath.Scan([]byte(tc.path)) - r.EqualError(err, scanErr) - r.ErrorIs(err, ErrScan) - r.ErrorIs(err, parser.ErrParse) - a.Nil(newPath.AST) - }) - } -} - -func TestScanNilPath(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - path any - }{ - {"nil", nil}, - {"empty_string", ""}, - {"no_bytes", []byte{}}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - newPath := &Path{} - r.NoError(newPath.Scan(tc.path)) - a.Nil(newPath.AST) - }) - } - - t.Run("unknown_type", func(t *testing.T) { - t.Parallel() - r := require.New(t) - - newPath := &Path{} - err := newPath.Scan(42) - r.EqualError(err, "scan: unable to scan type int into Path") - r.ErrorIs(err, ErrScan) - }) -} diff --git a/path/test/README.md b/path/test/README.md deleted file mode 100644 
index 2d791cf..0000000 --- a/path/test/README.md +++ /dev/null @@ -1,11 +0,0 @@ -Hidden Path Tests -================= - -The tests in this directory test the examples from the [path -README](../README.md). They live in this directory because they're not package -examples, and if they lived in the `path` directory they would be picked up by -[godoc] or [pkgsite] and displayed in the main path documentation, which would -be rather confusing. - - [godoc]: https://pkg.go.dev/golang.org/x/tools/cmd/godoc - [pkgsite]: https://pkg.go.dev/golang.org/x/pkgsite/cmd/pkgsite diff --git a/path/test/readme_test.go b/path/test/readme_test.go deleted file mode 100644 index 7c19ee9..0000000 --- a/path/test/readme_test.go +++ /dev/null @@ -1,536 +0,0 @@ -package path_test - -import ( - "context" - "encoding/json" - "fmt" - "log" - "time" - - "github.com/theory/sqljson/path" - "github.com/theory/sqljson/path/exec" - "github.com/theory/sqljson/path/types" -) - -func decode(src []byte) any { - var value any - if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) - } - return value -} - -func val(src string) any { - var value any - if err := json.Unmarshal([]byte(src), &value); err != nil { - log.Fatal(err) - } - return value -} - -func pp(val any) { - js, err := json.Marshal(val) - if err != nil { - log.Fatal(err) - } - //nolint:forbidigo - fmt.Println(string(js)) -} - -func ppi(val any) { - js, err := json.MarshalIndent(val, "", " ") - if err != nil { - log.Fatal(err) - } - //nolint:forbidigo - fmt.Println(string(js)) -} - -func src() []byte { - return []byte(`{ - "track": { - "segments": [ - { - "location": [ 47.763, 13.4034 ], - "start time": "2018-10-14 10:05:14", - "HR": 73 - }, - { - "location": [ 47.706, 13.2635 ], - "start time": "2018-10-14 10:39:21", - "HR": 135 - } - ] - } - }`) -} - -func Example_unmarshal() { - var value any - if err := json.Unmarshal(src(), &value); err != nil { - log.Fatal(err) - } - fmt.Printf("%T\n", value) - // Output: map[string]interface {} -} - -func Example_segments() { - value := decode(src()) - ppi(path.MustQuery("$.track.segments", value)) - // Output: [ - // [ - // { - // "HR": 73, - // "location": [ - // 47.763, - // 13.4034 - // ], - // "start time": "2018-10-14 10:05:14" - // }, - // { - // "HR": 135, - // "location": [ - // 47.706, - // 13.2635 - // ], - // "start time": "2018-10-14 10:39:21" - // } - // ] - // ] -} - -func Example_anyArray() { - value := decode(src()) - pp(path.MustQuery("$.track.segments[*].location", value)) - // Output: [[47.763,13.4034],[47.706,13.2635]] -} - -func Example_indexZero() { - value := decode(src()) - pp(path.MustQuery("$.track.segments[0].location", value)) - // Output: [[47.763,13.4034]] -} - -func Example_seg_size() { - value := decode(src()) - pp(path.MustQuery("$.track.segments.size()", value)) - // Output: [2] -} - -func Example_gt_130() { - value := decode(src()) - pp(path.MustQuery("$.track.segments[*].HR ? (@ > 130)", value)) - // Output: [135] -} - -func Example_gt_130_time() { - value := decode(src()) - pp(path.MustQuery( - `$.track.segments[*] ? (@.HR > 130)."start time"`, - value, - )) - // Output: ["2018-10-14 10:39:21"] -} - -func Example_coords() { - value := decode(src()) - pp(path.MustQuery( - `$.track.segments[*] ? (@.location[1] < 13.4) ? (@.HR > 130)."start time"`, - value, - )) - // Output: ["2018-10-14 10:39:21"] -} - -func Example_loc_high() { - value := decode(src()) - pp(path.MustQuery( - `$.track.segments[*] ? (@.location[1] < 13.4).HR ? 
(@ > 130)`, - value, - )) - // Output: [135] -} - -func Example_track_high() { - value := decode(src()) - pp(path.MustQuery( - `$.track ? (exists(@.segments[*] ? (@.HR > 130))).segments.size()`, - value, - )) - // Output: [2] -} - -func Example_pred_std() { - value := decode(src()) - pp(path.MustQuery("$.track.segments ?(@[*].HR > 130)", value)) - // Output: [{"HR":135,"location":[47.706,13.2635],"start time":"2018-10-14 10:39:21"}] -} - -func Example_pred() { - value := decode(src()) - pp(path.MustQuery("$.track.segments[*].HR > 130", value)) - // Output: [true] -} - -func Example_lax() { - value := decode(src()) - pp(path.MustQuery("lax $.track.segments.location", value)) - // Output: [[47.763,13.4034],[47.706,13.2635]] -} - -func expectError() { - if e := recover(); e != nil { - //nolint:forbidigo - fmt.Printf("panic: %v\n", e) - } -} - -func Example_strict_panic() { - value := decode(src()) - defer expectError() - pp(path.MustQuery("strict $.track.segments.location", value)) - // Output: panic: exec: jsonpath member accessor can only be applied to an object -} - -func Example_unwrap() { - value := decode(src()) - pp(path.MustQuery("strict $.track.segments[*].location", value)) - // Output: [[47.763,13.4034],[47.706,13.2635]] -} - -func Example_any_lax() { - value := decode(src()) - pp(path.MustQuery("lax $.**.HR", value)) - // Output: [73,135,73,135] -} - -func Example_any_strict() { - value := decode(src()) - pp(path.MustQuery("strict $.**.HR", value)) - // Output: [73,135] -} - -func Example_lax_unexpected() { - value := decode(src()) - pp(path.MustQuery("lax $.track.segments[*].location", value)) - // Output: [[47.763,13.4034],[47.706,13.2635]] -} - -func Example_lax_filter() { - value := decode(src()) - pp(path.MustQuery( - "lax $.track.segments[*].location ?(@[*] > 15)", - value, - )) - // Output: [47.763,47.706] -} - -func Example_strict_filter() { - value := decode(src()) - pp(path.MustQuery( - "strict $.track.segments[*].location ?(@[*] > 15)", - value, - )) - // Output: [[47.763,13.4034],[47.706,13.2635]] -} - -func Example_add() { - pp(path.MustQuery("$[0] + 3", val("2"))) // β†’ [5] - // Output: [5] -} - -func Example_plus() { - pp(path.MustQuery("+ $.x", val(`{"x": [2,3,4]}`))) // β†’ [2, 3, 4] - // Output: [2,3,4] -} - -func Example_sub() { - pp(path.MustQuery("7 - $[0]", val("[2]"))) // β†’ [5] - // Output: [5] -} - -func Example_neg() { - pp(path.MustQuery("- $.x", val(`{"x": [2,3,4]}`))) // β†’ [-2,-3,-4] - // Output: [-2,-3,-4] -} - -func Example_mul() { - pp(path.MustQuery("2 * $[0]", val("4"))) // β†’ [8] - // Output: [8] -} - -func Example_div() { - pp(path.MustQuery("$[0] / 2", val("[8.5]"))) // β†’ [4.25] - // Output: [4.25] -} - -func Example_mod() { - pp(path.MustQuery("$[0] % 10", val("[32]"))) // β†’ [2] - // Output: [2] -} - -func Example_type() { - pp(path.MustQuery("$[*].type()", val(`[1, "2", {}]`))) // β†’ ["number","string","object"] - // Output: ["number","string","object"] -} - -func Example_size() { - pp(path.MustQuery("$.m.size()", val(`{"m": [11, 15]}`))) // β†’ [2] - // Output: [2] -} - -func Example_boolean() { - pp(path.MustQuery("$[*].boolean()", val(`[1, "yes", false]`))) // β†’ [true,true,false] - // Output: [true,true,false] -} - -func Example_string() { - pp(path.MustQuery("$[*].string()", val(`[1.23, "xyz", false]`))) // β†’ ["1.23","xyz","false"] - pp(path.MustQuery("$.datetime().string()", "2023-08-15")) // β†’ ["2023-08-15"] - // Output: ["1.23","xyz","false"] - // ["2023-08-15"] -} - -func Example_double() { - 
pp(path.MustQuery("$.len.double() * 2", val(`{"len": "1.9"}`))) // β†’ [3.8] - // Output: [3.8] -} - -func Example_ceiling() { - pp(path.MustQuery("$.h.ceiling()", val(`{"h": 1.3}`))) // β†’ [2] - // Output: [2] -} - -func Example_floor() { - pp(path.MustQuery("$.h.floor()", val(`{"h": 1.7}`))) // β†’ [1] - // Output: [1] -} - -func Example_abs() { - pp(path.MustQuery("$.z.abs()", val(`{"z": -0.3}`))) // β†’ [0.3] - // Output: [0.3] -} - -func Example_bigint() { - pp(path.MustQuery("$.len.bigint()", val(`{"len": "9876543219"}`))) // β†’ [9876543219] - // Output: [9876543219] -} - -func Example_decimal() { - pp(path.MustQuery("$.decimal(6, 2)", val("1234.5678"))) // β†’ [1234.57] - // Output: [1234.57] -} - -func Example_integer() { - pp(path.MustQuery("$.len.integer()", val(`{"len": "12345"}`))) // β†’ [12345] - // Output: [12345] -} - -func Example_number() { - pp(path.MustQuery("$.len.number()", val(`{"len": "123.45"}`))) // β†’ [123.45] - // Output: [123.45] -} - -func Example_datetime() { - pp(path.MustQuery( - `$[*] ? (@.datetime() < "2015-08-02".datetime())`, - val(`["2015-08-01", "2015-08-12"]`), - )) // β†’ "2015-8-01" - // Output: ["2015-08-01"] -} - -func Example_datetime_format() { - defer expectError() - pp(path.MustQuery( - `$[*].datetime("HH24:MI")`, val(`["12:30", "18:40"]`), - )) // β†’ ["12:30:00", "18:40:00"] - // Output: panic: exec: .datetime(template) is not yet supported -} - -func Example_date() { - pp(path.MustQuery("$.date()", "2023-08-15")) // β†’ ["2023-08-15"] - // Output: ["2023-08-15"] -} - -func Example_time() { - pp(path.MustQuery("$.time()", "12:34:56")) // β†’ ["12:34:56"] - // Output: ["12:34:56"] -} - -func Example_time_precision() { - pp(path.MustQuery("$.time(2)", "12:34:56.789")) // β†’ ["12:34:56.79"] - // Output: ["12:34:56.79"] -} - -func Example_time_tz() { - pp(path.MustQuery("$.time_tz()", "12:34:56+05:30")) // β†’ ["12:34:56+05:30"] - // Output: ["12:34:56+05:30"] -} - -func Example_time_tz_precision() { - pp(path.MustQuery("$.time_tz(2)", "12:34:56.789+05:30")) // β†’ ["12:34:56.79+05:30"] - // Output: ["12:34:56.79+05:30"] -} - -func Example_timestamp() { - pp(path.MustQuery("$.timestamp()", "2023-08-15 12:34:56")) // β†’ "2023-08-15T12:34:56" - // Output: ["2023-08-15T12:34:56"] -} - -func Example_timestamp_precision() { - arg := "2023-08-15 12:34:56.789" - pp(path.MustQuery("$.timestamp(2)", arg)) // β†’ ["2023-08-15T12:34:56.79"] - // Output: ["2023-08-15T12:34:56.79"] -} - -func Example_timestamp_tz() { - arg := "2023-08-15 12:34:56+05:30" - pp(path.MustQuery("$.timestamp_tz()", arg)) // β†’ ["2023-08-15T12:34:56+05:30"] - // Output: ["2023-08-15T12:34:56+05:30"] -} - -func Example_timestamp_tz_precision() { - arg := "2023-08-15 12:34:56.789+05:30" - pp(path.MustQuery("$.timestamp_tz(2)", arg)) // β†’ ["2023-08-15T12:34:56.79+05:30"] - // Output: ["2023-08-15T12:34:56.79+05:30"] -} - -func Example_keyvalue() { - pp(path.MustQuery("$.keyvalue()", val(`{"x": "20", "y": 32}`))) - // β†’ [{"id":0,"key":"x","value":"20"},{"id":0,"key":"y","value":32}] - - // Output: [{"id":0,"key":"x","value":"20"},{"id":0,"key":"y","value":32}] -} - -func Example_eq() { - pp(path.MustQuery("$[*] ? (@ == 1)", val(`[1, "a", 1, 3]`))) // β†’ [1,1] - pp(path.MustQuery(`$[*] ? (@ == "a")`, val(`[1, "a", 1, 3]`))) // β†’ ["a"] - // Output: [1,1] - // ["a"] -} - -func Example_ne() { - pp(path.MustQuery("$[*] ? (@ != 1)", val(`[1, 2, 1, 3]`))) // β†’ [2,3] - pp(path.MustQuery(`$[*] ? 
(@ <> "b")`, val(`["a", "b", "c"]`))) // β†’ ["a","c"] - // Output: [2,3] - // ["a","c"] -} - -func Example_lt() { - pp(path.MustQuery("$[*] ? (@ < 2)", val(`[1, 2, 3]`))) // β†’ [1] - // Output: [1] -} - -func Example_le() { - pp(path.MustQuery(`$[*] ? (@ <= "b")`, val(`["a", "b", "c"]`))) // β†’ ["a","b"] - // Output: ["a","b"] -} - -func Example_gt() { - pp(path.MustQuery("$[*] ? (@ > 2)", val(`[1, 2, 3]`))) // β†’ [3] - // Output: [3] -} - -func Example_ge() { - pp(path.MustQuery("$[*] ? (@ >= 2)", val(`[1, 2, 3]`))) // β†’ [2,3] - // Output: [2,3] -} - -func Example_true() { - arg := val(`[ - {"name": "John", "parent": false}, - {"name": "Chris", "parent": true} - ]`) - pp(path.MustQuery("$[*] ? (@.parent == true)", arg)) // β†’ [{"name":"Chris","parent":true}] - // Output: [{"name":"Chris","parent":true}] -} - -func Example_false() { - arg := val(`[ - {"name": "John", "parent": false}, - {"name": "Chris", "parent": true} - ]`) - pp(path.MustQuery("$[*] ? (@.parent == false)", arg)) // β†’ [{"name":"John","parent":false}] - // Output: [{"name":"John","parent":false}] -} - -func Example_null() { - arg := val(`[ - {"name": "Mary", "job": null}, - {"name": "Michael", "job": "driver"} - ]`) - pp(path.MustQuery("$[*] ? (@.job == null) .name", arg)) // β†’ ["Mary"] - // Output: ["Mary"] -} - -func Example_and() { - pp(path.MustQuery("$[*] ? (@ > 1 && @ < 5)", val(`[1, 3, 7]`))) // β†’ [3] - // Output: [3] -} - -func Example_or() { - pp(path.MustQuery("$[*] ? (@ < 1 || @ > 5)", val(`[1, 3, 7]`))) // β†’ [7] - // Output: [7] -} - -func Example_not() { - pp(path.MustQuery("$[*] ? (!(@ < 5))", val(`[1, 3, 7]`))) // β†’ [7] - // Output: [7] -} - -func Example_is_unknown() { - pp(path.MustQuery("$[*] ? ((@ > 0) is unknown)", val(`[-1, 2, 7, "foo"]`))) // β†’ ["foo"] - // Output: ["foo"] -} - -func Example_like_regex() { - arg := val(`["abc", "abd", "aBdC", "abdacb", "babc"]`) - pp(path.MustQuery(`$[*] ? (@ like_regex "^ab.*c")`, arg)) // β†’ ["abc","abdacb"] - pp(path.MustQuery(`$[*] ? (@ like_regex "^ab.*c" flag "i")`, arg)) // β†’ ["abc","aBdC","abdacb"] - // Output: ["abc","abdacb"] - // ["abc","aBdC","abdacb"] -} - -func Example_starts_with() { - arg := val(`["John Smith", "Mary Stone", "Bob Johnson"]`) - pp(path.MustQuery(`$[*] ? (@ starts with "John")`, arg)) // β†’ ["John Smith"] - // Output: ["John Smith"] -} - -func Example_exists() { - arg := val(`{"x": [1, 2], "y": [2, 4]}`) - pp(path.MustQuery("strict $.* ? (exists (@ ? (@[*] > 2)))", arg)) // β†’ [[2,4]] - pp(path.MustQuery("strict $ ? (exists (@.name)) .name", val(`{"value": 42}`))) // β†’ [] - // Output: [[2,4]] - // [] -} - -func Example_regexp_string() { - p := path.MustParse("$.* ?(@ like_regex \"^\\\\d+$\")") - pp(p.MustQuery(context.Background(), val(`{"x": "42", "y": "no"}`))) // β†’ ["42"] - // Output: ["42"] -} - -func Example_regexp_literal() { - p := path.MustParse(`$.* ?(@ like_regex "^\\d+$")`) - pp(p.MustQuery(context.Background(), val(`{"x": "42", "y": "no"}`))) // β†’ ["42"] - // Output: ["42"] -} - -func Example_custom_time_zone() { - p := path.MustParse("$.timestamp_tz()") - arg := "2023-08-15 12:34:56" - pp(p.MustQuery(context.Background(), arg, exec.WithTZ())) // β†’ ["2023-08-15T12:34:56+00:00"] - - // Add a time zone to the context. - tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - - // The output will now be in the custom time zone. 
- pp(p.MustQuery(ctx, arg, exec.WithTZ())) // β†’ ["2023-08-15T12:34:56-04:00"] - // Output: - // ["2023-08-15T12:34:56+00:00"] - // ["2023-08-15T12:34:56-04:00"] -} diff --git a/path/types/date.go b/path/types/date.go deleted file mode 100644 index 42efec0..0000000 --- a/path/types/date.go +++ /dev/null @@ -1,75 +0,0 @@ -package types - -import ( - "context" - "fmt" - "time" -) - -// Date represents the PostgreSQL date type. -type Date struct { - time.Time -} - -// NewDate coerces src into a Date. -func NewDate(src time.Time) *Date { - // Convert result type to a date - return &Date{ - time.Date(src.Year(), src.Month(), src.Day(), 0, 0, 0, 0, offsetZero), - } -} - -// GoTime returns the underlying time.Time object. -func (d *Date) GoTime() time.Time { return d.Time } - -// dateFormat represents the canonical string format for Date values. -const dateFormat = "2006-01-02" - -// String returns the string representation of d. -func (d *Date) String() string { - return d.Format(dateFormat) -} - -// ToTimestamp converts ts to *Timestamp. -func (d *Date) ToTimestamp(context.Context) *Timestamp { - return NewTimestamp(d.Time) -} - -// ToTimestampTZ converts d to TimestampTZ in the time zone in ctx. -func (d *Date) ToTimestampTZ(ctx context.Context) *TimestampTZ { - t := d.Time - return NewTimestampTZ( - ctx, - time.Date( - t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, TZFromContext(ctx), - ), - ) -} - -// Compare compares the time instant d with u. If d is before u, it returns -// -1; if d is after u, it returns +1; if they're the same, it returns 0. -func (d *Date) Compare(u time.Time) int { - return d.Time.Compare(u) -} - -// MarshalJSON implements the json.Marshaler interface. The time is a quoted -// string in the RFC 3339 format with sub-second precision. -func (d *Date) MarshalJSON() ([]byte, error) { - const dateJSONSize = len(dateFormat) + len(`""`) - b := make([]byte, 0, dateJSONSize) - b = append(b, '"') - b = d.AppendFormat(b, dateFormat) - b = append(b, '"') - return b, nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. The time must be a -// quoted string in the RFC 3339 format. -func (d *Date) UnmarshalJSON(data []byte) error { - tim, err := time.Parse(dateFormat, string(data[1:len(data)-1])) - if err != nil { - return fmt.Errorf("%w: Cannot parse %s as %q", ErrSQLType, data, dateFormat) - } - *d = *NewDate(tim) - return nil -} diff --git a/path/types/date_test.go b/path/types/date_test.go deleted file mode 100644 index 75245b9..0000000 --- a/path/types/date_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package types - -import ( - "context" - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDate(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range timestampTestCases(t) { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Don't test Time and TimeTZ - switch tc.ctor(time.Time{}, &time.Location{}).(type) { - case *Time, *TimeTZ: - return - } - - // Convert to dates. 
- exp := tc.time - tc.time = time.Date( - exp.Year(), exp.Month(), exp.Day(), - 0, 0, 0, 0, offsetZero, - ) - date := NewDate(tc.time) - a.Equal(&Date{Time: tc.time}, date) - a.Equal(tc.time, date.GoTime()) - a.Equal(tc.time.Format(dateFormat), date.String()) - - // Check JSON - json, err := date.MarshalJSON() - r.NoError(err) - a.JSONEq(fmt.Sprintf("%q", date.String()), string(json)) - ts2 := new(Date) - r.NoError(ts2.UnmarshalJSON(json)) - a.Equal(date, ts2) - - // Test Conversion functions. - loc := time.FixedZone("", -3*secondsPerHour) - ctx := ContextWithTZ(ctx, loc) - a.Equal(NewTimestamp(date.Time), date.ToTimestamp(ctx)) - a.Equal( - NewTimestampTZ( - ctx, - time.Date( - date.Year(), date.Month(), date.Day(), 0, 0, 0, 0, loc, - ), - ), - date.ToTimestampTZ(ctx), - ) - }) - } -} - -func TestDateInvalidJSON(t *testing.T) { - t.Parallel() - ts := new(Date) - err := ts.UnmarshalJSON([]byte(`"i am not a date"`)) - require.Error(t, err) - require.EqualError(t, err, fmt.Sprintf( - "type: Cannot parse %q as %q", - "i am not a date", dateFormat, - )) - require.ErrorIs(t, err, ErrSQLType) -} - -func TestDateCompare(t *testing.T) { - t.Parallel() - a := assert.New(t) - - apr29 := time.Date(2024, 4, 29, 0, 0, 0, 0, time.UTC) - date := &Date{Time: apr29} - a.Equal(-1, date.Compare(time.Date(2024, 4, 30, 0, 0, 0, 0, time.UTC))) - a.Equal(1, date.Compare(time.Date(2024, 4, 28, 0, 0, 0, 0, time.UTC))) - a.Equal(0, date.Compare(apr29)) - a.Equal(0, date.Compare(time.Date(2024, 4, 29, 0, 0, 0, 0, time.UTC))) -} diff --git a/path/types/example_test.go b/path/types/example_test.go deleted file mode 100644 index 4de269a..0000000 --- a/path/types/example_test.go +++ /dev/null @@ -1,318 +0,0 @@ -//nolint:godot -package types_test - -import ( - "context" - "fmt" - "log" - "time" - - "github.com/theory/sqljson/path/types" -) - -// Postgres: -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56+05"', '$.timestamp_tz()'); -// jsonb_path_query_tz -// ----------------------------- -// "2023-08-15T12:34:56+05:00" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56+05"', '$.timestamp_tz().string()'); -// jsonb_path_query_tz -// -------------------------- -// "2023-08-15 07:34:56+00" -// (1 row) -// -// [types.TimestampTZ]: -func Example_uTC() { - offsetPlus5 := time.FixedZone("", 5*3600) - ctx := types.ContextWithTZ(context.Background(), time.UTC) - - timestamp := types.NewTimestampTZ( - ctx, - time.Date(2023, 8, 15, 12, 34, 56, 0, offsetPlus5), - ) - - fmt.Printf("%v\n", timestamp) - // Output: 2023-08-15T12:34:56+05:00 -} - -// Postgres: -// -// david=# set time zone 'America/New_York'; -// SET -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56+05"', '$.timestamp_tz()'); -// jsonb_path_query_tz -// ----------------------------- -// "2023-08-15T12:34:56+05:00" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56+05"', '$.timestamp_tz().string()'); -// jsonb_path_query_tz -// -------------------------- -// "2023-08-15 03:34:56-04" -// (1 row) -// -// [types.TimestampTZ]: -func Example_nYC() { - tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - - offsetPlus5 := time.FixedZone("", 5*3600) - timestamp := types.NewTimestampTZ( - ctx, - time.Date(2023, 8, 15, 12, 34, 56, 0, offsetPlus5), - ) - - fmt.Printf("%v\n", timestamp) - // Output: 2023-08-15T12:34:56+05:00 -} - -// Postgres: -// -// david=# set time zone 'America/New_York'; -// SET -// david=# 
select jsonb_path_query_tz('"2023-08-15"', '$.date()'); -// jsonb_path_query_tz -// --------------------- -// "2023-08-15" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15"', '$.timestamp()'); -// jsonb_path_query_tz -// ----------------------- -// "2023-08-15T00:00:00" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15"', '$.timestamp_tz()'); -// jsonb_path_query_tz -// ----------------------------- -// "2023-08-15T04:00:00+00:00" -// (1 row) -// -// [types.Date]: -func ExampleDate() { - date := types.NewDate(time.Date(2023, 8, 15, 12, 34, 56, 0, time.UTC)) - fmt.Printf("%v\n", date) - - tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - - fmt.Printf("%v\n", date.ToTimestamp(ctx)) - // Difference in cast value formatting thread: - // https://www.postgresql.org/message-id/flat/7DE080CE-6D8C-4794-9BD1-7D9699172FAB%40justatheory.com - fmt.Printf("%v\n", date.ToTimestampTZ(ctx)) - // Output: 2023-08-15 - // 2023-08-15T00:00:00 - // 2023-08-15T00:00:00-04:00 -} - -// Postgres: -// -// david=# set time zone 'America/Phoenix'; -// SET -// david=# select jsonb_path_query_tz('"12:34:56"', '$.time()'); -// jsonb_path_query_tz -// --------------------- -// "12:34:56" -// (1 row) -// -// david=# select jsonb_path_query_tz('"12:34:56"', '$.time_tz()'); -// jsonb_path_query_tz -// --------------------- -// "12:34:56-07:00" -// (1 row) -// -// [types.Time]: -func ExampleTime() { - aTime := types.NewTime(time.Date(2023, 8, 15, 12, 34, 56, 0, time.UTC)) - fmt.Printf("%v\n", aTime) - - tz, err := time.LoadLocation("America/Phoenix") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - fmt.Printf("%v\n", aTime.ToTimeTZ(ctx)) - // Output: 12:34:56 - // 12:34:56-07:00 -} - -// Postgres: -// -// david=# set time zone 'UTC'; -// SET -// david=# select jsonb_path_query_tz('"12:34:56-04:00"', '$.time_tz()'); -// jsonb_path_query_tz -// --------------------- -// "12:34:56-04:00" -// (1 row) -// -// david=# select jsonb_path_query_tz('"12:34:56-04:00"', '$.time()'); -// jsonb_path_query_tz -// --------------------- -// "12:34:56" -// (1 row) -// -// david=# set time zone 'America/New_York'; -// SET -// david=# select jsonb_path_query_tz('"12:34:56-04:00"', '$.time()'); -// jsonb_path_query_tz -// --------------------- -// "12:34:56" -// (1 row) -// -// [types.TimeTZ]: -func ExampleTimeTZ() { - tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - - timeTZ := types.NewTimeTZ(time.Date(2023, 8, 15, 12, 34, 56, 0, tz)) - fmt.Printf("%v\n", timeTZ) - - ctx := types.ContextWithTZ(context.Background(), time.UTC) - fmt.Printf("%v\n", timeTZ.ToTime(ctx)) - - //nolint:gosmopolitan - ctx = types.ContextWithTZ(context.Background(), time.Local) - fmt.Printf("%v\n", timeTZ.ToTime(ctx)) - // Output: 12:34:56-04:00 - // 12:34:56 - // 12:34:56 -} - -// Postgres: -// -// david=# set time zone 'America/Phoenix'; -// SET -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56"', '$.timestamp()'); -// jsonb_path_query_tz -// ----------------------- -// "2023-08-15T12:34:56" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56"', '$.date()'); -// jsonb_path_query_tz -// --------------------- -// "2023-08-15" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56"', '$.time()'); -// jsonb_path_query_tz -// --------------------- -// "12:34:56" -// (1 row) -// -// david=# select 
jsonb_path_query_tz('"2023-08-15 12:34:56"', '$.timestamp_tz()'); -// jsonb_path_query_tz -// ----------------------------- -// "2023-08-15T19:34:56+00:00" -// (1 row) -// -// [types.Timestamp]: -func ExampleTimestamp() { - ts := types.NewTimestamp(time.Date(2023, 8, 15, 12, 34, 56, 0, time.UTC)) - fmt.Printf("%v\n", ts) - - tz, err := time.LoadLocation("America/Phoenix") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - fmt.Printf("%v\n", ts.ToDate(ctx)) - fmt.Printf("%v\n", ts.ToTime(ctx)) - // Difference in cast value formatting thread: - // https://www.postgresql.org/message-id/flat/7DE080CE-6D8C-4794-9BD1-7D9699172FAB%40justatheory.com - fmt.Printf("%v\n", ts.ToTimestampTZ(ctx)) - // Output: 2023-08-15T12:34:56 - // 2023-08-15 - // 12:34:56 - // 2023-08-15T12:34:56-07:00 -} - -// Postgres: -// -// david=# set time zone 'UTC'; -// SET -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.timestamp_tz()'); -// jsonb_path_query_tz -// ----------------------------- -// "2023-08-15T12:34:56-04:00" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.timestamp()'); -// jsonb_path_query_tz -// ----------------------- -// "2023-08-15T16:34:56" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.date()'); -// jsonb_path_query_tz -// --------------------- -// "2023-08-15" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.time()'); -// jsonb_path_query_tz -// --------------------- -// "16:34:56" -// (1 row) -// -// david=# set time zone 'America/Los_Angeles'; -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.timestamp()'); -// jsonb_path_query_tz -// ----------------------- -// "2023-08-15T09:34:56" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.date()'); -// jsonb_path_query_tz -// --------------------- -// "2023-08-15" -// (1 row) -// -// david=# select jsonb_path_query_tz('"2023-08-15 12:34:56-04"', '$.time()'); -// jsonb_path_query_tz -// --------------------- -// "09:34:56" -// (1 row) -// -// [types.TimestampTZ]: -func ExampleTimestampTZ() { - tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - - ctx := types.ContextWithTZ(context.Background(), time.UTC) - tsTZ := types.NewTimestampTZ(ctx, time.Date(2023, 8, 15, 12, 34, 56, 0, tz)) - fmt.Printf("%v\n", tsTZ) - fmt.Printf("%v\n", tsTZ.ToTimestamp(ctx)) - fmt.Printf("%v\n", tsTZ.ToDate(ctx)) - fmt.Printf("%v\n", tsTZ.ToTime(ctx)) - - tz, err = time.LoadLocation("America/Los_Angeles") - if err != nil { - log.Fatal(err) - } - ctx = types.ContextWithTZ(context.Background(), tz) - fmt.Printf("%v\n", tsTZ.ToTimestamp(ctx)) - fmt.Printf("%v\n", tsTZ.ToDate(ctx)) - fmt.Printf("%v\n", tsTZ.ToTime(ctx)) - // Output: 2023-08-15T12:34:56-04:00 - // 2023-08-15T16:34:56 - // 2023-08-15 - // 16:34:56 - // 2023-08-15T09:34:56 - // 2023-08-15 - // 09:34:56 -} diff --git a/path/types/offset.go b/path/types/offset.go deleted file mode 100644 index d835fbe..0000000 --- a/path/types/offset.go +++ /dev/null @@ -1,56 +0,0 @@ -package types - -import ( - "context" - "time" -) - -// offsetLocationFor returns an offset-only time.Location with the offset of -// the location of t. 
-func offsetLocationFor(t time.Time) *time.Location { - if name, off := t.Zone(); name != "" { - return time.FixedZone("", off) - } - return t.Location() -} - -// offsetOnlyTimeFor returns t if its time zone is offset-only or a new -// offset-only time.Time with the offset of t's zone. -func offsetOnlyTimeFor(t time.Time) time.Time { - if name, off := t.Zone(); name != "" { - return t.In(time.FixedZone("", off)) - } - return t -} - -// key is an unexported type for keys defined in this package. This prevents -// collisions with keys defined in other packages. -type key int - -//nolint:gochecknoglobals -var ( - // offsetZero represents time zone offset zero. - offsetZero = time.FixedZone("", 0) - - // tzKey is the key for time.Location values in Contexts. It is unexported; - // clients use ContextWithTZ and TZFromContext instead of using this key - // directly. - tzKey key -) - -// ContextWithTZ returns a new Context that carries value tz. -func ContextWithTZ(ctx context.Context, tz *time.Location) context.Context { - if tz == nil { - return ctx - } - return context.WithValue(ctx, tzKey, tz) -} - -// TZFromContext returns the time.Location value stored in ctx or time.UTC. -func TZFromContext(ctx context.Context) *time.Location { - tz, ok := ctx.Value(tzKey).(*time.Location) - if ok { - return tz - } - return time.UTC -} diff --git a/path/types/offset_test.go b/path/types/offset_test.go deleted file mode 100644 index 20bd25d..0000000 --- a/path/types/offset_test.go +++ /dev/null @@ -1,93 +0,0 @@ -package types - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/assert" -) - -func loadTZ(name string) *time.Location { - loc, err := time.LoadLocation(name) - if err != nil { - panic(err) - } - return loc -} - -type zoneTestCase struct { - test string - zone string - loc *time.Location - offset int -} - -func zoneTestCases() []zoneTestCase { - return []zoneTestCase{ - {"UTC", "UTC", loadTZ("UTC"), 0}, - {"empty", "UTC", loadTZ(""), 0}, - {"zero", "", time.FixedZone("", 0), 0}, - {"seven", "", time.FixedZone("", secondsPerHour*7), secondsPerHour * 7}, - {"neg_3", "", time.FixedZone("", secondsPerHour*-3), secondsPerHour * -3}, - {"America/New_York", "EDT", loadTZ("America/New_York"), secondsPerHour * -4}, - {"Asia/Tokyo", "JST", loadTZ("Asia/Tokyo"), secondsPerHour * 9}, - {"Africa/Nairobi", "EAT", loadTZ("Africa/Nairobi"), secondsPerHour * 3}, - } -} - -func TestOffsetLocationForAndOnlyTimeFor(t *testing.T) { - t.Parallel() - - for _, tc := range zoneTestCases() { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - // Create a time in the location. - local := time.Date(2024, 6, 24, 10, 17, 32, 0, tc.loc) - name, off := local.Zone() - a.Equal(tc.zone, name) - a.Equal(tc.offset, off) - - // Test offsetLocationFor - loc := offsetLocationFor(local) - a.Empty(loc.String()) - ts := time.Date(2024, 6, 24, 10, 17, 32, 0, loc) - name, off = ts.Zone() - a.Empty(name) - a.Equal(tc.offset, off) - - // Test offsetOnlyTimeFor. 
- ts = offsetOnlyTimeFor(local) - name, off = ts.Zone() - a.Empty(name) - a.Equal(tc.offset, off) - }) - } -} - -func TestContextWithTZ(t *testing.T) { - t.Parallel() - - for _, tc := range zoneTestCases() { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - ctx := ContextWithTZ(context.Background(), tc.loc) - loc := TZFromContext(ctx) - a.Equal(tc.loc, loc) - }) - } - - t.Run("no_tz", func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - loc := TZFromContext(context.Background()) - a.Equal(time.UTC, loc) - loc = TZFromContext(ContextWithTZ(context.Background(), nil)) - a.Equal(time.UTC, loc) - }) -} diff --git a/path/types/parse_time.go b/path/types/parse_time.go deleted file mode 100644 index 76c0c9b..0000000 --- a/path/types/parse_time.go +++ /dev/null @@ -1,152 +0,0 @@ -package types - -import ( - "context" - "math" - "time" -) - -// ParseTime parses src into [time.Time] by iterating through a list of valid -// date, time, and timestamp formats according to SQL/JSON standard: date, -// time_tz, time, timestamp_tz, and timestamp. Returns false if the string -// cannot be parsed by any of the formats. -// -// We also support ISO 8601 format (with "T") for timestamps, because -// PostgreSQL to_json() and to_jsonb() functions use this format. -func ParseTime(ctx context.Context, src string, precision int) (DateTime, bool) { - // Date first. - value, err := time.Parse("2006-01-02", src) - if err == nil { - return NewDate(value), true - } - - // Time with TZ - for _, format := range []string{ - "15:04:05Z07", - "15:04:05Z07:00", - } { - value, err := time.Parse(format, src) - if err == nil { - return NewTimeTZ(adjustPrecision(offsetOnlyTimeFor(value), precision)), true - } - } - - // Time without TZ - value, err = time.Parse("15:04:05", src) - if err == nil { - return NewTime(adjustPrecision(value, precision)), true - } - - // Timestamp with tz, with and without "T" - for _, format := range []string{ - "2006-01-02T15:04:05Z07", - "2006-01-02 15:04:05Z07", - "2006-01-02T15:04:05Z07:00", - "2006-01-02 15:04:05Z07:00", - } { - value, err := time.Parse(format, src) - if err == nil { - return NewTimestampTZ(ctx, adjustPrecision(value, precision)), true - } - } - - // Timestamp without tz, with and without "T" - for _, format := range []string{ - "2006-01-02T15:04:05", - "2006-01-02 15:04:05", - } { - value, err := time.Parse(format, src) - if err == nil { - return NewTimestamp(adjustPrecision(value, precision)), true - } - } - - // Not found. 
- return nil, false -} - -func adjustPrecision(value time.Time, precision int) time.Time { - if precision > -1 { - value = value.Round(time.Second / time.Duration(math.Pow10(precision))) - } - return value -} - -// // https://www.postgresql.org/docs/devel/functions-formatting.html -// // https://pkg.go.dev/time#pkg-constants -// var formatMap = map[string]string{ -// "HH": "03", // hour of day (01–12) -// "HH12": "03", // hour of day (01–12) -// "HH24": "15", // hour of day (00–23) -// "MI": "04", // minute (00–59) -// "SS": "05", // second (00–59) -// "MS": ".000", // millisecond (000–999) -// "US": ".000000", // microsecond (000000–999999) -// "FF1": ".0", // tenth of second (0–9) -// "FF2": ".00", // hundredth of second (00–99) -// "FF3": ".000", // millisecond (000–999) -// "FF4": ".0000", // tenth of a millisecond (0000–9999) -// "FF5": ".00000", // hundredth of a millisecond (00000–99999) -// "FF6": ".000000", // microsecond (000000–999999) -// "SSSS": "", // seconds past midnight (0–86399) -// "SSSSS": "", // seconds past midnight (0–86399) -// "AM": "PM", // meridiem indicator (without periods) -// "PM": "PM", -// "am": "pm", -// "pm": "pm", -// "A.M.": "", // meridiem indicator (with periods) -// "P.M.": "", -// "a.m.": "", -// "p.m.": "", -// "Y,YYY": "", // year (4 or more digits) with comma -// "YYYY": "2006", // year (4 or more digits) -// "YYY": "", // last 3 digits of year -// "YY": "06", // last 2 digits of year -// "Y": "", // last digit of year -// "IYYY": "", // ISO 8601 week-numbering year (4 or more digits) -// "IYY": "", // last 3 digits of ISO 8601 week-numbering year -// "IY": "", // last 2 digits of ISO 8601 week-numbering year -// "I": "", // last digit of ISO 8601 week-numbering year -// "BC": "BC", // era indicator (without periods) -// "AD": "", -// "bc": "", -// "ad": "", -// "B.C.": "", // era indicator (with periods) -// "A.D.": "", -// "b.c.": "", -// "a.d.": "", - -// "MONTH": "", // full upper case month name (blank-padded to 9 chars) -// "Month": "January", // full capitalized month name (blank-padded to 9 chars) -// "month": "", // full lower case month name (blank-padded to 9 chars) -// "MON": "", // abbreviated upper case month name (3 chars in English, localized lengths vary) -// "Mon": "Jan", // abbreviated capitalized month name (3 chars in English, localized lengths vary) -// "mon": "", // abbreviated lower case month name (3 chars in English, localized lengths vary) -// "MM": "01", // month number (01–12) -// "DAY": "", // full upper case day name (blank-padded to 9 chars) -// "Day": "Monday", // full capitalized day name (blank-padded to 9 chars) -// "day": "", // full lower case day name (blank-padded to 9 chars) -// "DY": "", // abbreviated upper case day name (3 chars in English, localized lengths vary) -// "Dy": "Mon", // abbreviated capitalized day name (3 chars in English, localized lengths vary) -// "dy": "", // abbreviated lower case day name (3 chars in English, localized lengths vary) -// "DDD": "", // day of year (001–366) -// "IDDD": "", // day of ISO 8601 week-numbering year -// (001–371; day 1 of the year is Monday of the first ISO week) -// "DD": "02", // day of month (01–31) -// "D": "", // day of the week, Sunday (1) to Saturday (7) -// "ID": "", // ISO 8601 day of the week, Monday (1) to Sunday (7) -// "W": "", // week of month (1–5) (the first week starts on the first day of the month) -// "WW": "", // week number of year (1–53) (the first week starts on the first day of the year) -// "IW": "", // week number of ISO 8601 
week-numbering year -// (01–53; the first Thursday of the year is in week 1) -// "CC": "", // century (2 digits) (the twenty-first century starts on 2001-01-01) -// "J": "", // Julian Date (integer days since November 24, 4714 BC at local midnight; see Section B.7) -// "Q": "", // quarter -// "RM": "", // month in upper case Roman numerals (I–XII; I=January) -// "rm": "", // month in lower case Roman numerals (i–xii; i=January) -// "TZ": "MST", // upper case time-zone abbreviation -// "tz": "", // lower case time-zone abbreviation -// "TZH": "-07", // time-zone hours -// "TZM": "", // time-zone minutes -// "OF": "-07:00", // time-zone offset from UTC (HH or HH:MM) -// } diff --git a/path/types/parse_time_test.go b/path/types/parse_time_test.go deleted file mode 100644 index 1c970ab..0000000 --- a/path/types/parse_time_test.go +++ /dev/null @@ -1,372 +0,0 @@ -package types - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/assert" -) - -//nolint:unparam // keep s in case we need it in the future. -func pos(h, m, s int) *time.Location { - return time.FixedZone("", h*60*60+m*60+s) -} - -//nolint:unparam // keep s in case we need it in the future. -func neg(h, m, s int) *time.Location { - return time.FixedZone("", -(h*60*60 + m*60 + s)) -} - -type TSTestCase struct { - test string - value string - time time.Time - ctor func(t time.Time, tz *time.Location) DateTime -} - -func newTestDate(t time.Time, _ *time.Location) DateTime { return &Date{t} } -func newTestTime(t time.Time, _ *time.Location) DateTime { return &Time{t} } -func newTestTimeTZ(t time.Time, _ *time.Location) DateTime { return &TimeTZ{t} } -func newTestTimestamp(t time.Time, _ *time.Location) DateTime { return &Timestamp{t} } -func newTestTimestampTZ(t time.Time, tz *time.Location) DateTime { return &TimestampTZ{t, tz} } - -func timestampTestCases(t *testing.T) []TSTestCase { - t.Helper() - return []TSTestCase{ - // Date - { - test: "date", - value: "2024-04-29", - time: time.Date(2024, 4, 29, 0, 0, 0, 0, offsetZero), - ctor: newTestDate, - }, - // time with time zone - { - test: "time_tz_hm", - value: "14:15:31+01:22", - time: time.Date(0, 1, 1, 14, 15, 31, 0, pos(1, 22, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_sub_hm", - value: "14:15:31.785996+01:22", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, pos(1, 22, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_pos_hm", - value: "14:15:31.785996+03:14", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, pos(3, 14, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_sub_neg_hm", - value: "14:15:31.785996-03:14", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, neg(3, 14, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_neg_hm", - value: "14:15:31-03:14", - time: time.Date(0, 1, 1, 14, 15, 31, 0, neg(3, 14, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_sub_h", - value: "14:15:31.785996+01", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, pos(1, 0, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_h", - value: "14:15:31+01", - time: time.Date(0, 1, 1, 14, 15, 31, 0, pos(1, 0, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_sub_neg_h", - value: "14:15:31.785996-11", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, neg(11, 0, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_neg_h", - value: "14:15:31-11", - time: time.Date(0, 1, 1, 14, 15, 31, 0, neg(11, 0, 0)), - ctor: newTestTimeTZ, - }, - { - test: "time_tz_sub_z", - value: "14:15:31.785996Z", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, offsetZero), 
- ctor: newTestTimeTZ, - }, - { - test: "time_tz_z", - value: "14:15:31Z", - time: time.Date(0, 1, 1, 14, 15, 31, 0, offsetZero), - ctor: newTestTimeTZ, - }, - // time without time zone - { - test: "time_sub", - value: "14:15:31.785996", - time: time.Date(0, 1, 1, 14, 15, 31, 785996000, offsetZero), - ctor: newTestTime, - }, - { - test: "time_no_sub", - value: "14:15:31", - time: time.Date(0, 1, 1, 14, 15, 31, 0, offsetZero), - ctor: newTestTime, - }, - // timestamp "T" with time zone - { - test: "timestamp_t_tz_sub_hm", - value: "2024-04-29T15:11:38.06318+02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, pos(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_t_tz_hm", - value: "2024-04-29T15:11:38+02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, pos(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_t_tz_sub_neg_hm", - value: "2024-04-29T15:11:38.06318-02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, neg(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_t_tz_neg_hm", - value: "2024-04-29T15:11:38-02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, neg(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_t_tz_sub_z", - value: "2024-04-29T15:11:38.06318Z", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, offsetZero), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_t_tz_z", - value: "2024-04-29T15:11:38Z", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, offsetZero), - ctor: newTestTimestampTZ, - }, - // timestamp "T" without time zone - { - test: "timestamp_t_sub_hms", - value: "2024-04-29T15:11:38.06318", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, offsetZero), - ctor: newTestTimestamp, - }, - { - test: "timestamp_t_hms", - value: "2024-04-29T15:11:38", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, offsetZero), - ctor: newTestTimestamp, - }, - - // timestamp " " with time zone - { - test: "timestamp_tz_sub_hm", - value: "2024-04-29 15:11:38.06318+02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, pos(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_tz_hm", - value: "2024-04-29 15:11:38+02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, pos(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_tz_sub_neg_hm", - value: "2024-04-29 15:11:38.06318-02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, neg(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_tz_neg_hm", - value: "2024-04-29 15:11:38-02:30", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, neg(2, 30, 0)), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_tz_sub_z", - value: "2024-04-29 15:11:38.06318Z", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, offsetZero), - ctor: newTestTimestampTZ, - }, - { - test: "timestamp_tz_z", - value: "2024-04-29 15:11:38Z", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, offsetZero), - ctor: newTestTimestampTZ, - }, - // timestamp " " without time zone - { - test: "timestamp_sub_hms", - value: "2024-04-29 15:11:38.06318", - time: time.Date(2024, 4, 29, 15, 11, 38, 63180000, offsetZero), - ctor: newTestTimestamp, - }, - { - test: "timestamp_hms", - value: "2024-04-29 15:11:38", - time: time.Date(2024, 4, 29, 15, 11, 38, 0, offsetZero), - ctor: newTestTimestamp, - }, - } -} - -func TestParseTime(t *testing.T) { - t.Parallel() - for _, tc := range timestampTestCases(t) { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - for _, zc := range zoneTestCases() { - t.Run(zc.test, func(t *testing.T) { - t.Parallel() 
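				// Every literal from timestampTestCases is parsed under every zone
				// from zoneTestCases; the concrete type asserted below follows from
				// the shape of the input: "2024-04-29" comes back as *Date,
				// "14:15:31.785996" as *Time, "14:15:31Z" as *TimeTZ, and
				// "2024-04-29 15:11:38+02:30" as *TimestampTZ carrying the zone
				// from the context.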
- a := assert.New(t) - - ctx := ContextWithTZ(context.Background(), zc.loc) - tim, ok := ParseTime(ctx, tc.value, -1) - a.True(ok) - a.Equal(tc.ctor(tc.time, TZFromContext(ctx)), tim) - }) - } - }) - } -} - -func TestParseFail(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range []struct { - test string - value string - }{ - {"bogus", "bogus"}, - {"bad_date", "2024-02-30"}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - tim, ok := ParseTime(ctx, tc.value, -1) - a.False(ok) - a.Nil(tim) - }) - } -} - -func TestParseTimePrecision(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - value string - one int - two int - six int - }{ - { - test: "time_nine_places", - value: "14:15:31.78599685301", - one: 800000000, - two: 790000000, - six: 785997000, - }, - { - test: "time_six_places", - value: "14:15:31.785996", - one: 800000000, - two: 790000000, - six: 785996000, - }, - { - test: "time_three_places", - value: "14:15:31.785", - one: 800000000, - two: 790000000, - six: 785000000, - }, - { - test: "time_two_places", - value: "14:15:31.78", - one: 800000000, - two: 780000000, - six: 780000000, - }, - { - test: "time_one_place", - value: "14:15:31.7", - one: 700000000, - two: 700000000, - six: 700000000, - }, - { - test: "ts_nine_places", - value: "2020-03-11T11:22:42.465029739+01", - one: 500000000, - two: 470000000, - six: 465030000, - }, - { - test: "ts_six_places", - value: "2020-03-11T11:22:42.465029+01", - one: 500000000, - two: 470000000, - six: 465029000, - }, - { - test: "ts_three_places", - value: "2020-03-11T11:22:42.465+01", - one: 500000000, - two: 470000000, - six: 465000000, - }, - { - test: "ts_two_places", - value: "2020-03-11T11:22:42.46+01", - one: 500000000, - two: 460000000, - six: 460000000, - }, - { - test: "ts_one_place", - value: "2020-03-11T11:22:42.4+01", - one: 400000000, - two: 400000000, - six: 400000000, - }, - { - test: "ts_no_places", - value: "2020-03-11T11:22:42+01", - }, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - cmpNano(a, tc.value, 0, 0) - cmpNano(a, tc.value, 1, tc.one) - cmpNano(a, tc.value, 2, tc.two) - cmpNano(a, tc.value, 6, tc.six) - }) - } -} - -func cmpNano(a *assert.Assertions, value string, precision, exp int) { - dt, ok := ParseTime(context.Background(), value, precision) - a.True(ok) - a.Implements((*DateTime)(nil), dt) - a.Equal(exp, dt.GoTime().Nanosecond()) -} diff --git a/path/types/time.go b/path/types/time.go deleted file mode 100644 index d74d955..0000000 --- a/path/types/time.go +++ /dev/null @@ -1,78 +0,0 @@ -package types - -import ( - "context" - "fmt" - "time" -) - -// Time represents the PostgreSQL time without time zone type. -type Time struct { - // Time is the underlying time.Time value. - time.Time -} - -// NewTime coerces src into a Time. -func NewTime(src time.Time) *Time { - // Convert result type to Time without time zone (use offset 0) - return &Time{time.Date( - 0, 1, 1, - src.Hour(), src.Minute(), src.Second(), src.Nanosecond(), - offsetZero, - )} -} - -// GoTime returns the underlying time.Time object. -func (t *Time) GoTime() time.Time { return t.Time } - -// timeFormat represents the canonical string format for Time -// values. -const timeFormat = "15:04:05.999999999" - -// String returns the string representation of ts using the format -// "15:04:05.999999999". -func (t *Time) String() string { - return t.Format(timeFormat) -} - -// ToTimeTZ converts t to *TimeTZ in the time zone in ctx. 
It works relative -// the current date. -func (t *Time) ToTimeTZ(ctx context.Context) *TimeTZ { - now := time.Now() - return NewTimeTZ(time.Date( - now.Year(), now.Month(), now.Day(), - t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), - TZFromContext(ctx), - )) -} - -// Compare compares the time instant t with u. If d is before u, it returns -// -1; if t is after u, it returns +1; if they're the same, it returns 0. -func (t *Time) Compare(u time.Time) int { - return t.Time.Compare(u) -} - -// MarshalJSON implements the json.Marshaler interface. The time is a quoted -// string using the "15:04:05.999999999" format. -func (t *Time) MarshalJSON() ([]byte, error) { - const timeJSONSize = len(timeFormat) + len(`""`) - b := make([]byte, 0, timeJSONSize) - b = append(b, '"') - b = t.AppendFormat(b, timeFormat) - b = append(b, '"') - return b, nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. The time must be a -// quoted string in the "15:04:05.999999999" format. -func (t *Time) UnmarshalJSON(data []byte) error { - tim, err := time.Parse(timeFormat, string(data[1:len(data)-1])) - if err != nil { - return fmt.Errorf( - "%w: Cannot parse %s as %q", - ErrSQLType, data, timeFormat, - ) - } - *t = *NewTime(tim) - return nil -} diff --git a/path/types/time_test.go b/path/types/time_test.go deleted file mode 100644 index 7981405..0000000 --- a/path/types/time_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package types - -import ( - "context" - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestTime(t *testing.T) { - t.Parallel() - loc := time.FixedZone("", 10*secondsPerHour+secondsPerHour/2) - ctx := ContextWithTZ(context.Background(), loc) - - for _, tc := range timestampTestCases(t) { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Only test Time and TimeTZ - switch tc.ctor(time.Time{}, &time.Location{}).(type) { - case *Timestamp, *TimestampTZ, *Date: - return - } - - // Remove the time zone and date from all the test cases. - exp := time.Date( - 0, 1, 1, - tc.time.Hour(), tc.time.Minute(), tc.time.Second(), - tc.time.Nanosecond(), offsetZero, - ) - - ts := NewTime(tc.time) - a.Equal(&Time{Time: exp}, ts) - a.Equal(exp, ts.GoTime()) - a.Equal(exp.Format(timeFormat), ts.String()) - - // Check JSON - json, err := ts.MarshalJSON() - r.NoError(err) - a.JSONEq(fmt.Sprintf("%q", ts.String()), string(json)) - ts2 := new(Time) - r.NoError(ts2.UnmarshalJSON(json)) - a.Equal(ts, ts2) - - // Test ToTimeTZ. 
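			// ToTimeTZ resolves the context zone against today's date, so a zone
			// with DST yields the offset currently in effect, and NewTimeTZ then
			// drops the result back onto the year-zero date used for all Time and
			// TimeTZ values; hence the expected value is built directly with
			// time.Date(0, 1, 1, ...) in the fixed +10:30 zone.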
- a.Equal( - NewTimeTZ(time.Date( - 0, 1, 1, - ts.Hour(), ts.Minute(), ts.Second(), ts.Nanosecond(), - loc, - )), - ts.ToTimeTZ(ctx), - ) - }) - } -} - -func TestTimeInvalidJSON(t *testing.T) { - t.Parallel() - ts := new(Time) - err := ts.UnmarshalJSON([]byte(`"i am not a time"`)) - require.Error(t, err) - require.EqualError(t, err, fmt.Sprintf( - "type: Cannot parse %q as %q", - "i am not a time", timeFormat, - )) - require.ErrorIs(t, err, ErrSQLType) -} - -func TestTimeCompare(t *testing.T) { - t.Parallel() - a := assert.New(t) - - now := time.Now().UTC() - ts := &Time{Time: now} - a.Equal(-1, ts.Compare(now.Add(1*time.Hour))) - a.Equal(1, ts.Compare(now.Add(-2*time.Hour))) - a.Equal(0, ts.Compare(now)) - a.Equal(0, ts.Compare(now.Add(0))) -} diff --git a/path/types/timestamp.go b/path/types/timestamp.go deleted file mode 100644 index 033b444..0000000 --- a/path/types/timestamp.go +++ /dev/null @@ -1,89 +0,0 @@ -package types - -import ( - "context" - "fmt" - "time" -) - -// Timestamp represents the PostgreSQL timestamp without time zone type. -type Timestamp struct { - // Time is the underlying time.Time value. - time.Time -} - -// NewTimestamp coerces src into a Timestamp. -func NewTimestamp(src time.Time) *Timestamp { - // Convert result type to timestamp without time zone (use offset 0). - return &Timestamp{time.Date( - src.Year(), src.Month(), src.Day(), - src.Hour(), src.Minute(), src.Second(), src.Nanosecond(), - offsetZero, - )} -} - -// GoTime returns the underlying time.Time object. -func (ts *Timestamp) GoTime() time.Time { return ts.Time } - -const ( - // timestampFormat represents the canonical string format for Timestamp - // values. - timestampFormat = "2006-01-02T15:04:05.999999999" -) - -// String returns the string representation of ts using the format -// "2006-01-02T15:04:05.999999999". -func (ts *Timestamp) String() string { - return ts.Format(timestampFormat) -} - -// ToDate converts ts to *Date. -func (ts *Timestamp) ToDate(context.Context) *Date { - return NewDate(ts.Time) -} - -// ToTime converts ts to *Time. -func (ts *Timestamp) ToTime(context.Context) *Time { - return NewTime(ts.Time) -} - -// ToTimestampTZ converts ts to *TimestampTZ. -func (ts *Timestamp) ToTimestampTZ(ctx context.Context) *TimestampTZ { - t := ts.Time - return NewTimestampTZ(ctx, time.Date( - t.Year(), t.Month(), t.Day(), - t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), - TZFromContext(ctx), - )) -} - -// Compare compares the time instant ts with u. If ts is before u, it returns -// -1; if ts is after u, it returns +1; if they're the same, it returns 0. -func (ts *Timestamp) Compare(u time.Time) int { - return ts.Time.Compare(u) -} - -// MarshalJSON implements the json.Marshaler interface. The time is a quoted -// string using the "2006-01-02T15:04:05.999999999" format. -func (ts *Timestamp) MarshalJSON() ([]byte, error) { - const timestampJSONSize = len(timestampFormat) + len(`""`) - b := make([]byte, 0, timestampJSONSize) - b = append(b, '"') - b = ts.AppendFormat(b, timestampFormat) - b = append(b, '"') - return b, nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. The time must be a -// quoted string in the "2006-01-02T15:04:05.999999999" format. 
-func (ts *Timestamp) UnmarshalJSON(data []byte) error { - tim, err := time.Parse(timestampFormat, string(data[1:len(data)-1])) - if err != nil { - return fmt.Errorf( - "%w: Cannot parse %s as %q", - ErrSQLType, data, timestampFormat, - ) - } - *ts = *NewTimestamp(tim) - return nil -} diff --git a/path/types/timestamp_test.go b/path/types/timestamp_test.go deleted file mode 100644 index a024e21..0000000 --- a/path/types/timestamp_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package types - -import ( - "context" - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestTimestamp(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range timestampTestCases(t) { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Don't test Time and TimeTZ - switch tc.ctor(time.Time{}, &time.Location{}).(type) { - case *Time, *TimeTZ: - return - } - - // Remove the time zone from all the test cases (by making it UTC). - exp := time.Date( - tc.time.Year(), tc.time.Month(), tc.time.Day(), - tc.time.Hour(), tc.time.Minute(), tc.time.Second(), - tc.time.Nanosecond(), offsetZero, - ) - - ts := NewTimestamp(tc.time) - a.Equal(&Timestamp{Time: exp}, ts) - a.Equal(exp, ts.GoTime()) - a.Equal(exp.Format(timestampFormat), ts.String()) - - // Check JSON - json, err := ts.MarshalJSON() - r.NoError(err) - a.JSONEq(fmt.Sprintf("%q", ts.String()), string(json)) - ts2 := new(Timestamp) - r.NoError(ts2.UnmarshalJSON(json)) - a.Equal(ts, ts2) - - // Test Conversion methods. - a.Equal(NewDate(ts.Time), ts.ToDate(ctx)) - a.Equal(NewTime(ts.Time), ts.ToTime(ctx)) - a.Equal(NewTimestampTZ(ctx, ts.Time), ts.ToTimestampTZ(ctx)) - }) - } -} - -func TestTimestampInvalidJSON(t *testing.T) { - t.Parallel() - ts := new(Timestamp) - err := ts.UnmarshalJSON([]byte(`"i am not a timestamp"`)) - require.Error(t, err) - require.EqualError(t, err, fmt.Sprintf( - "type: Cannot parse %q as %q", - "i am not a timestamp", timestampFormat, - )) - require.ErrorIs(t, err, ErrSQLType) -} - -func TestTimestampCompare(t *testing.T) { - t.Parallel() - a := assert.New(t) - - now := time.Now().UTC() - ts := &Timestamp{Time: now} - a.Equal(-1, ts.Compare(now.Add(1*time.Hour))) - a.Equal(1, ts.Compare(now.Add(-2*time.Hour))) - a.Equal(0, ts.Compare(now)) - a.Equal(0, ts.Compare(now.Add(0))) -} diff --git a/path/types/timestamptz.go b/path/types/timestamptz.go deleted file mode 100644 index 522253f..0000000 --- a/path/types/timestamptz.go +++ /dev/null @@ -1,120 +0,0 @@ -package types - -import ( - "context" - "fmt" - "time" -) - -// TimestampTZ represents the PostgreSQL timestamp with time zone type. -type TimestampTZ struct { - // Time is the underlying time.Time value. - time.Time - - // tz is the time zone read from the context passed to NewTimestampTZ. - tz *time.Location -} - -// NewTimestampTZ creates a timestamp with time zone with src. The ctx param -// is used solely to determine the time zone used by [TimestampTZ.String]. -func NewTimestampTZ(ctx context.Context, src time.Time) *TimestampTZ { - return &TimestampTZ{ - tz: TZFromContext(ctx), - Time: time.Date( - src.Year(), src.Month(), src.Day(), - src.Hour(), src.Minute(), src.Second(), src.Nanosecond(), - offsetLocationFor(src), - ), - } -} - -// GoTime returns the underlying time.Time object. 
-func (ts *TimestampTZ) GoTime() time.Time { return ts.Time } - -const ( - // timestampTZSecondFormat represents the canonical string format for - // TimestampTZ values, and supports parsing 00:00:00 zones. - timestampTZSecondFormat = "2006-01-02T15:04:05.999999999Z07:00:00" - // timestampTZMinuteFormat supports parsing 00:00 zones. - timestampTZMinuteFormat = "2006-01-02T15:04:05.999999999Z07:00" - // timestampTZHourFormat supports parsing 00 zones. - timestampTZHourFormat = "2006-01-02T15:04:05.999999999Z07" - // timestampTZOutputFormat is the main output format. - timestampTZOutputFormat = "2006-01-02T15:04:05.999999999-07:00" -) - -// String returns the string representation of ts in the time zone in the -// Context passed to NewTimestampTZ, using the format -// "2006-01-02T15:04:05.999999999-07:00". -func (ts *TimestampTZ) String() string { - return ts.Format(timestampTZOutputFormat) -} - -// ToDate converts ts to *Date in the time zone in ctx. -func (ts *TimestampTZ) ToDate(ctx context.Context) *Date { - return NewDate(ts.In(TZFromContext(ctx))) -} - -// ToTime converts ts to *Time in the time zone in ctx. -func (ts *TimestampTZ) ToTime(ctx context.Context) *Time { - return NewTime(ts.In(TZFromContext(ctx))) -} - -// ToTimestamp converts ts to *Timestamp in the time zone in ctx. -func (ts *TimestampTZ) ToTimestamp(ctx context.Context) *Timestamp { - return NewTimestamp(ts.In(TZFromContext(ctx))) -} - -// ToTimeTZ converts ts to TimeTZ in the time zone in ctx. -func (ts *TimestampTZ) ToTimeTZ(ctx context.Context) *TimeTZ { - return NewTimeTZ(ts.In(TZFromContext(ctx))) -} - -// Compare compares the time instant ts with u. If ts is before u, it returns -// -1; if ts is after u, it returns +1; if they're the same, it returns 0. -func (ts *TimestampTZ) Compare(u time.Time) int { - return ts.Time.Compare(u) -} - -// MarshalJSON implements the json.Marshaler interface. The time is a quoted -// string using the "2006-01-02T15:04:05.999999999-07:00" format. -func (ts *TimestampTZ) MarshalJSON() ([]byte, error) { - const timestampJSONSize = len(timestampTZOutputFormat) + len(`""`) - b := make([]byte, 0, timestampJSONSize) - b = append(b, '"') - b = ts.AppendFormat(b, timestampTZOutputFormat) - b = append(b, '"') - return b, nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. The time must be a -// quoted string in one of the following formats: -// - 2006-01-02T15:04:05.999999999Z07:00:00 -// - 2006-01-02T15:04:05.999999999Z07:00 -// - 2006-01-02T15:04:05.999999999Z07 -func (ts *TimestampTZ) UnmarshalJSON(data []byte) error { - str := data[1 : len(data)-1] // Unquote - - // Figure out which TZ format we need. 
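	// The choice keys off where a sign character sits relative to the end of
	// the string: nine characters back means a "+hh:mm:ss" offset, six means
	// "+hh:mm", and anything else falls through to the bare "+hh" (or "Z")
	// layout. For example, "2023-08-15T12:34:56+05:30:00" selects the second
	// format, "2023-08-15T12:34:56+05:30" the minute format, and
	// "2023-08-15T12:34:56+05" the hour format.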
- var format string - const ( - secPlace = 9 - minPlace = 6 - ) - size := len(str) - switch { - case size >= 9 && (str[size-secPlace] == '-' || str[size-secPlace] == '+'): - format = timestampTZSecondFormat - case size >= 6 && (str[size-minPlace] == '-' || str[size-minPlace] == '+'): - format = timestampTZMinuteFormat - default: - format = timestampTZHourFormat - } - - tim, err := time.Parse(format, string(str)) - if err != nil { - return fmt.Errorf("%w: Cannot parse %s as %q", ErrSQLType, data, format) - } - *ts = TimestampTZ{Time: tim} - return nil -} diff --git a/path/types/timestamptz_test.go b/path/types/timestamptz_test.go deleted file mode 100644 index 40a64e2..0000000 --- a/path/types/timestamptz_test.go +++ /dev/null @@ -1,94 +0,0 @@ -package types - -import ( - "context" - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestTimestampTZ(t *testing.T) { - t.Parallel() - tz := time.FixedZone("", -9+secondsPerHour) - ctx := ContextWithTZ(context.Background(), tz) - - for _, tc := range timestampTestCases(t) { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Don't test Time and TimeTZ - switch tc.ctor(time.Time{}, &time.Location{}).(type) { - case *Time, *TimeTZ: - return - } - - ts := NewTimestampTZ(ctx, tc.time) - a.Equal(&TimestampTZ{Time: tc.time, tz: tz}, ts) - a.Equal(tc.time, ts.GoTime()) - a.Equal(tc.time.Format(timestampTZOutputFormat), ts.String()) - - // Check JSON - json, err := ts.MarshalJSON() - r.NoError(err) - a.JSONEq(fmt.Sprintf("%q", ts.Format(timestampTZOutputFormat)), string(json)) - ts2 := new(TimestampTZ) - r.NoError(ts2.UnmarshalJSON(json)) - a.Equal(ts.Time, ts2.In(ts.Location())) - - // Test Conversion methods. - a.Equal(NewDate(ts.In(tz)), ts.ToDate(ctx)) - a.Equal(NewTime(ts.In(tz)), ts.ToTime(ctx)) - a.Equal(NewTimeTZ(ts.In(tz)), ts.ToTimeTZ(ctx)) - a.Equal(NewTimestamp(ts.In(tz)), ts.ToTimestamp(ctx)) - }) - } -} - -func TestTimestampTZInvalidJSON(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - value string - format string - }{ - {"dumb", `"i am not a timestamp"`, timestampTZHourFormat}, - {"pos_secs", `"i am not a timestamp+01:01:01"`, timestampTZSecondFormat}, - {"neg_secs", `"i am not a timestamp-01:01:01"`, timestampTZSecondFormat}, - {"pos_mins", `"i am not a timestamp+01:01"`, timestampTZMinuteFormat}, - {"neg_mins", `"i am not a timestamp-01:01"`, timestampTZMinuteFormat}, - {"pos_hours", `"i am not a timestamp+01"`, timestampTZHourFormat}, - {"neg_hours", `"i am not a timestamp-01"`, timestampTZHourFormat}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - ts := new(TimestampTZ) - err := ts.UnmarshalJSON([]byte(tc.value)) - r.Error(err) - r.EqualError(err, fmt.Sprintf( - "type: Cannot parse %v as %q", - tc.value, tc.format, - )) - r.ErrorIs(err, ErrSQLType) - }) - } -} - -func TestTimestampTZCompare(t *testing.T) { - t.Parallel() - a := assert.New(t) - - now := time.Now() - ts := &TimestampTZ{Time: now} - a.Equal(-1, ts.Compare(now.Add(1*time.Hour))) - a.Equal(1, ts.Compare(now.Add(-2*time.Hour))) - a.Equal(0, ts.Compare(now)) - a.Equal(0, ts.Compare(now.Add(0))) -} diff --git a/path/types/timetz.go b/path/types/timetz.go deleted file mode 100644 index f5b75c1..0000000 --- a/path/types/timetz.go +++ /dev/null @@ -1,118 +0,0 @@ -package types - -import ( - "context" - "fmt" - "time" -) - -// TimeTZ represents the PostgreSQL time with time zone type. 
-type TimeTZ struct { - // Time is the underlying time.Time value. - time.Time -} - -// NewTimeTZ coerces src into a TimeTZ. -func NewTimeTZ(src time.Time) *TimeTZ { - // Preserve the offset. - return &TimeTZ{time.Date( - 0, 1, 1, - src.Hour(), src.Minute(), src.Second(), src.Nanosecond(), - offsetLocationFor(src), - )} -} - -// GoTime returns the underlying time.Time object. -func (t *TimeTZ) GoTime() time.Time { return t.Time } - -const ( - // timeTZSecondFormat represents the canonical string format for - // TimeTZ values, and supports parsing 00:00:00 zones. - timeTZSecondFormat = "15:04:05.999999999Z07:00:00" - // timeTZMinuteFormat supports parsing 00:00 zones. - timeTZMinuteFormat = "15:04:05.999999999Z07:00" - // timeTZHourFormat supports parsing 00 zones. - timeTZHourFormat = "15:04:05.999999999Z07" - // timeTZOutputFormat outputs 00:00 zones. - timeTZOutputFormat = "15:04:05.999999999-07:00" -) - -// String returns the string representation of ts using the format -// "15:04:05.999999999-07:00". -func (t *TimeTZ) String() string { - return t.Format(timeTZOutputFormat) -} - -// ToTime converts t to *Time. -func (t *TimeTZ) ToTime(context.Context) *Time { - return NewTime(t.Time) -} - -// Compare compares the time instant t with u. If d is before u, it returns -// -1; if t is after u, it returns +1; if they're the same, it returns 0. Note -// that the TZ offset contributes to this comparison; values with different -// offsets are never considered to be the same. -func (t *TimeTZ) Compare(u time.Time) int { - // https://github.com/postgres/postgres/blob/REL_18_BETA2/src/backend/utils/adt/date.c#L2507-L2532 - - // Primary sort is by true (GMT-equivalent) time. - cmp := t.Time.UTC().Compare(u.UTC()) - if cmp != 0 { - return cmp - } - - // If same GMT time, sort by timezone; we only want to say that two - // timetz's are equal if both the time and zone parts are equal. - _, off1 := t.Zone() - _, off2 := u.Zone() - if off1 > off2 { - return -1 - } - if off1 < off2 { - return 1 - } - return 0 -} - -// MarshalJSON implements the json.Marshaler interface. The time is a quoted -// string using the "15:04:05.999999999-07:00" format. -func (t *TimeTZ) MarshalJSON() ([]byte, error) { - const timeJSONSize = len(timeTZOutputFormat) + len(`""`) - b := make([]byte, 0, timeJSONSize) - b = append(b, '"') - b = t.AppendFormat(b, timeTZOutputFormat) - b = append(b, '"') - return b, nil -} - -// UnmarshalJSON implements the json.Unmarshaler interface. The time must be a -// quoted string in one of the following formats: -// - 15:04:05.999999999Z07:00:00 -// - 15:04:05.999999999Z07:00 -// - 15:04:05.999999999Z07 -func (t *TimeTZ) UnmarshalJSON(data []byte) error { - str := data[1 : len(data)-1] // Unquote - - // Figure out which TZ format we need. 
- var format string - const ( - secPlace = 9 - minPlace = 6 - ) - size := len(str) - switch { - case str[size-secPlace] == '-' || str[size-secPlace] == '+': - format = timeTZSecondFormat - case str[size-minPlace] == '-' || str[size-minPlace] == '+': - format = timeTZMinuteFormat - default: - format = timeTZHourFormat - } - - tim, err := time.Parse(format, string(str)) - if err != nil { - return fmt.Errorf("%w: Cannot parse %s as %q", ErrSQLType, data, format) - } - *t = TimeTZ{Time: tim} - return nil -} diff --git a/path/types/timetz_test.go b/path/types/timetz_test.go deleted file mode 100644 index 3efaf96..0000000 --- a/path/types/timetz_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package types - -import ( - "context" - "fmt" - "testing" - "time" - _ "time/tzdata" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestTimeTZCompareIss(t *testing.T) { - t.Parallel() - t1 := NewTimeTZ(time.Date(0, 1, 1, 11, 35, 0, 0, offsetZero)) - t2 := NewTimeTZ(time.Date(0, 1, 1, 12, 35, 0, 0, time.FixedZone("", 3600))) - assert.Equal(t, 1, t1.Compare(t2.Time)) -} - -func TestTimeTZ(t *testing.T) { - t.Parallel() - ctx := context.Background() - - for _, tc := range timestampTestCases(t) { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Only test Time and TimeTZ - switch tc.ctor(time.Time{}, &time.Location{}).(type) { - case *Timestamp, *TimestampTZ, *Date: - return - } - - // Remove the date from all the test cases. - exp := time.Date( - 0, 1, 1, - tc.time.Hour(), tc.time.Minute(), tc.time.Second(), - tc.time.Nanosecond(), tc.time.Location(), - ) - - ts := NewTimeTZ(tc.time) - a.Equal(&TimeTZ{Time: exp}, ts) - a.Equal(exp, ts.GoTime()) - a.Equal(exp.Format(timeTZOutputFormat), ts.String()) - - // Check JSON - json, err := ts.MarshalJSON() - r.NoError(err) - a.JSONEq(fmt.Sprintf("%q", ts.String()), string(json)) - ts2 := new(TimeTZ) - r.NoError(ts2.UnmarshalJSON(json)) - a.Equal(exp, ts2.In(exp.Location())) - - // Test ToTime. - a.Equal(NewTime(ts.Time), ts.ToTime(ctx)) - }) - } -} - -func TestTimeTZInvalidJSON(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - value string - format string - }{ - {"dumb", `"i am not a timestamp"`, timeTZHourFormat}, - {"pos_secs", `"i am not a timestamp+01:01:01"`, timeTZSecondFormat}, - {"neg_secs", `"i am not a timestamp-01:01:01"`, timeTZSecondFormat}, - {"pos_mins", `"i am not a timestamp+01:01"`, timeTZMinuteFormat}, - {"neg_mins", `"i am not a timestamp-01:01"`, timeTZMinuteFormat}, - {"pos_hours", `"i am not a timestamp+01"`, timeTZHourFormat}, - {"neg_hours", `"i am not a timestamp-01"`, timeTZHourFormat}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - r := require.New(t) - - ts := new(TimeTZ) - err := ts.UnmarshalJSON([]byte(tc.value)) - r.Error(err) - r.EqualError(err, fmt.Sprintf( - "type: Cannot parse %v as %q", - tc.value, tc.format, - )) - r.ErrorIs(err, ErrSQLType) - }) - } -} - -func TestTimeTZCompare(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - // Pretend we're in LA. - la, err := time.LoadLocation("America/Los_Angeles") - r.NoError(err) - now := time.Now().In(la) - - // Comparisons should work as expected in same TZ. 
- ts := &TimeTZ{Time: now} - a.Equal(-1, ts.Compare(now.Add(1*time.Hour))) - a.Equal(1, ts.Compare(now.Add(-2*time.Hour))) - a.Equal(0, ts.Compare(now)) - a.Equal(0, ts.Compare(now.Add(0))) - - // Same time but different offsets are not equal - a.Equal(1, ts.Compare(now.UTC())) - utc := &TimeTZ{Time: now.UTC()} - a.Equal(-1, utc.Compare(now)) -} diff --git a/path/types/types.go b/path/types/types.go deleted file mode 100644 index 341f575..0000000 --- a/path/types/types.go +++ /dev/null @@ -1,88 +0,0 @@ -/* -Package types provides PostgresSQL-compatible data types for SQL/JSON Path -execution. - -It makes every effort to duplicate the behavior of PostgreSQL JSONB dates and -times in particular in order to compatibly execute date and time comparisons -in SQL/JSON Path expressions. - -# DateTime Types - -Package maps the Postgres date and time types to these [DateTime]-implementing -types: - - - date: [Date] - - time: [Time] - - timetz: [TimeTZ] - - timestamp: [Timestamp] - - timestamptz: [TimestampTZ] - -Each provides a constructor that takes a [time.Time] object, which defines the -underlying representation. Each also provides casting functions between the -types, but only for supported casts. - -# Time Zones - -Like the PostgreSQL timetz and timestamptz types, [TimeTZ] and [TimestampTZ] -do not store time zone information, but an offset from UTC. Even when passed a -[time.Time] object with a detailed location, the constructors will strip it -out and retain only the offset for the [time.Time] value. - -By default, the types package operates on and displays dates and times in the -context of UTC. This affects conversion between time zone and non-time zone -data types, in particular. To change the time zone in which such operations -execute, - -When required to operate on dates and times in the context of a time zone, the -types package defaults to UTC. For example, a TimestampTZ stringifies into -UTC: - - offsetPlus5 := time.FixedZone("", 5*3600) - timestamp := types.NewTimestampTZ( - context.Background(), - time.Date(2023, 8, 15, 12, 34, 56, 0, offsetPlus5), - ) - fmt.Printf("%v\n", timestamp) // β†’ 2023-08-15T07:34:56+00:00 - -To operate in a the context of a different time zone, use [ContextWithTZ] to -add it to the context passed to any constructor or method that takes a context: - - tz, err := time.LoadLocation("America/New_York") - if err != nil { - log.Fatal(err) - } - ctx := types.ContextWithTZ(context.Background(), tz) - - offsetPlus5 := time.FixedZone("", 5*3600) - timestamp := types.NewTimestampTZ( - ctx, - time.Date(2023, 8, 15, 12, 34, 56, 0, offsetPlus5), - ) - - fmt.Printf("%v\n", timestamp) // β†’ 2023-08-15T07:34:56+00:00 - -This time zone affects casts, as well, between offset-aware types ([TimeTZ], -[TimestampTZ]) and offset-unaware types ([Date], [Time], [Timestamp]). For any -execution, be sure to pass the same context to all operations. -*/ -package types - -import ( - "errors" - "fmt" - "time" -) - -// ErrSQLType wraps errors returned by the types package. -var ErrSQLType = errors.New("type") - -// secondsPerHour contains the number of seconds in an hour (excluding leap -// seconds). -const secondsPerHour = 60 * 60 - -// DateTime defines the interface for all date and time data types. -type DateTime interface { - fmt.Stringer - // GoTime returns the underlying time.Time object. 
- GoTime() time.Time -} diff --git a/path/types/types_test.go b/path/types/types_test.go deleted file mode 100644 index d9bbbcd..0000000 --- a/path/types/types_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package types - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestDateTime(t *testing.T) { - t.Parallel() - - for _, tc := range []struct { - test string - obj any - }{ - {"date", &Date{}}, - {"time", &Time{}}, - {"timetz", &TimeTZ{}}, - {"timestamp", &Timestamp{}}, - {"timestamptz", &TimestampTZ{}}, - } { - t.Run(tc.test, func(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.Implements((*DateTime)(nil), tc.obj) - }) - } -} diff --git a/src/index.html b/src/index.html new file mode 100644 index 0000000..3eedf50 --- /dev/null +++ b/src/index.html @@ -0,0 +1,399 @@ + + + + + Codestin Search App + + + + + + + +
(The markup of the new file src/index.html did not survive extraction; its recoverable text content follows.)
+ Go SQL/JSON Path Playground
+ About
+ This is the playground for {{version}} of github.com/theory/sqljson/path, a Go package that executes a PostgreSQL-compatible SQL/JSON Path query to select values from JSON data.
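As a point of reference, here is a minimal, self-contained sketch of the query flow the playground wires up in src/main.go further down in this diff, using only calls that appear there (path.Parse, Query, and json.Unmarshal). The sample path, JSON, and expected output are illustrative only, not taken from the playground.

    package main

    import (
        "context"
        "encoding/json"
        "fmt"
        "log"

        "github.com/theory/sqljson/path"
    )

    func main() {
        // Parse a PostgreSQL-compatible SQL/JSON Path expression.
        p, err := path.Parse(`$.a[*] ? (@ >= 2)`)
        if err != nil {
            log.Fatal(err)
        }

        // Decode the target JSON into a generic Go value, as the playground does.
        var value any
        if err := json.Unmarshal([]byte(`{"a": [1, 2, 3]}`), &value); err != nil {
            log.Fatal(err)
        }

        // Select the values matching the path.
        res, err := p.Query(context.Background(), value)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(res) // expected: [2 3]
    }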

+ The playground runs entirely in the browser thanks to TinyGo, which compiles the github.com/theory/sqljson/path package into Web Assembly.
+ Learn more about SQL/JSON Path by reading the PostgreSQL docs, and more about github.com/theory/sqljson/path by following the links at the bottom of the page. The code for this website can be found on GitHub.
+ Related Playgrounds:
+ SQL/JSON Path Expressions
+ Syntax Element    Description
+ $                 root node identifier
+ @                 current node identifier (valid only within filter selectors)
+ ."name"           name selector: selects a named child of an object
+ .name             shorthand for ."name"
+ .*                wildcard selector: selects all children of a node
+ .**               recursive wildcard accessor: selects zero or more descendants of a node
+ .**{3}            recursive wildcard accessor: selects up to specified level of hierarchy
+ .**{2 to 5}       recursive wildcard accessor: selects from start to end level
+ [<subscripts>]    array selector with comma-delimited subscripts
+ [3]               index selector subscript: selects an indexed child of an array
+ [3 to last]       array slice subscript: select slice from start to end index (or last)
+ [*]               wildcard array selector: returns all array elements
+ $var_name         a variable referring to a value in the Vars object
+ strict            raise error on a structural error
+ lax               suppress structural errors
+ ?(<expr>)         filter selector: selects and transforms children
+ .size()           method selector
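The $var_name row refers to values supplied at query time. Below is a hedged sketch of how that looks with this package, based on the exec.WithVars call in src/main.go further down in this diff; the path, JSON, variable names, and expected output are invented for illustration.

    package main

    import (
        "context"
        "encoding/json"
        "fmt"
        "log"

        "github.com/theory/sqljson/path"
        "github.com/theory/sqljson/path/exec"
    )

    func main() {
        // $min and $max refer to entries in the Vars map passed via WithVars.
        p, err := path.Parse(`$[*] ? (@ >= $min && @ <= $max)`)
        if err != nil {
            log.Fatal(err)
        }

        // Decode the target JSON and the variables, mirroring src/main.go.
        var value any
        if err := json.Unmarshal([]byte(`[1, 2, 3, 4, 5]`), &value); err != nil {
            log.Fatal(err)
        }
        varsMap := map[string]any{}
        if err := json.Unmarshal([]byte(`{"min": 2, "max": 4}`), &varsMap); err != nil {
            log.Fatal(err)
        }

        res, err := p.Query(context.Background(), value, exec.WithVars(varsMap))
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(res) // expected: [2 3 4]
    }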
+ + + + diff --git a/src/main.go b/src/main.go new file mode 100644 index 0000000..6e140c4 --- /dev/null +++ b/src/main.go @@ -0,0 +1,128 @@ +// package main provides the Wasm app. +package main + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "syscall/js" + "time" + + "github.com/theory/sqljson/path" + "github.com/theory/sqljson/path/exec" + "github.com/theory/sqljson/path/types" +) + +const ( + optQuery int = 1 << iota + optExistsOrMatch + optFirst + optSilent + optTZCompare + optLocalTZ + optIndent +) + +func query(_ js.Value, args []js.Value) any { + query := args[0].String() + target := args[1].String() + vars := args[2].String() + opts := args[3].Int() + + return execute(query, target, vars, opts) +} + +func main() { + stream := make(chan struct{}) + + js.Global().Set("query", js.FuncOf(query)) + js.Global().Set("optQuery", js.ValueOf(optQuery)) + js.Global().Set("optExistsOrMatch", js.ValueOf(optExistsOrMatch)) + js.Global().Set("optFirst", js.ValueOf(optFirst)) + js.Global().Set("optSilent", js.ValueOf(optSilent)) + js.Global().Set("optTZCompare", js.ValueOf(optTZCompare)) + js.Global().Set("optLocalTZ", js.ValueOf(optLocalTZ)) + + <-stream +} + +func execute(query, target, vars string, opts int) string { + // Parse the JSON. + var value any + if err := json.Unmarshal([]byte(target), &value); err != nil { + return fmt.Sprintf("Error parsing JSON: %v", err) + } + + // Parse the SQL jsonpath query. + jsonpath, err := path.Parse(query) + if err != nil { + return fmt.Sprintf("Error parsing %v", err) + } + + // Use local time zone if requested. + ctx := context.Background() + if opts&optLocalTZ == optLocalTZ { + //nolint:gosmopolitan // We want the browser time. + ctx = types.ContextWithTZ(ctx, time.Local) + } + + // Assemble the options. + options, msg := assembleOptions(opts, vars) + if msg != "" { + return msg + } + + // Execute the query against the JSON. + var res any + switch { + case opts&optQuery == optQuery: + res, err = jsonpath.Query(ctx, value, options...) + case opts&optExistsOrMatch == optExistsOrMatch: + res, err = jsonpath.ExistsOrMatch(ctx, value, options...) + case opts&optFirst == optFirst: + res, err = jsonpath.First(ctx, value, options...) + } + + // Error handling. 
+ if err != nil { + if errors.Is(err, exec.NULL) { + return "null" + } + return fmt.Sprintf("Error %v", err) + } + + // Serialize the result + var buf bytes.Buffer + enc := json.NewEncoder(&buf) + enc.SetEscapeHTML(false) + enc.SetIndent("", " ") + if err := enc.Encode(res); err != nil { + return fmt.Sprintf("Error parsing results: %v", err) + } + + return buf.String() +} + +func assembleOptions(opts int, vars string) ([]exec.Option, string) { + options := []exec.Option{} + if opts&optSilent == optSilent { + options = append(options, exec.WithSilent()) + } + + if opts&optTZCompare == optTZCompare { + options = append(options, exec.WithTZ()) + } + + if vars != "" { + var varsMap map[string]any + if err := json.Unmarshal([]byte(vars), &varsMap); err != nil { + return nil, fmt.Sprintf("Error parsing variables: %v", err) + } + + options = append(options, exec.WithVars(varsMap)) + } + + return options, "" +} diff --git a/src/play.css b/src/play.css new file mode 100644 index 0000000..a2120f2 --- /dev/null +++ b/src/play.css @@ -0,0 +1,478 @@ +@media (prefers-color-scheme: dark) { + body { + color-scheme: dark; + --color-fg-default: #c9d1d9; + --color-fg-muted: #8b949e; + --color-fg-subtle: #484f58; + --color-canvas-default: #0d1117; + --color-canvas-subtle: #24282f; + --color-canvas-muted: #2c2d2e; + --color-border-default: #30363d; + --color-border-muted: #555; + --color-neutral-muted: rgba(110,118,129,0.4); + --color-accent-fg: #759dc6; + --color-strong-fg: #759dc6; + --color-accent-emphasis: #1f6feb; + --color-attention-subtle: rgba(187,128,9,0.15); + --color-danger-fg: #f85149; + --color-button-lite: #666; + --color-button-lite-hover: #161c1c; + --color-input-bg: #1e1e1e; + --color-input-border: #555; + } +} + +@media (prefers-color-scheme: light) { + body { + /* color-scheme: light; */ + --color-fg-default: #24292f; + --color-fg-muted: #57606a; + --color-fg-subtle: #6e7781; + --color-canvas-default: #fff; + --color-canvas-muted: #eaf4fb; + --color-canvas-subtle: #f6f8fa; + --color-border-default: #d0d7de; + --color-border-muted: hsla(210,18%,87%,1); + --color-neutral-muted: rgba(175,184,193,0.2); + --color-accent-fg: #336791; + --color-strong-fg: #336791; + --color-accent-emphasis: #0969da; + --color-attention-subtle: #fff8c5; + --color-danger-fg: #cf222e; + --color-button-lite: #bbb; + --color-button-lite-hover: #eee; + --color-input-bg: #fff; + --color-input-border: #ccc; + } +} + +body { + -ms-text-size-adjust: 100%; + -webkit-text-size-adjust: 100%; + --font: ui-monospace,SFMono-Regular,SF Mono,Menlo,Consolas,Liberation Mono,monospace; + --color-button: #eee; + --color-button-bg: #336791; + --color-button-hover-bg: #2b4661; + --color-button-disabled-bg: #485871; + margin: 2rem; + color: var(--color-fg-default); + background-color: var(--color-canvas-subtle); + font-family: var(--font); + line-height: 1.5; + word-wrap: break-word; +} + +body [hidden] { + display: none !important; +} + +body a { + background-color: transparent; + color: var(--color-accent-fg); + text-decoration: underline; + text-underline-offset: 3px; +} + +body a:active, +body a:hover { + outline-width: 0; +} + +body a.disable{ + pointer-events: none; + color:var(--color-fg-subtle); +} + +body b, +body strong { + font-weight: 600; + color: var(--color-strong-fg); +} + +body dfn { + font-style: italic; +} + +body h1 { + margin: .67em 0; + font-weight: 600; + font-size: 2em; +} + +body header { + width: 100%; + display:flex; + flex-wrap: wrap; + align-items: center; + padding-bottom: .3em; + color: 
var(--color-strong-fg); +} + +body header h1 { + flex-direction: column; + width: 100%; + flex: 1 0; + font-weight: 600; +} + +body header button { + flex: 0 0 auto; + padding: .5rem; + line-height: 0; + color: var(--color-button-lite); + background-color: var(--color-canvas-subtle); + border: 1px solid var(--color-button-lite); + border-radius: .5rem; +} + +body header button:hover { + background-color: var(--color-button-lite-hover); +} + +body hr { + box-sizing: content-box; + overflow: hidden; + background: transparent; + border-bottom: 1px solid var(--color-border-muted); + height: .25em; + padding: 0; + margin: 24px 0; + background-color: var(--color-border-default); + border: 0; +} + +body input, body button, body table { + font: inherit; + margin: 0; + overflow: scroll; + font-family: inherit; + font-size: inherit; + line-height: inherit; +} + +body h1, +body h2, +body h3, +body h4, +body h5, +body h6 { + margin-top: 24px; + margin-bottom: 16px; + font-weight: 400; + line-height: 1.25; +} + +body h2 { + color: var(--color-strong-fg); + font-weight: 400; + padding-bottom: .3em; + font-size: 1.5em; +} + +body h3 { + font-weight: 400; + font-size: 1.25em; +} + +body h4 { + font-weight: 400; + font-size: 1em; +} + +body h5 { + font-weight: 400; + font-size: .875em; +} + +body h6 { + font-weight: 400; + font-size: .85em; + color: var(--color-fg-muted); +} + +body p { + margin-top: 0; + margin-bottom: 10px; +} + +body textarea, body input, body tt, body code { + font-family: var(--font); + font-size: inherit; + width:100%; +} + +body ::placeholder { + color: var(--color-fg-subtle); + opacity: 1; +} + +body p, +body blockquote, +body ul, +body ol, +body dl, +body table, +body pre, +body details, .warn { + margin-top: 0; + margin-bottom: 16px; +} + +body ul, body ol { + list-style-position: inside; + padding-left: 0; +} + +#about ul { + list-style: none; +} + +@media screen and (min-width: 768px) { + .container { + display: flex; + flex-direction: row; + } + + .container textarea { + max-height: 70vh !important; + } + + .left { + flex-direction: column; + padding-top: 1em; + width: 50%; + } + + label { + font-weight: bold; + } + + .right { + flex-direction: column; + width: 50%; + height: 100%; + padding: 1rem 0 .5rem 1rem; + } + #options { + float:right; + text-align:right; + } +} + +@media screen and (max-width: 768px) { + body { + margin: 1rem; + } + .container { + display: flex; + flex-direction: column; + } + + .left, .right { + flex-direction: column; + width: 100%; + padding: 1rem 0; + } +} + +.left div { + margin-bottom: 1em; +} + +#clear { + background-color: transparent; + color: var(--color-accent-fg); + text-decoration: none; + cursor: pointer; +} + +#clear:hover { + text-decoration: underline; +} + +#fluid { + width: 100%; + display:flex; + flex-wrap: wrap; +} + +#path { + width: 100%; + flex: 1 0; + padding-left: .5rem; + background-color: var(--color-input-bg); + border: 1px solid var(--color-input-border); + border-radius: .5rem; + box-sizing: border-box; +} + +#go { + display: flex; +} + +#go button { + line-height: 1.5rem; + margin-left: .5rem; + font-size: inherit; + padding: 0 1rem; + color: var(--color-button); + background-color: var(--color-button-bg); + border: 1px solid var(--color-button-bg); + border-radius: .5rem; +} + +#go button:hover { + background-color: var(--color-button-hover-bg) +} + +#go button:disabled { + background-color: var(--color-disabled-hover-bg) +} + +@media screen and (max-width:576px) { + #fluid { + flex-flow: column; + } + #go { + 
padding-top: .5rem; + flex: 0 0 auto; + height: 2.5rem; + } + #go button { + flex: 1 0; + } + #go button:first-child { + margin: 0; + } +} + +.nowrap { + white-space: nowrap; +} + +#actions > * { + margin-right: 0.5em; +} + +.right input { + width: auto; +} + +#mode-box { + display: flex; + flex-direction: row; + align-items: center; + margin-top: .5rem; +} + +#mode-box > label { + padding: 0.25rem 0.5rem; +} + +#options { + border:none; + margin: 0; + padding: .5rem 0 .5rem .5rem; +} + +#mode { + background-color:var(--color-canvas-subtle); + display: inline-block; + border: 1px solid var(--color-border-muted); + border-radius: .5rem; + overflow: hidden; +} + +#mode input[type="radio"] { + display: none; +} + +#mode label.nowrap { + text-align: center; + font-weight: normal; + margin: 0; + display: inline-block; + padding: 0.25rem 0.5rem; + cursor: pointer; +} + +#mode label.nowrap:hover { + background-color: var(--color-button-lite-hover); +} + +#mode label.nowrap:nth-child(2) { + border-left: 1px solid var(--color-border-muted); + border-right: 1px solid var(--color-border-muted); +} + +#mode label.nowrap:has(> input:checked) { + color: var(--color-button); + background-color: var(--color-button-bg); +} + +.container textarea { + margin-top: .3rem; + background-color: var(--color-canvas-subtle); + border: 1px solid var(--color-input-border); + padding: .5rem; + border-radius: .5rem; + max-height: 40vh; + box-sizing: border-box; +} + +footer { + margin: 1rem; + display: flex; + flex-direction: row; + flex-wrap: wrap; + justify-content: center; + align-items: center; + text-align: center; +} + +footer > * { + width: 100%; +} + +#refs a { + text-decoration: none; +} + +details[open], .warn { + background-color: var(--color-canvas-muted); + padding: .75rem; + border: 1px solid var(--color-input-border); + border-radius: .5rem; +} + +#about h2 { + margin-top: 0; +} + +details summary { + display: none; +} + +body table { + width: 100%; + border-collapse: collapse; +} + +body thead th { + border-bottom: 1px solid var(--color-fg-default); + padding-bottom: .5rem; + font-weight: bold; +} + +td, th { + vertical-align:top; +} + +body tbody td { + border-bottom: 1px solid var(--color-border-default); + padding: .25rem; +} + +body tbody tr td:first-child{ + font-weight: bold; +} diff --git a/src/playground/index.html b/src/playground/index.html new file mode 100644 index 0000000..fdec0ea --- /dev/null +++ b/src/playground/index.html @@ -0,0 +1,7 @@ + + + + + + +