Compare commits

...

30 Commits

SHA1 Message Date
59395a33ac version: 1.0.24 2025-06-11 19:38:56 -04:00
92c0a6fc0b even more jspg improved error handling, missing some codes before 2025-06-11 19:38:46 -04:00
7f66a4a35a no-op 2025-06-10 16:01:58 -04:00
d37aadb0dd version: 1.0.23 2025-06-09 18:09:33 -04:00
d0ccc47d97 added strict validation option 2025-06-09 18:09:15 -04:00
2d19bf100e version: 1.0.22 2025-06-06 14:25:18 -04:00
fb333c6cbb slight improvements to error messaging 2025-06-06 14:25:13 -04:00
d8a9a7b76b version: 1.0.21 2025-06-06 14:05:24 -04:00
c9022aefb9 fixed env 2025-06-06 14:05:19 -04:00
ccf0465e45 fixed gitignore 2025-06-06 14:02:43 -04:00
dce50d9dc3 error handling improvements to jspg to match drop structure 2025-06-06 13:58:50 -04:00
8ec6a5b58a flow updates 2025-05-29 17:51:16 -04:00
6ef7e0c55e flow update 2025-04-25 13:34:06 -04:00
1cb5fb0ecf removed random .env 2025-04-25 12:22:07 -04:00
d66aae8ae2 flow update 2025-04-24 20:02:18 -04:00
3b18901bda version: 1.0.20 2025-04-21 17:11:30 -04:00
b8c0e08068 more filtering 2025-04-21 17:11:24 -04:00
c734983a59 version: 1.0.19 2025-04-21 16:15:08 -04:00
9b11f661bc fixed release bug 2025-04-21 16:15:02 -04:00
f3a733626e version: 1.0.18 2025-04-21 16:13:16 -04:00
2bcdb8adbb version: 1.0.17 2025-04-21 16:11:31 -04:00
3988308965 branch error filtering 2025-04-21 16:11:12 -04:00
b7f528d1f6 flow 2025-04-16 21:14:07 -04:00
2febb292dc flow update 2025-04-16 20:00:35 -04:00
d1831a28ec flow update 2025-04-16 19:34:09 -04:00
c5834ac544 flow updated 2025-04-16 18:07:41 -04:00
eb25f8489e version: 1.0.16 2025-04-16 14:43:07 -04:00
21937db8de improved compile schema error messages 2025-04-16 14:42:57 -04:00
28b689cac0 version: 1.0.15 2025-04-16 01:00:57 -04:00
cc04a1a8bb made errors consistent 2025-04-16 01:00:51 -04:00
10 changed files with 705 additions and 814 deletions

.env (13 changes)

@ -1,13 +0,0 @@
ENVIRONMENT=local
DATABASE_PASSWORD=QgSvstSjoc6fKphMzNgT3SliNY10eSRS
DATABASE_ROLE=agreego_admin
DATABASE_HOST=127.1.27.9
DATABASE_PORT=5432
POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
DATABASE_NAME=agreego
DEV_DATABASE_NAME=agreego_dev
GITEA_TOKEN=3d70c23673517330623a5122998fb304e3c73f0a
MOOV_ACCOUNT_ID=69a0d2f6-77a2-4e26-934f-d869134f87d3
MOOV_PUBLIC_KEY=9OMhK5qGnh7Tmk2Z
MOOV_SECRET_KEY=DrRox7B-YWfO9IheiUUX7lGP8-7VY-Ni
MOOV_DOMAIN=http://localhost

.gitignore (vendored, 3 changes)

@ -1,2 +1,3 @@
/target
/package
/package
.env

Cargo.lock (generated, 542 changes)

@ -26,7 +26,6 @@ dependencies = [
"cfg-if",
"getrandom 0.2.15",
"once_cell",
"serde",
"version_check",
"zerocopy 0.7.35",
]
@ -113,7 +112,7 @@ dependencies = [
"miniz_oxide",
"object",
"rustc-demangle",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -215,12 +214,6 @@ version = "3.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
[[package]]
name = "bytecount"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce"
[[package]]
name = "byteorder"
version = "1.5.0"
@ -425,15 +418,6 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "email_address"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449"
dependencies = [
"serde",
]
[[package]]
name = "enum-map"
version = "2.7.3"
@ -486,17 +470,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@ -517,7 +490,6 @@ checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
dependencies = [
"borrow-or-share",
"ref-cast",
"serde",
]
[[package]]
@ -541,16 +513,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "fraction"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f158e3ff0a1b334408dc9fb811cd99b446986f4d8b741bb08f9df1604085ae7"
dependencies = [
"lazy_static",
"num",
]
[[package]]
name = "funty"
version = "2.0.0"
@ -573,12 +535,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "futures-io"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
[[package]]
name = "futures-macro"
version = "0.3.31"
@ -609,11 +565,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
"pin-utils",
"slab",
@ -730,85 +684,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "http"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http",
]
[[package]]
name = "http-body-util"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
"http",
"http-body",
"pin-project-lite",
]
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "hyper"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http",
"http-body",
"httparse",
"itoa",
"pin-project-lite",
"smallvec",
"tokio",
"want",
]
[[package]]
name = "hyper-util"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http",
"http-body",
"hyper",
"libc",
"pin-project-lite",
"socket2",
"tokio",
"tower-service",
"tracing",
]
[[package]]
name = "icu_collections"
version = "1.5.0"
@ -964,12 +839,6 @@ dependencies = [
"hashbrown",
]
[[package]]
name = "ipnet"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
[[package]]
name = "is-terminal"
version = "0.4.16"
@ -1012,37 +881,11 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "jsonschema"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "161c33c3ec738cfea3288c5c53dfcdb32fd4fc2954de86ea06f71b5a1a40bfcd"
dependencies = [
"ahash",
"base64",
"bytecount",
"email_address",
"fancy-regex",
"fraction",
"idna",
"itoa",
"num-cmp",
"once_cell",
"percent-encoding",
"referencing",
"regex-syntax",
"reqwest",
"serde",
"serde_json",
"uuid-simd",
]
[[package]]
name = "jspg"
version = "0.1.0"
dependencies = [
"boon",
"jsonschema",
"lazy_static",
"pgrx",
"pgrx-tests",
@ -1069,7 +912,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -1116,12 +959,6 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@ -1167,76 +1004,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "num"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"num-integer",
"num-traits",
]
[[package]]
name = "num-cmp"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"
[[package]]
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -1270,12 +1037,6 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "outref"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
[[package]]
name = "owo-colors"
version = "4.2.0"
@ -1306,7 +1067,7 @@ dependencies = [
"libc",
"redox_syscall",
"smallvec",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -1710,20 +1471,6 @@ dependencies = [
"syn",
]
[[package]]
name = "referencing"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40a64b3a635fad9000648b4d8a59c8710c523ab61a23d392a7d91d47683f5adc"
dependencies = [
"ahash",
"fluent-uri",
"once_cell",
"parking_lot",
"percent-encoding",
"serde_json",
]
[[package]]
name = "regex"
version = "1.11.1"
@ -1753,43 +1500,6 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "reqwest"
version = "0.12.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb"
dependencies = [
"base64",
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"ipnet",
"js-sys",
"log",
"mime",
"once_cell",
"percent-encoding",
"pin-project-lite",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tower",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"windows-registry",
]
[[package]]
name = "rustc-demangle"
version = "0.1.24"
@ -1824,12 +1534,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "rustversion"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
[[package]]
name = "rusty-fork"
version = "0.3.0"
@ -1929,18 +1633,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
dependencies = [
"form_urlencoded",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "sha2"
version = "0.10.8"
@ -2048,15 +1740,6 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "sync_wrapper"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
dependencies = [
"futures-core",
]
[[package]]
name = "synstructure"
version = "0.13.1"
@ -2253,58 +1936,6 @@ dependencies = [
"winnow",
]
[[package]]
name = "tower"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-layer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
"pin-project-lite",
"tracing-core",
]
[[package]]
name = "tracing-core"
version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
dependencies = [
"once_cell",
]
[[package]]
name = "try-lock"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "typenum"
version = "1.18.0"
@ -2394,29 +2025,12 @@ dependencies = [
"getrandom 0.3.2",
]
[[package]]
name = "uuid-simd"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8"
dependencies = [
"outref",
"uuid",
"vsimd",
]
[[package]]
name = "version_check"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "vsimd"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64"
[[package]]
name = "wait-timeout"
version = "0.2.1"
@ -2436,15 +2050,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "want"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
dependencies = [
"try-lock",
]
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
@ -2474,7 +2079,6 @@ checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
dependencies = [
"cfg-if",
"once_cell",
"rustversion",
"wasm-bindgen-macro",
]
@ -2492,19 +2096,6 @@ dependencies = [
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
dependencies = [
"cfg-if",
"js-sys",
"once_cell",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.100"
@ -2596,7 +2187,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143"
dependencies = [
"windows-core",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -2607,8 +2198,8 @@ checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d"
dependencies = [
"windows-implement",
"windows-interface",
"windows-result 0.1.2",
"windows-targets 0.52.6",
"windows-result",
"windows-targets",
]
[[package]]
@ -2633,48 +2224,13 @@ dependencies = [
"syn",
]
[[package]]
name = "windows-link"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
[[package]]
name = "windows-registry"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
dependencies = [
"windows-result 0.3.2",
"windows-strings",
"windows-targets 0.53.0",
]
[[package]]
name = "windows-result"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-result"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
dependencies = [
"windows-link",
"windows-targets",
]
[[package]]
@ -2683,7 +2239,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -2692,7 +2248,7 @@ version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -2701,30 +2257,14 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc 0.52.6",
"windows_i686_gnu 0.52.6",
"windows_i686_gnullvm 0.52.6",
"windows_i686_msvc 0.52.6",
"windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc 0.52.6",
]
[[package]]
name = "windows-targets"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
"windows_i686_gnu 0.53.0",
"windows_i686_gnullvm 0.53.0",
"windows_i686_msvc 0.53.0",
"windows_x86_64_gnu 0.53.0",
"windows_x86_64_gnullvm 0.53.0",
"windows_x86_64_msvc 0.53.0",
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
@ -2733,96 +2273,48 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_aarch64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_i686_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "windows_x86_64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
version = "0.7.6"

Cargo.toml

@ -7,7 +7,6 @@ edition = "2021"
pgrx = "0.14.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
jsonschema = "0.29.1"
lazy_static = "1.5.0"
boon = "0.6.1"
@ -34,4 +33,4 @@ lto = "thin"
panic = "unwind"
opt-level = 3
lto = "fat"
codegen-units = 1
codegen-units = 1

flow (119 changes)

@ -3,153 +3,130 @@
# Flows
source ./flows/base
source ./flows/git
source ./flows/kube
source ./flows/packaging
source ./flows/rust
# Vars
POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"
env() {
# Check if GITEA_TOKEN is set
if [ -z "$GITEA_TOKEN" ]; then
# If not set, try to get it from kubectl
GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
if [ -z "$GITEA_TOKEN" ]; then
echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
exit 1
fi
export GITEA_TOKEN
fi
echo -e "💰 ${GREEN}Environment variables set${RESET}"
}
pgrx-prepare() {
echo -e "${BLUE}Initializing pgrx...${RESET}"
info "Initializing pgrx..."
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}"
echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}"
exit 1
error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
return 2
fi
if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
echo -e "${GREEN}pgrx initialized successfully.${RESET}"
success "pgrx initialized successfully."
else
echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}"
exit 1
error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
return 2
fi
}
build() {
local version
version=$(get-version) || return 1
version=$(get-version) || return $?
local package_dir="./package"
local tarball_name="${GITEA_REPOSITORY}.tar.gz"
local tarball_path="${package_dir}/${tarball_name}"
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
# Clean previous package dir
rm -rf "${package_dir}"
mkdir -p "${package_dir}"
# Create the source tarball excluding specified patterns
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
info "Creating tarball: ${tarball_path}"
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
success "Successfully created source tarball: ${tarball_path}"
else
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
return 1
error "Failed to create source tarball."
return 2
fi
}
install() {
local version
version=$(get-version) || return 1
version=$(get-version) || return $? # Propagate error
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
info "Building and installing PGRX extension v$version into local PostgreSQL..."
# Run the pgrx install command
# It implicitly uses --release unless --debug is passed
# It finds pg_config or you can add flags like --pg-config if needed
if ! cargo pgrx install; then
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
return 1
error "cargo pgrx install command failed."
return 2
fi
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
success "PGRX extension v$version successfully built and installed."
# Post-install modification to allow non-superuser usage
# Get the installation path dynamically using pg_config
local pg_sharedir
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
if [ -z "$pg_sharedir" ]; then
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
return 1
local pg_config_status=$?
if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
error "Failed to determine PostgreSQL shared directory using pg_config."
return 2
fi
local installed_control_path="${pg_sharedir}/extension/jspg.control"
# Modify the control file
if [ ! -f "$installed_control_path" ]; then
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
return 1
error "Installed control file not found: '$installed_control_path'"
return 2
fi
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
info "Modifying control file for non-superuser access: ${installed_control_path}"
# Use sed -i '' for macOS compatibility
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
echo 'trusted = true' >> "$installed_control_path"; then
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
success "Control file modified successfully."
else
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
return 1
error "Failed to modify control file: ${installed_control_path}"
return 2
fi
}
test() {
echo -e "🧪 ${CYAN}Running jspg tests...${RESET}"
cargo pgrx test "pg${POSTGRES_VERSION}" "$@"
info "Running jspg tests..."
cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
}
clean() {
echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}"
cargo clean # Use standard cargo clean
info "Cleaning build artifacts..."
cargo clean || return $?
}
jspg-usage() {
echo -e " ${CYAN}JSPG Commands:${RESET}"
echo -e " prepare Check OS, Cargo, and PGRX dependencies."
echo -e " install [opts] Run prepare, then build and install the extension locally."
echo -e " reinstall [opts] Run prepare, clean, then build and install the extension locally."
echo -e " test [opts] Run pgrx integration tests."
echo -e " clean Remove pgrx build artifacts."
echo -e " build Build release artifacts into ./package/ (called by release)."
echo -e " tag Tag the current version (called by release)."
echo -e " package Upload artifacts from ./package/ (called by release)."
echo -e " release Perform a full release (increments patch, builds, tags, pushes, packages)."
printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
printf "install\tBuild and install the extension locally (after prepare).\n"
printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
printf "test\t\tRun pgrx integration tests.\n"
printf "clean\t\tRemove pgrx build artifacts.\n"
}
jspg-flow() {
case "$1" in
env) env; return 0;;
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
build) build; return 0;;
install) install; return 0;;
reinstall) clean; install; return 0;;
test) test; return 0;;
package) env; package; return 0;;
release) env; release; return 0;;
clean) clean; return 0;;
prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
build) build; return $?;;
install) install; return $?;;
reinstall) clean && install; return $?;;
test) test "${@:2}"; return $?;;
clean) clean; return $?;;
*) return 1 ;;
esac
}
register-flow "jspg-flow" "jspg-usage"
dispatch "$@"
register-flow "jspg-usage" "jspg-flow"
dispatch "$@"

flows (submodule, 2 changes)

Submodule flows updated: 9d758d581e...e154758056

rustfmt.toml (new file, 1 change)

@ -0,0 +1 @@
tab_spaces = 2

src/lib.rs

@ -2,16 +2,26 @@ use pgrx::*;
pg_module_magic!();
use serde_json::{json, Value};
use std::{collections::HashMap, sync::RwLock};
use boon::{Compiler, Schemas, ValidationError, SchemaIndex, CompileError};
use boon::{CompileError, Compiler, ErrorKind, SchemaIndex, Schemas, ValidationError};
use lazy_static::lazy_static;
use serde_json::{json, Value};
use std::collections::hash_map::Entry;
use std::{collections::HashMap, sync::RwLock};
struct BoonCache {
schemas: Schemas,
id_to_index: HashMap<String, SchemaIndex>,
}
// Structure to hold error information without lifetimes
#[derive(Debug)]
struct Error {
path: String,
code: String,
message: String,
cause: String,
}
lazy_static! {
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
schemas: Schemas::new(),
@ -20,22 +30,31 @@ lazy_static! {
}
#[pg_extern(strict)]
fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
let schema_value: Value = schema.0;
let mut schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);
// Apply strict validation to all objects in the schema if requested
if strict {
apply_strict_validation(&mut schema_value);
}
// Create the boon compiler and enable format assertions
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
// Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
return JsonB(json!({
"success": false,
"error": {
"message": format!("Failed to add schema resource '{}': {}", schema_id, e),
"schema_path": schema_path
}
"errors": [{
"code": "SCHEMA_RESOURCE_ADD_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"path": schema_path,
"cause": format!("{}", e)
}
}]
}));
}
@ -44,93 +63,327 @@ fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ "success": true }))
JsonB(json!({ "response": "success" }))
}
Err(e) => {
let error = match &e {
CompileError::ValidationError { url: _url, src } => { // Prefix url with _
json!({
"message": format!("Schema '{}' failed validation against its metaschema: {}", schema_id, src),
"schema_path": schema_path,
"error": format!("{:?}", src),
})
let errors = match &e {
CompileError::ValidationError { url: _url, src } => {
// Collect leaf errors from the meta-schema validation failure
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
// Filter and format errors properly - no instance for schema compilation
format_errors(error_list, &schema_value)
}
_ => {
let _error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string(); // Prefix error_type with _
json!({
"message": format!("Schema '{}' compilation failed: {}", schema_id, e),
"schema_path": schema_path,
"error": format!("{:?}", e),
})
// Other compilation errors
vec![json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"path": schema_path,
"cause": format!("{:?}", e)
}
})]
}
};
JsonB(json!({
"success": false,
"error": error
}))
JsonB(json!({ "errors": errors }))
}
}
}
// Helper function to recursively apply strict validation to all objects in a schema
fn apply_strict_validation(schema: &mut Value) {
match schema {
Value::Object(map) => {
// If this is an object type schema, add additionalProperties: false
if let Some(Value::String(t)) = map.get("type") {
if t == "object" && !map.contains_key("additionalProperties") {
map.insert("additionalProperties".to_string(), Value::Bool(false));
}
}
// Recurse into all properties
for (_, value) in map.iter_mut() {
apply_strict_validation(value);
}
}
Value::Array(arr) => {
// Recurse into array items
for item in arr.iter_mut() {
apply_strict_validation(item);
}
}
_ => {}
}
}
#[pg_extern(strict, parallel_safe)]
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"success": false,
"errors": [json!({
"message": format!("Schema with id '{}' not found in cache", schema_id),
"schema_path": "",
"instance_path": ""
})]
"errors": [{
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"cause": "Schema must be cached before validation"
}
}]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "success": true })),
Ok(_) => JsonB(json!({ "response": "success" })),
Err(validation_error) => {
// Directly use the result of format_validation_error
// which now includes the top-level success indicator and flat error list
JsonB(format_validation_error(&validation_error))
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value);
JsonB(json!({ "errors": errors }))
}
}
}
}
}
// Recursively collects leaf errors into a flat list
fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
if error.causes.is_empty() {
let default_message = format!("{}", error);
let message = if let Some(start_index) = default_message.find("': ") {
default_message[start_index + 3..].to_string()
} else {
default_message
};
// Recursively collects validation errors
fn collect_errors(error: &ValidationError, errors_list: &mut Vec<Error>) {
// Check if this is a structural error that we should skip
let is_structural = matches!(
&error.kind,
ErrorKind::Group | ErrorKind::AllOf | ErrorKind::AnyOf | ErrorKind::Not | ErrorKind::OneOf(_)
);
errors_list.push(json!({
"message": message,
"schema_path": error.schema_url.to_string(),
"instance_path": error.instance_location.to_string(),
}));
if error.causes.is_empty() && !is_structural {
// This is a leaf error that's not structural
let original_message = format!("{}", error.kind);
let (error_code, human_message) = convert_error_kind(&error.kind);
errors_list.push(Error {
path: error.instance_location.to_string(),
code: error_code,
message: human_message,
cause: original_message,
});
} else {
// Recurse into causes
for cause in &error.causes {
collect_leaf_errors(cause, errors_list);
collect_errors(cause, errors_list);
}
}
}
// Formats validation errors into a flat list JSON structure
fn format_validation_error(error: &ValidationError) -> Value {
let mut all_errors = Vec::new();
collect_leaf_errors(error, &mut all_errors);
// Convert ErrorKind to error code and human message
fn convert_error_kind(kind: &ErrorKind) -> (String, String) {
match kind {
ErrorKind::Type { .. } => (
"TYPE_MISMATCH".to_string(),
"Field type does not match the expected type".to_string(),
),
ErrorKind::Required { .. } => (
"REQUIRED_FIELD_MISSING".to_string(),
"Required field is missing".to_string(),
),
ErrorKind::DependentRequired { .. } => (
"DEPENDENT_REQUIRED_MISSING".to_string(),
"Dependent required fields are missing".to_string(),
),
ErrorKind::Dependency { .. } => (
"DEPENDENCY_FAILED".to_string(),
"Dependency requirement not met".to_string(),
),
ErrorKind::Enum { .. } => (
"ENUM_VIOLATED".to_string(),
"Value is not one of the allowed options".to_string(),
),
ErrorKind::Const { .. } => (
"CONST_VIOLATED".to_string(),
"Value does not match the required constant".to_string(),
),
ErrorKind::MinLength { .. } => (
"MIN_LENGTH_VIOLATED".to_string(),
"Field length is below the minimum required".to_string(),
),
ErrorKind::MaxLength { .. } => (
"MAX_LENGTH_VIOLATED".to_string(),
"Field length exceeds the maximum allowed".to_string(),
),
ErrorKind::Pattern { .. } => (
"PATTERN_VIOLATED".to_string(),
"Value does not match the required pattern".to_string(),
),
ErrorKind::Minimum { .. } => (
"MINIMUM_VIOLATED".to_string(),
"Value is below the minimum allowed".to_string(),
),
ErrorKind::Maximum { .. } => (
"MAXIMUM_VIOLATED".to_string(),
"Value exceeds the maximum allowed".to_string(),
),
ErrorKind::ExclusiveMinimum { .. } => (
"EXCLUSIVE_MINIMUM_VIOLATED".to_string(),
"Value must be greater than the minimum".to_string(),
),
ErrorKind::ExclusiveMaximum { .. } => (
"EXCLUSIVE_MAXIMUM_VIOLATED".to_string(),
"Value must be less than the maximum".to_string(),
),
ErrorKind::MultipleOf { .. } => (
"MULTIPLE_OF_VIOLATED".to_string(),
"Value is not a multiple of the required factor".to_string(),
),
ErrorKind::MinItems { .. } => (
"MIN_ITEMS_VIOLATED".to_string(),
"Array has fewer items than required".to_string(),
),
ErrorKind::MaxItems { .. } => (
"MAX_ITEMS_VIOLATED".to_string(),
"Array has more items than allowed".to_string(),
),
ErrorKind::UniqueItems { .. } => (
"UNIQUE_ITEMS_VIOLATED".to_string(),
"Array contains duplicate items".to_string(),
),
ErrorKind::MinProperties { .. } => (
"MIN_PROPERTIES_VIOLATED".to_string(),
"Object has fewer properties than required".to_string(),
),
ErrorKind::MaxProperties { .. } => (
"MAX_PROPERTIES_VIOLATED".to_string(),
"Object has more properties than allowed".to_string(),
),
ErrorKind::AdditionalProperties { .. } => (
"ADDITIONAL_PROPERTIES_NOT_ALLOWED".to_string(),
"Object contains properties that are not allowed".to_string(),
),
ErrorKind::AdditionalItems { .. } => (
"ADDITIONAL_ITEMS_NOT_ALLOWED".to_string(),
"Array contains additional items that are not allowed".to_string(),
),
ErrorKind::Format { want, .. } => (
"FORMAT_INVALID".to_string(),
format!("Invalid {} format", want),
),
ErrorKind::PropertyName { .. } => (
"INVALID_PROPERTY_NAME".to_string(),
"Property name is invalid".to_string(),
),
ErrorKind::Contains => (
"CONTAINS_FAILED".to_string(),
"No items match the required schema".to_string(),
),
ErrorKind::MinContains { .. } => (
"MIN_CONTAINS_VIOLATED".to_string(),
"Too few items match the required schema".to_string(),
),
ErrorKind::MaxContains { .. } => (
"MAX_CONTAINS_VIOLATED".to_string(),
"Too many items match the required schema".to_string(),
),
ErrorKind::ContentEncoding { .. } => (
"CONTENT_ENCODING_INVALID".to_string(),
"Content encoding is invalid".to_string(),
),
ErrorKind::ContentMediaType { .. } => (
"CONTENT_MEDIA_TYPE_INVALID".to_string(),
"Content media type is invalid".to_string(),
),
ErrorKind::FalseSchema => (
"FALSE_SCHEMA".to_string(),
"Schema validation always fails".to_string(),
),
ErrorKind::Not => (
"NOT_VIOLATED".to_string(),
"Value matched a schema it should not match".to_string(),
),
ErrorKind::RefCycle { .. } => (
"REFERENCE_CYCLE".to_string(),
"Schema contains a reference cycle".to_string(),
),
ErrorKind::Reference { .. } => (
"REFERENCE_FAILED".to_string(),
"Reference validation failed".to_string(),
),
ErrorKind::Schema { .. } => (
"SCHEMA_FAILED".to_string(),
"Schema validation failed".to_string(),
),
ErrorKind::ContentSchema => (
"CONTENT_SCHEMA_FAILED".to_string(),
"Content schema validation failed".to_string(),
),
// These shouldn't appear as leaf errors due to is_structural check
ErrorKind::Group => (
"VALIDATION_FAILED".to_string(),
"Validation failed".to_string(),
),
ErrorKind::AllOf => (
"ALL_OF_VIOLATED".to_string(),
"Value does not match all required schemas".to_string(),
),
ErrorKind::AnyOf => (
"ANY_OF_VIOLATED".to_string(),
"Value does not match any of the allowed schemas".to_string(),
),
ErrorKind::OneOf(_) => (
"ONE_OF_VIOLATED".to_string(),
"Value must match exactly one schema".to_string(),
),
}
}
json!({
"success": false,
"errors": all_errors // Flat list of specific errors
})
// Formats errors according to DropError structure
fn format_errors(errors: Vec<Error>, instance: &Value) -> Vec<Value> {
// Deduplicate by instance_path and format as DropError
let mut unique_errors: HashMap<String, Value> = HashMap::new();
for error in errors {
if let Entry::Vacant(entry) = unique_errors.entry(error.path.clone()) {
// Extract the failing value from the instance
let failing_value = extract_value_at_path(instance, &error.path);
entry.insert(json!({
"code": error.code,
"message": error.message,
"details": {
"path": error.path,
"context": failing_value,
"cause": error.cause
}
}));
}
}
unique_errors.into_values().collect()
}
// Helper function to extract value at a JSON pointer path
fn extract_value_at_path(instance: &Value, path: &str) -> Value {
let parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let mut current = instance;
for part in parts {
match current {
Value::Object(map) => {
if let Some(value) = map.get(part) {
current = value;
} else {
return Value::Null;
}
}
Value::Array(arr) => {
if let Ok(index) = part.parse::<usize>() {
if let Some(value) = arr.get(index) {
current = value;
} else {
return Value::Null;
}
} else {
return Value::Null;
}
}
_ => return Value::Null,
}
}
current.clone()
}
#[pg_extern(strict, parallel_safe)]
@ -140,19 +393,20 @@ fn json_schema_cached(schema_id: &str) -> bool {
}
#[pg_extern(strict)]
fn clear_json_schemas() {
fn clear_json_schemas() -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
*cache = BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
};
JsonB(json!({ "response": "success" }))
}
#[pg_extern(strict, parallel_safe)]
fn show_json_schemas() -> Vec<String> {
fn show_json_schemas() -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
ids
JsonB(json!({ "response": ids }))
}
/// This module is required by `cargo pgrx test` invocations.
@ -170,9 +424,8 @@ pub mod pg_test {
}
}
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
include!("tests.rs");
}
}
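
For context on the new strict parameter of cache_json_schema: a minimal standalone sketch of what apply_strict_validation does. The function body is copied from the diff above; the sample schema and the assertions are illustrative only, not repository code.

use serde_json::{json, Value};

// Copied from the diff above: any schema node declaring "type": "object"
// gains "additionalProperties": false unless that keyword is already set.
fn apply_strict_validation(schema: &mut Value) {
  match schema {
    Value::Object(map) => {
      if let Some(Value::String(t)) = map.get("type") {
        if t == "object" && !map.contains_key("additionalProperties") {
          map.insert("additionalProperties".to_string(), Value::Bool(false));
        }
      }
      for (_, value) in map.iter_mut() {
        apply_strict_validation(value);
      }
    }
    Value::Array(arr) => {
      for item in arr.iter_mut() {
        apply_strict_validation(item);
      }
    }
    _ => {}
  }
}

fn main() {
  // Illustrative schema: a root object with one nested object property.
  let mut schema = json!({
    "type": "object",
    "properties": {
      "name": { "type": "string" },
      "address": {
        "type": "object",
        "properties": { "city": { "type": "string" } }
      }
    }
  });
  apply_strict_validation(&mut schema);
  // Both object nodes are now closed to unknown properties.
  assert_eq!(schema["additionalProperties"], json!(false));
  assert_eq!(
    schema["properties"]["address"]["additionalProperties"],
    json!(false)
  );
}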

src/tests.rs

@ -2,69 +2,67 @@ use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};
// Helper macro for asserting success (no changes needed, but ensure it's present)
// Helper macro for asserting success with Drop-style response
macro_rules! assert_success_with_json {
($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
if condition_result != Some(true) {
let has_response = $result_jsonb.0.get("response").is_some();
let has_errors = $result_jsonb.0.get("errors").is_some();
if !has_response || has_errors {
let base_msg = format!($fmt $(, $($args)*)?);
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success): {}\nResult JSON:\n{}", base_msg, pretty_json);
let panic_msg = format!("Assertion Failed (expected success with 'response' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("{}", panic_msg);
}
};
// Simpler version without message
($result_jsonb:expr) => {
let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
if condition_result != Some(true) {
let has_response = $result_jsonb.0.get("response").is_some();
let has_errors = $result_jsonb.0.get("errors").is_some();
if !has_response || has_errors {
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success)\nResult JSON:\n{}", pretty_json);
let panic_msg = format!("Assertion Failed (expected success with 'response' field)\nResult JSON:\n{}", pretty_json);
panic!("{}", panic_msg);
}
};
}
// Updated helper macro for asserting failed JSON results with the new flat error structure
// Helper macro for asserting failed JSON results with Drop-style errors
macro_rules! assert_failure_with_json {
// --- Arms with error count and message substring check ---
// With custom message:
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let errors_opt = json_result.get("errors").and_then(Value::as_array);
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if success != Some(false) {
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match errors_opt {
Some(errors) => {
if errors.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors.len(), base_msg, pretty_json);
}
if $expected_error_count > 0 {
let first_error_message = errors[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
if $expected_error_count > 0 {
let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'errors' array, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
}
};
@ -77,25 +75,20 @@ macro_rules! assert_failure_with_json {
// With custom message:
($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let errors_opt = json_result.get("errors").and_then(Value::as_array);
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if success != Some(false) {
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match errors_opt {
Some(errors) => {
if errors.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors.len(), base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'errors' array, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
@ -107,25 +100,20 @@ macro_rules! assert_failure_with_json {
// With custom message:
($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let errors_opt = json_result.get("errors").and_then(Value::as_array);
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if success != Some(false) {
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match errors_opt {
Some(errors) => {
if errors.is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but errors array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'errors' array, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but 'errors' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
@ -155,47 +143,46 @@ fn test_cache_and_validate_json_schema() {
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false);
assert_success_with_json!(cache_result, "Cache operation should succeed.");
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
// Invalid type
// Invalid type - age is negative
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert_failure_with_json!(invalid_result_type, 1, "must be >=0", "Validation with invalid type should fail.");
assert_failure_with_json!(invalid_result_type, 1, "Value is below the minimum allowed", "Validation with invalid type should fail.");
let errors_type = invalid_result_type.0["errors"].as_array().unwrap();
assert_eq!(errors_type[0]["instance_path"], "/age");
assert_eq!(errors_type[0]["schema_path"], "urn:my_schema#/properties/age");
assert_eq!(errors_type[0]["details"]["path"], "/age");
assert_eq!(errors_type[0]["details"]["context"], -5);
assert_eq!(errors_type[0]["code"], "MINIMUM_VIOLATED");
// Missing field
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert_failure_with_json!(invalid_result_missing, 1, "missing properties 'age'", "Validation with missing field should fail.");
assert_failure_with_json!(invalid_result_missing, 1, "Required field is missing", "Validation with missing field should fail.");
let errors_missing = invalid_result_missing.0["errors"].as_array().unwrap();
assert_eq!(errors_missing[0]["instance_path"], "");
assert_eq!(errors_missing[0]["schema_path"], "urn:my_schema#");
assert_eq!(errors_missing[0]["details"]["path"], "");
assert_eq!(errors_missing[0]["code"], "REQUIRED_FIELD_MISSING");
// Schema not found
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert_failure_with_json!(invalid_schema_result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
assert_failure_with_json!(invalid_schema_result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
let errors_notfound = invalid_schema_result.0["errors"].as_array().unwrap();
assert_eq!(errors_notfound[0]["schema_path"], ""); // Schema path is empty for this error type
assert_eq!(errors_notfound[0]["instance_path"], ""); // Instance path is empty
assert_eq!(errors_notfound[0]["code"], "SCHEMA_NOT_FOUND");
}
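// Note on the result envelope these tests read (approximate shape, inferred from the
// assertions rather than from a spec): successful calls expose `result.0["response"]`
// alongside `"success": true`, while failures expose `"success": false` plus an
// `"errors"` array of objects like the one sketched above.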
#[pg_test]
fn test_validate_json_schema_not_cached() {
clear_json_schemas(); // Call clear directly
clear_json_schemas();
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
// Use the updated macro
assert_failure_with_json!(result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
assert_failure_with_json!(result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
}
#[pg_test]
fn test_cache_invalid_json_schema() {
clear_json_schemas(); // Call clear directly
clear_json_schemas();
let schema_id = "invalid_schema";
// Schema with an invalid type *value*
let invalid_schema = json!({
@@ -203,28 +190,29 @@ fn test_cache_invalid_json_schema() {
"type": ["invalid_type_value"]
});
let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));
let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema), false);
// Manually check the structure for cache_json_schema failure
let json_result = &cache_result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let error_obj = json_result.get("error").and_then(Value::as_object);
// Expect 2 leaf errors because the meta-schema validation fails at the type value
// and within the type array itself.
assert_failure_with_json!(
cache_result,
2, // Expect exactly two leaf errors
"Value is not one of the allowed options", // Updated to human-readable message
"Caching invalid schema should fail with specific meta-schema validation errors."
);
if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): Caching invalid schema should fail.\nResult JSON:\n{}", pretty_json);
}
if error_obj.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'error' object, but none found): Caching invalid schema should return an error object.\nResult JSON:\n{}", pretty_json);
}
// Check specific fields within the error object
let message = error_obj.unwrap().get("message").and_then(Value::as_str);
// Updated check based on the actual error message seen in the logs
if message.map_or(true, |m| !m.contains("failed validation against its metaschema") || !m.contains("/type/0': value must be one of")) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (error message mismatch): Expected metaschema validation failure message containing '/type/0' error detail.\nResult JSON:\n{}", pretty_json);
}
// Ensure the errors array exists and check specifics
let errors_array = cache_result.0["errors"].as_array().expect("Errors field should be an array");
assert_eq!(errors_array.len(), 2);
// Both errors should have ENUM_VIOLATED code
assert_eq!(errors_array[0]["code"], "ENUM_VIOLATED");
assert_eq!(errors_array[1]["code"], "ENUM_VIOLATED");
// Check instance paths are preserved in path field
let paths: Vec<&str> = errors_array.iter()
.map(|e| e["details"]["path"].as_str().unwrap())
.collect();
assert!(paths.contains(&"/type"));
assert!(paths.contains(&"/type/0"));
}
#[pg_test]
@@ -245,7 +233,7 @@ fn test_validate_json_schema_detailed_validation_errors() {
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
let _ = cache_json_schema(schema_id, jsonb(schema), false);
let invalid_instance = json!({
"address": {
@@ -284,77 +272,270 @@ fn test_validate_json_schema_oneof_validation_errors() {
]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
let _ = cache_json_schema(schema_id, jsonb(schema), false);
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors: one for maxLength (branch 0), one for missing prop (branch 1)
// Check the first error message reported by boon (maxLength).
assert_failure_with_json!(result_invalid_string, 2, "length must be <=5", "Validation with invalid string length should have 2 leaf errors");
let _errors_string = result_invalid_string.0["errors"].as_array().unwrap(); // Prefix with _
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_string, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_string = result_invalid_string.0["errors"].as_array().expect("Expected error array for invalid string");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "/string_prop" &&
e["code"] == "MAX_LENGTH_VIOLATED"
), "Missing maxLength error");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
), "Missing number_prop required error");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors: one for minimum (branch 1), one for missing prop (branch 0)
// Check the first error message reported by boon (missing prop).
assert_failure_with_json!(result_invalid_number, 2, "missing properties 'string_prop'", "Validation with invalid number should have 2 leaf errors");
let _errors_number = result_invalid_number.0["errors"].as_array().unwrap(); // Prefix with _
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_number, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_number = result_invalid_number.0["errors"].as_array().expect("Expected error array for invalid number");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "/number_prop" &&
e["code"] == "MINIMUM_VIOLATED"
), "Missing minimum error");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
), "Missing string_prop required error");
// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect 2 leaf errors, one "Type" error for each branch
// Check the first error reported by boon (want object).
assert_failure_with_json!(result_invalid_bool, 2, "want object", "Validation with invalid bool should have 2 leaf errors");
let _errors_bool = result_invalid_bool.0["errors"].as_array().unwrap(); // Prefix with _
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_invalid_bool, 1);
// Explicitly check that the single remaining error is the type error for the root instance path
let errors_bool = result_invalid_bool.0["errors"].as_array().expect("Expected error array for invalid bool");
assert_eq!(errors_bool.len(), 1, "Expected exactly one error after deduplication");
assert_eq!(errors_bool[0]["code"], "TYPE_MISMATCH");
assert_eq!(errors_bool[0]["details"]["path"], "");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Expect 2 leaf errors: one required error for branch 0, one required error for branch 1
// Check the first error reported by boon (missing string_prop).
assert_failure_with_json!(result_empty_obj, 2, "missing properties 'string_prop'", "Validation with empty object should have 2 leaf errors");
let _errors_empty = result_empty_obj.0["errors"].as_array().unwrap(); // Prefix with _
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_empty_obj, 1);
// Explicitly check that the single remaining error is one of the expected missing properties errors
let errors_empty = result_empty_obj.0["errors"].as_array().expect("Expected error array for empty object");
assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
assert_eq!(errors_empty[0]["code"], "REQUIRED_FIELD_MISSING");
assert_eq!(errors_empty[0]["details"]["path"], "");
// The human-readable message should be generic
assert_eq!(errors_empty[0]["message"], "Required field is missing");
}
#[pg_test]
fn test_clear_json_schemas() {
clear_json_schemas(); // Call clear directly
let clear_result = clear_json_schemas();
assert_success_with_json!(clear_result);
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_json_schema(schema_id, jsonb(schema.clone()));
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false);
assert_success_with_json!(cache_result);
let show_result1 = show_json_schemas();
assert!(show_result1.contains(&schema_id.to_string()));
let schemas1 = show_result1.0["response"].as_array().unwrap();
assert!(schemas1.contains(&json!(schema_id)));
clear_json_schemas();
let clear_result2 = clear_json_schemas();
assert_success_with_json!(clear_result2);
let show_result2 = show_json_schemas();
assert!(show_result2.is_empty());
let schemas2 = show_result2.0["response"].as_array().unwrap();
assert!(schemas2.is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
// Use the updated macro
assert_failure_with_json!(validate_result, 1, "Schema with id 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
assert_failure_with_json!(validate_result, 1, "Schema 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}
#[pg_test]
fn test_show_json_schemas() {
clear_json_schemas(); // Call clear directly
let _ = clear_json_schemas();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
cache_json_schema(schema_id1, jsonb(schema.clone()));
cache_json_schema(schema_id2, jsonb(schema.clone()));
let _ = cache_json_schema(schema_id1, jsonb(schema.clone()), false);
let _ = cache_json_schema(schema_id2, jsonb(schema.clone()), false);
let mut result = show_json_schemas(); // Make result mutable
result.sort(); // Sort for deterministic testing
assert_eq!(result, vec!["schema1".to_string(), "schema2".to_string()]); // Check exact content
assert!(result.contains(&schema_id1.to_string())); // Keep specific checks too if desired
assert!(result.contains(&schema_id2.to_string()));
let result = show_json_schemas();
let schemas = result.0["response"].as_array().unwrap();
assert_eq!(schemas.len(), 2);
assert!(schemas.contains(&json!(schema_id1)));
assert!(schemas.contains(&json!(schema_id2)));
}
#[pg_test]
fn test_auto_strict_validation() {
clear_json_schemas();
let schema_id = "strict_test";
let schema_id_non_strict = "non_strict_test";
// Schema without explicit additionalProperties: false
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"profile": {
"type": "object",
"properties": {
"age": { "type": "number" },
"preferences": {
"type": "object",
"properties": {
"theme": { "type": "string" }
}
}
}
},
"tags": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": { "type": "string" },
"value": { "type": "string" }
}
}
}
}
});
// Cache the same schema twice: once with strict=true and once with strict=false
let cache_result_strict = cache_json_schema(schema_id, jsonb(schema.clone()), true);
assert_success_with_json!(cache_result_strict, "Schema caching with strict=true should succeed");
let cache_result_non_strict = cache_json_schema(schema_id_non_strict, jsonb(schema.clone()), false);
assert_success_with_json!(cache_result_non_strict, "Schema caching with strict=false should succeed");
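// Assumed behaviour of the strict flag, inferred from the assertions that follow: with
// strict=true, any object node in the cached schema that does not set "additionalProperties"
// explicitly is validated as if it declared "additionalProperties": false (this covers nested
// objects and array item schemas), while an explicit "additionalProperties": true is left
// untouched (see Test 6). With strict=false the schema is cached as written.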
// Test 1: Valid instance with no extra properties (should pass for both)
let valid_instance = json!({
"name": "John",
"profile": {
"age": 30,
"preferences": {
"theme": "dark"
}
},
"tags": [
{"id": "1", "value": "rust"},
{"id": "2", "value": "postgres"}
]
});
let valid_result_strict = validate_json_schema(schema_id, jsonb(valid_instance.clone()));
assert_success_with_json!(valid_result_strict, "Valid instance should pass with strict schema");
let valid_result_non_strict = validate_json_schema(schema_id_non_strict, jsonb(valid_instance));
assert_success_with_json!(valid_result_non_strict, "Valid instance should pass with non-strict schema");
// Test 2: Root level extra property
let invalid_root_extra = json!({
"name": "John",
"extraField": "should fail" // Extra property at root
});
// Should fail with strict schema
let result_root_strict = validate_json_schema(schema_id, jsonb(invalid_root_extra.clone()));
assert_failure_with_json!(result_root_strict, 1, "Object contains properties that are not allowed");
let errors_root = result_root_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_root[0]["code"], "ADDITIONAL_PROPERTIES_NOT_ALLOWED");
assert_eq!(errors_root[0]["details"]["path"], "");
// Should pass with non-strict schema
let result_root_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_root_extra));
assert_success_with_json!(result_root_non_strict, "Extra property should be allowed with non-strict schema");
// Test 3: Nested object extra property
let invalid_nested_extra = json!({
"name": "John",
"profile": {
"age": 30,
"extraNested": "should fail" // Extra property in nested object
}
});
// Should fail with strict schema
let result_nested_strict = validate_json_schema(schema_id, jsonb(invalid_nested_extra.clone()));
assert_failure_with_json!(result_nested_strict, 1, "Object contains properties that are not allowed");
let errors_nested = result_nested_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_nested[0]["code"], "ADDITIONAL_PROPERTIES_NOT_ALLOWED");
assert_eq!(errors_nested[0]["details"]["path"], "/profile");
// Should pass with non-strict schema
let result_nested_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_nested_extra));
assert_success_with_json!(result_nested_non_strict, "Extra nested property should be allowed with non-strict schema");
// Test 4: Deeply nested object extra property
let invalid_deep_extra = json!({
"name": "John",
"profile": {
"age": 30,
"preferences": {
"theme": "dark",
"extraDeep": "should fail" // Extra property in deeply nested object
}
}
});
// Should fail with strict schema
let result_deep_strict = validate_json_schema(schema_id, jsonb(invalid_deep_extra.clone()));
assert_failure_with_json!(result_deep_strict, 1, "Object contains properties that are not allowed");
let errors_deep = result_deep_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_deep[0]["code"], "ADDITIONAL_PROPERTIES_NOT_ALLOWED");
assert_eq!(errors_deep[0]["details"]["path"], "/profile/preferences");
// Should pass with non-strict schema
let result_deep_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_deep_extra));
assert_success_with_json!(result_deep_non_strict, "Extra deep property should be allowed with non-strict schema");
// Test 5: Array item extra property
let invalid_array_item_extra = json!({
"name": "John",
"tags": [
{"id": "1", "value": "rust", "extraInArray": "should fail"} // Extra property in array item
]
});
// Should fail with strict schema
let result_array_strict = validate_json_schema(schema_id, jsonb(invalid_array_item_extra.clone()));
assert_failure_with_json!(result_array_strict, 1, "Object contains properties that are not allowed");
let errors_array = result_array_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_array[0]["code"], "ADDITIONAL_PROPERTIES_NOT_ALLOWED");
assert_eq!(errors_array[0]["details"]["path"], "/tags/0");
// Should pass with non-strict schema
let result_array_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_array_item_extra));
assert_success_with_json!(result_array_non_strict, "Extra array item property should be allowed with non-strict schema");
// Test 6: Schema with explicit additionalProperties: true should allow extras even with strict=true
let schema_id_permissive = "permissive_test";
let permissive_schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" }
},
"additionalProperties": true // Explicitly allow additional properties
});
let _ = cache_json_schema(schema_id_permissive, jsonb(permissive_schema), true); // Note: strict=true
let instance_with_extra = json!({
"name": "John",
"extraAllowed": "should pass"
});
let result_permissive = validate_json_schema(schema_id_permissive, jsonb(instance_with_extra));
assert_success_with_json!(result_permissive, "Instance with extra property should pass when additionalProperties is explicitly true, even with strict=true");
}
View File
@@ -1 +1 @@
1.0.14
1.0.24