Compare commits

..

28 Commits

SHA1 Message Date
d8a9a7b76b version: 1.0.21 2025-06-06 14:05:24 -04:00
c9022aefb9 fixed env 2025-06-06 14:05:19 -04:00
ccf0465e45 fixed gitignore 2025-06-06 14:02:43 -04:00
dce50d9dc3 error handling improvements to jspg to match drop structure 2025-06-06 13:58:50 -04:00
8ec6a5b58a flow updates 2025-05-29 17:51:16 -04:00
6ef7e0c55e flow update 2025-04-25 13:34:06 -04:00
1cb5fb0ecf removed random .env 2025-04-25 12:22:07 -04:00
d66aae8ae2 flow update 2025-04-24 20:02:18 -04:00
3b18901bda version: 1.0.20 2025-04-21 17:11:30 -04:00
b8c0e08068 more filtering 2025-04-21 17:11:24 -04:00
c734983a59 version: 1.0.19 2025-04-21 16:15:08 -04:00
9b11f661bc fixed release bug 2025-04-21 16:15:02 -04:00
f3a733626e version: 1.0.18 2025-04-21 16:13:16 -04:00
2bcdb8adbb version: 1.0.17 2025-04-21 16:11:31 -04:00
3988308965 branch error filtering 2025-04-21 16:11:12 -04:00
b7f528d1f6 flow 2025-04-16 21:14:07 -04:00
2febb292dc flow update 2025-04-16 20:00:35 -04:00
d1831a28ec flow update 2025-04-16 19:34:09 -04:00
c5834ac544 flow updated 2025-04-16 18:07:41 -04:00
eb25f8489e version: 1.0.16 2025-04-16 14:43:07 -04:00
21937db8de improved compile schema error messages 2025-04-16 14:42:57 -04:00
28b689cac0 version: 1.0.15 2025-04-16 01:00:57 -04:00
cc04a1a8bb made errors consistent 2025-04-16 01:00:51 -04:00
3ceb8a0770 version: 1.0.14 2025-04-16 00:38:10 -04:00
499bf68b2a more error cleanup 2025-04-16 00:38:04 -04:00
6ca00f27e9 version: 1.0.13 2025-04-15 23:30:57 -04:00
520be66035 better error messaging 2025-04-15 23:30:47 -04:00
c3146ca433 flow update 2025-04-15 01:52:12 -04:00
9 changed files with 713 additions and 954 deletions

13
.env
View File

@ -1,13 +0,0 @@
ENVIRONMENT=local
DATABASE_PASSWORD=2HwURf1Za7m5ZKtECAfQJGpni3832RV3
DATABASE_ROLE=agreego_admin
DATABASE_HOST=127.1.27.10
DATABASE_PORT=5432
POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
DATABASE_NAME=agreego
DEV_DATABASE_NAME=agreego_dev
GITEA_TOKEN=3d70c23673517330623a5122998fb304e3c73f0a
MOOV_ACCOUNT_ID=69a0d2f6-77a2-4e26-934f-d869134f87d3
MOOV_PUBLIC_KEY=9OMhK5qGnh7Tmk2Z
MOOV_SECRET_KEY=DrRox7B-YWfO9IheiUUX7lGP8-7VY-Ni
MOOV_DOMAIN=http://localhost

3
.gitignore vendored
View File

@ -1,2 +1,3 @@
/target
/package
/package
.env

542
Cargo.lock generated
View File

@ -26,7 +26,6 @@ dependencies = [
"cfg-if",
"getrandom 0.2.15",
"once_cell",
"serde",
"version_check",
"zerocopy 0.7.35",
]
@ -113,7 +112,7 @@ dependencies = [
"miniz_oxide",
"object",
"rustc-demangle",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -215,12 +214,6 @@ version = "3.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
[[package]]
name = "bytecount"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce"
[[package]]
name = "byteorder"
version = "1.5.0"
@ -425,15 +418,6 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "email_address"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449"
dependencies = [
"serde",
]
[[package]]
name = "enum-map"
version = "2.7.3"
@ -486,17 +470,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@ -517,7 +490,6 @@ checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
dependencies = [
"borrow-or-share",
"ref-cast",
"serde",
]
[[package]]
@ -541,16 +513,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "fraction"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f158e3ff0a1b334408dc9fb811cd99b446986f4d8b741bb08f9df1604085ae7"
dependencies = [
"lazy_static",
"num",
]
[[package]]
name = "funty"
version = "2.0.0"
@ -573,12 +535,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "futures-io"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
[[package]]
name = "futures-macro"
version = "0.3.31"
@ -609,11 +565,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
"pin-utils",
"slab",
@ -730,85 +684,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "http"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http",
]
[[package]]
name = "http-body-util"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
"http",
"http-body",
"pin-project-lite",
]
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "hyper"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http",
"http-body",
"httparse",
"itoa",
"pin-project-lite",
"smallvec",
"tokio",
"want",
]
[[package]]
name = "hyper-util"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http",
"http-body",
"hyper",
"libc",
"pin-project-lite",
"socket2",
"tokio",
"tower-service",
"tracing",
]
[[package]]
name = "icu_collections"
version = "1.5.0"
@ -964,12 +839,6 @@ dependencies = [
"hashbrown",
]
[[package]]
name = "ipnet"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
[[package]]
name = "is-terminal"
version = "0.4.16"
@ -1012,37 +881,11 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "jsonschema"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "161c33c3ec738cfea3288c5c53dfcdb32fd4fc2954de86ea06f71b5a1a40bfcd"
dependencies = [
"ahash",
"base64",
"bytecount",
"email_address",
"fancy-regex",
"fraction",
"idna",
"itoa",
"num-cmp",
"once_cell",
"percent-encoding",
"referencing",
"regex-syntax",
"reqwest",
"serde",
"serde_json",
"uuid-simd",
]
[[package]]
name = "jspg"
version = "0.1.0"
dependencies = [
"boon",
"jsonschema",
"lazy_static",
"pgrx",
"pgrx-tests",
@ -1069,7 +912,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -1116,12 +959,6 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@ -1167,76 +1004,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "num"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"num-integer",
"num-traits",
]
[[package]]
name = "num-cmp"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"
[[package]]
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -1270,12 +1037,6 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "outref"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
[[package]]
name = "owo-colors"
version = "4.2.0"
@ -1306,7 +1067,7 @@ dependencies = [
"libc",
"redox_syscall",
"smallvec",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -1710,20 +1471,6 @@ dependencies = [
"syn",
]
[[package]]
name = "referencing"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40a64b3a635fad9000648b4d8a59c8710c523ab61a23d392a7d91d47683f5adc"
dependencies = [
"ahash",
"fluent-uri",
"once_cell",
"parking_lot",
"percent-encoding",
"serde_json",
]
[[package]]
name = "regex"
version = "1.11.1"
@ -1753,43 +1500,6 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "reqwest"
version = "0.12.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb"
dependencies = [
"base64",
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"ipnet",
"js-sys",
"log",
"mime",
"once_cell",
"percent-encoding",
"pin-project-lite",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tower",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"windows-registry",
]
[[package]]
name = "rustc-demangle"
version = "0.1.24"
@ -1824,12 +1534,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "rustversion"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
[[package]]
name = "rusty-fork"
version = "0.3.0"
@ -1929,18 +1633,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
dependencies = [
"form_urlencoded",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "sha2"
version = "0.10.8"
@ -2048,15 +1740,6 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "sync_wrapper"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
dependencies = [
"futures-core",
]
[[package]]
name = "synstructure"
version = "0.13.1"
@ -2253,58 +1936,6 @@ dependencies = [
"winnow",
]
[[package]]
name = "tower"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-layer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
"pin-project-lite",
"tracing-core",
]
[[package]]
name = "tracing-core"
version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
dependencies = [
"once_cell",
]
[[package]]
name = "try-lock"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "typenum"
version = "1.18.0"
@ -2394,29 +2025,12 @@ dependencies = [
"getrandom 0.3.2",
]
[[package]]
name = "uuid-simd"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8"
dependencies = [
"outref",
"uuid",
"vsimd",
]
[[package]]
name = "version_check"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "vsimd"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64"
[[package]]
name = "wait-timeout"
version = "0.2.1"
@ -2436,15 +2050,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "want"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
dependencies = [
"try-lock",
]
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
@ -2474,7 +2079,6 @@ checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
dependencies = [
"cfg-if",
"once_cell",
"rustversion",
"wasm-bindgen-macro",
]
@ -2492,19 +2096,6 @@ dependencies = [
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
dependencies = [
"cfg-if",
"js-sys",
"once_cell",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.100"
@ -2596,7 +2187,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143"
dependencies = [
"windows-core",
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -2607,8 +2198,8 @@ checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d"
dependencies = [
"windows-implement",
"windows-interface",
"windows-result 0.1.2",
"windows-targets 0.52.6",
"windows-result",
"windows-targets",
]
[[package]]
@ -2633,48 +2224,13 @@ dependencies = [
"syn",
]
[[package]]
name = "windows-link"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
[[package]]
name = "windows-registry"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
dependencies = [
"windows-result 0.3.2",
"windows-strings",
"windows-targets 0.53.0",
]
[[package]]
name = "windows-result"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-result"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
dependencies = [
"windows-link",
"windows-targets",
]
[[package]]
@ -2683,7 +2239,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -2692,7 +2248,7 @@ version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
"windows-targets",
]
[[package]]
@ -2701,30 +2257,14 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc 0.52.6",
"windows_i686_gnu 0.52.6",
"windows_i686_gnullvm 0.52.6",
"windows_i686_msvc 0.52.6",
"windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc 0.52.6",
]
[[package]]
name = "windows-targets"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
"windows_i686_gnu 0.53.0",
"windows_i686_gnullvm 0.53.0",
"windows_i686_msvc 0.53.0",
"windows_x86_64_gnu 0.53.0",
"windows_x86_64_gnullvm 0.53.0",
"windows_x86_64_msvc 0.53.0",
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
@ -2733,96 +2273,48 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_aarch64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_i686_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "windows_x86_64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
version = "0.7.6"

Cargo.toml
View File

@ -7,7 +7,6 @@ edition = "2021"
pgrx = "0.14.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
jsonschema = "0.29.1"
lazy_static = "1.5.0"
boon = "0.6.1"
@ -34,4 +33,4 @@ lto = "thin"
panic = "unwind"
opt-level = 3
lto = "fat"
codegen-units = 1
codegen-units = 1
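
With the jsonschema crate removed from the manifest, boon is the only JSON Schema engine left. For orientation, a minimal sketch of the boon compile-and-validate flow the extension relies on, using only the calls visible in the src/lib.rs diff below (the schema and id are illustrative, not taken from the repository):

use boon::{Compiler, Schemas};
use serde_json::json;

fn main() {
    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.enable_format_assertions();

    // The extension registers schemas under a "urn:<schema_id>" location.
    let loc = "urn:example_schema"; // hypothetical id
    compiler
        .add_resource(loc, json!({ "type": "string", "maxLength": 5 }))
        .expect("resource should be accepted");
    let idx = compiler
        .compile(loc, &mut schemas)
        .expect("schema should compile");

    // validate() returns Err(ValidationError) with nested `causes` on failure.
    assert!(schemas.validate(&json!("ok"), idx).is_ok());
    assert!(schemas.validate(&json!("definitely too long"), idx).is_err());
}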

119
flow
View File

@ -3,153 +3,130 @@
# Flows
source ./flows/base
source ./flows/git
source ./flows/kube
source ./flows/packaging
source ./flows/rust
# Vars
POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"
env() {
# Check if GITEA_TOKEN is set
if [ -z "$GITEA_TOKEN" ]; then
# If not set, try to get it from kubectl
GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
if [ -z "$GITEA_TOKEN" ]; then
echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
exit 1
fi
export GITEA_TOKEN
fi
echo -e "💰 ${GREEN}Environment variables set${RESET}"
}
pgrx-prepare() {
echo -e "${BLUE}Initializing pgrx...${RESET}"
info "Initializing pgrx..."
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}"
echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}"
exit 1
error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
return 2
fi
if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
echo -e "${GREEN}pgrx initialized successfully.${RESET}"
success "pgrx initialized successfully."
else
echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}"
exit 1
error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
return 2
fi
}
build() {
local version
version=$(get-version) || return 1
version=$(get-version) || return $?
local package_dir="./package"
local tarball_name="${GITEA_REPOSITORY}.tar.gz"
local tarball_path="${package_dir}/${tarball_name}"
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
# Clean previous package dir
rm -rf "${package_dir}"
mkdir -p "${package_dir}"
# Create the source tarball excluding specified patterns
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
info "Creating tarball: ${tarball_path}"
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
success "Successfully created source tarball: ${tarball_path}"
else
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
return 1
error "Failed to create source tarball."
return 2
fi
}
install() {
local version
version=$(get-version) || return 1
version=$(get-version) || return $? # Propagate error
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
info "Building and installing PGRX extension v$version into local PostgreSQL..."
# Run the pgrx install command
# It implicitly uses --release unless --debug is passed
# It finds pg_config or you can add flags like --pg-config if needed
if ! cargo pgrx install; then
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
return 1
error "cargo pgrx install command failed."
return 2
fi
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
success "PGRX extension v$version successfully built and installed."
# Post-install modification to allow non-superuser usage
# Get the installation path dynamically using pg_config
local pg_sharedir
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
if [ -z "$pg_sharedir" ]; then
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
return 1
local pg_config_status=$?
if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
error "Failed to determine PostgreSQL shared directory using pg_config."
return 2
fi
local installed_control_path="${pg_sharedir}/extension/jspg.control"
# Modify the control file
if [ ! -f "$installed_control_path" ]; then
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
return 1
error "Installed control file not found: '$installed_control_path'"
return 2
fi
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
info "Modifying control file for non-superuser access: ${installed_control_path}"
# Use sed -i '' for macOS compatibility
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
echo 'trusted = true' >> "$installed_control_path"; then
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
success "Control file modified successfully."
else
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
return 1
error "Failed to modify control file: ${installed_control_path}"
return 2
fi
}
test() {
echo -e "🧪 ${CYAN}Running jspg tests...${RESET}"
cargo pgrx test "pg${POSTGRES_VERSION}" "$@"
info "Running jspg tests..."
cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
}
clean() {
echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}"
cargo clean # Use standard cargo clean
info "Cleaning build artifacts..."
cargo clean || return $?
}
jspg-usage() {
echo -e " ${CYAN}JSPG Commands:${RESET}"
echo -e " prepare Check OS, Cargo, and PGRX dependencies."
echo -e " install [opts] Run prepare, then build and install the extension locally."
echo -e " reinstall [opts] Run prepare, clean, then build and install the extension locally."
echo -e " test [opts] Run pgrx integration tests."
echo -e " clean Remove pgrx build artifacts."
echo -e " build Build release artifacts into ./package/ (called by release)."
echo -e " tag Tag the current version (called by release)."
echo -e " package Upload artifacts from ./package/ (called by release)."
echo -e " release Perform a full release (increments patch, builds, tags, pushes, packages)."
printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
printf "install\tBuild and install the extension locally (after prepare).\n"
printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
printf "test\t\tRun pgrx integration tests.\n"
printf "clean\t\tRemove pgrx build artifacts.\n"
}
jspg-flow() {
case "$1" in
env) env; return 0;;
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
build) build; return 0;;
install) install; return 0;;
reinstall) clean; install; return 0;;
test) test; return 0;;
package) env; package; return 0;;
release) env; release; return 0;;
clean) clean; return 0;;
prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
build) build; return $?;;
install) install; return $?;;
reinstall) clean && install; return $?;;
test) test "${@:2}"; return $?;;
clean) clean; return $?;;
*) return 1 ;;
esac
}
register-flow "jspg-flow" "jspg-usage"
dispatch "$@"
register-flow "jspg-usage" "jspg-flow"
dispatch "$@"

2
flows

Submodule flows updated: db55335254...e154758056

src/lib.rs
View File

@ -23,56 +23,54 @@ lazy_static! {
fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
let schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
if let Err(e) = compiler.add_resource(schema_id, schema_value) {
// Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
return JsonB(json!({
"success": false,
"error": {
"kind": "SchemaResourceError",
"message": format!("Failed to add schema resource: {}", e),
"schema_id": schema_id
}
"errors": [{
"code": "SCHEMA_RESOURCE_ADD_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"path": schema_path,
"cause": format!("{}", e)
}
}]
}));
}
match compiler.compile(schema_id, &mut cache.schemas) {
// Use schema_path when compiling
match compiler.compile(&schema_path, &mut cache.schemas) {
Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ "success": true }))
JsonB(json!({ "response": "success" }))
}
Err(e) => {
// Enhance error reporting by matching on the CompileError variant
let error_details = match &e {
CompileError::ValidationError { url, src } => {
// Metaschema validation failed - provide more detail
json!({
"kind": "SchemaCompilationError",
"sub_kind": "ValidationError", // Explicitly state it's a metaschema validation error
"message": format!("Schema failed validation against its metaschema: {}", src),
"schema_id": schema_id,
"failed_at_url": url,
"validation_details": format!("{:?}", src), // Include full debug info of the validation error
})
let errors = match &e {
CompileError::ValidationError { url: _url, src } => {
// Collect leaf errors from the meta-schema validation failure
let mut error_list = Vec::new();
collect_validation_errors(src, &mut error_list);
// Filter and format errors properly - no instance for schema compilation
format_drop_errors(error_list, &schema_value)
}
_ => {
// Other compilation errors
vec![json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"path": schema_path,
"cause": format!("{:?}", e)
}
// Handle other potential compilation errors
_ => {
let error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string();
json!({
"kind": "SchemaCompilationError",
"sub_kind": error_type, // e.g., "InvalidJsonPointer", "UnsupportedUrlScheme"
"message": format!("Schema compilation failed: {}", e),
"schema_id": schema_id,
"details": format!("{:?}", e), // Generic debug info
})
}
};
JsonB(json!({
"success": false,
"error": error_details
}))
})]
}
};
JsonB(json!({ "errors": errors }))
}
}
}
@ -81,42 +79,234 @@ fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"success": false,
"error": {
"kind": "SchemaNotFound",
"message": format!("Schema with id '{}' not found in cache", schema_id)
}
"errors": [{
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"cause": "Schema must be cached before validation"
}
}]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "success": true })),
Ok(_) => JsonB(json!({ "response": "success" })),
Err(validation_error) => {
let error = format_validation_error(&validation_error);
JsonB(json!({
"success": false,
"error": error
}))
let mut error_list = Vec::new();
collect_validation_errors(&validation_error, &mut error_list);
let errors = format_drop_errors(error_list, &instance_value);
JsonB(json!({ "errors": errors }))
}
}
}
}
}
fn format_validation_error(error: &ValidationError) -> Value {
json!({
"instance_path": error.instance_location.to_string(),
"schema_path": error.schema_url.to_string(),
"kind": format!("{:?}", error.kind),
"message": format!("{}", error),
"error": error
.causes
.iter()
.map(format_validation_error)
.collect::<Vec<_>>()
})
// Recursively collects validation errors
fn collect_validation_errors(error: &ValidationError, errors_list: &mut Vec<(String, String, String)>) {
// Check if this is a structural error that we should skip
let error_message = format!("{}", error.kind);
let is_structural = error_message == "validation failed" ||
error_message == "allOf failed" ||
error_message == "anyOf failed" ||
error_message == "not failed" ||
error_message.starts_with("oneOf failed");
if error.causes.is_empty() && !is_structural {
// This is a leaf error that's not structural
// Format just the error kind, not the whole validation error
let message = format!("{}", error.kind);
errors_list.push((
error.instance_location.to_string(),
error.schema_url.to_string(),
message
));
} else {
// Recurse into causes
for cause in &error.causes {
collect_validation_errors(cause, errors_list);
}
}
}
// Formats errors according to DropError structure
fn format_drop_errors(raw_errors: Vec<(String, String, String)>, instance: &Value) -> Vec<Value> {
use std::collections::HashMap;
use std::collections::hash_map::Entry;
// We don't filter structural paths from instance paths anymore
// because instance paths shouldn't contain these segments anyway
// The issue was likely with schema paths, not instance paths
let plausible_errors = raw_errors;
// 2. Deduplicate by instance_path and format as DropError
let mut unique_errors: HashMap<String, Value> = HashMap::new();
for (instance_path, schema_path, message) in plausible_errors {
if let Entry::Vacant(entry) = unique_errors.entry(instance_path.clone()) {
// Convert message to error code and make it human readable
let (code, human_message) = enhance_error_message(&message);
// Extract the failing value from the instance
let failing_value = extract_value_at_path(instance, &instance_path);
entry.insert(json!({
"code": code,
"message": human_message,
"details": {
"path": schema_path,
"context": json!({
"instance_path": instance_path,
"failing_value": failing_value
}),
"cause": message // Original error message
}
}));
}
}
unique_errors.into_values().collect()
}
// Helper function to extract value at a JSON pointer path
fn extract_value_at_path(instance: &Value, path: &str) -> Value {
let parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let mut current = instance;
for part in parts {
match current {
Value::Object(map) => {
if let Some(value) = map.get(part) {
current = value;
} else {
return Value::Null;
}
}
Value::Array(arr) => {
if let Ok(index) = part.parse::<usize>() {
if let Some(value) = arr.get(index) {
current = value;
} else {
return Value::Null;
}
} else {
return Value::Null;
}
}
_ => return Value::Null,
}
}
current.clone()
}
// Helper to convert validation messages to error codes and human-readable messages
fn enhance_error_message(message: &str) -> (String, String) {
// Match exact boon error message patterns
let trimmed = message.trim();
if trimmed.contains("value must be one of") {
("ENUM_VIOLATED".to_string(),
"Value is not one of the allowed options".to_string())
} else if trimmed.contains("length must be >=") && trimmed.contains("but got") {
("MIN_LENGTH_VIOLATED".to_string(),
"Field length is below the minimum required".to_string())
} else if trimmed.contains("length must be <=") && trimmed.contains("but got") {
("MAX_LENGTH_VIOLATED".to_string(),
"Field length exceeds the maximum allowed".to_string())
} else if trimmed.contains("must be >=") && trimmed.contains("but got") {
("MINIMUM_VIOLATED".to_string(),
"Value is below the minimum allowed".to_string())
} else if trimmed.contains("must be <=") && trimmed.contains("but got") {
("MAXIMUM_VIOLATED".to_string(),
"Value exceeds the maximum allowed".to_string())
} else if trimmed.contains("must be >") && trimmed.contains("but got") {
("EXCLUSIVE_MINIMUM_VIOLATED".to_string(),
"Value must be greater than the minimum".to_string())
} else if trimmed.contains("must be <") && trimmed.contains("but got") {
("EXCLUSIVE_MAXIMUM_VIOLATED".to_string(),
"Value must be less than the maximum".to_string())
} else if trimmed.contains("does not match pattern") {
("PATTERN_VIOLATED".to_string(),
"Value does not match the required pattern".to_string())
} else if trimmed.contains("missing properties") {
("REQUIRED_FIELD_MISSING".to_string(),
"Required field is missing".to_string())
} else if trimmed.contains("want") && trimmed.contains("but got") {
("TYPE_MISMATCH".to_string(),
"Field type does not match the expected type".to_string())
} else if trimmed.starts_with("value must be") && !trimmed.contains("one of") {
("CONST_VIOLATED".to_string(),
"Value does not match the required constant".to_string())
} else if trimmed.contains("is not valid") && trimmed.contains(":") {
("FORMAT_INVALID".to_string(),
extract_format_message(trimmed))
} else if trimmed.contains("items at") && trimmed.contains("are equal") {
("UNIQUE_ITEMS_VIOLATED".to_string(),
"Array contains duplicate items".to_string())
} else if trimmed.contains("additionalProperties") && trimmed.contains("not allowed") {
("ADDITIONAL_PROPERTIES_NOT_ALLOWED".to_string(),
"Object contains properties that are not allowed".to_string())
} else if trimmed.contains("is not multipleOf") {
("MULTIPLE_OF_VIOLATED".to_string(),
"Value is not a multiple of the required factor".to_string())
} else if trimmed.contains("minimum") && trimmed.contains("properties required") {
("MIN_PROPERTIES_VIOLATED".to_string(),
"Object has fewer properties than required".to_string())
} else if trimmed.contains("maximum") && trimmed.contains("properties required") {
("MAX_PROPERTIES_VIOLATED".to_string(),
"Object has more properties than allowed".to_string())
} else if trimmed.contains("minimum") && trimmed.contains("items required") {
("MIN_ITEMS_VIOLATED".to_string(),
"Array has fewer items than required".to_string())
} else if trimmed.contains("maximum") && trimmed.contains("items required") {
("MAX_ITEMS_VIOLATED".to_string(),
"Array has more items than allowed".to_string())
} else if trimmed == "false schema" {
("FALSE_SCHEMA".to_string(),
"Schema validation always fails".to_string())
} else if trimmed == "not failed" {
("NOT_VIOLATED".to_string(),
"Value matched a schema it should not match".to_string())
} else if trimmed == "allOf failed" {
("ALL_OF_VIOLATED".to_string(),
"Value does not match all required schemas".to_string())
} else if trimmed == "anyOf failed" {
("ANY_OF_VIOLATED".to_string(),
"Value does not match any of the allowed schemas".to_string())
} else if trimmed.contains("oneOf failed") {
("ONE_OF_VIOLATED".to_string(),
"Value must match exactly one schema".to_string())
} else if trimmed == "validation failed" {
("VALIDATION_FAILED".to_string(),
"Validation failed".to_string())
} else {
// For any unmatched patterns, try to provide a generic human-readable message
// while preserving the original error in details.cause
("VALIDATION_FAILED".to_string(),
"Validation failed".to_string())
}
}
// Extract a better format message
fn extract_format_message(message: &str) -> String {
if message.contains("date-time") {
"Invalid date-time format".to_string()
} else if message.contains("email") {
"Invalid email format".to_string()
} else if message.contains("uri") {
"Invalid URI format".to_string()
} else if message.contains("uuid") {
"Invalid UUID format".to_string()
} else {
"Invalid format".to_string()
}
}
#[pg_extern(strict, parallel_safe)]
@ -126,305 +316,40 @@ fn json_schema_cached(schema_id: &str) -> bool {
}
#[pg_extern(strict)]
fn clear_json_schemas() {
fn clear_json_schemas() -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
*cache = BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
};
JsonB(json!({ "response": "success" }))
}
#[pg_extern(strict, parallel_safe)]
fn show_json_schemas() -> Vec<String> {
fn show_json_schemas() -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
ids
}
#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
use pgrx::*;
use pgrx::pg_test;
use super::*;
use serde_json::json;
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
fn setup_test() {
clear_json_schemas();
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
setup_test();
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert!(cache_result.0["success"].as_bool().unwrap());
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert!(valid_result.0["success"].as_bool().unwrap());
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert!(!invalid_result_type.0["success"].as_bool().unwrap());
let error_obj_type = invalid_result_type.0.get("error").expect("Expected top-level 'error' object");
let causes_age = error_obj_type.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert!(!causes_age.is_empty(), "Expected causes for invalid age");
let first_cause_age = &causes_age[0];
assert!(first_cause_age["kind"].as_str().unwrap().contains("Minimum"), "Kind '{}' should contain Minimum", first_cause_age["kind"]);
let msg = first_cause_age["message"].as_str().unwrap_or("");
assert!(msg.contains("must be >=0"), "Error message mismatch for age minimum: {}", msg);
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert!(!invalid_result_missing.0["success"].as_bool().unwrap());
let error_obj_missing = invalid_result_missing.0.get("error").expect("Expected top-level 'error' object");
let causes_missing = error_obj_missing.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes) for missing");
assert!(!causes_missing.is_empty(), "Expected causes for missing age");
let first_cause_missing = &causes_missing[0];
assert!(first_cause_missing["kind"].as_str().unwrap().contains("Required"));
let msg_missing = first_cause_missing["message"].as_str().unwrap_or("");
assert!(msg_missing.contains("missing properties 'age'"), "Error message mismatch for missing 'age': {}", msg_missing);
assert!(first_cause_missing["instance_path"] == "", "Expected empty instance path for missing field");
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert!(!invalid_schema_result.0["success"].as_bool().unwrap());
let schema_not_found_error = invalid_schema_result.0
.get("error") // Top level error object
.expect("Expected top-level 'error' object for schema not found");
assert_eq!(schema_not_found_error["kind"], "SchemaNotFound");
assert!(schema_not_found_error["message"].as_str().unwrap().contains(non_existent_id));
}
#[pg_test]
fn test_validate_json_schema_not_cached() {
setup_test();
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
assert!(!result.0["success"].as_bool().unwrap());
let error_obj = result.0.get("error").expect("Expected top-level 'error' object");
assert_eq!(error_obj["kind"], "SchemaNotFound");
assert!(error_obj["message"].as_str().unwrap().contains("non_existent_schema"));
}
#[pg_test]
fn test_cache_invalid_json_schema() {
setup_test();
let schema_id = "invalid_schema";
let invalid_schema_json = "{\"type\": \"string\" \"maxLength\": 5}";
let invalid_schema_value: Result<Value, _> = serde_json::from_str(invalid_schema_json);
assert!(invalid_schema_value.is_err(), "Test setup assumes invalid JSON string");
let schema_representing_invalid = json!({
"type": 123
});
let result = cache_json_schema(schema_id, jsonb(schema_representing_invalid.clone()));
assert!(!result.0["success"].as_bool().unwrap());
let error_obj = result.0.get("error").expect("Expected top-level 'error' object for compilation failure");
assert_eq!(error_obj.get("kind").and_then(Value::as_str), Some("SchemaCompilationError"));
assert_eq!(error_obj.get("sub_kind").and_then(Value::as_str), Some("ValidationError"), "Expected sub_kind 'ValidationError' for metaschema failure");
assert!(error_obj.get("message").and_then(Value::as_str).is_some(), "Expected 'message' field in error object");
assert!(error_obj["message"].as_str().unwrap().contains("Schema failed validation against its metaschema"), "Error message mismatch");
assert_eq!(error_obj.get("schema_id").and_then(Value::as_str), Some(schema_id));
let failed_at_url = error_obj.get("failed_at_url").and_then(Value::as_str).expect("Expected 'failed_at_url' string");
assert!(failed_at_url.ends_with(&format!("{}#", schema_id)), "failed_at_url ('{}') should end with schema_id + '#' ('{}#')", failed_at_url, schema_id);
assert!(error_obj.get("validation_details").and_then(Value::as_str).is_some(), "Expected 'validation_details' field");
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
setup_test();
let schema_id = "detailed_schema";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let invalid_instance = json!({
"address": {
"street": 123,
"city": "Supercalifragilisticexpialidocious"
}
});
assert!(cache_json_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
assert!(!result.0["success"].as_bool().unwrap());
let error_obj = result.0.get("error").expect("Expected top-level 'error' object");
let causes = error_obj.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert!(causes.len() >= 2, "Expected at least 2 detailed causes");
let street_error = causes.iter().find(|e| e["instance_path"] == "/address/street").expect("Missing street error");
assert!(street_error["kind"].as_str().unwrap().contains("Type"), "Kind '{}' should contain Type", street_error["kind"]);
let street_msg = street_error["message"].as_str().unwrap_or("null");
assert!(street_msg.contains("want string, but got number"), "Street message mismatch: {}", street_msg);
let city_error = causes.iter().find(|e| e["instance_path"] == "/address/city").expect("Missing city error");
assert!(city_error["kind"].as_str().unwrap().contains("MaxLength"), "Kind '{}' should contain MaxLength", city_error["kind"]);
let city_msg = city_error["message"].as_str().unwrap_or("null");
assert!(city_msg.contains("length must be <=10"), "City message mismatch: {}", city_msg);
assert_eq!(causes.len(), 2, "Expected exactly 2 errors (street type, city length)");
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
setup_test();
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
cache_json_schema(schema_id, jsonb(schema));
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
assert!(!result_invalid_string.0["success"].as_bool().unwrap());
let error_obj_string = result_invalid_string.0.get("error").expect("Expected top-level 'error' object");
assert!(error_obj_string["kind"].as_str().unwrap().contains("Schema"), "Top level kind '{}' should contain Schema for OneOf failure", error_obj_string["kind"]);
assert!(error_obj_string["message"].as_str().unwrap().contains("oneOf failed, none matched"), "OneOf message mismatch: {}", error_obj_string["message"]); // Final adjustment
let causes_string = error_obj_string.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert_eq!(causes_string.len(), 1, "Expected one cause for oneOf failure (string)");
let nested_causes_string = causes_string[0].get("error").and_then(Value::as_array).expect("Expected deeper nested causes for string oneOf");
assert_eq!(nested_causes_string.len(), 2, "Expected two nested causes for string oneOf");
let string_schema_fail = nested_causes_string.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/0/properties/string_prop")).expect("Missing nested cause for string schema");
assert_eq!(string_schema_fail["instance_path"].as_str().unwrap(), "/string_prop", "Instance path should be /string_prop");
assert!(string_schema_fail["kind"].as_str().unwrap().contains("MaxLength"), "Nested string cause kind should be MaxLength");
let number_schema_fail = nested_causes_string.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/1")).expect("Missing nested cause for number schema");
assert_eq!(number_schema_fail["instance_path"].as_str().unwrap(), "", "Instance path for branch 2 type mismatch should be empty");
assert!(number_schema_fail["kind"].as_str().unwrap().contains("Required"), "Nested number cause kind should be Required");
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
assert!(!result_invalid_number.0["success"].as_bool().unwrap());
let error_obj_number = result_invalid_number.0.get("error").expect("Expected top-level 'error' object");
assert!(error_obj_number["kind"].as_str().unwrap().contains("Schema"), "Top level kind '{}' should contain Schema for OneOf failure", error_obj_number["kind"]);
assert!(error_obj_number["message"].as_str().unwrap().contains("oneOf failed, none matched"), "OneOf message mismatch: {}", error_obj_number["message"]); // Final adjustment
let causes_number = error_obj_number.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert_eq!(causes_number.len(), 1, "Expected one cause for oneOf failure (number)");
let nested_causes_number = causes_number[0].get("error").and_then(Value::as_array).expect("Expected deeper nested causes for number oneOf");
assert_eq!(nested_causes_number.len(), 2, "Expected two nested causes for number oneOf");
let string_schema_fail_num = nested_causes_number.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/0")).expect("Missing nested cause for string schema (number case)");
assert_eq!(string_schema_fail_num["instance_path"].as_str().unwrap(), "", "Instance path for branch 1 type mismatch should be empty");
assert!(string_schema_fail_num["kind"].as_str().unwrap().contains("Required"), "Nested string cause kind should be Required (number case)");
let number_schema_fail_num = nested_causes_number.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/1/properties/number_prop")).expect("Missing nested cause for number schema (number case)");
assert_eq!(number_schema_fail_num["instance_path"].as_str().unwrap(), "/number_prop", "Instance path should be /number_prop (number case)");
assert!(number_schema_fail_num["kind"].as_str().unwrap().contains("Minimum"), "Nested number cause kind should be Minimum (number case)");
let invalid_bool_instance = json!({ "other_prop": true });
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
assert!(!result_invalid_bool.0["success"].as_bool().unwrap());
let error_obj_bool = result_invalid_bool.0.get("error").expect("Expected top-level 'error' object");
assert!(error_obj_bool["kind"].as_str().unwrap().contains("Schema"), "Top level kind '{}' should contain Schema for OneOf failure", error_obj_bool["kind"]);
assert!(error_obj_bool["message"].as_str().unwrap().contains("oneOf failed, none matched"), "OneOf message mismatch: {}", error_obj_bool["message"]); // Final adjustment
let causes_bool = error_obj_bool.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert_eq!(causes_bool.len(), 1, "Expected one cause for oneOf failure (bool)");
let nested_causes_bool = causes_bool[0].get("error").and_then(Value::as_array).expect("Expected deeper nested causes for bool oneOf");
assert_eq!(nested_causes_bool.len(), 2, "Expected two nested causes for bool oneOf");
let bool_fail_0 = nested_causes_bool.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/0")).expect("Missing nested cause for branch 0 type fail");
assert_eq!(bool_fail_0["instance_path"].as_str().unwrap(), "", "Instance path for branch 0 type fail should be empty");
assert!(bool_fail_0["kind"].as_str().unwrap().contains("Required"), "Nested bool cause 0 kind should be Required");
let bool_fail_1 = nested_causes_bool.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/1")).expect("Missing nested cause for branch 1 type fail");
assert_eq!(bool_fail_1["instance_path"].as_str().unwrap(), "", "Instance path for branch 1 type fail should be empty");
assert!(bool_fail_1["kind"].as_str().unwrap().contains("Required"), "Nested bool cause 1 kind should be Required");
}
#[pg_test]
fn test_clear_json_schemas() {
setup_test();
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_json_schema(schema_id, jsonb(schema.clone()));
let show_result1 = show_json_schemas();
assert!(show_result1.contains(&schema_id.to_string()));
clear_json_schemas();
let show_result2 = show_json_schemas();
assert!(show_result2.is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
assert!(!validate_result.0["success"].as_bool().unwrap());
let error_obj = validate_result.0.get("error").expect("Expected top-level 'error' object");
assert_eq!(error_obj["kind"], "SchemaNotFound");
assert!(error_obj["message"].as_str().unwrap().contains(schema_id));
}
#[pg_test]
fn test_show_json_schemas() {
setup_test();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
cache_json_schema(schema_id1, jsonb(schema.clone()));
cache_json_schema(schema_id2, jsonb(schema.clone()));
let result = show_json_schemas();
assert!(result.contains(&schema_id1.to_string()));
assert!(result.contains(&schema_id2.to_string()));
}
JsonB(json!({ "response": ids }))
}
/// This module is required by `cargo pgrx test` invocations.
/// It must be visible at the root of your extension crate.
#[cfg(test)]
pub mod pg_test {
pub fn setup(_options: Vec<&str>) {
// perform one-off initialization when the pg_test framework starts
}
#[must_use]
pub fn postgresql_conf_options() -> Vec<&'static str> {
// return any postgresql.conf settings that are required for your tests
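// e.g. vec!["log_min_messages = 'warning'"] if a test ever needs a specific server
// setting; the schema-cache tests in this crate run fine with the defaults.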
vec![]
}
}
}
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
include!("tests.rs");
}

378
src/tests.rs Normal file
View File

@ -0,0 +1,378 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};
// Helper macro for asserting success with Drop-style response
macro_rules! assert_success_with_json {
($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
let has_response = $result_jsonb.0.get("response").is_some();
let has_errors = $result_jsonb.0.get("errors").is_some();
if !has_response || has_errors {
let base_msg = format!($fmt $(, $($args)*)?);
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success with 'response' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("{}", panic_msg);
}
};
// Simpler version without message
($result_jsonb:expr) => {
let has_response = $result_jsonb.0.get("response").is_some();
let has_errors = $result_jsonb.0.get("errors").is_some();
if !has_response || has_errors {
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success with 'response' field)\nResult JSON:\n{}", pretty_json);
panic!("{}", panic_msg);
}
};
}
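// Illustrative usage (hypothetical result value, not part of the test suite below):
//
//     let result = cache_json_schema("example_schema", jsonb(json!({ "type": "object" })));
//     assert_success_with_json!(result);
//     assert_success_with_json!(result, "caching '{}' should succeed", "example_schema");
//
// Both forms panic with the pretty-printed JSON when "response" is missing or "errors" is present.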
// Helper macro for asserting failed JSON results with Drop-style errors
macro_rules! assert_failure_with_json {
// --- Arms with error count and message substring check ---
// With custom message:
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
if $expected_error_count > 0 {
let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
};
// --- Arms with error count check only ---
// With custom message:
($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr) => {
assert_failure_with_json!($result, $expected_error_count, "");
};
// --- Arms checking failure only (expects at least one error) ---
// With custom message:
($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but 'errors' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
};
// Without custom message (expects at least one error and no "response" field):
($result:expr) => {
let json_result = &$result.0;
let errors_len = json_result.get("errors").and_then(Value::as_array).map_or(0, |a| a.len());
if json_result.get("response").is_some() || errors_len == 0 {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with a non-empty 'errors' array)\nResult JSON:\n{}", pretty_json);
}
};
}
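// Illustrative invocation shapes (hypothetical result value):
//
//     assert_failure_with_json!(result);                 // any non-empty "errors" array
//     assert_failure_with_json!(result, 2);              // exact error count
//     assert_failure_with_json!(result, 1, "not found"); // count plus first-message substring
//
// Note: a bare string as the sole second argument (e.g. `assert_failure_with_json!(result, "oops")`)
// is captured by the error-count arm and will not compile, so custom messages are always
// paired with an expected count in the tests below.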
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
clear_json_schemas(); // start from an empty schema cache
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert_success_with_json!(cache_result, "Cache operation should succeed.");
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
// Invalid value: age is negative, violating the minimum constraint
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert_failure_with_json!(invalid_result_type, 1, "Value is below the minimum allowed", "Validation with a below-minimum value should fail.");
let errors_type = invalid_result_type.0["errors"].as_array().unwrap();
assert_eq!(errors_type[0]["details"]["context"]["instance_path"], "/age");
assert_eq!(errors_type[0]["details"]["path"], "urn:my_schema#/properties/age");
assert_eq!(errors_type[0]["code"], "MINIMUM_VIOLATED");
// Missing field
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert_failure_with_json!(invalid_result_missing, 1, "Required field is missing", "Validation with missing field should fail.");
let errors_missing = invalid_result_missing.0["errors"].as_array().unwrap();
assert_eq!(errors_missing[0]["details"]["context"]["instance_path"], "");
assert_eq!(errors_missing[0]["details"]["path"], "urn:my_schema#");
assert_eq!(errors_missing[0]["code"], "REQUIRED_FIELD_MISSING");
// Schema not found
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert_failure_with_json!(invalid_schema_result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
let errors_notfound = invalid_schema_result.0["errors"].as_array().unwrap();
assert_eq!(errors_notfound[0]["code"], "SCHEMA_NOT_FOUND");
}
#[pg_test]
fn test_validate_json_schema_not_cached() {
clear_json_schemas();
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
assert_failure_with_json!(result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
}
#[pg_test]
fn test_cache_invalid_json_schema() {
clear_json_schemas();
let schema_id = "invalid_schema";
// Schema with an invalid type *value*
let invalid_schema = json!({
"$id": "urn:invalid_schema",
"type": ["invalid_type_value"]
});
let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));
// Expect 2 leaf errors: meta-schema validation fails both at the "type" keyword value
// and at the element inside the type array.
assert_failure_with_json!(
cache_result,
2, // Expect exactly two leaf errors
"Value is not one of the allowed options", // Updated to human-readable message
"Caching invalid schema should fail with specific meta-schema validation errors."
);
// Ensure the errors array exists and check specifics
let errors_array = cache_result.0["errors"].as_array().expect("Errors field should be an array");
assert_eq!(errors_array.len(), 2);
// Both errors should have ENUM_VIOLATED code
assert_eq!(errors_array[0]["code"], "ENUM_VIOLATED");
assert_eq!(errors_array[1]["code"], "ENUM_VIOLATED");
// Check instance paths are preserved in context
let paths: Vec<&str> = errors_array.iter()
.map(|e| e["details"]["context"]["instance_path"].as_str().unwrap())
.collect();
assert!(paths.contains(&"/type"));
assert!(paths.contains(&"/type/0"));
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
clear_json_schemas(); // start from an empty schema cache
let schema_id = "detailed_errors";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
let invalid_instance = json!({
"address": {
"street": 123, // Wrong type
"city": "Supercalifragilisticexpialidocious" // Too long
}
});
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
// Expect 2 leaf errors: boon reports both nested failures (street type mismatch and city maxLength).
assert_failure_with_json!(result, 2);
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
clear_json_schemas(); // start from an empty schema cache
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{ // Option 1: Object with string prop
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{ // Option 2: Object with number prop
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_string, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_string = result_invalid_string.0["errors"].as_array().expect("Expected error array for invalid string");
assert!(errors_string.iter().any(|e|
e["details"]["context"]["instance_path"] == "/string_prop" &&
e["code"] == "MAX_LENGTH_VIOLATED"
), "Missing maxLength error");
assert!(errors_string.iter().any(|e|
e["details"]["context"]["instance_path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
), "Missing number_prop required error");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_number, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_number = result_invalid_number.0["errors"].as_array().expect("Expected error array for invalid number");
assert!(errors_number.iter().any(|e|
e["details"]["context"]["instance_path"] == "/number_prop" &&
e["code"] == "MINIMUM_VIOLATED"
), "Missing minimum error");
assert!(errors_number.iter().any(|e|
e["details"]["context"]["instance_path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
), "Missing string_prop required error");
// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_invalid_bool, 1);
// Explicitly check that the single remaining error is the type error for the root instance path
let errors_bool = result_invalid_bool.0["errors"].as_array().expect("Expected error array for invalid bool");
assert_eq!(errors_bool.len(), 1, "Expected exactly one error after deduplication");
assert_eq!(errors_bool[0]["code"], "TYPE_MISMATCH");
assert_eq!(errors_bool[0]["details"]["context"]["instance_path"], "");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_empty_obj, 1);
// Explicitly check that the single remaining error is one of the expected missing properties errors
let errors_empty = result_empty_obj.0["errors"].as_array().expect("Expected error array for empty object");
assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
assert_eq!(errors_empty[0]["code"], "REQUIRED_FIELD_MISSING");
assert_eq!(errors_empty[0]["details"]["context"]["instance_path"], "");
// The human message should be generic
assert_eq!(errors_empty[0]["message"], "Required field is missing");
}
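// A minimal sketch of the deduplication the comments above describe (hypothetical, not
// the extension's actual implementation): keep one leaf error per instance_path.
//
//     fn dedup_by_instance_path(errors: Vec<Value>) -> Vec<Value> {
//         let mut seen = std::collections::HashSet::new();
//         errors
//             .into_iter()
//             .filter(|e| seen.insert(e["details"]["context"]["instance_path"].to_string()))
//             .collect()
//     }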
#[pg_test]
fn test_clear_json_schemas() {
let clear_result = clear_json_schemas();
assert_success_with_json!(clear_result);
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert_success_with_json!(cache_result);
let show_result1 = show_json_schemas();
let schemas1 = show_result1.0["response"].as_array().unwrap();
assert!(schemas1.contains(&json!(schema_id)));
let clear_result2 = clear_json_schemas();
assert_success_with_json!(clear_result2);
let show_result2 = show_json_schemas();
let schemas2 = show_result2.0["response"].as_array().unwrap();
assert!(schemas2.is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
assert_failure_with_json!(validate_result, 1, "Schema 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}
#[pg_test]
fn test_show_json_schemas() {
let _ = clear_json_schemas();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
let _ = cache_json_schema(schema_id1, jsonb(schema.clone()));
let _ = cache_json_schema(schema_id2, jsonb(schema.clone()));
let result = show_json_schemas();
let schemas = result.0["response"].as_array().unwrap();
assert_eq!(schemas.len(), 2);
assert!(schemas.contains(&json!(schema_id1)));
assert!(schemas.contains(&json!(schema_id2)));
}

View File

@ -1 +1 @@
1.0.12
1.0.21