Compare commits

...

41 Commits

Author SHA1 Message Date
03beada825 version: 1.0.25 2025-06-11 20:28:46 -04:00
efdd7528cc switched strict validation from additionalProperties to unevaluatedProperties to catch conditional properties automatically in verification 2025-06-11 20:28:39 -04:00
59395a33ac version: 1.0.24 2025-06-11 19:38:56 -04:00
92c0a6fc0b even more jspg improved error handling, missing some codes before 2025-06-11 19:38:46 -04:00
7f66a4a35a no-op 2025-06-10 16:01:58 -04:00
d37aadb0dd version: 1.0.23 2025-06-09 18:09:33 -04:00
d0ccc47d97 added strict validation option 2025-06-09 18:09:15 -04:00
2d19bf100e version: 1.0.22 2025-06-06 14:25:18 -04:00
fb333c6cbb slight improvements to error messaging 2025-06-06 14:25:13 -04:00
d8a9a7b76b version: 1.0.21 2025-06-06 14:05:24 -04:00
c9022aefb9 fixed env 2025-06-06 14:05:19 -04:00
ccf0465e45 fixed gitignore 2025-06-06 14:02:43 -04:00
dce50d9dc3 error handling improvements to jspg to match drop structure 2025-06-06 13:58:50 -04:00
8ec6a5b58a flow updates 2025-05-29 17:51:16 -04:00
6ef7e0c55e flow update 2025-04-25 13:34:06 -04:00
1cb5fb0ecf removed random .env 2025-04-25 12:22:07 -04:00
d66aae8ae2 flow update 2025-04-24 20:02:18 -04:00
3b18901bda version: 1.0.20 2025-04-21 17:11:30 -04:00
b8c0e08068 more filtering 2025-04-21 17:11:24 -04:00
c734983a59 version: 1.0.19 2025-04-21 16:15:08 -04:00
9b11f661bc fixed release bug 2025-04-21 16:15:02 -04:00
f3a733626e version: 1.0.18 2025-04-21 16:13:16 -04:00
2bcdb8adbb version: 1.0.17 2025-04-21 16:11:31 -04:00
3988308965 branch error filtering 2025-04-21 16:11:12 -04:00
b7f528d1f6 flow 2025-04-16 21:14:07 -04:00
2febb292dc flow update 2025-04-16 20:00:35 -04:00
d1831a28ec flow update 2025-04-16 19:34:09 -04:00
c5834ac544 flow updated 2025-04-16 18:07:41 -04:00
eb25f8489e version: 1.0.16 2025-04-16 14:43:07 -04:00
21937db8de improved compile schema error messages 2025-04-16 14:42:57 -04:00
28b689cac0 version: 1.0.15 2025-04-16 01:00:57 -04:00
cc04a1a8bb made errors consistent 2025-04-16 01:00:51 -04:00
3ceb8a0770 version: 1.0.14 2025-04-16 00:38:10 -04:00
499bf68b2a more error cleanup 2025-04-16 00:38:04 -04:00
6ca00f27e9 version: 1.0.13 2025-04-15 23:30:57 -04:00
520be66035 better error messaging 2025-04-15 23:30:47 -04:00
c3146ca433 flow update 2025-04-15 01:52:12 -04:00
b4d9628b05 version: 1.0.12 2025-04-15 00:25:39 -04:00
635d31d723 more validation fixes 2025-04-15 00:25:29 -04:00
08efcb92db version: 1.0.11 2025-04-14 21:53:39 -04:00
dad1216e1f more validation fixes 2025-04-14 21:53:30 -04:00
9 changed files with 1000 additions and 960 deletions
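Commit efdd7528cc switches strict validation from additionalProperties to unevaluatedProperties so that properties declared in conditional subschemas are still recognized. Below is a minimal sketch of the difference, using the boon crate the extension already depends on; the schema, instance, and urn are illustrative, and the sketch assumes boon's default draft (2020-12), which supports unevaluatedProperties.

use boon::{Compiler, Schemas};
use serde_json::json;

fn main() {
    // "discount" is only declared inside the if/then branch. With
    // "additionalProperties": false at the top level it would be rejected,
    // because additionalProperties cannot see properties contributed by
    // conditional subschemas; "unevaluatedProperties": false can.
    let schema = json!({
        "type": "object",
        "properties": { "kind": { "type": "string" } },
        "if": { "properties": { "kind": { "const": "sale" } } },
        "then": { "properties": { "discount": { "type": "number" } } },
        "unevaluatedProperties": false
    });
    let instance = json!({ "kind": "sale", "discount": 0.25 }); // illustrative data

    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.add_resource("urn:example", schema).unwrap();
    let idx = compiler.compile("urn:example", &mut schemas).unwrap();
    // Passes here; the same instance fails if the keyword is additionalProperties.
    assert!(schemas.validate(&instance, idx).is_ok());
}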

1
.gitignore vendored

@ -1,2 +1,3 @@
/target /target
/package /package
.env

542
Cargo.lock generated

@ -26,7 +26,6 @@ dependencies = [
"cfg-if", "cfg-if",
"getrandom 0.2.15", "getrandom 0.2.15",
"once_cell", "once_cell",
"serde",
"version_check", "version_check",
"zerocopy 0.7.35", "zerocopy 0.7.35",
] ]
@ -113,7 +112,7 @@ dependencies = [
"miniz_oxide", "miniz_oxide",
"object", "object",
"rustc-demangle", "rustc-demangle",
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -215,12 +214,6 @@ version = "3.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
[[package]]
name = "bytecount"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce"
[[package]] [[package]]
name = "byteorder" name = "byteorder"
version = "1.5.0" version = "1.5.0"
@ -425,15 +418,6 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "email_address"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "enum-map" name = "enum-map"
version = "2.7.3" version = "2.7.3"
@ -486,17 +470,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set",
"regex-automata",
"regex-syntax",
]
[[package]] [[package]]
name = "fastrand" name = "fastrand"
version = "2.3.0" version = "2.3.0"
@ -517,7 +490,6 @@ checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
dependencies = [ dependencies = [
"borrow-or-share", "borrow-or-share",
"ref-cast", "ref-cast",
"serde",
] ]
[[package]] [[package]]
@ -541,16 +513,6 @@ dependencies = [
"percent-encoding", "percent-encoding",
] ]
[[package]]
name = "fraction"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f158e3ff0a1b334408dc9fb811cd99b446986f4d8b741bb08f9df1604085ae7"
dependencies = [
"lazy_static",
"num",
]
[[package]] [[package]]
name = "funty" name = "funty"
version = "2.0.0" version = "2.0.0"
@ -573,12 +535,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "futures-io"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
[[package]] [[package]]
name = "futures-macro" name = "futures-macro"
version = "0.3.31" version = "0.3.31"
@ -609,11 +565,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-io",
"futures-macro", "futures-macro",
"futures-sink", "futures-sink",
"futures-task", "futures-task",
"memchr",
"pin-project-lite", "pin-project-lite",
"pin-utils", "pin-utils",
"slab", "slab",
@ -730,85 +684,6 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "http"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http",
]
[[package]]
name = "http-body-util"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
"http",
"http-body",
"pin-project-lite",
]
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "hyper"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http",
"http-body",
"httparse",
"itoa",
"pin-project-lite",
"smallvec",
"tokio",
"want",
]
[[package]]
name = "hyper-util"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2"
dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http",
"http-body",
"hyper",
"libc",
"pin-project-lite",
"socket2",
"tokio",
"tower-service",
"tracing",
]
[[package]] [[package]]
name = "icu_collections" name = "icu_collections"
version = "1.5.0" version = "1.5.0"
@ -964,12 +839,6 @@ dependencies = [
"hashbrown", "hashbrown",
] ]
[[package]]
name = "ipnet"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
[[package]] [[package]]
name = "is-terminal" name = "is-terminal"
version = "0.4.16" version = "0.4.16"
@ -1012,37 +881,11 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "jsonschema"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "161c33c3ec738cfea3288c5c53dfcdb32fd4fc2954de86ea06f71b5a1a40bfcd"
dependencies = [
"ahash",
"base64",
"bytecount",
"email_address",
"fancy-regex",
"fraction",
"idna",
"itoa",
"num-cmp",
"once_cell",
"percent-encoding",
"referencing",
"regex-syntax",
"reqwest",
"serde",
"serde_json",
"uuid-simd",
]
[[package]] [[package]]
name = "jspg" name = "jspg"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"boon", "boon",
"jsonschema",
"lazy_static", "lazy_static",
"pgrx", "pgrx",
"pgrx-tests", "pgrx-tests",
@ -1069,7 +912,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -1116,12 +959,6 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "mime"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]] [[package]]
name = "minimal-lexical" name = "minimal-lexical"
version = "0.2.1" version = "0.2.1"
@ -1167,76 +1004,6 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "num"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"num-integer",
"num-traits",
]
[[package]]
name = "num-cmp"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"
[[package]]
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]] [[package]]
name = "num-traits" name = "num-traits"
version = "0.2.19" version = "0.2.19"
@ -1270,12 +1037,6 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "outref"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
[[package]] [[package]]
name = "owo-colors" name = "owo-colors"
version = "4.2.0" version = "4.2.0"
@ -1306,7 +1067,7 @@ dependencies = [
"libc", "libc",
"redox_syscall", "redox_syscall",
"smallvec", "smallvec",
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -1710,20 +1471,6 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "referencing"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40a64b3a635fad9000648b4d8a59c8710c523ab61a23d392a7d91d47683f5adc"
dependencies = [
"ahash",
"fluent-uri",
"once_cell",
"parking_lot",
"percent-encoding",
"serde_json",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.11.1" version = "1.11.1"
@ -1753,43 +1500,6 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "reqwest"
version = "0.12.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb"
dependencies = [
"base64",
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"ipnet",
"js-sys",
"log",
"mime",
"once_cell",
"percent-encoding",
"pin-project-lite",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tower",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"windows-registry",
]
[[package]] [[package]]
name = "rustc-demangle" name = "rustc-demangle"
version = "0.1.24" version = "0.1.24"
@ -1824,12 +1534,6 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "rustversion"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
[[package]] [[package]]
name = "rusty-fork" name = "rusty-fork"
version = "0.3.0" version = "0.3.0"
@ -1929,18 +1633,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
dependencies = [
"form_urlencoded",
"itoa",
"ryu",
"serde",
]
[[package]] [[package]]
name = "sha2" name = "sha2"
version = "0.10.8" version = "0.10.8"
@ -2048,15 +1740,6 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "sync_wrapper"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
dependencies = [
"futures-core",
]
[[package]] [[package]]
name = "synstructure" name = "synstructure"
version = "0.13.1" version = "0.13.1"
@ -2253,58 +1936,6 @@ dependencies = [
"winnow", "winnow",
] ]
[[package]]
name = "tower"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-layer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
"pin-project-lite",
"tracing-core",
]
[[package]]
name = "tracing-core"
version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
dependencies = [
"once_cell",
]
[[package]]
name = "try-lock"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.18.0" version = "1.18.0"
@ -2394,29 +2025,12 @@ dependencies = [
"getrandom 0.3.2", "getrandom 0.3.2",
] ]
[[package]]
name = "uuid-simd"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8"
dependencies = [
"outref",
"uuid",
"vsimd",
]
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.5" version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "vsimd"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64"
[[package]] [[package]]
name = "wait-timeout" name = "wait-timeout"
version = "0.2.1" version = "0.2.1"
@ -2436,15 +2050,6 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "want"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
dependencies = [
"try-lock",
]
[[package]] [[package]]
name = "wasi" name = "wasi"
version = "0.11.0+wasi-snapshot-preview1" version = "0.11.0+wasi-snapshot-preview1"
@ -2474,7 +2079,6 @@ checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"once_cell", "once_cell",
"rustversion",
"wasm-bindgen-macro", "wasm-bindgen-macro",
] ]
@ -2492,19 +2096,6 @@ dependencies = [
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
dependencies = [
"cfg-if",
"js-sys",
"once_cell",
"wasm-bindgen",
"web-sys",
]
[[package]] [[package]]
name = "wasm-bindgen-macro" name = "wasm-bindgen-macro"
version = "0.2.100" version = "0.2.100"
@ -2596,7 +2187,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143"
dependencies = [ dependencies = [
"windows-core", "windows-core",
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -2607,8 +2198,8 @@ checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d"
dependencies = [ dependencies = [
"windows-implement", "windows-implement",
"windows-interface", "windows-interface",
"windows-result 0.1.2", "windows-result",
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -2633,48 +2224,13 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "windows-link"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
[[package]]
name = "windows-registry"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
dependencies = [
"windows-result 0.3.2",
"windows-strings",
"windows-targets 0.53.0",
]
[[package]] [[package]]
name = "windows-result" name = "windows-result"
version = "0.1.2" version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
dependencies = [ dependencies = [
"windows-targets 0.52.6", "windows-targets",
]
[[package]]
name = "windows-result"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
dependencies = [
"windows-link",
] ]
[[package]] [[package]]
@ -2683,7 +2239,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [ dependencies = [
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -2692,7 +2248,7 @@ version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [ dependencies = [
"windows-targets 0.52.6", "windows-targets",
] ]
[[package]] [[package]]
@ -2701,30 +2257,14 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [ dependencies = [
"windows_aarch64_gnullvm 0.52.6", "windows_aarch64_gnullvm",
"windows_aarch64_msvc 0.52.6", "windows_aarch64_msvc",
"windows_i686_gnu 0.52.6", "windows_i686_gnu",
"windows_i686_gnullvm 0.52.6", "windows_i686_gnullvm",
"windows_i686_msvc 0.52.6", "windows_i686_msvc",
"windows_x86_64_gnu 0.52.6", "windows_x86_64_gnu",
"windows_x86_64_gnullvm 0.52.6", "windows_x86_64_gnullvm",
"windows_x86_64_msvc 0.52.6", "windows_x86_64_msvc",
]
[[package]]
name = "windows-targets"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
"windows_i686_gnu 0.53.0",
"windows_i686_gnullvm 0.53.0",
"windows_i686_msvc 0.53.0",
"windows_x86_64_gnu 0.53.0",
"windows_x86_64_gnullvm 0.53.0",
"windows_x86_64_msvc 0.53.0",
] ]
[[package]] [[package]]
@ -2733,96 +2273,48 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_aarch64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
[[package]] [[package]]
name = "windows_i686_gnullvm" name = "windows_i686_gnullvm"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_i686_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnu"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
[[package]] [[package]]
name = "windows_x86_64_gnullvm" name = "windows_x86_64_gnullvm"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.52.6" version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "windows_x86_64_msvc"
version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.7.6" version = "0.7.6"

Cargo.toml

@ -7,7 +7,6 @@ edition = "2021"
pgrx = "0.14.0" pgrx = "0.14.0"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
jsonschema = "0.29.1"
lazy_static = "1.5.0" lazy_static = "1.5.0"
boon = "0.6.1" boon = "0.6.1"
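
With the jsonschema crate removed here (and its HTTP-oriented transitive dependencies such as reqwest and hyper dropped from Cargo.lock above), boon is the only remaining JSON Schema engine. Below is a condensed, pgrx-free sketch of the compile-once/validate-many pattern the extension uses, handy for reproducing validation outside Postgres; the struct, ids, and schema are illustrative, not code from the repository.

use boon::{Compiler, SchemaIndex, Schemas};
use serde_json::{json, Value};
use std::collections::HashMap;

// Mirrors the shape of the extension's BoonCache (illustrative, not its code).
struct Cache {
    schemas: Schemas,
    id_to_index: HashMap<String, SchemaIndex>,
}

fn cache_schema(cache: &mut Cache, id: &str, schema: Value) -> Result<(), String> {
    let mut compiler = Compiler::new();
    compiler.enable_format_assertions();
    let path = format!("urn:{}", id); // same urn:<id> convention as the extension
    compiler.add_resource(&path, schema).map_err(|e| e.to_string())?;
    let idx = compiler.compile(&path, &mut cache.schemas).map_err(|e| e.to_string())?;
    cache.id_to_index.insert(id.to_string(), idx);
    Ok(())
}

fn main() {
    let mut cache = Cache { schemas: Schemas::new(), id_to_index: HashMap::new() };
    let schema = json!({
        "type": "object",
        "properties": { "name": { "type": "string" } },
        "required": ["name"]
    });
    cache_schema(&mut cache, "person", schema).unwrap();
    let idx = cache.id_to_index["person"];
    assert!(cache.schemas.validate(&json!({ "name": "Alice" }), idx).is_ok());
    assert!(cache.schemas.validate(&json!({}), idx).is_err());
}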

117
flow

@ -3,153 +3,130 @@
# Flows # Flows
source ./flows/base source ./flows/base
source ./flows/git source ./flows/git
source ./flows/kube
source ./flows/packaging source ./flows/packaging
source ./flows/rust source ./flows/rust
# Vars # Vars
POSTGRES_VERSION="17" POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config" POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}") DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0) CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
GITEA_ORGANIZATION="cellular" GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg" GITEA_REPOSITORY="jspg"
env() {
# Check if GITEA_TOKEN is set
if [ -z "$GITEA_TOKEN" ]; then
# If not set, try to get it from kubectl
GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
if [ -z "$GITEA_TOKEN" ]; then
echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
exit 1
fi
export GITEA_TOKEN
fi
echo -e "💰 ${GREEN}Environment variables set${RESET}"
}
pgrx-prepare() { pgrx-prepare() {
echo -e "${BLUE}Initializing pgrx...${RESET}" info "Initializing pgrx..."
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which' # Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config" local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}" error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}" warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
exit 1 return 2
fi fi
if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
echo -e "${GREEN}pgrx initialized successfully.${RESET}" success "pgrx initialized successfully."
else else
echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}" error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
exit 1 return 2
fi fi
} }
build() { build() {
local version local version
version=$(get-version) || return 1 version=$(get-version) || return $?
local package_dir="./package" local package_dir="./package"
local tarball_name="${GITEA_REPOSITORY}.tar.gz" local tarball_name="${GITEA_REPOSITORY}.tar.gz"
local tarball_path="${package_dir}/${tarball_name}" local tarball_path="${package_dir}/${tarball_name}"
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..." info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
# Clean previous package dir # Clean previous package dir
rm -rf "${package_dir}" rm -rf "${package_dir}"
mkdir -p "${package_dir}" mkdir -p "${package_dir}"
# Create the source tarball excluding specified patterns # Create the source tarball excluding specified patterns
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}" info "Creating tarball: ${tarball_path}"
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}" success "Successfully created source tarball: ${tarball_path}"
else else
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2 error "Failed to create source tarball."
return 1 return 2
fi fi
} }
install() { install() {
local version local version
version=$(get-version) || return 1 version=$(get-version) || return $? # Propagate error
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}" info "Building and installing PGRX extension v$version into local PostgreSQL..."
# Run the pgrx install command # Run the pgrx install command
# It implicitly uses --release unless --debug is passed
# It finds pg_config or you can add flags like --pg-config if needed
if ! cargo pgrx install; then if ! cargo pgrx install; then
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2 error "cargo pgrx install command failed."
return 1 return 2
fi fi
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}" success "PGRX extension v$version successfully built and installed."
# Post-install modification to allow non-superuser usage # Post-install modification to allow non-superuser usage
# Get the installation path dynamically using pg_config
local pg_sharedir local pg_sharedir
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir) pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
if [ -z "$pg_sharedir" ]; then local pg_config_status=$?
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2 if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
return 1 error "Failed to determine PostgreSQL shared directory using pg_config."
return 2
fi fi
local installed_control_path="${pg_sharedir}/extension/jspg.control" local installed_control_path="${pg_sharedir}/extension/jspg.control"
# Modify the control file # Modify the control file
if [ ! -f "$installed_control_path" ]; then if [ ! -f "$installed_control_path" ]; then
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2 error "Installed control file not found: '$installed_control_path'"
return 1 return 2
fi fi
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}" info "Modifying control file for non-superuser access: ${installed_control_path}"
# Use sed -i '' for macOS compatibility # Use sed -i '' for macOS compatibility
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \ if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
echo 'trusted = true' >> "$installed_control_path"; then echo 'trusted = true' >> "$installed_control_path"; then
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}" success "Control file modified successfully."
else else
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2 error "Failed to modify control file: ${installed_control_path}"
return 1 return 2
fi fi
} }
test() { test() {
echo -e "🧪 ${CYAN}Running jspg tests...${RESET}" info "Running jspg tests..."
cargo pgrx test "pg${POSTGRES_VERSION}" "$@" cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
} }
clean() { clean() {
echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}" info "Cleaning build artifacts..."
cargo clean # Use standard cargo clean cargo clean || return $?
} }
jspg-usage() { jspg-usage() {
echo -e " ${CYAN}JSPG Commands:${RESET}" printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
echo -e " prepare Check OS, Cargo, and PGRX dependencies." printf "install\tBuild and install the extension locally (after prepare).\n"
echo -e " install [opts] Run prepare, then build and install the extension locally." printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
echo -e " reinstall [opts] Run prepare, clean, then build and install the extension locally." printf "test\t\tRun pgrx integration tests.\n"
echo -e " test [opts] Run pgrx integration tests." printf "clean\t\tRemove pgrx build artifacts.\n"
echo -e " clean Remove pgrx build artifacts."
echo -e " build Build release artifacts into ./package/ (called by release)."
echo -e " tag Tag the current version (called by release)."
echo -e " package Upload artifacts from ./package/ (called by release)."
echo -e " release Perform a full release (increments patch, builds, tags, pushes, packages)."
} }
jspg-flow() { jspg-flow() {
case "$1" in case "$1" in
env) env; return 0;; prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;; build) build; return $?;;
build) build; return 0;; install) install; return $?;;
install) install; return 0;; reinstall) clean && install; return $?;;
reinstall) clean; install; return 0;; test) test "${@:2}"; return $?;;
test) test; return 0;; clean) clean; return $?;;
package) env; package; return 0;;
release) env; release; return 0;;
clean) clean; return 0;;
*) return 1 ;; *) return 1 ;;
esac esac
} }
register-flow "jspg-flow" "jspg-usage" register-flow "jspg-usage" "jspg-flow"
dispatch "$@" dispatch "$@"

2
flows

Submodule flows updated: db55335254...e154758056

1
rustfmt.toml Normal file

@ -0,0 +1 @@
tab_spaces = 2

src/lib.rs

@ -2,97 +2,388 @@ use pgrx::*;
pg_module_magic!(); pg_module_magic!();
use serde_json::{json, Value}; use boon::{CompileError, Compiler, ErrorKind, SchemaIndex, Schemas, ValidationError};
use std::{collections::HashMap, sync::RwLock};
use boon::{Compiler, Schemas, ValidationError, SchemaIndex};
use lazy_static::lazy_static; use lazy_static::lazy_static;
use serde_json::{json, Value};
use std::collections::hash_map::Entry;
use std::{collections::HashMap, sync::RwLock};
struct BoonCache { struct BoonCache {
schemas: Schemas, schemas: Schemas,
id_to_index: HashMap<String, SchemaIndex>, id_to_index: HashMap<String, SchemaIndex>,
} }
// Structure to hold error information without lifetimes
#[derive(Debug)]
struct Error {
path: String,
code: String,
message: String,
cause: String,
}
lazy_static! { lazy_static! {
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache { static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
schemas: Schemas::new(), schemas: Schemas::new(),
id_to_index: HashMap::new() id_to_index: HashMap::new(),
}); });
} }
#[pg_extern(strict)] #[pg_extern(strict)]
fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB { fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap(); let mut cache = SCHEMA_CACHE.write().unwrap();
let schema_value: Value = schema.0; let mut schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);
// Apply strict validation to all objects in the schema if requested
if strict {
apply_strict_validation(&mut schema_value);
}
// Create the boon compiler and enable format assertions
let mut compiler = Compiler::new(); let mut compiler = Compiler::new();
compiler.enable_format_assertions(); compiler.enable_format_assertions();
let schema_url = format!("urn:jspg:{}", schema_id); // Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
if let Err(e) = compiler.add_resource(&schema_url, schema_value) {
return JsonB(json!({ return JsonB(json!({
"success": false, "errors": [{
"error": format!("Failed to add schema resource '{}': {}", schema_id, e) "code": "SCHEMA_RESOURCE_ADD_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"path": schema_path,
"cause": format!("{}", e)
}
}]
})); }));
} }
match compiler.compile(&schema_url, &mut cache.schemas) { // Use schema_path when compiling
match compiler.compile(&schema_path, &mut cache.schemas) {
Ok(sch_index) => { Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index); cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ JsonB(json!({ "response": "success" }))
"success": true,
"schema_id": schema_id,
"message": "Schema cached successfully."
}))
} }
Err(e) => JsonB(json!({ Err(e) => {
"success": false, let errors = match &e {
"schema_id": schema_id, CompileError::ValidationError { url: _url, src } => {
"error": format!("Schema compilation failed: {}", e) // Collect leaf errors from the meta-schema validation failure
})), let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
// Filter and format errors properly - no instance for schema compilation
format_errors(error_list, &schema_value)
}
_ => {
// Other compilation errors
vec![json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"path": schema_path,
"cause": format!("{:?}", e)
}
})]
}
};
JsonB(json!({ "errors": errors }))
}
}
}
// Helper function to recursively apply strict validation to all objects in a schema
fn apply_strict_validation(schema: &mut Value) {
match schema {
Value::Object(map) => {
// If this is an object type schema, add unevaluatedProperties: false
if let Some(Value::String(t)) = map.get("type") {
if t == "object" && !map.contains_key("unevaluatedProperties") && !map.contains_key("additionalProperties") {
map.insert("unevaluatedProperties".to_string(), Value::Bool(false));
}
}
// Recurse into all properties
for (_, value) in map.iter_mut() {
apply_strict_validation(value);
}
}
Value::Array(arr) => {
// Recurse into array items
for item in arr.iter_mut() {
apply_strict_validation(item);
}
}
_ => {}
} }
} }
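A small before/after illustration of what apply_strict_validation above does when strict is true; the schema content is made up for the example. Object schemas that already declare unevaluatedProperties or additionalProperties are left untouched.

use serde_json::json;

fn main() {
    // Schema as supplied to cache_json_schema with strict = true (illustrative).
    let before = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "address": {
                "type": "object",
                "properties": { "city": { "type": "string" } }
            }
        }
    });
    // Every object-typed subschema without unevaluatedProperties or
    // additionalProperties gains "unevaluatedProperties": false, recursively.
    let after = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "address": {
                "type": "object",
                "properties": { "city": { "type": "string" } },
                "unevaluatedProperties": false
            }
        },
        "unevaluatedProperties": false
    });
    println!("{before}\n{after}");
}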
#[pg_extern(strict, parallel_safe)] #[pg_extern(strict, parallel_safe)]
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB { fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap(); let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id
match cache.id_to_index.get(schema_id) { match cache.id_to_index.get(schema_id) {
None => JsonB(json!({ None => JsonB(json!({
"success": false,
"errors": [{ "errors": [{
"kind": "SchemaNotFound", "code": "SCHEMA_NOT_FOUND",
"message": format!("Schema with id '{}' not found in cache", schema_id) "message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"cause": "Schema must be cached before validation"
}
}] }]
})), })),
Some(sch_index) => { Some(sch_index) => {
let instance_value: Value = instance.0; let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) { match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "success": true })), Ok(_) => JsonB(json!({ "response": "success" })),
Err(validation_error) => { Err(validation_error) => {
let error_details = format_boon_errors(&validation_error); let mut error_list = Vec::new();
JsonB(json!({ collect_errors(&validation_error, &mut error_list);
"success": false, let errors = format_errors(error_list, &instance_value);
"errors": [error_details] JsonB(json!({ "errors": errors }))
}))
} }
} }
} }
} }
} }
fn format_boon_errors(error: &ValidationError) -> Value { // Recursively collects validation errors
json!({ fn collect_errors(error: &ValidationError, errors_list: &mut Vec<Error>) {
"instance_path": error.instance_location.to_string(), // Check if this is a structural error that we should skip
"schema_path": error.schema_url.to_string(), let is_structural = matches!(
"kind": format!("{:?}", error.kind), &error.kind,
"message": format!("{}", error), ErrorKind::Group | ErrorKind::AllOf | ErrorKind::AnyOf | ErrorKind::Not | ErrorKind::OneOf(_)
"causes": error );
.causes
.iter() if error.causes.is_empty() && !is_structural {
.map(format_boon_errors) // This is a leaf error that's not structural
.collect::<Vec<_>>() let original_message = format!("{}", error.kind);
}) let (error_code, human_message) = convert_error_kind(&error.kind);
errors_list.push(Error {
path: error.instance_location.to_string(),
code: error_code,
message: human_message,
cause: original_message,
});
} else {
// Recurse into causes
for cause in &error.causes {
collect_errors(cause, errors_list);
}
}
}
// Convert ErrorKind to error code and human message
fn convert_error_kind(kind: &ErrorKind) -> (String, String) {
match kind {
ErrorKind::Type { .. } => (
"TYPE_MISMATCH".to_string(),
"Field type does not match the expected type".to_string(),
),
ErrorKind::Required { .. } => (
"REQUIRED_FIELD_MISSING".to_string(),
"Required field is missing".to_string(),
),
ErrorKind::DependentRequired { .. } => (
"DEPENDENT_REQUIRED_MISSING".to_string(),
"Dependent required fields are missing".to_string(),
),
ErrorKind::Dependency { .. } => (
"DEPENDENCY_FAILED".to_string(),
"Dependency requirement not met".to_string(),
),
ErrorKind::Enum { .. } => (
"ENUM_VIOLATED".to_string(),
"Value is not one of the allowed options".to_string(),
),
ErrorKind::Const { .. } => (
"CONST_VIOLATED".to_string(),
"Value does not match the required constant".to_string(),
),
ErrorKind::MinLength { .. } => (
"MIN_LENGTH_VIOLATED".to_string(),
"Field length is below the minimum required".to_string(),
),
ErrorKind::MaxLength { .. } => (
"MAX_LENGTH_VIOLATED".to_string(),
"Field length exceeds the maximum allowed".to_string(),
),
ErrorKind::Pattern { .. } => (
"PATTERN_VIOLATED".to_string(),
"Value does not match the required pattern".to_string(),
),
ErrorKind::Minimum { .. } => (
"MINIMUM_VIOLATED".to_string(),
"Value is below the minimum allowed".to_string(),
),
ErrorKind::Maximum { .. } => (
"MAXIMUM_VIOLATED".to_string(),
"Value exceeds the maximum allowed".to_string(),
),
ErrorKind::ExclusiveMinimum { .. } => (
"EXCLUSIVE_MINIMUM_VIOLATED".to_string(),
"Value must be greater than the minimum".to_string(),
),
ErrorKind::ExclusiveMaximum { .. } => (
"EXCLUSIVE_MAXIMUM_VIOLATED".to_string(),
"Value must be less than the maximum".to_string(),
),
ErrorKind::MultipleOf { .. } => (
"MULTIPLE_OF_VIOLATED".to_string(),
"Value is not a multiple of the required factor".to_string(),
),
ErrorKind::MinItems { .. } => (
"MIN_ITEMS_VIOLATED".to_string(),
"Array has fewer items than required".to_string(),
),
ErrorKind::MaxItems { .. } => (
"MAX_ITEMS_VIOLATED".to_string(),
"Array has more items than allowed".to_string(),
),
ErrorKind::UniqueItems { .. } => (
"UNIQUE_ITEMS_VIOLATED".to_string(),
"Array contains duplicate items".to_string(),
),
ErrorKind::MinProperties { .. } => (
"MIN_PROPERTIES_VIOLATED".to_string(),
"Object has fewer properties than required".to_string(),
),
ErrorKind::MaxProperties { .. } => (
"MAX_PROPERTIES_VIOLATED".to_string(),
"Object has more properties than allowed".to_string(),
),
ErrorKind::AdditionalProperties { .. } => (
"ADDITIONAL_PROPERTIES_NOT_ALLOWED".to_string(),
"Object contains properties that are not allowed".to_string(),
),
ErrorKind::AdditionalItems { .. } => (
"ADDITIONAL_ITEMS_NOT_ALLOWED".to_string(),
"Array contains additional items that are not allowed".to_string(),
),
ErrorKind::Format { want, .. } => (
"FORMAT_INVALID".to_string(),
format!("Invalid {} format", want),
),
ErrorKind::PropertyName { .. } => (
"INVALID_PROPERTY_NAME".to_string(),
"Property name is invalid".to_string(),
),
ErrorKind::Contains => (
"CONTAINS_FAILED".to_string(),
"No items match the required schema".to_string(),
),
ErrorKind::MinContains { .. } => (
"MIN_CONTAINS_VIOLATED".to_string(),
"Too few items match the required schema".to_string(),
),
ErrorKind::MaxContains { .. } => (
"MAX_CONTAINS_VIOLATED".to_string(),
"Too many items match the required schema".to_string(),
),
ErrorKind::ContentEncoding { .. } => (
"CONTENT_ENCODING_INVALID".to_string(),
"Content encoding is invalid".to_string(),
),
ErrorKind::ContentMediaType { .. } => (
"CONTENT_MEDIA_TYPE_INVALID".to_string(),
"Content media type is invalid".to_string(),
),
ErrorKind::FalseSchema => (
"FALSE_SCHEMA".to_string(),
"Schema validation always fails".to_string(),
),
ErrorKind::Not => (
"NOT_VIOLATED".to_string(),
"Value matched a schema it should not match".to_string(),
),
ErrorKind::RefCycle { .. } => (
"REFERENCE_CYCLE".to_string(),
"Schema contains a reference cycle".to_string(),
),
ErrorKind::Reference { .. } => (
"REFERENCE_FAILED".to_string(),
"Reference validation failed".to_string(),
),
ErrorKind::Schema { .. } => (
"SCHEMA_FAILED".to_string(),
"Schema validation failed".to_string(),
),
ErrorKind::ContentSchema => (
"CONTENT_SCHEMA_FAILED".to_string(),
"Content schema validation failed".to_string(),
),
// These shouldn't appear as leaf errors due to is_structural check
ErrorKind::Group => (
"VALIDATION_FAILED".to_string(),
"Validation failed".to_string(),
),
ErrorKind::AllOf => (
"ALL_OF_VIOLATED".to_string(),
"Value does not match all required schemas".to_string(),
),
ErrorKind::AnyOf => (
"ANY_OF_VIOLATED".to_string(),
"Value does not match any of the allowed schemas".to_string(),
),
ErrorKind::OneOf(_) => (
"ONE_OF_VIOLATED".to_string(),
"Value must match exactly one schema".to_string(),
),
}
}
// Formats errors according to DropError structure
fn format_errors(errors: Vec<Error>, instance: &Value) -> Vec<Value> {
// Deduplicate by instance_path and format as DropError
let mut unique_errors: HashMap<String, Value> = HashMap::new();
for error in errors {
if let Entry::Vacant(entry) = unique_errors.entry(error.path.clone()) {
// Extract the failing value from the instance
let failing_value = extract_value_at_path(instance, &error.path);
entry.insert(json!({
"code": error.code,
"message": error.message,
"details": {
"path": error.path,
"context": failing_value,
"cause": error.cause
}
}));
}
}
unique_errors.into_values().collect()
}
// Helper function to extract value at a JSON pointer path
fn extract_value_at_path(instance: &Value, path: &str) -> Value {
let parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
let mut current = instance;
for part in parts {
match current {
Value::Object(map) => {
if let Some(value) = map.get(part) {
current = value;
} else {
return Value::Null;
}
}
Value::Array(arr) => {
if let Ok(index) = part.parse::<usize>() {
if let Some(value) = arr.get(index) {
current = value;
} else {
return Value::Null;
}
} else {
return Value::Null;
}
}
_ => return Value::Null,
}
}
current.clone()
} }
#[pg_extern(strict, parallel_safe)] #[pg_extern(strict, parallel_safe)]
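Taken together, collect_errors, convert_error_kind, and format_errors flatten boon's nested error tree into one deduplicated entry per instance path, while successes return {"response": "success"}. Below is a hypothetical failure payload in that shape; the codes and messages match convert_error_kind above, but the paths, context values, and cause wording are illustrative, since cause carries boon's own message text.

use serde_json::json;

fn main() {
    // Illustrative validate_json_schema failure envelope, not captured output.
    let example = json!({
        "errors": [
            {
                "code": "REQUIRED_FIELD_MISSING",
                "message": "Required field is missing",
                "details": {
                    "path": "/address",
                    "context": { "city": "San Francisco Bay Area" },
                    "cause": "boon's original message for the missing property"
                }
            },
            {
                "code": "MAX_LENGTH_VIOLATED",
                "message": "Field length exceeds the maximum allowed",
                "details": {
                    "path": "/address/city",
                    "context": "San Francisco Bay Area",
                    "cause": "boon's original message for the length violation"
                }
            }
        ]
    });
    println!("{}", serde_json::to_string_pretty(&example).unwrap());
}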
@ -106,297 +397,35 @@ fn clear_json_schemas() -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap(); let mut cache = SCHEMA_CACHE.write().unwrap();
*cache = BoonCache { *cache = BoonCache {
schemas: Schemas::new(), schemas: Schemas::new(),
id_to_index: HashMap::new() id_to_index: HashMap::new(),
}; };
JsonB(json!({ JsonB(json!({ "response": "success" }))
"success": true,
"message": "Schema cache cleared."
}))
} }
#[pg_extern(strict, parallel_safe)] #[pg_extern(strict, parallel_safe)]
fn show_json_schemas() -> JsonB { fn show_json_schemas() -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap(); let cache = SCHEMA_CACHE.read().unwrap();
let ids: Vec<&String> = cache.id_to_index.keys().collect(); let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
JsonB(json!({ JsonB(json!({ "response": ids }))
"cached_schema_ids": ids
}))
}
#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
use pgrx::*;
use pgrx::pg_test;
use super::*;
use serde_json::json;
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
fn setup_test() {
clear_json_schemas();
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
setup_test();
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert!(cache_result.0["success"].as_bool().unwrap());
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert!(valid_result.0["success"].as_bool().unwrap());
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert!(!invalid_result_type.0["success"].as_bool().unwrap());
// --- Assertions for invalid_result_type ---
// Get top-level errors
let top_level_errors = invalid_result_type.0["errors"].as_array().expect("Top-level 'errors' should be an array");
assert_eq!(top_level_errors.len(), 1, "Should have exactly one top-level error for invalid type");
// Get the first (and only) top-level error
let top_level_error = top_level_errors.get(0).expect("Should get the first top-level error");
// Check top-level error kind
assert!(top_level_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Schema { url:")),
"Incorrect kind for top-level error. Expected 'Schema {{ url:'. Error: {:?}. All errors: {:?}", top_level_error, top_level_errors);
// Get the 'causes' array from the top-level error
let causes_age = top_level_error.get("causes").and_then(Value::as_array).expect("Top-level error 'causes' should be an array");
assert_eq!(causes_age.len(), 1, "Should have one cause for the age error");
// Get the actual age error from the 'causes' array
let age_error = causes_age.get(0).expect("Should have an error object in 'causes'");
assert_eq!(age_error.get("instance_path").and_then(Value::as_str), Some("/age"),
"Incorrect instance_path for age error. Error: {:?}. All errors: {:?}", age_error, top_level_errors);
assert!(age_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Minimum { got:")),
"Incorrect kind prefix for age error. Expected 'Minimum {{ got:'. Error: {:?}. All errors: {:?}", age_error, top_level_errors);
let expected_prefix = "at '/age': must be >=0";
assert!(age_error.get("message")
.and_then(Value::as_str)
.map_or(false, |m| m.starts_with(expected_prefix)),
"Incorrect message prefix for age error. Expected prefix '{}'. Error: {:?}. All errors: {:?}",
expected_prefix, age_error, top_level_errors);
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert!(!invalid_result_missing.0["success"].as_bool().unwrap(), "Validation should fail for missing required field");
// --- Assertions for invalid_result_missing ---
// Get top-level errors
let top_level_errors_missing = invalid_result_missing.0["errors"].as_array().expect("Errors should be an array for missing field");
assert_eq!(top_level_errors_missing.len(), 1, "Should have one top-level error for missing field");
// Get the first (and only) top-level error
let top_error_missing = top_level_errors_missing.get(0).expect("Should get the first top-level missing field error");
// Check top-level error kind
assert!(top_error_missing.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Schema { url:")),
"Incorrect kind for missing field top-level error. Error: {:?}. All errors: {:?}", top_error_missing, top_level_errors_missing);
// Get the 'causes' array from the top-level error
let causes_missing = top_error_missing.get("causes").and_then(Value::as_array).expect("Causes should be an array for missing field error");
assert_eq!(causes_missing.len(), 1, "Should have one cause for missing field");
// Get the actual missing field error from the 'causes' array
let missing_error = causes_missing.get(0).expect("Should have missing field error object in 'causes'");
// Assertions on the specific missing field error
assert_eq!(missing_error.get("instance_path").and_then(Value::as_str), Some(""),
"Incorrect instance_path for missing age error: {:?}", missing_error);
assert!(missing_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Required { want: [\"age\"]")),
"Incorrect kind for missing age error. Expected prefix 'Required {{ want: [\"age\"] }}'. Error: {:?}", missing_error);
}
#[pg_test]
fn test_validate_json_schema_not_cached() {
setup_test();
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
assert!(!result.0["success"].as_bool().unwrap());
let errors = result.0["errors"].as_array().unwrap();
assert_eq!(errors.len(), 1);
assert_eq!(errors[0]["kind"], json!("SchemaNotFound"));
assert!(errors[0]["message"].as_str().unwrap().contains("non_existent_schema"));
}
#[pg_test]
fn test_cache_invalid_json_schema() {
setup_test();
let schema_id = "invalid_schema";
let invalid_schema_json = "{\"type\": \"string\" \"maxLength\": 5}";
let invalid_schema_value: Result<Value, _> = serde_json::from_str(invalid_schema_json);
assert!(invalid_schema_value.is_err(), "Test setup assumes invalid JSON string");
let schema_representing_invalid = json!({
"type": 123
});
let result = cache_json_schema(schema_id, jsonb(schema_representing_invalid.clone()));
assert!(!result.0["success"].as_bool().unwrap());
assert!(result.0["error"].as_str().unwrap().contains("Schema compilation failed"));
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
setup_test();
let schema_id = "detailed_schema";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let invalid_instance = json!({
"address": {
"city": "San Francisco Bay Area"
}
});
assert!(cache_json_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
assert!(!result.0["success"].as_bool().unwrap());
let errors = result.0["errors"].as_array().expect("Errors should be an array");
let top_error = errors.get(0).expect("Expected at least one top-level error object");
let causes = top_error.get("causes").and_then(Value::as_array).expect("Expected causes array");
let has_required_street_error = causes.iter().any(|e|
e.get("instance_path").and_then(Value::as_str) == Some("/address") && // Check path inside cause
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("Required { want:") && // Check kind prefix
e.get("kind").and_then(Value::as_str).unwrap_or("").contains("street") // Ensure 'street' is mentioned
);
assert!(has_required_street_error, "Missing required 'street' error within causes. Actual errors: {:?}", errors);
let has_maxlength_city_error = causes.iter().any(|e| // Check within causes
e.get("instance_path").and_then(Value::as_str) == Some("/address/city") &&
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("MaxLength { got:") // Check kind prefix
);
assert!(has_maxlength_city_error, "Missing maxLength 'city' error within causes. Actual errors: {:?}", errors);
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
setup_test();
let schema_id = "oneof_schema";
let schema = json!({
"type": "object",
"properties": {
"value": {
"oneOf": [
{ "type": "string", "minLength": 5 },
{ "type": "number", "minimum": 10 }
]
}
},
"required": ["value"]
});
assert!(cache_json_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
let invalid_instance = json!({ "value": "abc" });
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
assert!(!result.0["success"].as_bool().unwrap());
let errors_val = result.0["errors"].as_array().expect("Errors should be an array");
let top_schema_error = errors_val.get(0).expect("Expected at least one top-level Schema error object");
let schema_error_causes = top_schema_error.get("causes").and_then(Value::as_array).expect("Expected causes array for Schema error");
let oneof_error = schema_error_causes.iter().find(|e| {
e.get("kind").and_then(Value::as_str) == Some("OneOf(None)") &&
e.get("instance_path").and_then(Value::as_str) == Some("/value")
}).expect("Could not find the OneOf(None) error for /value within Schema causes");
let oneof_causes = oneof_error.get("causes").and_then(Value::as_array)
.expect("Expected causes array for OneOf error");
let has_minlength_error = oneof_causes.iter().any(|e| // Check within OneOf causes
e.get("instance_path").and_then(Value::as_str) == Some("/value") &&
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("MinLength { got:") // Check kind prefix
);
assert!(has_minlength_error, "Missing MinLength error within OneOf causes. Actual errors: {:?}", errors_val);
let has_type_error = oneof_causes.iter().any(|e| // Check within OneOf causes
e.get("instance_path").and_then(Value::as_str) == Some("/value") &&
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("Type { got: String, want: Types") // More specific kind check
);
assert!(has_type_error, "Missing Type error within OneOf causes. Actual errors: {:?}", errors_val);
}
#[pg_test]
fn test_clear_json_schemas() {
setup_test();
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_json_schema(schema_id, jsonb(schema.clone()));
let show_result1 = show_json_schemas();
assert!(show_result1.0["cached_schema_ids"].as_array().unwrap().iter().any(|id| id.as_str() == Some(schema_id)));
let clear_result = clear_json_schemas();
assert!(clear_result.0["success"].as_bool().unwrap());
let show_result2 = show_json_schemas();
assert!(show_result2.0["cached_schema_ids"].as_array().unwrap().is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
assert!(!validate_result.0["success"].as_bool().unwrap());
assert_eq!(validate_result.0["errors"].as_array().unwrap()[0]["kind"], json!("SchemaNotFound"));
}
#[pg_test]
fn test_show_json_schemas() {
setup_test();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
cache_json_schema(schema_id1, jsonb(schema.clone()));
cache_json_schema(schema_id2, jsonb(schema.clone()));
let result = show_json_schemas();
let ids = result.0["cached_schema_ids"].as_array().unwrap();
assert_eq!(ids.len(), 2);
assert!(ids.contains(&json!(schema_id1)));
assert!(ids.contains(&json!(schema_id2)));
}
}
/// This module is required by `cargo pgrx test` invocations.
/// It must be visible at the root of your extension crate.
#[cfg(test)]
pub mod pg_test {
pub fn setup(_options: Vec<&str>) {
// perform one-off initialization when the pg_test framework starts
}
#[must_use]
pub fn postgresql_conf_options() -> Vec<&'static str> {
// return any postgresql.conf settings that are required for your tests
vec![]
}
}
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
include!("tests.rs");
}

541
src/tests.rs Normal file

@ -0,0 +1,541 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};
// Helper macro for asserting success with Drop-style response
macro_rules! assert_success_with_json {
($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
let has_response = $result_jsonb.0.get("response").is_some();
let has_errors = $result_jsonb.0.get("errors").is_some();
if !has_response || has_errors {
let base_msg = format!($fmt $(, $($args)*)?);
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success with 'response' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("{}", panic_msg);
}
};
// Simpler version without message
($result_jsonb:expr) => {
let has_response = $result_jsonb.0.get("response").is_some();
let has_errors = $result_jsonb.0.get("errors").is_some();
if !has_response || has_errors {
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success with 'response' field)\nResult JSON:\n{}", pretty_json);
panic!("{}", panic_msg);
}
};
}
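// Illustrative usage (a sketch, not part of the original test file): the second macro arm
// threads a formatted context message into the panic output, e.g.
//
//     let result = cache_json_schema("demo_schema", jsonb(json!({ "type": "string" })), false);
//     assert_success_with_json!(result, "caching schema '{}' should succeed", "demo_schema");
//
// Either arm panics with the pretty-printed result JSON when "response" is absent or "errors" is present.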
// Helper macro for asserting failed JSON results with Drop-style errors
macro_rules! assert_failure_with_json {
// --- Arms with error count and message substring check ---
// With custom message:
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
if $expected_error_count > 0 {
let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
};
// --- Arms with error count check only ---
// With custom message:
($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr) => {
assert_failure_with_json!($result, $expected_error_count, "");
};
// --- Arms checking failure only (expects at least one error) ---
// With custom message:
($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but 'errors' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
($result:expr) => {
assert_failure_with_json!($result, "");
};
}
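// Illustrative call shapes (a sketch, not part of the original test file):
//
//     assert_failure_with_json!(result);                      // any non-empty "errors" array
//     assert_failure_with_json!(result, 2);                   // exactly two leaf errors
//     assert_failure_with_json!(result, 1, "not found");      // one error whose message contains "not found"
//
// Note that because `$expected_first_message_contains` is an `expr`, a three-argument call with a
// string literal matches the substring-check arm rather than the count-plus-format-message arm.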
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
clear_json_schemas(); // Call clear directly
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false);
assert_success_with_json!(cache_result, "Cache operation should succeed.");
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
// Invalid type - age is negative
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert_failure_with_json!(invalid_result_type, 1, "Value is below the minimum allowed", "Validation with invalid type should fail.");
let errors_type = invalid_result_type.0["errors"].as_array().unwrap();
assert_eq!(errors_type[0]["details"]["path"], "/age");
assert_eq!(errors_type[0]["details"]["context"], -5);
assert_eq!(errors_type[0]["code"], "MINIMUM_VIOLATED");
// Missing field
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert_failure_with_json!(invalid_result_missing, 1, "Required field is missing", "Validation with missing field should fail.");
let errors_missing = invalid_result_missing.0["errors"].as_array().unwrap();
assert_eq!(errors_missing[0]["details"]["path"], "");
assert_eq!(errors_missing[0]["code"], "REQUIRED_FIELD_MISSING");
// Schema not found
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert_failure_with_json!(invalid_schema_result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
let errors_notfound = invalid_schema_result.0["errors"].as_array().unwrap();
assert_eq!(errors_notfound[0]["code"], "SCHEMA_NOT_FOUND");
}
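// For reference, the failure envelope implied by the assertions above looks roughly like
// (field names taken from the checks in this test; other fields may also be present):
//
//     {
//       "errors": [
//         {
//           "code": "MINIMUM_VIOLATED",
//           "message": "Value is below the minimum allowed ...",
//           "details": { "path": "/age", "context": -5 }
//         }
//       ]
//     }
//
// whereas a successful call returns a top-level "response" field and no "errors" array.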
#[pg_test]
fn test_validate_json_schema_not_cached() {
clear_json_schemas();
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
assert_failure_with_json!(result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
}
#[pg_test]
fn test_cache_invalid_json_schema() {
clear_json_schemas();
let schema_id = "invalid_schema";
// Schema with an invalid type *value*
let invalid_schema = json!({
"$id": "urn:invalid_schema",
"type": ["invalid_type_value"]
});
let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema), false);
// Expect 2 leaf errors because the meta-schema validation fails at the type value
// and within the type array itself.
assert_failure_with_json!(
cache_result,
2, // Expect exactly two leaf errors
"Value is not one of the allowed options", // Updated to human-readable message
"Caching invalid schema should fail with specific meta-schema validation errors."
);
// Ensure the errors array exists and check specifics
let errors_array = cache_result.0["errors"].as_array().expect("Errors field should be an array");
assert_eq!(errors_array.len(), 2);
// Both errors should have ENUM_VIOLATED code
assert_eq!(errors_array[0]["code"], "ENUM_VIOLATED");
assert_eq!(errors_array[1]["code"], "ENUM_VIOLATED");
// Check instance paths are preserved in path field
let paths: Vec<&str> = errors_array.iter()
.map(|e| e["details"]["path"].as_str().unwrap())
.collect();
assert!(paths.contains(&"/type"));
assert!(paths.contains(&"/type/0"));
}
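// Why two errors (an interpretation, not asserted beyond the checks above): the meta-schema allows
// "type" to be either a single simple-type name or an array of them, so the bogus value fails the
// simple-type enum at "/type" and its first element fails the per-item enum at "/type/0".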
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "detailed_errors";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema), false);
let invalid_instance = json!({
"address": {
"street": 123, // Wrong type
"city": "Supercalifragilisticexpialidocious" // Too long
}
});
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
// Update: Expect 2 errors again, as boon reports both nested errors.
assert_failure_with_json!(result, 2);
}
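// The two leaf errors expected here are presumably the wrong-type error at "/address/street" and
// the maxLength error at "/address/city"; only the count is asserted, so the exact codes are not
// pinned down by this test.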
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{ // Option 1: Object with string prop
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{ // Option 2: Object with number prop
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
let _ = cache_json_schema(schema_id, jsonb(schema), false);
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_string, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_string = result_invalid_string.0["errors"].as_array().expect("Expected error array for invalid string");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "/string_prop" &&
e["code"] == "MAX_LENGTH_VIOLATED"
), "Missing maxLength error");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
), "Missing number_prop required error");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_number, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_number = result_invalid_number.0["errors"].as_array().expect("Expected error array for invalid number");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "/number_prop" &&
e["code"] == "MINIMUM_VIOLATED"
), "Missing minimum error");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
), "Missing string_prop required error");
// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_invalid_bool, 1);
// Explicitly check that the single remaining error is the type error for the root instance path
let errors_bool = result_invalid_bool.0["errors"].as_array().expect("Expected error array for invalid bool");
assert_eq!(errors_bool.len(), 1, "Expected exactly one error after deduplication");
assert_eq!(errors_bool[0]["code"], "TYPE_MISMATCH");
assert_eq!(errors_bool[0]["details"]["path"], "");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_empty_obj, 1);
// Explicitly check that the single remaining error is one of the expected missing properties errors
let errors_empty = result_empty_obj.0["errors"].as_array().expect("Expected error array for empty object");
assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
assert_eq!(errors_empty[0]["code"], "REQUIRED_FIELD_MISSING");
assert_eq!(errors_empty[0]["details"]["path"], "");
// The human message should be generic
assert_eq!(errors_empty[0]["message"], "Required field is missing");
}
#[pg_test]
fn test_clear_json_schemas() {
let clear_result = clear_json_schemas();
assert_success_with_json!(clear_result);
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false);
assert_success_with_json!(cache_result);
let show_result1 = show_json_schemas();
let schemas1 = show_result1.0["response"].as_array().unwrap();
assert!(schemas1.contains(&json!(schema_id)));
let clear_result2 = clear_json_schemas();
assert_success_with_json!(clear_result2);
let show_result2 = show_json_schemas();
let schemas2 = show_result2.0["response"].as_array().unwrap();
assert!(schemas2.is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
assert_failure_with_json!(validate_result, 1, "Schema 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}
#[pg_test]
fn test_show_json_schemas() {
let _ = clear_json_schemas();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
let _ = cache_json_schema(schema_id1, jsonb(schema.clone()), false);
let _ = cache_json_schema(schema_id2, jsonb(schema.clone()), false);
let result = show_json_schemas();
let schemas = result.0["response"].as_array().unwrap();
assert_eq!(schemas.len(), 2);
assert!(schemas.contains(&json!(schema_id1)));
assert!(schemas.contains(&json!(schema_id2)));
}
#[pg_test]
fn test_auto_strict_validation() {
clear_json_schemas();
let schema_id = "strict_test";
let schema_id_non_strict = "non_strict_test";
// Schema without explicit additionalProperties: false
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"profile": {
"type": "object",
"properties": {
"age": { "type": "number" },
"preferences": {
"type": "object",
"properties": {
"theme": { "type": "string" }
}
}
}
},
"tags": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": { "type": "string" },
"value": { "type": "string" }
}
}
}
}
});
// Cache the same schema twice - once with strict=true, once with strict=false
let cache_result_strict = cache_json_schema(schema_id, jsonb(schema.clone()), true);
assert_success_with_json!(cache_result_strict, "Schema caching with strict=true should succeed");
let cache_result_non_strict = cache_json_schema(schema_id_non_strict, jsonb(schema.clone()), false);
assert_success_with_json!(cache_result_non_strict, "Schema caching with strict=false should succeed");
// Test 1: Valid instance with no extra properties (should pass for both)
let valid_instance = json!({
"name": "John",
"profile": {
"age": 30,
"preferences": {
"theme": "dark"
}
},
"tags": [
{"id": "1", "value": "rust"},
{"id": "2", "value": "postgres"}
]
});
let valid_result_strict = validate_json_schema(schema_id, jsonb(valid_instance.clone()));
assert_success_with_json!(valid_result_strict, "Valid instance should pass with strict schema");
let valid_result_non_strict = validate_json_schema(schema_id_non_strict, jsonb(valid_instance));
assert_success_with_json!(valid_result_non_strict, "Valid instance should pass with non-strict schema");
// Test 2: Root level extra property
let invalid_root_extra = json!({
"name": "John",
"extraField": "should fail" // Extra property at root
});
// Should fail with strict schema
let result_root_strict = validate_json_schema(schema_id, jsonb(invalid_root_extra.clone()));
assert_failure_with_json!(result_root_strict, 1, "Schema validation always fails");
let errors_root = result_root_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_root[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_root[0]["details"]["path"], "/extraField");
// Should pass with non-strict schema
let result_root_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_root_extra));
assert_success_with_json!(result_root_non_strict, "Extra property should be allowed with non-strict schema");
// Test 3: Nested object extra property
let invalid_nested_extra = json!({
"name": "John",
"profile": {
"age": 30,
"extraNested": "should fail" // Extra property in nested object
}
});
// Should fail with strict schema
let result_nested_strict = validate_json_schema(schema_id, jsonb(invalid_nested_extra.clone()));
assert_failure_with_json!(result_nested_strict, 1, "Schema validation always fails");
let errors_nested = result_nested_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_nested[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_nested[0]["details"]["path"], "/profile/extraNested");
// Should pass with non-strict schema
let result_nested_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_nested_extra));
assert_success_with_json!(result_nested_non_strict, "Extra nested property should be allowed with non-strict schema");
// Test 4: Deeply nested object extra property
let invalid_deep_extra = json!({
"name": "John",
"profile": {
"age": 30,
"preferences": {
"theme": "dark",
"extraDeep": "should fail" // Extra property in deeply nested object
}
}
});
// Should fail with strict schema
let result_deep_strict = validate_json_schema(schema_id, jsonb(invalid_deep_extra.clone()));
assert_failure_with_json!(result_deep_strict, 1, "Schema validation always fails");
let errors_deep = result_deep_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_deep[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_deep[0]["details"]["path"], "/profile/preferences/extraDeep");
// Should pass with non-strict schema
let result_deep_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_deep_extra));
assert_success_with_json!(result_deep_non_strict, "Extra deep property should be allowed with non-strict schema");
// Test 5: Array item extra property
let invalid_array_item_extra = json!({
"name": "John",
"tags": [
{"id": "1", "value": "rust", "extraInArray": "should fail"} // Extra property in array item
]
});
// Should fail with strict schema
let result_array_strict = validate_json_schema(schema_id, jsonb(invalid_array_item_extra.clone()));
assert_failure_with_json!(result_array_strict, 1, "Schema validation always fails");
let errors_array = result_array_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_array[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_array[0]["details"]["path"], "/tags/0/extraInArray");
// Should pass with non-strict schema
let result_array_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_array_item_extra));
assert_success_with_json!(result_array_non_strict, "Extra array item property should be allowed with non-strict schema");
// Test 6: Schema with explicit additionalProperties: true should allow extras even with strict=true
let schema_id_permissive = "permissive_test";
let permissive_schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" }
},
"additionalProperties": true // Explicitly allow additional properties
});
let _ = cache_json_schema(schema_id_permissive, jsonb(permissive_schema), true); // Note: strict=true
let instance_with_extra = json!({
"name": "John",
"extraAllowed": "should pass"
});
let result_permissive = validate_json_schema(schema_id_permissive, jsonb(instance_with_extra));
assert_success_with_json!(result_permissive, "Instance with extra property should pass when additionalProperties is explicitly true, even with strict=true");
}
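// Interpretation (not stated in the original file): the FALSE_SCHEMA errors reported at paths like
// "/profile/extraNested" suggest that strict caching rewrites each object subschema so that
// properties not declared in "properties" are validated against a `false` subschema, much as if
// "additionalProperties"/"unevaluatedProperties" had been set to false at every level, while a
// schema that explicitly sets "additionalProperties": true (Test 6) is left untouched.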


@ -1 +1 @@
1.0.10 1.0.25