Compare commits

...

15 Commits

9 changed files with 1966 additions and 1126 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

49
Cargo.lock generated
View File

@ -362,6 +362,15 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "codepage"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48f68d061bc2828ae826206326e61251aca94c1e4a5305cf52d9138639c918b4"
dependencies = [
"encoding_rs",
]
[[package]]
name = "convert_case"
version = "0.8.0"
@ -418,6 +427,15 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "encoding_rs"
version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [
"cfg-if",
]
[[package]]
name = "enum-map"
version = "2.7.3"
@ -1106,9 +1124,9 @@ dependencies = [
[[package]]
name = "pgrx"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e1b41219b12cfcaa5d58f946a7ff1e7ddf0a4f7f930a7cdab612916e8a12c64"
checksum = "bab5bc1d60d3bc3c966d307a3c7313b1ebfb49a0ec183be3f1a057df0bcc9988"
dependencies = [
"atomic-traits",
"bitflags",
@ -1130,9 +1148,9 @@ dependencies = [
[[package]]
name = "pgrx-bindgen"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6afcef51e801bb18662716f1c524cedfb7943844593171734fe4d3a94c9afa12"
checksum = "9804b74c211a9edd550cd974718f8cc407dec50d8e9cafb906e0b042ba434af0"
dependencies = [
"bindgen",
"cc",
@ -1149,9 +1167,9 @@ dependencies = [
[[package]]
name = "pgrx-macros"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "729af3e6954d2f76230d700efd8606121f13f71f800e5c76173add2c02097948"
checksum = "f230769493bf567f137de23264d604d267dd72b8a77c596528e43cf423c6208e"
dependencies = [
"pgrx-sql-entity-graph",
"proc-macro2",
@ -1161,11 +1179,13 @@ dependencies = [
[[package]]
name = "pgrx-pg-config"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "116e33a329f3fac976b5f3150f14f2612735dfc56a15cb0a0800f25a3bd90aa7"
checksum = "49b64c071c2a46a19ab4521120a25b02b598f4abf6e9b4b1769a7922edeee3de"
dependencies = [
"cargo_toml",
"codepage",
"encoding_rs",
"eyre",
"home",
"owo-colors",
@ -1175,13 +1195,14 @@ dependencies = [
"thiserror 2.0.12",
"toml",
"url",
"winapi",
]
[[package]]
name = "pgrx-pg-sys"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd074044513f1f7fc63fd1ed0117ad0fbe690ef1b445f6d72b92e611b3846490"
checksum = "fcbfa98ec7a90252d13a78ac666541173dbb01a2fc1ba20131db6490c0711125"
dependencies = [
"cee-scape",
"libc",
@ -1194,9 +1215,9 @@ dependencies = [
[[package]]
name = "pgrx-sql-entity-graph"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0eb73c4b916d4abb422fff66c2606c46bf4b99136209306836e89766a8d49cd"
checksum = "e79bbf5a33cff6cfdc6dda3a976cd931c995eaa2c073a7c59b8f8fe8f6faa073"
dependencies = [
"convert_case",
"eyre",
@ -1210,9 +1231,9 @@ dependencies = [
[[package]]
name = "pgrx-tests"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aba4e9a97fd148c9f65cf0c56d33a4cde4deb9941f5c0d914a39148e8148a7a6"
checksum = "9791c709882f3af9545bcca71670fdd82768f67a428b416b6210eae3773dbd0d"
dependencies = [
"clap-cargo",
"eyre",

View File

@ -1,17 +1,17 @@
[package]
name = "jspg"
version = "0.1.0"
edition = "2021"
edition = "2024"
[dependencies]
pgrx = "0.14.0"
pgrx = "0.15.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lazy_static = "1.5.0"
boon = "0.6.1"
[dev-dependencies]
pgrx-tests = "0.14.0"
pgrx-tests = "0.15.0"
[lib]
crate-type = ["cdylib", "lib"]

4
flow
View File

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Flows
source ./flows/base
@ -11,7 +11,7 @@ source ./flows/rust
POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
CARGO_DEPENDENCIES=(cargo-pgrx==0.15.0)
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"

88
src/helpers.rs Normal file
View File

@ -0,0 +1,88 @@
use serde_json::Value;
use pgrx::JsonB;
// Simple test helpers for cleaner test code
/// Asserts that a jspg result is a success envelope: it must contain a
/// "response" key and must not contain an "errors" key. Panics with the
/// pretty-printed payload otherwise so failing tests show the full JSON.
pub fn assert_success(result: &JsonB) {
    let json = &result.0;
    // `is_none()` instead of `!….is_some()` (clippy: nonminimal_bool).
    if json.get("response").is_none() || json.get("errors").is_some() {
        let pretty = serde_json::to_string_pretty(json).unwrap_or_else(|_| format!("{:?}", json));
        panic!("Expected success but got:\n{}", pretty);
    }
}
/// Asserts that a jspg result is a failure envelope: it must contain an
/// "errors" key and must not contain a "response" key. Panics with the
/// pretty-printed payload otherwise.
pub fn assert_failure(result: &JsonB) {
    let json = &result.0;
    // `is_none()` instead of `!….is_some()` (clippy: nonminimal_bool).
    if json.get("response").is_some() || json.get("errors").is_none() {
        let pretty = serde_json::to_string_pretty(json).unwrap_or_else(|_| format!("{:?}", json));
        panic!("Expected failure but got:\n{}", pretty);
    }
}
/// Asserts the result is a failure envelope with exactly `expected_count`
/// entries in its "errors" array; panics with the pretty-printed payload
/// on mismatch.
pub fn assert_error_count(result: &JsonB, expected_count: usize) {
    assert_failure(result);
    let actual = get_errors(result).len();
    if actual != expected_count {
        let dump = serde_json::to_string_pretty(&result.0)
            .unwrap_or_else(|_| format!("{:?}", result.0));
        panic!("Expected {} errors, got {}:\n{}", expected_count, actual, dump);
    }
}
/// Borrows the "errors" array from the result envelope.
/// Panics if the key is absent or its value is not a JSON array.
pub fn get_errors(result: &JsonB) -> &Vec<Value> {
    result
        .0
        .get("errors")
        .and_then(|value| value.as_array())
        .expect("errors should be an array")
}
/// True when at least one entry in the "errors" array carries the given code.
pub fn has_error_with_code(result: &JsonB, code: &str) -> bool {
    let errors = get_errors(result);
    errors.iter().any(|entry| entry["code"] == code)
}
/// True when at least one error entry matches both the given code and the
/// given "details.path" value.
pub fn has_error_with_code_and_path(result: &JsonB, code: &str, path: &str) -> bool {
    let errors = get_errors(result);
    errors
        .iter()
        .any(|entry| entry["code"] == code && entry["details"]["path"] == path)
}
/// Asserts that an error with the given code and "details.path" exists in
/// the result; panics with the pretty-printed payload when it does not.
pub fn assert_has_error(result: &JsonB, code: &str, path: &str) {
    if has_error_with_code_and_path(result, code, path) {
        return;
    }
    let dump =
        serde_json::to_string_pretty(&result.0).unwrap_or_else(|_| format!("{:?}", result.0));
    panic!("Expected error with code='{}' and path='{}' but not found:\n{}", code, path, dump);
}
/// Returns the first error entry carrying the given code.
/// Panics when no such entry exists.
pub fn find_error_with_code<'a>(result: &'a JsonB, code: &str) -> &'a Value {
    let found = get_errors(result).iter().find(|entry| entry["code"] == code);
    match found {
        Some(error) => error,
        None => panic!("No error found with code '{}'", code),
    }
}
/// Returns the first error entry matching both the given code and the
/// given "details.path"; panics when none matches.
pub fn find_error_with_code_and_path<'a>(result: &'a JsonB, code: &str, path: &str) -> &'a Value {
    let found = get_errors(result)
        .iter()
        .find(|entry| entry["code"] == code && entry["details"]["path"] == path);
    match found {
        Some(error) => error,
        None => panic!("No error found with code '{}' and path '{}'", code, path),
    }
}
/// Asserts that `error["details"][detail_key]` is a string equal to
/// `expected_value`; panics if the detail is missing or not a string.
pub fn assert_error_detail(error: &Value, detail_key: &str, expected_value: &str) {
    let detail = &error["details"][detail_key];
    match detail.as_str() {
        Some(actual) => {
            assert_eq!(actual, expected_value, "Error detail '{}' mismatch", detail_key)
        }
        None => panic!("Error detail '{}' is not a string", detail_key),
    }
}
// Additional convenience helpers for common patterns
/// Asserts that the error's "message" string contains the given substring.
pub fn assert_error_message_contains(error: &Value, substring: &str) {
    let message = error["message"].as_str().expect("error should have message");
    if !message.contains(substring) {
        panic!("Expected message to contain '{}', got '{}'", substring, message);
    }
}
/// Asserts that `error["details"]["cause"]` is a JSON object and equals
/// the expected cause value.
pub fn assert_error_cause_json(error: &Value, expected_cause: &Value) {
    let cause = &error["details"]["cause"];
    if !cause.is_object() {
        panic!("cause should be JSON object");
    }
    assert_eq!(cause, expected_cause, "cause mismatch");
}
/// Asserts that `error["details"]["context"]` equals the expected context.
pub fn assert_error_context(error: &Value, expected_context: &Value) {
    let actual = &error["details"]["context"];
    assert_eq!(actual, expected_context, "context mismatch");
}
// Wraps a serde_json Value in pgrx's JsonB newtype so tests can pass
// plain json!(…) literals to the #[pg_extern] functions.
pub fn jsonb(val: Value) -> JsonB {
    JsonB(val)
}

View File

@ -9,9 +9,23 @@ use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{collections::HashMap, sync::RwLock};
struct BoonCache {
#[derive(Clone, Copy, Debug, PartialEq)]
enum SchemaType {
Enum,
Type,
PublicPunc,
PrivatePunc,
}
struct Schema {
index: SchemaIndex,
t: SchemaType,
value: Value,
}
struct Cache {
schemas: Schemas,
id_to_index: HashMap<String, SchemaIndex>,
map: HashMap<String, Schema>,
}
// Structure to hold error information without lifetimes
@ -24,94 +38,186 @@ struct Error {
}
lazy_static! {
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
static ref SCHEMA_CACHE: RwLock<Cache> = RwLock::new(Cache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
map: HashMap::new(),
});
}
#[pg_extern(strict)]
fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
let mut schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);
fn cache_json_schemas(enums: JsonB, types: JsonB, puncs: JsonB) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
let enums_value: Value = enums.0;
let types_value: Value = types.0;
let puncs_value: Value = puncs.0;
// Apply strict validation to all objects in the schema if requested
if strict {
apply_strict_validation(&mut schema_value);
}
*cache = Cache {
schemas: Schemas::new(),
map: HashMap::new(),
};
// Create the boon compiler and enable format assertions
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
// Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
return JsonB(json!({
"errors": [{
"code": "SCHEMA_RESOURCE_ADD_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"schema": schema_id,
"cause": format!("{}", e)
let mut errors = Vec::new();
let mut schemas_to_compile = Vec::new();
// Phase 1: Enums
if let Some(enums_array) = enums_value.as_array() {
for enum_row in enums_array {
if let Some(schemas_raw) = enum_row.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), SchemaType::Enum));
}
}
}
}
}
}]
}
// Phase 2: Types
if let Some(types_array) = types_value.as_array() {
for type_row in types_array {
if let Some(schemas_raw) = type_row.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), SchemaType::Type));
}
}
}
}
}
}
// Phase 3: Puncs
if let Some(puncs_array) = puncs_value.as_array() {
for punc_row in puncs_array {
if let Some(punc_obj) = punc_row.as_object() {
if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
let is_public = punc_obj.get("public").and_then(|v| v.as_bool()).unwrap_or(false);
let punc_schema_type = if is_public { SchemaType::PublicPunc } else { SchemaType::PrivatePunc };
if let Some(schemas_raw) = punc_obj.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
let request_schema_id = format!("{}.request", punc_name);
let response_schema_id = format!("{}.response", punc_name);
let schema_type_for_def = if schema_id == request_schema_id || schema_id == response_schema_id {
punc_schema_type
} else {
SchemaType::Type
};
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), schema_type_for_def));
}
}
}
}
}
}
}
}
// Add all resources to compiler first
for (id, value, schema_type) in &schemas_to_compile {
add_schema_resource(&mut compiler, id, value.clone(), *schema_type, &mut errors);
}
if !errors.is_empty() {
return JsonB(json!({ "errors": errors }));
}
// Compile all schemas
compile_all_schemas(&mut compiler, &mut cache, &schemas_to_compile, &mut errors);
if errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": errors }))
}
}
// Helper function to add a schema resource (without compiling)
fn add_schema_resource(
compiler: &mut Compiler,
schema_id: &str,
mut schema_value: Value,
schema_type: SchemaType,
errors: &mut Vec<Value>
) {
match schema_type {
SchemaType::Enum | SchemaType::PrivatePunc => {},
SchemaType::Type | SchemaType::PublicPunc => apply_strict_validation(&mut schema_value, schema_type),
}
if let Err(e) = compiler.add_resource(schema_id, schema_value) {
errors.push(json!({
"code": "SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": { "schema": schema_id, "cause": format!("{}", e) }
}));
}
}
// Use schema_path when compiling
match compiler.compile(&schema_path, &mut cache.schemas) {
Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ "response": "success" }))
}
Err(e) => {
let errors = match &e {
CompileError::ValidationError { url: _url, src } => {
// Collect leaf errors from the meta-schema validation failure
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
// Filter and format errors properly - no instance for schema compilation
format_errors(error_list, &schema_value, schema_id)
}
_ => {
// Other compilation errors
vec![json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"schema": schema_id,
"cause": format!("{:?}", e)
// Helper function to compile all added resources
fn compile_all_schemas(
compiler: &mut Compiler,
cache: &mut Cache,
schemas_to_compile: &[(String, Value, SchemaType)],
errors: &mut Vec<Value>,
) {
for (id, value, schema_type) in schemas_to_compile {
match compiler.compile(id, &mut cache.schemas) {
Ok(index) => {
cache.map.insert(id.clone(), Schema { index, t: *schema_type, value: value.clone() });
}
Err(e) => {
match &e {
CompileError::ValidationError { src, .. } => {
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
let formatted_errors = format_errors(error_list, value, id);
errors.extend(formatted_errors);
}
_ => {
errors.push(json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", id),
"details": { "schema": id, "cause": format!("{:?}", e) }
}));
}
};
}
})]
}
};
JsonB(json!({ "errors": errors }))
}
}
}
// Helper function to apply strict validation to a schema
//
// This recursively adds unevaluatedProperties: false to object-type schemas,
// but SKIPS schemas inside if/then/else to avoid breaking conditional validation.
fn apply_strict_validation(schema: &mut Value) {
apply_strict_validation_recursive(schema, false);
// For type schemas, it skips the top level to allow inheritance.
fn apply_strict_validation(schema: &mut Value, schema_type: SchemaType) {
apply_strict_validation_recursive(schema, false, schema_type, true);
}
fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: bool) {
fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: bool, schema_type: SchemaType, is_top_level: bool) {
match schema {
Value::Object(map) => {
// Skip adding strict validation if we're inside a conditional
if !inside_conditional {
// Add strict validation to object schemas only at top level
if let Some(Value::String(t)) = map.get("type") {
if t == "object" && !map.contains_key("unevaluatedProperties") && !map.contains_key("additionalProperties") {
// At top level, use unevaluatedProperties: false
// This considers all evaluated properties from all schemas
map.insert("unevaluatedProperties".to_string(), Value::Bool(false));
}
// OR if we're at the top level of a type schema (types should be extensible)
let skip_strict = inside_conditional || (matches!(schema_type, SchemaType::Type) && is_top_level);
if !skip_strict {
// Apply unevaluatedProperties: false to schemas that have $ref OR type: "object"
let has_ref = map.contains_key("$ref");
let has_object_type = map.get("type").and_then(|v| v.as_str()) == Some("object");
if (has_ref || has_object_type) && !map.contains_key("unevaluatedProperties") && !map.contains_key("additionalProperties") {
// Use unevaluatedProperties: false to prevent extra properties
// This considers all evaluated properties from all schemas including refs
map.insert("unevaluatedProperties".to_string(), Value::Bool(false));
}
}
@ -119,49 +225,162 @@ fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: boo
for (key, value) in map.iter_mut() {
// Mark when we're inside conditional branches
let in_conditional = inside_conditional || matches!(key.as_str(), "if" | "then" | "else");
apply_strict_validation_recursive(value, in_conditional);
apply_strict_validation_recursive(value, in_conditional, schema_type, false)
}
}
Value::Array(arr) => {
// Recurse into array items
for item in arr.iter_mut() {
apply_strict_validation_recursive(item, inside_conditional);
apply_strict_validation_recursive(item, inside_conditional, schema_type, false);
}
}
_ => {}
}
}
// Recursively walks an instance alongside its (uncompiled) schema value,
// following $ref entries through the in-process schema cache and emitting
// custom TYPE_MISMATCH errors.
//
// Parameters:
//   instance       - the JSON value being validated at the current position
//   schema         - the raw schema Value guiding the walk at this position
//   cache          - read-locked schema cache; $ref targets are looked up in cache.map
//   path_parts     - accumulator of JSON-pointer segments for error paths
//   type_validated - true once the instance's "type" property has already been
//                    checked against a Type schema on this branch (prevents
//                    re-checking through chained $refs)
//   top_level_id   - Some(schema_id) only at the top-level call for Type
//                    schemas; used to derive the expected type name and to
//                    gate the missing-"type" error
//   errors         - output vector; TYPE_MISMATCH errors are appended here
fn walk_and_validate_refs(
    instance: &Value,
    schema: &Value,
    cache: &std::sync::RwLockReadGuard<Cache>,
    path_parts: &mut Vec<String>,
    type_validated: bool,
    top_level_id: Option<&str>,
    errors: &mut Vec<Value>,
) {
    // Follow a $ref into the cached schema it names, if we have it.
    if let Some(ref_url) = schema.get("$ref").and_then(|v| v.as_str()) {
        if let Some(s) = cache.map.get(ref_url) {
            let mut new_type_validated = type_validated;
            // Only Type schemas impose the "type property must match the
            // schema id" rule, and only once per branch.
            if !type_validated && s.t == SchemaType::Type {
                // Expected type name is the schema id (or top-level id) up to
                // the first '.' — e.g. "user.request" -> "user".
                let id_to_use = top_level_id.unwrap_or(ref_url);
                let expected_type = id_to_use.split('.').next().unwrap_or(id_to_use);
                if let Some(actual_type) = instance.get("type").and_then(|v| v.as_str()) {
                    if actual_type == expected_type {
                        new_type_validated = true;
                    } else {
                        // Build the error path pointing at the "type" property.
                        path_parts.push("type".to_string());
                        let path = format!("/{}", path_parts.join("/"));
                        path_parts.pop();
                        errors.push(json!({
                            "code": "TYPE_MISMATCH",
                            "message": format!("Instance type '{}' does not match expected type '{}' derived from schema $ref", actual_type, expected_type),
                            "details": { "path": path, "context": instance, "cause": { "expected": expected_type, "actual": actual_type }, "schema": ref_url }
                        }));
                    }
                } else {
                    // Missing "type" is only reported at the top level
                    // (when top_level_id was supplied by the caller).
                    if top_level_id.is_some() {
                        let path = if path_parts.is_empty() { "".to_string() } else { format!("/{}", path_parts.join("/")) };
                        errors.push(json!({
                            "code": "TYPE_MISMATCH",
                            "message": "Instance is missing 'type' property required for schema validation",
                            "details": { "path": path, "context": instance, "cause": { "expected": expected_type }, "schema": ref_url }
                        }));
                    }
                }
            }
            // Recurse into the referenced schema's value; top_level_id is
            // deliberately dropped past the first hop.
            walk_and_validate_refs(instance, &s.value, cache, path_parts, new_type_validated, None, errors);
        }
    }
    // Recurse into matching object properties, tracking the path segment.
    if let Some(properties) = schema.get("properties").and_then(|v| v.as_object()) {
        for (prop_name, prop_schema) in properties {
            if let Some(prop_value) = instance.get(prop_name) {
                path_parts.push(prop_name.clone());
                walk_and_validate_refs(prop_value, prop_schema, cache, path_parts, type_validated, None, errors);
                path_parts.pop();
            }
        }
    }
    // Recurse into array items; note type_validated resets to false per item.
    if let Some(items_schema) = schema.get("items") {
        if let Some(instance_array) = instance.as_array() {
            for (i, item) in instance_array.iter().enumerate() {
                path_parts.push(i.to_string());
                walk_and_validate_refs(item, items_schema, cache, path_parts, false, None, errors);
                path_parts.pop();
            }
        }
    }
    // Recurse through the applicator keywords against the same instance.
    if let Some(all_of_array) = schema.get("allOf").and_then(|v| v.as_array()) {
        for sub_schema in all_of_array {
            walk_and_validate_refs(instance, sub_schema, cache, path_parts, type_validated, None, errors);
        }
    }
    // NOTE(review): anyOf/oneOf branches that merely *fail to match* will
    // still contribute TYPE_MISMATCH errors here — confirm that is intended.
    if let Some(any_of_array) = schema.get("anyOf").and_then(|v| v.as_array()) {
        for sub_schema in any_of_array {
            walk_and_validate_refs(instance, sub_schema, cache, path_parts, type_validated, None, errors);
        }
    }
    if let Some(one_of_array) = schema.get("oneOf").and_then(|v| v.as_array()) {
        for sub_schema in one_of_array {
            walk_and_validate_refs(instance, sub_schema, cache, path_parts, type_validated, None, errors);
        }
    }
    if let Some(if_schema) = schema.get("if") {
        walk_and_validate_refs(instance, if_schema, cache, path_parts, type_validated, None, errors);
    }
    if let Some(then_schema) = schema.get("then") {
        walk_and_validate_refs(instance, then_schema, cache, path_parts, type_validated, None, errors);
    }
    if let Some(else_schema) = schema.get("else") {
        walk_and_validate_refs(instance, else_schema, cache, path_parts, type_validated, None, errors);
    }
    if let Some(not_schema) = schema.get("not") {
        walk_and_validate_refs(instance, not_schema, cache, path_parts, type_validated, None, errors);
    }
}
#[pg_extern(strict, parallel_safe)]
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"errors": [{
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"schema": schema_id,
"cause": "Schema must be cached before validation"
let cache = SCHEMA_CACHE.read().unwrap();
match cache.map.get(schema_id) {
None => JsonB(json!({
"errors": [{
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"schema": schema_id,
"cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
}
}]
})),
Some(schema) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, schema.index) {
Ok(_) => {
let mut custom_errors = Vec::new();
if schema.t == SchemaType::Type || schema.t == SchemaType::PublicPunc || schema.t == SchemaType::PrivatePunc {
let mut path_parts = vec![];
let top_level_id = if schema.t == SchemaType::Type { Some(schema_id) } else { None };
walk_and_validate_refs(&instance_value, &schema.value, &cache, &mut path_parts, false, top_level_id, &mut custom_errors);
}
if custom_errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": custom_errors }))
}
}
Err(validation_error) => {
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value, schema_id);
let filtered_errors = filter_false_schema_errors(errors);
if filtered_errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": filtered_errors }))
}
}
}
}
}]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "response": "success" })),
Err(validation_error) => {
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value, schema_id);
// Filter out FALSE_SCHEMA errors if there are other validation errors
let filtered_errors = filter_false_schema_errors(errors);
JsonB(json!({ "errors": filtered_errors }))
}
}
}
}
}
// Recursively collects validation errors
@ -486,6 +705,13 @@ fn handle_additional_items_error(base_path: &str, got: usize) -> Vec<Error> {
}
fn handle_format_error(base_path: &str, want: &str, got: &Cow<Value>, err: &Box<dyn std::error::Error>) -> Vec<Error> {
// If the value is an empty string, skip format validation.
if let Value::String(s) = got.as_ref() {
if s.is_empty() {
return vec![];
}
}
vec![Error {
path: base_path.to_string(),
code: "FORMAT_INVALID".to_string(),
@ -751,15 +977,15 @@ fn extract_value_at_path(instance: &Value, path: &str) -> Value {
#[pg_extern(strict, parallel_safe)]
fn json_schema_cached(schema_id: &str) -> bool {
let cache = SCHEMA_CACHE.read().unwrap();
cache.id_to_index.contains_key(schema_id)
cache.map.contains_key(schema_id)
}
#[pg_extern(strict)]
fn clear_json_schemas() -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
*cache = BoonCache {
*cache = Cache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
map: HashMap::new(),
};
JsonB(json!({ "response": "success" }))
}
@ -767,7 +993,7 @@ fn clear_json_schemas() -> JsonB {
#[pg_extern(strict, parallel_safe)]
fn show_json_schemas() -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
let ids: Vec<String> = cache.map.keys().cloned().collect();
JsonB(json!({ "response": ids }))
}
@ -786,6 +1012,16 @@ pub mod pg_test {
}
}
#[cfg(any(test, feature = "pg_test"))]
mod helpers {
include!("helpers.rs");
}
#[cfg(any(test, feature = "pg_test"))]
mod schemas {
include!("schemas.rs");
}
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {

830
src/schemas.rs Normal file
View File

@ -0,0 +1,830 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::JsonB;
// Helper to convert Value to JsonB
// Helper to convert Value to JsonB (local duplicate of the tests' helper,
// kept private to this fixtures module).
fn jsonb(val: Value) -> JsonB {
    JsonB(val)
}
/// Caches a single private punc ("simple.request") with no enums or types,
/// returning whatever `cache_json_schemas` reports.
pub fn simple_schemas() -> JsonB {
    let enum_rows = json!([]);
    let type_rows = json!([]);
    let punc_rows = json!([{
        "name": "simple",
        "public": false,
        "schemas": [{
            "$id": "simple.request",
            "type": "object",
            "properties": {
                "name": { "type": "string" },
                "age": { "type": "integer", "minimum": 0 }
            },
            "required": ["name", "age"]
        }]
    }]);
    cache_json_schemas(jsonb(enum_rows), jsonb(type_rows), jsonb(punc_rows))
}
/// Caches a deliberately malformed punc schema (bad "type" value) so tests
/// can assert compile-time schema errors.
pub fn invalid_schemas() -> JsonB {
    let enum_rows = json!([]);
    let type_rows = json!([]);
    let punc_rows = json!([{
        "name": "invalid_punc",
        "public": false,
        "schemas": [{
            "$id": "invalid_punc.request",
            "type": ["invalid_type_value"]
        }]
    }]);
    cache_json_schemas(jsonb(enum_rows), jsonb(type_rows), jsonb(punc_rows))
}
// Caches one private punc with a nested-object request schema, used to
// exercise nested error paths (e.g. /address/city maxLength violations).
pub fn errors_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "detailed_errors_test",
        "public": false,
        "schemas": [{
            "$id": "detailed_errors_test.request",
            "type": "object",
            "properties": {
                "address": {
                    "type": "object",
                    "properties": {
                        "street": { "type": "string" },
                        "city": { "type": "string", "maxLength": 10 }
                    },
                    "required": ["street", "city"]
                }
            },
            "required": ["address"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
// Caches a punc whose request schema is a top-level oneOf with two object
// branches, for testing oneOf error reporting.
pub fn oneof_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "oneof_test",
        "public": false,
        "schemas": [{
            "$id": "oneof_test.request",
            "oneOf": [
                {
                    "type": "object",
                    "properties": {
                        "string_prop": { "type": "string", "maxLength": 5 }
                    },
                    "required": ["string_prop"]
                },
                {
                    "type": "object",
                    "properties": {
                        "number_prop": { "type": "number", "minimum": 10 }
                    },
                    "required": ["number_prop"]
                }
            ]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
// Caches two puncs covering different root schema types: an object-rooted
// request and an array-rooted request (with uuid-formatted items).
pub fn root_types_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "object_test",
            "public": false,
            "schemas": [{
                "$id": "object_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" },
                    "age": { "type": "integer", "minimum": 0 }
                },
                "required": ["name", "age"]
            }]
        },
        {
            "name": "array_test",
            "public": false,
            "schemas": [{
                "$id": "array_test.request",
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "id": { "type": "string", "format": "uuid" }
                    }
                }
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
// Caches a battery of puncs exercising strict-mode behavior: public puncs
// (which get unevaluatedProperties injected by apply_strict_validation)
// versus private ones, nested objects/arrays, schemas that already declare
// unevaluatedProperties/additionalProperties, and an if/then conditional.
pub fn strict_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "basic_strict_test",
            "public": true,
            "schemas": [{
                "$id": "basic_strict_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                }
            }]
        },
        {
            "name": "non_strict_test",
            "public": false,
            "schemas": [{
                "$id": "non_strict_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                }
            }]
        },
        {
            "name": "nested_strict_test",
            "public": true,
            "schemas": [{
                "$id": "nested_strict_test.request",
                "type": "object",
                "properties": {
                    "user": {
                        "type": "object",
                        "properties": {
                            "name": { "type": "string" }
                        }
                    },
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "id": { "type": "string" }
                            }
                        }
                    }
                }
            }]
        },
        {
            "name": "already_unevaluated_test",
            "public": true,
            "schemas": [{
                "$id": "already_unevaluated_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                },
                "unevaluatedProperties": true
            }]
        },
        {
            "name": "already_additional_test",
            "public": true,
            "schemas": [{
                "$id": "already_additional_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                },
                "additionalProperties": false
            }]
        },
        {
            "name": "conditional_strict_test",
            "public": true,
            "schemas": [{
                "$id": "conditional_strict_test.request",
                "type": "object",
                "properties": {
                    "creating": { "type": "boolean" }
                },
                "if": {
                    "properties": {
                        "creating": { "const": true }
                    }
                },
                "then": {
                    "properties": {
                        "name": { "type": "string" }
                    },
                    "required": ["name"]
                }
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches a single private punc with two required properties, for basic
/// required-field validation tests.
pub fn required_schemas() -> JsonB {
    let enum_rows = json!([]);
    let type_rows = json!([]);
    let punc_rows = json!([{
        "name": "basic_validation_test",
        "public": false,
        "schemas": [{
            "$id": "basic_validation_test.request",
            "type": "object",
            "properties": {
                "name": { "type": "string" },
                "age": { "type": "integer", "minimum": 0 }
            },
            "required": ["name", "age"]
        }]
    }]);
    cache_json_schemas(jsonb(enum_rows), jsonb(type_rows), jsonb(punc_rows))
}
// Caches a punc using the "dependencies" keyword: presence of "creating"
// requires "name" and "kind".
pub fn dependencies_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "dependency_split_test",
        "public": false,
        "schemas": [{
            "$id": "dependency_split_test.request",
            "type": "object",
            "properties": {
                "creating": { "type": "boolean" },
                "name": { "type": "string" },
                "kind": { "type": "string" },
                "description": { "type": "string" }
            },
            "dependencies": {
                "creating": ["name", "kind"]
            }
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
// Caches a punc combining "required" and "dependencies" inside array items,
// for testing nested dependency error paths (e.g. /items/0/...).
pub fn nested_req_deps_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "nested_dep_test",
        "public": false,
        "schemas": [{
            "$id": "nested_dep_test.request",
            "type": "object",
            "properties": {
                "items": {
                    "type": "array",
                    "items": {
                        "type": "object",
                        "properties": {
                            "id": { "type": "string" },
                            "creating": { "type": "boolean" },
                            "name": { "type": "string" },
                            "kind": { "type": "string" }
                        },
                        "required": ["id"],
                        "dependencies": {
                            "creating": ["name", "kind"]
                        }
                    }
                }
            },
            "required": ["items"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
// Caches two puncs with explicit "additionalProperties": false — once at the
// top level and once on a nested object — for extra-property rejection tests.
pub fn additional_properties_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "additional_props_test",
            "public": false,
            "schemas": [{
                "$id": "additional_props_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" },
                    "age": { "type": "number" }
                },
                "additionalProperties": false
            }]
        },
        {
            "name": "nested_additional_props_test",
            "public": false,
            "schemas": [{
                "$id": "nested_additional_props_test.request",
                "type": "object",
                "properties": {
                    "user": {
                        "type": "object",
                        "properties": {
                            "name": { "type": "string" }
                        },
                        "additionalProperties": false
                    }
                }
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
// Caches two puncs with explicit "unevaluatedProperties": false — one with
// patternProperties and one whose properties come via allOf — for testing
// how unevaluated properties interact with composition.
pub fn unevaluated_properties_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "simple_unevaluated_test",
            "public": false,
            "schemas": [{
                "$id": "simple_unevaluated_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" },
                    "age": { "type": "number" }
                },
                "patternProperties": {
                    "^attr_": { "type": "string" }
                },
                "unevaluatedProperties": false
            }]
        },
        {
            "name": "conditional_unevaluated_test",
            "public": false,
            "schemas": [{
                "$id": "conditional_unevaluated_test.request",
                "type": "object",
                "allOf": [
                    {
                        "properties": {
                            "firstName": { "type": "string" }
                        }
                    },
                    {
                        "properties": {
                            "lastName": { "type": "string" }
                        }
                    }
                ],
                "properties": {
                    "age": { "type": "number" }
                },
                "unevaluatedProperties": false
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches a punc whose request schema exercises string "format" assertions
/// (uuid, date-time, email).
pub fn format_schemas() -> JsonB {
    let enum_rows = json!([]);
    let type_rows = json!([]);
    let punc_rows = json!([{
        "name": "format_test",
        "public": false,
        "schemas": [{
            "$id": "format_test.request",
            "type": "object",
            "properties": {
                "uuid": { "type": "string", "format": "uuid" },
                "date_time": { "type": "string", "format": "date-time" },
                "email": { "type": "string", "format": "email" }
            }
        }]
    }]);
    cache_json_schemas(jsonb(enum_rows), jsonb(type_rows), jsonb(punc_rows))
}
// Caches a three-level Type inheritance chain (entity <- user <- person)
// via $ref, for testing property merging across referenced type schemas.
pub fn property_merging_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id": { "type": "string" },
                    "name": { "type": "string" }
                },
                "required": ["id"]
            }]
        },
        {
            "name": "user",
            "schemas": [{
                "$id": "user",
                "$ref": "entity",
                "properties": {
                    "password": { "type": "string", "minLength": 8 }
                },
                "required": ["password"]
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "user",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 }
                },
                "required": ["first_name", "last_name"]
            }]
        }
    ]);
    let puncs = json!([]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Fixture: an entity <- user <- person `$ref` chain where the derived
/// types add `required` fields conditionally via `if`/`then` on the
/// `type` discriminator, rather than unconditionally.
pub fn required_merging_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id": { "type": "string", "format": "uuid" },
                    "type": { "type": "string" },
                    "created_by": { "type": "string", "format": "uuid" }
                },
                "required": ["id", "type", "created_by"]
            }]
        },
        {
            "name": "user",
            "schemas": [{
                "$id": "user",
                "$ref": "entity",
                "properties": { "password": { "type": "string", "minLength": 8 } },
                // password is only required when type == "user"
                "if": { "properties": { "type": { "const": "user" } } },
                "then": { "required": ["password"] }
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "user",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 }
                },
                // names are only required when type == "person"
                "if": { "properties": { "type": { "const": "person" } } },
                "then": { "required": ["first_name", "last_name"] }
            }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}
/// Fixture: an entity <- user <- person `$ref` chain where every level
/// declares a `dependencies` entry keyed on `creating`; the base and `user`
/// levels repeat the same dependency while `person` widens it.
pub fn dependencies_merging_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id": { "type": "string", "format": "uuid" },
                    "type": { "type": "string" },
                    "created_by": { "type": "string", "format": "uuid" },
                    "creating": { "type": "boolean" },
                    "name": { "type": "string" }
                },
                "required": ["id", "type", "created_by"],
                "dependencies": { "creating": ["name"] }
            }]
        },
        {
            "name": "user",
            "schemas": [{
                "$id": "user",
                "$ref": "entity",
                "properties": { "password": { "type": "string", "minLength": 8 } },
                // Same dependency as the base type, repeated at this level.
                "dependencies": { "creating": ["name"] }
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "user",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 }
                },
                // This level makes `creating` pull in both name parts instead.
                "dependencies": { "creating": ["first_name", "last_name"] }
            }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}
/// Fixture: shared types (entity <- person, with a nested `address` object)
/// referenced from two puncs that differ only in their `public` flag.
pub fn punc_with_refs_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id": { "type": "string" },
                    "name": { "type": "string" },
                    "type": { "type": "string" }
                },
                "required": ["id", "type"]
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "entity",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 },
                    "address": {
                        "type": "object",
                        "properties": {
                            "street": { "type": "string" },
                            "city": { "type": "string" }
                        },
                        "required": ["street", "city"]
                    }
                }
            }]
        }
    ]);
    // Same $ref target, one public punc and one private punc.
    let punc_defs = json!([
        {
            "name": "public_ref_test",
            "public": true,
            "schemas": [{ "$id": "public_ref_test.request", "$ref": "person" }]
        },
        {
            "name": "private_ref_test",
            "public": false,
            "schemas": [{ "$id": "private_ref_test.request", "$ref": "person" }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(punc_defs))
}
/// Fixture: one enum definition (`task_priority`) plus a private punc whose
/// request schema `$ref`s it through a required property.
pub fn enum_schemas() -> JsonB {
    let enum_defs = json!([{
        "name": "task_priority",
        "values": ["low", "medium", "high", "urgent"],
        "schemas": [{
            "$id": "task_priority",
            "type": "string",
            "enum": ["low", "medium", "high", "urgent"]
        }]
    }]);
    let punc_defs = json!([{
        "name": "enum_ref_test",
        "public": false,
        "schemas": [{
            "$id": "enum_ref_test.request",
            "type": "object",
            "properties": { "priority": { "$ref": "task_priority" } },
            "required": ["priority"]
        }]
    }]);
    cache_json_schemas(jsonb(enum_defs), jsonb(json!([])), jsonb(punc_defs))
}
/// Fixture: puncs whose schema lists contain an extra punc-local schema that
/// the `.request` schema `$ref`s — one fully local, one whose local schema in
/// turn `$ref`s a globally registered type (`global_thing`).
pub fn punc_local_refs_schemas() -> JsonB {
    let type_defs = json!([{
        "name": "global_thing",
        "schemas": [{
            "$id": "global_thing",
            "type": "object",
            "properties": {
                "id": { "type": "string", "format": "uuid" },
                "type": { "type": "string" }
            },
            "required": ["id", "type"]
        }]
    }]);
    let punc_defs = json!([
        {
            "name": "punc_with_local_ref_test",
            "public": false,
            "schemas": [
                // Local helper schema, visible only within this punc's list.
                {
                    "$id": "local_address",
                    "type": "object",
                    "properties": {
                        "street": { "type": "string" },
                        "city": { "type": "string" }
                    },
                    "required": ["street", "city"]
                },
                { "$id": "punc_with_local_ref_test.request", "$ref": "local_address" }
            ]
        },
        {
            "name": "punc_with_local_ref_to_global_test",
            "public": false,
            "schemas": [
                // Local schema that itself points at the global type above.
                {
                    "$id": "local_user_with_thing",
                    "type": "object",
                    "properties": {
                        "user_name": { "type": "string" },
                        "thing": { "$ref": "global_thing" }
                    },
                    "required": ["user_name", "thing"]
                },
                { "$id": "punc_with_local_ref_to_global_test.request", "$ref": "local_user_with_thing" }
            ]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(punc_defs))
}
/// Fixture: a base type carrying `"title": "Base Title"` and a derived type
/// that `$ref`s it while supplying its own `"title": "Override Title"`.
pub fn title_override_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "base_with_title",
            "schemas": [{
                "$id": "base_with_title",
                "type": "object",
                "title": "Base Title",
                "properties": { "name": { "type": "string" } },
                "required": ["name"]
            }]
        },
        {
            "name": "override_with_title",
            "schemas": [{
                "$id": "override_with_title",
                "$ref": "base_with_title",
                "title": "Override Title"
            }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}
/// Fixture: an entity <- job <- super_job `$ref` chain (super_job also ships
/// a narrowed `super_job.short` variant) plus a punc that references `job`
/// directly and `super_job` inside a `oneOf` branch.
pub fn type_matching_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": { "type": { "type": "string" }, "name": { "type": "string" } },
                "required": ["type", "name"]
            }]
        },
        {
            "name": "job",
            "schemas": [{
                "$id": "job",
                "$ref": "entity",
                "properties": { "job_id": { "type": "string" } },
                "required": ["job_id"]
            }]
        },
        {
            "name": "super_job",
            "schemas": [
                {
                    "$id": "super_job",
                    "$ref": "job",
                    "properties": { "manager_id": { "type": "string" } },
                    "required": ["manager_id"]
                },
                // Secondary schema: same type, tighter `name` constraint.
                {
                    "$id": "super_job.short",
                    "$ref": "super_job",
                    "properties": { "name": { "maxLength": 10 } }
                }
            ]
        }
    ]);
    let punc_defs = json!([{
        "name": "type_test_punc",
        "public": false,
        "schemas": [{
            "$id": "type_test_punc.request",
            "type": "object",
            "properties": {
                "root_job": { "$ref": "job" },
                "nested_or_super_job": {
                    "oneOf": [
                        { "$ref": "super_job" },
                        {
                            "type": "object",
                            "properties": { "my_job": { "$ref": "job" } },
                            "required": ["my_job"]
                        }
                    ]
                }
            },
            "required": ["root_job", "nested_or_super_job"]
        }]
    }]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(punc_defs))
}

File diff suppressed because it is too large Load Diff

View File

@ -1 +1 @@
1.0.29
1.0.36