diff --git a/src/lib.rs b/src/lib.rs index c13a8b6..7c9e4ef 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -31,65 +31,225 @@ lazy_static! { } #[pg_extern(strict)] -fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB { +fn cache_json_schemas(types: JsonB, puncs: JsonB) -> JsonB { let mut cache = SCHEMA_CACHE.write().unwrap(); - let mut schema_value: Value = schema.0; - let schema_path = format!("urn:{}", schema_id); + let types_value: Value = types.0; + let puncs_value: Value = puncs.0; - // Apply strict validation to all objects in the schema if requested - if strict { - apply_strict_validation(&mut schema_value); - } + // Clear existing cache + *cache = BoonCache { + schemas: Schemas::new(), + id_to_index: HashMap::new(), + }; // Create the boon compiler and enable format assertions let mut compiler = Compiler::new(); compiler.enable_format_assertions(); - // Use schema_path when adding the resource - if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) { - return JsonB(json!({ - "errors": [{ - "code": "SCHEMA_RESOURCE_ADD_FAILED", - "message": format!("Failed to add schema resource '{}'", schema_id), - "details": { - "schema": schema_id, - "cause": format!("{}", e) + let mut errors = Vec::new(); + + // Track all schema IDs for compilation + let mut all_schema_ids = Vec::new(); + + // Phase 1: Add all type schemas as resources (these are referenced by puncs) + // Types are never strict - they're reusable building blocks + if let Some(types_array) = types_value.as_array() { + for type_row in types_array { + if let Some(type_obj) = type_row.as_object() { + if let (Some(type_name), Some(schemas_raw)) = ( + type_obj.get("name").and_then(|v| v.as_str()), + type_obj.get("schemas") + ) { + // Parse the schemas JSONB field + if let Some(schemas_array) = schemas_raw.as_array() { + for schema_def in schemas_array { + if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) { + if let Err(e) = add_schema_resource(&mut 
compiler, schema_id, schema_def.clone(), false, &mut errors) { + errors.push(json!({ + "code": "TYPE_SCHEMA_RESOURCE_FAILED", + "message": format!("Failed to add schema resource '{}' for type '{}'", schema_id, type_name), + "details": { + "type_name": type_name, + "schema_id": schema_id, + "cause": format!("{}", e) + } + })); + } else { + all_schema_ids.push(schema_id.to_string()); + } + } + } + } } - }] - })); + } + } } - // Use schema_path when compiling - match compiler.compile(&schema_path, &mut cache.schemas) { - Ok(sch_index) => { - // Store the index using the original schema_id as the key - cache.id_to_index.insert(schema_id.to_string(), sch_index); - JsonB(json!({ "response": "success" })) - } - Err(e) => { - let errors = match &e { - CompileError::ValidationError { url: _url, src } => { - // Collect leaf errors from the meta-schema validation failure - let mut error_list = Vec::new(); - collect_errors(src, &mut error_list); - // Filter and format errors properly - no instance for schema compilation - format_errors(error_list, &schema_value, schema_id) - } - _ => { - // Other compilation errors - vec![json!({ - "code": "SCHEMA_COMPILATION_FAILED", - "message": format!("Schema '{}' compilation failed", schema_id), - "details": { - "schema": schema_id, - "cause": format!("{:?}", e) + // Phase 2: Add all punc schemas as resources (these may reference type schemas) + // Each punc gets strict validation based on its public field + if let Some(puncs_array) = puncs_value.as_array() { + for punc_row in puncs_array { + if let Some(punc_obj) = punc_row.as_object() { + if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) { + // Get the strict setting for this specific punc (public = strict) + let punc_strict = punc_obj.get("public") + .and_then(|v| v.as_bool()) + .unwrap_or(false); + + // Add punc local schemas as resources (from schemas field) - use $id directly (universal) + if let Some(schemas_raw) = punc_obj.get("schemas") { + if let 
Some(schemas_array) = schemas_raw.as_array() { + for schema_def in schemas_array { + if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) { + if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), punc_strict, &mut errors) { + errors.push(json!({ + "code": "PUNC_LOCAL_SCHEMA_RESOURCE_FAILED", + "message": format!("Failed to add local schema resource '{}' for punc '{}'", schema_id, punc_name), + "details": { + "punc_name": punc_name, + "schema_id": schema_id, + "cause": format!("{}", e) + } + })); + } else { + all_schema_ids.push(schema_id.to_string()); + } + } + } } - })] + } + + // Add request schema as resource if present - use {punc_name}.request + if let Some(request_schema) = punc_obj.get("request") { + if !request_schema.is_null() { + let request_schema_id = format!("{}.request", punc_name); + if let Err(e) = add_schema_resource(&mut compiler, &request_schema_id, request_schema.clone(), punc_strict, &mut errors) { + errors.push(json!({ + "code": "PUNC_REQUEST_SCHEMA_RESOURCE_FAILED", + "message": format!("Failed to add request schema resource for punc '{}'", punc_name), + "details": { + "punc_name": punc_name, + "schema_id": request_schema_id, + "cause": format!("{}", e) + } + })); + } else { + all_schema_ids.push(request_schema_id); + } + } + } + + // Add response schema as resource if present - use {punc_name}.response + if let Some(response_schema) = punc_obj.get("response") { + if !response_schema.is_null() { + let response_schema_id = format!("{}.response", punc_name); + if let Err(e) = add_schema_resource(&mut compiler, &response_schema_id, response_schema.clone(), punc_strict, &mut errors) { + errors.push(json!({ + "code": "PUNC_RESPONSE_SCHEMA_RESOURCE_FAILED", + "message": format!("Failed to add response schema resource for punc '{}'", punc_name), + "details": { + "punc_name": punc_name, + "schema_id": response_schema_id, + "cause": format!("{}", e) + } + })); + } else { + 
all_schema_ids.push(response_schema_id); + } + } + } } - }; - JsonB(json!({ "errors": errors })) + } } } + + // Phase 3: Compile all schemas now that all resources are added + if !errors.is_empty() { + // If we had errors adding resources, don't attempt compilation + return JsonB(json!({ "errors": errors })); + } + + if let Err(_) = compile_all_schemas(&mut compiler, &mut cache, &all_schema_ids, &mut errors) { + // compile_all_schemas already adds errors to the errors vector + } + + if errors.is_empty() { + JsonB(json!({ "response": "success" })) + } else { + JsonB(json!({ "errors": errors })) + } +} + +// Helper function to add a schema resource (without compiling) +fn add_schema_resource( + compiler: &mut Compiler, + schema_id: &str, + mut schema_value: Value, + strict: bool, + errors: &mut Vec<Value> +) -> Result<(), String> { + // Apply strict validation to all objects in the schema if requested + if strict { + apply_strict_validation(&mut schema_value); + } + + // Use schema_id directly - simple IDs like "entity", "user", "punc.request" + if let Err(e) = compiler.add_resource(schema_id, schema_value.clone()) { + errors.push(json!({ + "code": "SCHEMA_RESOURCE_FAILED", + "message": format!("Failed to add schema resource '{}'", schema_id), + "details": { + "schema": schema_id, + "cause": format!("{}", e) + } + })); + return Err(format!("Failed to add schema resource: {}", e)); + } + + Ok(()) +} + +// Helper function to compile all added resources +fn compile_all_schemas( + compiler: &mut Compiler, + cache: &mut BoonCache, + schema_ids: &[String], + errors: &mut Vec<Value> +) -> Result<(), String> { + for schema_id in schema_ids { + match compiler.compile(schema_id, &mut cache.schemas) { + Ok(sch_index) => { + // Store the index using the original schema_id as the key + cache.id_to_index.insert(schema_id.to_string(), sch_index); + } + Err(e) => { + match &e { + CompileError::ValidationError { url: _url, src } => { + // Collect leaf errors from the meta-schema validation failure 
+ let mut error_list = Vec::new(); + collect_errors(src, &mut error_list); + // Get schema value for error formatting - we'll need to reconstruct or store it + let schema_value = json!({}); // Placeholder - we don't have the original value here + let formatted_errors = format_errors(error_list, &schema_value, schema_id); + errors.extend(formatted_errors); + } + _ => { + // Other compilation errors + errors.push(json!({ + "code": "SCHEMA_COMPILATION_FAILED", + "message": format!("Schema '{}' compilation failed", schema_id), + "details": { + "schema": schema_id, + "cause": format!("{:?}", e) + } + })); + } + }; + return Err(format!("Schema compilation failed: {:?}", e)); + } + } + } + Ok(()) } // Helper function to apply strict validation to a schema @@ -135,7 +295,7 @@ fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: boo #[pg_extern(strict, parallel_safe)] fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB { let cache = SCHEMA_CACHE.read().unwrap(); - // Lookup uses the original schema_id + // Lookup uses the original schema_id - schemas should always be available after bulk caching match cache.id_to_index.get(schema_id) { None => JsonB(json!({ "errors": [{ @@ -143,7 +303,7 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB { "message": format!("Schema '{}' not found in cache", schema_id), "details": { "schema": schema_id, - "cause": "Schema must be cached before validation" + "cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called" } }] })), diff --git a/src/tests.rs b/src/tests.rs index bccdb7f..86a4767 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -2,124 +2,124 @@ use crate::*; use serde_json::{json, Value}; use pgrx::{JsonB, pg_test}; -// Helper macro for asserting success with Drop-style response -macro_rules! assert_success_with_json { - ($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) 
=> { - let has_response = $result_jsonb.0.get("response").is_some(); - let has_errors = $result_jsonb.0.get("errors").is_some(); - if !has_response || has_errors { - let base_msg = format!($fmt $(, $($args)*)?); - let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0) - .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0)); - let panic_msg = format!("Assertion Failed (expected success with 'response' field): {}\nResult JSON:\n{}", base_msg, pretty_json); - panic!("{}", panic_msg); - } - }; - // Simpler version without message - ($result_jsonb:expr) => { - let has_response = $result_jsonb.0.get("response").is_some(); - let has_errors = $result_jsonb.0.get("errors").is_some(); - if !has_response || has_errors { - let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0) - .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0)); - let panic_msg = format!("Assertion Failed (expected success with 'response' field)\nResult JSON:\n{}", pretty_json); - panic!("{}", panic_msg); - } - }; +// Simple test helpers for cleaner test code +fn assert_success(result: &JsonB) { + let json = &result.0; + if !json.get("response").is_some() || json.get("errors").is_some() { + let pretty = serde_json::to_string_pretty(json).unwrap_or_else(|_| format!("{:?}", json)); + panic!("Expected success but got:\n{}", pretty); + } } -// Helper macro for asserting failed JSON results with Drop-style errors -macro_rules! assert_failure_with_json { - // --- Arms with error count and message substring check --- - // With custom message: - ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) 
=> { - let json_result = &$result.0; - let has_response = json_result.get("response").is_some(); - let errors_opt = json_result.get("errors"); - let base_msg = format!($fmt $(, $($args)*)?); +fn assert_failure(result: &JsonB) { + let json = &result.0; + if json.get("response").is_some() || !json.get("errors").is_some() { + let pretty = serde_json::to_string_pretty(json).unwrap_or_else(|_| format!("{:?}", json)); + panic!("Expected failure but got:\n{}", pretty); + } +} - if has_response || errors_opt.is_none() { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json); - } - - let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array"); - - if errors_array.len() != $expected_error_count { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json); - } - - if $expected_error_count > 0 { - let first_error_message = errors_array[0].get("message").and_then(Value::as_str); - match first_error_message { - Some(msg) => { - if !msg.contains($expected_first_message_contains) { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. 
{}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json); - } - } - None => { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json); - } - } - } - }; - // Without custom message (calls the one above with ""): - ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => { - assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, ""); - }; +fn assert_error_count(result: &JsonB, expected_count: usize) { + assert_failure(result); + let errors = get_errors(result); + if errors.len() != expected_count { + let pretty = serde_json::to_string_pretty(&result.0).unwrap_or_else(|_| format!("{:?}", result.0)); + panic!("Expected {} errors, got {}:\n{}", expected_count, errors.len(), pretty); + } +} - // --- Arms with error count check only --- - // With custom message: - ($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) 
=> { - let json_result = &$result.0; - let has_response = json_result.get("response").is_some(); - let errors_opt = json_result.get("errors"); - let base_msg = format!($fmt $(, $($args)*)?); +fn get_errors(result: &JsonB) -> &Vec<Value> { + result.0["errors"].as_array().expect("errors should be an array") +} - if has_response || errors_opt.is_none() { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json); - } - - let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array"); - - if errors_array.len() != $expected_error_count { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json); - } - }; - // Without custom message (calls the one above with ""): - ($result:expr, $expected_error_count:expr) => { - assert_failure_with_json!($result, $expected_error_count, ""); - }; +fn has_error_with_code(result: &JsonB, code: &str) -> bool { + get_errors(result).iter().any(|e| e["code"] == code) +} - // --- Arms checking failure only (expects at least one error) --- - // With custom message: - ($result:expr, $fmt:literal $(, $($args:tt)*)?) 
=> { - let json_result = &$result.0; - let has_response = json_result.get("response").is_some(); - let errors_opt = json_result.get("errors"); - let base_msg = format!($fmt $(, $($args)*)?); - if has_response || errors_opt.is_none() { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json); - } - - let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array"); - - if errors_array.is_empty() { - let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result)); - panic!("Assertion Failed (expected errors, but 'errors' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json); - } - }; - // Without custom message (calls the one above with ""): - ($result:expr) => { - assert_failure_with_json!($result, ""); - }; +fn has_error_with_code_and_path(result: &JsonB, code: &str, path: &str) -> bool { + get_errors(result).iter().any(|e| e["code"] == code && e["details"]["path"] == path) +} + +fn assert_has_error(result: &JsonB, code: &str, path: &str) { + if !has_error_with_code_and_path(result, code, path) { + let pretty = serde_json::to_string_pretty(&result.0).unwrap_or_else(|_| format!("{:?}", result.0)); + panic!("Expected error with code='{}' and path='{}' but not found:\n{}", code, path, pretty); + } +} + + +fn find_error_with_code<'a>(result: &'a JsonB, code: &str) -> &'a Value { + get_errors(result).iter().find(|e| e["code"] == code) + .unwrap_or_else(|| panic!("No error found with code '{}'", code)) +} + + +fn find_error_with_code_and_path<'a>(result: &'a JsonB, code: &str, path: &str) -> &'a Value { + get_errors(result).iter().find(|e| e["code"] == code && e["details"]["path"] == path) + .unwrap_or_else(|| panic!("No error found with code '{}' and path '{}'", 
code, path)) +} + +fn assert_error_detail(error: &Value, detail_key: &str, expected_value: &str) { + let actual = error["details"][detail_key].as_str() + .unwrap_or_else(|| panic!("Error detail '{}' is not a string", detail_key)); + assert_eq!(actual, expected_value, "Error detail '{}' mismatch", detail_key); +} + + +// Response helpers to avoid direct JSON access +fn get_response_schemas(result: &JsonB) -> &Vec<Value> { + result.0["response"].as_array().expect("response should be schema array") +} + +fn assert_response_schema_count(result: &JsonB, expected_count: usize) { + assert_success(result); + let schemas = get_response_schemas(result); + assert_eq!(schemas.len(), expected_count, "Expected {} schemas in response, got {}", expected_count, schemas.len()); +} + +fn assert_contains_schema(result: &JsonB, schema_name: &str) { + let schemas = get_response_schemas(result); + assert!(has_schema_name(schemas, schema_name), "Should contain schema '{}'", schema_name); +} + + +fn has_schema_name(schemas: &[Value], schema_name: &str) -> bool { + schemas.iter().any(|s| s.as_str() == Some(schema_name)) +} + +fn assert_response_empty(result: &JsonB) { + assert_response_schema_count(result, 0); +} + +// Additional convenience helpers for common patterns + +fn assert_error_message_contains(error: &Value, substring: &str) { + let message = error["message"].as_str().expect("error should have message"); + assert!(message.contains(substring), "Expected message to contain '{}', got '{}'", substring, message); +} + +fn assert_error_cause_json(error: &Value, expected_cause: &Value) { + let cause = &error["details"]["cause"]; + assert!(cause.is_object(), "cause should be JSON object"); + assert_eq!(cause, expected_cause, "cause mismatch"); +} + +fn assert_error_context(error: &Value, expected_context: &Value) { + assert_eq!(&error["details"]["context"], expected_context, "context mismatch"); +} + +// Bulk validation helpers to avoid repetitive patterns + + +// Debug helper for development 
(can be removed later) +#[allow(dead_code)] +fn debug_errors(result: &JsonB) { + use pgrx::log; + let errors = get_errors(result); + for (i, error) in errors.iter().enumerate() { + log!("Error {}: code={}, path={}", i, error["code"], error["details"]["path"]); + } } @@ -127,63 +127,639 @@ fn jsonb(val: Value) -> JsonB { JsonB(val) } +// Comprehensive setup that mirrors the real punc system +fn setup_comprehensive_schemas() -> JsonB { + // Create type inheritance chain: entity -> organization -> user -> person + let types = json!([ + { + "name": "entity", + "historical": true, + "sensitive": false, + "ownable": true, + "schemas": [{ + "$id": "entity", + "type": "object", + "properties": { + "id": { "type": "string", "format": "uuid" }, + "type": { "type": "string" }, + "name": { "type": "string" }, + "created_by": { "type": "string", "format": "uuid" }, + "tags": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { "type": "string" }, + "value": { "type": "string" } + }, + "required": ["key", "value"] + } + } + }, + "required": ["id", "type", "created_by"], + "if": { + "properties": { "type": { "const": "entity" } } + }, + "then": { + "properties": { + "name": { "minLength": 1 } + } + } + }] + }, + { + "name": "organization", + "historical": true, + "sensitive": false, + "ownable": true, + "schemas": [{ + "$id": "organization", + "$ref": "entity", + "title": "Organization", + "properties": { + "website": { "type": "string", "format": "uri" }, + "tax_id": { "type": "string" }, + "addresses": { + "type": "array", + "items": { + "$ref": "address_item" + } + } + }, + "if": { + "properties": { "type": { "const": "organization" } } + }, + "then": { + "required": ["tax_id"] + }, + "else": { + "properties": { + "tax_id": false + } + } + }, { + "$id": "address_item", + "type": "object", + "properties": { + "street": { "type": "string" }, + "city": { "type": "string" }, + "country": { "type": "string" } + }, + "required": ["street", "city"] + }] 
+ }, + { + "name": "user", + "historical": true, + "sensitive": false, + "ownable": true, + "schemas": [{ + "$id": "user", + "$ref": "organization", + "title": "User", + "dependencies": { + "creating": ["name"] + }, + "properties": { + "password": { "type": "string", "minLength": 8 }, + "roles": { + "type": "array", + "items": { + "type": "string", + "enum": ["admin", "user", "guest"] + } + } + }, + "if": { + "properties": { "type": { "const": "user" } } + }, + "then": { + "required": ["password"] + } + }] + }, + { + "name": "person", + "historical": true, + "sensitive": false, + "ownable": true, + "schemas": [{ + "$id": "person", + "$ref": "user", + "title": "Person", + "dependencies": { + "creating": ["first_name", "last_name"] + }, + "properties": { + "first_name": { "type": "string", "minLength": 1, "title": "First Name" }, + "last_name": { "type": "string", "minLength": 1, "title": "Last Name" }, + "phone_numbers": { + "type": "array", + "items": { + "type": "string", + "pattern": "^\\+?[0-9\\s\\-\\(\\)]+$" + } + } + }, + "if": { + "properties": { "type": { "const": "person" } } + }, + "then": { + "required": ["first_name", "last_name"] + } + }] + } + ]); + + // Create comprehensive puncs data covering all test scenarios + let puncs = json!([ + { + "name": "basic_validation_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "age": { "type": "integer", "minimum": 0 } + }, + "required": ["name", "age"] + }, + "response": null + }, + { + "name": "oneof_test", + "public": false, + "schemas": [], + "request": { + "oneOf": [ + { + "type": "object", + "properties": { + "string_prop": { "type": "string", "maxLength": 5 } + }, + "required": ["string_prop"] + }, + { + "type": "object", + "properties": { + "number_prop": { "type": "number", "minimum": 10 } + }, + "required": ["number_prop"] + } + ] + }, + "response": null + }, + { + "name": "strict_test", + "public": true, + "schemas": [], + 
"request": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "profile": { + "type": "object", + "properties": { + "age": { "type": "number" }, + "preferences": { + "type": "object", + "properties": { + "theme": { "type": "string" } + } + } + } + }, + "tags": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "value": { "type": "string" } + } + } + } + } + }, + "response": null + }, + { + "name": "format_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "uuid": { "type": "string", "format": "uuid" }, + "date_time": { "type": "string", "format": "date-time" }, + "email": { "type": "string", "format": "email" } + } + }, + "response": null + }, + { + "name": "detailed_errors_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "address": { + "type": "object", + "properties": { + "street": { "type": "string" }, + "city": { "type": "string", "maxLength": 10 } + }, + "required": ["street", "city"] + } + }, + "required": ["address"] + }, + "response": null + }, + { + "name": "additional_props_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "age": { "type": "number" } + }, + "additionalProperties": false + }, + "response": null + }, + { + "name": "array_test", + "public": false, + "schemas": [], + "request": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string", "format": "uuid" } + } + } + }, + "response": null + }, + { + "name": "dependency_split_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "creating": { "type": "boolean" }, + "name": { "type": "string" }, + "kind": { "type": "string" }, + "description": { "type": "string" } + }, + "dependencies": { + "creating": ["name", "kind"] + } + }, + "response": null + }, + { + "name": 
"nested_dep_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "items": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "creating": { "type": "boolean" }, + "name": { "type": "string" }, + "kind": { "type": "string" } + }, + "required": ["id"], + "dependencies": { + "creating": ["name", "kind"] + } + } + } + }, + "required": ["items"] + }, + "response": null + }, + { + "name": "nested_additional_props_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "user": { + "type": "object", + "properties": { + "name": { "type": "string" } + }, + "additionalProperties": false + } + } + }, + "response": null + }, + { + "name": "unevaluated_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "age": { "type": "number" } + }, + "patternProperties": { + "^attr_": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "response": null + }, + { + "name": "complex_unevaluated_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "allOf": [ + { + "properties": { + "firstName": { "type": "string" } + } + }, + { + "properties": { + "lastName": { "type": "string" } + } + } + ], + "properties": { + "age": { "type": "number" } + }, + "unevaluatedProperties": false + }, + "response": null + }, + { + "name": "non_strict_test", + "public": false, + "schemas": [], + "request": { + "type": "object", + "properties": { + "name": { "type": "string" }, + "profile": { + "type": "object", + "properties": { + "age": { "type": "number" }, + "preferences": { + "type": "object", + "properties": { + "theme": { "type": "string" } + } + } + } + }, + "tags": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "value": { "type": "string" } + } + } + } + } + }, + "response": null + }, + { + "name": 
"permissive_test", + "public": true, + "schemas": [], + "request": { + "type": "object", + "properties": { + "name": { "type": "string" } + }, + "additionalProperties": true + }, + "response": null + }, + { + "name": "conditional_strict_test", + "public": true, + "schemas": [], + "request": { + "type": "object", + "properties": { + "kind": { "type": "string", "enum": ["checking", "savings"] }, + "creating": { "type": "boolean" } + }, + "if": { + "properties": { + "creating": { "const": true } + } + }, + "then": { + "properties": { + "account_number": { + "type": "string", + "pattern": "^[0-9]{4,17}$" + }, + "routing_number": { + "type": "string", + "pattern": "^[0-9]{9}$" + } + }, + "required": ["account_number", "routing_number"] + } + }, + "response": null + }, + { + "name": "ref_inheritance_test", + "public": false, + "schemas": [], + "request": { + "$ref": "person" + }, + "response": { + "$ref": "user" + } + }, + { + "name": "ref_with_local_test", + "public": false, + "schemas": [ + { + "$id": "profile", + "type": "object", + "properties": { + "bio": { "type": "string", "maxLength": 500 }, + "owner": { "$ref": "person" } + }, + "required": ["bio", "owner"] + } + ], + "request": { + "type": "object", + "properties": { + "profile_data": { "$ref": "profile" }, + "metadata": { "type": "object" } + }, + "required": ["profile_data"] + }, + "response": { + "type": "object", + "properties": { + "created_profile": { "$ref": "profile" }, + "owner_details": { "$ref": "person" } + } + } + }, + { + "name": "ref_recursive_test", + "public": false, + "schemas": [ + { + "$id": "nested_ref", + "type": "object", + "properties": { + "user_info": { "$ref": "user" }, + "person_info": { "$ref": "person" } + } + } + ], + "request": { + "$ref": "nested_ref" + }, + "response": { + "$ref": "person" + } + }, + { + "name": "ref_local_to_type_test", + "public": false, + "schemas": [ + { + "$id": "task_request", + "type": "object", + "properties": { + "title": { "type": "string", 
"minLength": 1 }, + "assignee": { "$ref": "person" }, + "settings": { "$ref": "task_settings" } + }, + "required": ["title", "assignee"] + }, + { + "$id": "task_settings", + "type": "object", + "properties": { + "priority": { "type": "string", "enum": ["low", "medium", "high"] }, + "due_date": { "type": "string", "format": "date-time" }, + "reviewer": { "$ref": "user" } + } + } + ], + "request": { + "$ref": "task_request" + }, + "response": { + "type": "object", + "properties": { + "task_id": { "type": "string", "format": "uuid" }, + "created_task": { "$ref": "task_request" }, + "assignee_info": { "$ref": "person" } + } + } + }, + { + "name": "ref_title_override_test", + "public": false, + "schemas": [ + { + "$id": "special_user", + "$ref": "user", + "title": "Special User Override", + "properties": { + "special_access": { "type": "boolean" } + } + } + ], + "request": { + "$ref": "special_user" + }, + "response": { + "type": "object", + "properties": { + "updated_user": { "$ref": "special_user" }, + "base_organization": { "$ref": "organization" } + } + } + } + ]); + + cache_json_schemas(jsonb(types), jsonb(puncs)) +} + #[pg_test] fn test_cache_and_validate_json_schema() { - clear_json_schemas(); // Call clear directly - let schema_id = "my_schema"; - let schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" }, - "age": { "type": "integer", "minimum": 0 } - }, - "required": ["name", "age"] - }); + // Use comprehensive schema setup that covers all scenarios + let cache_result = setup_comprehensive_schemas(); + assert_success(&cache_result); + + // Test the basic validation schema let valid_instance = json!({ "name": "Alice", "age": 30 }); let invalid_instance_type = json!({ "name": "Bob", "age": -5 }); let invalid_instance_missing = json!({ "name": "Charlie" }); - let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false); - assert_success_with_json!(cache_result, "Cache operation should succeed."); - - let 
valid_result = validate_json_schema(schema_id, jsonb(valid_instance)); - assert_success_with_json!(valid_result, "Validation of valid instance should succeed."); + let valid_result = validate_json_schema("basic_validation_test.request", jsonb(valid_instance)); + assert_success(&valid_result); // Invalid type - age is negative - let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type)); - assert_failure_with_json!(invalid_result_type, 1, "Value must be at least 0, but got -5", "Validation with invalid type should fail."); - let errors_type = invalid_result_type.0["errors"].as_array().unwrap(); - assert_eq!(errors_type[0]["details"]["path"], "/age"); - assert_eq!(errors_type[0]["details"]["context"], -5); - assert_eq!(errors_type[0]["details"]["schema"], "my_schema"); - assert_eq!(errors_type[0]["code"], "MINIMUM_VIOLATED"); - // Check the cause is now a JSON object - let cause_type = &errors_type[0]["details"]["cause"]; - assert!(cause_type.is_object()); - assert_eq!(cause_type["got"], -5); - assert_eq!(cause_type["want"], 0); + let invalid_result_type = validate_json_schema("basic_validation_test.request", jsonb(invalid_instance_type)); + assert_error_count(&invalid_result_type, 1); + + let error = find_error_with_code_and_path(&invalid_result_type, "MINIMUM_VIOLATED", "/age"); + assert_error_detail(error, "schema", "basic_validation_test.request"); + assert_error_context(error, &json!(-5)); + assert_error_cause_json(error, &json!({"got": -5, "want": 0})); + assert_error_message_contains(error, "Value must be at least 0, but got -5"); // Missing field - let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing)); - assert_failure_with_json!(invalid_result_missing, 1, "Required field 'age' is missing", "Validation with missing field should fail."); - let errors_missing = invalid_result_missing.0["errors"].as_array().unwrap(); - assert_eq!(errors_missing[0]["details"]["path"], "/age"); - 
assert_eq!(errors_missing[0]["details"]["schema"], "my_schema"); - assert_eq!(errors_missing[0]["code"], "REQUIRED_FIELD_MISSING"); - // Check the cause is now a JSON object - let cause_missing = &errors_missing[0]["details"]["cause"]; - assert!(cause_missing.is_object()); - assert_eq!(cause_missing["want"], json!(["age"])); + let invalid_result_missing = validate_json_schema("basic_validation_test.request", jsonb(invalid_instance_missing)); + assert_error_count(&invalid_result_missing, 1); + + let missing_error = find_error_with_code_and_path(&invalid_result_missing, "REQUIRED_FIELD_MISSING", "/age"); + assert_error_detail(missing_error, "schema", "basic_validation_test.request"); + assert_error_cause_json(missing_error, &json!({"want": ["age"]})); + assert_error_message_contains(missing_error, "Required field 'age' is missing"); // Schema not found - let non_existent_id = "non_existent_schema"; + let non_existent_id = "non_existent_schema.request"; let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({}))); - assert_failure_with_json!(invalid_schema_result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail."); - let errors_notfound = invalid_schema_result.0["errors"].as_array().unwrap(); - assert_eq!(errors_notfound[0]["code"], "SCHEMA_NOT_FOUND"); - assert_eq!(errors_notfound[0]["details"]["schema"], "non_existent_schema"); + assert_error_count(&invalid_schema_result, 1); + + let not_found_error = find_error_with_code(&invalid_schema_result, "SCHEMA_NOT_FOUND"); + assert_error_detail(not_found_error, "schema", "non_existent_schema.request"); // Schema not found still has string cause (it's not from ErrorKind) - assert_eq!(errors_notfound[0]["details"]["cause"], "Schema must be cached before validation"); + assert_eq!(not_found_error["details"]["cause"], "Schema was not found in bulk cache - ensure cache_json_schemas was called"); } #[pg_test] @@ -191,350 +767,217 @@ fn 
test_validate_json_schema_not_cached() { clear_json_schemas(); let instance = json!({ "foo": "bar" }); let result = validate_json_schema("non_existent_schema", jsonb(instance)); - assert_failure_with_json!(result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail."); + assert_error_count(&result, 1); + let error = find_error_with_code(&result, "SCHEMA_NOT_FOUND"); + assert_error_message_contains(error, "Schema 'non_existent_schema' not found"); } #[pg_test] fn test_cache_invalid_json_schema() { clear_json_schemas(); - let schema_id = "invalid_schema"; - // Schema with an invalid type *value* - let invalid_schema = json!({ - "$id": "urn:invalid_schema", - "type": ["invalid_type_value"] - }); + + // Test with invalid schema using the new bulk caching approach + let types = json!([]); + let puncs = json!([ + { + "name": "invalid_punc", + "public": false, + "schemas": [], + "request": { + "$id": "urn:invalid_schema", + "type": ["invalid_type_value"] + }, + "response": null + } + ]); - let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema), false); + let cache_result = cache_json_schemas(jsonb(types), jsonb(puncs)); - // Expect 2 leaf errors because the meta-schema validation fails at the type value - // and within the type array itself. - assert_failure_with_json!( - cache_result, - 2, // Expect exactly two leaf errors - "Value must be one of", // Updated to human-readable message - "Caching invalid schema should fail with specific meta-schema validation errors." 
- ); - - // Ensure the errors array exists and check specifics - let errors_array = cache_result.0["errors"].as_array().expect("Errors field should be an array"); - assert_eq!(errors_array.len(), 2); - // Both errors should have ENUM_VIOLATED code - assert_eq!(errors_array[0]["code"], "ENUM_VIOLATED"); - assert_eq!(errors_array[1]["code"], "ENUM_VIOLATED"); - // Check instance paths are preserved in path field - let paths: Vec<&str> = errors_array.iter() - .map(|e| e["details"]["path"].as_str().unwrap()) - .collect(); - assert!(paths.contains(&"/type")); - assert!(paths.contains(&"/type/0")); - // Check schema field is present - assert_eq!(errors_array[0]["details"]["schema"], "invalid_schema"); - assert_eq!(errors_array[1]["details"]["schema"], "invalid_schema"); - // Check that cause is now a JSON object with want array - for error in errors_array { - let cause = &error["details"]["cause"]; - assert!(cause.is_object()); - assert!(cause["want"].is_array()); - } + // Should fail due to invalid schema in the request + // Bulk caching produces both detailed meta-schema validation errors and a high-level wrapper error + assert_error_count(&cache_result, 3); // 2 detailed meta-schema errors + 1 high-level wrapper + + // Check the high-level wrapper error + let wrapper_error = find_error_with_code(&cache_result, "PUNC_REQUEST_SCHEMA_CACHE_FAILED"); + assert_error_detail(wrapper_error, "punc_name", "invalid_punc"); + assert_error_detail(wrapper_error, "schema_id", "invalid_punc.request"); + + // Should also have detailed meta-schema validation errors + assert!(has_error_with_code(&cache_result, "ENUM_VIOLATED"), + "Should have ENUM_VIOLATED errors"); } #[pg_test] fn test_validate_json_schema_detailed_validation_errors() { - clear_json_schemas(); // Call clear directly - let schema_id = "detailed_errors"; - let schema = json!({ - "type": "object", - "properties": { - "address": { - "type": "object", - "properties": { - "street": { "type": "string" }, - "city": { "type": 
"string", "maxLength": 10 } - }, - "required": ["street", "city"] - } - }, - "required": ["address"] - }); - let _ = cache_json_schema(schema_id, jsonb(schema), false); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); let invalid_instance = json!({ "address": { "street": 123, // Wrong type - "city": "Supercalifragilisticexpialidocious" // Too long + "city": "Supercalifragilisticexpialidocious" // Too long (maxLength: 10) } }); - let result = validate_json_schema(schema_id, jsonb(invalid_instance)); - - // Update: Expect 2 errors again, as boon reports both nested errors. - assert_failure_with_json!(result, 2); + let result = validate_json_schema("detailed_errors_test.request", jsonb(invalid_instance)); + // Expect 2 errors: one for type mismatch, one for maxLength violation + assert_error_count(&result, 2); + assert_has_error(&result, "TYPE_MISMATCH", "/address/street"); + assert_has_error(&result, "MAX_LENGTH_VIOLATED", "/address/city"); } #[pg_test] fn test_validate_json_schema_oneof_validation_errors() { - clear_json_schemas(); // Call clear directly - let schema_id = "oneof_schema"; - let schema = json!({ - "oneOf": [ - { // Option 1: Object with string prop - "type": "object", - "properties": { - "string_prop": { "type": "string", "maxLength": 5 } - }, - "required": ["string_prop"] - }, - { // Option 2: Object with number prop - "type": "object", - "properties": { - "number_prop": { "type": "number", "minimum": 10 } - }, - "required": ["number_prop"] - } - ] - }); - - let _ = cache_json_schema(schema_id, jsonb(schema), false); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) --- let invalid_string_instance = json!({ "string_prop": "toolongstring" }); - let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance)); - // Expect 2 leaf errors. Check count only with the macro. 
- assert_failure_with_json!(result_invalid_string, 2); - // Explicitly check that both expected errors are present, ignoring order - let errors_string = result_invalid_string.0["errors"].as_array().expect("Expected error array for invalid string"); - assert!(errors_string.iter().any(|e| - e["details"]["path"] == "/string_prop" && - e["code"] == "MAX_LENGTH_VIOLATED" && - e["details"]["schema"] == "oneof_schema" - ), "Missing maxLength error"); - assert!(errors_string.iter().any(|e| - e["details"]["path"] == "/number_prop" && - e["code"] == "REQUIRED_FIELD_MISSING" && - e["details"]["schema"] == "oneof_schema" - ), "Missing number_prop required error"); + let result_invalid_string = validate_json_schema("oneof_test.request", jsonb(invalid_string_instance)); + assert_error_count(&result_invalid_string, 2); + assert_has_error(&result_invalid_string, "MAX_LENGTH_VIOLATED", "/string_prop"); + assert_has_error(&result_invalid_string, "REQUIRED_FIELD_MISSING", "/number_prop"); // --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) --- let invalid_number_instance = json!({ "number_prop": 5 }); - let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance)); - // Expect 2 leaf errors. Check count only with the macro. 
- assert_failure_with_json!(result_invalid_number, 2); - // Explicitly check that both expected errors are present, ignoring order - let errors_number = result_invalid_number.0["errors"].as_array().expect("Expected error array for invalid number"); - assert!(errors_number.iter().any(|e| - e["details"]["path"] == "/number_prop" && - e["code"] == "MINIMUM_VIOLATED" && - e["details"]["schema"] == "oneof_schema" - ), "Missing minimum error"); - assert!(errors_number.iter().any(|e| - e["details"]["path"] == "/string_prop" && - e["code"] == "REQUIRED_FIELD_MISSING" && - e["details"]["schema"] == "oneof_schema" - ), "Missing string_prop required error"); + let result_invalid_number = validate_json_schema("oneof_test.request", jsonb(invalid_number_instance)); + assert_error_count(&result_invalid_number, 2); + assert_has_error(&result_invalid_number, "MINIMUM_VIOLATED", "/number_prop"); + assert_has_error(&result_invalid_number, "REQUIRED_FIELD_MISSING", "/string_prop"); // --- Test case 3: Fails type check (not object) for both branches --- // Input: boolean, expected object for both branches let invalid_bool_instance = json!(true); // Not an object - let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance)); + let result_invalid_bool = validate_json_schema("oneof_test.request", jsonb(invalid_bool_instance)); // Expect only 1 leaf error after filtering, as both original errors have instance_path "" - assert_failure_with_json!(result_invalid_bool, 1); - // Explicitly check that the single remaining error is the type error for the root instance path - let errors_bool = result_invalid_bool.0["errors"].as_array().expect("Expected error array for invalid bool"); - assert_eq!(errors_bool.len(), 1, "Expected exactly one error after deduplication"); - assert_eq!(errors_bool[0]["code"], "TYPE_MISMATCH"); - assert_eq!(errors_bool[0]["details"]["path"], ""); - assert_eq!(errors_bool[0]["details"]["schema"], "oneof_schema"); + 
assert_error_count(&result_invalid_bool, 1); + let error = find_error_with_code_and_path(&result_invalid_bool, "TYPE_MISMATCH", ""); + assert_error_detail(error, "schema", "oneof_test.request"); // --- Test case 4: Fails missing required for both branches --- // Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1) let invalid_empty_obj = json!({}); - let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj)); + let result_empty_obj = validate_json_schema("oneof_test.request", jsonb(invalid_empty_obj)); // Now we expect 2 errors because required fields are split into individual errors - assert_failure_with_json!(result_empty_obj, 2); - let errors_empty = result_empty_obj.0["errors"].as_array().unwrap(); - assert_eq!(errors_empty.len(), 2, "Expected two errors for missing required fields"); - - // Check that we have errors for both missing fields - assert!(errors_empty.iter().any(|e| - e["details"]["path"] == "/string_prop" && - e["code"] == "REQUIRED_FIELD_MISSING" && - e["details"]["schema"] == "oneof_schema" - ), "Missing string_prop required error"); - - assert!(errors_empty.iter().any(|e| - e["details"]["path"] == "/number_prop" && - e["code"] == "REQUIRED_FIELD_MISSING" && - e["details"]["schema"] == "oneof_schema" - ), "Missing number_prop required error"); + assert_error_count(&result_empty_obj, 2); + assert_has_error(&result_empty_obj, "REQUIRED_FIELD_MISSING", "/string_prop"); + assert_has_error(&result_empty_obj, "REQUIRED_FIELD_MISSING", "/number_prop"); } #[pg_test] fn test_clear_json_schemas() { let clear_result = clear_json_schemas(); - assert_success_with_json!(clear_result); + assert_success(&clear_result); - let schema_id = "schema_to_clear"; - let schema = json!({ "type": "string" }); - let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false); - assert_success_with_json!(cache_result); + // Use bulk caching to add schemas + let types = json!([]); + let puncs = json!([ + { + 
"name": "test_punc", + "public": false, + "schemas": [], + "request": { "type": "string" }, + "response": null + } + ]); + + let cache_result = cache_json_schemas(jsonb(types), jsonb(puncs)); + assert_success(&cache_result); let show_result1 = show_json_schemas(); - let schemas1 = show_result1.0["response"].as_array().unwrap(); - assert!(schemas1.contains(&json!(schema_id))); + assert_contains_schema(&show_result1, "test_punc.request"); let clear_result2 = clear_json_schemas(); - assert_success_with_json!(clear_result2); + assert_success(&clear_result2); let show_result2 = show_json_schemas(); - let schemas2 = show_result2.0["response"].as_array().unwrap(); - assert!(schemas2.is_empty()); + assert_response_empty(&show_result2); let instance = json!("test"); - let validate_result = validate_json_schema(schema_id, jsonb(instance)); - assert_failure_with_json!(validate_result, 1, "Schema 'schema_to_clear' not found", "Validation should fail after clearing schemas."); + let validate_result = validate_json_schema("test_punc.request", jsonb(instance)); + assert_error_count(&validate_result, 1); + let error = find_error_with_code(&validate_result, "SCHEMA_NOT_FOUND"); + assert_error_message_contains(error, "Schema 'test_punc.request' not found"); } #[pg_test] fn test_show_json_schemas() { let _ = clear_json_schemas(); - let schema_id1 = "schema1"; - let schema_id2 = "schema2"; - let schema = json!({ "type": "boolean" }); - - let _ = cache_json_schema(schema_id1, jsonb(schema.clone()), false); - let _ = cache_json_schema(schema_id2, jsonb(schema.clone()), false); + + // Use bulk caching to add multiple schemas + let types = json!([]); + let puncs = json!([ + { + "name": "punc1", + "public": false, + "schemas": [], + "request": { "type": "boolean" }, + "response": null + }, + { + "name": "punc2", + "public": false, + "schemas": [], + "request": { "type": "string" }, + "response": null + } + ]); + + let _ = cache_json_schemas(jsonb(types), jsonb(puncs)); let result = 
show_json_schemas(); - let schemas = result.0["response"].as_array().unwrap(); - assert_eq!(schemas.len(), 2); - assert!(schemas.contains(&json!(schema_id1))); - assert!(schemas.contains(&json!(schema_id2))); + assert_response_schema_count(&result, 2); + assert_contains_schema(&result, "punc1.request"); + assert_contains_schema(&result, "punc2.request"); } #[pg_test] fn test_root_level_type_mismatch() { - clear_json_schemas(); - let schema_id = "array_schema"; + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); - // Schema expecting an array (like delete_tokens response) - let schema = json!({ - "type": "array", - "items": { - "type": "object", - "properties": { - "id": { "type": "string", "format": "uuid" } - } - } - }); - - let cache_result = cache_json_schema(schema_id, jsonb(schema), false); - assert_success_with_json!(cache_result, "Schema caching should succeed"); - - // Test 1: Validate null against array schema (simulating delete_tokens issue) + // Test 1: Validate null against array schema (using array_test from comprehensive setup) let null_instance = json!(null); - let null_result = validate_json_schema(schema_id, jsonb(null_instance)); - assert_failure_with_json!(null_result, 1, "Expected array but got null"); - let null_errors = null_result.0["errors"].as_array().unwrap(); - assert_eq!(null_errors[0]["code"], "TYPE_MISMATCH"); - assert_eq!(null_errors[0]["details"]["path"], ""); // Root level path should be empty string - assert_eq!(null_errors[0]["details"]["context"], json!(null)); - assert_eq!(null_errors[0]["details"]["schema"], "array_schema"); - // Check cause is now a JSON object - let cause_null = &null_errors[0]["details"]["cause"]; - assert!(cause_null.is_object()); - assert_eq!(cause_null["got"], "null"); - assert_eq!(cause_null["want"], json!(["array"])); + let null_result = validate_json_schema("array_test.request", jsonb(null_instance)); + assert_error_count(&null_result, 1); + let null_error = 
find_error_with_code_and_path(&null_result, "TYPE_MISMATCH", ""); + assert_error_detail(null_error, "schema", "array_test.request"); + assert_error_context(null_error, &json!(null)); + assert_error_cause_json(null_error, &json!({"got": "null", "want": ["array"]})); + assert_error_message_contains(null_error, "Expected array but got null"); // Test 2: Validate object against array schema let object_instance = json!({"id": "not-an-array"}); - let object_result = validate_json_schema(schema_id, jsonb(object_instance.clone())); - assert_failure_with_json!(object_result, 1, "Expected array but got object"); - let object_errors = object_result.0["errors"].as_array().unwrap(); - assert_eq!(object_errors[0]["code"], "TYPE_MISMATCH"); - assert_eq!(object_errors[0]["details"]["path"], ""); // Root level path should be empty string - assert_eq!(object_errors[0]["details"]["context"], object_instance); - assert_eq!(object_errors[0]["details"]["schema"], "array_schema"); - // Check cause is now a JSON object - let cause_object = &object_errors[0]["details"]["cause"]; - assert!(cause_object.is_object()); - assert_eq!(cause_object["got"], "object"); - assert_eq!(cause_object["want"], json!(["array"])); + let object_result = validate_json_schema("array_test.request", jsonb(object_instance.clone())); + assert_error_count(&object_result, 1); + let object_error = find_error_with_code_and_path(&object_result, "TYPE_MISMATCH", ""); + assert_error_detail(object_error, "schema", "array_test.request"); + assert_error_context(object_error, &object_instance); + assert_error_cause_json(object_error, &json!({"got": "object", "want": ["array"]})); + assert_error_message_contains(object_error, "Expected array but got object"); // Test 3: Valid empty array let valid_empty = json!([]); - let valid_result = validate_json_schema(schema_id, jsonb(valid_empty)); - assert_success_with_json!(valid_result, "Empty array should be valid"); + let valid_result = validate_json_schema("array_test.request", 
jsonb(valid_empty)); + assert_success(&valid_result); - // Test 4: Schema expecting object at root - let object_schema_id = "object_schema"; - let object_schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" } - } - }); - - let _ = cache_json_schema(object_schema_id, jsonb(object_schema), false); - - // String at root when object expected + // Test 4: String at root when object expected (using basic_validation_test) let string_instance = json!("not an object"); - let string_result = validate_json_schema(object_schema_id, jsonb(string_instance)); - assert_failure_with_json!(string_result, 1, "Expected object but got string"); - let string_errors = string_result.0["errors"].as_array().unwrap(); - assert_eq!(string_errors[0]["code"], "TYPE_MISMATCH"); - assert_eq!(string_errors[0]["details"]["path"], ""); // Root level path - assert_eq!(string_errors[0]["details"]["schema"], "object_schema"); - assert_eq!(string_errors[0]["details"]["context"], json!("not an object")); - // Check cause is now a JSON object - let cause_string = &string_errors[0]["details"]["cause"]; - assert!(cause_string.is_object()); - assert_eq!(cause_string["got"], "string"); - assert_eq!(cause_string["want"], json!(["object"])); + let string_result = validate_json_schema("basic_validation_test.request", jsonb(string_instance)); + assert_error_count(&string_result, 1); + let string_error = find_error_with_code_and_path(&string_result, "TYPE_MISMATCH", ""); + assert_error_detail(string_error, "schema", "basic_validation_test.request"); + assert_error_context(string_error, &json!("not an object")); + assert_error_cause_json(string_error, &json!({"got": "string", "want": ["object"]})); + assert_error_message_contains(string_error, "Expected object but got string"); } #[pg_test] fn test_auto_strict_validation() { - clear_json_schemas(); - let schema_id = "strict_test"; - let schema_id_non_strict = "non_strict_test"; - - // Schema without explicit additionalProperties: 
false - let schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" }, - "profile": { - "type": "object", - "properties": { - "age": { "type": "number" }, - "preferences": { - "type": "object", - "properties": { - "theme": { "type": "string" } - } - } - } - }, - "tags": { - "type": "array", - "items": { - "type": "object", - "properties": { - "id": { "type": "string" }, - "value": { "type": "string" } - } - } - } - } - }); - - // Cache the same schema twice - once with strict=true, once with strict=false - let cache_result_strict = cache_json_schema(schema_id, jsonb(schema.clone()), true); - assert_success_with_json!(cache_result_strict, "Schema caching with strict=true should succeed"); - - let cache_result_non_strict = cache_json_schema(schema_id_non_strict, jsonb(schema.clone()), false); - assert_success_with_json!(cache_result_non_strict, "Schema caching with strict=false should succeed"); + // Use comprehensive schema setup which includes all necessary schemas + let cache_result = setup_comprehensive_schemas(); + assert_success(&cache_result); // Test 1: Valid instance with no extra properties (should pass for both) let valid_instance = json!({ @@ -551,11 +994,11 @@ fn test_auto_strict_validation() { ] }); - let valid_result_strict = validate_json_schema(schema_id, jsonb(valid_instance.clone())); - assert_success_with_json!(valid_result_strict, "Valid instance should pass with strict schema"); + let valid_result_strict = validate_json_schema("strict_test.request", jsonb(valid_instance.clone())); + assert_success(&valid_result_strict); - let valid_result_non_strict = validate_json_schema(schema_id_non_strict, jsonb(valid_instance)); - assert_success_with_json!(valid_result_non_strict, "Valid instance should pass with non-strict schema"); + let valid_result_non_strict = validate_json_schema("non_strict_test.request", jsonb(valid_instance)); + assert_success(&valid_result_non_strict); // Test 2: Root level extra property let 
invalid_root_extra = json!({ @@ -564,20 +1007,16 @@ fn test_auto_strict_validation() { }); // Should fail with strict schema - let result_root_strict = validate_json_schema(schema_id, jsonb(invalid_root_extra.clone())); - assert_failure_with_json!(result_root_strict, 1, "This schema always fails validation"); - let errors_root = result_root_strict.0["errors"].as_array().unwrap(); - assert_eq!(errors_root[0]["code"], "FALSE_SCHEMA"); - assert_eq!(errors_root[0]["details"]["path"], "/extraField"); - assert_eq!(errors_root[0]["details"]["schema"], "strict_test"); - // Check cause is now a JSON object (empty for FalseSchema) - let cause_root = &errors_root[0]["details"]["cause"]; - assert!(cause_root.is_object()); - assert_eq!(cause_root, &json!({})); + let result_root_strict = validate_json_schema("strict_test.request", jsonb(invalid_root_extra.clone())); + assert_error_count(&result_root_strict, 1); + let error = find_error_with_code_and_path(&result_root_strict, "FALSE_SCHEMA", "/extraField"); + assert_error_detail(error, "schema", "strict_test.request"); + assert_error_cause_json(error, &json!({})); // Empty for FalseSchema + assert_error_message_contains(error, "This schema always fails validation"); // Should pass with non-strict schema - let result_root_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_root_extra)); - assert_success_with_json!(result_root_non_strict, "Extra property should be allowed with non-strict schema"); + let result_root_non_strict = validate_json_schema("non_strict_test.request", jsonb(invalid_root_extra)); + assert_success(&result_root_non_strict); // Test 3: Nested object extra property let invalid_nested_extra = json!({ @@ -589,16 +1028,14 @@ fn test_auto_strict_validation() { }); // Should fail with strict schema - let result_nested_strict = validate_json_schema(schema_id, jsonb(invalid_nested_extra.clone())); - assert_failure_with_json!(result_nested_strict, 1, "This schema always fails validation"); - let 
errors_nested = result_nested_strict.0["errors"].as_array().unwrap(); - assert_eq!(errors_nested[0]["code"], "FALSE_SCHEMA"); - assert_eq!(errors_nested[0]["details"]["path"], "/profile/extraNested"); - assert_eq!(errors_nested[0]["details"]["schema"], "strict_test"); + let result_nested_strict = validate_json_schema("strict_test.request", jsonb(invalid_nested_extra.clone())); + assert_error_count(&result_nested_strict, 1); + let nested_error = find_error_with_code_and_path(&result_nested_strict, "FALSE_SCHEMA", "/profile/extraNested"); + assert_error_detail(nested_error, "schema", "strict_test.request"); // Should pass with non-strict schema - let result_nested_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_nested_extra)); - assert_success_with_json!(result_nested_non_strict, "Extra nested property should be allowed with non-strict schema"); + let result_nested_non_strict = validate_json_schema("non_strict_test.request", jsonb(invalid_nested_extra)); + assert_success(&result_nested_non_strict); // Test 4: Deeply nested object extra property let invalid_deep_extra = json!({ @@ -613,16 +1050,14 @@ fn test_auto_strict_validation() { }); // Should fail with strict schema - let result_deep_strict = validate_json_schema(schema_id, jsonb(invalid_deep_extra.clone())); - assert_failure_with_json!(result_deep_strict, 1, "This schema always fails validation"); - let errors_deep = result_deep_strict.0["errors"].as_array().unwrap(); - assert_eq!(errors_deep[0]["code"], "FALSE_SCHEMA"); - assert_eq!(errors_deep[0]["details"]["path"], "/profile/preferences/extraDeep"); - assert_eq!(errors_deep[0]["details"]["schema"], "strict_test"); + let result_deep_strict = validate_json_schema("strict_test.request", jsonb(invalid_deep_extra.clone())); + assert_error_count(&result_deep_strict, 1); + let deep_error = find_error_with_code_and_path(&result_deep_strict, "FALSE_SCHEMA", "/profile/preferences/extraDeep"); + assert_error_detail(deep_error, "schema", 
"strict_test.request"); // Should pass with non-strict schema - let result_deep_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_deep_extra)); - assert_success_with_json!(result_deep_non_strict, "Extra deep property should be allowed with non-strict schema"); + let result_deep_non_strict = validate_json_schema("non_strict_test.request", jsonb(invalid_deep_extra)); + assert_success(&result_deep_non_strict); // Test 5: Array item extra property let invalid_array_item_extra = json!({ @@ -633,66 +1068,28 @@ fn test_auto_strict_validation() { }); // Should fail with strict schema - let result_array_strict = validate_json_schema(schema_id, jsonb(invalid_array_item_extra.clone())); - assert_failure_with_json!(result_array_strict, 1, "This schema always fails validation"); - let errors_array = result_array_strict.0["errors"].as_array().unwrap(); - assert_eq!(errors_array[0]["code"], "FALSE_SCHEMA"); - assert_eq!(errors_array[0]["details"]["path"], "/tags/0/extraInArray"); - assert_eq!(errors_array[0]["details"]["schema"], "strict_test"); + let result_array_strict = validate_json_schema("strict_test.request", jsonb(invalid_array_item_extra.clone())); + assert_error_count(&result_array_strict, 1); + let array_error = find_error_with_code_and_path(&result_array_strict, "FALSE_SCHEMA", "/tags/0/extraInArray"); + assert_error_detail(array_error, "schema", "strict_test.request"); // Should pass with non-strict schema - let result_array_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_array_item_extra)); - assert_success_with_json!(result_array_non_strict, "Extra array item property should be allowed with non-strict schema"); + let result_array_non_strict = validate_json_schema("non_strict_test.request", jsonb(invalid_array_item_extra)); + assert_success(&result_array_non_strict); // Test 6: Schema with explicit additionalProperties: true should allow extras even with strict=true - let schema_id_permissive = "permissive_test"; - let 
permissive_schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "additionalProperties": true // Explicitly allow additional properties - }); - - let _ = cache_json_schema(schema_id_permissive, jsonb(permissive_schema), true); // Note: strict=true + // (permissive_test is already in comprehensive setup) let instance_with_extra = json!({ "name": "John", "extraAllowed": "should pass" }); - let result_permissive = validate_json_schema(schema_id_permissive, jsonb(instance_with_extra)); - assert_success_with_json!(result_permissive, "Instance with extra property should pass when additionalProperties is explicitly true, even with strict=true"); + let result_permissive = validate_json_schema("permissive_test.request", jsonb(instance_with_extra)); + assert_success(&result_permissive); // Test 7: Schema with conditionals (if/then/else) should NOT add unevaluatedProperties to conditional branches - let schema_id_conditional = "conditional_strict_test"; - let conditional_schema = json!({ - "type": "object", - "properties": { - "kind": { "type": "string", "enum": ["checking", "savings"] }, - "creating": { "type": "boolean" } - }, - "if": { - "properties": { - "creating": { "const": true } - } - }, - "then": { - "properties": { - "account_number": { - "type": "string", - "pattern": "^[0-9]{4,17}$" - }, - "routing_number": { - "type": "string", - "pattern": "^[0-9]{9}$" - } - }, - "required": ["account_number", "routing_number"] - } - }); - - let _ = cache_json_schema(schema_id_conditional, jsonb(conditional_schema), true); // strict=true + // (conditional_strict_test is already in comprehensive setup) // Valid data with properties from both main schema and then clause let valid_conditional = json!({ @@ -702,8 +1099,8 @@ fn test_auto_strict_validation() { "routing_number": "123456789" }); - let result_conditional = validate_json_schema(schema_id_conditional, jsonb(valid_conditional)); - assert_success_with_json!(result_conditional, 
"Conditional properties should be recognized as evaluated"); + let result_conditional = validate_json_schema("conditional_strict_test.request", jsonb(valid_conditional)); + assert_success(&result_conditional); // Invalid: extra property not defined anywhere let invalid_conditional = json!({ @@ -714,11 +1111,10 @@ fn test_auto_strict_validation() { "extra": "not allowed" }); - let result_invalid_conditional = validate_json_schema(schema_id_conditional, jsonb(invalid_conditional)); - assert_failure_with_json!(result_invalid_conditional, 1, "This schema always fails validation"); - let errors_conditional = result_invalid_conditional.0["errors"].as_array().unwrap(); - assert_eq!(errors_conditional[0]["code"], "FALSE_SCHEMA"); - assert_eq!(errors_conditional[0]["details"]["path"], "/extra"); + let result_invalid_conditional = validate_json_schema("conditional_strict_test.request", jsonb(invalid_conditional)); + assert_error_count(&result_invalid_conditional, 1); + let conditional_error = find_error_with_code_and_path(&result_invalid_conditional, "FALSE_SCHEMA", "/extra"); + assert_error_message_contains(conditional_error, "This schema always fails validation"); // Test the specific edge case: pattern validation failure in a conditional branch // We filter out FALSE_SCHEMA errors when there are other validation errors @@ -729,14 +1125,12 @@ fn test_auto_strict_validation() { "routing_number": "123456789" // Valid, but would be unevaluated - filtered out }); - let result_pattern = validate_json_schema(schema_id_conditional, jsonb(pattern_failure)); - let pattern_errors = result_pattern.0["errors"].as_array().unwrap(); + let result_pattern = validate_json_schema("conditional_strict_test.request", jsonb(pattern_failure)); // We expect only 1 error: PATTERN_VIOLATED for account_number // FALSE_SCHEMA for routing_number is filtered out because there's a real validation error - assert_failure_with_json!(result_pattern, 1); - assert_eq!(pattern_errors[0]["code"], 
"PATTERN_VIOLATED"); - assert_eq!(pattern_errors[0]["details"]["path"], "/account_number"); + assert_error_count(&result_pattern, 1); + let _pattern_error = find_error_with_code_and_path(&result_pattern, "PATTERN_VIOLATED", "/account_number"); // Test case where both fields have pattern violations let both_pattern_failures = json!({ @@ -746,21 +1140,12 @@ fn test_auto_strict_validation() { "routing_number": "123" // Too short - will fail pattern validation }); - let result_both = validate_json_schema(schema_id_conditional, jsonb(both_pattern_failures)); - let both_errors = result_both.0["errors"].as_array().unwrap(); + let result_both = validate_json_schema("conditional_strict_test.request", jsonb(both_pattern_failures)); // We expect 2 errors: both PATTERN_VIOLATED - assert_failure_with_json!(result_both, 2); - - assert!(both_errors.iter().any(|e| - e["code"] == "PATTERN_VIOLATED" && - e["details"]["path"] == "/account_number" - ), "Should have pattern violation for account_number"); - - assert!(both_errors.iter().any(|e| - e["code"] == "PATTERN_VIOLATED" && - e["details"]["path"] == "/routing_number" - ), "Should have pattern violation for routing_number"); + assert_error_count(&result_both, 2); + assert_has_error(&result_both, "PATTERN_VIOLATED", "/account_number"); + assert_has_error(&result_both, "PATTERN_VIOLATED", "/routing_number"); // Test case where there are only FALSE_SCHEMA errors (no other validation errors) let only_false_schema = json!({ @@ -771,197 +1156,98 @@ fn test_auto_strict_validation() { "extra": "not allowed" // Will cause FALSE_SCHEMA }); - let result_only_false = validate_json_schema(schema_id_conditional, jsonb(only_false_schema)); - let only_false_errors = result_only_false.0["errors"].as_array().unwrap(); + let result_only_false = validate_json_schema("conditional_strict_test.request", jsonb(only_false_schema)); // We expect 1 FALSE_SCHEMA error since there are no other validation errors - assert_failure_with_json!(result_only_false, 
1); - assert_eq!(only_false_errors[0]["code"], "FALSE_SCHEMA"); - assert_eq!(only_false_errors[0]["details"]["path"], "/extra"); + assert_error_count(&result_only_false, 1); + let _false_error = find_error_with_code_and_path(&result_only_false, "FALSE_SCHEMA", "/extra"); } #[pg_test] fn test_required_fields_split_errors() { - clear_json_schemas(); - let schema_id = "required_split_test"; + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); - // Schema with multiple required fields - let schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" }, - "kind": { "type": "string" }, - "age": { "type": "number" } - }, - "required": ["name", "kind", "age"] - }); - - let cache_result = cache_json_schema(schema_id, jsonb(schema), false); - assert_success_with_json!(cache_result, "Schema caching should succeed"); - - // Test 1: Missing all required fields + // Test 1: Missing all required fields (using basic_validation_test which requires name and age) let empty_instance = json!({}); - let result = validate_json_schema(schema_id, jsonb(empty_instance)); + let result = validate_json_schema("basic_validation_test.request", jsonb(empty_instance)); - // Should get 3 separate errors, one for each missing field - assert_failure_with_json!(result, 3, "Required field"); + // Should get 2 separate errors, one for each missing field + assert_error_count(&result, 2); - let errors = result.0["errors"].as_array().unwrap(); + let name_error = find_error_with_code_and_path(&result, "REQUIRED_FIELD_MISSING", "/name"); + assert_error_message_contains(name_error, "Required field 'name' is missing"); - // Check that we have errors for each missing field with correct paths - assert!(errors.iter().any(|e| - e["code"] == "REQUIRED_FIELD_MISSING" && - e["details"]["path"] == "/name" && - e["message"] == "Required field 'name' is missing" - ), "Missing error for name field"); - - assert!(errors.iter().any(|e| - e["code"] == "REQUIRED_FIELD_MISSING" 
&& - e["details"]["path"] == "/kind" && - e["message"] == "Required field 'kind' is missing" - ), "Missing error for kind field"); - - assert!(errors.iter().any(|e| - e["code"] == "REQUIRED_FIELD_MISSING" && - e["details"]["path"] == "/age" && - e["message"] == "Required field 'age' is missing" - ), "Missing error for age field"); + let age_error = find_error_with_code_and_path(&result, "REQUIRED_FIELD_MISSING", "/age"); + assert_error_message_contains(age_error, "Required field 'age' is missing"); // Test 2: Missing only some required fields let partial_instance = json!({ "name": "Alice" }); - let partial_result = validate_json_schema(schema_id, jsonb(partial_instance)); + let partial_result = validate_json_schema("basic_validation_test.request", jsonb(partial_instance)); - // Should get 2 errors for the missing fields - assert_failure_with_json!(partial_result, 2, "Required field"); - - let partial_errors = partial_result.0["errors"].as_array().unwrap(); - - assert!(partial_errors.iter().any(|e| - e["details"]["path"] == "/kind" - ), "Missing error for kind field"); - - assert!(partial_errors.iter().any(|e| - e["details"]["path"] == "/age" - ), "Missing error for age field"); + // Should get 1 error for the missing field + assert_error_count(&partial_result, 1); + assert_has_error(&partial_result, "REQUIRED_FIELD_MISSING", "/age"); } #[pg_test] fn test_dependency_fields_split_errors() { - clear_json_schemas(); - let schema_id = "dependency_split_test"; - - // Schema with dependencies like the tokenize_external_accounts example - let schema = json!({ - "type": "object", - "properties": { - "creating": { "type": "boolean" }, - "name": { "type": "string" }, - "kind": { "type": "string" }, - "description": { "type": "string" } - }, - "dependencies": { - "creating": ["name", "kind"] // When creating is present, name and kind are required - } - }); - - let cache_result = cache_json_schema(schema_id, jsonb(schema), false); - assert_success_with_json!(cache_result, 
"Schema caching should succeed"); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // Test 1: Has creating=true but missing both dependent fields let missing_both = json!({ "creating": true, "description": "Some description" }); - let result = validate_json_schema(schema_id, jsonb(missing_both)); + let result = validate_json_schema("dependency_split_test.request", jsonb(missing_both)); // Should get 2 separate errors, one for each missing dependent field - assert_failure_with_json!(result, 2, "Field"); + assert_error_count(&result, 2); - let errors = result.0["errors"].as_array().unwrap(); + let name_dep_error = find_error_with_code_and_path(&result, "DEPENDENCY_FAILED", "/name"); + assert_error_message_contains(name_dep_error, "Field 'name' is required when 'creating' is present"); - assert!(errors.iter().any(|e| - e["code"] == "DEPENDENCY_FAILED" && - e["details"]["path"] == "/name" && - e["message"] == "Field 'name' is required when 'creating' is present" - ), "Missing error for dependent name field"); - - assert!(errors.iter().any(|e| - e["code"] == "DEPENDENCY_FAILED" && - e["details"]["path"] == "/kind" && - e["message"] == "Field 'kind' is required when 'creating' is present" - ), "Missing error for dependent kind field"); + let kind_dep_error = find_error_with_code_and_path(&result, "DEPENDENCY_FAILED", "/kind"); + assert_error_message_contains(kind_dep_error, "Field 'kind' is required when 'creating' is present"); // Test 2: Has creating=true with only one dependent field let missing_one = json!({ "creating": true, "name": "My Account" }); - let result_one = validate_json_schema(schema_id, jsonb(missing_one)); + let result_one = validate_json_schema("dependency_split_test.request", jsonb(missing_one)); // Should get 1 error for the missing kind field - assert_failure_with_json!(result_one, 1, "Field 'kind' is required when 'creating' is present"); - - let errors_one = result_one.0["errors"].as_array().unwrap(); - 
assert_eq!(errors_one[0]["details"]["path"], "/kind"); + assert_error_count(&result_one, 1); + let kind_error = find_error_with_code_and_path(&result_one, "DEPENDENCY_FAILED", "/kind"); + assert_error_message_contains(kind_error, "Field 'kind' is required when 'creating' is present"); // Test 3: Has no creating field - no dependency errors let no_creating = json!({ "description": "No creating field" }); - let result_no_creating = validate_json_schema(schema_id, jsonb(no_creating)); - assert_success_with_json!(result_no_creating, "Should succeed when creating field is not present"); + let result_no_creating = validate_json_schema("dependency_split_test.request", jsonb(no_creating)); + assert_success(&result_no_creating); // Test 4: Has creating=false - dependencies still apply because field exists! let creating_false = json!({ "creating": false, "description": "Creating is false" }); - let result_false = validate_json_schema(schema_id, jsonb(creating_false)); + let result_false = validate_json_schema("dependency_split_test.request", jsonb(creating_false)); // Dependencies are triggered by field existence, not value, so this should fail - assert_failure_with_json!(result_false, 2, "Field"); - - let errors_false = result_false.0["errors"].as_array().unwrap(); - assert!(errors_false.iter().any(|e| - e["details"]["path"] == "/name" - ), "Should have error for name when creating exists with false value"); - assert!(errors_false.iter().any(|e| - e["details"]["path"] == "/kind" - ), "Should have error for kind when creating exists with false value"); + assert_error_count(&result_false, 2); + assert_has_error(&result_false, "DEPENDENCY_FAILED", "/name"); + assert_has_error(&result_false, "DEPENDENCY_FAILED", "/kind"); } #[pg_test] fn test_nested_required_dependency_errors() { - clear_json_schemas(); - let schema_id = "nested_dep_test"; - - // More complex schema with nested objects - let schema = json!({ - "type": "object", - "properties": { - "items": { - "type": "array", 
- "items": { - "type": "object", - "properties": { - "id": { "type": "string" }, - "creating": { "type": "boolean" }, - "name": { "type": "string" }, - "kind": { "type": "string" } - }, - "required": ["id"], - "dependencies": { - "creating": ["name", "kind"] - } - } - } - }, - "required": ["items"] - }); - - let cache_result = cache_json_schema(schema_id, jsonb(schema), false); - assert_success_with_json!(cache_result, "Schema caching should succeed"); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // Test with array items that have dependency violations let instance = json!({ @@ -980,47 +1266,21 @@ fn test_nested_required_dependency_errors() { ] }); - let result = validate_json_schema(schema_id, jsonb(instance)); + let result = validate_json_schema("nested_dep_test.request", jsonb(instance)); // Should get 3 errors total: 2 for first item, 1 for second item - assert_failure_with_json!(result, 3, "Field"); - - let errors = result.0["errors"].as_array().unwrap(); + assert_error_count(&result, 3); // Check paths are correct for array items - assert!(errors.iter().any(|e| - e["details"]["path"] == "/items/0/name" && - e["code"] == "DEPENDENCY_FAILED" - ), "Missing error for first item's name"); - - assert!(errors.iter().any(|e| - e["details"]["path"] == "/items/0/kind" && - e["code"] == "DEPENDENCY_FAILED" - ), "Missing error for first item's kind"); - - assert!(errors.iter().any(|e| - e["details"]["path"] == "/items/1/kind" && - e["code"] == "DEPENDENCY_FAILED" - ), "Missing error for second item's kind"); + assert_has_error(&result, "DEPENDENCY_FAILED", "/items/0/name"); + assert_has_error(&result, "DEPENDENCY_FAILED", "/items/0/kind"); + assert_has_error(&result, "DEPENDENCY_FAILED", "/items/1/kind"); } #[pg_test] fn test_additional_properties_split_errors() { - clear_json_schemas(); - let schema_id = "additional_props_split_test"; - - // Schema with additionalProperties: false - let schema = json!({ - "type": "object", - "properties": 
{ - "name": { "type": "string" }, - "age": { "type": "number" } - }, - "additionalProperties": false - }); - - let cache_result = cache_json_schema(schema_id, jsonb(schema), false); - assert_success_with_json!(cache_result, "Schema caching should succeed"); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // Test 1: Multiple additional properties not allowed let instance_many_extras = json!({ @@ -1031,31 +1291,19 @@ fn test_additional_properties_split_errors() { "extra3": true }); - let result = validate_json_schema(schema_id, jsonb(instance_many_extras)); + let result = validate_json_schema("additional_props_test.request", jsonb(instance_many_extras)); // Should get 3 separate errors, one for each additional property - assert_failure_with_json!(result, 3, "Property"); + assert_error_count(&result, 3); - let errors = result.0["errors"].as_array().unwrap(); + let extra1_error = find_error_with_code_and_path(&result, "ADDITIONAL_PROPERTIES_NOT_ALLOWED", "/extra1"); + assert_error_message_contains(extra1_error, "Property 'extra1' is not allowed"); - // Check that we have errors for each additional property with correct paths - assert!(errors.iter().any(|e| - e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED" && - e["details"]["path"] == "/extra1" && - e["message"] == "Property 'extra1' is not allowed" - ), "Missing error for extra1 property"); + let extra2_error = find_error_with_code_and_path(&result, "ADDITIONAL_PROPERTIES_NOT_ALLOWED", "/extra2"); + assert_error_message_contains(extra2_error, "Property 'extra2' is not allowed"); - assert!(errors.iter().any(|e| - e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED" && - e["details"]["path"] == "/extra2" && - e["message"] == "Property 'extra2' is not allowed" - ), "Missing error for extra2 property"); - - assert!(errors.iter().any(|e| - e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED" && - e["details"]["path"] == "/extra3" && - e["message"] == "Property 'extra3' is not allowed" - ), "Missing 
error for extra3 property"); + let extra3_error = find_error_with_code_and_path(&result, "ADDITIONAL_PROPERTIES_NOT_ALLOWED", "/extra3"); + assert_error_message_contains(extra3_error, "Property 'extra3' is not allowed"); // Test 2: Single additional property let instance_one_extra = json!({ @@ -1064,30 +1312,14 @@ fn test_additional_properties_split_errors() { "unauthorized": "field" }); - let result_one = validate_json_schema(schema_id, jsonb(instance_one_extra)); + let result_one = validate_json_schema("additional_props_test.request", jsonb(instance_one_extra)); // Should get 1 error for the additional property - assert_failure_with_json!(result_one, 1, "Property 'unauthorized' is not allowed"); + assert_error_count(&result_one, 1); + let unauthorized_error = find_error_with_code_and_path(&result_one, "ADDITIONAL_PROPERTIES_NOT_ALLOWED", "/unauthorized"); + assert_error_message_contains(unauthorized_error, "Property 'unauthorized' is not allowed"); - let errors_one = result_one.0["errors"].as_array().unwrap(); - assert_eq!(errors_one[0]["details"]["path"], "/unauthorized"); - - // Test 3: Nested objects with additional properties - let nested_schema_id = "nested_additional_props_test"; - let nested_schema = json!({ - "type": "object", - "properties": { - "user": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "additionalProperties": false - } - } - }); - - let _ = cache_json_schema(nested_schema_id, jsonb(nested_schema), false); + // Test 3: Nested objects with additional properties (already in comprehensive setup) let nested_instance = json!({ "user": { @@ -1097,46 +1329,18 @@ fn test_additional_properties_split_errors() { } }); - let nested_result = validate_json_schema(nested_schema_id, jsonb(nested_instance)); + let nested_result = validate_json_schema("nested_additional_props_test.request", jsonb(nested_instance)); // Should get 2 errors for the nested additional properties - assert_failure_with_json!(nested_result, 2, 
"Property"); - - let nested_errors = nested_result.0["errors"].as_array().unwrap(); - - assert!(nested_errors.iter().any(|e| - e["details"]["path"] == "/user/role" && - e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED" - ), "Missing error for nested role property"); - - assert!(nested_errors.iter().any(|e| - e["details"]["path"] == "/user/level" && - e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED" - ), "Missing error for nested level property"); + assert_error_count(&nested_result, 2); + assert_has_error(&nested_result, "ADDITIONAL_PROPERTIES_NOT_ALLOWED", "/user/role"); + assert_has_error(&nested_result, "ADDITIONAL_PROPERTIES_NOT_ALLOWED", "/user/level"); } #[pg_test] fn test_unevaluated_properties_errors() { - clear_json_schemas(); - let schema_id = "unevaluated_test"; - - // Schema with unevaluatedProperties: false - // This is more complex than additionalProperties because it considers - // properties matched by pattern properties and additional properties - let schema = json!({ - "type": "object", - "properties": { - "name": { "type": "string" }, - "age": { "type": "number" } - }, - "patternProperties": { - "^attr_": { "type": "string" } // Properties starting with attr_ are allowed - }, - "unevaluatedProperties": false // No other properties allowed - }); - - let cache_result = cache_json_schema(schema_id, jsonb(schema), false); - assert_success_with_json!(cache_result, "Schema caching should succeed"); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // Test 1: Multiple unevaluated properties let instance_uneval = json!({ @@ -1148,57 +1352,21 @@ fn test_unevaluated_properties_errors() { "extra3": true }); - let result = validate_json_schema(schema_id, jsonb(instance_uneval)); + let result = validate_json_schema("unevaluated_test.request", jsonb(instance_uneval)); // Should get 3 separate FALSE_SCHEMA errors, one for each unevaluated property - assert_failure_with_json!(result, 3, "This schema always fails validation"); + 
assert_error_count(&result, 3); - let errors = result.0["errors"].as_array().unwrap(); + // Verify all errors are FALSE_SCHEMA and check paths + assert_has_error(&result, "FALSE_SCHEMA", "/extra1"); + assert_has_error(&result, "FALSE_SCHEMA", "/extra2"); + assert_has_error(&result, "FALSE_SCHEMA", "/extra3"); - // Verify all errors are FALSE_SCHEMA with specific paths - for error in errors { - assert_eq!(error["code"], "FALSE_SCHEMA", "All unevaluated properties should generate FALSE_SCHEMA errors"); - } + // Verify error messages + let extra1_error = find_error_with_code_and_path(&result, "FALSE_SCHEMA", "/extra1"); + assert_error_message_contains(extra1_error, "This schema always fails validation"); - // Check that we have errors for each unevaluated property with correct paths - assert!(errors.iter().any(|e| - e["code"] == "FALSE_SCHEMA" && - e["details"]["path"] == "/extra1" - ), "Missing error for extra1 property"); - - assert!(errors.iter().any(|e| - e["code"] == "FALSE_SCHEMA" && - e["details"]["path"] == "/extra2" - ), "Missing error for extra2 property"); - - assert!(errors.iter().any(|e| - e["code"] == "FALSE_SCHEMA" && - e["details"]["path"] == "/extra3" - ), "Missing error for extra3 property"); - - // Test 2: Complex schema with allOf and unevaluatedProperties - let complex_schema_id = "complex_unevaluated_test"; - let complex_schema = json!({ - "type": "object", - "allOf": [ - { - "properties": { - "firstName": { "type": "string" } - } - }, - { - "properties": { - "lastName": { "type": "string" } - } - } - ], - "properties": { - "age": { "type": "number" } - }, - "unevaluatedProperties": false - }); - - let _ = cache_json_schema(complex_schema_id, jsonb(complex_schema), false); + // Test 2: Complex schema with allOf and unevaluatedProperties (already in comprehensive setup) // firstName and lastName are evaluated by allOf schemas, age by main schema let complex_instance = json!({ @@ -1209,22 +1377,12 @@ fn test_unevaluated_properties_errors() { 
"title": "Mr" // Not evaluated by any schema }); - let complex_result = validate_json_schema(complex_schema_id, jsonb(complex_instance)); + let complex_result = validate_json_schema("complex_unevaluated_test.request", jsonb(complex_instance)); // Should get 2 FALSE_SCHEMA errors for unevaluated properties - assert_failure_with_json!(complex_result, 2, "This schema always fails validation"); - - let complex_errors = complex_result.0["errors"].as_array().unwrap(); - - assert!(complex_errors.iter().any(|e| - e["code"] == "FALSE_SCHEMA" && - e["details"]["path"] == "/nickname" - ), "Missing error for nickname property"); - - assert!(complex_errors.iter().any(|e| - e["code"] == "FALSE_SCHEMA" && - e["details"]["path"] == "/title" - ), "Missing error for title property"); + assert_error_count(&complex_result, 2); + assert_has_error(&complex_result, "FALSE_SCHEMA", "/nickname"); + assert_has_error(&complex_result, "FALSE_SCHEMA", "/title"); // Test 3: Valid instance with all properties evaluated let valid_instance = json!({ @@ -1234,24 +1392,14 @@ fn test_unevaluated_properties_errors() { "attr_theme": "dark" }); - let valid_result = validate_json_schema(schema_id, jsonb(valid_instance)); - assert_success_with_json!(valid_result, "All properties are evaluated, should pass"); + let valid_result = validate_json_schema("unevaluated_test.request", jsonb(valid_instance)); + assert_success(&valid_result); } #[pg_test] fn test_format_validation_allows_empty_string() { - clear_json_schemas(); - let schema_id = "format_schema_empty"; - let schema = json!({ - "type": "object", - "properties": { - "uuid": { "type": "string", "format": "uuid" }, - "date_time": { "type": "string", "format": "date-time" }, - "email": { "type": "string", "format": "email" } - } - }); - - let _ = cache_json_schema(schema_id, jsonb(schema), false); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // Test with empty strings for all formatted fields let instance = json!({ @@ 
-1260,30 +1408,334 @@ fn test_format_validation_allows_empty_string() { "email": "" }); - let result = validate_json_schema(schema_id, jsonb(instance)); + let result = validate_json_schema("format_test.request", jsonb(instance)); // This is the test that should fail before the change and pass after - assert_success_with_json!(result, "Empty strings should be allowed for format validation"); + assert_success(&result); } #[pg_test] fn test_non_empty_string_format_validation_still_fails() { - clear_json_schemas(); - let schema_id = "non_empty_fail_schema"; - let schema = json!({ - "type": "object", - "properties": { - "date_time": { "type": "string", "format": "date-time" } - } - }); - - let _ = cache_json_schema(schema_id, jsonb(schema), false); + // Use comprehensive schema setup + let _ = setup_comprehensive_schemas(); // A non-empty but invalid string should still fail let instance = json!({ "date_time": "not-a-date" }); - let result = validate_json_schema(schema_id, jsonb(instance)); - assert_failure_with_json!(result, 1, "Value \"not-a-date\" is not a valid date-time format"); + let result = validate_json_schema("format_test.request", jsonb(instance)); + assert_error_count(&result, 1); + let error = find_error_with_code(&result, "FORMAT_INVALID"); + assert_error_message_contains(error, "not-a-date"); +} + +#[pg_test] +fn test_ref_debug_cache_json_schemas() { + // First, let's debug what's happening with our cache_json_schemas call + let result = setup_comprehensive_schemas(); + + // If this fails, we'll see exactly what the error is + if !result.0.get("response").is_some() { + use pgrx::log; + let pretty = serde_json::to_string_pretty(&result.0).unwrap_or_else(|_| format!("{:?}", result.0)); + log!("Cache result: {}", pretty); + panic!("Cache failed: {}", pretty); + } + + // If cache succeeded, check what schemas are available + let schemas_result = show_json_schemas(); + let schemas = get_response_schemas(&schemas_result); + use pgrx::log; + log!("Available 
schemas: {:?}", schemas); +} + +#[pg_test] +fn test_ref_inheritance_chain() { + let cache_result = setup_comprehensive_schemas(); + assert_success(&cache_result); + + // Test valid person inheriting from user -> organization -> entity + let valid_person = json!({ + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "John Doe", + "password": "secretpassword", + "website": "https://johndoe.com", + "tax_id": "123-45-6789", + "first_name": "John", + "last_name": "Doe" + }); + + let result = validate_json_schema("ref_inheritance_test.request", jsonb(valid_person)); + assert_success(&result); + + // Test missing required fields from base entity + let missing_entity_fields = json!({ + "first_name": "John", + "last_name": "Doe" + }); + + let result_missing = validate_json_schema("ref_inheritance_test.request", jsonb(missing_entity_fields)); + assert_error_count(&result_missing, 3); // Missing id, type, created_by + assert_has_error(&result_missing, "REQUIRED_FIELD_MISSING", "/id"); + assert_has_error(&result_missing, "REQUIRED_FIELD_MISSING", "/type"); + assert_has_error(&result_missing, "REQUIRED_FIELD_MISSING", "/created_by"); + + // Test dependency inheritance - person requires first_name, last_name when creating + let dependency_test = json!({ + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "John Doe", + "creating": true, + "first_name": "John" + // Missing last_name + }); + + let result_dep = validate_json_schema("ref_inheritance_test.request", jsonb(dependency_test)); + assert_error_count(&result_dep, 1); + assert_has_error(&result_dep, "DEPENDENCY_FAILED", "/last_name"); + + // Test property validation from inherited schemas + let invalid_password = json!({ + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "John Doe", + 
"password": "short", // Too short (minLength: 8 from user schema) + "first_name": "John", + "last_name": "Doe" + }); + + let result_invalid = validate_json_schema("ref_inheritance_test.request", jsonb(invalid_password)); + assert_error_count(&result_invalid, 1); + assert_has_error(&result_invalid, "MIN_LENGTH_VIOLATED", "/password"); +} + +#[pg_test] +fn test_ref_with_local_schemas() { + let _ = setup_comprehensive_schemas(); + + // Test valid request with local schema referencing type schema + let valid_request = json!({ + "profile_data": { + "bio": "Software developer passionate about Rust", + "owner": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Jane Smith", + "first_name": "Jane", + "last_name": "Smith" + } + }, + "metadata": {} + }); + + let result = validate_json_schema("ref_with_local_test.request", jsonb(valid_request)); + assert_success(&result); + + // Test bio too long (local schema validation) + let long_bio = "A".repeat(501); + let invalid_bio = json!({ + "profile_data": { + "bio": long_bio, + "owner": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Jane Smith", + "first_name": "Jane", + "last_name": "Smith" + } + }, + "metadata": {} + }); + + let result_bio = validate_json_schema("ref_with_local_test.request", jsonb(invalid_bio)); + assert_error_count(&result_bio, 1); + assert_has_error(&result_bio, "MAX_LENGTH_VIOLATED", "/profile_data/bio"); + + // Test invalid person in nested ref + let invalid_person = json!({ + "profile_data": { + "bio": "Valid bio", + "owner": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Jane Smith", + "first_name": "J", // Too short (minLength: 1 but empty after trim) + "last_name": "" // Empty string violates minLength: 1 + } + }, + "metadata": {} + }); + + 
let result_person = validate_json_schema("ref_with_local_test.request", jsonb(invalid_person)); + assert_error_count(&result_person, 1); + assert_has_error(&result_person, "MIN_LENGTH_VIOLATED", "/profile_data/owner/last_name"); +} + +#[pg_test] +fn test_ref_recursive_resolution() { + let _ = setup_comprehensive_schemas(); + + // Test valid nested refs: local schema refs type schemas which ref other type schemas + let valid_nested = json!({ + "user_info": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "user", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Admin User", + "password": "securepass", + "website": "https://admin.example.com", + "tax_id": "987-65-4321" + }, + "person_info": { + "id": "550e8400-e29b-41d4-a716-446655440002", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Regular Person", + "password": "userpass123", + "first_name": "Alice", + "last_name": "Johnson" + } + }); + + let result = validate_json_schema("ref_recursive_test.request", jsonb(valid_nested)); + assert_success(&result); + + // Test validation cascades through multiple ref levels + let invalid_nested = json!({ + "user_info": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "user", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Admin User", + "password": "short" // Violates user schema minLength: 8 + }, + "person_info": { + "id": "550e8400-e29b-41d4-a716-446655440002", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Regular Person", + "creating": true, + "first_name": "Alice" + // Missing last_name for creating dependency + } + }); + + let result_invalid = validate_json_schema("ref_recursive_test.request", jsonb(invalid_nested)); + assert_error_count(&result_invalid, 2); + assert_has_error(&result_invalid, "MIN_LENGTH_VIOLATED", "/user_info/password"); + assert_has_error(&result_invalid, "DEPENDENCY_FAILED", "/person_info/last_name"); +} + 
+#[pg_test] +fn test_ref_local_to_type_chain() { + let _ = setup_comprehensive_schemas(); + + // Test complex chain: request refs local schema, which refs type schema, which refs other type schemas + let valid_task = json!({ + "title": "Implement new feature", + "assignee": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Developer", + "first_name": "Bob", + "last_name": "Wilson" + }, + "settings": { + "priority": "high", + "due_date": "2024-12-31T23:59:59Z", + "reviewer": { + "id": "550e8400-e29b-41d4-a716-446655440002", + "type": "user", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Senior Dev", + "password": "reviewerpass" + } + } + }); + + let result = validate_json_schema("ref_local_to_type_test.request", jsonb(valid_task)); + assert_success(&result); + + // Test validation at multiple ref levels + let invalid_priority = json!({ + "title": "Implement new feature", + "assignee": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "person", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Developer", + "first_name": "Bob", + "last_name": "Wilson" + }, + "settings": { + "priority": "urgent", // Invalid enum value + "reviewer": { + "id": "550e8400-e29b-41d4-a716-446655440002", + "type": "user", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Senior Dev", + "password": "short" // Too short for user schema + } + } + }); + + let result_invalid = validate_json_schema("ref_local_to_type_test.request", jsonb(invalid_priority)); + assert_error_count(&result_invalid, 2); + assert_has_error(&result_invalid, "ENUM_VIOLATED", "/settings/priority"); + assert_has_error(&result_invalid, "MIN_LENGTH_VIOLATED", "/settings/reviewer/password"); +} + +#[pg_test] +fn test_ref_title_override_behavior() { + let _ = setup_comprehensive_schemas(); + + // Test that local schema can override title from referenced schema + // The 
special_user schema has title "Special User Override" which should override user's "User" title + let valid_special_user = json!({ + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "user", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Special User", + "password": "specialpass", + "special_access": true + }); + + let result = validate_json_schema("ref_title_override_test.request", jsonb(valid_special_user)); + assert_success(&result); + + // Test that validation still works through the ref chain + let invalid_special_user = json!({ + "id": "550e8400-e29b-41d4-a716-446655440000", + "type": "user", + "created_by": "550e8400-e29b-41d4-a716-446655440001", + "name": "Special User", + "password": "bad", // Too short + "special_access": "yes" // Wrong type, should be boolean + }); + + let result_invalid = validate_json_schema("ref_title_override_test.request", jsonb(invalid_special_user)); + assert_error_count(&result_invalid, 2); + assert_has_error(&result_invalid, "MIN_LENGTH_VIOLATED", "/password"); + assert_has_error(&result_invalid, "TYPE_MISMATCH", "/special_access"); + + // Test that all inherited properties and constraints still apply + let missing_required = json!({ + "special_access": true + // Missing all required fields from entity base + }); + + let result_missing = validate_json_schema("ref_title_override_test.request", jsonb(missing_required)); + assert_error_count(&result_missing, 3); + assert_has_error(&result_missing, "REQUIRED_FIELD_MISSING", "/id"); + assert_has_error(&result_missing, "REQUIRED_FIELD_MISSING", "/type"); + assert_has_error(&result_missing, "REQUIRED_FIELD_MISSING", "/created_by"); }