//! jspg/src/tests.rs — JSON-schema caching/validation test suite for the jspg extension.

use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};
// Helper macro for asserting success with Drop-style response.
//
// A "success" result is a `JsonB` whose object contains a `response` key and
// no `errors` key; anything else panics with the context message plus a
// pretty-printed dump of the result JSON.
//
// Fixes over the previous version: the argument expression is bound once (so
// it is not re-evaluated per access), each arm's temporaries are scoped in a
// block, and the no-message arm delegates with an empty format string — the
// same pattern `assert_failure_with_json!` already uses.
macro_rules! assert_success_with_json {
    // With a context message (format string + optional args).
    ($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {{
        // Evaluate the caller's expression exactly once.
        let result = &$result_jsonb;
        let has_response = result.0.get("response").is_some();
        let has_errors = result.0.get("errors").is_some();
        if !has_response || has_errors {
            let base_msg = format!($fmt $(, $($args)*)?);
            let pretty_json = serde_json::to_string_pretty(&result.0)
                .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", result.0));
            let panic_msg = format!("Assertion Failed (expected success with 'response' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
            panic!("{}", panic_msg);
        }
    }};
    // Simpler version without message: delegate to the arm above.
    ($result_jsonb:expr) => {
        assert_success_with_json!($result_jsonb, "");
    };
}
// Helper macro for asserting failed JSON results with Drop-style errors.
//
// A "failure" result is a JsonB whose object contains an `errors` array and
// no `response` key. Three families of arms, tried top to bottom:
//   1. (result, expected_error_count, first_message_substring [, fmt, args...])
//   2. (result, expected_error_count [, fmt, args...])
//   3. (result [, fmt, args...])
//
// NOTE(review): arm order is load-bearing. A 3-argument call such as
// `assert_failure_with_json!(r, 2, "Field")` matches family 1 (the string
// literal is consumed by the `:expr` substring matcher), so the third
// argument is always interpreted as a required message substring; the
// (result, count, fmt) arm below only fires with four or more arguments.
// All call sites in this file rely on that interpretation.
//
// NOTE(review): `$result` may be evaluated more than once per expansion;
// callers in this file always pass plain bindings, so this is harmless here.
macro_rules! assert_failure_with_json {
// --- Arms with error count and message substring check ---
// With custom message:
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
// Must look like a failure: no "response", and "errors" present.
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
// Exact error-count check.
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
// Substring check applies only to the FIRST error's "message" field, and
// only when at least one error was expected.
if $expected_error_count > 0 {
let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
};
// --- Arms with error count check only ---
// With custom message:
// NOTE(review): reachable only with 4+ arguments — a 3-arg call with a
// literal third argument is captured by the substring family above.
($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr) => {
assert_failure_with_json!($result, $expected_error_count, "");
};
// --- Arms checking failure only (expects at least one error) ---
// With custom message:
($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let has_response = json_result.get("response").is_some();
let errors_opt = json_result.get("errors");
let base_msg = format!($fmt $(, $($args)*)?);
if has_response || errors_opt.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure with 'errors' field): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
let errors_array = errors_opt.unwrap().as_array().expect("'errors' should be an array");
// Any non-empty errors array passes this family.
if errors_array.is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but 'errors' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
};
// Without custom message (calls the one above with ""):
($result:expr) => {
assert_failure_with_json!($result, "");
};
}
/// Convenience wrapper: lift a `serde_json::Value` into a pgrx `JsonB`.
fn jsonb(v: Value) -> JsonB {
    JsonB(v)
}
/// End-to-end happy path plus the three basic failure modes: minimum
/// violation, missing required field, and unknown schema id.
#[pg_test]
fn test_cache_and_validate_json_schema() {
    // Start from an empty cache so earlier tests cannot interfere.
    clear_json_schemas();

    let schema_id = "my_schema";
    let schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "age": { "type": "integer", "minimum": 0 }
        },
        "required": ["name", "age"]
    });
    let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()), false);
    assert_success_with_json!(cache_result, "Cache operation should succeed.");

    // A conforming instance validates cleanly.
    let valid_result = validate_json_schema(schema_id, jsonb(json!({ "name": "Alice", "age": 30 })));
    assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");

    // Invalid type - age is negative
    let invalid_result_type =
        validate_json_schema(schema_id, jsonb(json!({ "name": "Bob", "age": -5 })));
    assert_failure_with_json!(invalid_result_type, 1, "Value is below the minimum allowed", "Validation with invalid type should fail.");
    let type_errors = invalid_result_type.0["errors"].as_array().unwrap();
    assert_eq!(type_errors[0]["details"]["path"], "/age");
    assert_eq!(type_errors[0]["details"]["context"], -5);
    assert_eq!(type_errors[0]["details"]["schema"], "my_schema");
    assert_eq!(type_errors[0]["code"], "MINIMUM_VIOLATED");

    // Missing field
    let invalid_result_missing =
        validate_json_schema(schema_id, jsonb(json!({ "name": "Charlie" })));
    assert_failure_with_json!(invalid_result_missing, 1, "Required field 'age' is missing", "Validation with missing field should fail.");
    let missing_errors = invalid_result_missing.0["errors"].as_array().unwrap();
    assert_eq!(missing_errors[0]["details"]["path"], "/age");
    assert_eq!(missing_errors[0]["details"]["schema"], "my_schema");
    assert_eq!(missing_errors[0]["code"], "REQUIRED_FIELD_MISSING");

    // Schema not found
    let notfound_result = validate_json_schema("non_existent_schema", jsonb(json!({})));
    assert_failure_with_json!(notfound_result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
    let notfound_errors = notfound_result.0["errors"].as_array().unwrap();
    assert_eq!(notfound_errors[0]["code"], "SCHEMA_NOT_FOUND");
    assert_eq!(notfound_errors[0]["details"]["schema"], "non_existent_schema");
}
/// Validating against an id that was never cached reports SCHEMA_NOT_FOUND.
#[pg_test]
fn test_validate_json_schema_not_cached() {
    clear_json_schemas();
    let outcome = validate_json_schema("non_existent_schema", jsonb(json!({ "foo": "bar" })));
    assert_failure_with_json!(outcome, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
}
/// Caching a schema that violates the meta-schema must itself fail, with
/// one ENUM_VIOLATED leaf error per offending instance path.
#[pg_test]
fn test_cache_invalid_json_schema() {
    clear_json_schemas();
    let schema_id = "invalid_schema";
    // Schema with an invalid type *value*
    let invalid_schema = json!({
        "$id": "urn:invalid_schema",
        "type": ["invalid_type_value"]
    });
    let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema), false);
    // Expect 2 leaf errors because the meta-schema validation fails at the type
    // value and within the type array itself.
    assert_failure_with_json!(
        cache_result,
        2, // Expect exactly two leaf errors
        "Value is not one of the allowed options", // Updated to human-readable message
        "Caching invalid schema should fail with specific meta-schema validation errors."
    );
    let errors = cache_result.0["errors"].as_array().expect("Errors field should be an array");
    assert_eq!(errors.len(), 2);
    // Every reported error is an ENUM_VIOLATED attributed to this schema id.
    for err in errors {
        assert_eq!(err["code"], "ENUM_VIOLATED");
        assert_eq!(err["details"]["schema"], "invalid_schema");
    }
    // Instance paths from the meta-schema run are preserved in the path field.
    let reported_paths: Vec<&str> = errors
        .iter()
        .map(|e| e["details"]["path"].as_str().unwrap())
        .collect();
    assert!(reported_paths.contains(&"/type"));
    assert!(reported_paths.contains(&"/type/0"));
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "detailed_errors";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema), false);
let invalid_instance = json!({
"address": {
"street": 123, // Wrong type
"city": "Supercalifragilisticexpialidocious" // Too long
}
});
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
// Update: Expect 2 errors again, as boon reports both nested errors.
assert_failure_with_json!(result, 2);
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{ // Option 1: Object with string prop
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{ // Option 2: Object with number prop
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
let _ = cache_json_schema(schema_id, jsonb(schema), false);
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_string, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_string = result_invalid_string.0["errors"].as_array().expect("Expected error array for invalid string");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "/string_prop" &&
e["code"] == "MAX_LENGTH_VIOLATED" &&
e["details"]["schema"] == "oneof_schema"
), "Missing maxLength error");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "/number_prop" &&
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["schema"] == "oneof_schema"
), "Missing number_prop required error");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_number, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_number = result_invalid_number.0["errors"].as_array().expect("Expected error array for invalid number");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "/number_prop" &&
e["code"] == "MINIMUM_VIOLATED" &&
e["details"]["schema"] == "oneof_schema"
), "Missing minimum error");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "/string_prop" &&
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["schema"] == "oneof_schema"
), "Missing string_prop required error");
// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_invalid_bool, 1);
// Explicitly check that the single remaining error is the type error for the root instance path
let errors_bool = result_invalid_bool.0["errors"].as_array().expect("Expected error array for invalid bool");
assert_eq!(errors_bool.len(), 1, "Expected exactly one error after deduplication");
assert_eq!(errors_bool[0]["code"], "TYPE_MISMATCH");
assert_eq!(errors_bool[0]["details"]["path"], "");
assert_eq!(errors_bool[0]["details"]["schema"], "oneof_schema");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Now we expect 2 errors because required fields are split into individual errors
assert_failure_with_json!(result_empty_obj, 2);
let errors_empty = result_empty_obj.0["errors"].as_array().expect("Expected error array for empty object");
assert_eq!(errors_empty.len(), 2, "Expected two errors for missing required fields");
// Check that we have errors for both missing fields
assert!(errors_empty.iter().any(|e|
e["details"]["path"] == "/string_prop" &&
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["schema"] == "oneof_schema"
), "Missing string_prop required error");
assert!(errors_empty.iter().any(|e|
e["details"]["path"] == "/number_prop" &&
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["schema"] == "oneof_schema"
), "Missing number_prop required error");
}
/// `clear_json_schemas` empties the cache: the listing becomes empty and a
/// previously cached id subsequently reports SCHEMA_NOT_FOUND.
#[pg_test]
fn test_clear_json_schemas() {
    // Clearing a (possibly already empty) cache is itself a success response.
    let initial_clear = clear_json_schemas();
    assert_success_with_json!(initial_clear);

    // Cache one schema and confirm it shows up in the listing.
    let schema_id = "schema_to_clear";
    let schema = json!({ "type": "string" });
    let cached = cache_json_schema(schema_id, jsonb(schema.clone()), false);
    assert_success_with_json!(cached);
    let listing_before = show_json_schemas();
    let ids_before = listing_before.0["response"].as_array().unwrap();
    assert!(ids_before.contains(&json!(schema_id)));

    // Clear again and verify the listing is now empty.
    let second_clear = clear_json_schemas();
    assert_success_with_json!(second_clear);
    let listing_after = show_json_schemas();
    let ids_after = listing_after.0["response"].as_array().unwrap();
    assert!(ids_after.is_empty());

    // The cleared id must no longer validate anything.
    let validate_result = validate_json_schema(schema_id, jsonb(json!("test")));
    assert_failure_with_json!(validate_result, 1, "Schema 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}
#[pg_test]
fn test_show_json_schemas() {
let _ = clear_json_schemas();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
let _ = cache_json_schema(schema_id1, jsonb(schema.clone()), false);
let _ = cache_json_schema(schema_id2, jsonb(schema.clone()), false);
let result = show_json_schemas();
let schemas = result.0["response"].as_array().unwrap();
assert_eq!(schemas.len(), 2);
assert!(schemas.contains(&json!(schema_id1)));
assert!(schemas.contains(&json!(schema_id2)));
}
/// Root-level type mismatches must be reported with path "" (the instance
/// root) and carry the offending value in `details.context`.
#[pg_test]
fn test_root_level_type_mismatch() {
    clear_json_schemas();
    let schema_id = "array_schema";
    // Schema expecting an array (like delete_tokens response)
    let schema = json!({
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "id": { "type": "string", "format": "uuid" }
            }
        }
    });
    let cache_result = cache_json_schema(schema_id, jsonb(schema), false);
    assert_success_with_json!(cache_result, "Schema caching should succeed");

    // Test 1: Validate null against array schema (simulating delete_tokens issue)
    let null_result = validate_json_schema(schema_id, jsonb(json!(null)));
    assert_failure_with_json!(null_result, 1, "Field type does not match the expected type");
    let null_errors = null_result.0["errors"].as_array().unwrap();
    assert_eq!(null_errors[0]["code"], "TYPE_MISMATCH");
    assert_eq!(null_errors[0]["details"]["path"], ""); // Root level path should be empty string
    assert_eq!(null_errors[0]["details"]["context"], json!(null));
    assert_eq!(null_errors[0]["details"]["schema"], "array_schema");
    assert!(null_errors[0]["details"]["cause"].as_str().unwrap().contains("want array"));

    // Test 2: Validate object against array schema
    let object_instance = json!({"id": "not-an-array"});
    let object_result = validate_json_schema(schema_id, jsonb(object_instance.clone()));
    assert_failure_with_json!(object_result, 1, "Field type does not match the expected type");
    let object_errors = object_result.0["errors"].as_array().unwrap();
    assert_eq!(object_errors[0]["code"], "TYPE_MISMATCH");
    assert_eq!(object_errors[0]["details"]["path"], ""); // Root level path should be empty string
    assert_eq!(object_errors[0]["details"]["context"], object_instance);
    assert_eq!(object_errors[0]["details"]["schema"], "array_schema");

    // Test 3: Valid empty array
    let valid_result = validate_json_schema(schema_id, jsonb(json!([])));
    assert_success_with_json!(valid_result, "Empty array should be valid");

    // Test 4: Schema expecting object at root
    let object_schema_id = "object_schema";
    let object_schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" }
        }
    });
    // Bug fix: previously discarded with `let _` — assert so a caching failure
    // is reported here rather than as a misleading SCHEMA_NOT_FOUND below.
    let object_cache_result = cache_json_schema(object_schema_id, jsonb(object_schema), false);
    assert_success_with_json!(object_cache_result, "Object schema caching should succeed");
    // String at root when object expected
    let string_result = validate_json_schema(object_schema_id, jsonb(json!("not an object")));
    assert_failure_with_json!(string_result, 1, "Field type does not match the expected type");
    let string_errors = string_result.0["errors"].as_array().unwrap();
    assert_eq!(string_errors[0]["code"], "TYPE_MISMATCH");
    assert_eq!(string_errors[0]["details"]["path"], ""); // Root level path
    assert_eq!(string_errors[0]["details"]["schema"], "object_schema");
    assert_eq!(string_errors[0]["details"]["context"], json!("not an object"));
}
/// Caching with `strict = true` should make every object without an explicit
/// `additionalProperties` keyword reject unknown properties — at the root, in
/// nested objects, and inside array items — while `strict = false` leaves the
/// schema permissive, and an explicit `additionalProperties: true` overrides
/// strict mode.
#[pg_test]
fn test_auto_strict_validation() {
    clear_json_schemas();
    let schema_id = "strict_test";
    let schema_id_non_strict = "non_strict_test";
    // Schema without explicit additionalProperties: false
    let schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "profile": {
                "type": "object",
                "properties": {
                    "age": { "type": "number" },
                    "preferences": {
                        "type": "object",
                        "properties": {
                            "theme": { "type": "string" }
                        }
                    }
                }
            },
            "tags": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "id": { "type": "string" },
                        "value": { "type": "string" }
                    }
                }
            }
        }
    });
    // Cache the same schema twice - once with strict=true, once with strict=false
    let cache_result_strict = cache_json_schema(schema_id, jsonb(schema.clone()), true);
    assert_success_with_json!(cache_result_strict, "Schema caching with strict=true should succeed");
    let cache_result_non_strict = cache_json_schema(schema_id_non_strict, jsonb(schema.clone()), false);
    assert_success_with_json!(cache_result_non_strict, "Schema caching with strict=false should succeed");

    // Test 1: Valid instance with no extra properties (should pass for both)
    let valid_instance = json!({
        "name": "John",
        "profile": {
            "age": 30,
            "preferences": {
                "theme": "dark"
            }
        },
        "tags": [
            {"id": "1", "value": "rust"},
            {"id": "2", "value": "postgres"}
        ]
    });
    let valid_result_strict = validate_json_schema(schema_id, jsonb(valid_instance.clone()));
    assert_success_with_json!(valid_result_strict, "Valid instance should pass with strict schema");
    let valid_result_non_strict = validate_json_schema(schema_id_non_strict, jsonb(valid_instance));
    assert_success_with_json!(valid_result_non_strict, "Valid instance should pass with non-strict schema");

    // Test 2: Root level extra property
    let invalid_root_extra = json!({
        "name": "John",
        "extraField": "should fail" // Extra property at root
    });
    // Should fail with strict schema
    let result_root_strict = validate_json_schema(schema_id, jsonb(invalid_root_extra.clone()));
    assert_failure_with_json!(result_root_strict, 1, "Schema validation always fails");
    let errors_root = result_root_strict.0["errors"].as_array().unwrap();
    assert_eq!(errors_root[0]["code"], "FALSE_SCHEMA");
    assert_eq!(errors_root[0]["details"]["path"], "/extraField");
    assert_eq!(errors_root[0]["details"]["schema"], "strict_test");
    // Should pass with non-strict schema
    let result_root_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_root_extra));
    assert_success_with_json!(result_root_non_strict, "Extra property should be allowed with non-strict schema");

    // Test 3: Nested object extra property
    let invalid_nested_extra = json!({
        "name": "John",
        "profile": {
            "age": 30,
            "extraNested": "should fail" // Extra property in nested object
        }
    });
    // Should fail with strict schema
    let result_nested_strict = validate_json_schema(schema_id, jsonb(invalid_nested_extra.clone()));
    assert_failure_with_json!(result_nested_strict, 1, "Schema validation always fails");
    let errors_nested = result_nested_strict.0["errors"].as_array().unwrap();
    assert_eq!(errors_nested[0]["code"], "FALSE_SCHEMA");
    assert_eq!(errors_nested[0]["details"]["path"], "/profile/extraNested");
    assert_eq!(errors_nested[0]["details"]["schema"], "strict_test");
    // Should pass with non-strict schema
    let result_nested_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_nested_extra));
    assert_success_with_json!(result_nested_non_strict, "Extra nested property should be allowed with non-strict schema");

    // Test 4: Deeply nested object extra property
    let invalid_deep_extra = json!({
        "name": "John",
        "profile": {
            "age": 30,
            "preferences": {
                "theme": "dark",
                "extraDeep": "should fail" // Extra property in deeply nested object
            }
        }
    });
    // Should fail with strict schema
    let result_deep_strict = validate_json_schema(schema_id, jsonb(invalid_deep_extra.clone()));
    assert_failure_with_json!(result_deep_strict, 1, "Schema validation always fails");
    let errors_deep = result_deep_strict.0["errors"].as_array().unwrap();
    assert_eq!(errors_deep[0]["code"], "FALSE_SCHEMA");
    assert_eq!(errors_deep[0]["details"]["path"], "/profile/preferences/extraDeep");
    assert_eq!(errors_deep[0]["details"]["schema"], "strict_test");
    // Should pass with non-strict schema
    let result_deep_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_deep_extra));
    assert_success_with_json!(result_deep_non_strict, "Extra deep property should be allowed with non-strict schema");

    // Test 5: Array item extra property
    let invalid_array_item_extra = json!({
        "name": "John",
        "tags": [
            {"id": "1", "value": "rust", "extraInArray": "should fail"} // Extra property in array item
        ]
    });
    // Should fail with strict schema
    let result_array_strict = validate_json_schema(schema_id, jsonb(invalid_array_item_extra.clone()));
    assert_failure_with_json!(result_array_strict, 1, "Schema validation always fails");
    let errors_array = result_array_strict.0["errors"].as_array().unwrap();
    assert_eq!(errors_array[0]["code"], "FALSE_SCHEMA");
    assert_eq!(errors_array[0]["details"]["path"], "/tags/0/extraInArray");
    assert_eq!(errors_array[0]["details"]["schema"], "strict_test");
    // Should pass with non-strict schema
    let result_array_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_array_item_extra));
    assert_success_with_json!(result_array_non_strict, "Extra array item property should be allowed with non-strict schema");

    // Test 6: Schema with explicit additionalProperties: true should allow extras even with strict=true
    let schema_id_permissive = "permissive_test";
    let permissive_schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" }
        },
        "additionalProperties": true // Explicitly allow additional properties
    });
    // Bug fix: this cache result was previously discarded with `let _`; assert
    // it so a caching failure cannot masquerade as a validation failure below.
    let cache_result_permissive = cache_json_schema(schema_id_permissive, jsonb(permissive_schema), true); // Note: strict=true
    assert_success_with_json!(cache_result_permissive, "Permissive schema caching should succeed");
    let instance_with_extra = json!({
        "name": "John",
        "extraAllowed": "should pass"
    });
    let result_permissive = validate_json_schema(schema_id_permissive, jsonb(instance_with_extra));
    assert_success_with_json!(result_permissive, "Instance with extra property should pass when additionalProperties is explicitly true, even with strict=true");
}
#[pg_test]
fn test_required_fields_split_errors() {
clear_json_schemas();
let schema_id = "required_split_test";
// Schema with multiple required fields
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"kind": { "type": "string" },
"age": { "type": "number" }
},
"required": ["name", "kind", "age"]
});
let cache_result = cache_json_schema(schema_id, jsonb(schema), false);
assert_success_with_json!(cache_result, "Schema caching should succeed");
// Test 1: Missing all required fields
let empty_instance = json!({});
let result = validate_json_schema(schema_id, jsonb(empty_instance));
// Should get 3 separate errors, one for each missing field
assert_failure_with_json!(result, 3, "Required field");
let errors = result.0["errors"].as_array().unwrap();
// Check that we have errors for each missing field with correct paths
assert!(errors.iter().any(|e|
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["path"] == "/name" &&
e["message"] == "Required field 'name' is missing"
), "Missing error for name field");
assert!(errors.iter().any(|e|
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["path"] == "/kind" &&
e["message"] == "Required field 'kind' is missing"
), "Missing error for kind field");
assert!(errors.iter().any(|e|
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["path"] == "/age" &&
e["message"] == "Required field 'age' is missing"
), "Missing error for age field");
// Test 2: Missing only some required fields
let partial_instance = json!({
"name": "Alice"
});
let partial_result = validate_json_schema(schema_id, jsonb(partial_instance));
// Should get 2 errors for the missing fields
assert_failure_with_json!(partial_result, 2, "Required field");
let partial_errors = partial_result.0["errors"].as_array().unwrap();
assert!(partial_errors.iter().any(|e|
e["details"]["path"] == "/kind"
), "Missing error for kind field");
assert!(partial_errors.iter().any(|e|
e["details"]["path"] == "/age"
), "Missing error for age field");
}
#[pg_test]
fn test_dependency_fields_split_errors() {
clear_json_schemas();
let schema_id = "dependency_split_test";
// Schema with dependencies like the tokenize_external_accounts example
let schema = json!({
"type": "object",
"properties": {
"creating": { "type": "boolean" },
"name": { "type": "string" },
"kind": { "type": "string" },
"description": { "type": "string" }
},
"dependencies": {
"creating": ["name", "kind"] // When creating is present, name and kind are required
}
});
let cache_result = cache_json_schema(schema_id, jsonb(schema), false);
assert_success_with_json!(cache_result, "Schema caching should succeed");
// Test 1: Has creating=true but missing both dependent fields
let missing_both = json!({
"creating": true,
"description": "Some description"
});
let result = validate_json_schema(schema_id, jsonb(missing_both));
// Should get 2 separate errors, one for each missing dependent field
assert_failure_with_json!(result, 2, "Field");
let errors = result.0["errors"].as_array().unwrap();
assert!(errors.iter().any(|e|
e["code"] == "DEPENDENCY_FAILED" &&
e["details"]["path"] == "/name" &&
e["message"] == "Field 'name' is required when 'creating' is present"
), "Missing error for dependent name field");
assert!(errors.iter().any(|e|
e["code"] == "DEPENDENCY_FAILED" &&
e["details"]["path"] == "/kind" &&
e["message"] == "Field 'kind' is required when 'creating' is present"
), "Missing error for dependent kind field");
// Test 2: Has creating=true with only one dependent field
let missing_one = json!({
"creating": true,
"name": "My Account"
});
let result_one = validate_json_schema(schema_id, jsonb(missing_one));
// Should get 1 error for the missing kind field
assert_failure_with_json!(result_one, 1, "Field 'kind' is required when 'creating' is present");
let errors_one = result_one.0["errors"].as_array().unwrap();
assert_eq!(errors_one[0]["details"]["path"], "/kind");
// Test 3: Has no creating field - no dependency errors
let no_creating = json!({
"description": "No creating field"
});
let result_no_creating = validate_json_schema(schema_id, jsonb(no_creating));
assert_success_with_json!(result_no_creating, "Should succeed when creating field is not present");
// Test 4: Has creating=false - dependencies still apply because field exists!
let creating_false = json!({
"creating": false,
"description": "Creating is false"
});
let result_false = validate_json_schema(schema_id, jsonb(creating_false));
// Dependencies are triggered by field existence, not value, so this should fail
assert_failure_with_json!(result_false, 2, "Field");
let errors_false = result_false.0["errors"].as_array().unwrap();
assert!(errors_false.iter().any(|e|
e["details"]["path"] == "/name"
), "Should have error for name when creating exists with false value");
assert!(errors_false.iter().any(|e|
e["details"]["path"] == "/kind"
), "Should have error for kind when creating exists with false value");
}
/// Dependency violations inside array items must carry full JSON-pointer
/// paths (e.g. `/items/0/name`), one error per missing dependent field.
#[pg_test]
fn test_nested_required_dependency_errors() {
    clear_json_schemas();
    let schema_id = "nested_dep_test";
    // Array of objects, each with its own required/dependencies clauses.
    let schema = json!({
        "type": "object",
        "properties": {
            "items": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "id": { "type": "string" },
                        "creating": { "type": "boolean" },
                        "name": { "type": "string" },
                        "kind": { "type": "string" }
                    },
                    "required": ["id"],
                    "dependencies": {
                        "creating": ["name", "kind"]
                    }
                }
            }
        },
        "required": ["items"]
    });
    let cache_result = cache_json_schema(schema_id, jsonb(schema), false);
    assert_success_with_json!(cache_result, "Schema caching should succeed");

    // First item misses name+kind; second item misses kind only.
    let instance = json!({
        "items": [
            {
                "id": "item1",
                "creating": true
                // Missing name and kind
            },
            {
                "id": "item2",
                "creating": true,
                "name": "Item 2"
                // Missing kind
            }
        ]
    });
    let result = validate_json_schema(schema_id, jsonb(instance));
    // 3 errors total: 2 for the first item, 1 for the second.
    assert_failure_with_json!(result, 3, "Field");
    let errors = result.0["errors"].as_array().unwrap();
    // Order-independent lookup for a DEPENDENCY_FAILED error at a given path.
    let has_dependency_at = |path: &str| {
        errors.iter().any(|e| e["details"]["path"] == path && e["code"] == "DEPENDENCY_FAILED")
    };
    assert!(has_dependency_at("/items/0/name"), "Missing error for first item's name");
    assert!(has_dependency_at("/items/0/kind"), "Missing error for first item's kind");
    assert!(has_dependency_at("/items/1/kind"), "Missing error for second item's kind");
}
#[pg_test]
fn test_additional_properties_split_errors() {
    clear_json_schemas();
    let schema_id = "additional_props_split_test";
    // Closed schema: only `name` and `age` are permitted keys.
    let schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "age": { "type": "number" }
        },
        "additionalProperties": false
    });
    let caching = cache_json_schema(schema_id, jsonb(schema), false);
    assert_success_with_json!(caching, "Schema caching should succeed");
    // Test 1: three unknown keys should each yield their own error.
    let instance_many_extras = json!({
        "name": "Alice",
        "age": 30,
        "extra1": "not allowed",
        "extra2": 42,
        "extra3": true
    });
    let outcome = validate_json_schema(schema_id, jsonb(instance_many_extras));
    assert_failure_with_json!(outcome, 3, "Property");
    let errors = outcome.0["errors"].as_array().unwrap();
    // One error per offending key, carrying its own path and message.
    for key in ["extra1", "extra2", "extra3"] {
        let path = format!("/{}", key);
        let message = format!("Property '{}' is not allowed", key);
        assert!(
            errors.iter().any(|e| {
                e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED"
                    && e["details"]["path"] == path.as_str()
                    && e["message"] == message.as_str()
            }),
            "Missing error for {} property",
            key
        );
    }
    // Test 2: a single unknown key produces exactly one error.
    let instance_one_extra = json!({
        "name": "Bob",
        "age": 25,
        "unauthorized": "field"
    });
    let outcome_one = validate_json_schema(schema_id, jsonb(instance_one_extra));
    assert_failure_with_json!(outcome_one, 1, "Property 'unauthorized' is not allowed");
    let errors_one = outcome_one.0["errors"].as_array().unwrap();
    assert_eq!(errors_one[0]["details"]["path"], "/unauthorized");
    // Test 3: the same rule applies inside nested objects, with nested paths.
    let nested_schema_id = "nested_additional_props_test";
    let nested_schema = json!({
        "type": "object",
        "properties": {
            "user": {
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                },
                "additionalProperties": false
            }
        }
    });
    let _ = cache_json_schema(nested_schema_id, jsonb(nested_schema), false);
    let nested_instance = json!({
        "user": {
            "name": "Charlie",
            "role": "admin",
            "level": 5
        }
    });
    let nested_outcome = validate_json_schema(nested_schema_id, jsonb(nested_instance));
    assert_failure_with_json!(nested_outcome, 2, "Property");
    let nested_errors = nested_outcome.0["errors"].as_array().unwrap();
    for key in ["role", "level"] {
        let path = format!("/user/{}", key);
        assert!(
            nested_errors.iter().any(|e| {
                e["details"]["path"] == path.as_str()
                    && e["code"] == "ADDITIONAL_PROPERTIES_NOT_ALLOWED"
            }),
            "Missing error for nested {} property",
            key
        );
    }
}
#[pg_test]
fn test_unevaluated_properties_errors() {
    clear_json_schemas();
    let schema_id = "unevaluated_test";
    // `unevaluatedProperties: false` is stricter than `additionalProperties`:
    // it also accounts for keys matched by patternProperties and by combined
    // subschemas (e.g. allOf branches).
    let schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "age": { "type": "number" }
        },
        "patternProperties": {
            "^attr_": { "type": "string" } // Properties starting with attr_ are allowed
        },
        "unevaluatedProperties": false // No other properties allowed
    });
    let caching = cache_json_schema(schema_id, jsonb(schema), false);
    assert_success_with_json!(caching, "Schema caching should succeed");
    // Test 1: three keys evaluated by nothing -> three FALSE_SCHEMA errors.
    let instance_uneval = json!({
        "name": "Alice",
        "age": 30,
        "attr_color": "blue", // This is OK - matches pattern
        "extra1": "not evaluated", // These should fail
        "extra2": 42,
        "extra3": true
    });
    let outcome = validate_json_schema(schema_id, jsonb(instance_uneval));
    assert_failure_with_json!(outcome, 3, "Schema validation always fails");
    let errors = outcome.0["errors"].as_array().unwrap();
    // Every reported error must be a FALSE_SCHEMA error...
    for err in errors {
        assert_eq!(err["code"], "FALSE_SCHEMA", "All unevaluated properties should generate FALSE_SCHEMA errors");
    }
    // ...and each offending key must be reported under its own path.
    for key in ["extra1", "extra2", "extra3"] {
        let path = format!("/{}", key);
        assert!(
            errors
                .iter()
                .any(|e| e["code"] == "FALSE_SCHEMA" && e["details"]["path"] == path.as_str()),
            "Missing error for {} property",
            key
        );
    }
    // Test 2: properties evaluated by allOf branches count as evaluated.
    let complex_schema_id = "complex_unevaluated_test";
    let complex_schema = json!({
        "type": "object",
        "allOf": [
            {
                "properties": {
                    "firstName": { "type": "string" }
                }
            },
            {
                "properties": {
                    "lastName": { "type": "string" }
                }
            }
        ],
        "properties": {
            "age": { "type": "number" }
        },
        "unevaluatedProperties": false
    });
    let _ = cache_json_schema(complex_schema_id, jsonb(complex_schema), false);
    // firstName/lastName are covered by allOf, age by the top-level schema;
    // nickname and title are covered by nothing.
    let complex_instance = json!({
        "firstName": "John",
        "lastName": "Doe",
        "age": 25,
        "nickname": "JD", // Not evaluated by any schema
        "title": "Mr" // Not evaluated by any schema
    });
    let complex_outcome = validate_json_schema(complex_schema_id, jsonb(complex_instance));
    assert_failure_with_json!(complex_outcome, 2, "Schema validation always fails");
    let complex_errors = complex_outcome.0["errors"].as_array().unwrap();
    assert!(
        complex_errors
            .iter()
            .any(|e| e["code"] == "FALSE_SCHEMA" && e["details"]["path"] == "/nickname"),
        "Missing error for nickname property"
    );
    assert!(
        complex_errors
            .iter()
            .any(|e| e["code"] == "FALSE_SCHEMA" && e["details"]["path"] == "/title"),
        "Missing error for title property"
    );
    // Test 3: an instance where every key is evaluated passes cleanly.
    let valid_instance = json!({
        "name": "Bob",
        "age": 40,
        "attr_style": "modern",
        "attr_theme": "dark"
    });
    let valid_outcome = validate_json_schema(schema_id, jsonb(valid_instance));
    assert_success_with_json!(valid_outcome, "All properties are evaluated, should pass");
}