improvements to error handling again: include the schema id in error details

2025-06-12 00:59:33 -04:00
parent 03beada825
commit 69ab6165bb
2 changed files with 95 additions and 10 deletions

View File

@ -51,7 +51,7 @@ fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
"code": "SCHEMA_RESOURCE_ADD_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"path": schema_path,
"schema": schema_id,
"cause": format!("{}", e)
}
}]
@ -72,7 +72,7 @@ fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
// Filter and format compilation errors; there is no instance document at this stage, so the schema value stands in for it
format_errors(error_list, &schema_value)
format_errors(error_list, &schema_value, schema_id)
}
_ => {
// Other compilation errors
@ -80,7 +80,7 @@ fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"path": schema_path,
"schema": schema_id,
"cause": format!("{:?}", e)
}
})]
@ -126,6 +126,7 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"schema": schema_id,
"cause": "Schema must be cached before validation"
}
}]
@ -137,7 +138,7 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
Err(validation_error) => {
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value);
let errors = format_errors(error_list, &instance_value, schema_id);
JsonB(json!({ "errors": errors }))
}
}
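With schema_id now threaded through to the formatter, every entry in the returned errors array names the schema it was validated against. As an illustration only, the /age minimum failure exercised in the tests below would come back roughly as follows; the message and cause strings are placeholders, since the real text comes from convert_error_kind and the underlying validator:

{
  "errors": [
    {
      "code": "MINIMUM_VIOLATED",
      "message": "<human-readable text from convert_error_kind>",
      "details": {
        "path": "/age",
        "context": -5,
        "cause": "<raw validator message>",
        "schema": "my_schema"
      }
    }
  ]
}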
@ -332,7 +333,7 @@ fn convert_error_kind(kind: &ErrorKind) -> (String, String) {
}
// Formats errors according to the DropError structure
fn format_errors(errors: Vec<Error>, instance: &Value) -> Vec<Value> {
fn format_errors(errors: Vec<Error>, instance: &Value, schema_id: &str) -> Vec<Value> {
// Deduplicate by instance_path and format as DropError
let mut unique_errors: HashMap<String, Value> = HashMap::new();
for error in errors {
@ -345,7 +346,8 @@ fn format_errors(errors: Vec<Error>, instance: &Value) -> Vec<Value> {
"details": {
"path": error.path,
"context": failing_value,
"cause": error.cause
"cause": error.cause,
"schema": schema_id
}
}));
}

View File

@ -155,6 +155,7 @@ fn test_cache_and_validate_json_schema() {
let errors_type = invalid_result_type.0["errors"].as_array().unwrap();
assert_eq!(errors_type[0]["details"]["path"], "/age");
assert_eq!(errors_type[0]["details"]["context"], -5);
assert_eq!(errors_type[0]["details"]["schema"], "my_schema");
assert_eq!(errors_type[0]["code"], "MINIMUM_VIOLATED");
// Missing field
@ -162,6 +163,7 @@ fn test_cache_and_validate_json_schema() {
assert_failure_with_json!(invalid_result_missing, 1, "Required field is missing", "Validation with missing field should fail.");
let errors_missing = invalid_result_missing.0["errors"].as_array().unwrap();
assert_eq!(errors_missing[0]["details"]["path"], "");
assert_eq!(errors_missing[0]["details"]["schema"], "my_schema");
assert_eq!(errors_missing[0]["code"], "REQUIRED_FIELD_MISSING");
// Schema not found
@ -170,6 +172,7 @@ fn test_cache_and_validate_json_schema() {
assert_failure_with_json!(invalid_schema_result, 1, "Schema 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
let errors_notfound = invalid_schema_result.0["errors"].as_array().unwrap();
assert_eq!(errors_notfound[0]["code"], "SCHEMA_NOT_FOUND");
assert_eq!(errors_notfound[0]["details"]["schema"], "non_existent_schema");
}
#[pg_test]
@ -213,6 +216,9 @@ fn test_cache_invalid_json_schema() {
.collect();
assert!(paths.contains(&"/type"));
assert!(paths.contains(&"/type/0"));
// Check schema field is present
assert_eq!(errors_array[0]["details"]["schema"], "invalid_schema");
assert_eq!(errors_array[1]["details"]["schema"], "invalid_schema");
}
#[pg_test]
@ -283,11 +289,13 @@ fn test_validate_json_schema_oneof_validation_errors() {
let errors_string = result_invalid_string.0["errors"].as_array().expect("Expected error array for invalid string");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "/string_prop" &&
e["code"] == "MAX_LENGTH_VIOLATED"
e["code"] == "MAX_LENGTH_VIOLATED" &&
e["details"]["schema"] == "oneof_schema"
), "Missing maxLength error");
assert!(errors_string.iter().any(|e|
e["details"]["path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["schema"] == "oneof_schema"
), "Missing number_prop required error");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
@ -299,11 +307,13 @@ fn test_validate_json_schema_oneof_validation_errors() {
let errors_number = result_invalid_number.0["errors"].as_array().expect("Expected error array for invalid number");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "/number_prop" &&
e["code"] == "MINIMUM_VIOLATED"
e["code"] == "MINIMUM_VIOLATED" &&
e["details"]["schema"] == "oneof_schema"
), "Missing minimum error");
assert!(errors_number.iter().any(|e|
e["details"]["path"] == "" &&
e["code"] == "REQUIRED_FIELD_MISSING"
e["code"] == "REQUIRED_FIELD_MISSING" &&
e["details"]["schema"] == "oneof_schema"
), "Missing string_prop required error");
// --- Test case 3: Fails type check (not object) for both branches ---
@ -317,6 +327,7 @@ fn test_validate_json_schema_oneof_validation_errors() {
assert_eq!(errors_bool.len(), 1, "Expected exactly one error after deduplication");
assert_eq!(errors_bool[0]["code"], "TYPE_MISMATCH");
assert_eq!(errors_bool[0]["details"]["path"], "");
assert_eq!(errors_bool[0]["details"]["schema"], "oneof_schema");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
@ -329,6 +340,7 @@ fn test_validate_json_schema_oneof_validation_errors() {
assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
assert_eq!(errors_empty[0]["code"], "REQUIRED_FIELD_MISSING");
assert_eq!(errors_empty[0]["details"]["path"], "");
assert_eq!(errors_empty[0]["details"]["schema"], "oneof_schema");
// The human message should be generic
assert_eq!(errors_empty[0]["message"], "Required field is missing");
}
@ -376,6 +388,73 @@ fn test_show_json_schemas() {
assert!(schemas.contains(&json!(schema_id2)));
}
#[pg_test]
fn test_root_level_type_mismatch() {
clear_json_schemas();
let schema_id = "array_schema";
// Schema expecting an array (like delete_tokens response)
let schema = json!({
"type": "array",
"items": {
"type": "object",
"properties": {
"id": { "type": "string", "format": "uuid" }
}
}
});
let cache_result = cache_json_schema(schema_id, jsonb(schema), false);
assert_success_with_json!(cache_result, "Schema caching should succeed");
// Test 1: Validate null against array schema (simulating delete_tokens issue)
let null_instance = json!(null);
let null_result = validate_json_schema(schema_id, jsonb(null_instance));
assert_failure_with_json!(null_result, 1, "Field type does not match the expected type");
let null_errors = null_result.0["errors"].as_array().unwrap();
assert_eq!(null_errors[0]["code"], "TYPE_MISMATCH");
assert_eq!(null_errors[0]["details"]["path"], ""); // Root level path should be empty string
assert_eq!(null_errors[0]["details"]["context"], json!(null));
assert_eq!(null_errors[0]["details"]["schema"], "array_schema");
assert!(null_errors[0]["details"]["cause"].as_str().unwrap().contains("want array"));
// Test 2: Validate object against array schema
let object_instance = json!({"id": "not-an-array"});
let object_result = validate_json_schema(schema_id, jsonb(object_instance.clone()));
assert_failure_with_json!(object_result, 1, "Field type does not match the expected type");
let object_errors = object_result.0["errors"].as_array().unwrap();
assert_eq!(object_errors[0]["code"], "TYPE_MISMATCH");
assert_eq!(object_errors[0]["details"]["path"], ""); // Root level path should be empty string
assert_eq!(object_errors[0]["details"]["context"], object_instance);
assert_eq!(object_errors[0]["details"]["schema"], "array_schema");
// Test 3: Valid empty array
let valid_empty = json!([]);
let valid_result = validate_json_schema(schema_id, jsonb(valid_empty));
assert_success_with_json!(valid_result, "Empty array should be valid");
// Test 4: Schema expecting object at root
let object_schema_id = "object_schema";
let object_schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" }
}
});
let _ = cache_json_schema(object_schema_id, jsonb(object_schema), false);
// String at root when object expected
let string_instance = json!("not an object");
let string_result = validate_json_schema(object_schema_id, jsonb(string_instance));
assert_failure_with_json!(string_result, 1, "Field type does not match the expected type");
let string_errors = string_result.0["errors"].as_array().unwrap();
assert_eq!(string_errors[0]["code"], "TYPE_MISMATCH");
assert_eq!(string_errors[0]["details"]["path"], ""); // Root level path
assert_eq!(string_errors[0]["details"]["schema"], "object_schema");
assert_eq!(string_errors[0]["details"]["context"], json!("not an object"));
}
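For the null-against-array case at the start of this test, the assertions above pin down the full shape of the single root-level error; spelled out as a payload (field values taken from the assertions, cause abbreviated):

{
  "errors": [
    {
      "code": "TYPE_MISMATCH",
      "message": "Field type does not match the expected type",
      "details": {
        "path": "",
        "context": null,
        "cause": "... want array",
        "schema": "array_schema"
      }
    }
  ]
}

With an empty path, the schema id in details is the main clue to which schema a root-level failure came from.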
#[pg_test]
fn test_auto_strict_validation() {
clear_json_schemas();
@ -452,6 +531,7 @@ fn test_auto_strict_validation() {
let errors_root = result_root_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_root[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_root[0]["details"]["path"], "/extraField");
assert_eq!(errors_root[0]["details"]["schema"], "strict_test");
// Should pass with non-strict schema
let result_root_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_root_extra));
@ -472,6 +552,7 @@ fn test_auto_strict_validation() {
let errors_nested = result_nested_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_nested[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_nested[0]["details"]["path"], "/profile/extraNested");
assert_eq!(errors_nested[0]["details"]["schema"], "strict_test");
// Should pass with non-strict schema
let result_nested_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_nested_extra));
@ -495,6 +576,7 @@ fn test_auto_strict_validation() {
let errors_deep = result_deep_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_deep[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_deep[0]["details"]["path"], "/profile/preferences/extraDeep");
assert_eq!(errors_deep[0]["details"]["schema"], "strict_test");
// Should pass with non-strict schema
let result_deep_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_deep_extra));
@ -514,6 +596,7 @@ fn test_auto_strict_validation() {
let errors_array = result_array_strict.0["errors"].as_array().unwrap();
assert_eq!(errors_array[0]["code"], "FALSE_SCHEMA");
assert_eq!(errors_array[0]["details"]["path"], "/tags/0/extraInArray");
assert_eq!(errors_array[0]["details"]["schema"], "strict_test");
// Should pass with non-strict schema
let result_array_non_strict = validate_json_schema(schema_id_non_strict, jsonb(invalid_array_item_extra));