validation error fixes

.editorconfig | 10 (new file)

@@ -0,0 +1,10 @@
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+
+[*.{json,toml,control,rs}]
+charset = utf-8
+indent_style = space
+indent_size = 2

flow | 32

@@ -80,11 +80,37 @@ install() {
   # Run the pgrx install command
   # It implicitly uses --release unless --debug is passed
   # It finds pg_config or you can add flags like --pg-config if needed
-  if ! cargo pgrx install "$@"; then # Pass any extra args like --debug
+  if ! cargo pgrx install; then
     echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
     return 1
   fi
   echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
+
+  # Post-install modification to allow non-superuser usage
+  # Get the installation path dynamically using pg_config
+  local pg_sharedir
+  pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
+  if [ -z "$pg_sharedir" ]; then
+    echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
+    return 1
+  fi
+  local installed_control_path="${pg_sharedir}/extension/jspg.control"
+
+  # Modify the control file
+  if [ ! -f "$installed_control_path" ]; then
+    echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
+    return 1
+  fi
+
+  echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
+  # Use sed -i '' for macOS compatibility
+  if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
+     echo 'trusted = true' >> "$installed_control_path"; then
+    echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
+  else
+    echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
+    return 1
+  fi
 }
 
 test() {
@@ -115,8 +141,8 @@ jspg-flow() {
     env) env; return 0;;
     prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
     build) build; return 0;;
-    install) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
-    reinstall) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
+    install) install; return 0;;
+    reinstall) clean; install; return 0;;
     test) test; return 0;;
     package) env; package; return 0;;
     release) env; release; return 0;;
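
Not part of the diff: a minimal sketch of how one might sanity-check the control-file tweak above after running the install flow. It assumes `$POSTGRES_CONFIG_PATH` points at the same `pg_config` the script uses and that the extension is still named `jspg`.

# Hypothetical post-install check (illustration only, not in the commit):
# verify the installed control file was rewritten for trusted, non-superuser installs.
control="$("$POSTGRES_CONFIG_PATH" --sharedir)/extension/jspg.control"
if grep -q '^trusted = true' "$control" && ! grep -q '^superuser = false' "$control"; then
  echo "jspg.control allows non-superuser CREATE EXTENSION"
else
  echo "jspg.control still requires superuser" >&2
fi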

src/lib.rs | 157

@@ -4,7 +4,6 @@ use serde_json::json;
 use std::collections::HashMap;
 use std::sync::RwLock;
 use lazy_static::lazy_static;
-use jsonschema;
 
 pg_module_magic!();
 
@@ -15,19 +14,27 @@ lazy_static! {
 
 // Cache a schema explicitly with a provided ID
 #[pg_extern(immutable, strict, parallel_safe)]
-fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
+fn cache_schema(schema_id: &str, schema: JsonB) -> JsonB {
+    let schema_value = schema.0;
+
+    // Compile the schema using the builder pattern
     match jsonschema::options()
         .with_draft(Draft::Draft7)
         .should_validate_formats(true)
-        .build(&schema.0)
+        .build(&schema_value)
     {
-        Ok(compiled) => {
-            SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
-            true
-        },
+        Ok(compiled_schema) => {
+            // If compilation succeeds, add it to the cache
+            let mut cache = SCHEMA_CACHE.write().unwrap();
+            cache.insert(schema_id.to_string(), compiled_schema);
+            JsonB(json!({ "success": true, "id": schema_id }))
+        }
         Err(e) => {
-            notice!("Failed to cache schema '{}': {}", schema_id, e);
-            false
+            // If compilation fails, return an error
+            JsonB(json!({
+                "success": false,
+                "error": format!("Failed to compile schema '{}': {}", schema_id, e)
+            }))
         }
     }
 }
@@ -45,25 +52,62 @@ fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
     let compiled_schema: &Validator = match cache.get(schema_id) {
         Some(schema) => schema,
         None => {
+            // Return the 'schema not cached' error in the standard object format
+            let error_msg = format!("Schema ID '{}' not cached", schema_id);
             return JsonB(json!({
                 "valid": false,
-                "errors": [format!("Schema ID '{}' not cached", schema_id)]
+                "errors": [json!({
+                    "kind": "SchemaNotFound", // Custom kind for this case
+                    "error": error_msg
+                })]
             }));
         }
     };
 
-    if compiled_schema.is_valid(&instance.0) {
+    let instance_value = instance.0;
+    // Use iter_errors() to get all validation errors
+    let errors_iterator = compiled_schema.iter_errors(&instance_value);
+
+    // Collect errors into a vector first to check if any exist
+    let collected_errors_result: Vec<_> = errors_iterator.collect();
+
+    if collected_errors_result.is_empty() {
+        // No errors found, validation passed
         JsonB(json!({ "valid": true }))
     } else {
-        let errors: Vec<String> = compiled_schema
-            .iter_errors(&instance.0)
-            .map(|e| e.to_string())
-            .collect();
-
-        JsonB(json!({ "valid": false, "errors": errors }))
+        // Errors found, format them
+        let error_details = collect_all_errors(collected_errors_result.into_iter());
+        JsonB(json!({
+            "valid": false,
+            "errors": error_details
+        }))
     }
 }
+
+fn format_validation_error(error: &jsonschema::ValidationError) -> serde_json::Value {
+    json!({
+        "instance_path": error.instance_path.to_string(),
+        "schema_path": error.schema_path.to_string(),
+        "kind": format!("{:?}", error.kind),
+        "error": error.to_string()
+    })
+}
+
+// Simplified: Collects all validation errors by formatting each one.
+// Assumes the iterator provided by iter_errors() gives all necessary detail.
+fn collect_all_errors<'a>(
+    errors: impl Iterator<Item = jsonschema::ValidationError<'a>>,
+) -> Vec<serde_json::Value> {
+    errors.map(|e| format_validation_error(&e)).collect()
+}
+
+// Show the IDs of all schemas currently in the cache
+#[pg_extern(immutable, parallel_safe)]
+fn show_schema_cache() -> Vec<String> {
+    let cache = SCHEMA_CACHE.read().unwrap();
+    cache.keys().cloned().collect()
+}
 
 // Clear the entire schema cache explicitly
 #[pg_extern(immutable, parallel_safe)]
 fn clear_schema_cache() -> bool {
@@ -74,12 +118,22 @@ fn clear_schema_cache() -> bool {
 #[pg_schema]
 #[cfg(any(test, feature = "pg_test"))]
 mod tests {
-    use pgrx::*;
+    use pgrx::prelude::*;
     use serde_json::json;
+    use pgrx::JsonB; // Import JsonB specifically for tests
 
+    // Helper to clear cache before tests that need it
+    fn setup_test() {
+        crate::clear_schema_cache();
+    }
+
     #[pg_test]
     fn test_cache_and_validate_schema() {
-        assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
+        setup_test();
+        assert!(crate::cache_schema(
+            "test_schema",
+            JsonB(json!({ "type": "object" }))
+        ).0["success"] == json!(true));
         assert!(crate::schema_cached("test_schema"));
 
         let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
@@ -87,18 +141,20 @@ mod tests {
 
         let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
         assert_eq!(result_invalid.0["valid"], false);
-        assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type"));
+        assert!(result_invalid.0["errors"][0]["error"].as_str().unwrap().contains("is not of type \"object\""));
     }
 
     #[pg_test]
     fn test_schema_not_cached() {
+        setup_test();
         let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
         assert_eq!(result.0["valid"], false);
-        assert!(result.0["errors"][0].as_str().unwrap().contains("not cached"));
+        assert!(result.0["errors"][0]["error"].as_str().unwrap().contains("not cached"));
     }
 
     #[pg_test]
     fn test_clear_schema_cache() {
+        setup_test();
         crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
         assert!(crate::schema_cached("clear_test"));
 
@@ -108,10 +164,65 @@ mod tests {
 
     #[pg_test]
     fn test_invalid_schema_cache() {
-        let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
-        assert!(!result);
+        setup_test();
+        // Attempt to cache an invalid schema definition
+        let result = crate::cache_schema(
+            "bad_schema",
+            JsonB(json!({ "type": "unknown_type" }))
+        );
+        assert!(result.0["success"] == json!(false), "Caching an invalid schema should fail");
         assert!(!crate::schema_cached("bad_schema"));
     }
+
+    #[pg_test]
+    fn test_show_schema_cache() {
+        setup_test();
+        assert!(crate::cache_schema("schema1", JsonB(json!({ "type": "string" }))).0["success"] == json!(true));
+        assert!(crate::cache_schema("schema2", JsonB(json!({ "type": "number" }))).0["success"] == json!(true));
+
+        let mut cached_ids = crate::show_schema_cache();
+        cached_ids.sort(); // Sort for deterministic comparison
+
+        assert_eq!(cached_ids.len(), 2);
+        assert_eq!(cached_ids, vec!["schema1", "schema2"]);
+
+        crate::clear_schema_cache();
+        let empty_ids = crate::show_schema_cache();
+        assert!(empty_ids.is_empty());
+    }
+
+    #[pg_test]
+    fn test_detailed_validation_errors() {
+        setup_test();
+        let schema_id = "required_prop_schema";
+        let schema = JsonB(json!({
+            "title": "Test Required",
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "age": { "type": "integer" }
+            },
+            "required": ["name"]
+        }));
+
+        assert!(crate::cache_schema(schema_id, schema).0["success"] == json!(true));
+
+        // Instance missing the required 'name' property
+        let invalid_instance = JsonB(json!({ "age": 30 }));
+        let result = crate::validate_schema(schema_id, invalid_instance);
+
+        assert_eq!(result.0["valid"], false);
+        let errors = result.0["errors"].as_array().expect("Errors should be an array");
+        assert_eq!(errors.len(), 1, "Should have exactly one error");
+
+        let error = &errors[0];
+        eprintln!("Validation Error Details: {}", error);
+
+        assert_eq!(error["instance_path"].as_str().unwrap(), "", "Instance path should be root");
+        assert_eq!(error["schema_path"].as_str().unwrap(), "/required", "Schema path should point to required keyword");
+        assert!(error["kind"].as_str().unwrap().contains("Required"), "Error kind should be Required");
+        assert!(error["error"].as_str().unwrap().contains("is a required property"), "Error message mismatch");
+    }
 }
 
 #[cfg(test)]
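
Also outside the diff: a rough sketch of what the reworked return values look like when the functions are called from psql. It assumes the extension is installed and that pgrx exposes the functions under their Rust names (its default); the `person` schema id, the example schema, and the exact output wording are illustrative, though the paths and the "is a required property" text mirror what test_detailed_validation_errors asserts.

# Illustrative psql session (assumed setup; results are examples, not captured output).
psql -X -c "SELECT cache_schema('person', '{\"type\": \"object\", \"required\": [\"name\"]}'::jsonb);"
#   {"success": true, "id": "person"}
psql -X -c "SELECT validate_schema('person', '{\"age\": 30}'::jsonb);"
#   {"valid": false, "errors": [{"instance_path": "", "schema_path": "/required",
#    "kind": "Required {...}", "error": "\"name\" is a required property"}]}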