diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..394e35b --- /dev/null +++ b/.editorconfig @@ -0,0 +1,10 @@ +root = true + +[*] +end_of_line = lf +insert_final_newline = true + +[*.{json,toml,control,rs}] +charset = utf-8 +indent_style = space +indent_size = 2 diff --git a/flow b/flow index 2a23bde..50e37e7 100755 --- a/flow +++ b/flow @@ -76,15 +76,41 @@ install() { version=$(get-version) || return 1 echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}" - + # Run the pgrx install command # It implicitly uses --release unless --debug is passed # It finds pg_config or you can add flags like --pg-config if needed - if ! cargo pgrx install "$@"; then # Pass any extra args like --debug + if ! cargo pgrx install; then echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2 return 1 fi echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}" + + # Post-install modification to allow non-superuser usage + # Get the installation path dynamically using pg_config + local pg_sharedir + pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir) + if [ -z "$pg_sharedir" ]; then + echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2 + return 1 + fi + local installed_control_path="${pg_sharedir}/extension/jspg.control" + + # Modify the control file + if [ ! 
-f "$installed_control_path" ]; then + echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2 + return 1 + fi + + echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}" + # Use sed -i '' for macOS compatibility + if sed -i '' '/^superuser = false/d' "$installed_control_path" && \ + echo 'trusted = true' >> "$installed_control_path"; then + echo -e "✨ ${GREEN}Control file modified successfully.${RESET}" + else + echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2 + return 1 + fi } test() { @@ -115,8 +141,8 @@ jspg-flow() { env) env; return 0;; prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;; build) build; return 0;; - install) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;; - reinstall) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;; + install) install; return 0;; + reinstall) clean; install; return 0;; test) test; return 0;; package) env; package; return 0;; release) env; release; return 0;; diff --git a/src/lib.rs b/src/lib.rs index 47c6197..5faf72e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,123 +4,234 @@ use serde_json::json; use std::collections::HashMap; use std::sync::RwLock; use lazy_static::lazy_static; -use jsonschema; pg_module_magic!(); // Global, thread-safe schema cache using the correct Validator type lazy_static! 
{ - static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new()); + static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new()); } // Cache a schema explicitly with a provided ID #[pg_extern(immutable, strict, parallel_safe)] -fn cache_schema(schema_id: &str, schema: JsonB) -> bool { - match jsonschema::options() - .with_draft(Draft::Draft7) - .should_validate_formats(true) - .build(&schema.0) - { - Ok(compiled) => { - SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled); - true - }, - Err(e) => { - notice!("Failed to cache schema '{}': {}", schema_id, e); - false - } +fn cache_schema(schema_id: &str, schema: JsonB) -> JsonB { + let schema_value = schema.0; + + // Compile the schema using the builder pattern + match jsonschema::options() + .with_draft(Draft::Draft7) + .should_validate_formats(true) + .build(&schema_value) + { + Ok(compiled_schema) => { + // If compilation succeeds, add it to the cache + let mut cache = SCHEMA_CACHE.write().unwrap(); + cache.insert(schema_id.to_string(), compiled_schema); + JsonB(json!({ "success": true, "id": schema_id })) } + Err(e) => { + // If compilation fails, return an error + JsonB(json!({ + "success": false, + "error": format!("Failed to compile schema '{}': {}", schema_id, e) + })) + } + } } // Check if a schema is cached #[pg_extern(immutable, strict, parallel_safe)] fn schema_cached(schema_id: &str) -> bool { - SCHEMA_CACHE.read().unwrap().contains_key(schema_id) + SCHEMA_CACHE.read().unwrap().contains_key(schema_id) } // Validate JSONB instance against a cached schema by ID #[pg_extern(immutable, strict, parallel_safe)] fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB { - let cache = SCHEMA_CACHE.read().unwrap(); - let compiled_schema: &Validator = match cache.get(schema_id) { - Some(schema) => schema, - None => { - return JsonB(json!({ - "valid": false, - "errors": [format!("Schema ID '{}' not cached", schema_id)] - })); - } - }; - - if compiled_schema.is_valid(&instance.0) { - 
JsonB(json!({ "valid": true })) - } else { - let errors: Vec<String> = compiled_schema - .iter_errors(&instance.0) - .map(|e| e.to_string()) - .collect(); - - JsonB(json!({ "valid": false, "errors": errors })) + let cache = SCHEMA_CACHE.read().unwrap(); + let compiled_schema: &Validator = match cache.get(schema_id) { + Some(schema) => schema, + None => { + // Return the 'schema not cached' error in the standard object format + let error_msg = format!("Schema ID '{}' not cached", schema_id); + return JsonB(json!({ + "valid": false, + "errors": [json!({ + "kind": "SchemaNotFound", // Custom kind for this case + "error": error_msg + })] + })); } + }; + + let instance_value = instance.0; + // Use iter_errors() to get all validation errors + let errors_iterator = compiled_schema.iter_errors(&instance_value); + + // Collect errors into a vector first to check if any exist + let collected_errors_result: Vec<_> = errors_iterator.collect(); + + if collected_errors_result.is_empty() { + // No errors found, validation passed + JsonB(json!({ "valid": true })) + } else { + // Errors found, format them + let error_details = collect_all_errors(collected_errors_result.into_iter()); + JsonB(json!({ + "valid": false, + "errors": error_details + })) + } +} + +fn format_validation_error(error: &jsonschema::ValidationError) -> serde_json::Value { + json!({ + "instance_path": error.instance_path.to_string(), + "schema_path": error.schema_path.to_string(), + "kind": format!("{:?}", error.kind), + "error": error.to_string() + }) +} + +// Simplified: Collects all validation errors by formatting each one. +// Assumes the iterator provided by iter_errors() gives all necessary detail. 
+fn collect_all_errors<'a>( + errors: impl Iterator<Item = jsonschema::ValidationError<'a>>, +) -> Vec<serde_json::Value> { + errors.map(|e| format_validation_error(&e)).collect() +} + +// Show the IDs of all schemas currently in the cache +#[pg_extern(immutable, parallel_safe)] +fn show_schema_cache() -> Vec<String> { + let cache = SCHEMA_CACHE.read().unwrap(); + cache.keys().cloned().collect() } // Clear the entire schema cache explicitly #[pg_extern(immutable, parallel_safe)] fn clear_schema_cache() -> bool { - SCHEMA_CACHE.write().unwrap().clear(); - true + SCHEMA_CACHE.write().unwrap().clear(); + true } #[pg_schema] #[cfg(any(test, feature = "pg_test"))] mod tests { - use pgrx::*; - use serde_json::json; + use pgrx::prelude::*; + use serde_json::json; + use pgrx::JsonB; // Import JsonB specifically for tests - #[pg_test] - fn test_cache_and_validate_schema() { - assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" })))); - assert!(crate::schema_cached("test_schema")); + // Helper to clear cache before tests that need it + fn setup_test() { + crate::clear_schema_cache(); + } - let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" }))); - assert_eq!(result_valid.0["valid"], true); + #[pg_test] + fn test_cache_and_validate_schema() { + setup_test(); + assert!(crate::cache_schema( + "test_schema", + JsonB(json!({ "type": "object" })) + ).0["success"] == json!(true)); + assert!(crate::schema_cached("test_schema")); - let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42))); - assert_eq!(result_invalid.0["valid"], false); - assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type")); - } + let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" }))); + assert_eq!(result_valid.0["valid"], true); - #[pg_test] - fn test_schema_not_cached() { - let result = crate::validate_schema("unknown_schema", JsonB(json!({}))); - assert_eq!(result.0["valid"], false); - 
assert!(result.0["errors"][0].as_str().unwrap().contains("not cached")); - } + let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42))); + assert_eq!(result_invalid.0["valid"], false); + assert!(result_invalid.0["errors"][0]["error"].as_str().unwrap().contains("is not of type \"object\"")); + } - #[pg_test] - fn test_clear_schema_cache() { - crate::cache_schema("clear_test", JsonB(json!({ "type": "object" }))); - assert!(crate::schema_cached("clear_test")); + #[pg_test] + fn test_schema_not_cached() { + setup_test(); + let result = crate::validate_schema("unknown_schema", JsonB(json!({}))); + assert_eq!(result.0["valid"], false); + assert!(result.0["errors"][0]["error"].as_str().unwrap().contains("not cached")); + } - crate::clear_schema_cache(); - assert!(!crate::schema_cached("clear_test")); - } + #[pg_test] + fn test_clear_schema_cache() { + setup_test(); + crate::cache_schema("clear_test", JsonB(json!({ "type": "object" }))); + assert!(crate::schema_cached("clear_test")); - #[pg_test] - fn test_invalid_schema_cache() { - let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" }))); - assert!(!result); - assert!(!crate::schema_cached("bad_schema")); - } + crate::clear_schema_cache(); + assert!(!crate::schema_cached("clear_test")); + } + + #[pg_test] + fn test_invalid_schema_cache() { + setup_test(); + // Attempt to cache an invalid schema definition + let result = crate::cache_schema( + "bad_schema", + JsonB(json!({ "type": "unknown_type" })) + ); + assert!(result.0["success"] == json!(false), "Caching an invalid schema should fail"); + assert!(!crate::schema_cached("bad_schema")); + } + + #[pg_test] + fn test_show_schema_cache() { + setup_test(); + assert!(crate::cache_schema("schema1", JsonB(json!({ "type": "string" }))).0["success"] == json!(true)); + assert!(crate::cache_schema("schema2", JsonB(json!({ "type": "number" }))).0["success"] == json!(true)); + + let mut cached_ids = crate::show_schema_cache(); + 
cached_ids.sort(); // Sort for deterministic comparison + + assert_eq!(cached_ids.len(), 2); + assert_eq!(cached_ids, vec!["schema1", "schema2"]); + + crate::clear_schema_cache(); + let empty_ids = crate::show_schema_cache(); + assert!(empty_ids.is_empty()); + } + + #[pg_test] + fn test_detailed_validation_errors() { + setup_test(); + let schema_id = "required_prop_schema"; + let schema = JsonB(json!({ + "title": "Test Required", + "type": "object", + "properties": { + "name": { "type": "string" }, + "age": { "type": "integer" } + }, + "required": ["name"] + })); + + assert!(crate::cache_schema(schema_id, schema).0["success"] == json!(true)); + + // Instance missing the required 'name' property + let invalid_instance = JsonB(json!({ "age": 30 })); + let result = crate::validate_schema(schema_id, invalid_instance); + + assert_eq!(result.0["valid"], false); + let errors = result.0["errors"].as_array().expect("Errors should be an array"); + assert_eq!(errors.len(), 1, "Should have exactly one error"); + + let error = &errors[0]; + eprintln!("Validation Error Details: {}", error); + + assert_eq!(error["instance_path"].as_str().unwrap(), "", "Instance path should be root"); + assert_eq!(error["schema_path"].as_str().unwrap(), "/required", "Schema path should point to required keyword"); + assert!(error["kind"].as_str().unwrap().contains("Required"), "Error kind should be Required"); + assert!(error["error"].as_str().unwrap().contains("is a required property"), "Error message mismatch"); + } } #[cfg(test)] pub mod pg_test { - pub fn setup(_options: Vec<&str>) { - // Initialization if needed - } + pub fn setup(_options: Vec<&str>) { + // Initialization if needed + } - pub fn postgresql_conf_options() -> Vec<&'static str> { - vec![] - } + pub fn postgresql_conf_options() -> Vec<&'static str> { + vec![] + } }