Compare commits

..

12 Commits
1.0.2 ... 1.0.8

Author SHA1 Message Date
67406c0b96 version: 1.0.8 2025-04-14 16:11:49 -04:00
28fff3be11 validation error fixes 2025-04-14 16:11:44 -04:00
70f3d30258 version: 1.0.7 2025-04-14 12:03:07 -04:00
406466454e excluding flows from jspg release 2025-04-14 12:03:01 -04:00
2a9d51fa77 version: 1.0.6 2025-04-14 11:24:22 -04:00
ae90137308 updated flows 2025-04-14 11:24:18 -04:00
d22a8669ef version: 1.0.5 2025-04-14 11:19:38 -04:00
b32c17a4f5 updated flow 2025-04-14 11:19:28 -04:00
79cce357e2 version: 1.0.4 2025-04-13 23:03:58 -04:00
512fa28b91 failed commit 2025-04-13 23:03:53 -04:00
a36120459b version: 1.0.3 2025-04-13 22:58:52 -04:00
19734a5b0d failed commit 2025-04-13 22:58:47 -04:00
5 changed files with 236 additions and 89 deletions

10
.editorconfig Normal file
View File

@@ -0,0 +1,10 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[*.{json,toml,control,rs}]
charset = utf-8
indent_style = space
indent_size = 2

48
flow
View File

@@ -11,7 +11,6 @@ POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
PACKAGE_NAME="jspg"
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"
@@ -53,7 +52,7 @@ build() {
local version
version=$(get-version) || return 1
local package_dir="./package"
local tarball_name="${GITEA_REPOSITORY}-src-v${version}.tar.gz"
local tarball_name="${GITEA_REPOSITORY}.tar.gz"
local tarball_path="${package_dir}/${tarball_name}"
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
@@ -64,7 +63,7 @@ build() {
# Create the source tarball excluding specified patterns
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
if tar --exclude='.git*' --exclude='./target' --exclude='./package' -czf "${tarball_path}" .; then
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
else
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
@@ -77,15 +76,41 @@ install() {
version=$(get-version) || return 1
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
# Run the pgrx install command
# It implicitly uses --release unless --debug is passed
# It finds pg_config or you can add flags like --pg-config if needed
if ! cargo pgrx install "$@"; then # Pass any extra args like --debug
if ! cargo pgrx install; then
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
return 1
fi
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
# Post-install modification to allow non-superuser usage
# Get the installation path dynamically using pg_config
local pg_sharedir
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
if [ -z "$pg_sharedir" ]; then
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
return 1
fi
local installed_control_path="${pg_sharedir}/extension/jspg.control"
# Modify the control file
if [ ! -f "$installed_control_path" ]; then
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
return 1
fi
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
# Use sed -i '' for macOS compatibility
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
echo 'trusted = true' >> "$installed_control_path"; then
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
else
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
return 1
fi
}
test() {
@@ -113,14 +138,15 @@ jspg-usage() {
jspg-flow() {
case "$1" in
prepare) env; base prepare; cargo-prepare; pgrx-prepare; return 0;;
build) env; build; return 0;;
install) env; base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
reinstall) env; base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
test) env; test; return 0;;
env) env; return 0;;
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
build) build; return 0;;
install) install; return 0;;
reinstall) clean; install; return 0;;
test) test; return 0;;
package) env; package; return 0;;
release) env; release; return 0;;
clean) env; clean; return 0;;
clean) clean; return 0;;
*) return 1 ;;
esac
}

2
flows

Submodule flows updated: 2487aa6a25...db55335254

View File

@@ -4,123 +4,234 @@ use serde_json::json;
use std::collections::HashMap;
use std::sync::RwLock;
use lazy_static::lazy_static;
use jsonschema;
pg_module_magic!();
// Global, thread-safe schema cache using the correct Validator type
lazy_static! {
static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
}
// Cache a schema explicitly with a provided ID
#[pg_extern(immutable, strict, parallel_safe)]
fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
match jsonschema::options()
.with_draft(Draft::Draft7)
.should_validate_formats(true)
.build(&schema.0)
{
Ok(compiled) => {
SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
true
},
Err(e) => {
notice!("Failed to cache schema '{}': {}", schema_id, e);
false
}
fn cache_schema(schema_id: &str, schema: JsonB) -> JsonB {
let schema_value = schema.0;
// Compile the schema using the builder pattern
match jsonschema::options()
.with_draft(Draft::Draft7)
.should_validate_formats(true)
.build(&schema_value)
{
Ok(compiled_schema) => {
// If compilation succeeds, add it to the cache
let mut cache = SCHEMA_CACHE.write().unwrap();
cache.insert(schema_id.to_string(), compiled_schema);
JsonB(json!({ "success": true, "id": schema_id }))
}
Err(e) => {
// If compilation fails, return an error
JsonB(json!({
"success": false,
"error": format!("Failed to compile schema '{}': {}", schema_id, e)
}))
}
}
}
// Check if a schema is cached
#[pg_extern(immutable, strict, parallel_safe)]
fn schema_cached(schema_id: &str) -> bool {
SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
}
// Validate JSONB instance against a cached schema by ID
#[pg_extern(immutable, strict, parallel_safe)]
fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let compiled_schema: &Validator = match cache.get(schema_id) {
Some(schema) => schema,
None => {
return JsonB(json!({
"valid": false,
"errors": [format!("Schema ID '{}' not cached", schema_id)]
}));
}
};
if compiled_schema.is_valid(&instance.0) {
JsonB(json!({ "valid": true }))
} else {
let errors: Vec<String> = compiled_schema
.iter_errors(&instance.0)
.map(|e| e.to_string())
.collect();
JsonB(json!({ "valid": false, "errors": errors }))
let cache = SCHEMA_CACHE.read().unwrap();
let compiled_schema: &Validator = match cache.get(schema_id) {
Some(schema) => schema,
None => {
// Return the 'schema not cached' error in the standard object format
let error_msg = format!("Schema ID '{}' not cached", schema_id);
return JsonB(json!({
"valid": false,
"errors": [json!({
"kind": "SchemaNotFound", // Custom kind for this case
"error": error_msg
})]
}));
}
};
let instance_value = instance.0;
// Use iter_errors() to get all validation errors
let errors_iterator = compiled_schema.iter_errors(&instance_value);
// Collect errors into a vector first to check if any exist
let collected_errors_result: Vec<_> = errors_iterator.collect();
if collected_errors_result.is_empty() {
// No errors found, validation passed
JsonB(json!({ "valid": true }))
} else {
// Errors found, format them
let error_details = collect_all_errors(collected_errors_result.into_iter());
JsonB(json!({
"valid": false,
"errors": error_details
}))
}
}
fn format_validation_error(error: &jsonschema::ValidationError) -> serde_json::Value {
json!({
"instance_path": error.instance_path.to_string(),
"schema_path": error.schema_path.to_string(),
"kind": format!("{:?}", error.kind),
"error": error.to_string()
})
}
// Simplified: Collects all validation errors by formatting each one.
// Assumes the iterator provided by iter_errors() gives all necessary detail.
fn collect_all_errors<'a>(
errors: impl Iterator<Item = jsonschema::ValidationError<'a>>,
) -> Vec<serde_json::Value> {
errors.map(|e| format_validation_error(&e)).collect()
}
// Show the IDs of all schemas currently in the cache
#[pg_extern(immutable, parallel_safe)]
fn show_schema_cache() -> Vec<String> {
let cache = SCHEMA_CACHE.read().unwrap();
cache.keys().cloned().collect()
}
// Clear the entire schema cache explicitly
#[pg_extern(immutable, parallel_safe)]
fn clear_schema_cache() -> bool {
SCHEMA_CACHE.write().unwrap().clear();
true
SCHEMA_CACHE.write().unwrap().clear();
true
}
#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
use pgrx::*;
use serde_json::json;
use pgrx::prelude::*;
use serde_json::json;
use pgrx::JsonB; // Import JsonB specifically for tests
#[pg_test]
fn test_cache_and_validate_schema() {
assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
assert!(crate::schema_cached("test_schema"));
// Helper to clear cache before tests that need it
fn setup_test() {
crate::clear_schema_cache();
}
let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
assert_eq!(result_valid.0["valid"], true);
#[pg_test]
fn test_cache_and_validate_schema() {
setup_test();
assert!(crate::cache_schema(
"test_schema",
JsonB(json!({ "type": "object" }))
).0["success"] == json!(true));
assert!(crate::schema_cached("test_schema"));
let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
assert_eq!(result_invalid.0["valid"], false);
assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type"));
}
let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
assert_eq!(result_valid.0["valid"], true);
#[pg_test]
fn test_schema_not_cached() {
let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
assert_eq!(result.0["valid"], false);
assert!(result.0["errors"][0].as_str().unwrap().contains("not cached"));
}
let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
assert_eq!(result_invalid.0["valid"], false);
assert!(result_invalid.0["errors"][0]["error"].as_str().unwrap().contains("is not of type \"object\""));
}
#[pg_test]
fn test_clear_schema_cache() {
crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
assert!(crate::schema_cached("clear_test"));
#[pg_test]
fn test_schema_not_cached() {
setup_test();
let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
assert_eq!(result.0["valid"], false);
assert!(result.0["errors"][0]["error"].as_str().unwrap().contains("not cached"));
}
crate::clear_schema_cache();
assert!(!crate::schema_cached("clear_test"));
}
#[pg_test]
fn test_clear_schema_cache() {
setup_test();
crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
assert!(crate::schema_cached("clear_test"));
#[pg_test]
fn test_invalid_schema_cache() {
let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
assert!(!result);
assert!(!crate::schema_cached("bad_schema"));
}
crate::clear_schema_cache();
assert!(!crate::schema_cached("clear_test"));
}
#[pg_test]
fn test_invalid_schema_cache() {
setup_test();
// Attempt to cache an invalid schema definition
let result = crate::cache_schema(
"bad_schema",
JsonB(json!({ "type": "unknown_type" }))
);
assert!(result.0["success"] == json!(false), "Caching an invalid schema should fail");
assert!(!crate::schema_cached("bad_schema"));
}
#[pg_test]
fn test_show_schema_cache() {
setup_test();
assert!(crate::cache_schema("schema1", JsonB(json!({ "type": "string" }))).0["success"] == json!(true));
assert!(crate::cache_schema("schema2", JsonB(json!({ "type": "number" }))).0["success"] == json!(true));
let mut cached_ids = crate::show_schema_cache();
cached_ids.sort(); // Sort for deterministic comparison
assert_eq!(cached_ids.len(), 2);
assert_eq!(cached_ids, vec!["schema1", "schema2"]);
crate::clear_schema_cache();
let empty_ids = crate::show_schema_cache();
assert!(empty_ids.is_empty());
}
#[pg_test]
fn test_detailed_validation_errors() {
setup_test();
let schema_id = "required_prop_schema";
let schema = JsonB(json!({
"title": "Test Required",
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer" }
},
"required": ["name"]
}));
assert!(crate::cache_schema(schema_id, schema).0["success"] == json!(true));
// Instance missing the required 'name' property
let invalid_instance = JsonB(json!({ "age": 30 }));
let result = crate::validate_schema(schema_id, invalid_instance);
assert_eq!(result.0["valid"], false);
let errors = result.0["errors"].as_array().expect("Errors should be an array");
assert_eq!(errors.len(), 1, "Should have exactly one error");
let error = &errors[0];
eprintln!("Validation Error Details: {}", error);
assert_eq!(error["instance_path"].as_str().unwrap(), "", "Instance path should be root");
assert_eq!(error["schema_path"].as_str().unwrap(), "/required", "Schema path should point to required keyword");
assert!(error["kind"].as_str().unwrap().contains("Required"), "Error kind should be Required");
assert!(error["error"].as_str().unwrap().contains("is a required property"), "Error message mismatch");
}
}
#[cfg(test)]
pub mod pg_test {
pub fn setup(_options: Vec<&str>) {
// Initialization if needed
}
pub fn setup(_options: Vec<&str>) {
// Initialization if needed
}
pub fn postgresql_conf_options() -> Vec<&'static str> {
vec![]
}
pub fn postgresql_conf_options() -> Vec<&'static str> {
vec![]
}
}

View File

@@ -1 +1 @@
1.0.2
1.0.8