Compare commits

...

20 Commits

Author SHA1 Message Date
3b18901bda version: 1.0.20 2025-04-21 17:11:30 -04:00
b8c0e08068 more filtering 2025-04-21 17:11:24 -04:00
c734983a59 version: 1.0.19 2025-04-21 16:15:08 -04:00
9b11f661bc fixed release bug 2025-04-21 16:15:02 -04:00
f3a733626e version: 1.0.18 2025-04-21 16:13:16 -04:00
2bcdb8adbb version: 1.0.17 2025-04-21 16:11:31 -04:00
3988308965 branch error filtering 2025-04-21 16:11:12 -04:00
b7f528d1f6 flow 2025-04-16 21:14:07 -04:00
2febb292dc flow update 2025-04-16 20:00:35 -04:00
d1831a28ec flow update 2025-04-16 19:34:09 -04:00
c5834ac544 flow updated 2025-04-16 18:07:41 -04:00
eb25f8489e version: 1.0.16 2025-04-16 14:43:07 -04:00
21937db8de improved compile schema error messages 2025-04-16 14:42:57 -04:00
28b689cac0 version: 1.0.15 2025-04-16 01:00:57 -04:00
cc04a1a8bb made errors consistent 2025-04-16 01:00:51 -04:00
3ceb8a0770 version: 1.0.14 2025-04-16 00:38:10 -04:00
499bf68b2a more error cleanup 2025-04-16 00:38:04 -04:00
6ca00f27e9 version: 1.0.13 2025-04-15 23:30:57 -04:00
520be66035 better error messaging 2025-04-15 23:30:47 -04:00
c3146ca433 flow update 2025-04-15 01:52:12 -04:00
6 changed files with 580 additions and 386 deletions

4
.env
View File

@ -1,7 +1,7 @@
ENVIRONMENT=local ENVIRONMENT=local
DATABASE_PASSWORD=2HwURf1Za7m5ZKtECAfQJGpni3832RV3 DATABASE_PASSWORD=tIr4TJ0qUwGVM0rlQSe3W7Tgpi33zPbk
DATABASE_ROLE=agreego_admin DATABASE_ROLE=agreego_admin
DATABASE_HOST=127.1.27.10 DATABASE_HOST=127.1.27.4
DATABASE_PORT=5432 DATABASE_PORT=5432
POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI= POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
DATABASE_NAME=agreego DATABASE_NAME=agreego

106
flow
View File

@ -9,7 +9,7 @@ source ./flows/rust
# Vars # Vars
POSTGRES_VERSION="17" POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config" POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}") DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0) CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
GITEA_ORGANIZATION="cellular" GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg" GITEA_REPOSITORY="jspg"
@ -20,133 +20,125 @@ env() {
# If not set, try to get it from kubectl # If not set, try to get it from kubectl
GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode) GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
if [ -z "$GITEA_TOKEN" ]; then if [ -z "$GITEA_TOKEN" ]; then
echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2 error "GITEA_TOKEN is not set and couldn't be retrieved from kubectl" >&2
exit 1 return 2
fi fi
export GITEA_TOKEN export GITEA_TOKEN
fi fi
echo -e "💰 ${GREEN}Environment variables set${RESET}" success "Environment variables set"
} }
pgrx-prepare() { pgrx-prepare() {
echo -e "${BLUE}Initializing pgrx...${RESET}" info "Initializing pgrx..."
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which' # Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config" local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}" error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}" warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
exit 1 return 2
fi fi
if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
echo -e "${GREEN}pgrx initialized successfully.${RESET}" success "pgrx initialized successfully."
else else
echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}" error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
exit 1 return 2
fi fi
} }
build() { build() {
local version local version
version=$(get-version) || return 1 version=$(get-version) || return $?
local package_dir="./package" local package_dir="./package"
local tarball_name="${GITEA_REPOSITORY}.tar.gz" local tarball_name="${GITEA_REPOSITORY}.tar.gz"
local tarball_path="${package_dir}/${tarball_name}" local tarball_path="${package_dir}/${tarball_name}"
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..." info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
# Clean previous package dir # Clean previous package dir
rm -rf "${package_dir}" rm -rf "${package_dir}"
mkdir -p "${package_dir}" mkdir -p "${package_dir}"
# Create the source tarball excluding specified patterns # Create the source tarball excluding specified patterns
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}" info "Creating tarball: ${tarball_path}"
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}" success "Successfully created source tarball: ${tarball_path}"
else else
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2 error "Failed to create source tarball."
return 1 return 2
fi fi
} }
install() { install() {
local version local version
version=$(get-version) || return 1 version=$(get-version) || return $? # Propagate error
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}" info "Building and installing PGRX extension v$version into local PostgreSQL..."
# Run the pgrx install command # Run the pgrx install command
# It implicitly uses --release unless --debug is passed
# It finds pg_config or you can add flags like --pg-config if needed
if ! cargo pgrx install; then if ! cargo pgrx install; then
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2 error "cargo pgrx install command failed."
return 1 return 2
fi fi
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}" success "PGRX extension v$version successfully built and installed."
# Post-install modification to allow non-superuser usage # Post-install modification to allow non-superuser usage
# Get the installation path dynamically using pg_config
local pg_sharedir local pg_sharedir
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir) pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
if [ -z "$pg_sharedir" ]; then local pg_config_status=$?
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2 if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
return 1 error "Failed to determine PostgreSQL shared directory using pg_config."
return 2
fi fi
local installed_control_path="${pg_sharedir}/extension/jspg.control" local installed_control_path="${pg_sharedir}/extension/jspg.control"
# Modify the control file # Modify the control file
if [ ! -f "$installed_control_path" ]; then if [ ! -f "$installed_control_path" ]; then
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2 error "Installed control file not found: '$installed_control_path'"
return 1 return 2
fi fi
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}" info "Modifying control file for non-superuser access: ${installed_control_path}"
# Use sed -i '' for macOS compatibility # Use sed -i '' for macOS compatibility
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \ if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
echo 'trusted = true' >> "$installed_control_path"; then echo 'trusted = true' >> "$installed_control_path"; then
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}" success "Control file modified successfully."
else else
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2 error "Failed to modify control file: ${installed_control_path}"
return 1 return 2
fi fi
} }
test() { test() {
echo -e "🧪 ${CYAN}Running jspg tests...${RESET}" info "Running jspg tests..."
cargo pgrx test "pg${POSTGRES_VERSION}" "$@" cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
} }
clean() { clean() {
echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}" info "Cleaning build artifacts..."
cargo clean # Use standard cargo clean cargo clean || return $?
} }
jspg-usage() { jspg-usage() {
echo -e " ${CYAN}JSPG Commands:${RESET}" printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
echo -e " prepare Check OS, Cargo, and PGRX dependencies." printf "install\tBuild and install the extension locally (after prepare).\n"
echo -e " install [opts] Run prepare, then build and install the extension locally." printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
echo -e " reinstall [opts] Run prepare, clean, then build and install the extension locally." printf "test\t\tRun pgrx integration tests.\n"
echo -e " test [opts] Run pgrx integration tests." printf "clean\t\tRemove pgrx build artifacts.\n"
echo -e " clean Remove pgrx build artifacts."
echo -e " build Build release artifacts into ./package/ (called by release)."
echo -e " tag Tag the current version (called by release)."
echo -e " package Upload artifacts from ./package/ (called by release)."
echo -e " release Perform a full release (increments patch, builds, tags, pushes, packages)."
} }
jspg-flow() { jspg-flow() {
case "$1" in case "$1" in
env) env; return 0;; env) env; return $?;;
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;; prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
build) build; return 0;; build) build; return $?;;
install) install; return 0;; install) install; return $?;;
reinstall) clean; install; return 0;; reinstall) clean && install; return $?;;
test) test; return 0;; test) test "${@:2}"; return $?;;
package) env; package; return 0;; clean) clean; return $?;;
release) env; release; return 0;; release) env; release; return $?;;
clean) clean; return 0;;
*) return 1 ;; *) return 1 ;;
esac esac
} }

2
flows

Submodule flows updated: db55335254...3e3954fb79

View File

@ -23,56 +23,53 @@ lazy_static! {
fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB { fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap(); let mut cache = SCHEMA_CACHE.write().unwrap();
let schema_value: Value = schema.0; let schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);
let mut compiler = Compiler::new(); let mut compiler = Compiler::new();
compiler.enable_format_assertions(); compiler.enable_format_assertions();
if let Err(e) = compiler.add_resource(schema_id, schema_value) { // Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
return JsonB(json!({ return JsonB(json!({
"success": false, "success": false,
"error": { "error": {
"kind": "SchemaResourceError", "message": format!("Failed to add schema resource '{}': {}", schema_id, e),
"message": format!("Failed to add schema resource: {}", e), "schema_path": schema_path
"schema_id": schema_id
} }
})); }));
} }
match compiler.compile(schema_id, &mut cache.schemas) { // Use schema_path when compiling
match compiler.compile(&schema_path, &mut cache.schemas) {
Ok(sch_index) => { Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index); cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ "success": true })) JsonB(json!({ "success": true }))
} }
Err(e) => { Err(e) => {
// Enhance error reporting by matching on the CompileError variant let error = match &e {
let error_details = match &e { CompileError::ValidationError { url: _url, src } => {
CompileError::ValidationError { url, src } => { // Collect leaf errors from the meta-schema validation failure
// Metaschema validation failed - provide more detail let mut error_list = Vec::new();
json!({ collect_leaf_errors(src, &mut error_list);
"kind": "SchemaCompilationError", // Filter and deduplicate errors, returning as a single JSON Value (Array)
"sub_kind": "ValidationError", // Explicitly state it's a metaschema validation error json!(filter_boon_errors(error_list))
"message": format!("Schema failed validation against its metaschema: {}", src), }
"schema_id": schema_id, _ => {
"failed_at_url": url, // Keep existing handling for other compilation errors
"validation_details": format!("{:?}", src), // Include full debug info of the validation error let _error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string();
}) json!({
} "message": format!("Schema '{}' compilation failed: {}", schema_id, e),
// Handle other potential compilation errors "schema_path": schema_path,
_ => { "detail": format!("{:?}", e),
let error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string(); })
json!({ }
"kind": "SchemaCompilationError", };
"sub_kind": error_type, // e.g., "InvalidJsonPointer", "UnsupportedUrlScheme" // Ensure the outer structure remains { success: false, error: ... }
"message": format!("Schema compilation failed: {}", e), JsonB(json!({
"schema_id": schema_id, "success": false,
"details": format!("{:?}", e), // Generic debug info "error": error
}) }))
}
};
JsonB(json!({
"success": false,
"error": error_details
}))
} }
} }
} }
@ -81,11 +78,11 @@ fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB { fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap(); let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id
match cache.id_to_index.get(schema_id) { match cache.id_to_index.get(schema_id) {
None => JsonB(json!({ None => JsonB(json!({
"success": false, "success": false,
"error": { "error": {
"kind": "SchemaNotFound",
"message": format!("Schema with id '{}' not found in cache", schema_id) "message": format!("Schema with id '{}' not found in cache", schema_id)
} }
})), })),
@ -94,10 +91,13 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
match cache.schemas.validate(&instance_value, *sch_index) { match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "success": true })), Ok(_) => JsonB(json!({ "success": true })),
Err(validation_error) => { Err(validation_error) => {
let error = format_validation_error(&validation_error); // Directly use the result of format_validation_error
// which now includes the top-level success indicator and flat error list
let mut error_list = Vec::new();
collect_leaf_errors(&validation_error, &mut error_list);
JsonB(json!({ JsonB(json!({
"success": false, "success": false,
"error": error "error": filter_boon_errors(error_list) // Filter and deduplicate errors
})) }))
} }
} }
@ -105,18 +105,71 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
} }
} }
fn format_validation_error(error: &ValidationError) -> Value { // Recursively collects leaf errors into a flat list
json!({ fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
"instance_path": error.instance_location.to_string(), if error.causes.is_empty() {
"schema_path": error.schema_url.to_string(), let default_message = format!("{}", error);
"kind": format!("{:?}", error.kind), let message = if let Some(start_index) = default_message.find("': ") {
"message": format!("{}", error), default_message[start_index + 3..].to_string()
"error": error } else {
.causes default_message
.iter() };
.map(format_validation_error)
.collect::<Vec<_>>() errors_list.push(json!({
}) "message": message,
"schema_path": error.schema_url.to_string(),
"instance_path": error.instance_location.to_string(),
}));
} else {
for cause in &error.causes {
collect_leaf_errors(cause, errors_list);
}
}
}
// Filters collected errors, removing structural noise and then deduplicating by instance_path
fn filter_boon_errors(raw_errors: Vec<Value>) -> Vec<Value> {
use std::collections::HashMap;
use std::collections::hash_map::Entry;
// Define schema keywords that indicate structural paths, not instance paths
let structural_path_segments = [
"/allOf/", "/anyOf/", "/oneOf/",
"/if/", "/then/", "/else/",
"/not/"
// Note: "/properties/" and "/items/" are generally valid,
// but might appear spuriously in boon's paths for complex types.
// We exclude only the explicitly logical/combinatorial ones for now.
];
// 1. Filter out errors with instance_paths containing structural segments
let plausible_errors: Vec<Value> = raw_errors.into_iter().filter(|error_value| {
if let Some(instance_path_value) = error_value.get("instance_path") {
if let Some(instance_path_str) = instance_path_value.as_str() {
// Keep if NONE of the structural segments are present
!structural_path_segments.iter().any(|&segment| instance_path_str.contains(segment))
} else {
false // Invalid instance_path type, filter out
}
} else {
false // No instance_path field, filter out
}
}).collect();
// 2. Deduplicate the remaining plausible errors by instance_path
let mut unique_errors: HashMap<String, Value> = HashMap::new();
for error_value in plausible_errors {
if let Some(instance_path_value) = error_value.get("instance_path") {
if let Some(instance_path_str) = instance_path_value.as_str() {
if let Entry::Vacant(entry) = unique_errors.entry(instance_path_str.to_string()) {
entry.insert(error_value);
}
}
}
}
// Collect the unique errors
unique_errors.into_values().collect()
} }
#[pg_extern(strict, parallel_safe)] #[pg_extern(strict, parallel_safe)]
@ -141,290 +194,24 @@ fn show_json_schemas() -> Vec<String> {
ids ids
} }
#[pg_schema] /// This module is required by `cargo pgrx test` invocations.
#[cfg(any(test, feature = "pg_test"))] /// It must be visible at the root of your extension crate.
mod tests {
use pgrx::*;
use pgrx::pg_test;
use super::*;
use serde_json::json;
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
fn setup_test() {
clear_json_schemas();
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
setup_test();
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert!(cache_result.0["success"].as_bool().unwrap());
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert!(valid_result.0["success"].as_bool().unwrap());
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert!(!invalid_result_type.0["success"].as_bool().unwrap());
let error_obj_type = invalid_result_type.0.get("error").expect("Expected top-level 'error' object");
let causes_age = error_obj_type.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert!(!causes_age.is_empty(), "Expected causes for invalid age");
let first_cause_age = &causes_age[0];
assert!(first_cause_age["kind"].as_str().unwrap().contains("Minimum"), "Kind '{}' should contain Minimum", first_cause_age["kind"]);
let msg = first_cause_age["message"].as_str().unwrap_or("");
assert!(msg.contains("must be >=0"), "Error message mismatch for age minimum: {}", msg);
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert!(!invalid_result_missing.0["success"].as_bool().unwrap());
let error_obj_missing = invalid_result_missing.0.get("error").expect("Expected top-level 'error' object");
let causes_missing = error_obj_missing.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes) for missing");
assert!(!causes_missing.is_empty(), "Expected causes for missing age");
let first_cause_missing = &causes_missing[0];
assert!(first_cause_missing["kind"].as_str().unwrap().contains("Required"));
let msg_missing = first_cause_missing["message"].as_str().unwrap_or("");
assert!(msg_missing.contains("missing properties 'age'"), "Error message mismatch for missing 'age': {}", msg_missing);
assert!(first_cause_missing["instance_path"] == "", "Expected empty instance path for missing field");
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert!(!invalid_schema_result.0["success"].as_bool().unwrap());
let schema_not_found_error = invalid_schema_result.0
.get("error") // Top level error object
.expect("Expected top-level 'error' object for schema not found");
assert_eq!(schema_not_found_error["kind"], "SchemaNotFound");
assert!(schema_not_found_error["message"].as_str().unwrap().contains(non_existent_id));
}
#[pg_test]
fn test_validate_json_schema_not_cached() {
setup_test();
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
assert!(!result.0["success"].as_bool().unwrap());
let error_obj = result.0.get("error").expect("Expected top-level 'error' object");
assert_eq!(error_obj["kind"], "SchemaNotFound");
assert!(error_obj["message"].as_str().unwrap().contains("non_existent_schema"));
}
#[pg_test]
fn test_cache_invalid_json_schema() {
setup_test();
let schema_id = "invalid_schema";
let invalid_schema_json = "{\"type\": \"string\" \"maxLength\": 5}";
let invalid_schema_value: Result<Value, _> = serde_json::from_str(invalid_schema_json);
assert!(invalid_schema_value.is_err(), "Test setup assumes invalid JSON string");
let schema_representing_invalid = json!({
"type": 123
});
let result = cache_json_schema(schema_id, jsonb(schema_representing_invalid.clone()));
assert!(!result.0["success"].as_bool().unwrap());
let error_obj = result.0.get("error").expect("Expected top-level 'error' object for compilation failure");
assert_eq!(error_obj.get("kind").and_then(Value::as_str), Some("SchemaCompilationError"));
assert_eq!(error_obj.get("sub_kind").and_then(Value::as_str), Some("ValidationError"), "Expected sub_kind 'ValidationError' for metaschema failure");
assert!(error_obj.get("message").and_then(Value::as_str).is_some(), "Expected 'message' field in error object");
assert!(error_obj["message"].as_str().unwrap().contains("Schema failed validation against its metaschema"), "Error message mismatch");
assert_eq!(error_obj.get("schema_id").and_then(Value::as_str), Some(schema_id));
let failed_at_url = error_obj.get("failed_at_url").and_then(Value::as_str).expect("Expected 'failed_at_url' string");
assert!(failed_at_url.ends_with(&format!("{}#", schema_id)), "failed_at_url ('{}') should end with schema_id + '#' ('{}#')", failed_at_url, schema_id);
assert!(error_obj.get("validation_details").and_then(Value::as_str).is_some(), "Expected 'validation_details' field");
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
setup_test();
let schema_id = "detailed_schema";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let invalid_instance = json!({
"address": {
"street": 123,
"city": "Supercalifragilisticexpialidocious"
}
});
assert!(cache_json_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
assert!(!result.0["success"].as_bool().unwrap());
let error_obj = result.0.get("error").expect("Expected top-level 'error' object");
let causes = error_obj.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert!(causes.len() >= 2, "Expected at least 2 detailed causes");
let street_error = causes.iter().find(|e| e["instance_path"] == "/address/street").expect("Missing street error");
assert!(street_error["kind"].as_str().unwrap().contains("Type"), "Kind '{}' should contain Type", street_error["kind"]);
let street_msg = street_error["message"].as_str().unwrap_or("null");
assert!(street_msg.contains("want string, but got number"), "Street message mismatch: {}", street_msg);
let city_error = causes.iter().find(|e| e["instance_path"] == "/address/city").expect("Missing city error");
assert!(city_error["kind"].as_str().unwrap().contains("MaxLength"), "Kind '{}' should contain MaxLength", city_error["kind"]);
let city_msg = city_error["message"].as_str().unwrap_or("null");
assert!(city_msg.contains("length must be <=10"), "City message mismatch: {}", city_msg);
assert_eq!(causes.len(), 2, "Expected exactly 2 errors (street type, city length)");
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
setup_test();
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
cache_json_schema(schema_id, jsonb(schema));
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
assert!(!result_invalid_string.0["success"].as_bool().unwrap());
let error_obj_string = result_invalid_string.0.get("error").expect("Expected top-level 'error' object");
assert!(error_obj_string["kind"].as_str().unwrap().contains("Schema"), "Top level kind '{}' should contain Schema for OneOf failure", error_obj_string["kind"]);
assert!(error_obj_string["message"].as_str().unwrap().contains("oneOf failed, none matched"), "OneOf message mismatch: {}", error_obj_string["message"]); // Final adjustment
let causes_string = error_obj_string.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert_eq!(causes_string.len(), 1, "Expected one cause for oneOf failure (string)");
let nested_causes_string = causes_string[0].get("error").and_then(Value::as_array).expect("Expected deeper nested causes for string oneOf");
assert_eq!(nested_causes_string.len(), 2, "Expected two nested causes for string oneOf");
let string_schema_fail = nested_causes_string.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/0/properties/string_prop")).expect("Missing nested cause for string schema");
assert_eq!(string_schema_fail["instance_path"].as_str().unwrap(), "/string_prop", "Instance path should be /string_prop");
assert!(string_schema_fail["kind"].as_str().unwrap().contains("MaxLength"), "Nested string cause kind should be MaxLength");
let number_schema_fail = nested_causes_string.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/1")).expect("Missing nested cause for number schema");
assert_eq!(number_schema_fail["instance_path"].as_str().unwrap(), "", "Instance path for branch 2 type mismatch should be empty");
assert!(number_schema_fail["kind"].as_str().unwrap().contains("Required"), "Nested number cause kind should be Required");
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
assert!(!result_invalid_number.0["success"].as_bool().unwrap());
let error_obj_number = result_invalid_number.0.get("error").expect("Expected top-level 'error' object");
assert!(error_obj_number["kind"].as_str().unwrap().contains("Schema"), "Top level kind '{}' should contain Schema for OneOf failure", error_obj_number["kind"]);
assert!(error_obj_number["message"].as_str().unwrap().contains("oneOf failed, none matched"), "OneOf message mismatch: {}", error_obj_number["message"]); // Final adjustment
let causes_number = error_obj_number.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert_eq!(causes_number.len(), 1, "Expected one cause for oneOf failure (number)");
let nested_causes_number = causes_number[0].get("error").and_then(Value::as_array).expect("Expected deeper nested causes for number oneOf");
assert_eq!(nested_causes_number.len(), 2, "Expected two nested causes for number oneOf");
let string_schema_fail_num = nested_causes_number.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/0")).expect("Missing nested cause for string schema (number case)");
assert_eq!(string_schema_fail_num["instance_path"].as_str().unwrap(), "", "Instance path for branch 1 type mismatch should be empty");
assert!(string_schema_fail_num["kind"].as_str().unwrap().contains("Required"), "Nested string cause kind should be Required (number case)");
let number_schema_fail_num = nested_causes_number.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/1/properties/number_prop")).expect("Missing nested cause for number schema (number case)");
assert_eq!(number_schema_fail_num["instance_path"].as_str().unwrap(), "/number_prop", "Instance path should be /number_prop (number case)");
assert!(number_schema_fail_num["kind"].as_str().unwrap().contains("Minimum"), "Nested number cause kind should be Minimum (number case)");
let invalid_bool_instance = json!({ "other_prop": true });
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
assert!(!result_invalid_bool.0["success"].as_bool().unwrap());
let error_obj_bool = result_invalid_bool.0.get("error").expect("Expected top-level 'error' object");
assert!(error_obj_bool["kind"].as_str().unwrap().contains("Schema"), "Top level kind '{}' should contain Schema for OneOf failure", error_obj_bool["kind"]);
assert!(error_obj_bool["message"].as_str().unwrap().contains("oneOf failed, none matched"), "OneOf message mismatch: {}", error_obj_bool["message"]); // Final adjustment
let causes_bool = error_obj_bool.get("error").and_then(Value::as_array).expect("Expected nested 'error' array (causes)");
assert_eq!(causes_bool.len(), 1, "Expected one cause for oneOf failure (bool)");
let nested_causes_bool = causes_bool[0].get("error").and_then(Value::as_array).expect("Expected deeper nested causes for bool oneOf");
assert_eq!(nested_causes_bool.len(), 2, "Expected two nested causes for bool oneOf");
let bool_fail_0 = nested_causes_bool.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/0")).expect("Missing nested cause for branch 0 type fail");
assert_eq!(bool_fail_0["instance_path"].as_str().unwrap(), "", "Instance path for branch 0 type fail should be empty");
assert!(bool_fail_0["kind"].as_str().unwrap().contains("Required"), "Nested bool cause 0 kind should be Required");
let bool_fail_1 = nested_causes_bool.iter().find(|c| c["schema_path"].as_str().unwrap().ends_with("/oneOf/1")).expect("Missing nested cause for branch 1 type fail");
assert_eq!(bool_fail_1["instance_path"].as_str().unwrap(), "", "Instance path for branch 1 type fail should be empty");
assert!(bool_fail_1["kind"].as_str().unwrap().contains("Required"), "Nested bool cause 1 kind should be Required");
}
#[pg_test]
fn test_clear_json_schemas() {
    // Verify clear_json_schemas() empties the cache and that validating
    // against a cleared id reports a SchemaNotFound error object.
    setup_test();
    let schema_id = "schema_to_clear";
    let schema = json!({ "type": "string" });
    // `schema` is not used again, so pass it by value (the original cloned
    // it redundantly). Also assert the cache call actually succeeded so a
    // setup failure is reported here, not as a confusing failure below.
    let cache_result = cache_json_schema(schema_id, jsonb(schema));
    assert!(
        cache_result.0["success"].as_bool().unwrap_or(false),
        "Caching should succeed before the clear test runs"
    );
    let show_result1 = show_json_schemas();
    assert!(show_result1.contains(&schema_id.to_string()));
    clear_json_schemas();
    let show_result2 = show_json_schemas();
    assert!(show_result2.is_empty());
    // The cleared schema must now be reported as missing.
    let instance = json!("test");
    let validate_result = validate_json_schema(schema_id, jsonb(instance));
    assert!(!validate_result.0["success"].as_bool().unwrap());
    let error_obj = validate_result.0.get("error").expect("Expected top-level 'error' object");
    assert_eq!(error_obj["kind"], "SchemaNotFound");
    assert!(error_obj["message"].as_str().unwrap().contains(schema_id));
}
#[pg_test]
fn test_show_json_schemas() {
    // show_json_schemas() should list every cached schema id.
    setup_test();
    let schema_id1 = "schema1";
    let schema_id2 = "schema2";
    let schema = json!({ "type": "boolean" });
    // Clone only for the first call; the second call can take ownership
    // (the original cloned for both, making one clone redundant).
    cache_json_schema(schema_id1, jsonb(schema.clone()));
    cache_json_schema(schema_id2, jsonb(schema));
    let result = show_json_schemas();
    assert!(result.contains(&schema_id1.to_string()));
    assert!(result.contains(&schema_id2.to_string()));
}
}
#[cfg(test)]
pub mod pg_test {
    pub fn setup(_options: Vec<&str>) {
        // perform one-off initialization when the pg_test framework starts
    }
    #[must_use]
    pub fn postgresql_conf_options() -> Vec<&'static str> {
        // return any postgresql.conf settings that are required for your tests
        vec![]
    }
}
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
    include!("tests.rs");
}

415
src/tests.rs Normal file
View File

@ -0,0 +1,415 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};
/// Asserts that a returned `JsonB` result has `"success": true`, panicking
/// with a pretty-printed dump of the full JSON result otherwise.
///
/// Two forms: with a `format!`-style context message, or bare.
macro_rules! assert_success_with_json {
    ($res:expr, $fmt:literal $(, $($args:tt)*)?) => {
        match $res.0.get("success").and_then(Value::as_bool) {
            Some(true) => {}
            _ => {
                let base_msg = format!($fmt $(, $($args)*)?);
                let pretty_json = serde_json::to_string_pretty(&$res.0)
                    .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $res.0));
                panic!("Assertion Failed (expected success): {}\nResult JSON:\n{}", base_msg, pretty_json);
            }
        }
    };
    // Bare form: same check, no caller-supplied context message.
    ($res:expr) => {
        match $res.0.get("success").and_then(Value::as_bool) {
            Some(true) => {}
            _ => {
                let pretty_json = serde_json::to_string_pretty(&$res.0)
                    .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $res.0));
                panic!("Assertion Failed (expected success)\nResult JSON:\n{}", pretty_json);
            }
        }
    };
}
// Updated helper macro for asserting failed JSON results with the new flat error structure.
//
// Accepted forms (each also available without the trailing context message):
//   assert_failure_with_json!(result, expected_count, msg_substring, "ctx {}", args);
//   assert_failure_with_json!(result, expected_count, "ctx {}", args);   // count only
//   assert_failure_with_json!(result, "ctx {}", args);                   // failure only
//
// Semantics common to all arms:
//   * `success` must be exactly `false`.
//   * `error` may be an array of leaf errors or a single error object
//     (e.g. "schema not found"); a single object counts as 1 error.
//
// NOTE(review): arm order matters for macro dispatch — a 3-argument call
// like `(result, 2, "msg")` binds to the substring-checking arm below, so
// the count-only form with a context message is only reachable when format
// args follow the literal.
macro_rules! assert_failure_with_json {
    // --- Arms with error count and message substring check ---
    // With custom message:
    ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
        let json_result = &$result.0;
        let success = json_result.get("success").and_then(Value::as_bool);
        let error_val_opt = json_result.get("error"); // Changed key
        let base_msg = format!($fmt $(, $($args)*)?);
        if success != Some(false) {
            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
        }
        match error_val_opt {
            Some(error_val) => {
                if error_val.is_array() {
                    let errors_array = error_val.as_array().unwrap();
                    if errors_array.len() != $expected_error_count {
                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                        panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
                    }
                    // The substring check only applies to the first array element.
                    if $expected_error_count > 0 {
                        let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
                        match first_error_message {
                            Some(msg) => {
                                if !msg.contains($expected_first_message_contains) {
                                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                                    panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
                                }
                            }
                            None => {
                                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                                panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
                            }
                        }
                    }
                } else if error_val.is_object() {
                    // Handle single error object case (like 'schema not found')
                    if $expected_error_count != 1 {
                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                        panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, pretty_json);
                    }
                    let message = error_val.get("message").and_then(Value::as_str);
                    match message {
                        Some(msg) => {
                            if !msg.contains($expected_first_message_contains) {
                                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                                panic!("Assertion Failed (error message mismatch): Expected object message contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
                            }
                        }
                        None => {
                            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                            panic!("Assertion Failed (error object has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
                        }
                    }
                } else {
                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                    panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
                }
            }
            None => {
                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
            }
        }
    };
    // Without custom message (calls the one above with ""):
    ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
        assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
    };
    // --- Arms with error count check only ---
    // With custom message:
    ($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
        let json_result = &$result.0;
        let success = json_result.get("success").and_then(Value::as_bool);
        let error_val_opt = json_result.get("error"); // Changed key
        let base_msg = format!($fmt $(, $($args)*)?);
        if success != Some(false) {
            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
        }
        match error_val_opt {
            Some(error_val) => {
                if error_val.is_array() {
                    let errors_array = error_val.as_array().unwrap();
                    if errors_array.len() != $expected_error_count {
                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                        panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
                    }
                } else if error_val.is_object() {
                    if $expected_error_count != 1 {
                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                        panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, pretty_json);
                    }
                    // Count check passes if expected is 1 and got object
                } else {
                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                    panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
                }
            }
            None => {
                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
            }
        }
    };
    // Without custom message (calls the one above with ""):
    ($result:expr, $expected_error_count:expr) => {
        assert_failure_with_json!($result, $expected_error_count, "");
    };
    // --- Arms checking failure only (expects at least one error) ---
    // With custom message:
    ($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
        let json_result = &$result.0;
        let success = json_result.get("success").and_then(Value::as_bool);
        let error_val_opt = json_result.get("error"); // Changed key
        let base_msg = format!($fmt $(, $($args)*)?);
        if success != Some(false) {
            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
        }
        match error_val_opt {
            Some(error_val) => {
                if error_val.is_object() {
                    // OK: single error object is a failure
                } else if error_val.is_array() {
                    if error_val.as_array().unwrap().is_empty() {
                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                        panic!("Assertion Failed (expected errors, but 'error' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
                    }
                    // OK: non-empty error array is a failure
                } else {
                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                    panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
                }
            }
            None => {
                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
            }
        }
    };
    // Without custom message (calls the one above with ""):
    ($result:expr) => {
        assert_failure_with_json!($result, "");
    };
}
/// Lift a `serde_json::Value` into the pgrx `JsonB` newtype expected by the
/// extension's SQL-facing functions.
fn jsonb(value: Value) -> JsonB {
    JsonB(value)
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
    // End-to-end: cache a schema, validate conforming and non-conforming
    // instances, and check error paths plus the "schema not found" shape.
    clear_json_schemas(); // Call clear directly
    let schema_id = "my_schema";
    let schema = json!({
        "type": "object",
        "properties": {
            "name": { "type": "string" },
            "age": { "type": "integer", "minimum": 0 }
        },
        "required": ["name", "age"]
    });
    let valid_instance = json!({ "name": "Alice", "age": 30 });
    let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
    let invalid_instance_missing = json!({ "name": "Charlie" });
    // `schema` is not used again, so pass it by value (the original cloned
    // it redundantly).
    let cache_result = cache_json_schema(schema_id, jsonb(schema));
    assert_success_with_json!(cache_result, "Cache operation should succeed.");
    let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
    assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
    // Invalid type
    let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
    assert_failure_with_json!(invalid_result_type, 1, "must be >=0", "Validation with invalid type should fail.");
    let errors_type = invalid_result_type.0["error"].as_array().unwrap(); // Check 'error', expect array
    assert_eq!(errors_type[0]["instance_path"], "/age");
    assert_eq!(errors_type[0]["schema_path"], "urn:my_schema#/properties/age");
    // Missing field
    let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
    assert_failure_with_json!(invalid_result_missing, 1, "missing properties 'age'", "Validation with missing field should fail.");
    let errors_missing = invalid_result_missing.0["error"].as_array().unwrap(); // Check 'error', expect array
    assert_eq!(errors_missing[0]["instance_path"], "");
    assert_eq!(errors_missing[0]["schema_path"], "urn:my_schema#");
    // Schema not found
    let non_existent_id = "non_existent_schema";
    let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
    assert_failure_with_json!(invalid_schema_result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
    // Check 'error' is an object for 'schema not found'
    let error_notfound_obj = invalid_schema_result.0["error"].as_object().expect("'error' should be an object for schema not found");
    assert!(error_notfound_obj.contains_key("message")); // Check message exists
    // Removed checks for schema_path/instance_path as they aren't added in lib.rs for this case
}
#[pg_test]
fn test_validate_json_schema_not_cached() {
    // Validating against an id that was never cached must fail with a
    // single "schema not found" error object.
    clear_json_schemas(); // start from an empty cache
    let outcome = validate_json_schema("non_existent_schema", jsonb(json!({ "foo": "bar" })));
    assert_failure_with_json!(
        outcome,
        1,
        "Schema with id 'non_existent_schema' not found",
        "Validation with non-existent schema should fail."
    );
}
#[pg_test]
fn test_cache_invalid_json_schema() {
    // Caching a schema that fails meta-schema validation must be rejected
    // with the individual leaf errors reported.
    clear_json_schemas(); // Call clear directly
    let schema_id = "invalid_schema";
    // Schema with an invalid type *value*
    let invalid_schema = json!({
        "$id": "urn:invalid_schema",
        "type": ["invalid_type_value"]
    });
    let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));
    // Expect 2 leaf errors because the meta-schema validation fails at the type value
    // and within the type array itself.
    assert_failure_with_json!(
        cache_result,
        2, // Expect exactly two leaf errors
        "value must be one of", // Check message substring (present in both)
        "Caching invalid schema should fail with specific meta-schema validation errors."
    );
    // Ensure the error is an array and check specifics
    let error_array = cache_result.0["error"].as_array().expect("Error field should be an array");
    assert_eq!(error_array.len(), 2);
    // boon's reporting order is not guaranteed (the original asserted a fixed
    // order it had itself flagged as unstable), so check each expected
    // instance_path independently of position.
    for path in ["/type", "/type/0"] {
        assert!(
            error_array.iter().any(|e| e["instance_path"] == path
                && e["message"].as_str().unwrap().contains("value must be one of")),
            "Missing 'value must be one of' error at instance_path {}",
            path
        );
    }
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "detailed_errors";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
let invalid_instance = json!({
"address": {
"street": 123, // Wrong type
"city": "Supercalifragilisticexpialidocious" // Too long
}
});
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
// Update: Expect 2 errors again, as boon reports both nested errors.
assert_failure_with_json!(result, 2);
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{ // Option 1: Object with string prop
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{ // Option 2: Object with number prop
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_string, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_string = result_invalid_string.0["error"].as_array().expect("Expected error array for invalid string");
assert!(errors_string.iter().any(|e| e["instance_path"] == "/string_prop" && e["message"].as_str().unwrap().contains("length must be <=5")), "Missing maxLength error");
assert!(errors_string.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'number_prop'")), "Missing number_prop required error");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors. Check count only with the macro.
assert_failure_with_json!(result_invalid_number, 2);
// Explicitly check that both expected errors are present, ignoring order
let errors_number = result_invalid_number.0["error"].as_array().expect("Expected error array for invalid number");
assert!(errors_number.iter().any(|e| e["instance_path"] == "/number_prop" && e["message"].as_str().unwrap().contains("must be >=10")), "Missing minimum error");
assert!(errors_number.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'string_prop'")), "Missing string_prop required error");
// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_invalid_bool, 1);
// Explicitly check that the single remaining error is the type error for the root instance path
let errors_bool = result_invalid_bool.0["error"].as_array().expect("Expected error array for invalid bool");
assert_eq!(errors_bool.iter().filter(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("want object")).count(), 1, "Expected one 'want object' error at root after filtering");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
assert_failure_with_json!(result_empty_obj, 1);
// Explicitly check that the single remaining error is one of the expected missing properties errors
let errors_empty = result_empty_obj.0["error"].as_array().expect("Expected error array for empty object");
assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
let the_error = &errors_empty[0];
assert_eq!(the_error["instance_path"], "", "Expected instance_path to be empty string");
let message = the_error["message"].as_str().unwrap();
assert!(message.contains("missing properties 'string_prop'") || message.contains("missing properties 'number_prop'"),
"Error message should indicate missing string_prop or number_prop, got: {}", message);
}
#[pg_test]
fn test_clear_json_schemas() {
clear_json_schemas(); // Call clear directly
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_json_schema(schema_id, jsonb(schema.clone()));
let show_result1 = show_json_schemas();
assert!(show_result1.contains(&schema_id.to_string()));
clear_json_schemas();
let show_result2 = show_json_schemas();
assert!(show_result2.is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
// Use the updated macro, expecting count 1 and specific message (handles object case)
assert_failure_with_json!(validate_result, 1, "Schema with id 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}
#[pg_test]
fn test_show_json_schemas() {
clear_json_schemas(); // Call clear directly
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
cache_json_schema(schema_id1, jsonb(schema.clone()));
cache_json_schema(schema_id2, jsonb(schema.clone()));
let mut result = show_json_schemas(); // Make result mutable
result.sort(); // Sort for deterministic testing
assert_eq!(result, vec!["schema1".to_string(), "schema2".to_string()]); // Check exact content
assert!(result.contains(&schema_id1.to_string())); // Keep specific checks too if desired
assert!(result.contains(&schema_id2.to_string()));
}

View File

@ -1 +1 @@
1.0.12 1.0.20