Compare commits

...

21 Commits

SHA1 Message Date
28b689cac0 version: 1.0.15 2025-04-16 01:00:57 -04:00
cc04a1a8bb made errors consistent 2025-04-16 01:00:51 -04:00
3ceb8a0770 version: 1.0.14 2025-04-16 00:38:10 -04:00
499bf68b2a more error cleanup 2025-04-16 00:38:04 -04:00
6ca00f27e9 version: 1.0.13 2025-04-15 23:30:57 -04:00
520be66035 better error messaging 2025-04-15 23:30:47 -04:00
c3146ca433 flow update 2025-04-15 01:52:12 -04:00
b4d9628b05 version: 1.0.12 2025-04-15 00:25:39 -04:00
635d31d723 more validation fixes 2025-04-15 00:25:29 -04:00
08efcb92db version: 1.0.11 2025-04-14 21:53:39 -04:00
dad1216e1f more validation fixes 2025-04-14 21:53:30 -04:00
2fcf8613b8 version: 1.0.10 2025-04-14 20:23:23 -04:00
f88c27aa70 fixed naming, added back json_schema_cached 2025-04-14 20:23:18 -04:00
48e74815d3 version: 1.0.9 2025-04-14 18:08:45 -04:00
23235d4b9d -m switched to boon 2025-04-14 18:08:36 -04:00
67406c0b96 version: 1.0.8 2025-04-14 16:11:49 -04:00
28fff3be11 validation error fixes 2025-04-14 16:11:44 -04:00
70f3d30258 version: 1.0.7 2025-04-14 12:03:07 -04:00
406466454e excluding flows from jspg release 2025-04-14 12:03:01 -04:00
2a9d51fa77 version: 1.0.6 2025-04-14 11:24:22 -04:00
ae90137308 updated flows 2025-04-14 11:24:18 -04:00
9 changed files with 635 additions and 111 deletions

.editorconfig (new file, 10 lines)

@@ -0,0 +1,10 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[*.{json,toml,control,rs}]
charset = utf-8
indent_style = space
indent_size = 2

.env (new file, 13 lines)

@@ -0,0 +1,13 @@
ENVIRONMENT=local
DATABASE_PASSWORD=QgSvstSjoc6fKphMzNgT3SliNY10eSRS
DATABASE_ROLE=agreego_admin
DATABASE_HOST=127.1.27.9
DATABASE_PORT=5432
POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
DATABASE_NAME=agreego
DEV_DATABASE_NAME=agreego_dev
GITEA_TOKEN=3d70c23673517330623a5122998fb304e3c73f0a
MOOV_ACCOUNT_ID=69a0d2f6-77a2-4e26-934f-d869134f87d3
MOOV_PUBLIC_KEY=9OMhK5qGnh7Tmk2Z
MOOV_SECRET_KEY=DrRox7B-YWfO9IheiUUX7lGP8-7VY-Ni
MOOV_DOMAIN=http://localhost

Cargo.lock (generated, 27 lines added)

@@ -68,6 +68,12 @@ version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"

+[[package]]
+name = "appendlist"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e149dc73cd30538307e7ffa2acd3d2221148eaeed4871f246657b1c3eaa1cbd2"
+
 [[package]]
 name = "async-trait"
 version = "0.1.88"

@@ -177,6 +183,26 @@ dependencies = [
  "generic-array",
 ]

+[[package]]
+name = "boon"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baa187da765010b70370368c49f08244b1ae5cae1d5d33072f76c8cb7112fe3e"
+dependencies = [
+ "ahash",
+ "appendlist",
+ "base64",
+ "fluent-uri",
+ "idna",
+ "once_cell",
+ "percent-encoding",
+ "regex",
+ "regex-syntax",
+ "serde",
+ "serde_json",
+ "url",
+]
+
 [[package]]
 name = "borrow-or-share"
 version = "0.2.2"

@@ -1015,6 +1041,7 @@ dependencies = [
 name = "jspg"
 version = "0.1.0"
 dependencies = [
+ "boon",
  "jsonschema",
  "lazy_static",
  "pgrx",

Cargo.toml (2 lines added)

@@ -9,6 +9,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 jsonschema = "0.29.1"
 lazy_static = "1.5.0"
+boon = "0.6.1"

 [dev-dependencies]
 pgrx-tests = "0.14.0"

@@ -22,6 +23,7 @@ path = "src/bin/pgrx_embed.rs"
 [features]
 pg17 = ["pgrx/pg17", "pgrx-tests/pg17" ]
+# Local feature flag used by `cargo pgrx test`
 pg_test = []

 [profile.dev]

flow (36 lines changed)

@@ -63,7 +63,7 @@ build() {
     # Create the source tarball excluding specified patterns
     echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
-    if tar --exclude='.git*' --exclude='./target' --exclude='./package' -czf "${tarball_path}" .; then
+    if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
         echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
     else
         echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2

@@ -76,15 +76,41 @@ install() {
     version=$(get-version) || return 1
     echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"

     # Run the pgrx install command
     # It implicitly uses --release unless --debug is passed
     # It finds pg_config or you can add flags like --pg-config if needed
-    if ! cargo pgrx install "$@"; then # Pass any extra args like --debug
+    if ! cargo pgrx install; then
        echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
        return 1
     fi

     echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
+
+    # Post-install modification to allow non-superuser usage
+    # Get the installation path dynamically using pg_config
+    local pg_sharedir
+    pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
+    if [ -z "$pg_sharedir" ]; then
+        echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
+        return 1
+    fi
+
+    local installed_control_path="${pg_sharedir}/extension/jspg.control"
+
+    # Modify the control file
+    if [ ! -f "$installed_control_path" ]; then
+        echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
+        return 1
+    fi
+
+    echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
+    # Use sed -i '' for macOS compatibility
+    if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
+       echo 'trusted = true' >> "$installed_control_path"; then
+        echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
+    else
+        echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
+        return 1
+    fi
 }

 test() {

@@ -115,8 +141,8 @@ jspg-flow() {
        env) env; return 0;;
        prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
        build) build; return 0;;
-       install) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
-       reinstall) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
+       install) install; return 0;;
+       reinstall) clean; install; return 0;;
        test) test; return 0;;
        package) env; package; return 0;;
        release) env; release; return 0;;

flows (submodule)

Submodule flows updated: 6eab4783f3...9d758d581e

src/lib.rs

@@ -1,126 +1,170 @@
 use pgrx::*;
-use jsonschema::{Draft, Validator};
-use serde_json::json;
-use std::collections::HashMap;
-use std::sync::RwLock;
-use lazy_static::lazy_static;
-use jsonschema;

 pg_module_magic!();

-// Global, thread-safe schema cache using the correct Validator type
+use serde_json::{json, Value};
+use std::{collections::HashMap, sync::RwLock};
+use boon::{Compiler, Schemas, ValidationError, SchemaIndex, CompileError};
+use lazy_static::lazy_static;
+
+struct BoonCache {
+    schemas: Schemas,
+    id_to_index: HashMap<String, SchemaIndex>,
+}
+
 lazy_static! {
-    static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
+    static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
+        schemas: Schemas::new(),
+        id_to_index: HashMap::new(),
+    });
 }

-// Cache a schema explicitly with a provided ID
-#[pg_extern(immutable, strict, parallel_safe)]
-fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
-    match jsonschema::options()
-        .with_draft(Draft::Draft7)
-        .should_validate_formats(true)
-        .build(&schema.0)
-    {
-        Ok(compiled) => {
-            SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
-            true
-        },
-        Err(e) => {
-            notice!("Failed to cache schema '{}': {}", schema_id, e);
-            false
-        }
-    }
-}
-
-// Check if a schema is cached
-#[pg_extern(immutable, strict, parallel_safe)]
-fn schema_cached(schema_id: &str) -> bool {
-    SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
-}
-
-// Validate JSONB instance against a cached schema by ID
-#[pg_extern(immutable, strict, parallel_safe)]
-fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
-    let cache = SCHEMA_CACHE.read().unwrap();
-    let compiled_schema: &Validator = match cache.get(schema_id) {
-        Some(schema) => schema,
-        None => {
-            return JsonB(json!({
-                "valid": false,
-                "errors": [format!("Schema ID '{}' not cached", schema_id)]
-            }));
-        }
-    };
-    if compiled_schema.is_valid(&instance.0) {
-        JsonB(json!({ "valid": true }))
-    } else {
-        let errors: Vec<String> = compiled_schema
-            .iter_errors(&instance.0)
-            .map(|e| e.to_string())
-            .collect();
-        JsonB(json!({ "valid": false, "errors": errors }))
-    }
-}
-
-// Clear the entire schema cache explicitly
-#[pg_extern(immutable, parallel_safe)]
-fn clear_schema_cache() -> bool {
-    SCHEMA_CACHE.write().unwrap().clear();
-    true
-}
-
-#[pg_schema]
-#[cfg(any(test, feature = "pg_test"))]
-mod tests {
-    use pgrx::*;
-    use serde_json::json;
-
-    #[pg_test]
-    fn test_cache_and_validate_schema() {
-        assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
-        assert!(crate::schema_cached("test_schema"));
-        let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
-        assert_eq!(result_valid.0["valid"], true);
-        let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
-        assert_eq!(result_invalid.0["valid"], false);
-        assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type"));
-    }
-
-    #[pg_test]
-    fn test_schema_not_cached() {
-        let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
-        assert_eq!(result.0["valid"], false);
-        assert!(result.0["errors"][0].as_str().unwrap().contains("not cached"));
-    }
-
-    #[pg_test]
-    fn test_clear_schema_cache() {
-        crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
-        assert!(crate::schema_cached("clear_test"));
-        crate::clear_schema_cache();
-        assert!(!crate::schema_cached("clear_test"));
-    }
-
-    #[pg_test]
-    fn test_invalid_schema_cache() {
-        let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
-        assert!(!result);
-        assert!(!crate::schema_cached("bad_schema"));
-    }
-}
-
+#[pg_extern(strict)]
+fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
+    let mut cache = SCHEMA_CACHE.write().unwrap();
+    let schema_value: Value = schema.0;
+    let schema_path = format!("urn:{}", schema_id);
+
+    let mut compiler = Compiler::new();
+    compiler.enable_format_assertions();
+
+    // Use schema_path when adding the resource
+    if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
+        return JsonB(json!({
+            "success": false,
+            "error": {
+                "message": format!("Failed to add schema resource '{}': {}", schema_id, e),
+                "schema_path": schema_path
+            }
+        }));
+    }
+
+    // Use schema_path when compiling
+    match compiler.compile(&schema_path, &mut cache.schemas) {
+        Ok(sch_index) => {
+            // Store the index using the original schema_id as the key
+            cache.id_to_index.insert(schema_id.to_string(), sch_index);
+            JsonB(json!({ "success": true }))
+        }
+        Err(e) => {
+            let error = match &e {
+                CompileError::ValidationError { url: _url, src } => {
+                    json!({
+                        "message": format!("Schema '{}' failed validation against its metaschema: {}", schema_id, src),
+                        "schema_path": schema_path,
+                        "error": format!("{:?}", src),
+                    })
+                }
+                _ => {
+                    let _error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string();
+                    json!({
+                        "message": format!("Schema '{}' compilation failed: {}", schema_id, e),
+                        "schema_path": schema_path,
+                        "error": format!("{:?}", e),
+                    })
+                }
+            };
+            JsonB(json!({
+                "success": false,
+                "error": error
+            }))
+        }
+    }
+}
+
+#[pg_extern(strict, parallel_safe)]
+fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
+    let cache = SCHEMA_CACHE.read().unwrap();
+    // Lookup uses the original schema_id
+    match cache.id_to_index.get(schema_id) {
+        None => JsonB(json!({
+            "success": false,
+            "error": {
+                "message": format!("Schema with id '{}' not found in cache", schema_id)
+            }
+        })),
+        Some(sch_index) => {
+            let instance_value: Value = instance.0;
+            match cache.schemas.validate(&instance_value, *sch_index) {
+                Ok(_) => JsonB(json!({ "success": true })),
+                Err(validation_error) => {
+                    // Collect the leaf causes into a flat error list
+                    let mut all_errors = Vec::new();
+                    collect_leaf_errors(&validation_error, &mut all_errors);
+                    JsonB(json!({
+                        "success": false,
+                        "error": all_errors // Flat list of specific errors
+                    }))
+                }
+            }
+        }
+    }
+}
+
+// Recursively collects leaf errors into a flat list
+fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
+    if error.causes.is_empty() {
+        let default_message = format!("{}", error);
+        let message = if let Some(start_index) = default_message.find("': ") {
+            default_message[start_index + 3..].to_string()
+        } else {
+            default_message
+        };
+        errors_list.push(json!({
+            "message": message,
+            "schema_path": error.schema_url.to_string(),
+            "instance_path": error.instance_location.to_string(),
+        }));
+    } else {
+        for cause in &error.causes {
+            collect_leaf_errors(cause, errors_list);
+        }
+    }
+}
+
+#[pg_extern(strict, parallel_safe)]
+fn json_schema_cached(schema_id: &str) -> bool {
+    let cache = SCHEMA_CACHE.read().unwrap();
+    cache.id_to_index.contains_key(schema_id)
+}
+
+#[pg_extern(strict)]
+fn clear_json_schemas() {
+    let mut cache = SCHEMA_CACHE.write().unwrap();
+    *cache = BoonCache {
+        schemas: Schemas::new(),
+        id_to_index: HashMap::new(),
+    };
+}
+
+#[pg_extern(strict, parallel_safe)]
+fn show_json_schemas() -> Vec<String> {
+    let cache = SCHEMA_CACHE.read().unwrap();
+    let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
+    ids
+}
+
+/// This module is required by `cargo pgrx test` invocations.
+/// It must be visible at the root of your extension crate.
 #[cfg(test)]
 pub mod pg_test {
     pub fn setup(_options: Vec<&str>) {
-        // Initialization if needed
+        // perform one-off initialization when the pg_test framework starts
     }

+    #[must_use]
     pub fn postgresql_conf_options() -> Vec<&'static str> {
+        // return any postgresql.conf settings that are required for your tests
         vec![]
     }
 }
+
+#[cfg(any(test, feature = "pg_test"))]
+#[pg_schema]
+mod tests {
+    include!("tests.rs");
+}
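
Aside (not part of the diff): the new lib.rs drives everything through boon's Compiler/Schemas pair, registering each schema under a "urn:<schema_id>" URI, caching the returned SchemaIndex, and flattening nested validation causes into leaf errors. A minimal standalone sketch of that flow, assuming only the boon 0.6.1 and serde_json dependencies declared in Cargo.toml above; the "urn:example" identifier and sample schema are illustrative, not from the repository:

// Sketch of the cache-then-validate flow used by cache_json_schema and
// validate_json_schema, runnable outside Postgres.
use boon::{Compiler, Schemas};
use serde_json::json;

fn main() {
    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.enable_format_assertions();

    // Mirror cache_json_schema: register the schema under "urn:<schema_id>",
    // compile it, and keep the returned SchemaIndex as the cache value.
    let schema = json!({ "type": "object", "required": ["name"] });
    compiler
        .add_resource("urn:example", schema)
        .expect("schema resource should be accepted");
    let idx = compiler
        .compile("urn:example", &mut schemas)
        .expect("schema should compile against its metaschema");

    // Mirror validate_json_schema: a failing instance yields a ValidationError
    // whose leaf causes carry instance and schema locations.
    let instance = json!({});
    if let Err(err) = schemas.validate(&instance, idx) {
        println!("{err}");
    }
}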

src/tests.rs (new file, 402 lines)

@@ -0,0 +1,402 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};
// Helper macro for asserting a successful JSON result
macro_rules! assert_success_with_json {
($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
if condition_result != Some(true) {
let base_msg = format!($fmt $(, $($args)*)?);
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("{}", panic_msg);
}
};
// Simpler version without message
($result_jsonb:expr) => {
let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
if condition_result != Some(true) {
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success)\nResult JSON:\n{}", pretty_json);
panic!("{}", panic_msg);
}
};
}
// Helper macro for asserting failed JSON results with the flat error structure
macro_rules! assert_failure_with_json {
// --- Arms with error count and message substring check ---
// With custom message:
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let error_val_opt = json_result.get("error"); // Changed key
let base_msg = format!($fmt $(, $($args)*)?);
if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match error_val_opt {
Some(error_val) => {
if error_val.is_array() {
let errors_array = error_val.as_array().unwrap();
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
if $expected_error_count > 0 {
let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
}
} else if error_val.is_object() {
// Handle single error object case (like 'schema not found')
if $expected_error_count != 1 {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, pretty_json);
}
let message = error_val.get("message").and_then(Value::as_str);
match message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (error message mismatch): Expected object message contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (error object has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
} else {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
};
// --- Arms with error count check only ---
// With custom message:
($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let error_val_opt = json_result.get("error"); // Changed key
let base_msg = format!($fmt $(, $($args)*)?);
if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match error_val_opt {
Some(error_val) => {
if error_val.is_array() {
let errors_array = error_val.as_array().unwrap();
if errors_array.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
}
} else if error_val.is_object() {
if $expected_error_count != 1 {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, pretty_json);
}
// Count check passes if expected is 1 and got object
} else {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr) => {
assert_failure_with_json!($result, $expected_error_count, "");
};
// --- Arms checking failure only (expects at least one error) ---
// With custom message:
($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let error_val_opt = json_result.get("error"); // Changed key
let base_msg = format!($fmt $(, $($args)*)?);
if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match error_val_opt {
Some(error_val) => {
if error_val.is_object() {
// OK: single error object is a failure
} else if error_val.is_array() {
if error_val.as_array().unwrap().is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but 'error' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
// OK: non-empty error array is a failure
} else {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
};
// Without custom message (calls the one above with ""):
($result:expr) => {
assert_failure_with_json!($result, "");
};
}
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
#[pg_test]
fn test_cache_and_validate_json_schema() {
clear_json_schemas(); // Call clear directly
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert_success_with_json!(cache_result, "Cache operation should succeed.");
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
// Invalid type
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert_failure_with_json!(invalid_result_type, 1, "must be >=0", "Validation with invalid type should fail.");
let errors_type = invalid_result_type.0["error"].as_array().unwrap(); // Check 'error', expect array
assert_eq!(errors_type[0]["instance_path"], "/age");
assert_eq!(errors_type[0]["schema_path"], "urn:my_schema#/properties/age");
// Missing field
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert_failure_with_json!(invalid_result_missing, 1, "missing properties 'age'", "Validation with missing field should fail.");
let errors_missing = invalid_result_missing.0["error"].as_array().unwrap(); // Check 'error', expect array
assert_eq!(errors_missing[0]["instance_path"], "");
assert_eq!(errors_missing[0]["schema_path"], "urn:my_schema#");
// Schema not found
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert_failure_with_json!(invalid_schema_result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
// Check 'error' is an object for 'schema not found'
let error_notfound_obj = invalid_schema_result.0["error"].as_object().expect("'error' should be an object for schema not found");
assert!(error_notfound_obj.contains_key("message")); // Check message exists
// Removed checks for schema_path/instance_path as they aren't added in lib.rs for this case
}
#[pg_test]
fn test_validate_json_schema_not_cached() {
clear_json_schemas(); // Call clear directly
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
// Use the updated macro, expecting count 1 and specific message (handles object case)
assert_failure_with_json!(result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
}
#[pg_test]
fn test_cache_invalid_json_schema() {
clear_json_schemas(); // Call clear directly
let schema_id = "invalid_schema";
// Schema with an invalid type *value*
let invalid_schema = json!({
"$id": "urn:invalid_schema",
"type": ["invalid_type_value"]
});
let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));
// Manually check the structure for cache_json_schema failure
let json_result = &cache_result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let error_obj = json_result.get("error").and_then(Value::as_object);
if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): Caching invalid schema should fail.\nResult JSON:\n{}", pretty_json);
}
if error_obj.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'error' object, but none found): Caching invalid schema should return an error object.\nResult JSON:\n{}", pretty_json);
}
// Check specific fields within the error object
let message = error_obj.unwrap().get("message").and_then(Value::as_str);
// Updated check based on the actual error message seen in the logs
if message.map_or(true, |m| !m.contains("failed validation against its metaschema") || !m.contains("/type/0': value must be one of")) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (error message mismatch): Expected metaschema validation failure message containing '/type/0' error detail.\nResult JSON:\n{}", pretty_json);
}
}
#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "detailed_errors";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
let invalid_instance = json!({
"address": {
"street": 123, // Wrong type
"city": "Supercalifragilisticexpialidocious" // Too long
}
});
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
// Update: Expect 2 errors again, as boon reports both nested errors.
assert_failure_with_json!(result, 2);
}
#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{ // Option 1: Object with string prop
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{ // Option 2: Object with number prop
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});
let _ = cache_json_schema(schema_id, jsonb(schema));
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors: one for maxLength (branch 0), one for missing prop (branch 1)
// Check the first error message reported by boon (maxLength).
assert_failure_with_json!(result_invalid_string, 2, "length must be <=5", "Validation with invalid string length should have 2 leaf errors");
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors: one for minimum (branch 1), one for missing prop (branch 0)
// Check the first error message reported by boon (missing prop).
assert_failure_with_json!(result_invalid_number, 2, "missing properties 'string_prop'", "Validation with invalid number should have 2 leaf errors");
// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect 2 leaf errors, one "Type" error for each branch
// Check the first error reported by boon (want object).
assert_failure_with_json!(result_invalid_bool, 2, "want object", "Validation with invalid bool should have 2 leaf errors");
// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Expect 2 leaf errors: one required error for branch 0, one required error for branch 1
// Check the first error reported by boon (missing string_prop).
assert_failure_with_json!(result_empty_obj, 2, "missing properties 'string_prop'", "Validation with empty object should have 2 leaf errors");
}
#[pg_test]
fn test_clear_json_schemas() {
clear_json_schemas(); // Call clear directly
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_json_schema(schema_id, jsonb(schema.clone()));
let show_result1 = show_json_schemas();
assert!(show_result1.contains(&schema_id.to_string()));
clear_json_schemas();
let show_result2 = show_json_schemas();
assert!(show_result2.is_empty());
let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
// Use the updated macro, expecting count 1 and specific message (handles object case)
assert_failure_with_json!(validate_result, 1, "Schema with id 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}
#[pg_test]
fn test_show_json_schemas() {
clear_json_schemas(); // Call clear directly
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
cache_json_schema(schema_id1, jsonb(schema.clone()));
cache_json_schema(schema_id2, jsonb(schema.clone()));
let mut result = show_json_schemas(); // Make result mutable
result.sort(); // Sort for deterministic testing
assert_eq!(result, vec!["schema1".to_string(), "schema2".to_string()]); // Check exact content
assert!(result.contains(&schema_id1.to_string())); // Keep specific checks too if desired
assert!(result.contains(&schema_id2.to_string()));
}

version file (1 line changed)

@@ -1 +1 @@
-1.0.5
+1.0.15