Compare commits
13 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 3ceb8a0770 |  |
|  | 499bf68b2a |  |
|  | 6ca00f27e9 |  |
|  | 520be66035 |  |
|  | c3146ca433 |  |
|  | b4d9628b05 |  |
|  | 635d31d723 |  |
|  | 08efcb92db |  |
|  | dad1216e1f |  |
|  | 2fcf8613b8 |  |
|  | f88c27aa70 |  |
|  | 48e74815d3 |  |
|  | 23235d4b9d |  |
.env (new file, 13 lines)
@@ -0,0 +1,13 @@
ENVIRONMENT=local
DATABASE_PASSWORD=QgSvstSjoc6fKphMzNgT3SliNY10eSRS
DATABASE_ROLE=agreego_admin
DATABASE_HOST=127.1.27.9
DATABASE_PORT=5432
POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
DATABASE_NAME=agreego
DEV_DATABASE_NAME=agreego_dev
GITEA_TOKEN=3d70c23673517330623a5122998fb304e3c73f0a
MOOV_ACCOUNT_ID=69a0d2f6-77a2-4e26-934f-d869134f87d3
MOOV_PUBLIC_KEY=9OMhK5qGnh7Tmk2Z
MOOV_SECRET_KEY=DrRox7B-YWfO9IheiUUX7lGP8-7VY-Ni
MOOV_DOMAIN=http://localhost
Cargo.lock (generated, 27 lines)
@@ -68,6 +68,12 @@ version = "1.0.97"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"

[[package]]
name = "appendlist"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e149dc73cd30538307e7ffa2acd3d2221148eaeed4871f246657b1c3eaa1cbd2"

[[package]]
name = "async-trait"
version = "0.1.88"
@@ -177,6 +183,26 @@ dependencies = [
"generic-array",
]

[[package]]
name = "boon"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baa187da765010b70370368c49f08244b1ae5cae1d5d33072f76c8cb7112fe3e"
dependencies = [
"ahash",
"appendlist",
"base64",
"fluent-uri",
"idna",
"once_cell",
"percent-encoding",
"regex",
"regex-syntax",
"serde",
"serde_json",
"url",
]

[[package]]
name = "borrow-or-share"
version = "0.2.2"
@@ -1015,6 +1041,7 @@ dependencies = [
name = "jspg"
version = "0.1.0"
dependencies = [
"boon",
"jsonschema",
"lazy_static",
"pgrx",
Cargo.toml
@@ -9,6 +9,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
jsonschema = "0.29.1"
lazy_static = "1.5.0"
boon = "0.6.1"

[dev-dependencies]
pgrx-tests = "0.14.0"
@@ -22,6 +23,7 @@ path = "src/bin/pgrx_embed.rs"

[features]
pg17 = ["pgrx/pg17", "pgrx-tests/pg17" ]
# Local feature flag used by `cargo pgrx test`
pg_test = []

[profile.dev]
flows (submodule, 2 lines)
Submodule flows updated: db55335254...9d758d581e
src/lib.rs (337 lines)
@@ -1,237 +1,178 @@
use pgrx::*;
use jsonschema::{Draft, Validator};
use serde_json::json;
use std::collections::HashMap;
use std::sync::RwLock;
use lazy_static::lazy_static;

pg_module_magic!();

// Global, thread-safe schema cache using the correct Validator type
use serde_json::{json, Value};
use std::{collections::HashMap, sync::RwLock};
use boon::{Compiler, Schemas, ValidationError, SchemaIndex, CompileError};
use lazy_static::lazy_static;

struct BoonCache {
schemas: Schemas,
id_to_index: HashMap<String, SchemaIndex>,
}

lazy_static! {
static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
});
}

// Cache a schema explicitly with a provided ID
#[pg_extern(immutable, strict, parallel_safe)]
fn cache_schema(schema_id: &str, schema: JsonB) -> JsonB {
let schema_value = schema.0;

// Compile the schema using the builder pattern
match jsonschema::options()
.with_draft(Draft::Draft7)
.should_validate_formats(true)
.build(&schema_value)
{
Ok(compiled_schema) => {
// If compilation succeeds, add it to the cache
#[pg_extern(strict)]
fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
cache.insert(schema_id.to_string(), compiled_schema);
JsonB(json!({ "success": true, "id": schema_id }))
}
Err(e) => {
// If compilation fails, return an error
JsonB(json!({
"success": false,
"error": format!("Failed to compile schema '{}': {}", schema_id, e)
}))
}
}
}
let schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);

// Check if a schema is cached
#[pg_extern(immutable, strict, parallel_safe)]
fn schema_cached(schema_id: &str) -> bool {
SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
}
let mut compiler = Compiler::new();
compiler.enable_format_assertions();

// Validate JSONB instance against a cached schema by ID
#[pg_extern(immutable, strict, parallel_safe)]
fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let compiled_schema: &Validator = match cache.get(schema_id) {
Some(schema) => schema,
None => {
// Return the 'schema not cached' error in the standard object format
let error_msg = format!("Schema ID '{}' not cached", schema_id);
// Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
return JsonB(json!({
"valid": false,
"errors": [json!({
"kind": "SchemaNotFound", // Custom kind for this case
"error": error_msg
})]
"success": false,
"error": {
"message": format!("Failed to add schema resource '{}': {}", schema_id, e),
"schema_path": schema_path
}
}));
}

// Use schema_path when compiling
match compiler.compile(&schema_path, &mut cache.schemas) {
Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ "success": true }))
}
Err(e) => {
let error = match &e {
CompileError::ValidationError { url: _url, src } => { // Prefix url with _
json!({
"message": format!("Schema '{}' failed validation against its metaschema: {}", schema_id, src),
"schema_path": schema_path,
"error": format!("{:?}", src),
})
}
_ => {
let _error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string(); // Prefix error_type with _
json!({
"message": format!("Schema '{}' compilation failed: {}", schema_id, e),
"schema_path": schema_path,
"error": format!("{:?}", e),
})
}
};

let instance_value = instance.0;
// Use iter_errors() to get all validation errors
let errors_iterator = compiled_schema.iter_errors(&instance_value);

// Collect errors into a vector first to check if any exist
let collected_errors_result: Vec<_> = errors_iterator.collect();

if collected_errors_result.is_empty() {
// No errors found, validation passed
JsonB(json!({ "valid": true }))
} else {
// Errors found, format them
let error_details = collect_all_errors(collected_errors_result.into_iter());
JsonB(json!({
"valid": false,
"errors": error_details
"success": false,
"error": error
}))
}
}
}

fn format_validation_error(error: &jsonschema::ValidationError) -> serde_json::Value {
#[pg_extern(strict, parallel_safe)]
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();

// Lookup uses the original schema_id
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"success": false,
"errors": [json!({
"message": format!("Schema with id '{}' not found in cache", schema_id),
"schema_path": "",
"instance_path": ""
})]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "success": true })),
Err(validation_error) => {
// Directly use the result of format_validation_error
// which now includes the top-level success indicator and flat error list
JsonB(format_validation_error(&validation_error))
}
}
}
}
}

// Recursively collects leaf errors into a flat list
fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
if error.causes.is_empty() {
let default_message = format!("{}", error);
let message = if let Some(start_index) = default_message.find("': ") {
default_message[start_index + 3..].to_string()
} else {
default_message
};

errors_list.push(json!({
"message": message,
"schema_path": error.schema_url.to_string(),
"instance_path": error.instance_location.to_string(),
}));
} else {
for cause in &error.causes {
collect_leaf_errors(cause, errors_list);
}
}
}

// Formats validation errors into a flat list JSON structure
fn format_validation_error(error: &ValidationError) -> Value {
let mut all_errors = Vec::new();
collect_leaf_errors(error, &mut all_errors);

json!({
"instance_path": error.instance_path.to_string(),
"schema_path": error.schema_path.to_string(),
"kind": format!("{:?}", error.kind),
"error": error.to_string()
"success": false,
"errors": all_errors // Flat list of specific errors
})
}

// Simplified: Collects all validation errors by formatting each one.
// Assumes the iterator provided by iter_errors() gives all necessary detail.
fn collect_all_errors<'a>(
errors: impl Iterator<Item = jsonschema::ValidationError<'a>>,
) -> Vec<serde_json::Value> {
errors.map(|e| format_validation_error(&e)).collect()
}

// Show the IDs of all schemas currently in the cache
#[pg_extern(immutable, parallel_safe)]
fn show_schema_cache() -> Vec<String> {
#[pg_extern(strict, parallel_safe)]
fn json_schema_cached(schema_id: &str) -> bool {
let cache = SCHEMA_CACHE.read().unwrap();
cache.keys().cloned().collect()
cache.id_to_index.contains_key(schema_id)
}

// Clear the entire schema cache explicitly
#[pg_extern(immutable, parallel_safe)]
fn clear_schema_cache() -> bool {
SCHEMA_CACHE.write().unwrap().clear();
true
#[pg_extern(strict)]
fn clear_json_schemas() {
let mut cache = SCHEMA_CACHE.write().unwrap();
*cache = BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
};
}

#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
use pgrx::prelude::*;
use serde_json::json;
use pgrx::JsonB; // Import JsonB specifically for tests

// Helper to clear cache before tests that need it
fn setup_test() {
crate::clear_schema_cache();
}

#[pg_test]
fn test_cache_and_validate_schema() {
setup_test();
assert!(crate::cache_schema(
"test_schema",
JsonB(json!({ "type": "object" }))
).0["success"] == json!(true));
assert!(crate::schema_cached("test_schema"));

let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
assert_eq!(result_valid.0["valid"], true);

let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
assert_eq!(result_invalid.0["valid"], false);
assert!(result_invalid.0["errors"][0]["error"].as_str().unwrap().contains("is not of type \"object\""));
}

#[pg_test]
fn test_schema_not_cached() {
setup_test();
let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
assert_eq!(result.0["valid"], false);
assert!(result.0["errors"][0]["error"].as_str().unwrap().contains("not cached"));
}

#[pg_test]
fn test_clear_schema_cache() {
setup_test();
crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
assert!(crate::schema_cached("clear_test"));

crate::clear_schema_cache();
assert!(!crate::schema_cached("clear_test"));
}

#[pg_test]
fn test_invalid_schema_cache() {
setup_test();
// Attempt to cache an invalid schema definition
let result = crate::cache_schema(
"bad_schema",
JsonB(json!({ "type": "unknown_type" }))
);
assert!(result.0["success"] == json!(false), "Caching an invalid schema should fail");
assert!(!crate::schema_cached("bad_schema"));
}

#[pg_test]
fn test_show_schema_cache() {
setup_test();
assert!(crate::cache_schema("schema1", JsonB(json!({ "type": "string" }))).0["success"] == json!(true));
assert!(crate::cache_schema("schema2", JsonB(json!({ "type": "number" }))).0["success"] == json!(true));

let mut cached_ids = crate::show_schema_cache();
cached_ids.sort(); // Sort for deterministic comparison

assert_eq!(cached_ids.len(), 2);
assert_eq!(cached_ids, vec!["schema1", "schema2"]);

crate::clear_schema_cache();
let empty_ids = crate::show_schema_cache();
assert!(empty_ids.is_empty());
}

#[pg_test]
fn test_detailed_validation_errors() {
setup_test();
let schema_id = "required_prop_schema";
let schema = JsonB(json!({
"title": "Test Required",
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer" }
},
"required": ["name"]
}));

assert!(crate::cache_schema(schema_id, schema).0["success"] == json!(true));

// Instance missing the required 'name' property
let invalid_instance = JsonB(json!({ "age": 30 }));
let result = crate::validate_schema(schema_id, invalid_instance);

assert_eq!(result.0["valid"], false);
let errors = result.0["errors"].as_array().expect("Errors should be an array");
assert_eq!(errors.len(), 1, "Should have exactly one error");

let error = &errors[0];
eprintln!("Validation Error Details: {}", error);

assert_eq!(error["instance_path"].as_str().unwrap(), "", "Instance path should be root");
assert_eq!(error["schema_path"].as_str().unwrap(), "/required", "Schema path should point to required keyword");
assert!(error["kind"].as_str().unwrap().contains("Required"), "Error kind should be Required");
assert!(error["error"].as_str().unwrap().contains("is a required property"), "Error message mismatch");
}
#[pg_extern(strict, parallel_safe)]
fn show_json_schemas() -> Vec<String> {
let cache = SCHEMA_CACHE.read().unwrap();
let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
ids
}

/// This module is required by `cargo pgrx test` invocations.
/// It must be visible at the root of your extension crate.
#[cfg(test)]
pub mod pg_test {
pub fn setup(_options: Vec<&str>) {
// Initialization if needed
// perform one-off initialization when the pg_test framework starts
}

#[must_use]
pub fn postgresql_conf_options() -> Vec<&'static str> {
// return any postgresql.conf settings that are required for your tests
vec![]
}
}


#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
include!("tests.rs");
}
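The new src/lib.rs above wraps a boon compile-and-validate sequence behind `#[pg_extern]` functions. As a point of reference, the same sequence can be exercised outside Postgres. The sketch below is illustrative only: it reuses the boon calls already visible in the diff (`Compiler::new`, `enable_format_assertions`, `add_resource`, `compile`, `Schemas::validate`), while the `main` function, the `urn:my_schema` identifier, and the example schema are invented for the example and are not part of the extension.

// Standalone sketch of the boon flow used by cache_json_schema / validate_json_schema.
use boon::{Compiler, Schemas};
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.enable_format_assertions();

    // Register the schema under a synthetic URN, as cache_json_schema does with format!("urn:{}", schema_id).
    let schema = json!({ "type": "object", "required": ["name"] });
    compiler.add_resource("urn:my_schema", schema)?;
    let index = compiler.compile("urn:my_schema", &mut schemas)?;

    // Validation returns Ok(()) or a ValidationError tree; the extension flattens
    // the leaf causes of that tree into a JSON "errors" array.
    let instance = json!({ "age": 30 });
    match schemas.validate(&instance, index) {
        Ok(()) => println!("valid"),
        Err(e) => println!("invalid: {e}"),
    }
    Ok(())
}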
src/tests.rs (new file, 360 lines)
@@ -0,0 +1,360 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::{JsonB, pg_test};

// Helper macro for asserting success (no changes needed, but ensure it's present)
macro_rules! assert_success_with_json {
($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
if condition_result != Some(true) {
let base_msg = format!($fmt $(, $($args)*)?);
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success): {}\nResult JSON:\n{}", base_msg, pretty_json);
panic!("{}", panic_msg);
}
};
// Simpler version without message
($result_jsonb:expr) => {
let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
if condition_result != Some(true) {
let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
.unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
let panic_msg = format!("Assertion Failed (expected success)\nResult JSON:\n{}", pretty_json);
panic!("{}", panic_msg);
}
};
}

// Updated helper macro for asserting failed JSON results with the new flat error structure
macro_rules! assert_failure_with_json {
// --- Arms with error count and message substring check ---
// With custom message:
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let errors_opt = json_result.get("errors").and_then(Value::as_array);
let base_msg = format!($fmt $(, $($args)*)?);

if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match errors_opt {
Some(errors) => {
if errors.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors.len(), base_msg, pretty_json);
}
if $expected_error_count > 0 {
let first_error_message = errors[0].get("message").and_then(Value::as_str);
match first_error_message {
Some(msg) => {
if !msg.contains($expected_first_message_contains) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (first error has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'errors' array, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
};

// --- Arms with error count check only ---
// With custom message:
($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let errors_opt = json_result.get("errors").and_then(Value::as_array);
let base_msg = format!($fmt $(, $($args)*)?);

if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match errors_opt {
Some(errors) => {
if errors.len() != $expected_error_count {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors.len(), base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'errors' array, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
};
// Without custom message (calls the one above with ""):
($result:expr, $expected_error_count:expr) => {
assert_failure_with_json!($result, $expected_error_count, "");
};

// --- Arms checking failure only (expects at least one error) ---
// With custom message:
($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
let json_result = &$result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let errors_opt = json_result.get("errors").and_then(Value::as_array);
let base_msg = format!($fmt $(, $($args)*)?);

if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
match errors_opt {
Some(errors) => {
if errors.is_empty() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected errors, but errors array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
None => {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'errors' array, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
}
}
};
// Without custom message (calls the one above with ""):
($result:expr) => {
assert_failure_with_json!($result, "");
};
}


fn jsonb(val: Value) -> JsonB {
JsonB(val)
}

#[pg_test]
fn test_cache_and_validate_json_schema() {
clear_json_schemas(); // Call clear directly
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });

let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
assert_success_with_json!(cache_result, "Cache operation should succeed.");

let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");

// Invalid type
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
assert_failure_with_json!(invalid_result_type, 1, "must be >=0", "Validation with invalid type should fail.");
let errors_type = invalid_result_type.0["errors"].as_array().unwrap();
assert_eq!(errors_type[0]["instance_path"], "/age");
assert_eq!(errors_type[0]["schema_path"], "urn:my_schema#/properties/age");

// Missing field
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
assert_failure_with_json!(invalid_result_missing, 1, "missing properties 'age'", "Validation with missing field should fail.");
let errors_missing = invalid_result_missing.0["errors"].as_array().unwrap();
assert_eq!(errors_missing[0]["instance_path"], "");
assert_eq!(errors_missing[0]["schema_path"], "urn:my_schema#");

// Schema not found
let non_existent_id = "non_existent_schema";
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
assert_failure_with_json!(invalid_schema_result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
let errors_notfound = invalid_schema_result.0["errors"].as_array().unwrap();
assert_eq!(errors_notfound[0]["schema_path"], ""); // Schema path is empty for this error type
assert_eq!(errors_notfound[0]["instance_path"], ""); // Instance path is empty
}

#[pg_test]
fn test_validate_json_schema_not_cached() {
clear_json_schemas(); // Call clear directly
let instance = json!({ "foo": "bar" });
let result = validate_json_schema("non_existent_schema", jsonb(instance));
// Use the updated macro
assert_failure_with_json!(result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
}

#[pg_test]
fn test_cache_invalid_json_schema() {
clear_json_schemas(); // Call clear directly
let schema_id = "invalid_schema";
// Schema with an invalid type *value*
let invalid_schema = json!({
"$id": "urn:invalid_schema",
"type": ["invalid_type_value"]
});

let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));

// Manually check the structure for cache_json_schema failure
let json_result = &cache_result.0;
let success = json_result.get("success").and_then(Value::as_bool);
let error_obj = json_result.get("error").and_then(Value::as_object);

if success != Some(false) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected failure, success was not false): Caching invalid schema should fail.\nResult JSON:\n{}", pretty_json);
}
if error_obj.is_none() {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (expected 'error' object, but none found): Caching invalid schema should return an error object.\nResult JSON:\n{}", pretty_json);
}
// Check specific fields within the error object
let message = error_obj.unwrap().get("message").and_then(Value::as_str);
// Updated check based on the actual error message seen in the logs
if message.map_or(true, |m| !m.contains("failed validation against its metaschema") || !m.contains("/type/0': value must be one of")) {
let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
panic!("Assertion Failed (error message mismatch): Expected metaschema validation failure message containing '/type/0' error detail.\nResult JSON:\n{}", pretty_json);
}
}

#[pg_test]
fn test_validate_json_schema_detailed_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "detailed_errors";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let _ = cache_json_schema(schema_id, jsonb(schema));

let invalid_instance = json!({
"address": {
"street": 123, // Wrong type
"city": "Supercalifragilisticexpialidocious" // Too long
}
});

let result = validate_json_schema(schema_id, jsonb(invalid_instance));

// Update: Expect 2 errors again, as boon reports both nested errors.
assert_failure_with_json!(result, 2);

}

#[pg_test]
fn test_validate_json_schema_oneof_validation_errors() {
clear_json_schemas(); // Call clear directly
let schema_id = "oneof_schema";
let schema = json!({
"oneOf": [
{ // Option 1: Object with string prop
"type": "object",
"properties": {
"string_prop": { "type": "string", "maxLength": 5 }
},
"required": ["string_prop"]
},
{ // Option 2: Object with number prop
"type": "object",
"properties": {
"number_prop": { "type": "number", "minimum": 10 }
},
"required": ["number_prop"]
}
]
});

let _ = cache_json_schema(schema_id, jsonb(schema));

// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
// Expect 2 leaf errors: one for maxLength (branch 0), one for missing prop (branch 1)
// Check the first error message reported by boon (maxLength).
assert_failure_with_json!(result_invalid_string, 2, "length must be <=5", "Validation with invalid string length should have 2 leaf errors");
let _errors_string = result_invalid_string.0["errors"].as_array().unwrap(); // Prefix with _

// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
let invalid_number_instance = json!({ "number_prop": 5 });
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
// Expect 2 leaf errors: one for minimum (branch 1), one for missing prop (branch 0)
// Check the first error message reported by boon (missing prop).
assert_failure_with_json!(result_invalid_number, 2, "missing properties 'string_prop'", "Validation with invalid number should have 2 leaf errors");
let _errors_number = result_invalid_number.0["errors"].as_array().unwrap(); // Prefix with _

// --- Test case 3: Fails type check (not object) for both branches ---
// Input: boolean, expected object for both branches
let invalid_bool_instance = json!(true); // Not an object
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
// Expect 2 leaf errors, one "Type" error for each branch
// Check the first error reported by boon (want object).
assert_failure_with_json!(result_invalid_bool, 2, "want object", "Validation with invalid bool should have 2 leaf errors");
let _errors_bool = result_invalid_bool.0["errors"].as_array().unwrap(); // Prefix with _

// --- Test case 4: Fails missing required for both branches ---
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
let invalid_empty_obj = json!({});
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
// Expect 2 leaf errors: one required error for branch 0, one required error for branch 1
// Check the first error reported by boon (missing string_prop).
assert_failure_with_json!(result_empty_obj, 2, "missing properties 'string_prop'", "Validation with empty object should have 2 leaf errors");
let _errors_empty = result_empty_obj.0["errors"].as_array().unwrap(); // Prefix with _
}

#[pg_test]
fn test_clear_json_schemas() {
clear_json_schemas(); // Call clear directly
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_json_schema(schema_id, jsonb(schema.clone()));

let show_result1 = show_json_schemas();
assert!(show_result1.contains(&schema_id.to_string()));

clear_json_schemas();

let show_result2 = show_json_schemas();
assert!(show_result2.is_empty());

let instance = json!("test");
let validate_result = validate_json_schema(schema_id, jsonb(instance));
// Use the updated macro
assert_failure_with_json!(validate_result, 1, "Schema with id 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
}

#[pg_test]
fn test_show_json_schemas() {
clear_json_schemas(); // Call clear directly
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });

cache_json_schema(schema_id1, jsonb(schema.clone()));
cache_json_schema(schema_id2, jsonb(schema.clone()));

let mut result = show_json_schemas(); // Make result mutable
result.sort(); // Sort for deterministic testing
assert_eq!(result, vec!["schema1".to_string(), "schema2".to_string()]); // Check exact content
assert!(result.contains(&schema_id1.to_string())); // Keep specific checks too if desired
assert!(result.contains(&schema_id2.to_string()));
}
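The tests above pin down the result shapes defined in src/lib.rs: cache_json_schema returns an object with a "success" flag (and an "error" object on failure), while validate_json_schema returns either {"success": true} or {"success": false, "errors": [...]} where each error carries "message", "schema_path", and "instance_path". The sketch below is illustrative only, showing how a caller outside the test suite might branch on those same fields; the report function and the sample value are hypothetical and not part of the PR.

// Illustrative: branching on the JSON shape returned by validate_json_schema.
use serde_json::{json, Value};

fn report(result: &Value) {
    match result.get("success").and_then(Value::as_bool) {
        Some(true) => println!("instance is valid"),
        _ => {
            // Each entry carries "message", "schema_path", and "instance_path",
            // as produced by collect_leaf_errors in src/lib.rs.
            for err in result.get("errors").and_then(Value::as_array).into_iter().flatten() {
                println!(
                    "{} at instance path '{}'",
                    err.get("message").and_then(Value::as_str).unwrap_or("unknown error"),
                    err.get("instance_path").and_then(Value::as_str).unwrap_or("")
                );
            }
        }
    }
}

fn main() {
    // Hypothetical failure result mirroring the shape asserted in the tests above.
    let result = json!({
        "success": false,
        "errors": [{ "message": "missing properties 'name'", "schema_path": "urn:my_schema#", "instance_path": "" }]
    });
    report(&result);
}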