Compare commits

40 commits:
3b18901bda, b8c0e08068, c734983a59, 9b11f661bc, f3a733626e, 2bcdb8adbb, 3988308965, b7f528d1f6, 2febb292dc, d1831a28ec, c5834ac544, eb25f8489e, 21937db8de, 28b689cac0, cc04a1a8bb, 3ceb8a0770, 499bf68b2a, 6ca00f27e9, 520be66035, c3146ca433, b4d9628b05, 635d31d723, 08efcb92db, dad1216e1f, 2fcf8613b8, f88c27aa70, 48e74815d3, 23235d4b9d, 67406c0b96, 28fff3be11, 70f3d30258, 406466454e, 2a9d51fa77, ae90137308, d22a8669ef, b32c17a4f5, 79cce357e2, 512fa28b91, a36120459b, 19734a5b0d
.editorconfig (new file, 10 lines)
@@ -0,0 +1,10 @@
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+
+[*.{json,toml,control,rs}]
+charset = utf-8
+indent_style = space
+indent_size = 2
.env (new file, 13 lines)
@@ -0,0 +1,13 @@
+ENVIRONMENT=local
+DATABASE_PASSWORD=tIr4TJ0qUwGVM0rlQSe3W7Tgpi33zPbk
+DATABASE_ROLE=agreego_admin
+DATABASE_HOST=127.1.27.4
+DATABASE_PORT=5432
+POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
+DATABASE_NAME=agreego
+DEV_DATABASE_NAME=agreego_dev
+GITEA_TOKEN=3d70c23673517330623a5122998fb304e3c73f0a
+MOOV_ACCOUNT_ID=69a0d2f6-77a2-4e26-934f-d869134f87d3
+MOOV_PUBLIC_KEY=9OMhK5qGnh7Tmk2Z
+MOOV_SECRET_KEY=DrRox7B-YWfO9IheiUUX7lGP8-7VY-Ni
+MOOV_DOMAIN=http://localhost
Cargo.lock (generated, 27 lines changed)
@@ -68,6 +68,12 @@ version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
 
+[[package]]
+name = "appendlist"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e149dc73cd30538307e7ffa2acd3d2221148eaeed4871f246657b1c3eaa1cbd2"
+
 [[package]]
 name = "async-trait"
 version = "0.1.88"
@@ -177,6 +183,26 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "boon"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baa187da765010b70370368c49f08244b1ae5cae1d5d33072f76c8cb7112fe3e"
+dependencies = [
+ "ahash",
+ "appendlist",
+ "base64",
+ "fluent-uri",
+ "idna",
+ "once_cell",
+ "percent-encoding",
+ "regex",
+ "regex-syntax",
+ "serde",
+ "serde_json",
+ "url",
+]
+
 [[package]]
 name = "borrow-or-share"
 version = "0.2.2"
@@ -1015,6 +1041,7 @@ dependencies = [
 name = "jspg"
 version = "0.1.0"
 dependencies = [
+ "boon",
  "jsonschema",
  "lazy_static",
  "pgrx",
Cargo.toml
@@ -9,6 +9,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 jsonschema = "0.29.1"
 lazy_static = "1.5.0"
+boon = "0.6.1"
 
 [dev-dependencies]
 pgrx-tests = "0.14.0"
@@ -22,6 +23,7 @@ path = "src/bin/pgrx_embed.rs"
 
 [features]
 pg17 = ["pgrx/pg17", "pgrx-tests/pg17" ]
+# Local feature flag used by `cargo pgrx test`
 pg_test = []
 
 [profile.dev]
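The new `boon` dependency replaces `jsonschema` as the validation engine. As a point of reference, the following is a minimal standalone Rust sketch (not part of the diff) of the compile-and-validate cycle boon 0.6 provides, mirroring the exact calls `src/lib.rs` makes further down; the `urn:person` identifier and the example schema are illustrative only:

    // Minimal sketch of the boon 0.6 API as used by this diff, outside pgrx.
    use boon::{Compiler, Schemas};
    use serde_json::json;

    fn main() {
        let mut schemas = Schemas::new();
        let mut compiler = Compiler::new();
        // Enforce "format" keywords, as cache_json_schema does below.
        compiler.enable_format_assertions();
        // Register the schema under a URN-style location, then compile it.
        compiler
            .add_resource("urn:person", json!({"type": "object", "required": ["name"]}))
            .expect("schema resource should be accepted");
        let index = compiler
            .compile("urn:person", &mut schemas)
            .expect("schema should compile against the meta-schema");
        // Validation returns Err(ValidationError) whose `causes` form a tree,
        // which lib.rs flattens into leaf errors.
        let instance = json!({ "age": 3 });
        match schemas.validate(&instance, index) {
            Ok(()) => println!("valid"),
            Err(e) => eprintln!("invalid: {e}"),
        }
    }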
flow (120 lines changed)
@@ -9,9 +9,8 @@ source ./flows/rust
 # Vars
 POSTGRES_VERSION="17"
-POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
-DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
+DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
 CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
 PACKAGE_NAME="jspg"
 GITEA_ORGANIZATION="cellular"
 GITEA_REPOSITORY="jspg"
 
@@ -21,106 +20,125 @@ env() {
     # If not set, try to get it from kubectl
     GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
     if [ -z "$GITEA_TOKEN" ]; then
-      echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
-      exit 1
+      error "GITEA_TOKEN is not set and couldn't be retrieved from kubectl" >&2
+      return 2
     fi
     export GITEA_TOKEN
   fi
 
-  echo -e "💰 ${GREEN}Environment variables set${RESET}"
+  success "Environment variables set"
 }
 
 pgrx-prepare() {
-  echo -e "${BLUE}Initializing pgrx...${RESET}"
+  info "Initializing pgrx..."
   # Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
+  local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
 
   if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
-    echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}"
-    echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}"
-    exit 1
+    error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
+    warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
+    return 2
   fi
 
   if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
-    echo -e "${GREEN}pgrx initialized successfully.${RESET}"
+    success "pgrx initialized successfully."
   else
-    echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}"
-    exit 1
+    error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
+    return 2
   fi
 }
 
 build() {
   local version
-  version=$(get-version) || return 1
+  version=$(get-version) || return $?
   local package_dir="./package"
-  local tarball_name="${GITEA_REPOSITORY}-src-v${version}.tar.gz"
+  local tarball_name="${GITEA_REPOSITORY}.tar.gz"
   local tarball_path="${package_dir}/${tarball_name}"
 
-  echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
+  info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
 
   # Clean previous package dir
   rm -rf "${package_dir}"
   mkdir -p "${package_dir}"
 
   # Create the source tarball excluding specified patterns
-  echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
-  if tar --exclude='.git*' --exclude='./target' --exclude='./package' -czf "${tarball_path}" .; then
-    echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
+  info "Creating tarball: ${tarball_path}"
+  if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
+    success "Successfully created source tarball: ${tarball_path}"
   else
-    echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
-    return 1
+    error "Failed to create source tarball."
+    return 2
   fi
 }
 
 install() {
   local version
-  version=$(get-version) || return 1
+  version=$(get-version) || return $? # Propagate error
 
-  echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
+  info "Building and installing PGRX extension v$version into local PostgreSQL..."
 
   # Run the pgrx install command
   # It implicitly uses --release unless --debug is passed
   # It finds pg_config or you can add flags like --pg-config if needed
-  if ! cargo pgrx install "$@"; then # Pass any extra args like --debug
-    echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
-    return 1
+  if ! cargo pgrx install; then
+    error "cargo pgrx install command failed."
+    return 2
   fi
+  success "PGRX extension v$version successfully built and installed."
 
+  # Post-install modification to allow non-superuser usage
+  local pg_sharedir
+  pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
+  local pg_config_status=$?
+  if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
+    error "Failed to determine PostgreSQL shared directory using pg_config."
+    return 2
+  fi
+  local installed_control_path="${pg_sharedir}/extension/jspg.control"
+
+  # Modify the control file
+  if [ ! -f "$installed_control_path" ]; then
+    error "Installed control file not found: '$installed_control_path'"
+    return 2
+  fi
+
+  info "Modifying control file for non-superuser access: ${installed_control_path}"
+  # Use sed -i '' for macOS compatibility
+  if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
+     echo 'trusted = true' >> "$installed_control_path"; then
+    success "Control file modified successfully."
+  else
+    error "Failed to modify control file: ${installed_control_path}"
+    return 2
+  fi
-  echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
 }
 
 test() {
-  echo -e "🧪 ${CYAN}Running jspg tests...${RESET}"
-  cargo pgrx test "pg${POSTGRES_VERSION}" "$@"
+  info "Running jspg tests..."
+  cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
 }
 
 clean() {
-  echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}"
-  cargo clean # Use standard cargo clean
+  info "Cleaning build artifacts..."
+  cargo clean || return $?
 }
 
 jspg-usage() {
-  echo -e " ${CYAN}JSPG Commands:${RESET}"
-  echo -e " prepare           Check OS, Cargo, and PGRX dependencies."
-  echo -e " install [opts]    Run prepare, then build and install the extension locally."
-  echo -e " reinstall [opts]  Run prepare, clean, then build and install the extension locally."
-  echo -e " test [opts]       Run pgrx integration tests."
-  echo -e " clean             Remove pgrx build artifacts."
-  echo -e " build             Build release artifacts into ./package/ (called by release)."
-  echo -e " tag               Tag the current version (called by release)."
-  echo -e " package           Upload artifacts from ./package/ (called by release)."
-  echo -e " release           Perform a full release (increments patch, builds, tags, pushes, packages)."
+  printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
+  printf "install\tBuild and install the extension locally (after prepare).\n"
+  printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
+  printf "test\t\tRun pgrx integration tests.\n"
+  printf "clean\t\tRemove pgrx build artifacts.\n"
 }
 
 jspg-flow() {
   case "$1" in
-    prepare) env; base prepare; cargo-prepare; pgrx-prepare; return 0;;
-    build) env; build; return 0;;
-    install) env; base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
-    reinstall) env; base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
-    test) env; test; return 0;;
-    package) env; package; return 0;;
-    release) env; release; return 0;;
-    clean) env; clean; return 0;;
+    env) env; return $?;;
+    prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
+    build) build; return $?;;
+    install) install; return $?;;
+    reinstall) clean && install; return $?;;
+    test) test "${@:2}"; return $?;;
+    clean) clean; return $?;;
+    release) env; release; return $?;;
    *) return 1 ;;
  esac
 }
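For orientation, the entry points above are dispatched through `jspg-flow`; a typical local cycle under these changes might look like the following hypothetical shell session (assuming the repository's `flow` script is invoked directly):

    ./flow prepare    # check OS, Cargo, and PGRX dependencies
    ./flow install    # build and install the extension into the local PostgreSQL
    ./flow test       # run `cargo pgrx test pg17`; extra args pass through
    ./flow clean      # remove pgrx build artifacts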
flows (submodule)
Submodule flows updated: 2487aa6a25...3e3954fb79
src/lib.rs (299 lines changed)
@@ -1,126 +1,217 @@
 use pgrx::*;
-use jsonschema::{Draft, Validator};
-use serde_json::json;
-use std::collections::HashMap;
-use std::sync::RwLock;
-use lazy_static::lazy_static;
-use jsonschema;
+use serde_json::{json, Value};
+use std::{collections::HashMap, sync::RwLock};
+use boon::{Compiler, Schemas, ValidationError, SchemaIndex, CompileError};
+use lazy_static::lazy_static;
 
 pg_module_magic!();
 
-// Global, thread-safe schema cache using the correct Validator type
+struct BoonCache {
+    schemas: Schemas,
+    id_to_index: HashMap<String, SchemaIndex>,
+}
+
 lazy_static! {
-    static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
+    static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
+        schemas: Schemas::new(),
+        id_to_index: HashMap::new(),
+    });
 }
 
-// Cache a schema explicitly with a provided ID
-#[pg_extern(immutable, strict, parallel_safe)]
-fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
-    match jsonschema::options()
-        .with_draft(Draft::Draft7)
-        .should_validate_formats(true)
-        .build(&schema.0)
-    {
-        Ok(compiled) => {
-            SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
-            true
-        },
-        Err(e) => {
-            notice!("Failed to cache schema '{}': {}", schema_id, e);
-            false
-        }
-    }
-}
+#[pg_extern(strict)]
+fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
+    let mut cache = SCHEMA_CACHE.write().unwrap();
+    let schema_value: Value = schema.0;
+    let schema_path = format!("urn:{}", schema_id);
+
+    let mut compiler = Compiler::new();
+    compiler.enable_format_assertions();
+
+    // Use schema_path when adding the resource
+    if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
+        return JsonB(json!({
+            "success": false,
+            "error": {
+                "message": format!("Failed to add schema resource '{}': {}", schema_id, e),
+                "schema_path": schema_path
+            }
+        }));
+    }
+
+    // Use schema_path when compiling
+    match compiler.compile(&schema_path, &mut cache.schemas) {
+        Ok(sch_index) => {
+            // Store the index using the original schema_id as the key
+            cache.id_to_index.insert(schema_id.to_string(), sch_index);
+            JsonB(json!({ "success": true }))
+        }
+        Err(e) => {
+            let error = match &e {
+                CompileError::ValidationError { url: _url, src } => {
+                    // Collect leaf errors from the meta-schema validation failure
+                    let mut error_list = Vec::new();
+                    collect_leaf_errors(src, &mut error_list);
+                    // Filter and deduplicate errors, returning as a single JSON Value (Array)
+                    json!(filter_boon_errors(error_list))
+                }
+                _ => {
+                    // Keep existing handling for other compilation errors
+                    let _error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string();
+                    json!({
+                        "message": format!("Schema '{}' compilation failed: {}", schema_id, e),
+                        "schema_path": schema_path,
+                        "detail": format!("{:?}", e),
+                    })
+                }
+            };
+            // Ensure the outer structure remains { success: false, error: ... }
+            JsonB(json!({
+                "success": false,
+                "error": error
+            }))
+        }
+    }
+}
 
-// Check if a schema is cached
-#[pg_extern(immutable, strict, parallel_safe)]
-fn schema_cached(schema_id: &str) -> bool {
-    SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
-}
-
-// Validate JSONB instance against a cached schema by ID
-#[pg_extern(immutable, strict, parallel_safe)]
-fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
-    let cache = SCHEMA_CACHE.read().unwrap();
-    let compiled_schema: &Validator = match cache.get(schema_id) {
-        Some(schema) => schema,
-        None => {
-            return JsonB(json!({
-                "valid": false,
-                "errors": [format!("Schema ID '{}' not cached", schema_id)]
-            }));
-        }
-    };
-
-    if compiled_schema.is_valid(&instance.0) {
-        JsonB(json!({ "valid": true }))
-    } else {
-        let errors: Vec<String> = compiled_schema
-            .iter_errors(&instance.0)
-            .map(|e| e.to_string())
-            .collect();
-
-        JsonB(json!({ "valid": false, "errors": errors }))
-    }
-}
+#[pg_extern(strict, parallel_safe)]
+fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
+    let cache = SCHEMA_CACHE.read().unwrap();
+
+    // Lookup uses the original schema_id
+    match cache.id_to_index.get(schema_id) {
+        None => JsonB(json!({
+            "success": false,
+            "error": {
+                "message": format!("Schema with id '{}' not found in cache", schema_id)
+            }
+        })),
+        Some(sch_index) => {
+            let instance_value: Value = instance.0;
+            match cache.schemas.validate(&instance_value, *sch_index) {
+                Ok(_) => JsonB(json!({ "success": true })),
+                Err(validation_error) => {
+                    // Directly use the result of format_validation_error
+                    // which now includes the top-level success indicator and flat error list
+                    let mut error_list = Vec::new();
+                    collect_leaf_errors(&validation_error, &mut error_list);
+                    JsonB(json!({
+                        "success": false,
+                        "error": filter_boon_errors(error_list) // Filter and deduplicate errors
+                    }))
+                }
+            }
+        }
+    }
+}
+
+// Recursively collects leaf errors into a flat list
+fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
+    if error.causes.is_empty() {
+        let default_message = format!("{}", error);
+        let message = if let Some(start_index) = default_message.find("': ") {
+            default_message[start_index + 3..].to_string()
+        } else {
+            default_message
+        };
+
+        errors_list.push(json!({
+            "message": message,
+            "schema_path": error.schema_url.to_string(),
+            "instance_path": error.instance_location.to_string(),
+        }));
+    } else {
+        for cause in &error.causes {
+            collect_leaf_errors(cause, errors_list);
+        }
+    }
+}
 
-// Clear the entire schema cache explicitly
-#[pg_extern(immutable, parallel_safe)]
-fn clear_schema_cache() -> bool {
-    SCHEMA_CACHE.write().unwrap().clear();
-    true
-}
+// Filters collected errors, removing structural noise and then deduplicating by instance_path
+fn filter_boon_errors(raw_errors: Vec<Value>) -> Vec<Value> {
+    use std::collections::HashMap;
+    use std::collections::hash_map::Entry;
+
+    // Define schema keywords that indicate structural paths, not instance paths
+    let structural_path_segments = [
+        "/allOf/", "/anyOf/", "/oneOf/",
+        "/if/", "/then/", "/else/",
+        "/not/"
+        // Note: "/properties/" and "/items/" are generally valid,
+        // but might appear spuriously in boon's paths for complex types.
+        // We exclude only the explicitly logical/combinatorial ones for now.
+    ];
+
+    // 1. Filter out errors with instance_paths containing structural segments
+    let plausible_errors: Vec<Value> = raw_errors.into_iter().filter(|error_value| {
+        if let Some(instance_path_value) = error_value.get("instance_path") {
+            if let Some(instance_path_str) = instance_path_value.as_str() {
+                // Keep if NONE of the structural segments are present
+                !structural_path_segments.iter().any(|&segment| instance_path_str.contains(segment))
+            } else {
+                false // Invalid instance_path type, filter out
+            }
+        } else {
+            false // No instance_path field, filter out
+        }
+    }).collect();
+
+    // 2. Deduplicate the remaining plausible errors by instance_path
+    let mut unique_errors: HashMap<String, Value> = HashMap::new();
+    for error_value in plausible_errors {
+        if let Some(instance_path_value) = error_value.get("instance_path") {
+            if let Some(instance_path_str) = instance_path_value.as_str() {
+                if let Entry::Vacant(entry) = unique_errors.entry(instance_path_str.to_string()) {
+                    entry.insert(error_value);
+                }
+            }
+        }
+    }
+
+    // Collect the unique errors
+    unique_errors.into_values().collect()
+}
 
-#[pg_schema]
-#[cfg(any(test, feature = "pg_test"))]
-mod tests {
-    use pgrx::*;
-    use serde_json::json;
-
-    #[pg_test]
-    fn test_cache_and_validate_schema() {
-        assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
-        assert!(crate::schema_cached("test_schema"));
-
-        let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
-        assert_eq!(result_valid.0["valid"], true);
-
-        let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
-        assert_eq!(result_invalid.0["valid"], false);
-        assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type"));
-    }
-
-    #[pg_test]
-    fn test_schema_not_cached() {
-        let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
-        assert_eq!(result.0["valid"], false);
-        assert!(result.0["errors"][0].as_str().unwrap().contains("not cached"));
-    }
-
-    #[pg_test]
-    fn test_clear_schema_cache() {
-        crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
-        assert!(crate::schema_cached("clear_test"));
-
-        crate::clear_schema_cache();
-        assert!(!crate::schema_cached("clear_test"));
-    }
-
-    #[pg_test]
-    fn test_invalid_schema_cache() {
-        let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
-        assert!(!result);
-        assert!(!crate::schema_cached("bad_schema"));
-    }
-}
+#[pg_extern(strict, parallel_safe)]
+fn json_schema_cached(schema_id: &str) -> bool {
+    let cache = SCHEMA_CACHE.read().unwrap();
+    cache.id_to_index.contains_key(schema_id)
+}
+
+#[pg_extern(strict)]
+fn clear_json_schemas() {
+    let mut cache = SCHEMA_CACHE.write().unwrap();
+    *cache = BoonCache {
+        schemas: Schemas::new(),
+        id_to_index: HashMap::new(),
+    };
+}
+
+#[pg_extern(strict, parallel_safe)]
+fn show_json_schemas() -> Vec<String> {
+    let cache = SCHEMA_CACHE.read().unwrap();
+    let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
+    ids
+}
 
 /// This module is required by `cargo pgrx test` invocations.
 /// It must be visible at the root of your extension crate.
 #[cfg(test)]
 pub mod pg_test {
-    pub fn setup(_options: Vec<&str>) {
-        // Initialization if needed
-    }
+    pub fn setup(_options: Vec<&str>) {
+        // perform one-off initialization when the pg_test framework starts
+    }
 
-    pub fn postgresql_conf_options() -> Vec<&'static str> {
-        vec![]
-    }
+    #[must_use]
+    pub fn postgresql_conf_options() -> Vec<&'static str> {
+        // return any postgresql.conf settings that are required for your tests
+        vec![]
+    }
 }
+
+#[cfg(any(test, feature = "pg_test"))]
+#[pg_schema]
+mod tests {
+    include!("tests.rs");
+}
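Because these functions are exported with `#[pg_extern]`, they become ordinary SQL functions once the extension is installed. A hedged sketch of a psql session against the new API (function names are from this diff; the extension name `jspg` comes from the control file modified by `flow install`, and the exact error payload follows the shape asserted in src/tests.rs below):

    -- assumes the extension was built and installed, e.g. via `./flow install`
    CREATE EXTENSION jspg;

    SELECT cache_json_schema('person', '{"type": "object", "required": ["name"]}'::jsonb);
    -- {"success": true}

    SELECT validate_json_schema('person', '{"age": 3}'::jsonb);
    -- {"success": false, "error": [{"message": "missing properties 'name'", ...}]}

    SELECT json_schema_cached('person');  -- true
    SELECT show_json_schemas();           -- {person}
    SELECT clear_json_schemas();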
src/tests.rs (new file, 415 lines)
@@ -0,0 +1,415 @@
+use crate::*;
+use serde_json::{json, Value};
+use pgrx::{JsonB, pg_test};
+
+// Helper macro for asserting success (no changes needed, but ensure it's present)
+macro_rules! assert_success_with_json {
+    ($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
+        let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
+        if condition_result != Some(true) {
+            let base_msg = format!($fmt $(, $($args)*)?);
+            let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
+                .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
+            let panic_msg = format!("Assertion Failed (expected success): {}\nResult JSON:\n{}", base_msg, pretty_json);
+            panic!("{}", panic_msg);
+        }
+    };
+    // Simpler version without message
+    ($result_jsonb:expr) => {
+        let condition_result: Option<bool> = $result_jsonb.0.get("success").and_then(Value::as_bool);
+        if condition_result != Some(true) {
+            let pretty_json = serde_json::to_string_pretty(&$result_jsonb.0)
+                .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $result_jsonb.0));
+            let panic_msg = format!("Assertion Failed (expected success)\nResult JSON:\n{}", pretty_json);
+            panic!("{}", panic_msg);
+        }
+    };
+}
+
+// Updated helper macro for asserting failed JSON results with the new flat error structure
+macro_rules! assert_failure_with_json {
+    // --- Arms with error count and message substring check ---
+    // With custom message:
+    ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
+        let json_result = &$result.0;
+        let success = json_result.get("success").and_then(Value::as_bool);
+        let error_val_opt = json_result.get("error"); // Changed key
+        let base_msg = format!($fmt $(, $($args)*)?);
+
+        if success != Some(false) {
+            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
+        }
+        match error_val_opt {
+            Some(error_val) => {
+                if error_val.is_array() {
+                    let errors_array = error_val.as_array().unwrap();
+                    if errors_array.len() != $expected_error_count {
+                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                        panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
+                    }
+                    if $expected_error_count > 0 {
+                        let first_error_message = errors_array[0].get("message").and_then(Value::as_str);
+                        match first_error_message {
+                            Some(msg) => {
+                                if !msg.contains($expected_first_message_contains) {
+                                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                                    panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
+                                }
+                            }
+                            None => {
+                                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                                panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
+                            }
+                        }
+                    }
+                } else if error_val.is_object() {
+                    // Handle single error object case (like 'schema not found')
+                    if $expected_error_count != 1 {
+                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                        panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, pretty_json);
+                    }
+                    let message = error_val.get("message").and_then(Value::as_str);
+                    match message {
+                        Some(msg) => {
+                            if !msg.contains($expected_first_message_contains) {
+                                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                                panic!("Assertion Failed (error message mismatch): Expected object message contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, pretty_json);
+                            }
+                        }
+                        None => {
+                            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                            panic!("Assertion Failed (error object has no 'message' string): {}\nResult JSON:\n{}", base_msg, pretty_json);
+                        }
+                    }
+                } else {
+                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                    panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
+                }
+            }
+            None => {
+                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
+            }
+        }
+    };
+    // Without custom message (calls the one above with ""):
+    ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
+        assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
+    };
+
+    // --- Arms with error count check only ---
+    // With custom message:
+    ($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
+        let json_result = &$result.0;
+        let success = json_result.get("success").and_then(Value::as_bool);
+        let error_val_opt = json_result.get("error"); // Changed key
+        let base_msg = format!($fmt $(, $($args)*)?);
+
+        if success != Some(false) {
+            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
+        }
+        match error_val_opt {
+            Some(error_val) => {
+                if error_val.is_array() {
+                    let errors_array = error_val.as_array().unwrap();
+                    if errors_array.len() != $expected_error_count {
+                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                        panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, pretty_json);
+                    }
+                } else if error_val.is_object() {
+                    if $expected_error_count != 1 {
+                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                        panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, pretty_json);
+                    }
+                    // Count check passes if expected is 1 and got object
+                } else {
+                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                    panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
+                }
+            }
+            None => {
+                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
+            }
+        }
+    };
+    // Without custom message (calls the one above with ""):
+    ($result:expr, $expected_error_count:expr) => {
+        assert_failure_with_json!($result, $expected_error_count, "");
+    };
+
+    // --- Arms checking failure only (expects at least one error) ---
+    // With custom message:
+    ($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
+        let json_result = &$result.0;
+        let success = json_result.get("success").and_then(Value::as_bool);
+        let error_val_opt = json_result.get("error"); // Changed key
+        let base_msg = format!($fmt $(, $($args)*)?);
+
+        if success != Some(false) {
+            let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, pretty_json);
+        }
+        match error_val_opt {
+            Some(error_val) => {
+                if error_val.is_object() {
+                    // OK: single error object is a failure
+                } else if error_val.is_array() {
+                    if error_val.as_array().unwrap().is_empty() {
+                        let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                        panic!("Assertion Failed (expected errors, but 'error' array is empty): {}\nResult JSON:\n{}", base_msg, pretty_json);
+                    }
+                    // OK: non-empty error array is a failure
+                } else {
+                    let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                    panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, pretty_json);
+                }
+            }
+            None => {
+                let pretty_json = serde_json::to_string_pretty(&json_result).unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", json_result));
+                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, pretty_json);
+            }
+        }
+    };
+    // Without custom message (calls the one above with ""):
+    ($result:expr) => {
+        assert_failure_with_json!($result, "");
+    };
+}
+
+fn jsonb(val: Value) -> JsonB {
+    JsonB(val)
+}
+
+#[pg_test]
+fn test_cache_and_validate_json_schema() {
+    clear_json_schemas(); // Call clear directly
+    let schema_id = "my_schema";
+    let schema = json!({
+        "type": "object",
+        "properties": {
+            "name": { "type": "string" },
+            "age": { "type": "integer", "minimum": 0 }
+        },
+        "required": ["name", "age"]
+    });
+    let valid_instance = json!({ "name": "Alice", "age": 30 });
+    let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
+    let invalid_instance_missing = json!({ "name": "Charlie" });
+
+    let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
+    assert_success_with_json!(cache_result, "Cache operation should succeed.");
+
+    let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
+    assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
+
+    // Invalid type
+    let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
+    assert_failure_with_json!(invalid_result_type, 1, "must be >=0", "Validation with invalid type should fail.");
+    let errors_type = invalid_result_type.0["error"].as_array().unwrap(); // Check 'error', expect array
+    assert_eq!(errors_type[0]["instance_path"], "/age");
+    assert_eq!(errors_type[0]["schema_path"], "urn:my_schema#/properties/age");
+
+    // Missing field
+    let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
+    assert_failure_with_json!(invalid_result_missing, 1, "missing properties 'age'", "Validation with missing field should fail.");
+    let errors_missing = invalid_result_missing.0["error"].as_array().unwrap(); // Check 'error', expect array
+    assert_eq!(errors_missing[0]["instance_path"], "");
+    assert_eq!(errors_missing[0]["schema_path"], "urn:my_schema#");
+
+    // Schema not found
+    let non_existent_id = "non_existent_schema";
+    let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
+    assert_failure_with_json!(invalid_schema_result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
+    // Check 'error' is an object for 'schema not found'
+    let error_notfound_obj = invalid_schema_result.0["error"].as_object().expect("'error' should be an object for schema not found");
+    assert!(error_notfound_obj.contains_key("message")); // Check message exists
+    // Removed checks for schema_path/instance_path as they aren't added in lib.rs for this case
+}
+
+#[pg_test]
+fn test_validate_json_schema_not_cached() {
+    clear_json_schemas(); // Call clear directly
+    let instance = json!({ "foo": "bar" });
+    let result = validate_json_schema("non_existent_schema", jsonb(instance));
+    // Use the updated macro, expecting count 1 and specific message (handles object case)
+    assert_failure_with_json!(result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
+}
+
+#[pg_test]
+fn test_cache_invalid_json_schema() {
+    clear_json_schemas(); // Call clear directly
+    let schema_id = "invalid_schema";
+    // Schema with an invalid type *value*
+    let invalid_schema = json!({
+        "$id": "urn:invalid_schema",
+        "type": ["invalid_type_value"]
+    });
+
+    let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));
+
+    // Expect 2 leaf errors because the meta-schema validation fails at the type value
+    // and within the type array itself.
+    assert_failure_with_json!(
+        cache_result,
+        2, // Expect exactly two leaf errors
+        "value must be one of", // Check message substring (present in both)
+        "Caching invalid schema should fail with specific meta-schema validation errors."
+    );
+
+    // Ensure the error is an array and check specifics
+    let error_array = cache_result.0["error"].as_array().expect("Error field should be an array");
+    assert_eq!(error_array.len(), 2);
+    // Note: Order might vary depending on boon's internal processing, check both possibilities or sort.
+    // Assuming the order shown in the logs for now:
+    assert_eq!(error_array[0]["instance_path"], "/type");
+    assert!(error_array[0]["message"].as_str().unwrap().contains("value must be one of"));
+    assert_eq!(error_array[1]["instance_path"], "/type/0");
+    assert!(error_array[1]["message"].as_str().unwrap().contains("value must be one of"));
+}
+
+#[pg_test]
+fn test_validate_json_schema_detailed_validation_errors() {
+    clear_json_schemas(); // Call clear directly
+    let schema_id = "detailed_errors";
+    let schema = json!({
+        "type": "object",
+        "properties": {
+            "address": {
+                "type": "object",
+                "properties": {
+                    "street": { "type": "string" },
+                    "city": { "type": "string", "maxLength": 10 }
+                },
+                "required": ["street", "city"]
+            }
+        },
+        "required": ["address"]
+    });
+    let _ = cache_json_schema(schema_id, jsonb(schema));
+
+    let invalid_instance = json!({
+        "address": {
+            "street": 123, // Wrong type
+            "city": "Supercalifragilisticexpialidocious" // Too long
+        }
+    });
+
+    let result = validate_json_schema(schema_id, jsonb(invalid_instance));
+
+    // Update: Expect 2 errors again, as boon reports both nested errors.
+    assert_failure_with_json!(result, 2);
+}
+
+#[pg_test]
+fn test_validate_json_schema_oneof_validation_errors() {
+    clear_json_schemas(); // Call clear directly
+    let schema_id = "oneof_schema";
+    let schema = json!({
+        "oneOf": [
+            { // Option 1: Object with string prop
+                "type": "object",
+                "properties": {
+                    "string_prop": { "type": "string", "maxLength": 5 }
+                },
+                "required": ["string_prop"]
+            },
+            { // Option 2: Object with number prop
+                "type": "object",
+                "properties": {
+                    "number_prop": { "type": "number", "minimum": 10 }
+                },
+                "required": ["number_prop"]
+            }
+        ]
+    });
+
+    let _ = cache_json_schema(schema_id, jsonb(schema));
+
+    // --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
+    let invalid_string_instance = json!({ "string_prop": "toolongstring" });
+    let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
+    // Expect 2 leaf errors. Check count only with the macro.
+    assert_failure_with_json!(result_invalid_string, 2);
+    // Explicitly check that both expected errors are present, ignoring order
+    let errors_string = result_invalid_string.0["error"].as_array().expect("Expected error array for invalid string");
+    assert!(errors_string.iter().any(|e| e["instance_path"] == "/string_prop" && e["message"].as_str().unwrap().contains("length must be <=5")), "Missing maxLength error");
+    assert!(errors_string.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'number_prop'")), "Missing number_prop required error");
+
+    // --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
+    let invalid_number_instance = json!({ "number_prop": 5 });
+    let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
+    // Expect 2 leaf errors. Check count only with the macro.
+    assert_failure_with_json!(result_invalid_number, 2);
+    // Explicitly check that both expected errors are present, ignoring order
+    let errors_number = result_invalid_number.0["error"].as_array().expect("Expected error array for invalid number");
+    assert!(errors_number.iter().any(|e| e["instance_path"] == "/number_prop" && e["message"].as_str().unwrap().contains("must be >=10")), "Missing minimum error");
+    assert!(errors_number.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'string_prop'")), "Missing string_prop required error");
+
+    // --- Test case 3: Fails type check (not object) for both branches ---
+    // Input: boolean, expected object for both branches
+    let invalid_bool_instance = json!(true); // Not an object
+    let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
+    // Expect only 1 leaf error after filtering, as both original errors have instance_path ""
+    assert_failure_with_json!(result_invalid_bool, 1);
+    // Explicitly check that the single remaining error is the type error for the root instance path
+    let errors_bool = result_invalid_bool.0["error"].as_array().expect("Expected error array for invalid bool");
+    assert_eq!(errors_bool.iter().filter(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("want object")).count(), 1, "Expected one 'want object' error at root after filtering");
+
+    // --- Test case 4: Fails missing required for both branches ---
+    // Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
+    let invalid_empty_obj = json!({});
+    let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
+    // Expect only 1 leaf error after filtering, as both original errors have instance_path ""
+    assert_failure_with_json!(result_empty_obj, 1);
+    // Explicitly check that the single remaining error is one of the expected missing properties errors
+    let errors_empty = result_empty_obj.0["error"].as_array().expect("Expected error array for empty object");
+    assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
+    let the_error = &errors_empty[0];
+    assert_eq!(the_error["instance_path"], "", "Expected instance_path to be empty string");
+    let message = the_error["message"].as_str().unwrap();
+    assert!(message.contains("missing properties 'string_prop'") || message.contains("missing properties 'number_prop'"),
+        "Error message should indicate missing string_prop or number_prop, got: {}", message);
+}
+
+#[pg_test]
+fn test_clear_json_schemas() {
+    clear_json_schemas(); // Call clear directly
+    let schema_id = "schema_to_clear";
+    let schema = json!({ "type": "string" });
+    cache_json_schema(schema_id, jsonb(schema.clone()));
+
+    let show_result1 = show_json_schemas();
+    assert!(show_result1.contains(&schema_id.to_string()));
+
+    clear_json_schemas();
+
+    let show_result2 = show_json_schemas();
+    assert!(show_result2.is_empty());
+
+    let instance = json!("test");
+    let validate_result = validate_json_schema(schema_id, jsonb(instance));
+    // Use the updated macro, expecting count 1 and specific message (handles object case)
+    assert_failure_with_json!(validate_result, 1, "Schema with id 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
+}
+
+#[pg_test]
+fn test_show_json_schemas() {
+    clear_json_schemas(); // Call clear directly
+    let schema_id1 = "schema1";
+    let schema_id2 = "schema2";
+    let schema = json!({ "type": "boolean" });
+
+    cache_json_schema(schema_id1, jsonb(schema.clone()));
+    cache_json_schema(schema_id2, jsonb(schema.clone()));
+
+    let mut result = show_json_schemas(); // Make result mutable
+    result.sort(); // Sort for deterministic testing
+    assert_eq!(result, vec!["schema1".to_string(), "schema2".to_string()]); // Check exact content
+    assert!(result.contains(&schema_id1.to_string())); // Keep specific checks too if desired
+    assert!(result.contains(&schema_id2.to_string()));
+}
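These tests are compiled into the crate through the `include!("tests.rs")` hook added to src/lib.rs and run under the `pg_test` feature wired up in Cargo.toml above. Locally they would be driven either through the flow script or with pgrx directly:

    ./flow test
    # or, equivalently for PostgreSQL 17 as pinned in this diff:
    cargo pgrx test pg17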