Compare commits
24 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c734983a59 | |||
| 9b11f661bc | |||
| f3a733626e | |||
| 2bcdb8adbb | |||
| 3988308965 | |||
| b7f528d1f6 | |||
| 2febb292dc | |||
| d1831a28ec | |||
| c5834ac544 | |||
| eb25f8489e | |||
| 21937db8de | |||
| 28b689cac0 | |||
| cc04a1a8bb | |||
| 3ceb8a0770 | |||
| 499bf68b2a | |||
| 6ca00f27e9 | |||
| 520be66035 | |||
| c3146ca433 | |||
| b4d9628b05 | |||
| 635d31d723 | |||
| 08efcb92db | |||
| dad1216e1f | |||
| 2fcf8613b8 | |||
| f88c27aa70 |
13
.env
Normal file
13
.env
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
ENVIRONMENT=local
|
||||||
|
DATABASE_PASSWORD=tIr4TJ0qUwGVM0rlQSe3W7Tgpi33zPbk
|
||||||
|
DATABASE_ROLE=agreego_admin
|
||||||
|
DATABASE_HOST=127.1.27.4
|
||||||
|
DATABASE_PORT=5432
|
||||||
|
POSTGRES_PASSWORD=xzIq5JT0xY3F+2m1GtnrKDdK29sNSXVVYZHPKJVh8pI=
|
||||||
|
DATABASE_NAME=agreego
|
||||||
|
DEV_DATABASE_NAME=agreego_dev
|
||||||
|
GITEA_TOKEN=3d70c23673517330623a5122998fb304e3c73f0a
|
||||||
|
MOOV_ACCOUNT_ID=69a0d2f6-77a2-4e26-934f-d869134f87d3
|
||||||
|
MOOV_PUBLIC_KEY=9OMhK5qGnh7Tmk2Z
|
||||||
|
MOOV_SECRET_KEY=DrRox7B-YWfO9IheiUUX7lGP8-7VY-Ni
|
||||||
|
MOOV_DOMAIN=http://localhost
|
||||||
106
flow
106
flow
@ -9,7 +9,7 @@ source ./flows/rust
|
|||||||
# Vars
|
# Vars
|
||||||
POSTGRES_VERSION="17"
|
POSTGRES_VERSION="17"
|
||||||
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
|
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
|
||||||
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
|
DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
|
||||||
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
|
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
|
||||||
GITEA_ORGANIZATION="cellular"
|
GITEA_ORGANIZATION="cellular"
|
||||||
GITEA_REPOSITORY="jspg"
|
GITEA_REPOSITORY="jspg"
|
||||||
@ -20,133 +20,125 @@ env() {
|
|||||||
# If not set, try to get it from kubectl
|
# If not set, try to get it from kubectl
|
||||||
GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
|
GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
|
||||||
if [ -z "$GITEA_TOKEN" ]; then
|
if [ -z "$GITEA_TOKEN" ]; then
|
||||||
echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
|
error "GITEA_TOKEN is not set and couldn't be retrieved from kubectl" >&2
|
||||||
exit 1
|
return 2
|
||||||
fi
|
fi
|
||||||
export GITEA_TOKEN
|
export GITEA_TOKEN
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -e "💰 ${GREEN}Environment variables set${RESET}"
|
success "Environment variables set"
|
||||||
}
|
}
|
||||||
|
|
||||||
pgrx-prepare() {
|
pgrx-prepare() {
|
||||||
echo -e "${BLUE}Initializing pgrx...${RESET}"
|
info "Initializing pgrx..."
|
||||||
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
|
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
|
||||||
local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
|
local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
|
||||||
|
|
||||||
if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
|
if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
|
||||||
echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}"
|
error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
|
||||||
echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}"
|
warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
|
||||||
exit 1
|
return 2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
|
if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
|
||||||
echo -e "${GREEN}pgrx initialized successfully.${RESET}"
|
success "pgrx initialized successfully."
|
||||||
else
|
else
|
||||||
echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}"
|
error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
|
||||||
exit 1
|
return 2
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
build() {
|
build() {
|
||||||
local version
|
local version
|
||||||
version=$(get-version) || return 1
|
version=$(get-version) || return $?
|
||||||
local package_dir="./package"
|
local package_dir="./package"
|
||||||
local tarball_name="${GITEA_REPOSITORY}.tar.gz"
|
local tarball_name="${GITEA_REPOSITORY}.tar.gz"
|
||||||
local tarball_path="${package_dir}/${tarball_name}"
|
local tarball_path="${package_dir}/${tarball_name}"
|
||||||
|
|
||||||
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
|
info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
|
||||||
|
|
||||||
# Clean previous package dir
|
# Clean previous package dir
|
||||||
rm -rf "${package_dir}"
|
rm -rf "${package_dir}"
|
||||||
mkdir -p "${package_dir}"
|
mkdir -p "${package_dir}"
|
||||||
|
|
||||||
# Create the source tarball excluding specified patterns
|
# Create the source tarball excluding specified patterns
|
||||||
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
|
info "Creating tarball: ${tarball_path}"
|
||||||
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
|
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
|
||||||
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
|
success "Successfully created source tarball: ${tarball_path}"
|
||||||
else
|
else
|
||||||
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
|
error "Failed to create source tarball."
|
||||||
return 1
|
return 2
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
install() {
|
install() {
|
||||||
local version
|
local version
|
||||||
version=$(get-version) || return 1
|
version=$(get-version) || return $? # Propagate error
|
||||||
|
|
||||||
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
|
info "Building and installing PGRX extension v$version into local PostgreSQL..."
|
||||||
|
|
||||||
# Run the pgrx install command
|
# Run the pgrx install command
|
||||||
# It implicitly uses --release unless --debug is passed
|
|
||||||
# It finds pg_config or you can add flags like --pg-config if needed
|
|
||||||
if ! cargo pgrx install; then
|
if ! cargo pgrx install; then
|
||||||
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
|
error "cargo pgrx install command failed."
|
||||||
return 1
|
return 2
|
||||||
fi
|
fi
|
||||||
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
|
success "PGRX extension v$version successfully built and installed."
|
||||||
|
|
||||||
# Post-install modification to allow non-superuser usage
|
# Post-install modification to allow non-superuser usage
|
||||||
# Get the installation path dynamically using pg_config
|
|
||||||
local pg_sharedir
|
local pg_sharedir
|
||||||
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
|
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
|
||||||
if [ -z "$pg_sharedir" ]; then
|
local pg_config_status=$?
|
||||||
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
|
if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
|
||||||
return 1
|
error "Failed to determine PostgreSQL shared directory using pg_config."
|
||||||
|
return 2
|
||||||
fi
|
fi
|
||||||
local installed_control_path="${pg_sharedir}/extension/jspg.control"
|
local installed_control_path="${pg_sharedir}/extension/jspg.control"
|
||||||
|
|
||||||
# Modify the control file
|
# Modify the control file
|
||||||
if [ ! -f "$installed_control_path" ]; then
|
if [ ! -f "$installed_control_path" ]; then
|
||||||
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
|
error "Installed control file not found: '$installed_control_path'"
|
||||||
return 1
|
return 2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
|
info "Modifying control file for non-superuser access: ${installed_control_path}"
|
||||||
# Use sed -i '' for macOS compatibility
|
# Use sed -i '' for macOS compatibility
|
||||||
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
|
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
|
||||||
echo 'trusted = true' >> "$installed_control_path"; then
|
echo 'trusted = true' >> "$installed_control_path"; then
|
||||||
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
|
success "Control file modified successfully."
|
||||||
else
|
else
|
||||||
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
|
error "Failed to modify control file: ${installed_control_path}"
|
||||||
return 1
|
return 2
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
test() {
|
test() {
|
||||||
echo -e "🧪 ${CYAN}Running jspg tests...${RESET}"
|
info "Running jspg tests..."
|
||||||
cargo pgrx test "pg${POSTGRES_VERSION}" "$@"
|
cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
|
||||||
}
|
}
|
||||||
|
|
||||||
clean() {
|
clean() {
|
||||||
echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}"
|
info "Cleaning build artifacts..."
|
||||||
cargo clean # Use standard cargo clean
|
cargo clean || return $?
|
||||||
}
|
}
|
||||||
|
|
||||||
jspg-usage() {
|
jspg-usage() {
|
||||||
echo -e " ${CYAN}JSPG Commands:${RESET}"
|
printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
|
||||||
echo -e " prepare Check OS, Cargo, and PGRX dependencies."
|
printf "install\tBuild and install the extension locally (after prepare).\n"
|
||||||
echo -e " install [opts] Run prepare, then build and install the extension locally."
|
printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
|
||||||
echo -e " reinstall [opts] Run prepare, clean, then build and install the extension locally."
|
printf "test\t\tRun pgrx integration tests.\n"
|
||||||
echo -e " test [opts] Run pgrx integration tests."
|
printf "clean\t\tRemove pgrx build artifacts.\n"
|
||||||
echo -e " clean Remove pgrx build artifacts."
|
|
||||||
echo -e " build Build release artifacts into ./package/ (called by release)."
|
|
||||||
echo -e " tag Tag the current version (called by release)."
|
|
||||||
echo -e " package Upload artifacts from ./package/ (called by release)."
|
|
||||||
echo -e " release Perform a full release (increments patch, builds, tags, pushes, packages)."
|
|
||||||
}
|
}
|
||||||
|
|
||||||
jspg-flow() {
|
jspg-flow() {
|
||||||
case "$1" in
|
case "$1" in
|
||||||
env) env; return 0;;
|
env) env; return $?;;
|
||||||
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
|
prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
|
||||||
build) build; return 0;;
|
build) build; return $?;;
|
||||||
install) install; return 0;;
|
install) install; return $?;;
|
||||||
reinstall) clean; install; return 0;;
|
reinstall) clean && install; return $?;;
|
||||||
test) test; return 0;;
|
test) test "${@:2}"; return $?;;
|
||||||
package) env; package; return 0;;
|
clean) clean; return $?;;
|
||||||
release) env; release; return 0;;
|
release) env; release; return $?;;
|
||||||
clean) clean; return 0;;
|
|
||||||
*) return 1 ;;
|
*) return 1 ;;
|
||||||
esac
|
esac
|
||||||
}
|
}
|
||||||
|
|||||||
2
flows
2
flows
Submodule flows updated: db55335254...3e3954fb79
512
src/lib.rs
512
src/lib.rs
@ -4,393 +4,197 @@ pg_module_magic!();
|
|||||||
|
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
use std::{collections::HashMap, sync::RwLock};
|
use std::{collections::HashMap, sync::RwLock};
|
||||||
use boon::{Compiler, Schemas, ValidationError, SchemaIndex};
|
use boon::{Compiler, Schemas, ValidationError, SchemaIndex, CompileError};
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
|
|
||||||
struct BoonCache {
|
struct BoonCache {
|
||||||
schemas: Schemas,
|
schemas: Schemas,
|
||||||
id_to_index: HashMap<String, SchemaIndex>,
|
id_to_index: HashMap<String, SchemaIndex>,
|
||||||
}
|
}
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
|
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
|
||||||
schemas: Schemas::new(),
|
schemas: Schemas::new(),
|
||||||
id_to_index: HashMap::new()
|
id_to_index: HashMap::new(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pg_extern(strict)]
|
#[pg_extern(strict)]
|
||||||
fn cache_schema(schema_id: &str, schema: JsonB) -> JsonB {
|
fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
|
||||||
let mut cache = SCHEMA_CACHE.write().unwrap();
|
let mut cache = SCHEMA_CACHE.write().unwrap();
|
||||||
let schema_value: Value = schema.0;
|
let schema_value: Value = schema.0;
|
||||||
|
let schema_path = format!("urn:{}", schema_id);
|
||||||
|
|
||||||
let mut compiler = Compiler::new();
|
let mut compiler = Compiler::new();
|
||||||
compiler.enable_format_assertions();
|
compiler.enable_format_assertions();
|
||||||
|
|
||||||
let schema_url = format!("urn:jspg:{}", schema_id);
|
// Use schema_path when adding the resource
|
||||||
|
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
|
||||||
|
return JsonB(json!({
|
||||||
|
"success": false,
|
||||||
|
"error": {
|
||||||
|
"message": format!("Failed to add schema resource '{}': {}", schema_id, e),
|
||||||
|
"schema_path": schema_path
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
if let Err(e) = compiler.add_resource(&schema_url, schema_value) {
|
// Use schema_path when compiling
|
||||||
return JsonB(json!({
|
match compiler.compile(&schema_path, &mut cache.schemas) {
|
||||||
"success": false,
|
Ok(sch_index) => {
|
||||||
"error": format!("Failed to add schema resource '{}': {}", schema_id, e)
|
// Store the index using the original schema_id as the key
|
||||||
}));
|
cache.id_to_index.insert(schema_id.to_string(), sch_index);
|
||||||
|
JsonB(json!({ "success": true }))
|
||||||
}
|
}
|
||||||
|
Err(e) => {
|
||||||
match compiler.compile(&schema_url, &mut cache.schemas) {
|
let error = match &e {
|
||||||
Ok(sch_index) => {
|
CompileError::ValidationError { url: _url, src } => {
|
||||||
cache.id_to_index.insert(schema_id.to_string(), sch_index);
|
// Collect leaf errors from the meta-schema validation failure
|
||||||
JsonB(json!({
|
let mut error_list = Vec::new();
|
||||||
"success": true,
|
collect_leaf_errors(src, &mut error_list);
|
||||||
"schema_id": schema_id,
|
// Return the flat list directly
|
||||||
"message": "Schema cached successfully."
|
json!(error_list)
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
Err(e) => JsonB(json!({
|
_ => {
|
||||||
"success": false,
|
// Keep existing handling for other compilation errors
|
||||||
"schema_id": schema_id,
|
let _error_type = format!("{:?}", e).split('(').next().unwrap_or("Unknown").to_string();
|
||||||
"error": format!("Schema compilation failed: {}", e)
|
json!({
|
||||||
})),
|
"message": format!("Schema '{}' compilation failed: {}", schema_id, e),
|
||||||
|
"schema_path": schema_path,
|
||||||
|
"detail": format!("{:?}", e),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
};
|
||||||
|
// Ensure the outer structure remains { success: false, error: ... }
|
||||||
|
JsonB(json!({
|
||||||
|
"success": false,
|
||||||
|
"error": error
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pg_extern(strict, parallel_safe)]
|
#[pg_extern(strict, parallel_safe)]
|
||||||
fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
|
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
|
||||||
let cache = SCHEMA_CACHE.read().unwrap();
|
let cache = SCHEMA_CACHE.read().unwrap();
|
||||||
|
|
||||||
match cache.id_to_index.get(schema_id) {
|
// Lookup uses the original schema_id
|
||||||
None => JsonB(json!({
|
match cache.id_to_index.get(schema_id) {
|
||||||
|
None => JsonB(json!({
|
||||||
|
"success": false,
|
||||||
|
"error": {
|
||||||
|
"message": format!("Schema with id '{}' not found in cache", schema_id)
|
||||||
|
}
|
||||||
|
})),
|
||||||
|
Some(sch_index) => {
|
||||||
|
let instance_value: Value = instance.0;
|
||||||
|
match cache.schemas.validate(&instance_value, *sch_index) {
|
||||||
|
Ok(_) => JsonB(json!({ "success": true })),
|
||||||
|
Err(validation_error) => {
|
||||||
|
// Collect all leaf errors first
|
||||||
|
let mut raw_error_list = Vec::new();
|
||||||
|
collect_leaf_errors(&validation_error, &mut raw_error_list);
|
||||||
|
|
||||||
|
// Filter the errors (e.g., deduplicate by instance_path)
|
||||||
|
let filtered_error_list = filter_boon_errors(raw_error_list);
|
||||||
|
|
||||||
|
JsonB(json!({
|
||||||
"success": false,
|
"success": false,
|
||||||
"errors": [{
|
"error": filtered_error_list // Return the filtered list
|
||||||
"kind": "SchemaNotFound",
|
}))
|
||||||
"message": format!("Schema with id '{}' not found in cache", schema_id)
|
|
||||||
}]
|
|
||||||
})),
|
|
||||||
Some(sch_index) => {
|
|
||||||
let instance_value: Value = instance.0;
|
|
||||||
match cache.schemas.validate(&instance_value, *sch_index) {
|
|
||||||
Ok(_) => JsonB(json!({ "success": true })),
|
|
||||||
Err(validation_error) => {
|
|
||||||
let error_details = format_boon_errors(&validation_error);
|
|
||||||
JsonB(json!({
|
|
||||||
"success": false,
|
|
||||||
"errors": [error_details]
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn format_boon_errors(error: &ValidationError) -> Value {
|
// Recursively collects leaf errors into a flat list
|
||||||
json!({
|
fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
|
||||||
"instance_path": error.instance_location.to_string(),
|
if error.causes.is_empty() {
|
||||||
"schema_path": error.schema_url.to_string(),
|
let default_message = format!("{}", error);
|
||||||
"kind": format!("{:?}", error.kind),
|
let message = if let Some(start_index) = default_message.find("': ") {
|
||||||
"message": format!("{}", error),
|
default_message[start_index + 3..].to_string()
|
||||||
"causes": error
|
} else {
|
||||||
.causes
|
default_message
|
||||||
.iter()
|
|
||||||
.map(format_boon_errors)
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_extern(strict)]
|
|
||||||
fn clear_schema_cache() -> JsonB {
|
|
||||||
let mut cache = SCHEMA_CACHE.write().unwrap();
|
|
||||||
*cache = BoonCache {
|
|
||||||
schemas: Schemas::new(),
|
|
||||||
id_to_index: HashMap::new()
|
|
||||||
};
|
};
|
||||||
JsonB(json!({
|
|
||||||
"success": true,
|
errors_list.push(json!({
|
||||||
"message": "Schema cache cleared."
|
"message": message,
|
||||||
}))
|
"schema_path": error.schema_url.to_string(),
|
||||||
|
"instance_path": error.instance_location.to_string(),
|
||||||
|
}));
|
||||||
|
} else {
|
||||||
|
for cause in &error.causes {
|
||||||
|
collect_leaf_errors(cause, errors_list);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filters collected errors, e.g., deduplicating by instance_path
|
||||||
|
fn filter_boon_errors(raw_errors: Vec<Value>) -> Vec<Value> {
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::collections::hash_map::Entry;
|
||||||
|
|
||||||
|
// Use a HashMap to keep only the first error for each instance_path
|
||||||
|
let mut unique_errors: HashMap<String, Value> = HashMap::new();
|
||||||
|
|
||||||
|
for error_value in raw_errors {
|
||||||
|
if let Some(instance_path_value) = error_value.get("instance_path") {
|
||||||
|
if let Some(instance_path_str) = instance_path_value.as_str() {
|
||||||
|
// Use Entry API to insert only if the key is not present
|
||||||
|
if let Entry::Vacant(entry) = unique_errors.entry(instance_path_str.to_string()) {
|
||||||
|
entry.insert(error_value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If error doesn't have instance_path or it's not a string, we might ignore it or handle differently.
|
||||||
|
// For now, we implicitly ignore errors without a valid string instance_path for deduplication.
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect the unique errors from the map values
|
||||||
|
unique_errors.into_values().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pg_extern(strict, parallel_safe)]
|
#[pg_extern(strict, parallel_safe)]
|
||||||
fn show_schema_cache() -> JsonB {
|
fn json_schema_cached(schema_id: &str) -> bool {
|
||||||
let cache = SCHEMA_CACHE.read().unwrap();
|
let cache = SCHEMA_CACHE.read().unwrap();
|
||||||
let ids: Vec<&String> = cache.id_to_index.keys().collect();
|
cache.id_to_index.contains_key(schema_id)
|
||||||
JsonB(json!({
|
|
||||||
"cached_schema_ids": ids
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pg_schema]
|
#[pg_extern(strict)]
|
||||||
#[cfg(any(test, feature = "pg_test"))]
|
fn clear_json_schemas() {
|
||||||
mod tests {
|
let mut cache = SCHEMA_CACHE.write().unwrap();
|
||||||
use pgrx::*;
|
*cache = BoonCache {
|
||||||
use pgrx::pg_test;
|
schemas: Schemas::new(),
|
||||||
use super::*;
|
id_to_index: HashMap::new(),
|
||||||
use serde_json::json;
|
};
|
||||||
|
|
||||||
fn jsonb(val: Value) -> JsonB {
|
|
||||||
JsonB(val)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setup_test() {
|
|
||||||
clear_schema_cache();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_cache_and_validate_schema() {
|
|
||||||
setup_test();
|
|
||||||
let schema_id = "my_schema";
|
|
||||||
let schema = json!({
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"name": { "type": "string" },
|
|
||||||
"age": { "type": "integer", "minimum": 0 }
|
|
||||||
},
|
|
||||||
"required": ["name", "age"]
|
|
||||||
});
|
|
||||||
let valid_instance = json!({ "name": "Alice", "age": 30 });
|
|
||||||
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
|
|
||||||
let invalid_instance_missing = json!({ "name": "Charlie" });
|
|
||||||
|
|
||||||
let cache_result = cache_schema(schema_id, jsonb(schema.clone()));
|
|
||||||
assert!(cache_result.0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
let valid_result = validate_schema(schema_id, jsonb(valid_instance));
|
|
||||||
assert!(valid_result.0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
let invalid_result_type = validate_schema(schema_id, jsonb(invalid_instance_type));
|
|
||||||
assert!(!invalid_result_type.0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
// --- Assertions for invalid_result_type ---
|
|
||||||
|
|
||||||
// Get top-level errors
|
|
||||||
let top_level_errors = invalid_result_type.0["errors"].as_array().expect("Top-level 'errors' should be an array");
|
|
||||||
assert_eq!(top_level_errors.len(), 1, "Should have exactly one top-level error for invalid type");
|
|
||||||
|
|
||||||
// Get the first (and only) top-level error
|
|
||||||
let top_level_error = top_level_errors.get(0).expect("Should get the first top-level error");
|
|
||||||
|
|
||||||
// Check top-level error kind
|
|
||||||
assert!(top_level_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Schema { url:")),
|
|
||||||
"Incorrect kind for top-level error. Expected 'Schema {{ url:'. Error: {:?}. All errors: {:?}", top_level_error, top_level_errors);
|
|
||||||
|
|
||||||
// Get the 'causes' array from the top-level error
|
|
||||||
let causes_age = top_level_error.get("causes").and_then(Value::as_array).expect("Top-level error 'causes' should be an array");
|
|
||||||
assert_eq!(causes_age.len(), 1, "Should have one cause for the age error");
|
|
||||||
|
|
||||||
// Get the actual age error from the 'causes' array
|
|
||||||
let age_error = causes_age.get(0).expect("Should have an error object in 'causes'");
|
|
||||||
assert_eq!(age_error.get("instance_path").and_then(Value::as_str), Some("/age"),
|
|
||||||
"Incorrect instance_path for age error. Error: {:?}. All errors: {:?}", age_error, top_level_errors);
|
|
||||||
|
|
||||||
assert!(age_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Minimum { got:")),
|
|
||||||
"Incorrect kind prefix for age error. Expected 'Minimum {{ got:'. Error: {:?}. All errors: {:?}", age_error, top_level_errors);
|
|
||||||
|
|
||||||
let expected_prefix = "at '/age': must be >=0";
|
|
||||||
assert!(age_error.get("message")
|
|
||||||
.and_then(Value::as_str)
|
|
||||||
.map_or(false, |m| m.starts_with(expected_prefix)),
|
|
||||||
"Incorrect message prefix for age error. Expected prefix '{}'. Error: {:?}. All errors: {:?}",
|
|
||||||
expected_prefix, age_error, top_level_errors);
|
|
||||||
|
|
||||||
let invalid_result_missing = validate_schema(schema_id, jsonb(invalid_instance_missing));
|
|
||||||
assert!(!invalid_result_missing.0["success"].as_bool().unwrap(), "Validation should fail for missing required field");
|
|
||||||
|
|
||||||
// --- Assertions for invalid_result_missing ---
|
|
||||||
|
|
||||||
// Get top-level errors
|
|
||||||
let top_level_errors_missing = invalid_result_missing.0["errors"].as_array().expect("Errors should be an array for missing field");
|
|
||||||
assert_eq!(top_level_errors_missing.len(), 1, "Should have one top-level error for missing field");
|
|
||||||
|
|
||||||
// Get the first (and only) top-level error
|
|
||||||
let top_error_missing = top_level_errors_missing.get(0).expect("Should get the first top-level missing field error");
|
|
||||||
|
|
||||||
// Check top-level error kind
|
|
||||||
assert!(top_error_missing.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Schema { url:")),
|
|
||||||
"Incorrect kind for missing field top-level error. Error: {:?}. All errors: {:?}", top_error_missing, top_level_errors_missing);
|
|
||||||
|
|
||||||
// Get the 'causes' array from the top-level error
|
|
||||||
let causes_missing = top_error_missing.get("causes").and_then(Value::as_array).expect("Causes should be an array for missing field error");
|
|
||||||
assert_eq!(causes_missing.len(), 1, "Should have one cause for missing field");
|
|
||||||
|
|
||||||
// Get the actual missing field error from the 'causes' array
|
|
||||||
let missing_error = causes_missing.get(0).expect("Should have missing field error object in 'causes'");
|
|
||||||
|
|
||||||
// Assertions on the specific missing field error
|
|
||||||
assert_eq!(missing_error.get("instance_path").and_then(Value::as_str), Some(""),
|
|
||||||
"Incorrect instance_path for missing age error: {:?}", missing_error);
|
|
||||||
assert!(missing_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Required { want: [\"age\"]")),
|
|
||||||
"Incorrect kind for missing age error. Expected prefix 'Required {{ want: [\"age\"] }}'. Error: {:?}", missing_error);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_schema_not_cached() {
|
|
||||||
setup_test();
|
|
||||||
let instance = json!({ "foo": "bar" });
|
|
||||||
let result = validate_schema("non_existent_schema", jsonb(instance));
|
|
||||||
assert!(!result.0["success"].as_bool().unwrap());
|
|
||||||
let errors = result.0["errors"].as_array().unwrap();
|
|
||||||
assert_eq!(errors.len(), 1);
|
|
||||||
assert_eq!(errors[0]["kind"], json!("SchemaNotFound"));
|
|
||||||
assert!(errors[0]["message"].as_str().unwrap().contains("non_existent_schema"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_invalid_schema_cache() {
|
|
||||||
setup_test();
|
|
||||||
let schema_id = "invalid_schema";
|
|
||||||
let invalid_schema_json = "{\"type\": \"string\" \"maxLength\": 5}";
|
|
||||||
let invalid_schema_value: Result<Value, _> = serde_json::from_str(invalid_schema_json);
|
|
||||||
assert!(invalid_schema_value.is_err(), "Test setup assumes invalid JSON string");
|
|
||||||
|
|
||||||
let schema_representing_invalid = json!({
|
|
||||||
"type": 123
|
|
||||||
});
|
|
||||||
|
|
||||||
let result = cache_schema(schema_id, jsonb(schema_representing_invalid.clone()));
|
|
||||||
assert!(!result.0["success"].as_bool().unwrap());
|
|
||||||
assert!(result.0["error"].as_str().unwrap().contains("Schema compilation failed"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_detailed_validation_errors() {
|
|
||||||
setup_test();
|
|
||||||
let schema_id = "detailed_schema";
|
|
||||||
let schema = json!({
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"address": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"street": { "type": "string" },
|
|
||||||
"city": { "type": "string", "maxLength": 10 }
|
|
||||||
},
|
|
||||||
"required": ["street", "city"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"required": ["address"]
|
|
||||||
});
|
|
||||||
let invalid_instance = json!({
|
|
||||||
"address": {
|
|
||||||
"city": "San Francisco Bay Area"
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
assert!(cache_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
|
|
||||||
let result = validate_schema(schema_id, jsonb(invalid_instance));
|
|
||||||
assert!(!result.0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
let errors = result.0["errors"].as_array().expect("Errors should be an array");
|
|
||||||
let top_error = errors.get(0).expect("Expected at least one top-level error object");
|
|
||||||
let causes = top_error.get("causes").and_then(Value::as_array).expect("Expected causes array");
|
|
||||||
|
|
||||||
let has_required_street_error = causes.iter().any(|e|
|
|
||||||
e.get("instance_path").and_then(Value::as_str) == Some("/address") && // Check path inside cause
|
|
||||||
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("Required { want:") && // Check kind prefix
|
|
||||||
e.get("kind").and_then(Value::as_str).unwrap_or("").contains("street") // Ensure 'street' is mentioned
|
|
||||||
);
|
|
||||||
assert!(has_required_street_error, "Missing required 'street' error within causes. Actual errors: {:?}", errors);
|
|
||||||
|
|
||||||
let has_maxlength_city_error = causes.iter().any(|e| // Check within causes
|
|
||||||
e.get("instance_path").and_then(Value::as_str) == Some("/address/city") &&
|
|
||||||
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("MaxLength { got:") // Check kind prefix
|
|
||||||
);
|
|
||||||
assert!(has_maxlength_city_error, "Missing maxLength 'city' error within causes. Actual errors: {:?}", errors);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_oneof_validation_errors() {
|
|
||||||
setup_test();
|
|
||||||
let schema_id = "oneof_schema";
|
|
||||||
let schema = json!({
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"value": {
|
|
||||||
"oneOf": [
|
|
||||||
{ "type": "string", "minLength": 5 },
|
|
||||||
{ "type": "number", "minimum": 10 }
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"required": ["value"]
|
|
||||||
});
|
|
||||||
assert!(cache_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
let invalid_instance = json!({ "value": "abc" });
|
|
||||||
let result = validate_schema(schema_id, jsonb(invalid_instance));
|
|
||||||
|
|
||||||
assert!(!result.0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
let errors_val = result.0["errors"].as_array().expect("Errors should be an array");
|
|
||||||
let top_schema_error = errors_val.get(0).expect("Expected at least one top-level Schema error object");
|
|
||||||
let schema_error_causes = top_schema_error.get("causes").and_then(Value::as_array).expect("Expected causes array for Schema error");
|
|
||||||
|
|
||||||
let oneof_error = schema_error_causes.iter().find(|e| {
|
|
||||||
e.get("kind").and_then(Value::as_str) == Some("OneOf(None)") &&
|
|
||||||
e.get("instance_path").and_then(Value::as_str) == Some("/value")
|
|
||||||
}).expect("Could not find the OneOf(None) error for /value within Schema causes");
|
|
||||||
|
|
||||||
let oneof_causes = oneof_error.get("causes").and_then(Value::as_array)
|
|
||||||
.expect("Expected causes array for OneOf error");
|
|
||||||
|
|
||||||
let has_minlength_error = oneof_causes.iter().any(|e| // Check within OneOf causes
|
|
||||||
e.get("instance_path").and_then(Value::as_str) == Some("/value") &&
|
|
||||||
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("MinLength { got:") // Check kind prefix
|
|
||||||
);
|
|
||||||
assert!(has_minlength_error, "Missing MinLength error within OneOf causes. Actual errors: {:?}", errors_val);
|
|
||||||
|
|
||||||
let has_type_error = oneof_causes.iter().any(|e| // Check within OneOf causes
|
|
||||||
e.get("instance_path").and_then(Value::as_str) == Some("/value") &&
|
|
||||||
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("Type { got: String, want: Types") // More specific kind check
|
|
||||||
);
|
|
||||||
assert!(has_type_error, "Missing Type error within OneOf causes. Actual errors: {:?}", errors_val);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_clear_schema_cache() {
|
|
||||||
setup_test();
|
|
||||||
let schema_id = "schema_to_clear";
|
|
||||||
let schema = json!({ "type": "string" });
|
|
||||||
cache_schema(schema_id, jsonb(schema.clone()));
|
|
||||||
|
|
||||||
let show_result1 = show_schema_cache();
|
|
||||||
assert!(show_result1.0["cached_schema_ids"].as_array().unwrap().iter().any(|id| id.as_str() == Some(schema_id)));
|
|
||||||
|
|
||||||
let clear_result = clear_schema_cache();
|
|
||||||
assert!(clear_result.0["success"].as_bool().unwrap());
|
|
||||||
|
|
||||||
let show_result2 = show_schema_cache();
|
|
||||||
assert!(show_result2.0["cached_schema_ids"].as_array().unwrap().is_empty());
|
|
||||||
|
|
||||||
let instance = json!("test");
|
|
||||||
let validate_result = validate_schema(schema_id, jsonb(instance));
|
|
||||||
assert!(!validate_result.0["success"].as_bool().unwrap());
|
|
||||||
assert_eq!(validate_result.0["errors"].as_array().unwrap()[0]["kind"], json!("SchemaNotFound"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[pg_test]
|
|
||||||
fn test_show_schema_cache() {
|
|
||||||
setup_test();
|
|
||||||
let schema_id1 = "schema1";
|
|
||||||
let schema_id2 = "schema2";
|
|
||||||
let schema = json!({ "type": "boolean" });
|
|
||||||
|
|
||||||
cache_schema(schema_id1, jsonb(schema.clone()));
|
|
||||||
cache_schema(schema_id2, jsonb(schema.clone()));
|
|
||||||
|
|
||||||
let result = show_schema_cache();
|
|
||||||
let ids = result.0["cached_schema_ids"].as_array().unwrap();
|
|
||||||
assert_eq!(ids.len(), 2);
|
|
||||||
assert!(ids.contains(&json!(schema_id1)));
|
|
||||||
assert!(ids.contains(&json!(schema_id2)));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[pg_extern(strict, parallel_safe)]
|
||||||
|
fn show_json_schemas() -> Vec<String> {
|
||||||
|
let cache = SCHEMA_CACHE.read().unwrap();
|
||||||
|
let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
|
||||||
|
ids
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This module is required by `cargo pgrx test` invocations.
/// It must be visible at the root of your extension crate.
#[cfg(test)]
pub mod pg_test {
    pub fn setup(_options: Vec<&str>) {
        // perform one-off initialization when the pg_test framework starts
    }

    #[must_use]
    pub fn postgresql_conf_options() -> Vec<&'static str> {
        // return any postgresql.conf settings that are required for your tests
        vec![]
    }
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Integration tests live in `src/tests.rs` and are compiled into this crate
/// only for test builds or when the `pg_test` feature is enabled.
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
    include!("tests.rs");
}
|
||||||
415
src/tests.rs
Normal file
415
src/tests.rs
Normal file
@ -0,0 +1,415 @@
|
|||||||
|
use crate::*;
|
||||||
|
use serde_json::{json, Value};
|
||||||
|
use pgrx::{JsonB, pg_test};
|
||||||
|
|
||||||
|
// Helper macro asserting that a JsonB result reports `"success": true`;
// panics with the pretty-printed result JSON otherwise.
//
// Fixes over the previous version: the argument is evaluated exactly once
// (it used to be re-evaluated up to three times, unsafe for side-effecting
// expressions and inconsistent with `assert_failure_with_json!`), and the
// pretty-print fallback is deduplicated into an internal `@pretty` arm.
macro_rules! assert_success_with_json {
    // Internal: pretty-print a result JSON for panic messages.
    (@pretty $json:expr) => {
        serde_json::to_string_pretty($json)
            .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $json))
    };
    // With a custom formatted message.
    ($result_jsonb:expr, $fmt:literal $(, $($args:tt)*)?) => {
        let result_ref = &$result_jsonb; // evaluate the argument once
        if result_ref.0.get("success").and_then(Value::as_bool) != Some(true) {
            let base_msg = format!($fmt $(, $($args)*)?);
            panic!(
                "Assertion Failed (expected success): {}\nResult JSON:\n{}",
                base_msg,
                assert_success_with_json!(@pretty &result_ref.0)
            );
        }
    };
    // Simpler version without message.
    ($result_jsonb:expr) => {
        let result_ref = &$result_jsonb; // evaluate the argument once
        if result_ref.0.get("success").and_then(Value::as_bool) != Some(true) {
            panic!(
                "Assertion Failed (expected success)\nResult JSON:\n{}",
                assert_success_with_json!(@pretty &result_ref.0)
            );
        }
    };
}
|
||||||
|
|
||||||
|
// Helper macro asserting that a JsonB result reports `"success": false` and
// carries an `error` value of the expected shape. `error` may be either an
// array of leaf errors or a single error object (e.g. 'schema not found').
//
// Public arms (matched in this order — order matters for macro dispatch):
//   (result, count, msg_contains [, fmt, args...]) — count + first-message check
//   (result, count [, fmt, args...])               — count check only
//   (result [, fmt, args...])                      — failure with >=1 error
//
// Fixes over the previous version: the pretty-print fallback is deduplicated
// into an internal `@pretty` arm, and the bare `(result)` arm no longer
// delegates with `""` (which macro matching mis-routed through the count arms,
// producing `len() != ""` — a compile error whenever that arm was expanded);
// it now shares an internal `@failure_only` arm with the fmt variant.
macro_rules! assert_failure_with_json {
    // Internal: pretty-print a result JSON for panic messages.
    (@pretty $json:expr) => {
        serde_json::to_string_pretty($json)
            .unwrap_or_else(|_| format!("(Failed to pretty-print JSON: {:?})", $json))
    };
    // Internal: shared body for the failure-only arms.
    (@failure_only $result:expr, $base:expr) => {
        let json_result = &$result.0;
        let base_msg = $base;
        if json_result.get("success").and_then(Value::as_bool) != Some(false) {
            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
        }
        match json_result.get("error") { // Changed key
            // OK: single error object is a failure
            Some(error_val) if error_val.is_object() => {}
            Some(error_val) if error_val.is_array() => {
                // OK: non-empty error array is a failure
                if error_val.as_array().unwrap().is_empty() {
                    panic!("Assertion Failed (expected errors, but 'error' array is empty): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
                }
            }
            Some(_) => {
                panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
            }
            None => {
                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
            }
        }
    };

    // --- Arms with error count and message substring check ---
    // With custom message:
    ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr, $fmt:literal $(, $($args:tt)*)?) => {
        let json_result = &$result.0;
        let base_msg = format!($fmt $(, $($args)*)?);
        if json_result.get("success").and_then(Value::as_bool) != Some(false) {
            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
        }
        match json_result.get("error") { // Changed key
            Some(error_val) if error_val.is_array() => {
                let errors_array = error_val.as_array().unwrap();
                if errors_array.len() != $expected_error_count {
                    panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, assert_failure_with_json!(@pretty &json_result));
                }
                if $expected_error_count > 0 {
                    match errors_array[0].get("message").and_then(Value::as_str) {
                        Some(msg) => {
                            if !msg.contains($expected_first_message_contains) {
                                panic!("Assertion Failed (first error message mismatch): Expected contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, assert_failure_with_json!(@pretty &json_result));
                            }
                        }
                        None => {
                            panic!("Assertion Failed (first error in array has no 'message' string): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
                        }
                    }
                }
            }
            // Handle single error object case (like 'schema not found')
            Some(error_val) if error_val.is_object() => {
                if $expected_error_count != 1 {
                    panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, assert_failure_with_json!(@pretty &json_result));
                }
                match error_val.get("message").and_then(Value::as_str) {
                    Some(msg) => {
                        if !msg.contains($expected_first_message_contains) {
                            panic!("Assertion Failed (error message mismatch): Expected object message contains '{}', got: '{}'. {}\nResult JSON:\n{}", $expected_first_message_contains, msg, base_msg, assert_failure_with_json!(@pretty &json_result));
                        }
                    }
                    None => {
                        panic!("Assertion Failed (error object has no 'message' string): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
                    }
                }
            }
            Some(_) => {
                panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
            }
            None => {
                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
            }
        }
    };
    // Without custom message (calls the one above with ""):
    ($result:expr, $expected_error_count:expr, $expected_first_message_contains:expr) => {
        assert_failure_with_json!($result, $expected_error_count, $expected_first_message_contains, "");
    };

    // --- Arms with error count check only ---
    // With custom message (only reachable with format args; a bare
    // `(result, count, "literal")` call matches the message-substring arm above):
    ($result:expr, $expected_error_count:expr, $fmt:literal $(, $($args:tt)*)?) => {
        let json_result = &$result.0;
        let base_msg = format!($fmt $(, $($args)*)?);
        if json_result.get("success").and_then(Value::as_bool) != Some(false) {
            panic!("Assertion Failed (expected failure, success was not false): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
        }
        match json_result.get("error") { // Changed key
            Some(error_val) if error_val.is_array() => {
                let errors_array = error_val.as_array().unwrap();
                if errors_array.len() != $expected_error_count {
                    panic!("Assertion Failed (wrong error count): Expected {} errors, got {}. {}\nResult JSON:\n{}", $expected_error_count, errors_array.len(), base_msg, assert_failure_with_json!(@pretty &json_result));
                }
            }
            Some(error_val) if error_val.is_object() => {
                if $expected_error_count != 1 {
                    panic!("Assertion Failed (wrong error count): Expected {} errors, but got a single error object. {}\nResult JSON:\n{}", $expected_error_count, base_msg, assert_failure_with_json!(@pretty &json_result));
                }
                // Count check passes if expected is 1 and got object
            }
            Some(_) => {
                panic!("Assertion Failed ('error' value is not an array or object): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
            }
            None => {
                panic!("Assertion Failed (expected 'error' key, but none found): {}\nResult JSON:\n{}", base_msg, assert_failure_with_json!(@pretty &json_result));
            }
        }
    };
    // Without custom message (delegates; the trailing "" routes through the
    // message-substring arm, where `contains("")` is trivially true):
    ($result:expr, $expected_error_count:expr) => {
        assert_failure_with_json!($result, $expected_error_count, "");
    };

    // --- Arms checking failure only (expects at least one error) ---
    // With custom message:
    ($result:expr, $fmt:literal $(, $($args:tt)*)?) => {
        assert_failure_with_json!(@failure_only $result, format!($fmt $(, $($args)*)?));
    };
    // Without custom message:
    ($result:expr) => {
        assert_failure_with_json!(@failure_only $result, String::new());
    };
}
|
||||||
|
|
||||||
|
|
||||||
|
fn jsonb(val: Value) -> JsonB {
|
||||||
|
JsonB(val)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_cache_and_validate_json_schema() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let schema_id = "my_schema";
|
||||||
|
let schema = json!({
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": { "type": "string" },
|
||||||
|
"age": { "type": "integer", "minimum": 0 }
|
||||||
|
},
|
||||||
|
"required": ["name", "age"]
|
||||||
|
});
|
||||||
|
let valid_instance = json!({ "name": "Alice", "age": 30 });
|
||||||
|
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
|
||||||
|
let invalid_instance_missing = json!({ "name": "Charlie" });
|
||||||
|
|
||||||
|
let cache_result = cache_json_schema(schema_id, jsonb(schema.clone()));
|
||||||
|
assert_success_with_json!(cache_result, "Cache operation should succeed.");
|
||||||
|
|
||||||
|
let valid_result = validate_json_schema(schema_id, jsonb(valid_instance));
|
||||||
|
assert_success_with_json!(valid_result, "Validation of valid instance should succeed.");
|
||||||
|
|
||||||
|
// Invalid type
|
||||||
|
let invalid_result_type = validate_json_schema(schema_id, jsonb(invalid_instance_type));
|
||||||
|
assert_failure_with_json!(invalid_result_type, 1, "must be >=0", "Validation with invalid type should fail.");
|
||||||
|
let errors_type = invalid_result_type.0["error"].as_array().unwrap(); // Check 'error', expect array
|
||||||
|
assert_eq!(errors_type[0]["instance_path"], "/age");
|
||||||
|
assert_eq!(errors_type[0]["schema_path"], "urn:my_schema#/properties/age");
|
||||||
|
|
||||||
|
// Missing field
|
||||||
|
let invalid_result_missing = validate_json_schema(schema_id, jsonb(invalid_instance_missing));
|
||||||
|
assert_failure_with_json!(invalid_result_missing, 1, "missing properties 'age'", "Validation with missing field should fail.");
|
||||||
|
let errors_missing = invalid_result_missing.0["error"].as_array().unwrap(); // Check 'error', expect array
|
||||||
|
assert_eq!(errors_missing[0]["instance_path"], "");
|
||||||
|
assert_eq!(errors_missing[0]["schema_path"], "urn:my_schema#");
|
||||||
|
|
||||||
|
// Schema not found
|
||||||
|
let non_existent_id = "non_existent_schema";
|
||||||
|
let invalid_schema_result = validate_json_schema(non_existent_id, jsonb(json!({})));
|
||||||
|
assert_failure_with_json!(invalid_schema_result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
|
||||||
|
// Check 'error' is an object for 'schema not found'
|
||||||
|
let error_notfound_obj = invalid_schema_result.0["error"].as_object().expect("'error' should be an object for schema not found");
|
||||||
|
assert!(error_notfound_obj.contains_key("message")); // Check message exists
|
||||||
|
// Removed checks for schema_path/instance_path as they aren't added in lib.rs for this case
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_validate_json_schema_not_cached() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let instance = json!({ "foo": "bar" });
|
||||||
|
let result = validate_json_schema("non_existent_schema", jsonb(instance));
|
||||||
|
// Use the updated macro, expecting count 1 and specific message (handles object case)
|
||||||
|
assert_failure_with_json!(result, 1, "Schema with id 'non_existent_schema' not found", "Validation with non-existent schema should fail.");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_cache_invalid_json_schema() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let schema_id = "invalid_schema";
|
||||||
|
// Schema with an invalid type *value*
|
||||||
|
let invalid_schema = json!({
|
||||||
|
"$id": "urn:invalid_schema",
|
||||||
|
"type": ["invalid_type_value"]
|
||||||
|
});
|
||||||
|
|
||||||
|
let cache_result = cache_json_schema(schema_id, jsonb(invalid_schema));
|
||||||
|
|
||||||
|
// Expect 2 leaf errors because the meta-schema validation fails at the type value
|
||||||
|
// and within the type array itself.
|
||||||
|
assert_failure_with_json!(
|
||||||
|
cache_result,
|
||||||
|
2, // Expect exactly two leaf errors
|
||||||
|
"value must be one of", // Check message substring (present in both)
|
||||||
|
"Caching invalid schema should fail with specific meta-schema validation errors."
|
||||||
|
);
|
||||||
|
|
||||||
|
// Ensure the error is an array and check specifics
|
||||||
|
let error_array = cache_result.0["error"].as_array().expect("Error field should be an array");
|
||||||
|
assert_eq!(error_array.len(), 2);
|
||||||
|
// Note: Order might vary depending on boon's internal processing, check both possibilities or sort.
|
||||||
|
// Assuming the order shown in the logs for now:
|
||||||
|
assert_eq!(error_array[0]["instance_path"], "/type");
|
||||||
|
assert!(error_array[0]["message"].as_str().unwrap().contains("value must be one of"));
|
||||||
|
assert_eq!(error_array[1]["instance_path"], "/type/0");
|
||||||
|
assert!(error_array[1]["message"].as_str().unwrap().contains("value must be one of"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_validate_json_schema_detailed_validation_errors() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let schema_id = "detailed_errors";
|
||||||
|
let schema = json!({
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": { "type": "string" },
|
||||||
|
"city": { "type": "string", "maxLength": 10 }
|
||||||
|
},
|
||||||
|
"required": ["street", "city"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["address"]
|
||||||
|
});
|
||||||
|
let _ = cache_json_schema(schema_id, jsonb(schema));
|
||||||
|
|
||||||
|
let invalid_instance = json!({
|
||||||
|
"address": {
|
||||||
|
"street": 123, // Wrong type
|
||||||
|
"city": "Supercalifragilisticexpialidocious" // Too long
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let result = validate_json_schema(schema_id, jsonb(invalid_instance));
|
||||||
|
|
||||||
|
// Update: Expect 2 errors again, as boon reports both nested errors.
|
||||||
|
assert_failure_with_json!(result, 2);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_validate_json_schema_oneof_validation_errors() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let schema_id = "oneof_schema";
|
||||||
|
let schema = json!({
|
||||||
|
"oneOf": [
|
||||||
|
{ // Option 1: Object with string prop
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"string_prop": { "type": "string", "maxLength": 5 }
|
||||||
|
},
|
||||||
|
"required": ["string_prop"]
|
||||||
|
},
|
||||||
|
{ // Option 2: Object with number prop
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"number_prop": { "type": "number", "minimum": 10 }
|
||||||
|
},
|
||||||
|
"required": ["number_prop"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
let _ = cache_json_schema(schema_id, jsonb(schema));
|
||||||
|
|
||||||
|
// --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
|
||||||
|
let invalid_string_instance = json!({ "string_prop": "toolongstring" });
|
||||||
|
let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
|
||||||
|
// Expect 2 leaf errors. Check count only with the macro.
|
||||||
|
assert_failure_with_json!(result_invalid_string, 2);
|
||||||
|
// Explicitly check that both expected errors are present, ignoring order
|
||||||
|
let errors_string = result_invalid_string.0["error"].as_array().expect("Expected error array for invalid string");
|
||||||
|
assert!(errors_string.iter().any(|e| e["instance_path"] == "/string_prop" && e["message"].as_str().unwrap().contains("length must be <=5")), "Missing maxLength error");
|
||||||
|
assert!(errors_string.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'number_prop'")), "Missing number_prop required error");
|
||||||
|
|
||||||
|
// --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
|
||||||
|
let invalid_number_instance = json!({ "number_prop": 5 });
|
||||||
|
let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
|
||||||
|
// Expect 2 leaf errors. Check count only with the macro.
|
||||||
|
assert_failure_with_json!(result_invalid_number, 2);
|
||||||
|
// Explicitly check that both expected errors are present, ignoring order
|
||||||
|
let errors_number = result_invalid_number.0["error"].as_array().expect("Expected error array for invalid number");
|
||||||
|
assert!(errors_number.iter().any(|e| e["instance_path"] == "/number_prop" && e["message"].as_str().unwrap().contains("must be >=10")), "Missing minimum error");
|
||||||
|
assert!(errors_number.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'string_prop'")), "Missing string_prop required error");
|
||||||
|
|
||||||
|
// --- Test case 3: Fails type check (not object) for both branches ---
|
||||||
|
// Input: boolean, expected object for both branches
|
||||||
|
let invalid_bool_instance = json!(true); // Not an object
|
||||||
|
let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
|
||||||
|
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
|
||||||
|
assert_failure_with_json!(result_invalid_bool, 1);
|
||||||
|
// Explicitly check that the single remaining error is the type error for the root instance path
|
||||||
|
let errors_bool = result_invalid_bool.0["error"].as_array().expect("Expected error array for invalid bool");
|
||||||
|
assert_eq!(errors_bool.iter().filter(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("want object")).count(), 1, "Expected one 'want object' error at root after filtering");
|
||||||
|
|
||||||
|
// --- Test case 4: Fails missing required for both branches ---
|
||||||
|
// Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
|
||||||
|
let invalid_empty_obj = json!({});
|
||||||
|
let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
|
||||||
|
// Expect only 1 leaf error after filtering, as both original errors have instance_path ""
|
||||||
|
assert_failure_with_json!(result_empty_obj, 1);
|
||||||
|
// Explicitly check that the single remaining error is one of the expected missing properties errors
|
||||||
|
let errors_empty = result_empty_obj.0["error"].as_array().expect("Expected error array for empty object");
|
||||||
|
assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
|
||||||
|
let the_error = &errors_empty[0];
|
||||||
|
assert_eq!(the_error["instance_path"], "", "Expected instance_path to be empty string");
|
||||||
|
let message = the_error["message"].as_str().unwrap();
|
||||||
|
assert!(message.contains("missing properties 'string_prop'") || message.contains("missing properties 'number_prop'"),
|
||||||
|
"Error message should indicate missing string_prop or number_prop, got: {}", message);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_clear_json_schemas() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let schema_id = "schema_to_clear";
|
||||||
|
let schema = json!({ "type": "string" });
|
||||||
|
cache_json_schema(schema_id, jsonb(schema.clone()));
|
||||||
|
|
||||||
|
let show_result1 = show_json_schemas();
|
||||||
|
assert!(show_result1.contains(&schema_id.to_string()));
|
||||||
|
|
||||||
|
clear_json_schemas();
|
||||||
|
|
||||||
|
let show_result2 = show_json_schemas();
|
||||||
|
assert!(show_result2.is_empty());
|
||||||
|
|
||||||
|
let instance = json!("test");
|
||||||
|
let validate_result = validate_json_schema(schema_id, jsonb(instance));
|
||||||
|
// Use the updated macro, expecting count 1 and specific message (handles object case)
|
||||||
|
assert_failure_with_json!(validate_result, 1, "Schema with id 'schema_to_clear' not found", "Validation should fail after clearing schemas.");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_test]
|
||||||
|
fn test_show_json_schemas() {
|
||||||
|
clear_json_schemas(); // Call clear directly
|
||||||
|
let schema_id1 = "schema1";
|
||||||
|
let schema_id2 = "schema2";
|
||||||
|
let schema = json!({ "type": "boolean" });
|
||||||
|
|
||||||
|
cache_json_schema(schema_id1, jsonb(schema.clone()));
|
||||||
|
cache_json_schema(schema_id2, jsonb(schema.clone()));
|
||||||
|
|
||||||
|
let mut result = show_json_schemas(); // Make result mutable
|
||||||
|
result.sort(); // Sort for deterministic testing
|
||||||
|
assert_eq!(result, vec!["schema1".to_string(), "schema2".to_string()]); // Check exact content
|
||||||
|
assert!(result.contains(&schema_id1.to_string())); // Keep specific checks too if desired
|
||||||
|
assert!(result.contains(&schema_id2.to_string()));
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user