Compare commits

...

12 Commits

9 changed files with 1881 additions and 1093 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

49
Cargo.lock generated
View File

@ -362,6 +362,15 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "codepage"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48f68d061bc2828ae826206326e61251aca94c1e4a5305cf52d9138639c918b4"
dependencies = [
"encoding_rs",
]
[[package]]
name = "convert_case"
version = "0.8.0"
@ -418,6 +427,15 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "encoding_rs"
version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [
"cfg-if",
]
[[package]]
name = "enum-map"
version = "2.7.3"
@ -1106,9 +1124,9 @@ dependencies = [
[[package]]
name = "pgrx"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e1b41219b12cfcaa5d58f946a7ff1e7ddf0a4f7f930a7cdab612916e8a12c64"
checksum = "bab5bc1d60d3bc3c966d307a3c7313b1ebfb49a0ec183be3f1a057df0bcc9988"
dependencies = [
"atomic-traits",
"bitflags",
@ -1130,9 +1148,9 @@ dependencies = [
[[package]]
name = "pgrx-bindgen"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6afcef51e801bb18662716f1c524cedfb7943844593171734fe4d3a94c9afa12"
checksum = "9804b74c211a9edd550cd974718f8cc407dec50d8e9cafb906e0b042ba434af0"
dependencies = [
"bindgen",
"cc",
@ -1149,9 +1167,9 @@ dependencies = [
[[package]]
name = "pgrx-macros"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "729af3e6954d2f76230d700efd8606121f13f71f800e5c76173add2c02097948"
checksum = "f230769493bf567f137de23264d604d267dd72b8a77c596528e43cf423c6208e"
dependencies = [
"pgrx-sql-entity-graph",
"proc-macro2",
@ -1161,11 +1179,13 @@ dependencies = [
[[package]]
name = "pgrx-pg-config"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "116e33a329f3fac976b5f3150f14f2612735dfc56a15cb0a0800f25a3bd90aa7"
checksum = "49b64c071c2a46a19ab4521120a25b02b598f4abf6e9b4b1769a7922edeee3de"
dependencies = [
"cargo_toml",
"codepage",
"encoding_rs",
"eyre",
"home",
"owo-colors",
@ -1175,13 +1195,14 @@ dependencies = [
"thiserror 2.0.12",
"toml",
"url",
"winapi",
]
[[package]]
name = "pgrx-pg-sys"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd074044513f1f7fc63fd1ed0117ad0fbe690ef1b445f6d72b92e611b3846490"
checksum = "fcbfa98ec7a90252d13a78ac666541173dbb01a2fc1ba20131db6490c0711125"
dependencies = [
"cee-scape",
"libc",
@ -1194,9 +1215,9 @@ dependencies = [
[[package]]
name = "pgrx-sql-entity-graph"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0eb73c4b916d4abb422fff66c2606c46bf4b99136209306836e89766a8d49cd"
checksum = "e79bbf5a33cff6cfdc6dda3a976cd931c995eaa2c073a7c59b8f8fe8f6faa073"
dependencies = [
"convert_case",
"eyre",
@ -1210,9 +1231,9 @@ dependencies = [
[[package]]
name = "pgrx-tests"
version = "0.14.0"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aba4e9a97fd148c9f65cf0c56d33a4cde4deb9941f5c0d914a39148e8148a7a6"
checksum = "9791c709882f3af9545bcca71670fdd82768f67a428b416b6210eae3773dbd0d"
dependencies = [
"clap-cargo",
"eyre",

View File

@ -1,17 +1,17 @@
[package]
name = "jspg"
version = "0.1.0"
edition = "2021"
edition = "2024"
[dependencies]
pgrx = "0.14.0"
pgrx = "0.15.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
lazy_static = "1.5.0"
boon = "0.6.1"
[dev-dependencies]
pgrx-tests = "0.14.0"
pgrx-tests = "0.15.0"
[lib]
crate-type = ["cdylib", "lib"]

6
flow
View File

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Flows
source ./flows/base
@ -11,7 +11,7 @@ source ./flows/rust
POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
CARGO_DEPENDENCIES=(cargo-pgrx==0.15.0)
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"
@ -129,4 +129,4 @@ jspg-flow() {
register-flow "jspg-usage" "jspg-flow"
dispatch "$@"
dispatch "$@"

88
src/helpers.rs Normal file
View File

@ -0,0 +1,88 @@
use serde_json::Value;
use pgrx::JsonB;
// Simple test helpers for cleaner test code
/// Asserts that a punc result represents success: the payload must contain
/// a "response" key and must NOT contain an "errors" key.
/// Panics with the pretty-printed JSON payload otherwise.
pub fn assert_success(result: &JsonB) {
    let json = &result.0;
    // Idiomatic `.is_none()` instead of `!(...).is_some()`.
    if json.get("response").is_none() || json.get("errors").is_some() {
        let pretty = serde_json::to_string_pretty(json).unwrap_or_else(|_| format!("{:?}", json));
        panic!("Expected success but got:\n{}", pretty);
    }
}
/// Asserts that a punc result represents failure: the payload must contain
/// an "errors" key and must NOT contain a "response" key.
/// Panics with the pretty-printed JSON payload otherwise.
pub fn assert_failure(result: &JsonB) {
    let json = &result.0;
    // Idiomatic `.is_none()` instead of `!(...).is_some()`.
    if json.get("response").is_some() || json.get("errors").is_none() {
        let pretty = serde_json::to_string_pretty(json).unwrap_or_else(|_| format!("{:?}", json));
        panic!("Expected failure but got:\n{}", pretty);
    }
}
/// Asserts that the result is a failure carrying exactly `expected_count`
/// error entries; panics with the full payload on any mismatch.
pub fn assert_error_count(result: &JsonB, expected_count: usize) {
    assert_failure(result);
    let actual_count = get_errors(result).len();
    if actual_count == expected_count {
        return;
    }
    let pretty = serde_json::to_string_pretty(&result.0)
        .unwrap_or_else(|_| format!("{:?}", result.0));
    panic!("Expected {} errors, got {}:\n{}", expected_count, actual_count, pretty);
}
/// Borrows the "errors" array from a result payload.
/// Panics if the key is missing or its value is not a JSON array.
pub fn get_errors(result: &JsonB) -> &Vec<Value> {
    let errors = &result.0["errors"];
    errors.as_array().expect("errors should be an array")
}
pub fn has_error_with_code(result: &JsonB, code: &str) -> bool {
get_errors(result).iter().any(|e| e["code"] == code)
}
pub fn has_error_with_code_and_path(result: &JsonB, code: &str, path: &str) -> bool {
get_errors(result).iter().any(|e| e["code"] == code && e["details"]["path"] == path)
}
/// Asserts that an error with the given code and `details.path` exists,
/// panicking with the full payload when it does not.
pub fn assert_has_error(result: &JsonB, code: &str, path: &str) {
    if has_error_with_code_and_path(result, code, path) {
        return;
    }
    let pretty = serde_json::to_string_pretty(&result.0)
        .unwrap_or_else(|_| format!("{:?}", result.0));
    panic!("Expected error with code='{}' and path='{}' but not found:\n{}", code, path, pretty);
}
/// Returns the first error entry with the given code; panics when absent.
pub fn find_error_with_code<'a>(result: &'a JsonB, code: &str) -> &'a Value {
    let found = get_errors(result).iter().find(|e| e["code"] == code);
    match found {
        Some(error) => error,
        None => panic!("No error found with code '{}'", code),
    }
}
/// Returns the first error entry matching both code and `details.path`;
/// panics when no such entry exists.
pub fn find_error_with_code_and_path<'a>(result: &'a JsonB, code: &str, path: &str) -> &'a Value {
    let found = get_errors(result)
        .iter()
        .find(|e| e["code"] == code && e["details"]["path"] == path);
    match found {
        Some(error) => error,
        None => panic!("No error found with code '{}' and path '{}'", code, path),
    }
}
/// Asserts that `details[detail_key]` is a string equal to `expected_value`.
/// Panics when the detail is missing or not a string.
pub fn assert_error_detail(error: &Value, detail_key: &str, expected_value: &str) {
    let detail = &error["details"][detail_key];
    let actual = match detail.as_str() {
        Some(s) => s,
        None => panic!("Error detail '{}' is not a string", detail_key),
    };
    assert_eq!(actual, expected_value, "Error detail '{}' mismatch", detail_key);
}
// Convenience helpers for asserting on individual error entries.
/// Asserts that the error's "message" string contains `substring`.
pub fn assert_error_message_contains(error: &Value, substring: &str) {
    let message = error["message"].as_str().expect("error should have message");
    let found = message.contains(substring);
    assert!(found, "Expected message to contain '{}', got '{}'", substring, message);
}
/// Asserts that `details.cause` is a JSON object equal to `expected_cause`.
pub fn assert_error_cause_json(error: &Value, expected_cause: &Value) {
    let details = &error["details"];
    let cause = &details["cause"];
    assert!(cause.is_object(), "cause should be JSON object");
    assert_eq!(cause, expected_cause, "cause mismatch");
}
/// Asserts that `details.context` equals `expected_context`.
pub fn assert_error_context(error: &Value, expected_context: &Value) {
    let actual_context = &error["details"]["context"];
    assert_eq!(actual_context, expected_context, "context mismatch");
}
/// Wraps a `serde_json::Value` in pgrx's `JsonB` newtype so test code can
/// pass literal JSON to the extension functions.
pub fn jsonb(val: Value) -> JsonB {
    JsonB(val)
}

View File

@ -9,9 +9,18 @@ use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{collections::HashMap, sync::RwLock};
// Classifies each cached schema. The category drives strictness rewriting:
// Enum and PrivatePunc schemas are left permissive, while Type and
// PublicPunc schemas receive strict-validation rewriting. Type schemas are
// additionally subject to a post-validation "type" property check.
#[derive(Clone, Copy, Debug, PartialEq)]
enum SchemaType {
    Enum,
    Type,
    PublicPunc,
    PrivatePunc,
}
// Process-wide cache of compiled boon schemas, plus lookup maps from the
// original schema id to its compiled index and its SchemaType category.
struct BoonCache {
    schemas: Schemas,
    id_to_index: HashMap<String, SchemaIndex>,
    id_to_type: HashMap<String, SchemaType>,
}
// Structure to hold error information without lifetimes
@ -27,91 +36,278 @@ lazy_static! {
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
id_to_type: HashMap::new(),
});
}
#[pg_extern(strict)]
fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
fn cache_json_schemas(enums: JsonB, types: JsonB, puncs: JsonB) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
let mut schema_value: Value = schema.0;
let schema_path = format!("urn:{}", schema_id);
let enums_value: Value = enums.0;
let types_value: Value = types.0;
let puncs_value: Value = puncs.0;
// Apply strict validation to all objects in the schema if requested
if strict {
apply_strict_validation(&mut schema_value);
}
// Clear existing cache
*cache = BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
id_to_type: HashMap::new(),
};
// Create the boon compiler and enable format assertions
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
// Use schema_path when adding the resource
if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
return JsonB(json!({
"errors": [{
"code": "SCHEMA_RESOURCE_ADD_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"schema": schema_id,
"cause": format!("{}", e)
let mut errors = Vec::new();
// Track all schema IDs for compilation
let mut all_schema_ids = Vec::new();
// Phase 1: Add all enum schemas as resources (priority 1 - these are referenced by types and puncs)
// Enums are never strict - they're reusable building blocks
if let Some(enums_array) = enums_value.as_array() {
for enum_row in enums_array {
if let Some(enum_obj) = enum_row.as_object() {
if let (Some(enum_name), Some(schemas_raw)) = (
enum_obj.get("name").and_then(|v| v.as_str()),
enum_obj.get("schemas")
) {
// Parse the schemas JSONB field
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), SchemaType::Enum, &mut errors) {
errors.push(json!({
"code": "ENUM_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}' for enum '{}'", schema_id, enum_name),
"details": {
"enum_name": enum_name,
"schema_id": schema_id,
"cause": format!("{}", e)
}
}));
} else {
all_schema_ids.push(schema_id.to_string());
cache.id_to_type.insert(schema_id.to_string(), SchemaType::Enum);
}
}
}
}
}
}]
}
}
}
// Phase 2: Add all type schemas as resources (priority 2 - these are referenced by puncs)
// Types are always strict - they should not allow extra properties
if let Some(types_array) = types_value.as_array() {
for type_row in types_array {
if let Some(type_obj) = type_row.as_object() {
if let (Some(type_name), Some(schemas_raw)) = (
type_obj.get("name").and_then(|v| v.as_str()),
type_obj.get("schemas")
) {
// Parse the schemas JSONB field
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), SchemaType::Type, &mut errors) {
errors.push(json!({
"code": "TYPE_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}' for type '{}'", schema_id, type_name),
"details": {
"type_name": type_name,
"schema_id": schema_id,
"cause": format!("{}", e)
}
}));
} else {
all_schema_ids.push(schema_id.to_string());
cache.id_to_type.insert(schema_id.to_string(), SchemaType::Type);
}
}
}
}
}
}
}
}
// Phase 3: Add all punc schemas as resources (these may reference enum and type schemas)
// Each punc gets strict validation based on its public field
if let Some(puncs_array) = puncs_value.as_array() {
for punc_row in puncs_array {
if let Some(punc_obj) = punc_row.as_object() {
if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
// Determine schema type based on public status
let is_public = punc_obj.get("public")
.and_then(|v| v.as_bool())
.unwrap_or(false);
let punc_schema_type = if is_public { SchemaType::PublicPunc } else { SchemaType::PrivatePunc };
// Add punc schemas from the 'schemas' array
if let Some(schemas_raw) = punc_obj.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
let request_schema_id = format!("{}.request", punc_name);
let response_schema_id = format!("{}.response", punc_name);
let schema_type_for_def = if schema_id == request_schema_id || schema_id == response_schema_id {
punc_schema_type
} else {
SchemaType::Type // For local/nested schemas
};
if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), schema_type_for_def, &mut errors) {
errors.push(json!({
"code": "PUNC_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}' for punc '{}'", schema_id, punc_name),
"details": {
"punc_name": punc_name,
"schema_id": schema_id,
"cause": format!("{}", e)
}
}));
} else {
all_schema_ids.push(schema_id.to_string());
cache.id_to_type.insert(schema_id.to_string(), schema_type_for_def);
}
}
}
}
}
}
}
}
}
// Phase 4: Compile all schemas now that all resources are added
if !errors.is_empty() {
// If we had errors adding resources, don't attempt compilation
return JsonB(json!({ "errors": errors }));
}
if let Err(_) = compile_all_schemas(&mut compiler, &mut cache, &all_schema_ids, &mut errors) {
// Add a high-level wrapper error when schema compilation fails
errors.push(json!({
"code": "COMPILE_ALL_SCHEMAS_FAILED",
"message": "Failed to compile JSON schemas during cache operation",
"details": {
"cause": "Schema compilation failed - see detailed errors above"
}
}));
}
// Use schema_path when compiling
match compiler.compile(&schema_path, &mut cache.schemas) {
Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index);
JsonB(json!({ "response": "success" }))
}
Err(e) => {
let errors = match &e {
CompileError::ValidationError { url: _url, src } => {
// Collect leaf errors from the meta-schema validation failure
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
// Filter and format errors properly - no instance for schema compilation
format_errors(error_list, &schema_value, schema_id)
}
_ => {
// Other compilation errors
vec![json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"schema": schema_id,
"cause": format!("{:?}", e)
}
})]
}
};
JsonB(json!({ "errors": errors }))
if errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": errors }))
}
}
// Helper function to add a schema resource (without compiling).
//
// Applies strict-validation rewriting for Type and PublicPunc schemas, then
// registers the document with the boon compiler under `schema_id`.
// On failure a structured error value is pushed onto `errors` AND an `Err`
// is returned, so the caller can attach its own higher-level error.
fn add_schema_resource(
    compiler: &mut Compiler,
    schema_id: &str,
    mut schema_value: Value,
    schema_type: SchemaType,
    errors: &mut Vec<Value>
) -> Result<(), String> {
    // Apply strict validation based on schema type.
    match schema_type {
        SchemaType::Enum | SchemaType::PrivatePunc => {
            // Enums and private puncs don't need strict validation.
        },
        SchemaType::Type | SchemaType::PublicPunc => {
            apply_strict_validation(&mut schema_value, schema_type);
        }
    }
    // Use schema_id directly - simple IDs like "entity", "user", "punc.request".
    // Move `schema_value` into the compiler: the previous `.clone()` here was
    // redundant, as the value is never used again after this call.
    if let Err(e) = compiler.add_resource(schema_id, schema_value) {
        errors.push(json!({
            "code": "SCHEMA_RESOURCE_FAILED",
            "message": format!("Failed to add schema resource '{}'", schema_id),
            "details": {
                "schema": schema_id,
                "cause": format!("{}", e)
            }
        }));
        return Err(format!("Failed to add schema resource: {}", e));
    }
    Ok(())
}
// Helper function to compile all added resources.
//
// Iterates the collected schema ids, compiles each against the shared
// `Schemas` store, and records the resulting index in the cache keyed by the
// original id. Stops at the FIRST failure: errors for that schema are
// appended to `errors` and an `Err` is returned (remaining ids are skipped).
fn compile_all_schemas(
    compiler: &mut Compiler,
    cache: &mut BoonCache,
    schema_ids: &[String],
    errors: &mut Vec<Value>
) -> Result<(), String> {
    for schema_id in schema_ids {
        match compiler.compile(schema_id, &mut cache.schemas) {
            Ok(sch_index) => {
                // Store the index using the original schema_id as the key
                cache.id_to_index.insert(schema_id.to_string(), sch_index);
            }
            Err(e) => {
                match &e {
                    CompileError::ValidationError { url: _url, src } => {
                        // Collect leaf errors from the meta-schema validation failure
                        let mut error_list = Vec::new();
                        collect_errors(src, &mut error_list);
                        // Get schema value for error formatting - we'll need to reconstruct or store it
                        // NOTE(review): formatting runs against an empty object because the
                        // original schema document is no longer available here, so any
                        // instance-derived error details will be empty — consider storing
                        // the original values if richer diagnostics are needed.
                        let schema_value = json!({}); // Placeholder - we don't have the original value here
                        let formatted_errors = format_errors(error_list, &schema_value, schema_id);
                        errors.extend(formatted_errors);
                    }
                    _ => {
                        // Other compilation errors
                        errors.push(json!({
                            "code": "SCHEMA_COMPILATION_FAILED",
                            "message": format!("Schema '{}' compilation failed", schema_id),
                            "details": {
                                "schema": schema_id,
                                "cause": format!("{:?}", e)
                            }
                        }));
                    }
                };
                return Err(format!("Schema compilation failed: {:?}", e));
            }
        }
    }
    Ok(())
}
// Helper function to apply strict validation to a schema
//
// This recursively adds unevaluatedProperties: false to object-type schemas,
// but SKIPS schemas inside if/then/else to avoid breaking conditional validation.
fn apply_strict_validation(schema: &mut Value) {
apply_strict_validation_recursive(schema, false);
// For type schemas, it skips the top level to allow inheritance.
fn apply_strict_validation(schema: &mut Value, schema_type: SchemaType) {
apply_strict_validation_recursive(schema, false, schema_type, true);
}
fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: bool) {
fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: bool, schema_type: SchemaType, is_top_level: bool) {
match schema {
Value::Object(map) => {
// Skip adding strict validation if we're inside a conditional
if !inside_conditional {
// Add strict validation to object schemas only at top level
if let Some(Value::String(t)) = map.get("type") {
if t == "object" && !map.contains_key("unevaluatedProperties") && !map.contains_key("additionalProperties") {
// At top level, use unevaluatedProperties: false
// This considers all evaluated properties from all schemas
map.insert("unevaluatedProperties".to_string(), Value::Bool(false));
}
// OR if we're at the top level of a type schema (types should be extensible)
let skip_strict = inside_conditional || (matches!(schema_type, SchemaType::Type) && is_top_level);
if !skip_strict {
// Apply unevaluatedProperties: false to schemas that have $ref OR type: "object"
let has_ref = map.contains_key("$ref");
let has_object_type = map.get("type").and_then(|v| v.as_str()) == Some("object");
if (has_ref || has_object_type) && !map.contains_key("unevaluatedProperties") && !map.contains_key("additionalProperties") {
// Use unevaluatedProperties: false to prevent extra properties
// This considers all evaluated properties from all schemas including refs
map.insert("unevaluatedProperties".to_string(), Value::Bool(false));
}
}
@ -119,23 +315,64 @@ fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: boo
for (key, value) in map.iter_mut() {
// Mark when we're inside conditional branches
let in_conditional = inside_conditional || matches!(key.as_str(), "if" | "then" | "else");
apply_strict_validation_recursive(value, in_conditional);
apply_strict_validation_recursive(value, in_conditional, schema_type, false)
}
}
Value::Array(arr) => {
// Recurse into array items
for item in arr.iter_mut() {
apply_strict_validation_recursive(item, inside_conditional);
apply_strict_validation_recursive(item, inside_conditional, schema_type, false);
}
}
_ => {}
}
}
// Custom post-validation check used for Type schemas: the instance's "type"
// property must equal the schema id's leading segment (text before the first
// '.'). Returns a success payload or a single TYPE_MISMATCH error.
fn validate_type_against_schema_id(instance: &Value, schema_id: &str) -> JsonB {
    // "entity.something" -> expected type "entity"; ids without '.' are used as-is.
    let expected_type = match schema_id.split('.').next() {
        Some(prefix) => prefix,
        None => schema_id,
    };
    let actual_type = instance.get("type").and_then(|v| v.as_str());
    if actual_type == Some(expected_type) {
        return JsonB(json!({ "response": "success" }));
    }
    // Validation failed; build the specific error payload.
    let (message, cause, context) = match actual_type {
        Some(actual) => (
            // 'type' is a string but does not match the expected value.
            format!("Instance type '{}' does not match expected type '{}' derived from schema ID", actual, expected_type),
            json!({ "expected": expected_type, "actual": actual }),
            json!(actual),
        ),
        None => (
            // 'type' is missing entirely or is not a string.
            "Instance 'type' property is missing or not a string".to_string(),
            json!("The 'type' property must be a string and is required for this validation."),
            instance.get("type").unwrap_or(&Value::Null).clone(),
        ),
    };
    JsonB(json!({
        "errors": [{
            "code": "TYPE_MISMATCH",
            "message": message,
            "details": {
                "path": "/type",
                "context": context,
                "cause": cause,
                "schema": schema_id
            }
        }]
    }))
}
#[pg_extern(strict, parallel_safe)]
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id
// Lookup uses the original schema_id - schemas should always be available after bulk caching
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"errors": [{
@ -143,21 +380,34 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"schema": schema_id,
"cause": "Schema must be cached before validation"
"cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
}
}]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "response": "success" })),
Ok(_) => {
// After standard validation, perform custom type check if it's a Type schema
if let Some(&schema_type) = cache.id_to_type.get(schema_id) {
if schema_type == SchemaType::Type {
return validate_type_against_schema_id(&instance_value, schema_id);
}
}
// For non-Type schemas, or if type not found (shouldn't happen), success.
JsonB(json!({ "response": "success" }))
}
Err(validation_error) => {
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value, schema_id);
// Filter out FALSE_SCHEMA errors if there are other validation errors
let filtered_errors = filter_false_schema_errors(errors);
JsonB(json!({ "errors": filtered_errors }))
if filtered_errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": filtered_errors }))
}
}
}
}
@ -486,6 +736,13 @@ fn handle_additional_items_error(base_path: &str, got: usize) -> Vec<Error> {
}
fn handle_format_error(base_path: &str, want: &str, got: &Cow<Value>, err: &Box<dyn std::error::Error>) -> Vec<Error> {
// If the value is an empty string, skip format validation.
if let Value::String(s) = got.as_ref() {
if s.is_empty() {
return vec![];
}
}
vec![Error {
path: base_path.to_string(),
code: "FORMAT_INVALID".to_string(),
@ -760,6 +1017,7 @@ fn clear_json_schemas() -> JsonB {
*cache = BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
id_to_type: HashMap::new(),
};
JsonB(json!({ "response": "success" }))
}
@ -786,6 +1044,16 @@ pub mod pg_test {
}
}
#[cfg(any(test, feature = "pg_test"))]
mod helpers {
include!("helpers.rs");
}
#[cfg(any(test, feature = "pg_test"))]
mod schemas {
include!("schemas.rs");
}
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {

805
src/schemas.rs Normal file
View File

@ -0,0 +1,805 @@
use crate::*;
use serde_json::{json, Value};
use pgrx::JsonB;
// Helper to convert Value to JsonB
/// Local copy of the JsonB-wrapping helper for the schema fixture builders.
fn jsonb(val: Value) -> JsonB {
    JsonB(val)
}
/// Caches one private punc `simple` whose request schema requires
/// `name` (string) and `age` (integer >= 0); no enums or types.
pub fn simple_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "simple",
        "public": false,
        "schemas": [{
            "$id": "simple.request",
            "type": "object",
            "properties": {
                "name": { "type": "string" },
                "age": { "type": "integer", "minimum": 0 }
            },
            "required": ["name", "age"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches a punc with a deliberately malformed schema ("type" set to an
/// invalid value) to exercise compile-failure error reporting.
pub fn invalid_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "invalid_punc",
        "public": false,
        "schemas": [{
            "$id": "invalid_punc.request",
            "type": ["invalid_type_value"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches punc `detailed_errors_test` with a required nested `address`
/// object (required street/city, city max length 10) for testing nested
/// error paths and detail reporting.
pub fn errors_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "detailed_errors_test",
        "public": false,
        "schemas": [{
            "$id": "detailed_errors_test.request",
            "type": "object",
            "properties": {
                "address": {
                    "type": "object",
                    "properties": {
                        "street": { "type": "string" },
                        "city": { "type": "string", "maxLength": 10 }
                    },
                    "required": ["street", "city"]
                }
            },
            "required": ["address"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches punc `oneof_test` whose request must match exactly one of two
/// alternatives: a string_prop object or a number_prop object.
pub fn oneof_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "oneof_test",
        "public": false,
        "schemas": [{
            "$id": "oneof_test.request",
            "oneOf": [
                {
                    "type": "object",
                    "properties": {
                        "string_prop": { "type": "string", "maxLength": 5 }
                    },
                    "required": ["string_prop"]
                },
                {
                    "type": "object",
                    "properties": {
                        "number_prop": { "type": "number", "minimum": 10 }
                    },
                    "required": ["number_prop"]
                }
            ]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches two puncs exercising different root JSON types: `object_test`
/// (object root) and `array_test` (array root with uuid-formatted ids).
pub fn root_types_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "object_test",
            "public": false,
            "schemas": [{
                "$id": "object_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" },
                    "age": { "type": "integer", "minimum": 0 }
                },
                "required": ["name", "age"]
            }]
        },
        {
            "name": "array_test",
            "public": false,
            "schemas": [{
                "$id": "array_test.request",
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "id": { "type": "string", "format": "uuid" }
                    }
                }
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches a battery of puncs exercising strictness rewriting: public puncs
/// (which receive strict validation) vs a private one, nested objects and
/// arrays, schemas that already declare unevaluatedProperties or
/// additionalProperties (which must be left alone), and an if/then
/// conditional (whose branches must not be made strict).
pub fn strict_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "basic_strict_test",
            "public": true,
            "schemas": [{
                "$id": "basic_strict_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                }
            }]
        },
        {
            "name": "non_strict_test",
            "public": false,
            "schemas": [{
                "$id": "non_strict_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                }
            }]
        },
        {
            "name": "nested_strict_test",
            "public": true,
            "schemas": [{
                "$id": "nested_strict_test.request",
                "type": "object",
                "properties": {
                    "user": {
                        "type": "object",
                        "properties": {
                            "name": { "type": "string" }
                        }
                    },
                    "items": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "id": { "type": "string" }
                            }
                        }
                    }
                }
            }]
        },
        {
            "name": "already_unevaluated_test",
            "public": true,
            "schemas": [{
                "$id": "already_unevaluated_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                },
                "unevaluatedProperties": true
            }]
        },
        {
            "name": "already_additional_test",
            "public": true,
            "schemas": [{
                "$id": "already_additional_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                },
                "additionalProperties": false
            }]
        },
        {
            "name": "conditional_strict_test",
            "public": true,
            "schemas": [{
                "$id": "conditional_strict_test.request",
                "type": "object",
                "properties": {
                    "creating": { "type": "boolean" }
                },
                "if": {
                    "properties": {
                        "creating": { "const": true }
                    }
                },
                "then": {
                    "properties": {
                        "name": { "type": "string" }
                    },
                    "required": ["name"]
                }
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches punc `basic_validation_test` with required `name` and `age`
/// properties, for exercising REQUIRED-style validation errors.
pub fn required_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "basic_validation_test",
        "public": false,
        "schemas": [{
            "$id": "basic_validation_test.request",
            "type": "object",
            "properties": {
                "name": { "type": "string" },
                "age": { "type": "integer", "minimum": 0 }
            },
            "required": ["name", "age"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches punc `dependency_split_test` where the presence of `creating`
/// makes `name` and `kind` required (JSON Schema "dependencies" keyword).
pub fn dependencies_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "dependency_split_test",
        "public": false,
        "schemas": [{
            "$id": "dependency_split_test.request",
            "type": "object",
            "properties": {
                "creating": { "type": "boolean" },
                "name": { "type": "string" },
                "kind": { "type": "string" },
                "description": { "type": "string" }
            },
            "dependencies": {
                "creating": ["name", "kind"]
            }
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches punc `nested_dep_test` where each array item has a required `id`
/// and a `creating` dependency on `name`/`kind`, to exercise required +
/// dependencies validation inside nested array items.
pub fn nested_req_deps_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "nested_dep_test",
        "public": false,
        "schemas": [{
            "$id": "nested_dep_test.request",
            "type": "object",
            "properties": {
                "items": {
                    "type": "array",
                    "items": {
                        "type": "object",
                        "properties": {
                            "id": { "type": "string" },
                            "creating": { "type": "boolean" },
                            "name": { "type": "string" },
                            "kind": { "type": "string" }
                        },
                        "required": ["id"],
                        "dependencies": {
                            "creating": ["name", "kind"]
                        }
                    }
                }
            },
            "required": ["items"]
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches two puncs with explicit `additionalProperties: false` — one at the
/// root and one on a nested `user` object — to exercise ADDITIONAL_PROPERTIES
/// error reporting at both levels.
pub fn additional_properties_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "additional_props_test",
            "public": false,
            "schemas": [{
                "$id": "additional_props_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" },
                    "age": { "type": "number" }
                },
                "additionalProperties": false
            }]
        },
        {
            "name": "nested_additional_props_test",
            "public": false,
            "schemas": [{
                "$id": "nested_additional_props_test.request",
                "type": "object",
                "properties": {
                    "user": {
                        "type": "object",
                        "properties": {
                            "name": { "type": "string" }
                        },
                        "additionalProperties": false
                    }
                }
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches two puncs with explicit `unevaluatedProperties: false`: one using
/// patternProperties and one composing properties via allOf, to exercise
/// unevaluated-property detection across composed schemas.
pub fn unevaluated_properties_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([
        {
            "name": "simple_unevaluated_test",
            "public": false,
            "schemas": [{
                "$id": "simple_unevaluated_test.request",
                "type": "object",
                "properties": {
                    "name": { "type": "string" },
                    "age": { "type": "number" }
                },
                "patternProperties": {
                    "^attr_": { "type": "string" }
                },
                "unevaluatedProperties": false
            }]
        },
        {
            "name": "conditional_unevaluated_test",
            "public": false,
            "schemas": [{
                "$id": "conditional_unevaluated_test.request",
                "type": "object",
                "allOf": [
                    {
                        "properties": {
                            "firstName": { "type": "string" }
                        }
                    },
                    {
                        "properties": {
                            "lastName": { "type": "string" }
                        }
                    }
                ],
                "properties": {
                    "age": { "type": "number" }
                },
                "unevaluatedProperties": false
            }]
        }
    ]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches punc `format_test` with uuid, date-time and email string formats,
/// for exercising format-assertion validation.
pub fn format_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([]);
    let puncs = json!([{
        "name": "format_test",
        "public": false,
        "schemas": [{
            "$id": "format_test.request",
            "type": "object",
            "properties": {
                "uuid": { "type": "string", "format": "uuid" },
                "date_time": { "type": "string", "format": "date-time" },
                "email": { "type": "string", "format": "email" }
            }
        }]
    }]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Caches a three-level type inheritance chain via `$ref`
/// (entity <- user <- person), each level adding properties and required
/// fields, to exercise property merging across referenced schemas.
pub fn property_merging_schemas() -> JsonB {
    let enums = json!([]);
    let types = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id": { "type": "string" },
                    "name": { "type": "string" }
                },
                "required": ["id"]
            }]
        },
        {
            "name": "user",
            "schemas": [{
                "$id": "user",
                "$ref": "entity",
                "properties": {
                    "password": { "type": "string", "minLength": 8 }
                },
                "required": ["password"]
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "user",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 }
                },
                "required": ["first_name", "last_name"]
            }]
        }
    ]);
    let puncs = json!([]);
    cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
/// Fixture: a `$ref` chain (`entity` <- `user` <- `person`) where the base
/// declares unconditional `required` entries and each derived type adds a
/// conditional `if`/`then` requirement keyed on the `type` property.
/// No enums or puncs are registered.
pub fn required_merging_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id":         { "type": "string", "format": "uuid" },
                    "type":       { "type": "string" },
                    "created_by": { "type": "string", "format": "uuid" }
                },
                "required": ["id", "type", "created_by"]
            }]
        },
        {
            "name": "user",
            "schemas": [{
                "$id": "user",
                "$ref": "entity",
                "properties": { "password": { "type": "string", "minLength": 8 } },
                "if":   { "properties": { "type": { "const": "user" } } },
                "then": { "required": ["password"] }
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "user",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 }
                },
                "if":   { "properties": { "type": { "const": "person" } } },
                "then": { "required": ["first_name", "last_name"] }
            }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}
/// Fixture: a `$ref` chain (`entity` <- `user` <- `person`) where every level
/// declares a `dependencies` entry keyed on `creating` — the base and `user`
/// both require `name`, while `person` requires `first_name`/`last_name`.
/// No enums or puncs are registered.
pub fn dependencies_merging_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id":         { "type": "string", "format": "uuid" },
                    "type":       { "type": "string" },
                    "created_by": { "type": "string", "format": "uuid" },
                    "creating":   { "type": "boolean" },
                    "name":       { "type": "string" }
                },
                "required": ["id", "type", "created_by"],
                "dependencies": { "creating": ["name"] }
            }]
        },
        {
            "name": "user",
            "schemas": [{
                "$id": "user",
                "$ref": "entity",
                "properties": { "password": { "type": "string", "minLength": 8 } },
                "dependencies": { "creating": ["name"] }
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "user",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 }
                },
                "dependencies": { "creating": ["first_name", "last_name"] }
            }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}
/// Fixture: shared types `entity` <- `person` (with a nested `address`
/// object), plus one public and one private punc whose request schemas are
/// bare `$ref`s to `person`. No enums are registered.
pub fn punc_with_refs_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "id": { "type": "string" },
                    "name": { "type": "string" }
                },
                "required": ["id"]
            }]
        },
        {
            "name": "person",
            "schemas": [{
                "$id": "person",
                "$ref": "entity",
                "properties": {
                    "first_name": { "type": "string", "minLength": 1 },
                    "last_name": { "type": "string", "minLength": 1 },
                    "address": {
                        "type": "object",
                        "properties": {
                            "street": { "type": "string" },
                            "city": { "type": "string" }
                        },
                        "required": ["street", "city"]
                    }
                }
            }]
        }
    ]);
    let punc_defs = json!([
        {
            "name": "public_ref_test",
            "public": true,
            "schemas": [{ "$id": "public_ref_test.request", "$ref": "person" }]
        },
        {
            "name": "private_ref_test",
            "public": false,
            "schemas": [{ "$id": "private_ref_test.request", "$ref": "person" }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(punc_defs))
}
/// Fixture: one enum (`task_priority`, four string values) and one private
/// punc whose request schema `$ref`s that enum for its required `priority`
/// property. No shared types are registered.
pub fn enum_schemas() -> JsonB {
    let enum_defs = json!([{
        "name": "task_priority",
        "values": ["low", "medium", "high", "urgent"],
        "schemas": [{
            "$id": "task_priority",
            "type": "string",
            "enum": ["low", "medium", "high", "urgent"]
        }]
    }]);
    let punc_defs = json!([{
        "name": "enum_ref_test",
        "public": false,
        "schemas": [{
            "$id": "enum_ref_test.request",
            "type": "object",
            "properties": { "priority": { "$ref": "task_priority" } },
            "required": ["priority"]
        }]
    }]);
    cache_json_schemas(jsonb(enum_defs), jsonb(json!([])), jsonb(punc_defs))
}
/// Fixture: puncs that carry punc-local helper schemas — one whose request
/// `$ref`s a local `local_address` schema, and one whose local schema in turn
/// `$ref`s the shared `global_thing` type. No enums are registered.
pub fn punc_local_refs_schemas() -> JsonB {
    let type_defs = json!([{
        "name": "global_thing",
        "schemas": [{
            "$id": "global_thing",
            "type": "object",
            "properties": { "id": { "type": "string", "format": "uuid" } },
            "required": ["id"]
        }]
    }]);
    let punc_defs = json!([
        {
            "name": "punc_with_local_ref_test",
            "public": false,
            "schemas": [
                {
                    "$id": "local_address",
                    "type": "object",
                    "properties": {
                        "street": { "type": "string" },
                        "city": { "type": "string" }
                    },
                    "required": ["street", "city"]
                },
                {
                    "$id": "punc_with_local_ref_test.request",
                    "$ref": "local_address"
                }
            ]
        },
        {
            "name": "punc_with_local_ref_to_global_test",
            "public": false,
            "schemas": [
                {
                    "$id": "local_user_with_thing",
                    "type": "object",
                    "properties": {
                        "user_name": { "type": "string" },
                        "thing": { "$ref": "global_thing" }
                    },
                    "required": ["user_name", "thing"]
                },
                {
                    "$id": "punc_with_local_ref_to_global_test.request",
                    "$ref": "local_user_with_thing"
                }
            ]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(punc_defs))
}
/// Fixture: two shared types where the derived schema `$ref`s the base and
/// supplies its own `title` ("Override Title" over "Base Title").
/// No enums or puncs are registered.
pub fn title_override_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "base_with_title",
            "schemas": [{
                "$id": "base_with_title",
                "type": "object",
                "title": "Base Title",
                "properties": { "name": { "type": "string" } },
                "required": ["name"]
            }]
        },
        {
            "name": "override_with_title",
            "schemas": [{
                "$id": "override_with_title",
                "$ref": "base_with_title",
                "title": "Override Title"
            }]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}
/// Fixture: a `$ref` chain `entity` <- `job` <- `super_job`, where
/// `super_job` additionally registers a second schema variant
/// (`super_job.short`) that `$ref`s its own primary schema and tightens
/// `name` with `maxLength`. No enums or puncs are registered.
pub fn type_matching_schemas() -> JsonB {
    let type_defs = json!([
        {
            "name": "entity",
            "schemas": [{
                "$id": "entity",
                "type": "object",
                "properties": {
                    "type": { "type": "string" },
                    "name": { "type": "string" }
                },
                "required": ["type", "name"]
            }]
        },
        {
            "name": "job",
            "schemas": [{
                "$id": "job",
                "$ref": "entity",
                "properties": { "job_id": { "type": "string" } },
                "required": ["job_id"]
            }]
        },
        {
            "name": "super_job",
            "schemas": [
                {
                    "$id": "super_job",
                    "$ref": "job",
                    "properties": { "manager_id": { "type": "string" } },
                    "required": ["manager_id"]
                },
                {
                    "$id": "super_job.short",
                    "$ref": "super_job",
                    "properties": { "name": { "maxLength": 10 } }
                }
            ]
        }
    ]);
    cache_json_schemas(jsonb(json!([])), jsonb(type_defs), jsonb(json!([])))
}

File diff suppressed because it is too large Load Diff

View File

@ -1 +1 @@
1.0.29
1.0.35