Compare commits

..

7 Commits

4 changed files with 568 additions and 365 deletions

View File

@ -9,7 +9,7 @@ use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{collections::HashMap, sync::RwLock};
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum SchemaType {
Enum,
Type,
@ -17,9 +17,15 @@ enum SchemaType {
PrivatePunc,
}
struct BoonCache {
struct Schema {
index: SchemaIndex,
t: SchemaType,
value: Value,
}
struct Cache {
schemas: Schemas,
id_to_index: HashMap<String, SchemaIndex>,
map: HashMap<String, Schema>,
}
// Structure to hold error information without lifetimes
@ -32,203 +38,105 @@ struct Error {
}
lazy_static! {
static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
static ref SCHEMA_CACHE: RwLock<Cache> = RwLock::new(Cache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
map: HashMap::new(),
});
}
#[pg_extern(strict)]
fn cache_json_schemas(enums: JsonB, types: JsonB, puncs: JsonB) -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
let enums_value: Value = enums.0;
let types_value: Value = types.0;
let puncs_value: Value = puncs.0;
let mut cache = SCHEMA_CACHE.write().unwrap();
let enums_value: Value = enums.0;
let types_value: Value = types.0;
let puncs_value: Value = puncs.0;
// Clear existing cache
*cache = BoonCache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
};
*cache = Cache {
schemas: Schemas::new(),
map: HashMap::new(),
};
// Create the boon compiler and enable format assertions
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
let mut compiler = Compiler::new();
compiler.enable_format_assertions();
let mut errors = Vec::new();
let mut errors = Vec::new();
let mut schemas_to_compile = Vec::new();
// Track all schema IDs for compilation
let mut all_schema_ids = Vec::new();
// Phase 1: Add all enum schemas as resources (priority 1 - these are referenced by types and puncs)
// Enums are never strict - they're reusable building blocks
if let Some(enums_array) = enums_value.as_array() {
for enum_row in enums_array {
if let Some(enum_obj) = enum_row.as_object() {
if let (Some(enum_name), Some(schemas_raw)) = (
enum_obj.get("name").and_then(|v| v.as_str()),
enum_obj.get("schemas")
) {
// Parse the schemas JSONB field
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), SchemaType::Enum, &mut errors) {
errors.push(json!({
"code": "ENUM_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}' for enum '{}'", schema_id, enum_name),
"details": {
"enum_name": enum_name,
"schema_id": schema_id,
"cause": format!("{}", e)
// Phase 1: Enums
if let Some(enums_array) = enums_value.as_array() {
for enum_row in enums_array {
if let Some(schemas_raw) = enum_row.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), SchemaType::Enum));
}
}
}));
} else {
all_schema_ids.push(schema_id.to_string());
}
}
}
}
}
}
}
}
// Phase 2: Add all type schemas as resources (priority 2 - these are referenced by puncs)
// Types are always strict - they should not allow extra properties
if let Some(types_array) = types_value.as_array() {
for type_row in types_array {
if let Some(type_obj) = type_row.as_object() {
if let (Some(type_name), Some(schemas_raw)) = (
type_obj.get("name").and_then(|v| v.as_str()),
type_obj.get("schemas")
) {
// Parse the schemas JSONB field
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), SchemaType::Type, &mut errors) {
errors.push(json!({
"code": "TYPE_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}' for type '{}'", schema_id, type_name),
"details": {
"type_name": type_name,
"schema_id": schema_id,
"cause": format!("{}", e)
// Phase 2: Types
if let Some(types_array) = types_value.as_array() {
for type_row in types_array {
if let Some(schemas_raw) = type_row.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), SchemaType::Type));
}
}
}));
} else {
all_schema_ids.push(schema_id.to_string());
}
}
}
}
}
}
}
}
// Phase 3: Add all punc schemas as resources (these may reference enum and type schemas)
// Each punc gets strict validation based on its public field
if let Some(puncs_array) = puncs_value.as_array() {
for punc_row in puncs_array {
if let Some(punc_obj) = punc_row.as_object() {
if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
// Determine schema type based on public status
let is_public = punc_obj.get("public")
.and_then(|v| v.as_bool())
.unwrap_or(false);
let punc_schema_type = if is_public { SchemaType::PublicPunc } else { SchemaType::PrivatePunc };
// Add punc local schemas as resources (from schemas field) - use $id directly (universal)
if let Some(schemas_raw) = punc_obj.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), SchemaType::Type, &mut errors) {
errors.push(json!({
"code": "PUNC_LOCAL_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add local schema resource '{}' for punc '{}'", schema_id, punc_name),
"details": {
"punc_name": punc_name,
"schema_id": schema_id,
"cause": format!("{}", e)
}
}));
} else {
all_schema_ids.push(schema_id.to_string());
}
// Phase 3: Puncs
if let Some(puncs_array) = puncs_value.as_array() {
for punc_row in puncs_array {
if let Some(punc_obj) = punc_row.as_object() {
if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
let is_public = punc_obj.get("public").and_then(|v| v.as_bool()).unwrap_or(false);
let punc_schema_type = if is_public { SchemaType::PublicPunc } else { SchemaType::PrivatePunc };
if let Some(schemas_raw) = punc_obj.get("schemas") {
if let Some(schemas_array) = schemas_raw.as_array() {
for schema_def in schemas_array {
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
let request_schema_id = format!("{}.request", punc_name);
let response_schema_id = format!("{}.response", punc_name);
let schema_type_for_def = if schema_id == request_schema_id || schema_id == response_schema_id {
punc_schema_type
} else {
SchemaType::Type
};
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), schema_type_for_def));
}
}
}
}
}
}
}
}
// Add request schema as resource if present - use {punc_name}.request
if let Some(request_schema) = punc_obj.get("request") {
if !request_schema.is_null() {
let request_schema_id = format!("{}.request", punc_name);
if let Err(e) = add_schema_resource(&mut compiler, &request_schema_id, request_schema.clone(), punc_schema_type, &mut errors) {
errors.push(json!({
"code": "PUNC_REQUEST_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add request schema resource for punc '{}'", punc_name),
"details": {
"punc_name": punc_name,
"schema_id": request_schema_id,
"cause": format!("{}", e)
}
}));
} else {
all_schema_ids.push(request_schema_id);
}
}
}
// Add response schema as resource if present - use {punc_name}.response
if let Some(response_schema) = punc_obj.get("response") {
if !response_schema.is_null() {
let response_schema_id = format!("{}.response", punc_name);
if let Err(e) = add_schema_resource(&mut compiler, &response_schema_id, response_schema.clone(), punc_schema_type, &mut errors) {
errors.push(json!({
"code": "PUNC_RESPONSE_SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add response schema resource for punc '{}'", punc_name),
"details": {
"punc_name": punc_name,
"schema_id": response_schema_id,
"cause": format!("{}", e)
}
}));
} else {
all_schema_ids.push(response_schema_id);
}
}
}
}
}
}
}
// Phase 4: Compile all schemas now that all resources are added
if !errors.is_empty() {
// If we had errors adding resources, don't attempt compilation
return JsonB(json!({ "errors": errors }));
}
// Add all resources to compiler first
for (id, value, schema_type) in &schemas_to_compile {
add_schema_resource(&mut compiler, id, value.clone(), *schema_type, &mut errors);
}
if let Err(_) = compile_all_schemas(&mut compiler, &mut cache, &all_schema_ids, &mut errors) {
// Add a high-level wrapper error when schema compilation fails
errors.push(json!({
"code": "COMPILE_ALL_SCHEMAS_FAILED",
"message": "Failed to compile JSON schemas during cache operation",
"details": {
"cause": "Schema compilation failed - see detailed errors above"
}
}));
}
if !errors.is_empty() {
return JsonB(json!({ "errors": errors }));
}
if errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": errors }))
}
// Compile all schemas
compile_all_schemas(&mut compiler, &mut cache, &schemas_to_compile, &mut errors);
if errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": errors }))
}
}
// Helper function to add a schema resource (without compiling)
@ -238,74 +146,51 @@ fn add_schema_resource(
mut schema_value: Value,
schema_type: SchemaType,
errors: &mut Vec<Value>
) -> Result<(), String> {
// Apply strict validation based on schema type
) {
match schema_type {
SchemaType::Enum | SchemaType::PrivatePunc => {
// Enums and private puncs don't need strict validation
},
SchemaType::Type | SchemaType::PublicPunc => {
apply_strict_validation(&mut schema_value, schema_type);
}
SchemaType::Enum | SchemaType::PrivatePunc => {},
SchemaType::Type | SchemaType::PublicPunc => apply_strict_validation(&mut schema_value, schema_type),
}
// Use schema_id directly - simple IDs like "entity", "user", "punc.request"
if let Err(e) = compiler.add_resource(schema_id, schema_value.clone()) {
if let Err(e) = compiler.add_resource(schema_id, schema_value) {
errors.push(json!({
"code": "SCHEMA_RESOURCE_FAILED",
"message": format!("Failed to add schema resource '{}'", schema_id),
"details": {
"schema": schema_id,
"cause": format!("{}", e)
}
"details": { "schema": schema_id, "cause": format!("{}", e) }
}));
return Err(format!("Failed to add schema resource: {}", e));
}
Ok(())
}
// Helper function to compile all added resources
fn compile_all_schemas(
compiler: &mut Compiler,
cache: &mut BoonCache,
schema_ids: &[String],
errors: &mut Vec<Value>
) -> Result<(), String> {
for schema_id in schema_ids {
match compiler.compile(schema_id, &mut cache.schemas) {
Ok(sch_index) => {
// Store the index using the original schema_id as the key
cache.id_to_index.insert(schema_id.to_string(), sch_index);
}
Err(e) => {
match &e {
CompileError::ValidationError { url: _url, src } => {
// Collect leaf errors from the meta-schema validation failure
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
// Get schema value for error formatting - we'll need to reconstruct or store it
let schema_value = json!({}); // Placeholder - we don't have the original value here
let formatted_errors = format_errors(error_list, &schema_value, schema_id);
errors.extend(formatted_errors);
}
_ => {
// Other compilation errors
errors.push(json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", schema_id),
"details": {
"schema": schema_id,
"cause": format!("{:?}", e)
}
}));
}
};
return Err(format!("Schema compilation failed: {:?}", e));
}
compiler: &mut Compiler,
cache: &mut Cache,
schemas_to_compile: &[(String, Value, SchemaType)],
errors: &mut Vec<Value>,
) {
for (id, value, schema_type) in schemas_to_compile {
match compiler.compile(id, &mut cache.schemas) {
Ok(index) => {
cache.map.insert(id.clone(), Schema { index, t: *schema_type, value: value.clone() });
}
Err(e) => {
match &e {
CompileError::ValidationError { src, .. } => {
let mut error_list = Vec::new();
collect_errors(src, &mut error_list);
let formatted_errors = format_errors(error_list, value, id);
errors.extend(formatted_errors);
}
_ => {
errors.push(json!({
"code": "SCHEMA_COMPILATION_FAILED",
"message": format!("Schema '{}' compilation failed", id),
"details": { "schema": id, "cause": format!("{:?}", e) }
}));
}
};
}
}
}
}
Ok(())
}
// Helper function to apply strict validation to a schema
@ -353,40 +238,149 @@ fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: boo
}
}
fn walk_and_validate_refs(
instance: &Value,
schema: &Value,
cache: &std::sync::RwLockReadGuard<Cache>,
path_parts: &mut Vec<String>,
type_validated: bool,
top_level_id: Option<&str>,
errors: &mut Vec<Value>,
) {
if let Some(ref_url) = schema.get("$ref").and_then(|v| v.as_str()) {
if let Some(s) = cache.map.get(ref_url) {
let mut new_type_validated = type_validated;
if !type_validated && s.t == SchemaType::Type {
let id_to_use = top_level_id.unwrap_or(ref_url);
let expected_type = id_to_use.split('.').next().unwrap_or(id_to_use);
if let Some(actual_type) = instance.get("type").and_then(|v| v.as_str()) {
if actual_type == expected_type {
new_type_validated = true;
} else {
path_parts.push("type".to_string());
let path = format!("/{}", path_parts.join("/"));
path_parts.pop();
errors.push(json!({
"code": "TYPE_MISMATCH",
"message": format!("Instance type '{}' does not match expected type '{}' derived from schema $ref", actual_type, expected_type),
"details": { "path": path, "context": instance, "cause": { "expected": expected_type, "actual": actual_type }, "schema": ref_url }
}));
}
} else {
if top_level_id.is_some() {
let path = if path_parts.is_empty() { "".to_string() } else { format!("/{}", path_parts.join("/")) };
errors.push(json!({
"code": "TYPE_MISMATCH",
"message": "Instance is missing 'type' property required for schema validation",
"details": { "path": path, "context": instance, "cause": { "expected": expected_type }, "schema": ref_url }
}));
}
}
}
walk_and_validate_refs(instance, &s.value, cache, path_parts, new_type_validated, None, errors);
}
}
if let Some(properties) = schema.get("properties").and_then(|v| v.as_object()) {
for (prop_name, prop_schema) in properties {
if let Some(prop_value) = instance.get(prop_name) {
path_parts.push(prop_name.clone());
walk_and_validate_refs(prop_value, prop_schema, cache, path_parts, type_validated, None, errors);
path_parts.pop();
}
}
}
if let Some(items_schema) = schema.get("items") {
if let Some(instance_array) = instance.as_array() {
for (i, item) in instance_array.iter().enumerate() {
path_parts.push(i.to_string());
walk_and_validate_refs(item, items_schema, cache, path_parts, false, None, errors);
path_parts.pop();
}
}
}
if let Some(all_of_array) = schema.get("allOf").and_then(|v| v.as_array()) {
for sub_schema in all_of_array {
walk_and_validate_refs(instance, sub_schema, cache, path_parts, type_validated, None, errors);
}
}
if let Some(any_of_array) = schema.get("anyOf").and_then(|v| v.as_array()) {
for sub_schema in any_of_array {
walk_and_validate_refs(instance, sub_schema, cache, path_parts, type_validated, None, errors);
}
}
if let Some(one_of_array) = schema.get("oneOf").and_then(|v| v.as_array()) {
for sub_schema in one_of_array {
walk_and_validate_refs(instance, sub_schema, cache, path_parts, type_validated, None, errors);
}
}
if let Some(if_schema) = schema.get("if") {
walk_and_validate_refs(instance, if_schema, cache, path_parts, type_validated, None, errors);
}
if let Some(then_schema) = schema.get("then") {
walk_and_validate_refs(instance, then_schema, cache, path_parts, type_validated, None, errors);
}
if let Some(else_schema) = schema.get("else") {
walk_and_validate_refs(instance, else_schema, cache, path_parts, type_validated, None, errors);
}
if let Some(not_schema) = schema.get("not") {
walk_and_validate_refs(instance, not_schema, cache, path_parts, type_validated, None, errors);
}
}
#[pg_extern(strict, parallel_safe)]
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
// Lookup uses the original schema_id - schemas should always be available after bulk caching
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"errors": [{
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"schema": schema_id,
"cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
let cache = SCHEMA_CACHE.read().unwrap();
match cache.map.get(schema_id) {
None => JsonB(json!({
"errors": [{
"code": "SCHEMA_NOT_FOUND",
"message": format!("Schema '{}' not found in cache", schema_id),
"details": {
"schema": schema_id,
"cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
}
}]
})),
Some(schema) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, schema.index) {
Ok(_) => {
let mut custom_errors = Vec::new();
if schema.t == SchemaType::Type || schema.t == SchemaType::PublicPunc || schema.t == SchemaType::PrivatePunc {
let mut path_parts = vec![];
let top_level_id = if schema.t == SchemaType::Type { Some(schema_id) } else { None };
walk_and_validate_refs(&instance_value, &schema.value, &cache, &mut path_parts, false, top_level_id, &mut custom_errors);
}
if custom_errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": custom_errors }))
}
}
Err(validation_error) => {
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value, schema_id);
let filtered_errors = filter_false_schema_errors(errors);
if filtered_errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": filtered_errors }))
}
}
}
}
}]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "response": "success" })),
Err(validation_error) => {
let mut error_list = Vec::new();
collect_errors(&validation_error, &mut error_list);
let errors = format_errors(error_list, &instance_value, schema_id);
// Filter out FALSE_SCHEMA errors if there are other validation errors
let filtered_errors = filter_false_schema_errors(errors);
if filtered_errors.is_empty() {
JsonB(json!({ "response": "success" }))
} else {
JsonB(json!({ "errors": filtered_errors }))
}
}
}
}
}
}
// Recursively collects validation errors
@ -983,15 +977,15 @@ fn extract_value_at_path(instance: &Value, path: &str) -> Value {
#[pg_extern(strict, parallel_safe)]
fn json_schema_cached(schema_id: &str) -> bool {
let cache = SCHEMA_CACHE.read().unwrap();
cache.id_to_index.contains_key(schema_id)
cache.map.contains_key(schema_id)
}
#[pg_extern(strict)]
fn clear_json_schemas() -> JsonB {
let mut cache = SCHEMA_CACHE.write().unwrap();
*cache = BoonCache {
*cache = Cache {
schemas: Schemas::new(),
id_to_index: HashMap::new(),
map: HashMap::new(),
};
JsonB(json!({ "response": "success" }))
}
@ -999,7 +993,7 @@ fn clear_json_schemas() -> JsonB {
#[pg_extern(strict, parallel_safe)]
fn show_json_schemas() -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let ids: Vec<String> = cache.id_to_index.keys().cloned().collect();
let ids: Vec<String> = cache.map.keys().cloned().collect();
JsonB(json!({ "response": ids }))
}

View File

@ -13,14 +13,15 @@ pub fn simple_schemas() -> JsonB {
let puncs = json!([{
"name": "simple",
"public": false,
"request": {
"schemas": [{
"$id": "simple.request",
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -32,10 +33,10 @@ pub fn invalid_schemas() -> JsonB {
let puncs = json!([{
"name": "invalid_punc",
"public": false,
"request": {
"$id": "urn:invalid_schema",
"schemas": [{
"$id": "invalid_punc.request",
"type": ["invalid_type_value"]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -47,7 +48,8 @@ pub fn errors_schemas() -> JsonB {
let puncs = json!([{
"name": "detailed_errors_test",
"public": false,
"request": {
"schemas": [{
"$id": "detailed_errors_test.request",
"type": "object",
"properties": {
"address": {
@ -60,7 +62,7 @@ pub fn errors_schemas() -> JsonB {
}
},
"required": ["address"]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -72,7 +74,8 @@ pub fn oneof_schemas() -> JsonB {
let puncs = json!([{
"name": "oneof_test",
"public": false,
"request": {
"schemas": [{
"$id": "oneof_test.request",
"oneOf": [
{
"type": "object",
@ -89,7 +92,7 @@ pub fn oneof_schemas() -> JsonB {
"required": ["number_prop"]
}
]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -102,19 +105,21 @@ pub fn root_types_schemas() -> JsonB {
{
"name": "object_test",
"public": false,
"request": {
"schemas": [{
"$id": "object_test.request",
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
}
}]
},
{
"name": "array_test",
"public": false,
"request": {
"schemas": [{
"$id": "array_test.request",
"type": "array",
"items": {
"type": "object",
@ -122,7 +127,7 @@ pub fn root_types_schemas() -> JsonB {
"id": { "type": "string", "format": "uuid" }
}
}
}
}]
}
]);
@ -136,27 +141,30 @@ pub fn strict_schemas() -> JsonB {
{
"name": "basic_strict_test",
"public": true,
"request": {
"schemas": [{
"$id": "basic_strict_test.request",
"type": "object",
"properties": {
"name": { "type": "string" }
}
}
}]
},
{
"name": "non_strict_test",
"public": false,
"request": {
"schemas": [{
"$id": "non_strict_test.request",
"type": "object",
"properties": {
"name": { "type": "string" }
}
}
}]
},
{
"name": "nested_strict_test",
"public": true,
"request": {
"schemas": [{
"$id": "nested_strict_test.request",
"type": "object",
"properties": {
"user": {
@ -175,34 +183,37 @@ pub fn strict_schemas() -> JsonB {
}
}
}
}
}]
},
{
"name": "already_unevaluated_test",
"public": true,
"request": {
"schemas": [{
"$id": "already_unevaluated_test.request",
"type": "object",
"properties": {
"name": { "type": "string" }
},
"unevaluatedProperties": true
}
}]
},
{
"name": "already_additional_test",
"public": true,
"request": {
"schemas": [{
"$id": "already_additional_test.request",
"type": "object",
"properties": {
"name": { "type": "string" }
},
"additionalProperties": false
}
}]
},
{
"name": "conditional_strict_test",
"public": true,
"request": {
"schemas": [{
"$id": "conditional_strict_test.request",
"type": "object",
"properties": {
"creating": { "type": "boolean" }
@ -218,7 +229,7 @@ pub fn strict_schemas() -> JsonB {
},
"required": ["name"]
}
}
}]
}
]);
@ -231,14 +242,15 @@ pub fn required_schemas() -> JsonB {
let puncs = json!([{
"name": "basic_validation_test",
"public": false,
"request": {
"schemas": [{
"$id": "basic_validation_test.request",
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -250,7 +262,8 @@ pub fn dependencies_schemas() -> JsonB {
let puncs = json!([{
"name": "dependency_split_test",
"public": false,
"request": {
"schemas": [{
"$id": "dependency_split_test.request",
"type": "object",
"properties": {
"creating": { "type": "boolean" },
@ -261,7 +274,7 @@ pub fn dependencies_schemas() -> JsonB {
"dependencies": {
"creating": ["name", "kind"]
}
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -273,7 +286,8 @@ pub fn nested_req_deps_schemas() -> JsonB {
let puncs = json!([{
"name": "nested_dep_test",
"public": false,
"request": {
"schemas": [{
"$id": "nested_dep_test.request",
"type": "object",
"properties": {
"items": {
@ -294,7 +308,7 @@ pub fn nested_req_deps_schemas() -> JsonB {
}
},
"required": ["items"]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -307,19 +321,21 @@ pub fn additional_properties_schemas() -> JsonB {
{
"name": "additional_props_test",
"public": false,
"request": {
"schemas": [{
"$id": "additional_props_test.request",
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "number" }
},
"additionalProperties": false
}
}]
},
{
"name": "nested_additional_props_test",
"public": false,
"request": {
"schemas": [{
"$id": "nested_additional_props_test.request",
"type": "object",
"properties": {
"user": {
@ -330,7 +346,7 @@ pub fn additional_properties_schemas() -> JsonB {
"additionalProperties": false
}
}
}
}]
}
]);
@ -344,7 +360,8 @@ pub fn unevaluated_properties_schemas() -> JsonB {
{
"name": "simple_unevaluated_test",
"public": false,
"request": {
"schemas": [{
"$id": "simple_unevaluated_test.request",
"type": "object",
"properties": {
"name": { "type": "string" },
@ -354,12 +371,13 @@ pub fn unevaluated_properties_schemas() -> JsonB {
"^attr_": { "type": "string" }
},
"unevaluatedProperties": false
}
}]
},
{
"name": "conditional_unevaluated_test",
"public": false,
"request": {
"schemas": [{
"$id": "conditional_unevaluated_test.request",
"type": "object",
"allOf": [
{
@ -377,7 +395,7 @@ pub fn unevaluated_properties_schemas() -> JsonB {
"age": { "type": "number" }
},
"unevaluatedProperties": false
}
}]
}
]);
@ -390,14 +408,15 @@ pub fn format_schemas() -> JsonB {
let puncs = json!([{
"name": "format_test",
"public": false,
"request": {
"schemas": [{
"$id": "format_test.request",
"type": "object",
"properties": {
"uuid": { "type": "string", "format": "uuid" },
"date_time": { "type": "string", "format": "date-time" },
"email": { "type": "string", "format": "email" }
}
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -572,9 +591,10 @@ pub fn punc_with_refs_schemas() -> JsonB {
"type": "object",
"properties": {
"id": { "type": "string" },
"name": { "type": "string" }
"name": { "type": "string" },
"type": { "type": "string" }
},
"required": ["id"]
"required": ["id", "type"]
}]
},
{
@ -602,16 +622,18 @@ pub fn punc_with_refs_schemas() -> JsonB {
{
"name": "public_ref_test",
"public": true,
"request": {
"schemas": [{
"$id": "public_ref_test.request",
"$ref": "person"
}
}]
},
{
"name": "private_ref_test",
"public": false,
"request": {
"schemas": [{
"$id": "private_ref_test.request",
"$ref": "person"
}
}]
}
]);
@ -636,13 +658,14 @@ pub fn enum_schemas() -> JsonB {
let puncs = json!([{
"name": "enum_ref_test",
"public": false,
"request": {
"schemas": [{
"$id": "enum_ref_test.request",
"type": "object",
"properties": {
"priority": { "$ref": "task_priority" }
},
"required": ["priority"]
}
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
@ -658,9 +681,10 @@ pub fn punc_local_refs_schemas() -> JsonB {
"$id": "global_thing",
"type": "object",
"properties": {
"id": { "type": "string", "format": "uuid" }
"id": { "type": "string", "format": "uuid" },
"type": { "type": "string" }
},
"required": ["id"]
"required": ["id", "type"]
}]
}
]);
@ -669,34 +693,40 @@ pub fn punc_local_refs_schemas() -> JsonB {
{
"name": "punc_with_local_ref_test",
"public": false,
"schemas": [{
"$id": "local_address",
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string" }
"schemas": [
{
"$id": "local_address",
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string" }
},
"required": ["street", "city"]
},
"required": ["street", "city"]
}],
"request": {
"$ref": "local_address"
}
{
"$id": "punc_with_local_ref_test.request",
"$ref": "local_address"
}
]
},
{
"name": "punc_with_local_ref_to_global_test",
"public": false,
"schemas": [{
"$id": "local_user_with_thing",
"type": "object",
"properties": {
"user_name": { "type": "string" },
"thing": { "$ref": "global_thing" }
"schemas": [
{
"$id": "local_user_with_thing",
"type": "object",
"properties": {
"user_name": { "type": "string" },
"thing": { "$ref": "global_thing" }
},
"required": ["user_name", "thing"]
},
"required": ["user_name", "thing"]
}],
"request": {
"$ref": "local_user_with_thing"
}
{
"$id": "punc_with_local_ref_to_global_test.request",
"$ref": "local_user_with_thing"
}
]
}
]);
@ -731,5 +761,70 @@ pub fn title_override_schemas() -> JsonB {
let puncs = json!([]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}
pub fn type_matching_schemas() -> JsonB {
let enums = json!([]);
let types = json!([
{
"name": "entity",
"schemas": [{
"$id": "entity",
"type": "object",
"properties": { "type": { "type": "string" }, "name": { "type": "string" } },
"required": ["type", "name"]
}]
},
{
"name": "job",
"schemas": [{
"$id": "job",
"$ref": "entity",
"properties": { "job_id": { "type": "string" } },
"required": ["job_id"]
}]
},
{
"name": "super_job",
"schemas": [
{
"$id": "super_job",
"$ref": "job",
"properties": { "manager_id": { "type": "string" } },
"required": ["manager_id"]
},
{
"$id": "super_job.short",
"$ref": "super_job",
"properties": { "name": { "maxLength": 10 } }
}
]
}
]);
let puncs = json!([{
"name": "type_test_punc",
"public": false,
"schemas": [{
"$id": "type_test_punc.request",
"type": "object",
"properties": {
"root_job": { "$ref": "job" },
"nested_or_super_job": {
"oneOf": [
{ "$ref": "super_job" },
{
"type": "object",
"properties": {
"my_job": { "$ref": "job" }
},
"required": ["my_job"]
}
]
}
},
"required": ["root_job", "nested_or_super_job"]
}]
}]);
cache_json_schemas(jsonb(enums), jsonb(types), jsonb(puncs))
}

View File

@ -51,16 +51,7 @@ fn test_validate_simple() {
#[pg_test]
fn test_cache_invalid() {
let cache_result = invalid_schemas();
// Should fail due to invalid schema in the request
// Bulk caching produces both detailed meta-schema validation errors and a high-level wrapper error
assert_error_count(&cache_result, 3); // 2 detailed meta-schema errors + 1 high-level wrapper
// Check the high-level wrapper error
let wrapper_error = find_error_with_code(&cache_result, "COMPILE_ALL_SCHEMAS_FAILED");
assert_error_message_contains(wrapper_error, "Failed to compile JSON schemas during cache operation");
// Should also have detailed meta-schema validation errors
assert_error_count(&cache_result, 2);
assert!(has_error_with_code(&cache_result, "ENUM_VIOLATED"),
"Should have ENUM_VIOLATED errors");
}
@ -509,6 +500,7 @@ fn test_validate_property_merging() {
// From entity
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "John Doe",
"type": "person",
// From user
"password": "securepass123",
@ -525,6 +517,7 @@ fn test_validate_property_merging() {
let invalid_mixed_properties = json!({
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "John Doe",
"type": "person",
"password": "short", // Too short from user schema
"first_name": "", // Empty string violates person schema minLength
"last_name": "Doe"
@ -546,15 +539,13 @@ fn test_validate_required_merging() {
// user: ["password"] (conditional when type=user)
// person: ["first_name", "last_name"] (conditional when type=person)
let missing_all_required = json!({});
let missing_all_required = json!({ "type": "person" });
let result = validate_json_schema("person", jsonb(missing_all_required));
// Should fail for all required fields across inheritance chain
assert_error_count(&result, 6); // id, type, created_by, password, first_name, last_name
assert_error_count(&result, 4); // id, created_by, first_name, last_name
assert_has_error(&result, "REQUIRED_FIELD_MISSING", "/id");
assert_has_error(&result, "REQUIRED_FIELD_MISSING", "/type");
assert_has_error(&result, "REQUIRED_FIELD_MISSING", "/created_by");
assert_has_error(&result, "REQUIRED_FIELD_MISSING", "/password");
assert_has_error(&result, "REQUIRED_FIELD_MISSING", "/first_name");
assert_has_error(&result, "REQUIRED_FIELD_MISSING", "/last_name");
@ -622,6 +613,7 @@ fn test_validate_punc_with_refs() {
// Test 1: Public punc is strict - no extra properties allowed at root level
let public_root_extra = json!({
"type": "person",
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "John Doe",
"first_name": "John",
@ -637,6 +629,7 @@ fn test_validate_punc_with_refs() {
// Test 2: Private punc allows extra properties at root level
let private_root_extra = json!({
"type": "person",
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "John Doe",
"first_name": "John",
@ -650,6 +643,7 @@ fn test_validate_punc_with_refs() {
// Test 3: Valid data with address should pass for both
let valid_data_with_address = json!({
"type": "person",
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "John Doe",
"first_name": "John",
@ -668,6 +662,7 @@ fn test_validate_punc_with_refs() {
// Test 4: Extra properties in nested address should fail for BOTH puncs (types are always strict)
let address_with_extra = json!({
"type": "person",
"id": "550e8400-e29b-41d4-a716-446655440000",
"name": "John Doe",
"first_name": "John",
@ -679,20 +674,6 @@ fn test_validate_punc_with_refs() {
}
});
// NOTE: The following test is disabled due to what appears to be a bug in the `boon` validator.
// When a validation fails within a referenced schema (`$ref`), `boon` does not seem to propagate
// the set of evaluated properties back to the parent schema. As a result, if the parent schema
// also uses `unevaluatedProperties`, it incorrectly flags all properties as unevaluated.
// In this case, the validation of `person` fails on `/address/country`, which prevents the
// `public_ref_test.request` schema from learning that `id`, `name`, etc., were evaluated,
// causing it to incorrectly report 6 errors instead of the expected 1.
// The `allOf` wrapper workaround does not solve this, as the information is lost on any `Err` result.
// This test is preserved to be re-enabled if/when the validator is fixed.
//
// let result_public_address = validate_json_schema("public_ref_test.request", jsonb(address_with_extra.clone()));
// assert_error_count(&result_public_address, 1);
// assert_has_error(&result_public_address, "FALSE_SCHEMA", "/address/country");
let result_private_address = validate_json_schema("private_ref_test.request", jsonb(address_with_extra));
assert_error_count(&result_private_address, 1);
assert_has_error(&result_private_address, "FALSE_SCHEMA", "/address/country");
@ -735,6 +716,7 @@ fn test_validate_punc_local_refs() {
// Test 1: Punc request referencing a schema defined locally within the punc
let valid_local_ref = json!({
"type": "local_address",
"street": "123 Main St",
"city": "Anytown"
});
@ -742,6 +724,7 @@ fn test_validate_punc_local_refs() {
assert_success(&result_valid_local);
let invalid_local_ref = json!({
"type": "local_address",
"street": "123 Main St" // Missing city
});
let result_invalid_local = validate_json_schema("punc_with_local_ref_test.request", jsonb(invalid_local_ref));
@ -750,8 +733,10 @@ fn test_validate_punc_local_refs() {
// Test 2: Punc with a local schema that references a global type schema
let valid_global_ref = json!({
"type": "local_user_with_thing",
"user_name": "Alice",
"thing": {
"type": "global_thing",
"id": "550e8400-e29b-41d4-a716-446655440000"
}
});
@ -759,8 +744,10 @@ fn test_validate_punc_local_refs() {
assert_success(&result_valid_global);
let invalid_global_ref = json!({
"type": "local_user_with_thing",
"user_name": "Bob",
"thing": {
"type": "global_thing",
"id": "not-a-uuid" // Invalid format for global_thing's id
}
});
@ -777,14 +764,141 @@ fn test_validate_title_override() {
// Test that a schema with an overridden title still inherits validation keywords correctly.
// This instance is valid because it provides the 'name' required by the base schema.
let valid_instance = json!({ "name": "Test Name" });
let valid_instance = json!({ "type": "override_with_title", "name": "Test Name" });
let result_valid = validate_json_schema("override_with_title", jsonb(valid_instance));
assert_success(&result_valid);
// This instance is invalid because it's missing the 'name' required by the base schema.
// This proves that validation keywords are inherited even when metadata keywords are overridden.
let invalid_instance = json!({});
let invalid_instance = json!({ "type": "override_with_title" });
let result_invalid = validate_json_schema("override_with_title", jsonb(invalid_instance));
assert_error_count(&result_invalid, 1);
assert_has_error(&result_invalid, "REQUIRED_FIELD_MISSING", "/name");
}
#[pg_test]
fn test_validate_type_matching() {
    // Install the schema fixtures used by the type-matching scenarios below.
    let setup = type_matching_schemas();
    assert_success(&setup);

    // Case 1: 'job' extends 'entity'; a matching "type" discriminator passes.
    let job_ok = json!({
        "type": "job",
        "name": "my job",
        "job_id": "job123"
    });
    assert_success(&validate_json_schema("job", jsonb(job_ok)));

    // A non-matching discriminator is rejected with exactly one TYPE_MISMATCH.
    let job_bad_type = json!({
        "type": "not_job",
        "name": "my job",
        "job_id": "job123"
    });
    let job_bad_result = validate_json_schema("job", jsonb(job_bad_type));
    assert_error_count(&job_bad_result, 1);
    assert_has_error(&job_bad_result, "TYPE_MISMATCH", "/type");

    // Case 2: 'super_job' extends 'job'; valid instance passes end-to-end.
    let super_job_ok = json!({
        "type": "super_job",
        "name": "my super job",
        "job_id": "job123",
        "manager_id": "mgr1"
    });
    assert_success(&validate_json_schema("super_job", jsonb(super_job_ok)));

    // Case 3: the 'super_job.short' punc still requires type 'super_job'.
    let short_ok = json!({
        "type": "super_job",
        "name": "short", // maxLength: 10
        "job_id": "job123",
        "manager_id": "mgr1"
    });
    assert_success(&validate_json_schema("super_job.short", jsonb(short_ok)));

    let short_wrong_type = json!({
        "type": "job", // Should be 'super_job'
        "name": "short",
        "job_id": "job123",
        "manager_id": "mgr1"
    });
    let short_result = validate_json_schema("super_job.short", jsonb(short_wrong_type));
    assert_error_count(&short_result, 1);
    let mismatch = find_error_with_code_and_path(&short_result, "TYPE_MISMATCH", "/type");
    assert_error_message_contains(mismatch, "Instance type 'job' does not match expected type 'super_job'");

    // Case 4: a punc whose request mixes a root-level ref, a nested ref, and a
    // oneOf ref — all type discriminators line up, so validation succeeds.
    let punc_ok = json!({
        "root_job": {
            "type": "job",
            "name": "root job",
            "job_id": "job456"
        },
        "nested_or_super_job": {
            "type": "super_job",
            "name": "nested super job",
            "job_id": "job789",
            "manager_id": "mgr2"
        }
    });
    assert_success(&validate_json_schema("type_test_punc.request", jsonb(punc_ok)));

    // Case 5: wrong discriminator on the punc's root-level ref.
    let punc_bad_root = json!({
        "root_job": {
            "type": "entity", // Should be "job"
            "name": "root job",
            "job_id": "job456"
        },
        "nested_or_super_job": {
            "type": "super_job",
            "name": "nested super job",
            "job_id": "job789",
            "manager_id": "mgr2"
        }
    });
    let bad_root_result = validate_json_schema("type_test_punc.request", jsonb(punc_bad_root));
    assert_error_count(&bad_root_result, 1);
    assert_has_error(&bad_root_result, "TYPE_MISMATCH", "/root_job/type");

    // Case 6: wrong discriminator on a ref nested one level deeper.
    let punc_bad_nested = json!({
        "root_job": {
            "type": "job",
            "name": "root job",
            "job_id": "job456"
        },
        "nested_or_super_job": {
            "my_job": {
                "type": "entity", // Should be "job"
                "name": "nested job",
                "job_id": "job789"
            }
        }
    });
    let bad_nested_result = validate_json_schema("type_test_punc.request", jsonb(punc_bad_nested));
    assert_error_count(&bad_nested_result, 1);
    assert_has_error(&bad_nested_result, "TYPE_MISMATCH", "/nested_or_super_job/my_job/type");

    // Case 7: wrong discriminator inside a oneOf branch. The failing branch
    // also trips the sibling branch's checks, so only assert the specific
    // TYPE_MISMATCH rather than a total error count.
    let punc_bad_oneof = json!({
        "root_job": {
            "type": "job",
            "name": "root job",
            "job_id": "job456"
        },
        "nested_or_super_job": {
            "type": "job", // Should be "super_job"
            "name": "nested super job",
            "job_id": "job789",
            "manager_id": "mgr2"
        }
    });
    let bad_oneof_result = validate_json_schema("type_test_punc.request", jsonb(punc_bad_oneof));
    assert_has_error(&bad_oneof_result, "TYPE_MISMATCH", "/nested_or_super_job/type");
}

View File

@ -1 +1 @@
1.0.33
1.0.36