// Source listing metadata (not code): 860 lines, 28 KiB, Rust.
use pgrx::*;

// Required by pgrx: emits the magic symbols Postgres inspects to recognize
// this shared library as an extension module.
pg_module_magic!();

// boon supplies the JSON Schema compiler/validator used throughout this file.
use boon::{CompileError, Compiler, ErrorKind, SchemaIndex, Schemas, ValidationError, Type, Types, ValidationOptions};
use lazy_static::lazy_static;
use serde_json::{json, Value, Number};
use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{collections::{HashMap, HashSet}, sync::RwLock};
/// Category a cached schema was registered under. The category controls
/// validation behavior: `PublicPunc` schemas are validated with
/// `be_strict: true` in `validate_json_schema`; all others use defaults.
#[derive(Clone, Copy, Debug, PartialEq)]
enum SchemaType {
    Enum,
    Type,
    Family, // Added for generated hierarchy schemas
    PublicPunc,
    PrivatePunc,
}
/// One compiled schema: its index into the shared boon `Schemas` store plus
/// the category it was registered under.
struct Schema {
    index: SchemaIndex,
    t: SchemaType,
}
/// Process-wide schema cache: the boon `Schemas` store holding compiled
/// schemas, and a lookup map from schema `$id` to its compiled entry.
struct Cache {
    schemas: Schemas,
    map: HashMap<String, Schema>,
}
// Structure to hold error information without lifetimes
// (boon's ValidationError borrows from the instance; this owns its data).
#[derive(Debug)]
struct Error {
    path: String,    // JSON-pointer location of the failing value
    code: String,    // machine-readable code, e.g. "TYPE_MISMATCH"
    message: String, // human-readable description
    cause: Value, // Changed from String to Value to store JSON
}
lazy_static! {
    // Global schema cache shared by every function in this extension.
    // RwLock: validation takes read locks (may run concurrently); the
    // cache/clear functions take the write lock exclusively.
    static ref SCHEMA_CACHE: RwLock<Cache> = RwLock::new(Cache {
        schemas: Schemas::new(),
        map: HashMap::new(),
    });
}
#[pg_extern(strict)]
|
|
fn cache_json_schemas(enums: JsonB, types: JsonB, puncs: JsonB) -> JsonB {
|
|
let mut cache = SCHEMA_CACHE.write().unwrap();
|
|
let enums_value: Value = enums.0;
|
|
let types_value: Value = types.0;
|
|
let puncs_value: Value = puncs.0;
|
|
|
|
*cache = Cache {
|
|
schemas: Schemas::new(),
|
|
map: HashMap::new(),
|
|
};
|
|
|
|
let mut compiler = Compiler::new();
|
|
compiler.enable_format_assertions();
|
|
|
|
let mut errors = Vec::new();
|
|
let mut schemas_to_compile = Vec::new();
|
|
|
|
// Phase 1: Enums
|
|
if let Some(enums_array) = enums_value.as_array() {
|
|
for enum_row in enums_array {
|
|
if let Some(schemas_raw) = enum_row.get("schemas") {
|
|
if let Some(schemas_array) = schemas_raw.as_array() {
|
|
for schema_def in schemas_array {
|
|
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
|
|
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), SchemaType::Enum));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Phase 2: Types & Hierarchy Pre-processing
|
|
let mut hierarchy_map: HashMap<String, HashSet<String>> = HashMap::new();
|
|
if let Some(types_array) = types_value.as_array() {
|
|
for type_row in types_array {
|
|
// Process main schemas for the type
|
|
if let Some(schemas_raw) = type_row.get("schemas") {
|
|
if let Some(schemas_array) = schemas_raw.as_array() {
|
|
for schema_def in schemas_array {
|
|
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
|
|
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), SchemaType::Type));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Process hierarchy to build .family enums
|
|
if let Some(type_name) = type_row.get("name").and_then(|v| v.as_str()) {
|
|
if let Some(hierarchy_raw) = type_row.get("hierarchy") {
|
|
if let Some(hierarchy_array) = hierarchy_raw.as_array() {
|
|
for ancestor_val in hierarchy_array {
|
|
if let Some(ancestor_name) = ancestor_val.as_str() {
|
|
hierarchy_map
|
|
.entry(ancestor_name.to_string())
|
|
.or_default()
|
|
.insert(type_name.to_string());
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Generate and add the .family schemas
|
|
for (base_type, descendant_types) in hierarchy_map {
|
|
let family_schema_id = format!("{}.family", base_type);
|
|
let enum_values: Vec<String> = descendant_types.into_iter().collect();
|
|
let family_schema = json!({
|
|
"$id": family_schema_id,
|
|
"type": "string",
|
|
"enum": enum_values
|
|
});
|
|
schemas_to_compile.push((family_schema_id, family_schema, SchemaType::Family));
|
|
}
|
|
|
|
// Phase 3: Puncs
|
|
if let Some(puncs_array) = puncs_value.as_array() {
|
|
for punc_row in puncs_array {
|
|
if let Some(punc_obj) = punc_row.as_object() {
|
|
if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
|
|
let is_public = punc_obj.get("public").and_then(|v| v.as_bool()).unwrap_or(false);
|
|
let punc_schema_type = if is_public { SchemaType::PublicPunc } else { SchemaType::PrivatePunc };
|
|
if let Some(schemas_raw) = punc_obj.get("schemas") {
|
|
if let Some(schemas_array) = schemas_raw.as_array() {
|
|
for schema_def in schemas_array {
|
|
if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
|
|
let request_schema_id = format!("{}.request", punc_name);
|
|
let response_schema_id = format!("{}.response", punc_name);
|
|
let schema_type_for_def = if schema_id == request_schema_id || schema_id == response_schema_id {
|
|
punc_schema_type
|
|
} else {
|
|
SchemaType::Type
|
|
};
|
|
schemas_to_compile.push((schema_id.to_string(), schema_def.clone(), schema_type_for_def));
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Add all resources to compiler first
|
|
for (id, value, schema_type) in &schemas_to_compile {
|
|
add_schema_resource(&mut compiler, id, value.clone(), *schema_type, &mut errors);
|
|
}
|
|
|
|
if !errors.is_empty() {
|
|
return JsonB(json!({ "errors": errors }));
|
|
}
|
|
|
|
// Compile all schemas
|
|
compile_all_schemas(&mut compiler, &mut cache, &schemas_to_compile, &mut errors);
|
|
|
|
if errors.is_empty() {
|
|
JsonB(json!({ "response": "success" }))
|
|
} else {
|
|
JsonB(json!({ "errors": errors }))
|
|
}
|
|
}
|
|
|
|
// Helper function to add a schema resource (without compiling)
|
|
fn add_schema_resource(
|
|
compiler: &mut Compiler,
|
|
schema_id: &str,
|
|
schema_value: Value,
|
|
_schema_type: SchemaType,
|
|
errors: &mut Vec<Value>
|
|
) {
|
|
if let Err(e) = compiler.add_resource(schema_id, schema_value) {
|
|
errors.push(json!({
|
|
"code": "SCHEMA_RESOURCE_FAILED",
|
|
"message": format!("Failed to add schema resource '{}'", schema_id),
|
|
"details": { "schema": schema_id, "cause": format!("{}", e) }
|
|
}));
|
|
}
|
|
}
|
|
|
|
// Helper function to compile all added resources
|
|
fn compile_all_schemas(
|
|
compiler: &mut Compiler,
|
|
cache: &mut Cache,
|
|
schemas_to_compile: &[(String, Value, SchemaType)],
|
|
errors: &mut Vec<Value>,
|
|
) {
|
|
for (id, value, schema_type) in schemas_to_compile {
|
|
match compiler.compile(id, &mut cache.schemas) {
|
|
Ok(index) => {
|
|
cache.map.insert(id.clone(), Schema { index, t: *schema_type });
|
|
}
|
|
Err(e) => {
|
|
match &e {
|
|
CompileError::ValidationError { src, .. } => {
|
|
let mut error_list = Vec::new();
|
|
collect_errors(src, &mut error_list);
|
|
let formatted_errors = format_errors(error_list, value, id);
|
|
errors.extend(formatted_errors);
|
|
}
|
|
_ => {
|
|
errors.push(json!({
|
|
"code": "SCHEMA_COMPILATION_FAILED",
|
|
"message": format!("Schema '{}' compilation failed", id),
|
|
"details": { "schema": id, "cause": format!("{:?}", e) }
|
|
}));
|
|
}
|
|
};
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
#[pg_extern(strict, parallel_safe)]
|
|
fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
|
|
let cache = SCHEMA_CACHE.read().unwrap();
|
|
match cache.map.get(schema_id) {
|
|
None => JsonB(json!({
|
|
"errors": [{
|
|
"code": "SCHEMA_NOT_FOUND",
|
|
"message": format!("Schema '{}' not found in cache", schema_id),
|
|
"details": {
|
|
"schema": schema_id,
|
|
"cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
|
|
}
|
|
}]
|
|
})),
|
|
Some(schema) => {
|
|
let instance_value: Value = instance.0;
|
|
let options = match schema.t {
|
|
SchemaType::PublicPunc => Some(ValidationOptions { be_strict: true }),
|
|
_ => None,
|
|
};
|
|
|
|
match cache.schemas.validate(&instance_value, schema.index, options) {
|
|
Ok(_) => {
|
|
JsonB(json!({ "response": "success" }))
|
|
}
|
|
Err(validation_error) => {
|
|
let mut error_list = Vec::new();
|
|
collect_errors(&validation_error, &mut error_list);
|
|
let errors = format_errors(error_list, &instance_value, schema_id);
|
|
if errors.is_empty() {
|
|
JsonB(json!({ "response": "success" }))
|
|
} else {
|
|
JsonB(json!({ "errors": errors }))
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Recursively collects validation errors
/// Walks a boon `ValidationError` tree depth-first and appends one or more
/// flat `Error` records per leaf to `errors_list`. Structural nodes
/// (group/allOf/anyOf/not/oneOf) and any node with nested causes are never
/// reported themselves — only their leaf causes are.
fn collect_errors(error: &ValidationError, errors_list: &mut Vec<Error>) {
    // Check if this is a structural error that we should skip
    let is_structural = matches!(
        &error.kind,
        ErrorKind::Group | ErrorKind::AllOf | ErrorKind::AnyOf | ErrorKind::Not | ErrorKind::OneOf(_)
    );

    // Recurse into causes instead of reporting this node. Note: a structural
    // error with NO causes emits nothing at all here.
    if !error.causes.is_empty() || is_structural {
        for cause in &error.causes {
            collect_errors(cause, errors_list);
        }
        return
    }

    // Leaf error: dispatch on the kind to a handler that knows how to phrase
    // the message and build the structured cause payload.
    let base_path = error.instance_location.to_string();
    let errors_to_add = match &error.kind {
        ErrorKind::Type { got, want } => handle_type_error(&base_path, got, want),
        ErrorKind::Required { want } => handle_required_error(&base_path, want),
        ErrorKind::Dependency { prop, missing } => handle_dependency_error(&base_path, prop, missing, false),
        ErrorKind::DependentRequired { prop, missing } => handle_dependency_error(&base_path, prop, missing, true),
        ErrorKind::AdditionalProperties { got } => handle_additional_properties_error(&base_path, got),
        ErrorKind::Enum { want } => handle_enum_error(&base_path, want),
        ErrorKind::Const { want } => handle_const_error(&base_path, want),
        ErrorKind::MinLength { got, want } => handle_min_length_error(&base_path, *got, *want),
        ErrorKind::MaxLength { got, want } => handle_max_length_error(&base_path, *got, *want),
        ErrorKind::Pattern { got, want } => handle_pattern_error(&base_path, got, want),
        ErrorKind::Minimum { got, want } => handle_minimum_error(&base_path, got, want),
        ErrorKind::Maximum { got, want } => handle_maximum_error(&base_path, got, want),
        ErrorKind::ExclusiveMinimum { got, want } => handle_exclusive_minimum_error(&base_path, got, want),
        ErrorKind::ExclusiveMaximum { got, want } => handle_exclusive_maximum_error(&base_path, got, want),
        ErrorKind::MultipleOf { got, want } => handle_multiple_of_error(&base_path, got, want),
        ErrorKind::MinItems { got, want } => handle_min_items_error(&base_path, *got, *want),
        ErrorKind::MaxItems { got, want } => handle_max_items_error(&base_path, *got, *want),
        ErrorKind::UniqueItems { got } => handle_unique_items_error(&base_path, got),
        ErrorKind::MinProperties { got, want } => handle_min_properties_error(&base_path, *got, *want),
        ErrorKind::MaxProperties { got, want } => handle_max_properties_error(&base_path, *got, *want),
        ErrorKind::AdditionalItems { got } => handle_additional_items_error(&base_path, *got),
        ErrorKind::Format { want, got, err } => handle_format_error(&base_path, want, got, err),
        ErrorKind::PropertyName { prop } => handle_property_name_error(&base_path, prop),
        ErrorKind::Contains => handle_contains_error(&base_path),
        ErrorKind::MinContains { got, want } => handle_min_contains_error(&base_path, got, *want),
        ErrorKind::MaxContains { got, want } => handle_max_contains_error(&base_path, got, *want),
        ErrorKind::ContentEncoding { want, err } => handle_content_encoding_error(&base_path, want, err),
        ErrorKind::ContentMediaType { want, err, .. } => handle_content_media_type_error(&base_path, want, err),
        ErrorKind::FalseSchema => handle_false_schema_error(&base_path),
        // The structural kinds below are filtered out by the early return
        // above; their arms are kept so the match stays exhaustive.
        ErrorKind::Not => handle_not_error(&base_path),
        ErrorKind::RefCycle { url, kw_loc1, kw_loc2 } => handle_ref_cycle_error(&base_path, url, kw_loc1, kw_loc2),
        ErrorKind::Reference { kw, url } => handle_reference_error(&base_path, kw, url),
        ErrorKind::Schema { url } => handle_schema_error(&base_path, url),
        ErrorKind::ContentSchema => handle_content_schema_error(&base_path),
        ErrorKind::Group => handle_group_error(&base_path),
        ErrorKind::AllOf => handle_all_of_error(&base_path),
        ErrorKind::AnyOf => handle_any_of_error(&base_path),
        ErrorKind::OneOf(matched) => handle_one_of_error(&base_path, matched),
    };

    errors_list.extend(errors_to_add);
}
// Handler functions for each error kind
|
|
fn handle_type_error(base_path: &str, got: &Type, want: &Types) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "TYPE_MISMATCH".to_string(),
|
|
message: format!("Expected {} but got {}",
|
|
want.iter().map(|t| t.to_string()).collect::<Vec<_>>().join(" or "),
|
|
got
|
|
),
|
|
cause: json!({
|
|
"got": got.to_string(),
|
|
"want": want.iter().map(|t| t.to_string()).collect::<Vec<_>>()
|
|
}),
|
|
}]
|
|
}
|
|
|
|
fn handle_required_error(base_path: &str, want: &[&str]) -> Vec<Error> {
|
|
// Create a separate error for each missing required field
|
|
want.iter().map(|missing_field| {
|
|
let field_path = if base_path.is_empty() {
|
|
format!("/{}", missing_field)
|
|
} else {
|
|
format!("{}/{}", base_path, missing_field)
|
|
};
|
|
|
|
Error {
|
|
path: field_path,
|
|
code: "REQUIRED_FIELD_MISSING".to_string(),
|
|
message: format!("Required field '{}' is missing", missing_field),
|
|
cause: json!({ "want": [missing_field] }),
|
|
}
|
|
}).collect()
|
|
}
|
|
|
|
fn handle_dependency_error(base_path: &str, prop: &str, missing: &[&str], is_dependent_required: bool) -> Vec<Error> {
|
|
// Create a separate error for each missing field
|
|
missing.iter().map(|missing_field| {
|
|
let field_path = if base_path.is_empty() {
|
|
format!("/{}", missing_field)
|
|
} else {
|
|
format!("{}/{}", base_path, missing_field)
|
|
};
|
|
|
|
let (code, message) = if is_dependent_required {
|
|
(
|
|
"DEPENDENT_REQUIRED_MISSING".to_string(),
|
|
format!("Field '{}' is required when '{}' is present", missing_field, prop),
|
|
)
|
|
} else {
|
|
(
|
|
"DEPENDENCY_FAILED".to_string(),
|
|
format!("Field '{}' is required when '{}' is present", missing_field, prop),
|
|
)
|
|
};
|
|
|
|
Error {
|
|
path: field_path,
|
|
code,
|
|
message,
|
|
cause: json!({ "prop": prop, "missing": [missing_field] }),
|
|
}
|
|
}).collect()
|
|
}
|
|
|
|
fn handle_additional_properties_error(base_path: &str, got: &[Cow<str>]) -> Vec<Error> {
|
|
let mut errors = Vec::new();
|
|
for extra_prop in got {
|
|
let field_path = if base_path.is_empty() {
|
|
format!("/{}", extra_prop)
|
|
} else {
|
|
format!("{}/{}", base_path, extra_prop)
|
|
};
|
|
errors.push(Error {
|
|
path: field_path,
|
|
code: "ADDITIONAL_PROPERTIES_NOT_ALLOWED".to_string(),
|
|
message: format!("Property '{}' is not allowed", extra_prop),
|
|
cause: json!({ "got": [extra_prop.to_string()] }),
|
|
});
|
|
}
|
|
errors
|
|
}
|
|
|
|
fn handle_enum_error(base_path: &str, want: &[Value]) -> Vec<Error> {
|
|
let message = if want.len() == 1 {
|
|
format!("Value must be {}", serde_json::to_string(&want[0]).unwrap_or_else(|_| "unknown".to_string()))
|
|
} else {
|
|
format!("Value must be one of: {}",
|
|
want.iter()
|
|
.map(|v| serde_json::to_string(v).unwrap_or_else(|_| "unknown".to_string()))
|
|
.collect::<Vec<_>>()
|
|
.join(", ")
|
|
)
|
|
};
|
|
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "ENUM_VIOLATED".to_string(),
|
|
message,
|
|
cause: json!({ "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_const_error(base_path: &str, want: &Value) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "CONST_VIOLATED".to_string(),
|
|
message: format!("Value must be exactly {}", serde_json::to_string(want).unwrap_or_else(|_| "unknown".to_string())),
|
|
cause: json!({ "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_min_length_error(base_path: &str, got: usize, want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MIN_LENGTH_VIOLATED".to_string(),
|
|
message: format!("String length must be at least {} characters, but got {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_max_length_error(base_path: &str, got: usize, want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MAX_LENGTH_VIOLATED".to_string(),
|
|
message: format!("String length must be at most {} characters, but got {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_pattern_error(base_path: &str, got: &Cow<str>, want: &str) -> Vec<Error> {
|
|
let display_value = if got.len() > 50 {
|
|
format!("{}...", &got[..50])
|
|
} else {
|
|
got.to_string()
|
|
};
|
|
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "PATTERN_VIOLATED".to_string(),
|
|
message: format!("Value '{}' does not match pattern '{}'", display_value, want),
|
|
cause: json!({ "got": got.to_string(), "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_minimum_error(base_path: &str, got: &Cow<Number>, want: &Number) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MINIMUM_VIOLATED".to_string(),
|
|
message: format!("Value must be at least {}, but got {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_maximum_error(base_path: &str, got: &Cow<Number>, want: &Number) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MAXIMUM_VIOLATED".to_string(),
|
|
message: format!("Value must be at most {}, but got {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_exclusive_minimum_error(base_path: &str, got: &Cow<Number>, want: &Number) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "EXCLUSIVE_MINIMUM_VIOLATED".to_string(),
|
|
message: format!("Value must be greater than {}, but got {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_exclusive_maximum_error(base_path: &str, got: &Cow<Number>, want: &Number) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "EXCLUSIVE_MAXIMUM_VIOLATED".to_string(),
|
|
message: format!("Value must be less than {}, but got {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_multiple_of_error(base_path: &str, got: &Cow<Number>, want: &Number) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MULTIPLE_OF_VIOLATED".to_string(),
|
|
message: format!("{} is not a multiple of {}", got, want),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_min_items_error(base_path: &str, got: usize, want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MIN_ITEMS_VIOLATED".to_string(),
|
|
message: format!("Array must have at least {} items, but has {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_max_items_error(base_path: &str, got: usize, want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MAX_ITEMS_VIOLATED".to_string(),
|
|
message: format!("Array must have at most {} items, but has {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_unique_items_error(base_path: &str, got: &[usize; 2]) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "UNIQUE_ITEMS_VIOLATED".to_string(),
|
|
message: format!("Array items at positions {} and {} are duplicates", got[0], got[1]),
|
|
cause: json!({ "got": got }),
|
|
}]
|
|
}
|
|
|
|
fn handle_min_properties_error(base_path: &str, got: usize, want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MIN_PROPERTIES_VIOLATED".to_string(),
|
|
message: format!("Object must have at least {} properties, but has {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_max_properties_error(base_path: &str, got: usize, want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MAX_PROPERTIES_VIOLATED".to_string(),
|
|
message: format!("Object must have at most {} properties, but has {}", want, got),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_additional_items_error(base_path: &str, got: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "ADDITIONAL_ITEMS_NOT_ALLOWED".to_string(),
|
|
message: format!("Last {} array items are not allowed", got),
|
|
cause: json!({ "got": got }),
|
|
}]
|
|
}
|
|
|
|
fn handle_format_error(base_path: &str, want: &str, got: &Cow<Value>, err: &Box<dyn std::error::Error>) -> Vec<Error> {
|
|
// If the value is an empty string, skip format validation.
|
|
if let Value::String(s) = got.as_ref() {
|
|
if s.is_empty() {
|
|
return vec![];
|
|
}
|
|
}
|
|
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "FORMAT_INVALID".to_string(),
|
|
message: format!("Value {} is not a valid {} format",
|
|
serde_json::to_string(got.as_ref()).unwrap_or_else(|_| "unknown".to_string()),
|
|
want
|
|
),
|
|
cause: json!({ "got": got, "want": want, "err": err.to_string() }),
|
|
}]
|
|
}
|
|
|
|
fn handle_property_name_error(base_path: &str, prop: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "INVALID_PROPERTY_NAME".to_string(),
|
|
message: format!("Property name '{}' is invalid", prop),
|
|
cause: json!({ "prop": prop }),
|
|
}]
|
|
}
|
|
|
|
fn handle_contains_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "CONTAINS_FAILED".to_string(),
|
|
message: "No array items match the required schema".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_min_contains_error(base_path: &str, got: &[usize], want: usize) -> Vec<Error> {
|
|
let message = if got.is_empty() {
|
|
format!("At least {} array items must match the schema, but none do", want)
|
|
} else {
|
|
format!("At least {} array items must match the schema, but only {} do (at positions {})",
|
|
want,
|
|
got.len(),
|
|
got.iter().map(|i| i.to_string()).collect::<Vec<_>>().join(", ")
|
|
)
|
|
};
|
|
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MIN_CONTAINS_VIOLATED".to_string(),
|
|
message,
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_max_contains_error(base_path: &str, got: &[usize], want: usize) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "MAX_CONTAINS_VIOLATED".to_string(),
|
|
message: format!("At most {} array items can match the schema, but {} do (at positions {})",
|
|
want,
|
|
got.len(),
|
|
got.iter().map(|i| i.to_string()).collect::<Vec<_>>().join(", ")
|
|
),
|
|
cause: json!({ "got": got, "want": want }),
|
|
}]
|
|
}
|
|
|
|
fn handle_content_encoding_error(base_path: &str, want: &str, err: &Box<dyn std::error::Error>) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "CONTENT_ENCODING_INVALID".to_string(),
|
|
message: format!("Content is not valid {} encoding: {}", want, err),
|
|
cause: json!({ "want": want, "err": err.to_string() }),
|
|
}]
|
|
}
|
|
|
|
fn handle_content_media_type_error(base_path: &str, want: &str, err: &Box<dyn std::error::Error>) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "CONTENT_MEDIA_TYPE_INVALID".to_string(),
|
|
message: format!("Content is not valid {} media type: {}", want, err),
|
|
cause: json!({ "want": want, "err": err.to_string() }),
|
|
}]
|
|
}
|
|
|
|
fn handle_false_schema_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "FALSE_SCHEMA".to_string(),
|
|
message: "This schema always fails validation".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_not_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "NOT_VIOLATED".to_string(),
|
|
message: "Value matches a schema that it should not match".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_ref_cycle_error(base_path: &str, url: &str, kw_loc1: &str, kw_loc2: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "REFERENCE_CYCLE".to_string(),
|
|
message: format!("Reference cycle detected: both '{}' and '{}' resolve to '{}'", kw_loc1, kw_loc2, url),
|
|
cause: json!({ "url": url, "kw_loc1": kw_loc1, "kw_loc2": kw_loc2 }),
|
|
}]
|
|
}
|
|
|
|
fn handle_reference_error(base_path: &str, kw: &str, url: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "REFERENCE_FAILED".to_string(),
|
|
message: format!("{} reference to '{}' failed validation", kw, url),
|
|
cause: json!({ "kw": kw, "url": url }),
|
|
}]
|
|
}
|
|
|
|
fn handle_schema_error(base_path: &str, url: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "SCHEMA_FAILED".to_string(),
|
|
message: format!("Schema '{}' validation failed", url),
|
|
cause: json!({ "url": url }),
|
|
}]
|
|
}
|
|
|
|
fn handle_content_schema_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "CONTENT_SCHEMA_FAILED".to_string(),
|
|
message: "Content schema validation failed".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_group_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "VALIDATION_FAILED".to_string(),
|
|
message: "Validation failed".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_all_of_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "ALL_OF_VIOLATED".to_string(),
|
|
message: "Value does not match all required schemas".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_any_of_error(base_path: &str) -> Vec<Error> {
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "ANY_OF_VIOLATED".to_string(),
|
|
message: "Value does not match any of the allowed schemas".to_string(),
|
|
cause: json!({}),
|
|
}]
|
|
}
|
|
|
|
fn handle_one_of_error(base_path: &str, matched: &Option<(usize, usize)>) -> Vec<Error> {
|
|
let (message, cause) = match matched {
|
|
None => (
|
|
"Value must match exactly one schema, but matches none".to_string(),
|
|
json!({ "matched_indices": null })
|
|
),
|
|
Some((i, j)) => (
|
|
format!("Value must match exactly one schema, but matches schemas at positions {} and {}", i, j),
|
|
json!({ "matched_indices": [i, j] })
|
|
),
|
|
};
|
|
|
|
vec![Error {
|
|
path: base_path.to_string(),
|
|
code: "ONE_OF_VIOLATED".to_string(),
|
|
message,
|
|
cause,
|
|
}]
|
|
}
|
|
|
|
// Formats errors according to DropError structure
|
|
fn format_errors(errors: Vec<Error>, instance: &Value, schema_id: &str) -> Vec<Value> {
|
|
let mut unique_errors: HashMap<String, Value> = HashMap::new();
|
|
for error in errors {
|
|
let error_path = error.path.clone();
|
|
if let Entry::Vacant(entry) = unique_errors.entry(error_path.clone()) {
|
|
let failing_value = extract_value_at_path(instance, &error.path);
|
|
entry.insert(json!({
|
|
"code": error.code,
|
|
"message": error.message,
|
|
"details": {
|
|
"path": error.path,
|
|
"context": failing_value,
|
|
"cause": error.cause,
|
|
"schema": schema_id
|
|
}
|
|
}));
|
|
}
|
|
}
|
|
|
|
unique_errors.into_values().collect::<Vec<Value>>()
|
|
}
|
|
|
|
// Helper function to extract value at a JSON pointer path
|
|
fn extract_value_at_path(instance: &Value, path: &str) -> Value {
|
|
let parts: Vec<&str> = path.split('/').filter(|s| !s.is_empty()).collect();
|
|
let mut current = instance;
|
|
|
|
for part in parts {
|
|
match current {
|
|
Value::Object(map) => {
|
|
if let Some(value) = map.get(part) {
|
|
current = value;
|
|
} else {
|
|
return Value::Null;
|
|
}
|
|
}
|
|
Value::Array(arr) => {
|
|
if let Ok(index) = part.parse::<usize>() {
|
|
if let Some(value) = arr.get(index) {
|
|
current = value;
|
|
} else {
|
|
return Value::Null;
|
|
}
|
|
} else {
|
|
return Value::Null;
|
|
}
|
|
}
|
|
_ => return Value::Null,
|
|
}
|
|
}
|
|
|
|
current.clone()
|
|
}
|
|
|
|
#[pg_extern(strict, parallel_safe)]
|
|
fn json_schema_cached(schema_id: &str) -> bool {
|
|
let cache = SCHEMA_CACHE.read().unwrap();
|
|
cache.map.contains_key(schema_id)
|
|
}
|
|
|
|
#[pg_extern(strict)]
|
|
fn clear_json_schemas() -> JsonB {
|
|
let mut cache = SCHEMA_CACHE.write().unwrap();
|
|
*cache = Cache {
|
|
schemas: Schemas::new(),
|
|
map: HashMap::new(),
|
|
};
|
|
JsonB(json!({ "response": "success" }))
|
|
}
|
|
|
|
#[pg_extern(strict, parallel_safe)]
|
|
fn show_json_schemas() -> JsonB {
|
|
let cache = SCHEMA_CACHE.read().unwrap();
|
|
let ids: Vec<String> = cache.map.keys().cloned().collect();
|
|
JsonB(json!({ "response": ids }))
|
|
}
|
|
|
|
/// This module is required by `cargo pgrx test` invocations.
/// It must be visible at the root of your extension crate.
#[cfg(test)]
pub mod pg_test {
    // Called once before any pg_test runs.
    pub fn setup(_options: Vec<&str>) {
        // perform one-off initialization when the pg_test framework starts
    }

    // Extra postgresql.conf lines the test cluster should be started with.
    #[must_use]
    pub fn postgresql_conf_options() -> Vec<&'static str> {
        // return any postgresql.conf settings that are required for your tests
        vec![]
    }
}
// Shared test helpers, textually included only in test builds.
#[cfg(any(test, feature = "pg_test"))]
mod helpers {
    include!("helpers.rs");
}
// Test fixture schemas, textually included only in test builds.
#[cfg(any(test, feature = "pg_test"))]
mod schemas {
    include!("schemas.rs");
}
// pg_test suite, textually included only in test builds; #[pg_schema]
// places the tests in their own Postgres schema.
#[cfg(any(test, feature = "pg_test"))]
#[pg_schema]
mod tests {
    include!("tests.rs");
}