jspg updates for punc-v2

src/lib.rs (208 changed lines)

@@ -31,66 +31,226 @@ lazy_static! {
 }

 #[pg_extern(strict)]
-fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
+fn cache_json_schemas(types: JsonB, puncs: JsonB) -> JsonB {
     let mut cache = SCHEMA_CACHE.write().unwrap();
-    let mut schema_value: Value = schema.0;
-    let schema_path = format!("urn:{}", schema_id);
+    let types_value: Value = types.0;
+    let puncs_value: Value = puncs.0;

-    // Apply strict validation to all objects in the schema if requested
-    if strict {
-        apply_strict_validation(&mut schema_value);
-    }
     // Clear existing cache
     *cache = BoonCache {
         schemas: Schemas::new(),
         id_to_index: HashMap::new(),
     };

     // Create the boon compiler and enable format assertions
     let mut compiler = Compiler::new();
     compiler.enable_format_assertions();

-    // Use schema_path when adding the resource
-    if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
-        return JsonB(json!({
-            "errors": [{
-                "code": "SCHEMA_RESOURCE_ADD_FAILED",
+    let mut errors = Vec::new();
+
+    // Track all schema IDs for compilation
+    let mut all_schema_ids = Vec::new();
+
+    // Phase 1: Add all type schemas as resources (these are referenced by puncs)
+    // Types are never strict - they're reusable building blocks
+    if let Some(types_array) = types_value.as_array() {
+        for type_row in types_array {
+            if let Some(type_obj) = type_row.as_object() {
+                if let (Some(type_name), Some(schemas_raw)) = (
+                    type_obj.get("name").and_then(|v| v.as_str()),
+                    type_obj.get("schemas")
+                ) {
+                    // Parse the schemas JSONB field
+                    if let Some(schemas_array) = schemas_raw.as_array() {
+                        for schema_def in schemas_array {
+                            if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
+                                if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), false, &mut errors) {
+                                    errors.push(json!({
+                                        "code": "TYPE_SCHEMA_RESOURCE_FAILED",
+                                        "message": format!("Failed to add schema resource '{}' for type '{}'", schema_id, type_name),
+                                        "details": {
+                                            "type_name": type_name,
+                                            "schema_id": schema_id,
+                                            "cause": format!("{}", e)
+                                        }
+                                    }));
+                                } else {
+                                    all_schema_ids.push(schema_id.to_string());
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    // Phase 2: Add all punc schemas as resources (these may reference type schemas)
+    // Each punc gets strict validation based on its public field
+    if let Some(puncs_array) = puncs_value.as_array() {
+        for punc_row in puncs_array {
+            if let Some(punc_obj) = punc_row.as_object() {
+                if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
+                    // Get the strict setting for this specific punc (public = strict)
+                    let punc_strict = punc_obj.get("public")
+                        .and_then(|v| v.as_bool())
+                        .unwrap_or(false);
+
+                    // Add punc local schemas as resources (from schemas field) - use $id directly (universal)
+                    if let Some(schemas_raw) = punc_obj.get("schemas") {
+                        if let Some(schemas_array) = schemas_raw.as_array() {
+                            for schema_def in schemas_array {
+                                if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
+                                    if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), punc_strict, &mut errors) {
+                                        errors.push(json!({
+                                            "code": "PUNC_LOCAL_SCHEMA_RESOURCE_FAILED",
+                                            "message": format!("Failed to add local schema resource '{}' for punc '{}'", schema_id, punc_name),
+                                            "details": {
+                                                "punc_name": punc_name,
+                                                "schema_id": schema_id,
+                                                "cause": format!("{}", e)
+                                            }
+                                        }));
+                                    } else {
+                                        all_schema_ids.push(schema_id.to_string());
+                                    }
+                                }
+                            }
+                        }
+                    }
+
+                    // Add request schema as resource if present - use {punc_name}.request
+                    if let Some(request_schema) = punc_obj.get("request") {
+                        if !request_schema.is_null() {
+                            let request_schema_id = format!("{}.request", punc_name);
+                            if let Err(e) = add_schema_resource(&mut compiler, &request_schema_id, request_schema.clone(), punc_strict, &mut errors) {
+                                errors.push(json!({
+                                    "code": "PUNC_REQUEST_SCHEMA_RESOURCE_FAILED",
+                                    "message": format!("Failed to add request schema resource for punc '{}'", punc_name),
+                                    "details": {
+                                        "punc_name": punc_name,
+                                        "schema_id": request_schema_id,
+                                        "cause": format!("{}", e)
+                                    }
+                                }));
+                            } else {
+                                all_schema_ids.push(request_schema_id);
+                            }
+                        }
+                    }
+
+                    // Add response schema as resource if present - use {punc_name}.response
+                    if let Some(response_schema) = punc_obj.get("response") {
+                        if !response_schema.is_null() {
+                            let response_schema_id = format!("{}.response", punc_name);
+                            if let Err(e) = add_schema_resource(&mut compiler, &response_schema_id, response_schema.clone(), punc_strict, &mut errors) {
+                                errors.push(json!({
+                                    "code": "PUNC_RESPONSE_SCHEMA_RESOURCE_FAILED",
+                                    "message": format!("Failed to add response schema resource for punc '{}'", punc_name),
+                                    "details": {
+                                        "punc_name": punc_name,
+                                        "schema_id": response_schema_id,
+                                        "cause": format!("{}", e)
+                                    }
+                                }));
+                            } else {
+                                all_schema_ids.push(response_schema_id);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    // Phase 3: Compile all schemas now that all resources are added
+    if !errors.is_empty() {
+        // If we had errors adding resources, don't attempt compilation
+        return JsonB(json!({ "errors": errors }));
+    }
+
+    if let Err(_) = compile_all_schemas(&mut compiler, &mut cache, &all_schema_ids, &mut errors) {
+        // compile_all_schemas already adds errors to the errors vector
+    }
+
+    if errors.is_empty() {
+        JsonB(json!({ "response": "success" }))
+    } else {
+        JsonB(json!({ "errors": errors }))
+    }
+}
+
+// Helper function to add a schema resource (without compiling)
+fn add_schema_resource(
+    compiler: &mut Compiler,
+    schema_id: &str,
+    mut schema_value: Value,
+    strict: bool,
+    errors: &mut Vec<Value>
+) -> Result<(), String> {
+    // Apply strict validation to all objects in the schema if requested
+    if strict {
+        apply_strict_validation(&mut schema_value);
+    }
+
+    // Use schema_id directly - simple IDs like "entity", "user", "punc.request"
+    if let Err(e) = compiler.add_resource(schema_id, schema_value.clone()) {
+        errors.push(json!({
+            "code": "SCHEMA_RESOURCE_FAILED",
             "message": format!("Failed to add schema resource '{}'", schema_id),
             "details": {
                 "schema": schema_id,
                 "cause": format!("{}", e)
             }
-            }]
         }));
+        return Err(format!("Failed to add schema resource: {}", e));
     }

-    // Use schema_path when compiling
-    match compiler.compile(&schema_path, &mut cache.schemas) {
+    Ok(())
+}
+
+// Helper function to compile all added resources
+fn compile_all_schemas(
+    compiler: &mut Compiler,
+    cache: &mut BoonCache,
+    schema_ids: &[String],
+    errors: &mut Vec<Value>
+) -> Result<(), String> {
+    for schema_id in schema_ids {
+        match compiler.compile(schema_id, &mut cache.schemas) {
            Ok(sch_index) => {
                // Store the index using the original schema_id as the key
                cache.id_to_index.insert(schema_id.to_string(), sch_index);
-               JsonB(json!({ "response": "success" }))
            }
            Err(e) => {
-               let errors = match &e {
+               match &e {
                    CompileError::ValidationError { url: _url, src } => {
                        // Collect leaf errors from the meta-schema validation failure
                        let mut error_list = Vec::new();
                        collect_errors(src, &mut error_list);
-                       // Filter and format errors properly - no instance for schema compilation
-                       format_errors(error_list, &schema_value, schema_id)
+                       // Get schema value for error formatting - we'll need to reconstruct or store it
+                       let schema_value = json!({}); // Placeholder - we don't have the original value here
+                       let formatted_errors = format_errors(error_list, &schema_value, schema_id);
+                       errors.extend(formatted_errors);
                    }
                    _ => {
                        // Other compilation errors
-                       vec![json!({
+                       errors.push(json!({
                            "code": "SCHEMA_COMPILATION_FAILED",
                            "message": format!("Schema '{}' compilation failed", schema_id),
                            "details": {
                                "schema": schema_id,
                                "cause": format!("{:?}", e)
                            }
-                       })]
+                       }));
                    }
                };
-               JsonB(json!({ "errors": errors }))
+               return Err(format!("Schema compilation failed: {:?}", e));
            }
        }
    }
+    Ok(())
+}

 // Helper function to apply strict validation to a schema
 //
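The apply_strict_validation helper itself is not part of this diff; only its call sites above and its signature in the next hunk header appear. As a rough, hypothetical sketch of what such a pass typically does (recursively closing object schemas with "additionalProperties": false while skipping subschemas under conditional keywords, which is what the inside_conditional flag suggests), something along these lines would work; the actual helper in src/lib.rs may differ:

    use serde_json::{json, Value};

    // Hypothetical sketch only, not the code from this repository: close every
    // object schema with "additionalProperties": false, but leave subschemas
    // reached through conditional keywords untouched.
    fn apply_strict_validation_sketch(schema: &mut Value, inside_conditional: bool) {
        if let Value::Object(map) = schema {
            let is_object_schema = map.get("type").and_then(Value::as_str) == Some("object")
                || map.contains_key("properties");
            if is_object_schema && !inside_conditional && !map.contains_key("additionalProperties") {
                map.insert("additionalProperties".to_string(), json!(false));
            }
            // Keywords whose subschemas describe alternatives rather than the object itself.
            let conditional = ["if", "then", "else", "not", "anyOf", "oneOf", "allOf"];
            let keys: Vec<String> = map.keys().cloned().collect();
            for key in keys {
                let child_conditional = inside_conditional || conditional.contains(&key.as_str());
                if let Some(child) = map.get_mut(&key) {
                    match child {
                        Value::Object(_) => apply_strict_validation_sketch(child, child_conditional),
                        Value::Array(items) => {
                            for item in items {
                                apply_strict_validation_sketch(item, child_conditional);
                            }
                        }
                        _ => {}
                    }
                }
            }
        }
    }

Types stay non-strict (Phase 1 always passes strict = false), so only the schemas of public puncs get closed this way before caching.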
@@ -135,7 +295,7 @@ fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: boo
 #[pg_extern(strict, parallel_safe)]
 fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
     let cache = SCHEMA_CACHE.read().unwrap();
-    // Lookup uses the original schema_id
+    // Lookup uses the original schema_id - schemas should always be available after bulk caching
     match cache.id_to_index.get(schema_id) {
         None => JsonB(json!({
             "errors": [{
@@ -143,7 +303,7 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
                 "message": format!("Schema '{}' not found in cache", schema_id),
                 "details": {
                     "schema": schema_id,
-                    "cause": "Schema must be cached before validation"
+                    "cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
                 }
             }]
         })),
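With this change the extension is driven as a pair of calls: cache_json_schemas receives every type row and punc row in one shot, and validate_json_schema then resolves schemas by the IDs cached above (a type's "$id", or "{punc_name}.request" / "{punc_name}.response"). The field names below ("name", "schemas", "$id", "public", "request", "response") are the ones the new code reads; the concrete payloads and the SQL in the comments are made-up illustrations, not fixtures from this repository:

    use serde_json::json;

    fn main() {
        // Hypothetical type rows: each carries a "name" and a "schemas" array whose
        // entries are JSON Schemas with an "$id" (added as non-strict resources).
        let types = json!([
            {
                "name": "user",
                "schemas": [
                    { "$id": "user", "type": "object",
                      "properties": { "id": { "type": "string" } },
                      "required": ["id"] }
                ]
            }
        ]);

        // Hypothetical punc rows: "public": true means the request/response and any
        // local schemas get the strict (closed-object) treatment before caching.
        let puncs = json!([
            {
                "name": "create_user",
                "public": true,
                "schemas": [],
                "request":  { "type": "object", "properties": { "user": { "$ref": "user" } } },
                "response": { "type": "object", "properties": { "ok": { "type": "boolean" } } }
            }
        ]);

        // From SQL the flow would be roughly (exact call shape depends on the
        // bindings pgrx generates for these #[pg_extern] functions):
        //   SELECT cache_json_schemas(:types::jsonb, :puncs::jsonb);
        //   SELECT validate_json_schema('create_user.request',
        //                               '{"user": {"id": "abc"}}'::jsonb);
        let _ = (types, puncs);
    }

On success cache_json_schemas returns {"response": "success"}; any resource or compilation failure comes back as an {"errors": [...]} array whose entries carry the code, message, and details fields built above.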
src/tests.rs (2342 changed lines)
File diff suppressed because it is too large.