jspg updates for punc-v2
src/lib.rs (254 lines changed):
@@ -31,65 +31,225 @@ lazy_static! {
 #[pg_extern(strict)]
-fn cache_json_schema(schema_id: &str, schema: JsonB, strict: bool) -> JsonB {
+fn cache_json_schemas(types: JsonB, puncs: JsonB) -> JsonB {
     let mut cache = SCHEMA_CACHE.write().unwrap();
-    let mut schema_value: Value = schema.0;
-    let schema_path = format!("urn:{}", schema_id);
+    let types_value: Value = types.0;
+    let puncs_value: Value = puncs.0;

-    // Apply strict validation to all objects in the schema if requested
-    if strict {
-        apply_strict_validation(&mut schema_value);
-    }
+    // Clear existing cache
+    *cache = BoonCache {
+        schemas: Schemas::new(),
+        id_to_index: HashMap::new(),
+    };

     // Create the boon compiler and enable format assertions
     let mut compiler = Compiler::new();
     compiler.enable_format_assertions();

-    // Use schema_path when adding the resource
-    if let Err(e) = compiler.add_resource(&schema_path, schema_value.clone()) {
-        return JsonB(json!({
-            "errors": [{
-                "code": "SCHEMA_RESOURCE_ADD_FAILED",
-                "message": format!("Failed to add schema resource '{}'", schema_id),
-                "details": {
-                    "schema": schema_id,
-                    "cause": format!("{}", e)
-                }
-            }]
-        }));
-    }
-
-    // Use schema_path when compiling
-    match compiler.compile(&schema_path, &mut cache.schemas) {
-        Ok(sch_index) => {
-            // Store the index using the original schema_id as the key
-            cache.id_to_index.insert(schema_id.to_string(), sch_index);
-            JsonB(json!({ "response": "success" }))
-        }
-        Err(e) => {
-            let errors = match &e {
-                CompileError::ValidationError { url: _url, src } => {
-                    // Collect leaf errors from the meta-schema validation failure
-                    let mut error_list = Vec::new();
-                    collect_errors(src, &mut error_list);
-                    // Filter and format errors properly - no instance for schema compilation
-                    format_errors(error_list, &schema_value, schema_id)
-                }
-                _ => {
-                    // Other compilation errors
-                    vec![json!({
-                        "code": "SCHEMA_COMPILATION_FAILED",
-                        "message": format!("Schema '{}' compilation failed", schema_id),
-                        "details": {
-                            "schema": schema_id,
-                            "cause": format!("{:?}", e)
-                        }
-                    })]
-                }
-            };
-            JsonB(json!({ "errors": errors }))
-        }
-    }
-}
+    let mut errors = Vec::new();
+
+    // Track all schema IDs for compilation
+    let mut all_schema_ids = Vec::new();
+
+    // Phase 1: Add all type schemas as resources (these are referenced by puncs)
+    // Types are never strict - they're reusable building blocks
+    if let Some(types_array) = types_value.as_array() {
+        for type_row in types_array {
+            if let Some(type_obj) = type_row.as_object() {
+                if let (Some(type_name), Some(schemas_raw)) = (
+                    type_obj.get("name").and_then(|v| v.as_str()),
+                    type_obj.get("schemas")
+                ) {
+                    // Parse the schemas JSONB field
+                    if let Some(schemas_array) = schemas_raw.as_array() {
+                        for schema_def in schemas_array {
+                            if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
+                                if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), false, &mut errors) {
+                                    errors.push(json!({
+                                        "code": "TYPE_SCHEMA_RESOURCE_FAILED",
+                                        "message": format!("Failed to add schema resource '{}' for type '{}'", schema_id, type_name),
+                                        "details": {
+                                            "type_name": type_name,
+                                            "schema_id": schema_id,
+                                            "cause": format!("{}", e)
+                                        }
+                                    }));
+                                } else {
+                                    all_schema_ids.push(schema_id.to_string());
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    // Phase 2: Add all punc schemas as resources (these may reference type schemas)
+    // Each punc gets strict validation based on its public field
+    if let Some(puncs_array) = puncs_value.as_array() {
+        for punc_row in puncs_array {
+            if let Some(punc_obj) = punc_row.as_object() {
+                if let Some(punc_name) = punc_obj.get("name").and_then(|v| v.as_str()) {
+                    // Get the strict setting for this specific punc (public = strict)
+                    let punc_strict = punc_obj.get("public")
+                        .and_then(|v| v.as_bool())
+                        .unwrap_or(false);
+
+                    // Add punc local schemas as resources (from schemas field) - use $id directly (universal)
+                    if let Some(schemas_raw) = punc_obj.get("schemas") {
+                        if let Some(schemas_array) = schemas_raw.as_array() {
+                            for schema_def in schemas_array {
+                                if let Some(schema_id) = schema_def.get("$id").and_then(|v| v.as_str()) {
+                                    if let Err(e) = add_schema_resource(&mut compiler, schema_id, schema_def.clone(), punc_strict, &mut errors) {
+                                        errors.push(json!({
+                                            "code": "PUNC_LOCAL_SCHEMA_RESOURCE_FAILED",
+                                            "message": format!("Failed to add local schema resource '{}' for punc '{}'", schema_id, punc_name),
+                                            "details": {
+                                                "punc_name": punc_name,
+                                                "schema_id": schema_id,
+                                                "cause": format!("{}", e)
+                                            }
+                                        }));
+                                    } else {
+                                        all_schema_ids.push(schema_id.to_string());
+                                    }
+                                }
+                            }
+                        }
+                    }
+
+                    // Add request schema as resource if present - use {punc_name}.request
+                    if let Some(request_schema) = punc_obj.get("request") {
+                        if !request_schema.is_null() {
+                            let request_schema_id = format!("{}.request", punc_name);
+                            if let Err(e) = add_schema_resource(&mut compiler, &request_schema_id, request_schema.clone(), punc_strict, &mut errors) {
+                                errors.push(json!({
+                                    "code": "PUNC_REQUEST_SCHEMA_RESOURCE_FAILED",
+                                    "message": format!("Failed to add request schema resource for punc '{}'", punc_name),
+                                    "details": {
+                                        "punc_name": punc_name,
+                                        "schema_id": request_schema_id,
+                                        "cause": format!("{}", e)
+                                    }
+                                }));
+                            } else {
+                                all_schema_ids.push(request_schema_id);
+                            }
+                        }
+                    }
+
+                    // Add response schema as resource if present - use {punc_name}.response
+                    if let Some(response_schema) = punc_obj.get("response") {
+                        if !response_schema.is_null() {
+                            let response_schema_id = format!("{}.response", punc_name);
+                            if let Err(e) = add_schema_resource(&mut compiler, &response_schema_id, response_schema.clone(), punc_strict, &mut errors) {
+                                errors.push(json!({
+                                    "code": "PUNC_RESPONSE_SCHEMA_RESOURCE_FAILED",
+                                    "message": format!("Failed to add response schema resource for punc '{}'", punc_name),
+                                    "details": {
+                                        "punc_name": punc_name,
+                                        "schema_id": response_schema_id,
+                                        "cause": format!("{}", e)
+                                    }
+                                }));
+                            } else {
+                                all_schema_ids.push(response_schema_id);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    // Phase 3: Compile all schemas now that all resources are added
+    if !errors.is_empty() {
+        // If we had errors adding resources, don't attempt compilation
+        return JsonB(json!({ "errors": errors }));
+    }
+
+    if let Err(_) = compile_all_schemas(&mut compiler, &mut cache, &all_schema_ids, &mut errors) {
+        // compile_all_schemas already adds errors to the errors vector
+    }
+
+    if errors.is_empty() {
+        JsonB(json!({ "response": "success" }))
+    } else {
+        JsonB(json!({ "errors": errors }))
+    }
+}
+
+// Helper function to add a schema resource (without compiling)
+fn add_schema_resource(
+    compiler: &mut Compiler,
+    schema_id: &str,
+    mut schema_value: Value,
+    strict: bool,
+    errors: &mut Vec<Value>
+) -> Result<(), String> {
+    // Apply strict validation to all objects in the schema if requested
+    if strict {
+        apply_strict_validation(&mut schema_value);
+    }
+
+    // Use schema_id directly - simple IDs like "entity", "user", "punc.request"
+    if let Err(e) = compiler.add_resource(schema_id, schema_value.clone()) {
+        errors.push(json!({
+            "code": "SCHEMA_RESOURCE_FAILED",
+            "message": format!("Failed to add schema resource '{}'", schema_id),
+            "details": {
+                "schema": schema_id,
+                "cause": format!("{}", e)
+            }
+        }));
+        return Err(format!("Failed to add schema resource: {}", e));
+    }
+
+    Ok(())
+}
+
+// Helper function to compile all added resources
+fn compile_all_schemas(
+    compiler: &mut Compiler,
+    cache: &mut BoonCache,
+    schema_ids: &[String],
+    errors: &mut Vec<Value>
+) -> Result<(), String> {
+    for schema_id in schema_ids {
+        match compiler.compile(schema_id, &mut cache.schemas) {
+            Ok(sch_index) => {
+                // Store the index using the original schema_id as the key
+                cache.id_to_index.insert(schema_id.to_string(), sch_index);
+            }
+            Err(e) => {
+                match &e {
+                    CompileError::ValidationError { url: _url, src } => {
+                        // Collect leaf errors from the meta-schema validation failure
+                        let mut error_list = Vec::new();
+                        collect_errors(src, &mut error_list);
+                        // Get schema value for error formatting - we'll need to reconstruct or store it
+                        let schema_value = json!({}); // Placeholder - we don't have the original value here
+                        let formatted_errors = format_errors(error_list, &schema_value, schema_id);
+                        errors.extend(formatted_errors);
+                    }
+                    _ => {
+                        // Other compilation errors
+                        errors.push(json!({
+                            "code": "SCHEMA_COMPILATION_FAILED",
+                            "message": format!("Schema '{}' compilation failed", schema_id),
+                            "details": {
+                                "schema": schema_id,
+                                "cause": format!("{:?}", e)
+                            }
+                        }));
+                    }
+                };
+                return Err(format!("Schema compilation failed: {:?}", e));
+            }
+        }
+    }
+    Ok(())
+}

 // Helper function to apply strict validation to a schema
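For orientation, here is a minimal sketch of the JSONB shape the new cache_json_schemas expects, inferred from the field accesses in the hunk above. The type name "user", the punc name "create_user", and the bare "$ref" form are illustrative assumptions only, not values taken from the repo or its tests:

use serde_json::{json, Value};

// Hypothetical payloads matching the fields cache_json_schemas reads:
// each type row carries "name" and "schemas" (each schema with an "$id");
// each punc row carries "name", "public", and optional "schemas", "request", "response".
fn example_payloads() -> (Value, Value) {
    let types = json!([
        // type rows are never strict and are registered under each schema's "$id"
        { "name": "user", "schemas": [ { "$id": "user", "type": "object" } ] }
    ]);
    let puncs = json!([
        {
            "name": "create_user",   // hypothetical punc name
            "public": true,          // public = strict validation for this punc's schemas
            "schemas": [ { "$id": "create_user.local", "type": "object" } ],
            // registered as "create_user.request" / "create_user.response"
            "request":  { "type": "object", "properties": { "user": { "$ref": "user" } } },
            "response": { "type": "object" }
        }
    ]);
    (types, puncs)
}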
@@ -135,7 +295,7 @@ fn apply_strict_validation_recursive(schema: &mut Value, inside_conditional: bool
 #[pg_extern(strict, parallel_safe)]
 fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
     let cache = SCHEMA_CACHE.read().unwrap();
-    // Lookup uses the original schema_id
+    // Lookup uses the original schema_id - schemas should always be available after bulk caching
     match cache.id_to_index.get(schema_id) {
         None => JsonB(json!({
             "errors": [{
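Both functions operate on SCHEMA_CACHE, whose definition sits above the first hunk and is not part of this diff. A sketch of its assumed shape, inferred only from the reads and writes shown in this commit (the real definition may differ):

use std::collections::HashMap;
use std::sync::RwLock;
use boon::{SchemaIndex, Schemas};
use lazy_static::lazy_static;

// Inferred from `*cache = BoonCache { schemas: Schemas::new(), id_to_index: HashMap::new() }`
// and from `cache.id_to_index.get(schema_id)` / `compiler.compile(.., &mut cache.schemas)`.
struct BoonCache {
    schemas: Schemas,                          // compiled schemas owned by boon
    id_to_index: HashMap<String, SchemaIndex>, // "user", "create_user.request", ... -> index
}

lazy_static! {
    static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
        schemas: Schemas::new(),
        id_to_index: HashMap::new(),
    });
}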
@@ -143,7 +303,7 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
                 "message": format!("Schema '{}' not found in cache", schema_id),
                 "details": {
                     "schema": schema_id,
-                    "cause": "Schema must be cached before validation"
+                    "cause": "Schema was not found in bulk cache - ensure cache_json_schemas was called"
                 }
             }]
         })),
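Taken together, the intended call sequence is: bulk-cache everything once with cache_json_schemas, then validate individual payloads against the derived ids. A rough sketch under those assumptions, using a hypothetical punc named create_user; the SQL lines in the comment assume the default pgrx function naming:

use pgrx::JsonB;
use serde_json::json;

// Illustrative flow only; from SQL this would roughly correspond to
//   SELECT cache_json_schemas(<types>::jsonb, <puncs>::jsonb);
//   SELECT validate_json_schema('create_user.request', <payload>::jsonb);
fn cache_then_validate() {
    let types = JsonB(json!([{ "name": "user", "schemas": [{ "$id": "user", "type": "object" }] }]));
    let puncs = JsonB(json!([{
        "name": "create_user",
        "public": true,
        "request": { "type": "object", "properties": { "user": { "type": "object" } }, "required": ["user"] },
        "response": { "type": "object" }
    }]));

    // Phases 1-3 run inside cache_json_schemas; failures come back as {"errors": [...]}.
    let cached = cache_json_schemas(types, puncs);

    // Request payloads are looked up under "{punc_name}.request".
    let validated = validate_json_schema("create_user.request", JsonB(json!({ "user": {} })));

    let _ = (cached, validated);
}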

src/tests.rs (2148 lines changed): file diff suppressed because it is too large.