This commit is contained in:
2026-04-02 21:55:57 -04:00
parent 29d8dfb608
commit 06f6a587de
21 changed files with 673 additions and 457 deletions

View File

@ -44,7 +44,7 @@ impl MockExecutor {
#[cfg(test)]
impl DatabaseExecutor for MockExecutor {
fn query(&self, sql: &str, _args: Option<&[Value]>) -> Result<Value, String> {
fn query(&self, sql: &str, _args: Option<Vec<Value>>) -> Result<Value, String> {
println!("JSPG_SQL: {}", sql);
MOCK_STATE.with(|state| {
let mut s = state.borrow_mut();
@ -65,7 +65,7 @@ impl DatabaseExecutor for MockExecutor {
})
}
fn execute(&self, sql: &str, _args: Option<&[Value]>) -> Result<(), String> {
fn execute(&self, sql: &str, _args: Option<Vec<Value>>) -> Result<(), String> {
println!("JSPG_SQL: {}", sql);
MOCK_STATE.with(|state| {
let mut s = state.borrow_mut();

View File

@ -9,10 +9,10 @@ use serde_json::Value;
/// without a live Postgres SPI connection.
pub trait DatabaseExecutor: Send + Sync {
/// Executes a query expecting a single JSONB return, representing rows.
fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String>;
fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String>;
/// Executes an operation (INSERT, UPDATE, DELETE, or pg_notify) that does not return rows.
fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String>;
fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String>;
/// Returns the current authenticated user's ID
fn auth_user_id(&self) -> Result<String, String>;

View File

@ -67,15 +67,11 @@ impl SpiExecutor {
}
impl DatabaseExecutor for SpiExecutor {
fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String> {
let mut json_args = Vec::new();
fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String> {
let mut args_with_oid: Vec<pgrx::datum::DatumWithOid> = Vec::new();
if let Some(params) = args {
for val in params {
json_args.push(pgrx::JsonB(val.clone()));
}
for j_val in json_args.into_iter() {
args_with_oid.push(pgrx::datum::DatumWithOid::from(j_val));
args_with_oid.push(pgrx::datum::DatumWithOid::from(pgrx::JsonB(val)));
}
}
@ -98,15 +94,11 @@ impl DatabaseExecutor for SpiExecutor {
})
}
fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String> {
let mut json_args = Vec::new();
fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String> {
let mut args_with_oid: Vec<pgrx::datum::DatumWithOid> = Vec::new();
if let Some(params) = args {
for val in params {
json_args.push(pgrx::JsonB(val.clone()));
}
for j_val in json_args.into_iter() {
args_with_oid.push(pgrx::datum::DatumWithOid::from(j_val));
args_with_oid.push(pgrx::datum::DatumWithOid::from(pgrx::JsonB(val)));
}
}

View File

@ -53,18 +53,38 @@ impl Database {
executor: Box::new(MockExecutor::new()),
};
let mut errors = Vec::new();
if let Some(arr) = val.get("enums").and_then(|v| v.as_array()) {
for item in arr {
if let Ok(def) = serde_json::from_value::<Enum>(item.clone()) {
db.enums.insert(def.name.clone(), def);
match serde_json::from_value::<Enum>(item.clone()) {
Ok(def) => {
db.enums.insert(def.name.clone(), def);
}
Err(e) => {
errors.push(crate::drop::Error {
code: "DATABASE_ENUM_PARSE_FAILED".to_string(),
message: format!("Failed to parse database enum: {}", e),
details: crate::drop::ErrorDetails::default(),
});
}
}
}
}
if let Some(arr) = val.get("types").and_then(|v| v.as_array()) {
for item in arr {
if let Ok(def) = serde_json::from_value::<Type>(item.clone()) {
db.types.insert(def.name.clone(), def);
match serde_json::from_value::<Type>(item.clone()) {
Ok(def) => {
db.types.insert(def.name.clone(), def);
}
Err(e) => {
errors.push(crate::drop::Error {
code: "DATABASE_TYPE_PARSE_FAILED".to_string(),
message: format!("Failed to parse database type: {}", e),
details: crate::drop::ErrorDetails::default(),
});
}
}
}
}
@ -80,16 +100,11 @@ impl Database {
}
}
Err(e) => {
return Err(crate::drop::Drop::with_errors(vec![crate::drop::Error {
errors.push(crate::drop::Error {
code: "DATABASE_RELATION_PARSE_FAILED".to_string(),
message: format!("Failed to parse database relation: {}", e),
details: crate::drop::ErrorDetails {
path: "".to_string(),
cause: None,
context: None,
schema: None,
},
}]));
details: crate::drop::ErrorDetails::default(),
});
}
}
}
@ -97,27 +112,48 @@ impl Database {
if let Some(arr) = val.get("puncs").and_then(|v| v.as_array()) {
for item in arr {
if let Ok(def) = serde_json::from_value::<Punc>(item.clone()) {
db.puncs.insert(def.name.clone(), def);
match serde_json::from_value::<Punc>(item.clone()) {
Ok(def) => {
db.puncs.insert(def.name.clone(), def);
}
Err(e) => {
errors.push(crate::drop::Error {
code: "DATABASE_PUNC_PARSE_FAILED".to_string(),
message: format!("Failed to parse database punc: {}", e),
details: crate::drop::ErrorDetails::default(),
});
}
}
}
}
if let Some(arr) = val.get("schemas").and_then(|v| v.as_array()) {
for (i, item) in arr.iter().enumerate() {
if let Ok(mut schema) = serde_json::from_value::<Schema>(item.clone()) {
let id = schema
.obj
.id
.clone()
.unwrap_or_else(|| format!("schema_{}", i));
schema.obj.id = Some(id.clone());
db.schemas.insert(id, schema);
match serde_json::from_value::<Schema>(item.clone()) {
Ok(mut schema) => {
let id = schema
.obj
.id
.clone()
.unwrap_or_else(|| format!("schema_{}", i));
schema.obj.id = Some(id.clone());
db.schemas.insert(id, schema);
}
Err(e) => {
errors.push(crate::drop::Error {
code: "DATABASE_SCHEMA_PARSE_FAILED".to_string(),
message: format!("Failed to parse database schema: {}", e),
details: crate::drop::ErrorDetails::default(),
});
}
}
}
}
db.compile()?;
db.compile(&mut errors);
if !errors.is_empty() {
return Err(crate::drop::Drop::with_errors(errors));
}
Ok(db)
}
@ -128,12 +164,12 @@ impl Database {
}
/// Executes a query expecting a single JSONB array return, representing rows.
pub fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String> {
pub fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String> {
self.executor.query(sql, args)
}
/// Executes an operation (INSERT, UPDATE, DELETE, or pg_notify) that does not return rows.
pub fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String> {
pub fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String> {
self.executor.execute(sql, args)
}
@ -147,68 +183,48 @@ impl Database {
self.executor.timestamp()
}
pub fn compile(&mut self) -> Result<(), crate::drop::Drop> {
pub fn compile(&mut self, errors: &mut Vec<crate::drop::Error>) {
let mut harvested = Vec::new();
for schema in self.schemas.values_mut() {
if let Err(msg) = schema.collect_schemas(None, &mut harvested) {
return Err(crate::drop::Drop::with_errors(vec![crate::drop::Error {
code: "SCHEMA_VALIDATION_FAILED".to_string(),
message: msg,
details: crate::drop::ErrorDetails { path: "".to_string(), cause: None, context: None, schema: None },
}]));
}
schema.collect_schemas(None, &mut harvested, errors);
}
self.schemas.extend(harvested);
if let Err(msg) = self.collect_schemas() {
return Err(crate::drop::Drop::with_errors(vec![crate::drop::Error {
code: "SCHEMA_VALIDATION_FAILED".to_string(),
message: msg,
details: crate::drop::ErrorDetails {
path: "".to_string(),
cause: None,
context: None,
schema: None,
},
}]));
}
self.collect_schemas(errors);
self.collect_depths();
self.collect_descendants();
// Mathematically evaluate all property inheritances, formats, schemas, and foreign key edges topographically over OnceLocks
let mut visited = std::collections::HashSet::new();
for schema in self.schemas.values() {
schema.compile(self, &mut visited);
schema.compile(self, &mut visited, errors);
}
Ok(())
}
fn collect_schemas(&mut self) -> Result<(), String> {
fn collect_schemas(&mut self, errors: &mut Vec<crate::drop::Error>) {
let mut to_insert = Vec::new();
// Pass 1: Extract all Schemas structurally off top level definitions into the master registry.
// Validate every node recursively via string filters natively!
for type_def in self.types.values() {
for mut schema in type_def.schemas.clone() {
schema.collect_schemas(None, &mut to_insert)?;
schema.collect_schemas(None, &mut to_insert, errors);
}
}
for punc_def in self.puncs.values() {
for mut schema in punc_def.schemas.clone() {
schema.collect_schemas(None, &mut to_insert)?;
schema.collect_schemas(None, &mut to_insert, errors);
}
}
for enum_def in self.enums.values() {
for mut schema in enum_def.schemas.clone() {
schema.collect_schemas(None, &mut to_insert)?;
schema.collect_schemas(None, &mut to_insert, errors);
}
}
for (id, schema) in to_insert {
self.schemas.insert(id, schema);
}
Ok(())
}
fn collect_depths(&mut self) {
@ -247,19 +263,15 @@ impl Database {
}
}
// Cache generic descendants for $family runtime lookups
// Cache exhaustive descendants matrix for generic $family string lookups natively
let mut descendants = HashMap::new();
for (id, schema) in &self.schemas {
if let Some(family_target) = &schema.obj.family {
let mut desc_set = HashSet::new();
Self::collect_descendants_recursively(family_target, &direct_refs, &mut desc_set);
let mut desc_vec: Vec<String> = desc_set.into_iter().collect();
desc_vec.sort();
for id in self.schemas.keys() {
let mut desc_set = HashSet::new();
Self::collect_descendants_recursively(id, &direct_refs, &mut desc_set);
let mut desc_vec: Vec<String> = desc_set.into_iter().collect();
desc_vec.sort();
// By placing all descendants directly onto the ID mapped location of the Family declaration,
// we can lookup descendants natively in ValidationContext without AST replacement overrides.
descendants.insert(id.clone(), desc_vec);
}
descendants.insert(id.clone(), desc_vec);
}
self.descendants = descendants;
}

View File

@ -255,6 +255,7 @@ impl Schema {
&self,
db: &crate::database::Database,
visited: &mut std::collections::HashSet<String>,
errors: &mut Vec<crate::drop::Error>,
) {
if self.obj.compiled_properties.get().is_some() {
return;
@ -301,7 +302,7 @@ impl Schema {
// 1. Resolve INHERITANCE dependencies first
if let Some(ref_id) = &self.obj.r#ref {
if let Some(parent) = db.schemas.get(ref_id) {
parent.compile(db, visited);
parent.compile(db, visited, errors);
if let Some(p_props) = parent.obj.compiled_properties.get() {
props.extend(p_props.clone());
}
@ -310,7 +311,7 @@ impl Schema {
if let Some(all_of) = &self.obj.all_of {
for ao in all_of {
ao.compile(db, visited);
ao.compile(db, visited, errors);
if let Some(ao_props) = ao.obj.compiled_properties.get() {
props.extend(ao_props.clone());
}
@ -318,14 +319,14 @@ impl Schema {
}
if let Some(then_schema) = &self.obj.then_ {
then_schema.compile(db, visited);
then_schema.compile(db, visited, errors);
if let Some(t_props) = then_schema.obj.compiled_properties.get() {
props.extend(t_props.clone());
}
}
if let Some(else_schema) = &self.obj.else_ {
else_schema.compile(db, visited);
else_schema.compile(db, visited, errors);
if let Some(e_props) = else_schema.obj.compiled_properties.get() {
props.extend(e_props.clone());
}
@ -345,47 +346,47 @@ impl Schema {
let _ = self.obj.compiled_property_names.set(names);
// 4. Compute Edges natively
let schema_edges = self.compile_edges(db, visited, &props);
let schema_edges = self.compile_edges(db, visited, &props, errors);
let _ = self.obj.compiled_edges.set(schema_edges);
// 5. Build our inline children properties recursively NOW! (Depth-first search)
if let Some(local_props) = &self.obj.properties {
for child in local_props.values() {
child.compile(db, visited);
child.compile(db, visited, errors);
}
}
if let Some(items) = &self.obj.items {
items.compile(db, visited);
items.compile(db, visited, errors);
}
if let Some(pattern_props) = &self.obj.pattern_properties {
for child in pattern_props.values() {
child.compile(db, visited);
child.compile(db, visited, errors);
}
}
if let Some(additional_props) = &self.obj.additional_properties {
additional_props.compile(db, visited);
additional_props.compile(db, visited, errors);
}
if let Some(one_of) = &self.obj.one_of {
for child in one_of {
child.compile(db, visited);
child.compile(db, visited, errors);
}
}
if let Some(arr) = &self.obj.prefix_items {
for child in arr {
child.compile(db, visited);
child.compile(db, visited, errors);
}
}
if let Some(child) = &self.obj.not {
child.compile(db, visited);
child.compile(db, visited, errors);
}
if let Some(child) = &self.obj.contains {
child.compile(db, visited);
child.compile(db, visited, errors);
}
if let Some(child) = &self.obj.property_names {
child.compile(db, visited);
child.compile(db, visited, errors);
}
if let Some(child) = &self.obj.if_ {
child.compile(db, visited);
child.compile(db, visited, errors);
}
if let Some(id) = &self.obj.id {
@ -394,30 +395,38 @@ impl Schema {
}
#[allow(unused_variables)]
fn validate_identifier(id: &str, field_name: &str) -> Result<(), String> {
fn validate_identifier(id: &str, field_name: &str, errors: &mut Vec<crate::drop::Error>) {
#[cfg(not(test))]
for c in id.chars() {
if !c.is_ascii_lowercase() && !c.is_ascii_digit() && c != '_' && c != '.' {
return Err(format!("Invalid character '{}' in JSON Schema '{}' property: '{}'. Identifiers must exclusively contain [a-z0-9_.]", c, field_name, id));
errors.push(crate::drop::Error {
code: "INVALID_IDENTIFIER".to_string(),
message: format!(
"Invalid character '{}' in JSON Schema '{}' property: '{}'. Identifiers must exclusively contain [a-z0-9_.]",
c, field_name, id
),
details: crate::drop::ErrorDetails::default(),
});
return;
}
}
Ok(())
}
pub fn collect_schemas(
&mut self,
tracking_path: Option<String>,
to_insert: &mut Vec<(String, Schema)>,
) -> Result<(), String> {
errors: &mut Vec<crate::drop::Error>,
) {
if let Some(id) = &self.obj.id {
Self::validate_identifier(id, "$id")?;
Self::validate_identifier(id, "$id", errors);
to_insert.push((id.clone(), self.clone()));
}
if let Some(r#ref) = &self.obj.r#ref {
Self::validate_identifier(r#ref, "$ref")?;
Self::validate_identifier(r#ref, "$ref", errors);
}
if let Some(family) = &self.obj.family {
Self::validate_identifier(family, "$family")?;
Self::validate_identifier(family, "$family", errors);
}
// Is this schema an inline ad-hoc composition?
@ -431,20 +440,20 @@ impl Schema {
// Provide the path origin to children natively, prioritizing the explicit `$id` boundary if one exists
let origin_path = self.obj.id.clone().or(tracking_path);
self.collect_child_schemas(origin_path, to_insert)?;
Ok(())
self.collect_child_schemas(origin_path, to_insert, errors);
}
pub fn collect_child_schemas(
&mut self,
origin_path: Option<String>,
to_insert: &mut Vec<(String, Schema)>,
) -> Result<(), String> {
errors: &mut Vec<crate::drop::Error>,
) {
if let Some(props) = &mut self.obj.properties {
for (k, v) in props.iter_mut() {
let mut inner = (**v).clone();
let next_path = origin_path.as_ref().map(|o| format!("{}/{}", o, k));
inner.collect_schemas(next_path, to_insert)?;
inner.collect_schemas(next_path, to_insert, errors);
*v = Arc::new(inner);
}
}
@ -453,48 +462,50 @@ impl Schema {
for (k, v) in pattern_props.iter_mut() {
let mut inner = (**v).clone();
let next_path = origin_path.as_ref().map(|o| format!("{}/{}", o, k));
inner.collect_schemas(next_path, to_insert)?;
inner.collect_schemas(next_path, to_insert, errors);
*v = Arc::new(inner);
}
}
let mut map_arr = |arr: &mut Vec<Arc<Schema>>| -> Result<(), String> {
let mut map_arr = |arr: &mut Vec<Arc<Schema>>| {
for v in arr.iter_mut() {
let mut inner = (**v).clone();
inner.collect_schemas(origin_path.clone(), to_insert)?;
inner.collect_schemas(origin_path.clone(), to_insert, errors);
*v = Arc::new(inner);
}
Ok(())
};
if let Some(arr) = &mut self.obj.prefix_items { map_arr(arr)?; }
if let Some(arr) = &mut self.obj.all_of { map_arr(arr)?; }
if let Some(arr) = &mut self.obj.one_of { map_arr(arr)?; }
if let Some(arr) = &mut self.obj.prefix_items {
map_arr(arr);
}
if let Some(arr) = &mut self.obj.all_of {
map_arr(arr);
}
if let Some(arr) = &mut self.obj.one_of {
map_arr(arr);
}
let mut map_opt = |opt: &mut Option<Arc<Schema>>, pass_path: bool| -> Result<(), String> {
let mut map_opt = |opt: &mut Option<Arc<Schema>>, pass_path: bool| {
if let Some(v) = opt {
let mut inner = (**v).clone();
let next = if pass_path { origin_path.clone() } else { None };
inner.collect_schemas(next, to_insert)?;
inner.collect_schemas(next, to_insert, errors);
*v = Arc::new(inner);
}
Ok(())
};
map_opt(&mut self.obj.additional_properties, false)?;
map_opt(&mut self.obj.additional_properties, false);
// `items` absolutely must inherit the EXACT property path assigned to the Array wrapper!
// This allows nested Arrays enclosing bare Entity structs to correctly register as the boundary mapping.
map_opt(&mut self.obj.items, true)?;
map_opt(&mut self.obj.not, false)?;
map_opt(&mut self.obj.contains, false)?;
map_opt(&mut self.obj.property_names, false)?;
map_opt(&mut self.obj.if_, false)?;
map_opt(&mut self.obj.then_, false)?;
map_opt(&mut self.obj.else_, false)?;
map_opt(&mut self.obj.items, true);
Ok(())
map_opt(&mut self.obj.not, false);
map_opt(&mut self.obj.contains, false);
map_opt(&mut self.obj.property_names, false);
map_opt(&mut self.obj.if_, false);
map_opt(&mut self.obj.then_, false);
map_opt(&mut self.obj.else_, false);
}
/// Dynamically infers and compiles all structural database relationships between this Schema
@ -506,16 +517,23 @@ impl Schema {
db: &crate::database::Database,
visited: &mut std::collections::HashSet<String>,
props: &std::collections::BTreeMap<String, std::sync::Arc<Schema>>,
errors: &mut Vec<crate::drop::Error>,
) -> std::collections::BTreeMap<String, crate::database::edge::Edge> {
let mut schema_edges = std::collections::BTreeMap::new();
// Determine the physical Database Table Name this schema structurally represents
// Plucks the polymorphic discriminator via dot-notation (e.g. extracting "person" from "full.person")
let mut parent_type_name = None;
if let Some(family) = &self.obj.family {
parent_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string());
} else if let Some(identifier) = self.obj.identifier() {
parent_type_name = Some(identifier.split('.').next_back().unwrap_or(&identifier).to_string());
parent_type_name = Some(
identifier
.split('.')
.next_back()
.unwrap_or(&identifier)
.to_string(),
);
}
if let Some(p_type) = parent_type_name {
@ -525,12 +543,14 @@ impl Schema {
for (prop_name, prop_schema) in props {
let mut child_type_name = None;
let mut target_schema = prop_schema.clone();
let mut is_array = false;
// Structurally unpack the inner target entity if the object maps to an array list
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) =
&prop_schema.obj.type_
{
if t == "array" {
is_array = true;
if let Some(items) = &prop_schema.obj.items {
target_schema = items.clone();
}
@ -545,24 +565,31 @@ impl Schema {
} else if let Some(arr) = &target_schema.obj.one_of {
if let Some(first) = arr.first() {
if let Some(ref_id) = first.obj.identifier() {
child_type_name = Some(ref_id.split('.').next_back().unwrap_or(&ref_id).to_string());
child_type_name =
Some(ref_id.split('.').next_back().unwrap_or(&ref_id).to_string());
}
}
}
if let Some(c_type) = child_type_name {
if db.types.contains_key(&c_type) {
// Ensure the child Schema's AST has accurately compiled its own physical property keys so we can
// Ensure the child Schema's AST has accurately compiled its own physical property keys so we can
// inject them securely for Many-to-Many Twin Deduction disambiguation matching.
target_schema.compile(db, visited);
target_schema.compile(db, visited, errors);
if let Some(compiled_target_props) = target_schema.obj.compiled_properties.get() {
let keys_for_ambiguity: Vec<String> =
compiled_target_props.keys().cloned().collect();
// Interrogate the Database catalog graph to discover the exact Foreign Key Constraint connecting the components
if let Some((relation, is_forward)) =
resolve_relation(db, &p_type, &c_type, prop_name, Some(&keys_for_ambiguity))
{
if let Some((relation, is_forward)) = resolve_relation(
db,
&p_type,
&c_type,
prop_name,
Some(&keys_for_ambiguity),
is_array,
errors,
) {
schema_edges.insert(
prop_name.clone(),
crate::database::edge::Edge {
@ -589,11 +616,12 @@ pub(crate) fn resolve_relation<'a>(
child_type: &str,
prop_name: &str,
relative_keys: Option<&Vec<String>>,
is_array: bool,
errors: &mut Vec<crate::drop::Error>,
) -> Option<(&'a crate::database::relation::Relation, bool)> {
// Enforce graph locality by ensuring we don't accidentally crawl to pure structural entity boundaries
if parent_type == "entity" && child_type == "entity" {
return None;
return None;
}
let p_def = db.types.get(parent_type)?;
@ -605,11 +633,22 @@ pub(crate) fn resolve_relation<'a>(
// Scour the complete catalog for any Edge matching the inheritance scope of the two objects
// This automatically binds polymorphic structures (e.g. recognizing a relationship targeting User
// also natively binds instances specifically typed as Person).
for rel in db.relations.values() {
let is_forward = p_def.hierarchy.contains(&rel.source_type)
&& c_def.hierarchy.contains(&rel.destination_type);
let is_reverse = p_def.hierarchy.contains(&rel.destination_type)
&& c_def.hierarchy.contains(&rel.source_type);
let mut all_rels: Vec<&crate::database::relation::Relation> = db.relations.values().collect();
all_rels.sort_by(|a, b| a.constraint.cmp(&b.constraint));
for rel in all_rels {
let mut is_forward =
p_def.hierarchy.contains(&rel.source_type) && c_def.hierarchy.contains(&rel.destination_type);
let is_reverse =
p_def.hierarchy.contains(&rel.destination_type) && c_def.hierarchy.contains(&rel.source_type);
// Structural Cardinality Filtration:
// If the schema requires a collection (Array), it is mathematically impossible for a pure
// Forward scalar edge (where the parent holds exactly one UUID pointer) to fulfill a One-to-Many request.
// Thus, if it's an array, we fully reject pure Forward edges and only accept Reverse edges (or Junction edges).
if is_array && is_forward && !is_reverse {
is_forward = false;
}
if is_forward {
matching_rels.push(rel);
@ -622,6 +661,14 @@ pub(crate) fn resolve_relation<'a>(
// Abort relation discovery early if no hierarchical inheritance match was found
if matching_rels.is_empty() {
errors.push(crate::drop::Error {
code: "EDGE_MISSING".to_string(),
message: format!(
"No database relation exists between '{}' and '{}' for property '{}'",
parent_type, child_type, prop_name
),
details: crate::drop::ErrorDetails::default(),
});
return None;
}
@ -648,10 +695,10 @@ pub(crate) fn resolve_relation<'a>(
}
}
// Complex Subgraph Resolution: The database contains multiple equally explicit foreign key constraints
// Complex Subgraph Resolution: The database contains multiple equally explicit foreign key constraints
// linking these objects (such as pointing to `source` and `target` in Many-to-Many junction models).
if !resolved && relative_keys.is_some() {
// Twin Deduction Pass 1: We inspect the exact properties structurally defined inside the compiled payload
// Twin Deduction Pass 1: We inspect the exact properties structurally defined inside the compiled payload
// to observe which explicit relation arrow the child payload natively consumes.
let keys = relative_keys.unwrap();
let mut consumed_rel_idx = None;
@ -664,7 +711,7 @@ pub(crate) fn resolve_relation<'a>(
}
}
// Twin Deduction Pass 2: Knowing which arrow points outbound, we can mathematically deduce its twin
// Twin Deduction Pass 2: Knowing which arrow points outbound, we can mathematically deduce its twin
// providing the reverse ownership on the same junction boundary must be the incoming Edge to the parent.
if let Some(used_idx) = consumed_rel_idx {
let used_rel = matching_rels[used_idx];
@ -697,9 +744,25 @@ pub(crate) fn resolve_relation<'a>(
}
if null_prefix_ids.len() == 1 {
chosen_idx = null_prefix_ids[0];
resolved = true;
}
}
// If we exhausted all mathematical deduction pathways and STILL cannot isolate a single edge,
// we must abort rather than silently guessing. Returning None prevents arbitrary SQL generation
// and forces a clean structural error for the architect.
if !resolved {
errors.push(crate::drop::Error {
code: "AMBIGUOUS_TYPE_RELATIONS".to_string(),
message: format!(
"Ambiguous database relation between '{}' and '{}' for property '{}'",
parent_type, child_type, prop_name
),
details: crate::drop::ErrorDetails::default(),
});
return None;
}
Some((matching_rels[chosen_idx], directions[chosen_idx]))
}

View File

@ -64,7 +64,7 @@ pub struct Error {
pub details: ErrorDetails,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct ErrorDetails {
pub path: String,
#[serde(skip_serializing_if = "Option::is_none")]

View File

@ -40,7 +40,7 @@ impl Merger {
}
};
let result = self.merge_internal(target_schema, data.clone(), &mut notifications_queue);
let result = self.merge_internal(target_schema, data, &mut notifications_queue);
let val_resolved = match result {
Ok(val) => val,
@ -78,7 +78,7 @@ impl Merger {
details: crate::drop::ErrorDetails {
path: "".to_string(),
cause: final_cause,
context: Some(data),
context: None,
schema: None,
},
}]);
@ -238,7 +238,7 @@ impl Merger {
if !type_def.relationship {
let (fields, kind, fetched, replaces) =
self.stage_entity(entity_fields.clone(), type_def, &user_id, &timestamp)?;
self.stage_entity(entity_fields, type_def, &user_id, &timestamp)?;
entity_fields = fields;
entity_change_kind = kind;
entity_fetched = fetched;
@ -320,7 +320,7 @@ impl Merger {
if type_def.relationship {
let (fields, kind, fetched, replaces) =
self.stage_entity(entity_fields.clone(), type_def, &user_id, &timestamp)?;
self.stage_entity(entity_fields, type_def, &user_id, &timestamp)?;
entity_fields = fields;
entity_change_kind = kind;
entity_fetched = fetched;

View File

@ -124,33 +124,28 @@ impl<'a> Compiler<'a> {
}
// Handle $family Polymorphism fallbacks for relations
if let Some(family_target) = &node.schema.obj.family {
let base_type_name = family_target
.split('.')
.next_back()
.unwrap_or(family_target)
.to_string();
if let Some(type_def) = self.db.types.get(&base_type_name) {
if type_def.variations.len() == 1 {
let mut bypass_schema = crate::database::schema::Schema::default();
bypass_schema.obj.r#ref = Some(family_target.clone());
let mut bypass_node = node.clone();
bypass_node.schema = std::sync::Arc::new(bypass_schema);
return self.compile_node(bypass_node);
}
let mut sorted_variations: Vec<String> = type_def.variations.iter().cloned().collect();
sorted_variations.sort();
let mut family_schemas = Vec::new();
for variation in &sorted_variations {
let mut ref_schema = crate::database::schema::Schema::default();
ref_schema.obj.r#ref = Some(variation.clone());
family_schemas.push(std::sync::Arc::new(ref_schema));
}
return self.compile_one_of(&family_schemas, node);
let mut all_targets = vec![family_target.clone()];
if let Some(descendants) = self.db.descendants.get(family_target) {
all_targets.extend(descendants.clone());
}
if all_targets.len() == 1 {
let mut bypass_schema = crate::database::schema::Schema::default();
bypass_schema.obj.r#ref = Some(all_targets[0].clone());
let mut bypass_node = node.clone();
bypass_node.schema = std::sync::Arc::new(bypass_schema);
return self.compile_node(bypass_node);
}
all_targets.sort();
let mut family_schemas = Vec::new();
for variation in &all_targets {
let mut ref_schema = crate::database::schema::Schema::default();
ref_schema.obj.r#ref = Some(variation.clone());
family_schemas.push(std::sync::Arc::new(ref_schema));
}
return self.compile_one_of(&family_schemas, node);
}
// Handle oneOf Polymorphism fallbacks for relations
@ -230,49 +225,62 @@ impl<'a> Compiler<'a> {
let mut select_args = Vec::new();
if let Some(family_target) = node.schema.obj.family.as_ref() {
let base_type_name = family_target
.split('.')
.next_back()
.unwrap_or(family_target)
.to_string();
let family_prefix = family_target.rfind('.').map(|idx| &family_target[..idx]);
if let Some(fam_type_def) = self.db.types.get(&base_type_name) {
if fam_type_def.variations.len() == 1 {
let mut bypass_schema = crate::database::schema::Schema::default();
bypass_schema.obj.r#ref = Some(family_target.clone());
bypass_schema.compile(self.db, &mut std::collections::HashSet::new());
let mut all_targets = vec![family_target.clone()];
if let Some(descendants) = self.db.descendants.get(family_target) {
all_targets.extend(descendants.clone());
}
// Filter targets to EXACTLY match the family_target prefix
let mut final_targets = Vec::new();
for target in all_targets {
let target_prefix = target.rfind('.').map(|idx| &target[..idx]);
if target_prefix == family_prefix {
final_targets.push(target);
}
}
final_targets.sort();
final_targets.dedup();
if final_targets.len() == 1 {
let variation = &final_targets[0];
if let Some(target_schema) = self.db.schemas.get(variation) {
let mut bypass_node = node.clone();
bypass_node.schema = std::sync::Arc::new(bypass_schema);
bypass_node.schema = std::sync::Arc::new(target_schema.clone());
let mut bypassed_args = self.compile_select_clause(r#type, table_aliases, bypass_node)?;
select_args.append(&mut bypassed_args);
} else {
let mut family_schemas = Vec::new();
let mut sorted_fam_variations: Vec<String> =
fam_type_def.variations.iter().cloned().collect();
sorted_fam_variations.sort();
for variation in &sorted_fam_variations {
let mut ref_schema = crate::database::schema::Schema::default();
ref_schema.obj.r#ref = Some(variation.clone());
ref_schema.compile(self.db, &mut std::collections::HashSet::new());
family_schemas.push(std::sync::Arc::new(ref_schema));
}
let base_alias = table_aliases
.get(&r#type.name)
.cloned()
.unwrap_or_else(|| node.parent_alias.to_string());
select_args.push(format!("'id', {}.id", base_alias));
let mut case_node = node.clone();
case_node.parent_alias = base_alias.clone();
let arc_aliases = std::sync::Arc::new(table_aliases.clone());
case_node.parent_type_aliases = Some(arc_aliases);
let (case_sql, _) = self.compile_one_of(&family_schemas, case_node)?;
select_args.push(format!("'type', {}", case_sql));
return Err(format!("Could not find schema for variation {}", variation));
}
} else {
let mut family_schemas = Vec::new();
for variation in &final_targets {
if let Some(target_schema) = self.db.schemas.get(variation) {
family_schemas.push(std::sync::Arc::new(target_schema.clone()));
} else {
return Err(format!(
"Could not find schema metadata for variation {}",
variation
));
}
}
let base_alias = table_aliases
.get(&r#type.name)
.cloned()
.unwrap_or_else(|| node.parent_alias.to_string());
select_args.push(format!("'id', {}.id", base_alias));
let mut case_node = node.clone();
case_node.parent_alias = base_alias.clone();
let arc_aliases = std::sync::Arc::new(table_aliases.clone());
case_node.parent_type_aliases = Some(arc_aliases);
let (case_sql, _) = self.compile_one_of(&family_schemas, case_node)?;
select_args.push(format!("'type', {}", case_sql));
}
} else if let Some(one_of) = &node.schema.obj.one_of {
let base_alias = table_aliases
@ -334,10 +342,7 @@ impl<'a> Compiler<'a> {
};
for option_schema in schemas {
if let Some(ref_id) = &option_schema.obj.r#ref {
// Find the physical type this ref maps to
let base_type_name = ref_id.split('.').next_back().unwrap_or("").to_string();
if let Some(base_type_name) = option_schema.obj.identifier() {
// Generate the nested SQL for this specific target type
let mut child_node = node.clone();
child_node.schema = std::sync::Arc::clone(option_schema);

View File

@ -51,7 +51,7 @@ impl Queryer {
};
// 3. Execute via Database Executor
self.execute_sql(schema_id, &sql, &args)
self.execute_sql(schema_id, &sql, args)
}
fn extract_filters(
@ -151,7 +151,7 @@ impl Queryer {
&self,
schema_id: &str,
sql: &str,
args: &[serde_json::Value],
args: Vec<serde_json::Value>,
) -> crate::drop::Drop {
match self.db.query(sql, Some(args)) {
Ok(serde_json::Value::Array(table)) => {

View File

@ -1463,6 +1463,18 @@ fn test_queryer_0_8() {
crate::tests::runner::run_test_case(&path, 0, 8).unwrap();
}
#[test]
fn test_queryer_0_9() {
    // Fixture path is resolved relative to the crate root at compile time.
    let path = concat!(env!("CARGO_MANIFEST_DIR"), "/fixtures/queryer.json");
    crate::tests::runner::run_test_case(path, 0, 9).unwrap();
}
#[test]
fn test_queryer_0_10() {
    // Fixture path is resolved relative to the crate root at compile time.
    let path = concat!(env!("CARGO_MANIFEST_DIR"), "/fixtures/queryer.json");
    crate::tests::runner::run_test_case(path, 0, 10).unwrap();
}
#[test]
fn test_not_0_0() {
let path = format!("{}/fixtures/not.json", env!("CARGO_MANIFEST_DIR"));

View File

@ -14,7 +14,7 @@ where
}
// Type alias for easier reading
type CompiledSuite = Arc<Vec<(Suite, Arc<crate::database::Database>)>>;
type CompiledSuite = Arc<Vec<(Suite, Arc<Result<Arc<crate::database::Database>, crate::drop::Drop>>)>>;
// Global cache mapping filename -> Vector of (Parsed JSON suite, Compiled Database)
static CACHE: OnceLock<RwLock<HashMap<String, CompiledSuite>>> = OnceLock::new();
@ -43,19 +43,11 @@ fn get_cached_file(path: &str) -> CompiledSuite {
let mut compiled_suites = Vec::new();
for suite in suites {
let db_result = crate::database::Database::new(&suite.database);
if let Err(drop) = db_result {
let error_messages: Vec<String> = drop
.errors
.into_iter()
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
.collect();
panic!(
"System Setup Compilation failed for {}:\n{}",
path,
error_messages.join("\n")
);
}
compiled_suites.push((suite, Arc::new(db_result.unwrap())));
let compiled_db = match db_result {
Ok(db) => Ok(Arc::new(db)),
Err(drop) => Err(drop),
};
compiled_suites.push((suite, Arc::new(compiled_db)));
}
let new_data = Arc::new(compiled_suites);
@ -85,11 +77,36 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
let test = &group.tests[case_idx];
let mut failures = Vec::<String>::new();
// For validate/merge/query, if setup failed we must structurally fail this test
let db_unwrapped = if test.action.as_str() != "compile" && test.action.as_str() != "database_compile" {
match &**db {
Ok(valid_db) => Some(valid_db.clone()),
Err(drop) => {
let error_messages: Vec<String> = drop
.errors
.iter()
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
.collect();
failures.push(format!(
"[{}] Cannot run '{}' test '{}': System Setup Compilation structurally failed:\n{}",
group.description, test.action, test.description, error_messages.join("\n")
));
None
}
}
} else {
None
};
if !failures.is_empty() {
return Err(failures.join("\n"));
}
// 4. Run Tests
match test.action.as_str() {
"compile" => {
let result = test.run_compile(db.clone());
"compile" | "database_compile" => {
let result = test.run_compile(db);
if let Err(e) = result {
println!("TEST COMPILE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
@ -99,7 +116,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
}
}
"validate" => {
let result = test.run_validate(db.clone());
let result = test.run_validate(db_unwrapped.unwrap());
if let Err(e) = result {
println!("TEST VALIDATE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
@ -109,7 +126,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
}
}
"merge" => {
let result = test.run_merge(db.clone());
let result = test.run_merge(db_unwrapped.unwrap());
if let Err(e) = result {
println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
@ -119,7 +136,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
}
}
"query" => {
let result = test.run_query(db.clone());
let result = test.run_query(db_unwrapped.unwrap());
if let Err(e) = result {
println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
failures.push(format!(

View File

@ -35,21 +35,21 @@ fn default_action() -> String {
}
impl Case {
pub fn run_compile(&self, _db: Arc<Database>) -> Result<(), String> {
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
pub fn run_compile(
&self,
db_res: &Result<Arc<Database>, crate::drop::Drop>,
) -> Result<(), String> {
let expect = match &self.expect {
Some(e) => e,
None => return Ok(()),
};
// We assume db has already been setup and compiled successfully by runner.rs's `jspg_setup`
// We just need to check if there are compilation errors vs expected success
let got_success = true; // Setup ensures success unless setup fails, which runner handles
let result = match db_res {
Ok(_) => crate::drop::Drop::success(),
Err(d) => d.clone(),
};
if expected_success != got_success {
return Err(format!(
"Expected success: {}, Got: {}",
expected_success, got_success
));
}
Ok(())
expect.assert_drop(&result)
}
pub fn run_validate(&self, db: Arc<Database>) -> Result<(), String> {
@ -57,8 +57,6 @@ impl Case {
let validator = Validator::new(db);
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
let schema_id = &self.schema_id;
if !validator.db.schemas.contains_key(schema_id) {
return Err(format!(
@ -70,19 +68,8 @@ impl Case {
let test_data = self.data.clone().unwrap_or(Value::Null);
let result = validator.validate(schema_id, &test_data);
let got_valid = result.errors.is_empty();
if got_valid != expected_success {
let error_msg = if result.errors.is_empty() {
"None".to_string()
} else {
format!("{:?}", result.errors)
};
return Err(format!(
"Expected: {}, Got: {}. Errors: {}",
expected_success, got_valid, error_msg
));
if let Some(expect) = &self.expect {
expect.assert_drop(&result)?;
}
Ok(())
@ -101,24 +88,16 @@ impl Case {
let test_data = self.data.clone().unwrap_or(Value::Null);
let result = merger.merge(&self.schema_id, test_data);
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
let got_success = result.errors.is_empty();
let error_msg = if result.errors.is_empty() {
"None".to_string()
} else {
format!("{:?}", result.errors)
};
let return_val = if expected_success != got_success {
Err(format!(
"Merge Expected: {}, Got: {}. Errors: {}",
expected_success, got_success, error_msg
))
} else if let Some(expect) = &self.expect {
let queries = db.executor.get_queries();
expect.assert_pattern(&queries)?;
expect.assert_sql(&queries)
let return_val = if let Some(expect) = &self.expect {
if let Err(e) = expect.assert_drop(&result) {
Err(format!("Merge {}", e))
} else if result.errors.is_empty() {
// Only assert SQL if merge succeeded
let queries = db.executor.get_queries();
expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries))
} else {
Ok(())
}
} else {
Ok(())
};
@ -139,24 +118,15 @@ impl Case {
let result = queryer.query(&self.schema_id, self.filters.as_ref());
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
let got_success = result.errors.is_empty();
let error_msg = if result.errors.is_empty() {
"None".to_string()
} else {
format!("{:?}", result.errors)
};
let return_val = if expected_success != got_success {
Err(format!(
"Query Expected: {}, Got: {}. Errors: {}",
expected_success, got_success, error_msg
))
} else if let Some(expect) = &self.expect {
let queries = db.executor.get_queries();
expect.assert_pattern(&queries)?;
expect.assert_sql(&queries)
let return_val = if let Some(expect) = &self.expect {
if let Err(e) = expect.assert_drop(&result) {
Err(format!("Query {}", e))
} else if result.errors.is_empty() {
let queries = db.executor.get_queries();
expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries))
} else {
Ok(())
}
} else {
Ok(())
};

View File

@ -0,0 +1,87 @@
use super::Expect;
impl Expect {
    /// Compares an actual `Drop` result against this expectation.
    ///
    /// First checks that the success flag matches (`self.success` vs.
    /// `drop.errors.is_empty()`). Then, for an expected failure that carries
    /// an `errors` list, verifies that every expected error object is a
    /// structural subset of at least one actual error (order-independent).
    /// Returns `Err` with a human-readable description on the first mismatch.
    pub fn assert_drop(&self, drop: &crate::drop::Drop) -> Result<(), String> {
        let got_success = drop.errors.is_empty();
        if self.success != got_success {
            let mut err_msg = format!("Expected success: {}, Got: {}.", self.success, got_success);
            if !drop.errors.is_empty() {
                err_msg.push_str(&format!(" Actual Errors: {:?}", drop.errors));
            }
            return Err(err_msg);
        }
        // Only expected failures carry an error list worth matching against.
        if !self.success {
            if let Some(expected_errors) = &self.errors {
                // Serialize actual errors once so each expected entry can be
                // subset-matched as plain JSON values.
                let actual_values: Vec<serde_json::Value> = drop.errors
                    .iter()
                    .map(|e| serde_json::to_value(e).unwrap())
                    .collect();
                for (i, expected_val) in expected_errors.iter().enumerate() {
                    // Idiomatic `any` replaces the manual matched-flag loop.
                    let matched = actual_values
                        .iter()
                        .any(|actual_val| subset_match(expected_val, actual_val));
                    if !matched {
                        return Err(format!(
                            "Expected error {} was not found in actual errors.\nExpected subset: {}\nActual full errors: {:?}",
                            i,
                            serde_json::to_string_pretty(expected_val).unwrap(),
                            drop.errors,
                        ));
                    }
                }
            }
        }
        Ok(())
    }
}
// Helper to check if `expected` is a structural subset of `actual`
fn subset_match(expected: &serde_json::Value, actual: &serde_json::Value) -> bool {
match (expected, actual) {
(serde_json::Value::Object(exp_map), serde_json::Value::Object(act_map)) => {
for (k, v) in exp_map {
let mut act_v = act_map.get(k);
// Transparent fallback: if testing legacy flat "path", gracefully check inside "details"
if act_v.is_none() && k == "path" {
if let Some(serde_json::Value::Object(details)) = act_map.get("details") {
act_v = details.get("path");
}
}
if let Some(target) = act_v {
if !subset_match(v, target) {
return false;
}
} else {
return false;
}
}
true
}
(serde_json::Value::Array(exp_arr), serde_json::Value::Array(act_arr)) => {
// Basic check: array sizes and elements must match exactly in order
if exp_arr.len() != act_arr.len() {
return false;
}
for (e, a) in exp_arr.iter().zip(act_arr.iter()) {
if !subset_match(e, a) {
return false;
}
}
true
}
// For primitives, exact match
(e, a) => e == a,
}
}

View File

@ -1,5 +1,6 @@
pub mod pattern;
pub mod sql;
pub mod drop;
use serde::Deserialize;

View File

@ -31,10 +31,7 @@ impl<'a> ValidationContext<'a> {
}
if let Some(family_target) = &self.schema.family {
// The descendants map is keyed by the schema's own $id, not the target string.
if let Some(schema_id) = &self.schema.id
&& let Some(descendants) = self.db.descendants.get(schema_id)
{
if let Some(descendants) = self.db.descendants.get(family_target) {
// Validate against all descendants simulating strict oneOf logic
let mut passed_candidates: Vec<(String, usize, ValidationResult)> = Vec::new();