Compare commits

..

11 Commits

21 changed files with 3208 additions and 2062 deletions

104
add_test.py Normal file
View File

@ -0,0 +1,104 @@
import json
def load_json(path):
    """Load and return the parsed JSON content of *path*.

    Args:
        path: Filesystem path to a JSON file.

    Returns:
        The deserialized Python object (for these fixtures, a list).
    """
    # Explicit encoding avoids the platform-dependent default (e.g. cp1252 on Windows).
    with open(path, 'r', encoding='utf-8') as f:
        return json.load(f)
def save_json(path, data):
    """Serialize *data* to *path* as pretty-printed JSON.

    Args:
        path: Destination file path (overwritten if it exists).
        data: Any JSON-serializable Python object.
    """
    # Explicit encoding keeps output byte-identical across platforms.
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(data, f, indent=2)
def add_invoice(data):
    """Register the 'invoice' type in the fixture's database type list.

    Idempotent: if a type named 'invoice' is already present, nothing
    is modified.
    """
    type_defs = data[0]['database']['types']
    # Bail out early when the type has already been registered.
    if 'invoice' in {entry.get('name') for entry in type_defs}:
        return
    invoice_type = {
        "name": "invoice",
        "hierarchy": ["invoice", "entity"],
        "primary_key": ["id"],
        "field_types": {
            "id": "uuid",
            "number": "text",
            "metadata": "jsonb"
        },
        "schemas": {
            "invoice": {
                "type": "entity",
                "properties": {
                    "id": {"type": "string"},
                    "number": {"type": "string"},
                    "metadata": {
                        "type": "object",
                        "properties": {
                            "internal_note": {"type": "string"},
                            "customer_snapshot": {"type": "entity"},
                            "related_rules": {
                                "type": "array",
                                "items": {"type": "governance_rule"}
                            }
                        }
                    }
                }
            }
        }
    }
    type_defs.append(invoice_type)
def process_merger():
    """Add the invoice type and a deep-JSONB insert test to fixtures/merger.json.

    Idempotent: `add_invoice` already guards against duplicate types, and
    the test is only appended when a test with the same name is not already
    present, so re-running the script does not duplicate fixture entries.
    """
    data = load_json('fixtures/merger.json')
    add_invoice(data)
    tests = data[0]['tests']
    test_name = "Insert invoice with deep jsonb metadata"
    # Only append once; previously every run added another copy of the test.
    if not any(t.get('name') == test_name for t in tests):
        tests.append({
            "name": test_name,
            "schema": "invoice",
            "payload": {
                "number": "INV-1001",
                "metadata": {
                    "internal_note": "Confidential",
                    "customer_snapshot": {
                        "id": "00000000-0000-0000-0000-000000000000",
                        "type": "person",
                        "first_name": "John"
                    },
                    "related_rules": [
                        {
                            "id": "11111111-1111-1111-1111-111111111111"
                        }
                    ]
                }
            },
            "expect": {
                "sql": [
                    [
                        "INSERT INTO agreego.invoice (metadata, number, id) VALUES ($1, $2, gen_random_uuid()) ON CONFLICT (id) DO UPDATE SET metadata = EXCLUDED.metadata, number = EXCLUDED.number RETURNING id, type",
                        {"metadata": {"customer_snapshot": {"first_name": "John", "id": "00000000-0000-0000-0000-000000000000", "type": "person"}, "internal_note": "Confidential", "related_rules": [{"id": "11111111-1111-1111-1111-111111111111"}]}, "number": "INV-1001"}
                    ]
                ]
            }
        })
    save_json('fixtures/merger.json', data)
def process_queryer():
    """Add the invoice type and a JSONB extraction query test to fixtures/queryer.json.

    Idempotent: the test is only appended when a test with the same name is
    not already present, matching the duplicate guard in `add_invoice`.
    """
    data = load_json('fixtures/queryer.json')
    add_invoice(data)
    tests = data[0]['tests']
    test_name = "Query invoice with complex JSONB metadata field extraction"
    # Only append once; previously every run added another copy of the test.
    if not any(t.get('name') == test_name for t in tests):
        tests.append({
            "name": test_name,
            "schema": "invoice",
            "query": {
                "extract": ["id", "number", "metadata"],
                "conditions": []
            },
            "expect": {
                "sql": "SELECT jsonb_build_object('id', t1.id, 'metadata', t1.metadata, 'number', t1.number) FROM agreego.invoice t1 WHERE (t1.id IS NOT NULL)",
                "params": {}
            }
        })
    save_json('fixtures/queryer.json', data)
if __name__ == "__main__":
    # Guarded entry point: importing this module for its helpers no longer
    # mutates the fixture files as a side effect.
    process_merger()
    process_queryer()

View File

@ -1,63 +1,87 @@
import json import json
path = "fixtures/database.json" def load_json(path):
with open(path, 'r') as f:
return json.load(f)
with open(path, "r") as f: def save_json(path, data):
data = json.load(f) with open(path, 'w') as f:
json.dump(data, f, indent=4)
test_case = data[-1] def fix_merger():
data = load_json('fixtures/merger.json')
last_test = data[0]['tests'][-1]
# Check if the last test is our bad one
if "name" in last_test and last_test["name"] == "Insert invoice with deep jsonb metadata":
new_test = {
"description": last_test["name"],
"action": "merge",
"schema_id": last_test["schema"],
"data": last_test["payload"],
"expect": {
"success": True,
"sql": [
[
"INSERT INTO agreego.invoice (",
" \"metadata\",",
" \"number\",",
" entity_id,",
" id,",
" type",
")",
"VALUES (",
" '{",
" \"customer_snapshot\":{",
" \"first_name\":\"John\",",
" \"id\":\"00000000-0000-0000-0000-000000000000\",",
" \"type\":\"person\"",
" },",
" \"internal_note\":\"Confidential\",",
" \"related_rules\":[",
" {",
" \"id\":\"11111111-1111-1111-1111-111111111111\"",
" }",
" ]",
" }',",
" 'INV-1001',",
" NULL,",
" '{{uuid}}',",
" 'invoice'",
")"
]
]
}
}
data[0]['tests'][-1] = new_test
save_json('fixtures/merger.json', data)
test_case["database"]["relations"] = [ def fix_queryer():
{ data = load_json('fixtures/queryer.json')
"id": "r1", last_test = data[0]['tests'][-1]
"type": "relation",
"constraint": "fk_person_email", if "name" in last_test and last_test["name"] == "Query invoice with complex JSONB metadata field extraction":
"source_type": "person", "source_columns": ["email_id"], new_test = {
"destination_type": "email_address", "destination_columns": ["id"], "description": last_test["name"],
"prefix": "email" "action": "query",
}, "schema_id": last_test["schema"],
{ "expect": {
"id": "r2", "success": True,
"type": "relation", "sql": [
"constraint": "fk_person_ad_hoc_bubble", [
"source_type": "person", "source_columns": ["ad_hoc_bubble_id"], "(SELECT jsonb_strip_nulls(jsonb_build_object(",
"destination_type": "some_bubble", "destination_columns": ["id"], " 'id', invoice_1.id,",
"prefix": "ad_hoc_bubble" " 'metadata', invoice_1.metadata,",
}, " 'number', invoice_1.number,",
{ " 'type', invoice_1.type",
"id": "r3", "))",
"type": "relation", "FROM agreego.invoice invoice_1)"
"constraint": "fk_person_generic_bubble", ]
"source_type": "person", "source_columns": ["generic_bubble_id"], ]
"destination_type": "some_bubble", "destination_columns": ["id"], }
"prefix": "generic_bubble" }
}, data[0]['tests'][-1] = new_test
{ save_json('fixtures/queryer.json', data)
"id": "r4",
"type": "relation",
"constraint": "fk_person_extended_relations",
"source_type": "contact", "source_columns": ["source_id"],
"destination_type": "person", "destination_columns": ["id"],
"prefix": "extended_relations"
},
{
"id": "r5",
"type": "relation",
"constraint": "fk_person_standard_relations",
"source_type": "contact", "source_columns": ["source_id_2"],
"destination_type": "person", "destination_columns": ["id"],
"prefix": "standard_relations"
},
{
"id": "r6",
"type": "relation",
"constraint": "fk_contact_target",
"source_type": "contact", "source_columns": ["target_id"],
"destination_type": "email_address", "destination_columns": ["id"],
"prefix": "target"
}
]
with open(path, "w") as f:
json.dump(data, f, indent=2)
fix_merger()
fix_queryer()

View File

@ -664,5 +664,268 @@
} }
} }
] ]
},
{
"description": "JSONB boundaries",
"database": {
"relations": [
{
"id": "33333333-3333-3333-3333-333333333333",
"type": "relation",
"constraint": "fk_invoice_line_invoice",
"source_type": "invoice_line",
"source_columns": [
"invoice_id"
],
"destination_type": "invoice",
"destination_columns": [
"id"
]
}
],
"types": [
{
"name": "entity",
"hierarchy": [
"entity"
],
"grouped_fields": {
"entity": [
"id",
"type",
"archived",
"created_at"
]
},
"field_types": {
"id": "uuid",
"archived": "boolean",
"created_at": "timestamptz",
"type": "text"
},
"schemas": {
"entity": {
"type": "object",
"properties": {
"id": {
"type": "string",
"format": "uuid"
},
"type": {
"type": "string"
},
"archived": {
"type": "boolean"
},
"created_at": {
"type": "string",
"format": "date-time"
},
"created": {
"type": "boolean"
}
}
}
},
"fields": [
"id",
"type",
"archived",
"created_at"
],
"variations": [
"entity",
"invoice",
"invoice_line"
]
},
{
"name": "invoice",
"schemas": {
"invoice": {
"type": "entity",
"properties": {
"total": {
"type": "number"
},
"lines": {
"type": "array",
"items": {
"type": "invoice_line"
}
},
"metadata_line": {
"type": "invoice_line"
},
"metadata_lines": {
"type": "array",
"items": {
"type": "invoice_line"
}
},
"metadata_nested_line": {
"type": "object",
"properties": {
"line": {
"type": "invoice_line"
}
}
},
"metadata_nested_lines": {
"type": "object",
"properties": {
"lines": {
"type": "array",
"items": {
"type": "invoice_line"
}
}
}
}
}
}
},
"hierarchy": [
"invoice",
"entity"
],
"fields": [
"id",
"type",
"total",
"metadata_line",
"metadata_lines",
"metadata_nested_line",
"metadata_nested_lines",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
],
"grouped_fields": {
"invoice": [
"id",
"type",
"total",
"metadata_line",
"metadata_lines",
"metadata_nested_line",
"metadata_nested_lines"
],
"entity": [
"id",
"type",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
]
},
"lookup_fields": [
"id"
],
"historical": true,
"relationship": false,
"field_types": {
"id": "uuid",
"type": "text",
"archived": "boolean",
"total": "numeric",
"metadata_line": "jsonb",
"metadata_lines": "jsonb",
"metadata_nested_line": "jsonb",
"metadata_nested_lines": "jsonb",
"created_at": "timestamptz",
"created_by": "uuid",
"modified_at": "timestamptz",
"modified_by": "uuid"
},
"variations": [
"invoice"
]
},
{
"name": "invoice_line",
"schemas": {
"invoice_line": {
"type": "entity",
"properties": {
"invoice_id": {
"type": "string"
},
"price": {
"type": "number"
}
}
}
},
"hierarchy": [
"invoice_line",
"entity"
],
"fields": [
"id",
"type",
"invoice_id",
"price",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
],
"grouped_fields": {
"invoice_line": [
"id",
"type",
"invoice_id",
"price"
],
"entity": [
"id",
"type",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
]
},
"lookup_fields": [],
"historical": true,
"relationship": false,
"field_types": {
"id": "uuid",
"type": "text",
"archived": "boolean",
"invoice_id": "uuid",
"price": "numeric",
"created_at": "timestamptz",
"created_by": "uuid",
"modified_at": "timestamptz",
"modified_by": "uuid"
},
"variations": [
"invoice_line"
]
}
]
},
"tests": [
{
"description": "Assert no JSONB paths promoted",
"action": "compile",
"expect": {
"success": true,
"schemas": [
"entity",
"invoice",
"invoice_line"
]
}
}
]
} }
] ]

File diff suppressed because it is too large Load Diff

View File

@ -636,5 +636,110 @@
} }
} }
] ]
},
{
"description": "STI Projections (Lacking Kind Discriminator Definitions)",
"database": {
"types": [
{
"name": "widget",
"variations": [
"widget"
],
"schemas": {
"widget": {
"type": "object",
"properties": {
"type": {
"type": "string"
}
}
},
"stock.widget": {
"type": "widget",
"properties": {
"kind": {
"type": "string"
},
"amount": {
"type": "integer"
}
}
},
"projected.widget": {
"type": "widget",
"properties": {
"alias": {
"type": "string"
}
}
}
}
}
],
"schemas": {
"stock_widget_validation": {
"type": "stock.widget"
},
"projected_widget_validation": {
"type": "projected.widget"
}
}
},
"tests": [
{
"description": "stock.widget securely expects kind when configured",
"schema_id": "stock_widget_validation",
"data": {
"type": "widget",
"amount": 5
},
"action": "validate",
"expect": {
"success": false,
"errors": [
{
"code": "MISSING_KIND",
"details": {
"path": ""
}
}
]
}
},
{
"description": "projected.widget seamlessly bypasses kind expectation when excluded from schema",
"schema_id": "projected_widget_validation",
"data": {
"type": "widget",
"alias": "Test Projection"
},
"action": "validate",
"expect": {
"success": true
}
},
{
"description": "projected.widget securely fails if user erroneously provides extra kind property",
"schema_id": "projected_widget_validation",
"data": {
"type": "widget",
"alias": "Test Projection",
"kind": "projected"
},
"action": "validate",
"expect": {
"success": false,
"errors": [
{
"code": "STRICT_PROPERTY_VIOLATION",
"details": {
"path": "kind"
}
}
]
}
}
]
} }
] ]

File diff suppressed because it is too large Load Diff

View File

@ -2,9 +2,9 @@ pub mod edge;
pub mod r#enum; pub mod r#enum;
pub mod executors; pub mod executors;
pub mod formats; pub mod formats;
pub mod object;
pub mod page; pub mod page;
pub mod punc; pub mod punc;
pub mod object;
pub mod relation; pub mod relation;
pub mod schema; pub mod schema;
pub mod r#type; pub mod r#type;
@ -60,10 +60,17 @@ impl Database {
db.enums.insert(def.name.clone(), def); db.enums.insert(def.name.clone(), def);
} }
Err(e) => { Err(e) => {
let name = item
.get("name")
.and_then(|v| v.as_str())
.unwrap_or("unknown");
errors.push(crate::drop::Error { errors.push(crate::drop::Error {
code: "DATABASE_ENUM_PARSE_FAILED".to_string(), code: "DATABASE_ENUM_PARSE_FAILED".to_string(),
message: format!("Failed to parse database enum: {}", e), message: format!("Failed to parse database enum '{}': {}", name, e),
details: crate::drop::ErrorDetails::default(), details: crate::drop::ErrorDetails {
context: Some(serde_json::json!(name)),
..Default::default()
},
}); });
} }
} }
@ -77,10 +84,17 @@ impl Database {
db.types.insert(def.name.clone(), def); db.types.insert(def.name.clone(), def);
} }
Err(e) => { Err(e) => {
let name = item
.get("name")
.and_then(|v| v.as_str())
.unwrap_or("unknown");
errors.push(crate::drop::Error { errors.push(crate::drop::Error {
code: "DATABASE_TYPE_PARSE_FAILED".to_string(), code: "DATABASE_TYPE_PARSE_FAILED".to_string(),
message: format!("Failed to parse database type: {}", e), message: format!("Failed to parse database type '{}': {}", name, e),
details: crate::drop::ErrorDetails::default(), details: crate::drop::ErrorDetails {
context: Some(serde_json::json!(name)),
..Default::default()
},
}); });
} }
} }
@ -98,10 +112,17 @@ impl Database {
} }
} }
Err(e) => { Err(e) => {
let constraint = item
.get("constraint")
.and_then(|v| v.as_str())
.unwrap_or("unknown");
errors.push(crate::drop::Error { errors.push(crate::drop::Error {
code: "DATABASE_RELATION_PARSE_FAILED".to_string(), code: "DATABASE_RELATION_PARSE_FAILED".to_string(),
message: format!("Failed to parse database relation: {}", e), message: format!("Failed to parse database relation '{}': {}", constraint, e),
details: crate::drop::ErrorDetails::default(), details: crate::drop::ErrorDetails {
context: Some(serde_json::json!(constraint)),
..Default::default()
},
}); });
} }
} }
@ -115,10 +136,17 @@ impl Database {
db.puncs.insert(def.name.clone(), def); db.puncs.insert(def.name.clone(), def);
} }
Err(e) => { Err(e) => {
let name = item
.get("name")
.and_then(|v| v.as_str())
.unwrap_or("unknown");
errors.push(crate::drop::Error { errors.push(crate::drop::Error {
code: "DATABASE_PUNC_PARSE_FAILED".to_string(), code: "DATABASE_PUNC_PARSE_FAILED".to_string(),
message: format!("Failed to parse database punc: {}", e), message: format!("Failed to parse database punc '{}': {}", name, e),
details: crate::drop::ErrorDetails::default(), details: crate::drop::ErrorDetails {
context: Some(serde_json::json!(name)),
..Default::default()
},
}); });
} }
} }
@ -135,7 +163,10 @@ impl Database {
errors.push(crate::drop::Error { errors.push(crate::drop::Error {
code: "DATABASE_SCHEMA_PARSE_FAILED".to_string(), code: "DATABASE_SCHEMA_PARSE_FAILED".to_string(),
message: format!("Failed to parse database schema key '{}': {}", key, e), message: format!("Failed to parse database schema key '{}': {}", key, e),
details: crate::drop::ErrorDetails::default(), details: crate::drop::ErrorDetails {
context: Some(serde_json::json!(key)),
..Default::default()
},
}); });
} }
} }
@ -180,7 +211,13 @@ impl Database {
pub fn compile(&mut self, errors: &mut Vec<crate::drop::Error>) { pub fn compile(&mut self, errors: &mut Vec<crate::drop::Error>) {
let mut harvested = Vec::new(); let mut harvested = Vec::new();
for (id, schema_arc) in &self.schemas { for (id, schema_arc) in &self.schemas {
crate::database::schema::Schema::collect_schemas(schema_arc, id, id.clone(), &mut harvested, errors); crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut harvested,
errors,
);
} }
for (id, schema_arc) in harvested { for (id, schema_arc) in harvested {
self.schemas.insert(id, schema_arc); self.schemas.insert(id, schema_arc);
@ -189,11 +226,12 @@ impl Database {
self.collect_schemas(errors); self.collect_schemas(errors);
// Mathematically evaluate all property inheritances, formats, schemas, and foreign key edges topographically over OnceLocks // Mathematically evaluate all property inheritances, formats, schemas, and foreign key edges topographically over OnceLocks
let mut visited = std::collections::HashSet::new();
for (id, schema_arc) in &self.schemas { for (id, schema_arc) in &self.schemas {
// First compile pass initializes exact structural root_id mapping to resolve DB constraints // First compile pass initializes exact structural root_id mapping to resolve DB constraints
let root_id = id.split('/').next().unwrap_or(id); let root_id = id.split('/').next().unwrap_or(id);
schema_arc.as_ref().compile(self, root_id, id.clone(), &mut visited, errors); schema_arc
.as_ref()
.compile(self, root_id, id.clone(), errors);
} }
} }
@ -205,19 +243,37 @@ impl Database {
for type_def in self.types.values() { for type_def in self.types.values() {
for (id, schema_arc) in &type_def.schemas { for (id, schema_arc) in &type_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc))); to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(schema_arc, id, id.clone(), &mut to_insert, errors); crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors,
);
} }
} }
for punc_def in self.puncs.values() { for punc_def in self.puncs.values() {
for (id, schema_arc) in &punc_def.schemas { for (id, schema_arc) in &punc_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc))); to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(schema_arc, id, id.clone(), &mut to_insert, errors); crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors,
);
} }
} }
for enum_def in self.enums.values() { for enum_def in self.enums.values() {
for (id, schema_arc) in &enum_def.schemas { for (id, schema_arc) in &enum_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc))); to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(schema_arc, id, id.clone(), &mut to_insert, errors); crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors,
);
} }
} }
@ -257,10 +313,10 @@ impl Database {
all_rels.sort_by(|a, b| a.constraint.cmp(&b.constraint)); all_rels.sort_by(|a, b| a.constraint.cmp(&b.constraint));
for rel in all_rels { for rel in all_rels {
let mut is_forward = let mut is_forward = p_def.hierarchy.contains(&rel.source_type)
p_def.hierarchy.contains(&rel.source_type) && c_def.hierarchy.contains(&rel.destination_type); && c_def.hierarchy.contains(&rel.destination_type);
let is_reverse = let is_reverse = p_def.hierarchy.contains(&rel.destination_type)
p_def.hierarchy.contains(&rel.destination_type) && c_def.hierarchy.contains(&rel.source_type); && c_def.hierarchy.contains(&rel.source_type);
// Structural Cardinality Filtration: // Structural Cardinality Filtration:
// If the schema requires a collection (Array), it is mathematically impossible for a pure // If the schema requires a collection (Array), it is mathematically impossible for a pure
@ -282,7 +338,7 @@ impl Database {
// Abort relation discovery early if no hierarchical inheritance match was found // Abort relation discovery early if no hierarchical inheritance match was found
if matching_rels.is_empty() { if matching_rels.is_empty() {
let mut details = crate::drop::ErrorDetails { let mut details = crate::drop::ErrorDetails {
path: path.to_string(), path: Some(path.to_string()),
..Default::default() ..Default::default()
}; };
if let Some(sid) = schema_id { if let Some(sid) = schema_id {
@ -381,7 +437,7 @@ impl Database {
// and forces a clean structural error for the architect. // and forces a clean structural error for the architect.
if !resolved { if !resolved {
let mut details = crate::drop::ErrorDetails { let mut details = crate::drop::ErrorDetails {
path: path.to_string(), path: Some(path.to_string()),
context: serde_json::to_value(&matching_rels).ok(), context: serde_json::to_value(&matching_rels).ok(),
cause: Some("Multiple conflicting constraints found matching prefixes".to_string()), cause: Some("Multiple conflicting constraints found matching prefixes".to_string()),
..Default::default() ..Default::default()

View File

@ -28,21 +28,12 @@ impl Schema {
db: &crate::database::Database, db: &crate::database::Database,
root_id: &str, root_id: &str,
path: String, path: String,
visited: &mut std::collections::HashSet<String>,
errors: &mut Vec<crate::drop::Error>, errors: &mut Vec<crate::drop::Error>,
) { ) {
if self.obj.compiled_properties.get().is_some() { if self.obj.compiled_properties.get().is_some() {
return; return;
} }
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
if !crate::database::object::is_primitive_type(t) {
if !visited.insert(t.clone()) {
return; // Break cyclical resolution
}
}
}
if let Some(format_str) = &self.obj.format { if let Some(format_str) = &self.obj.format {
if let Some(fmt) = crate::database::formats::FORMATS.get(format_str.as_str()) { if let Some(fmt) = crate::database::formats::FORMATS.get(format_str.as_str()) {
let _ = self let _ = self
@ -79,7 +70,7 @@ impl Schema {
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ { if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
if !crate::database::object::is_primitive_type(t) { if !crate::database::object::is_primitive_type(t) {
if let Some(parent) = db.schemas.get(t) { if let Some(parent) = db.schemas.get(t) {
parent.as_ref().compile(db, t, t.clone(), visited, errors); parent.as_ref().compile(db, t, t.clone(), errors);
if let Some(p_props) = parent.obj.compiled_properties.get() { if let Some(p_props) = parent.obj.compiled_properties.get() {
props.extend(p_props.clone()); props.extend(p_props.clone());
} }
@ -103,7 +94,7 @@ impl Schema {
types types
), ),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: path.clone(), path: Some(path.clone()),
schema: Some(root_id.to_string()), schema: Some(root_id.to_string()),
..Default::default() ..Default::default()
} }
@ -113,7 +104,7 @@ impl Schema {
for t in types { for t in types {
if !crate::database::object::is_primitive_type(t) { if !crate::database::object::is_primitive_type(t) {
if let Some(parent) = db.schemas.get(t) { if let Some(parent) = db.schemas.get(t) {
parent.as_ref().compile(db, t, t.clone(), visited, errors); parent.as_ref().compile(db, t, t.clone(), errors);
} }
} }
} }
@ -133,21 +124,21 @@ impl Schema {
let _ = self.obj.compiled_property_names.set(names); let _ = self.obj.compiled_property_names.set(names);
// 4. Compute Edges natively // 4. Compute Edges natively
let schema_edges = self.compile_edges(db, root_id, &path, visited, &props, errors); let schema_edges = self.compile_edges(db, root_id, &path, &props, errors);
let _ = self.obj.compiled_edges.set(schema_edges); let _ = self.obj.compiled_edges.set(schema_edges);
// 5. Build our inline children properties recursively NOW! (Depth-first search) // 5. Build our inline children properties recursively NOW! (Depth-first search)
if let Some(local_props) = &self.obj.properties { if let Some(local_props) = &self.obj.properties {
for (k, child) in local_props { for (k, child) in local_props {
child.compile(db, root_id, format!("{}/{}", path, k), visited, errors); child.compile(db, root_id, format!("{}/{}", path, k), errors);
} }
} }
if let Some(items) = &self.obj.items { if let Some(items) = &self.obj.items {
items.compile(db, root_id, format!("{}/items", path), visited, errors); items.compile(db, root_id, format!("{}/items", path), errors);
} }
if let Some(pattern_props) = &self.obj.pattern_properties { if let Some(pattern_props) = &self.obj.pattern_properties {
for (k, child) in pattern_props { for (k, child) in pattern_props {
child.compile(db, root_id, format!("{}/{}", path, k), visited, errors); child.compile(db, root_id, format!("{}/{}", path, k), errors);
} }
} }
if let Some(additional_props) = &self.obj.additional_properties { if let Some(additional_props) = &self.obj.additional_properties {
@ -155,47 +146,65 @@ impl Schema {
db, db,
root_id, root_id,
format!("{}/additionalProperties", path), format!("{}/additionalProperties", path),
visited,
errors, errors,
); );
} }
if let Some(one_of) = &self.obj.one_of { if let Some(one_of) = &self.obj.one_of {
for (i, child) in one_of.iter().enumerate() { for (i, child) in one_of.iter().enumerate() {
child.compile(db, root_id, format!("{}/oneOf/{}", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/oneOf/{}", path, i),
errors,
);
} }
} }
if let Some(arr) = &self.obj.prefix_items { if let Some(arr) = &self.obj.prefix_items {
for (i, child) in arr.iter().enumerate() { for (i, child) in arr.iter().enumerate() {
child.compile(db, root_id, format!("{}/prefixItems/{}", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/prefixItems/{}", path, i),
errors,
);
} }
} }
if let Some(child) = &self.obj.not { if let Some(child) = &self.obj.not {
child.compile(db, root_id, format!("{}/not", path), visited, errors); child.compile(db, root_id, format!("{}/not", path), errors);
} }
if let Some(child) = &self.obj.contains { if let Some(child) = &self.obj.contains {
child.compile(db, root_id, format!("{}/contains", path), visited, errors); child.compile(db, root_id, format!("{}/contains", path), errors);
} }
if let Some(cases) = &self.obj.cases { if let Some(cases) = &self.obj.cases {
for (i, c) in cases.iter().enumerate() { for (i, c) in cases.iter().enumerate() {
if let Some(child) = &c.when { if let Some(child) = &c.when {
child.compile(db, root_id, format!("{}/cases/{}/when", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/cases/{}/when", path, i),
errors,
);
} }
if let Some(child) = &c.then { if let Some(child) = &c.then {
child.compile(db, root_id, format!("{}/cases/{}/then", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/cases/{}/then", path, i),
errors,
);
} }
if let Some(child) = &c.else_ { if let Some(child) = &c.else_ {
child.compile(db, root_id, format!("{}/cases/{}/else", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/cases/{}/else", path, i),
errors,
);
} }
} }
} }
self.compile_polymorphism(db, root_id, &path, errors); self.compile_polymorphism(db, root_id, &path, errors);
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
if !crate::database::object::is_primitive_type(t) {
visited.remove(t);
}
}
} }
/// Dynamically infers and compiles all structural database relationships between this Schema /// Dynamically infers and compiles all structural database relationships between this Schema
@ -207,7 +216,6 @@ impl Schema {
db: &crate::database::Database, db: &crate::database::Database,
root_id: &str, root_id: &str,
path: &str, path: &str,
visited: &mut std::collections::HashSet<String>,
props: &std::collections::BTreeMap<String, std::sync::Arc<Schema>>, props: &std::collections::BTreeMap<String, std::sync::Arc<Schema>>,
errors: &mut Vec<crate::drop::Error>, errors: &mut Vec<crate::drop::Error>,
) -> std::collections::BTreeMap<String, crate::database::edge::Edge> { ) -> std::collections::BTreeMap<String, crate::database::edge::Edge> {
@ -220,7 +228,7 @@ impl Schema {
if let Some(family) = &self.obj.family { if let Some(family) = &self.obj.family {
// 1. Explicit horizontal routing // 1. Explicit horizontal routing
parent_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string()); parent_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string());
} else if !path.contains('/') { } else if path == root_id {
// 2. Root nodes trust their exact registry footprint // 2. Root nodes trust their exact registry footprint
let base_type_name = path.split('.').next_back().unwrap_or(path).to_string(); let base_type_name = path.split('.').next_back().unwrap_or(path).to_string();
if db.types.contains_key(&base_type_name) { if db.types.contains_key(&base_type_name) {
@ -233,21 +241,9 @@ impl Schema {
} }
} }
if parent_type_name.is_none() {
// 4. Absolute fallback for completely anonymous inline structures
let base_type_name = root_id
.split('.')
.next_back()
.unwrap_or(root_id)
.to_string();
if db.types.contains_key(&base_type_name) {
parent_type_name = Some(base_type_name);
}
}
if let Some(p_type) = parent_type_name { if let Some(p_type) = parent_type_name {
// Proceed only if the resolved table physically exists within the Postgres Type hierarchy // Proceed only if the resolved table physically exists within the Postgres Type hierarchy
if db.types.contains_key(&p_type) { if let Some(type_def) = db.types.get(&p_type) {
// Iterate over all discovered schema boundaries mapped inside the object // Iterate over all discovered schema boundaries mapped inside the object
for (prop_name, prop_schema) in props { for (prop_name, prop_schema) in props {
let mut child_type_name = None; let mut child_type_name = None;
@ -287,10 +283,26 @@ impl Schema {
} }
if let Some(c_type) = child_type_name { if let Some(c_type) = child_type_name {
// Skip edge compilation for JSONB columns — they store data inline, not relationally.
// The physical column type from field_types is the single source of truth.
if let Some(ft) = type_def.field_types.as_ref()
.and_then(|v| v.get(prop_name.as_str()))
.and_then(|v| v.as_str())
{
if ft == "jsonb" {
continue;
}
}
if db.types.contains_key(&c_type) { if db.types.contains_key(&c_type) {
// Ensure the child Schema's AST has accurately compiled its own physical property keys so we can // Ensure the child Schema's AST has accurately compiled its own physical property keys so we can
// inject them securely for Many-to-Many Twin Deduction disambiguation matching. // inject them securely for Many-to-Many Twin Deduction disambiguation matching.
target_schema.compile(db, root_id, format!("{}/{}", path, prop_name), visited, errors); target_schema.compile(
db,
root_id,
format!("{}/{}", path, prop_name),
errors,
);
if let Some(compiled_target_props) = target_schema.obj.compiled_properties.get() { if let Some(compiled_target_props) = target_schema.obj.compiled_properties.get() {
let keys_for_ambiguity: Vec<String> = let keys_for_ambiguity: Vec<String> =
compiled_target_props.keys().cloned().collect(); compiled_target_props.keys().cloned().collect();
@ -379,7 +391,7 @@ impl Schema {
for c in one_of { for c in one_of {
let mut child_id = String::new(); let mut child_id = String::new();
let mut child_is_primitive = false; let mut child_is_primitive = false;
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &c.obj.type_ { if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &c.obj.type_ {
if crate::database::object::is_primitive_type(t) { if crate::database::object::is_primitive_type(t) {
child_is_primitive = true; child_is_primitive = true;
@ -389,77 +401,77 @@ impl Schema {
structural_types.insert("object".to_string()); structural_types.insert("object".to_string());
} }
} else { } else {
disjoint_base = false; disjoint_base = false;
} }
if !child_is_primitive { if !child_is_primitive {
if let Some(t_val) = c.obj.get_discriminator_value("type", &child_id) { if let Some(t_val) = c.obj.get_discriminator_value("type", &child_id) {
type_vals.insert(t_val); type_vals.insert(t_val);
} }
if let Some(k_val) = c.obj.get_discriminator_value("kind", &child_id) { if let Some(k_val) = c.obj.get_discriminator_value("kind", &child_id) {
kind_vals.insert(k_val); kind_vals.insert(k_val);
} }
} }
} }
if disjoint_base && structural_types.len() == one_of.len() { if disjoint_base && structural_types.len() == one_of.len() {
strategy = "".to_string(); strategy = "".to_string();
for (i, c) in one_of.iter().enumerate() { for (i, c) in one_of.iter().enumerate() {
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &c.obj.type_ { if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &c.obj.type_ {
if crate::database::object::is_primitive_type(t) { if crate::database::object::is_primitive_type(t) {
options.insert(t.clone(), (Some(i), None)); options.insert(t.clone(), (Some(i), None));
} else { } else {
options.insert("object".to_string(), (Some(i), None)); options.insert("object".to_string(), (Some(i), None));
} }
} }
} }
} else { } else {
strategy = if type_vals.len() > 1 && type_vals.len() == one_of.len() { strategy = if type_vals.len() > 1 && type_vals.len() == one_of.len() {
"type".to_string() "type".to_string()
} else if kind_vals.len() > 1 && kind_vals.len() == one_of.len() { } else if kind_vals.len() > 1 && kind_vals.len() == one_of.len() {
"kind".to_string() "kind".to_string()
} else { } else {
"".to_string() "".to_string()
}; };
if strategy.is_empty() { if strategy.is_empty() {
errors.push(crate::drop::Error { errors.push(crate::drop::Error {
code: "AMBIGUOUS_POLYMORPHISM".to_string(), code: "AMBIGUOUS_POLYMORPHISM".to_string(),
message: format!("oneOf boundaries must map mathematically unique 'type' or 'kind' discriminators, or strictly contain disjoint primitive types."), message: format!("oneOf boundaries must map mathematically unique 'type' or 'kind' discriminators, or strictly contain disjoint primitive types."),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: path.to_string(), path: Some(path.to_string()),
schema: Some(root_id.to_string()), schema: Some(root_id.to_string()),
..Default::default() ..Default::default()
} }
}); });
return; return;
}
for (i, c) in one_of.iter().enumerate() {
let mut child_id = String::new();
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &c.obj.type_ {
if !crate::database::object::is_primitive_type(t) {
child_id = t.clone();
}
} }
for (i, c) in one_of.iter().enumerate() { if let Some(val) = c.obj.get_discriminator_value(&strategy, &child_id) {
let mut child_id = String::new(); if options.contains_key(&val) {
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &c.obj.type_ { errors.push(crate::drop::Error {
if !crate::database::object::is_primitive_type(t) {
child_id = t.clone();
}
}
if let Some(val) = c.obj.get_discriminator_value(&strategy, &child_id) {
if options.contains_key(&val) {
errors.push(crate::drop::Error {
code: "POLYMORPHIC_COLLISION".to_string(), code: "POLYMORPHIC_COLLISION".to_string(),
message: format!("Polymorphic boundary defines multiple candidates mapped to the identical discriminator value '{}'.", val), message: format!("Polymorphic boundary defines multiple candidates mapped to the identical discriminator value '{}'.", val),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: path.to_string(), path: Some(path.to_string()),
schema: Some(root_id.to_string()), schema: Some(root_id.to_string()),
..Default::default() ..Default::default()
} }
}); });
continue; continue;
}
options.insert(val, (Some(i), None));
} }
options.insert(val, (Some(i), None));
} }
}
} }
} else { } else {
return; return;
@ -491,7 +503,7 @@ impl Schema {
c, field_name, id c, field_name, id
), ),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: path.to_string(), path: Some(path.to_string()),
schema: Some(root_id.to_string()), schema: Some(root_id.to_string()),
..Default::default() ..Default::default()
}, },
@ -521,7 +533,7 @@ impl Schema {
} }
} else if !crate::database::object::is_primitive_type(t) { } else if !crate::database::object::is_primitive_type(t) {
Self::validate_identifier(t, "type", root_id, &path, errors); Self::validate_identifier(t, "type", root_id, &path, errors);
// Is this an explicit inline ad-hoc composition? // Is this an explicit inline ad-hoc composition?
if schema_arc.obj.properties.is_some() || schema_arc.obj.cases.is_some() { if schema_arc.obj.properties.is_some() || schema_arc.obj.cases.is_some() {
to_insert.push((path.clone(), Arc::clone(schema_arc))); to_insert.push((path.clone(), Arc::clone(schema_arc)));
@ -559,7 +571,13 @@ impl Schema {
let mut map_arr = |arr: &Vec<Arc<Schema>>, sub: &str| { let mut map_arr = |arr: &Vec<Arc<Schema>>, sub: &str| {
for (i, v) in arr.iter().enumerate() { for (i, v) in arr.iter().enumerate() {
Self::collect_schemas(v, root_id, format!("{}/{}/{}", path, sub, i), to_insert, errors); Self::collect_schemas(
v,
root_id,
format!("{}/{}/{}", path, sub, i),
to_insert,
errors,
);
} }
}; };
@ -574,7 +592,7 @@ impl Schema {
let mut map_opt = |opt: &Option<Arc<Schema>>, pass_path: bool, sub: &str| { let mut map_opt = |opt: &Option<Arc<Schema>>, pass_path: bool, sub: &str| {
if let Some(v) = opt { if let Some(v) = opt {
if pass_path { if pass_path {
// Arrays explicitly push their wrapper natively. // Arrays explicitly push their wrapper natively.
// 'items' becomes a transparent conduit, bypassing self-promotion and skipping the '/items' suffix. // 'items' becomes a transparent conduit, bypassing self-promotion and skipping the '/items' suffix.
Self::collect_child_schemas(v, root_id, path.clone(), to_insert, errors); Self::collect_child_schemas(v, root_id, path.clone(), to_insert, errors);
} else { } else {

View File

@ -66,7 +66,8 @@ pub struct Error {
#[derive(Debug, Serialize, Deserialize, Clone, Default)] #[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct ErrorDetails { pub struct ErrorDetails {
pub path: String, #[serde(skip_serializing_if = "Option::is_none")]
pub path: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub cause: Option<String>, pub cause: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]

View File

@ -30,7 +30,7 @@ fn jspg_failure() -> JsonB {
code: "ENGINE_NOT_INITIALIZED".to_string(), code: "ENGINE_NOT_INITIALIZED".to_string(),
message: "JSPG extension has not been initialized via jspg_setup".to_string(), message: "JSPG extension has not been initialized via jspg_setup".to_string(),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: None, cause: None,
context: None, context: None,
schema: None, schema: None,

View File

@ -31,7 +31,7 @@ impl Merger {
code: "MERGE_FAILED".to_string(), code: "MERGE_FAILED".to_string(),
message: format!("Unknown schema_id: {}", schema_id), message: format!("Unknown schema_id: {}", schema_id),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: None, cause: None,
context: Some(data), context: Some(data),
schema: None, schema: None,
@ -76,7 +76,7 @@ impl Merger {
code: final_code, code: final_code,
message: final_message, message: final_message,
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: final_cause, cause: final_cause,
context: None, context: None,
schema: None, schema: None,
@ -92,7 +92,7 @@ impl Merger {
code: "MERGE_FAILED".to_string(), code: "MERGE_FAILED".to_string(),
message: format!("Executor Error in pre-ordered notify: {:?}", e), message: format!("Executor Error in pre-ordered notify: {:?}", e),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: None, cause: None,
context: None, context: None,
schema: None, schema: None,

View File

@ -347,22 +347,23 @@ impl<'a> Compiler<'a> {
child_node.schema = Arc::clone(target_schema); child_node.schema = Arc::clone(target_schema);
child_node.is_polymorphic_branch = true; child_node.is_polymorphic_branch = true;
let val_sql = if disc == "kind" && node.parent_type.is_some() && node.parent_type_aliases.is_some() { let val_sql =
if disc == "kind" && node.parent_type.is_some() && node.parent_type_aliases.is_some() {
let aliases_arc = node.parent_type_aliases.as_ref().unwrap(); let aliases_arc = node.parent_type_aliases.as_ref().unwrap();
let aliases = aliases_arc.as_ref(); let aliases = aliases_arc.as_ref();
let p_type = node.parent_type.unwrap(); let p_type = node.parent_type.unwrap();
let select_args = self.compile_select_clause(p_type, aliases, child_node.clone())?; let select_args = self.compile_select_clause(p_type, aliases, child_node.clone())?;
if select_args.is_empty() { if select_args.is_empty() {
"jsonb_build_object()".to_string() "jsonb_build_object()".to_string()
} else { } else {
format!("jsonb_build_object({})", select_args.join(", ")) format!("jsonb_build_object({})", select_args.join(", "))
} }
} else { } else {
let (sql, _) = self.compile_node(child_node)?; let (sql, _) = self.compile_node(child_node)?;
sql sql
}; };
case_statements.push(format!( case_statements.push(format!(
"WHEN {}.{} = '{}' THEN ({})", "WHEN {}.{} = '{}' THEN ({})",
@ -473,6 +474,15 @@ impl<'a> Compiler<'a> {
} }
} }
if let Some(ft) = r#type.field_types.as_ref().and_then(|v| v.as_object()) {
if let Some(pg_type) = ft.get(prop_key).and_then(|v| v.as_str()) {
if pg_type == "json" || pg_type == "jsonb" {
select_args.push(format!("'{}', {}.{}", prop_key, owner_alias, prop_key));
continue;
}
}
}
let child_node = Node { let child_node = Node {
schema: std::sync::Arc::clone(prop_schema), schema: std::sync::Arc::clone(prop_schema),
parent_alias: owner_alias.clone(), parent_alias: owner_alias.clone(),

View File

@ -33,7 +33,7 @@ impl Queryer {
code: "FILTER_PARSE_FAILED".to_string(), code: "FILTER_PARSE_FAILED".to_string(),
message: msg.clone(), message: msg.clone(),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), // filters apply to the root query path: None, // filters apply to the root query
cause: Some(msg), cause: Some(msg),
context: filters.cloned(), context: filters.cloned(),
schema: Some(schema_id.to_string()), schema: Some(schema_id.to_string()),
@ -138,7 +138,7 @@ impl Queryer {
code: "QUERY_COMPILATION_FAILED".to_string(), code: "QUERY_COMPILATION_FAILED".to_string(),
message: e.clone(), message: e.clone(),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: Some(e), cause: Some(e),
context: None, context: None,
schema: Some(schema_id.to_string()), schema: Some(schema_id.to_string()),
@ -165,7 +165,7 @@ impl Queryer {
code: "QUERY_FAILED".to_string(), code: "QUERY_FAILED".to_string(),
message: format!("Expected array from generic query, got: {:?}", other), message: format!("Expected array from generic query, got: {:?}", other),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: Some(format!("Expected array, got {}", other)), cause: Some(format!("Expected array, got {}", other)),
context: Some(serde_json::json!([sql])), context: Some(serde_json::json!([sql])),
schema: Some(schema_id.to_string()), schema: Some(schema_id.to_string()),
@ -175,7 +175,7 @@ impl Queryer {
code: "QUERY_FAILED".to_string(), code: "QUERY_FAILED".to_string(),
message: format!("SPI error in queryer: {}", e), message: format!("SPI error in queryer: {}", e),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "".to_string(), path: None,
cause: Some(format!("SPI error in queryer: {}", e)), cause: Some(format!("SPI error in queryer: {}", e)),
context: Some(serde_json::json!([sql])), context: Some(serde_json::json!([sql])),
schema: Some(schema_id.to_string()), schema: Some(schema_id.to_string()),

View File

@ -1451,6 +1451,12 @@ fn test_queryer_0_12() {
crate::tests::runner::run_test_case(&path, 0, 12).unwrap(); crate::tests::runner::run_test_case(&path, 0, 12).unwrap();
} }
#[test]
fn test_queryer_0_13() {
let path = format!("{}/fixtures/queryer.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 0, 13).unwrap();
}
#[test] #[test]
fn test_polymorphism_0_0() { fn test_polymorphism_0_0() {
let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR")); let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR"));
@ -1553,6 +1559,24 @@ fn test_polymorphism_4_1() {
crate::tests::runner::run_test_case(&path, 4, 1).unwrap(); crate::tests::runner::run_test_case(&path, 4, 1).unwrap();
} }
#[test]
fn test_polymorphism_5_0() {
let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 5, 0).unwrap();
}
#[test]
fn test_polymorphism_5_1() {
let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 5, 1).unwrap();
}
#[test]
fn test_polymorphism_5_2() {
let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 5, 2).unwrap();
}
#[test] #[test]
fn test_not_0_0() { fn test_not_0_0() {
let path = format!("{}/fixtures/not.json", env!("CARGO_MANIFEST_DIR")); let path = format!("{}/fixtures/not.json", env!("CARGO_MANIFEST_DIR"));
@ -3689,6 +3713,12 @@ fn test_database_5_0() {
crate::tests::runner::run_test_case(&path, 5, 0).unwrap(); crate::tests::runner::run_test_case(&path, 5, 0).unwrap();
} }
#[test]
fn test_database_6_0() {
let path = format!("{}/fixtures/database.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 6, 0).unwrap();
}
#[test] #[test]
fn test_cases_0_0() { fn test_cases_0_0() {
let path = format!("{}/fixtures/cases.json", env!("CARGO_MANIFEST_DIR")); let path = format!("{}/fixtures/cases.json", env!("CARGO_MANIFEST_DIR"));
@ -8086,3 +8116,9 @@ fn test_merger_0_12() {
let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR")); let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 0, 12).unwrap(); crate::tests::runner::run_test_case(&path, 0, 12).unwrap();
} }
#[test]
fn test_merger_0_13() {
let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 0, 13).unwrap();
}

View File

@ -18,7 +18,7 @@ fn test_library_api() {
"errors": [{ "errors": [{
"code": "ENGINE_NOT_INITIALIZED", "code": "ENGINE_NOT_INITIALIZED",
"message": "JSPG extension has not been initialized via jspg_setup", "message": "JSPG extension has not been initialized via jspg_setup",
"details": { "path": "" } "details": {}
}] }]
}) })
); );

View File

@ -86,7 +86,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
let error_messages: Vec<String> = drop let error_messages: Vec<String> = drop
.errors .errors
.iter() .iter()
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message)) .map(|e| format!("Error {} at path {}: {}", e.code, e.details.path.as_deref().unwrap_or("/"), e.message))
.collect(); .collect();
failures.push(format!( failures.push(format!(
"[{}] Cannot run '{}' test '{}': System Setup Compilation structurally failed:\n{}", "[{}] Cannot run '{}' test '{}': System Setup Compilation structurally failed:\n{}",

View File

@ -68,7 +68,7 @@ impl Validator {
code: e.code, code: e.code,
message: e.message, message: e.message,
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: e.path, path: Some(e.path),
cause: None, cause: None,
context: None, context: None,
schema: None, schema: None,
@ -82,7 +82,7 @@ impl Validator {
code: e.code, code: e.code,
message: e.message, message: e.message,
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: e.path, path: Some(e.path),
cause: None, cause: None,
context: None, context: None,
schema: None, schema: None,
@ -94,7 +94,7 @@ impl Validator {
code: "SCHEMA_NOT_FOUND".to_string(), code: "SCHEMA_NOT_FOUND".to_string(),
message: format!("Schema {} not found", schema_id), message: format!("Schema {} not found", schema_id),
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: "/".to_string(), path: Some("/".to_string()),
cause: None, cause: None,
context: None, context: None,
schema: None, schema: None,

View File

@ -24,9 +24,6 @@ impl<'a> ValidationContext<'a> {
if let Some(obj) = self.instance.as_object() { if let Some(obj) = self.instance.as_object() {
for key in obj.keys() { for key in obj.keys() {
if key == "type" || key == "kind" {
continue; // Reserved keywords implicitly allowed
}
if !result.evaluated_keys.contains(key) && !self.overrides.contains(key) { if !result.evaluated_keys.contains(key) && !self.overrides.contains(key) {
result.errors.push(ValidationError { result.errors.push(ValidationError {
code: "STRICT_PROPERTY_VIOLATION".to_string(), code: "STRICT_PROPERTY_VIOLATION".to_string(),

View File

@ -54,14 +54,19 @@ impl<'a> ValidationContext<'a> {
// If the target mathematically declares a horizontal structural STI variation natively // If the target mathematically declares a horizontal structural STI variation natively
if schema_identifier_str.contains('.') { if schema_identifier_str.contains('.') {
if obj.get("kind").is_none() { let requires_kind = self.schema.compiled_properties.get()
result.errors.push(ValidationError { .map_or(false, |p| p.contains_key("kind"));
code: "MISSING_KIND".to_string(),
message: "Schema mechanically requires horizontal kind discrimination".to_string(), if requires_kind {
path: self.path.clone(), if obj.get("kind").is_none() {
}); result.errors.push(ValidationError {
} else { code: "MISSING_KIND".to_string(),
result.evaluated_keys.insert("kind".to_string()); message: "Schema mechanically requires horizontal kind discrimination".to_string(),
path: self.path.clone(),
});
} else {
result.evaluated_keys.insert("kind".to_string());
}
} }
} }
} else { } else {

View File

@ -1 +1 @@
1.0.114 1.0.120

24
wipe_test.py Normal file
View File

@ -0,0 +1,24 @@
import json
def load_json(path):
    """Read the file at *path* and return its parsed JSON content."""
    with open(path, 'r') as handle:
        raw = handle.read()
    return json.loads(raw)
def save_json(path, data):
    """Serialize *data* as 4-space-indented JSON and write it to *path*."""
    text = json.dumps(data, indent=4)
    with open(path, 'w') as handle:
        handle.write(text)
def fix_merger(path='fixtures/merger.json'):
    """Blank out the expected SQL of the last merger test case.

    Loads the fixture at *path*, empties the final test's
    ``expect.sql`` list, and writes the fixture back in place.

    Args:
        path: Fixture file to rewrite; defaults to the merger fixture
            so existing callers are unaffected.
    """
    data = load_json(path)
    # Only the most recently appended test case is targeted.
    last_test = data[0]['tests'][-1]
    last_test["expect"]["sql"] = []
    save_json(path, data)
def fix_queryer(path='fixtures/queryer.json'):
    """Blank out the expected SQL of the last queryer test case.

    Loads the fixture at *path*, empties the final test's
    ``expect.sql`` list, and writes the fixture back in place.

    Args:
        path: Fixture file to rewrite; defaults to the queryer fixture
            so existing callers are unaffected.
    """
    data = load_json(path)
    # Only the most recently appended test case is targeted.
    last_test = data[0]['tests'][-1]
    last_test["expect"]["sql"] = []
    save_json(path, data)
if __name__ == "__main__":
    # Rewrite both fixtures only when executed as a script,
    # not as an import side effect.
    fix_merger()
    fix_queryer()