Compare commits

..

3 Commits

Author SHA1 Message Date
3e7fafd736 version: 1.0.116 2026-04-14 13:23:08 -04:00
8984acaa5f added jsonb field tests to queryer and merger and fixed a bug there 2026-04-14 13:23:01 -04:00
24adf3ffc6 checkpoint 2026-04-14 13:06:53 -04:00
9 changed files with 2577 additions and 2049 deletions

104
add_test.py Normal file
View File

@@ -0,0 +1,104 @@
import json
def load_json(path):
    """Read the file at `path` and return its parsed JSON value."""
    with open(path, 'r') as handle:
        return json.load(handle)
def save_json(path, data):
    """Write `data` to `path` as pretty-printed JSON (2-space indent)."""
    with open(path, 'w') as handle:
        json.dump(data, handle, indent=2)
def add_invoice(data):
    """Register the 'invoice' type in the fixture's database type table.

    Idempotent: if a type named 'invoice' is already present, the fixture
    is left untouched.
    """
    types = data[0]['database']['types']
    for existing in types:
        if existing.get('name') == 'invoice':
            # Already registered on a previous run; nothing to do.
            return
    invoice_type = {
        "name": "invoice",
        "hierarchy": ["invoice", "entity"],
        "primary_key": ["id"],
        "field_types": {
            "id": "uuid",
            "number": "text",
            "metadata": "jsonb"
        },
        "schemas": {
            "invoice": {
                "type": "entity",
                "properties": {
                    "id": { "type": "string" },
                    "number": { "type": "string" },
                    "metadata": {
                        "type": "object",
                        "properties": {
                            "internal_note": { "type": "string" },
                            "customer_snapshot": { "type": "entity" },
                            "related_rules": {
                                "type": "array",
                                "items": { "type": "governance_rule" }
                            }
                        }
                    }
                }
            }
        }
    }
    types.append(invoice_type)
def process_merger():
    """Append the deep-jsonb merge test case to fixtures/merger.json.

    Loads the merger fixture, ensures the 'invoice' type exists, appends
    one test case exercising nested jsonb metadata, and writes the fixture
    back in place.
    """
    data = load_json('fixtures/merger.json')
    add_invoice(data)
    merge_test = {
        "name": "Insert invoice with deep jsonb metadata",
        "schema": "invoice",
        "payload": {
            "number": "INV-1001",
            "metadata": {
                "internal_note": "Confidential",
                "customer_snapshot": {
                    "id": "00000000-0000-0000-0000-000000000000",
                    "type": "person",
                    "first_name": "John"
                },
                "related_rules": [
                    {
                        "id": "11111111-1111-1111-1111-111111111111"
                    }
                ]
            }
        },
        "expect": {
            "sql": [
                [
                    "INSERT INTO agreego.invoice (metadata, number, id) VALUES ($1, $2, gen_random_uuid()) ON CONFLICT (id) DO UPDATE SET metadata = EXCLUDED.metadata, number = EXCLUDED.number RETURNING id, type",
                    {"metadata": {"customer_snapshot": {"first_name": "John", "id": "00000000-0000-0000-0000-000000000000", "type": "person"}, "internal_note": "Confidential", "related_rules": [{"id": "11111111-1111-1111-1111-111111111111"}]}, "number": "INV-1001"}
                ]
            ]
        }
    }
    data[0]['tests'].append(merge_test)
    save_json('fixtures/merger.json', data)
def process_queryer():
    """Append the jsonb metadata extraction test case to fixtures/queryer.json.

    Loads the queryer fixture, ensures the 'invoice' type exists, appends
    one query test case, and writes the fixture back in place.
    """
    data = load_json('fixtures/queryer.json')
    add_invoice(data)
    query_test = {
        "name": "Query invoice with complex JSONB metadata field extraction",
        "schema": "invoice",
        "query": {
            "extract": ["id", "number", "metadata"],
            "conditions": []
        },
        "expect": {
            "sql": "SELECT jsonb_build_object('id', t1.id, 'metadata', t1.metadata, 'number', t1.number) FROM agreego.invoice t1 WHERE (t1.id IS NOT NULL)",
            "params": {}
        }
    }
    data[0]['tests'].append(query_test)
    save_json('fixtures/queryer.json', data)
if __name__ == "__main__":
    # Guard so importing this module (e.g. to reuse add_invoice) does not
    # rewrite the fixture files as a side effect; behavior when run as a
    # script is unchanged.
    process_merger()
    process_queryer()

View File

@ -1,63 +1,87 @@
import json import json
path = "fixtures/database.json" def load_json(path):
with open(path, 'r') as f:
return json.load(f)
with open(path, "r") as f: def save_json(path, data):
data = json.load(f) with open(path, 'w') as f:
json.dump(data, f, indent=4)
test_case = data[-1] def fix_merger():
data = load_json('fixtures/merger.json')
last_test = data[0]['tests'][-1]
test_case["database"]["relations"] = [ # Check if the last test is our bad one
{ if "name" in last_test and last_test["name"] == "Insert invoice with deep jsonb metadata":
"id": "r1", new_test = {
"type": "relation", "description": last_test["name"],
"constraint": "fk_person_email", "action": "merge",
"source_type": "person", "source_columns": ["email_id"], "schema_id": last_test["schema"],
"destination_type": "email_address", "destination_columns": ["id"], "data": last_test["payload"],
"prefix": "email" "expect": {
}, "success": True,
{ "sql": [
"id": "r2", [
"type": "relation", "INSERT INTO agreego.invoice (",
"constraint": "fk_person_ad_hoc_bubble", " \"metadata\",",
"source_type": "person", "source_columns": ["ad_hoc_bubble_id"], " \"number\",",
"destination_type": "some_bubble", "destination_columns": ["id"], " entity_id,",
"prefix": "ad_hoc_bubble" " id,",
}, " type",
{ ")",
"id": "r3", "VALUES (",
"type": "relation", " '{",
"constraint": "fk_person_generic_bubble", " \"customer_snapshot\":{",
"source_type": "person", "source_columns": ["generic_bubble_id"], " \"first_name\":\"John\",",
"destination_type": "some_bubble", "destination_columns": ["id"], " \"id\":\"00000000-0000-0000-0000-000000000000\",",
"prefix": "generic_bubble" " \"type\":\"person\"",
}, " },",
{ " \"internal_note\":\"Confidential\",",
"id": "r4", " \"related_rules\":[",
"type": "relation", " {",
"constraint": "fk_person_extended_relations", " \"id\":\"11111111-1111-1111-1111-111111111111\"",
"source_type": "contact", "source_columns": ["source_id"], " }",
"destination_type": "person", "destination_columns": ["id"], " ]",
"prefix": "extended_relations" " }',",
}, " 'INV-1001',",
{ " NULL,",
"id": "r5", " '{{uuid}}',",
"type": "relation", " 'invoice'",
"constraint": "fk_person_standard_relations", ")"
"source_type": "contact", "source_columns": ["source_id_2"],
"destination_type": "person", "destination_columns": ["id"],
"prefix": "standard_relations"
},
{
"id": "r6",
"type": "relation",
"constraint": "fk_contact_target",
"source_type": "contact", "source_columns": ["target_id"],
"destination_type": "email_address", "destination_columns": ["id"],
"prefix": "target"
}
] ]
]
}
}
data[0]['tests'][-1] = new_test
save_json('fixtures/merger.json', data)
with open(path, "w") as f: def fix_queryer():
json.dump(data, f, indent=2) data = load_json('fixtures/queryer.json')
last_test = data[0]['tests'][-1]
if "name" in last_test and last_test["name"] == "Query invoice with complex JSONB metadata field extraction":
new_test = {
"description": last_test["name"],
"action": "query",
"schema_id": last_test["schema"],
"expect": {
"success": True,
"sql": [
[
"(SELECT jsonb_strip_nulls(jsonb_build_object(",
" 'id', invoice_1.id,",
" 'metadata', invoice_1.metadata,",
" 'number', invoice_1.number,",
" 'type', invoice_1.type",
"))",
"FROM agreego.invoice invoice_1)"
]
]
}
}
data[0]['tests'][-1] = new_test
save_json('fixtures/queryer.json', data)
fix_merger()
fix_queryer()

View File

@ -826,6 +826,87 @@
"historical": true, "historical": true,
"notify": true, "notify": true,
"relationship": false "relationship": false
},
{
"name": "invoice",
"hierarchy": [
"invoice",
"entity"
],
"fields": [
"id",
"type",
"number",
"metadata",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
],
"grouped_fields": {
"invoice": [
"id",
"type",
"number",
"metadata"
],
"entity": [
"id",
"type",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
]
},
"lookup_fields": [
"id"
],
"historical": true,
"relationship": false,
"field_types": {
"id": "uuid",
"type": "text",
"archived": "boolean",
"number": "text",
"metadata": "jsonb",
"created_at": "timestamptz",
"created_by": "uuid",
"modified_at": "timestamptz",
"modified_by": "uuid"
},
"schemas": {
"invoice": {
"type": "entity",
"properties": {
"id": {
"type": "string"
},
"number": {
"type": "string"
},
"metadata": {
"type": "object",
"properties": {
"internal_note": {
"type": "string"
},
"customer_snapshot": {
"type": "entity"
},
"related_rules": {
"type": "array",
"items": {
"type": "entity"
}
}
}
}
}
}
}
} }
] ]
}, },
@ -2802,6 +2883,123 @@
] ]
] ]
} }
},
{
"description": "Insert invoice with deep jsonb metadata",
"action": "merge",
"schema_id": "invoice",
"data": {
"id": "11111111-2222-3333-4444-555555555555",
"type": "invoice",
"number": "INV-1001",
"metadata": {
"internal_note": "Confidential",
"customer_snapshot": {
"id": "00000000-0000-0000-0000-000000000000",
"type": "person",
"first_name": "John"
},
"related_rules": [
{
"id": "11111111-1111-1111-1111-111111111111",
"type": "entity"
}
]
}
},
"expect": {
"success": true,
"sql": [
[
"SELECT to_jsonb(t1.*) || to_jsonb(t2.*)",
"FROM agreego.\"invoice\" t1",
"LEFT JOIN agreego.\"entity\" t2 ON t2.id = t1.id",
"WHERE t1.id = '11111111-2222-3333-4444-555555555555' OR (\"id\" = '11111111-2222-3333-4444-555555555555')"
],
[
"INSERT INTO agreego.\"entity\" (",
" \"created_at\",",
" \"created_by\",",
" \"id\",",
" \"modified_at\",",
" \"modified_by\",",
" \"type\"",
")",
"VALUES (",
" '{{timestamp}}',",
" '00000000-0000-0000-0000-000000000000',",
" '11111111-2222-3333-4444-555555555555',",
" '{{timestamp}}',",
" '00000000-0000-0000-0000-000000000000',",
" 'invoice'",
")"
],
[
"INSERT INTO agreego.\"invoice\" (",
" \"id\",",
" \"metadata\",",
" \"number\",",
" \"type\"",
")",
"VALUES (",
" '11111111-2222-3333-4444-555555555555',",
" '{",
" \"customer_snapshot\":{",
" \"first_name\":\"John\",",
" \"id\":\"00000000-0000-0000-0000-000000000000\",",
" \"type\":\"person\"",
" },",
" \"internal_note\":\"Confidential\",",
" \"related_rules\":[",
" {",
" \"id\":\"11111111-1111-1111-1111-111111111111\",",
" \"type\":\"entity\"",
" }",
" ]",
" }',",
" 'INV-1001',",
" 'invoice'",
")"
],
[
"INSERT INTO agreego.change (",
" \"old\",",
" \"new\",",
" entity_id,",
" id,",
" kind,",
" modified_at,",
" modified_by",
")",
"VALUES (",
" NULL,",
" '{",
" \"metadata\":{",
" \"customer_snapshot\":{",
" \"first_name\":\"John\",",
" \"id\":\"00000000-0000-0000-0000-000000000000\",",
" \"type\":\"person\"",
" },",
" \"internal_note\":\"Confidential\",",
" \"related_rules\":[",
" {",
" \"id\":\"11111111-1111-1111-1111-111111111111\",",
" \"type\":\"entity\"",
" }",
" ]",
" },",
" \"number\":\"INV-1001\",",
" \"type\":\"invoice\"",
" }',",
" '11111111-2222-3333-4444-555555555555',",
" '{{uuid}}',",
" 'create',",
" '{{timestamp}}',",
" '00000000-0000-0000-0000-000000000000'",
")"
]
]
}
} }
] ]
} }

View File

@ -925,6 +925,90 @@
"properties": {} "properties": {}
} }
} }
},
{
"name": "invoice",
"hierarchy": [
"invoice",
"entity"
],
"fields": [
"id",
"type",
"number",
"metadata",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
],
"grouped_fields": {
"invoice": [
"id",
"type",
"number",
"metadata"
],
"entity": [
"id",
"type",
"created_at",
"created_by",
"modified_at",
"modified_by",
"archived"
]
},
"lookup_fields": [
"id"
],
"historical": true,
"relationship": false,
"field_types": {
"id": "uuid",
"type": "text",
"archived": "boolean",
"number": "text",
"metadata": "jsonb",
"created_at": "timestamptz",
"created_by": "uuid",
"modified_at": "timestamptz",
"modified_by": "uuid"
},
"variations": [
"invoice"
],
"schemas": {
"invoice": {
"type": "entity",
"properties": {
"id": {
"type": "string"
},
"number": {
"type": "string"
},
"metadata": {
"type": "object",
"properties": {
"internal_note": {
"type": "string"
},
"customer_snapshot": {
"type": "entity"
},
"related_rules": {
"type": "array",
"items": {
"type": "entity"
}
}
}
}
}
}
}
} }
] ]
}, },
@ -1951,6 +2035,29 @@
] ]
] ]
} }
},
{
"description": "Query invoice with complex JSONB metadata field extraction",
"action": "query",
"schema_id": "invoice",
"expect": {
"success": true,
"sql": [
[
"(SELECT jsonb_strip_nulls((SELECT jsonb_build_object(",
" 'archived', entity_2.archived,",
" 'created_at', entity_2.created_at,",
" 'id', entity_2.id,",
" 'metadata', invoice_1.metadata,",
" 'number', invoice_1.number,",
" 'type', entity_2.type",
")",
"FROM agreego.invoice invoice_1",
"JOIN agreego.entity entity_2 ON entity_2.id = invoice_1.id",
"WHERE NOT entity_2.archived)))"
]
]
}
} }
] ]
} }

View File

@ -161,12 +161,24 @@ impl Schema {
} }
if let Some(one_of) = &self.obj.one_of { if let Some(one_of) = &self.obj.one_of {
for (i, child) in one_of.iter().enumerate() { for (i, child) in one_of.iter().enumerate() {
child.compile(db, root_id, format!("{}/oneOf/{}", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/oneOf/{}", path, i),
visited,
errors,
);
} }
} }
if let Some(arr) = &self.obj.prefix_items { if let Some(arr) = &self.obj.prefix_items {
for (i, child) in arr.iter().enumerate() { for (i, child) in arr.iter().enumerate() {
child.compile(db, root_id, format!("{}/prefixItems/{}", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/prefixItems/{}", path, i),
visited,
errors,
);
} }
} }
if let Some(child) = &self.obj.not { if let Some(child) = &self.obj.not {
@ -178,13 +190,31 @@ impl Schema {
if let Some(cases) = &self.obj.cases { if let Some(cases) = &self.obj.cases {
for (i, c) in cases.iter().enumerate() { for (i, c) in cases.iter().enumerate() {
if let Some(child) = &c.when { if let Some(child) = &c.when {
child.compile(db, root_id, format!("{}/cases/{}/when", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/cases/{}/when", path, i),
visited,
errors,
);
} }
if let Some(child) = &c.then { if let Some(child) = &c.then {
child.compile(db, root_id, format!("{}/cases/{}/then", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/cases/{}/then", path, i),
visited,
errors,
);
} }
if let Some(child) = &c.else_ { if let Some(child) = &c.else_ {
child.compile(db, root_id, format!("{}/cases/{}/else", path, i), visited, errors); child.compile(
db,
root_id,
format!("{}/cases/{}/else", path, i),
visited,
errors,
);
} }
} }
} }
@ -220,7 +250,7 @@ impl Schema {
if let Some(family) = &self.obj.family { if let Some(family) = &self.obj.family {
// 1. Explicit horizontal routing // 1. Explicit horizontal routing
parent_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string()); parent_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string());
} else if !path.contains('/') { } else if path == root_id {
// 2. Root nodes trust their exact registry footprint // 2. Root nodes trust their exact registry footprint
let base_type_name = path.split('.').next_back().unwrap_or(path).to_string(); let base_type_name = path.split('.').next_back().unwrap_or(path).to_string();
if db.types.contains_key(&base_type_name) { if db.types.contains_key(&base_type_name) {
@ -234,7 +264,7 @@ impl Schema {
} }
if parent_type_name.is_none() { if parent_type_name.is_none() {
// 4. Absolute fallback for completely anonymous inline structures // 3. Absolute fallback for anonymous inline structures
let base_type_name = root_id let base_type_name = root_id
.split('.') .split('.')
.next_back() .next_back()
@ -247,9 +277,17 @@ impl Schema {
if let Some(p_type) = parent_type_name { if let Some(p_type) = parent_type_name {
// Proceed only if the resolved table physically exists within the Postgres Type hierarchy // Proceed only if the resolved table physically exists within the Postgres Type hierarchy
if db.types.contains_key(&p_type) { if let Some(type_def) = db.types.get(&p_type) {
// Iterate over all discovered schema boundaries mapped inside the object // Iterate over all discovered schema boundaries mapped inside the object
for (prop_name, prop_schema) in props { for (prop_name, prop_schema) in props {
if let Some(field_types_map) = type_def.field_types.as_ref().and_then(|v| v.as_object()) {
if let Some(pg_type) = field_types_map.get(prop_name).and_then(|v| v.as_str()) {
if pg_type == "json" || pg_type == "jsonb" {
continue;
}
}
}
let mut child_type_name = None; let mut child_type_name = None;
let mut target_schema = prop_schema.clone(); let mut target_schema = prop_schema.clone();
let mut is_array = false; let mut is_array = false;
@ -290,7 +328,13 @@ impl Schema {
if db.types.contains_key(&c_type) { if db.types.contains_key(&c_type) {
// Ensure the child Schema's AST has accurately compiled its own physical property keys so we can // Ensure the child Schema's AST has accurately compiled its own physical property keys so we can
// inject them securely for Many-to-Many Twin Deduction disambiguation matching. // inject them securely for Many-to-Many Twin Deduction disambiguation matching.
target_schema.compile(db, root_id, format!("{}/{}", path, prop_name), visited, errors); target_schema.compile(
db,
root_id,
format!("{}/{}", path, prop_name),
visited,
errors,
);
if let Some(compiled_target_props) = target_schema.obj.compiled_properties.get() { if let Some(compiled_target_props) = target_schema.obj.compiled_properties.get() {
let keys_for_ambiguity: Vec<String> = let keys_for_ambiguity: Vec<String> =
compiled_target_props.keys().cloned().collect(); compiled_target_props.keys().cloned().collect();
@ -559,7 +603,13 @@ impl Schema {
let mut map_arr = |arr: &Vec<Arc<Schema>>, sub: &str| { let mut map_arr = |arr: &Vec<Arc<Schema>>, sub: &str| {
for (i, v) in arr.iter().enumerate() { for (i, v) in arr.iter().enumerate() {
Self::collect_schemas(v, root_id, format!("{}/{}/{}", path, sub, i), to_insert, errors); Self::collect_schemas(
v,
root_id,
format!("{}/{}/{}", path, sub, i),
to_insert,
errors,
);
} }
}; };

View File

@ -473,6 +473,15 @@ impl<'a> Compiler<'a> {
} }
} }
if let Some(ft) = r#type.field_types.as_ref().and_then(|v| v.as_object()) {
if let Some(pg_type) = ft.get(prop_key).and_then(|v| v.as_str()) {
if pg_type == "json" || pg_type == "jsonb" {
select_args.push(format!("'{}', {}.{}", prop_key, owner_alias, prop_key));
continue;
}
}
}
let child_node = Node { let child_node = Node {
schema: std::sync::Arc::clone(prop_schema), schema: std::sync::Arc::clone(prop_schema),
parent_alias: owner_alias.clone(), parent_alias: owner_alias.clone(),

View File

@ -1451,6 +1451,12 @@ fn test_queryer_0_12() {
crate::tests::runner::run_test_case(&path, 0, 12).unwrap(); crate::tests::runner::run_test_case(&path, 0, 12).unwrap();
} }
#[test]
fn test_queryer_0_13() {
let path = format!("{}/fixtures/queryer.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 0, 13).unwrap();
}
#[test] #[test]
fn test_polymorphism_0_0() { fn test_polymorphism_0_0() {
let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR")); let path = format!("{}/fixtures/polymorphism.json", env!("CARGO_MANIFEST_DIR"));
@ -8086,3 +8092,9 @@ fn test_merger_0_12() {
let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR")); let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 0, 12).unwrap(); crate::tests::runner::run_test_case(&path, 0, 12).unwrap();
} }
#[test]
fn test_merger_0_13() {
let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR"));
crate::tests::runner::run_test_case(&path, 0, 13).unwrap();
}

View File

@ -1 +1 @@
1.0.115 1.0.116

24
wipe_test.py Normal file
View File

@@ -0,0 +1,24 @@
import json
def load_json(path):
    """Read the file at `path` and return its parsed JSON value."""
    with open(path, 'r') as handle:
        return json.load(handle)
def save_json(path, data):
    """Write `data` to `path` as pretty-printed JSON (4-space indent)."""
    with open(path, 'w') as handle:
        json.dump(data, handle, indent=4)
def fix_merger():
    """Blank out the expected SQL of the last test in fixtures/merger.json.

    NOTE(review): this assumes the last test is the one to wipe — confirm
    against the fixture before running.
    """
    data = load_json('fixtures/merger.json')
    data[0]['tests'][-1]["expect"]["sql"] = []
    save_json('fixtures/merger.json', data)
def fix_queryer():
    """Blank out the expected SQL of the last test in fixtures/queryer.json.

    NOTE(review): this assumes the last test is the one to wipe — confirm
    against the fixture before running.
    """
    data = load_json('fixtures/queryer.json')
    data[0]['tests'][-1]["expect"]["sql"] = []
    save_json('fixtures/queryer.json', data)
if __name__ == "__main__":
    # Guard so importing this module does not mutate the fixture files as a
    # side effect; behavior when run as a script is unchanged.
    fix_merger()
    fix_queryer()