Compare commits

...

8 Commits
1.0.79 ... main

Author SHA1 Message Date
9bdb767685 version: 1.0.82 2026-03-20 18:05:43 -04:00
bdd89fe695 cleanup 2026-03-20 18:05:37 -04:00
8135d80045 cleanup 2026-03-20 18:05:18 -04:00
9255439d53 added support for root schema compiled properties for the mixer 2026-03-20 18:04:49 -04:00
9038607729 version: 1.0.81 2026-03-20 15:53:59 -04:00
9f6c27c3b8 support ad-hoc refing without entity types 2026-03-20 15:53:48 -04:00
75aac41362 version: 1.0.80 2026-03-20 06:48:19 -04:00
dbcef42401 merger fixes 2026-03-20 06:48:08 -04:00
10 changed files with 222 additions and 1522 deletions

View File

View File

@ -719,6 +719,24 @@
{
"name": "attachment",
"schemas": [
{
"$id": "type_metadata",
"type": "object",
"properties": {
"type": {
"type": "string"
}
}
},
{
"$id": "other_metadata",
"type": "object",
"properties": {
"other": {
"type": "string"
}
}
},
{
"$id": "attachment",
"$ref": "entity",
@ -729,9 +747,11 @@
"type": "string"
}
},
"metadata": {
"type": "object",
"additionalProperties": true
"type_metadata": {
"$ref": "type_metadata"
},
"other_metadata": {
"$ref": "other_metadata"
}
}
}
@ -744,7 +764,8 @@
"id",
"type",
"flags",
"metadata",
"type_metadata",
"other_metadata",
"created_at",
"created_by",
"modified_at",
@ -756,7 +777,8 @@
"id",
"type",
"flags",
"metadata"
"type_metadata",
"other_metadata"
],
"entity": [
"id",
@ -772,7 +794,8 @@
"id": "uuid",
"type": "text",
"flags": "_text",
"metadata": "jsonb",
"type_metadata": "jsonb",
"other_metadata": "jsonb",
"created_at": "timestamptz",
"created_by": "uuid",
"modified_at": "timestamptz",
@ -2260,7 +2283,7 @@
}
},
{
"description": "Insert attachment displaying side-by-side array literal and jsonb formatting translations",
"description": "Attachment with text[] and jsonb metadata structures",
"action": "merge",
"data": {
"type": "attachment",
@ -2268,9 +2291,11 @@
"urgent",
"reviewed"
],
"metadata": {
"size": 1024,
"source": "upload"
"other_metadata": {
"other": "hello"
},
"type_metadata": {
"type": "type_metadata"
}
},
"expect": {
@ -2298,14 +2323,16 @@
"INSERT INTO agreego.\"attachment\" (",
" \"flags\",",
" \"id\",",
" \"metadata\",",
" \"type\"",
" \"other_metadata\",",
" \"type\",",
" \"type_metadata\"",
")",
"VALUES (",
" '{\"urgent\",\"reviewed\"}',",
" '{{uuid:attachment_id}}',",
" '{\"size\":1024,\"source\":\"upload\"}',",
" 'attachment'",
" '{\"other\":\"hello\"}',",
" 'attachment',",
" '{\"type\":\"type_metadata\"}'",
")"
],
[
@ -2322,8 +2349,9 @@
" NULL,",
" '{",
" \"flags\":[\"urgent\",\"reviewed\"],",
" \"metadata\":{\"size\":1024,\"source\":\"upload\"},",
" \"type\":\"attachment\"",
" \"other_metadata\":{\"other\":\"hello\"},",
" \"type\":\"attachment\",",
" \"type_metadata\":{\"type\":\"type_metadata\"}",
" }',",
" '{{uuid:attachment_id}}',",
" '{{uuid}}',",
@ -2339,15 +2367,17 @@
" \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
" \"flags\":[\"urgent\",\"reviewed\"],",
" \"id\":\"{{uuid:attachment_id}}\",",
" \"metadata\":{\"size\":1024,\"source\":\"upload\"},",
" \"modified_at\":\"{{timestamp}}\",",
" \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
" \"type\":\"attachment\"",
" \"other_metadata\":{\"other\":\"hello\"},",
" \"type\":\"attachment\",",
" \"type_metadata\":{\"type\":\"type_metadata\"}",
" },",
" \"new\":{",
" \"flags\":[\"urgent\",\"reviewed\"],",
" \"metadata\":{\"size\":1024,\"source\":\"upload\"},",
" \"type\":\"attachment\"",
" \"other_metadata\":{\"other\":\"hello\"},",
" \"type\":\"attachment\",",
" \"type_metadata\":{\"type\":\"type_metadata\"}",
" }",
" }')"
]

1439
out.txt

File diff suppressed because it is too large Load Diff

View File

@ -23,6 +23,7 @@ use relation::Relation;
use schema::Schema;
use serde_json::Value;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use r#type::Type;
pub struct Database {
@ -310,12 +311,84 @@ impl Database {
}
/// Pre-computes the merged (inherited + local) property map for every schema
/// in the database, stores the results on each schema, then runs the schema's
/// internal compilation pass.
///
/// Two-phase design: phase 1 borrows `self` immutably to resolve inheritance
/// via `merged_properties`; phase 2 borrows `self.schemas` mutably to write
/// the results back. Splitting the loops avoids holding an immutable and a
/// mutable borrow of `self.schemas` at the same time.
fn compile_schemas(&mut self) {
// Pass 3: compile_internals across pure structure
// Snapshot the ids so iteration does not hold a borrow of the map.
let schema_ids: Vec<String> = self.schemas.keys().cloned().collect();
// Staging maps keyed by schema id; consumed (removed) in phase 2.
let mut compiled_names_map: HashMap<String, Vec<String>> = HashMap::new();
let mut compiled_props_map: HashMap<String, std::collections::BTreeMap<String, Arc<Schema>>> =
HashMap::new();
// Phase 1 (immutable): resolve the full property set for each schema.
for id in &schema_ids {
if let Some(schema) = self.schemas.get(id) {
// Fresh cycle-guard set per root schema.
let mut visited = HashSet::new();
let merged = self.merged_properties(schema, &mut visited);
let mut names: Vec<String> = merged.keys().cloned().collect();
// Only record a result when the schema actually has properties;
// schemas with no properties keep `None` for both fields.
if !names.is_empty() {
names.sort();
compiled_names_map.insert(id.clone(), names);
compiled_props_map.insert(id.clone(), merged);
}
}
}
// Phase 2 (mutable): move the staged results onto the schemas and
// run each schema's own compilation step.
for id in schema_ids {
if let Some(schema) = self.schemas.get_mut(&id) {
if let Some(names) = compiled_names_map.remove(&id) {
schema.obj.compiled_property_names = Some(names);
}
if let Some(props) = compiled_props_map.remove(&id) {
schema.obj.compiled_properties = Some(props);
}
// Runs unconditionally, even for schemas with no merged properties.
schema.compile_internals();
}
}
}
/// Recursively resolves the complete property set for `schema`, merging
/// properties contributed by `$ref`, `allOf`, `then`, and `else` with the
/// schema's own local `properties`.
///
/// Merge precedence is insertion order: `$ref` parent first, then `allOf`
/// entries, then `then`/`else`, and finally local properties — so later
/// sources (local keys in particular) overwrite earlier ones on conflict.
///
/// `visited` tracks the schema `$id`s on the current resolution path so
/// that cyclic `$ref` chains terminate: a schema already on the path
/// contributes an empty map. The id is removed again before returning, so
/// the same schema may be reached via sibling branches.
pub fn merged_properties(
&self,
schema: &Schema,
visited: &mut HashSet<String>,
) -> std::collections::BTreeMap<String, Arc<Schema>> {
// Fast path: reuse a result already computed by compile_schemas.
if let Some(props) = &schema.obj.compiled_properties {
return props.clone();
}
let mut props = std::collections::BTreeMap::new();
// Cycle guard: `insert` returns false if this id is already on the path.
if let Some(id) = &schema.obj.id {
if !visited.insert(id.clone()) {
return props;
}
}
// Inherited properties from the `$ref` target, if it resolves.
// NOTE(review): an unresolvable ref is silently skipped — confirm intended.
if let Some(ref_id) = &schema.obj.r#ref {
if let Some(parent_schema) = self.schemas.get(ref_id) {
props.extend(self.merged_properties(parent_schema, visited));
}
}
// Properties from each `allOf` subschema, in declaration order.
if let Some(all_of) = &schema.obj.all_of {
for ao in all_of {
props.extend(self.merged_properties(ao, visited));
}
}
// Conditional branches contribute their properties unconditionally here;
// this is structural merging, not instance validation.
if let Some(then_schema) = &schema.obj.then_ {
props.extend(self.merged_properties(then_schema, visited));
}
if let Some(else_schema) = &schema.obj.else_ {
props.extend(self.merged_properties(else_schema, visited));
}
// Local properties last: they win over anything inherited above.
if let Some(local_props) = &schema.obj.properties {
for (k, v) in local_props {
props.insert(k.clone(), v.clone());
}
}
// Pop this id off the path so sibling branches may revisit it.
if let Some(id) = &schema.obj.id {
visited.remove(id);
}
props
}
}

View File

@ -167,6 +167,13 @@ pub struct SchemaObject {
#[serde(skip_serializing_if = "Option::is_none")]
pub extensible: Option<bool>,
#[serde(rename = "compiledProperties")]
#[serde(skip_serializing_if = "Option::is_none")]
pub compiled_property_names: Option<Vec<String>>,
#[serde(skip)]
pub compiled_properties: Option<BTreeMap<String, Arc<Schema>>>,
#[serde(skip)]
pub compiled_format: Option<CompiledFormat>,
#[serde(skip)]

View File

@ -176,7 +176,7 @@ impl Merger {
// Attempt to extract relative object type name
let relative_type_name = match relative.get("type").and_then(|v| v.as_str()) {
Some(t) => t,
Some(t) => t.to_string(),
None => continue,
};
@ -185,7 +185,7 @@ impl Merger {
// Call central Database O(1) graph logic
let relative_relation = self.db.get_relation(
&type_def.name,
relative_type_name,
&relative_type_name,
&relation_name,
Some(&relative_keys),
);
@ -200,11 +200,16 @@ impl Merger {
}
}
let merged_relative = match self.merge_internal(Value::Object(relative), notifications)? {
let mut merged_relative = match self.merge_internal(Value::Object(relative), notifications)? {
Value::Object(m) => m,
_ => continue,
};
merged_relative.insert(
"type".to_string(),
Value::String(relative_type_name),
);
Self::apply_entity_relation(
&mut entity_fields,
&relation.source_columns,

View File

@ -19,11 +19,7 @@ pub struct Node<'a> {
impl<'a> Compiler<'a> {
/// Compiles a JSON schema into a nested PostgreSQL query returning JSONB
pub fn compile(
&self,
schema_id: &str,
filter_keys: &[String],
) -> Result<String, String> {
pub fn compile(&self, schema_id: &str, filter_keys: &[String]) -> Result<String, String> {
let schema = self
.db
.schemas
@ -251,8 +247,7 @@ impl<'a> Compiler<'a> {
let mut bypass_node = node.clone();
bypass_node.schema = std::sync::Arc::new(bypass_schema);
let mut bypassed_args =
self.compile_select_clause(r#type, table_aliases, bypass_node)?;
let mut bypassed_args = self.compile_select_clause(r#type, table_aliases, bypass_node)?;
select_args.append(&mut bypassed_args);
} else {
let mut family_schemas = Vec::new();
@ -400,7 +395,9 @@ impl<'a> Compiler<'a> {
) -> Result<Vec<String>, String> {
let mut select_args = Vec::new();
let grouped_fields = r#type.grouped_fields.as_ref().and_then(|v| v.as_object());
let merged_props = self.get_merged_properties(node.schema.as_ref());
let merged_props = self
.db
.merged_properties(node.schema.as_ref(), &mut std::collections::HashSet::new());
let mut sorted_keys: Vec<&String> = merged_props.keys().collect();
sorted_keys.sort();
@ -494,7 +491,13 @@ impl<'a> Compiler<'a> {
where_clauses.push(format!("NOT {}.archived", entity_alias));
self.compile_filter_conditions(r#type, type_aliases, &node, &base_alias, &mut where_clauses);
self.compile_relation_conditions(r#type, type_aliases, &node, &base_alias, &mut where_clauses)?;
self.compile_relation_conditions(
r#type,
type_aliases,
&node,
&base_alias,
&mut where_clauses,
)?;
Ok(where_clauses)
}
@ -509,7 +512,10 @@ impl<'a> Compiler<'a> {
for (t_name, fields_val) in gf {
if let Some(fields_arr) = fields_val.as_array() {
if fields_arr.iter().any(|v| v.as_str() == Some(field_name)) {
return type_aliases.get(t_name).cloned().unwrap_or_else(|| base_alias.to_string());
return type_aliases
.get(t_name)
.cloned()
.unwrap_or_else(|| base_alias.to_string());
}
}
}
@ -606,13 +612,31 @@ impl<'a> Compiler<'a> {
));
} else {
let sql_op = match op {
"$eq" => if is_ilike { "ILIKE" } else { "=" },
"$ne" => if is_ilike { "NOT ILIKE" } else { "!=" },
"$eq" => {
if is_ilike {
"ILIKE"
} else {
"="
}
}
"$ne" => {
if is_ilike {
"NOT ILIKE"
} else {
"!="
}
}
"$gt" => ">",
"$gte" => ">=",
"$lt" => "<",
"$lte" => "<=",
_ => if is_ilike { "ILIKE" } else { "=" },
_ => {
if is_ilike {
"ILIKE"
} else {
"="
}
}
};
let param_sql = if is_ilike && (op == "$eq" || op == "$ne") {
@ -643,7 +667,9 @@ impl<'a> Compiler<'a> {
let mut child_relation_alias = base_alias.to_string();
if let Some(parent_type) = node.parent_type {
let merged_props = self.get_merged_properties(node.schema.as_ref());
let merged_props = self
.db
.merged_properties(node.schema.as_ref(), &mut std::collections::HashSet::new());
let relative_keys: Vec<String> = merged_props.keys().cloned().collect();
let (relation, is_parent_source) = self
@ -695,25 +721,4 @@ impl<'a> Compiler<'a> {
}
Ok(())
}
fn get_merged_properties(
&self,
schema: &crate::database::schema::Schema,
) -> std::collections::BTreeMap<String, Arc<crate::database::schema::Schema>> {
let mut props = std::collections::BTreeMap::new();
if let Some(ref_id) = &schema.obj.r#ref {
if let Some(parent_schema) = self.db.schemas.get(ref_id) {
props.extend(self.get_merged_properties(parent_schema));
}
}
if let Some(local_props) = &schema.obj.properties {
for (k, v) in local_props {
props.insert(k.clone(), v.clone());
}
}
props
}
}

View File

@ -62,7 +62,8 @@ fn test_library_api() {
"properties": {
"name": { "type": "string" }
},
"required": ["name"]
"required": ["name"],
"compiledProperties": ["name"]
}
}
})

View File

@ -13,13 +13,18 @@ impl<'a> ValidationContext<'a> {
) -> Result<bool, ValidationError> {
let current = self.instance;
if let Some(obj) = current.as_object() {
// Entity Bound Implicit Type Validation
if let Some(lookup_key) = self.schema.id.as_ref().or(self.schema.r#ref.as_ref()) {
let base_type_name = lookup_key.split('.').next_back().unwrap_or("").to_string();
if let Some(type_def) = self.db.types.get(&base_type_name)
&& let Some(type_val) = obj.get("type")
// Entity implicit type validation
// Use the specific schema id or ref as a fallback
if let Some(identifier) = self.schema.id.as_ref().or(self.schema.r#ref.as_ref()) {
// Kick in if the data object has a type field
if let Some(type_val) = obj.get("type")
&& let Some(type_str) = type_val.as_str()
{
// Get the string or the final segment as the base
let base = identifier.split('.').next_back().unwrap_or("").to_string();
// Check if the base is a global type name
if let Some(type_def) = self.db.types.get(&base) {
// Ensure the instance type is a variation of the global type
if type_def.variations.contains(type_str) {
// Ensure it passes strict mode
result.evaluated_keys.insert("type".to_string());
@ -33,8 +38,15 @@ impl<'a> ValidationContext<'a> {
path: format!("{}/type", self.path),
});
}
} else {
// Ad-Hoc schemas natively use strict schema discriminator strings instead of variation inheritance
if type_str == identifier {
result.evaluated_keys.insert("type".to_string());
}
}
}
}
if let Some(min) = self.schema.min_properties
&& (obj.len() as f64) < min
{
@ -44,6 +56,7 @@ impl<'a> ValidationContext<'a> {
path: self.path.to_string(),
});
}
if let Some(max) = self.schema.max_properties
&& (obj.len() as f64) > max
{
@ -53,6 +66,7 @@ impl<'a> ValidationContext<'a> {
path: self.path.to_string(),
});
}
if let Some(ref req) = self.schema.required {
for field in req {
if !obj.contains_key(field) {
@ -114,10 +128,14 @@ impl<'a> ValidationContext<'a> {
// Entity Bound Implicit Type Interception
if key == "type"
&& let Some(lookup_key) = sub_schema.id.as_ref().or(sub_schema.r#ref.as_ref())
&& let Some(schema_bound) = sub_schema.id.as_ref().or(sub_schema.r#ref.as_ref())
{
let base_type_name = lookup_key.split('.').next_back().unwrap_or("").to_string();
if let Some(type_def) = self.db.types.get(&base_type_name)
let physical_type_name = schema_bound
.split('.')
.next_back()
.unwrap_or("")
.to_string();
if let Some(type_def) = self.db.types.get(&physical_type_name)
&& let Some(instance_type) = child_instance.as_str()
&& type_def.variations.contains(instance_type)
{

View File

@ -1 +1 @@
1.0.79
1.0.82