Compare commits

...

4 Commits

Author SHA1 Message Date
3a4c53dc7d version: 1.0.125 2026-04-17 07:43:30 -04:00
69bd726b25 more filter fixes 2026-04-17 07:43:19 -04:00
c2267b68d8 version: 1.0.124 2026-04-17 05:53:54 -04:00
f58d1a32a3 full database extraction 2026-04-17 05:53:44 -04:00
12 changed files with 259 additions and 102 deletions

View File

@ -94,19 +94,30 @@
{ {
"id": "type3", "id": "type3",
"type": "type", "type": "type",
"name": "filter", "name": "search",
"module": "core", "module": "core",
"source": "filter", "source": "search",
"hierarchy": [ "hierarchy": [
"filter" "search"
], ],
"variations": [ "variations": [
"filter", "search"
"string.condition",
"integer.condition",
"date.condition"
], ],
"schemas": { "schemas": {
"search": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"filter": {
"type": "filter"
}
}
},
"filter": {
"type": "object"
},
"condition": { "condition": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -161,7 +172,7 @@
"schemas": { "schemas": {
"person": {}, "person": {},
"person.filter": { "person.filter": {
"type": "object", "type": "filter",
"compiledPropertyNames": [ "compiledPropertyNames": [
"age", "age",
"billing_address", "billing_address",
@ -197,7 +208,7 @@
}, },
"address": {}, "address": {},
"address.filter": { "address.filter": {
"type": "object", "type": "filter",
"compiledPropertyNames": [ "compiledPropertyNames": [
"city" "city"
], ],
@ -210,10 +221,33 @@
} }
} }
}, },
"filter": {},
"condition": {}, "condition": {},
"string.condition": {}, "string.condition": {},
"integer.condition": {}, "integer.condition": {},
"date.condition": {} "date.condition": {},
"search": {},
"search.filter": {
"type": "filter",
"compiledPropertyNames": [
"filter",
"name"
],
"properties": {
"filter": {
"type": [
"filter.filter",
"null"
]
},
"name": {
"type": [
"string.condition",
"null"
]
}
}
}
} }
} }
} }

View File

@ -1195,7 +1195,7 @@
"description": "Simple entity select with multiple filters", "description": "Simple entity select with multiple filters",
"action": "query", "action": "query",
"schema_id": "entity", "schema_id": "entity",
"filters": { "filter": {
"id": { "id": {
"$eq": "123e4567-e89b-12d3-a456-426614174000", "$eq": "123e4567-e89b-12d3-a456-426614174000",
"$ne": "123e4567-e89b-12d3-a456-426614174001", "$ne": "123e4567-e89b-12d3-a456-426614174001",
@ -1443,7 +1443,7 @@
"description": "Person select on full schema with filters", "description": "Person select on full schema with filters",
"action": "query", "action": "query",
"schema_id": "full.person", "schema_id": "full.person",
"filters": { "filter": {
"age": { "age": {
"$eq": 30, "$eq": 30,
"$gt": 20, "$gt": 20,

View File

@ -1,6 +1,6 @@
use crate::database::Database;
use crate::database::object::{SchemaObject, SchemaTypeOrArray}; use crate::database::object::{SchemaObject, SchemaTypeOrArray};
use crate::database::schema::Schema; use crate::database::schema::Schema;
use crate::database::Database;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::sync::Arc; use std::sync::Arc;
@ -20,16 +20,26 @@ impl Schema {
let mut child_obj = SchemaObject::default(); let mut child_obj = SchemaObject::default();
child_obj.type_ = Some(SchemaTypeOrArray::Multiple(filter_type)); child_obj.type_ = Some(SchemaTypeOrArray::Multiple(filter_type));
filter_props.insert(key.clone(), Arc::new(Schema { obj: child_obj, always_fail: false })); filter_props.insert(
key.clone(),
Arc::new(Schema {
obj: child_obj,
always_fail: false,
}),
);
} }
} }
if !filter_props.is_empty() { if !filter_props.is_empty() {
let mut wrapper_obj = SchemaObject::default(); let mut wrapper_obj = SchemaObject::default();
wrapper_obj.type_ = Some(SchemaTypeOrArray::Single("object".to_string())); // Conceptually link this directly into the STI lineage of the base `filter` object
wrapper_obj.type_ = Some(SchemaTypeOrArray::Single("filter".to_string()));
wrapper_obj.properties = Some(filter_props); wrapper_obj.properties = Some(filter_props);
return Some(Schema { obj: wrapper_obj, always_fail: false }); return Some(Schema {
obj: wrapper_obj,
always_fail: false,
});
} }
} }
None None

View File

@ -1,6 +1,6 @@
pub mod collection; pub mod collection;
pub mod edges; pub mod edges;
pub mod filters; pub mod filter;
pub mod polymorphism; pub mod polymorphism;
use crate::database::schema::Schema; use crate::database::schema::Schema;

View File

@ -29,12 +29,15 @@ use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use r#type::Type; use r#type::Type;
#[derive(serde::Serialize)]
pub struct Database { pub struct Database {
pub enums: HashMap<String, Enum>, pub enums: HashMap<String, Enum>,
pub types: HashMap<String, Type>, pub types: HashMap<String, Type>,
pub puncs: HashMap<String, Punc>, pub puncs: HashMap<String, Punc>,
pub relations: HashMap<String, Relation>, pub relations: HashMap<String, Relation>,
#[serde(skip)]
pub schemas: HashMap<String, Arc<Schema>>, pub schemas: HashMap<String, Arc<Schema>>,
#[serde(skip)]
pub executor: Box<dyn DatabaseExecutor + Send + Sync>, pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
} }
@ -238,23 +241,30 @@ impl Database {
// Phase 2: Synthesize Composed Filter References // Phase 2: Synthesize Composed Filter References
let mut filter_schemas = Vec::new(); let mut filter_schemas = Vec::new();
for type_def in self.types.values() { for (type_name, type_def) in &self.types {
for (id, schema_arc) in &type_def.schemas { for (id, schema_arc) in &type_def.schemas {
// Only run synthesis on actual structured, table-backed boundaries. Exclude subschemas! // Only run synthesis on actual structured, table-backed boundaries. Exclude subschemas!
let base_name = id.split('.').last().unwrap_or(id); let base_name = id.split('.').last().unwrap_or(id);
let is_table_backed = base_name == type_def.name; let is_table_backed = base_name == type_def.name;
if is_table_backed && !id.contains('/') { if is_table_backed && !id.contains('/') {
if let Some(filter_schema) = schema_arc.compile_filter(self, id, errors) { if let Some(filter_schema) = schema_arc.compile_filter(self, id, errors) {
filter_schemas.push((format!("{}.filter", id), Arc::new(filter_schema))); filter_schemas.push((
type_name.clone(),
format!("{}.filter", id),
Arc::new(filter_schema),
));
} }
} }
} }
} }
let mut filter_ids = Vec::new(); let mut filter_ids = Vec::new();
for (id, filter_arc) in filter_schemas { for (type_name, id, filter_arc) in filter_schemas {
filter_ids.push(id.clone()); filter_ids.push(id.clone());
self.schemas.insert(id, filter_arc); self.schemas.insert(id.clone(), filter_arc.clone());
if let Some(t) = self.types.get_mut(&type_name) {
t.schemas.insert(id, filter_arc);
}
} }
// Now actively compile the newly injected filters to lock all nested compose references natively // Now actively compile the newly injected filters to lock all nested compose references natively
@ -269,50 +279,88 @@ impl Database {
} }
fn collect_schemas(&mut self, errors: &mut Vec<crate::drop::Error>) { fn collect_schemas(&mut self, errors: &mut Vec<crate::drop::Error>) {
let mut to_insert = Vec::new(); let mut type_insert = Vec::new();
let mut punc_insert = Vec::new();
let mut enum_insert = Vec::new();
let mut global_insert = Vec::new();
// Pass 1: Extract all Schemas structurally off top level definitions into the master registry. // Pass 1: Extract all Schemas structurally off top level definitions into the master registry.
// Validate every node recursively via string filters natively! // Validate every node recursively via string filters natively!
for type_def in self.types.values() { for (type_name, type_def) in &self.types {
for (id, schema_arc) in &type_def.schemas { for (id, schema_arc) in &type_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc))); global_insert.push((id.clone(), Arc::clone(schema_arc)));
let mut local_insert = Vec::new();
crate::database::schema::Schema::collect_schemas( crate::database::schema::Schema::collect_schemas(
schema_arc, schema_arc,
id, id,
id.clone(), id.clone(),
&mut to_insert, &mut local_insert,
errors,
);
}
}
for punc_def in self.puncs.values() {
for (id, schema_arc) in &punc_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors,
);
}
}
for enum_def in self.enums.values() {
for (id, schema_arc) in &enum_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors, errors,
); );
for entry in &local_insert {
type_insert.push((type_name.clone(), entry.0.clone(), Arc::clone(&entry.1)));
global_insert.push((entry.0.clone(), Arc::clone(&entry.1)));
}
} }
} }
for (id, schema_arc) in to_insert { for (punc_name, punc_def) in &self.puncs {
for (id, schema_arc) in &punc_def.schemas {
global_insert.push((id.clone(), Arc::clone(schema_arc)));
let mut local_insert = Vec::new();
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut local_insert,
errors,
);
for entry in &local_insert {
punc_insert.push((punc_name.clone(), entry.0.clone(), Arc::clone(&entry.1)));
global_insert.push((entry.0.clone(), Arc::clone(&entry.1)));
}
}
}
for (enum_name, enum_def) in &self.enums {
for (id, schema_arc) in &enum_def.schemas {
global_insert.push((id.clone(), Arc::clone(schema_arc)));
let mut local_insert = Vec::new();
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut local_insert,
errors,
);
for entry in &local_insert {
enum_insert.push((enum_name.clone(), entry.0.clone(), Arc::clone(&entry.1)));
global_insert.push((entry.0.clone(), Arc::clone(&entry.1)));
}
}
}
// Apply global inserts
for (id, schema_arc) in global_insert {
self.schemas.insert(id, schema_arc); self.schemas.insert(id, schema_arc);
} }
// Apply local scopes
for (origin_name, id, schema_arc) in type_insert {
if let Some(t) = self.types.get_mut(&origin_name) {
t.schemas.insert(id, schema_arc);
}
}
for (origin_name, id, schema_arc) in punc_insert {
if let Some(p) = self.puncs.get_mut(&origin_name) {
p.schemas.insert(id, schema_arc);
}
}
for (origin_name, id, schema_arc) in enum_insert {
if let Some(e) = self.enums.get_mut(&origin_name) {
e.schemas.insert(id, schema_arc);
}
}
} }
/// Inspects the Postgres pg_constraint relations catalog to securely identify /// Inspects the Postgres pg_constraint relations catalog to securely identify

View File

@ -15,6 +15,7 @@ pub struct Punc {
pub public: bool, pub public: bool,
pub form: bool, pub form: bool,
pub get: Option<String>, pub get: Option<String>,
pub save: Option<String>,
pub page: Option<Page>, pub page: Option<Page>,
#[serde(default)] #[serde(default)]
pub schemas: std::collections::BTreeMap<String, Arc<Schema>>, pub schemas: std::collections::BTreeMap<String, Arc<Schema>>,

View File

@ -3,6 +3,8 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize, Default)] #[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default)] #[serde(default)]
pub struct Relation { pub struct Relation {
pub id: String,
pub r#type: String,
pub constraint: String, pub constraint: String,
pub source_type: String, pub source_type: String,
pub source_columns: Vec<String>, pub source_columns: Vec<String>,

View File

@ -72,7 +72,7 @@ pub fn jspg_merge(schema_id: &str, data: JsonB) -> JsonB {
} }
#[cfg_attr(not(test), pg_extern)] #[cfg_attr(not(test), pg_extern)]
pub fn jspg_query(schema_id: &str, filters: Option<JsonB>) -> JsonB { pub fn jspg_query(schema_id: &str, filter: Option<JsonB>) -> JsonB {
let engine_opt = { let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap(); let lock = GLOBAL_JSPG.read().unwrap();
lock.clone() lock.clone()
@ -82,7 +82,7 @@ pub fn jspg_query(schema_id: &str, filters: Option<JsonB>) -> JsonB {
Some(engine) => { Some(engine) => {
let drop = engine let drop = engine
.queryer .queryer
.query(schema_id, filters.as_ref().map(|f| &f.0)); .query(schema_id, filter.as_ref().map(|f| &f.0));
JsonB(serde_json::to_value(drop).unwrap()) JsonB(serde_json::to_value(drop).unwrap())
} }
None => jspg_failure(), None => jspg_failure(),
@ -109,7 +109,7 @@ pub fn jspg_validate(schema_id: &str, instance: JsonB) -> JsonB {
} }
#[cfg_attr(not(test), pg_extern)] #[cfg_attr(not(test), pg_extern)]
pub fn jspg_schemas() -> JsonB { pub fn jspg_database() -> JsonB {
let engine_opt = { let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap(); let lock = GLOBAL_JSPG.read().unwrap();
lock.clone() lock.clone()
@ -117,9 +117,9 @@ pub fn jspg_schemas() -> JsonB {
match engine_opt { match engine_opt {
Some(engine) => { Some(engine) => {
let schemas_json = serde_json::to_value(&engine.database.schemas) let database_json = serde_json::to_value(&engine.database)
.unwrap_or(serde_json::Value::Object(serde_json::Map::new())); .unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
let drop = crate::drop::Drop::success_with_val(schemas_json); let drop = crate::drop::Drop::success_with_val(database_json);
JsonB(serde_json::to_value(drop).unwrap()) JsonB(serde_json::to_value(drop).unwrap())
} }
None => jspg_failure(), None => jspg_failure(),

View File

@ -21,9 +21,9 @@ impl Queryer {
pub fn query( pub fn query(
&self, &self,
schema_id: &str, schema_id: &str,
filters: Option<&serde_json::Value>, filter: Option<&serde_json::Value>,
) -> crate::drop::Drop { ) -> crate::drop::Drop {
let filters_map = filters.and_then(|f| f.as_object()); let filters_map = filter.and_then(|f| f.as_object());
// 1. Process filters into structured $op keys and linear values // 1. Process filters into structured $op keys and linear values
let (filter_keys, args) = match self.parse_filter_entries(filters_map) { let (filter_keys, args) = match self.parse_filter_entries(filters_map) {
@ -35,7 +35,7 @@ impl Queryer {
details: crate::drop::ErrorDetails { details: crate::drop::ErrorDetails {
path: None, // filters apply to the root query path: None, // filters apply to the root query
cause: Some(msg), cause: Some(msg),
context: filters.cloned(), context: filter.cloned(),
schema: Some(schema_id.to_string()), schema: Some(schema_id.to_string()),
}, },
}]); }]);

View File

@ -81,54 +81,116 @@ fn test_library_api() {
}) })
); );
// 3. Validate jspg_schemas // 3. Validate jspg_database mapping natively!
let schemas_drop = jspg_schemas(); let db_drop = jspg_database();
assert_eq!( assert_eq!(
schemas_drop.0, db_drop.0,
json!({ json!({
"type": "drop", "type": "drop",
"response": { "response": {
"source_schema": { "enums": {},
"type": "object", "puncs": {},
"properties": { "relations": {
"type": { "type": "string" }, "fk_test_target": {
"name": { "type": "string" }, "id": "11111111-1111-1111-1111-111111111111",
"target": { "type": "relation",
"type": "target_schema", "constraint": "fk_test_target",
"compiledPropertyNames": ["value"] "destination_columns": ["id"],
} "destination_type": "target_schema",
}, "prefix": "target",
"required": ["name"], "source_columns": ["target_id"],
"compiledPropertyNames": ["name", "target", "type"], "source_type": "source_schema"
"compiledEdges": {
"target": {
"constraint": "fk_test_target",
"forward": true
}
} }
}, },
"source_schema.filter": { "types": {
"type": "object", "source_schema": {
"properties": { "default_fields": [],
"type": { "type": ["string.condition", "null"] }, "field_types": null,
"name": { "type": ["string.condition", "null"] }, "fields": [],
"target": { "type": ["target_schema.filter", "null"] } "grouped_fields": null,
"hierarchy": ["source_schema", "entity"],
"historical": false,
"id": "",
"longevity": null,
"lookup_fields": [],
"module": "",
"name": "source_schema",
"notify": false,
"null_fields": [],
"ownable": false,
"relationship": false,
"schemas": {
"source_schema": {
"compiledEdges": {
"target": {
"constraint": "fk_test_target",
"forward": true
}
},
"compiledPropertyNames": ["name", "target", "type"],
"properties": {
"name": { "type": "string" },
"target": {
"compiledPropertyNames": ["value"],
"type": "target_schema"
},
"type": { "type": "string" }
},
"required": ["name"],
"type": "object"
},
"source_schema.filter": {
"compiledPropertyNames": ["name", "target", "type"],
"properties": {
"name": { "type": ["string.condition", "null"] },
"target": { "type": ["target_schema.filter", "null"] },
"type": { "type": ["string.condition", "null"] }
},
"type": "filter"
}
},
"sensitive": false,
"source": "",
"type": "",
"variations": ["source_schema"]
}, },
"compiledPropertyNames": ["name", "target", "type"] "target_schema": {
}, "default_fields": [],
"target_schema": { "field_types": null,
"type": "object", "fields": [],
"properties": { "grouped_fields": null,
"value": { "type": "number" } "hierarchy": ["target_schema", "entity"],
}, "historical": false,
"compiledPropertyNames": ["value"] "id": "",
}, "longevity": null,
"target_schema.filter": { "lookup_fields": [],
"type": "object", "module": "",
"properties": { "name": "target_schema",
"value": { "type": ["number.condition", "null"] } "notify": false,
}, "null_fields": [],
"compiledPropertyNames": ["value"] "ownable": false,
"relationship": false,
"schemas": {
"target_schema": {
"compiledPropertyNames": ["value"],
"properties": {
"value": { "type": "number" }
},
"type": "object"
},
"target_schema.filter": {
"compiledPropertyNames": ["value"],
"properties": {
"value": { "type": ["number.condition", "null"] }
},
"type": "filter"
}
},
"sensitive": false,
"source": "",
"type": "",
"variations": ["target_schema"]
}
} }
} }
}) })

View File

@ -17,7 +17,7 @@ pub struct Case {
// For Query // For Query
#[serde(default)] #[serde(default)]
pub filters: Option<serde_json::Value>, pub filter: Option<serde_json::Value>,
// For Merge & Validate // For Merge & Validate
#[serde(default)] #[serde(default)]
@ -122,7 +122,7 @@ impl Case {
use crate::queryer::Queryer; use crate::queryer::Queryer;
let queryer = Queryer::new(db.clone()); let queryer = Queryer::new(db.clone());
let result = queryer.query(&self.schema_id, self.filters.as_ref()); let result = queryer.query(&self.schema_id, self.filter.as_ref());
let return_val = if let Some(expect) = &self.expect { let return_val = if let Some(expect) = &self.expect {
if let Err(e) = expect.assert_drop(&result) { if let Err(e) = expect.assert_drop(&result) {

View File

@ -1 +1 @@
1.0.123 1.0.125