Compare commits

...

4 Commits

Author SHA1 Message Date
3a4c53dc7d version: 1.0.125 2026-04-17 07:43:30 -04:00
69bd726b25 more filter fixes 2026-04-17 07:43:19 -04:00
c2267b68d8 version: 1.0.124 2026-04-17 05:53:54 -04:00
f58d1a32a3 full database extraction 2026-04-17 05:53:44 -04:00
12 changed files with 259 additions and 102 deletions

View File

@ -94,19 +94,30 @@
{
"id": "type3",
"type": "type",
"name": "filter",
"name": "search",
"module": "core",
"source": "filter",
"source": "search",
"hierarchy": [
"filter"
"search"
],
"variations": [
"filter",
"string.condition",
"integer.condition",
"date.condition"
"search"
],
"schemas": {
"search": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"filter": {
"type": "filter"
}
}
},
"filter": {
"type": "object"
},
"condition": {
"type": "object",
"properties": {
@ -161,7 +172,7 @@
"schemas": {
"person": {},
"person.filter": {
"type": "object",
"type": "filter",
"compiledPropertyNames": [
"age",
"billing_address",
@ -197,7 +208,7 @@
},
"address": {},
"address.filter": {
"type": "object",
"type": "filter",
"compiledPropertyNames": [
"city"
],
@ -210,10 +221,33 @@
}
}
},
"filter": {},
"condition": {},
"string.condition": {},
"integer.condition": {},
"date.condition": {}
"date.condition": {},
"search": {},
"search.filter": {
"type": "filter",
"compiledPropertyNames": [
"filter",
"name"
],
"properties": {
"filter": {
"type": [
"filter.filter",
"null"
]
},
"name": {
"type": [
"string.condition",
"null"
]
}
}
}
}
}
}

View File

@ -1195,7 +1195,7 @@
"description": "Simple entity select with multiple filters",
"action": "query",
"schema_id": "entity",
"filters": {
"filter": {
"id": {
"$eq": "123e4567-e89b-12d3-a456-426614174000",
"$ne": "123e4567-e89b-12d3-a456-426614174001",
@ -1443,7 +1443,7 @@
"description": "Person select on full schema with filters",
"action": "query",
"schema_id": "full.person",
"filters": {
"filter": {
"age": {
"$eq": 30,
"$gt": 20,

View File

@ -1,6 +1,6 @@
use crate::database::Database;
use crate::database::object::{SchemaObject, SchemaTypeOrArray};
use crate::database::schema::Schema;
use crate::database::Database;
use std::collections::BTreeMap;
use std::sync::Arc;
@ -20,16 +20,26 @@ impl Schema {
let mut child_obj = SchemaObject::default();
child_obj.type_ = Some(SchemaTypeOrArray::Multiple(filter_type));
filter_props.insert(key.clone(), Arc::new(Schema { obj: child_obj, always_fail: false }));
filter_props.insert(
key.clone(),
Arc::new(Schema {
obj: child_obj,
always_fail: false,
}),
);
}
}
if !filter_props.is_empty() {
let mut wrapper_obj = SchemaObject::default();
wrapper_obj.type_ = Some(SchemaTypeOrArray::Single("object".to_string()));
// Conceptually link this directly into the STI lineage of the base `filter` object
wrapper_obj.type_ = Some(SchemaTypeOrArray::Single("filter".to_string()));
wrapper_obj.properties = Some(filter_props);
return Some(Schema { obj: wrapper_obj, always_fail: false });
return Some(Schema {
obj: wrapper_obj,
always_fail: false,
});
}
}
None

View File

@ -1,6 +1,6 @@
pub mod collection;
pub mod edges;
pub mod filters;
pub mod filter;
pub mod polymorphism;
use crate::database::schema::Schema;

View File

@ -29,12 +29,15 @@ use std::collections::HashMap;
use std::sync::Arc;
use r#type::Type;
#[derive(serde::Serialize)]
pub struct Database {
pub enums: HashMap<String, Enum>,
pub types: HashMap<String, Type>,
pub puncs: HashMap<String, Punc>,
pub relations: HashMap<String, Relation>,
#[serde(skip)]
pub schemas: HashMap<String, Arc<Schema>>,
#[serde(skip)]
pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
}
@ -238,23 +241,30 @@ impl Database {
// Phase 2: Synthesize Composed Filter References
let mut filter_schemas = Vec::new();
for type_def in self.types.values() {
for (type_name, type_def) in &self.types {
for (id, schema_arc) in &type_def.schemas {
// Only run synthesis on actual structured, table-backed boundaries. Exclude subschemas!
let base_name = id.split('.').last().unwrap_or(id);
let is_table_backed = base_name == type_def.name;
if is_table_backed && !id.contains('/') {
if let Some(filter_schema) = schema_arc.compile_filter(self, id, errors) {
filter_schemas.push((format!("{}.filter", id), Arc::new(filter_schema)));
filter_schemas.push((
type_name.clone(),
format!("{}.filter", id),
Arc::new(filter_schema),
));
}
}
}
}
let mut filter_ids = Vec::new();
for (id, filter_arc) in filter_schemas {
for (type_name, id, filter_arc) in filter_schemas {
filter_ids.push(id.clone());
self.schemas.insert(id, filter_arc);
self.schemas.insert(id.clone(), filter_arc.clone());
if let Some(t) = self.types.get_mut(&type_name) {
t.schemas.insert(id, filter_arc);
}
}
// Now actively compile the newly injected filters to lock all nested compose references natively
@ -269,50 +279,88 @@ impl Database {
}
fn collect_schemas(&mut self, errors: &mut Vec<crate::drop::Error>) {
let mut to_insert = Vec::new();
let mut type_insert = Vec::new();
let mut punc_insert = Vec::new();
let mut enum_insert = Vec::new();
let mut global_insert = Vec::new();
// Pass 1: Extract all Schemas structurally off top level definitions into the master registry.
// Validate every node recursively via string filters natively!
for type_def in self.types.values() {
for (type_name, type_def) in &self.types {
for (id, schema_arc) in &type_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc)));
global_insert.push((id.clone(), Arc::clone(schema_arc)));
let mut local_insert = Vec::new();
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors,
);
}
}
for punc_def in self.puncs.values() {
for (id, schema_arc) in &punc_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
errors,
);
}
}
for enum_def in self.enums.values() {
for (id, schema_arc) in &enum_def.schemas {
to_insert.push((id.clone(), Arc::clone(schema_arc)));
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut to_insert,
&mut local_insert,
errors,
);
for entry in &local_insert {
type_insert.push((type_name.clone(), entry.0.clone(), Arc::clone(&entry.1)));
global_insert.push((entry.0.clone(), Arc::clone(&entry.1)));
}
}
}
for (id, schema_arc) in to_insert {
for (punc_name, punc_def) in &self.puncs {
for (id, schema_arc) in &punc_def.schemas {
global_insert.push((id.clone(), Arc::clone(schema_arc)));
let mut local_insert = Vec::new();
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut local_insert,
errors,
);
for entry in &local_insert {
punc_insert.push((punc_name.clone(), entry.0.clone(), Arc::clone(&entry.1)));
global_insert.push((entry.0.clone(), Arc::clone(&entry.1)));
}
}
}
for (enum_name, enum_def) in &self.enums {
for (id, schema_arc) in &enum_def.schemas {
global_insert.push((id.clone(), Arc::clone(schema_arc)));
let mut local_insert = Vec::new();
crate::database::schema::Schema::collect_schemas(
schema_arc,
id,
id.clone(),
&mut local_insert,
errors,
);
for entry in &local_insert {
enum_insert.push((enum_name.clone(), entry.0.clone(), Arc::clone(&entry.1)));
global_insert.push((entry.0.clone(), Arc::clone(&entry.1)));
}
}
}
// Apply global inserts
for (id, schema_arc) in global_insert {
self.schemas.insert(id, schema_arc);
}
// Apply local scopes
for (origin_name, id, schema_arc) in type_insert {
if let Some(t) = self.types.get_mut(&origin_name) {
t.schemas.insert(id, schema_arc);
}
}
for (origin_name, id, schema_arc) in punc_insert {
if let Some(p) = self.puncs.get_mut(&origin_name) {
p.schemas.insert(id, schema_arc);
}
}
for (origin_name, id, schema_arc) in enum_insert {
if let Some(e) = self.enums.get_mut(&origin_name) {
e.schemas.insert(id, schema_arc);
}
}
}
/// Inspects the Postgres pg_constraint relations catalog to securely identify

View File

@ -15,6 +15,7 @@ pub struct Punc {
pub public: bool,
pub form: bool,
pub get: Option<String>,
pub save: Option<String>,
pub page: Option<Page>,
#[serde(default)]
pub schemas: std::collections::BTreeMap<String, Arc<Schema>>,

View File

@ -3,6 +3,8 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(default)]
pub struct Relation {
pub id: String,
pub r#type: String,
pub constraint: String,
pub source_type: String,
pub source_columns: Vec<String>,

View File

@ -72,7 +72,7 @@ pub fn jspg_merge(schema_id: &str, data: JsonB) -> JsonB {
}
#[cfg_attr(not(test), pg_extern)]
pub fn jspg_query(schema_id: &str, filters: Option<JsonB>) -> JsonB {
pub fn jspg_query(schema_id: &str, filter: Option<JsonB>) -> JsonB {
let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap();
lock.clone()
@ -82,7 +82,7 @@ pub fn jspg_query(schema_id: &str, filters: Option<JsonB>) -> JsonB {
Some(engine) => {
let drop = engine
.queryer
.query(schema_id, filters.as_ref().map(|f| &f.0));
.query(schema_id, filter.as_ref().map(|f| &f.0));
JsonB(serde_json::to_value(drop).unwrap())
}
None => jspg_failure(),
@ -109,7 +109,7 @@ pub fn jspg_validate(schema_id: &str, instance: JsonB) -> JsonB {
}
#[cfg_attr(not(test), pg_extern)]
pub fn jspg_schemas() -> JsonB {
pub fn jspg_database() -> JsonB {
let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap();
lock.clone()
@ -117,9 +117,9 @@ pub fn jspg_schemas() -> JsonB {
match engine_opt {
Some(engine) => {
let schemas_json = serde_json::to_value(&engine.database.schemas)
let database_json = serde_json::to_value(&engine.database)
.unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
let drop = crate::drop::Drop::success_with_val(schemas_json);
let drop = crate::drop::Drop::success_with_val(database_json);
JsonB(serde_json::to_value(drop).unwrap())
}
None => jspg_failure(),

View File

@ -21,9 +21,9 @@ impl Queryer {
pub fn query(
&self,
schema_id: &str,
filters: Option<&serde_json::Value>,
filter: Option<&serde_json::Value>,
) -> crate::drop::Drop {
let filters_map = filters.and_then(|f| f.as_object());
let filters_map = filter.and_then(|f| f.as_object());
// 1. Process filters into structured $op keys and linear values
let (filter_keys, args) = match self.parse_filter_entries(filters_map) {
@ -35,7 +35,7 @@ impl Queryer {
details: crate::drop::ErrorDetails {
path: None, // filters apply to the root query
cause: Some(msg),
context: filters.cloned(),
context: filter.cloned(),
schema: Some(schema_id.to_string()),
},
}]);

View File

@ -81,54 +81,116 @@ fn test_library_api() {
})
);
// 3. Validate jspg_schemas
let schemas_drop = jspg_schemas();
// 3. Validate jspg_database mapping natively!
let db_drop = jspg_database();
assert_eq!(
schemas_drop.0,
db_drop.0,
json!({
"type": "drop",
"response": {
"source_schema": {
"type": "object",
"properties": {
"type": { "type": "string" },
"name": { "type": "string" },
"target": {
"type": "target_schema",
"compiledPropertyNames": ["value"]
}
},
"required": ["name"],
"compiledPropertyNames": ["name", "target", "type"],
"compiledEdges": {
"target": {
"constraint": "fk_test_target",
"forward": true
}
"enums": {},
"puncs": {},
"relations": {
"fk_test_target": {
"id": "11111111-1111-1111-1111-111111111111",
"type": "relation",
"constraint": "fk_test_target",
"destination_columns": ["id"],
"destination_type": "target_schema",
"prefix": "target",
"source_columns": ["target_id"],
"source_type": "source_schema"
}
},
"source_schema.filter": {
"type": "object",
"properties": {
"type": { "type": ["string.condition", "null"] },
"name": { "type": ["string.condition", "null"] },
"target": { "type": ["target_schema.filter", "null"] }
"types": {
"source_schema": {
"default_fields": [],
"field_types": null,
"fields": [],
"grouped_fields": null,
"hierarchy": ["source_schema", "entity"],
"historical": false,
"id": "",
"longevity": null,
"lookup_fields": [],
"module": "",
"name": "source_schema",
"notify": false,
"null_fields": [],
"ownable": false,
"relationship": false,
"schemas": {
"source_schema": {
"compiledEdges": {
"target": {
"constraint": "fk_test_target",
"forward": true
}
},
"compiledPropertyNames": ["name", "target", "type"],
"properties": {
"name": { "type": "string" },
"target": {
"compiledPropertyNames": ["value"],
"type": "target_schema"
},
"type": { "type": "string" }
},
"required": ["name"],
"type": "object"
},
"source_schema.filter": {
"compiledPropertyNames": ["name", "target", "type"],
"properties": {
"name": { "type": ["string.condition", "null"] },
"target": { "type": ["target_schema.filter", "null"] },
"type": { "type": ["string.condition", "null"] }
},
"type": "filter"
}
},
"sensitive": false,
"source": "",
"type": "",
"variations": ["source_schema"]
},
"compiledPropertyNames": ["name", "target", "type"]
},
"target_schema": {
"type": "object",
"properties": {
"value": { "type": "number" }
},
"compiledPropertyNames": ["value"]
},
"target_schema.filter": {
"type": "object",
"properties": {
"value": { "type": ["number.condition", "null"] }
},
"compiledPropertyNames": ["value"]
"target_schema": {
"default_fields": [],
"field_types": null,
"fields": [],
"grouped_fields": null,
"hierarchy": ["target_schema", "entity"],
"historical": false,
"id": "",
"longevity": null,
"lookup_fields": [],
"module": "",
"name": "target_schema",
"notify": false,
"null_fields": [],
"ownable": false,
"relationship": false,
"schemas": {
"target_schema": {
"compiledPropertyNames": ["value"],
"properties": {
"value": { "type": "number" }
},
"type": "object"
},
"target_schema.filter": {
"compiledPropertyNames": ["value"],
"properties": {
"value": { "type": ["number.condition", "null"] }
},
"type": "filter"
}
},
"sensitive": false,
"source": "",
"type": "",
"variations": ["target_schema"]
}
}
}
})

View File

@ -17,7 +17,7 @@ pub struct Case {
// For Query
#[serde(default)]
pub filters: Option<serde_json::Value>,
pub filter: Option<serde_json::Value>,
// For Merge & Validate
#[serde(default)]
@ -122,7 +122,7 @@ impl Case {
use crate::queryer::Queryer;
let queryer = Queryer::new(db.clone());
let result = queryer.query(&self.schema_id, self.filters.as_ref());
let result = queryer.query(&self.schema_id, self.filter.as_ref());
let return_val = if let Some(expect) = &self.expect {
if let Err(e) = expect.assert_drop(&result) {

View File

@ -1 +1 @@
1.0.123
1.0.125