Compare commits
24 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e19e1921e5 | |||
| 94d011e729 | |||
| 263cf04ffb | |||
| 00375c2926 | |||
| 885b9b5e44 | |||
| 298645ffdb | |||
| 330280ba48 | |||
| 02e661d219 | |||
| f7163e2689 | |||
| 091007006d | |||
| 3d66a7fc3c | |||
| e1314496dd | |||
| 70a27b430d | |||
| e078b8a74b | |||
| c2c0e62c2d | |||
| ebb97b3509 | |||
| 5d18847f32 | |||
| 4a33e29628 | |||
| d8fc286e94 | |||
| 507dc6d780 | |||
| e340039a30 | |||
| 08768e3d42 | |||
| 6c9e6575ce | |||
| 5d11c4c92c |
5
.vscode/extensions.json
vendored
Normal file
5
.vscode/extensions.json
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"rust-lang.rust-analyzer"
|
||||
]
|
||||
}
|
||||
81
Cargo.lock
generated
81
Cargo.lock
generated
@ -55,6 +55,15 @@ version = "1.0.101"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
|
||||
|
||||
[[package]]
|
||||
name = "ar_archive_writer"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b"
|
||||
dependencies = [
|
||||
"object",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-trait"
|
||||
version = "0.1.89"
|
||||
@ -874,6 +883,7 @@ dependencies = [
|
||||
"regex-syntax",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sqlparser",
|
||||
"url",
|
||||
"uuid",
|
||||
"xxhash-rust",
|
||||
@ -1040,6 +1050,15 @@ dependencies = [
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.37.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.21.3"
|
||||
@ -1377,6 +1396,16 @@ dependencies = [
|
||||
"unarray",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "psm"
|
||||
version = "0.1.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8"
|
||||
dependencies = [
|
||||
"ar_archive_writer",
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quick-error"
|
||||
version = "1.2.3"
|
||||
@ -1442,6 +1471,26 @@ dependencies = [
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "recursive"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0786a43debb760f491b1bc0269fe5e84155353c67482b9e60d0cfb596054b43e"
|
||||
dependencies = [
|
||||
"recursive-proc-macro-impl",
|
||||
"stacker",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "recursive-proc-macro-impl"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76009fbe0614077fc1a2ce255e3a1881a2e3a3527097d5dc6d8212c585e7e38b"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.18"
|
||||
@ -1669,12 +1718,35 @@ dependencies = [
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparser"
|
||||
version = "0.61.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dbf5ea8d4d7c808e1af1cbabebca9a2abe603bcefc22294c5b95018d53200cb7"
|
||||
dependencies = [
|
||||
"log",
|
||||
"recursive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
|
||||
|
||||
[[package]]
|
||||
name = "stacker"
|
||||
version = "0.1.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"psm",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stringprep"
|
||||
version = "0.1.5"
|
||||
@ -2323,6 +2395,15 @@ dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.60.2"
|
||||
|
||||
@ -23,6 +23,7 @@ indexmap = { version = "2.13.0", features = ["serde"] }
|
||||
moka = { version = "0.12.14", features = ["sync"] }
|
||||
xxhash-rust = { version = "0.8.15", features = ["xxh64"] }
|
||||
dashmap = "6.1.0"
|
||||
sqlparser = "0.61.0"
|
||||
|
||||
[dev-dependencies]
|
||||
pgrx-tests = "0.16.1"
|
||||
|
||||
@ -43,7 +43,7 @@ JSPG implements specific extensions to the Draft 2020-12 standard to support the
|
||||
#### A. Polymorphism & Referencing (`$ref`, `$family`, and Native Types)
|
||||
* **Native Type Discrimination (`variations`)**: Schemas defined inside a Postgres `type` are Entities. The validator securely and implicitly manages their `"type"` property. If an entity inherits from `user`, incoming JSON can safely define `{"type": "person"}` without errors, thanks to `compiled_variations` inheritance.
|
||||
* **Structural Inheritance & Viral Infection (`$ref`)**: `$ref` is used exclusively for structural inheritance, *never* for union creation. A Punc request schema that `$ref`s an Entity virally inherits all physical database polymorphism rules for that target.
|
||||
* **Shape Polymorphism (`$family`)**: Auto-expands polymorphic API lists based on an abstract Descendants Graph. If `{"$family": "widget"}` is used, JSPG evaluates the JSON against every schema that `$ref`s widget.
|
||||
* **Shape Polymorphism (`$family`)**: Auto-expands polymorphic API lists based on an abstract **Descendants Graph**. If `{"$family": "widget"}` is used, the Validator dynamically identifies *every* schema in the registry that `$ref`s `widget` (e.g., `stock.widget`, `task.widget`) and evaluates the JSON against all of them.
|
||||
* **Strict Matches & Depth Heuristic**: Polymorphic structures MUST match exactly **one** schema permutation. If multiple inherited struct permutations pass, JSPG applies the **Depth Heuristic Tie-Breaker**, selecting the candidate deepest in the inheritance tree.
|
||||
|
||||
#### B. Dot-Notation Schema Resolution & Database Mapping
|
||||
@ -103,6 +103,10 @@ The Queryer transforms Postgres into a pre-compiled Semantic Query Engine via th
|
||||
* **Array Inclusion**: `{"$in": [values]}`, `{"$nin": [values]}` use native `jsonb_array_elements_text()` bindings to enforce `IN` and `NOT IN` logic without runtime SQL injection risks.
|
||||
* **Text Matching (ILIKE)**: Evaluates `$eq` or `$ne` against string fields containing the `%` character natively into Postgres `ILIKE` and `NOT ILIKE` partial substring matches.
|
||||
* **Type Casting**: Safely resolves dynamic combinations by casting values instantly into the physical database types mapped in the schema (e.g. parsing `uuid` bindings to `::uuid`, formatting DateTimes to `::timestamptz`, and numbers to `::numeric`).
|
||||
* **Polymorphic SQL Generation (`$family`)**: Compiles `$family` properties by analyzing the **Physical Database Variations**, *not* the schema descendants.
|
||||
* **The Dot Convention**: When a schema requests `$family: "target.schema"`, the compiler extracts the base type (e.g. `schema`) and looks up its Physical Table definition.
|
||||
* **Multi-Table Branching**: If the Physical Table is a parent to other tables (e.g. `organization` has variations `["organization", "bot", "person"]`), the compiler generates a dynamic `CASE WHEN type = '...' THEN ...` query, expanding into `JOIN`s for each variation.
|
||||
* **Single-Table Bypass**: If the Physical Table is a leaf node with only one variation (e.g. `person` has variations `["person"]`), the compiler cleanly bypasses `CASE` generation and compiles a simple `SELECT` across the base table, as all schema extensions (e.g. `light.person`, `full.person`) are guaranteed to reside in the exact same physical row.
|
||||
|
||||
### The Stem Engine
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -24,7 +24,9 @@ impl DatabaseExecutor for SpiExecutor {
|
||||
}
|
||||
}
|
||||
|
||||
pgrx::PgTryBuilder::new(|| {
|
||||
Spi::connect(|client| {
|
||||
pgrx::notice!("JSPG_SQL: {}", sql);
|
||||
match client.select(sql, Some(args_with_oid.len() as i64), &args_with_oid) {
|
||||
Ok(tup_table) => {
|
||||
let mut results = Vec::new();
|
||||
@ -38,6 +40,12 @@ impl DatabaseExecutor for SpiExecutor {
|
||||
Err(e) => Err(format!("SPI Query Fetch Failure: {}", e)),
|
||||
}
|
||||
})
|
||||
})
|
||||
.catch_others(|cause| {
|
||||
pgrx::warning!("JSPG Caught Native Postgres Error: {:?}", cause);
|
||||
Err(format!("{:?}", cause))
|
||||
})
|
||||
.execute()
|
||||
}
|
||||
|
||||
fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String> {
|
||||
@ -52,12 +60,20 @@ impl DatabaseExecutor for SpiExecutor {
|
||||
}
|
||||
}
|
||||
|
||||
pgrx::PgTryBuilder::new(|| {
|
||||
Spi::connect_mut(|client| {
|
||||
pgrx::notice!("JSPG_SQL: {}", sql);
|
||||
match client.update(sql, Some(args_with_oid.len() as i64), &args_with_oid) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(format!("SPI Execution Failure: {}", e)),
|
||||
}
|
||||
})
|
||||
})
|
||||
.catch_others(|cause| {
|
||||
pgrx::warning!("JSPG Caught Native Postgres Error: {:?}", cause);
|
||||
Err(format!("{:?}", cause))
|
||||
})
|
||||
.execute()
|
||||
}
|
||||
|
||||
fn auth_user_id(&self) -> Result<String, String> {
|
||||
|
||||
@ -32,7 +32,7 @@ pub struct Database {
|
||||
pub enums: HashMap<String, Enum>,
|
||||
pub types: HashMap<String, Type>,
|
||||
pub puncs: HashMap<String, Punc>,
|
||||
pub relations: HashMap<String, Relation>,
|
||||
pub relations: Vec<Relation>,
|
||||
pub schemas: HashMap<String, Schema>,
|
||||
// Map of Schema ID -> { Entity Type -> Target Subschema Arc }
|
||||
pub stems: HashMap<String, HashMap<String, Arc<Stem>>>,
|
||||
@ -46,7 +46,7 @@ impl Database {
|
||||
let mut db = Self {
|
||||
enums: HashMap::new(),
|
||||
types: HashMap::new(),
|
||||
relations: HashMap::new(),
|
||||
relations: Vec::new(),
|
||||
puncs: HashMap::new(),
|
||||
schemas: HashMap::new(),
|
||||
stems: HashMap::new(),
|
||||
@ -78,7 +78,11 @@ impl Database {
|
||||
for item in arr {
|
||||
match serde_json::from_value::<Relation>(item.clone()) {
|
||||
Ok(def) => {
|
||||
db.relations.insert(def.constraint.clone(), def);
|
||||
if db.types.contains_key(&def.source_type)
|
||||
&& db.types.contains_key(&def.destination_type)
|
||||
{
|
||||
db.relations.push(def);
|
||||
}
|
||||
}
|
||||
Err(e) => println!("DATABASE RELATION PARSE FAILED: {:?}", e),
|
||||
}
|
||||
@ -137,7 +141,6 @@ impl Database {
|
||||
self.executor.timestamp()
|
||||
}
|
||||
|
||||
/// Organizes the graph of the database, compiling regex, format functions, and caching relationships.
|
||||
pub fn compile(&mut self) -> Result<(), crate::drop::Drop> {
|
||||
self.collect_schemas();
|
||||
self.collect_depths();
|
||||
@ -226,6 +229,79 @@ impl Database {
|
||||
self.descendants = descendants;
|
||||
}
|
||||
|
||||
pub fn get_relation(
|
||||
&self,
|
||||
parent_type: &str,
|
||||
child_type: &str,
|
||||
prop_name: &str,
|
||||
relative_keys: Option<&Vec<String>>,
|
||||
) -> Option<(&Relation, bool)> {
|
||||
if parent_type == "entity" && child_type == "entity" {
|
||||
return None; // Ignore entity <-> entity generic fallbacks, they aren't useful edges
|
||||
}
|
||||
|
||||
let p_def = self.types.get(parent_type)?;
|
||||
let c_def = self.types.get(child_type)?;
|
||||
|
||||
let mut matching_rels = Vec::new();
|
||||
let mut directions = Vec::new();
|
||||
|
||||
for rel in &self.relations {
|
||||
let is_forward = p_def.hierarchy.contains(&rel.source_type)
|
||||
&& c_def.hierarchy.contains(&rel.destination_type);
|
||||
let is_reverse = p_def.hierarchy.contains(&rel.destination_type)
|
||||
&& c_def.hierarchy.contains(&rel.source_type);
|
||||
|
||||
if is_forward {
|
||||
matching_rels.push(rel);
|
||||
directions.push(true);
|
||||
} else if is_reverse {
|
||||
matching_rels.push(rel);
|
||||
directions.push(false);
|
||||
}
|
||||
}
|
||||
|
||||
if matching_rels.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if matching_rels.len() == 1 {
|
||||
return Some((matching_rels[0], directions[0]));
|
||||
}
|
||||
|
||||
let mut chosen_idx = 0;
|
||||
let mut resolved = false;
|
||||
|
||||
// Reduce ambiguity with prefix
|
||||
for (i, rel) in matching_rels.iter().enumerate() {
|
||||
if let Some(prefix) = &rel.prefix {
|
||||
if prefix == prop_name {
|
||||
chosen_idx = i;
|
||||
resolved = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reduce ambiguity by checking if relative payload OMITS the prefix (M:M heuristic)
|
||||
if !resolved && relative_keys.is_some() {
|
||||
let keys = relative_keys.unwrap();
|
||||
let mut missing_prefix_ids = Vec::new();
|
||||
for (i, rel) in matching_rels.iter().enumerate() {
|
||||
if let Some(prefix) = &rel.prefix {
|
||||
if !keys.contains(prefix) {
|
||||
missing_prefix_ids.push(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
if missing_prefix_ids.len() == 1 {
|
||||
chosen_idx = missing_prefix_ids[0];
|
||||
}
|
||||
}
|
||||
|
||||
Some((matching_rels[chosen_idx], directions[chosen_idx]))
|
||||
}
|
||||
|
||||
fn collect_descendants_recursively(
|
||||
target: &str,
|
||||
direct_refs: &HashMap<String, Vec<String>>,
|
||||
@ -335,17 +411,14 @@ impl Database {
|
||||
if let (Some(pt), Some(prop)) = (&parent_type, &property_name) {
|
||||
let expected_col = format!("{}_id", prop);
|
||||
let mut found = false;
|
||||
for rel in db.relations.values() {
|
||||
if (rel.source_type == *pt && rel.destination_type == entity_type)
|
||||
|| (rel.source_type == entity_type && rel.destination_type == *pt)
|
||||
{
|
||||
|
||||
if let Some((rel, _)) = db.get_relation(pt, &entity_type, prop, None) {
|
||||
if rel.source_columns.contains(&expected_col) {
|
||||
relation_col = Some(expected_col.clone());
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
relation_col = Some(expected_col);
|
||||
}
|
||||
|
||||
@ -67,6 +67,10 @@ pub struct Error {
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct ErrorDetails {
|
||||
pub path: String,
|
||||
// Extensions can be added here (package, cause, etc)
|
||||
// For now, validator only provides path
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cause: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub context: Option<Value>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub schema: Option<String>,
|
||||
}
|
||||
|
||||
@ -31,6 +31,9 @@ fn jspg_failure() -> JsonB {
|
||||
message: "JSPG extension has not been initialized via jspg_setup".to_string(),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
};
|
||||
let drop = crate::drop::Drop::with_errors(vec![error]);
|
||||
|
||||
@ -21,21 +21,21 @@ impl Merger {
|
||||
}
|
||||
|
||||
pub fn merge(&self, data: Value) -> crate::drop::Drop {
|
||||
let mut val_resolved = Value::Null;
|
||||
let mut notifications_queue = Vec::new();
|
||||
|
||||
let result = self.merge_internal(data, &mut notifications_queue);
|
||||
let result = self.merge_internal(data.clone(), &mut notifications_queue);
|
||||
|
||||
match result {
|
||||
Ok(val) => {
|
||||
val_resolved = val;
|
||||
}
|
||||
let val_resolved = match result {
|
||||
Ok(val) => val,
|
||||
Err(msg) => {
|
||||
return crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "MERGE_FAILED".to_string(),
|
||||
message: msg,
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: None,
|
||||
context: Some(data),
|
||||
schema: None,
|
||||
},
|
||||
}]);
|
||||
}
|
||||
@ -49,6 +49,9 @@ impl Merger {
|
||||
message: format!("Executor Error in pre-ordered notify: {:?}", e),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}]);
|
||||
}
|
||||
@ -82,7 +85,11 @@ impl Merger {
|
||||
crate::drop::Drop::success_with_val(stripped_val)
|
||||
}
|
||||
|
||||
pub(crate) fn merge_internal(&self, data: Value, notifications: &mut Vec<String>) -> Result<Value, String> {
|
||||
pub(crate) fn merge_internal(
|
||||
&self,
|
||||
data: Value,
|
||||
notifications: &mut Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
match data {
|
||||
Value::Array(items) => self.merge_array(items, notifications),
|
||||
Value::Object(map) => self.merge_object(map, notifications),
|
||||
@ -90,7 +97,11 @@ impl Merger {
|
||||
}
|
||||
}
|
||||
|
||||
fn merge_array(&self, items: Vec<Value>, notifications: &mut Vec<String>) -> Result<Value, String> {
|
||||
fn merge_array(
|
||||
&self,
|
||||
items: Vec<Value>,
|
||||
notifications: &mut Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
let mut resolved_items = Vec::new();
|
||||
for item in items {
|
||||
let resolved = self.merge_internal(item, notifications)?;
|
||||
@ -99,7 +110,11 @@ impl Merger {
|
||||
Ok(Value::Array(resolved_items))
|
||||
}
|
||||
|
||||
fn merge_object(&self, obj: serde_json::Map<String, Value>, notifications: &mut Vec<String>) -> Result<Value, String> {
|
||||
fn merge_object(
|
||||
&self,
|
||||
obj: serde_json::Map<String, Value>,
|
||||
notifications: &mut Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
let queue_start = notifications.len();
|
||||
|
||||
let type_name = match obj.get("type").and_then(|v| v.as_str()) {
|
||||
@ -158,10 +173,23 @@ impl Merger {
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let relative_relation = self.get_entity_relation(type_def, &relative, &relation_name)?;
|
||||
// Attempt to extract relative object type name
|
||||
let relative_type_name = match relative.get("type").and_then(|v| v.as_str()) {
|
||||
Some(t) => t,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
if let Some(relation) = relative_relation {
|
||||
let parent_is_source = type_def.hierarchy.contains(&relation.source_type);
|
||||
let relative_keys: Vec<String> = relative.keys().cloned().collect();
|
||||
|
||||
// Call central Database O(1) graph logic
|
||||
let relative_relation = self.db.get_relation(
|
||||
&type_def.name,
|
||||
relative_type_name,
|
||||
&relation_name,
|
||||
Some(&relative_keys),
|
||||
);
|
||||
|
||||
if let Some((relation, parent_is_source)) = relative_relation {
|
||||
|
||||
if parent_is_source {
|
||||
// Parent holds FK to Child. Child MUST be generated FIRST.
|
||||
@ -247,9 +275,23 @@ impl Merger {
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let relative_relation = self.get_entity_relation(type_def, first_relative, &relation_name)?;
|
||||
// Attempt to extract relative object type name
|
||||
let relative_type_name = match first_relative.get("type").and_then(|v| v.as_str()) {
|
||||
Some(t) => t,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
if let Some(relation) = relative_relation {
|
||||
let relative_keys: Vec<String> = first_relative.keys().cloned().collect();
|
||||
|
||||
// Call central Database O(1) graph logic
|
||||
let relative_relation = self.db.get_relation(
|
||||
&type_def.name,
|
||||
relative_type_name,
|
||||
&relation_name,
|
||||
Some(&relative_keys),
|
||||
);
|
||||
|
||||
if let Some((relation, _)) = relative_relation {
|
||||
let mut relative_responses = Vec::new();
|
||||
for relative_item_val in relative_arr {
|
||||
if let Value::Object(mut relative_item) = relative_item_val {
|
||||
@ -266,7 +308,8 @@ impl Merger {
|
||||
&entity_fields,
|
||||
);
|
||||
|
||||
let merged_relative = match self.merge_internal(Value::Object(relative_item), notifications)? {
|
||||
let merged_relative =
|
||||
match self.merge_internal(Value::Object(relative_item), notifications)? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
@ -760,101 +803,7 @@ impl Merger {
|
||||
changes
|
||||
}
|
||||
|
||||
fn reduce_entity_relations(
|
||||
&self,
|
||||
mut matching_relations: Vec<crate::database::relation::Relation>,
|
||||
relative: &serde_json::Map<String, Value>,
|
||||
relation_name: &str,
|
||||
) -> Result<Option<crate::database::relation::Relation>, String> {
|
||||
if matching_relations.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
if matching_relations.len() == 1 {
|
||||
return Ok(Some(matching_relations.pop().unwrap()));
|
||||
}
|
||||
|
||||
let exact_match: Vec<_> = matching_relations
|
||||
.iter()
|
||||
.filter(|r| r.prefix.as_deref() == Some(relation_name))
|
||||
.cloned()
|
||||
.collect();
|
||||
if exact_match.len() == 1 {
|
||||
return Ok(Some(exact_match.into_iter().next().unwrap()));
|
||||
}
|
||||
|
||||
matching_relations.retain(|r| {
|
||||
if let Some(prefix) = &r.prefix {
|
||||
!relative.contains_key(prefix)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
|
||||
if matching_relations.len() == 1 {
|
||||
Ok(Some(matching_relations.pop().unwrap()))
|
||||
} else {
|
||||
let constraints: Vec<_> = matching_relations
|
||||
.iter()
|
||||
.map(|r| r.constraint.clone())
|
||||
.collect();
|
||||
Err(format!(
|
||||
"AMBIGUOUS_TYPE_RELATIONS: Could not reduce ambiguous type relations: {}",
|
||||
constraints.join(", ")
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn get_entity_relation(
|
||||
&self,
|
||||
entity_type: &crate::database::r#type::Type,
|
||||
relative: &serde_json::Map<String, Value>,
|
||||
relation_name: &str,
|
||||
) -> Result<Option<crate::database::relation::Relation>, String> {
|
||||
let relative_type_name = match relative.get("type").and_then(|v| v.as_str()) {
|
||||
Some(t) => t,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let relative_type = match self.db.types.get(relative_type_name) {
|
||||
Some(t) => t,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let mut relative_relations: Vec<crate::database::relation::Relation> = Vec::new();
|
||||
|
||||
for r in self.db.relations.values() {
|
||||
if r.source_type != "entity" && r.destination_type != "entity" {
|
||||
let condition1 = relative_type.hierarchy.contains(&r.source_type)
|
||||
&& entity_type.hierarchy.contains(&r.destination_type);
|
||||
let condition2 = entity_type.hierarchy.contains(&r.source_type)
|
||||
&& relative_type.hierarchy.contains(&r.destination_type);
|
||||
|
||||
if condition1 || condition2 {
|
||||
relative_relations.push(r.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut relative_relation =
|
||||
self.reduce_entity_relations(relative_relations, relative, relation_name)?;
|
||||
|
||||
if relative_relation.is_none() {
|
||||
let mut poly_relations: Vec<crate::database::relation::Relation> = Vec::new();
|
||||
for r in self.db.relations.values() {
|
||||
if r.destination_type == "entity" {
|
||||
let condition1 = relative_type.hierarchy.contains(&r.source_type);
|
||||
let condition2 = entity_type.hierarchy.contains(&r.source_type);
|
||||
|
||||
if condition1 || condition2 {
|
||||
poly_relations.push(r.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
relative_relation = self.reduce_entity_relations(poly_relations, relative, relation_name)?;
|
||||
}
|
||||
|
||||
Ok(relative_relation)
|
||||
}
|
||||
// Helper Functions
|
||||
|
||||
fn apply_entity_relation(
|
||||
source_entity: &mut serde_json::Map<String, Value>,
|
||||
|
||||
@ -1,15 +1,24 @@
|
||||
use crate::database::Database;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct SqlCompiler {
|
||||
pub db: Arc<Database>,
|
||||
pub struct Compiler<'a> {
|
||||
pub db: &'a Database,
|
||||
pub filter_keys: &'a [String],
|
||||
pub is_stem_query: bool,
|
||||
pub alias_counter: usize,
|
||||
}
|
||||
|
||||
impl SqlCompiler {
|
||||
pub fn new(db: Arc<Database>) -> Self {
|
||||
Self { db }
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Node<'a> {
|
||||
pub schema: std::sync::Arc<crate::database::schema::Schema>,
|
||||
pub parent_alias: String,
|
||||
pub parent_type_aliases: Option<std::sync::Arc<std::collections::HashMap<String, String>>>,
|
||||
pub parent_type: Option<&'a crate::database::r#type::Type>,
|
||||
pub property_name: Option<String>,
|
||||
pub depth: usize,
|
||||
pub stem_path: String,
|
||||
}
|
||||
|
||||
impl<'a> Compiler<'a> {
|
||||
/// Compiles a JSON schema into a nested PostgreSQL query returning JSONB
|
||||
pub fn compile(
|
||||
&self,
|
||||
@ -23,11 +32,10 @@ impl SqlCompiler {
|
||||
.get(schema_id)
|
||||
.ok_or_else(|| format!("Schema not found: {}", schema_id))?;
|
||||
|
||||
let resolved_arc;
|
||||
let target_schema = if let Some(path) = stem_path.filter(|p| !p.is_empty() && *p != "/") {
|
||||
if let Some(stems_map) = self.db.stems.get(schema_id) {
|
||||
if let Some(stem) = stems_map.get(path) {
|
||||
resolved_arc = stem.schema.clone();
|
||||
stem.schema.clone()
|
||||
} else {
|
||||
return Err(format!(
|
||||
"Stem entity type '{}' not found in schema '{}'",
|
||||
@ -40,64 +48,67 @@ impl SqlCompiler {
|
||||
path, schema_id
|
||||
));
|
||||
}
|
||||
resolved_arc.as_ref()
|
||||
} else {
|
||||
schema
|
||||
std::sync::Arc::new(schema.clone())
|
||||
};
|
||||
|
||||
// We expect the top level to typically be an Object or Array
|
||||
let is_stem_query = stem_path.is_some();
|
||||
let (sql, _) = self.walk_schema(target_schema, "t1", None, filter_keys, is_stem_query, 0, String::new())?;
|
||||
|
||||
let mut compiler = Compiler {
|
||||
db: &self.db,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
alias_counter: 0,
|
||||
};
|
||||
|
||||
let node = Node {
|
||||
schema: target_schema,
|
||||
parent_alias: "t1".to_string(),
|
||||
parent_type_aliases: None,
|
||||
parent_type: None,
|
||||
property_name: None,
|
||||
depth: 0,
|
||||
stem_path: String::new(),
|
||||
};
|
||||
|
||||
let (sql, _) = compiler.compile_node(node)?;
|
||||
Ok(sql)
|
||||
}
|
||||
|
||||
/// Recursively walks the schema AST emitting native PostgreSQL jsonb mapping
|
||||
/// Returns a tuple of (SQL_String, Field_Type)
|
||||
fn walk_schema(
|
||||
&self,
|
||||
schema: &crate::database::schema::Schema,
|
||||
parent_alias: &str,
|
||||
prop_name_context: Option<&str>,
|
||||
filter_keys: &[String],
|
||||
is_stem_query: bool,
|
||||
depth: usize,
|
||||
current_path: String,
|
||||
) -> Result<(String, String), String> {
|
||||
fn compile_node(&mut self, node: Node<'a>) -> Result<(String, String), String> {
|
||||
// Determine the base schema type (could be an array, object, or literal)
|
||||
match &schema.obj.type_ {
|
||||
match &node.schema.obj.type_ {
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Single(t)) if t == "array" => {
|
||||
// Handle Arrays:
|
||||
if let Some(items) = &schema.obj.items {
|
||||
let next_path = if current_path.is_empty() {
|
||||
self.compile_array(node)
|
||||
}
|
||||
_ => self.compile_reference(node),
|
||||
}
|
||||
}
|
||||
|
||||
fn compile_array(&mut self, node: Node<'a>) -> Result<(String, String), String> {
|
||||
if let Some(items) = &node.schema.obj.items {
|
||||
let next_path = if node.stem_path.is_empty() {
|
||||
String::from("#")
|
||||
} else {
|
||||
format!("{}.#", current_path)
|
||||
format!("{}.#", node.stem_path)
|
||||
};
|
||||
|
||||
if let Some(ref_id) = &items.obj.r#ref {
|
||||
if let Some(type_def) = self.db.types.get(ref_id) {
|
||||
return self.compile_entity_node(
|
||||
items,
|
||||
type_def,
|
||||
parent_alias,
|
||||
prop_name_context,
|
||||
true,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth,
|
||||
next_path,
|
||||
);
|
||||
let mut entity_noke = node.clone();
|
||||
entity_noke.stem_path = next_path;
|
||||
entity_noke.schema = std::sync::Arc::clone(items);
|
||||
return self.compile_entity(type_def, entity_noke, true);
|
||||
}
|
||||
}
|
||||
let (item_sql, _) = self.walk_schema(
|
||||
items,
|
||||
parent_alias,
|
||||
prop_name_context,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth + 1,
|
||||
next_path,
|
||||
)?;
|
||||
|
||||
let mut next_node = node.clone();
|
||||
next_node.depth += 1;
|
||||
next_node.stem_path = next_path;
|
||||
next_node.schema = std::sync::Arc::clone(items);
|
||||
let (item_sql, _) = self.compile_node(next_node)?;
|
||||
return Ok((
|
||||
format!("(SELECT jsonb_agg({}) FROM TODO)", item_sql),
|
||||
"array".to_string(),
|
||||
@ -109,122 +120,104 @@ impl SqlCompiler {
|
||||
"array".to_string(),
|
||||
))
|
||||
}
|
||||
_ => {
|
||||
|
||||
fn compile_reference(&mut self, node: Node<'a>) -> Result<(String, String), String> {
|
||||
// Determine if this schema represents a Database Entity
|
||||
let mut resolved_type = None;
|
||||
|
||||
// Target is generally a specific schema (e.g. 'base.person'), but it tells us what physical
|
||||
// database table hierarchy it maps to via the `schema.id` prefix/suffix convention.
|
||||
if let Some(lookup_key) = schema.obj.id.as_ref().or(schema.obj.r#ref.as_ref()) {
|
||||
if let Some(family_target) = node.schema.obj.family.as_ref() {
|
||||
resolved_type = self.db.types.get(family_target);
|
||||
} else if let Some(lookup_key) = node
|
||||
.schema
|
||||
.obj
|
||||
.id
|
||||
.as_ref()
|
||||
.or(node.schema.obj.r#ref.as_ref())
|
||||
{
|
||||
let base_type_name = lookup_key.split('.').next_back().unwrap_or("").to_string();
|
||||
resolved_type = self.db.types.get(&base_type_name);
|
||||
}
|
||||
|
||||
if let Some(type_def) = resolved_type {
|
||||
return self.compile_entity_node(
|
||||
schema,
|
||||
type_def,
|
||||
parent_alias,
|
||||
prop_name_context,
|
||||
false,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth,
|
||||
current_path,
|
||||
);
|
||||
return self.compile_entity(type_def, node.clone(), false);
|
||||
}
|
||||
|
||||
// Handle Direct Refs
|
||||
if let Some(ref_id) = &schema.obj.r#ref {
|
||||
if let Some(ref_id) = &node.schema.obj.r#ref {
|
||||
// If it's just an ad-hoc struct ref, we should resolve it
|
||||
if let Some(target_schema) = self.db.schemas.get(ref_id) {
|
||||
return self.walk_schema(
|
||||
target_schema,
|
||||
parent_alias,
|
||||
prop_name_context,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth,
|
||||
current_path,
|
||||
);
|
||||
let mut ref_node = node.clone();
|
||||
ref_node.schema = std::sync::Arc::new(target_schema.clone());
|
||||
return self.compile_node(ref_node);
|
||||
}
|
||||
return Err(format!("Unresolved $ref: {}", ref_id));
|
||||
}
|
||||
// Handle $family Polymorphism fallbacks for relations
|
||||
if let Some(family_target) = &node.schema.obj.family {
|
||||
let base_type_name = family_target
|
||||
.split('.')
|
||||
.next_back()
|
||||
.unwrap_or(family_target)
|
||||
.to_string();
|
||||
|
||||
if let Some(type_def) = self.db.types.get(&base_type_name) {
|
||||
if type_def.variations.len() == 1 {
|
||||
let mut bypass_schema = crate::database::schema::Schema::default();
|
||||
bypass_schema.obj.r#ref = Some(family_target.clone());
|
||||
let mut bypass_node = node.clone();
|
||||
bypass_node.schema = std::sync::Arc::new(bypass_schema);
|
||||
return self.compile_node(bypass_node);
|
||||
}
|
||||
|
||||
let mut sorted_variations: Vec<String> = type_def.variations.iter().cloned().collect();
|
||||
sorted_variations.sort();
|
||||
|
||||
let mut family_schemas = Vec::new();
|
||||
for variation in &sorted_variations {
|
||||
let mut ref_schema = crate::database::schema::Schema::default();
|
||||
ref_schema.obj.r#ref = Some(variation.clone());
|
||||
family_schemas.push(std::sync::Arc::new(ref_schema));
|
||||
}
|
||||
|
||||
return self.compile_one_of(&family_schemas, node);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle oneOf Polymorphism fallbacks for relations
|
||||
if let Some(one_of) = &node.schema.obj.one_of {
|
||||
return self.compile_one_of(one_of, node.clone());
|
||||
}
|
||||
|
||||
// Just an inline object definition?
|
||||
if let Some(props) = &schema.obj.properties {
|
||||
return self.compile_inline_object(
|
||||
props,
|
||||
parent_alias,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth,
|
||||
current_path,
|
||||
);
|
||||
if let Some(props) = &node.schema.obj.properties {
|
||||
return self.compile_object(props, node.clone());
|
||||
}
|
||||
|
||||
// Literal fallback
|
||||
Ok((
|
||||
format!(
|
||||
"{}.{}",
|
||||
parent_alias,
|
||||
prop_name_context.unwrap_or("unknown_prop")
|
||||
node.parent_alias,
|
||||
node.property_name.as_deref().unwrap_or("unknown_prop")
|
||||
),
|
||||
"string".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_merged_properties(
|
||||
&self,
|
||||
schema: &crate::database::schema::Schema,
|
||||
) -> std::collections::BTreeMap<String, Arc<crate::database::schema::Schema>> {
|
||||
let mut props = std::collections::BTreeMap::new();
|
||||
|
||||
if let Some(ref_id) = &schema.obj.r#ref {
|
||||
if let Some(parent_schema) = self.db.schemas.get(ref_id) {
|
||||
props.extend(self.get_merged_properties(parent_schema));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(local_props) = &schema.obj.properties {
|
||||
for (k, v) in local_props {
|
||||
props.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
|
||||
props
|
||||
}
|
||||
|
||||
fn compile_entity_node(
|
||||
&self,
|
||||
schema: &crate::database::schema::Schema,
|
||||
type_def: &crate::database::r#type::Type,
|
||||
parent_alias: &str,
|
||||
prop_name: Option<&str>,
|
||||
fn compile_entity(
|
||||
&mut self,
|
||||
r#type: &'a crate::database::r#type::Type,
|
||||
node: Node<'a>,
|
||||
is_array: bool,
|
||||
filter_keys: &[String],
|
||||
is_stem_query: bool,
|
||||
depth: usize,
|
||||
current_path: String,
|
||||
) -> Result<(String, String), String> {
|
||||
let local_ctx = format!("{}_{}", parent_alias, prop_name.unwrap_or("obj"));
|
||||
|
||||
// 1. Build FROM clauses and table aliases
|
||||
let (table_aliases, from_clauses) = self.build_hierarchy_from_clauses(type_def, &local_ctx);
|
||||
let (table_aliases, from_clauses) = self.compile_from_clause(r#type);
|
||||
|
||||
// 2. Map properties and build jsonb_build_object args
|
||||
let select_args = self.map_properties_to_aliases(
|
||||
schema,
|
||||
type_def,
|
||||
&table_aliases,
|
||||
parent_alias,
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth,
|
||||
¤t_path,
|
||||
)?;
|
||||
let mut select_args = self.compile_select_clause(r#type, &table_aliases, node.clone())?;
|
||||
|
||||
// 2.5 Inject polymorphism directly into the query object
|
||||
let mut poly_args = self.compile_polymorphism_select(r#type, &table_aliases, node.clone())?;
|
||||
select_args.append(&mut poly_args);
|
||||
|
||||
let jsonb_obj_sql = if select_args.is_empty() {
|
||||
"jsonb_build_object()".to_string()
|
||||
@ -233,15 +226,7 @@ impl SqlCompiler {
|
||||
};
|
||||
|
||||
// 3. Build WHERE clauses
|
||||
let where_clauses = self.build_filter_where_clauses(
|
||||
schema,
|
||||
type_def,
|
||||
&table_aliases,
|
||||
parent_alias,
|
||||
prop_name,
|
||||
filter_keys,
|
||||
¤t_path,
|
||||
)?;
|
||||
let where_clauses = self.compile_where_clause(r#type, &table_aliases, node)?;
|
||||
|
||||
let selection = if is_array {
|
||||
format!("COALESCE(jsonb_agg({}), '[]'::jsonb)", jsonb_obj_sql)
|
||||
@ -266,22 +251,161 @@ impl SqlCompiler {
|
||||
))
|
||||
}
|
||||
|
||||
fn build_hierarchy_from_clauses(
|
||||
&self,
|
||||
type_def: &crate::database::r#type::Type,
|
||||
local_ctx: &str,
|
||||
fn compile_polymorphism_select(
|
||||
&mut self,
|
||||
r#type: &'a crate::database::r#type::Type,
|
||||
table_aliases: &std::collections::HashMap<String, String>,
|
||||
node: Node<'a>,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let mut select_args = Vec::new();
|
||||
|
||||
if let Some(family_target) = node.schema.obj.family.as_ref() {
|
||||
let base_type_name = family_target
|
||||
.split('.')
|
||||
.next_back()
|
||||
.unwrap_or(family_target)
|
||||
.to_string();
|
||||
|
||||
if let Some(fam_type_def) = self.db.types.get(&base_type_name) {
|
||||
if fam_type_def.variations.len() == 1 {
|
||||
let mut bypass_schema = crate::database::schema::Schema::default();
|
||||
bypass_schema.obj.r#ref = Some(family_target.clone());
|
||||
|
||||
let mut bypass_node = node.clone();
|
||||
bypass_node.schema = std::sync::Arc::new(bypass_schema);
|
||||
|
||||
let mut bypassed_args =
|
||||
self.compile_select_clause(r#type, table_aliases, bypass_node)?;
|
||||
select_args.append(&mut bypassed_args);
|
||||
} else {
|
||||
let mut family_schemas = Vec::new();
|
||||
let mut sorted_fam_variations: Vec<String> =
|
||||
fam_type_def.variations.iter().cloned().collect();
|
||||
sorted_fam_variations.sort();
|
||||
|
||||
for variation in &sorted_fam_variations {
|
||||
let mut ref_schema = crate::database::schema::Schema::default();
|
||||
ref_schema.obj.r#ref = Some(variation.clone());
|
||||
family_schemas.push(std::sync::Arc::new(ref_schema));
|
||||
}
|
||||
|
||||
let base_alias = table_aliases
|
||||
.get(&r#type.name)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| node.parent_alias.to_string());
|
||||
select_args.push(format!("'id', {}.id", base_alias));
|
||||
let mut case_node = node.clone();
|
||||
case_node.parent_alias = base_alias.clone();
|
||||
let arc_aliases = std::sync::Arc::new(table_aliases.clone());
|
||||
case_node.parent_type_aliases = Some(arc_aliases);
|
||||
|
||||
let (case_sql, _) = self.compile_one_of(&family_schemas, case_node)?;
|
||||
select_args.push(format!("'type', {}", case_sql));
|
||||
}
|
||||
}
|
||||
} else if let Some(one_of) = &node.schema.obj.one_of {
|
||||
let base_alias = table_aliases
|
||||
.get(&r#type.name)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| node.parent_alias.to_string());
|
||||
select_args.push(format!("'id', {}.id", base_alias));
|
||||
let mut case_node = node.clone();
|
||||
case_node.parent_alias = base_alias.clone();
|
||||
let arc_aliases = std::sync::Arc::new(table_aliases.clone());
|
||||
case_node.parent_type_aliases = Some(arc_aliases);
|
||||
|
||||
let (case_sql, _) = self.compile_one_of(one_of, case_node)?;
|
||||
select_args.push(format!("'type', {}", case_sql));
|
||||
}
|
||||
|
||||
Ok(select_args)
|
||||
}
|
||||
|
||||
fn compile_object(
|
||||
&mut self,
|
||||
props: &std::collections::BTreeMap<String, std::sync::Arc<crate::database::schema::Schema>>,
|
||||
node: Node<'a>,
|
||||
) -> Result<(String, String), String> {
|
||||
let mut build_args = Vec::new();
|
||||
for (k, v) in props {
|
||||
let next_path = if node.stem_path.is_empty() {
|
||||
k.clone()
|
||||
} else {
|
||||
format!("{}.{}", node.stem_path, k)
|
||||
};
|
||||
|
||||
let mut child_node = node.clone();
|
||||
child_node.property_name = Some(k.clone());
|
||||
child_node.depth += 1;
|
||||
child_node.stem_path = next_path;
|
||||
child_node.schema = std::sync::Arc::clone(v);
|
||||
|
||||
let (child_sql, val_type) = self.compile_node(child_node)?;
|
||||
if val_type == "abort" {
|
||||
continue;
|
||||
}
|
||||
build_args.push(format!("'{}', {}", k, child_sql));
|
||||
}
|
||||
let combined = format!("jsonb_build_object({})", build_args.join(", "));
|
||||
Ok((combined, "object".to_string()))
|
||||
}
|
||||
|
||||
fn compile_one_of(
|
||||
&mut self,
|
||||
schemas: &[Arc<crate::database::schema::Schema>],
|
||||
node: Node<'a>,
|
||||
) -> Result<(String, String), String> {
|
||||
let mut case_statements = Vec::new();
|
||||
let type_col = if let Some(prop) = &node.property_name {
|
||||
format!("{}_type", prop)
|
||||
} else {
|
||||
"type".to_string()
|
||||
};
|
||||
|
||||
for option_schema in schemas {
|
||||
if let Some(ref_id) = &option_schema.obj.r#ref {
|
||||
// Find the physical type this ref maps to
|
||||
let base_type_name = ref_id.split('.').next_back().unwrap_or("").to_string();
|
||||
|
||||
// Generate the nested SQL for this specific target type
|
||||
let mut child_node = node.clone();
|
||||
child_node.schema = std::sync::Arc::clone(option_schema);
|
||||
let (val_sql, _) = self.compile_node(child_node)?;
|
||||
|
||||
case_statements.push(format!(
|
||||
"WHEN {}.{} = '{}' THEN ({})",
|
||||
node.parent_alias, type_col, base_type_name, val_sql
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if case_statements.is_empty() {
|
||||
return Ok(("NULL".to_string(), "string".to_string()));
|
||||
}
|
||||
|
||||
case_statements.sort();
|
||||
|
||||
let sql = format!("CASE {} ELSE NULL END", case_statements.join(" "));
|
||||
|
||||
Ok((sql, "object".to_string()))
|
||||
}
|
||||
|
||||
fn compile_from_clause(
|
||||
&mut self,
|
||||
r#type: &crate::database::r#type::Type,
|
||||
) -> (std::collections::HashMap<String, String>, Vec<String>) {
|
||||
let mut table_aliases = std::collections::HashMap::new();
|
||||
let mut from_clauses = Vec::new();
|
||||
|
||||
for (i, table_name) in type_def.hierarchy.iter().enumerate() {
|
||||
let alias = format!("{}_t{}", local_ctx, i + 1);
|
||||
for (i, table_name) in r#type.hierarchy.iter().enumerate() {
|
||||
self.alias_counter += 1;
|
||||
let alias = format!("{}_{}", table_name, self.alias_counter);
|
||||
table_aliases.insert(table_name.clone(), alias.clone());
|
||||
|
||||
if i == 0 {
|
||||
from_clauses.push(format!("agreego.{} {}", table_name, alias));
|
||||
} else {
|
||||
let prev_alias = format!("{}_t{}", local_ctx, i);
|
||||
let prev_alias = format!("{}_{}", r#type.hierarchy[i - 1], self.alias_counter - 1);
|
||||
from_clauses.push(format!(
|
||||
"JOIN agreego.{} {} ON {}.id = {}.id",
|
||||
table_name, alias, alias, prev_alias
|
||||
@ -291,26 +415,48 @@ impl SqlCompiler {
|
||||
(table_aliases, from_clauses)
|
||||
}
|
||||
|
||||
fn map_properties_to_aliases(
|
||||
&self,
|
||||
schema: &crate::database::schema::Schema,
|
||||
type_def: &crate::database::r#type::Type,
|
||||
fn compile_select_clause(
|
||||
&mut self,
|
||||
r#type: &'a crate::database::r#type::Type,
|
||||
table_aliases: &std::collections::HashMap<String, String>,
|
||||
parent_alias: &str,
|
||||
filter_keys: &[String],
|
||||
is_stem_query: bool,
|
||||
depth: usize,
|
||||
current_path: &str,
|
||||
node: Node<'a>,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let mut select_args = Vec::new();
|
||||
let grouped_fields = type_def.grouped_fields.as_ref().and_then(|v| v.as_object());
|
||||
let merged_props = self.get_merged_properties(schema);
|
||||
let grouped_fields = r#type.grouped_fields.as_ref().and_then(|v| v.as_object());
|
||||
let merged_props = self.get_merged_properties(node.schema.as_ref());
|
||||
let mut sorted_keys: Vec<&String> = merged_props.keys().collect();
|
||||
sorted_keys.sort();
|
||||
|
||||
for prop_key in sorted_keys {
|
||||
let prop_schema = &merged_props[prop_key];
|
||||
|
||||
let is_object_or_array = match &prop_schema.obj.type_ {
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Single(s)) => {
|
||||
s == "object" || s == "array"
|
||||
}
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Multiple(v)) => {
|
||||
v.contains(&"object".to_string()) || v.contains(&"array".to_string())
|
||||
}
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let is_primitive = prop_schema.obj.r#ref.is_none()
|
||||
&& !is_object_or_array
|
||||
&& prop_schema.obj.family.is_none()
|
||||
&& prop_schema.obj.one_of.is_none();
|
||||
|
||||
if is_primitive {
|
||||
if let Some(ft) = r#type.field_types.as_ref().and_then(|v| v.as_object()) {
|
||||
if !ft.contains_key(prop_key) {
|
||||
continue; // Skip frontend virtual properties missing from physical table fields
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (prop_key, prop_schema) in &merged_props {
|
||||
let mut owner_alias = table_aliases
|
||||
.get("entity")
|
||||
.cloned()
|
||||
.unwrap_or_else(|| format!("{}_t_err", parent_alias));
|
||||
.unwrap_or_else(|| format!("{}_t_err", node.parent_alias));
|
||||
|
||||
if let Some(gf) = grouped_fields {
|
||||
for (t_name, fields_val) in gf {
|
||||
@ -319,28 +465,30 @@ impl SqlCompiler {
|
||||
owner_alias = table_aliases
|
||||
.get(t_name)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| parent_alias.to_string());
|
||||
.unwrap_or_else(|| node.parent_alias.to_string());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let next_path = if current_path.is_empty() {
|
||||
let mut child_node = node.clone();
|
||||
child_node.parent_alias = owner_alias.clone();
|
||||
let arc_aliases = std::sync::Arc::new(table_aliases.clone());
|
||||
child_node.parent_type_aliases = Some(arc_aliases);
|
||||
child_node.parent_type = Some(r#type);
|
||||
child_node.property_name = Some(prop_key.clone());
|
||||
child_node.depth += 1;
|
||||
let next_path = if node.stem_path.is_empty() {
|
||||
prop_key.clone()
|
||||
} else {
|
||||
format!("{}.{}", current_path, prop_key)
|
||||
format!("{}.{}", node.stem_path, prop_key)
|
||||
};
|
||||
|
||||
let (val_sql, val_type) = self.walk_schema(
|
||||
prop_schema,
|
||||
&owner_alias,
|
||||
Some(prop_key),
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth + 1,
|
||||
next_path,
|
||||
)?;
|
||||
child_node.stem_path = next_path;
|
||||
child_node.schema = std::sync::Arc::clone(prop_schema);
|
||||
|
||||
let (val_sql, val_type) = self.compile_node(child_node)?;
|
||||
|
||||
if val_type != "abort" {
|
||||
select_args.push(format!("'{}', {}", prop_key, val_sql));
|
||||
@ -349,77 +497,65 @@ impl SqlCompiler {
|
||||
Ok(select_args)
|
||||
}
|
||||
|
||||
fn build_filter_where_clauses(
|
||||
fn compile_where_clause(
|
||||
&self,
|
||||
schema: &crate::database::schema::Schema,
|
||||
type_def: &crate::database::r#type::Type,
|
||||
table_aliases: &std::collections::HashMap<String, String>,
|
||||
parent_alias: &str,
|
||||
prop_name: Option<&str>,
|
||||
filter_keys: &[String],
|
||||
current_path: &str,
|
||||
r#type: &'a crate::database::r#type::Type,
|
||||
type_aliases: &std::collections::HashMap<String, String>,
|
||||
node: Node<'a>,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let base_alias = table_aliases
|
||||
.get(&type_def.name)
|
||||
let base_alias = type_aliases
|
||||
.get(&r#type.name)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| "err".to_string());
|
||||
|
||||
let entity_alias = type_aliases
|
||||
.get("entity")
|
||||
.cloned()
|
||||
.unwrap_or_else(|| base_alias.clone());
|
||||
|
||||
let mut where_clauses = Vec::new();
|
||||
where_clauses.push(format!("NOT {}.archived", base_alias));
|
||||
where_clauses.push(format!("NOT {}.archived", entity_alias));
|
||||
|
||||
for (i, filter_key) in filter_keys.iter().enumerate() {
|
||||
let mut parts = filter_key.split(':');
|
||||
let full_field_path = parts.next().unwrap_or(filter_key);
|
||||
let op = parts.next().unwrap_or("$eq");
|
||||
self.compile_filter_conditions(r#type, type_aliases, &node, &base_alias, &mut where_clauses);
|
||||
self.compile_relation_conditions(r#type, type_aliases, &node, &base_alias, &mut where_clauses)?;
|
||||
|
||||
let field_name = if current_path.is_empty() {
|
||||
if full_field_path.contains('.') || full_field_path.contains('#') {
|
||||
continue;
|
||||
Ok(where_clauses)
|
||||
}
|
||||
full_field_path
|
||||
} else {
|
||||
let prefix = format!("{}.", current_path);
|
||||
if full_field_path.starts_with(&prefix) {
|
||||
let remainder = &full_field_path[prefix.len()..];
|
||||
if remainder.contains('.') || remainder.contains('#') {
|
||||
continue;
|
||||
}
|
||||
remainder
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let mut filter_alias = base_alias.clone();
|
||||
|
||||
if let Some(gf) = type_def.grouped_fields.as_ref().and_then(|v| v.as_object()) {
|
||||
fn resolve_filter_alias(
|
||||
r#type: &crate::database::r#type::Type,
|
||||
type_aliases: &std::collections::HashMap<String, String>,
|
||||
base_alias: &str,
|
||||
field_name: &str,
|
||||
) -> String {
|
||||
if let Some(gf) = r#type.grouped_fields.as_ref().and_then(|v| v.as_object()) {
|
||||
for (t_name, fields_val) in gf {
|
||||
if let Some(fields_arr) = fields_val.as_array() {
|
||||
if fields_arr.iter().any(|v| v.as_str() == Some(field_name)) {
|
||||
filter_alias = table_aliases
|
||||
.get(t_name)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| base_alias.clone());
|
||||
break;
|
||||
return type_aliases.get(t_name).cloned().unwrap_or_else(|| base_alias.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
base_alias.to_string()
|
||||
}
|
||||
|
||||
fn determine_sql_cast_and_op(
|
||||
r#type: &crate::database::r#type::Type,
|
||||
node: &Node,
|
||||
field_name: &str,
|
||||
) -> (&'static str, bool) {
|
||||
let mut is_ilike = false;
|
||||
let mut cast = "";
|
||||
|
||||
if let Some(field_types) = type_def.field_types.as_ref().and_then(|v| v.as_object()) {
|
||||
if let Some(field_types) = r#type.field_types.as_ref().and_then(|v| v.as_object()) {
|
||||
if let Some(pg_type_val) = field_types.get(field_name) {
|
||||
if let Some(pg_type) = pg_type_val.as_str() {
|
||||
if pg_type == "uuid" {
|
||||
cast = "::uuid";
|
||||
} else if pg_type == "boolean" || pg_type == "bool" {
|
||||
cast = "::boolean";
|
||||
} else if pg_type.contains("timestamp")
|
||||
|| pg_type == "timestamptz"
|
||||
|| pg_type == "date"
|
||||
{
|
||||
} else if pg_type.contains("timestamp") || pg_type == "timestamptz" || pg_type == "date" {
|
||||
cast = "::timestamptz";
|
||||
} else if pg_type == "numeric"
|
||||
|| pg_type.contains("int")
|
||||
@ -429,7 +565,7 @@ impl SqlCompiler {
|
||||
cast = "::numeric";
|
||||
} else if pg_type == "text" || pg_type.contains("char") {
|
||||
let mut is_enum = false;
|
||||
if let Some(props) = &schema.obj.properties {
|
||||
if let Some(props) = &node.schema.obj.properties {
|
||||
if let Some(ps) = props.get(field_name) {
|
||||
is_enum = ps.obj.enum_.is_some();
|
||||
}
|
||||
@ -441,6 +577,42 @@ impl SqlCompiler {
|
||||
}
|
||||
}
|
||||
}
|
||||
(cast, is_ilike)
|
||||
}
|
||||
|
||||
fn compile_filter_conditions(
|
||||
&self,
|
||||
r#type: &crate::database::r#type::Type,
|
||||
type_aliases: &std::collections::HashMap<String, String>,
|
||||
node: &Node,
|
||||
base_alias: &str,
|
||||
where_clauses: &mut Vec<String>,
|
||||
) {
|
||||
for (i, filter_key) in self.filter_keys.iter().enumerate() {
|
||||
let mut parts = filter_key.split(':');
|
||||
let full_field_path = parts.next().unwrap_or(filter_key);
|
||||
let op = parts.next().unwrap_or("$eq");
|
||||
|
||||
let field_name = if node.stem_path.is_empty() {
|
||||
if full_field_path.contains('.') || full_field_path.contains('#') {
|
||||
continue;
|
||||
}
|
||||
full_field_path
|
||||
} else {
|
||||
let prefix = format!("{}.", node.stem_path);
|
||||
if full_field_path.starts_with(&prefix) {
|
||||
let remainder = &full_field_path[prefix.len()..];
|
||||
if remainder.contains('.') || remainder.contains('#') {
|
||||
continue;
|
||||
}
|
||||
remainder
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let filter_alias = Self::resolve_filter_alias(r#type, type_aliases, base_alias, field_name);
|
||||
let (cast, is_ilike) = Self::determine_sql_cast_and_op(r#type, node, field_name);
|
||||
|
||||
let param_index = i + 1;
|
||||
let p_val = format!("${}#>>'{{}}'", param_index);
|
||||
@ -457,31 +629,13 @@ impl SqlCompiler {
|
||||
));
|
||||
} else {
|
||||
let sql_op = match op {
|
||||
"$eq" => {
|
||||
if is_ilike {
|
||||
"ILIKE"
|
||||
} else {
|
||||
"="
|
||||
}
|
||||
}
|
||||
"$ne" => {
|
||||
if is_ilike {
|
||||
"NOT ILIKE"
|
||||
} else {
|
||||
"!="
|
||||
}
|
||||
}
|
||||
"$eq" => if is_ilike { "ILIKE" } else { "=" },
|
||||
"$ne" => if is_ilike { "NOT ILIKE" } else { "!=" },
|
||||
"$gt" => ">",
|
||||
"$gte" => ">=",
|
||||
"$lt" => "<",
|
||||
"$lte" => "<=",
|
||||
_ => {
|
||||
if is_ilike {
|
||||
"ILIKE"
|
||||
} else {
|
||||
"="
|
||||
}
|
||||
}
|
||||
_ => if is_ilike { "ILIKE" } else { "=" },
|
||||
};
|
||||
|
||||
let param_sql = if is_ilike && (op == "$eq" || op == "$ne") {
|
||||
@ -496,46 +650,93 @@ impl SqlCompiler {
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(_prop) = prop_name {
|
||||
where_clauses.push(format!("{}.parent_id = {}.id", base_alias, parent_alias));
|
||||
}
|
||||
|
||||
Ok(where_clauses)
|
||||
}
|
||||
|
||||
fn compile_inline_object(
|
||||
fn compile_relation_conditions(
|
||||
&self,
|
||||
props: &std::collections::BTreeMap<String, std::sync::Arc<crate::database::schema::Schema>>,
|
||||
parent_alias: &str,
|
||||
filter_keys: &[String],
|
||||
is_stem_query: bool,
|
||||
depth: usize,
|
||||
current_path: String,
|
||||
) -> Result<(String, String), String> {
|
||||
let mut build_args = Vec::new();
|
||||
for (k, v) in props {
|
||||
let next_path = if current_path.is_empty() {
|
||||
k.clone()
|
||||
} else {
|
||||
format!("{}.{}", current_path, k)
|
||||
};
|
||||
r#type: &crate::database::r#type::Type,
|
||||
type_aliases: &std::collections::HashMap<String, String>,
|
||||
node: &Node,
|
||||
base_alias: &str,
|
||||
where_clauses: &mut Vec<String>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(prop_ref) = &node.property_name {
|
||||
let prop = prop_ref.as_str();
|
||||
let mut parent_relation_alias = node.parent_alias.clone();
|
||||
let mut child_relation_alias = base_alias.to_string();
|
||||
|
||||
let (child_sql, val_type) = self.walk_schema(
|
||||
v,
|
||||
parent_alias,
|
||||
Some(k),
|
||||
filter_keys,
|
||||
is_stem_query,
|
||||
depth + 1,
|
||||
next_path,
|
||||
)?;
|
||||
if val_type == "abort" {
|
||||
continue;
|
||||
}
|
||||
build_args.push(format!("'{}', {}", k, child_sql));
|
||||
}
|
||||
let combined = format!("jsonb_build_object({})", build_args.join(", "));
|
||||
Ok((combined, "object".to_string()))
|
||||
if let Some(parent_type) = node.parent_type {
|
||||
let merged_props = self.get_merged_properties(node.schema.as_ref());
|
||||
let relative_keys: Vec<String> = merged_props.keys().cloned().collect();
|
||||
|
||||
let (relation, is_parent_source) = self
|
||||
.db
|
||||
.get_relation(&parent_type.name, &r#type.name, prop, Some(&relative_keys))
|
||||
.ok_or_else(|| {
|
||||
format!(
|
||||
"Could not dynamically resolve database relation mapping for {} -> {} on property {}",
|
||||
parent_type.name, r#type.name, prop
|
||||
)
|
||||
})?;
|
||||
|
||||
let source_col = &relation.source_columns[0];
|
||||
let dest_col = &relation.destination_columns[0];
|
||||
|
||||
if let Some(pta) = &node.parent_type_aliases {
|
||||
let p_search_type = if is_parent_source {
|
||||
&relation.source_type
|
||||
} else {
|
||||
&relation.destination_type
|
||||
};
|
||||
if let Some(a) = pta.get(p_search_type) {
|
||||
parent_relation_alias = a.clone();
|
||||
}
|
||||
}
|
||||
|
||||
let c_search_type = if is_parent_source {
|
||||
&relation.destination_type
|
||||
} else {
|
||||
&relation.source_type
|
||||
};
|
||||
if let Some(a) = type_aliases.get(c_search_type) {
|
||||
child_relation_alias = a.clone();
|
||||
}
|
||||
|
||||
let sql_string = if is_parent_source {
|
||||
format!(
|
||||
"{}.{} = {}.{}",
|
||||
parent_relation_alias, source_col, child_relation_alias, dest_col
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"{}.{} = {}.{}",
|
||||
child_relation_alias, source_col, parent_relation_alias, dest_col
|
||||
)
|
||||
};
|
||||
where_clauses.push(sql_string);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_merged_properties(
|
||||
&self,
|
||||
schema: &crate::database::schema::Schema,
|
||||
) -> std::collections::BTreeMap<String, Arc<crate::database::schema::Schema>> {
|
||||
let mut props = std::collections::BTreeMap::new();
|
||||
|
||||
if let Some(ref_id) = &schema.obj.r#ref {
|
||||
if let Some(parent_schema) = self.db.schemas.get(ref_id) {
|
||||
props.extend(self.get_merged_properties(parent_schema));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(local_props) = &schema.obj.properties {
|
||||
for (k, v) in local_props {
|
||||
props.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
|
||||
props
|
||||
}
|
||||
}
|
||||
|
||||
@ -32,9 +32,12 @@ impl Queryer {
|
||||
Err(msg) => {
|
||||
return crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "FILTER_PARSE_FAILED".to_string(),
|
||||
message: msg,
|
||||
message: msg.clone(),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(), // filters apply to the root query
|
||||
cause: Some(msg),
|
||||
context: filters.cloned(),
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}]);
|
||||
}
|
||||
@ -94,7 +97,13 @@ impl Queryer {
|
||||
return Ok(cached_sql.value().clone());
|
||||
}
|
||||
|
||||
let compiler = compiler::SqlCompiler::new(self.db.clone());
|
||||
let compiler = compiler::Compiler {
|
||||
db: &self.db,
|
||||
filter_keys: filter_keys,
|
||||
is_stem_query: stem_opt.is_some(),
|
||||
alias_counter: 0,
|
||||
};
|
||||
|
||||
match compiler.compile(schema_id, stem_opt, filter_keys) {
|
||||
Ok(compiled_sql) => {
|
||||
self
|
||||
@ -104,9 +113,12 @@ impl Queryer {
|
||||
}
|
||||
Err(e) => Err(crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "QUERY_COMPILATION_FAILED".to_string(),
|
||||
message: e,
|
||||
message: e.clone(),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(),
|
||||
cause: Some(e),
|
||||
context: None,
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}])),
|
||||
}
|
||||
@ -130,14 +142,20 @@ impl Queryer {
|
||||
code: "QUERY_FAILED".to_string(),
|
||||
message: format!("Expected array from generic query, got: {:?}", other),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(),
|
||||
cause: Some(format!("Expected array, got {}", other)),
|
||||
context: Some(serde_json::json!([sql])),
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}]),
|
||||
Err(e) => crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "QUERY_FAILED".to_string(),
|
||||
message: format!("SPI error in queryer: {}", e),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(),
|
||||
cause: Some(format!("SPI error in queryer: {}", e)),
|
||||
context: Some(serde_json::json!([sql])),
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}]),
|
||||
}
|
||||
|
||||
@ -1469,6 +1469,12 @@ fn test_queryer_0_9() {
|
||||
crate::tests::runner::run_test_case(&path, 0, 9).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_queryer_0_10() {
|
||||
let path = format!("{}/fixtures/queryer.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 10).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_not_0_0() {
|
||||
let path = format!("{}/fixtures/not.json", env!("CARGO_MANIFEST_DIR"));
|
||||
|
||||
@ -1,19 +1,10 @@
|
||||
use crate::tests::types::Suite;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::sync::{Arc, OnceLock, RwLock};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct TestSuite {
|
||||
#[allow(dead_code)]
|
||||
pub description: String,
|
||||
pub database: serde_json::Value,
|
||||
pub tests: Vec<TestCase>,
|
||||
}
|
||||
|
||||
use crate::tests::types::TestCase;
|
||||
use serde_json::Value;
|
||||
|
||||
pub fn deserialize_some<'de, D>(deserializer: D) -> Result<Option<Value>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
@ -23,7 +14,7 @@ where
|
||||
}
|
||||
|
||||
// Type alias for easier reading
|
||||
type CompiledSuite = Arc<Vec<(TestSuite, Arc<crate::database::Database>)>>;
|
||||
type CompiledSuite = Arc<Vec<(Suite, Arc<crate::database::Database>)>>;
|
||||
|
||||
// Global cache mapping filename -> Vector of (Parsed JSON suite, Compiled Database)
|
||||
static CACHE: OnceLock<RwLock<HashMap<String, CompiledSuite>>> = OnceLock::new();
|
||||
@ -46,7 +37,7 @@ fn get_cached_file(path: &str) -> CompiledSuite {
|
||||
} else {
|
||||
let content =
|
||||
fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read file: {}", path));
|
||||
let suites: Vec<TestSuite> = serde_json::from_str(&content)
|
||||
let suites: Vec<Suite> = serde_json::from_str(&content)
|
||||
.unwrap_or_else(|e| panic!("Failed to parse JSON in {}: {}", path, e));
|
||||
|
||||
let mut compiled_suites = Vec::new();
|
||||
|
||||
@ -1,11 +1,11 @@
|
||||
use super::expect::ExpectBlock;
|
||||
use super::expect::Expect;
|
||||
use crate::database::Database;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct TestCase {
|
||||
pub struct Case {
|
||||
pub description: String,
|
||||
|
||||
#[serde(default = "default_action")]
|
||||
@ -30,14 +30,14 @@ pub struct TestCase {
|
||||
#[serde(default)]
|
||||
pub mocks: Option<serde_json::Value>,
|
||||
|
||||
pub expect: Option<ExpectBlock>,
|
||||
pub expect: Option<Expect>,
|
||||
}
|
||||
|
||||
fn default_action() -> String {
|
||||
"validate".to_string()
|
||||
}
|
||||
|
||||
impl TestCase {
|
||||
impl Case {
|
||||
pub fn run_compile(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
|
||||
|
||||
@ -138,6 +138,7 @@ impl TestCase {
|
||||
))
|
||||
} else if let Some(expect) = &self.expect {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_pattern(&queries)?;
|
||||
expect.assert_sql(&queries)
|
||||
} else {
|
||||
Ok(())
|
||||
@ -176,6 +177,7 @@ impl TestCase {
|
||||
))
|
||||
} else if let Some(expect) = &self.expect {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_pattern(&queries)?;
|
||||
expect.assert_sql(&queries)
|
||||
} else {
|
||||
Ok(())
|
||||
|
||||
22
src/tests/types/expect/mod.rs
Normal file
22
src/tests/types/expect/mod.rs
Normal file
@ -0,0 +1,22 @@
|
||||
pub mod pattern;
|
||||
pub mod sql;
|
||||
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum SqlExpectation {
|
||||
Single(String),
|
||||
Multi(Vec<String>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Expect {
|
||||
pub success: bool,
|
||||
pub result: Option<serde_json::Value>,
|
||||
pub errors: Option<Vec<serde_json::Value>>,
|
||||
pub stems: Option<HashMap<String, HashMap<String, serde_json::Value>>>,
|
||||
#[serde(default)]
|
||||
pub sql: Option<Vec<SqlExpectation>>,
|
||||
}
|
||||
@ -1,30 +1,13 @@
|
||||
use super::Expect;
|
||||
use regex::Regex;
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum SqlExpectation {
|
||||
Single(String),
|
||||
Multi(Vec<String>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct ExpectBlock {
|
||||
pub success: bool,
|
||||
pub result: Option<serde_json::Value>,
|
||||
pub errors: Option<Vec<serde_json::Value>>,
|
||||
pub stems: Option<HashMap<String, HashMap<String, serde_json::Value>>>,
|
||||
#[serde(default)]
|
||||
pub sql: Option<Vec<SqlExpectation>>,
|
||||
}
|
||||
|
||||
impl ExpectBlock {
|
||||
impl Expect {
|
||||
/// Advanced SQL execution assertion algorithm ported from `assert.go`.
|
||||
/// This compares two arrays of strings, one containing {{uuid:name}} or {{timestamp}} placeholders,
|
||||
/// and the other containing actual executed database queries. It ensures that placeholder UUIDs
|
||||
/// are consistently mapped to the same actual UUIDs across all lines, and strictly validates line-by-line sequences.
|
||||
pub fn assert_sql(&self, actual: &[String]) -> Result<(), String> {
|
||||
pub fn assert_pattern(&self, actual: &[String]) -> Result<(), String> {
|
||||
let patterns = match &self.sql {
|
||||
Some(s) => s,
|
||||
None => return Ok(()),
|
||||
@ -76,8 +59,8 @@ impl ExpectBlock {
|
||||
let aline = clean_str(aline_raw);
|
||||
|
||||
let pattern_str_raw = match pattern_expect {
|
||||
SqlExpectation::Single(s) => s.clone(),
|
||||
SqlExpectation::Multi(m) => m.join(" "),
|
||||
super::SqlExpectation::Single(s) => s.clone(),
|
||||
super::SqlExpectation::Multi(m) => m.join(" "),
|
||||
};
|
||||
|
||||
let pattern_str = clean_str(&pattern_str_raw);
|
||||
206
src/tests/types/expect/sql.rs
Normal file
206
src/tests/types/expect/sql.rs
Normal file
@ -0,0 +1,206 @@
|
||||
use super::Expect;
|
||||
use sqlparser::ast::{Expr, Query, SelectItem, Statement, TableFactor};
|
||||
use sqlparser::dialect::PostgreSqlDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
use std::collections::HashSet;
|
||||
|
||||
impl Expect {
|
||||
pub fn assert_sql(&self, actual: &[String]) -> Result<(), String> {
|
||||
for query in actual {
|
||||
if let Err(e) = Self::validate_semantic_sql(query) {
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn validate_semantic_sql(sql: &str) -> Result<(), String> {
|
||||
let dialect = PostgreSqlDialect {};
|
||||
let statements = match Parser::parse_sql(&dialect, sql) {
|
||||
Ok(s) => s,
|
||||
Err(e) => return Err(format!("SQL Syntax Error: {}\nSQL: {}", e, sql)),
|
||||
};
|
||||
|
||||
for statement in statements {
|
||||
Self::validate_statement(&statement, sql)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_statement(stmt: &Statement, original_sql: &str) -> Result<(), String> {
|
||||
match stmt {
|
||||
Statement::Query(query) => Self::validate_query(query, &HashSet::new(), original_sql)?,
|
||||
Statement::Insert(insert) => {
|
||||
if let Some(query) = &insert.source {
|
||||
Self::validate_query(query, &HashSet::new(), original_sql)?
|
||||
}
|
||||
}
|
||||
Statement::Update(update) => {
|
||||
if let Some(expr) = &update.selection {
|
||||
Self::validate_expr(expr, &HashSet::new(), original_sql)?;
|
||||
}
|
||||
}
|
||||
Statement::Delete(delete) => {
|
||||
if let Some(expr) = &delete.selection {
|
||||
Self::validate_expr(expr, &HashSet::new(), original_sql)?;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_query(
|
||||
query: &Query,
|
||||
available_aliases: &HashSet<String>,
|
||||
original_sql: &str,
|
||||
) -> Result<(), String> {
|
||||
if let sqlparser::ast::SetExpr::Select(select) = &*query.body {
|
||||
Self::validate_select(&select, available_aliases, original_sql)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_select(
|
||||
select: &sqlparser::ast::Select,
|
||||
parent_aliases: &HashSet<String>,
|
||||
original_sql: &str,
|
||||
) -> Result<(), String> {
|
||||
let mut available_aliases = parent_aliases.clone();
|
||||
|
||||
// 1. Collect all declared table aliases in the FROM clause and JOINs
|
||||
for table_with_joins in &select.from {
|
||||
Self::collect_aliases_from_table_factor(&table_with_joins.relation, &mut available_aliases);
|
||||
for join in &table_with_joins.joins {
|
||||
Self::collect_aliases_from_table_factor(&join.relation, &mut available_aliases);
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Validate all SELECT projection fields
|
||||
for projection in &select.projection {
|
||||
if let SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } = projection {
|
||||
Self::validate_expr(expr, &available_aliases, original_sql)?;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Validate ON conditions in joins
|
||||
for table_with_joins in &select.from {
|
||||
for join in &table_with_joins.joins {
|
||||
if let sqlparser::ast::JoinOperator::Inner(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::LeftOuter(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::RightOuter(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::FullOuter(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::Join(sqlparser::ast::JoinConstraint::On(expr)) =
|
||||
&join.join_operator
|
||||
{
|
||||
Self::validate_expr(expr, &available_aliases, original_sql)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Validate WHERE conditions
|
||||
if let Some(selection) = &select.selection {
|
||||
Self::validate_expr(selection, &available_aliases, original_sql)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn collect_aliases_from_table_factor(tf: &TableFactor, aliases: &mut HashSet<String>) {
|
||||
match tf {
|
||||
TableFactor::Table { name, alias, .. } => {
|
||||
if let Some(table_alias) = alias {
|
||||
aliases.insert(table_alias.name.value.clone());
|
||||
} else if let Some(last) = name.0.last() {
|
||||
match last {
|
||||
sqlparser::ast::ObjectNamePart::Identifier(i) => {
|
||||
aliases.insert(i.value.clone());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
TableFactor::Derived {
|
||||
subquery,
|
||||
alias: Some(table_alias),
|
||||
..
|
||||
} => {
|
||||
aliases.insert(table_alias.name.value.clone());
|
||||
// A derived table is technically a nested scope which is opaque outside, but for pure semantic checks
|
||||
// its internal contents should be validated purely within its own scope (not leaking external aliases in, usually)
|
||||
// but Postgres allows lateral correlation. We will validate its interior with an empty scope.
|
||||
let _ = Self::validate_query(subquery, &HashSet::new(), "");
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_expr(
|
||||
expr: &Expr,
|
||||
available_aliases: &HashSet<String>,
|
||||
sql: &str,
|
||||
) -> Result<(), String> {
|
||||
match expr {
|
||||
Expr::CompoundIdentifier(idents) => {
|
||||
if idents.len() == 2 {
|
||||
let alias = &idents[0].value;
|
||||
if !available_aliases.is_empty() && !available_aliases.contains(alias) {
|
||||
return Err(format!(
|
||||
"Semantic Error: Orchestrated query referenced table alias '{}' but it was not declared in the query's FROM/JOIN clauses.\nAvailable aliases: {:?}\nSQL: {}",
|
||||
alias, available_aliases, sql
|
||||
));
|
||||
}
|
||||
} else if idents.len() > 2 {
|
||||
let alias = &idents[1].value; // In form schema.table.column, 'table' is idents[1]
|
||||
if !available_aliases.is_empty() && !available_aliases.contains(alias) {
|
||||
return Err(format!(
|
||||
"Semantic Error: Orchestrated query referenced table '{}' but it was not mapped.\nAvailable aliases: {:?}\nSQL: {}",
|
||||
alias, available_aliases, sql
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::Subquery(subquery) => Self::validate_query(subquery, available_aliases, sql)?,
|
||||
Expr::Exists { subquery, .. } => Self::validate_query(subquery, available_aliases, sql)?,
|
||||
Expr::InSubquery {
|
||||
expr: e, subquery, ..
|
||||
} => {
|
||||
Self::validate_expr(e, available_aliases, sql)?;
|
||||
Self::validate_query(subquery, available_aliases, sql)?;
|
||||
}
|
||||
Expr::BinaryOp { left, right, .. } => {
|
||||
Self::validate_expr(left, available_aliases, sql)?;
|
||||
Self::validate_expr(right, available_aliases, sql)?;
|
||||
}
|
||||
Expr::IsFalse(e)
|
||||
| Expr::IsNotFalse(e)
|
||||
| Expr::IsTrue(e)
|
||||
| Expr::IsNotTrue(e)
|
||||
| Expr::IsNull(e)
|
||||
| Expr::IsNotNull(e)
|
||||
| Expr::InList { expr: e, .. }
|
||||
| Expr::Nested(e)
|
||||
| Expr::UnaryOp { expr: e, .. }
|
||||
| Expr::Cast { expr: e, .. }
|
||||
| Expr::Like { expr: e, .. }
|
||||
| Expr::ILike { expr: e, .. }
|
||||
| Expr::AnyOp { left: e, .. }
|
||||
| Expr::AllOp { left: e, .. } => {
|
||||
Self::validate_expr(e, available_aliases, sql)?;
|
||||
}
|
||||
Expr::Function(func) => {
|
||||
if let sqlparser::ast::FunctionArguments::List(args) = &func.args {
|
||||
if let Some(sqlparser::ast::FunctionArg::Unnamed(
|
||||
sqlparser::ast::FunctionArgExpr::Expr(e),
|
||||
)) = args.args.get(0)
|
||||
{
|
||||
Self::validate_expr(e, available_aliases, sql)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@ -2,6 +2,6 @@ pub mod case;
|
||||
// Submodules of the test-types module and their public re-exports.
pub mod expect;
pub mod suite;

// Flat re-exports so callers can write `types::Case` etc. directly.
pub use case::Case;
pub use expect::Expect;
pub use suite::Suite;
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
use super::case::Case;
use serde::Deserialize;

/// A whole test suite definition deserialized from a suite file:
/// a description, the database fixture, and the list of test cases.
#[derive(Debug, Deserialize)]
pub struct Suite {
    // Human-readable suite description; kept for the file format even
    // though nothing reads it yet.
    #[allow(dead_code)]
    pub description: String,
    // Database fixture/definition consumed by the harness.
    // NOTE(review): schema of this value is defined by the suite files —
    // confirm against the loader before documenting it further.
    pub database: serde_json::Value,
    // The test cases to run, in file order.
    pub tests: Vec<Case>,
}
|
||||
|
||||
@ -67,7 +67,12 @@ impl Validator {
|
||||
.map(|e| crate::drop::Error {
|
||||
code: e.code,
|
||||
message: e.message,
|
||||
details: crate::drop::ErrorDetails { path: e.path },
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: e.path,
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
})
|
||||
.collect();
|
||||
crate::drop::Drop::with_errors(errors)
|
||||
@ -76,7 +81,12 @@ impl Validator {
|
||||
Err(e) => crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: e.code,
|
||||
message: e.message,
|
||||
details: crate::drop::ErrorDetails { path: e.path },
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: e.path,
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}]),
|
||||
}
|
||||
} else {
|
||||
@ -84,7 +94,10 @@ impl Validator {
|
||||
code: "SCHEMA_NOT_FOUND".to_string(),
|
||||
message: format!("Schema {} not found", schema_id),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
path: "/".to_string(),
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}])
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user