322 lines
9.1 KiB
Rust
322 lines
9.1 KiB
Rust
pub mod r#enum;
|
|
pub mod executors;
|
|
pub mod formats;
|
|
pub mod page;
|
|
pub mod punc;
|
|
pub mod relation;
|
|
pub mod schema;
|
|
pub mod r#type;
|
|
|
|
// External mock exports inside the executor sub-folder
|
|
|
|
use r#enum::Enum;
|
|
use executors::DatabaseExecutor;
|
|
|
|
#[cfg(not(test))]
|
|
use executors::pgrx::SpiExecutor;
|
|
|
|
#[cfg(test)]
|
|
use executors::mock::MockExecutor;
|
|
|
|
use punc::Punc;
|
|
use relation::Relation;
|
|
use schema::Schema;
|
|
use serde_json::Value;
|
|
use std::collections::{HashMap, HashSet};
|
|
use r#type::Type;
|
|
|
|
/// Aggregate of all parsed definitions plus compiled lookup caches, with a
/// pluggable executor for running SQL (real SPI in production, mock in tests).
pub struct Database {
    /// Enum definitions keyed by name.
    pub enums: HashMap<String, Enum>,
    /// Type definitions keyed by name.
    pub types: HashMap<String, Type>,
    /// Punc definitions keyed by name.
    pub puncs: HashMap<String, Punc>,
    /// Relations whose source and destination types both resolved at parse time.
    pub relations: Vec<Relation>,
    /// Master schema registry keyed by schema id (harvested during `compile`).
    pub schemas: HashMap<String, Schema>,
    /// schema id -> sorted descendant ids, cached for $family runtime lookups.
    pub descendants: HashMap<String, Vec<String>>,
    /// schema id -> number of `$ref` hops to a ref-free schema.
    pub depths: HashMap<String, usize>,
    /// Backend used by `query`/`execute`/`auth_user_id`/`timestamp`.
    pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
}
|
|
|
|
impl Database {
|
|
pub fn new(val: &serde_json::Value) -> Result<Self, crate::drop::Drop> {
|
|
let mut db = Self {
|
|
enums: HashMap::new(),
|
|
types: HashMap::new(),
|
|
relations: Vec::new(),
|
|
puncs: HashMap::new(),
|
|
schemas: HashMap::new(),
|
|
descendants: HashMap::new(),
|
|
depths: HashMap::new(),
|
|
#[cfg(not(test))]
|
|
executor: Box::new(SpiExecutor::new()),
|
|
#[cfg(test)]
|
|
executor: Box::new(MockExecutor::new()),
|
|
};
|
|
|
|
if let Some(arr) = val.get("enums").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
if let Ok(def) = serde_json::from_value::<Enum>(item.clone()) {
|
|
db.enums.insert(def.name.clone(), def);
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("types").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
if let Ok(def) = serde_json::from_value::<Type>(item.clone()) {
|
|
db.types.insert(def.name.clone(), def);
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("relations").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
match serde_json::from_value::<Relation>(item.clone()) {
|
|
Ok(def) => {
|
|
if db.types.contains_key(&def.source_type)
|
|
&& db.types.contains_key(&def.destination_type)
|
|
{
|
|
db.relations.push(def);
|
|
}
|
|
}
|
|
Err(e) => println!("DATABASE RELATION PARSE FAILED: {:?}", e),
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("puncs").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
if let Ok(def) = serde_json::from_value::<Punc>(item.clone()) {
|
|
db.puncs.insert(def.name.clone(), def);
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("schemas").and_then(|v| v.as_array()) {
|
|
for (i, item) in arr.iter().enumerate() {
|
|
if let Ok(mut schema) = serde_json::from_value::<Schema>(item.clone()) {
|
|
let id = schema
|
|
.obj
|
|
.id
|
|
.clone()
|
|
.unwrap_or_else(|| format!("schema_{}", i));
|
|
schema.obj.id = Some(id.clone());
|
|
db.schemas.insert(id, schema);
|
|
}
|
|
}
|
|
}
|
|
|
|
db.compile()?;
|
|
Ok(db)
|
|
}
|
|
|
|
/// Override the default executor for unit testing
|
|
pub fn with_executor(mut self, executor: Box<dyn DatabaseExecutor + Send + Sync>) -> Self {
|
|
self.executor = executor;
|
|
self
|
|
}
|
|
|
|
/// Executes a query expecting a single JSONB array return, representing rows.
|
|
pub fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String> {
|
|
self.executor.query(sql, args)
|
|
}
|
|
|
|
/// Executes an operation (INSERT, UPDATE, DELETE, or pg_notify) that does not return rows.
|
|
pub fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String> {
|
|
self.executor.execute(sql, args)
|
|
}
|
|
|
|
/// Returns the current authenticated user's ID
|
|
pub fn auth_user_id(&self) -> Result<String, String> {
|
|
self.executor.auth_user_id()
|
|
}
|
|
|
|
/// Returns the current transaction timestamp
|
|
pub fn timestamp(&self) -> Result<String, String> {
|
|
self.executor.timestamp()
|
|
}
|
|
|
|
/// Runs the compilation passes over the parsed definitions.
///
/// Pass order matters: schemas are harvested into the registry first, then
/// ref-chain depths and $family descendant caches are derived from that
/// registry, and finally each schema compiles its internal structure.
pub fn compile(&mut self) -> Result<(), crate::drop::Drop> {
    self.collect_schemas();
    self.collect_depths();
    self.collect_descendants();
    self.compile_schemas();

    Ok(())
}
|
|
|
|
fn collect_schemas(&mut self) {
|
|
let mut to_insert = Vec::new();
|
|
|
|
// Pass 1: Extract all Schemas structurally off top level definitions into the master registry.
|
|
for type_def in self.types.values() {
|
|
for mut schema in type_def.schemas.clone() {
|
|
schema.harvest(&mut to_insert);
|
|
}
|
|
}
|
|
for punc_def in self.puncs.values() {
|
|
for mut schema in punc_def.schemas.clone() {
|
|
schema.harvest(&mut to_insert);
|
|
}
|
|
}
|
|
for enum_def in self.enums.values() {
|
|
for mut schema in enum_def.schemas.clone() {
|
|
schema.harvest(&mut to_insert);
|
|
}
|
|
}
|
|
|
|
for (id, schema) in to_insert {
|
|
self.schemas.insert(id, schema);
|
|
}
|
|
}
|
|
|
|
fn collect_depths(&mut self) {
|
|
let mut depths: HashMap<String, usize> = HashMap::new();
|
|
let schema_ids: Vec<String> = self.schemas.keys().cloned().collect();
|
|
|
|
for id in schema_ids {
|
|
let mut current_id = id.clone();
|
|
let mut depth = 0;
|
|
let mut visited = HashSet::new();
|
|
|
|
while let Some(schema) = self.schemas.get(¤t_id) {
|
|
if !visited.insert(current_id.clone()) {
|
|
break; // Cycle detected
|
|
}
|
|
if let Some(ref_str) = &schema.obj.r#ref {
|
|
current_id = ref_str.clone();
|
|
depth += 1;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
depths.insert(id, depth);
|
|
}
|
|
self.depths = depths;
|
|
}
|
|
|
|
fn collect_descendants(&mut self) {
|
|
let mut direct_refs: HashMap<String, Vec<String>> = HashMap::new();
|
|
for (id, schema) in &self.schemas {
|
|
if let Some(ref_str) = &schema.obj.r#ref {
|
|
direct_refs
|
|
.entry(ref_str.clone())
|
|
.or_default()
|
|
.push(id.clone());
|
|
}
|
|
}
|
|
|
|
// Cache generic descendants for $family runtime lookups
|
|
let mut descendants = HashMap::new();
|
|
for (id, schema) in &self.schemas {
|
|
if let Some(family_target) = &schema.obj.family {
|
|
let mut desc_set = HashSet::new();
|
|
Self::collect_descendants_recursively(family_target, &direct_refs, &mut desc_set);
|
|
let mut desc_vec: Vec<String> = desc_set.into_iter().collect();
|
|
desc_vec.sort();
|
|
|
|
// By placing all descendants directly onto the ID mapped location of the Family declaration,
|
|
// we can lookup descendants natively in ValidationContext without AST replacement overrides.
|
|
descendants.insert(id.clone(), desc_vec);
|
|
}
|
|
}
|
|
self.descendants = descendants;
|
|
}
|
|
|
|
pub fn get_relation(
|
|
&self,
|
|
parent_type: &str,
|
|
child_type: &str,
|
|
prop_name: &str,
|
|
relative_keys: Option<&Vec<String>>,
|
|
) -> Option<(&Relation, bool)> {
|
|
if parent_type == "entity" && child_type == "entity" {
|
|
return None; // Ignore entity <-> entity generic fallbacks, they aren't useful edges
|
|
}
|
|
|
|
let p_def = self.types.get(parent_type)?;
|
|
let c_def = self.types.get(child_type)?;
|
|
|
|
let mut matching_rels = Vec::new();
|
|
let mut directions = Vec::new();
|
|
|
|
for rel in &self.relations {
|
|
let is_forward = p_def.hierarchy.contains(&rel.source_type)
|
|
&& c_def.hierarchy.contains(&rel.destination_type);
|
|
let is_reverse = p_def.hierarchy.contains(&rel.destination_type)
|
|
&& c_def.hierarchy.contains(&rel.source_type);
|
|
|
|
if is_forward {
|
|
matching_rels.push(rel);
|
|
directions.push(true);
|
|
} else if is_reverse {
|
|
matching_rels.push(rel);
|
|
directions.push(false);
|
|
}
|
|
}
|
|
|
|
if matching_rels.is_empty() {
|
|
return None;
|
|
}
|
|
|
|
if matching_rels.len() == 1 {
|
|
return Some((matching_rels[0], directions[0]));
|
|
}
|
|
|
|
let mut chosen_idx = 0;
|
|
let mut resolved = false;
|
|
|
|
// Reduce ambiguity with prefix
|
|
for (i, rel) in matching_rels.iter().enumerate() {
|
|
if let Some(prefix) = &rel.prefix {
|
|
if prefix == prop_name {
|
|
chosen_idx = i;
|
|
resolved = true;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
|
|
// Reduce ambiguity by checking if relative payload OMITS the prefix (M:M heuristic)
|
|
if !resolved && relative_keys.is_some() {
|
|
let keys = relative_keys.unwrap();
|
|
let mut missing_prefix_ids = Vec::new();
|
|
for (i, rel) in matching_rels.iter().enumerate() {
|
|
if let Some(prefix) = &rel.prefix {
|
|
if !keys.contains(prefix) {
|
|
missing_prefix_ids.push(i);
|
|
}
|
|
}
|
|
}
|
|
if missing_prefix_ids.len() == 1 {
|
|
chosen_idx = missing_prefix_ids[0];
|
|
}
|
|
}
|
|
|
|
Some((matching_rels[chosen_idx], directions[chosen_idx]))
|
|
}
|
|
|
|
/// Gathers every schema id transitively reachable from `target` through the
/// inverted ref graph `direct_refs`, accumulating into `descendants`.
///
/// Written as an iterative depth-first walk with an explicit stack; the
/// resulting set is identical to the recursive formulation, and already-seen
/// ids are skipped so cyclic graphs terminate.
fn collect_descendants_recursively(
    target: &str,
    direct_refs: &HashMap<String, Vec<String>>,
    descendants: &mut HashSet<String>,
) {
    let mut pending: Vec<&str> = vec![target];
    while let Some(current) = pending.pop() {
        if let Some(children) = direct_refs.get(current) {
            for child in children {
                // Only walk into ids we have not recorded yet.
                if descendants.insert(child.clone()) {
                    pending.push(child);
                }
            }
        }
    }
}
|
|
|
|
fn compile_schemas(&mut self) {
|
|
// Pass 3: compile_internals across pure structure
|
|
let schema_ids: Vec<String> = self.schemas.keys().cloned().collect();
|
|
for id in schema_ids {
|
|
if let Some(schema) = self.schemas.get_mut(&id) {
|
|
schema.compile_internals();
|
|
}
|
|
}
|
|
}
|
|
}
|