//! Database definition model: enums, types, puncs, relations, and schemas,
//! plus a pluggable SQL executor (SPI in production, a mock under test).
pub mod edge;
|
|
pub mod r#enum;
|
|
pub mod executors;
|
|
pub mod formats;
|
|
pub mod page;
|
|
pub mod punc;
|
|
pub mod relation;
|
|
pub mod schema;
|
|
pub mod r#type;
|
|
|
|
// External mock exports inside the executor sub-folder
|
|
|
|
use r#enum::Enum;
|
|
use executors::DatabaseExecutor;
|
|
|
|
#[cfg(not(test))]
|
|
use executors::pgrx::SpiExecutor;
|
|
|
|
#[cfg(test)]
|
|
use executors::mock::MockExecutor;
|
|
|
|
use punc::Punc;
|
|
use relation::Relation;
|
|
use schema::Schema;
|
|
use serde_json::Value;
|
|
use std::collections::{HashMap, HashSet};
|
|
use r#type::Type;
|
|
|
|
/// In-memory model of the database definition, parsed from a JSON document.
pub struct Database {
    /// Enum definitions, keyed by name.
    pub enums: HashMap<String, Enum>,
    /// Type definitions, keyed by name.
    pub types: HashMap<String, Type>,
    /// Punc definitions, keyed by name.
    pub puncs: HashMap<String, Punc>,
    /// Relations between types, keyed by constraint name.
    pub relations: HashMap<String, Relation>,
    /// Master schema registry, keyed by schema id.
    pub schemas: HashMap<String, Schema>,
    /// Cache: schema id -> sorted ids of all schemas that transitively `ref` it.
    pub descendants: HashMap<String, Vec<String>>,
    /// Cache: schema id -> length of its `ref` chain (0 when it has no `ref`).
    pub depths: HashMap<String, usize>,
    /// Query/command backend: `SpiExecutor` in production builds,
    /// `MockExecutor` under `cfg(test)`.
    pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
}
|
|
|
|
impl Database {
|
|
pub fn new(val: &serde_json::Value) -> (Self, crate::drop::Drop) {
|
|
let mut db = Self {
|
|
enums: HashMap::new(),
|
|
types: HashMap::new(),
|
|
relations: HashMap::new(),
|
|
puncs: HashMap::new(),
|
|
schemas: HashMap::new(),
|
|
descendants: HashMap::new(),
|
|
depths: HashMap::new(),
|
|
#[cfg(not(test))]
|
|
executor: Box::new(SpiExecutor::new()),
|
|
#[cfg(test)]
|
|
executor: Box::new(MockExecutor::new()),
|
|
};
|
|
|
|
let mut errors = Vec::new();
|
|
|
|
if let Some(arr) = val.get("enums").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
match serde_json::from_value::<Enum>(item.clone()) {
|
|
Ok(def) => {
|
|
db.enums.insert(def.name.clone(), def);
|
|
}
|
|
Err(e) => {
|
|
errors.push(crate::drop::Error {
|
|
code: "DATABASE_ENUM_PARSE_FAILED".to_string(),
|
|
message: format!("Failed to parse database enum: {}", e),
|
|
details: crate::drop::ErrorDetails::default(),
|
|
});
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("types").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
match serde_json::from_value::<Type>(item.clone()) {
|
|
Ok(def) => {
|
|
db.types.insert(def.name.clone(), def);
|
|
}
|
|
Err(e) => {
|
|
errors.push(crate::drop::Error {
|
|
code: "DATABASE_TYPE_PARSE_FAILED".to_string(),
|
|
message: format!("Failed to parse database type: {}", e),
|
|
details: crate::drop::ErrorDetails::default(),
|
|
});
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("relations").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
match serde_json::from_value::<Relation>(item.clone()) {
|
|
Ok(def) => {
|
|
if db.types.contains_key(&def.source_type)
|
|
&& db.types.contains_key(&def.destination_type)
|
|
{
|
|
db.relations.insert(def.constraint.clone(), def);
|
|
}
|
|
}
|
|
Err(e) => {
|
|
errors.push(crate::drop::Error {
|
|
code: "DATABASE_RELATION_PARSE_FAILED".to_string(),
|
|
message: format!("Failed to parse database relation: {}", e),
|
|
details: crate::drop::ErrorDetails::default(),
|
|
});
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("puncs").and_then(|v| v.as_array()) {
|
|
for item in arr {
|
|
match serde_json::from_value::<Punc>(item.clone()) {
|
|
Ok(def) => {
|
|
db.puncs.insert(def.name.clone(), def);
|
|
}
|
|
Err(e) => {
|
|
errors.push(crate::drop::Error {
|
|
code: "DATABASE_PUNC_PARSE_FAILED".to_string(),
|
|
message: format!("Failed to parse database punc: {}", e),
|
|
details: crate::drop::ErrorDetails::default(),
|
|
});
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if let Some(arr) = val.get("schemas").and_then(|v| v.as_array()) {
|
|
for (i, item) in arr.iter().enumerate() {
|
|
match serde_json::from_value::<Schema>(item.clone()) {
|
|
Ok(mut schema) => {
|
|
let id = schema
|
|
.obj
|
|
.id
|
|
.clone()
|
|
.unwrap_or_else(|| format!("schema_{}", i));
|
|
schema.obj.id = Some(id.clone());
|
|
db.schemas.insert(id, schema);
|
|
}
|
|
Err(e) => {
|
|
errors.push(crate::drop::Error {
|
|
code: "DATABASE_SCHEMA_PARSE_FAILED".to_string(),
|
|
message: format!("Failed to parse database schema: {}", e),
|
|
details: crate::drop::ErrorDetails::default(),
|
|
});
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
db.compile(&mut errors);
|
|
let drop = if errors.is_empty() {
|
|
crate::drop::Drop::success()
|
|
} else {
|
|
crate::drop::Drop::with_errors(errors)
|
|
};
|
|
(db, drop)
|
|
}
|
|
|
|
/// Override the default executor for unit testing
|
|
pub fn with_executor(mut self, executor: Box<dyn DatabaseExecutor + Send + Sync>) -> Self {
|
|
self.executor = executor;
|
|
self
|
|
}
|
|
|
|
/// Executes a query expecting a single JSONB array return, representing rows.
///
/// Delegates to the configured executor. `args` are optional positional
/// query parameters; on failure the executor's error string is returned.
pub fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String> {
    self.executor.query(sql, args)
}
|
|
|
|
/// Executes an operation (INSERT, UPDATE, DELETE, or pg_notify) that does not return rows.
///
/// Delegates to the configured executor. `args` are optional positional
/// parameters; on failure the executor's error string is returned.
pub fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String> {
    self.executor.execute(sql, args)
}
|
|
|
|
/// Returns the current authenticated user's ID, as reported by the executor.
pub fn auth_user_id(&self) -> Result<String, String> {
    self.executor.auth_user_id()
}
|
|
|
|
/// Returns the current transaction timestamp, as reported by the executor.
pub fn timestamp(&self) -> Result<String, String> {
    self.executor.timestamp()
}
|
|
|
|
/// Resolves the full schema graph after parsing.
///
/// Order matters here:
/// 1. Harvest nested schemas declared inside top-level schemas.
/// 2. Harvest schemas declared on types, puncs, and enums (`collect_schemas`).
/// 3. Cache the `depths` and `descendants` indexes.
/// 4. Compile every registered schema; problems are appended to `errors`.
pub fn compile(&mut self, errors: &mut Vec<crate::drop::Error>) {
    let mut harvested = Vec::new();
    for schema in self.schemas.values_mut() {
        schema.collect_schemas(None, &mut harvested, errors);
    }
    self.schemas.extend(harvested);

    self.collect_schemas(errors);
    self.collect_depths();
    self.collect_descendants();

    // Evaluate property inheritance, formats, schemas, and foreign-key
    // edges across the whole registry. `visited` is shared across calls —
    // presumably a duplicate/cycle guard; see `Schema::compile` to confirm.
    let mut visited = std::collections::HashSet::new();
    for schema in self.schemas.values() {
        schema.compile(self, &mut visited, errors);
    }
}
|
|
|
|
fn collect_schemas(&mut self, errors: &mut Vec<crate::drop::Error>) {
|
|
let mut to_insert = Vec::new();
|
|
|
|
// Pass 1: Extract all Schemas structurally off top level definitions into the master registry.
|
|
// Validate every node recursively via string filters natively!
|
|
for type_def in self.types.values() {
|
|
for mut schema in type_def.schemas.clone() {
|
|
schema.collect_schemas(None, &mut to_insert, errors);
|
|
}
|
|
}
|
|
for punc_def in self.puncs.values() {
|
|
for mut schema in punc_def.schemas.clone() {
|
|
schema.collect_schemas(None, &mut to_insert, errors);
|
|
}
|
|
}
|
|
for enum_def in self.enums.values() {
|
|
for mut schema in enum_def.schemas.clone() {
|
|
schema.collect_schemas(None, &mut to_insert, errors);
|
|
}
|
|
}
|
|
|
|
for (id, schema) in to_insert {
|
|
self.schemas.insert(id, schema);
|
|
}
|
|
}
|
|
|
|
fn collect_depths(&mut self) {
|
|
let mut depths: HashMap<String, usize> = HashMap::new();
|
|
let schema_ids: Vec<String> = self.schemas.keys().cloned().collect();
|
|
|
|
for id in schema_ids {
|
|
let mut current_id = id.clone();
|
|
let mut depth = 0;
|
|
let mut visited = HashSet::new();
|
|
|
|
while let Some(schema) = self.schemas.get(¤t_id) {
|
|
if !visited.insert(current_id.clone()) {
|
|
break; // Cycle detected
|
|
}
|
|
if let Some(ref_str) = &schema.obj.r#ref {
|
|
current_id = ref_str.clone();
|
|
depth += 1;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
depths.insert(id, depth);
|
|
}
|
|
self.depths = depths;
|
|
}
|
|
|
|
fn collect_descendants(&mut self) {
|
|
let mut direct_refs: HashMap<String, Vec<String>> = HashMap::new();
|
|
for (id, schema) in &self.schemas {
|
|
if let Some(ref_str) = &schema.obj.r#ref {
|
|
direct_refs
|
|
.entry(ref_str.clone())
|
|
.or_default()
|
|
.push(id.clone());
|
|
}
|
|
}
|
|
|
|
// Cache exhaustive descendants matrix for generic $family string lookups natively
|
|
let mut descendants = HashMap::new();
|
|
for id in self.schemas.keys() {
|
|
let mut desc_set = HashSet::new();
|
|
Self::collect_descendants_recursively(id, &direct_refs, &mut desc_set);
|
|
let mut desc_vec: Vec<String> = desc_set.into_iter().collect();
|
|
desc_vec.sort();
|
|
|
|
descendants.insert(id.clone(), desc_vec);
|
|
}
|
|
self.descendants = descendants;
|
|
}
|
|
|
|
/// Collects into `descendants` every schema id reachable from `target` by
/// following the inverted `ref` edges in `direct_refs`.
fn collect_descendants_recursively(
    target: &str,
    direct_refs: &std::collections::HashMap<String, Vec<String>>,
    descendants: &mut std::collections::HashSet<String>,
) {
    // Depth-first walk with an explicit stack instead of recursion;
    // `descendants` doubles as the visited set, so cycles terminate.
    let mut stack: Vec<&str> = vec![target];
    while let Some(node) = stack.pop() {
        if let Some(children) = direct_refs.get(node) {
            for child in children {
                if descendants.insert(child.clone()) {
                    stack.push(child);
                }
            }
        }
    }
}
|
|
}
|