passing all tests

This commit is contained in:
2026-03-11 17:26:45 -04:00
parent 44be75f5d4
commit 2c74d0a1a6
8 changed files with 9181 additions and 1210 deletions

View File

@ -2,23 +2,38 @@ use crate::database::executors::DatabaseExecutor;
use serde_json::Value;
#[cfg(test)]
use std::sync::Mutex;
use std::cell::RefCell;
#[cfg(test)]
pub struct MockExecutor {
pub query_responses: Mutex<Vec<Result<Value, String>>>,
pub execute_responses: Mutex<Vec<Result<(), String>>>,
pub captured_queries: Mutex<Vec<String>>,
// Per-thread backing state for MockExecutor (stored in MOCK_STATE): records
// every SQL string handed to the executor and holds FIFO queues of canned
// responses that tests pre-load before exercising database code.
pub struct MockState {
// Every SQL string seen by query()/execute(), in call order.
pub captured_queries: Vec<String>,
// Canned results consumed front-to-back by query(); when empty, query()
// falls back to Ok(Value::Array(vec![])).
pub query_responses: Vec<Result<Value, String>>,
// Canned results consumed front-to-back by execute(); when empty,
// execute() falls back to Ok(()).
pub execute_responses: Vec<Result<(), String>>,
}
#[cfg(test)]
impl MockState {
    /// Build a MockState with all containers empty: nothing captured yet and
    /// no canned query/execute responses queued.
    pub fn new() -> Self {
        MockState {
            captured_queries: Vec::new(),
            query_responses: Vec::new(),
            execute_responses: Vec::new(),
        }
    }
}
#[cfg(test)]
// One independent MockState per thread: the Rust test harness typically runs
// tests on separate threads, so thread-local storage keeps each test's
// captured queries and queued responses isolated without any locking.
thread_local! {
pub static MOCK_STATE: RefCell<MockState> = RefCell::new(MockState::new());
}
#[cfg(test)]
// Stateless handle implementing DatabaseExecutor for tests; all mutable mock
// state lives in the thread-local MOCK_STATE rather than on the struct itself.
pub struct MockExecutor {}
#[cfg(test)]
impl MockExecutor {
pub fn new() -> Self {
Self {
query_responses: Mutex::new(Vec::new()),
execute_responses: Mutex::new(Vec::new()),
captured_queries: Mutex::new(Vec::new()),
}
Self {}
}
}
@ -26,22 +41,26 @@ impl MockExecutor {
impl DatabaseExecutor for MockExecutor {
fn query(&self, sql: &str, _args: Option<&[Value]>) -> Result<Value, String> {
println!("DEBUG SQL QUERY: {}", sql);
self.captured_queries.lock().unwrap().push(sql.to_string());
let mut responses = self.query_responses.lock().unwrap();
if responses.is_empty() {
return Ok(Value::Array(vec![]));
}
responses.remove(0)
MOCK_STATE.with(|state| {
let mut s = state.borrow_mut();
s.captured_queries.push(sql.to_string());
if s.query_responses.is_empty() {
return Ok(Value::Array(vec![]));
}
s.query_responses.remove(0)
})
}
fn execute(&self, sql: &str, _args: Option<&[Value]>) -> Result<(), String> {
println!("DEBUG SQL EXECUTE: {}", sql);
self.captured_queries.lock().unwrap().push(sql.to_string());
let mut responses = self.execute_responses.lock().unwrap();
if responses.is_empty() {
return Ok(());
}
responses.remove(0)
MOCK_STATE.with(|state| {
let mut s = state.borrow_mut();
s.captured_queries.push(sql.to_string());
if s.execute_responses.is_empty() {
return Ok(());
}
s.execute_responses.remove(0)
})
}
fn auth_user_id(&self) -> Result<String, String> {
@ -54,11 +73,16 @@ impl DatabaseExecutor for MockExecutor {
#[cfg(test)]
fn get_queries(&self) -> Vec<String> {
self.captured_queries.lock().unwrap().clone()
MOCK_STATE.with(|state| state.borrow().captured_queries.clone())
}
#[cfg(test)]
fn reset_mocks(&self) {
self.captured_queries.lock().unwrap().clear();
MOCK_STATE.with(|state| {
let mut s = state.borrow_mut();
s.captured_queries.clear();
s.query_responses.clear();
s.execute_responses.clear();
});
}
}

View File

@ -76,8 +76,11 @@ impl Database {
if let Some(arr) = val.get("relations").and_then(|v| v.as_array()) {
for item in arr {
if let Ok(def) = serde_json::from_value::<Relation>(item.clone()) {
db.relations.insert(def.constraint.clone(), def);
match serde_json::from_value::<Relation>(item.clone()) {
Ok(def) => {
db.relations.insert(def.constraint.clone(), def);
}
Err(e) => println!("DATABASE RELATION PARSE FAILED: {:?}", e),
}
}
}

File diff suppressed because it is too large. (Load Diff)

File diff suppressed because it is too large. (Load Diff)

View File

@ -1,17 +1,17 @@
use serde::Deserialize;
use std::collections::HashMap;
use std::fs;
use std::sync::{Arc, OnceLock, RwLock};
#[derive(Debug, Deserialize)]
struct TestSuite {
pub struct TestSuite {
#[allow(dead_code)]
description: String,
database: serde_json::Value,
tests: Vec<TestCase>,
pub description: String,
pub database: serde_json::Value,
pub tests: Vec<TestCase>,
}
use crate::tests::types::{ExpectBlock, TestCase};
use crate::validator::Validator;
use crate::tests::types::TestCase;
use serde_json::Value;
pub fn deserialize_some<'de, D>(deserializer: D) -> Result<Option<Value>, D::Error>
@ -22,87 +22,128 @@ where
Ok(Some(v))
}
pub fn run_test_file_at_index(path: &str, index: usize) -> Result<(), String> {
let content =
fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read file: {}", path));
let suite: Vec<TestSuite> = serde_json::from_str(&content)
.unwrap_or_else(|e| panic!("Failed to parse JSON in {}: {}", path, e));
// Type alias for easier reading
type CompiledSuite = Arc<Vec<(TestSuite, Arc<crate::database::Database>)>>;
if index >= suite.len() {
panic!("Index {} out of bounds for file {}", index, path);
// Global cache mapping filename -> Vector of (Parsed JSON suite, Compiled Database)
static CACHE: OnceLock<RwLock<HashMap<String, CompiledSuite>>> = OnceLock::new();
// Returns the parsed and database-compiled test suites for `path`, loading
// them on first access and serving the process-wide cache afterwards.
// Test-only code: panics if the file cannot be read, parsed, or compiled.
fn get_cached_file(path: &str) -> CompiledSuite {
let cache_lock = CACHE.get_or_init(|| RwLock::new(HashMap::new()));
// Fast path: probe the cache under the read lock only, then release it
// before potentially taking the write lock below.
let file_data = {
let read_guard = cache_lock.read().unwrap();
read_guard.get(path).cloned()
};
match file_data {
Some(data) => data,
None => {
// Slow path: take the write lock and populate the cache entry.
let mut write_guard = cache_lock.write().unwrap();
// double check in case another thread compiled while we waited for lock
if let Some(data) = write_guard.get(path) {
data.clone()
} else {
// NOTE(review): file IO and compilation happen while holding the write
// lock, serializing first-time loads across threads — acceptable here.
let content =
fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read file: {}", path));
let suites: Vec<TestSuite> = serde_json::from_str(&content)
.unwrap_or_else(|e| panic!("Failed to parse JSON in {}: {}", path, e));
let mut compiled_suites = Vec::new();
// Compile each suite's database definition once; any compilation error
// aborts the run with every collected error message, since no test in
// the suite could meaningfully execute against a broken database.
for suite in suites {
let db_result = crate::database::Database::new(&suite.database);
if let Err(drop) = db_result {
let error_messages: Vec<String> = drop
.errors
.into_iter()
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
.collect();
panic!(
"System Setup Compilation failed for {}:\n{}",
path,
error_messages.join("\n")
);
}
compiled_suites.push((suite, Arc::new(db_result.unwrap())));
}
let new_data = Arc::new(compiled_suites);
write_guard.insert(path.to_string(), new_data.clone());
new_data
}
}
}
}
pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<(), String> {
let file_data = get_cached_file(path);
if suite_idx >= file_data.len() {
panic!("Suite Index {} out of bounds for file {}", suite_idx, path);
}
let group = &suite[index];
let (group, db) = &file_data[suite_idx];
if case_idx >= group.tests.len() {
panic!(
"Case Index {} out of bounds for suite {} in file {}",
case_idx, suite_idx, path
);
}
let test = &group.tests[case_idx];
let mut failures = Vec::<String>::new();
let db_json = group.database.clone();
let db_result = crate::database::Database::new(&db_json);
if let Err(drop) = db_result {
let error_messages: Vec<String> = drop
.errors
.into_iter()
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
.collect();
return Err(format!(
"System Setup Compilation failed:\n{}",
error_messages.join("\n")
));
}
let db = db_result.unwrap();
let validator = Validator::new(std::sync::Arc::new(db));
// 4. Run Tests
for test in group.tests.iter() {
// Provide fallback for legacy expectations if `expect` block was missing despite migration script
let expected_success = test
.expect
.as_ref()
.map(|e| e.success)
.unwrap_or(test.valid.unwrap_or(false));
let _expected_errors = test
.expect
.as_ref()
.and_then(|e| e.errors.clone())
.unwrap_or(test.expect_errors.clone().unwrap_or(vec![]));
// Provide fallback for legacy expectations if `expect` block was missing despite migration script
let _expected_success = test
.expect
.as_ref()
.map(|e| e.success)
.unwrap_or(test.valid.unwrap_or(false));
let _expected_errors = test
.expect
.as_ref()
.and_then(|e| e.errors.clone())
.unwrap_or(test.expect_errors.clone().unwrap_or(vec![]));
match test.action.as_str() {
"validate" => {
let result = test.run_validate(validator.db.clone());
if let Err(e) = result {
println!("TEST VALIDATE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
"[{}] Validate Test '{}' failed. Error: {}",
group.description, test.description, e
));
}
}
"merge" => {
let result = test.run_merge(validator.db.clone());
if let Err(e) = result {
println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
"[{}] Merge Test '{}' failed. Error: {}",
group.description, test.description, e
));
}
}
"query" => {
let result = test.run_query(validator.db.clone());
if let Err(e) = result {
println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
"[{}] Query Test '{}' failed. Error: {}",
group.description, test.description, e
));
}
}
_ => {
match test.action.as_str() {
"validate" => {
let result = test.run_validate(db.clone());
if let Err(e) = result {
println!("TEST VALIDATE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
"[{}] Unknown action '{}' for test '{}'",
group.description, test.action, test.description
"[{}] Validate Test '{}' failed. Error: {}",
group.description, test.description, e
));
}
}
"merge" => {
let result = test.run_merge(db.clone());
if let Err(e) = result {
println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
"[{}] Merge Test '{}' failed. Error: {}",
group.description, test.description, e
));
}
}
"query" => {
let result = test.run_query(db.clone());
if let Err(e) = result {
println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
failures.push(format!(
"[{}] Query Test '{}' failed. Error: {}",
group.description, test.description, e
));
}
}
_ => {
failures.push(format!(
"[{}] Unknown action '{}' for test '{}'",
group.description, test.action, test.description
));
}
}
if !failures.is_empty() {