queryer merger test progress
This commit is contained in:
File diff suppressed because it is too large
Load Diff
94
src/tests/mod.rs
Normal file
94
src/tests/mod.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
use crate::*;
|
||||
pub mod runner;
|
||||
pub mod types;
|
||||
use serde_json::json;
|
||||
|
||||
// Database module tests moved to src/database/executors/mock.rs
|
||||
|
||||
#[test]
fn test_library_api() {
    // End-to-end exercise of the public extension entry points, in strict
    // order: validate before setup (must fail), setup, validate happy and
    // unhappy paths, then teardown. Each call returns a JsonB-wrapped "drop"
    // envelope whose `.0` payload is compared against an exact JSON literal.
    // NOTE(review): relies on global engine state shared by jspg_setup /
    // jspg_validate / jspg_teardown, so the step order must not be changed.

    // 1. Initially, schemas are not cached.

    // Expected uninitialized drop format: errors + null response
    let uninitialized_drop = jspg_validate("test_schema", JsonB(json!({})));
    assert_eq!(
        uninitialized_drop.0,
        json!({
            "type": "drop",
            "errors": [{
                "code": "ENGINE_NOT_INITIALIZED",
                "message": "JSPG extension has not been initialized via jspg_setup",
                "details": { "path": "" }
            }]
        })
    );

    // 2. Cache schemas: minimal database definition with one strict object
    // schema that requires a "name" string.
    let db_json = json!({
        "puncs": [],
        "enums": [],
        "relations": [],
        "types": [{
            "schemas": [{
                "$id": "test_schema",
                "type": "object",
                "properties": {
                    "name": { "type": "string" }
                },
                "required": ["name"]
            }]
        }]
    });

    let cache_drop = jspg_setup(JsonB(db_json));
    assert_eq!(
        cache_drop.0,
        json!({
            "type": "drop",
            "response": "success"
        })
    );

    // 3. Validate Happy Path: conforming document passes.
    let happy_drop = jspg_validate("test_schema", JsonB(json!({"name": "Neo"})));
    assert_eq!(
        happy_drop.0,
        json!({
            "type": "drop",
            "response": "success"
        })
    );

    // 4. Validate Unhappy Path: missing required field AND an unexpected
    // property, so two errors are expected in this exact order.
    let unhappy_drop = jspg_validate("test_schema", JsonB(json!({"wrong": "data"})));
    assert_eq!(
        unhappy_drop.0,
        json!({
            "type": "drop",
            "errors": [
                {
                    "code": "REQUIRED_FIELD_MISSING",
                    "message": "Missing name",
                    "details": { "path": "/name" }
                },
                {
                    "code": "STRICT_PROPERTY_VIOLATION",
                    "message": "Unexpected property 'wrong'",
                    "details": { "path": "/wrong" }
                }
            ]
        })
    );

    // 5. Clear Schemas so later tests start from an uninitialized engine.
    let clear_drop = jspg_teardown();
    assert_eq!(
        clear_drop.0,
        json!({
            "type": "drop",
            "response": "success"
        })
    );
}
|
||||
|
||||
include!("fixtures.rs");
|
||||
113
src/tests/runner.rs
Normal file
113
src/tests/runner.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
use serde::Deserialize;
|
||||
use std::fs;
|
||||
|
||||
/// One entry in a JSON test-suite file: a description, a raw database
/// definition, and the test cases to run against it.
///
/// NOTE(review): this duplicates `crate::tests::types::TestSuite`
/// (src/tests/types/suite.rs); consider reusing that type instead of
/// keeping two parallel definitions in sync.
#[derive(Debug, Deserialize)]
struct TestSuite {
    // Suite name used in failure reports.
    // NOTE(review): `allow(dead_code)` looks stale — `group.description`
    // is read by `run_test_file_at_index`; confirm and remove.
    #[allow(dead_code)]
    description: String,
    // Raw database definition handed to `crate::database::Database::new`.
    database: serde_json::Value,
    // The individual test cases for this suite.
    tests: Vec<TestCase>,
}
|
||||
|
||||
use crate::tests::types::{ExpectBlock, TestCase};
|
||||
|
||||
use crate::validator::Validator;
|
||||
use serde_json::Value;
|
||||
|
||||
pub fn deserialize_some<'de, D>(deserializer: D) -> Result<Option<Value>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let v = Value::deserialize(deserializer)?;
|
||||
Ok(Some(v))
|
||||
}
|
||||
|
||||
pub fn run_test_file_at_index(path: &str, index: usize) -> Result<(), String> {
|
||||
let content =
|
||||
fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read file: {}", path));
|
||||
let suite: Vec<TestSuite> = serde_json::from_str(&content)
|
||||
.unwrap_or_else(|e| panic!("Failed to parse JSON in {}: {}", path, e));
|
||||
|
||||
if index >= suite.len() {
|
||||
panic!("Index {} out of bounds for file {}", index, path);
|
||||
}
|
||||
|
||||
let group = &suite[index];
|
||||
let mut failures = Vec::<String>::new();
|
||||
|
||||
let db_json = group.database.clone();
|
||||
let db_result = crate::database::Database::new(&db_json);
|
||||
if let Err(drop) = db_result {
|
||||
let error_messages: Vec<String> = drop
|
||||
.errors
|
||||
.into_iter()
|
||||
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
|
||||
.collect();
|
||||
return Err(format!(
|
||||
"System Setup Compilation failed:\n{}",
|
||||
error_messages.join("\n")
|
||||
));
|
||||
}
|
||||
let db = db_result.unwrap();
|
||||
let validator = Validator::new(std::sync::Arc::new(db));
|
||||
|
||||
// 4. Run Tests
|
||||
for test in group.tests.iter() {
|
||||
// Provide fallback for legacy expectations if `expect` block was missing despite migration script
|
||||
let expected_success = test
|
||||
.expect
|
||||
.as_ref()
|
||||
.map(|e| e.success)
|
||||
.unwrap_or(test.valid.unwrap_or(false));
|
||||
let _expected_errors = test
|
||||
.expect
|
||||
.as_ref()
|
||||
.and_then(|e| e.errors.clone())
|
||||
.unwrap_or(test.expect_errors.clone().unwrap_or(vec![]));
|
||||
|
||||
match test.action.as_str() {
|
||||
"validate" => {
|
||||
let result = test.run_validate(validator.db.clone());
|
||||
if let Err(e) = result {
|
||||
println!("TEST VALIDATE ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
"[{}] Validate Test '{}' failed. Error: {}",
|
||||
group.description, test.description, e
|
||||
));
|
||||
}
|
||||
}
|
||||
"merge" => {
|
||||
let result = test.run_merge(validator.db.clone());
|
||||
if let Err(e) = result {
|
||||
println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
"[{}] Merge Test '{}' failed. Error: {}",
|
||||
group.description, test.description, e
|
||||
));
|
||||
}
|
||||
}
|
||||
"query" => {
|
||||
let result = test.run_query(validator.db.clone());
|
||||
if let Err(e) = result {
|
||||
println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
"[{}] Query Test '{}' failed. Error: {}",
|
||||
group.description, test.description, e
|
||||
));
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
failures.push(format!(
|
||||
"[{}] Unknown action '{}' for test '{}'",
|
||||
group.description, test.action, test.description
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !failures.is_empty() {
|
||||
return Err(failures.join("\n"));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
167
src/tests/types/case.rs
Normal file
167
src/tests/types/case.rs
Normal file
@@ -0,0 +1,167 @@
|
||||
use super::expect::ExpectBlock;
|
||||
use crate::database::Database;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// A single test case loaded from a JSON suite file.
///
/// Which fields are meaningful depends on `action` ("validate", "merge",
/// or "query"); the rest default via serde.
#[derive(Debug, Deserialize)]
pub struct TestCase {
    // Human-readable name, used in failure reports.
    pub description: String,

    // Dispatch key; defaults to "validate" when absent from the JSON.
    #[serde(default = "default_action")]
    pub action: String, // "validate", "merge", or "query"

    // For Validate & Query
    #[serde(default)]
    pub schema_id: String,

    // For Query: optional query stem passed through to the Queryer.
    #[serde(default)]
    pub stem: Option<String>,

    // For Query: optional filter object passed through to the Queryer.
    #[serde(default)]
    pub filters: Option<serde_json::Value>,

    // For Merge & Validate: the document under test.
    #[serde(default)]
    pub data: Option<serde_json::Value>,

    // For Merge & Query mocks
    // NOTE(review): not read by the visible run_* methods — presumably
    // consumed by the mock executor setup elsewhere; confirm.
    #[serde(default)]
    pub mocks: Option<serde_json::Value>,

    // New-style expectations (success flag, errors, SQL log).
    pub expect: Option<ExpectBlock>,

    // Legacy support for older tests to avoid migrating them all instantly
    pub valid: Option<bool>,
    pub expect_errors: Option<Vec<serde_json::Value>>,
}
|
||||
|
||||
/// Serde default for `TestCase::action`: cases without an explicit
/// `action` field are treated as validation tests.
fn default_action() -> String {
    String::from("validate")
}
|
||||
|
||||
impl TestCase {
|
||||
pub fn execute(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
match self.action.as_str() {
|
||||
"validate" => self.run_validate(db),
|
||||
"merge" => self.run_merge(db),
|
||||
"query" => self.run_query(db),
|
||||
_ => Err(format!(
|
||||
"Unknown action '{}' for test '{}'",
|
||||
self.action, self.description
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_validate(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
use crate::validator::Validator;
|
||||
|
||||
let validator = Validator::new(db);
|
||||
|
||||
let expected_success = self
|
||||
.expect
|
||||
.as_ref()
|
||||
.map(|e| e.success)
|
||||
.unwrap_or(self.valid.unwrap_or(false));
|
||||
|
||||
// _expected_errors is preserved for future diffing if needed
|
||||
let _expected_errors = self
|
||||
.expect
|
||||
.as_ref()
|
||||
.and_then(|e| e.errors.clone())
|
||||
.unwrap_or(self.expect_errors.clone().unwrap_or(vec![]));
|
||||
|
||||
let schema_id = &self.schema_id;
|
||||
if !validator.db.schemas.contains_key(schema_id) {
|
||||
return Err(format!(
|
||||
"Missing Schema: Cannot find schema ID '{}'",
|
||||
schema_id
|
||||
));
|
||||
}
|
||||
|
||||
let test_data = self.data.clone().unwrap_or(Value::Null);
|
||||
let result = validator.validate(schema_id, &test_data);
|
||||
|
||||
let got_valid = result.errors.is_empty();
|
||||
|
||||
if got_valid != expected_success {
|
||||
let error_msg = if result.errors.is_empty() {
|
||||
"None".to_string()
|
||||
} else {
|
||||
format!("{:?}", result.errors)
|
||||
};
|
||||
|
||||
return Err(format!(
|
||||
"Expected: {}, Got: {}. Errors: {}",
|
||||
expected_success, got_valid, error_msg
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_merge(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
use crate::merger::Merger;
|
||||
let merger = Merger::new(db.clone());
|
||||
|
||||
let test_data = self.data.clone().unwrap_or(Value::Null);
|
||||
let result = merger.merge(test_data);
|
||||
|
||||
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
|
||||
let got_success = result.errors.is_empty();
|
||||
|
||||
let error_msg = if result.errors.is_empty() {
|
||||
"None".to_string()
|
||||
} else {
|
||||
format!("{:?}", result.errors)
|
||||
};
|
||||
|
||||
let return_val = if expected_success != got_success {
|
||||
Err(format!(
|
||||
"Merge Expected: {}, Got: {}. Errors: {}",
|
||||
expected_success, got_success, error_msg
|
||||
))
|
||||
} else if let Some(expect) = &self.expect {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_sql(&queries)
|
||||
} else {
|
||||
Ok(())
|
||||
};
|
||||
|
||||
db.executor.reset_mocks();
|
||||
return_val
|
||||
}
|
||||
|
||||
pub fn run_query(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
use crate::queryer::Queryer;
|
||||
let queryer = Queryer::new(db.clone());
|
||||
|
||||
let stem_opt = self.stem.as_deref();
|
||||
let result = queryer.query(&self.schema_id, stem_opt, self.filters.as_ref());
|
||||
|
||||
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
|
||||
let got_success = result.errors.is_empty();
|
||||
|
||||
let error_msg = if result.errors.is_empty() {
|
||||
"None".to_string()
|
||||
} else {
|
||||
format!("{:?}", result.errors)
|
||||
};
|
||||
|
||||
let return_val = if expected_success != got_success {
|
||||
Err(format!(
|
||||
"Query Expected: {}, Got: {}. Errors: {}",
|
||||
expected_success, got_success, error_msg
|
||||
))
|
||||
} else if let Some(expect) = &self.expect {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_sql(&queries)
|
||||
} else {
|
||||
Ok(())
|
||||
};
|
||||
|
||||
db.executor.reset_mocks();
|
||||
return_val
|
||||
}
|
||||
}
|
||||
122
src/tests/types/expect.rs
Normal file
122
src/tests/types/expect.rs
Normal file
@@ -0,0 +1,122 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
/// Declarative expectations for a test case outcome.
#[derive(Debug, Deserialize)]
pub struct ExpectBlock {
    // Whether the operation is expected to succeed (i.e. produce no errors).
    pub success: bool,
    // Expected result payload, if any.
    // NOTE(review): not read anywhere in the visible test code — confirm use.
    pub result: Option<serde_json::Value>,
    // Expected error objects (new-style replacement for the legacy
    // `expect_errors` field on test cases).
    pub errors: Option<Vec<serde_json::Value>>,
    // Expected SQL execution log, possibly containing `{{type}}` or
    // `{{type:name}}` placeholders; see `assert_sql`.
    #[serde(default)]
    pub sql: Option<Vec<String>>,
}
|
||||
|
||||
impl ExpectBlock {
|
||||
/// Advanced SQL execution assertion algorithm ported from `assert.go`.
|
||||
/// This compares two arrays of strings, one containing {{uuid:name}} or {{timestamp}} placeholders,
|
||||
/// and the other containing actual executed database queries. It ensures that placeholder UUIDs
|
||||
/// are consistently mapped to the same actual UUIDs across all lines, and strictly validates line-by-line sequences.
|
||||
pub fn assert_sql(&self, actual: &[String]) -> Result<(), String> {
|
||||
let patterns = match &self.sql {
|
||||
Some(s) => s,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
if patterns.len() != actual.len() {
|
||||
return Err(format!(
|
||||
"Length mismatch: expected {} SQL executions, got {}.\nActual Execution Log:\n{}",
|
||||
patterns.len(),
|
||||
actual.len(),
|
||||
actual.join("\n")
|
||||
));
|
||||
}
|
||||
|
||||
use regex::Regex;
|
||||
use std::collections::HashMap;
|
||||
|
||||
let types = HashMap::from([
|
||||
(
|
||||
"uuid",
|
||||
r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
|
||||
),
|
||||
(
|
||||
"timestamp",
|
||||
r"\d{4}-\d{2}-\d{2}(?:[ T])\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?(?:Z|\+\d{2}(?::\d{2})?)?",
|
||||
),
|
||||
("integer", r"-?\d+"),
|
||||
("float", r"-?\d+\.\d+"),
|
||||
("text", r"(?:''|[^'])*"),
|
||||
("json", r"(?:''|[^'])*"),
|
||||
]);
|
||||
|
||||
let mut seen: HashMap<String, String> = HashMap::new();
|
||||
let system_uuid = "00000000-0000-0000-0000-000000000000";
|
||||
|
||||
// Placeholder regex: {{type:name}} or {{type}}
|
||||
let ph_rx = Regex::new(r"\{\{([a-z]+)(?:[:]([^}]+))?\}\}").unwrap();
|
||||
|
||||
for (i, pattern_str) in patterns.iter().enumerate() {
|
||||
let aline = &actual[i];
|
||||
let mut pp = regex::escape(pattern_str);
|
||||
pp = pp.replace(r"\{\{", "{{").replace(r"\}\}", "}}");
|
||||
|
||||
let mut cap_names = HashMap::new(); // cg_X -> var_name
|
||||
let mut group_idx = 0;
|
||||
|
||||
let mut final_rx_str = String::new();
|
||||
let mut last_match = 0;
|
||||
|
||||
let pp_clone = pp.clone();
|
||||
for caps in ph_rx.captures_iter(&pp_clone) {
|
||||
let full_match = caps.get(0).unwrap();
|
||||
final_rx_str.push_str(&pp[last_match..full_match.start()]);
|
||||
|
||||
let type_name = caps.get(1).unwrap().as_str();
|
||||
let var_name = caps.get(2).map(|m| m.as_str());
|
||||
|
||||
if let Some(name) = var_name {
|
||||
if let Some(val) = seen.get(name) {
|
||||
final_rx_str.push_str(®ex::escape(val));
|
||||
} else {
|
||||
let type_pattern = types.get(type_name).unwrap_or(&".*?");
|
||||
let cg_name = format!("cg_{}", group_idx);
|
||||
final_rx_str.push_str(&format!("(?P<{}>{})", cg_name, type_pattern));
|
||||
cap_names.insert(cg_name, name.to_string());
|
||||
group_idx += 1;
|
||||
}
|
||||
} else {
|
||||
let type_pattern = types.get(type_name).unwrap_or(&".*?");
|
||||
final_rx_str.push_str(&format!("(?:{})", type_pattern));
|
||||
}
|
||||
|
||||
last_match = full_match.end();
|
||||
}
|
||||
final_rx_str.push_str(&pp[last_match..]);
|
||||
|
||||
let final_rx = match Regex::new(&format!("^{}$", final_rx_str)) {
|
||||
Ok(r) => r,
|
||||
Err(e) => return Err(format!("Bad constructed regex: {} -> {}", final_rx_str, e)),
|
||||
};
|
||||
|
||||
if let Some(captures) = final_rx.captures(aline) {
|
||||
for (cg_name, var_name) in cap_names {
|
||||
if let Some(m) = captures.name(&cg_name) {
|
||||
let matched_str = m.as_str();
|
||||
if matched_str != system_uuid {
|
||||
seen.insert(var_name, matched_str.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(format!(
|
||||
"Line mismatched at execution sequence {}.\nExpected Pattern: {}\nActual SQL: {}\nRegex used: {}\nVariables Mapped: {:?}",
|
||||
i + 1,
|
||||
pattern_str,
|
||||
aline,
|
||||
final_rx_str,
|
||||
seen
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
7
src/tests/types/mod.rs
Normal file
7
src/tests/types/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
pub mod case;
|
||||
pub mod expect;
|
||||
pub mod suite;
|
||||
|
||||
pub use case::TestCase;
|
||||
pub use expect::ExpectBlock;
|
||||
pub use suite::TestSuite;
|
||||
10
src/tests/types/suite.rs
Normal file
10
src/tests/types/suite.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use super::case::TestCase;
|
||||
use serde::Deserialize;
|
||||
|
||||
/// One entry in a JSON test-suite file: a description, a raw database
/// definition, and the test cases to run against it.
///
/// NOTE(review): `runner.rs` currently declares its own private `TestSuite`
/// with the same shape instead of using this one; confirm and consolidate.
#[derive(Debug, Deserialize)]
pub struct TestSuite {
    // Suite name for reporting; `allow(dead_code)` suppresses the unused
    // warning while no consumer reads it through this type.
    #[allow(dead_code)]
    pub description: String,
    // Raw database definition, compiled via `crate::database::Database::new`.
    pub database: serde_json::Value,
    // The individual test cases for this suite.
    pub tests: Vec<TestCase>,
}
|
||||
Reference in New Issue
Block a user