Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 146efaa2d9 | |
| | d0294eec3f | |
| | 02ab4b6438 | |
| | 2a8b991269 | |
| | ce9c9baac9 | |
| | 3034406706 | |
1 Cargo.lock (generated)
@@ -1663,6 +1663,7 @@ version = "1.0.149"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
 dependencies = [
+ "indexmap",
  "itoa",
  "memchr",
  "serde",

@@ -6,7 +6,7 @@ edition = "2024"
 [dependencies]
 pgrx = "0.16.1"
 serde = { version = "1.0.228", features = ["derive", "rc"] }
-serde_json = "1.0.149"
+serde_json = { version = "1.0.149", features = ["preserve_order"] }
 lazy_static = "1.5.0"
 once_cell = "1.21.3"
 ahash = "0.8.12"

@@ -30,7 +30,7 @@ pgrx-tests = "0.16.1"
 
 [build-dependencies]
 serde = { version = "1.0.228", features = ["derive"] }
-serde_json = "1.0.149"
+serde_json = { version = "1.0.149", features = ["preserve_order"] }
 
 [lib]
 crate-type = ["cdylib", "lib"]
23 GEMINI.md
@@ -285,3 +285,26 @@ JSPG abandons the standard `cargo pgrx test` model in favor of native OS testing
 3. **Modular Test Dispatcher**: The `src/tests/types/` module deserializes the abstract JSON test payloads into `Suite`, `Case`, and `Expect` data structures.
    * The `compile` action natively asserts the exact output shape of `jspg_stems`, allowing structural and relationship mapping logic to be tested purely through JSON without writing brute-force manual tests in Rust.
 4. **Unit Context Execution**: When `cargo test` executes, the runner iterates the JSON payloads. Because the tests run natively inside the module via `#[cfg(test)]`, the compiler strips the `pgrx` C linkage, instantiates the `MockExecutor`, and lets complex database logic be evaluated purely structurally, in memory and in parallel.
+
+### SQL Expectation Formatting & Auto-Variablization
+
+Because JSPG SQL compilation generates large, complex relational statements (often featuring dynamically generated UUIDs or timestamps), manually updating expected SQL strings in the test fixtures is error-prone and tedious. To streamline this, JSPG includes a built-in test fixture formatter.
+
+**When to use it:**
+Whenever you modify the internal SQL generation logic (in the Queryer or Merger) and need to update the expected SQL outputs across the entire test suite.
+
+**How to run it:**
+Run the test suite sequentially while passing the `UPDATE_EXPECT=1` environment variable:
+```bash
+UPDATE_EXPECT=1 cargo test -- --test-threads=1
+```
+*Note: The `--test-threads=1` flag is strictly required; it prevents parallel tests from concurrently overwriting, and thereby corrupting, the same JSON fixture files.*
+
+**How it works (Intelligent Variablization):**
+The JSPG engine generates real, random UUIDs in memory for records inserted during `merger` tests. To assert relational integrity without hardcoding these ephemeral strings, the formatter uses a variable extraction map:
+1. **Payload Extraction**: Before evaluating the SQL output, the test runner recursively scans the JSON of the `data` and `mocks` blocks for that specific test case and maps any physical UUID it finds to its exact JSON path (e.g., `3333...` -> `mocks.0.id`).
+2. **SQL Canonicalization**: The test runner uses `sqlparser` to format the raw engine SQL into clean, multi-line, readable structures.
+3. **Variable Mapping**: It scans the formatted SQL for UUIDs via regex. If a UUID appears in the payload extraction map, it is replaced with a template tag such as `{{uuid:mocks.0.id}}` or `{{uuid:data.customer_id}}`.
+4. **Generated Fallbacks**: If it encounters a brand-new random UUID that wasn't provided in the inputs (e.g., a newly generated ID for an `INSERT`), it assigns a sequential tracking variable such as `{{uuid:generated_0}}`, and every subsequent appearance of that *exact* same UUID in the SQL transaction reuses the tag. Timestamps are replaced with `{{timestamp}}`.
+
+This guarantees the `assert_pattern` execution engine can strictly validate that the exact same ID generated for a parent entity is correctly passed as a foreign key to its children across complex database transactions.
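To make the variablization above concrete, here is a hypothetical before/after for a single generated statement; the table, columns, and abbreviated UUID values are invented for illustration, and only the `{{uuid:...}}` and `{{timestamp}}` tags follow the convention described above.

```sql
-- Raw engine output captured during the test run (ephemeral values):
INSERT INTO person (id, customer_id, created_at)
VALUES ('9f1c...', '3333...', '2024-01-01T00:00:00Z');

-- Fixture stored after an UPDATE_EXPECT=1 run (variablized):
-- '3333...' was found in the test's mocks payload; '9f1c...' was not,
-- so it becomes the first generated fallback.
INSERT INTO person (id, customer_id, created_at)
VALUES ('{{uuid:generated_0}}', '{{uuid:mocks.0.id}}', '{{timestamp}}');
```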
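Separately, the `#[cfg(test)]` executor swap described in step 4 of the testing notes above can be sketched as follows. This is a minimal sketch: the `DatabaseExecutor`, `SpiExecutor`, and `MockExecutor` names appear in the diff below, but the trait's method signature here is an assumption.

```rust
use serde_json::Value;

// Assumed, simplified trait; the real one lives in src/database.
pub trait DatabaseExecutor {
    fn run(&self, sql: &str) -> Value;
}

pub struct SpiExecutor;  // SPI-backed executor used inside PostgreSQL
pub struct MockExecutor; // in-memory executor used by unit tests

impl DatabaseExecutor for SpiExecutor {
    fn run(&self, _sql: &str) -> Value {
        unimplemented!("dispatches through pgrx SPI; needs a live backend")
    }
}

impl DatabaseExecutor for MockExecutor {
    fn run(&self, _sql: &str) -> Value {
        Value::Null // tests record queries and replay mocked rows instead
    }
}

pub struct Database {
    pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
}

impl Database {
    pub fn new() -> Self {
        Self {
            // Test builds never reference the C-linked SPI path at all,
            // so the whole suite runs as plain `cargo test`.
            #[cfg(not(test))]
            executor: Box::new(SpiExecutor),
            #[cfg(test)]
            executor: Box::new(MockExecutor),
        }
    }
}
```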
@@ -197,11 +197,11 @@
 "gender.condition": {
 "type": "condition",
 "compiledPropertyNames": [
+"kind",
 "$eq",
 "$ne",
-"$nof",
 "$of",
-"kind"
+"$nof"
 ],
 "properties": {
 "$eq": {

@@ -239,29 +239,29 @@
 "person": {},
 "person.filter": {
 "compiledPropertyNames": [
-"$and",
-"$or",
-"ad_hoc",
+"first_name",
 "age",
 "billing_address",
-"birth_date",
-"first_name",
 "gender",
-"tags"
+"birth_date",
+"tags",
+"ad_hoc",
+"$and",
+"$or"
 ],
 "properties": {
 "$and": {
 "items": {
 "compiledPropertyNames": [
-"$and",
-"$or",
-"ad_hoc",
+"first_name",
 "age",
 "billing_address",
-"birth_date",
-"first_name",
 "gender",
-"tags"
+"birth_date",
+"tags",
+"ad_hoc",
+"$and",
+"$or"
 ],
 "type": "person.filter"
 },

@@ -273,15 +273,15 @@
 "$or": {
 "items": {
 "compiledPropertyNames": [
-"$and",
-"$or",
-"ad_hoc",
+"first_name",
 "age",
 "billing_address",
-"birth_date",
-"first_name",
 "gender",
-"tags"
+"birth_date",
+"tags",
+"ad_hoc",
+"$and",
+"$or"
 ],
 "type": "person.filter"
 },

@@ -350,9 +350,9 @@
 "address.filter": {
 "type": "filter",
 "compiledPropertyNames": [
+"city",
 "$and",
-"$or",
-"city"
+"$or"
 ],
 "properties": {
 "$and": {

@@ -362,9 +362,9 @@
 ],
 "items": {
 "compiledPropertyNames": [
+"city",
 "$and",
-"$or",
-"city"
+"$or"
 ],
 "type": "address.filter"
 }

@@ -376,9 +376,9 @@
 ],
 "items": {
 "compiledPropertyNames": [
+"city",
 "$and",
-"$or",
-"city"
+"$or"
 ],
 "type": "address.filter"
 }

@@ -400,11 +400,11 @@
 "search.filter": {
 "type": "filter",
 "compiledPropertyNames": [
-"$and",
-"$or",
-"filter",
 "kind",
-"name"
+"name",
+"filter",
+"$and",
+"$or"
 ],
 "properties": {
 "$and": {

@@ -414,11 +414,11 @@
 ],
 "items": {
 "compiledPropertyNames": [
-"$and",
-"$or",
-"filter",
 "kind",
-"name"
+"name",
+"filter",
+"$and",
+"$or"
 ],
 "type": "search.filter"
 }

@@ -430,11 +430,11 @@
 ],
 "items": {
 "compiledPropertyNames": [
-"$and",
-"$or",
-"filter",
 "kind",
-"name"
+"name",
+"filter",
+"$and",
+"$or"
 ],
 "type": "search.filter"
 }
1891 fixtures/merger.json
File diff suppressed because it is too large
@@ -1,12 +1,12 @@
 use crate::database::object::{SchemaObject, SchemaTypeOrArray};
 use crate::database::schema::Schema;
 use crate::database::r#enum::Enum;
-use std::collections::BTreeMap;
+use indexmap::IndexMap;
 use std::sync::Arc;
 
 impl Enum {
     pub fn compile_condition(&self) -> Schema {
-        let mut props = BTreeMap::new();
+        let mut props = IndexMap::new();
         let enum_name = &self.name;
 
         let mut eq_obj = SchemaObject::default();

@@ -1,4 +1,5 @@
 use crate::database::schema::Schema;
+use indexmap::IndexMap;
 
 impl Schema {
     /// Dynamically infers and compiles all structural database relationships between this Schema

@@ -10,10 +11,10 @@ impl Schema {
         db: &crate::database::Database,
         root_id: &str,
         path: &str,
-        props: &std::collections::BTreeMap<String, std::sync::Arc<Schema>>,
+        props: &IndexMap<String, std::sync::Arc<Schema>>,
         errors: &mut Vec<crate::drop::Error>,
-    ) -> std::collections::BTreeMap<String, crate::database::edge::Edge> {
-        let mut schema_edges = std::collections::BTreeMap::new();
+    ) -> IndexMap<String, crate::database::edge::Edge> {
+        let mut schema_edges = IndexMap::new();
 
         // Determine the physical Database Table Name this schema structurally represents
         // Plucks the polymorphic discriminator via dot-notation (e.g. extracting "person" from "full.person")

@@ -1,7 +1,7 @@
 use crate::database::Database;
 use crate::database::object::{SchemaObject, SchemaTypeOrArray};
 use crate::database::schema::Schema;
-use std::collections::BTreeMap;
+use indexmap::IndexMap;
 use std::sync::Arc;
 
 impl Schema {

@@ -12,7 +12,7 @@ impl Schema {
         _errors: &mut Vec<crate::drop::Error>,
     ) -> Option<Schema> {
         if let Some(props) = self.obj.compiled_properties.get() {
-            let mut filter_props = BTreeMap::new();
+            let mut filter_props = IndexMap::new();
             for (key, child) in props {
                 let mut structural_filter = None;
 

@@ -5,6 +5,7 @@ pub mod filter;
 pub mod polymorphism;
 
 use crate::database::schema::Schema;
+use indexmap::IndexMap;
 
 impl Schema {
     pub fn compile(

@@ -48,7 +49,7 @@ impl Schema {
             }
         }
 
-        let mut props = std::collections::BTreeMap::new();
+        let mut props = IndexMap::new();
 
         // 1. Resolve INHERITANCE dependencies first
         if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {

@@ -124,8 +125,7 @@ impl Schema {
 
         // 4. Set the OnceLock!
        let _ = self.obj.compiled_properties.set(props.clone());
-        let mut names: Vec<String> = props.keys().cloned().collect();
-        names.sort();
+        let names: Vec<String> = props.keys().cloned().collect();
         let _ = self.obj.compiled_property_names.set(names);
 
         // 5. Compute Edges natively

@@ -1,3 +1,4 @@
+use indexmap::IndexSet;
 use crate::database::schema::Schema;
 
 impl Schema {

@@ -8,7 +9,7 @@ impl Schema {
         path: &str,
         errors: &mut Vec<crate::drop::Error>,
     ) {
-        let mut options = std::collections::BTreeMap::new();
+        let mut options = indexmap::IndexMap::new();
         let strategy: &str;
 
         if let Some(family) = &self.obj.family {

@@ -65,10 +66,10 @@ impl Schema {
                 }
             }
         } else if let Some(one_of) = &self.obj.one_of {
-            let mut type_vals = std::collections::HashSet::new();
-            let mut kind_vals = std::collections::HashSet::new();
+            let mut type_vals = IndexSet::new();
+            let mut kind_vals = IndexSet::new();
             let mut disjoint_base = true;
-            let mut structural_types = std::collections::HashSet::new();
+            let mut structural_types = IndexSet::new();
 
             for c in one_of {
                 let mut child_id = String::new();

@@ -1,4 +1,5 @@
 use crate::database::schema::Schema;
+use indexmap::IndexMap;
 use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 

@@ -10,5 +11,5 @@ pub struct Enum {
     pub source: String,
     pub values: Vec<String>,
     #[serde(default)]
-    pub schemas: std::collections::BTreeMap<String, Arc<Schema>>,
+    pub schemas: IndexMap<String, Arc<Schema>>,
 }

@@ -23,18 +23,18 @@ use punc::Punc;
 use relation::Relation;
 use schema::Schema;
 use serde_json::Value;
-use std::collections::HashMap;
+use indexmap::IndexMap;
 use std::sync::Arc;
 use r#type::Type;
 
 #[derive(serde::Serialize)]
 pub struct Database {
-    pub enums: HashMap<String, Enum>,
-    pub types: HashMap<String, Type>,
-    pub puncs: HashMap<String, Punc>,
-    pub relations: HashMap<String, Relation>,
+    pub enums: IndexMap<String, Enum>,
+    pub types: IndexMap<String, Type>,
+    pub puncs: IndexMap<String, Punc>,
+    pub relations: IndexMap<String, Relation>,
     #[serde(skip)]
-    pub schemas: HashMap<String, Arc<Schema>>,
+    pub schemas: IndexMap<String, Arc<Schema>>,
     #[serde(skip)]
     pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
 }

@@ -42,11 +42,11 @@ pub struct Database {
 impl Database {
     pub fn new(val: &serde_json::Value) -> (Self, crate::drop::Drop) {
         let mut db = Self {
-            enums: HashMap::new(),
-            types: HashMap::new(),
-            relations: HashMap::new(),
-            puncs: HashMap::new(),
-            schemas: HashMap::new(),
+            enums: IndexMap::new(),
+            types: IndexMap::new(),
+            relations: IndexMap::new(),
+            puncs: IndexMap::new(),
+            schemas: IndexMap::new(),
             #[cfg(not(test))]
             executor: Box::new(SpiExecutor::new()),
             #[cfg(test)]

@@ -1,7 +1,7 @@
 use crate::database::schema::Schema;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use std::collections::BTreeMap;
+use indexmap::IndexMap;
 use std::sync::Arc;
 use std::sync::OnceLock;
 

@@ -30,10 +30,10 @@ pub struct SchemaObject {
 
     // Object Keywords
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub properties: Option<BTreeMap<String, Arc<Schema>>>,
+    pub properties: Option<IndexMap<String, Arc<Schema>>>,
     #[serde(rename = "patternProperties")]
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub pattern_properties: Option<BTreeMap<String, Arc<Schema>>>,
+    pub pattern_properties: Option<IndexMap<String, Arc<Schema>>>,
     #[serde(rename = "additionalProperties")]
     #[serde(skip_serializing_if = "Option::is_none")]
     pub additional_properties: Option<Arc<Schema>>,

@@ -46,7 +46,7 @@ pub struct SchemaObject {
 
     // dependencies can be schema dependencies or property dependencies
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub dependencies: Option<BTreeMap<String, Dependency>>,
+    pub dependencies: Option<IndexMap<String, Dependency>>,
 
     // Array Keywords
     #[serde(rename = "items")]

@@ -147,7 +147,7 @@ pub struct SchemaObject {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub control: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub actions: Option<BTreeMap<String, Action>>,
+    pub actions: Option<IndexMap<String, Action>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub computer: Option<String>,
     #[serde(default)]

@@ -164,7 +164,7 @@ pub struct SchemaObject {
 
     // Internal structural representation caching active AST Node maps. Unlike the Go framework counterpart, the JSPG implementation DOES natively include ALL ancestral inheritance boundary schemas because it compiles locally against the raw database graph.
     #[serde(skip)]
-    pub compiled_properties: OnceLock<BTreeMap<String, Arc<Schema>>>,
+    pub compiled_properties: OnceLock<IndexMap<String, Arc<Schema>>>,
 
     #[serde(rename = "compiledDiscriminator")]
     #[serde(skip_deserializing)]

@@ -176,13 +176,13 @@ pub struct SchemaObject {
     #[serde(skip_deserializing)]
     #[serde(skip_serializing_if = "crate::database::object::is_once_lock_map_empty")]
     #[serde(serialize_with = "crate::database::object::serialize_once_lock")]
-    pub compiled_options: OnceLock<BTreeMap<String, (Option<usize>, Option<String>)>>,
+    pub compiled_options: OnceLock<IndexMap<String, (Option<usize>, Option<String>)>>,
 
     #[serde(rename = "compiledEdges")]
     #[serde(skip_deserializing)]
     #[serde(skip_serializing_if = "crate::database::object::is_once_lock_map_empty")]
     #[serde(serialize_with = "crate::database::object::serialize_once_lock")]
-    pub compiled_edges: OnceLock<BTreeMap<String, crate::database::edge::Edge>>,
+    pub compiled_edges: OnceLock<IndexMap<String, crate::database::edge::Edge>>,
 
     #[serde(skip)]
     pub compiled_format: OnceLock<CompiledFormat>,

@@ -245,7 +245,7 @@ pub fn serialize_once_lock<T: serde::Serialize, S: serde::Serializer>(
     }
 }
 
-pub fn is_once_lock_map_empty<K, V>(lock: &OnceLock<std::collections::BTreeMap<K, V>>) -> bool {
+pub fn is_once_lock_map_empty<K, V>(lock: &OnceLock<indexmap::IndexMap<K, V>>) -> bool {
     lock.get().map_or(true, |m| m.is_empty())
 }
 

@@ -1,5 +1,6 @@
 use crate::database::page::Page;
 use crate::database::schema::Schema;
+use indexmap::IndexMap;
 use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 

@@ -18,5 +19,5 @@ pub struct Punc {
     pub save: Option<String>,
     pub page: Option<Page>,
     #[serde(default)]
-    pub schemas: std::collections::BTreeMap<String, Arc<Schema>>,
+    pub schemas: IndexMap<String, Arc<Schema>>,
 }

@@ -1,4 +1,4 @@
-use std::collections::HashSet;
+use indexmap::{IndexMap, IndexSet};
 
 use crate::database::schema::Schema;
 use serde::{Deserialize, Serialize};

@@ -25,7 +25,7 @@ pub struct Type {
     #[serde(default)]
     pub hierarchy: Vec<String>,
     #[serde(default)]
-    pub variations: HashSet<String>,
+    pub variations: IndexSet<String>,
     #[serde(default)]
     pub relationship: bool,
     #[serde(default)]

@@ -39,5 +39,5 @@ pub struct Type {
     pub default_fields: Vec<String>,
     pub field_types: Option<Value>,
     #[serde(default)]
-    pub schemas: std::collections::BTreeMap<String, Arc<Schema>>,
+    pub schemas: IndexMap<String, Arc<Schema>>,
 }
11 src/lib.rs
@@ -7,6 +7,9 @@ pg_module_magic!();
 #[cfg(test)]
 pub struct JsonB(pub serde_json::Value);
 
+#[cfg(test)]
+pub struct Json(pub serde_json::Value);
+
 pub mod database;
 pub mod drop;
 pub mod jspg;

@@ -41,7 +44,7 @@ fn jspg_failure() -> JsonB {
 }
 
 #[cfg_attr(not(test), pg_extern(strict))]
-pub fn jspg_setup(database: JsonB) -> JsonB {
+pub fn jspg_setup(database: Json) -> JsonB {
     let (new_jspg, drop) = crate::jspg::Jspg::new(&database.0);
     let new_arc = Arc::new(new_jspg);
 

@@ -109,7 +112,7 @@ pub fn jspg_validate(schema_id: &str, instance: JsonB) -> JsonB {
 }
 
 #[cfg_attr(not(test), pg_extern)]
-pub fn jspg_database() -> JsonB {
+pub fn jspg_database() -> Json {
     let engine_opt = {
         let lock = GLOBAL_JSPG.read().unwrap();
         lock.clone()

@@ -120,9 +123,9 @@ pub fn jspg_database() -> JsonB {
             let database_json = serde_json::to_value(&engine.database)
                 .unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
             let drop = crate::drop::Drop::success_with_val(database_json);
-            JsonB(serde_json::to_value(drop).unwrap())
+            Json(serde_json::to_value(drop).unwrap())
         }
-        None => jspg_failure(),
+        None => Json(jspg_failure().0),
     }
 }
 
@@ -1,4 +1,5 @@
 use crate::database::Database;
+use indexmap::IndexMap;
 use std::sync::Arc;
 
 pub struct Compiler<'a> {

@@ -256,7 +257,7 @@ impl<'a> Compiler<'a> {
 
     fn compile_object(
         &mut self,
-        props: &std::collections::BTreeMap<String, std::sync::Arc<crate::database::schema::Schema>>,
+        props: &IndexMap<String, std::sync::Arc<crate::database::schema::Schema>>,
         node: Node<'a>,
     ) -> Result<(String, String), String> {
         let mut build_args = Vec::new();

@@ -377,10 +378,7 @@
         return Ok(("NULL".to_string(), "string".to_string()));
         }
 
-        case_statements.sort();
-
         let sql = format!("CASE {} ELSE NULL END", case_statements.join(" "));
 
         Ok((sql, "object".to_string()))
     }

@@ -417,7 +415,7 @@
     ) -> Result<Vec<String>, String> {
         let mut select_args = Vec::new();
         let grouped_fields = r#type.grouped_fields.as_ref().and_then(|v| v.as_object());
-        let default_props = std::collections::BTreeMap::new();
+        let default_props = IndexMap::new();
         let merged_props = node
             .schema
             .obj
393 src/tests/formatter.rs (new file)
@@ -0,0 +1,393 @@
use sqlparser::ast::{
    BinaryOperator, Expr, Function, FunctionArg, Join, JoinConstraint, JoinOperator,
    Query, Select, SelectItem, SetExpr, Statement, TableWithJoins, Value
};
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

pub struct SqlFormatter {
    pub lines: Vec<String>,
    pub indent: usize,
}

impl SqlFormatter {
    pub fn new() -> Self {
        Self {
            lines: Vec::new(),
            indent: 0,
        }
    }

    pub fn format(sql: &str) -> Vec<String> {
        let dialect = PostgreSqlDialect {};
        let ast = match Parser::parse_sql(&dialect, sql) {
            Ok(ast) => ast,
            Err(e) => {
                println!("DEBUG PARSE SQL ERROR: {:?}", e);
                return vec![sql.to_string()];
            }
        };

        if ast.is_empty() {
            return vec![sql.to_string()];
        }

        let mut formatter = SqlFormatter::new();
        formatter.format_statement(&ast[0]);
        formatter.lines
    }

    fn push_str(&mut self, s: &str) {
        if self.lines.is_empty() {
            self.lines.push(format!("{}{}", " ".repeat(self.indent), s.replace("JSONB", "jsonb")));
        } else {
            let last = self.lines.last_mut().unwrap();
            last.push_str(&s.replace("JSONB", "jsonb"));
        }
    }

    fn push_line(&mut self, s: &str) {
        self.lines.push(format!("{}{}", " ".repeat(self.indent), s.replace("JSONB", "jsonb")));
    }

    fn format_statement(&mut self, stmt: &Statement) {
        match stmt {
            Statement::Query(query) => {
                self.push_line("(");
                self.format_query(query);
                self.push_str(")");
            }
            Statement::Update(_update) => {
                let sql = stmt.to_string();
                self.format_update_fallback(&sql);
            }
            _ => {
                let sql = stmt.to_string();
                if sql.starts_with("INSERT") {
                    self.format_insert_fallback(&sql);
                } else {
                    self.push_line(&sql);
                }
            }
        }
    }

    fn format_insert_fallback(&mut self, sql: &str) {
        let s = sql.to_string();
        if let Some(values_idx) = s.find(" VALUES (") {
            let prefix = &s[..values_idx];
            let suffix = &s[values_idx + 9..];

            if let Some(paren_idx) = prefix.find(" (") {
                self.push_line(&format!("{} (", &prefix[..paren_idx]));
                self.indent += 2;
                let cols = &prefix[paren_idx + 2..prefix.len() - 1];
                let cols_split: Vec<&str> = cols.split(", ").collect();
                for (i, col) in cols_split.iter().enumerate() {
                    let comma = if i < cols_split.len() - 1 { "," } else { "" };
                    let c = col.replace("\"", "");
                    self.push_line(&format!("\"{}\"{}", c, comma));
                }
                self.indent -= 2;
                self.push_line(")");
            } else {
                self.push_line(prefix);
            }

            self.push_line("VALUES (");
            self.indent += 2;

            let vals = if suffix.ends_with(")") { &suffix[..suffix.len() - 1] } else { suffix };
            let mut val_tokens = Vec::new();
            let mut curr = String::new();
            let mut in_str = false;
            for c in vals.chars() {
                if c == '\'' {
                    in_str = !in_str;
                    curr.push(c);
                } else if c == ',' && !in_str {
                    val_tokens.push(curr.trim().to_string());
                    curr = String::new();
                } else {
                    curr.push(c);
                }
            }
            if !curr.trim().is_empty() {
                val_tokens.push(curr.trim().to_string());
            }

            for (i, val) in val_tokens.iter().enumerate() {
                let comma = if i < val_tokens.len() - 1 { "," } else { "" };

                if val.starts_with("'{") && val.ends_with("}'") {
                    let inner = &val[1..val.len() - 1];
                    // Unescape single quotes from SQL strings
                    let unescaped = inner.replace("''", "'");
                    if let Ok(json) = serde_json::from_str::<serde_json::Value>(&unescaped) {
                        if let Ok(pretty) = serde_json::to_string_pretty(&json) {
                            let lines: Vec<&str> = pretty.split('\n').collect();
                            self.push_line("'{");
                            self.indent += 2;
                            for (j, line) in lines.iter().skip(1).enumerate() {
                                if j == lines.len() - 2 {
                                    self.indent -= 2;
                                    // re-escape single quotes for SQL
                                    self.push_line(&format!("{}'{}", line.replace("'", "''"), comma));
                                } else {
                                    self.push_line(&line.replace("'", "''"));
                                }
                            }
                            continue;
                        }
                    }
                }

                self.push_line(&format!("{}{}", val, comma));
            }
            self.indent -= 2;
            self.push_line(")");
        } else {
            self.push_line(&s);
        }
    }

    fn format_update_fallback(&mut self, sql: &str) {
        let s = sql.to_string();
        if let Some(set_idx) = s.find(" SET ") {
            self.push_line(&format!("{} SET", &s[..set_idx]));
            self.indent += 2;

            let after_set = &s[set_idx + 5..];
            let where_idx = after_set.find(" WHERE ");
            let assigns = if let Some(w) = where_idx { &after_set[..w] } else { after_set };
            let assigns_split: Vec<&str> = assigns.split(", ").collect();
            for (i, assign) in assigns_split.iter().enumerate() {
                let comma = if i < assigns_split.len() - 1 { "," } else { "" };
                self.push_line(&format!("{}{}", assign.replace("\"", ""), comma));
            }
            self.indent -= 2;

            if let Some(w) = where_idx {
                self.push_line("WHERE");
                self.indent += 2;
                self.push_line(&after_set[w + 7..]);
                self.indent -= 2;
            }
        } else {
            self.push_line(&s);
        }
    }

    fn format_query(&mut self, query: &Query) {
        match &*query.body {
            SetExpr::Select(select) => self.format_select(select),
            SetExpr::Query(inner_query) => {
                self.push_str("(");
                self.format_query(inner_query);
                self.push_str(")");
            }
            _ => self.push_str(&query.to_string()),
        }
    }

    fn format_select(&mut self, select: &Select) {
        self.push_str("SELECT ");
        for (i, p) in select.projection.iter().enumerate() {
            let comma = if i < select.projection.len() - 1 { ", " } else { "" };
            self.format_select_item(p);
            self.push_str(comma);
        }

        if !select.from.is_empty() {
            self.push_line("FROM ");
            for (i, table) in select.from.iter().enumerate() {
                let comma = if i < select.from.len() - 1 { ", " } else { "" };
                self.format_table_with_joins(table);
                self.push_str(comma);
            }

            if let Some(selection) = &select.selection {
                self.push_line("WHERE");
                self.indent += 2;
                self.push_line(""); // new line for where clauses
                self.format_expr(selection);
                self.indent -= 2;
            }
        }
    }

    fn format_select_item(&mut self, item: &SelectItem) {
        match item {
            SelectItem::UnnamedExpr(expr) => self.format_expr(expr),
            SelectItem::ExprWithAlias { expr, alias } => {
                self.format_expr(expr);
                self.push_str(&format!(" AS {}", alias));
            }
            _ => self.push_str(&item.to_string()),
        }
    }

    fn format_table_with_joins(&mut self, table: &TableWithJoins) {
        self.push_str(&table.relation.to_string());
        for join in &table.joins {
            self.push_line("");
            self.format_join(join);
        }
    }

    fn format_join(&mut self, join: &Join) {
        let op = match &join.join_operator {
            JoinOperator::Inner(_) => "JOIN",
            JoinOperator::LeftOuter(_) => "LEFT JOIN",
            _ => "JOIN",
        };
        self.push_str(&format!("{} {} ON ", op, join.relation));

        match &join.join_operator {
            JoinOperator::Inner(JoinConstraint::On(expr)) => self.format_expr(expr),
            JoinOperator::LeftOuter(JoinConstraint::On(expr)) => self.format_expr(expr),
            JoinOperator::Join(JoinConstraint::On(expr)) => self.format_expr(expr),
            _ => {
                println!("FALLBACK JOIN OP: {:?}", join.join_operator);
            }
        }
    }

    fn format_expr(&mut self, expr: &Expr) {
        match expr {
            Expr::Function(func) => self.format_function(func),
            Expr::BinaryOp { left, op, right } => {
                if *op == BinaryOperator::And || *op == BinaryOperator::Or {
                    self.format_expr(left);
                    self.push_line(&format!("{} ", op));
                    self.format_expr(right);
                } else {
                    self.format_expr(left);
                    self.push_str(&format!(" {} ", op));
                    self.format_expr(right);
                }
            }
            Expr::Nested(inner) => {
                self.push_str("(");
                self.format_expr(inner);
                self.push_str(")");
            }
            Expr::IsNull(inner) => {
                self.format_expr(inner);
                self.push_str(" IS NULL");
            }
            Expr::IsNotNull(inner) => {
                self.format_expr(inner);
                self.push_str(" IS NOT NULL");
            }
            Expr::Subquery(query) => {
                self.push_str("(");
                self.indent += 2;
                self.push_line("");
                self.format_query(query);
                self.indent -= 2;
                self.push_line(")");
            }
            Expr::Case { operand, conditions, else_result, .. } => {
                self.push_str("CASE");
                if let Some(op) = operand {
                    self.push_str(" ");
                    self.format_expr(op);
                }
                self.indent += 2;
                for when in conditions {
                    self.push_line("WHEN ");
                    self.format_expr(&when.condition);
                    self.push_str(" THEN ");
                    self.format_expr(&when.result);
                }
                if let Some(els) = else_result {
                    self.push_line("ELSE ");
                    self.format_expr(els);
                }
                self.indent -= 2;
                self.push_line("END");
            }
            Expr::UnaryOp { op, expr: inner } => {
                self.push_str(&format!("{} ", op));
                self.format_expr(inner);
            }

            Expr::Value(sqlparser::ast::ValueWithSpan { value: Value::SingleQuotedString(s), .. }) | Expr::Value(sqlparser::ast::ValueWithSpan { value: Value::EscapedStringLiteral(s), .. }) => {
                if s.starts_with('{') && s.ends_with('}') {
                    if let Ok(json) = serde_json::from_str::<serde_json::Value>(s) {
                        if let Ok(pretty) = serde_json::to_string_pretty(&json) {
                            let lines: Vec<&str> = pretty.split('\n').collect();
                            self.push_str("'{");
                            self.indent += 2;
                            for (j, line) in lines.iter().skip(1).enumerate() {
                                if j == lines.len() - 2 {
                                    self.indent -= 2;
                                    self.push_line(&format!("{}'", line.replace("'", "''")));
                                } else {
                                    self.push_line(&line.replace("'", "''"));
                                }
                            }
                            return;
                        }
                    }
                }
                self.push_str(&expr.to_string());
            }
            _ => {
                self.push_str(&expr.to_string());
            }
        }
    }

    fn format_function(&mut self, func: &Function) {
        let name = func.name.to_string();
        self.push_str(&format!("{}(", name));

        if let sqlparser::ast::FunctionArguments::List(list) = &func.args {
            if name == "jsonb_build_object" {
                self.indent += 2;
                self.push_line("");
                let mut i = 0;
                while i < list.args.len() {
                    let arg_key = &list.args[i];
                    let arg_val = if i + 1 < list.args.len() { Some(&list.args[i+1]) } else { None };

                    self.format_function_arg(arg_key);
                    self.push_str(", ");
                    if let Some(val) = arg_val {
                        self.format_function_arg(val);
                    }

                    if i + 2 < list.args.len() {
                        self.push_str(",");
                        self.push_line("");
                    }
                    i += 2;
                }
                self.indent -= 2;
                self.push_line(")");
            } else {
                for (i, arg) in list.args.iter().enumerate() {
                    let comma = if i < list.args.len() - 1 { ", " } else { "" };
                    self.format_function_arg(arg);
                    self.push_str(comma);
                }
                self.push_str(")");
            }
        } else {
            self.push_str(")");
        }
    }

    fn format_function_arg(&mut self, arg: &FunctionArg) {
        match arg {
            FunctionArg::Unnamed(sqlparser::ast::FunctionArgExpr::Expr(expr)) => self.format_expr(expr),
            _ => {
                println!("FALLBACK ARG: {:?}", arg);
                self.push_str(&arg.to_string());
            }
        }
    }
}
@@ -1,4 +1,5 @@
 use crate::*;
+pub mod formatter;
 pub mod runner;
 pub mod types;
 use serde_json::json;

@@ -72,7 +73,7 @@ fn test_library_api() {
 ]
 });
 
-let cache_drop = jspg_setup(JsonB(db_json));
+let cache_drop = jspg_setup(Json(db_json));
 assert_eq!(
 cache_drop.0,
 json!({

@@ -127,7 +128,7 @@
 "forward": true
 }
 },
-"compiledPropertyNames": ["name", "target", "type"],
+"compiledPropertyNames": ["type", "name", "target"],
 "properties": {
 "name": { "type": "string" },
 "target": {

@@ -140,19 +141,19 @@
 "type": "object"
 },
 "source_schema.filter": {
-"compiledPropertyNames": ["$and", "$or", "name", "target", "type"],
+"compiledPropertyNames": ["type", "name", "target", "$and", "$or"],
 "properties": {
 "$and": {
 "type": ["array", "null"],
 "items": {
-"compiledPropertyNames": ["$and", "$or", "name", "target", "type"],
+"compiledPropertyNames": ["type", "name", "target", "$and", "$or"],
 "type": "source_schema.filter"
 }
 },
 "$or": {
 "type": ["array", "null"],
 "items": {
-"compiledPropertyNames": ["$and", "$or", "name", "target", "type"],
+"compiledPropertyNames": ["type", "name", "target", "$and", "$or"],
 "type": "source_schema.filter"
 }
 },

@@ -193,19 +194,19 @@
 "type": "object"
 },
 "target_schema.filter": {
-"compiledPropertyNames": ["$and", "$or", "value"],
+"compiledPropertyNames": ["value", "$and", "$or"],
 "properties": {
 "$and": {
 "type": ["array", "null"],
 "items": {
-"compiledPropertyNames": ["$and", "$or", "value"],
+"compiledPropertyNames": ["value", "$and", "$or"],
 "type": "target_schema.filter"
 }
 },
 "$or": {
 "type": ["array", "null"],
 "items": {
-"compiledPropertyNames": ["$and", "$or", "value"],
+"compiledPropertyNames": ["value", "$and", "$or"],
 "type": "target_schema.filter"
 }
 },
@@ -127,7 +127,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
             }
         }
         "merge" => {
-            let result = test.run_merge(db_unwrapped.unwrap());
+            let result = test.run_merge(db_unwrapped.unwrap(), path, suite_idx, case_idx);
             if let Err(e) = result {
                 println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
                 failures.push(format!(

@@ -137,7 +137,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
             }
         }
         "query" => {
-            let result = test.run_query(db_unwrapped.unwrap());
+            let result = test.run_query(db_unwrapped.unwrap(), path, suite_idx, case_idx);
             if let Err(e) = result {
                 println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
                 failures.push(format!(

@@ -160,3 +160,83 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
 
     Ok(())
 }
+
+pub fn extract_uuids(val: &Value, path: &str, map: &mut HashMap<String, String>) {
+    let uuid_re = regex::Regex::new(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$").unwrap();
+
+    match val {
+        Value::Object(obj) => {
+            for (k, v) in obj {
+                let new_path = if path.is_empty() { k.clone() } else { format!("{}.{}", path, k) };
+                extract_uuids(v, &new_path, map);
+            }
+        }
+        Value::Array(arr) => {
+            for (i, v) in arr.iter().enumerate() {
+                let new_path = if path.is_empty() { i.to_string() } else { format!("{}.{}", path, i) };
+                extract_uuids(v, &new_path, map);
+            }
+        }
+        Value::String(s) => {
+            if s != "00000000-0000-0000-0000-000000000000" && uuid_re.is_match(s) {
+                map.insert(s.clone(), path.to_string());
+            }
+        }
+        _ => {}
+    }
+}
+
+pub fn canonicalize_with_map(s: &str, uuid_map: &HashMap<String, String>, gen_map: &mut HashMap<String, usize>) -> String {
+    let uuid_re = regex::Regex::new(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}").unwrap();
+    let s1 = uuid_re.replace_all(s, |caps: &regex::Captures| {
+        let val = &caps[0];
+        if val == "00000000-0000-0000-0000-000000000000" {
+            val.to_string()
+        } else if let Some(path) = uuid_map.get(val) {
+            format!("{{{{uuid:{}}}}}", path)
+        } else {
+            let next_idx = gen_map.len();
+            let idx = *gen_map.entry(val.to_string()).or_insert(next_idx);
+            format!("{{{{uuid:generated_{}}}}}", idx)
+        }
+    });
+
+    let ts_re = regex::Regex::new(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?(?:Z|\+\d{2}(?::\d{2})?)?").unwrap();
+    ts_re.replace_all(&s1, "{{timestamp}}").to_string()
+}
+
+pub fn update_sql_fixture(path: &str, suite_idx: usize, case_idx: usize, queries: &[String]) {
+    use crate::tests::formatter::SqlFormatter;
+    let content = fs::read_to_string(path).unwrap();
+    let mut file_data: Value = serde_json::from_str(&content).unwrap();
+
+    let mut uuid_map = HashMap::new();
+    if let Some(test_case) = file_data.get(suite_idx).and_then(|s| s.get("tests")).and_then(|t| t.get(case_idx)) {
+        if let Some(data) = test_case.get("data") {
+            extract_uuids(data, "data", &mut uuid_map);
+        }
+        if let Some(mocks) = test_case.get("mocks") {
+            extract_uuids(mocks, "mocks", &mut uuid_map);
+        }
+    }
+
+    let mut gen_map = HashMap::new();
+
+    let mut formatted_sql = Vec::new();
+    for q in queries {
+        let res = SqlFormatter::format(q);
+        let mapped_res: Vec<String> = res.into_iter().map(|l| canonicalize_with_map(&l, &uuid_map, &mut gen_map)).collect();
+        formatted_sql.push(mapped_res);
+    }
+
+    if let Some(expect) = file_data[suite_idx]["tests"][case_idx].get_mut("expect") {
+        if let Some(obj) = expect.as_object_mut() {
+            obj.remove("pattern");
+            obj.insert("sql".to_string(), serde_json::json!(formatted_sql));
+        }
+    }
+
+    // To preserve original formatting, we just use serde_json pretty output
+    let formatted_json = serde_json::to_string_pretty(&file_data).unwrap();
+    fs::write(path, formatted_json).unwrap();
+}
@@ -75,7 +75,7 @@ impl Case {
         Ok(())
     }
 
-    pub fn run_merge(&self, db: Arc<Database>) -> Result<(), String> {
+    pub fn run_merge(&self, db: Arc<Database>, path: &str, suite_idx: usize, case_idx: usize) -> Result<(), String> {
         if let Some(mocks) = &self.mocks {
             if let Some(arr) = mocks.as_array() {
                 db.executor.set_mocks(arr.clone());

@@ -94,7 +94,10 @@ impl Case {
         } else if result.errors.is_empty() {
             // Only assert SQL if merge succeeded
             let queries = db.executor.get_queries();
-            expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries))
+            if std::env::var("UPDATE_EXPECT").is_ok() {
+                crate::tests::runner::update_sql_fixture(path, suite_idx, case_idx, &queries);
+            }
+            expect.assert_sql(&queries)
         } else {
             Ok(())
         }

@@ -106,7 +109,7 @@ impl Case {
         return_val
     }
 
-    pub fn run_query(&self, db: Arc<Database>) -> Result<(), String> {
+    pub fn run_query(&self, db: Arc<Database>, path: &str, suite_idx: usize, case_idx: usize) -> Result<(), String> {
         if let Some(mocks) = &self.mocks {
             if let Some(arr) = mocks.as_array() {
                 db.executor.set_mocks(arr.clone());

@@ -123,7 +126,10 @@ impl Case {
             Err(format!("Query {}", e))
         } else if result.errors.is_empty() {
             let queries = db.executor.get_queries();
-            expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries))
+            if std::env::var("UPDATE_EXPECT").is_ok() {
+                crate::tests::runner::update_sql_fixture(path, suite_idx, case_idx, &queries);
+            }
+            expect.assert_sql(&queries)
         } else {
             Ok(())
         }
@@ -1,6 +1,7 @@
 use crate::validator::context::ValidationContext;
 use crate::validator::error::ValidationError;
 use crate::validator::result::ValidationResult;
+use indexmap::IndexMap;
 
 impl<'a> ValidationContext<'a> {
     pub(crate) fn validate_family(

@@ -65,7 +66,7 @@ impl<'a> ValidationContext<'a> {
 
     pub(crate) fn execute_polymorph(
         &self,
-        options: &std::collections::BTreeMap<String, (Option<usize>, Option<String>)>,
+        options: &IndexMap<String, (Option<usize>, Option<String>)>,
         result: &mut ValidationResult,
     ) -> Result<bool, ValidationError> {
         // 1. O(1) Fast-Path Router & Extractor