Compare commits
113 Commits
cfcb259eab
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 9387152859 | |||
| 4ab1b210ae | |||
| 7c8df22709 | |||
| e4286ac6a9 | |||
| 4411ac82f7 | |||
| 9b763a23c3 | |||
| ae4d83acd1 | |||
| ee8c9086ef | |||
| 8730a828c6 | |||
| 776a442374 | |||
| 5c1779651c | |||
| 6c047e326d | |||
| 7876567ae7 | |||
| 06f6a587de | |||
| 29d8dfb608 | |||
| 5b36ecf06c | |||
| 76467a6fed | |||
| 930d0513cd | |||
| cad651dbd8 | |||
| ea9ac8469c | |||
| ebcdb661fa | |||
| c893e29c59 | |||
| 7523431007 | |||
| dd98bfac9e | |||
| 2f3a1d16b7 | |||
| e86fe5cc4e | |||
| 93b0a70718 | |||
| 9c24f1af8f | |||
| f9cf1f837a | |||
| 796df7763c | |||
| 4a10833f50 | |||
| 46fc032026 | |||
| 7ec06b81cc | |||
| c4e8e0309f | |||
| eb91b65e65 | |||
| 8bf3649465 | |||
| 9fe5a34163 | |||
| f5bf21eb58 | |||
| 9dcafed406 | |||
| ffd6c27da3 | |||
| 4941dc6069 | |||
| a8a15a82ef | |||
| 8dcc714963 | |||
| f87ac81f3b | |||
| 8ca9017cc4 | |||
| 10c57e59ec | |||
| ef4571767c | |||
| 29bd25eaff | |||
| 4d9b510819 | |||
| 3c4b1066df | |||
| 4c59d9ba7f | |||
| a1038490dd | |||
| 14707330a7 | |||
| 77bc92533c | |||
| 4060119b01 | |||
| 95546fe10c | |||
| 882bdc6271 | |||
| 9bdb767685 | |||
| bdd89fe695 | |||
| 8135d80045 | |||
| 9255439d53 | |||
| 9038607729 | |||
| 9f6c27c3b8 | |||
| 75aac41362 | |||
| dbcef42401 | |||
| b6c5561d2f | |||
| e01b778d68 | |||
| 6eb134c0d6 | |||
| 7ccc4b7cce | |||
| 77bfa4cd18 | |||
| b47a5abd26 | |||
| fcd8310ed8 | |||
| 31519e8447 | |||
| 847e921b1c | |||
| e19e1921e5 | |||
| 94d011e729 | |||
| 263cf04ffb | |||
| 00375c2926 | |||
| 885b9b5e44 | |||
| 298645ffdb | |||
| 330280ba48 | |||
| 02e661d219 | |||
| f7163e2689 | |||
| 091007006d | |||
| 3d66a7fc3c | |||
| e1314496dd | |||
| 70a27b430d | |||
| e078b8a74b | |||
| c2c0e62c2d | |||
| ebb97b3509 | |||
| 5d18847f32 | |||
| 4a33e29628 | |||
| d8fc286e94 | |||
| 507dc6d780 | |||
| e340039a30 | |||
| 08768e3d42 | |||
| 6c9e6575ce | |||
| 5d11c4c92c | |||
| 25239d635b | |||
| 3bec6a6102 | |||
| 6444b300b3 | |||
| c529c8b8ea | |||
| 2f15ae3d41 | |||
| f8528aa85e | |||
| b6f383e700 | |||
| db5183930d | |||
| 6de75ba525 | |||
| 6632570712 | |||
| d4347072f2 | |||
| 290464adc1 | |||
| d6deaa0b0f | |||
| 6a275e1d90 | |||
| 797a0a5460 |
@ -4,9 +4,11 @@ description: jspg work preparation
|
||||
|
||||
This workflow will get you up-to-speed on the JSPG custom json-schema-based cargo pgrx postgres validation extension. Everything you read will be in the jspg directory/project.
|
||||
|
||||
Read over this entire workflow and commit to every section of work in a task list, so that you don't stop half way through before reviewing all of the directories and files mentioned. Do not ask for confirmation after generating this task list and proceed through all sections in your list.
|
||||
Read over this entire workflow and commit to every section of work in a fresh task list (DO THIS FIRST), so that you don't stop half way through before reviewing all of the directories and files mentioned. Do not ask for confirmation after generating this task list and proceed through all sections in your list.
|
||||
|
||||
Please analyze the files and directories and do not use cat, find, or the terminal to discover or read in any of these files. Analyze every file mentioned. If a directory is mentioned or a /*, please analyze the directory, every single file at its root, and recursively analyze every subdirectory and every single file in every subdirectory to capture not just critical files, but the entirety of what is requested. I state again, DO NOT just review a cherry picking of files in any folder or wildcard specified. Review 100% of all files discovered recursively!
|
||||
Please analyze the files and directories and do not use cat, find, or the terminal AT ALL to discover or read in any of these files! USE YOUR TOOLS ONLY. Analyze every file mentioned. If a directory is mentioned or a /*, please analyze the directory, every single file at its root, and recursively analyze every subdirectory and every single file in every subdirectory to capture not just critical files, but the entirety of what is requested. I state again, DO NOT just review a cherry picking of files in any folder or wildcard specified. Review 100% of all files discovered recursively!
|
||||
|
||||
Do not make any code changes. Just focus on your task list and reading files!
|
||||
|
||||
Section 1: Various Documentation
|
||||
|
||||
@ -48,7 +50,6 @@ Now, review some punc type and enum source in the api project with api/ these fi
|
||||
- api/punc/sql/tables.sql
|
||||
- api/punc/sql/domains.sql
|
||||
- api/punc/sql/indexes.sql
|
||||
- api/punc/sql/functions/entity.sql
|
||||
- api/punc/sql/functions/puncs.sql
|
||||
- api/punc/sql/puncs/entity.sql
|
||||
- api/punc/sql/puncs/persons.sql
|
||||
|
||||
11
.test/tests.md
Normal file
11
.test/tests.md
Normal file
@ -0,0 +1,11 @@
|
||||
# 🗒️ Test Report (punc/framework)
|
||||
|
||||
_Generated at Wed Mar 18 05:21:40 EDT 2026_
|
||||
|
||||
## Summary
|
||||
|
||||
| Lang | Status | Tests | Passed | Failed | Duration |
|
||||
| :--- | :---: | :---: | :---: | :---: | ---: |
|
||||
|
||||
## Results
|
||||
|
||||
5
.vscode/extensions.json
vendored
Normal file
5
.vscode/extensions.json
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"rust-lang.rust-analyzer"
|
||||
]
|
||||
}
|
||||
81
Cargo.lock
generated
81
Cargo.lock
generated
@ -55,6 +55,15 @@ version = "1.0.101"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
|
||||
|
||||
[[package]]
|
||||
name = "ar_archive_writer"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b"
|
||||
dependencies = [
|
||||
"object",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-trait"
|
||||
version = "0.1.89"
|
||||
@ -874,6 +883,7 @@ dependencies = [
|
||||
"regex-syntax",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sqlparser",
|
||||
"url",
|
||||
"uuid",
|
||||
"xxhash-rust",
|
||||
@ -1040,6 +1050,15 @@ dependencies = [
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.37.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.21.3"
|
||||
@ -1377,6 +1396,16 @@ dependencies = [
|
||||
"unarray",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "psm"
|
||||
version = "0.1.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8"
|
||||
dependencies = [
|
||||
"ar_archive_writer",
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quick-error"
|
||||
version = "1.2.3"
|
||||
@ -1442,6 +1471,26 @@ dependencies = [
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "recursive"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0786a43debb760f491b1bc0269fe5e84155353c67482b9e60d0cfb596054b43e"
|
||||
dependencies = [
|
||||
"recursive-proc-macro-impl",
|
||||
"stacker",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "recursive-proc-macro-impl"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76009fbe0614077fc1a2ce255e3a1881a2e3a3527097d5dc6d8212c585e7e38b"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.18"
|
||||
@ -1669,12 +1718,35 @@ dependencies = [
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparser"
|
||||
version = "0.61.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dbf5ea8d4d7c808e1af1cbabebca9a2abe603bcefc22294c5b95018d53200cb7"
|
||||
dependencies = [
|
||||
"log",
|
||||
"recursive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
|
||||
|
||||
[[package]]
|
||||
name = "stacker"
|
||||
version = "0.1.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"psm",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stringprep"
|
||||
version = "0.1.5"
|
||||
@ -2323,6 +2395,15 @@ dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.60.2"
|
||||
|
||||
@ -23,6 +23,7 @@ indexmap = { version = "2.13.0", features = ["serde"] }
|
||||
moka = { version = "0.12.14", features = ["sync"] }
|
||||
xxhash-rust = { version = "0.8.15", features = ["xxh64"] }
|
||||
dashmap = "6.1.0"
|
||||
sqlparser = "0.61.0"
|
||||
|
||||
[dev-dependencies]
|
||||
pgrx-tests = "0.16.1"
|
||||
|
||||
226
GEMINI.md
226
GEMINI.md
@ -7,79 +7,197 @@
|
||||
JSPG operates by deeply integrating the JSON Schema Draft 2020-12 specification directly into the Postgres session lifecycle. It is built around three core pillars:
|
||||
* **Validator**: In-memory, near-instant JSON structural validation and type polymorphism routing.
|
||||
* **Merger**: Automatically traverse and UPSERT deeply nested JSON graphs into normalized relational tables.
|
||||
* **Queryer**: Compile JSON Schemas into static, cached SQL SPI `SELECT` plans for fetching full entities or isolated "Stems".
|
||||
* **Queryer**: Compile JSON Schemas into static, cached SQL SPI `SELECT` plans for fetching full entities or isolated ad-hoc object boundaries.
|
||||
|
||||
### 🎯 Goals
|
||||
1. **Draft 2020-12 Compliance**: Attempt to adhere to the official JSON Schema Draft 2020-12 specification.
|
||||
1. **Draft 2020-12 Based**: Attempt to adhere to the official JSON Schema Draft 2020-12 specification, while heavily augmenting it for strict structural typing.
|
||||
2. **Ultra-Fast Execution**: Compile schemas into optimized in-memory validation trees and cached SQL SPIs to bypass Postgres Query Builder overheads.
|
||||
3. **Connection-Bound Caching**: Leverage the PostgreSQL session lifecycle using an **Atomic Swap** pattern. Schemas are 100% frozen, completely eliminating locks during read access.
|
||||
4. **Structural Inheritance**: Support object-oriented schema design via Implicit Keyword Shadowing and virtual `$family` references natively mapped to Postgres table constraints.
|
||||
5. **Reactive Beats**: Provide natively generated "Stems" (isolated payload fragments) for dynamic websocket reactivity.
|
||||
5. **Reactive Beats**: Provide ultra-fast natively generated flat payloads mapping directly to the Dart topological state for dynamic websocket reactivity.
|
||||
|
||||
### Concurrency & Threading ("Immutable Graphs")
|
||||
To support high-throughput operations while allowing for runtime updates (e.g., during hot-reloading), JSPG uses an **Atomic Swap** pattern:
|
||||
1. **Parser Phase**: Schema JSONs are parsed into ordered `Schema` structs.
|
||||
2. **Compiler Phase**: The database iterates all parsed schemas and pre-computes native optimization maps (Descendants Map, Depths Map, Variations Map).
|
||||
3. **Immutable Validator**: The `Validator` struct immutably owns the `Database` registry and all its global maps. Schemas themselves are completely frozen; `$ref` strings are resolved dynamically at runtime using pre-computed O(1) maps.
|
||||
3. **Immutable AST Caching**: The `Validator` struct immutably owns the `Database` registry. Schemas themselves are frozen structurally, but utilize `OnceLock` interior mutability during the Compilation Phase to permanently cache resolved `type` inheritances, properties, and `compiled_edges` directly onto their AST nodes. This guarantees strict `O(1)` relationship and property validation execution at runtime without locking or recursive DB polling.
|
||||
4. **Lock-Free Reads**: Incoming operations acquire a read lock just long enough to clone the `Arc` inside an `RwLock<Option<Arc<Validator>>>`, ensuring zero blocking during schema updates.
|
||||
|
||||
### Global API Reference
|
||||
These functions operate on the global `GLOBAL_JSPG` engine instance and provide administrative boundaries:
|
||||
|
||||
* `jspg_setup(database jsonb) -> jsonb`: Initializes the engine. Deserializes the full database schema registry (types, enums, puncs, relations) from Postgres and compiles them into memory atomically.
|
||||
* `jspg_teardown() -> jsonb`: Clears the current session's engine instance from `GLOBAL_JSPG`, resetting the cache.
|
||||
* `jspg_schemas() -> jsonb`: Exports the fully compiled AST snapshot (including all inherited dependencies) out of `GLOBAL_JSPG` into standard JSON Schema representations.
|
||||
|
||||
---
|
||||
|
||||
## 2. Validator
|
||||
## 2. Schema Modeling (Punc Developer Guide)
|
||||
|
||||
JSPG augments standard JSON Schema 2020-12 to provide an opinionated, strict, and highly ergonomic Object-Oriented paradigm. Developers defining Punc Data Models should follow these conventions.
|
||||
|
||||
### Types of Types
|
||||
* **Table-Backed (Entity Types)**: Primarily defined in root type schemas. These represent physical Postgres tables.
|
||||
* They absolutely **require** an `$id`.
|
||||
* The schema conceptually requires a `type` discriminator at runtime so the engine knows what physical variation to interact with.
|
||||
* Can inherit other entity types to build lineage (e.g. `person` -> `organization` -> `entity`).
|
||||
* **Field-Backed (JSONB Bubbles)**: These are shapes that live entirely inside a Postgres JSONB column without being tied to a top-level table constraint.
|
||||
* **Global `$id` Promotion**: Utilizing explicit `$id` declarations promotes the schema to the Global Registry. This effectively creates strictly-typed code-generator universes (e.g., generating an `InvoiceNotificationMetadata` Dart class) operating cleanly inside unstructured Postgres JSONB columns.
|
||||
* They can re-use the standard `type` discriminator locally for `oneOf` polymorphism without conflicting with global Postgres Table constraints.
|
||||
|
||||
### Discriminators & The Dot Convention (A.B)
|
||||
In Punc, polymorphic targets like explicit tagged unions or STI (Single Table Inheritance) rely on discriminators. Because Punc favors universal consistency, a schema's data contract must be explicit and mathematically identical regardless of the routing context an endpoint consumes it through.
|
||||
|
||||
**The 2-Tier Paradigm**: The system inherently prevents "God Tables" by restricting routing to exactly two dimensions, guaranteeing absolute $O(1)$ lookups without ambiguity:
|
||||
1. **Vertical Routing (`type`)**: Identifies the specific Postgres Table lineage (e.g. `person` vs `organization`).
|
||||
2. **Horizontal Routing (`kind.type`)**: Natively evaluates Single Table Inheritance. The runtime dynamically concatenates `$kind.$type` to yield the namespace-protected schema `$id` (e.g. `light.person`), maintaining collision-free schema registration.
|
||||
|
||||
Therefore, any schema that participates in polymorphic discrimination MUST explicitly define its discriminator properties natively inside its `properties` block. However, to stay DRY and maintain flexible APIs, you **DO NOT** need to hardcode `const` values, nor should you add them to your `required` array. The Punc engine treats `type` and `kind` as **magic properties**.
|
||||
|
||||
**Magic Validation Constraints**:
|
||||
* **Dynamically Required**: The system inherently drives the need for their requirement. The Validator dynamically expects the discriminators and structurally bubbles `MISSING_TYPE` ultimata ONLY when a polymorphic router (`$family` / `oneOf`) dynamically requires them to resolve a path. You never manually put them in the JSON schema `required` block.
|
||||
* **Implicit Resolution**: When wrapped in `$family` or `oneOf`, the polymorphic router can mathematically parse the schema `$id` (e.g. `light.person`) and natively validate that `type` equals `"person"` and `kind` equals `"light"`, bubbling `CONST_VIOLATED` if they mismatch, all without you ever hardcoding `const` limitations.
|
||||
* **Generator Explicitness**: Because Postgres is the Single Source of Truth, forcing the explicit definition in `properties` initially guarantees the downstream Dart/Go code generators observe the fields and can cleanly serialize them dynamically back to the server.
|
||||
|
||||
For example, a schema representing `$id: "light.person"` must natively define its own structural boundaries:
|
||||
```json
|
||||
{
|
||||
"$id": "light.person",
|
||||
"type": "person",
|
||||
"properties": {
|
||||
"type": { "type": "string" },
|
||||
"kind": { "type": "string" }
|
||||
},
|
||||
"required": ["type", "kind"]
|
||||
}
|
||||
```
|
||||
|
||||
* **The Object Contract (Presence)**: The Object enforces its own structural integrity mechanically. Standard JSON Validation natively ensures `type` and `kind` are present, bubbling `REQUIRED_FIELD_MISSING` organically if omitted.
|
||||
* **The Dynamic Values (`db.types`)**: Because the `type` and `kind` properties technically exist, the Punc engine dynamically intercepts them during `validate_object`. It mathematically parses the schema `$id` (e.g. `light.person`) and natively validates that `type` equals `"person"` (or a valid descendant in `db.types`) and `kind` equals `"light"`, bubbling `CONST_VIOLATED` if they mismatch.
|
||||
* **The Routing Contract**: When wrapped in `$family` or `oneOf`, the polymorphic router can execute Lightning Fast $O(1)$ fast-paths by reading the payload's `type`/`kind` identifiers, and gracefully fallback to standard structural failure if omitted.
|
||||
|
||||
### Composition & Inheritance (The `type` keyword)
|
||||
Punc completely abandons the standard JSON Schema `$ref` keyword. Instead, it overloads the exact same `type` keyword used for primitives. A `"type"` in Punc is mathematically evaluated as either a Native Primitive (`"string"`, `"null"`) or a Custom Object Pointer (`"budget"`, `"user"`).
|
||||
* **Single Inheritance**: Setting `"type": "user"` acts exactly like an `extends` keyword. The schema borrows all fields and constraints from the `user` identity. During `jspg_setup`, the compiler recursively crawls the dependencies to map the physical Postgres table, permanently mapping its type restriction to `"object"` under the hood so JSON standards remain unbroken.
|
||||
* **Implicit Keyword Shadowing**: Unlike standard JSON Schema inheritance, local property definitions natively override and shadow inherited properties.
|
||||
* **Primitive Array Shorthand (Optionality)**: The `type` array syntax is heavily optimized for nullable fields. Defining `"type": ["budget", "null"]` natively builds a nullable struct, generating `Budget? budget;` in Dart. You can freely mix primitives like `["string", "number", "null"]`.
|
||||
* **Strict Array Constraint**: To explicitly prevent mathematically ambiguous Multiple Inheritance, a `type` array is strictly constrained to at most **ONE** Custom Object Pointer. Defining `"type": ["person", "organization"]` will intentionally trigger a fatal database compilation error natively instructing developers to build a proper tagged union (`oneOf`) instead.
|
||||
|
||||
### Polymorphism (`$family` and `oneOf`)
|
||||
Polymorphism is how an object boundary can dynamically take on entirely different shapes based on the payload provided at runtime.
|
||||
* **`$family` (Target-Based Polymorphism)**: An explicit Punc compiler macro instructing the database compiler to dynamically search its internal `db.descendants` registry and find all physical schemas that mathematically resolve to the target.
|
||||
* *Across Tables (Vertical)*: If `$family: entity` is requested, the payload's `type` field acts as the discriminator, dynamically routing to standard variations like `organization` or `person` spanning multiple Postgres tables.
|
||||
* *Single Table (Horizontal)*: If `$family: widget` is requested, the router explicitly evaluates the Dot Convention dynamically. If the payload possesses `"type": "widget"` and `"kind": "stock"`, the router mathematically resolves to the string `"stock.widget"` and routes exclusively to that explicit `JSPG` schema.
|
||||
* **`oneOf` (Strict Tagged Unions)**: A hardcoded array of JSON Schema candidate options. Punc strictly bans mathematical "Union of Sets" evaluation. Every `oneOf` candidate item MUST either be a pure primitive (`{ "type": "null" }`) or a user-defined Object Pointer providing a specific discriminator (e.g., `{ "type": "invoice_metadata" }`). This ensures validations remain pure $O(1)$ fast-paths and allows the Dart generator to emit pristine `sealed classes`.
|
||||
|
||||
### Conditionals (`cases`)
|
||||
Standard JSON Schema forces developers to write deeply nested `allOf` -> `if` -> `properties` blocks just to execute conditional branching. **JSPG completely abandons `allOf` and this practice.** For declarative business logic and structural mutations conditionally based upon property bounds, use the top-level `cases` array.
|
||||
|
||||
It evaluates as an **Independent Declarative Rules Engine**. Every `Case` block within the array is evaluated independently in parallel. For a given rule, if the `when` condition evaluates to true, its `then` schema is executed. If it evaluates to false, its `else` schema is executed (if present). To maintain strict standard JSON Schema compatibility internally, the `when` block utilizes pure JSON Schema `properties` definitions (e.g. `enum`, `const`) rather than injecting unstandardized MongoDB operators. Because `when`, `then`, and `else` are themselves standard schemas, they natively support nested `cases` to handle mutually exclusive `else if` architectures.
|
||||
|
||||
```json
|
||||
{
|
||||
"$id": "save_external_account",
|
||||
"cases": [
|
||||
{
|
||||
"when": {
|
||||
"properties": {
|
||||
"status": { "const": "unverified" }
|
||||
},
|
||||
"required": ["status"]
|
||||
},
|
||||
"then": {
|
||||
"required": ["amount_1", "amount_2"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"when": {
|
||||
"properties": { "kind": { "const": "credit" } },
|
||||
"required": ["kind"]
|
||||
},
|
||||
"then": {
|
||||
"required": ["details"]
|
||||
},
|
||||
"else": {
|
||||
"cases": [
|
||||
{
|
||||
"when": { "properties": { "kind": { "const": "checking" } }, "required": ["kind"] },
|
||||
"then": { "required": ["routing_number"] }
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Strict by Default & Extensibility
|
||||
* **Strictness**: By default, any property not explicitly defined in the schema causes a validation error (effectively enforcing `additionalProperties: false` globally).
|
||||
* **Extensibility (`extensible: true`)**: To allow a free-for-all of undefined properties, schemas must explicitly declare `"extensible": true`.
|
||||
* **Structured Additional Properties**: If `additionalProperties: {...}` is defined as a schema, arbitrary keys are allowed so long as their values match the defined type constraint.
|
||||
* **Inheritance Boundaries**: Strictness resets when crossing non-primitive `type` boundaries. A schema extending a strict parent remains strict unless it explicitly overrides with `"extensible": true`.
|
||||
|
||||
### Format Leniency for Empty Strings
|
||||
To simplify frontend form validation, format validators specifically for `uuid`, `date-time`, and `email` explicitly allow empty strings (`""`), treating them as "present but unset".
|
||||
|
||||
---
|
||||
|
||||
## 3. Database
|
||||
|
||||
The Database module manages the core execution graphs and structural compilation of the Postgres environment.
|
||||
|
||||
### Relational Edge Resolution
|
||||
When compiling nested object graphs or arrays, the JSPG engine must dynamically infer which Postgres Foreign Key constraint correctly bridges the parent to the nested schema. To guarantee deterministic SQL generation, it utilizes a strict, multi-step algebraic resolution process applied during the `OnceLock` Compilation phase:
|
||||
|
||||
1. **Graph Locality Boundary**: Before evaluating constraints, the engine ensures the parent and child types do not belong strictly to the same inheritance lineage (e.g., `invoice` -> `activity`). Structural inheritance edges are handled natively by the payload merger, so relational edge discovery is intentionally bypassed.
|
||||
2. **Structural Cardinality Filtration**: If the JSON Schema requires an Array collection (`{"type": "array"}`), JSPG mathematically rejects pure scalar Forward constraints (where the parent holds a single UUID pointer), logically narrowing the possibilities to Reverse (1:N) or Junction (M:M) constraints.
|
||||
3. **Exact Prefix Match**: If an explicitly prefixed Foreign Key (e.g. `fk_invoice_counterparty_entity` -> `prefix: "counterparty"`) directly matches the name of the requested schema property (e.g. `{"counterparty": {...}}`), it is instantly selected.
|
||||
4. **Ambiguity Elimination (M:M Twin Deduction)**: If multiple explicitly prefixed relations remain (which happens by design in Many-to-Many junction tables like `contact` or `role`), the compiler inspects the actual compiled child JSON schema AST. If it observes the child natively consumes one of the prefixes as an explicit outbound property (e.g. `contact` explicitly defining `{ "target": ... }`), it considers that arrow "used up". It mathematically deduces that its exact twin providing reverse ownership (`"source"`) MUST be the inbound link mapping from the parent.
|
||||
5. **Implicit Base Fallback (1:M)**: If no explicit prefix matches, and M:M deduction fails, the compiler filters for exactly one remaining relation with a `null` prefix (e.g. `fk_invoice_line_invoice` -> `prefix: null`). A `null` prefix mathematically denotes the core structural parent-child ownership edge and is used safely as a fallback.
|
||||
6. **Deterministic Abort**: If the engine exhausts all deduction pathways and the edge remains ambiguous, it explicitly aborts schema compilation (`returns None`) rather than silently generating unpredictable SQL.
|
||||
|
||||
### Ad-Hoc Schema Promotion
|
||||
To seamlessly support deeply nested, inline Object definitions that don't declare an explicit `$id`, JSPG aggressively promotes them to standalone topological entities during the database compilation phase.
|
||||
* **Hash Generation:** While evaluating the unified graph, if the compiler enters an `Object` or `Array` structure completely lacking an `$id`, it dynamically calculates a localized hash alias representing exactly its structural constraints.
|
||||
* **Promotion:** This inline chunk is mathematically elevated to its own `$id` in the `db.schemas` cache registry. This guarantees that $O(1)$ WebSockets or isolated queries can natively target any arbitrary sub-object of a massive database topology directly without recursively re-parsing its parent's AST block every read.
|
||||
|
||||
---
|
||||
|
||||
## 4. Validator
|
||||
|
||||
The Validator provides strict, schema-driven evaluation for the "Punc" architecture.
|
||||
|
||||
### API Reference
|
||||
* `jspg_setup(database jsonb) -> jsonb`: Loads and compiles the entire registry (types, enums, puncs, relations) atomically.
|
||||
* `mask_json_schema(schema_id text, instance jsonb) -> jsonb`: Validates and prunes unknown properties dynamically, returning masked data.
|
||||
* `jspg_validate(schema_id text, instance jsonb) -> jsonb`: Returns boolean-like success or structured errors.
|
||||
* `jspg_teardown() -> jsonb`: Clears the current session's schema cache.
|
||||
* `jspg_validate(schema_id text, instance jsonb) -> jsonb`: Validates the `instance` JSON payload strictly against the constraints of the registered `schema_id`. Returns boolean-like success or structured error codes.
|
||||
|
||||
### Custom Features & Deviations
|
||||
JSPG implements specific extensions to the Draft 2020-12 standard to support the Punc architecture's object-oriented needs while heavily optimizing for zero-runtime lookups.
|
||||
|
||||
* **Caching Strategy**: The Validator caches the pre-compiled `Database` registry in memory upon initialization (`jspg_setup`). This registry holds the comprehensive graph of schema boundaries, Types, ENUMs, and Foreign Key relationships, acting as the Single Source of Truth for all validation operations without polling Postgres.
|
||||
|
||||
#### A. Polymorphism & Referencing (`$ref`, `$family`, and Native Types)
|
||||
* **Native Type Discrimination (`variations`)**: Schemas defined inside a Postgres `type` are Entities. The validator securely and implicitly manages their `"type"` property. If an entity inherits from `user`, incoming JSON can safely define `{"type": "person"}` without errors, thanks to `compiled_variations` inheritance.
|
||||
* **Structural Inheritance & Viral Infection (`$ref`)**: `$ref` is used exclusively for structural inheritance, *never* for union creation. A Punc request schema that `$ref`s an Entity virally inherits all physical database polymorphism rules for that target.
|
||||
* **Shape Polymorphism (`$family`)**: Auto-expands polymorphic API lists based on an abstract Descendants Graph. If `{"$family": "widget"}` is used, JSPG evaluates the JSON against every schema that `$ref`s widget.
|
||||
* **Strict Matches & Depth Heuristic**: Polymorphic structures MUST match exactly **one** schema permutation. If multiple inherited struct permutations pass, JSPG applies the **Depth Heuristic Tie-Breaker**, selecting the candidate deepest in the inheritance tree.
|
||||
|
||||
#### B. Dot-Notation Schema Resolution & Database Mapping
|
||||
* **The Dot Convention**: When a schema represents a specific variation or shape of an underlying physical database `Type` (e.g., a "summary" of a "person"), its `$id` must adhere to a dot-notation suffix convention (e.g., `summary.person` or `full.person`).
|
||||
* **Entity Resolution**: The framework (Validator, Queryer, Merger) dynamically determines the backing PostgreSQL table structure by splitting the schema's `$id` (or `$ref`) by `.` and extracting the **last segment** (`next_back()`). If the last segment matches a known Database Type (like `person`), the framework natively applies that table's inheritance rules, variations, and physical foreign keys to the schema graph, regardless of the prefix.
|
||||
|
||||
#### C. Strict by Default & Extensibility
|
||||
* **Strictness**: By default, any property not explicitly defined in the schema causes a validation error (effectively enforcing `additionalProperties: false` globally).
|
||||
* **Extensibility (`extensible: true`)**: To allow a free-for-all of undefined properties, schemas must explicitly declare `"extensible": true`.
|
||||
* **Structured Additional Properties**: If `additionalProperties: {...}` is defined as a schema, arbitrary keys are allowed so long as their values match the defined type constraint.
|
||||
* **Inheritance Boundaries**: Strictness resets when crossing `$ref` boundaries. A schema extending a strict parent remains strict unless it explicitly overrides with `"extensible": true`.
|
||||
|
||||
#### D. Implicit Keyword Shadowing
|
||||
* **Inheritance (`$ref` + properties)**: Unlike standard JSON Schema, when a schema uses `$ref` alongside local properties, JSPG implements **Smart Merge**. Local constraints natively take precedence over (shadow) inherited constraints for the same keyword.
|
||||
* *Example*: If `entity` has `type: {const: "entity"}`, but `person` defines `type: {const: "person"}`, the local `person` const cleanly overrides the inherited one.
|
||||
* **Composition (`allOf`)**: When evaluating `allOf`, standard intersection rules apply seamlessly. No shadowing occurs, meaning all constraints from all branches must pass.
|
||||
|
||||
#### E. Format Leniency for Empty Strings
|
||||
To simplify frontend form validation, format validators specifically for `uuid`, `date-time`, and `email` explicitly allow empty strings (`""`), treating them as "present but unset".
|
||||
* **Discriminator Fast Paths & Extraction**: When executing a polymorphic node (`oneOf` or `$family`), the engine statically analyzes the incoming JSON payload for the literal `type` and `kind` string coordinates. It routes the evaluation specifically to matching candidates in $O(1)$ while returning `MISSING_TYPE` ultimata directly.
|
||||
* **Missing Type Ultimatum**: If an entity logically requires a discriminator and the JSON payload omits it, JSPG short-circuits branch execution entirely, bubbling a single, perfectly-pathed `MISSING_TYPE` error back to the UI natively to prevent confusing cascading failures.
|
||||
* **Golden Match Context**: When exactly one structural candidate perfectly maps a discriminator, the Validator exclusively cascades that specific structural error context directly to the user, stripping away all noise generated by other parallel schemas.
|
||||
|
||||
---
|
||||
|
||||
## 5. Merger
|
||||
|
||||
## 3. Merger
|
||||
The Merger provides an automated, high-performance graph synchronization engine. It orchestrates the complex mapping of nested JSON objects into normalized Postgres relational tables, honoring all inheritance and graph constraints.
|
||||
|
||||
The Merger provides an automated, high-performance graph synchronization engine via the `jspg_merge(cue JSONB)` API. It orchestrates the complex mapping of nested JSON objects into normalized Postgres relational tables, honoring all inheritance and graph constraints.
|
||||
### API Reference
|
||||
* `jspg_merge(schema_id text, data jsonb) -> jsonb`: Traverses the provided JSON payload according to the compiled relational map of `schema_id`. Dynamically builds and executes relational SQL UPSERT paths natively.
|
||||
|
||||
### Core Features
|
||||
|
||||
* **Caching Strategy**: The Merger leverages the `Validator`'s in-memory `Database` registry to instantly resolve Foreign Key mapping graphs. It additionally utilizes the concurrent `GLOBAL_JSPG` application memory (`DashMap`) to cache statically constructed SQL `SELECT` strings used during deduplication (`lk_`) and difference tracking calculations.
|
||||
* **Caching Strategy**: The Merger leverages the native `compiled_edges` permanently cached onto the Schema AST via `OnceLock` to instantly resolve Foreign Key mapping graphs natively in absolute `O(1)` time. It additionally utilizes the concurrent `GLOBAL_JSPG` application memory (`DashMap`) to cache statically constructed SQL `SELECT` strings used during deduplication (`lk_`) and difference tracking calculations.
|
||||
* **Deep Graph Merging**: The Merger walks arbitrary levels of deeply nested JSON schemas (e.g. tracking an `order`, its `customer`, and an array of its `lines`). It intelligently discovers the correct parent-to-child or child-to-parent Foreign Keys stored in the registry and automatically maps the UUIDs across the relationships during UPSERT.
|
||||
* **Prefix Foreign Key Matching**: Handles scenario where multiple relations point to the same table by using database Foreign Key constraint prefixes (`fk_`). For example, if a schema has `shipping_address` and `billing_address`, the merger resolves against `fk_shipping_address_entity` vs `fk_billing_address_entity` automatically to correctly route object properties.
|
||||
* **Dynamic Deduplication & Lookups**: If a nested object is provided without an `id`, the Merger utilizes Postgres `lk_` index constraints defined in the schema registry (e.g. `lk_person` mapped to `first_name` and `last_name`). It dynamically queries these unique matching constraints to discover the correct UUID to perform an UPDATE, preventing data duplication.
|
||||
* **Hierarchical Table Inheritance**: The Punc system uses distributed table inheritance (e.g. `person` inherits `user` inherits `organization` inherits `entity`). The Merger splits the incoming JSON payload and performs atomic row updates across *all* relevant tables in the lineage map.
|
||||
* **The Archive Paradigm**: Data is never deleted in the Punc system. The Merger securely enforces referential integrity by toggling the `archived` Boolean flag on the base `entity` table rather than issuing SQL `DELETE` commands.
|
||||
* **Change Tracking & Reactivity**: The Merger diffs the incoming JSON against the existing database row (utilizing static, `DashMap`-cached `lk_` SELECT string templates). Every detected change is recorded into the `agreego.change` audit table, tracking the user mapping. It then natively uses `pg_notify` to broadcast a completely flat row-level diff out to the Go WebSocket server for O(1) routing.
|
||||
* **Flat Structural Beats (Unidirectional Flow)**: The Merger purposefully DOES NOT trace or hydrate outbound Foreign Keys or nested parent structures during writes. It emits completely flat, mathematically perfect structural deltas via `pg_notify` representing only the exact Postgres rows that changed. This guarantees the write-path remains O(1) lightning fast. It is the strict responsibility of the upstream Punc Framework (the Go `Speaker`) to intercept these flat beats, evaluate them against active Websocket Schema Topologies, and dynamically issue targeted `jspg_query` reads to hydrate the exact contextual subgraphs required by listening clients.
|
||||
* **Pre-Order Notification Traversal**: To support proper topological hydration on the upstream Go Framework, the Merger decouples the `pg_notify` execution from the physical database write execution. The engine collects structural changes and explicitly fires `pg_notify` SQL statements in strict **Pre-Order** (Parent -> Relations -> Children). This guarantees that WebSocket clients receive the parent entity `Beat` prior to any nested child entities, ensuring stable unidirectional data flows without hydration race conditions.
|
||||
* **Many-to-Many Graph Edge Management**: Operates seamlessly with the global `agreego.relationship` table, allowing the system to represent and merge arbitrary reified M:M relationships directionally between any two entities.
|
||||
* **Sparse Updates**: Empty JSON strings `""` are directly bound as explicit SQL `NULL` directives to clear data, whilst omitted (missing) properties skip UPDATE execution entirely, ensuring partial UI submissions do not wipe out sibling fields.
|
||||
* **Unified Return Structure**: To eliminate UI hydration race conditions and multi-user duplication, `jspg_merge` explicitly strips the response graph and returns only the root `{ "id": "uuid" }` (or an array of IDs for list insertions). External APIs can then explicitly call read APIs to fetch the resulting graph, while the UI relies 100% implicitly on the flat `pg_notify` pipeline for reactive state synchronization.
|
||||
@ -87,35 +205,32 @@ The Merger provides an automated, high-performance graph synchronization engine
|
||||
|
||||
---
|
||||
|
||||
## 4. Queryer
|
||||
## 6. Queryer
|
||||
|
||||
The Queryer transforms Postgres into a pre-compiled Semantic Query Engine via the `jspg_query(schema_id text, cue jsonb)` API, designed to serve the exact shape of Punc responses directly via SQL.
|
||||
The Queryer transforms Postgres into a pre-compiled Semantic Query Engine, designed to serve the exact shape of Punc responses directly via SQL.
|
||||
|
||||
### API Reference
|
||||
* `jspg_query(schema_id text, filters jsonb) -> jsonb`: Compiles the JSON Schema AST of `schema_id` directly into pre-planned, nested multi-JOIN SQL execution trees. Processes `filters` structurally.
|
||||
|
||||
### Core Features
|
||||
|
||||
* **Caching Strategy (DashMap SQL Caching)**: The Queryer securely caches its compiled, static SQL string templates per schema permutation inside the `GLOBAL_JSPG` concurrent `DashMap`. This eliminates recursive AST schema crawling on consecutive requests. Furthermore, it evaluates the strings via Postgres SPI (Server Programming Interface) Prepared Statements, leveraging native database caching of execution plans for extreme performance.
|
||||
* **Schema-to-SQL Compilation**: Compiles JSON Schema ASTs spanning deep arrays directly into static, pre-planned SQL multi-JOIN queries. This explicitly features the `Smart Merge` evaluation engine which natively translates properties through `allOf` and `$ref` inheritances, mapping JSON fields specifically to their physical database table aliases during translation.
|
||||
* **Dynamic Filtering**: Binds parameters natively through `cue.filters` objects. The queryer enforces a strict, structured, MongoDB-style operator syntax to map incoming JSON request paths directly to their originating structural table columns.
|
||||
* **Schema-to-SQL Compilation**: Compiles JSON Schema ASTs spanning deep arrays directly into static, pre-planned SQL multi-JOIN queries. This explicitly features the `Smart Merge` evaluation engine which natively translates properties through `type` inheritances, mapping JSON fields specifically to their physical database table aliases during translation.
|
||||
* **Root Null-Stripping Optimization**: Unlike traditional nested document builders, the Queryer intelligently defers Postgres' natively recursive `jsonb_strip_nulls` execution to the absolute apex of the compiled query pipeline. The compiler organically layers millions of rapid `jsonb_build_object()` sub-query allocations instantly, wrapping them in a singular overarching pass. This strips all empty optionals uniformly before exiting the database, maximizing CPU throughput.
|
||||
* **Dynamic Filtering**: Binds parameters natively through `cue.filters` objects. The queryer enforces a strict, structured, MongoDB-style operator syntax to map incoming JSON request constraints directly to their originating structural table columns. Filters support both flat path notation (e.g., `"contacts/is_primary": {...}`) and deeply nested recursive JSON structures (e.g., `{"contacts": {"is_primary": {...}}}`). The queryer recursively traverses and flattens these structures at AST compilation time.
|
||||
* **Equality / Inequality**: `{"$eq": value}`, `{"$ne": value}` automatically map to `=` and `!=`.
|
||||
* **Comparison**: `{"$gt": ...}`, `{"$gte": ...}`, `{"$lt": ...}`, `{"$lte": ...}` directly compile to Postgres comparison operators (`>`, `>=`, `<`, `<=`).
|
||||
* **Array Inclusion**: `{"$in": [values]}`, `{"$nin": [values]}` use native `jsonb_array_elements_text()` bindings to enforce `IN` and `NOT IN` logic without runtime SQL injection risks.
|
||||
* **Text Matching (ILIKE)**: Evaluates `$eq` or `$ne` against string fields containing the `%` character natively into Postgres `ILIKE` and `NOT ILIKE` partial substring matches.
|
||||
* **Type Casting**: Safely resolves dynamic combinations by casting values instantly into the physical database types mapped in the schema (e.g. parsing `uuid` bindings to `::uuid`, formatting DateTimes to `::timestamptz`, and numbers to `::numeric`).
|
||||
### 4. The Stem Engine
|
||||
* **Polymorphic SQL Generation (`$family`)**: Compiles `$family` properties by analyzing the **Physical Database Variations**, *not* the schema descendants.
|
||||
* **The Dot Convention**: When a schema requests `$family: "target.schema"`, the compiler extracts the base type (e.g. `schema`) and looks up its Physical Table definition.
|
||||
* **Multi-Table Branching**: If the Physical Table is a parent to other tables (e.g. `organization` has variations `["organization", "bot", "person"]`), the compiler generates a dynamic `CASE WHEN type = '...' THEN ...` query, expanding into `JOIN`s for each variation.
|
||||
* **Single-Table Bypass**: If the Physical Table is a leaf node with only one variation (e.g. `person` has variations `["person"]`), the compiler cleanly bypasses `CASE` generation and compiles a simple `SELECT` across the base table, as all schema extensions (e.g. `light.person`, `full.person`) are guaranteed to reside in the exact same physical row.
|
||||
|
||||
Rather than over-fetching heavy Entity payloads and trimming them, Punc Framework Websockets depend on isolated subgraphs defined as **Stems**.
|
||||
A `Stem` is **not a JSON Pointer** or a physical path string (like `/properties/contacts/items/phone_number`). It is simply a declaration of an **Entity Type boundary** that exists somewhere within the compiled JSON Schema graph.
|
||||
---
|
||||
|
||||
Because `pg_notify` (Beats) fire rigidly from physical Postgres tables (e.g. `{"type": "phone_number"}`), the Go Framework only ever needs to know: "Does the schema `with_contacts.person` contain the `phone_number` Entity anywhere inside its tree?"
|
||||
|
||||
* **Initialization:** During startup (`jspg_stems()`), the database crawls all Schemas and maps out every physical Entity Type it references. It builds a flat dictionary of `Schema ID -> [Entity Types]` (e.g. `with_contacts.person -> ["person", "contact", "phone_number", "email_address"]`).
|
||||
* **Relationship Path Squashing:** When calculating nested string paths structurally to discover these boundaries, JSPG intentionally **omits** properties natively named `target` or `source` if they belong to a native database `relationship` table override. This ensures paths like `phone_numbers/contact/target` correctly register their beat resolution pattern as `phone_numbers/contact/phone_number`.
|
||||
* **The Go Router**: The Golang Punc framework uses this exact mapping to register WebSocket Beat frequencies exclusively on the Entity types discovered.
|
||||
* **The Queryer Execution**: When the Go framework asks JSPG to hydrate a partial `phone_number` stem for the `with_contacts.person` schema, instead of jumping through string paths, the SQL Compiler simply reaches into the Schema's AST using the `phone_number` Type string, pulls out exactly that entity's mapping rules, and returns a fully correlated `SELECT` block! This natively handles nested array properties injected via `oneOf` or array references efficiently bypassing runtime powerset expansion.
|
||||
* **Performance:** These Stem execution structures are fully statically compiled via SPI and map perfectly to `O(1)` real-time routing logic on the application tier.
|
||||
|
||||
|
||||
## 5. Testing & Execution Architecture
|
||||
## 7. Testing & Execution Architecture
|
||||
|
||||
JSPG implements a strict separation of concerns to bypass the need to boot a full PostgreSQL cluster for unit and integration testing. Because `pgrx::spi::Spi` directly links to PostgreSQL C-headers, building the library with `cargo test` on macOS normally results in fatal `dyld` crashes.
|
||||
|
||||
@ -127,7 +242,8 @@ To solve this, JSPG introduces the `DatabaseExecutor` trait inside `src/database
|
||||
### Universal Test Harness (`src/tests/`)
|
||||
JSPG abandons the standard `cargo pgrx test` model in favor of native OS testing for a >1000x speed increase (`~0.05s` execution).
|
||||
|
||||
1. **JSON Fixtures**: All core interactions are defined abstractly as JSON arrays in `fixtures/`. Each file contains suites of `TestCase` objects with an `action` flag (`validate`, `merge`, `query`).
|
||||
1. **JSON Fixtures**: All core interactions are defined abstractly as JSON arrays in `fixtures/`. Each file contains suites of `TestCase` objects with an `action` flag (`compile`, `validate`, `merge`, `query`).
|
||||
2. **`build.rs` Generator**: The build script traverses the JSON fixtures, extracts their structural identities, and generates standard `#[test]` blocks into `src/tests/fixtures.rs`.
|
||||
3. **Modular Test Dispatcher**: The `src/tests/types/` module deserializes the abstract JSON test payloads into `Suite`, `Case`, and `Expect` data structures.
|
||||
4. **Unit Context Execution**: When `cargo test` executes, the `Runner` feeds the JSON payloads directly into `case.execute(db)`. Because the tests run natively inside the module via `#[cfg(test)]`, the Rust compiler globally erases `pgrx` C-linkage, instantiates the `MockExecutor`, and allows for pure structural evaluation of complex database logic completely in memory.
|
||||
* The `compile` action natively asserts the exact output shape of `jspg_stems`, allowing structural and relationship mapping logic to be tested purely through JSON without writing brute-force manual tests in Rust.
|
||||
4. **Unit Context Execution**: When `cargo test` executes, the runner iterates the JSON payloads. Because the tests run natively inside the module via `#[cfg(test)]`, the Rust compiler globally erases `pgrx` C-linkage, instantiates the `MockExecutor`, and allows for pure structural evaluation of complex database logic completely in memory in parallel.
|
||||
|
||||
58
LOOKUP_VERIFICATION.md
Normal file
58
LOOKUP_VERIFICATION.md
Normal file
@ -0,0 +1,58 @@
|
||||
# The Postgres Partial Index Claiming Pattern
|
||||
|
||||
This document outlines the architectural strategy for securely handling the deduplication, claiming, and verification of sensitive unique identifiers (like email addresses or phone numbers) strictly through PostgreSQL without requiring "magical" logic in the JSPG `Merger`.
|
||||
|
||||
## The Denial of Service (DoS) Squatter Problem
|
||||
|
||||
If you enforce a standard `UNIQUE` constraint on an email address table:
|
||||
1. Malicious User A signs up and adds `jeff.bezos@amazon.com` to their account but never verifies it.
|
||||
2. The real Jeff Bezos signs up.
|
||||
3. The Database blocks Jeff because the unique string already exists.
|
||||
|
||||
The squatter has effectively locked the legitimate owner out of the system.
|
||||
|
||||
## The Anti-Patterns
|
||||
|
||||
1. **Global Entity Flags**: Adding a global `verified` boolean to the root `entity` table forces unrelated objects (like Widgets, Invoices, Orders) to carry verification logic that doesn't belong to them.
|
||||
2. **Magical Merger Logic**: Making JSPG's `Merger` aware of a specific `verified` field breaks its pure structural translation model. The Merger shouldn't need hardcoded conditional logic to know if it's allowed to update an unverified row.
|
||||
|
||||
## The Solution: Postgres Partial Unique Indexes
|
||||
|
||||
The holy grail is to defer all claiming logic natively to the database engine using a **Partial Unique Index**.
|
||||
|
||||
```sql
|
||||
-- Remove any existing global unique constraint on address first
|
||||
CREATE UNIQUE INDEX lk_email_address_verified
|
||||
ON email_address (address)
|
||||
WHERE verified_at IS NOT NULL;
|
||||
```
|
||||
|
||||
### How the Lifecycle Works Natively
|
||||
|
||||
1. **Unverified Squatters (Isolated Rows):**
|
||||
A hundred different users can send `{ "address": "jeff.bezos@amazon.com" }` through the `save_person` Punc. Because the Punc isolates them and doesn't allow setting the `verified_at` property natively (enforced by the JSON schema), the JSPG Merger inserts `NULL`.
|
||||
Postgres permits all 100 `INSERT` commands to succeed because the Partial Index **ignores** rows where `verified_at IS NULL`. Every user gets their own isolated, unverified row acting as a placeholder on their contact edge.
|
||||
|
||||
2. **The Verification Race (The Claim):**
|
||||
The real Jeff clicks his magic verification link. The backend securely executes a specific verification Punc that runs:
|
||||
`UPDATE email_address SET verified_at = now() WHERE id = <jeff's-real-uuid>`
|
||||
|
||||
3. **The Lockout:**
|
||||
Because Jeff's row now strictly satisfies `verified_at IS NOT NULL`, that exact row enters the Partial Unique Index.
|
||||
If any of the other 99 squatters *ever* click their fake verification links (or if a new user tries to verify that same email), PostgreSQL hits the index and violently throws a **Unique Constraint Violation**, flawlessly blocking them. The winner has permanently claimed the slot across the entire environment!
|
||||
|
||||
### Periodic Cleanup
|
||||
|
||||
Since unverified rows are allowed to accumulate without colliding, a simple Postgres `pg_cron` job or backend worker can sweep the table nightly to prune abandoned claims and reclaim storage:
|
||||
|
||||
```sql
|
||||
DELETE FROM email_address
|
||||
WHERE verified_at IS NULL
|
||||
AND created_at < NOW() - INTERVAL '24 hours';
|
||||
```
|
||||
|
||||
### Why this is the Ultimate Architecture
|
||||
|
||||
* The **JSPG Merger** remains mathematically pure. It doesn't know what `verified_at` is; it simply respects the database's structural limits (`O(1)` pure translation).
|
||||
* **Row-Level Security (RLS)** naturally blocks users from seeing or claiming each other's unverified rows.
|
||||
* You offload complex race-condition tracking entirely to the C-level PostgreSQL B-Tree indexing engine, guaranteeing absolute cluster-wide atomicity.
|
||||
@ -1,677 +0,0 @@
|
||||
[
|
||||
{
|
||||
"description": "allOf",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "allOf_0_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "allOf",
|
||||
"data": {
|
||||
"foo": "baz",
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "allOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch second",
|
||||
"data": {
|
||||
"foo": "baz"
|
||||
},
|
||||
"schema_id": "allOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch first",
|
||||
"data": {
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "allOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "wrong type",
|
||||
"data": {
|
||||
"foo": "baz",
|
||||
"bar": "quux"
|
||||
},
|
||||
"schema_id": "allOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with base schema",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "integer"
|
||||
},
|
||||
"baz": {},
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
],
|
||||
"allOf": [
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"baz": {
|
||||
"type": "null"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"baz"
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "allOf_1_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid",
|
||||
"data": {
|
||||
"foo": "quux",
|
||||
"bar": 2,
|
||||
"baz": null
|
||||
},
|
||||
"schema_id": "allOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch base schema",
|
||||
"data": {
|
||||
"foo": "quux",
|
||||
"baz": null
|
||||
},
|
||||
"schema_id": "allOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch first allOf",
|
||||
"data": {
|
||||
"bar": 2,
|
||||
"baz": null
|
||||
},
|
||||
"schema_id": "allOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch second allOf",
|
||||
"data": {
|
||||
"foo": "quux",
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "allOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch both",
|
||||
"data": {
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "allOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf simple types",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"maximum": 30
|
||||
},
|
||||
{
|
||||
"minimum": 20
|
||||
}
|
||||
],
|
||||
"$id": "allOf_2_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid",
|
||||
"data": 25,
|
||||
"schema_id": "allOf_2_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "mismatch one",
|
||||
"data": 35,
|
||||
"schema_id": "allOf_2_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with boolean schemas, all true",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
true,
|
||||
true
|
||||
],
|
||||
"$id": "allOf_3_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is valid",
|
||||
"data": "foo",
|
||||
"schema_id": "allOf_3_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with boolean schemas, some false",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
true,
|
||||
false
|
||||
],
|
||||
"$id": "allOf_4_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "allOf_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with boolean schemas, all false",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
false,
|
||||
false
|
||||
],
|
||||
"$id": "allOf_5_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "allOf_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with one empty schema",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{}
|
||||
],
|
||||
"$id": "allOf_6_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any data is valid",
|
||||
"data": 1,
|
||||
"schema_id": "allOf_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with two empty schemas",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{},
|
||||
{}
|
||||
],
|
||||
"$id": "allOf_7_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any data is valid",
|
||||
"data": 1,
|
||||
"schema_id": "allOf_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with the first empty schema",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{},
|
||||
{
|
||||
"type": "number"
|
||||
}
|
||||
],
|
||||
"$id": "allOf_8_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "number is valid",
|
||||
"data": 1,
|
||||
"schema_id": "allOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "string is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "allOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with the last empty schema",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{}
|
||||
],
|
||||
"$id": "allOf_9_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "number is valid",
|
||||
"data": 1,
|
||||
"schema_id": "allOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "string is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "allOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "nested allOf, to check validation semantics",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "allOf_10_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "null is valid",
|
||||
"data": null,
|
||||
"schema_id": "allOf_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "anything non-null is invalid",
|
||||
"data": 123,
|
||||
"schema_id": "allOf_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "extensible: true allows extra properties in allOf",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
}
|
||||
],
|
||||
"extensible": true,
|
||||
"$id": "allOf_12_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "extra property is valid",
|
||||
"data": {
|
||||
"foo": "baz",
|
||||
"bar": 2,
|
||||
"qux": 3
|
||||
},
|
||||
"schema_id": "allOf_12_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "strict by default with allOf properties",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"const": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"bar": {
|
||||
"const": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"$id": "allOf_13_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "validates merged properties",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "allOf_13_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "fails on extra property z explicitly",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"z": 3
|
||||
},
|
||||
"schema_id": "allOf_13_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "allOf with nested extensible: true (partial looseness)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"const": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"extensible": true,
|
||||
"properties": {
|
||||
"bar": {
|
||||
"const": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"$id": "allOf_14_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "extensible subschema doesn't make root extensible if root is strict",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"z": 3
|
||||
},
|
||||
"schema_id": "allOf_14_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "strictness: allOf composition with strict refs",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "partA"
|
||||
},
|
||||
{
|
||||
"$ref": "partB"
|
||||
}
|
||||
],
|
||||
"$id": "allOf_15_0"
|
||||
},
|
||||
{
|
||||
"$id": "partA",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "partB",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "merged instance is valid",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"name": "Me"
|
||||
},
|
||||
"schema_id": "allOf_15_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "extra property is invalid (root is strict)",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"name": "Me",
|
||||
"extra": 1
|
||||
},
|
||||
"schema_id": "allOf_15_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "partA mismatch is invalid",
|
||||
"data": {
|
||||
"id": 1,
|
||||
"name": "Me"
|
||||
},
|
||||
"schema_id": "allOf_15_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
183
fixtures/cases.json
Normal file
183
fixtures/cases.json
Normal file
@ -0,0 +1,183 @@
|
||||
[
|
||||
{
|
||||
"description": "Multi-Paradigm Declarative Cases",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "parallel_rules",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": { "type": "string" },
|
||||
"kind": { "type": "string" }
|
||||
},
|
||||
"cases": [
|
||||
{
|
||||
"when": { "properties": { "status": {"const": "unverified"} }, "required": ["status"] },
|
||||
"then": {
|
||||
"properties": {
|
||||
"amount_1": {"type": "number"},
|
||||
"amount_2": {"type": "number"}
|
||||
},
|
||||
"required": ["amount_1", "amount_2"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"when": { "properties": { "kind": {"const": "credit"} }, "required": ["kind"] },
|
||||
"then": {
|
||||
"properties": {
|
||||
"cvv": {"type": "number"}
|
||||
},
|
||||
"required": ["cvv"]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"$id": "mutually_exclusive",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string" }
|
||||
},
|
||||
"cases": [
|
||||
{
|
||||
"when": { "properties": { "type": {"const": "A"} }, "required": ["type"] },
|
||||
"then": {
|
||||
"properties": { "field_a": {"type": "number"} },
|
||||
"required": ["field_a"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"when": { "properties": { "type": {"const": "B"} }, "required": ["type"] },
|
||||
"then": {
|
||||
"properties": { "field_b": {"type": "number"} },
|
||||
"required": ["field_b"]
|
||||
},
|
||||
"else": {
|
||||
"properties": { "fallback_b": {"type": "number"} },
|
||||
"required": ["fallback_b"]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"$id": "nested_fallbacks",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"tier": { "type": "string" }
|
||||
},
|
||||
"cases": [
|
||||
{
|
||||
"when": { "properties": { "tier": {"const": "1"} }, "required": ["tier"] },
|
||||
"then": {
|
||||
"properties": { "basic": {"type": "number"} },
|
||||
"required": ["basic"]
|
||||
},
|
||||
"else": {
|
||||
"cases": [
|
||||
{
|
||||
"when": { "properties": { "tier": {"const": "2"} }, "required": ["tier"] },
|
||||
"then": {
|
||||
"properties": { "standard": {"type": "number"} },
|
||||
"required": ["standard"]
|
||||
},
|
||||
"else": {
|
||||
"properties": { "premium": {"type": "number"} },
|
||||
"required": ["premium"]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"$id": "missing_when",
|
||||
"type": "object",
|
||||
"cases": [
|
||||
{
|
||||
"else": {
|
||||
"properties": { "unconditional": {"type": "number"} },
|
||||
"required": ["unconditional"]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Fires only the first rule successfully",
|
||||
"data": { "status": "unverified", "amount_1": 1, "amount_2": 2 },
|
||||
"schema_id": "parallel_rules",
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Fires both independent parallel rules flawlessly",
|
||||
"data": { "status": "unverified", "kind": "credit", "amount_1": 1, "amount_2": 2, "cvv": 123 },
|
||||
"schema_id": "parallel_rules",
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Catches errors triggered concurrently by multiple independent blocked rules",
|
||||
"data": { "status": "unverified", "kind": "credit" },
|
||||
"schema_id": "parallel_rules",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{ "code": "REQUIRED_FIELD_MISSING", "details": { "path": "amount_1" } },
|
||||
{ "code": "REQUIRED_FIELD_MISSING", "details": { "path": "amount_2" } },
|
||||
{ "code": "REQUIRED_FIELD_MISSING", "details": { "path": "cvv" } }
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "STRICT_PROPERTY_VIOLATION throws if an un-triggered then property is submitted",
|
||||
"data": { "status": "verified", "cvv": 123 },
|
||||
"schema_id": "parallel_rules",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{ "code": "STRICT_PROPERTY_VIOLATION", "details": { "path": "cvv" } }
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Successfully routes mutually exclusive properties seamlessly",
|
||||
"data": { "type": "A", "field_a": 1, "fallback_b": 2 },
|
||||
"schema_id": "mutually_exclusive",
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Nested fallbacks execute seamlessly",
|
||||
"data": { "tier": "3", "premium": 1 },
|
||||
"schema_id": "nested_fallbacks",
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "A case without a when executes its else indiscriminately",
|
||||
"data": { "unconditional": 1 },
|
||||
"schema_id": "missing_when",
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "A case without a when throws if else unconditionally requires field",
|
||||
"data": { },
|
||||
"schema_id": "missing_when",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{ "code": "REQUIRED_FIELD_MISSING", "details": { "path": "unconditional" } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
410
fixtures/database.json
Normal file
410
fixtures/database.json
Normal file
@ -0,0 +1,410 @@
|
||||
[
|
||||
{
|
||||
"description": "Edge missing - 0 relations",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "type",
|
||||
"name": "org",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"org"
|
||||
],
|
||||
"variations": [
|
||||
"org"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.org",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"missing_users": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "full.user"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "22222222-2222-2222-2222-222222222222",
|
||||
"type": "type",
|
||||
"name": "user",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"user"
|
||||
],
|
||||
"variations": [
|
||||
"user"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.user",
|
||||
"type": "object",
|
||||
"properties": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"relations": []
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "throws EDGE_MISSING when 0 relations exist between org and user",
|
||||
"action": "compile",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "EDGE_MISSING"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Edge missing - array cardinality rejection",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "type",
|
||||
"name": "parent",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"parent"
|
||||
],
|
||||
"variations": [
|
||||
"parent"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.parent",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"children": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "full.child"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "22222222-2222-2222-2222-222222222222",
|
||||
"type": "type",
|
||||
"name": "child",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"child"
|
||||
],
|
||||
"variations": [
|
||||
"child"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.child",
|
||||
"type": "object",
|
||||
"properties": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"relations": [
|
||||
{
|
||||
"id": "33333333-3333-3333-3333-333333333333",
|
||||
"type": "relation",
|
||||
"constraint": "fk_parent_child",
|
||||
"source_type": "parent",
|
||||
"source_columns": [
|
||||
"child_id"
|
||||
],
|
||||
"destination_type": "child",
|
||||
"destination_columns": [
|
||||
"id"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "throws EDGE_MISSING because a Forward scaler edge cannot mathematically fulfill an Array collection",
|
||||
"action": "compile",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "EDGE_MISSING"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Ambiguous type relations - multiple unprefixed relations",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "type",
|
||||
"name": "invoice",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"invoice"
|
||||
],
|
||||
"variations": [
|
||||
"invoice"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.invoice",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"activities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "full.activity"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "22222222-2222-2222-2222-222222222222",
|
||||
"type": "type",
|
||||
"name": "activity",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"activity"
|
||||
],
|
||||
"variations": [
|
||||
"activity"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.activity",
|
||||
"type": "object",
|
||||
"properties": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"relations": [
|
||||
{
|
||||
"id": "33333333-3333-3333-3333-333333333333",
|
||||
"type": "relation",
|
||||
"constraint": "fk_activity_invoice_1",
|
||||
"source_type": "activity",
|
||||
"source_columns": [
|
||||
"invoice_id_1"
|
||||
],
|
||||
"destination_type": "invoice",
|
||||
"destination_columns": [
|
||||
"id"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "44444444-4444-4444-4444-444444444444",
|
||||
"type": "relation",
|
||||
"constraint": "fk_activity_invoice_2",
|
||||
"source_type": "activity",
|
||||
"source_columns": [
|
||||
"invoice_id_2"
|
||||
],
|
||||
"destination_type": "invoice",
|
||||
"destination_columns": [
|
||||
"id"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "throws AMBIGUOUS_TYPE_RELATIONS when fallback encounters multiple naked constraints",
|
||||
"action": "compile",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "AMBIGUOUS_TYPE_RELATIONS",
|
||||
"details": {
|
||||
"cause": "Multiple conflicting constraints found matching prefixes",
|
||||
"context": [
|
||||
{
|
||||
"constraint": "fk_activity_invoice_1"
|
||||
},
|
||||
{
|
||||
"constraint": "fk_activity_invoice_2"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Ambiguous type relations - M:M twin deduction failure",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "type",
|
||||
"name": "actor",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"actor"
|
||||
],
|
||||
"variations": [
|
||||
"actor"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "full.actor",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"ambiguous_edge": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "empty.junction"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "22222222-2222-2222-2222-222222222222",
|
||||
"type": "type",
|
||||
"name": "junction",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"junction"
|
||||
],
|
||||
"variations": [
|
||||
"junction"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "empty.junction",
|
||||
"type": "object",
|
||||
"properties": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"relations": [
|
||||
{
|
||||
"id": "33333333-3333-3333-3333-333333333333",
|
||||
"type": "relation",
|
||||
"constraint": "fk_junction_source_actor",
|
||||
"source_type": "junction",
|
||||
"source_columns": [
|
||||
"source_id"
|
||||
],
|
||||
"destination_type": "actor",
|
||||
"destination_columns": [
|
||||
"id"
|
||||
],
|
||||
"prefix": "source"
|
||||
},
|
||||
{
|
||||
"id": "44444444-4444-4444-4444-444444444444",
|
||||
"type": "relation",
|
||||
"constraint": "fk_junction_target_actor",
|
||||
"source_type": "junction",
|
||||
"source_columns": [
|
||||
"target_id"
|
||||
],
|
||||
"destination_type": "actor",
|
||||
"destination_columns": [
|
||||
"id"
|
||||
],
|
||||
"prefix": "target"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "throws AMBIGUOUS_TYPE_RELATIONS because child doesn't explicitly expose 'source' or 'target' for twin deduction",
|
||||
"action": "compile",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "AMBIGUOUS_TYPE_RELATIONS",
|
||||
"details": {
|
||||
"cause": "Multiple conflicting constraints found matching prefixes",
|
||||
"context": [
|
||||
{
|
||||
"constraint": "fk_junction_source_actor"
|
||||
},
|
||||
{
|
||||
"constraint": "fk_junction_target_actor"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Database type parse failed",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"id": [
|
||||
"must",
|
||||
"be",
|
||||
"string",
|
||||
"to",
|
||||
"fail"
|
||||
],
|
||||
"type": "type",
|
||||
"name": "failure",
|
||||
"module": "test",
|
||||
"source": "test",
|
||||
"hierarchy": [
|
||||
"failure"
|
||||
],
|
||||
"variations": [
|
||||
"failure"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "throws DATABASE_TYPE_PARSE_FAILED when metadata completely fails Serde typing",
|
||||
"action": "compile",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "DATABASE_TYPE_PARSE_FAILED"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@ -142,7 +142,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "CONST_VIOLATED",
|
||||
"path": "/con"
|
||||
"details": { "path": "con" }
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@ -1,217 +0,0 @@
|
||||
[
|
||||
{
|
||||
"description": "Entity families via pure $ref graph",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"variations": [
|
||||
"entity",
|
||||
"organization",
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "light.entity",
|
||||
"$ref": "entity"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "organization",
|
||||
"variations": [
|
||||
"organization",
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "organization",
|
||||
"$ref": "entity",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "person",
|
||||
"variations": [
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "person",
|
||||
"$ref": "organization",
|
||||
"properties": {
|
||||
"first_name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "light.person",
|
||||
"$ref": "light.entity"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"puncs": [
|
||||
{
|
||||
"name": "get_entities",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "get_entities.response",
|
||||
"$family": "entity"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "get_light_entities",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "get_light_entities.response",
|
||||
"$family": "light.entity"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Family matches base entity",
|
||||
"schema_id": "get_entities.response",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"type": "entity"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Family matches descendant person",
|
||||
"schema_id": "get_entities.response",
|
||||
"data": {
|
||||
"id": "2",
|
||||
"type": "person",
|
||||
"name": "ACME",
|
||||
"first_name": "John"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Graph family matches light.entity",
|
||||
"schema_id": "get_light_entities.response",
|
||||
"data": {
|
||||
"id": "3",
|
||||
"type": "entity"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Graph family matches light.person (because it $refs light.entity)",
|
||||
"schema_id": "get_light_entities.response",
|
||||
"data": {
|
||||
"id": "4",
|
||||
"type": "person"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Graph family excludes organization (missing light. schema that $refs light.entity)",
|
||||
"schema_id": "get_light_entities.response",
|
||||
"data": {
|
||||
"id": "5",
|
||||
"type": "organization",
|
||||
"name": "ACME"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "FAMILY_MISMATCH",
|
||||
"path": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Ad-hoc non-entity families (using normal json-schema object structures)",
|
||||
"database": {
|
||||
"puncs": [
|
||||
{
|
||||
"name": "get_widgets",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "widget",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"widget_type": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "special_widget",
|
||||
"$ref": "widget",
|
||||
"properties": {
|
||||
"special_feature": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "get_widgets.response",
|
||||
"$family": "widget"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Ad-hoc family matches strictly by shape (no magic variations for base schemas)",
|
||||
"schema_id": "get_widgets.response",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"widget_type": "special",
|
||||
"special_feature": "yes"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@ -1,594 +0,0 @@
|
||||
[
|
||||
{
|
||||
"description": "ignore if without then or else",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"const": 0
|
||||
},
|
||||
"$id": "if-then-else_0_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid when valid against lone if",
|
||||
"data": 0,
|
||||
"schema_id": "if-then-else_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid when invalid against lone if",
|
||||
"data": "hello",
|
||||
"schema_id": "if-then-else_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "ignore then without if",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"then": {
|
||||
"const": 0
|
||||
},
|
||||
"$id": "if-then-else_1_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid when valid against lone then",
|
||||
"data": 0,
|
||||
"schema_id": "if-then-else_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid when invalid against lone then",
|
||||
"data": "hello",
|
||||
"schema_id": "if-then-else_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "ignore else without if",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"else": {
|
||||
"const": 0
|
||||
},
|
||||
"$id": "if-then-else_2_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid when valid against lone else",
|
||||
"data": 0,
|
||||
"schema_id": "if-then-else_2_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid when invalid against lone else",
|
||||
"data": "hello",
|
||||
"schema_id": "if-then-else_2_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "if and then without else",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"exclusiveMaximum": 0
|
||||
},
|
||||
"then": {
|
||||
"minimum": -10
|
||||
},
|
||||
"$id": "if-then-else_3_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid through then",
|
||||
"data": -1,
|
||||
"schema_id": "if-then-else_3_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "invalid through then",
|
||||
"data": -100,
|
||||
"schema_id": "if-then-else_3_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid when if test fails",
|
||||
"data": 3,
|
||||
"schema_id": "if-then-else_3_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "if and else without then",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"exclusiveMaximum": 0
|
||||
},
|
||||
"else": {
|
||||
"multipleOf": 2
|
||||
},
|
||||
"$id": "if-then-else_4_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid when if test passes",
|
||||
"data": -1,
|
||||
"schema_id": "if-then-else_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid through else",
|
||||
"data": 4,
|
||||
"schema_id": "if-then-else_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "invalid through else",
|
||||
"data": 3,
|
||||
"schema_id": "if-then-else_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "validate against correct branch, then vs else",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"exclusiveMaximum": 0
|
||||
},
|
||||
"then": {
|
||||
"minimum": -10
|
||||
},
|
||||
"else": {
|
||||
"multipleOf": 2
|
||||
},
|
||||
"$id": "if-then-else_5_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid through then",
|
||||
"data": -1,
|
||||
"schema_id": "if-then-else_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "invalid through then",
|
||||
"data": -100,
|
||||
"schema_id": "if-then-else_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid through else",
|
||||
"data": 4,
|
||||
"schema_id": "if-then-else_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "invalid through else",
|
||||
"data": 3,
|
||||
"schema_id": "if-then-else_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "non-interference across combined schemas",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"allOf": [
|
||||
{
|
||||
"if": {
|
||||
"exclusiveMaximum": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"then": {
|
||||
"minimum": -10
|
||||
}
|
||||
},
|
||||
{
|
||||
"else": {
|
||||
"multipleOf": 2
|
||||
}
|
||||
}
|
||||
],
|
||||
"$id": "if-then-else_6_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid, but would have been invalid through then",
|
||||
"data": -100,
|
||||
"schema_id": "if-then-else_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "valid, but would have been invalid through else",
|
||||
"data": 3,
|
||||
"schema_id": "if-then-else_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "if with boolean schema true",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": true,
|
||||
"then": {
|
||||
"const": "then"
|
||||
},
|
||||
"else": {
|
||||
"const": "else"
|
||||
},
|
||||
"$id": "if-then-else_7_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "boolean schema true in if always chooses the then path (valid)",
|
||||
"data": "then",
|
||||
"schema_id": "if-then-else_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "boolean schema true in if always chooses the then path (invalid)",
|
||||
"data": "else",
|
||||
"schema_id": "if-then-else_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "if with boolean schema false",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": false,
|
||||
"then": {
|
||||
"const": "then"
|
||||
},
|
||||
"else": {
|
||||
"const": "else"
|
||||
},
|
||||
"$id": "if-then-else_8_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "boolean schema false in if always chooses the else path (invalid)",
|
||||
"data": "then",
|
||||
"schema_id": "if-then-else_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "boolean schema false in if always chooses the else path (valid)",
|
||||
"data": "else",
|
||||
"schema_id": "if-then-else_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "if appears at the end when serialized (keyword processing sequence)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"then": {
|
||||
"const": "yes"
|
||||
},
|
||||
"else": {
|
||||
"const": "other"
|
||||
},
|
||||
"if": {
|
||||
"maxLength": 4
|
||||
},
|
||||
"$id": "if-then-else_9_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "yes redirects to then and passes",
|
||||
"data": "yes",
|
||||
"schema_id": "if-then-else_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "other redirects to else and passes",
|
||||
"data": "other",
|
||||
"schema_id": "if-then-else_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "no redirects to then and fails",
|
||||
"data": "no",
|
||||
"schema_id": "if-then-else_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "invalid redirects to else and fails",
|
||||
"data": "invalid",
|
||||
"schema_id": "if-then-else_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "then: false fails when condition matches",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"const": 1
|
||||
},
|
||||
"then": false,
|
||||
"$id": "if-then-else_10_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "matches if → then=false → invalid",
|
||||
"data": 1,
|
||||
"schema_id": "if-then-else_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "does not match if → then ignored → valid",
|
||||
"data": 2,
|
||||
"schema_id": "if-then-else_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "else: false fails when condition does not match",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"const": 1
|
||||
},
|
||||
"else": false,
|
||||
"$id": "if-then-else_11_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "matches if → else ignored → valid",
|
||||
"data": 1,
|
||||
"schema_id": "if-then-else_11_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "does not match if → else executes → invalid",
|
||||
"data": 2,
|
||||
"schema_id": "if-then-else_11_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "extensible: true allows extra properties in if-then-else",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"const": 1
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
},
|
||||
"then": {
|
||||
"properties": {
|
||||
"bar": {
|
||||
"const": 2
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
"extensible": true,
|
||||
"$id": "if-then-else_12_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "extra property is valid (matches if and then)",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"extra": "prop"
|
||||
},
|
||||
"schema_id": "if-then-else_12_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "strict by default with if-then properties",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"if": {
|
||||
"properties": {
|
||||
"foo": {
|
||||
"const": 1
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
},
|
||||
"then": {
|
||||
"properties": {
|
||||
"bar": {
|
||||
"const": 2
|
||||
}
|
||||
}
|
||||
},
|
||||
"$id": "if-then-else_13_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid match (foo + bar)",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "if-then-else_13_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "fails on extra property z explicitly",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"z": 3
|
||||
},
|
||||
"schema_id": "if-then-else_13_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
219
fixtures/invoice.json
Normal file
219
fixtures/invoice.json
Normal file
@ -0,0 +1,219 @@
|
||||
[
|
||||
{
|
||||
"description": "Invoice Attachment Reproducer",
|
||||
"database": {
|
||||
"puncs": [
|
||||
{
|
||||
"name": "get_invoice",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "get_invoice.response",
|
||||
"oneOf": [
|
||||
{ "type": "invoice" },
|
||||
{ "type": "null" }
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"enums": [],
|
||||
"relations": [
|
||||
{
|
||||
"id": "10000000-0000-0000-0000-000000000001",
|
||||
"type": "relation",
|
||||
"constraint": "fk_attachment_attachable_entity",
|
||||
"source_type": "attachment",
|
||||
"source_columns": ["attachable_id", "attachable_type"],
|
||||
"destination_type": "entity",
|
||||
"destination_columns": ["id", "type"],
|
||||
"prefix": null
|
||||
}
|
||||
],
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": { "type": "string", "format": "uuid" },
|
||||
"type": { "type": "string" },
|
||||
"archived": { "type": "boolean" },
|
||||
"created_at": { "type": "string", "format": "date-time" }
|
||||
}
|
||||
}
|
||||
],
|
||||
"hierarchy": ["entity"],
|
||||
"variations": ["entity", "activity", "invoice", "attachment"],
|
||||
"fields": ["id", "type", "archived", "created_at"],
|
||||
"grouped_fields": {
|
||||
"entity": ["id", "type", "archived", "created_at"]
|
||||
},
|
||||
"field_types": {
|
||||
"id": "uuid",
|
||||
"type": "text",
|
||||
"archived": "boolean",
|
||||
"created_at": "timestamptz"
|
||||
},
|
||||
"lookup_fields": [],
|
||||
"historical": false,
|
||||
"notify": false,
|
||||
"relationship": false
|
||||
},
|
||||
{
|
||||
"name": "activity",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "activity",
|
||||
"type": "entity",
|
||||
"properties": {
|
||||
"start_date": { "type": "string", "format": "date-time" }
|
||||
}
|
||||
}
|
||||
],
|
||||
"hierarchy": ["activity", "entity"],
|
||||
"variations": ["activity", "invoice"],
|
||||
"fields": ["id", "type", "archived", "created_at", "start_date"],
|
||||
"grouped_fields": {
|
||||
"entity": ["id", "type", "archived", "created_at"],
|
||||
"activity": ["start_date"]
|
||||
},
|
||||
"field_types": {
|
||||
"id": "uuid",
|
||||
"type": "text",
|
||||
"archived": "boolean",
|
||||
"created_at": "timestamptz",
|
||||
"start_date": "timestamptz"
|
||||
},
|
||||
"lookup_fields": [],
|
||||
"historical": false,
|
||||
"notify": false,
|
||||
"relationship": false
|
||||
},
|
||||
{
|
||||
"name": "invoice",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "invoice",
|
||||
"type": "activity",
|
||||
"properties": {
|
||||
"status": { "type": "string" },
|
||||
"attachments": {
|
||||
"type": "array",
|
||||
"items": { "type": "attachment" }
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"hierarchy": ["invoice", "activity", "entity"],
|
||||
"variations": ["invoice"],
|
||||
"fields": ["id", "type", "archived", "created_at", "start_date", "status"],
|
||||
"grouped_fields": {
|
||||
"entity": ["id", "type", "archived", "created_at"],
|
||||
"activity": ["start_date"],
|
||||
"invoice": ["status"]
|
||||
},
|
||||
"field_types": {
|
||||
"id": "uuid",
|
||||
"type": "text",
|
||||
"archived": "boolean",
|
||||
"created_at": "timestamptz",
|
||||
"start_date": "timestamptz",
|
||||
"status": "text"
|
||||
},
|
||||
"lookup_fields": [],
|
||||
"historical": false,
|
||||
"notify": false,
|
||||
"relationship": false
|
||||
},
|
||||
{
|
||||
"name": "attachment",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "attachment",
|
||||
"type": "entity",
|
||||
"properties": {
|
||||
"name": { "type": "string" },
|
||||
"attachable_id": { "type": "string", "format": "uuid" },
|
||||
"attachable_type": { "type": "string" },
|
||||
"kind": { "type": "string" },
|
||||
"file": { "type": "string" },
|
||||
"data": { "type": "object" }
|
||||
}
|
||||
}
|
||||
],
|
||||
"hierarchy": ["attachment", "entity"],
|
||||
"variations": ["attachment"],
|
||||
"fields": ["id", "type", "archived", "created_at", "attachable_id", "attachable_type", "kind", "file", "data", "name"],
|
||||
"grouped_fields": {
|
||||
"entity": ["id", "type", "archived", "created_at"],
|
||||
"attachment": ["attachable_id", "attachable_type", "kind", "file", "data", "name"]
|
||||
},
|
||||
"field_types": {
|
||||
"id": "uuid",
|
||||
"type": "text",
|
||||
"archived": "boolean",
|
||||
"created_at": "timestamptz",
|
||||
"attachable_id": "uuid",
|
||||
"attachable_type": "text",
|
||||
"kind": "text",
|
||||
"file": "text",
|
||||
"data": "jsonb",
|
||||
"name": "text"
|
||||
},
|
||||
"lookup_fields": [],
|
||||
"historical": false,
|
||||
"notify": false,
|
||||
"relationship": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Invoice with an empty array of attachments",
|
||||
"schema_id": "get_invoice.response",
|
||||
"data": {
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "invoice",
|
||||
"archived": false,
|
||||
"created_at": "2023-01-01T00:00:00Z",
|
||||
"status": "draft",
|
||||
"attachments": []
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Invoice with a valid attachment with null data",
|
||||
"schema_id": "get_invoice.response",
|
||||
"data": {
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "invoice",
|
||||
"archived": false,
|
||||
"created_at": "2023-01-01T00:00:00Z",
|
||||
"status": "draft",
|
||||
"attachments": [
|
||||
{
|
||||
"id": "22222222-2222-2222-2222-222222222222",
|
||||
"type": "attachment",
|
||||
"archived": false,
|
||||
"created_at": "2023-01-01T00:00:00Z",
|
||||
"name": "receipt",
|
||||
"attachable_id": "11111111-1111-1111-1111-111111111111",
|
||||
"attachable_type": "invoice",
|
||||
"kind": "document",
|
||||
"file": "path/to/doc.pdf"
|
||||
}
|
||||
]
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@ -141,13 +141,13 @@
|
||||
"items": false,
|
||||
"prefixItems": [
|
||||
{
|
||||
"$ref": "item"
|
||||
"type": "item"
|
||||
},
|
||||
{
|
||||
"$ref": "item"
|
||||
"type": "item"
|
||||
},
|
||||
{
|
||||
"$ref": "item"
|
||||
"type": "item"
|
||||
}
|
||||
],
|
||||
"$id": "items_3_0"
|
||||
@ -158,10 +158,10 @@
|
||||
"items": false,
|
||||
"prefixItems": [
|
||||
{
|
||||
"$ref": "sub-item"
|
||||
"type": "sub-item"
|
||||
},
|
||||
{
|
||||
"$ref": "sub-item"
|
||||
"type": "sub-item"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
@ -12,7 +12,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"$ref": "base_0",
|
||||
"type": "base_0",
|
||||
"properties": {
|
||||
"child_prop": {
|
||||
"type": "string"
|
||||
@ -47,8 +47,8 @@
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "TYPE_MISMATCH",
|
||||
"path": "/base_prop"
|
||||
"code": "INVALID_TYPE",
|
||||
"details": { "path": "base_prop" }
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -71,7 +71,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"$ref": "base_1",
|
||||
"type": "base_1",
|
||||
"properties": {
|
||||
"b": {
|
||||
"type": "string"
|
||||
@ -109,7 +109,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"path": "/a"
|
||||
"details": { "path": "a" }
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -126,7 +126,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"path": "/b"
|
||||
"details": { "path": "b" }
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -154,7 +154,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"$ref": "base_2",
|
||||
"type": "base_2",
|
||||
"properties": {
|
||||
"child_dep": {
|
||||
"type": "string"
|
||||
@ -195,8 +195,8 @@
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "DEPENDENCY_FAILED",
|
||||
"path": "/base_dep"
|
||||
"code": "DEPENDENCY_MISSING",
|
||||
"details": { "path": "" }
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -213,8 +213,8 @@
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "DEPENDENCY_FAILED",
|
||||
"path": "/child_dep"
|
||||
"code": "DEPENDENCY_MISSING",
|
||||
"details": { "path": "" }
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -241,7 +241,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"$ref": "base_3",
|
||||
"type": "base_3",
|
||||
"properties": {
|
||||
"c": {
|
||||
"type": "string"
|
||||
|
||||
1460
fixtures/merger.json
1460
fixtures/merger.json
File diff suppressed because it is too large
Load Diff
167
fixtures/objectTypes.json
Normal file
167
fixtures/objectTypes.json
Normal file
@ -0,0 +1,167 @@
|
||||
[
|
||||
{
|
||||
"description": "Strict Inheritance",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "parent_type",
|
||||
"type": "object",
|
||||
"properties": {"a": {"type": "integer"}},
|
||||
"required": ["a"]
|
||||
},
|
||||
{
|
||||
"$id": "child_type",
|
||||
"type": "parent_type",
|
||||
"properties": {"b": {"type": "integer"}}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid child inherits properties and strictness",
|
||||
"schema_id": "child_type",
|
||||
"data": {"a": 1, "b": 2},
|
||||
"action": "validate",
|
||||
"expect": {"success": true}
|
||||
},
|
||||
{
|
||||
"description": "missing inherited required property fails",
|
||||
"schema_id": "child_type",
|
||||
"data": {"b": 2},
|
||||
"action": "validate",
|
||||
"expect": {"success": false}
|
||||
},
|
||||
{
|
||||
"description": "additional properties fail due to inherited strictness",
|
||||
"schema_id": "child_type",
|
||||
"data": {"a": 1, "b": 2, "c": 3},
|
||||
"action": "validate",
|
||||
"expect": {"success": false}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Local Shadowing (Composition & Proxies)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "budget",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"max": {"type": "integer", "maximum": 100}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "custom_budget",
|
||||
"type": "budget",
|
||||
"properties": {
|
||||
"max": {"type": "integer", "maximum": 50}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "shadowing override applies (50 is locally allowed)",
|
||||
"schema_id": "custom_budget",
|
||||
"data": {"max": 40},
|
||||
"action": "validate",
|
||||
"expect": {"success": true}
|
||||
},
|
||||
{
|
||||
"description": "shadowing override applies natively, rejecting 60 even though parent allowed 100",
|
||||
"schema_id": "custom_budget",
|
||||
"data": {"max": 60},
|
||||
"action": "validate",
|
||||
"expect": {"success": false}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Inline id Resolution",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "api.request",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"inline_obj": {
|
||||
"$id": "inline_nested",
|
||||
"type": "object",
|
||||
"properties": {"foo": {"type": "string"}},
|
||||
"required": ["foo"]
|
||||
},
|
||||
"proxy_obj": {
|
||||
"type": "inline_nested"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "proxy resolves and validates to the inline component",
|
||||
"schema_id": "api.request",
|
||||
"data": {
|
||||
"inline_obj": {"foo": "bar"},
|
||||
"proxy_obj": {"foo": "baz"}
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {"success": true}
|
||||
},
|
||||
{
|
||||
"description": "proxy resolves and catches violation targeting inline component constraints",
|
||||
"schema_id": "api.request",
|
||||
"data": {
|
||||
"inline_obj": {"foo": "bar"},
|
||||
"proxy_obj": {}
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {"success": false}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Primitive Array Shorthand (Optionality)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "invoice",
|
||||
"type": "object",
|
||||
"properties": {"amount": {"type": "integer"}},
|
||||
"required": ["amount"]
|
||||
},
|
||||
{
|
||||
"$id": "request",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"inv": {"type": ["invoice", "null"]}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "valid object passes shorthand inheritance check",
|
||||
"schema_id": "request",
|
||||
"data": {"inv": {"amount": 5}},
|
||||
"action": "validate",
|
||||
"expect": {"success": true}
|
||||
},
|
||||
{
|
||||
"description": "null passes shorthand inheritance check",
|
||||
"schema_id": "request",
|
||||
"data": {"inv": null},
|
||||
"action": "validate",
|
||||
"expect": {"success": true}
|
||||
},
|
||||
{
|
||||
"description": "invalid object fails inner constraints safely",
|
||||
"schema_id": "request",
|
||||
"data": {"inv": {"amount": "string"}},
|
||||
"action": "validate",
|
||||
"expect": {"success": false}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@ -1,670 +0,0 @@
|
||||
[
|
||||
{
|
||||
"description": "oneOf",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "integer"
|
||||
},
|
||||
{
|
||||
"minimum": 2
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_0_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "first oneOf valid",
|
||||
"data": 1,
|
||||
"schema_id": "oneOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "second oneOf valid",
|
||||
"data": 2.5,
|
||||
"schema_id": "oneOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both oneOf valid",
|
||||
"data": 3,
|
||||
"schema_id": "oneOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "neither oneOf valid",
|
||||
"data": 1.5,
|
||||
"schema_id": "oneOf_0_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with base schema",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"type": "string",
|
||||
"oneOf": [
|
||||
{
|
||||
"minLength": 2
|
||||
},
|
||||
{
|
||||
"maxLength": 4
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_1_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "mismatch base schema",
|
||||
"data": 3,
|
||||
"schema_id": "oneOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "one oneOf valid",
|
||||
"data": "foobar",
|
||||
"schema_id": "oneOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both oneOf valid",
|
||||
"data": "foo",
|
||||
"schema_id": "oneOf_1_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with boolean schemas, all true",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
true,
|
||||
true,
|
||||
true
|
||||
],
|
||||
"$id": "oneOf_2_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "oneOf_2_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with boolean schemas, one true",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
true,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"$id": "oneOf_3_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is valid",
|
||||
"data": "foo",
|
||||
"schema_id": "oneOf_3_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with boolean schemas, more than one true",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
true,
|
||||
true,
|
||||
false
|
||||
],
|
||||
"$id": "oneOf_4_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "oneOf_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with boolean schemas, all false",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
false,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"$id": "oneOf_5_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "oneOf_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf complex types",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_6_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "first oneOf valid (complex)",
|
||||
"data": {
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "oneOf_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "second oneOf valid (complex)",
|
||||
"data": {
|
||||
"foo": "baz"
|
||||
},
|
||||
"schema_id": "oneOf_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both oneOf valid (complex)",
|
||||
"data": {
|
||||
"foo": "baz",
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "oneOf_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "neither oneOf valid (complex)",
|
||||
"data": {
|
||||
"foo": 2,
|
||||
"bar": "quux"
|
||||
},
|
||||
"schema_id": "oneOf_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with empty schema",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{}
|
||||
],
|
||||
"$id": "oneOf_7_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "one valid - valid",
|
||||
"data": "foo",
|
||||
"schema_id": "oneOf_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both valid - invalid",
|
||||
"data": 123,
|
||||
"schema_id": "oneOf_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with required",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"foo": true,
|
||||
"bar": true,
|
||||
"baz": true
|
||||
},
|
||||
"oneOf": [
|
||||
{
|
||||
"required": [
|
||||
"foo",
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"required": [
|
||||
"foo",
|
||||
"baz"
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_8_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "both invalid - invalid",
|
||||
"data": {
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "oneOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "first valid - valid",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "oneOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "second valid - valid",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"baz": 3
|
||||
},
|
||||
"schema_id": "oneOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both valid - invalid",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"baz": 3
|
||||
},
|
||||
"schema_id": "oneOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "extra property invalid (strict)",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"extra": 3
|
||||
},
|
||||
"schema_id": "oneOf_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with required (extensible)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"type": "object",
|
||||
"extensible": true,
|
||||
"oneOf": [
|
||||
{
|
||||
"required": [
|
||||
"foo",
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"required": [
|
||||
"foo",
|
||||
"baz"
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_9_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "both invalid - invalid",
|
||||
"data": {
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "oneOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "first valid - valid",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2
|
||||
},
|
||||
"schema_id": "oneOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "second valid - valid",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"baz": 3
|
||||
},
|
||||
"schema_id": "oneOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both valid - invalid",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"baz": 3
|
||||
},
|
||||
"schema_id": "oneOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "extra properties are valid (extensible)",
|
||||
"data": {
|
||||
"foo": 1,
|
||||
"bar": 2,
|
||||
"extra": "value"
|
||||
},
|
||||
"schema_id": "oneOf_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "oneOf with missing optional property",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"bar": true,
|
||||
"baz": true
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"foo": true
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_10_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "first oneOf valid",
|
||||
"data": {
|
||||
"bar": 8
|
||||
},
|
||||
"schema_id": "oneOf_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "second oneOf valid",
|
||||
"data": {
|
||||
"foo": "foo"
|
||||
},
|
||||
"schema_id": "oneOf_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "both oneOf valid",
|
||||
"data": {
|
||||
"foo": "foo",
|
||||
"bar": 8
|
||||
},
|
||||
"schema_id": "oneOf_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "neither oneOf valid",
|
||||
"data": {
|
||||
"baz": "quux"
|
||||
},
|
||||
"schema_id": "oneOf_10_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "nested oneOf, to check validation semantics",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"$id": "oneOf_11_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "null is valid",
|
||||
"data": null,
|
||||
"schema_id": "oneOf_11_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "anything non-null is invalid",
|
||||
"data": 123,
|
||||
"schema_id": "oneOf_11_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "extensible: true allows extra properties in oneOf",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"bar": {
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"bar"
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"foo"
|
||||
]
|
||||
}
|
||||
],
|
||||
"extensible": true,
|
||||
"$id": "oneOf_12_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "extra property is valid (matches first option)",
|
||||
"data": {
|
||||
"bar": 2,
|
||||
"extra": "prop"
|
||||
},
|
||||
"schema_id": "oneOf_12_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
367
fixtures/paths.json
Normal file
367
fixtures/paths.json
Normal file
@ -0,0 +1,367 @@
|
||||
[
|
||||
{
|
||||
"description": "Hybrid Array Pathing",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "hybrid_pathing",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primitives": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"ad_hoc_objects": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name"
|
||||
]
|
||||
}
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "number",
|
||||
"minimum": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"deep_entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"nested": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"flag": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "happy path passes structural validation",
|
||||
"data": {
|
||||
"primitives": [
|
||||
"a",
|
||||
"b"
|
||||
],
|
||||
"ad_hoc_objects": [
|
||||
{
|
||||
"name": "obj1"
|
||||
}
|
||||
],
|
||||
"entities": [
|
||||
{
|
||||
"id": "entity-1",
|
||||
"value": 15
|
||||
}
|
||||
],
|
||||
"deep_entities": [
|
||||
{
|
||||
"id": "parent-1",
|
||||
"nested": [
|
||||
{
|
||||
"id": "child-1",
|
||||
"flag": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "primitive arrays use numeric indexing",
|
||||
"data": {
|
||||
"primitives": [
|
||||
"a",
|
||||
123
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "INVALID_TYPE",
|
||||
"details": { "path": "primitives/1" }
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ad-hoc objects without ids use numeric indexing",
|
||||
"data": {
|
||||
"ad_hoc_objects": [
|
||||
{
|
||||
"name": "valid"
|
||||
},
|
||||
{
|
||||
"age": 30
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"details": { "path": "ad_hoc_objects/1/name" }
|
||||
},
|
||||
{
|
||||
"code": "STRICT_PROPERTY_VIOLATION",
|
||||
"details": { "path": "ad_hoc_objects/1/age" }
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "arrays of objects with ids use topological uuid indexing",
|
||||
"data": {
|
||||
"entities": [
|
||||
{
|
||||
"id": "entity-alpha",
|
||||
"value": 20
|
||||
},
|
||||
{
|
||||
"id": "entity-beta",
|
||||
"value": 5
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "MINIMUM_VIOLATED",
|
||||
"details": { "path": "entities/entity-beta/value" }
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "deeply nested entity arrays retain full topological paths",
|
||||
"data": {
|
||||
"deep_entities": [
|
||||
{
|
||||
"id": "parent-omega",
|
||||
"nested": [
|
||||
{
|
||||
"id": "child-alpha",
|
||||
"flag": true
|
||||
},
|
||||
{
|
||||
"id": "child-beta",
|
||||
"flag": "invalid-string"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "INVALID_TYPE",
|
||||
"details": { "path": "deep_entities/parent-omega/nested/child-beta/flag" }
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Polymorphic Structure Pathing",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "widget",
|
||||
"variations": ["widget"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "widget",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": { "type": "string" },
|
||||
"type": { "type": "string" }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "stock.widget",
|
||||
"type": "widget",
|
||||
"properties": {
|
||||
"kind": { "type": "string" },
|
||||
"amount": { "type": "integer" },
|
||||
"details": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"nested_metric": { "type": "number" }
|
||||
},
|
||||
"required": ["nested_metric"]
|
||||
}
|
||||
},
|
||||
"required": ["amount", "details"]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "polymorphic_pathing",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"metadata_bubbles": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$id": "contact_metadata",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "const": "contact" },
|
||||
"phone": { "type": "string" }
|
||||
},
|
||||
"required": ["phone"]
|
||||
},
|
||||
{
|
||||
"$id": "invoice_metadata",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "const": "invoice" },
|
||||
"invoice_id": { "type": "integer" }
|
||||
},
|
||||
"required": ["invoice_id"]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"table_families": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$family": "widget"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "oneOf tags natively bubble specific schema paths into deep array roots",
|
||||
"data": {
|
||||
"metadata_bubbles": [
|
||||
{ "type": "invoice", "invoice_id": 100 },
|
||||
{ "type": "contact", "phone": 12345, "rogue_field": "x" }
|
||||
]
|
||||
},
|
||||
"schema_id": "polymorphic_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "INVALID_TYPE",
|
||||
"details": { "path": "metadata_bubbles/1/phone" }
|
||||
},
|
||||
{
|
||||
"code": "STRICT_PROPERTY_VIOLATION",
|
||||
"details": { "path": "metadata_bubbles/1/rogue_field" }
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "families mechanically map physical variants directly onto topological uuid array paths",
|
||||
"data": {
|
||||
"table_families": [
|
||||
{
|
||||
"id": "widget-1",
|
||||
"type": "widget",
|
||||
"kind": "stock",
|
||||
"amount": 500,
|
||||
"details": { "nested_metric": 42.0 }
|
||||
},
|
||||
{
|
||||
"id": "widget-2",
|
||||
"type": "widget",
|
||||
"kind": "stock",
|
||||
"amount": "not_an_int",
|
||||
"details": {
|
||||
"stray_child": true
|
||||
},
|
||||
"unexpected_root_prop": "hi"
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "polymorphic_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "INVALID_TYPE",
|
||||
"details": { "path": "table_families/widget-2/amount" }
|
||||
},
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"details": { "path": "table_families/widget-2/details/nested_metric" }
|
||||
},
|
||||
{
|
||||
"code": "STRICT_PROPERTY_VIOLATION",
|
||||
"details": { "path": "table_families/widget-2/details/stray_child" }
|
||||
},
|
||||
{
|
||||
"code": "STRICT_PROPERTY_VIOLATION",
|
||||
"details": { "path": "table_families/widget-2/unexpected_root_prop" }
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
479
fixtures/polymorphism.json
Normal file
479
fixtures/polymorphism.json
Normal file
@ -0,0 +1,479 @@
|
||||
[
|
||||
{
|
||||
"description": "Vertical $family Routing (Across Tables)",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"variations": ["entity", "organization", "person", "bot"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string" },
|
||||
"id": { "type": "string" }
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "organization",
|
||||
"variations": ["organization", "person"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "organization",
|
||||
"type": "entity",
|
||||
"properties": {
|
||||
"name": { "type": "string" }
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "person",
|
||||
"variations": ["person"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "person",
|
||||
"type": "organization",
|
||||
"properties": {
|
||||
"first_name": { "type": "string" }
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "bot",
|
||||
"variations": ["bot"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "bot",
|
||||
"type": "entity",
|
||||
"properties": {
|
||||
"model": { "type": "string" }
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "family_entity",
|
||||
"$family": "entity"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Matches base entity",
|
||||
"schema_id": "family_entity",
|
||||
"data": { "type": "entity", "id": "1" },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Matches descendant person natively",
|
||||
"schema_id": "family_entity",
|
||||
"data": { "type": "person", "id": "2", "first_name": "Bob" },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Missing type fails out immediately",
|
||||
"schema_id": "family_entity",
|
||||
"data": { "id": "3", "first_name": "Bob" },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "MISSING_TYPE", "details": { "path": "" } } ] }
|
||||
},
|
||||
{
|
||||
"description": "Alias matching failure",
|
||||
"schema_id": "family_entity",
|
||||
"data": { "type": "alien", "id": "4" },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "NO_FAMILY_MATCH", "details": { "path": "" } } ] }
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Matrix $family Routing (Vertical + Horizontal Intersections)",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"variations": ["entity", "organization", "person", "bot"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string" }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "light.entity",
|
||||
"type": "entity",
|
||||
"properties": {
|
||||
"kind": { "type": "string" }
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "organization",
|
||||
"variations": ["organization", "person"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "organization",
|
||||
"type": "entity"
|
||||
},
|
||||
{
|
||||
"$id": "light.organization",
|
||||
"type": "light.entity"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "person",
|
||||
"variations": ["person"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "person",
|
||||
"type": "organization"
|
||||
},
|
||||
{
|
||||
"$id": "light.person",
|
||||
"type": "light.organization"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "bot",
|
||||
"variations": ["bot"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "bot",
|
||||
"type": "entity"
|
||||
},
|
||||
{
|
||||
"$id": "light.bot",
|
||||
"type": "light.entity"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "family_light_org",
|
||||
"$family": "light.organization"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Matches light.organization exact matrix target",
|
||||
"schema_id": "family_light_org",
|
||||
"data": { "type": "organization", "kind": "light" },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Matches descendant light.person through matrix evaluation",
|
||||
"schema_id": "family_light_org",
|
||||
"data": { "type": "person", "kind": "light" },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Structurally fails to route bot (bot is not descendant of organization)",
|
||||
"schema_id": "family_light_org",
|
||||
"data": { "type": "bot", "kind": "light" },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "NO_FAMILY_MATCH", "details": { "path": "" } } ] }
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Horizontal $family Routing (Virtual Variations)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "widget",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string" }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "stock.widget",
|
||||
"type": "widget",
|
||||
"properties": {
|
||||
"kind": { "type": "string" },
|
||||
"amount": { "type": "integer" }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "super_stock.widget",
|
||||
"type": "stock.widget",
|
||||
"properties": {
|
||||
"super_amount": { "type": "integer" }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "family_widget",
|
||||
"$family": "widget"
|
||||
},
|
||||
{
|
||||
"$id": "family_stock_widget",
|
||||
"$family": "stock.widget"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Base widget resolves stock widget horizontally",
|
||||
"schema_id": "family_widget",
|
||||
"data": { "type": "widget", "kind": "stock", "amount": 5 },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Base widget resolves nested super stock widget natively via descendants crawl",
|
||||
"schema_id": "family_widget",
|
||||
"data": { "type": "widget", "kind": "super_stock", "amount": 5, "super_amount": 10 },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "stock.widget explicit family resolves nested super stock via fast path",
|
||||
"schema_id": "family_stock_widget",
|
||||
"data": { "type": "widget", "kind": "super_stock", "amount": 5, "super_amount": 10 },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "stock.widget fails when presented an invalid payload constraint",
|
||||
"schema_id": "family_stock_widget",
|
||||
"data": { "type": "widget", "kind": "super_stock", "amount": 5, "super_amount": "not_an_int" },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "INVALID_TYPE", "details": { "path": "super_amount" } } ] }
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Strict oneOf Punc Pointers (Tagged Unions)",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"variations": ["entity", "person", "bot"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string" }
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "person",
|
||||
"variations": ["person"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "person",
|
||||
"type": "entity"
|
||||
},
|
||||
{
|
||||
"$id": "full.person",
|
||||
"type": "person",
|
||||
"properties": {
|
||||
"kind": { "type": "string" },
|
||||
"age": { "type": "integer" }
|
||||
},
|
||||
"required": ["age"]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "bot",
|
||||
"variations": ["bot"],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "bot",
|
||||
"type": "entity"
|
||||
},
|
||||
{
|
||||
"$id": "full.bot",
|
||||
"type": "bot",
|
||||
"properties": {
|
||||
"kind": { "type": "string" },
|
||||
"version": { "type": "string" }
|
||||
},
|
||||
"required": ["version"]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "oneOf_union",
|
||||
"oneOf": [
|
||||
{ "type": "full.person" },
|
||||
{ "type": "full.bot" }
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Throws MISSING_TYPE if discriminator matches neither",
|
||||
"schema_id": "oneOf_union",
|
||||
"data": { "kind": "full", "age": 5 },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "MISSING_TYPE", "details": { "path": "" } } ] }
|
||||
},
|
||||
{
|
||||
"description": "Golden match throws pure structural error exclusively on person",
|
||||
"schema_id": "oneOf_union",
|
||||
"data": { "type": "person", "kind": "full", "age": "five" },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "INVALID_TYPE", "details": { "path": "age" } } ] }
|
||||
},
|
||||
{
|
||||
"description": "Golden matches perfectly",
|
||||
"schema_id": "oneOf_union",
|
||||
"data": { "type": "person", "kind": "full", "age": 5 },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Fails nicely with NO_ONEOF_MATCH",
|
||||
"schema_id": "oneOf_union",
|
||||
"data": { "type": "alien", "kind": "full", "age": 5 },
|
||||
"action": "validate",
|
||||
"expect": { "success": false, "errors": [ { "code": "NO_ONEOF_MATCH", "details": { "path": "" } } ] }
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "JSONB Field Bubble oneOf Discrimination (Promoted IDs)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "metadata",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string" }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "invoice.metadata",
|
||||
"type": "metadata",
|
||||
"properties": {
|
||||
"invoice_id": { "type": "integer" }
|
||||
},
|
||||
"required": ["invoice_id"]
|
||||
},
|
||||
{
|
||||
"$id": "payment.metadata",
|
||||
"type": "metadata",
|
||||
"properties": {
|
||||
"payment_id": { "type": "integer" }
|
||||
},
|
||||
"required": ["payment_id"]
|
||||
},
|
||||
{
|
||||
"$id": "oneOf_bubble",
|
||||
"oneOf": [
|
||||
{ "type": "invoice.metadata" },
|
||||
{ "type": "payment.metadata" }
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Extracts golden match natively from explicit JSONB type discriminator",
|
||||
"schema_id": "oneOf_bubble",
|
||||
"data": { "type": "invoice.metadata", "invoice_id": "nan" },
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [ { "code": "INVALID_TYPE", "details": { "path": "invoice_id" } } ]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Valid payload succeeds perfectly in JSONB universe",
|
||||
"schema_id": "oneOf_bubble",
|
||||
"data": { "type": "payment.metadata", "payment_id": 123 },
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Standard JSON Schema oneOf",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "oneOf_scalars",
|
||||
"oneOf": [
|
||||
{ "type": "integer" },
|
||||
{ "minimum": 2 }
|
||||
]
|
||||
},
|
||||
{
|
||||
"$id": "oneOf_dedupe",
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"shared": { "type": "integer" }
|
||||
},
|
||||
"required": ["shared"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"shared": { "type": "integer" },
|
||||
"extra": { "type": "string" }
|
||||
},
|
||||
"required": ["shared", "extra"]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Valid exclusively against first scalar choice",
|
||||
"schema_id": "oneOf_scalars",
|
||||
"data": 1,
|
||||
"action": "validate",
|
||||
"expect": { "success": true }
|
||||
},
|
||||
{
|
||||
"description": "Fails mathematically if matches both schemas natively",
|
||||
"schema_id": "oneOf_scalars",
|
||||
"data": 3,
|
||||
"action": "validate",
|
||||
"expect": { "success": false }
|
||||
},
|
||||
{
|
||||
"description": "Deduper merges shared errors dynamically exactly like JSON Schema",
|
||||
"schema_id": "oneOf_dedupe",
|
||||
"data": { "shared": "nan" },
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{ "code": "NO_ONEOF_MATCH", "details": { "path": "" } },
|
||||
{ "code": "INVALID_TYPE", "details": { "path": "shared" } }
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,929 +0,0 @@
|
||||
[
|
||||
{
|
||||
"description": "nested refs",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$ref": "c_212",
|
||||
"$id": "ref_4_0"
|
||||
},
|
||||
{
|
||||
"$id": "a_212",
|
||||
"type": "integer"
|
||||
},
|
||||
{
|
||||
"$id": "b_212",
|
||||
"$ref": "a_212"
|
||||
},
|
||||
{
|
||||
"$id": "c_212",
|
||||
"$ref": "b_212"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "nested ref valid",
|
||||
"data": 5,
|
||||
"schema_id": "ref_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "nested ref invalid",
|
||||
"data": "a",
|
||||
"schema_id": "ref_4_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "ref applies alongside sibling keywords",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"$ref": "reffed_248",
|
||||
"maxItems": 2
|
||||
}
|
||||
},
|
||||
"$id": "ref_5_0"
|
||||
},
|
||||
{
|
||||
"$id": "reffed_248",
|
||||
"type": "array"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "ref valid, maxItems valid",
|
||||
"data": {
|
||||
"foo": []
|
||||
},
|
||||
"schema_id": "ref_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ref valid, maxItems invalid",
|
||||
"data": {
|
||||
"foo": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"schema_id": "ref_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ref invalid",
|
||||
"data": {
|
||||
"foo": "string"
|
||||
},
|
||||
"schema_id": "ref_5_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "property named $ref that is not a reference",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"properties": {
|
||||
"$ref": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"$id": "ref_6_0"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "property named $ref valid",
|
||||
"data": {
|
||||
"$ref": "a"
|
||||
},
|
||||
"schema_id": "ref_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "property named $ref invalid",
|
||||
"data": {
|
||||
"$ref": 2
|
||||
},
|
||||
"schema_id": "ref_6_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "property named $ref, containing an actual $ref",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"properties": {
|
||||
"$ref": {
|
||||
"$ref": "is-string_344"
|
||||
}
|
||||
},
|
||||
"$id": "ref_7_0"
|
||||
},
|
||||
{
|
||||
"$id": "is-string_344",
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "property named $ref valid",
|
||||
"data": {
|
||||
"$ref": "a"
|
||||
},
|
||||
"schema_id": "ref_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "property named $ref invalid",
|
||||
"data": {
|
||||
"$ref": 2
|
||||
},
|
||||
"schema_id": "ref_7_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "$ref to boolean schema true",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$ref": "bool_378",
|
||||
"$id": "ref_8_0"
|
||||
},
|
||||
{
|
||||
"$id": "bool_378",
|
||||
"extensible": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is valid",
|
||||
"data": "foo",
|
||||
"schema_id": "ref_8_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "$ref to boolean schema false",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$ref": "bool_400",
|
||||
"$id": "ref_9_0"
|
||||
},
|
||||
{
|
||||
"$id": "bool_400",
|
||||
"extensible": false,
|
||||
"not": {}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "any value is invalid",
|
||||
"data": "foo",
|
||||
"schema_id": "ref_9_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "refs with quote",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"properties": {
|
||||
"foo\"bar": {
|
||||
"$ref": "foo%22bar_550"
|
||||
}
|
||||
},
|
||||
"$id": "ref_11_0"
|
||||
},
|
||||
{
|
||||
"$id": "foo%22bar_550",
|
||||
"type": "number"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "object with numbers is valid",
|
||||
"data": {
|
||||
"foo\"bar": 1
|
||||
},
|
||||
"schema_id": "ref_11_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "object with strings is invalid",
|
||||
"data": {
|
||||
"foo\"bar": "1"
|
||||
},
|
||||
"schema_id": "ref_11_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "$ref boundary resets to loose",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$ref": "target_1465",
|
||||
"$id": "ref_35_0"
|
||||
},
|
||||
{
|
||||
"$id": "target_1465",
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "extra property in ref target is invalid (strict by default)",
|
||||
"data": {
|
||||
"foo": "bar",
|
||||
"extra": 1
|
||||
},
|
||||
"schema_id": "ref_35_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "$ref target can enforce strictness",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$ref": "target_1496",
|
||||
"$id": "ref_36_0"
|
||||
},
|
||||
{
|
||||
"$id": "target_1496",
|
||||
"extensible": false,
|
||||
"properties": {
|
||||
"foo": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "extra property in ref target is invalid",
|
||||
"data": {
|
||||
"foo": "bar",
|
||||
"extra": 1
|
||||
},
|
||||
"schema_id": "ref_36_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "strictness: boundary reset at $ref",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"extensible": true,
|
||||
"properties": {
|
||||
"inline_child": {
|
||||
"properties": {
|
||||
"a": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"ref_child": {
|
||||
"$ref": "strict_node_1544"
|
||||
},
|
||||
"extensible_ref_child": {
|
||||
"$ref": "extensible_node_1551"
|
||||
}
|
||||
},
|
||||
"$id": "ref_37_0"
|
||||
},
|
||||
{
|
||||
"$id": "strict_node_1544",
|
||||
"properties": {
|
||||
"b": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "extensible_node_1551",
|
||||
"extensible": true,
|
||||
"properties": {
|
||||
"c": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "inline child inherits looseness",
|
||||
"data": {
|
||||
"inline_child": {
|
||||
"a": 1,
|
||||
"extra": 2
|
||||
}
|
||||
},
|
||||
"schema_id": "ref_37_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ref child resets to strict (default)",
|
||||
"data": {
|
||||
"ref_child": {
|
||||
"b": 1,
|
||||
"extra": 2
|
||||
}
|
||||
},
|
||||
"schema_id": "ref_37_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ref child with explicit extensible=true is loose",
|
||||
"data": {
|
||||
"extensible_ref_child": {
|
||||
"c": 1,
|
||||
"extra": 2
|
||||
}
|
||||
},
|
||||
"schema_id": "ref_37_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "arrays: ref items inherit strictness (reset at boundary)",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"properties": {
|
||||
"list": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "strict_node_1614"
|
||||
}
|
||||
}
|
||||
},
|
||||
"$id": "ref_38_0"
|
||||
},
|
||||
{
|
||||
"$id": "strict_node_1614",
|
||||
"properties": {
|
||||
"a": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "ref item with extra property is invalid (strict by default)",
|
||||
"data": {
|
||||
"list": [
|
||||
{
|
||||
"a": 1,
|
||||
"extra": 2
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "ref_38_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "implicit keyword shadowing",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$ref": "parent_1648",
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "child"
|
||||
},
|
||||
"age": {
|
||||
"minimum": 15
|
||||
}
|
||||
},
|
||||
"$id": "ref_39_0"
|
||||
},
|
||||
{
|
||||
"$id": "parent_1648",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"const": "parent"
|
||||
},
|
||||
"age": {
|
||||
"minimum": 10,
|
||||
"maximum": 20
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type",
|
||||
"age"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "child type overrides parent type",
|
||||
"data": {
|
||||
"type": "child",
|
||||
"age": 15
|
||||
},
|
||||
"schema_id": "ref_39_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "parent type is now invalid (shadowed)",
|
||||
"data": {
|
||||
"type": "parent",
|
||||
"age": 15
|
||||
},
|
||||
"schema_id": "ref_39_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "child min age (15) is enforced",
|
||||
"data": {
|
||||
"type": "child",
|
||||
"age": 12
|
||||
},
|
||||
"schema_id": "ref_39_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "parent max age (20) is shadowed (replaced) by child definition",
|
||||
"data": {
|
||||
"type": "child",
|
||||
"age": 21
|
||||
},
|
||||
"schema_id": "ref_39_0",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Entities extending entities (Physical Birth)",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"variations": [
|
||||
"entity",
|
||||
"organization",
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "organization",
|
||||
"variations": [
|
||||
"organization",
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "organization",
|
||||
"$ref": "entity",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "person",
|
||||
"variations": [
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "person",
|
||||
"$ref": "organization",
|
||||
"properties": {
|
||||
"first_name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"puncs": [
|
||||
{
|
||||
"name": "save_org",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "save_org.request",
|
||||
"$ref": "organization"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Valid person against organization schema (implicit type allowance from physical hierarchy)",
|
||||
"schema_id": "save_org.request",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"type": "person",
|
||||
"name": "ACME"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Valid organization against organization schema",
|
||||
"schema_id": "save_org.request",
|
||||
"data": {
|
||||
"id": "2",
|
||||
"type": "organization",
|
||||
"name": "ACME"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Invalid entity against organization schema (ancestor not allowed)",
|
||||
"schema_id": "save_org.request",
|
||||
"data": {
|
||||
"id": "3",
|
||||
"type": "entity"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "TYPE_MISMATCH",
|
||||
"path": "/type"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Viral Infection: Ad-hocs inheriting entity boundaries via $ref",
|
||||
"database": {
|
||||
"types": [
|
||||
{
|
||||
"name": "entity",
|
||||
"variations": [
|
||||
"entity",
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "entity",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "person",
|
||||
"variations": [
|
||||
"person"
|
||||
],
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "person",
|
||||
"$ref": "entity",
|
||||
"properties": {
|
||||
"first_name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "light.person",
|
||||
"$ref": "entity",
|
||||
"properties": {
|
||||
"first_name": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"puncs": [
|
||||
{
|
||||
"name": "save_person_light",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "save_person_light.request",
|
||||
"$ref": "light.person",
|
||||
"properties": {
|
||||
"extra_request_field": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Valid person against ad-hoc request schema (request virally inherited person variations)",
|
||||
"schema_id": "save_person_light.request",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"type": "person",
|
||||
"first_name": "John",
|
||||
"extra_request_field": "test"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Invalid entity against ad-hoc request schema (viral inheritance enforces person boundary)",
|
||||
"schema_id": "save_person_light.request",
|
||||
"data": {
|
||||
"id": "1",
|
||||
"type": "entity",
|
||||
"first_name": "John"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "TYPE_MISMATCH",
|
||||
"path": "/type"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Ad-hocs extending ad-hocs (No type property)",
|
||||
"database": {
|
||||
"puncs": [
|
||||
{
|
||||
"name": "save_address",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "address",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"street": {
|
||||
"type": "string"
|
||||
},
|
||||
"city": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "us_address",
|
||||
"$ref": "address",
|
||||
"properties": {
|
||||
"state": {
|
||||
"type": "string"
|
||||
},
|
||||
"zip": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "save_address.request",
|
||||
"$ref": "us_address"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Valid us_address",
|
||||
"schema_id": "save_address.request",
|
||||
"data": {
|
||||
"street": "123 Main",
|
||||
"city": "Anytown",
|
||||
"state": "CA",
|
||||
"zip": "12345"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Invalid base address against us_address",
|
||||
"schema_id": "save_address.request",
|
||||
"data": {
|
||||
"street": "123 Main",
|
||||
"city": "Anytown"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Ad-hocs extending ad-hocs (with string type property, no magic)",
|
||||
"database": {
|
||||
"puncs": [
|
||||
{
|
||||
"name": "save_config",
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "config_base",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"const": "config_base"
|
||||
},
|
||||
"setting": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "config_advanced",
|
||||
"$ref": "config_base",
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"const": "config_advanced"
|
||||
},
|
||||
"advanced_setting": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"$id": "save_config.request",
|
||||
"$ref": "config_base"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "Valid config_base against config_base",
|
||||
"schema_id": "save_config.request",
|
||||
"data": {
|
||||
"type": "config_base",
|
||||
"setting": "on"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Invalid config_advanced against config_base (no type magic, const is strictly 'config_base')",
|
||||
"schema_id": "save_config.request",
|
||||
"data": {
|
||||
"type": "config_advanced",
|
||||
"setting": "on",
|
||||
"advanced_setting": "off"
|
||||
},
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
2
flows
2
flows
Submodule flows updated: a7b0f5dc4d...4d61e13e00
7
src/database/edge.rs
Normal file
7
src/database/edge.rs
Normal file
@ -0,0 +1,7 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A directed link between two schema/database nodes.
///
/// `Default` yields an empty constraint name with `forward == false`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Edge {
    // Name of the constraint this edge represents — presumably the SQL
    // foreign-key/constraint identifier; TODO confirm against the query builder.
    pub constraint: String,
    // Direction of traversal relative to the constraint definition
    // (assumed: true = follow the constraint forward, false = reverse — verify at call sites).
    pub forward: bool,
}
|
||||
@ -44,8 +44,8 @@ impl MockExecutor {
|
||||
|
||||
#[cfg(test)]
|
||||
impl DatabaseExecutor for MockExecutor {
|
||||
fn query(&self, sql: &str, _args: Option<&[Value]>) -> Result<Value, String> {
|
||||
println!("DEBUG SQL QUERY: {}", sql);
|
||||
fn query(&self, sql: &str, _args: Option<Vec<Value>>) -> Result<Value, String> {
|
||||
println!("JSPG_SQL: {}", sql);
|
||||
MOCK_STATE.with(|state| {
|
||||
let mut s = state.borrow_mut();
|
||||
s.captured_queries.push(sql.to_string());
|
||||
@ -65,8 +65,8 @@ impl DatabaseExecutor for MockExecutor {
|
||||
})
|
||||
}
|
||||
|
||||
fn execute(&self, sql: &str, _args: Option<&[Value]>) -> Result<(), String> {
|
||||
println!("DEBUG SQL EXECUTE: {}", sql);
|
||||
fn execute(&self, sql: &str, _args: Option<Vec<Value>>) -> Result<(), String> {
|
||||
println!("JSPG_SQL: {}", sql);
|
||||
MOCK_STATE.with(|state| {
|
||||
let mut s = state.borrow_mut();
|
||||
s.captured_queries.push(sql.to_string());
|
||||
@ -124,42 +124,23 @@ fn parse_and_match_mocks(sql: &str, mocks: &[Value]) -> Option<Vec<Value>> {
|
||||
return None;
|
||||
};
|
||||
|
||||
// 2. Extract WHERE conditions
|
||||
let mut conditions = Vec::new();
|
||||
// 2. Extract WHERE conditions string
|
||||
let mut where_clause = String::new();
|
||||
if let Some(where_idx) = sql_upper.find(" WHERE ") {
|
||||
let mut where_end = sql_upper.find(" ORDER BY ").unwrap_or(sql.len());
|
||||
let mut where_end = sql_upper.find(" ORDER BY ").unwrap_or(sql_upper.len());
|
||||
if let Some(limit_idx) = sql_upper.find(" LIMIT ") {
|
||||
if limit_idx < where_end {
|
||||
where_end = limit_idx;
|
||||
}
|
||||
}
|
||||
let where_clause = &sql[where_idx + 7..where_end];
|
||||
let and_regex = Regex::new(r"(?i)\s+AND\s+").ok()?;
|
||||
let parts = and_regex.split(where_clause);
|
||||
for part in parts {
|
||||
if let Some(eq_idx) = part.find('=') {
|
||||
let left = part[..eq_idx]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.trim_matches('"');
|
||||
let right = part[eq_idx + 1..].trim().trim_matches('\'');
|
||||
conditions.push((left.to_string(), right.to_string()));
|
||||
} else if part.to_uppercase().contains(" IS NULL") {
|
||||
let left = part[..part.to_uppercase().find(" IS NULL").unwrap()]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.replace('"', ""); // Remove quotes explicitly
|
||||
conditions.push((left, "null".to_string()));
|
||||
}
|
||||
}
|
||||
where_clause = sql[where_idx + 7..where_end].to_string();
|
||||
}
|
||||
|
||||
// 3. Find matching mocks
|
||||
let mut matches = Vec::new();
|
||||
let or_regex = Regex::new(r"(?i)\s+OR\s+").ok()?;
|
||||
let and_regex = Regex::new(r"(?i)\s+AND\s+").ok()?;
|
||||
|
||||
for mock in mocks {
|
||||
if let Some(mock_obj) = mock.as_object() {
|
||||
if let Some(t) = mock_obj.get("type") {
|
||||
@ -168,25 +149,66 @@ fn parse_and_match_mocks(sql: &str, mocks: &[Value]) -> Option<Vec<Value>> {
|
||||
}
|
||||
}
|
||||
|
||||
let mut matches_all = true;
|
||||
for (k, v) in &conditions {
|
||||
let mock_val_str = match mock_obj.get(k) {
|
||||
Some(Value::String(s)) => s.clone(),
|
||||
Some(Value::Number(n)) => n.to_string(),
|
||||
Some(Value::Bool(b)) => b.to_string(),
|
||||
Some(Value::Null) => "null".to_string(),
|
||||
_ => {
|
||||
matches_all = false;
|
||||
break;
|
||||
if where_clause.is_empty() {
|
||||
matches.push(mock.clone());
|
||||
continue;
|
||||
}
|
||||
|
||||
let or_parts = or_regex.split(&where_clause);
|
||||
let mut any_branch_matched = false;
|
||||
|
||||
for or_part in or_parts {
|
||||
let branch_str = or_part.replace('(', "").replace(')', "");
|
||||
let mut branch_matches = true;
|
||||
|
||||
for part in and_regex.split(&branch_str) {
|
||||
if let Some(eq_idx) = part.find('=') {
|
||||
let left = part[..eq_idx]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.trim_matches('"');
|
||||
let right = part[eq_idx + 1..].trim().trim_matches('\'');
|
||||
|
||||
let mock_val_str = match mock_obj.get(left) {
|
||||
Some(Value::String(s)) => s.clone(),
|
||||
Some(Value::Number(n)) => n.to_string(),
|
||||
Some(Value::Bool(b)) => b.to_string(),
|
||||
Some(Value::Null) => "null".to_string(),
|
||||
_ => "".to_string(),
|
||||
};
|
||||
if mock_val_str != right {
|
||||
branch_matches = false;
|
||||
break;
|
||||
}
|
||||
} else if part.to_uppercase().contains(" IS NULL") {
|
||||
let left = part[..part.to_uppercase().find(" IS NULL").unwrap()]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.trim_matches('"');
|
||||
|
||||
let mock_val_str = match mock_obj.get(left) {
|
||||
Some(Value::Null) => "null".to_string(),
|
||||
_ => "".to_string(),
|
||||
};
|
||||
|
||||
if mock_val_str != "null" {
|
||||
branch_matches = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
if mock_val_str != *v {
|
||||
matches_all = false;
|
||||
}
|
||||
|
||||
if branch_matches {
|
||||
any_branch_matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if matches_all {
|
||||
if any_branch_matched {
|
||||
matches.push(mock.clone());
|
||||
}
|
||||
}
|
||||
|
||||
@ -9,10 +9,10 @@ use serde_json::Value;
|
||||
/// without a live Postgres SPI connection.
|
||||
pub trait DatabaseExecutor: Send + Sync {
|
||||
/// Executes a query expecting a single JSONB return, representing rows.
|
||||
fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String>;
|
||||
fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String>;
|
||||
|
||||
/// Executes an operation (INSERT, UPDATE, DELETE, or pg_notify) that does not return rows.
|
||||
fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String>;
|
||||
fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String>;
|
||||
|
||||
/// Returns the current authenticated user's ID
|
||||
fn auth_user_id(&self) -> Result<String, String>;
|
||||
|
||||
@ -9,88 +9,145 @@ impl SpiExecutor {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
|
||||
fn transact<F, R>(&self, f: F) -> Result<R, String>
|
||||
where
|
||||
F: FnOnce() -> Result<R, String>,
|
||||
{
|
||||
unsafe {
|
||||
let oldcontext = pgrx::pg_sys::CurrentMemoryContext;
|
||||
let oldowner = pgrx::pg_sys::CurrentResourceOwner;
|
||||
pgrx::pg_sys::BeginInternalSubTransaction(std::ptr::null());
|
||||
pgrx::pg_sys::MemoryContextSwitchTo(oldcontext);
|
||||
|
||||
let runner = std::panic::AssertUnwindSafe(move || {
|
||||
let res = f();
|
||||
|
||||
pgrx::pg_sys::ReleaseCurrentSubTransaction();
|
||||
pgrx::pg_sys::MemoryContextSwitchTo(oldcontext);
|
||||
pgrx::pg_sys::CurrentResourceOwner = oldowner;
|
||||
|
||||
res
|
||||
});
|
||||
|
||||
pgrx::PgTryBuilder::new(runner)
|
||||
.catch_rust_panic(|cause| {
|
||||
pgrx::pg_sys::RollbackAndReleaseCurrentSubTransaction();
|
||||
pgrx::pg_sys::MemoryContextSwitchTo(oldcontext);
|
||||
pgrx::pg_sys::CurrentResourceOwner = oldowner;
|
||||
|
||||
// Rust panics are fatal bugs, not validation errors. Rethrow so they bubble up.
|
||||
cause.rethrow()
|
||||
})
|
||||
.catch_others(|cause| {
|
||||
pgrx::pg_sys::RollbackAndReleaseCurrentSubTransaction();
|
||||
pgrx::pg_sys::MemoryContextSwitchTo(oldcontext);
|
||||
pgrx::pg_sys::CurrentResourceOwner = oldowner;
|
||||
|
||||
let error_msg = match &cause {
|
||||
pgrx::pg_sys::panic::CaughtError::PostgresError(e)
|
||||
| pgrx::pg_sys::panic::CaughtError::ErrorReport(e) => {
|
||||
let json_err = serde_json::json!({
|
||||
"error": e.message(),
|
||||
"code": format!("{:?}", e.sql_error_code()),
|
||||
"detail": e.detail(),
|
||||
"hint": e.hint()
|
||||
});
|
||||
json_err.to_string()
|
||||
}
|
||||
_ => format!("{:?}", cause),
|
||||
};
|
||||
|
||||
pgrx::warning!("JSPG Caught Native Postgres Error: {}", error_msg);
|
||||
Err(error_msg)
|
||||
})
|
||||
.execute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DatabaseExecutor for SpiExecutor {
|
||||
fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String> {
|
||||
let mut json_args = Vec::new();
|
||||
fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String> {
|
||||
let mut args_with_oid: Vec<pgrx::datum::DatumWithOid> = Vec::new();
|
||||
if let Some(params) = args {
|
||||
for val in params {
|
||||
json_args.push(pgrx::JsonB(val.clone()));
|
||||
}
|
||||
for j_val in json_args.into_iter() {
|
||||
args_with_oid.push(pgrx::datum::DatumWithOid::from(j_val));
|
||||
args_with_oid.push(pgrx::datum::DatumWithOid::from(pgrx::JsonB(val)));
|
||||
}
|
||||
}
|
||||
|
||||
Spi::connect(|client| {
|
||||
match client.select(sql, Some(args_with_oid.len() as i64), &args_with_oid) {
|
||||
Ok(tup_table) => {
|
||||
let mut results = Vec::new();
|
||||
for row in tup_table {
|
||||
if let Ok(Some(jsonb)) = row.get::<pgrx::JsonB>(1) {
|
||||
results.push(jsonb.0);
|
||||
pgrx::debug1!("JSPG_SQL: {}", sql);
|
||||
self.transact(|| {
|
||||
Spi::connect(|client| {
|
||||
match client.select(sql, Some(args_with_oid.len() as i64), &args_with_oid) {
|
||||
Ok(tup_table) => {
|
||||
let mut results = Vec::new();
|
||||
for row in tup_table {
|
||||
if let Ok(Some(jsonb)) = row.get::<pgrx::JsonB>(1) {
|
||||
results.push(jsonb.0);
|
||||
}
|
||||
}
|
||||
Ok(Value::Array(results))
|
||||
}
|
||||
Ok(Value::Array(results))
|
||||
Err(e) => Err(format!("SPI Query Fetch Failure: {}", e)),
|
||||
}
|
||||
Err(e) => Err(format!("SPI Query Fetch Failure: {}", e)),
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String> {
|
||||
let mut json_args = Vec::new();
|
||||
fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String> {
|
||||
let mut args_with_oid: Vec<pgrx::datum::DatumWithOid> = Vec::new();
|
||||
if let Some(params) = args {
|
||||
for val in params {
|
||||
json_args.push(pgrx::JsonB(val.clone()));
|
||||
}
|
||||
for j_val in json_args.into_iter() {
|
||||
args_with_oid.push(pgrx::datum::DatumWithOid::from(j_val));
|
||||
args_with_oid.push(pgrx::datum::DatumWithOid::from(pgrx::JsonB(val)));
|
||||
}
|
||||
}
|
||||
|
||||
Spi::connect_mut(|client| {
|
||||
match client.update(sql, Some(args_with_oid.len() as i64), &args_with_oid) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(format!("SPI Execution Failure: {}", e)),
|
||||
}
|
||||
pgrx::debug1!("JSPG_SQL: {}", sql);
|
||||
self.transact(|| {
|
||||
Spi::connect_mut(|client| {
|
||||
match client.update(sql, Some(args_with_oid.len() as i64), &args_with_oid) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(format!("SPI Execution Failure: {}", e)),
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn auth_user_id(&self) -> Result<String, String> {
|
||||
Spi::connect(|client| {
|
||||
let mut tup_table = client
|
||||
.select(
|
||||
"SELECT COALESCE(current_setting('auth.user_id', true), 'ffffffff-ffff-ffff-ffff-ffffffffffff')",
|
||||
None,
|
||||
&[],
|
||||
)
|
||||
.map_err(|e| format!("SPI Select Error: {}", e))?;
|
||||
self.transact(|| {
|
||||
Spi::connect(|client| {
|
||||
let mut tup_table = client
|
||||
.select(
|
||||
"SELECT COALESCE(current_setting('auth.user_id', true), 'ffffffff-ffff-ffff-ffff-ffffffffffff')",
|
||||
None,
|
||||
&[],
|
||||
)
|
||||
.map_err(|e| format!("SPI Select Error: {}", e))?;
|
||||
|
||||
let row = tup_table
|
||||
.next()
|
||||
.ok_or("No user id setting returned from context".to_string())?;
|
||||
let user_id: Option<String> = row.get(1).map_err(|e| e.to_string())?;
|
||||
let row = tup_table
|
||||
.next()
|
||||
.ok_or("No user id setting returned from context".to_string())?;
|
||||
let user_id: Option<String> = row.get(1).map_err(|e| e.to_string())?;
|
||||
|
||||
user_id.ok_or("Missing user_id".to_string())
|
||||
user_id.ok_or("Missing user_id".to_string())
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn timestamp(&self) -> Result<String, String> {
|
||||
Spi::connect(|client| {
|
||||
let mut tup_table = client
|
||||
.select("SELECT clock_timestamp()::text", None, &[])
|
||||
.map_err(|e| format!("SPI Select Error: {}", e))?;
|
||||
self.transact(|| {
|
||||
Spi::connect(|client| {
|
||||
let mut tup_table = client
|
||||
.select("SELECT clock_timestamp()::text", None, &[])
|
||||
.map_err(|e| format!("SPI Select Error: {}", e))?;
|
||||
|
||||
let row = tup_table
|
||||
.next()
|
||||
.ok_or("No clock timestamp returned".to_string())?;
|
||||
let timestamp: Option<String> = row.get(1).map_err(|e| e.to_string())?;
|
||||
let row = tup_table
|
||||
.next()
|
||||
.ok_or("No clock timestamp returned".to_string())?;
|
||||
let timestamp: Option<String> = row.get(1).map_err(|e| e.to_string())?;
|
||||
|
||||
timestamp.ok_or("Missing timestamp".to_string())
|
||||
timestamp.ok_or("Missing timestamp".to_string())
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
pub mod edge;
|
||||
pub mod r#enum;
|
||||
pub mod executors;
|
||||
pub mod formats;
|
||||
@ -18,14 +19,11 @@ use executors::pgrx::SpiExecutor;
|
||||
#[cfg(test)]
|
||||
use executors::mock::MockExecutor;
|
||||
|
||||
pub mod stem;
|
||||
use punc::Punc;
|
||||
use relation::Relation;
|
||||
use schema::Schema;
|
||||
use serde_json::Value;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::sync::Arc;
|
||||
use stem::Stem;
|
||||
use r#type::Type;
|
||||
|
||||
pub struct Database {
|
||||
@ -34,22 +32,19 @@ pub struct Database {
|
||||
pub puncs: HashMap<String, Punc>,
|
||||
pub relations: HashMap<String, Relation>,
|
||||
pub schemas: HashMap<String, Schema>,
|
||||
// Map of Schema ID -> { Entity Type -> Target Subschema Arc }
|
||||
pub stems: HashMap<String, HashMap<String, Arc<Stem>>>,
|
||||
pub descendants: HashMap<String, Vec<String>>,
|
||||
pub depths: HashMap<String, usize>,
|
||||
pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub fn new(val: &serde_json::Value) -> Result<Self, crate::drop::Drop> {
|
||||
pub fn new(val: &serde_json::Value) -> (Self, crate::drop::Drop) {
|
||||
let mut db = Self {
|
||||
enums: HashMap::new(),
|
||||
types: HashMap::new(),
|
||||
relations: HashMap::new(),
|
||||
puncs: HashMap::new(),
|
||||
schemas: HashMap::new(),
|
||||
stems: HashMap::new(),
|
||||
descendants: HashMap::new(),
|
||||
depths: HashMap::new(),
|
||||
#[cfg(not(test))]
|
||||
@ -58,18 +53,38 @@ impl Database {
|
||||
executor: Box::new(MockExecutor::new()),
|
||||
};
|
||||
|
||||
let mut errors = Vec::new();
|
||||
|
||||
if let Some(arr) = val.get("enums").and_then(|v| v.as_array()) {
|
||||
for item in arr {
|
||||
if let Ok(def) = serde_json::from_value::<Enum>(item.clone()) {
|
||||
db.enums.insert(def.name.clone(), def);
|
||||
match serde_json::from_value::<Enum>(item.clone()) {
|
||||
Ok(def) => {
|
||||
db.enums.insert(def.name.clone(), def);
|
||||
}
|
||||
Err(e) => {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "DATABASE_ENUM_PARSE_FAILED".to_string(),
|
||||
message: format!("Failed to parse database enum: {}", e),
|
||||
details: crate::drop::ErrorDetails::default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(arr) = val.get("types").and_then(|v| v.as_array()) {
|
||||
for item in arr {
|
||||
if let Ok(def) = serde_json::from_value::<Type>(item.clone()) {
|
||||
db.types.insert(def.name.clone(), def);
|
||||
match serde_json::from_value::<Type>(item.clone()) {
|
||||
Ok(def) => {
|
||||
db.types.insert(def.name.clone(), def);
|
||||
}
|
||||
Err(e) => {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "DATABASE_TYPE_PARSE_FAILED".to_string(),
|
||||
message: format!("Failed to parse database type: {}", e),
|
||||
details: crate::drop::ErrorDetails::default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -78,37 +93,70 @@ impl Database {
|
||||
for item in arr {
|
||||
match serde_json::from_value::<Relation>(item.clone()) {
|
||||
Ok(def) => {
|
||||
db.relations.insert(def.constraint.clone(), def);
|
||||
if db.types.contains_key(&def.source_type)
|
||||
&& db.types.contains_key(&def.destination_type)
|
||||
{
|
||||
db.relations.insert(def.constraint.clone(), def);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "DATABASE_RELATION_PARSE_FAILED".to_string(),
|
||||
message: format!("Failed to parse database relation: {}", e),
|
||||
details: crate::drop::ErrorDetails::default(),
|
||||
});
|
||||
}
|
||||
Err(e) => println!("DATABASE RELATION PARSE FAILED: {:?}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(arr) = val.get("puncs").and_then(|v| v.as_array()) {
|
||||
for item in arr {
|
||||
if let Ok(def) = serde_json::from_value::<Punc>(item.clone()) {
|
||||
db.puncs.insert(def.name.clone(), def);
|
||||
match serde_json::from_value::<Punc>(item.clone()) {
|
||||
Ok(def) => {
|
||||
db.puncs.insert(def.name.clone(), def);
|
||||
}
|
||||
Err(e) => {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "DATABASE_PUNC_PARSE_FAILED".to_string(),
|
||||
message: format!("Failed to parse database punc: {}", e),
|
||||
details: crate::drop::ErrorDetails::default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(arr) = val.get("schemas").and_then(|v| v.as_array()) {
|
||||
for (i, item) in arr.iter().enumerate() {
|
||||
if let Ok(mut schema) = serde_json::from_value::<Schema>(item.clone()) {
|
||||
let id = schema
|
||||
.obj
|
||||
.id
|
||||
.clone()
|
||||
.unwrap_or_else(|| format!("schema_{}", i));
|
||||
schema.obj.id = Some(id.clone());
|
||||
db.schemas.insert(id, schema);
|
||||
match serde_json::from_value::<Schema>(item.clone()) {
|
||||
Ok(mut schema) => {
|
||||
let id = schema
|
||||
.obj
|
||||
.id
|
||||
.clone()
|
||||
.unwrap_or_else(|| format!("schema_{}", i));
|
||||
schema.obj.id = Some(id.clone());
|
||||
db.schemas.insert(id, schema);
|
||||
}
|
||||
Err(e) => {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "DATABASE_SCHEMA_PARSE_FAILED".to_string(),
|
||||
message: format!("Failed to parse database schema: {}", e),
|
||||
details: crate::drop::ErrorDetails::default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
db.compile()?;
|
||||
Ok(db)
|
||||
db.compile(&mut errors);
|
||||
let drop = if errors.is_empty() {
|
||||
crate::drop::Drop::success()
|
||||
} else {
|
||||
crate::drop::Drop::with_errors(errors)
|
||||
};
|
||||
(db, drop)
|
||||
}
|
||||
|
||||
/// Override the default executor for unit testing
|
||||
@ -118,12 +166,12 @@ impl Database {
|
||||
}
|
||||
|
||||
/// Executes a query expecting a single JSONB array return, representing rows.
|
||||
pub fn query(&self, sql: &str, args: Option<&[Value]>) -> Result<Value, String> {
|
||||
pub fn query(&self, sql: &str, args: Option<Vec<Value>>) -> Result<Value, String> {
|
||||
self.executor.query(sql, args)
|
||||
}
|
||||
|
||||
/// Executes an operation (INSERT, UPDATE, DELETE, or pg_notify) that does not return rows.
|
||||
pub fn execute(&self, sql: &str, args: Option<&[Value]>) -> Result<(), String> {
|
||||
pub fn execute(&self, sql: &str, args: Option<Vec<Value>>) -> Result<(), String> {
|
||||
self.executor.execute(sql, args)
|
||||
}
|
||||
|
||||
@ -137,34 +185,42 @@ impl Database {
|
||||
self.executor.timestamp()
|
||||
}
|
||||
|
||||
/// Organizes the graph of the database, compiling regex, format functions, and caching relationships.
|
||||
pub fn compile(&mut self) -> Result<(), crate::drop::Drop> {
|
||||
self.collect_schemas();
|
||||
pub fn compile(&mut self, errors: &mut Vec<crate::drop::Error>) {
|
||||
let mut harvested = Vec::new();
|
||||
for schema in self.schemas.values_mut() {
|
||||
schema.collect_schemas(None, &mut harvested, errors);
|
||||
}
|
||||
self.schemas.extend(harvested);
|
||||
|
||||
self.collect_schemas(errors);
|
||||
self.collect_depths();
|
||||
self.collect_descendants();
|
||||
self.compile_schemas();
|
||||
self.collect_stems()?;
|
||||
|
||||
Ok(())
|
||||
// Mathematically evaluate all property inheritances, formats, schemas, and foreign key edges topographically over OnceLocks
|
||||
let mut visited = std::collections::HashSet::new();
|
||||
for schema in self.schemas.values() {
|
||||
schema.compile(self, &mut visited, errors);
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_schemas(&mut self) {
|
||||
fn collect_schemas(&mut self, errors: &mut Vec<crate::drop::Error>) {
|
||||
let mut to_insert = Vec::new();
|
||||
|
||||
// Pass 1: Extract all Schemas structurally off top level definitions into the master registry.
|
||||
// Validate every node recursively via string filters natively!
|
||||
for type_def in self.types.values() {
|
||||
for mut schema in type_def.schemas.clone() {
|
||||
schema.harvest(&mut to_insert);
|
||||
schema.collect_schemas(None, &mut to_insert, errors);
|
||||
}
|
||||
}
|
||||
for punc_def in self.puncs.values() {
|
||||
for mut schema in punc_def.schemas.clone() {
|
||||
schema.harvest(&mut to_insert);
|
||||
schema.collect_schemas(None, &mut to_insert, errors);
|
||||
}
|
||||
}
|
||||
for enum_def in self.enums.values() {
|
||||
for mut schema in enum_def.schemas.clone() {
|
||||
schema.harvest(&mut to_insert);
|
||||
schema.collect_schemas(None, &mut to_insert, errors);
|
||||
}
|
||||
}
|
||||
|
||||
@ -186,12 +242,14 @@ impl Database {
|
||||
if !visited.insert(current_id.clone()) {
|
||||
break; // Cycle detected
|
||||
}
|
||||
if let Some(ref_str) = &schema.obj.r#ref {
|
||||
current_id = ref_str.clone();
|
||||
depth += 1;
|
||||
} else {
|
||||
break;
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) = &schema.obj.type_ {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
current_id = t.clone();
|
||||
depth += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
depths.insert(id, depth);
|
||||
}
|
||||
@ -201,35 +259,33 @@ impl Database {
|
||||
fn collect_descendants(&mut self) {
|
||||
let mut direct_refs: HashMap<String, Vec<String>> = HashMap::new();
|
||||
for (id, schema) in &self.schemas {
|
||||
if let Some(ref_str) = &schema.obj.r#ref {
|
||||
direct_refs
|
||||
.entry(ref_str.clone())
|
||||
.or_default()
|
||||
.push(id.clone());
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) = &schema.obj.type_ {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
direct_refs
|
||||
.entry(t.clone())
|
||||
.or_default()
|
||||
.push(id.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Cache generic descendants for $family runtime lookups
|
||||
// Cache exhaustive descendants matrix for generic $family string lookups natively
|
||||
let mut descendants = HashMap::new();
|
||||
for (id, schema) in &self.schemas {
|
||||
if let Some(family_target) = &schema.obj.family {
|
||||
let mut desc_set = HashSet::new();
|
||||
Self::collect_descendants_recursively(family_target, &direct_refs, &mut desc_set);
|
||||
let mut desc_vec: Vec<String> = desc_set.into_iter().collect();
|
||||
desc_vec.sort();
|
||||
for id in self.schemas.keys() {
|
||||
let mut desc_set = HashSet::new();
|
||||
Self::collect_descendants_recursively(id, &direct_refs, &mut desc_set);
|
||||
let mut desc_vec: Vec<String> = desc_set.into_iter().collect();
|
||||
desc_vec.sort();
|
||||
|
||||
// By placing all descendants directly onto the ID mapped location of the Family declaration,
|
||||
// we can lookup descendants natively in ValidationContext without AST replacement overrides.
|
||||
descendants.insert(id.clone(), desc_vec);
|
||||
}
|
||||
descendants.insert(id.clone(), desc_vec);
|
||||
}
|
||||
self.descendants = descendants;
|
||||
}
|
||||
|
||||
fn collect_descendants_recursively(
|
||||
target: &str,
|
||||
direct_refs: &HashMap<String, Vec<String>>,
|
||||
descendants: &mut HashSet<String>,
|
||||
direct_refs: &std::collections::HashMap<String, Vec<String>>,
|
||||
descendants: &mut std::collections::HashSet<String>,
|
||||
) {
|
||||
if let Some(children) = direct_refs.get(target) {
|
||||
for child in children {
|
||||
@ -239,218 +295,4 @@ impl Database {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn compile_schemas(&mut self) {
|
||||
// Pass 3: compile_internals across pure structure
|
||||
let schema_ids: Vec<String> = self.schemas.keys().cloned().collect();
|
||||
for id in schema_ids {
|
||||
if let Some(schema) = self.schemas.get_mut(&id) {
|
||||
schema.compile_internals();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_stems(&mut self) -> Result<(), crate::drop::Drop> {
|
||||
let mut db_stems: HashMap<String, HashMap<String, Arc<Stem>>> = HashMap::new();
|
||||
let mut errors: Vec<crate::drop::Error> = Vec::new();
|
||||
|
||||
let schema_ids: Vec<String> = self.schemas.keys().cloned().collect();
|
||||
for schema_id in schema_ids {
|
||||
if let Some(schema) = self.schemas.get(&schema_id) {
|
||||
let mut inner_map = HashMap::new();
|
||||
Self::discover_stems(
|
||||
self,
|
||||
&schema_id,
|
||||
schema,
|
||||
String::from(""),
|
||||
None,
|
||||
None,
|
||||
&mut inner_map,
|
||||
&mut errors,
|
||||
);
|
||||
if !inner_map.is_empty() {
|
||||
println!("SCHEMA: {} STEMS: {:?}", schema_id, inner_map.keys());
|
||||
db_stems.insert(schema_id, inner_map);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.stems = db_stems;
|
||||
|
||||
if !errors.is_empty() {
|
||||
return Err(crate::drop::Drop::with_errors(errors));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn discover_stems(
|
||||
db: &Database,
|
||||
root_schema_id: &str,
|
||||
schema: &Schema,
|
||||
mut current_path: String,
|
||||
parent_type: Option<String>,
|
||||
property_name: Option<String>,
|
||||
inner_map: &mut HashMap<String, Arc<Stem>>,
|
||||
errors: &mut Vec<crate::drop::Error>,
|
||||
) {
|
||||
let mut is_entity = false;
|
||||
let mut entity_type = String::new();
|
||||
|
||||
let mut examine_id = None;
|
||||
if let Some(ref r) = schema.obj.r#ref {
|
||||
examine_id = Some(r.clone());
|
||||
} else if let Some(ref id) = schema.obj.id {
|
||||
examine_id = Some(id.clone());
|
||||
}
|
||||
|
||||
if let Some(target) = examine_id {
|
||||
let parts: Vec<&str> = target.split('.').collect();
|
||||
if let Some(last_seg) = parts.last() {
|
||||
if db.types.contains_key(*last_seg) {
|
||||
is_entity = true;
|
||||
entity_type = last_seg.to_string();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut relation_col = None;
|
||||
if is_entity {
|
||||
if let (Some(pt), Some(prop)) = (&parent_type, &property_name) {
|
||||
let expected_col = format!("{}_id", prop);
|
||||
let mut found = false;
|
||||
for rel in db.relations.values() {
|
||||
if (rel.source_type == *pt && rel.destination_type == entity_type)
|
||||
|| (rel.source_type == entity_type && rel.destination_type == *pt)
|
||||
{
|
||||
if rel.source_columns.contains(&expected_col) {
|
||||
relation_col = Some(expected_col.clone());
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
relation_col = Some(expected_col);
|
||||
}
|
||||
}
|
||||
|
||||
let stem = Stem {
|
||||
r#type: entity_type.clone(),
|
||||
relation: relation_col,
|
||||
schema: Arc::new(schema.clone()),
|
||||
};
|
||||
|
||||
let mut branch_path = current_path.clone();
|
||||
if !current_path.is_empty() {
|
||||
branch_path = format!("{}/{}", current_path, entity_type);
|
||||
}
|
||||
|
||||
if inner_map.contains_key(&branch_path) {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "STEM_COLLISION".to_string(),
|
||||
message: format!("The stem path `{}` resolves to multiple Entity boundaries. This usually occurs during un-wrapped $family or oneOf polymorphic schemas where multiple Entities are directly assigned to the same property. To fix this, encapsulate the polymorphic branch.", branch_path),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: root_schema_id.to_string(),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
inner_map.insert(branch_path.clone(), Arc::new(stem));
|
||||
|
||||
// Update current_path for structural children
|
||||
current_path = branch_path;
|
||||
}
|
||||
|
||||
let next_parent = if is_entity {
|
||||
Some(entity_type.clone())
|
||||
} else {
|
||||
parent_type.clone()
|
||||
};
|
||||
|
||||
// Properties branch
|
||||
if let Some(props) = &schema.obj.properties {
|
||||
for (k, v) in props {
|
||||
// Bypass target and source properties if we are in a relationship
|
||||
if let Some(parent_str) = &next_parent {
|
||||
if let Some(pt) = db.types.get(parent_str) {
|
||||
if pt.relationship && (k == "target" || k == "source") {
|
||||
Self::discover_stems(
|
||||
db,
|
||||
root_schema_id,
|
||||
v,
|
||||
current_path.clone(),
|
||||
next_parent.clone(),
|
||||
Some(k.clone()),
|
||||
inner_map,
|
||||
errors,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Standard Property Pathing
|
||||
let next_path = if current_path.is_empty() {
|
||||
k.clone()
|
||||
} else {
|
||||
format!("{}/{}", current_path, k)
|
||||
};
|
||||
|
||||
Self::discover_stems(
|
||||
db,
|
||||
root_schema_id,
|
||||
v,
|
||||
next_path,
|
||||
next_parent.clone(),
|
||||
Some(k.clone()),
|
||||
inner_map,
|
||||
errors,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Array Item branch
|
||||
if let Some(items) = &schema.obj.items {
|
||||
Self::discover_stems(
|
||||
db,
|
||||
root_schema_id,
|
||||
items,
|
||||
current_path.clone(),
|
||||
next_parent.clone(),
|
||||
property_name.clone(),
|
||||
inner_map,
|
||||
errors,
|
||||
);
|
||||
}
|
||||
|
||||
// Polymorphism branch
|
||||
if let Some(arr) = &schema.obj.one_of {
|
||||
for v in arr {
|
||||
Self::discover_stems(
|
||||
db,
|
||||
root_schema_id,
|
||||
v.as_ref(),
|
||||
current_path.clone(),
|
||||
next_parent.clone(),
|
||||
property_name.clone(),
|
||||
inner_map,
|
||||
errors,
|
||||
);
|
||||
}
|
||||
}
|
||||
if let Some(arr) = &schema.obj.all_of {
|
||||
for v in arr {
|
||||
Self::discover_stems(
|
||||
db,
|
||||
root_schema_id,
|
||||
v.as_ref(),
|
||||
current_path.clone(),
|
||||
next_parent.clone(),
|
||||
property_name.clone(),
|
||||
inner_map,
|
||||
errors,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,10 +2,29 @@ use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::Arc;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
pub fn serialize_once_lock<T: serde::Serialize, S: serde::Serializer>(
|
||||
lock: &OnceLock<T>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error> {
|
||||
if let Some(val) = lock.get() {
|
||||
val.serialize(serializer)
|
||||
} else {
|
||||
serializer.serialize_none()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_once_lock_map_empty<K, V>(lock: &OnceLock<std::collections::BTreeMap<K, V>>) -> bool {
|
||||
lock.get().map_or(true, |m| m.is_empty())
|
||||
}
|
||||
|
||||
pub fn is_once_lock_vec_empty<T>(lock: &OnceLock<Vec<T>>) -> bool {
|
||||
lock.get().map_or(true, |v| v.is_empty())
|
||||
}
|
||||
|
||||
// Schema mirrors the Go Punc Generator's schema struct for consistency.
|
||||
// It is an order-preserving representation of a JSON Schema.
|
||||
|
||||
pub fn deserialize_some<'de, D>(deserializer: D) -> Result<Option<Value>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
@ -13,125 +32,187 @@ where
|
||||
let v = Value::deserialize(deserializer)?;
|
||||
Ok(Some(v))
|
||||
}
|
||||
|
||||
pub fn is_primitive_type(t: &str) -> bool {
|
||||
matches!(
|
||||
t,
|
||||
"string" | "number" | "integer" | "boolean" | "object" | "array" | "null"
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Case {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub when: Option<Arc<Schema>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub then: Option<Arc<Schema>>,
|
||||
#[serde(rename = "else")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub else_: Option<Arc<Schema>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct SchemaObject {
|
||||
// Core Schema Keywords
|
||||
#[serde(rename = "$id")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub id: Option<String>,
|
||||
#[serde(rename = "$ref")]
|
||||
pub r#ref: Option<String>,
|
||||
/*
|
||||
Note: The `Ref` field in the Go struct is a pointer populated by the linker.
|
||||
In Rust, we might handle this differently (e.g., separate lookup or Rc/Arc),
|
||||
so we omit the direct recursive `Ref` field for now and rely on `ref_string`.
|
||||
*/
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub title: Option<String>,
|
||||
#[serde(default)] // Allow missing type
|
||||
#[serde(rename = "type")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub type_: Option<SchemaTypeOrArray>, // Handles string or array of strings
|
||||
|
||||
// Object Keywords
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub properties: Option<BTreeMap<String, Arc<Schema>>>,
|
||||
#[serde(rename = "patternProperties")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pattern_properties: Option<BTreeMap<String, Arc<Schema>>>,
|
||||
#[serde(rename = "additionalProperties")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub additional_properties: Option<Arc<Schema>>,
|
||||
#[serde(rename = "$family")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub family: Option<String>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub required: Option<Vec<String>>,
|
||||
|
||||
// dependencies can be schema dependencies or property dependencies
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub dependencies: Option<BTreeMap<String, Dependency>>,
|
||||
|
||||
// Array Keywords
|
||||
#[serde(rename = "items")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub items: Option<Arc<Schema>>,
|
||||
#[serde(rename = "prefixItems")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub prefix_items: Option<Vec<Arc<Schema>>>,
|
||||
|
||||
// String Validation
|
||||
#[serde(rename = "minLength")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub min_length: Option<f64>,
|
||||
#[serde(rename = "maxLength")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub max_length: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub pattern: Option<String>,
|
||||
|
||||
// Array Validation
|
||||
#[serde(rename = "minItems")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub min_items: Option<f64>,
|
||||
#[serde(rename = "maxItems")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub max_items: Option<f64>,
|
||||
#[serde(rename = "uniqueItems")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub unique_items: Option<bool>,
|
||||
#[serde(rename = "contains")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub contains: Option<Arc<Schema>>,
|
||||
#[serde(rename = "minContains")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub min_contains: Option<f64>,
|
||||
#[serde(rename = "maxContains")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub max_contains: Option<f64>,
|
||||
|
||||
// Object Validation
|
||||
#[serde(rename = "minProperties")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub min_properties: Option<f64>,
|
||||
#[serde(rename = "maxProperties")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub max_properties: Option<f64>,
|
||||
#[serde(rename = "propertyNames")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub property_names: Option<Arc<Schema>>,
|
||||
|
||||
// Numeric Validation
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub format: Option<String>,
|
||||
#[serde(rename = "enum")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub enum_: Option<Vec<Value>>, // `enum` is a reserved keyword in Rust
|
||||
#[serde(
|
||||
default,
|
||||
rename = "const",
|
||||
deserialize_with = "crate::database::schema::deserialize_some"
|
||||
)]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub const_: Option<Value>,
|
||||
|
||||
// Numeric Validation
|
||||
#[serde(rename = "multipleOf")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub multiple_of: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub minimum: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub maximum: Option<f64>,
|
||||
#[serde(rename = "exclusiveMinimum")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub exclusive_minimum: Option<f64>,
|
||||
#[serde(rename = "exclusiveMaximum")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub exclusive_maximum: Option<f64>,
|
||||
|
||||
// Combining Keywords
|
||||
#[serde(rename = "allOf")]
|
||||
pub all_of: Option<Vec<Arc<Schema>>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cases: Option<Vec<Case>>,
|
||||
#[serde(rename = "oneOf")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub one_of: Option<Vec<Arc<Schema>>>,
|
||||
#[serde(rename = "not")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub not: Option<Arc<Schema>>,
|
||||
#[serde(rename = "if")]
|
||||
pub if_: Option<Arc<Schema>>,
|
||||
#[serde(rename = "then")]
|
||||
pub then_: Option<Arc<Schema>>,
|
||||
#[serde(rename = "else")]
|
||||
pub else_: Option<Arc<Schema>>,
|
||||
|
||||
// Custom Vocabularies
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub form: Option<Vec<String>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub display: Option<Vec<String>>,
|
||||
#[serde(rename = "enumNames")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub enum_names: Option<Vec<String>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub control: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub actions: Option<BTreeMap<String, Action>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub computer: Option<String>,
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub extensible: Option<bool>,
|
||||
|
||||
#[serde(rename = "compiledProperties")]
|
||||
#[serde(skip_deserializing)]
|
||||
#[serde(skip_serializing_if = "crate::database::schema::is_once_lock_vec_empty")]
|
||||
#[serde(serialize_with = "crate::database::schema::serialize_once_lock")]
|
||||
pub compiled_property_names: OnceLock<Vec<String>>,
|
||||
|
||||
#[serde(skip)]
|
||||
pub compiled_format: Option<CompiledFormat>,
|
||||
pub compiled_properties: OnceLock<BTreeMap<String, Arc<Schema>>>,
|
||||
|
||||
#[serde(rename = "compiledEdges")]
|
||||
#[serde(skip_deserializing)]
|
||||
#[serde(skip_serializing_if = "crate::database::schema::is_once_lock_map_empty")]
|
||||
#[serde(serialize_with = "crate::database::schema::serialize_once_lock")]
|
||||
pub compiled_edges: OnceLock<BTreeMap<String, crate::database::edge::Edge>>,
|
||||
|
||||
#[serde(skip)]
|
||||
pub compiled_pattern: Option<CompiledRegex>,
|
||||
pub compiled_format: OnceLock<CompiledFormat>,
|
||||
#[serde(skip)]
|
||||
pub compiled_pattern_properties: Option<Vec<(CompiledRegex, Arc<Schema>)>>,
|
||||
pub compiled_pattern: OnceLock<CompiledRegex>,
|
||||
#[serde(skip)]
|
||||
pub compiled_pattern_properties: OnceLock<Vec<(CompiledRegex, Arc<Schema>)>>,
|
||||
}
|
||||
|
||||
/// Represents a compiled format validator
|
||||
@ -175,19 +256,38 @@ impl std::ops::DerefMut for Schema {
|
||||
}
|
||||
|
||||
impl Schema {
|
||||
pub fn compile_internals(&mut self) {
|
||||
self.map_children(|child| child.compile_internals());
|
||||
|
||||
if let Some(format_str) = &self.obj.format
|
||||
&& let Some(fmt) = crate::database::formats::FORMATS.get(format_str.as_str())
|
||||
{
|
||||
self.obj.compiled_format = Some(crate::database::schema::CompiledFormat::Func(fmt.func));
|
||||
pub fn compile(
|
||||
&self,
|
||||
db: &crate::database::Database,
|
||||
visited: &mut std::collections::HashSet<String>,
|
||||
errors: &mut Vec<crate::drop::Error>,
|
||||
) {
|
||||
if self.obj.compiled_properties.get().is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(pattern_str) = &self.obj.pattern
|
||||
&& let Ok(re) = regex::Regex::new(pattern_str)
|
||||
{
|
||||
self.obj.compiled_pattern = Some(crate::database::schema::CompiledRegex(re));
|
||||
if let Some(id) = &self.obj.id {
|
||||
if !visited.insert(id.clone()) {
|
||||
return; // Break cyclical resolution
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(format_str) = &self.obj.format {
|
||||
if let Some(fmt) = crate::database::formats::FORMATS.get(format_str.as_str()) {
|
||||
let _ = self
|
||||
.obj
|
||||
.compiled_format
|
||||
.set(crate::database::schema::CompiledFormat::Func(fmt.func));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(pattern_str) = &self.obj.pattern {
|
||||
if let Ok(re) = regex::Regex::new(pattern_str) {
|
||||
let _ = self
|
||||
.obj
|
||||
.compiled_pattern
|
||||
.set(crate::database::schema::CompiledRegex(re));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(pattern_props) = &self.obj.pattern_properties {
|
||||
@ -198,34 +298,199 @@ impl Schema {
|
||||
}
|
||||
}
|
||||
if !compiled.is_empty() {
|
||||
self.obj.compiled_pattern_properties = Some(compiled);
|
||||
let _ = self.obj.compiled_pattern_properties.set(compiled);
|
||||
}
|
||||
}
|
||||
|
||||
let mut props = std::collections::BTreeMap::new();
|
||||
|
||||
// 1. Resolve INHERITANCE dependencies first
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
if let Some(parent) = db.schemas.get(t) {
|
||||
parent.compile(db, visited, errors);
|
||||
if let Some(p_props) = parent.obj.compiled_properties.get() {
|
||||
props.extend(p_props.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Multiple(types)) = &self.obj.type_ {
|
||||
let mut custom_type_count = 0;
|
||||
for t in types {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
custom_type_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if custom_type_count > 1 {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "MULTIPLE_INHERITANCE_PROHIBITED".to_string(),
|
||||
message: format!(
|
||||
"Schema '{}' attempts to extend multiple custom object pointers in its type array. Use 'oneOf' for polymorphism and tagged unions.",
|
||||
self.obj.identifier().unwrap_or("unknown".to_string())
|
||||
),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: self.obj.identifier().unwrap_or("unknown".to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
for t in types {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
if let Some(parent) = db.schemas.get(t) {
|
||||
parent.compile(db, visited, errors);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Add local properties
|
||||
if let Some(local_props) = &self.obj.properties {
|
||||
for (k, v) in local_props {
|
||||
props.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Set the OnceLock!
|
||||
let _ = self.obj.compiled_properties.set(props.clone());
|
||||
let mut names: Vec<String> = props.keys().cloned().collect();
|
||||
names.sort();
|
||||
let _ = self.obj.compiled_property_names.set(names);
|
||||
|
||||
// 4. Compute Edges natively
|
||||
let schema_edges = self.compile_edges(db, visited, &props, errors);
|
||||
let _ = self.obj.compiled_edges.set(schema_edges);
|
||||
|
||||
// 5. Build our inline children properties recursively NOW! (Depth-first search)
|
||||
if let Some(local_props) = &self.obj.properties {
|
||||
for child in local_props.values() {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
}
|
||||
if let Some(items) = &self.obj.items {
|
||||
items.compile(db, visited, errors);
|
||||
}
|
||||
if let Some(pattern_props) = &self.obj.pattern_properties {
|
||||
for child in pattern_props.values() {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
}
|
||||
if let Some(additional_props) = &self.obj.additional_properties {
|
||||
additional_props.compile(db, visited, errors);
|
||||
}
|
||||
if let Some(one_of) = &self.obj.one_of {
|
||||
for child in one_of {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
}
|
||||
if let Some(arr) = &self.obj.prefix_items {
|
||||
for child in arr {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
}
|
||||
if let Some(child) = &self.obj.not {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
if let Some(child) = &self.obj.contains {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
if let Some(cases) = &self.obj.cases {
|
||||
for c in cases {
|
||||
if let Some(child) = &c.when {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
if let Some(child) = &c.then {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
if let Some(child) = &c.else_ {
|
||||
child.compile(db, visited, errors);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(id) = &self.obj.id {
|
||||
visited.remove(id);
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused_variables)]
|
||||
fn validate_identifier(id: &str, field_name: &str, errors: &mut Vec<crate::drop::Error>) {
|
||||
#[cfg(not(test))]
|
||||
for c in id.chars() {
|
||||
if !c.is_ascii_lowercase() && !c.is_ascii_digit() && c != '_' && c != '.' {
|
||||
errors.push(crate::drop::Error {
|
||||
code: "INVALID_IDENTIFIER".to_string(),
|
||||
message: format!(
|
||||
"Invalid character '{}' in JSON Schema '{}' property: '{}'. Identifiers must exclusively contain [a-z0-9_.]",
|
||||
c, field_name, id
|
||||
),
|
||||
details: crate::drop::ErrorDetails::default(),
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn harvest(&mut self, to_insert: &mut Vec<(String, Schema)>) {
|
||||
pub fn collect_schemas(
|
||||
&mut self,
|
||||
tracking_path: Option<String>,
|
||||
to_insert: &mut Vec<(String, Schema)>,
|
||||
errors: &mut Vec<crate::drop::Error>,
|
||||
) {
|
||||
if let Some(id) = &self.obj.id {
|
||||
Self::validate_identifier(id, "$id", errors);
|
||||
to_insert.push((id.clone(), self.clone()));
|
||||
}
|
||||
self.map_children(|child| child.harvest(to_insert));
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
Self::validate_identifier(t, "type", errors);
|
||||
}
|
||||
}
|
||||
if let Some(family) = &self.obj.family {
|
||||
Self::validate_identifier(family, "$family", errors);
|
||||
}
|
||||
|
||||
// Is this schema an inline ad-hoc composition?
|
||||
// Meaning it has a tracking context, lacks an explicit $id, but extends an Entity ref with explicit properties!
|
||||
if self.obj.id.is_none() && self.obj.properties.is_some() {
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
if let Some(ref path) = tracking_path {
|
||||
to_insert.push((path.clone(), self.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Provide the path origin to children natively, prioritizing the explicit `$id` boundary if one exists
|
||||
let origin_path = self.obj.id.clone().or(tracking_path);
|
||||
|
||||
self.collect_child_schemas(origin_path, to_insert, errors);
|
||||
}
|
||||
|
||||
pub fn map_children<F>(&mut self, mut f: F)
|
||||
where
|
||||
F: FnMut(&mut Schema),
|
||||
{
|
||||
pub fn collect_child_schemas(
|
||||
&mut self,
|
||||
origin_path: Option<String>,
|
||||
to_insert: &mut Vec<(String, Schema)>,
|
||||
errors: &mut Vec<crate::drop::Error>,
|
||||
) {
|
||||
if let Some(props) = &mut self.obj.properties {
|
||||
for v in props.values_mut() {
|
||||
for (k, v) in props.iter_mut() {
|
||||
let mut inner = (**v).clone();
|
||||
f(&mut inner);
|
||||
let next_path = origin_path.as_ref().map(|o| format!("{}/{}", o, k));
|
||||
inner.collect_schemas(next_path, to_insert, errors);
|
||||
*v = Arc::new(inner);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(pattern_props) = &mut self.obj.pattern_properties {
|
||||
for v in pattern_props.values_mut() {
|
||||
for (k, v) in pattern_props.iter_mut() {
|
||||
let mut inner = (**v).clone();
|
||||
f(&mut inner);
|
||||
let next_path = origin_path.as_ref().map(|o| format!("{}/{}", o, k));
|
||||
inner.collect_schemas(next_path, to_insert, errors);
|
||||
*v = Arc::new(inner);
|
||||
}
|
||||
}
|
||||
@ -233,7 +498,7 @@ impl Schema {
|
||||
let mut map_arr = |arr: &mut Vec<Arc<Schema>>| {
|
||||
for v in arr.iter_mut() {
|
||||
let mut inner = (**v).clone();
|
||||
f(&mut inner);
|
||||
inner.collect_schemas(origin_path.clone(), to_insert, errors);
|
||||
*v = Arc::new(inner);
|
||||
}
|
||||
};
|
||||
@ -241,30 +506,328 @@ impl Schema {
|
||||
if let Some(arr) = &mut self.obj.prefix_items {
|
||||
map_arr(arr);
|
||||
}
|
||||
if let Some(arr) = &mut self.obj.all_of {
|
||||
map_arr(arr);
|
||||
}
|
||||
|
||||
if let Some(arr) = &mut self.obj.one_of {
|
||||
map_arr(arr);
|
||||
}
|
||||
|
||||
let mut map_opt = |opt: &mut Option<Arc<Schema>>| {
|
||||
let mut map_opt = |opt: &mut Option<Arc<Schema>>, pass_path: bool| {
|
||||
if let Some(v) = opt {
|
||||
let mut inner = (**v).clone();
|
||||
f(&mut inner);
|
||||
let next = if pass_path { origin_path.clone() } else { None };
|
||||
inner.collect_schemas(next, to_insert, errors);
|
||||
*v = Arc::new(inner);
|
||||
}
|
||||
};
|
||||
|
||||
map_opt(&mut self.obj.additional_properties);
|
||||
map_opt(&mut self.obj.items);
|
||||
map_opt(&mut self.obj.contains);
|
||||
map_opt(&mut self.obj.property_names);
|
||||
map_opt(&mut self.obj.not);
|
||||
map_opt(&mut self.obj.if_);
|
||||
map_opt(&mut self.obj.then_);
|
||||
map_opt(&mut self.obj.else_);
|
||||
map_opt(&mut self.obj.additional_properties, false);
|
||||
|
||||
// `items` absolutely must inherit the EXACT property path assigned to the Array wrapper!
|
||||
// This allows nested Arrays enclosing bare Entity structs to correctly register as the boundary mapping.
|
||||
map_opt(&mut self.obj.items, true);
|
||||
|
||||
map_opt(&mut self.obj.not, false);
|
||||
map_opt(&mut self.obj.contains, false);
|
||||
map_opt(&mut self.obj.property_names, false);
|
||||
if let Some(cases) = &mut self.obj.cases {
|
||||
for c in cases.iter_mut() {
|
||||
if let Some(when) = &mut c.when {
|
||||
let mut inner = (**when).clone();
|
||||
inner.collect_schemas(origin_path.clone(), to_insert, errors);
|
||||
*when = Arc::new(inner);
|
||||
}
|
||||
if let Some(then) = &mut c.then {
|
||||
let mut inner = (**then).clone();
|
||||
inner.collect_schemas(origin_path.clone(), to_insert, errors);
|
||||
*then = Arc::new(inner);
|
||||
}
|
||||
if let Some(else_) = &mut c.else_ {
|
||||
let mut inner = (**else_).clone();
|
||||
inner.collect_schemas(origin_path.clone(), to_insert, errors);
|
||||
*else_ = Arc::new(inner);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Dynamically infers and compiles all structural database relationships between this Schema
|
||||
/// and its nested children. This functions recursively traverses the JSON Schema abstract syntax
|
||||
/// tree, identifies physical PostgreSQL table boundaries, and locks the resulting relation
|
||||
/// constraint paths directly onto the `compiled_edges` map in O(1) memory.
|
||||
pub fn compile_edges(
|
||||
&self,
|
||||
db: &crate::database::Database,
|
||||
visited: &mut std::collections::HashSet<String>,
|
||||
props: &std::collections::BTreeMap<String, std::sync::Arc<Schema>>,
|
||||
errors: &mut Vec<crate::drop::Error>,
|
||||
) -> std::collections::BTreeMap<String, crate::database::edge::Edge> {
|
||||
let mut schema_edges = std::collections::BTreeMap::new();
|
||||
|
||||
// Determine the physical Database Table Name this schema structurally represents
|
||||
// Plucks the polymorphic discriminator via dot-notation (e.g. extracting "person" from "full.person")
|
||||
let mut parent_type_name = None;
|
||||
if let Some(family) = &self.obj.family {
|
||||
parent_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string());
|
||||
} else if let Some(identifier) = self.obj.identifier() {
|
||||
parent_type_name = Some(
|
||||
identifier
|
||||
.split('.')
|
||||
.next_back()
|
||||
.unwrap_or(&identifier)
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(p_type) = parent_type_name {
|
||||
// Proceed only if the resolved table physically exists within the Postgres Type hierarchy
|
||||
if db.types.contains_key(&p_type) {
|
||||
// Iterate over all discovered schema boundaries mapped inside the object
|
||||
for (prop_name, prop_schema) in props {
|
||||
let mut child_type_name = None;
|
||||
let mut target_schema = prop_schema.clone();
|
||||
let mut is_array = false;
|
||||
|
||||
// Structurally unpack the inner target entity if the object maps to an array list
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) =
|
||||
&prop_schema.obj.type_
|
||||
{
|
||||
if t == "array" {
|
||||
is_array = true;
|
||||
if let Some(items) = &prop_schema.obj.items {
|
||||
target_schema = items.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the physical Postgres table backing the nested child schema recursively
|
||||
if let Some(family) = &target_schema.obj.family {
|
||||
child_type_name = Some(family.split('.').next_back().unwrap_or(family).to_string());
|
||||
} else if let Some(ref_id) = target_schema.obj.identifier() {
|
||||
child_type_name = Some(ref_id.split('.').next_back().unwrap_or(&ref_id).to_string());
|
||||
} else if let Some(arr) = &target_schema.obj.one_of {
|
||||
if let Some(first) = arr.first() {
|
||||
if let Some(ref_id) = first.obj.identifier() {
|
||||
child_type_name =
|
||||
Some(ref_id.split('.').next_back().unwrap_or(&ref_id).to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(c_type) = child_type_name {
|
||||
if db.types.contains_key(&c_type) {
|
||||
// Ensure the child Schema's AST has accurately compiled its own physical property keys so we can
|
||||
// inject them securely for Many-to-Many Twin Deduction disambiguation matching.
|
||||
target_schema.compile(db, visited, errors);
|
||||
if let Some(compiled_target_props) = target_schema.obj.compiled_properties.get() {
|
||||
let keys_for_ambiguity: Vec<String> =
|
||||
compiled_target_props.keys().cloned().collect();
|
||||
|
||||
// Interrogate the Database catalog graph to discover the exact Foreign Key Constraint connecting the components
|
||||
if let Some((relation, is_forward)) = resolve_relation(
|
||||
db,
|
||||
&p_type,
|
||||
&c_type,
|
||||
prop_name,
|
||||
Some(&keys_for_ambiguity),
|
||||
is_array,
|
||||
self.id.as_deref(),
|
||||
&format!("/{}", prop_name),
|
||||
errors,
|
||||
) {
|
||||
schema_edges.insert(
|
||||
prop_name.clone(),
|
||||
crate::database::edge::Edge {
|
||||
constraint: relation.constraint.clone(),
|
||||
forward: is_forward,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
schema_edges
|
||||
}
|
||||
}
|
||||
|
||||
/// Inspects the Postgres pg_constraint relations catalog to securely identify
|
||||
/// the precise Foreign Key connecting a parent and child hierarchy path.
|
||||
pub(crate) fn resolve_relation<'a>(
|
||||
db: &'a crate::database::Database,
|
||||
parent_type: &str,
|
||||
child_type: &str,
|
||||
prop_name: &str,
|
||||
relative_keys: Option<&Vec<String>>,
|
||||
is_array: bool,
|
||||
schema_id: Option<&str>,
|
||||
path: &str,
|
||||
errors: &mut Vec<crate::drop::Error>,
|
||||
) -> Option<(&'a crate::database::relation::Relation, bool)> {
|
||||
// Enforce graph locality by ensuring we don't accidentally crawl to pure structural entity boundaries
|
||||
if parent_type == "entity" && child_type == "entity" {
|
||||
return None;
|
||||
}
|
||||
|
||||
let p_def = db.types.get(parent_type)?;
|
||||
let c_def = db.types.get(child_type)?;
|
||||
|
||||
let mut matching_rels = Vec::new();
|
||||
let mut directions = Vec::new();
|
||||
|
||||
// Scour the complete catalog for any Edge matching the inheritance scope of the two objects
|
||||
// This automatically binds polymorphic structures (e.g. recognizing a relationship targeting User
|
||||
// also natively binds instances specifically typed as Person).
|
||||
let mut all_rels: Vec<&crate::database::relation::Relation> = db.relations.values().collect();
|
||||
all_rels.sort_by(|a, b| a.constraint.cmp(&b.constraint));
|
||||
|
||||
for rel in all_rels {
|
||||
let mut is_forward =
|
||||
p_def.hierarchy.contains(&rel.source_type) && c_def.hierarchy.contains(&rel.destination_type);
|
||||
let is_reverse =
|
||||
p_def.hierarchy.contains(&rel.destination_type) && c_def.hierarchy.contains(&rel.source_type);
|
||||
|
||||
// Structural Cardinality Filtration:
|
||||
// If the schema requires a collection (Array), it is mathematically impossible for a pure
|
||||
// Forward scalar edge (where the parent holds exactly one UUID pointer) to fulfill a One-to-Many request.
|
||||
// Thus, if it's an array, we fully reject pure Forward edges and only accept Reverse edges (or Junction edges).
|
||||
if is_array && is_forward && !is_reverse {
|
||||
is_forward = false;
|
||||
}
|
||||
|
||||
if is_forward {
|
||||
matching_rels.push(rel);
|
||||
directions.push(true);
|
||||
} else if is_reverse {
|
||||
matching_rels.push(rel);
|
||||
directions.push(false);
|
||||
}
|
||||
}
|
||||
|
||||
// Abort relation discovery early if no hierarchical inheritance match was found
|
||||
if matching_rels.is_empty() {
|
||||
let mut details = crate::drop::ErrorDetails {
|
||||
path: path.to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
if let Some(sid) = schema_id {
|
||||
details.schema = Some(sid.to_string());
|
||||
}
|
||||
|
||||
errors.push(crate::drop::Error {
|
||||
code: "EDGE_MISSING".to_string(),
|
||||
message: format!(
|
||||
"No database relation exists between '{}' and '{}' for property '{}'",
|
||||
parent_type, child_type, prop_name
|
||||
),
|
||||
details,
|
||||
});
|
||||
return None;
|
||||
}
|
||||
|
||||
// Ideal State: The objects only share a solitary structural relation, resolving ambiguity instantly.
|
||||
if matching_rels.len() == 1 {
|
||||
return Some((matching_rels[0], directions[0]));
|
||||
}
|
||||
|
||||
let mut chosen_idx = 0;
|
||||
let mut resolved = false;
|
||||
|
||||
// Exact Prefix Disambiguation: Determine if the database specifically names this constraint
|
||||
// directly mapping to the JSON Schema property name (e.g., `fk_{child}_{property_name}`)
|
||||
for (i, rel) in matching_rels.iter().enumerate() {
|
||||
if let Some(prefix) = &rel.prefix {
|
||||
if prop_name.starts_with(prefix)
|
||||
|| prefix.starts_with(prop_name)
|
||||
|| prefix.replace("_", "") == prop_name.replace("_", "")
|
||||
{
|
||||
chosen_idx = i;
|
||||
resolved = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Complex Subgraph Resolution: The database contains multiple equally explicit foreign key constraints
|
||||
// linking these objects (such as pointing to `source` and `target` in Many-to-Many junction models).
|
||||
if !resolved && relative_keys.is_some() {
|
||||
// Twin Deduction Pass 1: We inspect the exact properties structurally defined inside the compiled payload
|
||||
// to observe which explicit relation arrow the child payload natively consumes.
|
||||
let keys = relative_keys.unwrap();
|
||||
let mut consumed_rel_idx = None;
|
||||
for (i, rel) in matching_rels.iter().enumerate() {
|
||||
if let Some(prefix) = &rel.prefix {
|
||||
if keys.contains(prefix) {
|
||||
consumed_rel_idx = Some(i);
|
||||
break; // Found the routing edge explicitly consumed by the schema payload
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Twin Deduction Pass 2: Knowing which arrow points outbound, we can mathematically deduce its twin
|
||||
// providing the reverse ownership on the same junction boundary must be the incoming Edge to the parent.
|
||||
if let Some(used_idx) = consumed_rel_idx {
|
||||
let used_rel = matching_rels[used_idx];
|
||||
let mut twin_ids = Vec::new();
|
||||
for (i, rel) in matching_rels.iter().enumerate() {
|
||||
if i != used_idx
|
||||
&& rel.source_type == used_rel.source_type
|
||||
&& rel.destination_type == used_rel.destination_type
|
||||
&& rel.prefix.is_some()
|
||||
{
|
||||
twin_ids.push(i);
|
||||
}
|
||||
}
|
||||
|
||||
if twin_ids.len() == 1 {
|
||||
chosen_idx = twin_ids[0];
|
||||
resolved = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Implicit Base Fallback: If no complex explicit paths resolve, but exactly one relation
|
||||
// sits entirely naked (without a constraint prefix), it must be the core structural parent ownership.
|
||||
if !resolved {
|
||||
let mut null_prefix_ids = Vec::new();
|
||||
for (i, rel) in matching_rels.iter().enumerate() {
|
||||
if rel.prefix.is_none() {
|
||||
null_prefix_ids.push(i);
|
||||
}
|
||||
}
|
||||
if null_prefix_ids.len() == 1 {
|
||||
chosen_idx = null_prefix_ids[0];
|
||||
resolved = true;
|
||||
}
|
||||
}
|
||||
|
||||
// If we exhausted all mathematical deduction pathways and STILL cannot isolate a single edge,
|
||||
// we must abort rather than silently guessing. Returning None prevents arbitrary SQL generation
|
||||
// and forces a clean structural error for the architect.
|
||||
if !resolved {
|
||||
let mut details = crate::drop::ErrorDetails {
|
||||
path: path.to_string(),
|
||||
context: serde_json::to_value(&matching_rels).ok(),
|
||||
cause: Some("Multiple conflicting constraints found matching prefixes".to_string()),
|
||||
..Default::default()
|
||||
};
|
||||
if let Some(sid) = schema_id {
|
||||
details.schema = Some(sid.to_string());
|
||||
}
|
||||
|
||||
errors.push(crate::drop::Error {
|
||||
code: "AMBIGUOUS_TYPE_RELATIONS".to_string(),
|
||||
message: format!(
|
||||
"Ambiguous database relation between '{}' and '{}' for property '{}'",
|
||||
parent_type, child_type, prop_name
|
||||
),
|
||||
details,
|
||||
});
|
||||
return None;
|
||||
}
|
||||
|
||||
Some((matching_rels[chosen_idx], directions[chosen_idx]))
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Schema {
|
||||
@ -302,13 +865,9 @@ impl<'de> Deserialize<'de> for Schema {
|
||||
&& obj.format.is_none()
|
||||
&& obj.enum_.is_none()
|
||||
&& obj.const_.is_none()
|
||||
&& obj.all_of.is_none()
|
||||
&& obj.cases.is_none()
|
||||
&& obj.one_of.is_none()
|
||||
&& obj.not.is_none()
|
||||
&& obj.if_.is_none()
|
||||
&& obj.then_.is_none()
|
||||
&& obj.else_.is_none()
|
||||
&& obj.r#ref.is_none()
|
||||
&& obj.family.is_none();
|
||||
|
||||
if is_empty && obj.extensible.is_none() {
|
||||
@ -322,6 +881,20 @@ impl<'de> Deserialize<'de> for Schema {
|
||||
}
|
||||
}
|
||||
|
||||
impl SchemaObject {
|
||||
pub fn identifier(&self) -> Option<String> {
|
||||
if let Some(id) = &self.id {
|
||||
return Some(id.split('.').next_back().unwrap_or("").to_string());
|
||||
}
|
||||
if let Some(SchemaTypeOrArray::Single(t)) = &self.type_ {
|
||||
if !is_primitive_type(t) {
|
||||
return Some(t.split('.').next_back().unwrap_or("").to_string());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum SchemaTypeOrArray {
|
||||
@ -331,7 +904,9 @@ pub enum SchemaTypeOrArray {
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Action {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub navigate: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub punc: Option<String>,
|
||||
}
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
||||
@ -1,17 +0,0 @@
|
||||
use crate::database::schema::Schema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Stem {
|
||||
pub r#type: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub relation: Option<String>,
|
||||
|
||||
// The actual database schema node mapping for
|
||||
// O(1) jump table execution for queryer.
|
||||
//
|
||||
// Automatically skipped from `jspg_stems()` JSON payload output.
|
||||
#[serde(skip)]
|
||||
pub schema: Arc<Schema>,
|
||||
}
|
||||
@ -15,6 +15,8 @@ pub struct Type {
|
||||
#[serde(default)]
|
||||
pub historical: bool,
|
||||
#[serde(default)]
|
||||
pub notify: bool,
|
||||
#[serde(default)]
|
||||
pub sensitive: bool,
|
||||
#[serde(default)]
|
||||
pub ownable: bool,
|
||||
|
||||
10
src/drop.rs
10
src/drop.rs
@ -64,9 +64,13 @@ pub struct Error {
|
||||
pub details: ErrorDetails,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
|
||||
pub struct ErrorDetails {
|
||||
pub path: String,
|
||||
// Extensions can be added here (package, cause, etc)
|
||||
// For now, validator only provides path
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cause: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub context: Option<Value>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub schema: Option<String>,
|
||||
}
|
||||
|
||||
@ -1,79 +0,0 @@
|
||||
# Entity Engine (jspg)
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines the architecture for moving the complex, CPU-bound row merging (`merge_entity`) and dynamic querying (`query_entity`) functionality out of PL/pgSQL and directly into the Rust-based `jspg` extension.
|
||||
|
||||
By treating the `jspg` schema registry as the absolute Single Source of Truth, we can leverage Rust and the Postgres query planner (via SPI) to achieve near O(1) execution planning for deeply nested reads, complex relational writes, and partial hydration beats.
|
||||
|
||||
## The Problem
|
||||
|
||||
Historically, `agreego.merge_entity` (PL/pgSQL) handled nested writes by segmenting JSON, resolving types, searching hierarchies, and dynamically concatenating `INSERT`/`UPDATE` statements. `agreego.query_entity` was conceived to do the same for reads (handling base security, inheritance JOINs, and filtering automatically).
|
||||
|
||||
However, this design hits three major limitations:
|
||||
1. **CPU Bound Operations**: PL/pgSQL is comparatively slow at complex string concatenation and massive JSON graph traversals.
|
||||
2. **Query Planning Cache Busting**: Generating massive, dynamic SQL strings prevents Postgres from caching query plans. `EXECUTE dynamic_sql` forces the planner to re-evaluate statistics and execution paths on every function call, leading to extreme latency spikes at scale.
|
||||
3. **The Hydration Beat Problem**: The Punc framework requires fetching specific UI "fragments" (e.g. just the `target` of a specific `contact` array element) to feed WebSockets. Hand-rolling CTEs for every possible sub-tree permutation to serve beats will quickly become unmaintainable.
|
||||
|
||||
## The Solution: Semantic Engine Database
|
||||
|
||||
By migrating `merge_entity` and `query_entity` to `jspg`, we turn the database into a pre-compiled Semantic Engine.
|
||||
|
||||
1. **Schema-to-SQL Compilation**: During the connection lifecycle (`cache_json_schemas()`), `jspg` statically analyzes the JSON Schema AST. It acts as a compiler, translating the schema layout into perfectly optimized, multi-JOIN SQL query strings for *every* node/fragment in the schema.
|
||||
2. **Prepared Statements (SPI)**: `jspg` feeds these computed SQL strings into the Postgres SPI (Server Programming Interface) using `Spi::prepare()`. Postgres calculates the query execution plan *once* and caches it in memory.
|
||||
3. **Instant Execution**: When a Punc needs data, `jspg` retrieves the cached PreparedStatement, securely binds binary parameters, and executes the pre-planned query instantly.
|
||||
|
||||
## Architecture
|
||||
|
||||
### 1. The `cache_json_schemas()` Expansion
|
||||
The initialization function must now ingest `types` and `agreego.relation` data so the internal `Registry` holds the full Relational Graph.
|
||||
|
||||
During schema compilation, if a schema is associated with a database Type, it triggers the **SQL Compiler Phase**:
|
||||
- It builds a table-resolution AST mapping to `JOIN` clauses based on foreign keys.
|
||||
- It translates JSON schema properties to `SELECT jsonb_build_object(...)`.
|
||||
- It generates static SQL for `INSERT`, `UPDATE`, and `SELECT` (including path-based fragment SELECTs).
|
||||
- It calls `Spi::prepare()` to cache these plans inside the Session Context.
|
||||
|
||||
### 2. `agreego.query_entity` (Reads)
|
||||
* **API**: `agreego.query_entity(schema_id TEXT, fragment_path TEXT, cue JSONB)`
|
||||
* **Execution**:
|
||||
* Rust locates the target Schema in memory.
|
||||
* It uses the `fragment_path` (e.g., `/` for a full read, or `/contacts/0/target` for a hydration beat) to fetch the exact PreparedStatement.
|
||||
* It binds variables (Row Level Security IDs, filtering, pagination limit/offset) parsed from the `cue`.
|
||||
* SPI returns the heavily nested, pre-aggregated `JSONB` instantly.
|
||||
|
||||
### 3. Unified Aggregations & Computeds (Schema `query` objects)
|
||||
We replace the concept of a complex string parser (PEL) with native structured JSON JSON objects using the `query` keyword.
|
||||
|
||||
A structured `query` block in the schema:
|
||||
```json
|
||||
"total": {
|
||||
"type": "number",
|
||||
"readOnly": true,
|
||||
"query": {
|
||||
"aggregate": "sum",
|
||||
"source": "lines",
|
||||
"field": "amount"
|
||||
}
|
||||
}
|
||||
```
|
||||
* **Frontend (Dart)**: The Go generator parses the JSON object directly and emits the native UI aggregation code (e.g. `lines.fold(...)`) for instant UI updates before the server responds.
|
||||
* **Backend (jspg)**: The Rust SQL compiler natively deserializes the `query` object into an internal struct. It recognizes the `aggregate` instruction and outputs a Postgres native aggregation: `(SELECT SUM(amount) FROM agreego.invoice_line WHERE invoice_id = t1.id)` as a column in the prepared `SELECT` statement.
|
||||
* **Unification**: The database-calculated value acts as the authoritative truth, synchronizing and correcting the client automatically on the resulting `beat`.
|
||||
|
||||
### 4. `agreego.merge_entity` (Writes)
|
||||
* **API**: `agreego.merge_entity(cue JSONB)`
|
||||
* **Execution**:
|
||||
* Parses the incoming `cue` JSON via `serde_json` at C-like speeds.
|
||||
* Recursively validates and *constructively masks* the tree against the strict schema.
|
||||
* Traverses the relational graph (which is fully loaded in the `jspg` registry).
|
||||
* Binds the new values directly into the cached `INSERT` or `UPDATE` SPI prepared statements for each table in the hierarchy.
|
||||
* Evaluates field differences and natively uses `pg_notify` to fire atomic row-level changes for the Go Beat framework.
|
||||
|
||||
## Roadmap
|
||||
|
||||
1. **Relational Ingestion**: Update `cache_json_schemas` to pass relational metadata (`agreego.relation` rows) into the `jspg` registry cache.
|
||||
2. **The SQL Compiler**: Build the AST-to-String compiler in Rust that reads properties, `$ref`s, and `$family` trees to piece together generic SQL.
|
||||
3. **SPI Caching**: Integrate `Spi::prepare` into the `Validator` creation phase.
|
||||
4. **Rust `merge_entity`**: Port the constructive structural extraction loop from PL/pgSQL to Rust.
|
||||
5. **Rust `query_entity`**: Abstract the query runtime, mapping Punc JSON `filters` arrays to SPI-bound parameters safely.
|
||||
19
src/jspg.rs
19
src/jspg.rs
@ -12,18 +12,21 @@ pub struct Jspg {
|
||||
}
|
||||
|
||||
impl Jspg {
|
||||
pub fn new(database_val: &serde_json::Value) -> Result<Self, crate::drop::Drop> {
|
||||
let database_instance = Database::new(database_val)?;
|
||||
pub fn new(database_val: &serde_json::Value) -> (Self, crate::drop::Drop) {
|
||||
let (database_instance, drop) = Database::new(database_val);
|
||||
let database = Arc::new(database_instance);
|
||||
let validator = Validator::new(database.clone());
|
||||
let queryer = Queryer::new(database.clone());
|
||||
let merger = Merger::new(database.clone());
|
||||
|
||||
Ok(Self {
|
||||
database,
|
||||
validator,
|
||||
queryer,
|
||||
merger,
|
||||
})
|
||||
(
|
||||
Self {
|
||||
database,
|
||||
validator,
|
||||
queryer,
|
||||
merger,
|
||||
},
|
||||
drop,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
43
src/lib.rs
43
src/lib.rs
@ -31,6 +31,9 @@ fn jspg_failure() -> JsonB {
|
||||
message: "JSPG extension has not been initialized via jspg_setup".to_string(),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
};
|
||||
let drop = crate::drop::Drop::with_errors(vec![error]);
|
||||
@ -39,25 +42,20 @@ fn jspg_failure() -> JsonB {
|
||||
|
||||
#[cfg_attr(not(test), pg_extern(strict))]
|
||||
pub fn jspg_setup(database: JsonB) -> JsonB {
|
||||
match crate::jspg::Jspg::new(&database.0) {
|
||||
Ok(new_jspg) => {
|
||||
let new_arc = Arc::new(new_jspg);
|
||||
let (new_jspg, drop) = crate::jspg::Jspg::new(&database.0);
|
||||
let new_arc = Arc::new(new_jspg);
|
||||
|
||||
// 3. ATOMIC SWAP
|
||||
{
|
||||
let mut lock = GLOBAL_JSPG.write().unwrap();
|
||||
*lock = Some(new_arc);
|
||||
}
|
||||
|
||||
let drop = crate::drop::Drop::success();
|
||||
JsonB(serde_json::to_value(drop).unwrap())
|
||||
}
|
||||
Err(drop) => JsonB(serde_json::to_value(drop).unwrap()),
|
||||
// 3. ATOMIC SWAP
|
||||
{
|
||||
let mut lock = GLOBAL_JSPG.write().unwrap();
|
||||
*lock = Some(new_arc);
|
||||
}
|
||||
|
||||
JsonB(serde_json::to_value(drop).unwrap())
|
||||
}
|
||||
|
||||
#[cfg_attr(not(test), pg_extern)]
|
||||
pub fn jspg_merge(data: JsonB) -> JsonB {
|
||||
pub fn jspg_merge(schema_id: &str, data: JsonB) -> JsonB {
|
||||
// Try to acquire a read lock to get a clone of the Engine Arc
|
||||
let engine_opt = {
|
||||
let lock = GLOBAL_JSPG.read().unwrap();
|
||||
@ -66,7 +64,7 @@ pub fn jspg_merge(data: JsonB) -> JsonB {
|
||||
|
||||
match engine_opt {
|
||||
Some(engine) => {
|
||||
let drop = engine.merger.merge(data.0);
|
||||
let drop = engine.merger.merge(schema_id, data.0);
|
||||
JsonB(serde_json::to_value(drop).unwrap())
|
||||
}
|
||||
None => jspg_failure(),
|
||||
@ -74,7 +72,7 @@ pub fn jspg_merge(data: JsonB) -> JsonB {
|
||||
}
|
||||
|
||||
#[cfg_attr(not(test), pg_extern)]
|
||||
pub fn jspg_query(schema_id: &str, stem: Option<&str>, filters: Option<JsonB>) -> JsonB {
|
||||
pub fn jspg_query(schema_id: &str, filters: Option<JsonB>) -> JsonB {
|
||||
let engine_opt = {
|
||||
let lock = GLOBAL_JSPG.read().unwrap();
|
||||
lock.clone()
|
||||
@ -84,7 +82,7 @@ pub fn jspg_query(schema_id: &str, stem: Option<&str>, filters: Option<JsonB>) -
|
||||
Some(engine) => {
|
||||
let drop = engine
|
||||
.queryer
|
||||
.query(schema_id, stem, filters.as_ref().map(|f| &f.0));
|
||||
.query(schema_id, filters.as_ref().map(|f| &f.0));
|
||||
JsonB(serde_json::to_value(drop).unwrap())
|
||||
}
|
||||
None => jspg_failure(),
|
||||
@ -111,9 +109,7 @@ pub fn jspg_validate(schema_id: &str, instance: JsonB) -> JsonB {
|
||||
}
|
||||
|
||||
#[cfg_attr(not(test), pg_extern)]
|
||||
pub fn jspg_stems() -> JsonB {
|
||||
use serde_json::{Map, Value};
|
||||
|
||||
pub fn jspg_schemas() -> JsonB {
|
||||
let engine_opt = {
|
||||
let lock = GLOBAL_JSPG.read().unwrap();
|
||||
lock.clone()
|
||||
@ -121,9 +117,12 @@ pub fn jspg_stems() -> JsonB {
|
||||
|
||||
match engine_opt {
|
||||
Some(engine) => {
|
||||
JsonB(serde_json::to_value(&engine.database.stems).unwrap_or(Value::Object(Map::new())))
|
||||
let schemas_json = serde_json::to_value(&engine.database.schemas)
|
||||
.unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
|
||||
let drop = crate::drop::Drop::success_with_val(schemas_json);
|
||||
JsonB(serde_json::to_value(drop).unwrap())
|
||||
}
|
||||
None => JsonB(Value::Object(Map::new())),
|
||||
None => jspg_failure(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -4,6 +4,7 @@
|
||||
pub mod cache;
|
||||
|
||||
use crate::database::Database;
|
||||
use crate::database::r#type::Type;
|
||||
use serde_json::Value;
|
||||
use std::sync::Arc;
|
||||
|
||||
@ -20,64 +21,158 @@ impl Merger {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn merge(&self, data: Value) -> crate::drop::Drop {
|
||||
match self.merge_internal(data) {
|
||||
Ok(val) => {
|
||||
let stripped_val = match val {
|
||||
Value::Object(mut map) => {
|
||||
pub fn merge(&self, schema_id: &str, data: Value) -> crate::drop::Drop {
|
||||
let mut notifications_queue = Vec::new();
|
||||
|
||||
let target_schema = match self.db.schemas.get(schema_id) {
|
||||
Some(s) => Arc::new(s.clone()),
|
||||
None => {
|
||||
return crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "MERGE_FAILED".to_string(),
|
||||
message: format!("Unknown schema_id: {}", schema_id),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: None,
|
||||
context: Some(data),
|
||||
schema: None,
|
||||
},
|
||||
}]);
|
||||
}
|
||||
};
|
||||
|
||||
let result = self.merge_internal(target_schema, data, &mut notifications_queue);
|
||||
|
||||
let val_resolved = match result {
|
||||
Ok(val) => val,
|
||||
Err(msg) => {
|
||||
let mut final_code = "MERGE_FAILED".to_string();
|
||||
let mut final_message = msg.clone();
|
||||
let mut final_cause = None;
|
||||
|
||||
if let Ok(Value::Object(map)) = serde_json::from_str::<Value>(&msg) {
|
||||
if let (Some(Value::String(e_msg)), Some(Value::String(e_code))) =
|
||||
(map.get("error"), map.get("code"))
|
||||
{
|
||||
final_message = e_msg.clone();
|
||||
final_code = e_code.clone();
|
||||
let mut cause_parts = Vec::new();
|
||||
if let Some(Value::String(d)) = map.get("detail") {
|
||||
if !d.is_empty() {
|
||||
cause_parts.push(d.clone());
|
||||
}
|
||||
}
|
||||
if let Some(Value::String(h)) = map.get("hint") {
|
||||
if !h.is_empty() {
|
||||
cause_parts.push(h.clone());
|
||||
}
|
||||
}
|
||||
if !cause_parts.is_empty() {
|
||||
final_cause = Some(cause_parts.join("\n"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: final_code,
|
||||
message: final_message,
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: final_cause,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}]);
|
||||
}
|
||||
};
|
||||
|
||||
// Execute the globally collected, pre-ordered notifications last!
|
||||
for notify_sql in notifications_queue {
|
||||
if let Err(e) = self.db.execute(¬ify_sql, None) {
|
||||
return crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "MERGE_FAILED".to_string(),
|
||||
message: format!("Executor Error in pre-ordered notify: {:?}", e),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}]);
|
||||
}
|
||||
}
|
||||
|
||||
let stripped_val = match val_resolved {
|
||||
Value::Object(mut map) => {
|
||||
let mut out = serde_json::Map::new();
|
||||
if let Some(id) = map.remove("id") {
|
||||
out.insert("id".to_string(), id);
|
||||
}
|
||||
Value::Object(out)
|
||||
}
|
||||
Value::Array(arr) => {
|
||||
let mut out_arr = Vec::new();
|
||||
for item in arr {
|
||||
if let Value::Object(mut map) = item {
|
||||
let mut out = serde_json::Map::new();
|
||||
if let Some(id) = map.remove("id") {
|
||||
out.insert("id".to_string(), id);
|
||||
}
|
||||
Value::Object(out)
|
||||
out_arr.push(Value::Object(out));
|
||||
} else {
|
||||
out_arr.push(Value::Null);
|
||||
}
|
||||
Value::Array(arr) => {
|
||||
let mut out_arr = Vec::new();
|
||||
for item in arr {
|
||||
if let Value::Object(mut map) = item {
|
||||
let mut out = serde_json::Map::new();
|
||||
if let Some(id) = map.remove("id") {
|
||||
out.insert("id".to_string(), id);
|
||||
}
|
||||
out_arr.push(Value::Object(out));
|
||||
} else {
|
||||
out_arr.push(Value::Null);
|
||||
}
|
||||
}
|
||||
Value::Array(out_arr)
|
||||
}
|
||||
other => other,
|
||||
};
|
||||
crate::drop::Drop::success_with_val(stripped_val)
|
||||
}
|
||||
Value::Array(out_arr)
|
||||
}
|
||||
Err(msg) => crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "MERGE_FAILED".to_string(),
|
||||
message: msg,
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
},
|
||||
}]),
|
||||
}
|
||||
other => other,
|
||||
};
|
||||
crate::drop::Drop::success_with_val(stripped_val)
|
||||
}
|
||||
|
||||
pub(crate) fn merge_internal(&self, data: Value) -> Result<Value, String> {
|
||||
pub(crate) fn merge_internal(
|
||||
&self,
|
||||
schema: Arc<crate::database::schema::Schema>,
|
||||
data: Value,
|
||||
notifications: &mut Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
match data {
|
||||
Value::Array(items) => self.merge_array(items),
|
||||
Value::Object(map) => self.merge_object(map),
|
||||
Value::Array(items) => self.merge_array(schema, items, notifications),
|
||||
Value::Object(map) => self.merge_object(schema, map, notifications),
|
||||
_ => Err("Invalid merge payload: root must be an Object or Array".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn merge_array(&self, items: Vec<Value>) -> Result<Value, String> {
|
||||
fn merge_array(
|
||||
&self,
|
||||
schema: Arc<crate::database::schema::Schema>,
|
||||
items: Vec<Value>,
|
||||
notifications: &mut Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
let mut item_schema = schema.clone();
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) = &schema.obj.type_ {
|
||||
if t == "array" {
|
||||
if let Some(items_def) = &schema.obj.items {
|
||||
item_schema = items_def.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut resolved_items = Vec::new();
|
||||
for item in items {
|
||||
let resolved = self.merge_internal(item)?;
|
||||
let resolved = self.merge_internal(item_schema.clone(), item, notifications)?;
|
||||
resolved_items.push(resolved);
|
||||
}
|
||||
Ok(Value::Array(resolved_items))
|
||||
}
|
||||
|
||||
fn merge_object(&self, obj: serde_json::Map<String, Value>) -> Result<Value, String> {
|
||||
fn merge_object(
|
||||
&self,
|
||||
schema: Arc<crate::database::schema::Schema>,
|
||||
obj: serde_json::Map<String, Value>,
|
||||
notifications: &mut Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
let queue_start = notifications.len();
|
||||
|
||||
let type_name = match obj.get("type").and_then(|v| v.as_str()) {
|
||||
Some(t) => t.to_string(),
|
||||
None => return Err("Missing required 'type' field on object".to_string()),
|
||||
@ -88,25 +183,49 @@ impl Merger {
|
||||
None => return Err(format!("Unknown entity type: {}", type_name)),
|
||||
};
|
||||
|
||||
// 1. Segment the entity: fields in type_def.fields are database fields, others are relationships
|
||||
let compiled_props = match schema.obj.compiled_properties.get() {
|
||||
Some(props) => props,
|
||||
None => return Err("Schema has no compiled properties for merging".to_string()),
|
||||
};
|
||||
|
||||
let mut entity_fields = serde_json::Map::new();
|
||||
let mut entity_objects = serde_json::Map::new();
|
||||
let mut entity_arrays = serde_json::Map::new();
|
||||
let mut entity_objects = std::collections::BTreeMap::new();
|
||||
let mut entity_arrays = std::collections::BTreeMap::new();
|
||||
|
||||
for (k, v) in obj {
|
||||
let is_field = type_def.fields.contains(&k) || k == "created";
|
||||
let typeof_v = match &v {
|
||||
Value::Object(_) => "object",
|
||||
Value::Array(_) => "array",
|
||||
_ => "other",
|
||||
};
|
||||
// Always retain system and unmapped core fields natively implicitly mapped to the Postgres tables
|
||||
if k == "id" || k == "type" || k == "created" {
|
||||
entity_fields.insert(k.clone(), v.clone());
|
||||
continue;
|
||||
}
|
||||
|
||||
if is_field {
|
||||
entity_fields.insert(k, v);
|
||||
} else if typeof_v == "object" {
|
||||
entity_objects.insert(k, v);
|
||||
} else if typeof_v == "array" {
|
||||
entity_arrays.insert(k, v);
|
||||
if let Some(prop_schema) = compiled_props.get(&k) {
|
||||
let mut is_edge = false;
|
||||
if let Some(edges) = schema.obj.compiled_edges.get() {
|
||||
if edges.contains_key(&k) {
|
||||
is_edge = true;
|
||||
}
|
||||
}
|
||||
|
||||
if is_edge {
|
||||
let typeof_v = match &v {
|
||||
Value::Object(_) => "object",
|
||||
Value::Array(_) => "array",
|
||||
_ => "field", // Malformed edge data?
|
||||
};
|
||||
if typeof_v == "object" {
|
||||
entity_objects.insert(k.clone(), (v.clone(), prop_schema.clone()));
|
||||
} else if typeof_v == "array" {
|
||||
entity_arrays.insert(k.clone(), (v.clone(), prop_schema.clone()));
|
||||
} else {
|
||||
entity_fields.insert(k.clone(), v.clone());
|
||||
}
|
||||
} else {
|
||||
// Not an edge! It's a raw Postgres column (e.g., JSONB, text[])
|
||||
entity_fields.insert(k.clone(), v.clone());
|
||||
}
|
||||
} else if type_def.fields.contains(&k) {
|
||||
entity_fields.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
|
||||
@ -115,85 +234,99 @@ impl Merger {
|
||||
|
||||
let mut entity_change_kind = None;
|
||||
let mut entity_fetched = None;
|
||||
let mut entity_replaces = None;
|
||||
|
||||
// 2. Pre-stage the entity (for non-relationships)
|
||||
if !type_def.relationship {
|
||||
let (fields, kind, fetched) =
|
||||
self.stage_entity(entity_fields.clone(), type_def, &user_id, ×tamp)?;
|
||||
let (fields, kind, fetched, replaces) =
|
||||
self.stage_entity(entity_fields, type_def, &user_id, ×tamp)?;
|
||||
entity_fields = fields;
|
||||
entity_change_kind = kind;
|
||||
entity_fetched = fetched;
|
||||
entity_replaces = replaces;
|
||||
}
|
||||
|
||||
let mut entity_response = serde_json::Map::new();
|
||||
|
||||
// 3. Handle related objects
|
||||
for (relation_name, relative_val) in entity_objects {
|
||||
for (relation_name, (relative_val, rel_schema)) in entity_objects {
|
||||
let mut relative = match relative_val {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
let relative_relation = self.get_entity_relation(type_def, &relative, &relation_name)?;
|
||||
let relative_type_name = match relative.get("type").and_then(|v| v.as_str()) {
|
||||
Some(t) => t.to_string(),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
if let Some(relation) = relative_relation {
|
||||
let parent_is_source = type_def.hierarchy.contains(&relation.source_type);
|
||||
if let Some(compiled_edges) = schema.obj.compiled_edges.get() {
|
||||
if let Some(edge) = compiled_edges.get(&relation_name) {
|
||||
if let Some(relation) = self.db.relations.get(&edge.constraint) {
|
||||
let parent_is_source = edge.forward;
|
||||
|
||||
if parent_is_source {
|
||||
// Parent holds FK to Child. Child MUST be generated FIRST.
|
||||
if !relative.contains_key("organization_id") {
|
||||
if let Some(org_id) = entity_fields.get("organization_id") {
|
||||
relative.insert("organization_id".to_string(), org_id.clone());
|
||||
if parent_is_source {
|
||||
if !relative.contains_key("organization_id") {
|
||||
if let Some(org_id) = entity_fields.get("organization_id") {
|
||||
relative.insert("organization_id".to_string(), org_id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let mut merged_relative = match self.merge_internal(
|
||||
rel_schema.clone(),
|
||||
Value::Object(relative),
|
||||
notifications,
|
||||
)? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
merged_relative.insert("type".to_string(), Value::String(relative_type_name));
|
||||
|
||||
Self::apply_entity_relation(
|
||||
&mut entity_fields,
|
||||
&relation.source_columns,
|
||||
&relation.destination_columns,
|
||||
&merged_relative,
|
||||
);
|
||||
entity_response.insert(relation_name, Value::Object(merged_relative));
|
||||
} else {
|
||||
if !relative.contains_key("organization_id") {
|
||||
if let Some(org_id) = entity_fields.get("organization_id") {
|
||||
relative.insert("organization_id".to_string(), org_id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Self::apply_entity_relation(
|
||||
&mut relative,
|
||||
&relation.source_columns,
|
||||
&relation.destination_columns,
|
||||
&entity_fields,
|
||||
);
|
||||
|
||||
let merged_relative = match self.merge_internal(
|
||||
rel_schema.clone(),
|
||||
Value::Object(relative),
|
||||
notifications,
|
||||
)? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
entity_response.insert(relation_name, Value::Object(merged_relative));
|
||||
}
|
||||
}
|
||||
|
||||
let merged_relative = match self.merge_internal(Value::Object(relative))? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
Self::apply_entity_relation(
|
||||
&mut entity_fields,
|
||||
&relation.source_columns,
|
||||
&relation.destination_columns,
|
||||
&merged_relative,
|
||||
);
|
||||
entity_response.insert(relation_name, Value::Object(merged_relative));
|
||||
} else {
|
||||
// Child holds FK back to Parent.
|
||||
if !relative.contains_key("organization_id") {
|
||||
if let Some(org_id) = entity_fields.get("organization_id") {
|
||||
relative.insert("organization_id".to_string(), org_id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Self::apply_entity_relation(
|
||||
&mut relative,
|
||||
&relation.source_columns,
|
||||
&relation.destination_columns,
|
||||
&entity_fields,
|
||||
);
|
||||
|
||||
let merged_relative = match self.merge_internal(Value::Object(relative))? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
entity_response.insert(relation_name, Value::Object(merged_relative));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Post-stage the entity (for relationships)
|
||||
if type_def.relationship {
|
||||
let (fields, kind, fetched) =
|
||||
self.stage_entity(entity_fields.clone(), type_def, &user_id, ×tamp)?;
|
||||
let (fields, kind, fetched, replaces) =
|
||||
self.stage_entity(entity_fields, type_def, &user_id, ×tamp)?;
|
||||
entity_fields = fields;
|
||||
entity_change_kind = kind;
|
||||
entity_fetched = fetched;
|
||||
entity_replaces = replaces;
|
||||
}
|
||||
|
||||
// 5. Process the main entity fields
|
||||
self.merge_entity_fields(
|
||||
entity_change_kind.as_deref().unwrap_or(""),
|
||||
&type_name,
|
||||
@ -202,13 +335,11 @@ impl Merger {
|
||||
entity_fetched.as_ref(),
|
||||
)?;
|
||||
|
||||
// Add main entity fields to response
|
||||
for (k, v) in &entity_fields {
|
||||
entity_response.insert(k.clone(), v.clone());
|
||||
}
|
||||
|
||||
// 6. Handle related arrays
|
||||
for (relation_name, relative_val) in entity_arrays {
|
||||
for (relation_name, (relative_val, rel_schema)) in entity_arrays {
|
||||
let relative_arr = match relative_val {
|
||||
Value::Array(a) => a,
|
||||
_ => continue,
|
||||
@ -218,51 +349,69 @@ impl Merger {
|
||||
continue;
|
||||
}
|
||||
|
||||
let first_relative = match &relative_arr[0] {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
if let Some(compiled_edges) = schema.obj.compiled_edges.get() {
|
||||
if let Some(edge) = compiled_edges.get(&relation_name) {
|
||||
if let Some(relation) = self.db.relations.get(&edge.constraint) {
|
||||
let mut relative_responses = Vec::new();
|
||||
for relative_item_val in relative_arr {
|
||||
if let Value::Object(mut relative_item) = relative_item_val {
|
||||
if !relative_item.contains_key("organization_id") {
|
||||
if let Some(org_id) = entity_fields.get("organization_id") {
|
||||
relative_item.insert("organization_id".to_string(), org_id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let relative_relation = self.get_entity_relation(type_def, first_relative, &relation_name)?;
|
||||
Self::apply_entity_relation(
|
||||
&mut relative_item,
|
||||
&relation.source_columns,
|
||||
&relation.destination_columns,
|
||||
&entity_fields,
|
||||
);
|
||||
|
||||
if let Some(relation) = relative_relation {
|
||||
let mut relative_responses = Vec::new();
|
||||
for relative_item_val in relative_arr {
|
||||
if let Value::Object(mut relative_item) = relative_item_val {
|
||||
if !relative_item.contains_key("organization_id") {
|
||||
if let Some(org_id) = entity_fields.get("organization_id") {
|
||||
relative_item.insert("organization_id".to_string(), org_id.clone());
|
||||
let mut item_schema = rel_schema.clone();
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t)) =
|
||||
&rel_schema.obj.type_
|
||||
{
|
||||
if t == "array" {
|
||||
if let Some(items_def) = &rel_schema.obj.items {
|
||||
item_schema = items_def.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let merged_relative = match self.merge_internal(
|
||||
item_schema,
|
||||
Value::Object(relative_item),
|
||||
notifications,
|
||||
)? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
relative_responses.push(Value::Object(merged_relative));
|
||||
}
|
||||
}
|
||||
|
||||
Self::apply_entity_relation(
|
||||
&mut relative_item,
|
||||
&relation.source_columns,
|
||||
&relation.destination_columns,
|
||||
&entity_fields,
|
||||
);
|
||||
|
||||
let merged_relative = match self.merge_internal(Value::Object(relative_item))? {
|
||||
Value::Object(m) => m,
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
relative_responses.push(Value::Object(merged_relative));
|
||||
entity_response.insert(relation_name, Value::Array(relative_responses));
|
||||
}
|
||||
}
|
||||
entity_response.insert(relation_name, Value::Array(relative_responses));
|
||||
}
|
||||
}
|
||||
|
||||
// 7. Perform change tracking
|
||||
self.merge_entity_change(
|
||||
// 7. Perform change tracking dynamically suppressing noise based on type bounds!
|
||||
let notify_sql = self.merge_entity_change(
|
||||
type_def,
|
||||
&entity_fields,
|
||||
entity_fetched.as_ref(),
|
||||
entity_change_kind.as_deref(),
|
||||
&user_id,
|
||||
×tamp,
|
||||
entity_replaces.as_deref(),
|
||||
)?;
|
||||
|
||||
if let Some(sql) = notify_sql {
|
||||
notifications.insert(queue_start, sql);
|
||||
}
|
||||
|
||||
// Produce the full tree response
|
||||
let mut final_response = serde_json::Map::new();
|
||||
if let Some(fetched) = entity_fetched {
|
||||
@ -288,13 +437,42 @@ impl Merger {
|
||||
serde_json::Map<String, Value>,
|
||||
Option<String>,
|
||||
Option<serde_json::Map<String, Value>>,
|
||||
Option<String>,
|
||||
),
|
||||
String,
|
||||
> {
|
||||
let type_name = type_def.name.as_str();
|
||||
|
||||
// 🚀 Anchor Short-Circuit Optimization
|
||||
// An anchor is STRICTLY a struct containing merely an `id` and `type`.
|
||||
// We aggressively bypass Database SPI `SELECT` fetches because there are no primitive
|
||||
// mutations to apply to the row. PostgreSQL inherently protects relationships via Foreign Keys downstream.
|
||||
let is_anchor = entity_fields.len() == 2
|
||||
&& entity_fields.contains_key("id")
|
||||
&& entity_fields.contains_key("type");
|
||||
|
||||
let has_valid_id = entity_fields
|
||||
.get("id")
|
||||
.and_then(|v| v.as_str())
|
||||
.map_or(false, |s| !s.is_empty());
|
||||
|
||||
if is_anchor && has_valid_id {
|
||||
return Ok((entity_fields, None, None, None));
|
||||
}
|
||||
|
||||
let entity_fetched = self.fetch_entity(&entity_fields, type_def)?;
|
||||
|
||||
let mut replaces_id = None;
|
||||
if let Some(ref fetched_row) = entity_fetched {
|
||||
let provided_id = entity_fields.get("id").and_then(|v| v.as_str());
|
||||
let fetched_id = fetched_row.get("id").and_then(|v| v.as_str());
|
||||
if let (Some(pid), Some(fid)) = (provided_id, fetched_id) {
|
||||
if !pid.is_empty() && pid != fid {
|
||||
replaces_id = Some(pid.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let system_keys = vec![
|
||||
"id".to_string(),
|
||||
"type".to_string(),
|
||||
@ -344,7 +522,7 @@ impl Merger {
|
||||
);
|
||||
|
||||
entity_fields = new_fields;
|
||||
} else if changes.is_empty() {
|
||||
} else if changes.is_empty() && replaces_id.is_none() {
|
||||
let mut new_fields = serde_json::Map::new();
|
||||
new_fields.insert(
|
||||
"id".to_string(),
|
||||
@ -360,6 +538,8 @@ impl Merger {
|
||||
.unwrap_or(false);
|
||||
entity_change_kind = if is_archived {
|
||||
Some("delete".to_string())
|
||||
} else if changes.is_empty() && replaces_id.is_some() {
|
||||
Some("replace".to_string())
|
||||
} else {
|
||||
Some("update".to_string())
|
||||
};
|
||||
@ -382,7 +562,12 @@ impl Merger {
|
||||
entity_fields = new_fields;
|
||||
}
|
||||
|
||||
Ok((entity_fields, entity_change_kind, entity_fetched))
|
||||
Ok((
|
||||
entity_fields,
|
||||
entity_change_kind,
|
||||
entity_fetched,
|
||||
replaces_id,
|
||||
))
|
||||
}
|
||||
|
||||
fn fetch_entity(
|
||||
@ -437,11 +622,14 @@ impl Merger {
|
||||
template
|
||||
};
|
||||
|
||||
let where_clause = if let Some(id) = id_val {
|
||||
format!("WHERE t1.id = {}", Self::quote_literal(id))
|
||||
} else if lookup_complete {
|
||||
let mut lookup_predicates = Vec::new();
|
||||
let mut where_parts = Vec::new();
|
||||
|
||||
if let Some(id) = id_val {
|
||||
where_parts.push(format!("t1.id = {}", Self::quote_literal(id)));
|
||||
}
|
||||
|
||||
if lookup_complete {
|
||||
let mut lookup_predicates = Vec::new();
|
||||
for column in &entity_type.lookup_fields {
|
||||
let val = entity_fields.get(column).unwrap_or(&Value::Null);
|
||||
if column == "type" {
|
||||
@ -450,10 +638,14 @@ impl Merger {
|
||||
lookup_predicates.push(format!("\"{}\" = {}", column, Self::quote_literal(val)));
|
||||
}
|
||||
}
|
||||
format!("WHERE {}", lookup_predicates.join(" AND "))
|
||||
} else {
|
||||
where_parts.push(format!("({})", lookup_predicates.join(" AND ")));
|
||||
}
|
||||
|
||||
if where_parts.is_empty() {
|
||||
return Ok(None);
|
||||
};
|
||||
}
|
||||
|
||||
let where_clause = format!("WHERE {}", where_parts.join(" OR "));
|
||||
|
||||
let final_sql = format!("{} {}", fetch_sql_template, where_clause);
|
||||
|
||||
@ -549,11 +741,7 @@ impl Merger {
|
||||
for key in &sorted_keys {
|
||||
columns.push(format!("\"{}\"", key));
|
||||
let val = entity_pairs.get(key).unwrap();
|
||||
if val.as_str() == Some("") {
|
||||
values.push("NULL".to_string());
|
||||
} else {
|
||||
values.push(Self::quote_literal(val));
|
||||
}
|
||||
values.push(Self::format_sql_value(val, key, entity_type));
|
||||
}
|
||||
|
||||
if columns.is_empty() {
|
||||
@ -566,10 +754,7 @@ impl Merger {
|
||||
columns.join(", "),
|
||||
values.join(", ")
|
||||
);
|
||||
self
|
||||
.db
|
||||
.execute(&sql, None)
|
||||
.map_err(|e| format!("SPI Error in INSERT: {:?}", e))?;
|
||||
self.db.execute(&sql, None)?;
|
||||
} else if change_kind == "update" || change_kind == "delete" {
|
||||
entity_pairs.remove("id");
|
||||
entity_pairs.remove("type");
|
||||
@ -587,7 +772,11 @@ impl Merger {
|
||||
if val.as_str() == Some("") {
|
||||
set_clauses.push(format!("\"{}\" = NULL", key));
|
||||
} else {
|
||||
set_clauses.push(format!("\"{}\" = {}", key, Self::quote_literal(val)));
|
||||
set_clauses.push(format!(
|
||||
"\"{}\" = {}",
|
||||
key,
|
||||
Self::format_sql_value(val, key, entity_type)
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@ -597,10 +786,7 @@ impl Merger {
|
||||
set_clauses.join(", "),
|
||||
Self::quote_literal(&Value::String(id_str.to_string()))
|
||||
);
|
||||
self
|
||||
.db
|
||||
.execute(&sql, None)
|
||||
.map_err(|e| format!("SPI Error in UPDATE: {:?}", e))?;
|
||||
self.db.execute(&sql, None)?;
|
||||
}
|
||||
}
|
||||
|
||||
@ -609,24 +795,27 @@ impl Merger {
|
||||
|
||||
fn merge_entity_change(
|
||||
&self,
|
||||
type_obj: &Type,
|
||||
entity_fields: &serde_json::Map<String, Value>,
|
||||
entity_fetched: Option<&serde_json::Map<String, Value>>,
|
||||
entity_change_kind: Option<&str>,
|
||||
user_id: &str,
|
||||
timestamp: &str,
|
||||
) -> Result<(), String> {
|
||||
replaces_id: Option<&str>,
|
||||
) -> Result<Option<String>, String> {
|
||||
let change_kind = match entity_change_kind {
|
||||
Some(k) => k,
|
||||
None => return Ok(()),
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let id_str = entity_fields.get("id").unwrap();
|
||||
let type_name = entity_fields.get("type").unwrap();
|
||||
|
||||
let mut changes = serde_json::Map::new();
|
||||
let is_update = change_kind == "update" || change_kind == "delete";
|
||||
let mut old_vals = serde_json::Map::new();
|
||||
let mut new_vals = serde_json::Map::new();
|
||||
let exists = change_kind == "update" || change_kind == "delete" || change_kind == "replace";
|
||||
|
||||
if !is_update {
|
||||
if !exists {
|
||||
let system_keys = vec![
|
||||
"id".to_string(),
|
||||
"created_by".to_string(),
|
||||
@ -636,7 +825,7 @@ impl Merger {
|
||||
];
|
||||
for (k, v) in entity_fields {
|
||||
if !system_keys.contains(k) {
|
||||
changes.insert(k.clone(), v.clone());
|
||||
new_vals.insert(k.clone(), v.clone());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -653,16 +842,17 @@ impl Merger {
|
||||
if let Some(fetched) = entity_fetched {
|
||||
let old_val = fetched.get(k).unwrap_or(&Value::Null);
|
||||
if v != old_val {
|
||||
changes.insert(k.clone(), v.clone());
|
||||
new_vals.insert(k.clone(), v.clone());
|
||||
old_vals.insert(k.clone(), old_val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
changes.insert("type".to_string(), type_name.clone());
|
||||
new_vals.insert("type".to_string(), type_name.clone());
|
||||
}
|
||||
|
||||
let mut complete = entity_fields.clone();
|
||||
if is_update {
|
||||
if exists {
|
||||
if let Some(fetched) = entity_fetched {
|
||||
let mut temp = fetched.clone();
|
||||
for (k, v) in entity_fields {
|
||||
@ -672,37 +862,49 @@ impl Merger {
|
||||
}
|
||||
}
|
||||
|
||||
let new_val_obj = Value::Object(new_vals);
|
||||
let old_val_obj = if old_vals.is_empty() {
|
||||
Value::Null
|
||||
} else {
|
||||
Value::Object(old_vals)
|
||||
};
|
||||
|
||||
let mut notification = serde_json::Map::new();
|
||||
notification.insert("complete".to_string(), Value::Object(complete));
|
||||
if is_update {
|
||||
notification.insert("changes".to_string(), Value::Object(changes.clone()));
|
||||
notification.insert("new".to_string(), new_val_obj.clone());
|
||||
|
||||
if old_val_obj != Value::Null {
|
||||
notification.insert("old".to_string(), old_val_obj.clone());
|
||||
}
|
||||
|
||||
let change_sql = format!(
|
||||
"INSERT INTO agreego.change (changes, entity_id, id, kind, modified_at, modified_by) VALUES ({}, {}, {}, {}, {}, {})",
|
||||
Self::quote_literal(&Value::Object(changes)),
|
||||
Self::quote_literal(id_str),
|
||||
Self::quote_literal(&Value::String(uuid::Uuid::new_v4().to_string())),
|
||||
Self::quote_literal(&Value::String(change_kind.to_string())),
|
||||
Self::quote_literal(&Value::String(timestamp.to_string())),
|
||||
Self::quote_literal(&Value::String(user_id.to_string()))
|
||||
);
|
||||
if let Some(rep) = replaces_id {
|
||||
notification.insert("replaces".to_string(), Value::String(rep.to_string()));
|
||||
}
|
||||
|
||||
let notify_sql = format!(
|
||||
"SELECT pg_notify('entity', {})",
|
||||
Self::quote_literal(&Value::String(Value::Object(notification).to_string()))
|
||||
);
|
||||
let mut notify_sql = None;
|
||||
if type_obj.historical && change_kind != "replace" {
|
||||
let change_sql = format!(
|
||||
"INSERT INTO agreego.change (\"old\", \"new\", entity_id, id, kind, modified_at, modified_by) VALUES ({}, {}, {}, {}, {}, {}, {})",
|
||||
Self::quote_literal(&old_val_obj),
|
||||
Self::quote_literal(&new_val_obj),
|
||||
Self::quote_literal(id_str),
|
||||
Self::quote_literal(&Value::String(uuid::Uuid::new_v4().to_string())),
|
||||
Self::quote_literal(&Value::String(change_kind.to_string())),
|
||||
Self::quote_literal(&Value::String(timestamp.to_string())),
|
||||
Self::quote_literal(&Value::String(user_id.to_string()))
|
||||
);
|
||||
|
||||
self
|
||||
.db
|
||||
.execute(&change_sql, None)
|
||||
.map_err(|e| format!("Executor Error in change: {:?}", e))?;
|
||||
self
|
||||
.db
|
||||
.execute(¬ify_sql, None)
|
||||
.map_err(|e| format!("Executor Error in notify: {:?}", e))?;
|
||||
self.db.execute(&change_sql, None)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
if type_obj.notify {
|
||||
notify_sql = Some(format!(
|
||||
"SELECT pg_notify('entity', {})",
|
||||
Self::quote_literal(&Value::String(Value::Object(notification).to_string()))
|
||||
));
|
||||
}
|
||||
|
||||
Ok(notify_sql)
|
||||
}
|
||||
|
||||
fn compare_entities(
|
||||
@ -736,101 +938,7 @@ impl Merger {
|
||||
changes
|
||||
}
|
||||
|
||||
fn reduce_entity_relations(
|
||||
&self,
|
||||
mut matching_relations: Vec<crate::database::relation::Relation>,
|
||||
relative: &serde_json::Map<String, Value>,
|
||||
relation_name: &str,
|
||||
) -> Result<Option<crate::database::relation::Relation>, String> {
|
||||
if matching_relations.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
if matching_relations.len() == 1 {
|
||||
return Ok(Some(matching_relations.pop().unwrap()));
|
||||
}
|
||||
|
||||
let exact_match: Vec<_> = matching_relations
|
||||
.iter()
|
||||
.filter(|r| r.prefix.as_deref() == Some(relation_name))
|
||||
.cloned()
|
||||
.collect();
|
||||
if exact_match.len() == 1 {
|
||||
return Ok(Some(exact_match.into_iter().next().unwrap()));
|
||||
}
|
||||
|
||||
matching_relations.retain(|r| {
|
||||
if let Some(prefix) = &r.prefix {
|
||||
!relative.contains_key(prefix)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
|
||||
if matching_relations.len() == 1 {
|
||||
Ok(Some(matching_relations.pop().unwrap()))
|
||||
} else {
|
||||
let constraints: Vec<_> = matching_relations
|
||||
.iter()
|
||||
.map(|r| r.constraint.clone())
|
||||
.collect();
|
||||
Err(format!(
|
||||
"AMBIGUOUS_TYPE_RELATIONS: Could not reduce ambiguous type relations: {}",
|
||||
constraints.join(", ")
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn get_entity_relation(
|
||||
&self,
|
||||
entity_type: &crate::database::r#type::Type,
|
||||
relative: &serde_json::Map<String, Value>,
|
||||
relation_name: &str,
|
||||
) -> Result<Option<crate::database::relation::Relation>, String> {
|
||||
let relative_type_name = match relative.get("type").and_then(|v| v.as_str()) {
|
||||
Some(t) => t,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let relative_type = match self.db.types.get(relative_type_name) {
|
||||
Some(t) => t,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let mut relative_relations: Vec<crate::database::relation::Relation> = Vec::new();
|
||||
|
||||
for r in self.db.relations.values() {
|
||||
if r.source_type != "entity" && r.destination_type != "entity" {
|
||||
let condition1 = relative_type.hierarchy.contains(&r.source_type)
|
||||
&& entity_type.hierarchy.contains(&r.destination_type);
|
||||
let condition2 = entity_type.hierarchy.contains(&r.source_type)
|
||||
&& relative_type.hierarchy.contains(&r.destination_type);
|
||||
|
||||
if condition1 || condition2 {
|
||||
relative_relations.push(r.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut relative_relation =
|
||||
self.reduce_entity_relations(relative_relations, relative, relation_name)?;
|
||||
|
||||
if relative_relation.is_none() {
|
||||
let mut poly_relations: Vec<crate::database::relation::Relation> = Vec::new();
|
||||
for r in self.db.relations.values() {
|
||||
if r.destination_type == "entity" {
|
||||
let condition1 = relative_type.hierarchy.contains(&r.source_type);
|
||||
let condition2 = entity_type.hierarchy.contains(&r.source_type);
|
||||
|
||||
if condition1 || condition2 {
|
||||
poly_relations.push(r.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
relative_relation = self.reduce_entity_relations(poly_relations, relative, relation_name)?;
|
||||
}
|
||||
|
||||
Ok(relative_relation)
|
||||
}
|
||||
// Helper Functions
|
||||
|
||||
fn apply_entity_relation(
|
||||
source_entity: &mut serde_json::Map<String, Value>,
|
||||
@ -848,6 +956,34 @@ impl Merger {
|
||||
}
|
||||
}
|
||||
|
||||
fn format_sql_value(val: &Value, key: &str, entity_type: &Type) -> String {
|
||||
if val.as_str() == Some("") {
|
||||
return "NULL".to_string();
|
||||
}
|
||||
|
||||
let mut is_pg_array = false;
|
||||
if let Some(field_types_map) = entity_type.field_types.as_ref().and_then(|v| v.as_object()) {
|
||||
if let Some(t_val) = field_types_map.get(key) {
|
||||
if let Some(t_str) = t_val.as_str() {
|
||||
if t_str.starts_with('_') {
|
||||
is_pg_array = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if is_pg_array && val.is_array() {
|
||||
let mut s = val.to_string();
|
||||
if s.starts_with('[') && s.ends_with(']') {
|
||||
s.replace_range(0..1, "{");
|
||||
s.replace_range(s.len() - 1..s.len(), "}");
|
||||
}
|
||||
Self::quote_literal(&Value::String(s))
|
||||
} else {
|
||||
Self::quote_literal(val)
|
||||
}
|
||||
}
|
||||
|
||||
fn quote_literal(val: &Value) -> String {
|
||||
match val {
|
||||
Value::Null => "NULL".to_string(),
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -21,7 +21,6 @@ impl Queryer {
|
||||
pub fn query(
|
||||
&self,
|
||||
schema_id: &str,
|
||||
stem_opt: Option<&str>,
|
||||
filters: Option<&serde_json::Value>,
|
||||
) -> crate::drop::Drop {
|
||||
let filters_map = filters.and_then(|f| f.as_object());
|
||||
@ -32,25 +31,66 @@ impl Queryer {
|
||||
Err(msg) => {
|
||||
return crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "FILTER_PARSE_FAILED".to_string(),
|
||||
message: msg,
|
||||
message: msg.clone(),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(), // filters apply to the root query
|
||||
cause: Some(msg),
|
||||
context: filters.cloned(),
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}]);
|
||||
}
|
||||
};
|
||||
|
||||
let stem_key = stem_opt.unwrap_or("/");
|
||||
let cache_key = format!("{}(Stem:{}):{}", schema_id, stem_key, filter_keys.join(","));
|
||||
let cache_key = format!("{}:{}", schema_id, filter_keys.join(","));
|
||||
|
||||
// 2. Fetch from cache or compile
|
||||
let sql = match self.get_or_compile_sql(&cache_key, schema_id, stem_opt, &filter_keys) {
|
||||
let sql = match self.get_or_compile_sql(&cache_key, schema_id, &filter_keys) {
|
||||
Ok(sql) => sql,
|
||||
Err(drop) => return drop,
|
||||
};
|
||||
|
||||
// 3. Execute via Database Executor
|
||||
self.execute_sql(schema_id, &sql, &args)
|
||||
self.execute_sql(schema_id, &sql, args)
|
||||
}
|
||||
|
||||
fn extract_filters(
|
||||
prefix: String,
|
||||
val: &serde_json::Value,
|
||||
entries: &mut Vec<(String, serde_json::Value)>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(obj) = val.as_object() {
|
||||
let mut is_op_obj = false;
|
||||
if let Some(first_key) = obj.keys().next() {
|
||||
if first_key.starts_with('$') {
|
||||
is_op_obj = true;
|
||||
}
|
||||
}
|
||||
|
||||
if is_op_obj {
|
||||
for (op, op_val) in obj {
|
||||
if !op.starts_with('$') {
|
||||
return Err(format!("Filter operator must start with '$', got: {}", op));
|
||||
}
|
||||
entries.push((format!("{}:{}", prefix, op), op_val.clone()));
|
||||
}
|
||||
} else {
|
||||
for (k, v) in obj {
|
||||
let next_prefix = if prefix.is_empty() {
|
||||
k.clone()
|
||||
} else {
|
||||
format!("{}/{}", prefix, k)
|
||||
};
|
||||
Self::extract_filters(next_prefix, v, entries)?;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(format!(
|
||||
"Filter for path '{}' must be an operator object like {{$eq: ...}} or a nested map.",
|
||||
prefix
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_filter_entries(
|
||||
@ -60,19 +100,7 @@ impl Queryer {
|
||||
let mut filter_entries: Vec<(String, serde_json::Value)> = Vec::new();
|
||||
if let Some(fm) = filters_map {
|
||||
for (key, val) in fm {
|
||||
if let Some(obj) = val.as_object() {
|
||||
for (op, op_val) in obj {
|
||||
if !op.starts_with('$') {
|
||||
return Err(format!("Filter operator must start with '$', got: {}", op));
|
||||
}
|
||||
filter_entries.push((format!("{}:{}", key, op), op_val.clone()));
|
||||
}
|
||||
} else {
|
||||
return Err(format!(
|
||||
"Filter for field '{}' must be an object with operators like $eq, $in, etc.",
|
||||
key
|
||||
));
|
||||
}
|
||||
Self::extract_filters(key.clone(), val, &mut filter_entries)?;
|
||||
}
|
||||
}
|
||||
filter_entries.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
@ -87,15 +115,19 @@ impl Queryer {
|
||||
&self,
|
||||
cache_key: &str,
|
||||
schema_id: &str,
|
||||
stem_opt: Option<&str>,
|
||||
filter_keys: &[String],
|
||||
) -> Result<String, crate::drop::Drop> {
|
||||
if let Some(cached_sql) = self.cache.get(cache_key) {
|
||||
return Ok(cached_sql.value().clone());
|
||||
}
|
||||
|
||||
let compiler = compiler::SqlCompiler::new(self.db.clone());
|
||||
match compiler.compile(schema_id, stem_opt, filter_keys) {
|
||||
let compiler = compiler::Compiler {
|
||||
db: &self.db,
|
||||
filter_keys: filter_keys,
|
||||
alias_counter: 0,
|
||||
};
|
||||
|
||||
match compiler.compile(schema_id, filter_keys) {
|
||||
Ok(compiled_sql) => {
|
||||
self
|
||||
.cache
|
||||
@ -104,9 +136,12 @@ impl Queryer {
|
||||
}
|
||||
Err(e) => Err(crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "QUERY_COMPILATION_FAILED".to_string(),
|
||||
message: e,
|
||||
message: e.clone(),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(),
|
||||
cause: Some(e),
|
||||
context: None,
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}])),
|
||||
}
|
||||
@ -116,7 +151,7 @@ impl Queryer {
|
||||
&self,
|
||||
schema_id: &str,
|
||||
sql: &str,
|
||||
args: &[serde_json::Value],
|
||||
args: Vec<serde_json::Value>,
|
||||
) -> crate::drop::Drop {
|
||||
match self.db.query(sql, Some(args)) {
|
||||
Ok(serde_json::Value::Array(table)) => {
|
||||
@ -130,14 +165,20 @@ impl Queryer {
|
||||
code: "QUERY_FAILED".to_string(),
|
||||
message: format!("Expected array from generic query, got: {:?}", other),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(),
|
||||
cause: Some(format!("Expected array, got {}", other)),
|
||||
context: Some(serde_json::json!([sql])),
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}]),
|
||||
Err(e) => crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: "QUERY_FAILED".to_string(),
|
||||
message: format!("SPI error in queryer: {}", e),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: schema_id.to_string(),
|
||||
path: "".to_string(),
|
||||
cause: Some(format!("SPI error in queryer: {}", e)),
|
||||
context: Some(serde_json::json!([sql])),
|
||||
schema: Some(schema_id.to_string()),
|
||||
},
|
||||
}]),
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -10,7 +10,7 @@ fn test_library_api() {
|
||||
// 1. Initially, schemas are not cached.
|
||||
|
||||
// Expected uninitialized drop format: errors + null response
|
||||
let uninitialized_drop = jspg_validate("test_schema", JsonB(json!({})));
|
||||
let uninitialized_drop = jspg_validate("source_schema", JsonB(json!({})));
|
||||
assert_eq!(
|
||||
uninitialized_drop.0,
|
||||
json!({
|
||||
@ -27,17 +27,46 @@ fn test_library_api() {
|
||||
let db_json = json!({
|
||||
"puncs": [],
|
||||
"enums": [],
|
||||
"relations": [],
|
||||
"types": [{
|
||||
"schemas": [{
|
||||
"$id": "test_schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": { "type": "string" }
|
||||
},
|
||||
"required": ["name"]
|
||||
}]
|
||||
}]
|
||||
"relations": [
|
||||
{
|
||||
"id": "11111111-1111-1111-1111-111111111111",
|
||||
"type": "relation",
|
||||
"constraint": "fk_test_target",
|
||||
"source_type": "source_schema",
|
||||
"source_columns": ["target_id"],
|
||||
"destination_type": "target_schema",
|
||||
"destination_columns": ["id"],
|
||||
"prefix": "target"
|
||||
}
|
||||
],
|
||||
"types": [
|
||||
{
|
||||
"name": "source_schema",
|
||||
"variations": ["source_schema"],
|
||||
"hierarchy": ["source_schema", "entity"],
|
||||
"schemas": [{
|
||||
"$id": "source_schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": { "type": "string" },
|
||||
"target": { "type": "target_schema" }
|
||||
},
|
||||
"required": ["name"]
|
||||
}]
|
||||
},
|
||||
{
|
||||
"name": "target_schema",
|
||||
"variations": ["target_schema"],
|
||||
"hierarchy": ["target_schema", "entity"],
|
||||
"schemas": [{
|
||||
"$id": "target_schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": { "type": "number" }
|
||||
}
|
||||
}]
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let cache_drop = jspg_setup(JsonB(db_json));
|
||||
@ -49,8 +78,46 @@ fn test_library_api() {
|
||||
})
|
||||
);
|
||||
|
||||
// 3. Validate jspg_schemas
|
||||
let schemas_drop = jspg_schemas();
|
||||
assert_eq!(
|
||||
schemas_drop.0,
|
||||
json!({
|
||||
"type": "drop",
|
||||
"response": {
|
||||
"source_schema": {
|
||||
"$id": "source_schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": { "type": "string" },
|
||||
"target": {
|
||||
"type": "target_schema",
|
||||
"compiledProperties": ["value"]
|
||||
}
|
||||
},
|
||||
"required": ["name"],
|
||||
"compiledProperties": ["name", "target"],
|
||||
"compiledEdges": {
|
||||
"target": {
|
||||
"constraint": "fk_test_target",
|
||||
"forward": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"target_schema": {
|
||||
"$id": "target_schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": { "type": "number" }
|
||||
},
|
||||
"compiledProperties": ["value"]
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// 4. Validate Happy Path
|
||||
let happy_drop = jspg_validate("test_schema", JsonB(json!({"name": "Neo"})));
|
||||
let happy_drop = jspg_validate("source_schema", JsonB(json!({"type": "source_schema", "name": "Neo"})));
|
||||
assert_eq!(
|
||||
happy_drop.0,
|
||||
json!({
|
||||
@ -60,7 +127,7 @@ fn test_library_api() {
|
||||
);
|
||||
|
||||
// 5. Validate Unhappy Path
|
||||
let unhappy_drop = jspg_validate("test_schema", JsonB(json!({"wrong": "data"})));
|
||||
let unhappy_drop = jspg_validate("source_schema", JsonB(json!({"type": "source_schema", "wrong": "data"})));
|
||||
assert_eq!(
|
||||
unhappy_drop.0,
|
||||
json!({
|
||||
@ -69,12 +136,12 @@ fn test_library_api() {
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"message": "Missing name",
|
||||
"details": { "path": "/name" }
|
||||
"details": { "path": "name" }
|
||||
},
|
||||
{
|
||||
"code": "STRICT_PROPERTY_VIOLATION",
|
||||
"message": "Unexpected property 'wrong'",
|
||||
"details": { "path": "/wrong" }
|
||||
"details": { "path": "wrong" }
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
@ -1,19 +1,10 @@
|
||||
use crate::tests::types::Suite;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::sync::{Arc, OnceLock, RwLock};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct TestSuite {
|
||||
#[allow(dead_code)]
|
||||
pub description: String,
|
||||
pub database: serde_json::Value,
|
||||
pub tests: Vec<TestCase>,
|
||||
}
|
||||
|
||||
use crate::tests::types::TestCase;
|
||||
use serde_json::Value;
|
||||
|
||||
pub fn deserialize_some<'de, D>(deserializer: D) -> Result<Option<Value>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
@ -23,7 +14,7 @@ where
|
||||
}
|
||||
|
||||
// Type alias for easier reading
|
||||
type CompiledSuite = Arc<Vec<(TestSuite, Arc<crate::database::Database>)>>;
|
||||
type CompiledSuite = Arc<Vec<(Suite, Arc<Result<Arc<crate::database::Database>, crate::drop::Drop>>)>>;
|
||||
|
||||
// Global cache mapping filename -> Vector of (Parsed JSON suite, Compiled Database)
|
||||
static CACHE: OnceLock<RwLock<HashMap<String, CompiledSuite>>> = OnceLock::new();
|
||||
@ -46,25 +37,18 @@ fn get_cached_file(path: &str) -> CompiledSuite {
|
||||
} else {
|
||||
let content =
|
||||
fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read file: {}", path));
|
||||
let suites: Vec<TestSuite> = serde_json::from_str(&content)
|
||||
let suites: Vec<Suite> = serde_json::from_str(&content)
|
||||
.unwrap_or_else(|e| panic!("Failed to parse JSON in {}: {}", path, e));
|
||||
|
||||
let mut compiled_suites = Vec::new();
|
||||
for suite in suites {
|
||||
let db_result = crate::database::Database::new(&suite.database);
|
||||
if let Err(drop) = db_result {
|
||||
let error_messages: Vec<String> = drop
|
||||
.errors
|
||||
.into_iter()
|
||||
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
|
||||
.collect();
|
||||
panic!(
|
||||
"System Setup Compilation failed for {}:\n{}",
|
||||
path,
|
||||
error_messages.join("\n")
|
||||
);
|
||||
}
|
||||
compiled_suites.push((suite, Arc::new(db_result.unwrap())));
|
||||
let (db, drop) = crate::database::Database::new(&suite.database);
|
||||
let compiled_db = if drop.errors.is_empty() {
|
||||
Ok(Arc::new(db))
|
||||
} else {
|
||||
Err(drop)
|
||||
};
|
||||
compiled_suites.push((suite, Arc::new(compiled_db)));
|
||||
}
|
||||
|
||||
let new_data = Arc::new(compiled_suites);
|
||||
@ -94,11 +78,46 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
|
||||
let test = &group.tests[case_idx];
|
||||
let mut failures = Vec::<String>::new();
|
||||
|
||||
// For validate/merge/query, if setup failed we must structurally fail this test
|
||||
let db_unwrapped = if test.action.as_str() != "compile" {
|
||||
match &**db {
|
||||
Ok(valid_db) => Some(valid_db.clone()),
|
||||
Err(drop) => {
|
||||
let error_messages: Vec<String> = drop
|
||||
.errors
|
||||
.iter()
|
||||
.map(|e| format!("Error {} at path {}: {}", e.code, e.details.path, e.message))
|
||||
.collect();
|
||||
failures.push(format!(
|
||||
"[{}] Cannot run '{}' test '{}': System Setup Compilation structurally failed:\n{}",
|
||||
group.description, test.action, test.description, error_messages.join("\n")
|
||||
));
|
||||
None
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if !failures.is_empty() {
|
||||
return Err(failures.join("\n"));
|
||||
}
|
||||
|
||||
// 4. Run Tests
|
||||
|
||||
match test.action.as_str() {
|
||||
"compile" => {
|
||||
let result = test.run_compile(db);
|
||||
if let Err(e) = result {
|
||||
println!("TEST COMPILE ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
"[{}] Compile Test '{}' failed. Error: {}",
|
||||
group.description, test.description, e
|
||||
));
|
||||
}
|
||||
}
|
||||
"validate" => {
|
||||
let result = test.run_validate(db.clone());
|
||||
let result = test.run_validate(db_unwrapped.unwrap());
|
||||
if let Err(e) = result {
|
||||
println!("TEST VALIDATE ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
@ -108,7 +127,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
|
||||
}
|
||||
}
|
||||
"merge" => {
|
||||
let result = test.run_merge(db.clone());
|
||||
let result = test.run_merge(db_unwrapped.unwrap());
|
||||
if let Err(e) = result {
|
||||
println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
@ -118,7 +137,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
|
||||
}
|
||||
}
|
||||
"query" => {
|
||||
let result = test.run_query(db.clone());
|
||||
let result = test.run_query(db_unwrapped.unwrap());
|
||||
if let Err(e) = result {
|
||||
println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
|
||||
failures.push(format!(
|
||||
|
||||
@ -1,11 +1,11 @@
|
||||
use super::expect::ExpectBlock;
|
||||
use super::expect::Expect;
|
||||
use crate::database::Database;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct TestCase {
|
||||
pub struct Case {
|
||||
pub description: String,
|
||||
|
||||
#[serde(default = "default_action")]
|
||||
@ -16,9 +16,6 @@ pub struct TestCase {
|
||||
pub schema_id: String,
|
||||
|
||||
// For Query
|
||||
#[serde(default)]
|
||||
pub stem: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub filters: Option<serde_json::Value>,
|
||||
|
||||
@ -30,24 +27,29 @@ pub struct TestCase {
|
||||
#[serde(default)]
|
||||
pub mocks: Option<serde_json::Value>,
|
||||
|
||||
pub expect: Option<ExpectBlock>,
|
||||
pub expect: Option<Expect>,
|
||||
}
|
||||
|
||||
fn default_action() -> String {
|
||||
"validate".to_string()
|
||||
}
|
||||
|
||||
impl TestCase {
|
||||
pub fn execute(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
match self.action.as_str() {
|
||||
"validate" => self.run_validate(db),
|
||||
"merge" => self.run_merge(db),
|
||||
"query" => self.run_query(db),
|
||||
_ => Err(format!(
|
||||
"Unknown action '{}' for test '{}'",
|
||||
self.action, self.description
|
||||
)),
|
||||
}
|
||||
impl Case {
|
||||
pub fn run_compile(
|
||||
&self,
|
||||
db_res: &Result<Arc<Database>, crate::drop::Drop>,
|
||||
) -> Result<(), String> {
|
||||
let expect = match &self.expect {
|
||||
Some(e) => e,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
let result = match db_res {
|
||||
Ok(_) => crate::drop::Drop::success(),
|
||||
Err(d) => d.clone(),
|
||||
};
|
||||
|
||||
expect.assert_drop(&result)
|
||||
}
|
||||
|
||||
pub fn run_validate(&self, db: Arc<Database>) -> Result<(), String> {
|
||||
@ -55,8 +57,6 @@ impl TestCase {
|
||||
|
||||
let validator = Validator::new(db);
|
||||
|
||||
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
|
||||
|
||||
let schema_id = &self.schema_id;
|
||||
if !validator.db.schemas.contains_key(schema_id) {
|
||||
return Err(format!(
|
||||
@ -68,19 +68,8 @@ impl TestCase {
|
||||
let test_data = self.data.clone().unwrap_or(Value::Null);
|
||||
let result = validator.validate(schema_id, &test_data);
|
||||
|
||||
let got_valid = result.errors.is_empty();
|
||||
|
||||
if got_valid != expected_success {
|
||||
let error_msg = if result.errors.is_empty() {
|
||||
"None".to_string()
|
||||
} else {
|
||||
format!("{:?}", result.errors)
|
||||
};
|
||||
|
||||
return Err(format!(
|
||||
"Expected: {}, Got: {}. Errors: {}",
|
||||
expected_success, got_valid, error_msg
|
||||
));
|
||||
if let Some(expect) = &self.expect {
|
||||
expect.assert_drop(&result)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -97,25 +86,18 @@ impl TestCase {
|
||||
let merger = Merger::new(db.clone());
|
||||
|
||||
let test_data = self.data.clone().unwrap_or(Value::Null);
|
||||
let result = merger.merge(test_data);
|
||||
let result = merger.merge(&self.schema_id, test_data);
|
||||
|
||||
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
|
||||
let got_success = result.errors.is_empty();
|
||||
|
||||
let error_msg = if result.errors.is_empty() {
|
||||
"None".to_string()
|
||||
} else {
|
||||
format!("{:?}", result.errors)
|
||||
};
|
||||
|
||||
let return_val = if expected_success != got_success {
|
||||
Err(format!(
|
||||
"Merge Expected: {}, Got: {}. Errors: {}",
|
||||
expected_success, got_success, error_msg
|
||||
))
|
||||
} else if let Some(expect) = &self.expect {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_sql(&queries)
|
||||
let return_val = if let Some(expect) = &self.expect {
|
||||
if let Err(e) = expect.assert_drop(&result) {
|
||||
Err(format!("Merge {}", e))
|
||||
} else if result.errors.is_empty() {
|
||||
// Only assert SQL if merge succeeded
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
};
|
||||
@ -134,26 +116,17 @@ impl TestCase {
|
||||
use crate::queryer::Queryer;
|
||||
let queryer = Queryer::new(db.clone());
|
||||
|
||||
let stem_opt = self.stem.as_deref();
|
||||
let result = queryer.query(&self.schema_id, stem_opt, self.filters.as_ref());
|
||||
let result = queryer.query(&self.schema_id, self.filters.as_ref());
|
||||
|
||||
let expected_success = self.expect.as_ref().map(|e| e.success).unwrap_or(false);
|
||||
let got_success = result.errors.is_empty();
|
||||
|
||||
let error_msg = if result.errors.is_empty() {
|
||||
"None".to_string()
|
||||
} else {
|
||||
format!("{:?}", result.errors)
|
||||
};
|
||||
|
||||
let return_val = if expected_success != got_success {
|
||||
Err(format!(
|
||||
"Query Expected: {}, Got: {}. Errors: {}",
|
||||
expected_success, got_success, error_msg
|
||||
))
|
||||
} else if let Some(expect) = &self.expect {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_sql(&queries)
|
||||
let return_val = if let Some(expect) = &self.expect {
|
||||
if let Err(e) = expect.assert_drop(&result) {
|
||||
Err(format!("Query {}", e))
|
||||
} else if result.errors.is_empty() {
|
||||
let queries = db.executor.get_queries();
|
||||
expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
};
|
||||
|
||||
88
src/tests/types/expect/drop.rs
Normal file
88
src/tests/types/expect/drop.rs
Normal file
@ -0,0 +1,88 @@
|
||||
use super::Expect;
|
||||
|
||||
impl Expect {
|
||||
pub fn assert_drop(&self, drop: &crate::drop::Drop) -> Result<(), String> {
|
||||
let got_success = drop.errors.is_empty();
|
||||
|
||||
if self.success != got_success {
|
||||
let mut err_msg = format!("Expected success: {}, Got: {}.", self.success, got_success);
|
||||
if !drop.errors.is_empty() {
|
||||
err_msg.push_str(&format!(" Actual Errors: {:?}", drop.errors));
|
||||
}
|
||||
return Err(err_msg);
|
||||
}
|
||||
|
||||
if !self.success {
|
||||
if let Some(expected_errors) = &self.errors {
|
||||
let actual_values: Vec<serde_json::Value> = drop.errors
|
||||
.iter()
|
||||
.map(|e| serde_json::to_value(e).unwrap())
|
||||
.collect();
|
||||
|
||||
if expected_errors.len() != actual_values.len() {
|
||||
return Err(format!(
|
||||
"Expected {} errors, but got {}.\nExpected subset: {:?}\nActual full errors: {:?}",
|
||||
expected_errors.len(),
|
||||
actual_values.len(),
|
||||
expected_errors,
|
||||
drop.errors
|
||||
));
|
||||
}
|
||||
|
||||
for (i, expected_val) in expected_errors.iter().enumerate() {
|
||||
let mut matched = false;
|
||||
|
||||
for actual_val in &actual_values {
|
||||
if subset_match(expected_val, actual_val) {
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !matched {
|
||||
return Err(format!(
|
||||
"Expected error {} was not found in actual errors.\nExpected subset: {}\nActual full errors: {:?}",
|
||||
i,
|
||||
serde_json::to_string_pretty(expected_val).unwrap(),
|
||||
drop.errors,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Helper to check if `expected` is a structural subset of `actual`
|
||||
fn subset_match(expected: &serde_json::Value, actual: &serde_json::Value) -> bool {
|
||||
match (expected, actual) {
|
||||
(serde_json::Value::Object(exp_map), serde_json::Value::Object(act_map)) => {
|
||||
for (k, v) in exp_map {
|
||||
if let Some(act_v) = act_map.get(k) {
|
||||
if !subset_match(v, act_v) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
(serde_json::Value::Array(exp_arr), serde_json::Value::Array(act_arr)) => {
|
||||
// Basic check: array sizes and elements must match exactly in order
|
||||
if exp_arr.len() != act_arr.len() {
|
||||
return false;
|
||||
}
|
||||
for (e, a) in exp_arr.iter().zip(act_arr.iter()) {
|
||||
if !subset_match(e, a) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
// For primitives, exact match
|
||||
(e, a) => e == a,
|
||||
}
|
||||
}
|
||||
21
src/tests/types/expect/mod.rs
Normal file
21
src/tests/types/expect/mod.rs
Normal file
@ -0,0 +1,21 @@
|
||||
pub mod pattern;
|
||||
pub mod sql;
|
||||
pub mod drop;
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum SqlExpectation {
|
||||
Single(String),
|
||||
Multi(Vec<String>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Expect {
|
||||
pub success: bool,
|
||||
pub result: Option<serde_json::Value>,
|
||||
pub errors: Option<Vec<serde_json::Value>>,
|
||||
#[serde(default)]
|
||||
pub sql: Option<Vec<SqlExpectation>>,
|
||||
}
|
||||
@ -1,29 +1,13 @@
|
||||
use super::Expect;
|
||||
use regex::Regex;
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum SqlExpectation {
|
||||
Single(String),
|
||||
Multi(Vec<String>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct ExpectBlock {
|
||||
pub success: bool,
|
||||
pub result: Option<serde_json::Value>,
|
||||
pub errors: Option<Vec<serde_json::Value>>,
|
||||
#[serde(default)]
|
||||
pub sql: Option<Vec<SqlExpectation>>,
|
||||
}
|
||||
|
||||
impl ExpectBlock {
|
||||
impl Expect {
|
||||
/// Advanced SQL execution assertion algorithm ported from `assert.go`.
|
||||
/// This compares two arrays of strings, one containing {{uuid:name}} or {{timestamp}} placeholders,
|
||||
/// and the other containing actual executed database queries. It ensures that placeholder UUIDs
|
||||
/// are consistently mapped to the same actual UUIDs across all lines, and strictly validates line-by-line sequences.
|
||||
pub fn assert_sql(&self, actual: &[String]) -> Result<(), String> {
|
||||
pub fn assert_pattern(&self, actual: &[String]) -> Result<(), String> {
|
||||
let patterns = match &self.sql {
|
||||
Some(s) => s,
|
||||
None => return Ok(()),
|
||||
@ -75,8 +59,8 @@ impl ExpectBlock {
|
||||
let aline = clean_str(aline_raw);
|
||||
|
||||
let pattern_str_raw = match pattern_expect {
|
||||
SqlExpectation::Single(s) => s.clone(),
|
||||
SqlExpectation::Multi(m) => m.join(" "),
|
||||
super::SqlExpectation::Single(s) => s.clone(),
|
||||
super::SqlExpectation::Multi(m) => m.join(" "),
|
||||
};
|
||||
|
||||
let pattern_str = clean_str(&pattern_str_raw);
|
||||
206
src/tests/types/expect/sql.rs
Normal file
206
src/tests/types/expect/sql.rs
Normal file
@ -0,0 +1,206 @@
|
||||
use super::Expect;
|
||||
use sqlparser::ast::{Expr, Query, SelectItem, Statement, TableFactor};
|
||||
use sqlparser::dialect::PostgreSqlDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
use std::collections::HashSet;
|
||||
|
||||
impl Expect {
|
||||
pub fn assert_sql(&self, actual: &[String]) -> Result<(), String> {
|
||||
for query in actual {
|
||||
if let Err(e) = Self::validate_semantic_sql(query) {
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn validate_semantic_sql(sql: &str) -> Result<(), String> {
|
||||
let dialect = PostgreSqlDialect {};
|
||||
let statements = match Parser::parse_sql(&dialect, sql) {
|
||||
Ok(s) => s,
|
||||
Err(e) => return Err(format!("SQL Syntax Error: {}\nSQL: {}", e, sql)),
|
||||
};
|
||||
|
||||
for statement in statements {
|
||||
Self::validate_statement(&statement, sql)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_statement(stmt: &Statement, original_sql: &str) -> Result<(), String> {
|
||||
match stmt {
|
||||
Statement::Query(query) => Self::validate_query(query, &HashSet::new(), original_sql)?,
|
||||
Statement::Insert(insert) => {
|
||||
if let Some(query) = &insert.source {
|
||||
Self::validate_query(query, &HashSet::new(), original_sql)?
|
||||
}
|
||||
}
|
||||
Statement::Update(update) => {
|
||||
if let Some(expr) = &update.selection {
|
||||
Self::validate_expr(expr, &HashSet::new(), original_sql)?;
|
||||
}
|
||||
}
|
||||
Statement::Delete(delete) => {
|
||||
if let Some(expr) = &delete.selection {
|
||||
Self::validate_expr(expr, &HashSet::new(), original_sql)?;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_query(
|
||||
query: &Query,
|
||||
available_aliases: &HashSet<String>,
|
||||
original_sql: &str,
|
||||
) -> Result<(), String> {
|
||||
if let sqlparser::ast::SetExpr::Select(select) = &*query.body {
|
||||
Self::validate_select(&select, available_aliases, original_sql)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_select(
|
||||
select: &sqlparser::ast::Select,
|
||||
parent_aliases: &HashSet<String>,
|
||||
original_sql: &str,
|
||||
) -> Result<(), String> {
|
||||
let mut available_aliases = parent_aliases.clone();
|
||||
|
||||
// 1. Collect all declared table aliases in the FROM clause and JOINs
|
||||
for table_with_joins in &select.from {
|
||||
Self::collect_aliases_from_table_factor(&table_with_joins.relation, &mut available_aliases);
|
||||
for join in &table_with_joins.joins {
|
||||
Self::collect_aliases_from_table_factor(&join.relation, &mut available_aliases);
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Validate all SELECT projection fields
|
||||
for projection in &select.projection {
|
||||
if let SelectItem::UnnamedExpr(expr) | SelectItem::ExprWithAlias { expr, .. } = projection {
|
||||
Self::validate_expr(expr, &available_aliases, original_sql)?;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Validate ON conditions in joins
|
||||
for table_with_joins in &select.from {
|
||||
for join in &table_with_joins.joins {
|
||||
if let sqlparser::ast::JoinOperator::Inner(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::LeftOuter(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::RightOuter(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::FullOuter(sqlparser::ast::JoinConstraint::On(expr))
|
||||
| sqlparser::ast::JoinOperator::Join(sqlparser::ast::JoinConstraint::On(expr)) =
|
||||
&join.join_operator
|
||||
{
|
||||
Self::validate_expr(expr, &available_aliases, original_sql)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Validate WHERE conditions
|
||||
if let Some(selection) = &select.selection {
|
||||
Self::validate_expr(selection, &available_aliases, original_sql)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn collect_aliases_from_table_factor(tf: &TableFactor, aliases: &mut HashSet<String>) {
|
||||
match tf {
|
||||
TableFactor::Table { name, alias, .. } => {
|
||||
if let Some(table_alias) = alias {
|
||||
aliases.insert(table_alias.name.value.clone());
|
||||
} else if let Some(last) = name.0.last() {
|
||||
match last {
|
||||
sqlparser::ast::ObjectNamePart::Identifier(i) => {
|
||||
aliases.insert(i.value.clone());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
TableFactor::Derived {
|
||||
subquery,
|
||||
alias: Some(table_alias),
|
||||
..
|
||||
} => {
|
||||
aliases.insert(table_alias.name.value.clone());
|
||||
// A derived table is technically a nested scope which is opaque outside, but for pure semantic checks
|
||||
// its internal contents should be validated purely within its own scope (not leaking external aliases in, usually)
|
||||
// but Postgres allows lateral correlation. We will validate its interior with an empty scope.
|
||||
let _ = Self::validate_query(subquery, &HashSet::new(), "");
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_expr(
|
||||
expr: &Expr,
|
||||
available_aliases: &HashSet<String>,
|
||||
sql: &str,
|
||||
) -> Result<(), String> {
|
||||
match expr {
|
||||
Expr::CompoundIdentifier(idents) => {
|
||||
if idents.len() == 2 {
|
||||
let alias = &idents[0].value;
|
||||
if !available_aliases.is_empty() && !available_aliases.contains(alias) {
|
||||
return Err(format!(
|
||||
"Semantic Error: Orchestrated query referenced table alias '{}' but it was not declared in the query's FROM/JOIN clauses.\nAvailable aliases: {:?}\nSQL: {}",
|
||||
alias, available_aliases, sql
|
||||
));
|
||||
}
|
||||
} else if idents.len() > 2 {
|
||||
let alias = &idents[1].value; // In form schema.table.column, 'table' is idents[1]
|
||||
if !available_aliases.is_empty() && !available_aliases.contains(alias) {
|
||||
return Err(format!(
|
||||
"Semantic Error: Orchestrated query referenced table '{}' but it was not mapped.\nAvailable aliases: {:?}\nSQL: {}",
|
||||
alias, available_aliases, sql
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::Subquery(subquery) => Self::validate_query(subquery, available_aliases, sql)?,
|
||||
Expr::Exists { subquery, .. } => Self::validate_query(subquery, available_aliases, sql)?,
|
||||
Expr::InSubquery {
|
||||
expr: e, subquery, ..
|
||||
} => {
|
||||
Self::validate_expr(e, available_aliases, sql)?;
|
||||
Self::validate_query(subquery, available_aliases, sql)?;
|
||||
}
|
||||
Expr::BinaryOp { left, right, .. } => {
|
||||
Self::validate_expr(left, available_aliases, sql)?;
|
||||
Self::validate_expr(right, available_aliases, sql)?;
|
||||
}
|
||||
Expr::IsFalse(e)
|
||||
| Expr::IsNotFalse(e)
|
||||
| Expr::IsTrue(e)
|
||||
| Expr::IsNotTrue(e)
|
||||
| Expr::IsNull(e)
|
||||
| Expr::IsNotNull(e)
|
||||
| Expr::InList { expr: e, .. }
|
||||
| Expr::Nested(e)
|
||||
| Expr::UnaryOp { expr: e, .. }
|
||||
| Expr::Cast { expr: e, .. }
|
||||
| Expr::Like { expr: e, .. }
|
||||
| Expr::ILike { expr: e, .. }
|
||||
| Expr::AnyOp { left: e, .. }
|
||||
| Expr::AllOp { left: e, .. } => {
|
||||
Self::validate_expr(e, available_aliases, sql)?;
|
||||
}
|
||||
Expr::Function(func) => {
|
||||
if let sqlparser::ast::FunctionArguments::List(args) = &func.args {
|
||||
if let Some(sqlparser::ast::FunctionArg::Unnamed(
|
||||
sqlparser::ast::FunctionArgExpr::Expr(e),
|
||||
)) = args.args.get(0)
|
||||
{
|
||||
Self::validate_expr(e, available_aliases, sql)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@ -2,6 +2,6 @@ pub mod case;
|
||||
pub mod expect;
|
||||
pub mod suite;
|
||||
|
||||
pub use case::TestCase;
|
||||
pub use expect::ExpectBlock;
|
||||
pub use suite::TestSuite;
|
||||
pub use case::Case;
|
||||
pub use expect::Expect;
|
||||
pub use suite::Suite;
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
use super::case::TestCase;
|
||||
use super::case::Case;
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct TestSuite {
|
||||
pub struct Suite {
|
||||
#[allow(dead_code)]
|
||||
pub description: String,
|
||||
pub database: serde_json::Value,
|
||||
pub tests: Vec<TestCase>,
|
||||
pub tests: Vec<Case>,
|
||||
}
|
||||
|
||||
@ -41,6 +41,14 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn join_path(&self, key: &str) -> String {
|
||||
if self.path.is_empty() {
|
||||
key.to_string()
|
||||
} else {
|
||||
format!("{}/{}", self.path, key)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn derive(
|
||||
&self,
|
||||
schema: &'a Schema,
|
||||
|
||||
@ -67,7 +67,12 @@ impl Validator {
|
||||
.map(|e| crate::drop::Error {
|
||||
code: e.code,
|
||||
message: e.message,
|
||||
details: crate::drop::ErrorDetails { path: e.path },
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: e.path,
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
})
|
||||
.collect();
|
||||
crate::drop::Drop::with_errors(errors)
|
||||
@ -76,7 +81,12 @@ impl Validator {
|
||||
Err(e) => crate::drop::Drop::with_errors(vec![crate::drop::Error {
|
||||
code: e.code,
|
||||
message: e.message,
|
||||
details: crate::drop::ErrorDetails { path: e.path },
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: e.path,
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}]),
|
||||
}
|
||||
} else {
|
||||
@ -84,7 +94,10 @@ impl Validator {
|
||||
code: "SCHEMA_NOT_FOUND".to_string(),
|
||||
message: format!("Schema {} not found", schema_id),
|
||||
details: crate::drop::ErrorDetails {
|
||||
path: "".to_string(),
|
||||
path: "/".to_string(),
|
||||
cause: None,
|
||||
context: None,
|
||||
schema: None,
|
||||
},
|
||||
}])
|
||||
}
|
||||
|
||||
@ -91,12 +91,17 @@ impl<'a> ValidationContext<'a> {
|
||||
if let Some(ref prefix) = self.schema.prefix_items {
|
||||
for (i, sub_schema) in prefix.iter().enumerate() {
|
||||
if i < len {
|
||||
let path = format!("{}/{}", self.path, i);
|
||||
if let Some(child_instance) = arr.get(i) {
|
||||
let mut item_path = self.join_path(&i.to_string());
|
||||
if let Some(obj) = child_instance.as_object() {
|
||||
if let Some(id_str) = obj.get("id").and_then(|v| v.as_str()) {
|
||||
item_path = self.join_path(id_str);
|
||||
}
|
||||
}
|
||||
let derived = self.derive(
|
||||
sub_schema,
|
||||
child_instance,
|
||||
&path,
|
||||
&item_path,
|
||||
HashSet::new(),
|
||||
self.extensible,
|
||||
false,
|
||||
@ -112,12 +117,17 @@ impl<'a> ValidationContext<'a> {
|
||||
|
||||
if let Some(ref items_schema) = self.schema.items {
|
||||
for i in validation_index..len {
|
||||
let path = format!("{}/{}", self.path, i);
|
||||
if let Some(child_instance) = arr.get(i) {
|
||||
let mut item_path = self.join_path(&i.to_string());
|
||||
if let Some(obj) = child_instance.as_object() {
|
||||
if let Some(id_str) = obj.get("id").and_then(|v| v.as_str()) {
|
||||
item_path = self.join_path(id_str);
|
||||
}
|
||||
}
|
||||
let derived = self.derive(
|
||||
items_schema,
|
||||
child_instance,
|
||||
&path,
|
||||
&item_path,
|
||||
HashSet::new(),
|
||||
self.extensible,
|
||||
false,
|
||||
|
||||
45
src/validator/rules/cases.rs
Normal file
45
src/validator/rules/cases.rs
Normal file
@ -0,0 +1,45 @@
|
||||
use crate::validator::context::ValidationContext;
|
||||
use crate::validator::error::ValidationError;
|
||||
use crate::validator::result::ValidationResult;
|
||||
|
||||
impl<'a> ValidationContext<'a> {
|
||||
pub(crate) fn validate_cases(
|
||||
&self,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
if let Some(cases) = &self.schema.cases {
|
||||
for case in cases {
|
||||
if let Some(ref when_schema) = case.when {
|
||||
let derived_when = self.derive_for_schema(when_schema, true);
|
||||
let when_res = derived_when.validate()?;
|
||||
|
||||
// Evaluates all cases independently.
|
||||
if when_res.is_valid() {
|
||||
result
|
||||
.evaluated_keys
|
||||
.extend(when_res.evaluated_keys.clone());
|
||||
result
|
||||
.evaluated_indices
|
||||
.extend(when_res.evaluated_indices.clone());
|
||||
|
||||
if let Some(ref then_schema) = case.then {
|
||||
let derived_then = self.derive_for_schema(then_schema, true);
|
||||
result.merge(derived_then.validate()?);
|
||||
}
|
||||
} else {
|
||||
if let Some(ref else_schema) = case.else_ {
|
||||
let derived_else = self.derive_for_schema(else_schema, true);
|
||||
result.merge(derived_else.validate()?);
|
||||
}
|
||||
}
|
||||
} else if let Some(ref else_schema) = case.else_ {
|
||||
// A rule with a missing `when` fires the `else` indiscriminately
|
||||
let derived_else = self.derive_for_schema(else_schema, true);
|
||||
result.merge(derived_else.validate()?);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
@ -1,92 +0,0 @@
|
||||
use crate::validator::context::ValidationContext;
|
||||
use crate::validator::error::ValidationError;
|
||||
use crate::validator::result::ValidationResult;
|
||||
|
||||
impl<'a> ValidationContext<'a> {
|
||||
pub(crate) fn validate_combinators(
|
||||
&self,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
if let Some(ref all_of) = self.schema.all_of {
|
||||
for sub in all_of {
|
||||
let derived = self.derive_for_schema(sub, true);
|
||||
let res = derived.validate()?;
|
||||
result.merge(res);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref one_of) = self.schema.one_of {
|
||||
let mut passed_candidates: Vec<(Option<String>, usize, ValidationResult)> = Vec::new();
|
||||
|
||||
for sub in one_of {
|
||||
let derived = self.derive_for_schema(sub, true);
|
||||
let sub_res = derived.validate()?;
|
||||
if sub_res.is_valid() {
|
||||
let child_id = sub.id.clone();
|
||||
let depth = child_id
|
||||
.as_ref()
|
||||
.and_then(|id| self.db.depths.get(id).copied())
|
||||
.unwrap_or(0);
|
||||
passed_candidates.push((child_id, depth, sub_res));
|
||||
}
|
||||
}
|
||||
|
||||
if passed_candidates.len() == 1 {
|
||||
result.merge(passed_candidates.pop().unwrap().2);
|
||||
} else if passed_candidates.is_empty() {
|
||||
result.errors.push(ValidationError {
|
||||
code: "NO_ONEOF_MATCH".to_string(),
|
||||
message: "Matches none of oneOf schemas".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
} else {
|
||||
// Apply depth heuristic tie-breaker
|
||||
let mut best_depth: Option<usize> = None;
|
||||
let mut ambiguous = false;
|
||||
let mut best_res = None;
|
||||
|
||||
for (_, depth, res) in passed_candidates.into_iter() {
|
||||
if let Some(current_best) = best_depth {
|
||||
if depth > current_best {
|
||||
best_depth = Some(depth);
|
||||
best_res = Some(res);
|
||||
ambiguous = false;
|
||||
} else if depth == current_best {
|
||||
ambiguous = true;
|
||||
}
|
||||
} else {
|
||||
best_depth = Some(depth);
|
||||
best_res = Some(res);
|
||||
}
|
||||
}
|
||||
|
||||
if !ambiguous {
|
||||
if let Some(res) = best_res {
|
||||
result.merge(res);
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
result.errors.push(ValidationError {
|
||||
code: "AMBIGUOUS_ONEOF_MATCH".to_string(),
|
||||
message: "Matches multiple oneOf schemas without a clear depth winner".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref not_schema) = self.schema.not {
|
||||
let derived = self.derive_for_schema(not_schema, true);
|
||||
let sub_res = derived.validate()?;
|
||||
if sub_res.is_valid() {
|
||||
result.errors.push(ValidationError {
|
||||
code: "NOT_VIOLATED".to_string(),
|
||||
message: "Matched 'not' schema".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
@ -3,30 +3,14 @@ use crate::validator::error::ValidationError;
|
||||
use crate::validator::result::ValidationResult;
|
||||
|
||||
impl<'a> ValidationContext<'a> {
|
||||
pub(crate) fn validate_conditionals(
|
||||
&self,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
if let Some(ref if_schema) = self.schema.if_ {
|
||||
let derived_if = self.derive_for_schema(if_schema, true);
|
||||
let if_res = derived_if.validate()?;
|
||||
|
||||
result.evaluated_keys.extend(if_res.evaluated_keys.clone());
|
||||
result
|
||||
.evaluated_indices
|
||||
.extend(if_res.evaluated_indices.clone());
|
||||
|
||||
if if_res.is_valid() {
|
||||
if let Some(ref then_schema) = self.schema.then_ {
|
||||
let derived_then = self.derive_for_schema(then_schema, true);
|
||||
result.merge(derived_then.validate()?);
|
||||
}
|
||||
} else if let Some(ref else_schema) = self.schema.else_ {
|
||||
let derived_else = self.derive_for_schema(else_schema, true);
|
||||
result.merge(derived_else.validate()?);
|
||||
pub(crate) fn validate_extensible(&self, result: &mut ValidationResult) -> Result<bool, ValidationError> {
|
||||
if self.extensible {
|
||||
if let Some(obj) = self.instance.as_object() {
|
||||
result.evaluated_keys.extend(obj.keys().cloned());
|
||||
} else if let Some(arr) = self.instance.as_array() {
|
||||
result.evaluated_indices.extend(0..arr.len());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
@ -40,11 +24,14 @@ impl<'a> ValidationContext<'a> {
|
||||
|
||||
if let Some(obj) = self.instance.as_object() {
|
||||
for key in obj.keys() {
|
||||
if key == "type" || key == "kind" {
|
||||
continue; // Reserved keywords implicitly allowed
|
||||
}
|
||||
if !result.evaluated_keys.contains(key) && !self.overrides.contains(key) {
|
||||
result.errors.push(ValidationError {
|
||||
code: "STRICT_PROPERTY_VIOLATION".to_string(),
|
||||
message: format!("Unexpected property '{}'", key),
|
||||
path: format!("{}/{}", self.path, key),
|
||||
path: self.join_path(key),
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -53,10 +40,18 @@ impl<'a> ValidationContext<'a> {
|
||||
if let Some(arr) = self.instance.as_array() {
|
||||
for i in 0..arr.len() {
|
||||
if !result.evaluated_indices.contains(&i) {
|
||||
let mut item_path = self.join_path(&i.to_string());
|
||||
if let Some(child_instance) = arr.get(i) {
|
||||
if let Some(obj) = child_instance.as_object() {
|
||||
if let Some(id_str) = obj.get("id").and_then(|v| v.as_str()) {
|
||||
item_path = self.join_path(id_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
result.errors.push(ValidationError {
|
||||
code: "STRICT_ITEM_VIOLATION".to_string(),
|
||||
message: format!("Unexpected item at index {}", i),
|
||||
path: format!("{}/{}", self.path, i),
|
||||
path: item_path,
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -8,7 +8,7 @@ impl<'a> ValidationContext<'a> {
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
let current = self.instance;
|
||||
if let Some(ref compiled_fmt) = self.schema.compiled_format {
|
||||
if let Some(compiled_fmt) = self.schema.compiled_format.get() {
|
||||
match compiled_fmt {
|
||||
crate::database::schema::CompiledFormat::Func(f) => {
|
||||
let should = if let Some(s) = current.as_str() {
|
||||
|
||||
@ -3,10 +3,11 @@ use crate::validator::error::ValidationError;
|
||||
use crate::validator::result::ValidationResult;
|
||||
|
||||
pub mod array;
|
||||
pub mod combinators;
|
||||
pub mod conditionals;
|
||||
pub mod cases;
|
||||
pub mod core;
|
||||
pub mod extensible;
|
||||
pub mod format;
|
||||
pub mod not;
|
||||
pub mod numeric;
|
||||
pub mod object;
|
||||
pub mod polymorphism;
|
||||
@ -27,7 +28,7 @@ impl<'a> ValidationContext<'a> {
|
||||
if !self.validate_family(&mut result)? {
|
||||
return Ok(result);
|
||||
}
|
||||
if !self.validate_refs(&mut result)? {
|
||||
if !self.validate_type_inheritance(&mut result)? {
|
||||
return Ok(result);
|
||||
}
|
||||
|
||||
@ -42,8 +43,11 @@ impl<'a> ValidationContext<'a> {
|
||||
self.validate_array(&mut result)?;
|
||||
|
||||
// Multipliers & Conditionals
|
||||
self.validate_combinators(&mut result)?;
|
||||
self.validate_conditionals(&mut result)?;
|
||||
if !self.validate_one_of(&mut result)? {
|
||||
return Ok(result);
|
||||
}
|
||||
self.validate_not(&mut result)?;
|
||||
self.validate_cases(&mut result)?;
|
||||
|
||||
// State Tracking
|
||||
self.validate_extensible(&mut result)?;
|
||||
@ -77,15 +81,4 @@ impl<'a> ValidationContext<'a> {
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_extensible(&self, result: &mut ValidationResult) -> Result<bool, ValidationError> {
|
||||
if self.extensible {
|
||||
if let Some(obj) = self.instance.as_object() {
|
||||
result.evaluated_keys.extend(obj.keys().cloned());
|
||||
} else if let Some(arr) = self.instance.as_array() {
|
||||
result.evaluated_indices.extend(0..arr.len());
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
|
||||
24
src/validator/rules/not.rs
Normal file
24
src/validator/rules/not.rs
Normal file
@ -0,0 +1,24 @@
|
||||
use crate::validator::context::ValidationContext;
|
||||
use crate::validator::error::ValidationError;
|
||||
use crate::validator::result::ValidationResult;
|
||||
|
||||
impl<'a> ValidationContext<'a> {
|
||||
pub(crate) fn validate_not(
|
||||
&self,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
if let Some(ref not_schema) = self.schema.not {
|
||||
let derived = self.derive_for_schema(not_schema, true);
|
||||
let sub_res = derived.validate()?;
|
||||
if sub_res.is_valid() {
|
||||
result.errors.push(ValidationError {
|
||||
code: "NOT_VIOLATED".to_string(),
|
||||
message: "Matched 'not' schema".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
@ -13,28 +13,73 @@ impl<'a> ValidationContext<'a> {
|
||||
) -> Result<bool, ValidationError> {
|
||||
let current = self.instance;
|
||||
if let Some(obj) = current.as_object() {
|
||||
// Entity Bound Implicit Type Validation
|
||||
if let Some(lookup_key) = self.schema.id.as_ref().or(self.schema.r#ref.as_ref()) {
|
||||
let base_type_name = lookup_key.split('.').next_back().unwrap_or("").to_string();
|
||||
if let Some(type_def) = self.db.types.get(&base_type_name)
|
||||
&& let Some(type_val) = obj.get("type")
|
||||
&& let Some(type_str) = type_val.as_str()
|
||||
{
|
||||
if type_def.variations.contains(type_str) {
|
||||
// Ensure it passes strict mode
|
||||
result.evaluated_keys.insert("type".to_string());
|
||||
// Entity implicit type validation
|
||||
if let Some(schema_identifier) = self.schema.identifier() {
|
||||
// We decompose identity string routing inherently
|
||||
let expected_type = schema_identifier.split('.').last().unwrap_or(&schema_identifier);
|
||||
|
||||
// Check if the identifier represents a registered global database entity boundary mathematically
|
||||
if let Some(type_def) = self.db.types.get(expected_type) {
|
||||
if let Some(type_val) = obj.get("type") {
|
||||
if let Some(type_str) = type_val.as_str() {
|
||||
if type_def.variations.contains(type_str) {
|
||||
// The instance is validly declaring a known structural descent
|
||||
result.evaluated_keys.insert("type".to_string());
|
||||
} else {
|
||||
result.errors.push(ValidationError {
|
||||
code: "CONST_VIOLATED".to_string(), // Aligning with original const override errors natively
|
||||
message: format!(
|
||||
"Type '{}' is not a valid descendant for this entity bound schema",
|
||||
type_str
|
||||
),
|
||||
path: self.join_path("type"),
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Because it's a global entity target, the payload must structurally provide a discriminator natively
|
||||
result.errors.push(ValidationError {
|
||||
code: "CONST_VIOLATED".to_string(), // Aligning with original const override errors
|
||||
message: format!(
|
||||
"Type '{}' is not a valid descendant for this entity bound schema",
|
||||
type_str
|
||||
),
|
||||
path: format!("{}/type", self.path),
|
||||
code: "MISSING_TYPE".to_string(),
|
||||
message: format!("Schema mechanically requires type discrimination '{}'", expected_type),
|
||||
path: self.path.clone(), // Empty boundary
|
||||
});
|
||||
}
|
||||
|
||||
// If the target mathematically declares a horizontal structural STI variation natively
|
||||
if schema_identifier.contains('.') {
|
||||
if obj.get("kind").is_none() {
|
||||
result.errors.push(ValidationError {
|
||||
code: "MISSING_KIND".to_string(),
|
||||
message: "Schema mechanically requires horizontal kind discrimination".to_string(),
|
||||
path: self.path.clone(),
|
||||
});
|
||||
} else {
|
||||
result.evaluated_keys.insert("kind".to_string());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If it isn't registered globally, it might be a nested Ad-Hoc candidate running via O(1) union routers.
|
||||
// Because they lack manual type property descriptors, we natively shield "type" and "kind" keys from
|
||||
// triggering additionalProperty violations natively IF they precisely correspond to their fast-path boundaries
|
||||
if let Some(type_val) = obj.get("type") {
|
||||
if let Some(type_str) = type_val.as_str() {
|
||||
if type_str == expected_type {
|
||||
result.evaluated_keys.insert("type".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(kind_val) = obj.get("kind") {
|
||||
if let Some((kind_str, _)) = schema_identifier.rsplit_once('.') {
|
||||
if let Some(actual_kind) = kind_val.as_str() {
|
||||
if actual_kind == kind_str {
|
||||
result.evaluated_keys.insert("kind".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(min) = self.schema.min_properties
|
||||
&& (obj.len() as f64) < min
|
||||
{
|
||||
@ -44,6 +89,7 @@ impl<'a> ValidationContext<'a> {
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(max) = self.schema.max_properties
|
||||
&& (obj.len() as f64) > max
|
||||
{
|
||||
@ -53,14 +99,23 @@ impl<'a> ValidationContext<'a> {
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(ref req) = self.schema.required {
|
||||
for field in req {
|
||||
if !obj.contains_key(field) {
|
||||
result.errors.push(ValidationError {
|
||||
code: "REQUIRED_FIELD_MISSING".to_string(),
|
||||
message: format!("Missing {}", field),
|
||||
path: format!("{}/{}", self.path, field),
|
||||
});
|
||||
if field == "type" {
|
||||
result.errors.push(ValidationError {
|
||||
code: "MISSING_TYPE".to_string(),
|
||||
message: "Missing type discriminator".to_string(),
|
||||
path: self.join_path(field),
|
||||
});
|
||||
} else {
|
||||
result.errors.push(ValidationError {
|
||||
code: "REQUIRED_FIELD_MISSING".to_string(),
|
||||
message: format!("Missing {}", field),
|
||||
path: self.join_path(field),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -98,8 +153,11 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
|
||||
if let Some(child_instance) = obj.get(key) {
|
||||
let new_path = format!("{}/{}", self.path, key);
|
||||
let is_ref = sub_schema.r#ref.is_some();
|
||||
let new_path = self.join_path(key);
|
||||
let is_ref = match &sub_schema.type_ {
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Single(t)) => !crate::database::schema::is_primitive_type(t),
|
||||
_ => false,
|
||||
};
|
||||
let next_extensible = if is_ref { false } else { self.extensible };
|
||||
|
||||
let derived = self.derive(
|
||||
@ -110,22 +168,9 @@ impl<'a> ValidationContext<'a> {
|
||||
next_extensible,
|
||||
false,
|
||||
);
|
||||
let mut item_res = derived.validate()?;
|
||||
let item_res = derived.validate()?;
|
||||
|
||||
|
||||
// Entity Bound Implicit Type Interception
|
||||
if key == "type"
|
||||
&& let Some(lookup_key) = sub_schema.id.as_ref().or(sub_schema.r#ref.as_ref())
|
||||
{
|
||||
let base_type_name = lookup_key.split('.').next_back().unwrap_or("").to_string();
|
||||
if let Some(type_def) = self.db.types.get(&base_type_name)
|
||||
&& let Some(instance_type) = child_instance.as_str()
|
||||
&& type_def.variations.contains(instance_type)
|
||||
{
|
||||
item_res
|
||||
.errors
|
||||
.retain(|e| e.code != "CONST_VIOLATED" && e.code != "ENUM_VIOLATED");
|
||||
}
|
||||
}
|
||||
|
||||
result.merge(item_res);
|
||||
result.evaluated_keys.insert(key.to_string());
|
||||
@ -133,12 +178,15 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref compiled_pp) = self.schema.compiled_pattern_properties {
|
||||
if let Some(compiled_pp) = self.schema.compiled_pattern_properties.get() {
|
||||
for (compiled_re, sub_schema) in compiled_pp {
|
||||
for (key, child_instance) in obj {
|
||||
if compiled_re.0.is_match(key) {
|
||||
let new_path = format!("{}/{}", self.path, key);
|
||||
let is_ref = sub_schema.r#ref.is_some();
|
||||
let new_path = self.join_path(key);
|
||||
let is_ref = match &sub_schema.type_ {
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Single(t)) => !crate::database::schema::is_primitive_type(t),
|
||||
_ => false,
|
||||
};
|
||||
let next_extensible = if is_ref { false } else { self.extensible };
|
||||
|
||||
let derived = self.derive(
|
||||
@ -165,7 +213,7 @@ impl<'a> ValidationContext<'a> {
|
||||
{
|
||||
locally_matched = true;
|
||||
}
|
||||
if !locally_matched && let Some(ref compiled_pp) = self.schema.compiled_pattern_properties
|
||||
if !locally_matched && let Some(compiled_pp) = self.schema.compiled_pattern_properties.get()
|
||||
{
|
||||
for (compiled_re, _) in compiled_pp {
|
||||
if compiled_re.0.is_match(key) {
|
||||
@ -176,8 +224,11 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
|
||||
if !locally_matched {
|
||||
let new_path = format!("{}/{}", self.path, key);
|
||||
let is_ref = additional_schema.r#ref.is_some();
|
||||
let new_path = self.join_path(key);
|
||||
let is_ref = match &additional_schema.type_ {
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Single(t)) => !crate::database::schema::is_primitive_type(t),
|
||||
_ => false,
|
||||
};
|
||||
let next_extensible = if is_ref { false } else { self.extensible };
|
||||
|
||||
let derived = self.derive(
|
||||
@ -197,7 +248,7 @@ impl<'a> ValidationContext<'a> {
|
||||
|
||||
if let Some(ref property_names) = self.schema.property_names {
|
||||
for key in obj.keys() {
|
||||
let _new_path = format!("{}/propertyNames/{}", self.path, key);
|
||||
let _new_path = self.join_path(&format!("propertyNames/{}", key));
|
||||
let val_str = Value::String(key.to_string());
|
||||
|
||||
let ctx = ValidationContext::new(
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
use crate::database::schema::Schema;
|
||||
use crate::validator::context::ValidationContext;
|
||||
use crate::validator::error::ValidationError;
|
||||
use crate::validator::result::ValidationResult;
|
||||
@ -13,9 +14,8 @@ impl<'a> ValidationContext<'a> {
|
||||
|| self.schema.required.is_some()
|
||||
|| self.schema.additional_properties.is_some()
|
||||
|| self.schema.items.is_some()
|
||||
|| self.schema.r#ref.is_some()
|
||||
|| self.schema.cases.is_some()
|
||||
|| self.schema.one_of.is_some()
|
||||
|| self.schema.all_of.is_some()
|
||||
|| self.schema.enum_.is_some()
|
||||
|| self.schema.const_.is_some();
|
||||
|
||||
@ -25,91 +25,34 @@ impl<'a> ValidationContext<'a> {
|
||||
message: "$family must be used exclusively without other constraints".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
// Short-circuit: the schema formulation is broken
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(family_target) = &self.schema.family {
|
||||
// The descendants map is keyed by the schema's own $id, not the target string.
|
||||
if let Some(schema_id) = &self.schema.id
|
||||
&& let Some(descendants) = self.db.descendants.get(schema_id)
|
||||
{
|
||||
// Validate against all descendants simulating strict oneOf logic
|
||||
let mut passed_candidates: Vec<(String, usize, ValidationResult)> = Vec::new();
|
||||
if let Some(descendants) = self.db.descendants.get(family_target) {
|
||||
let mut candidates = Vec::new();
|
||||
|
||||
// The target itself is also an implicitly valid candidate
|
||||
let mut all_targets = vec![family_target.clone()];
|
||||
all_targets.extend(descendants.clone());
|
||||
// Add the target base schema itself
|
||||
if let Some(base_schema) = self.db.schemas.get(family_target) {
|
||||
candidates.push(base_schema);
|
||||
}
|
||||
|
||||
for child_id in &all_targets {
|
||||
// Add all descendants
|
||||
for child_id in descendants {
|
||||
if let Some(child_schema) = self.db.schemas.get(child_id) {
|
||||
let derived = self.derive(
|
||||
child_schema,
|
||||
self.instance,
|
||||
&self.path,
|
||||
self.overrides.clone(),
|
||||
self.extensible,
|
||||
self.reporter, // Inherit parent reporter flag, do not bypass strictness!
|
||||
);
|
||||
|
||||
// Explicitly run validate_scoped to accurately test candidates with strictness checks enabled
|
||||
let res = derived.validate_scoped()?;
|
||||
|
||||
if res.is_valid() {
|
||||
let depth = self.db.depths.get(child_id).copied().unwrap_or(0);
|
||||
passed_candidates.push((child_id.clone(), depth, res));
|
||||
}
|
||||
candidates.push(child_schema);
|
||||
}
|
||||
}
|
||||
|
||||
if passed_candidates.len() == 1 {
|
||||
result.merge(passed_candidates.pop().unwrap().2);
|
||||
} else if passed_candidates.is_empty() {
|
||||
result.errors.push(ValidationError {
|
||||
code: "NO_FAMILY_MATCH".to_string(),
|
||||
message: format!(
|
||||
"Payload did not match any descendants of family '{}'",
|
||||
family_target
|
||||
),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
} else {
|
||||
// Apply depth heuristic tie-breaker
|
||||
let mut best_depth: Option<usize> = None;
|
||||
let mut ambiguous = false;
|
||||
let mut best_res = None;
|
||||
// Use prefix from family string (e.g. `light.`)
|
||||
let prefix = family_target
|
||||
.rsplit_once('.')
|
||||
.map(|(p, _)| format!("{}.", p))
|
||||
.unwrap_or_default();
|
||||
|
||||
for (_, depth, res) in passed_candidates.into_iter() {
|
||||
if let Some(current_best) = best_depth {
|
||||
if depth > current_best {
|
||||
best_depth = Some(depth);
|
||||
best_res = Some(res);
|
||||
ambiguous = false; // Broke the tie
|
||||
} else if depth == current_best {
|
||||
ambiguous = true; // Tie at the highest level
|
||||
}
|
||||
} else {
|
||||
best_depth = Some(depth);
|
||||
best_res = Some(res);
|
||||
}
|
||||
}
|
||||
|
||||
if !ambiguous {
|
||||
if let Some(res) = best_res {
|
||||
result.merge(res);
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
result.errors.push(ValidationError {
|
||||
code: "AMBIGUOUS_FAMILY_MATCH".to_string(),
|
||||
message: format!(
|
||||
"Payload matched multiple descendants of family '{}' without a clear depth winner",
|
||||
family_target
|
||||
),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
if !self.validate_polymorph(&candidates, Some(&prefix), result)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -117,13 +60,290 @@ impl<'a> ValidationContext<'a> {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub(crate) fn validate_refs(
|
||||
pub(crate) fn validate_one_of(
|
||||
&self,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
// 1. Core $ref logic relies on the fast O(1) map to allow cycles and proper nesting
|
||||
if let Some(ref_str) = &self.schema.r#ref {
|
||||
if let Some(global_schema) = self.db.schemas.get(ref_str) {
|
||||
if let Some(ref one_of) = self.schema.one_of {
|
||||
let mut candidates = Vec::new();
|
||||
for schema in one_of {
|
||||
candidates.push(schema.as_ref());
|
||||
}
|
||||
if !self.validate_polymorph(&candidates, None, result)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub(crate) fn validate_polymorph(
|
||||
&self,
|
||||
candidates: &[&Schema],
|
||||
family_prefix: Option<&str>,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
let mut passed_candidates: Vec<(Option<String>, ValidationResult)> = Vec::new();
|
||||
let mut failed_candidates: Vec<ValidationResult> = Vec::new();
|
||||
|
||||
// 1. O(1) Fast-Path Router & Extractor
|
||||
let instance_type = self.instance.as_object().and_then(|o| o.get("type")).and_then(|t| t.as_str());
|
||||
let instance_kind = self.instance.as_object().and_then(|o| o.get("kind")).and_then(|k| k.as_str());
|
||||
|
||||
let mut viable_candidates = Vec::new();
|
||||
|
||||
for sub in candidates {
|
||||
let _child_id = sub.identifier().unwrap_or_default();
|
||||
let mut can_match = true;
|
||||
|
||||
if let Some(t) = instance_type {
|
||||
// Fast Path 1: Pure Ad-Hoc Match (schema identifier == type)
|
||||
// If it matches exactly, it's our golden candidate. Make all others non-viable manually?
|
||||
// Wait, we loop through all and filter down. If exact match is found, we should ideally break and use ONLY that.
|
||||
// Let's implement the logic safely.
|
||||
|
||||
let mut exact_match_found = false;
|
||||
|
||||
if let Some(schema_id) = &sub.id {
|
||||
// Compute Vertical Exact Target (e.g. "person" or "light.person")
|
||||
let exact_target = if let Some(prefix) = family_prefix {
|
||||
format!("{}{}", prefix, t)
|
||||
} else {
|
||||
t.to_string()
|
||||
};
|
||||
|
||||
// Fast Path 1 & 2: Vertical Exact Match
|
||||
if schema_id == &exact_target {
|
||||
if instance_kind.is_none() {
|
||||
exact_match_found = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Fast Path 3: Horizontal Sibling Match (kind + . + type)
|
||||
if let Some(k) = instance_kind {
|
||||
let sibling_target = format!("{}.{}", k, t);
|
||||
if schema_id == &sibling_target {
|
||||
exact_match_found = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if exact_match_found {
|
||||
// We found an exact literal structural identity match!
|
||||
// Wipe the existing viable_candidates and only yield this guy!
|
||||
viable_candidates.clear();
|
||||
viable_candidates.push(*sub);
|
||||
break;
|
||||
}
|
||||
|
||||
// Fast Path 4: Vertical Inheritance Fallback (Physical DB constraint)
|
||||
if let Some(crate::database::schema::SchemaTypeOrArray::Single(t_ptr)) = &sub.type_ {
|
||||
if !crate::database::schema::is_primitive_type(t_ptr) {
|
||||
if let Some(base_type) = t_ptr.split('.').last() {
|
||||
if let Some(type_def) = self.db.types.get(base_type) {
|
||||
if !type_def.variations.contains(&t.to_string()) {
|
||||
can_match = false;
|
||||
}
|
||||
} else {
|
||||
if t_ptr != t {
|
||||
can_match = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fast Path 5: Explicit Schema JSON `const` values check
|
||||
if can_match {
|
||||
if let Some(props) = &sub.properties {
|
||||
if let Some(type_prop) = props.get("type") {
|
||||
if let Some(const_val) = &type_prop.const_ {
|
||||
if let Some(const_str) = const_val.as_str() {
|
||||
if const_str != t {
|
||||
can_match = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if can_match {
|
||||
viable_candidates.push(*sub);
|
||||
}
|
||||
}
|
||||
|
||||
println!("DEBUG VIABLE: {:?}", viable_candidates.iter().map(|s| s.id.clone()).collect::<Vec<_>>());
|
||||
// 2. Evaluate Viable Candidates
|
||||
// 2. Evaluate Viable Candidates
|
||||
// Composition validation is natively handled directly via type compilation.
|
||||
// The deprecated allOf JSON structure is no longer supported nor traversed.
|
||||
for sub in viable_candidates.clone() {
|
||||
let derived = self.derive_for_schema(sub, false);
|
||||
let sub_res = derived.validate()?;
|
||||
if sub_res.is_valid() {
|
||||
passed_candidates.push((sub.id.clone(), sub_res));
|
||||
} else {
|
||||
failed_candidates.push(sub_res);
|
||||
}
|
||||
}
|
||||
for f in &failed_candidates {
|
||||
println!(" - Failed candidate errors: {:?}", f.errors.iter().map(|e| e.code.clone()).collect::<Vec<_>>());
|
||||
}
|
||||
|
||||
if passed_candidates.len() == 1 {
|
||||
result.merge(passed_candidates.pop().unwrap().1);
|
||||
} else if passed_candidates.is_empty() {
|
||||
// 3. Discriminator Pathing (Failure Analytics)
|
||||
let type_path = self.join_path("type");
|
||||
|
||||
if instance_type.is_some() {
|
||||
// Filter to candidates that didn't explicitly throw a CONST violation on `type`
|
||||
let mut genuinely_failed = Vec::new();
|
||||
for res in &failed_candidates {
|
||||
let rejected_type = res.errors.iter().any(|e| {
|
||||
(e.code == "CONST_VIOLATED" || e.code == "ENUM_VIOLATED") && e.path == type_path
|
||||
});
|
||||
if !rejected_type {
|
||||
genuinely_failed.push(res.clone());
|
||||
}
|
||||
}
|
||||
|
||||
println!("DEBUG genuinely_failed len: {}", genuinely_failed.len());
|
||||
|
||||
if genuinely_failed.len() == 1 {
|
||||
// Golden Type Match (1 candidate was structurally possible but failed property validation)
|
||||
let sub_res = genuinely_failed.pop().unwrap();
|
||||
result.errors.extend(sub_res.errors);
|
||||
result.evaluated_keys.extend(sub_res.evaluated_keys);
|
||||
return Ok(false);
|
||||
} else {
|
||||
// Pure Ad-Hoc Union
|
||||
result.errors.push(ValidationError {
|
||||
code: if self.schema.family.is_some() { "NO_FAMILY_MATCH".to_string() } else { "NO_ONEOF_MATCH".to_string() },
|
||||
message: "Payload matches none of the required candidate sub-schemas".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
|
||||
for sub_res in &failed_candidates {
|
||||
result.evaluated_keys.extend(sub_res.evaluated_keys.clone());
|
||||
}
|
||||
println!("DEBUG ELSE NO_FAMILY_MATCH RUNNING. Genuinely Failed len: {}", genuinely_failed.len());
|
||||
if viable_candidates.is_empty() {
|
||||
if let Some(obj) = self.instance.as_object() {
|
||||
result.evaluated_keys.extend(obj.keys().cloned());
|
||||
}
|
||||
}
|
||||
for sub_res in genuinely_failed {
|
||||
for e in sub_res.errors {
|
||||
if !result.errors.iter().any(|existing| existing.code == e.code && existing.path == e.path) {
|
||||
result.errors.push(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return Ok(false);
|
||||
}
|
||||
} else {
|
||||
// Instance missing type
|
||||
// Instance missing type
|
||||
let expects_type = viable_candidates.iter().any(|c| {
|
||||
c.compiled_property_names.get().map_or(false, |props| props.contains(&"type".to_string()))
|
||||
});
|
||||
|
||||
if expects_type {
|
||||
result.errors.push(ValidationError {
|
||||
code: "MISSING_TYPE".to_string(),
|
||||
message: "Missing type discriminator. Unable to resolve polymorphic boundaries".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
|
||||
for sub_res in failed_candidates {
|
||||
result.evaluated_keys.extend(sub_res.evaluated_keys);
|
||||
}
|
||||
return Ok(false);
|
||||
} else {
|
||||
// Pure Ad-Hoc Union
|
||||
result.errors.push(ValidationError {
|
||||
code: if self.schema.family.is_some() { "NO_FAMILY_MATCH".to_string() } else { "NO_ONEOF_MATCH".to_string() },
|
||||
message: "Payload matches none of the required candidate sub-schemas".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
|
||||
if let Some(first) = failed_candidates.first() {
|
||||
let mut shared_errors = first.errors.clone();
|
||||
for sub_res in failed_candidates.iter().skip(1) {
|
||||
shared_errors.retain(|e1| {
|
||||
sub_res.errors.iter().any(|e2| e1.code == e2.code && e1.path == e2.path)
|
||||
});
|
||||
}
|
||||
for e in shared_errors {
|
||||
if !result.errors.iter().any(|existing| existing.code == e.code && existing.path == e.path) {
|
||||
result.errors.push(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for sub_res in failed_candidates {
|
||||
result.evaluated_keys.extend(sub_res.evaluated_keys);
|
||||
}
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
}
|
||||
} else {
|
||||
result.errors.push(ValidationError {
|
||||
code: "AMBIGUOUS_POLYMORPHIC_MATCH".to_string(),
|
||||
message: "Matches multiple polymorphic candidates inextricably".to_string(),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub(crate) fn validate_type_inheritance(
|
||||
&self,
|
||||
result: &mut ValidationResult,
|
||||
) -> Result<bool, ValidationError> {
|
||||
// Core inheritance logic replaces legacy routing
|
||||
let payload_primitive = match self.instance {
|
||||
serde_json::Value::Null => "null",
|
||||
serde_json::Value::Bool(_) => "boolean",
|
||||
serde_json::Value::Number(n) => {
|
||||
if n.is_i64() || n.is_u64() {
|
||||
"integer"
|
||||
} else {
|
||||
"number"
|
||||
}
|
||||
}
|
||||
serde_json::Value::String(_) => "string",
|
||||
serde_json::Value::Array(_) => "array",
|
||||
serde_json::Value::Object(_) => "object",
|
||||
};
|
||||
|
||||
let mut custom_types = Vec::new();
|
||||
match &self.schema.type_ {
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Single(t)) => {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
custom_types.push(t.clone());
|
||||
}
|
||||
}
|
||||
Some(crate::database::schema::SchemaTypeOrArray::Multiple(arr)) => {
|
||||
if arr.contains(&payload_primitive.to_string()) || (payload_primitive == "integer" && arr.contains(&"number".to_string())) {
|
||||
// It natively matched a primitive in the array options, skip forcing custom proxy fallback
|
||||
} else {
|
||||
for t in arr {
|
||||
if !crate::database::schema::is_primitive_type(t) {
|
||||
custom_types.push(t.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
for t in custom_types {
|
||||
if let Some(global_schema) = self.db.schemas.get(&t) {
|
||||
let mut new_overrides = self.overrides.clone();
|
||||
if let Some(props) = &self.schema.properties {
|
||||
new_overrides.extend(props.keys().map(|k| k.to_string()));
|
||||
@ -135,16 +355,16 @@ impl<'a> ValidationContext<'a> {
|
||||
&self.path,
|
||||
new_overrides,
|
||||
self.extensible,
|
||||
true,
|
||||
true, // Reporter mode
|
||||
);
|
||||
shadow.root = global_schema;
|
||||
result.merge(shadow.validate()?);
|
||||
} else {
|
||||
result.errors.push(ValidationError {
|
||||
code: "REF_RESOLUTION_FAILED".to_string(),
|
||||
code: "INHERITANCE_RESOLUTION_FAILED".to_string(),
|
||||
message: format!(
|
||||
"Reference pointer to '{}' was not found in schema registry",
|
||||
ref_str
|
||||
"Inherited entity pointer '{}' was not found in schema registry",
|
||||
t
|
||||
),
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
|
||||
@ -28,7 +28,7 @@ impl<'a> ValidationContext<'a> {
|
||||
path: self.path.to_string(),
|
||||
});
|
||||
}
|
||||
if let Some(ref compiled_re) = self.schema.compiled_pattern {
|
||||
if let Some(compiled_re) = self.schema.compiled_pattern.get() {
|
||||
if !compiled_re.0.is_match(s) {
|
||||
result.errors.push(ValidationError {
|
||||
code: "PATTERN_VIOLATED".to_string(),
|
||||
|
||||
Reference in New Issue
Block a user