Compare commits
8 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| f9cf1f837a | |||
| 796df7763c | |||
| 4a10833f50 | |||
| 46fc032026 | |||
| 7ec06b81cc | |||
| c4e8e0309f | |||
| eb91b65e65 | |||
| 8bf3649465 |
58
LOOKUP_VERIFICATION.md
Normal file
58
LOOKUP_VERIFICATION.md
Normal file
@ -0,0 +1,58 @@
|
||||
# The Postgres Partial Index Claiming Pattern
|
||||
|
||||
This document outlines the architectural strategy for securely handling the deduplication, claiming, and verification of sensitive unique identifiers (like email addresses or phone numbers) strictly through PostgreSQL without requiring "magical" logic in the JSPG `Merger`.
|
||||
|
||||
## The Denial of Service (DoS) Squatter Problem
|
||||
|
||||
If you enforce a standard `UNIQUE` constraint on an email address table:
|
||||
1. Malicious User A signs up and adds `jeff.bezos@amazon.com` to their account but never verifies it.
|
||||
2. The real Jeff Bezos signs up.
|
||||
3. The Database blocks Jeff because the unique string already exists.
|
||||
|
||||
The squatter has effectively locked the legitimate owner out of the system.
|
||||
|
||||
## The Anti-Patterns
|
||||
|
||||
1. **Global Entity Flags**: Adding a global `verified` boolean to the root `entity` table forces unrelated objects (like Widgets, Invoices, Orders) to carry verification logic that doesn't belong to them.
|
||||
2. **Magical Merger Logic**: Making JSPG's `Merger` aware of a specific `verified` field breaks its pure structural translation model. The Merger shouldn't need hardcoded conditional logic to know if it's allowed to update an unverified row.
|
||||
|
||||
## The Solution: Postgres Partial Unique Indexes
|
||||
|
||||
The holy grail is to defer all claiming logic natively to the database engine using a **Partial Unique Index**.
|
||||
|
||||
```sql
|
||||
-- Remove any existing global unique constraint on address first
|
||||
CREATE UNIQUE INDEX lk_email_address_verified
|
||||
ON email_address (address)
|
||||
WHERE verified_at IS NOT NULL;
|
||||
```
|
||||
|
||||
### How the Lifecycle Works Natively
|
||||
|
||||
1. **Unverified Squatters (Isolated Rows):**
|
||||
A hundred different users can send `{ "address": "jeff.bezos@amazon.com" }` through the `save_person` Punc. Because the Punc isolates them and doesn't allow setting the `verified_at` property natively (enforced by the JSON schema), the JSPG Merger inserts `NULL`.
|
||||
Postgres permits all 100 `INSERT` commands to succeed because the Partial Index **ignores** rows where `verified_at IS NULL`. Every user gets their own isolated, unverified row acting as a placeholder on their contact edge.
|
||||
|
||||
2. **The Verification Race (The Claim):**
|
||||
The real Jeff clicks his magic verification link. The backend securely executes a specific verification Punc that runs:
|
||||
`UPDATE email_address SET verified_at = now() WHERE id = <jeff's-real-uuid>`
|
||||
|
||||
3. **The Lockout:**
|
||||
Because Jeff's row now strictly satisfies `verified_at IS NOT NULL`, that exact row enters the Partial Unique Index.
|
||||
If any of the other 99 squatters *ever* click their fake verification links (or if a new user tries to verify that same email), PostgreSQL hits the index and raises a **unique constraint violation**, blocking the claim. The first user to verify has permanently claimed the address for the whole database.
|
||||
|
||||
### Periodic Cleanup
|
||||
|
||||
Since unverified rows are allowed to accumulate without colliding, a simple Postgres `pg_cron` job or backend worker can sweep the table nightly to prune abandoned claims and reclaim storage:
|
||||
|
||||
```sql
|
||||
DELETE FROM email_address
|
||||
WHERE verified_at IS NULL
|
||||
AND created_at < NOW() - INTERVAL '24 hours';
|
||||
```
|
||||
|
||||
### Why this is the Ultimate Architecture
|
||||
|
||||
* The **JSPG Merger** remains mathematically pure. It doesn't know what `verified_at` is; it simply respects the database's structural limits (`O(1)` pure translation).
|
||||
* **Row-Level Security (RLS)** naturally blocks users from seeing or claiming each other's unverified rows.
|
||||
* You offload race-condition handling entirely to PostgreSQL's B-tree index engine, which guarantees the claim is atomic at the database level — two concurrent verifications of the same address cannot both succeed.
|
||||
@ -142,7 +142,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "CONST_VIOLATED",
|
||||
"path": "/con"
|
||||
"path": "con"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@ -48,7 +48,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "TYPE_MISMATCH",
|
||||
"path": "/base_prop"
|
||||
"path": "base_prop"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -109,7 +109,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"path": "/a"
|
||||
"path": "a"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -126,7 +126,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"path": "/b"
|
||||
"path": "b"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -196,7 +196,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "DEPENDENCY_FAILED",
|
||||
"path": "/base_dep"
|
||||
"path": "base_dep"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -214,7 +214,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "DEPENDENCY_FAILED",
|
||||
"path": "/child_dep"
|
||||
"path": "child_dep"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@ -972,7 +972,12 @@
|
||||
"LEFT JOIN agreego.\"user\" t2 ON t2.id = t1.id",
|
||||
"LEFT JOIN agreego.\"organization\" t3 ON t3.id = t1.id",
|
||||
"LEFT JOIN agreego.\"entity\" t4 ON t4.id = t1.id",
|
||||
"WHERE \"first_name\" = 'LookupFirst' AND \"last_name\" = 'LookupLast' AND \"date_of_birth\" = '1990-01-01T00:00:00Z' AND \"pronouns\" = 'they/them'"
|
||||
"WHERE (",
|
||||
" \"first_name\" = 'LookupFirst'",
|
||||
" AND \"last_name\" = 'LookupLast'",
|
||||
" AND \"date_of_birth\" = '1990-01-01T00:00:00Z'",
|
||||
" AND \"pronouns\" = 'they/them'",
|
||||
")"
|
||||
],
|
||||
[
|
||||
"UPDATE agreego.\"person\"",
|
||||
@ -1039,6 +1044,114 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Update existing person with id (lookup)",
|
||||
"action": "merge",
|
||||
"data": {
|
||||
"id": "33333333-3333-3333-3333-333333333333",
|
||||
"type": "person",
|
||||
"first_name": "LookupFirst",
|
||||
"last_name": "LookupLast",
|
||||
"date_of_birth": "1990-01-01T00:00:00Z",
|
||||
"pronouns": "they/them",
|
||||
"contact_id": "abc-contact"
|
||||
},
|
||||
"mocks": [
|
||||
{
|
||||
"id": "22222222-2222-2222-2222-222222222222",
|
||||
"type": "person",
|
||||
"first_name": "LookupFirst",
|
||||
"last_name": "LookupLast",
|
||||
"date_of_birth": "1990-01-01T00:00:00Z",
|
||||
"pronouns": "they/them",
|
||||
"contact_id": "old-contact"
|
||||
}
|
||||
],
|
||||
"schema_id": "person",
|
||||
"expect": {
|
||||
"success": true,
|
||||
"sql": [
|
||||
[
|
||||
"SELECT to_jsonb(t1.*) || to_jsonb(t2.*) || to_jsonb(t3.*) || to_jsonb(t4.*)",
|
||||
"FROM agreego.\"person\" t1",
|
||||
"LEFT JOIN agreego.\"user\" t2 ON t2.id = t1.id",
|
||||
"LEFT JOIN agreego.\"organization\" t3 ON t3.id = t1.id",
|
||||
"LEFT JOIN agreego.\"entity\" t4 ON t4.id = t1.id",
|
||||
"WHERE",
|
||||
" t1.id = '33333333-3333-3333-3333-333333333333'",
|
||||
" OR (",
|
||||
" \"first_name\" = 'LookupFirst'",
|
||||
" AND \"last_name\" = 'LookupLast'",
|
||||
" AND \"date_of_birth\" = '1990-01-01T00:00:00Z'",
|
||||
" AND \"pronouns\" = 'they/them'",
|
||||
" )"
|
||||
],
|
||||
[
|
||||
"UPDATE agreego.\"person\"",
|
||||
"SET",
|
||||
" \"contact_id\" = 'abc-contact'",
|
||||
"WHERE",
|
||||
" id = '22222222-2222-2222-2222-222222222222'"
|
||||
],
|
||||
[
|
||||
"UPDATE agreego.\"entity\"",
|
||||
"SET",
|
||||
" \"modified_at\" = '2026-03-10T00:00:00Z',",
|
||||
" \"modified_by\" = '00000000-0000-0000-0000-000000000000'",
|
||||
"WHERE",
|
||||
" id = '22222222-2222-2222-2222-222222222222'"
|
||||
],
|
||||
[
|
||||
"INSERT INTO agreego.change (",
|
||||
" \"old\",",
|
||||
" \"new\",",
|
||||
" entity_id,",
|
||||
" id,",
|
||||
" kind,",
|
||||
" modified_at,",
|
||||
" modified_by",
|
||||
")",
|
||||
"VALUES (",
|
||||
" '{",
|
||||
" \"contact_id\":\"old-contact\"",
|
||||
" }',",
|
||||
" '{",
|
||||
" \"contact_id\":\"abc-contact\",",
|
||||
" \"type\":\"person\"",
|
||||
" }',",
|
||||
" '22222222-2222-2222-2222-222222222222',",
|
||||
" '{{uuid}}',",
|
||||
" 'update',",
|
||||
" '{{timestamp}}',",
|
||||
" '00000000-0000-0000-0000-000000000000'",
|
||||
")"
|
||||
],
|
||||
[
|
||||
"SELECT pg_notify('entity', '{",
|
||||
" \"complete\":{",
|
||||
" \"contact_id\":\"abc-contact\",",
|
||||
" \"date_of_birth\":\"1990-01-01T00:00:00Z\",",
|
||||
" \"first_name\":\"LookupFirst\",",
|
||||
" \"id\":\"22222222-2222-2222-2222-222222222222\",",
|
||||
" \"last_name\":\"LookupLast\",",
|
||||
" \"modified_at\":\"2026-03-10T00:00:00Z\",",
|
||||
" \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
|
||||
" \"pronouns\":\"they/them\",",
|
||||
" \"type\":\"person\"",
|
||||
" },",
|
||||
" \"new\":{",
|
||||
" \"contact_id\":\"abc-contact\",",
|
||||
" \"type\":\"person\"",
|
||||
" },",
|
||||
" \"old\":{",
|
||||
" \"contact_id\":\"old-contact\"",
|
||||
" },",
|
||||
" \"replaces\":\"33333333-3333-3333-3333-333333333333\"",
|
||||
" }')"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Update existing person with id (no lookup)",
|
||||
"action": "merge",
|
||||
@ -1484,7 +1597,7 @@
|
||||
"SELECT to_jsonb(t1.*) || to_jsonb(t2.*)",
|
||||
"FROM agreego.\"order\" t1",
|
||||
"LEFT JOIN agreego.\"entity\" t2 ON t2.id = t1.id",
|
||||
"WHERE t1.id = 'abc'"
|
||||
"WHERE t1.id = 'abc' OR (\"id\" = 'abc')"
|
||||
],
|
||||
[
|
||||
"INSERT INTO agreego.\"entity\" (",
|
||||
|
||||
214
fixtures/paths.json
Normal file
214
fixtures/paths.json
Normal file
@ -0,0 +1,214 @@
|
||||
[
|
||||
{
|
||||
"description": "Hybrid Array Pathing",
|
||||
"database": {
|
||||
"schemas": [
|
||||
{
|
||||
"$id": "hybrid_pathing",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primitives": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"ad_hoc_objects": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name"
|
||||
]
|
||||
}
|
||||
},
|
||||
"entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "number",
|
||||
"minimum": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"deep_entities": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"nested": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"flag": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"tests": [
|
||||
{
|
||||
"description": "happy path passes structural validation",
|
||||
"data": {
|
||||
"primitives": [
|
||||
"a",
|
||||
"b"
|
||||
],
|
||||
"ad_hoc_objects": [
|
||||
{
|
||||
"name": "obj1"
|
||||
}
|
||||
],
|
||||
"entities": [
|
||||
{
|
||||
"id": "entity-1",
|
||||
"value": 15
|
||||
}
|
||||
],
|
||||
"deep_entities": [
|
||||
{
|
||||
"id": "parent-1",
|
||||
"nested": [
|
||||
{
|
||||
"id": "child-1",
|
||||
"flag": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "primitive arrays use numeric indexing",
|
||||
"data": {
|
||||
"primitives": [
|
||||
"a",
|
||||
123
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "INVALID_TYPE",
|
||||
"path": "primitives/1"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ad-hoc objects without ids use numeric indexing",
|
||||
"data": {
|
||||
"ad_hoc_objects": [
|
||||
{
|
||||
"name": "valid"
|
||||
},
|
||||
{
|
||||
"age": 30
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"path": "ad_hoc_objects/1/name"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "arrays of objects with ids use topological uuid indexing",
|
||||
"data": {
|
||||
"entities": [
|
||||
{
|
||||
"id": "entity-alpha",
|
||||
"value": 20
|
||||
},
|
||||
{
|
||||
"id": "entity-beta",
|
||||
"value": 5
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "MINIMUM_VIOLATED",
|
||||
"path": "entities/entity-beta/value"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "deeply nested entity arrays retain full topological paths",
|
||||
"data": {
|
||||
"deep_entities": [
|
||||
{
|
||||
"id": "parent-omega",
|
||||
"nested": [
|
||||
{
|
||||
"id": "child-alpha",
|
||||
"flag": true
|
||||
},
|
||||
{
|
||||
"id": "child-beta",
|
||||
"flag": "invalid-string"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema_id": "hybrid_pathing",
|
||||
"action": "validate",
|
||||
"expect": {
|
||||
"success": false,
|
||||
"errors": [
|
||||
{
|
||||
"code": "INVALID_TYPE",
|
||||
"path": "deep_entities/parent-omega/nested/child-beta/flag"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
@ -677,7 +677,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "TYPE_MISMATCH",
|
||||
"path": "/type"
|
||||
"path": "type"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -782,7 +782,7 @@
|
||||
"errors": [
|
||||
{
|
||||
"code": "TYPE_MISMATCH",
|
||||
"path": "/type"
|
||||
"path": "type"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@ -124,42 +124,23 @@ fn parse_and_match_mocks(sql: &str, mocks: &[Value]) -> Option<Vec<Value>> {
|
||||
return None;
|
||||
};
|
||||
|
||||
// 2. Extract WHERE conditions
|
||||
let mut conditions = Vec::new();
|
||||
// 2. Extract WHERE conditions string
|
||||
let mut where_clause = String::new();
|
||||
if let Some(where_idx) = sql_upper.find(" WHERE ") {
|
||||
let mut where_end = sql_upper.find(" ORDER BY ").unwrap_or(sql.len());
|
||||
let mut where_end = sql_upper.find(" ORDER BY ").unwrap_or(sql_upper.len());
|
||||
if let Some(limit_idx) = sql_upper.find(" LIMIT ") {
|
||||
if limit_idx < where_end {
|
||||
where_end = limit_idx;
|
||||
}
|
||||
}
|
||||
let where_clause = &sql[where_idx + 7..where_end];
|
||||
let and_regex = Regex::new(r"(?i)\s+AND\s+").ok()?;
|
||||
let parts = and_regex.split(where_clause);
|
||||
for part in parts {
|
||||
if let Some(eq_idx) = part.find('=') {
|
||||
let left = part[..eq_idx]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.trim_matches('"');
|
||||
let right = part[eq_idx + 1..].trim().trim_matches('\'');
|
||||
conditions.push((left.to_string(), right.to_string()));
|
||||
} else if part.to_uppercase().contains(" IS NULL") {
|
||||
let left = part[..part.to_uppercase().find(" IS NULL").unwrap()]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.replace('"', ""); // Remove quotes explicitly
|
||||
conditions.push((left, "null".to_string()));
|
||||
}
|
||||
}
|
||||
where_clause = sql[where_idx + 7..where_end].to_string();
|
||||
}
|
||||
|
||||
// 3. Find matching mocks
|
||||
let mut matches = Vec::new();
|
||||
let or_regex = Regex::new(r"(?i)\s+OR\s+").ok()?;
|
||||
let and_regex = Regex::new(r"(?i)\s+AND\s+").ok()?;
|
||||
|
||||
for mock in mocks {
|
||||
if let Some(mock_obj) = mock.as_object() {
|
||||
if let Some(t) = mock_obj.get("type") {
|
||||
@ -168,25 +149,66 @@ fn parse_and_match_mocks(sql: &str, mocks: &[Value]) -> Option<Vec<Value>> {
|
||||
}
|
||||
}
|
||||
|
||||
let mut matches_all = true;
|
||||
for (k, v) in &conditions {
|
||||
let mock_val_str = match mock_obj.get(k) {
|
||||
Some(Value::String(s)) => s.clone(),
|
||||
Some(Value::Number(n)) => n.to_string(),
|
||||
Some(Value::Bool(b)) => b.to_string(),
|
||||
Some(Value::Null) => "null".to_string(),
|
||||
_ => {
|
||||
matches_all = false;
|
||||
break;
|
||||
if where_clause.is_empty() {
|
||||
matches.push(mock.clone());
|
||||
continue;
|
||||
}
|
||||
|
||||
let or_parts = or_regex.split(&where_clause);
|
||||
let mut any_branch_matched = false;
|
||||
|
||||
for or_part in or_parts {
|
||||
let branch_str = or_part.replace('(', "").replace(')', "");
|
||||
let mut branch_matches = true;
|
||||
|
||||
for part in and_regex.split(&branch_str) {
|
||||
if let Some(eq_idx) = part.find('=') {
|
||||
let left = part[..eq_idx]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.trim_matches('"');
|
||||
let right = part[eq_idx + 1..].trim().trim_matches('\'');
|
||||
|
||||
let mock_val_str = match mock_obj.get(left) {
|
||||
Some(Value::String(s)) => s.clone(),
|
||||
Some(Value::Number(n)) => n.to_string(),
|
||||
Some(Value::Bool(b)) => b.to_string(),
|
||||
Some(Value::Null) => "null".to_string(),
|
||||
_ => "".to_string(),
|
||||
};
|
||||
if mock_val_str != right {
|
||||
branch_matches = false;
|
||||
break;
|
||||
}
|
||||
} else if part.to_uppercase().contains(" IS NULL") {
|
||||
let left = part[..part.to_uppercase().find(" IS NULL").unwrap()]
|
||||
.trim()
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or("")
|
||||
.trim_matches('"');
|
||||
|
||||
let mock_val_str = match mock_obj.get(left) {
|
||||
Some(Value::Null) => "null".to_string(),
|
||||
_ => "".to_string(),
|
||||
};
|
||||
|
||||
if mock_val_str != "null" {
|
||||
branch_matches = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
if mock_val_str != *v {
|
||||
matches_all = false;
|
||||
}
|
||||
|
||||
if branch_matches {
|
||||
any_branch_matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if matches_all {
|
||||
if any_branch_matched {
|
||||
matches.push(mock.clone());
|
||||
}
|
||||
}
|
||||
|
||||
@ -228,13 +228,15 @@ impl Merger {
|
||||
|
||||
let mut entity_change_kind = None;
|
||||
let mut entity_fetched = None;
|
||||
let mut entity_replaces = None;
|
||||
|
||||
if !type_def.relationship {
|
||||
let (fields, kind, fetched) =
|
||||
let (fields, kind, fetched, replaces) =
|
||||
self.stage_entity(entity_fields.clone(), type_def, &user_id, ×tamp)?;
|
||||
entity_fields = fields;
|
||||
entity_change_kind = kind;
|
||||
entity_fetched = fetched;
|
||||
entity_replaces = replaces;
|
||||
}
|
||||
|
||||
let mut entity_response = serde_json::Map::new();
|
||||
@ -308,11 +310,12 @@ impl Merger {
|
||||
}
|
||||
|
||||
if type_def.relationship {
|
||||
let (fields, kind, fetched) =
|
||||
let (fields, kind, fetched, replaces) =
|
||||
self.stage_entity(entity_fields.clone(), type_def, &user_id, ×tamp)?;
|
||||
entity_fields = fields;
|
||||
entity_change_kind = kind;
|
||||
entity_fetched = fetched;
|
||||
entity_replaces = replaces;
|
||||
}
|
||||
|
||||
self.merge_entity_fields(
|
||||
@ -388,6 +391,7 @@ impl Merger {
|
||||
entity_change_kind.as_deref(),
|
||||
&user_id,
|
||||
×tamp,
|
||||
entity_replaces.as_deref(),
|
||||
)?;
|
||||
|
||||
if let Some(sql) = notify_sql {
|
||||
@ -419,6 +423,7 @@ impl Merger {
|
||||
serde_json::Map<String, Value>,
|
||||
Option<String>,
|
||||
Option<serde_json::Map<String, Value>>,
|
||||
Option<String>,
|
||||
),
|
||||
String,
|
||||
> {
|
||||
@ -438,11 +443,22 @@ impl Merger {
|
||||
.map_or(false, |s| !s.is_empty());
|
||||
|
||||
if is_anchor && has_valid_id {
|
||||
return Ok((entity_fields, None, None));
|
||||
return Ok((entity_fields, None, None, None));
|
||||
}
|
||||
|
||||
let entity_fetched = self.fetch_entity(&entity_fields, type_def)?;
|
||||
|
||||
let mut replaces_id = None;
|
||||
if let Some(ref fetched_row) = entity_fetched {
|
||||
let provided_id = entity_fields.get("id").and_then(|v| v.as_str());
|
||||
let fetched_id = fetched_row.get("id").and_then(|v| v.as_str());
|
||||
if let (Some(pid), Some(fid)) = (provided_id, fetched_id) {
|
||||
if !pid.is_empty() && pid != fid {
|
||||
replaces_id = Some(pid.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let system_keys = vec![
|
||||
"id".to_string(),
|
||||
"type".to_string(),
|
||||
@ -530,7 +546,7 @@ impl Merger {
|
||||
entity_fields = new_fields;
|
||||
}
|
||||
|
||||
Ok((entity_fields, entity_change_kind, entity_fetched))
|
||||
Ok((entity_fields, entity_change_kind, entity_fetched, replaces_id))
|
||||
}
|
||||
|
||||
fn fetch_entity(
|
||||
@ -585,11 +601,14 @@ impl Merger {
|
||||
template
|
||||
};
|
||||
|
||||
let where_clause = if let Some(id) = id_val {
|
||||
format!("WHERE t1.id = {}", Self::quote_literal(id))
|
||||
} else if lookup_complete {
|
||||
let mut lookup_predicates = Vec::new();
|
||||
let mut where_parts = Vec::new();
|
||||
|
||||
if let Some(id) = id_val {
|
||||
where_parts.push(format!("t1.id = {}", Self::quote_literal(id)));
|
||||
}
|
||||
|
||||
if lookup_complete {
|
||||
let mut lookup_predicates = Vec::new();
|
||||
for column in &entity_type.lookup_fields {
|
||||
let val = entity_fields.get(column).unwrap_or(&Value::Null);
|
||||
if column == "type" {
|
||||
@ -598,10 +617,14 @@ impl Merger {
|
||||
lookup_predicates.push(format!("\"{}\" = {}", column, Self::quote_literal(val)));
|
||||
}
|
||||
}
|
||||
format!("WHERE {}", lookup_predicates.join(" AND "))
|
||||
} else {
|
||||
where_parts.push(format!("({})", lookup_predicates.join(" AND ")));
|
||||
}
|
||||
|
||||
if where_parts.is_empty() {
|
||||
return Ok(None);
|
||||
};
|
||||
}
|
||||
|
||||
let where_clause = format!("WHERE {}", where_parts.join(" OR "));
|
||||
|
||||
let final_sql = format!("{} {}", fetch_sql_template, where_clause);
|
||||
|
||||
@ -761,6 +784,7 @@ impl Merger {
|
||||
entity_change_kind: Option<&str>,
|
||||
user_id: &str,
|
||||
timestamp: &str,
|
||||
replaces_id: Option<&str>,
|
||||
) -> Result<Option<String>, String> {
|
||||
let change_kind = match entity_change_kind {
|
||||
Some(k) => k,
|
||||
@ -835,6 +859,10 @@ impl Merger {
|
||||
if old_val_obj != Value::Null {
|
||||
notification.insert("old".to_string(), old_val_obj.clone());
|
||||
}
|
||||
|
||||
if let Some(rep) = replaces_id {
|
||||
notification.insert("replaces".to_string(), Value::String(rep.to_string()));
|
||||
}
|
||||
|
||||
let mut notify_sql = None;
|
||||
if type_obj.historical {
|
||||
|
||||
@ -2927,6 +2927,36 @@ fn test_minimum_1_6() {
|
||||
crate::tests::runner::run_test_case(&path, 1, 6).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_paths_0_0() {
|
||||
let path = format!("{}/fixtures/paths.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 0).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_paths_0_1() {
|
||||
let path = format!("{}/fixtures/paths.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 1).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_paths_0_2() {
|
||||
let path = format!("{}/fixtures/paths.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 2).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_paths_0_3() {
|
||||
let path = format!("{}/fixtures/paths.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 3).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_paths_0_4() {
|
||||
let path = format!("{}/fixtures/paths.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 4).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_one_of_0_0() {
|
||||
let path = format!("{}/fixtures/oneOf.json", env!("CARGO_MANIFEST_DIR"));
|
||||
@ -8566,3 +8596,9 @@ fn test_merger_0_10() {
|
||||
let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 10).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_merger_0_11() {
|
||||
let path = format!("{}/fixtures/merger.json", env!("CARGO_MANIFEST_DIR"));
|
||||
crate::tests::runner::run_test_case(&path, 0, 11).unwrap();
|
||||
}
|
||||
|
||||
@ -134,12 +134,12 @@ fn test_library_api() {
|
||||
{
|
||||
"code": "REQUIRED_FIELD_MISSING",
|
||||
"message": "Missing name",
|
||||
"details": { "path": "/name" }
|
||||
"details": { "path": "name" }
|
||||
},
|
||||
{
|
||||
"code": "STRICT_PROPERTY_VIOLATION",
|
||||
"message": "Unexpected property 'wrong'",
|
||||
"details": { "path": "/wrong" }
|
||||
"details": { "path": "wrong" }
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
@ -41,6 +41,14 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn join_path(&self, key: &str) -> String {
|
||||
if self.path.is_empty() {
|
||||
key.to_string()
|
||||
} else {
|
||||
format!("{}/{}", self.path, key)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn derive(
|
||||
&self,
|
||||
schema: &'a Schema,
|
||||
|
||||
@ -91,12 +91,17 @@ impl<'a> ValidationContext<'a> {
|
||||
if let Some(ref prefix) = self.schema.prefix_items {
|
||||
for (i, sub_schema) in prefix.iter().enumerate() {
|
||||
if i < len {
|
||||
let path = format!("{}/{}", self.path, i);
|
||||
if let Some(child_instance) = arr.get(i) {
|
||||
let mut item_path = self.join_path(&i.to_string());
|
||||
if let Some(obj) = child_instance.as_object() {
|
||||
if let Some(id_str) = obj.get("id").and_then(|v| v.as_str()) {
|
||||
item_path = self.join_path(id_str);
|
||||
}
|
||||
}
|
||||
let derived = self.derive(
|
||||
sub_schema,
|
||||
child_instance,
|
||||
&path,
|
||||
&item_path,
|
||||
HashSet::new(),
|
||||
self.extensible,
|
||||
false,
|
||||
@ -112,12 +117,17 @@ impl<'a> ValidationContext<'a> {
|
||||
|
||||
if let Some(ref items_schema) = self.schema.items {
|
||||
for i in validation_index..len {
|
||||
let path = format!("{}/{}", self.path, i);
|
||||
if let Some(child_instance) = arr.get(i) {
|
||||
let mut item_path = self.join_path(&i.to_string());
|
||||
if let Some(obj) = child_instance.as_object() {
|
||||
if let Some(id_str) = obj.get("id").and_then(|v| v.as_str()) {
|
||||
item_path = self.join_path(id_str);
|
||||
}
|
||||
}
|
||||
let derived = self.derive(
|
||||
items_schema,
|
||||
child_instance,
|
||||
&path,
|
||||
&item_path,
|
||||
HashSet::new(),
|
||||
self.extensible,
|
||||
false,
|
||||
|
||||
@ -44,7 +44,7 @@ impl<'a> ValidationContext<'a> {
|
||||
result.errors.push(ValidationError {
|
||||
code: "STRICT_PROPERTY_VIOLATION".to_string(),
|
||||
message: format!("Unexpected property '{}'", key),
|
||||
path: format!("{}/{}", self.path, key),
|
||||
path: self.join_path(key),
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -53,10 +53,18 @@ impl<'a> ValidationContext<'a> {
|
||||
if let Some(arr) = self.instance.as_array() {
|
||||
for i in 0..arr.len() {
|
||||
if !result.evaluated_indices.contains(&i) {
|
||||
let mut item_path = self.join_path(&i.to_string());
|
||||
if let Some(child_instance) = arr.get(i) {
|
||||
if let Some(obj) = child_instance.as_object() {
|
||||
if let Some(id_str) = obj.get("id").and_then(|v| v.as_str()) {
|
||||
item_path = self.join_path(id_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
result.errors.push(ValidationError {
|
||||
code: "STRICT_ITEM_VIOLATION".to_string(),
|
||||
message: format!("Unexpected item at index {}", i),
|
||||
path: format!("{}/{}", self.path, i),
|
||||
path: item_path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -32,7 +32,7 @@ impl<'a> ValidationContext<'a> {
|
||||
"Type '{}' is not a valid descendant for this entity bound schema",
|
||||
type_str
|
||||
),
|
||||
path: format!("{}/type", self.path),
|
||||
path: self.join_path("type"),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
@ -70,7 +70,7 @@ impl<'a> ValidationContext<'a> {
|
||||
result.errors.push(ValidationError {
|
||||
code: "REQUIRED_FIELD_MISSING".to_string(),
|
||||
message: format!("Missing {}", field),
|
||||
path: format!("{}/{}", self.path, field),
|
||||
path: self.join_path(field),
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -109,7 +109,7 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
|
||||
if let Some(child_instance) = obj.get(key) {
|
||||
let new_path = format!("{}/{}", self.path, key);
|
||||
let new_path = self.join_path(key);
|
||||
let is_ref = sub_schema.r#ref.is_some();
|
||||
let next_extensible = if is_ref { false } else { self.extensible };
|
||||
|
||||
@ -147,7 +147,7 @@ impl<'a> ValidationContext<'a> {
|
||||
for (compiled_re, sub_schema) in compiled_pp {
|
||||
for (key, child_instance) in obj {
|
||||
if compiled_re.0.is_match(key) {
|
||||
let new_path = format!("{}/{}", self.path, key);
|
||||
let new_path = self.join_path(key);
|
||||
let is_ref = sub_schema.r#ref.is_some();
|
||||
let next_extensible = if is_ref { false } else { self.extensible };
|
||||
|
||||
@ -186,7 +186,7 @@ impl<'a> ValidationContext<'a> {
|
||||
}
|
||||
|
||||
if !locally_matched {
|
||||
let new_path = format!("{}/{}", self.path, key);
|
||||
let new_path = self.join_path(key);
|
||||
let is_ref = additional_schema.r#ref.is_some();
|
||||
let next_extensible = if is_ref { false } else { self.extensible };
|
||||
|
||||
@ -207,7 +207,7 @@ impl<'a> ValidationContext<'a> {
|
||||
|
||||
if let Some(ref property_names) = self.schema.property_names {
|
||||
for key in obj.keys() {
|
||||
let _new_path = format!("{}/propertyNames/{}", self.path, key);
|
||||
let _new_path = self.join_path(&format!("propertyNames/{}", key));
|
||||
let val_str = Value::String(key.to_string());
|
||||
|
||||
let ctx = ValidationContext::new(
|
||||
|
||||
Reference in New Issue
Block a user