Compare commits

..

14 Commits

Author SHA1 Message Date
473b087d97 version: 1.0.142 2026-05-14 14:48:19 -04:00
6d6745d95d removed all jsonb 2026-05-14 14:48:09 -04:00
146efaa2d9 version: 1.0.141 2026-05-14 14:01:41 -04:00
d0294eec3f last ordering fixes 2026-05-14 13:57:50 -04:00
02ab4b6438 version: 1.0.140 2026-05-14 05:58:58 -04:00
2a8b991269 fixed ordering of all things sql 2026-05-14 05:58:38 -04:00
ce9c9baac9 fixing ordering checkpoint 2026-05-14 03:26:03 -04:00
3034406706 fixing ordering checkpoint 2026-05-14 03:21:12 -04:00
3d918a1acc version: 1.0.139 2026-05-13 19:28:13 -04:00
1f9b407074 final fix to org id setting in merge 2026-05-13 19:28:03 -04:00
6ea6007d86 version: 1.0.138 2026-05-13 16:31:15 -04:00
c129864c89 fixed another org id issue with merger 2026-05-13 16:31:06 -04:00
777fc8bbf8 version: 1.0.137 2026-05-13 15:58:54 -04:00
803d62b2fb cleanup 2026-05-13 15:58:49 -04:00
33 changed files with 2494 additions and 2507 deletions

1
Cargo.lock generated
View File

@ -1663,6 +1663,7 @@ version = "1.0.149"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
dependencies = [ dependencies = [
"indexmap",
"itoa", "itoa",
"memchr", "memchr",
"serde", "serde",

View File

@ -6,7 +6,7 @@ edition = "2024"
[dependencies] [dependencies]
pgrx = "0.16.1" pgrx = "0.16.1"
serde = { version = "1.0.228", features = ["derive", "rc"] } serde = { version = "1.0.228", features = ["derive", "rc"] }
serde_json = "1.0.149" serde_json = { version = "1.0.149", features = ["preserve_order"] }
lazy_static = "1.5.0" lazy_static = "1.5.0"
once_cell = "1.21.3" once_cell = "1.21.3"
ahash = "0.8.12" ahash = "0.8.12"
@ -30,7 +30,7 @@ pgrx-tests = "0.16.1"
[build-dependencies] [build-dependencies]
serde = { version = "1.0.228", features = ["derive"] } serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149" serde_json = { version = "1.0.149", features = ["preserve_order"] }
[lib] [lib]
crate-type = ["cdylib", "lib"] crate-type = ["cdylib", "lib"]

View File

@ -285,3 +285,26 @@ JSPG abandons the standard `cargo pgrx test` model in favor of native OS testing
3. **Modular Test Dispatcher**: The `src/tests/types/` module deserializes the abstract JSON test payloads into `Suite`, `Case`, and `Expect` data structures. 3. **Modular Test Dispatcher**: The `src/tests/types/` module deserializes the abstract JSON test payloads into `Suite`, `Case`, and `Expect` data structures.
* The `compile` action natively asserts the exact output shape of `jspg_stems`, allowing structural and relationship mapping logic to be tested purely through JSON without writing brute-force manual tests in Rust. * The `compile` action natively asserts the exact output shape of `jspg_stems`, allowing structural and relationship mapping logic to be tested purely through JSON without writing brute-force manual tests in Rust.
4. **Unit Context Execution**: When `cargo test` executes, the runner iterates the JSON payloads. Because the tests run natively inside the module via `#[cfg(test)]`, the Rust compiler globally erases `pgrx` C-linkage, instantiates the `MockExecutor`, and allows for pure structural evaluation of complex database logic completely in memory in parallel. 4. **Unit Context Execution**: When `cargo test` executes, the runner iterates the JSON payloads. Because the tests run natively inside the module via `#[cfg(test)]`, the Rust compiler globally erases `pgrx` C-linkage, instantiates the `MockExecutor`, and allows for pure structural evaluation of complex database logic completely in memory in parallel.
### SQL Expectation Formatting & Auto-Variablization
Because JSPG SQL compilation generates large, complex relational statements (often featuring dynamically generated UUIDs or timestamps), manually updating expected SQL strings in the test fixtures is error-prone and tedious. To streamline this, JSPG includes a built-in intelligent test fixture formatter.
**When to use it:**
Whenever you modify the internal SQL generation logic (in the Queryer or Merger) and need to update the expected SQL outputs across the entire test suite.
**How to run it:**
Run the test suite sequentially while passing the `UPDATE_EXPECT=1` environment variable:
```bash
UPDATE_EXPECT=1 cargo test -- --test-threads=1
```
*Note: The `--test-threads=1` flag is strictly required to prevent parallel tests from concurrently overwriting the same JSON fixture files and corrupting them.*
**How it works (Intelligent Variablization):**
The JSPG engine natively generates actual, random UUIDs in memory for records inserted during `merger` tests. To assert relational integrity without hardcoding ephemeral random strings, the formatter utilizes an intelligent variable extraction map:
1. **Payload Extraction**: Before evaluating the SQL output, the test runner recursively scans the JSON of the `data` and `mocks` blocks for that specific test case. It maps any physical UUID it finds to its exact JSON path (e.g., `3333...` -> `mocks.0.id`).
2. **SQL Canonicalization**: The test runner utilizes `sqlparser` to format the raw engine SQL into pristine, multi-line readable structures.
3. **Variable Mapping**: It scans the formatted SQL using regex for UUIDs. If it encounters a UUID matching the payload extraction map, it replaces it with a template tag like `{{uuid:mocks.0.id}}` or `{{uuid:data.customer_id}}`.
4. **Generated Fallbacks**: If it encounters a brand-new random UUID that wasn't provided in the inputs (e.g., a newly generated ID for an `INSERT`), it assigns it a sequential tracking variable like `{{uuid:generated_0}}`. Every subsequent appearance of that *exact* same random UUID in the SQL transaction will reuse the `{{uuid:generated_0}}` tag. Timestamps are naturally replaced with `{{timestamp}}`.
This guarantees the `assert_pattern` execution engine can strictly validate that the exact same ID generated for a parent entity is correctly passed as a foreign key to its children across complex database transactions.

View File

@ -1,15 +0,0 @@
import json

# Load the merger test fixtures.
with open("fixtures/merger.json", "r") as f:
    fixtures = json.load(f)

# Locate the test case we just added by its description.
case = next(
    t
    for t in fixtures[0]["tests"]
    if t["description"] == "Test organization_id syntactic sugar permutations"
)

# Patch the first expected SQL command (the INSERT INTO entity for the
# person record): it must not carry an organization_id column, so drop
# both the column name line and its NULL value line.
first_command = case["expect"]["sql"][0]
first_command.remove(" \"organization_id\",")
first_command.remove(" NULL,")

# Persist the updated fixtures.
with open("fixtures/merger.json", "w") as f:
    json.dump(fixtures, f, indent=2)

View File

@ -1,17 +0,0 @@
import json

# Load the merger test fixtures.
with open("fixtures/merger.json", "r") as f:
    data = json.load(f)

database = data[0]["database"]

# Ensure organization_id is declared on the order, order_line and person
# types — both in the flat field list and in the "entity" field grouping.
for type_def in database["types"]:
    if type_def["name"] not in ["order", "order_line", "person"]:
        continue
    for field_list in (type_def["fields"], type_def["grouped_fields"]["entity"]):
        if "organization_id" not in field_list:
            field_list.append("organization_id")

# Persist the updated fixtures.
with open("fixtures/merger.json", "w") as f:
    json.dump(data, f, indent=2)

View File

@ -1,16 +0,0 @@
import json

# Load the raw fixture text (string-level surgery, not JSON-level).
with open("fixtures/merger.json", "r") as f:
    raw = f.read()

# Repair template tags that the formatter split across three JSON lines:
# collapse each '{ / tag / }' triple back into a single '{{...}}' literal.
repairs = (
    ("'{',\n \" {timestamp}\",\n \" }'", "'{{timestamp}}'"),
    ("'{',\n \" {uuid}\",\n \" }'", "'{{uuid}}'"),
    ("'{',\n \" {uuid:person_id}\",\n \" }'", "'{{uuid:person_id}}'"),
    ("'{',\n \" {uuid:order_id}\",\n \" }'", "'{{uuid:order_id}}'"),
    ("'{',\n \" {uuid:line1_id}\",\n \" }'", "'{{uuid:line1_id}}'"),
    ("'{',\n \" {uuid:line2_id}\",\n \" }'", "'{{uuid:line2_id}}'"),
)
for broken, fixed in repairs:
    raw = raw.replace(broken, fixed)

# Write the repaired text back.
with open("fixtures/merger.json", "w") as f:
    f.write(raw)

View File

@ -1,32 +0,0 @@
import json

# Load the merger test fixtures.
with open("fixtures/merger.json", "r") as f:
    data = json.load(f)

last_test = data[0]["tests"][-1]

# Re-join '{ / <json body> / }' line triples that were split apart,
# producing a single quoted line per JSON payload. Lines that are not
# part of such a triple are carried over unchanged.
for group_idx, group in enumerate(last_test["expect"]["sql"]):
    merged = []
    pos = 0
    total = len(group)
    while pos < total:
        line = group[pos]
        if line.strip() == "'{" and pos + 2 < total:
            body = group[pos + 1].strip()
            closer = group[pos + 2].strip()
            # Reconstruct the single-line form, preserving the trailing
            # comma when the original closing line carried one.
            if closer == "}',":
                merged.append(f" '{body}',")
                pos += 3
                continue
            if closer == "}'":
                merged.append(f" '{body}'")
                pos += 3
                continue
        merged.append(line)
        pos += 1
    last_test["expect"]["sql"][group_idx] = merged

# Persist the updated fixtures.
with open("fixtures/merger.json", "w") as f:
    json.dump(data, f, indent=2)

View File

@ -1,20 +0,0 @@
import json

# Load the merger test fixtures.
with open("fixtures/merger.json", "r") as f:
    data = json.load(f)

case = data[0]["tests"][-1]

# Double the braces on every template tag so the fixture uses the
# {{...}} syntax expected by the assert_pattern engine.
substitutions = (
    ("'{timestamp}'", "'{{timestamp}}'"),
    ("'{uuid}'", "'{{uuid}}'"),
    ("'{uuid:person_id}'", "'{{uuid:person_id}}'"),
    ("'{uuid:order_id}'", "'{{uuid:order_id}}'"),
    ("'{uuid:line1_id}'", "'{{uuid:line1_id}}'"),
    ("'{uuid:line2_id}'", "'{{uuid:line2_id}}'"),
)
for group in case["expect"]["sql"]:
    for idx, line in enumerate(group):
        for old, new in substitutions:
            line = line.replace(old, new)
        group[idx] = line

# Persist the updated fixtures.
with open("fixtures/merger.json", "w") as f:
    json.dump(data, f, indent=2)

View File

@ -197,11 +197,11 @@
"gender.condition": { "gender.condition": {
"type": "condition", "type": "condition",
"compiledPropertyNames": [ "compiledPropertyNames": [
"kind",
"$eq", "$eq",
"$ne", "$ne",
"$nof",
"$of", "$of",
"kind" "$nof"
], ],
"properties": { "properties": {
"$eq": { "$eq": {
@ -239,29 +239,29 @@
"person": {}, "person": {},
"person.filter": { "person.filter": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"$and", "first_name",
"$or",
"ad_hoc",
"age", "age",
"billing_address", "billing_address",
"birth_date",
"first_name",
"gender", "gender",
"tags" "birth_date",
"tags",
"ad_hoc",
"$and",
"$or"
], ],
"properties": { "properties": {
"$and": { "$and": {
"items": { "items": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"$and", "first_name",
"$or",
"ad_hoc",
"age", "age",
"billing_address", "billing_address",
"birth_date",
"first_name",
"gender", "gender",
"tags" "birth_date",
"tags",
"ad_hoc",
"$and",
"$or"
], ],
"type": "person.filter" "type": "person.filter"
}, },
@ -273,15 +273,15 @@
"$or": { "$or": {
"items": { "items": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"$and", "first_name",
"$or",
"ad_hoc",
"age", "age",
"billing_address", "billing_address",
"birth_date",
"first_name",
"gender", "gender",
"tags" "birth_date",
"tags",
"ad_hoc",
"$and",
"$or"
], ],
"type": "person.filter" "type": "person.filter"
}, },
@ -350,9 +350,9 @@
"address.filter": { "address.filter": {
"type": "filter", "type": "filter",
"compiledPropertyNames": [ "compiledPropertyNames": [
"city",
"$and", "$and",
"$or", "$or"
"city"
], ],
"properties": { "properties": {
"$and": { "$and": {
@ -362,9 +362,9 @@
], ],
"items": { "items": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"city",
"$and", "$and",
"$or", "$or"
"city"
], ],
"type": "address.filter" "type": "address.filter"
} }
@ -376,9 +376,9 @@
], ],
"items": { "items": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"city",
"$and", "$and",
"$or", "$or"
"city"
], ],
"type": "address.filter" "type": "address.filter"
} }
@ -400,11 +400,11 @@
"search.filter": { "search.filter": {
"type": "filter", "type": "filter",
"compiledPropertyNames": [ "compiledPropertyNames": [
"$and",
"$or",
"filter",
"kind", "kind",
"name" "name",
"filter",
"$and",
"$or"
], ],
"properties": { "properties": {
"$and": { "$and": {
@ -414,11 +414,11 @@
], ],
"items": { "items": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"$and",
"$or",
"filter",
"kind", "kind",
"name" "name",
"filter",
"$and",
"$or"
], ],
"type": "search.filter" "type": "search.filter"
} }
@ -430,11 +430,11 @@
], ],
"items": { "items": {
"compiledPropertyNames": [ "compiledPropertyNames": [
"$and",
"$or",
"filter",
"kind", "kind",
"name" "name",
"filter",
"$and",
"$or"
], ],
"type": "search.filter" "type": "search.filter"
} }

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,111 +0,0 @@
import json
import re

# Post-processes the "organization_id syntactic sugar" test case in the
# merger fixtures: each expected SQL statement (stored as a list of string
# fragments) is joined back into one string and re-formatted into a
# readable multi-line layout before being written back to the fixture file.
with open("fixtures/merger.json", "r") as f:
    data = json.load(f)

test_case = next(t for t in data[0]["tests"] if t["description"] == "Test organization_id syntactic sugar permutations")

def format_sql(sql_str):
    # Pretty-print a single-line SQL string into a list of display lines.
    # Handles the two shapes this fixture emits — "INSERT INTO ... VALUES (...)"
    # and "SELECT pg_notify('entity', '<json>')" — and returns anything else
    # unchanged as a one-element list.
    if sql_str.startswith("INSERT INTO"):
        parts = sql_str.split(" VALUES ")
        insert_part = parts[0]
        values_part = parts[1]
        # Capture the table reference and the raw column list.
        insert_match = re.match(r"(INSERT INTO [a-zA-Z0-9_.\"]+) \((.*)\)", insert_part)
        table = insert_match.group(1)
        cols_str = insert_match.group(2)
        cols = [c.strip() for c in cols_str.split(",")]
        # Strip the surrounding parentheses from the VALUES tuple.
        values_str = values_part[1:-1]
        # We need to split values_str carefully, as JSON strings contain commas!
        # Since it's single quotes around values, we can split by ", " but that's risky.
        # Let's do a simple parse: track quote state and only split on
        # commas that appear outside single-quoted values.
        vals = []
        current_val = []
        in_quote = False
        i = 0
        while i < len(values_str):
            c = values_str[i]
            if c == "'":
                # handle double quotes inside? Postgres uses '' for escaping ' inside '.
                # Here we don't have that complexity.
                in_quote = not in_quote
                current_val.append(c)
            elif c == ',' and not in_quote:
                vals.append("".join(current_val).strip())
                current_val = []
            else:
                current_val.append(c)
            i += 1
        vals.append("".join(current_val).strip())
        # Emit the column list, one column per line, comma-separated.
        lines = [f"{table} ("]
        for i, col in enumerate(cols):
            lines.append(f" {col}" + ("," if i < len(cols) - 1 else ""))
        lines.append(")")
        lines.append("VALUES (")
        for i, val in enumerate(vals):
            if val.startswith("'{") and val.endswith("}'"):
                # Format JSON values across multiple indented lines.
                lines.append(" '{")
                json_str = val[2:-2]
                # Split json keys by ",
                # NOTE(review): this naive split assumes no value contains
                # the substring ',"' — holds for this fixture's payloads.
                json_pairs = json_str.split(',"')
                for j, pair in enumerate(json_pairs):
                    if j > 0:
                        pair = '"' + pair
                    lines.append(f" {pair}" + ("," if j < len(json_pairs) - 1 else ""))
                lines.append(" }'" + ("," if i < len(vals) - 1 else ""))
            else:
                # Replace '{{uuid}}' with '00000000-0000-0000-0000-000000000000' for created_by etc if it was replaced as '{{uuid}}'
                if val == "'{{uuid}}'" and cols[i] in ['"created_by"', '"modified_by"', 'modified_by']:
                    val = "'00000000-0000-0000-0000-000000000000'"
                lines.append(f" {val}" + ("," if i < len(vals) - 1 else ""))
        lines.append(")")
        return lines
    elif sql_str.startswith("SELECT pg_notify"):
        # Format notify string
        match = re.match(r"SELECT pg_notify\('entity', '(.*)'\)", sql_str)
        payload = match.group(1)
        # We know payload looks like {"complete":{...},"new":{...}}
        lines = ["SELECT pg_notify('entity', '{"]
        # split complete and new
        # NOTE(review): the [:-2] slice assumes the payload ends in "}}" —
        # confirm against the actual notify payloads emitted by the engine.
        complete_str = payload[payload.find('"complete":{')+12:payload.find('},"new":{')]
        new_str = payload[payload.find('"new":{')+7:-2]
        lines.append(" \"complete\":{")
        complete_pairs = complete_str.split(',"')
        for j, pair in enumerate(complete_pairs):
            if j > 0:
                pair = '"' + pair
            lines.append(f" {pair}" + ("," if j < len(complete_pairs) - 1 else ""))
        lines.append(" },")
        lines.append(" \"new\":{")
        new_pairs = new_str.split(',"')
        for j, pair in enumerate(new_pairs):
            if j > 0:
                pair = '"' + pair
            lines.append(f" {pair}" + ("," if j < len(new_pairs) - 1 else ""))
        lines.append(" }")
        lines.append(" }')")
        return lines
    # Fallback: unknown statement shapes are left as a single line.
    return [sql_str]

# Rebuild every expected SQL group through the formatter.
new_sql = []
for sql_group in test_case["expect"]["sql"]:
    sql_str = "".join(sql_group)
    formatted = format_sql(sql_str)
    new_sql.append(formatted)
test_case["expect"]["sql"] = new_sql

# Persist the reformatted fixtures.
with open("fixtures/merger.json", "w") as f:
    json.dump(data, f, indent=2)

View File

@ -1,409 +0,0 @@
import json

# One-shot fixture migration: teaches the merger fixtures about the
# organization_id field and appends a test case exercising all of its
# syntactic-sugar permutations (inherited from parent, explicit override,
# and schema-const).
with open("fixtures/merger.json", "r") as f:
    data = json.load(f)

db = data[0]["database"]

# 1. Update entity schema: declare organization_id as a plain string field.
entity_type = next(t for t in db["types"] if t["name"] == "entity")
entity_type["schemas"]["entity"]["properties"]["organization_id"] = {"type": "string"}
entity_type["fields"].append("organization_id")
entity_type["grouped_fields"]["entity"].append("organization_id")

# 2. Update person schema: pin organization_id to a schema-level const.
person_type = next(t for t in db["types"] if t["name"] == "person")
person_type["schemas"]["person"]["properties"]["organization_id"] = {
    "type": "string",
    "const": "ffffffff-ffff-ffff-ffff-ffffffffffff"
}

# 3. Add the test case: an order with an org id, a customer person (no
#    org id given — schema const applies), and two order lines (one
#    inheriting the parent's org id, one overriding it explicitly).
#    expect.sql holds the exact SQL groups the merger must emit, using
#    {{uuid:...}} / {{timestamp}} template tags for generated values.
test_case = {
    "description": "Test organization_id syntactic sugar permutations",
    "action": "merge",
    "data": {
        "type": "order",
        "organization_id": "parent-org-id",
        "customer": {
            "type": "person",
            "first_name": "Const",
            "last_name": "Person"
        },
        "lines": [
            {
                "type": "order_line"
            },
            {
                "type": "order_line",
                "organization_id": "explicit-org-id"
            }
        ]
    },
    "schema_id": "order",
    "expect": {
        "success": True,
        "sql": [
            [
                "INSERT INTO agreego.\"entity\" (",
                " \"created_at\",",
                " \"created_by\",",
                " \"id\",",
                " \"modified_at\",",
                " \"modified_by\",",
                " \"organization_id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " '{{uuid:person_id}}',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " NULL,",
                " 'person'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"organization\" (",
                " \"id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{uuid:person_id}}',",
                " 'person'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"user\" (",
                " \"id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{uuid:person_id}}',",
                " 'person'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"person\" (",
                " \"first_name\",",
                " \"id\",",
                " \"last_name\",",
                " \"type\"",
                ")",
                "VALUES (",
                " 'Const',",
                " '{{uuid:person_id}}',",
                " 'Person',",
                " 'person'",
                ")"
            ],
            [
                "INSERT INTO agreego.change (",
                " \"old\",",
                " \"new\",",
                " entity_id,",
                " id,",
                " kind,",
                " modified_at,",
                " modified_by",
                ")",
                "VALUES (",
                " NULL,",
                " '{",
                " \"first_name\":\"Const\",",
                " \"last_name\":\"Person\",",
                " \"type\":\"person\"",
                " }',",
                " '{{uuid:person_id}}',",
                " '{{uuid}}',",
                " 'create',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"entity\" (",
                " \"created_at\",",
                " \"created_by\",",
                " \"id\",",
                " \"modified_at\",",
                " \"modified_by\",",
                " \"organization_id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " '{{uuid:line1_id}}',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " 'parent-org-id',",
                " 'order_line'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"order_line\" (",
                " \"id\",",
                " \"order_id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{uuid:line1_id}}',",
                " '{{uuid:order_id}}',",
                " 'order_line'",
                ")"
            ],
            [
                "INSERT INTO agreego.change (",
                " \"old\",",
                " \"new\",",
                " entity_id,",
                " id,",
                " kind,",
                " modified_at,",
                " modified_by",
                ")",
                "VALUES (",
                " NULL,",
                " '{",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order_line\"",
                " }',",
                " '{{uuid:line1_id}}',",
                " '{{uuid}}',",
                " 'create',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"entity\" (",
                " \"created_at\",",
                " \"created_by\",",
                " \"id\",",
                " \"modified_at\",",
                " \"modified_by\",",
                " \"organization_id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " '{{uuid:line2_id}}',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " 'explicit-org-id',",
                " 'order_line'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"order_line\" (",
                " \"id\",",
                " \"order_id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{uuid:line2_id}}',",
                " '{{uuid:order_id}}',",
                " 'order_line'",
                ")"
            ],
            [
                "INSERT INTO agreego.change (",
                " \"old\",",
                " \"new\",",
                " entity_id,",
                " id,",
                " kind,",
                " modified_at,",
                " modified_by",
                ")",
                "VALUES (",
                " NULL,",
                " '{",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"explicit-org-id\",",
                " \"type\":\"order_line\"",
                " }',",
                " '{{uuid:line2_id}}',",
                " '{{uuid}}',",
                " 'create',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"entity\" (",
                " \"created_at\",",
                " \"created_by\",",
                " \"id\",",
                " \"modified_at\",",
                " \"modified_by\",",
                " \"organization_id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " '{{uuid:order_id}}',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000',",
                " 'parent-org-id',",
                " 'order'",
                ")"
            ],
            [
                "INSERT INTO agreego.\"order\" (",
                " \"customer_id\",",
                " \"id\",",
                " \"type\"",
                ")",
                "VALUES (",
                " '{{uuid:person_id}}',",
                " '{{uuid:order_id}}',",
                " 'order'",
                ")"
            ],
            [
                "INSERT INTO agreego.change (",
                " \"old\",",
                " \"new\",",
                " entity_id,",
                " id,",
                " kind,",
                " modified_at,",
                " modified_by",
                ")",
                "VALUES (",
                " NULL,",
                " '{",
                " \"customer_id\":\"{{uuid:person_id}}\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order\"",
                " }',",
                " '{{uuid:order_id}}',",
                " '{{uuid}}',",
                " 'create',",
                " '{{timestamp}}',",
                " '00000000-0000-0000-0000-000000000000'",
                ")"
            ],
            [
                "SELECT pg_notify('entity', '{",
                " \"complete\":{",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"first_name\":\"Const\",",
                " \"id\":\"{{uuid:person_id}}\",",
                " \"last_name\":\"Person\",",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"type\":\"person\"",
                " },",
                " \"new\":{",
                " \"first_name\":\"Const\",",
                " \"last_name\":\"Person\",",
                " \"type\":\"person\"",
                " }",
                " }')"
            ],
            [
                "SELECT pg_notify('entity', '{",
                " \"complete\":{",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"id\":\"{{uuid:line1_id}}\",",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order_line\"",
                " },",
                " \"new\":{",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order_line\"",
                " }",
                " }')"
            ],
            [
                "SELECT pg_notify('entity', '{",
                " \"complete\":{",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"id\":\"{{uuid:line2_id}}\",",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"explicit-org-id\",",
                " \"type\":\"order_line\"",
                " },",
                " \"new\":{",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"explicit-org-id\",",
                " \"type\":\"order_line\"",
                " }",
                " }')"
            ],
            [
                "SELECT pg_notify('entity', '{",
                " \"complete\":{",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"customer\":{",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"first_name\":\"Const\",",
                " \"id\":\"{{uuid:person_id}}\",",
                " \"last_name\":\"Person\",",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"type\":\"person\"",
                " },",
                " \"customer_id\":\"{{uuid:person_id}}\",",
                " \"id\":\"{{uuid:order_id}}\",",
                " \"lines\":[",
                " {",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"id\":\"{{uuid:line1_id}}\",",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order_line\"",
                " },",
                " {",
                " \"created_at\":\"{{timestamp}}\",",
                " \"created_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"id\":\"{{uuid:line2_id}}\",",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"order_id\":\"{{uuid:order_id}}\",",
                " \"organization_id\":\"explicit-org-id\",",
                " \"type\":\"order_line\"",
                " },",
                " ],",
                " \"modified_at\":\"{{timestamp}}\",",
                " \"modified_by\":\"00000000-0000-0000-0000-000000000000\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order\"",
                " },",
                " \"new\":{",
                " \"customer_id\":\"{{uuid:person_id}}\",",
                " \"organization_id\":\"parent-org-id\",",
                " \"type\":\"order\"",
                " }",
                " }')"
            ]
        ]
    }
}
data[0]["tests"].append(test_case)

# Persist the updated fixtures.
with open("fixtures/merger.json", "w") as f:
    json.dump(data, f, indent=2)

View File

@ -1,12 +1,12 @@
use crate::database::object::{SchemaObject, SchemaTypeOrArray}; use crate::database::object::{SchemaObject, SchemaTypeOrArray};
use crate::database::schema::Schema; use crate::database::schema::Schema;
use crate::database::r#enum::Enum; use crate::database::r#enum::Enum;
use std::collections::BTreeMap; use indexmap::IndexMap;
use std::sync::Arc; use std::sync::Arc;
impl Enum { impl Enum {
pub fn compile_condition(&self) -> Schema { pub fn compile_condition(&self) -> Schema {
let mut props = BTreeMap::new(); let mut props = IndexMap::new();
let enum_name = &self.name; let enum_name = &self.name;
let mut eq_obj = SchemaObject::default(); let mut eq_obj = SchemaObject::default();

View File

@ -1,4 +1,5 @@
use crate::database::schema::Schema; use crate::database::schema::Schema;
use indexmap::IndexMap;
impl Schema { impl Schema {
/// Dynamically infers and compiles all structural database relationships between this Schema /// Dynamically infers and compiles all structural database relationships between this Schema
@ -10,10 +11,10 @@ impl Schema {
db: &crate::database::Database, db: &crate::database::Database,
root_id: &str, root_id: &str,
path: &str, path: &str,
props: &std::collections::BTreeMap<String, std::sync::Arc<Schema>>, props: &IndexMap<String, std::sync::Arc<Schema>>,
errors: &mut Vec<crate::drop::Error>, errors: &mut Vec<crate::drop::Error>,
) -> std::collections::BTreeMap<String, crate::database::edge::Edge> { ) -> IndexMap<String, crate::database::edge::Edge> {
let mut schema_edges = std::collections::BTreeMap::new(); let mut schema_edges = IndexMap::new();
// Determine the physical Database Table Name this schema structurally represents // Determine the physical Database Table Name this schema structurally represents
// Plucks the polymorphic discriminator via dot-notation (e.g. extracting "person" from "full.person") // Plucks the polymorphic discriminator via dot-notation (e.g. extracting "person" from "full.person")

View File

@ -1,7 +1,7 @@
use crate::database::Database; use crate::database::Database;
use crate::database::object::{SchemaObject, SchemaTypeOrArray}; use crate::database::object::{SchemaObject, SchemaTypeOrArray};
use crate::database::schema::Schema; use crate::database::schema::Schema;
use std::collections::BTreeMap; use indexmap::IndexMap;
use std::sync::Arc; use std::sync::Arc;
impl Schema { impl Schema {
@ -12,7 +12,7 @@ impl Schema {
_errors: &mut Vec<crate::drop::Error>, _errors: &mut Vec<crate::drop::Error>,
) -> Option<Schema> { ) -> Option<Schema> {
if let Some(props) = self.obj.compiled_properties.get() { if let Some(props) = self.obj.compiled_properties.get() {
let mut filter_props = BTreeMap::new(); let mut filter_props = IndexMap::new();
for (key, child) in props { for (key, child) in props {
let mut structural_filter = None; let mut structural_filter = None;

View File

@ -5,6 +5,7 @@ pub mod filter;
pub mod polymorphism; pub mod polymorphism;
use crate::database::schema::Schema; use crate::database::schema::Schema;
use indexmap::IndexMap;
impl Schema { impl Schema {
pub fn compile( pub fn compile(
@ -48,7 +49,7 @@ impl Schema {
} }
} }
let mut props = std::collections::BTreeMap::new(); let mut props = IndexMap::new();
// 1. Resolve INHERITANCE dependencies first // 1. Resolve INHERITANCE dependencies first
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ { if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &self.obj.type_ {
@ -124,8 +125,7 @@ impl Schema {
// 4. Set the OnceLock! // 4. Set the OnceLock!
let _ = self.obj.compiled_properties.set(props.clone()); let _ = self.obj.compiled_properties.set(props.clone());
let mut names: Vec<String> = props.keys().cloned().collect(); let names: Vec<String> = props.keys().cloned().collect();
names.sort();
let _ = self.obj.compiled_property_names.set(names); let _ = self.obj.compiled_property_names.set(names);
// 5. Compute Edges natively // 5. Compute Edges natively

View File

@ -1,3 +1,4 @@
use indexmap::IndexSet;
use crate::database::schema::Schema; use crate::database::schema::Schema;
impl Schema { impl Schema {
@ -8,7 +9,7 @@ impl Schema {
path: &str, path: &str,
errors: &mut Vec<crate::drop::Error>, errors: &mut Vec<crate::drop::Error>,
) { ) {
let mut options = std::collections::BTreeMap::new(); let mut options = indexmap::IndexMap::new();
let strategy: &str; let strategy: &str;
if let Some(family) = &self.obj.family { if let Some(family) = &self.obj.family {
@ -65,10 +66,10 @@ impl Schema {
} }
} }
} else if let Some(one_of) = &self.obj.one_of { } else if let Some(one_of) = &self.obj.one_of {
let mut type_vals = std::collections::HashSet::new(); let mut type_vals = IndexSet::new();
let mut kind_vals = std::collections::HashSet::new(); let mut kind_vals = IndexSet::new();
let mut disjoint_base = true; let mut disjoint_base = true;
let mut structural_types = std::collections::HashSet::new(); let mut structural_types = IndexSet::new();
for c in one_of { for c in one_of {
let mut child_id = String::new(); let mut child_id = String::new();

View File

@ -1,4 +1,5 @@
use crate::database::schema::Schema; use crate::database::schema::Schema;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
@ -10,5 +11,5 @@ pub struct Enum {
pub source: String, pub source: String,
pub values: Vec<String>, pub values: Vec<String>,
#[serde(default)] #[serde(default)]
pub schemas: std::collections::BTreeMap<String, Arc<Schema>>, pub schemas: IndexMap<String, Arc<Schema>>,
} }

View File

@ -23,18 +23,18 @@ use punc::Punc;
use relation::Relation; use relation::Relation;
use schema::Schema; use schema::Schema;
use serde_json::Value; use serde_json::Value;
use std::collections::HashMap; use indexmap::IndexMap;
use std::sync::Arc; use std::sync::Arc;
use r#type::Type; use r#type::Type;
#[derive(serde::Serialize)] #[derive(serde::Serialize)]
pub struct Database { pub struct Database {
pub enums: HashMap<String, Enum>, pub enums: IndexMap<String, Enum>,
pub types: HashMap<String, Type>, pub types: IndexMap<String, Type>,
pub puncs: HashMap<String, Punc>, pub puncs: IndexMap<String, Punc>,
pub relations: HashMap<String, Relation>, pub relations: IndexMap<String, Relation>,
#[serde(skip)] #[serde(skip)]
pub schemas: HashMap<String, Arc<Schema>>, pub schemas: IndexMap<String, Arc<Schema>>,
#[serde(skip)] #[serde(skip)]
pub executor: Box<dyn DatabaseExecutor + Send + Sync>, pub executor: Box<dyn DatabaseExecutor + Send + Sync>,
} }
@ -42,11 +42,11 @@ pub struct Database {
impl Database { impl Database {
pub fn new(val: &serde_json::Value) -> (Self, crate::drop::Drop) { pub fn new(val: &serde_json::Value) -> (Self, crate::drop::Drop) {
let mut db = Self { let mut db = Self {
enums: HashMap::new(), enums: IndexMap::new(),
types: HashMap::new(), types: IndexMap::new(),
relations: HashMap::new(), relations: IndexMap::new(),
puncs: HashMap::new(), puncs: IndexMap::new(),
schemas: HashMap::new(), schemas: IndexMap::new(),
#[cfg(not(test))] #[cfg(not(test))]
executor: Box::new(SpiExecutor::new()), executor: Box::new(SpiExecutor::new()),
#[cfg(test)] #[cfg(test)]

View File

@ -1,7 +1,7 @@
use crate::database::schema::Schema; use crate::database::schema::Schema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use std::collections::BTreeMap; use indexmap::IndexMap;
use std::sync::Arc; use std::sync::Arc;
use std::sync::OnceLock; use std::sync::OnceLock;
@ -30,10 +30,10 @@ pub struct SchemaObject {
// Object Keywords // Object Keywords
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub properties: Option<BTreeMap<String, Arc<Schema>>>, pub properties: Option<IndexMap<String, Arc<Schema>>>,
#[serde(rename = "patternProperties")] #[serde(rename = "patternProperties")]
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub pattern_properties: Option<BTreeMap<String, Arc<Schema>>>, pub pattern_properties: Option<IndexMap<String, Arc<Schema>>>,
#[serde(rename = "additionalProperties")] #[serde(rename = "additionalProperties")]
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub additional_properties: Option<Arc<Schema>>, pub additional_properties: Option<Arc<Schema>>,
@ -46,7 +46,7 @@ pub struct SchemaObject {
// dependencies can be schema dependencies or property dependencies // dependencies can be schema dependencies or property dependencies
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub dependencies: Option<BTreeMap<String, Dependency>>, pub dependencies: Option<IndexMap<String, Dependency>>,
// Array Keywords // Array Keywords
#[serde(rename = "items")] #[serde(rename = "items")]
@ -147,7 +147,7 @@ pub struct SchemaObject {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub control: Option<String>, pub control: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub actions: Option<BTreeMap<String, Action>>, pub actions: Option<IndexMap<String, Action>>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub computer: Option<String>, pub computer: Option<String>,
#[serde(default)] #[serde(default)]
@ -164,7 +164,7 @@ pub struct SchemaObject {
// Internal structural representation caching active AST Node maps. Unlike the Go framework counterpart, the JSPG implementation DOES natively include ALL ancestral inheritance boundary schemas because it compiles locally against the raw database graph. // Internal structural representation caching active AST Node maps. Unlike the Go framework counterpart, the JSPG implementation DOES natively include ALL ancestral inheritance boundary schemas because it compiles locally against the raw database graph.
#[serde(skip)] #[serde(skip)]
pub compiled_properties: OnceLock<BTreeMap<String, Arc<Schema>>>, pub compiled_properties: OnceLock<IndexMap<String, Arc<Schema>>>,
#[serde(rename = "compiledDiscriminator")] #[serde(rename = "compiledDiscriminator")]
#[serde(skip_deserializing)] #[serde(skip_deserializing)]
@ -176,13 +176,13 @@ pub struct SchemaObject {
#[serde(skip_deserializing)] #[serde(skip_deserializing)]
#[serde(skip_serializing_if = "crate::database::object::is_once_lock_map_empty")] #[serde(skip_serializing_if = "crate::database::object::is_once_lock_map_empty")]
#[serde(serialize_with = "crate::database::object::serialize_once_lock")] #[serde(serialize_with = "crate::database::object::serialize_once_lock")]
pub compiled_options: OnceLock<BTreeMap<String, (Option<usize>, Option<String>)>>, pub compiled_options: OnceLock<IndexMap<String, (Option<usize>, Option<String>)>>,
#[serde(rename = "compiledEdges")] #[serde(rename = "compiledEdges")]
#[serde(skip_deserializing)] #[serde(skip_deserializing)]
#[serde(skip_serializing_if = "crate::database::object::is_once_lock_map_empty")] #[serde(skip_serializing_if = "crate::database::object::is_once_lock_map_empty")]
#[serde(serialize_with = "crate::database::object::serialize_once_lock")] #[serde(serialize_with = "crate::database::object::serialize_once_lock")]
pub compiled_edges: OnceLock<BTreeMap<String, crate::database::edge::Edge>>, pub compiled_edges: OnceLock<IndexMap<String, crate::database::edge::Edge>>,
#[serde(skip)] #[serde(skip)]
pub compiled_format: OnceLock<CompiledFormat>, pub compiled_format: OnceLock<CompiledFormat>,
@ -245,7 +245,7 @@ pub fn serialize_once_lock<T: serde::Serialize, S: serde::Serializer>(
} }
} }
pub fn is_once_lock_map_empty<K, V>(lock: &OnceLock<std::collections::BTreeMap<K, V>>) -> bool { pub fn is_once_lock_map_empty<K, V>(lock: &OnceLock<indexmap::IndexMap<K, V>>) -> bool {
lock.get().map_or(true, |m| m.is_empty()) lock.get().map_or(true, |m| m.is_empty())
} }

View File

@ -1,5 +1,6 @@
use crate::database::page::Page; use crate::database::page::Page;
use crate::database::schema::Schema; use crate::database::schema::Schema;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
@ -18,5 +19,5 @@ pub struct Punc {
pub save: Option<String>, pub save: Option<String>,
pub page: Option<Page>, pub page: Option<Page>,
#[serde(default)] #[serde(default)]
pub schemas: std::collections::BTreeMap<String, Arc<Schema>>, pub schemas: IndexMap<String, Arc<Schema>>,
} }

View File

@ -1,4 +1,4 @@
use std::collections::HashSet; use indexmap::{IndexMap, IndexSet};
use crate::database::schema::Schema; use crate::database::schema::Schema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -25,7 +25,7 @@ pub struct Type {
#[serde(default)] #[serde(default)]
pub hierarchy: Vec<String>, pub hierarchy: Vec<String>,
#[serde(default)] #[serde(default)]
pub variations: HashSet<String>, pub variations: IndexSet<String>,
#[serde(default)] #[serde(default)]
pub relationship: bool, pub relationship: bool,
#[serde(default)] #[serde(default)]
@ -39,5 +39,5 @@ pub struct Type {
pub default_fields: Vec<String>, pub default_fields: Vec<String>,
pub field_types: Option<Value>, pub field_types: Option<Value>,
#[serde(default)] #[serde(default)]
pub schemas: std::collections::BTreeMap<String, Arc<Schema>>, pub schemas: IndexMap<String, Arc<Schema>>,
} }

View File

@ -5,7 +5,7 @@ use pgrx::*;
pg_module_magic!(); pg_module_magic!();
#[cfg(test)] #[cfg(test)]
pub struct JsonB(pub serde_json::Value); pub struct Json(pub serde_json::Value);
pub mod database; pub mod database;
pub mod drop; pub mod drop;
@ -25,7 +25,7 @@ lazy_static::lazy_static! {
static ref GLOBAL_JSPG: RwLock<Option<Arc<jspg::Jspg>>> = RwLock::new(None); static ref GLOBAL_JSPG: RwLock<Option<Arc<jspg::Jspg>>> = RwLock::new(None);
} }
fn jspg_failure() -> JsonB { fn jspg_failure() -> Json {
let error = crate::drop::Error { let error = crate::drop::Error {
code: "ENGINE_NOT_INITIALIZED".to_string(), code: "ENGINE_NOT_INITIALIZED".to_string(),
message: "JSPG extension has not been initialized via jspg_setup".to_string(), message: "JSPG extension has not been initialized via jspg_setup".to_string(),
@ -37,11 +37,11 @@ fn jspg_failure() -> JsonB {
}, },
}; };
let drop = crate::drop::Drop::with_errors(vec![error]); let drop = crate::drop::Drop::with_errors(vec![error]);
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} }
#[cfg_attr(not(test), pg_extern(strict))] #[cfg_attr(not(test), pg_extern(strict))]
pub fn jspg_setup(database: JsonB) -> JsonB { pub fn jspg_setup(database: Json) -> Json {
let (new_jspg, drop) = crate::jspg::Jspg::new(&database.0); let (new_jspg, drop) = crate::jspg::Jspg::new(&database.0);
let new_arc = Arc::new(new_jspg); let new_arc = Arc::new(new_jspg);
@ -51,11 +51,11 @@ pub fn jspg_setup(database: JsonB) -> JsonB {
*lock = Some(new_arc); *lock = Some(new_arc);
} }
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} }
#[cfg_attr(not(test), pg_extern)] #[cfg_attr(not(test), pg_extern)]
pub fn jspg_merge(schema_id: &str, data: JsonB) -> JsonB { pub fn jspg_merge(schema_id: &str, data: Json) -> Json {
// Try to acquire a read lock to get a clone of the Engine Arc // Try to acquire a read lock to get a clone of the Engine Arc
let engine_opt = { let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap(); let lock = GLOBAL_JSPG.read().unwrap();
@ -65,14 +65,14 @@ pub fn jspg_merge(schema_id: &str, data: JsonB) -> JsonB {
match engine_opt { match engine_opt {
Some(engine) => { Some(engine) => {
let drop = engine.merger.merge(schema_id, data.0); let drop = engine.merger.merge(schema_id, data.0);
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} }
None => jspg_failure(), None => jspg_failure(),
} }
} }
#[cfg_attr(not(test), pg_extern)] #[cfg_attr(not(test), pg_extern)]
pub fn jspg_query(schema_id: &str, filter: Option<JsonB>) -> JsonB { pub fn jspg_query(schema_id: &str, filter: Option<Json>) -> Json {
let engine_opt = { let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap(); let lock = GLOBAL_JSPG.read().unwrap();
lock.clone() lock.clone()
@ -83,7 +83,7 @@ pub fn jspg_query(schema_id: &str, filter: Option<JsonB>) -> JsonB {
let drop = engine let drop = engine
.queryer .queryer
.query(schema_id, filter.as_ref().map(|f| &f.0)); .query(schema_id, filter.as_ref().map(|f| &f.0));
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} }
None => jspg_failure(), None => jspg_failure(),
} }
@ -92,7 +92,7 @@ pub fn jspg_query(schema_id: &str, filter: Option<JsonB>) -> JsonB {
// `mask_json_schema` has been removed as the mask architecture is fully replaced by Spi string queries during DB interactions. // `mask_json_schema` has been removed as the mask architecture is fully replaced by Spi string queries during DB interactions.
#[cfg_attr(not(test), pg_extern(strict, parallel_safe))] #[cfg_attr(not(test), pg_extern(strict, parallel_safe))]
pub fn jspg_validate(schema_id: &str, instance: JsonB) -> JsonB { pub fn jspg_validate(schema_id: &str, instance: Json) -> Json {
// 1. Acquire Snapshot // 1. Acquire Snapshot
let jspg_arc = { let jspg_arc = {
let lock = GLOBAL_JSPG.read().unwrap(); let lock = GLOBAL_JSPG.read().unwrap();
@ -102,14 +102,14 @@ pub fn jspg_validate(schema_id: &str, instance: JsonB) -> JsonB {
// 2. Validate (Lock-Free) // 2. Validate (Lock-Free)
if let Some(engine) = jspg_arc { if let Some(engine) = jspg_arc {
let drop = engine.validator.validate(schema_id, &instance.0); let drop = engine.validator.validate(schema_id, &instance.0);
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} else { } else {
jspg_failure() jspg_failure()
} }
} }
#[cfg_attr(not(test), pg_extern)] #[cfg_attr(not(test), pg_extern)]
pub fn jspg_database() -> JsonB { pub fn jspg_database() -> Json {
let engine_opt = { let engine_opt = {
let lock = GLOBAL_JSPG.read().unwrap(); let lock = GLOBAL_JSPG.read().unwrap();
lock.clone() lock.clone()
@ -120,18 +120,18 @@ pub fn jspg_database() -> JsonB {
let database_json = serde_json::to_value(&engine.database) let database_json = serde_json::to_value(&engine.database)
.unwrap_or(serde_json::Value::Object(serde_json::Map::new())); .unwrap_or(serde_json::Value::Object(serde_json::Map::new()));
let drop = crate::drop::Drop::success_with_val(database_json); let drop = crate::drop::Drop::success_with_val(database_json);
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} }
None => jspg_failure(), None => jspg_failure(),
} }
} }
#[cfg_attr(not(test), pg_extern(strict))] #[cfg_attr(not(test), pg_extern(strict))]
pub fn jspg_teardown() -> JsonB { pub fn jspg_teardown() -> Json {
let mut lock = GLOBAL_JSPG.write().unwrap(); let mut lock = GLOBAL_JSPG.write().unwrap();
*lock = None; *lock = None;
let drop = crate::drop::Drop::success(); let drop = crate::drop::Drop::success();
JsonB(serde_json::to_value(drop).unwrap()) Json(serde_json::to_value(drop).unwrap())
} }
#[cfg(test)] #[cfg(test)]

View File

@ -40,7 +40,7 @@ impl Merger {
} }
}; };
let result = self.merge_internal(target_schema, data, &mut notifications_queue); let result = self.merge_internal(target_schema, data, &mut notifications_queue, None, false);
let val_resolved = match result { let val_resolved = match result {
Ok(val) => val, Ok(val) => val,
@ -129,33 +129,16 @@ impl Merger {
crate::drop::Drop::success_with_val(stripped_val) crate::drop::Drop::success_with_val(stripped_val)
} }
fn inject_organization_id(
relative: &mut serde_json::Map<String, Value>,
entity_fields: &serde_json::Map<String, Value>,
schema: &Arc<crate::database::schema::Schema>,
) {
if !relative.contains_key("organization_id") {
if let Some(org_id) = entity_fields.get("organization_id") {
if let Some(compiled_props) = schema.obj.compiled_properties.get() {
if let Some(org_schema) = compiled_props.get("organization_id") {
if org_schema.obj.const_.is_some() {
return;
}
}
}
relative.insert("organization_id".to_string(), org_id.clone());
}
}
}
pub(crate) fn merge_internal( pub(crate) fn merge_internal(
&self, &self,
mut schema: Arc<crate::database::schema::Schema>, mut schema: Arc<crate::database::schema::Schema>,
data: Value, data: Value,
notifications: &mut Vec<String>, notifications: &mut Vec<String>,
parent_org_id: Option<String>,
is_child: bool,
) -> Result<Value, String> { ) -> Result<Value, String> {
match data { match data {
Value::Array(items) => self.merge_array(schema, items, notifications), Value::Array(items) => self.merge_array(schema, items, notifications, parent_org_id, is_child),
Value::Object(map) => { Value::Object(map) => {
if let Some(options) = schema.obj.compiled_options.get() { if let Some(options) = schema.obj.compiled_options.get() {
if let Some(disc) = schema.obj.compiled_discriminator.get() { if let Some(disc) = schema.obj.compiled_discriminator.get() {
@ -163,9 +146,7 @@ impl Merger {
if let Some(v) = val { if let Some(v) = val {
if let Some((idx_opt, target_id_opt)) = options.get(v) { if let Some((idx_opt, target_id_opt)) = options.get(v) {
if let Some(target_id) = target_id_opt { if let Some(target_id) = target_id_opt {
if let Some(target_schema) = if let Some(target_schema) = self.db.schemas.get(target_id) {
self.db.schemas.get(target_id)
{
schema = target_schema.clone(); schema = target_schema.clone();
} else { } else {
return Err(format!( return Err(format!(
@ -204,7 +185,7 @@ impl Merger {
} }
} }
} }
self.merge_object(schema, map, notifications) self.merge_object(schema, map, notifications, parent_org_id, is_child)
} }
_ => Err("Invalid merge payload: root must be an Object or Array".to_string()), _ => Err("Invalid merge payload: root must be an Object or Array".to_string()),
} }
@ -215,6 +196,8 @@ impl Merger {
schema: Arc<crate::database::schema::Schema>, schema: Arc<crate::database::schema::Schema>,
items: Vec<Value>, items: Vec<Value>,
notifications: &mut Vec<String>, notifications: &mut Vec<String>,
parent_org_id: Option<String>,
is_child: bool,
) -> Result<Value, String> { ) -> Result<Value, String> {
let mut item_schema = schema.clone(); let mut item_schema = schema.clone();
if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &schema.obj.type_ { if let Some(crate::database::object::SchemaTypeOrArray::Single(t)) = &schema.obj.type_ {
@ -227,7 +210,7 @@ impl Merger {
let mut resolved_items = Vec::new(); let mut resolved_items = Vec::new();
for item in items { for item in items {
let resolved = self.merge_internal(item_schema.clone(), item, notifications)?; let resolved = self.merge_internal(item_schema.clone(), item, notifications, parent_org_id.clone(), is_child)?;
resolved_items.push(resolved); resolved_items.push(resolved);
} }
Ok(Value::Array(resolved_items)) Ok(Value::Array(resolved_items))
@ -238,6 +221,8 @@ impl Merger {
schema: Arc<crate::database::schema::Schema>, schema: Arc<crate::database::schema::Schema>,
obj: serde_json::Map<String, Value>, obj: serde_json::Map<String, Value>,
notifications: &mut Vec<String>, notifications: &mut Vec<String>,
parent_org_id: Option<String>,
is_child: bool,
) -> Result<Value, String> { ) -> Result<Value, String> {
let queue_start = notifications.len(); let queue_start = notifications.len();
@ -297,6 +282,20 @@ impl Merger {
} }
} }
let mut current_org_id = None;
if let Some(compiled_props) = schema.obj.compiled_properties.get() {
if let Some(org_schema) = compiled_props.get("organization_id") {
if let Some(c) = &org_schema.obj.const_ {
if let Some(c_str) = c.as_str() {
current_org_id = Some(c_str.to_string());
}
}
}
}
if current_org_id.is_none() {
current_org_id = parent_org_id.clone();
}
let user_id = self.db.auth_user_id()?; let user_id = self.db.auth_user_id()?;
let timestamp = self.db.timestamp()?; let timestamp = self.db.timestamp()?;
@ -311,6 +310,16 @@ impl Merger {
entity_change_kind = kind; entity_change_kind = kind;
entity_fetched = fetched; entity_fetched = fetched;
entity_replaces = replaces; entity_replaces = replaces;
if entity_change_kind.as_deref() == Some("create") {
if is_child {
if !entity_fields.contains_key("organization_id") {
if let Some(ref org_id) = current_org_id {
entity_fields.insert("organization_id".to_string(), Value::String(org_id.clone()));
}
}
}
}
} }
let mut entity_response = serde_json::Map::new(); let mut entity_response = serde_json::Map::new();
@ -331,13 +340,14 @@ impl Merger {
if let Some(relation) = self.db.relations.get(&edge.constraint) { if let Some(relation) = self.db.relations.get(&edge.constraint) {
let parent_is_source = edge.forward; let parent_is_source = edge.forward;
let org_id_to_pass = entity_fields.get("organization_id").and_then(|v| v.as_str()).map(|s| s.to_string());
if parent_is_source { if parent_is_source {
Self::inject_organization_id(&mut relative, &entity_fields, &rel_schema);
let mut merged_relative = match self.merge_internal( let mut merged_relative = match self.merge_internal(
rel_schema.clone(), rel_schema.clone(),
Value::Object(relative), Value::Object(relative),
notifications, notifications,
org_id_to_pass.clone(),
true,
)? { )? {
Value::Object(m) => m, Value::Object(m) => m,
_ => continue, _ => continue,
@ -353,8 +363,6 @@ impl Merger {
); );
entity_response.insert(relation_name, Value::Object(merged_relative)); entity_response.insert(relation_name, Value::Object(merged_relative));
} else { } else {
Self::inject_organization_id(&mut relative, &entity_fields, &rel_schema);
Self::apply_entity_relation( Self::apply_entity_relation(
&mut relative, &mut relative,
&relation.source_columns, &relation.source_columns,
@ -366,6 +374,8 @@ impl Merger {
rel_schema.clone(), rel_schema.clone(),
Value::Object(relative), Value::Object(relative),
notifications, notifications,
org_id_to_pass.clone(),
true,
)? { )? {
Value::Object(m) => m, Value::Object(m) => m,
_ => continue, _ => continue,
@ -385,6 +395,16 @@ impl Merger {
entity_change_kind = kind; entity_change_kind = kind;
entity_fetched = fetched; entity_fetched = fetched;
entity_replaces = replaces; entity_replaces = replaces;
if entity_change_kind.as_deref() == Some("create") {
if is_child {
if !entity_fields.contains_key("organization_id") {
if let Some(ref org_id) = current_org_id {
entity_fields.insert("organization_id".to_string(), Value::String(org_id.clone()));
}
}
}
}
} }
self.merge_entity_fields( self.merge_entity_fields(
@ -423,11 +443,10 @@ impl Merger {
} }
} }
let org_id_to_pass = entity_fields.get("organization_id").and_then(|v| v.as_str()).map(|s| s.to_string());
let mut relative_responses = Vec::new(); let mut relative_responses = Vec::new();
for relative_item_val in relative_arr { for relative_item_val in relative_arr {
if let Value::Object(mut relative_item) = relative_item_val { if let Value::Object(mut relative_item) = relative_item_val {
Self::inject_organization_id(&mut relative_item, &entity_fields, &item_schema);
Self::apply_entity_relation( Self::apply_entity_relation(
&mut relative_item, &mut relative_item,
&relation.source_columns, &relation.source_columns,
@ -439,6 +458,8 @@ impl Merger {
item_schema.clone(), item_schema.clone(),
Value::Object(relative_item), Value::Object(relative_item),
notifications, notifications,
org_id_to_pass.clone(),
true,
)? { )? {
Value::Object(m) => m, Value::Object(m) => m,
_ => continue, _ => continue,

View File

@ -1,4 +1,5 @@
use crate::database::Database; use crate::database::Database;
use indexmap::IndexMap;
use std::sync::Arc; use std::sync::Arc;
pub struct Compiler<'a> { pub struct Compiler<'a> {
@ -256,7 +257,7 @@ impl<'a> Compiler<'a> {
fn compile_object( fn compile_object(
&mut self, &mut self,
props: &std::collections::BTreeMap<String, std::sync::Arc<crate::database::schema::Schema>>, props: &IndexMap<String, std::sync::Arc<crate::database::schema::Schema>>,
node: Node<'a>, node: Node<'a>,
) -> Result<(String, String), String> { ) -> Result<(String, String), String> {
let mut build_args = Vec::new(); let mut build_args = Vec::new();
@ -377,10 +378,7 @@ impl<'a> Compiler<'a> {
return Ok(("NULL".to_string(), "string".to_string())); return Ok(("NULL".to_string(), "string".to_string()));
} }
case_statements.sort();
let sql = format!("CASE {} ELSE NULL END", case_statements.join(" ")); let sql = format!("CASE {} ELSE NULL END", case_statements.join(" "));
Ok((sql, "object".to_string())) Ok((sql, "object".to_string()))
} }
@ -417,7 +415,7 @@ impl<'a> Compiler<'a> {
) -> Result<Vec<String>, String> { ) -> Result<Vec<String>, String> {
let mut select_args = Vec::new(); let mut select_args = Vec::new();
let grouped_fields = r#type.grouped_fields.as_ref().and_then(|v| v.as_object()); let grouped_fields = r#type.grouped_fields.as_ref().and_then(|v| v.as_object());
let default_props = std::collections::BTreeMap::new(); let default_props = IndexMap::new();
let merged_props = node let merged_props = node
.schema .schema
.obj .obj

393
src/tests/formatter.rs Normal file
View File

@ -0,0 +1,393 @@
use sqlparser::ast::{
BinaryOperator, Expr, Function, FunctionArg, Join, JoinConstraint, JoinOperator,
Query, Select, SelectItem, SetExpr, Statement, TableWithJoins, Value
};
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;
/// Pretty-printer that re-renders a SQL string as a vector of indented
/// lines, so tests can diff generated SQL line-by-line deterministically.
pub struct SqlFormatter {
    // Accumulated output; each element is one fully rendered line.
    pub lines: Vec<String>,
    // Current indentation (number of spaces) applied when a new line starts.
    pub indent: usize,
}
impl SqlFormatter {
    /// Create an empty formatter with no output and zero indentation.
    pub fn new() -> Self {
        Self {
            lines: Vec::new(),
            indent: 0,
        }
    }

    /// Parse `sql` with the PostgreSQL dialect and return it pretty-printed
    /// as one string per output line.
    ///
    /// On a parse error (or an empty parse result) the input is returned
    /// unchanged as a single line, so callers always get something to diff.
    /// NOTE(review): only the first statement of a multi-statement input is
    /// formatted — confirm callers never pass more than one.
    pub fn format(sql: &str) -> Vec<String> {
        let dialect = PostgreSqlDialect {};
        let ast = match Parser::parse_sql(&dialect, sql) {
            Ok(ast) => ast,
            Err(e) => {
                // Best-effort: surface the parse failure but still return the raw SQL.
                println!("DEBUG PARSE SQL ERROR: {:?}", e);
                return vec![sql.to_string()];
            }
        };
        if ast.is_empty() {
            return vec![sql.to_string()];
        }
        let mut formatter = SqlFormatter::new();
        formatter.format_statement(&ast[0]);
        formatter.lines
    }

    /// Append `s` to the current (last) line, or start the first line if no
    /// output exists yet. Also normalizes the keyword "JSONB" to lowercase
    /// "jsonb" in everything emitted.
    fn push_str(&mut self, s: &str) {
        if self.lines.is_empty() {
            self.lines.push(format!("{}{}", " ".repeat(self.indent), s.replace("JSONB", "jsonb")));
        } else {
            let last = self.lines.last_mut().unwrap();
            last.push_str(&s.replace("JSONB", "jsonb"));
        }
    }

    /// Start a new output line containing `s`, prefixed with the current
    /// indentation (same "JSONB" -> "jsonb" normalization as `push_str`).
    fn push_line(&mut self, s: &str) {
        self.lines.push(format!("{}{}", " ".repeat(self.indent), s.replace("JSONB", "jsonb")));
    }

    /// Dispatch on statement kind: queries are formatted structurally from
    /// the AST (wrapped in parentheses); UPDATE and INSERT fall back to
    /// string-splitting of the statement's `to_string()` rendering.
    fn format_statement(&mut self, stmt: &Statement) {
        match stmt {
            Statement::Query(query) => {
                self.push_line("(");
                self.format_query(query);
                self.push_str(")");
            }
            Statement::Update(_update) => {
                let sql = stmt.to_string();
                self.format_update_fallback(&sql);
            }
            _ => {
                let sql = stmt.to_string();
                if sql.starts_with("INSERT") {
                    self.format_insert_fallback(&sql);
                } else {
                    // Anything else is emitted verbatim on one line.
                    self.push_line(&sql);
                }
            }
        }
    }

    /// Pretty-print an INSERT by slicing its textual form: one line per
    /// column, then one line per VALUES item, with embedded '{...}' JSON
    /// literals expanded via serde_json pretty-printing.
    fn format_insert_fallback(&mut self, sql: &str) {
        let s = sql.to_string();
        if let Some(values_idx) = s.find(" VALUES (") {
            let prefix = &s[..values_idx];
            // +9 skips the " VALUES (" marker itself.
            let suffix = &s[values_idx + 9..];
            if let Some(paren_idx) = prefix.find(" (") {
                self.push_line(&format!("{} (", &prefix[..paren_idx]));
                self.indent += 2;
                // Column list sits between " (" and the closing ")" of the prefix.
                let cols = &prefix[paren_idx + 2..prefix.len() - 1];
                let cols_split: Vec<&str> = cols.split(", ").collect();
                for (i, col) in cols_split.iter().enumerate() {
                    let comma = if i < cols_split.len() - 1 { "," } else { "" };
                    // Re-quote each column uniformly regardless of input quoting.
                    let c = col.replace("\"", "");
                    self.push_line(&format!("\"{}\"{}", c, comma));
                }
                self.indent -= 2;
                self.push_line(")");
            } else {
                self.push_line(prefix);
            }
            self.push_line("VALUES (");
            self.indent += 2;
            let vals = if suffix.ends_with(")") { &suffix[..suffix.len() - 1] } else { suffix };
            // Split the VALUES tuple on commas, but not commas inside
            // single-quoted SQL string literals.
            let mut val_tokens = Vec::new();
            let mut curr = String::new();
            let mut in_str = false;
            for c in vals.chars() {
                if c == '\'' {
                    in_str = !in_str;
                    curr.push(c);
                } else if c == ',' && !in_str {
                    val_tokens.push(curr.trim().to_string());
                    curr = String::new();
                } else {
                    curr.push(c);
                }
            }
            if !curr.trim().is_empty() {
                val_tokens.push(curr.trim().to_string());
            }
            for (i, val) in val_tokens.iter().enumerate() {
                let comma = if i < val_tokens.len() - 1 { "," } else { "" };
                // A value shaped like '{...}' is treated as a JSON literal and
                // expanded to pretty-printed multi-line form when it parses.
                if val.starts_with("'{") && val.ends_with("}'") {
                    let inner = &val[1..val.len() - 1];
                    // Unescape single quotes from SQL strings
                    let unescaped = inner.replace("''", "'");
                    if let Ok(json) = serde_json::from_str::<serde_json::Value>(&unescaped) {
                        if let Ok(pretty) = serde_json::to_string_pretty(&json) {
                            let lines: Vec<&str> = pretty.split('\n').collect();
                            self.push_line("'{");
                            self.indent += 2;
                            // skip(1): the opening "{" was already emitted above.
                            for (j, line) in lines.iter().skip(1).enumerate() {
                                if j == lines.len() - 2 {
                                    // Last line: dedent and close the SQL quote.
                                    self.indent -= 2;
                                    // re-escape single quotes for SQL
                                    self.push_line(&format!("{}'{}", line.replace("'", "''"), comma));
                                } else {
                                    self.push_line(&line.replace("'", "''"));
                                }
                            }
                            continue;
                        }
                    }
                }
                self.push_line(&format!("{}{}", val, comma));
            }
            self.indent -= 2;
            self.push_line(")");
        } else {
            // No VALUES clause found: emit as-is.
            self.push_line(&s);
        }
    }

    /// Pretty-print an UPDATE by slicing its textual form: one line per SET
    /// assignment, then an indented WHERE clause if present.
    fn format_update_fallback(&mut self, sql: &str) {
        let s = sql.to_string();
        if let Some(set_idx) = s.find(" SET ") {
            self.push_line(&format!("{} SET", &s[..set_idx]));
            self.indent += 2;
            // +5 skips " SET " itself.
            let after_set = &s[set_idx + 5..];
            let where_idx = after_set.find(" WHERE ");
            let assigns = if let Some(w) = where_idx { &after_set[..w] } else { after_set };
            let assigns_split: Vec<&str> = assigns.split(", ").collect();
            for (i, assign) in assigns_split.iter().enumerate() {
                let comma = if i < assigns_split.len() - 1 { "," } else { "" };
                self.push_line(&format!("{}{}", assign.replace("\"", ""), comma));
            }
            self.indent -= 2;
            if let Some(w) = where_idx {
                self.push_line("WHERE");
                self.indent += 2;
                // +7 skips " WHERE " itself.
                self.push_line(&after_set[w + 7..]);
                self.indent -= 2;
            }
        } else {
            self.push_line(&s);
        }
    }

    /// Format a query body: SELECTs structurally, nested queries in
    /// parentheses, anything else (UNION etc.) verbatim.
    fn format_query(&mut self, query: &Query) {
        match &*query.body {
            SetExpr::Select(select) => self.format_select(select),
            SetExpr::Query(inner_query) => {
                self.push_str("(");
                self.format_query(inner_query);
                self.push_str(")");
            }
            _ => self.push_str(&query.to_string()),
        }
    }

    /// Format SELECT projection, FROM tables/joins, and the WHERE clause.
    /// NOTE(review): WHERE is only rendered when FROM is non-empty — a bare
    /// `SELECT ... WHERE ...` would drop its predicate; confirm that shape
    /// never occurs in the generated SQL under test.
    fn format_select(&mut self, select: &Select) {
        self.push_str("SELECT ");
        for (i, p) in select.projection.iter().enumerate() {
            let comma = if i < select.projection.len() - 1 { ", " } else { "" };
            self.format_select_item(p);
            self.push_str(comma);
        }
        if !select.from.is_empty() {
            self.push_line("FROM ");
            for (i, table) in select.from.iter().enumerate() {
                let comma = if i < select.from.len() - 1 { ", " } else { "" };
                self.format_table_with_joins(table);
                self.push_str(comma);
            }
            if let Some(selection) = &select.selection {
                self.push_line("WHERE");
                self.indent += 2;
                self.push_line(""); // new line for where clauses
                self.format_expr(selection);
                self.indent -= 2;
            }
        }
    }

    /// Format one projection item; aliased expressions get " AS <alias>",
    /// everything else (e.g. wildcards) falls back to `to_string()`.
    fn format_select_item(&mut self, item: &SelectItem) {
        match item {
            SelectItem::UnnamedExpr(expr) => self.format_expr(expr),
            SelectItem::ExprWithAlias { expr, alias } => {
                self.format_expr(expr);
                self.push_str(&format!(" AS {}", alias));
            }
            _ => self.push_str(&item.to_string()),
        }
    }

    /// Emit the base relation, then each join on its own line.
    fn format_table_with_joins(&mut self, table: &TableWithJoins) {
        self.push_str(&table.relation.to_string());
        for join in &table.joins {
            self.push_line("");
            self.format_join(join);
        }
    }

    /// Emit "<JOIN kind> <relation> ON <condition>"; only ON-constrained
    /// inner/left joins are handled, other constraint forms are logged and
    /// left without a condition.
    fn format_join(&mut self, join: &Join) {
        let op = match &join.join_operator {
            JoinOperator::Inner(_) => "JOIN",
            JoinOperator::LeftOuter(_) => "LEFT JOIN",
            _ => "JOIN",
        };
        self.push_str(&format!("{} {} ON ", op, join.relation));
        match &join.join_operator {
            JoinOperator::Inner(JoinConstraint::On(expr)) => self.format_expr(expr),
            JoinOperator::LeftOuter(JoinConstraint::On(expr)) => self.format_expr(expr),
            JoinOperator::Join(JoinConstraint::On(expr)) => self.format_expr(expr),
            _ => {
                println!("FALLBACK JOIN OP: {:?}", join.join_operator);
            }
        }
    }

    /// Recursively render an expression. AND/OR operands get their own
    /// lines; subqueries and CASE arms are indented; single-quoted string
    /// values shaped like '{...}' are pretty-printed as JSON; anything not
    /// specially handled falls back to `to_string()`.
    fn format_expr(&mut self, expr: &Expr) {
        match expr {
            Expr::Function(func) => self.format_function(func),
            Expr::BinaryOp { left, op, right } => {
                if *op == BinaryOperator::And || *op == BinaryOperator::Or {
                    // Break before AND/OR so each conjunct reads on its own line.
                    self.format_expr(left);
                    self.push_line(&format!("{} ", op));
                    self.format_expr(right);
                } else {
                    self.format_expr(left);
                    self.push_str(&format!(" {} ", op));
                    self.format_expr(right);
                }
            }
            Expr::Nested(inner) => {
                self.push_str("(");
                self.format_expr(inner);
                self.push_str(")");
            }
            Expr::IsNull(inner) => {
                self.format_expr(inner);
                self.push_str(" IS NULL");
            }
            Expr::IsNotNull(inner) => {
                self.format_expr(inner);
                self.push_str(" IS NOT NULL");
            }
            Expr::Subquery(query) => {
                self.push_str("(");
                self.indent += 2;
                self.push_line("");
                self.format_query(query);
                self.indent -= 2;
                self.push_line(")");
            }
            Expr::Case { operand, conditions, else_result, .. } => {
                self.push_str("CASE");
                if let Some(op) = operand {
                    self.push_str(" ");
                    self.format_expr(op);
                }
                self.indent += 2;
                for when in conditions {
                    self.push_line("WHEN ");
                    self.format_expr(&when.condition);
                    self.push_str(" THEN ");
                    self.format_expr(&when.result);
                }
                if let Some(els) = else_result {
                    self.push_line("ELSE ");
                    self.format_expr(els);
                }
                self.indent -= 2;
                self.push_line("END");
            }
            Expr::UnaryOp { op, expr: inner } => {
                self.push_str(&format!("{} ", op));
                self.format_expr(inner);
            }
            Expr::Value(sqlparser::ast::ValueWithSpan { value: Value::SingleQuotedString(s), .. }) | Expr::Value(sqlparser::ast::ValueWithSpan { value: Value::EscapedStringLiteral(s), .. }) => {
                // String literal that looks like a JSON object: expand it,
                // mirroring the JSON handling in format_insert_fallback.
                if s.starts_with('{') && s.ends_with('}') {
                    if let Ok(json) = serde_json::from_str::<serde_json::Value>(s) {
                        if let Ok(pretty) = serde_json::to_string_pretty(&json) {
                            let lines: Vec<&str> = pretty.split('\n').collect();
                            self.push_str("'{");
                            self.indent += 2;
                            for (j, line) in lines.iter().skip(1).enumerate() {
                                if j == lines.len() - 2 {
                                    self.indent -= 2;
                                    self.push_line(&format!("{}'", line.replace("'", "''")));
                                } else {
                                    self.push_line(&line.replace("'", "''"));
                                }
                            }
                            return;
                        }
                    }
                }
                self.push_str(&expr.to_string());
            }
            _ => {
                self.push_str(&expr.to_string());
            }
        }
    }

    /// Render a function call. `jsonb_build_object` gets special layout:
    /// its args are treated as key/value pairs, one pair per line; all
    /// other functions are rendered inline.
    fn format_function(&mut self, func: &Function) {
        let name = func.name.to_string();
        self.push_str(&format!("{}(", name));
        if let sqlparser::ast::FunctionArguments::List(list) = &func.args {
            if name == "jsonb_build_object" {
                self.indent += 2;
                self.push_line("");
                // Walk args two at a time: (key, value) pairs.
                let mut i = 0;
                while i < list.args.len() {
                    let arg_key = &list.args[i];
                    let arg_val = if i + 1 < list.args.len() { Some(&list.args[i+1]) } else { None };
                    self.format_function_arg(arg_key);
                    self.push_str(", ");
                    if let Some(val) = arg_val {
                        self.format_function_arg(val);
                    }
                    if i + 2 < list.args.len() {
                        self.push_str(",");
                        self.push_line("");
                    }
                    i += 2;
                }
                self.indent -= 2;
                self.push_line(")");
            } else {
                for (i, arg) in list.args.iter().enumerate() {
                    let comma = if i < list.args.len() - 1 { ", " } else { "" };
                    self.format_function_arg(arg);
                    self.push_str(comma);
                }
                self.push_str(")");
            }
        } else {
            // No argument list (e.g. bare parens); just close.
            self.push_str(")");
        }
    }

    /// Render one function argument; only plain unnamed expressions are
    /// structurally formatted, anything else is logged and emitted verbatim.
    fn format_function_arg(&mut self, arg: &FunctionArg) {
        match arg {
            FunctionArg::Unnamed(sqlparser::ast::FunctionArgExpr::Expr(expr)) => self.format_expr(expr),
            _ => {
                println!("FALLBACK ARG: {:?}", arg);
                self.push_str(&arg.to_string());
            }
        }
    }
}

View File

@ -1,4 +1,5 @@
use crate::*; use crate::*;
pub mod formatter;
pub mod runner; pub mod runner;
pub mod types; pub mod types;
use serde_json::json; use serde_json::json;
@ -10,7 +11,7 @@ fn test_library_api() {
// 1. Initially, schemas are not cached. // 1. Initially, schemas are not cached.
// Expected uninitialized drop format: errors + null response // Expected uninitialized drop format: errors + null response
let uninitialized_drop = jspg_validate("source_schema", JsonB(json!({}))); let uninitialized_drop = jspg_validate("source_schema", Json(json!({})));
assert_eq!( assert_eq!(
uninitialized_drop.0, uninitialized_drop.0,
json!({ json!({
@ -72,7 +73,7 @@ fn test_library_api() {
] ]
}); });
let cache_drop = jspg_setup(JsonB(db_json)); let cache_drop = jspg_setup(Json(db_json));
assert_eq!( assert_eq!(
cache_drop.0, cache_drop.0,
json!({ json!({
@ -127,7 +128,7 @@ fn test_library_api() {
"forward": true "forward": true
} }
}, },
"compiledPropertyNames": ["name", "target", "type"], "compiledPropertyNames": ["type", "name", "target"],
"properties": { "properties": {
"name": { "type": "string" }, "name": { "type": "string" },
"target": { "target": {
@ -140,19 +141,19 @@ fn test_library_api() {
"type": "object" "type": "object"
}, },
"source_schema.filter": { "source_schema.filter": {
"compiledPropertyNames": ["$and", "$or", "name", "target", "type"], "compiledPropertyNames": ["type", "name", "target", "$and", "$or"],
"properties": { "properties": {
"$and": { "$and": {
"type": ["array", "null"], "type": ["array", "null"],
"items": { "items": {
"compiledPropertyNames": ["$and", "$or", "name", "target", "type"], "compiledPropertyNames": ["type", "name", "target", "$and", "$or"],
"type": "source_schema.filter" "type": "source_schema.filter"
} }
}, },
"$or": { "$or": {
"type": ["array", "null"], "type": ["array", "null"],
"items": { "items": {
"compiledPropertyNames": ["$and", "$or", "name", "target", "type"], "compiledPropertyNames": ["type", "name", "target", "$and", "$or"],
"type": "source_schema.filter" "type": "source_schema.filter"
} }
}, },
@ -193,19 +194,19 @@ fn test_library_api() {
"type": "object" "type": "object"
}, },
"target_schema.filter": { "target_schema.filter": {
"compiledPropertyNames": ["$and", "$or", "value"], "compiledPropertyNames": ["value", "$and", "$or"],
"properties": { "properties": {
"$and": { "$and": {
"type": ["array", "null"], "type": ["array", "null"],
"items": { "items": {
"compiledPropertyNames": ["$and", "$or", "value"], "compiledPropertyNames": ["value", "$and", "$or"],
"type": "target_schema.filter" "type": "target_schema.filter"
} }
}, },
"$or": { "$or": {
"type": ["array", "null"], "type": ["array", "null"],
"items": { "items": {
"compiledPropertyNames": ["$and", "$or", "value"], "compiledPropertyNames": ["value", "$and", "$or"],
"type": "target_schema.filter" "type": "target_schema.filter"
} }
}, },
@ -225,7 +226,7 @@ fn test_library_api() {
); );
// 4. Validate Happy Path // 4. Validate Happy Path
let happy_drop = jspg_validate("source_schema", JsonB(json!({"type": "source_schema", "name": "Neo"}))); let happy_drop = jspg_validate("source_schema", Json(json!({"type": "source_schema", "name": "Neo"})));
assert_eq!( assert_eq!(
happy_drop.0, happy_drop.0,
json!({ json!({
@ -235,7 +236,7 @@ fn test_library_api() {
); );
// 5. Validate Unhappy Path // 5. Validate Unhappy Path
let unhappy_drop = jspg_validate("source_schema", JsonB(json!({"type": "source_schema", "wrong": "data"}))); let unhappy_drop = jspg_validate("source_schema", Json(json!({"type": "source_schema", "wrong": "data"})));
assert_eq!( assert_eq!(
unhappy_drop.0, unhappy_drop.0,
json!({ json!({

View File

@ -127,7 +127,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
} }
} }
"merge" => { "merge" => {
let result = test.run_merge(db_unwrapped.unwrap()); let result = test.run_merge(db_unwrapped.unwrap(), path, suite_idx, case_idx);
if let Err(e) = result { if let Err(e) = result {
println!("TEST MERGE ERROR FOR '{}': {}", test.description, e); println!("TEST MERGE ERROR FOR '{}': {}", test.description, e);
failures.push(format!( failures.push(format!(
@ -137,7 +137,7 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
} }
} }
"query" => { "query" => {
let result = test.run_query(db_unwrapped.unwrap()); let result = test.run_query(db_unwrapped.unwrap(), path, suite_idx, case_idx);
if let Err(e) = result { if let Err(e) = result {
println!("TEST QUERY ERROR FOR '{}': {}", test.description, e); println!("TEST QUERY ERROR FOR '{}': {}", test.description, e);
failures.push(format!( failures.push(format!(
@ -160,3 +160,83 @@ pub fn run_test_case(path: &str, suite_idx: usize, case_idx: usize) -> Result<()
Ok(()) Ok(())
} }
/// Recursively walk `val` and record every non-nil UUID string it contains,
/// mapping the UUID value to the dotted JSON path where it occurs (if the
/// same UUID appears at several paths, the last one visited wins).
///
/// `path` is the dotted prefix accumulated so far; pass the root key (e.g.
/// "data" or "mocks") on the initial call.
pub fn extract_uuids(val: &Value, path: &str, map: &mut HashMap<String, String>) {
    // Compile the anchored UUID pattern once per process instead of on every
    // recursive call — the original recompiled it at each level of the walk.
    static UUID_RE: std::sync::OnceLock<regex::Regex> = std::sync::OnceLock::new();
    let uuid_re = UUID_RE.get_or_init(|| {
        regex::Regex::new(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$").unwrap()
    });
    match val {
        Value::Object(obj) => {
            for (k, v) in obj {
                let new_path = if path.is_empty() { k.clone() } else { format!("{}.{}", path, k) };
                extract_uuids(v, &new_path, map);
            }
        }
        Value::Array(arr) => {
            for (i, v) in arr.iter().enumerate() {
                let new_path = if path.is_empty() { i.to_string() } else { format!("{}.{}", path, i) };
                extract_uuids(v, &new_path, map);
            }
        }
        Value::String(s) => {
            // The nil UUID is treated as a fixed sentinel, not a volatile id.
            if s != "00000000-0000-0000-0000-000000000000" && uuid_re.is_match(s) {
                map.insert(s.clone(), path.to_string());
            }
        }
        _ => {}
    }
}
/// Replace volatile values in one formatted SQL line with stable placeholders:
/// UUIDs known from the fixture inputs become `{{uuid:<path>}}`, unknown UUIDs
/// become `{{uuid:generated_<n>}}` (numbered in first-seen order via
/// `gen_map`, which the caller threads across lines), and ISO-8601 timestamps
/// become `{{timestamp}}`. The nil UUID is left untouched.
pub fn canonicalize_with_map(s: &str, uuid_map: &HashMap<String, String>, gen_map: &mut HashMap<String, usize>) -> String {
    // This function runs once per formatted SQL line; compile both patterns a
    // single time per process instead of on every call.
    static UUID_RE: std::sync::OnceLock<regex::Regex> = std::sync::OnceLock::new();
    static TS_RE: std::sync::OnceLock<regex::Regex> = std::sync::OnceLock::new();
    let uuid_re = UUID_RE.get_or_init(|| {
        regex::Regex::new(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}").unwrap()
    });
    let s1 = uuid_re.replace_all(s, |caps: &regex::Captures| {
        let val = &caps[0];
        if val == "00000000-0000-0000-0000-000000000000" {
            // Nil UUID is a fixed sentinel; keep it verbatim.
            val.to_string()
        } else if let Some(path) = uuid_map.get(val) {
            format!("{{{{uuid:{}}}}}", path)
        } else {
            // Unknown UUID: assign the next index only on first sight so the
            // same generated id canonicalizes identically across lines.
            let next_idx = gen_map.len();
            let idx = *gen_map.entry(val.to_string()).or_insert(next_idx);
            format!("{{{{uuid:generated_{}}}}}", idx)
        }
    });
    let ts_re = TS_RE.get_or_init(|| {
        regex::Regex::new(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?(?:Z|\+\d{2}(?::\d{2})?)?").unwrap()
    });
    ts_re.replace_all(&s1, "{{timestamp}}").to_string()
}
/// Regenerate the expected-SQL section of the JSON fixture at `path` for the
/// test addressed by `suite_idx`/`case_idx`, from the `queries` actually
/// captured during a run. Intended for deliberate expectation refresh (the
/// callers gate it behind the UPDATE_EXPECT env var), so every failure mode
/// here is a hard panic via `unwrap()`.
pub fn update_sql_fixture(path: &str, suite_idx: usize, case_idx: usize, queries: &[String]) {
    use crate::tests::formatter::SqlFormatter;
    // Load and parse the whole fixture file up front.
    let content = fs::read_to_string(path).unwrap();
    let mut file_data: Value = serde_json::from_str(&content).unwrap();
    // Map each concrete UUID found in the test's "data"/"mocks" inputs to its
    // JSON path, so captured SQL can be canonicalized symbolically.
    let mut uuid_map = HashMap::new();
    if let Some(test_case) = file_data.get(suite_idx).and_then(|s| s.get("tests")).and_then(|t| t.get(case_idx)) {
        if let Some(data) = test_case.get("data") {
            extract_uuids(data, "data", &mut uuid_map);
        }
        if let Some(mocks) = test_case.get("mocks") {
            extract_uuids(mocks, "mocks", &mut uuid_map);
        }
    }
    // Format every captured query, then replace volatile values (UUIDs,
    // timestamps) with placeholders; gen_map numbers UUIDs that never appeared
    // in the fixture inputs, shared across all queries of this case.
    let mut gen_map = HashMap::new();
    let mut formatted_sql = Vec::new();
    for q in queries {
        let res = SqlFormatter::format(q);
        let mapped_res: Vec<String> = res.into_iter().map(|l| canonicalize_with_map(&l, &uuid_map, &mut gen_map)).collect();
        formatted_sql.push(mapped_res);
    }
    // Swap any legacy "pattern" expectation for the regenerated "sql" one.
    // NOTE: indexing here (unlike the `get` chain above) panics if the case is
    // missing — acceptable for a regeneration helper.
    if let Some(expect) = file_data[suite_idx]["tests"][case_idx].get_mut("expect") {
        if let Some(obj) = expect.as_object_mut() {
            obj.remove("pattern");
            obj.insert("sql".to_string(), serde_json::json!(formatted_sql));
        }
    }
    // To preserve original formatting, we just use serde_json pretty output
    let formatted_json = serde_json::to_string_pretty(&file_data).unwrap();
    fs::write(path, formatted_json).unwrap();
}

View File

@ -75,7 +75,7 @@ impl Case {
Ok(()) Ok(())
} }
pub fn run_merge(&self, db: Arc<Database>) -> Result<(), String> { pub fn run_merge(&self, db: Arc<Database>, path: &str, suite_idx: usize, case_idx: usize) -> Result<(), String> {
if let Some(mocks) = &self.mocks { if let Some(mocks) = &self.mocks {
if let Some(arr) = mocks.as_array() { if let Some(arr) = mocks.as_array() {
db.executor.set_mocks(arr.clone()); db.executor.set_mocks(arr.clone());
@ -94,7 +94,10 @@ impl Case {
} else if result.errors.is_empty() { } else if result.errors.is_empty() {
// Only assert SQL if merge succeeded // Only assert SQL if merge succeeded
let queries = db.executor.get_queries(); let queries = db.executor.get_queries();
expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries)) if std::env::var("UPDATE_EXPECT").is_ok() {
crate::tests::runner::update_sql_fixture(path, suite_idx, case_idx, &queries);
}
expect.assert_sql(&queries)
} else { } else {
Ok(()) Ok(())
} }
@ -106,7 +109,7 @@ impl Case {
return_val return_val
} }
pub fn run_query(&self, db: Arc<Database>) -> Result<(), String> { pub fn run_query(&self, db: Arc<Database>, path: &str, suite_idx: usize, case_idx: usize) -> Result<(), String> {
if let Some(mocks) = &self.mocks { if let Some(mocks) = &self.mocks {
if let Some(arr) = mocks.as_array() { if let Some(arr) = mocks.as_array() {
db.executor.set_mocks(arr.clone()); db.executor.set_mocks(arr.clone());
@ -123,7 +126,10 @@ impl Case {
Err(format!("Query {}", e)) Err(format!("Query {}", e))
} else if result.errors.is_empty() { } else if result.errors.is_empty() {
let queries = db.executor.get_queries(); let queries = db.executor.get_queries();
expect.assert_pattern(&queries).and_then(|_| expect.assert_sql(&queries)) if std::env::var("UPDATE_EXPECT").is_ok() {
crate::tests::runner::update_sql_fixture(path, suite_idx, case_idx, &queries);
}
expect.assert_sql(&queries)
} else { } else {
Ok(()) Ok(())
} }

View File

@ -1,6 +1,7 @@
use crate::validator::context::ValidationContext; use crate::validator::context::ValidationContext;
use crate::validator::error::ValidationError; use crate::validator::error::ValidationError;
use crate::validator::result::ValidationResult; use crate::validator::result::ValidationResult;
use indexmap::IndexMap;
impl<'a> ValidationContext<'a> { impl<'a> ValidationContext<'a> {
pub(crate) fn validate_family( pub(crate) fn validate_family(
@ -65,7 +66,7 @@ impl<'a> ValidationContext<'a> {
pub(crate) fn execute_polymorph( pub(crate) fn execute_polymorph(
&self, &self,
options: &std::collections::BTreeMap<String, (Option<usize>, Option<String>)>, options: &IndexMap<String, (Option<usize>, Option<String>)>,
result: &mut ValidationResult, result: &mut ValidationResult,
) -> Result<bool, ValidationError> { ) -> Result<bool, ValidationError> {
// 1. O(1) Fast-Path Router & Extractor // 1. O(1) Fast-Path Router & Extractor

View File

@ -1,88 +0,0 @@
import json
import re
# One-off fixture regeneration script: the raw JSPG_SQL log output captured
# from a test run is pasted below as a literal, parsed back into individual
# SQL statements, and used to rewrite the expected SQL of one merger fixture.
# NOTE(review): because the captured output is baked in, this reproduces the
# fixture only for this one recorded run.
output = """
JSPG_SQL: INSERT INTO agreego."entity" ("created_at", "created_by", "id", "modified_at", "modified_by", "type") VALUES ('2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', '734f0f6e-3408-4d18-a6d7-725400ff6b30', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', 'person')
JSPG_SQL: INSERT INTO agreego."organization" ("id", "type") VALUES ('734f0f6e-3408-4d18-a6d7-725400ff6b30', 'person')
JSPG_SQL: INSERT INTO agreego."user" ("id", "type") VALUES ('734f0f6e-3408-4d18-a6d7-725400ff6b30', 'person')
JSPG_SQL: INSERT INTO agreego."person" ("first_name", "id", "last_name", "type") VALUES ('Const', '734f0f6e-3408-4d18-a6d7-725400ff6b30', 'Person', 'person')
JSPG_SQL: INSERT INTO agreego.change ("old", "new", entity_id, id, kind, modified_at, modified_by) VALUES (NULL, '{"first_name":"Const","last_name":"Person","type":"person"}', '734f0f6e-3408-4d18-a6d7-725400ff6b30', '7195460a-edff-4d0d-b137-c040616b9f27', 'create', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000')
JSPG_SQL: INSERT INTO agreego."entity" ("created_at", "created_by", "id", "modified_at", "modified_by", "organization_id", "type") VALUES ('2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', '369e92ac-41c5-4d43-9286-c004edb96e76', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', 'parent-org-id', 'order')
JSPG_SQL: INSERT INTO agreego."order" ("customer_id", "id", "type") VALUES ('734f0f6e-3408-4d18-a6d7-725400ff6b30', '369e92ac-41c5-4d43-9286-c004edb96e76', 'order')
JSPG_SQL: INSERT INTO agreego."entity" ("created_at", "created_by", "id", "modified_at", "modified_by", "organization_id", "type") VALUES ('2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', '48e91d8d-99ef-4f74-b2e6-c98f9501bb7a', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', 'parent-org-id', 'order_line')
JSPG_SQL: INSERT INTO agreego."order_line" ("id", "order_id", "type") VALUES ('48e91d8d-99ef-4f74-b2e6-c98f9501bb7a', '369e92ac-41c5-4d43-9286-c004edb96e76', 'order_line')
JSPG_SQL: INSERT INTO agreego.change ("old", "new", entity_id, id, kind, modified_at, modified_by) VALUES (NULL, '{"order_id":"369e92ac-41c5-4d43-9286-c004edb96e76","organization_id":"parent-org-id","type":"order_line"}', '48e91d8d-99ef-4f74-b2e6-c98f9501bb7a', '5ab5c99b-926a-4878-98a7-c531859d2ebe', 'create', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000')
JSPG_SQL: INSERT INTO agreego."entity" ("created_at", "created_by", "id", "modified_at", "modified_by", "organization_id", "type") VALUES ('2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', 'b91b93b2-1f75-4be3-a731-88562d289997', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000', 'explicit-org-id', 'order_line')
JSPG_SQL: INSERT INTO agreego."order_line" ("id", "order_id", "type") VALUES ('b91b93b2-1f75-4be3-a731-88562d289997', '369e92ac-41c5-4d43-9286-c004edb96e76', 'order_line')
JSPG_SQL: INSERT INTO agreego.change ("old", "new", entity_id, id, kind, modified_at, modified_by) VALUES (NULL, '{"order_id":"369e92ac-41c5-4d43-9286-c004edb96e76","organization_id":"explicit-org-id","type":"order_line"}', 'b91b93b2-1f75-4be3-a731-88562d289997', 'ad35cf4e-d2de-4f87-aa3d-ec30101397ca', 'create', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000')
JSPG_SQL: INSERT INTO agreego.change ("old", "new", entity_id, id, kind, modified_at, modified_by) VALUES (NULL, '{"customer_id":"734f0f6e-3408-4d18-a6d7-725400ff6b30","organization_id":"parent-org-id","type":"order"}', '369e92ac-41c5-4d43-9286-c004edb96e76', '4646bcc7-e1dd-45f7-ba66-33175844fa79', 'create', '2026-03-10T00:00:00Z', '00000000-0000-0000-0000-000000000000')
JSPG_SQL: SELECT pg_notify('entity', '{"complete":{"created_at":"2026-03-10T00:00:00Z","created_by":"00000000-0000-0000-0000-000000000000","customer_id":"734f0f6e-3408-4d18-a6d7-725400ff6b30","id":"369e92ac-41c5-4d43-9286-c004edb96e76","modified_at":"2026-03-10T00:00:00Z","modified_by":"00000000-0000-0000-0000-000000000000","organization_id":"parent-org-id","type":"order"},"new":{"customer_id":"734f0f6e-3408-4d18-a6d7-725400ff6b30","organization_id":"parent-org-id","type":"order"}}')
JSPG_SQL: SELECT pg_notify('entity', '{"complete":{"created_at":"2026-03-10T00:00:00Z","created_by":"00000000-0000-0000-0000-000000000000","first_name":"Const","id":"734f0f6e-3408-4d18-a6d7-725400ff6b30","last_name":"Person","modified_at":"2026-03-10T00:00:00Z","modified_by":"00000000-0000-0000-0000-000000000000","type":"person"},"new":{"first_name":"Const","last_name":"Person","type":"person"}}')
JSPG_SQL: SELECT pg_notify('entity', '{"complete":{"created_at":"2026-03-10T00:00:00Z","created_by":"00000000-0000-0000-0000-000000000000","id":"48e91d8d-99ef-4f74-b2e6-c98f9501bb7a","modified_at":"2026-03-10T00:00:00Z","modified_by":"00000000-0000-0000-0000-000000000000","order_id":"369e92ac-41c5-4d43-9286-c004edb96e76","organization_id":"parent-org-id","type":"order_line"},"new":{"order_id":"369e92ac-41c5-4d43-9286-c004edb96e76","organization_id":"parent-org-id","type":"order_line"}}')
JSPG_SQL: SELECT pg_notify('entity', '{"complete":{"created_at":"2026-03-10T00:00:00Z","created_by":"00000000-0000-0000-0000-000000000000","id":"b91b93b2-1f75-4be3-a731-88562d289997","modified_at":"2026-03-10T00:00:00Z","modified_by":"00000000-0000-0000-0000-000000000000","order_id":"369e92ac-41c5-4d43-9286-c004edb96e76","organization_id":"explicit-org-id","type":"order_line"},"new":{"order_id":"369e92ac-41c5-4d43-9286-c004edb96e76","organization_id":"explicit-org-id","type":"order_line"}}')
"""
# Keep only the SQL lines, dropping the "JSPG_SQL: " log prefix and
# surrounding whitespace.
lines = [line.replace("JSPG_SQL: ", "").strip() for line in output.split("\n") if line.startswith("JSPG_SQL: ")]
# Entity ids observed in the captured run above; replace_ids maps each of
# these to a named "{{uuid:...}}" placeholder.
person_id = "734f0f6e-3408-4d18-a6d7-725400ff6b30"
order_id = "369e92ac-41c5-4d43-9286-c004edb96e76"
line1_id = "48e91d8d-99ef-4f74-b2e6-c98f9501bb7a"
line2_id = "b91b93b2-1f75-4be3-a731-88562d289997"
def replace_ids(s):
    """Canonicalize one SQL line: named ids, then anonymous UUIDs, then timestamps."""
    # Known entity ids get stable, named placeholders first, so the generic
    # UUID pattern below only catches the remaining (anonymous) ones.
    for raw, placeholder in (
        (person_id, "{{uuid:person_id}}"),
        (order_id, "{{uuid:order_id}}"),
        (line1_id, "{{uuid:line1_id}}"),
        (line2_id, "{{uuid:line2_id}}"),
    ):
        s = s.replace(raw, placeholder)
    # Any other single-quoted UUID literal becomes an unnamed placeholder.
    s = re.sub(r"'[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}'", "'{{uuid}}'", s)
    # The fixed mock timestamp appears both single- and double-quoted.
    for quoted in ("'2026-03-10T00:00:00Z'", '"2026-03-10T00:00:00Z"'):
        s = s.replace(quoted, quoted[0] + "{{timestamp}}" + quoted[0])
    return s
# Each captured statement becomes a single-element list: the fixture stores
# every expected statement as a list of pre-formatted lines, and this script
# emits each statement as one line. (The previous version also built — and
# then discarded — a column-per-line INSERT layout; that dead code carried a
# latent IndexError when "VALUES" appeared without surrounding spaces, and is
# removed here without changing the output.)
formatted_sql = [[replace_ids(line)] for line in lines]

# Patch the matching test case in place and rewrite the fixture file.
with open("fixtures/merger.json", "r") as f:
    data = json.load(f)
test_case = next(t for t in data[0]["tests"] if t["description"] == "Test organization_id syntactic sugar permutations")
test_case["expect"]["sql"] = formatted_sql
with open("fixtures/merger.json", "w") as f:
    json.dump(data, f, indent=2)

View File

@ -1 +1 @@
1.0.136 1.0.142