56 lines
1.7 KiB
Python
56 lines
1.7 KiB
Python
import glob
import json
import os
import sys
|
fixtures_dir = 'fixtures'


def _merge_schema_into_types(db, schema_id, schema_def):
    """Attach one legacy schema entry to the matching type in db["types"].

    The type is matched by the schema id's base name (last dot-separated
    segment).  If no type with that name exists yet, a new type record is
    appended with placeholder "variations"/"hierarchy" fields.
    """
    base_name = schema_id.split('.')[-1]

    # Fold into an existing type with the same base name, if any.
    for t in db["types"]:
        if t.get("name") == base_name:
            t.setdefault("schemas", {})[schema_id] = schema_def
            return

    db["types"].append({
        "name": base_name,
        "variations": [base_name],  # Optional placeholder, shouldn't break anything
        "hierarchy": [base_name, "entity"],
        "schemas": {
            schema_id: schema_def
        }
    })


def migrate_suites(data):
    """Move each suite's legacy database["schemas"] map into database["types"].

    Mutates *data* (a list of suite dicts) in place.  Suites without a
    database, or whose database has no legacy "schemas" key, are skipped.
    Returns True if at least one suite was migrated.
    """
    changed = False
    for suite in data:
        db = suite.get("database")
        if not db or "schemas" not in db:
            continue

        legacy_schemas = db["schemas"]
        # Make sure the types array is ready before pushing schemas into it.
        if "types" not in db:
            db["types"] = []

        for schema_id, schema_def in legacy_schemas.items():
            _merge_schema_into_types(db, schema_id, schema_def)

        # Clean up the legacy global map now that everything was copied over.
        del db["schemas"]
        changed = True
    return changed


# Scan every JSON fixture and rewrite the ones that still use the legacy
# top-level "schemas" map.
for filepath in glob.glob(os.path.join(fixtures_dir, '*.json')):
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except Exception as e:
        # Best-effort: skip unreadable/invalid fixtures, but say so instead
        # of silently dropping them (the original swallowed the error).
        print(f"Skipping {filepath}: {e}", file=sys.stderr)
        continue

    if migrate_suites(data):
        with open(filepath, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)
        print("Migrated legacy schemas to types in", filepath)
|