Compare commits

...

1 Commit

Author SHA1 Message Date
120f488d93 cleanup 2026-04-17 18:27:49 -04:00
6 changed files with 0 additions and 255 deletions

View File

@ -1,55 +0,0 @@
import json
import os
import glob


def migrate_schemas_to_types(fixtures_dir='fixtures'):
    """Fold each database's legacy top-level "schemas" map into its "types" list.

    For every ``*.json`` fixture in *fixtures_dir*, each entry of
    ``database["schemas"]`` (keyed by dotted schema id) is attached to the
    type whose ``name`` equals the id's final dotted segment; a new type
    (with placeholder ``variations``/``hierarchy``) is created when no such
    type exists. The legacy ``schemas`` key is then removed and any changed
    file is rewritten in place.
    """
    for filepath in glob.glob(os.path.join(fixtures_dir, '*.json')):
        try:
            with open(filepath, 'r') as f:
                data = json.load(f)
        # Narrow: only I/O and parse failures are expected; report them
        # instead of silently skipping (the original dropped files mutely).
        except (OSError, json.JSONDecodeError) as e:
            print(f"Failed to load {filepath}: {e}")
            continue
        changed = False
        for suite in data:
            db = suite.get("database")
            if not db or "schemas" not in db:
                continue
            legacy_schemas = db["schemas"]
            # Make sure the types array exists before merging into it.
            if "types" not in db:
                db["types"] = []
            for schema_id, schema_def in legacy_schemas.items():
                # A schema id like "a.b.User" maps onto the type named "User".
                base_name = schema_id.split('.')[-1]
                # Attach to an existing type with that name when possible.
                for t in db["types"]:
                    if t.get("name") == base_name:
                        t.setdefault("schemas", {})[schema_id] = schema_def
                        break
                else:
                    db["types"].append({
                        "name": base_name,
                        "variations": [base_name],  # optional placeholder
                        "hierarchy": [base_name, "entity"],
                        "schemas": {schema_id: schema_def},
                    })
            # Clean up the legacy global map.
            del db["schemas"]
            changed = True
        if changed:
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2)
            print("Migrated legacy schemas to types in", filepath)


if __name__ == "__main__":
    migrate_schemas_to_types()

View File

@ -1,54 +0,0 @@
import json
import os
import glob


def migrate_legacy_schemas(fixtures_dir='fixtures'):
    """Fold each database's legacy top-level "schemas" map into "types".

    For every ``*.json`` fixture in *fixtures_dir*, each entry of
    ``database["schemas"]`` (keyed by dotted schema id) is attached to the
    type whose ``name`` equals the id's final dotted segment; a minimal
    type object is created when no such type exists. The legacy ``schemas``
    key is then removed and any changed file is rewritten in place.
    """
    for filepath in glob.glob(os.path.join(fixtures_dir, '*.json')):
        try:
            with open(filepath, 'r') as f:
                data = json.load(f)
        # Narrowed from a bare ``except Exception``: only I/O and JSON
        # parse failures are expected while loading.
        except (OSError, json.JSONDecodeError) as e:
            print(f"Failed to load {filepath}: {e}")
            continue
        changed = False
        for suite in data:
            db = suite.get("database")
            if not db or "schemas" not in db:
                continue
            legacy_schemas = db["schemas"]
            # Make sure the types array exists before merging into it.
            if "types" not in db:
                db["types"] = []
            for schema_id, schema_def in legacy_schemas.items():
                # "a.b.User" attaches to the type named "User".
                base_name = schema_id.split('.')[-1]
                # Prefer an existing type with that name.
                for t in db["types"]:
                    if t.get("name") == base_name:
                        t.setdefault("schemas", {})[schema_id] = schema_def
                        break
                else:
                    db["types"].append({
                        "name": base_name,
                        "schemas": {schema_id: schema_def},
                    })
            # Clean up the legacy global map.
            del db["schemas"]
            changed = True
        if changed:
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2)
            print("Migrated legacy schemas to types properly in", filepath)


if __name__ == "__main__":
    migrate_legacy_schemas()

View File

@ -1,41 +0,0 @@
const fs = require('fs');
const path = require('path');

// Move every entry of database.puncs into database.types for each suite,
// then drop the legacy puncs array and rewrite any file that changed.
function updateFile(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8');
  let data;
  try {
    data = JSON.parse(raw);
  } catch (e) {
    console.error("Failed to parse " + filePath, e);
    return;
  }
  let changed = false;
  data.forEach((suite) => {
    const db = suite.database;
    if (!(db && db.puncs && db.puncs.length > 0)) return;
    if (!db.types) db.types = [];
    // Every punc belongs in types: the tests construct Queryer and Merger
    // using these ids, and those query the Type Realm.
    db.puncs.forEach((punc) => db.types.push(punc));
    delete db.puncs;
    changed = true;
  });
  if (changed) {
    fs.writeFileSync(filePath, JSON.stringify(data, null, 2));
    console.log("Reverted puncs to types in " + filePath);
  }
}

const fixturesDir = 'fixtures';
for (const file of fs.readdirSync(fixturesDir)) {
  if (file.endsWith('.json')) updateFile(path.join(fixturesDir, file));
}

View File

@ -1,29 +0,0 @@
import json
import os
import glob


def revert_puncs_to_types(fixtures_dir='fixtures'):
    """Move each suite's legacy database "puncs" entries into "types".

    Every ``*.json`` fixture under *fixtures_dir* is parsed; any non-empty
    ``database["puncs"]`` list is appended onto ``database["types"]``
    (created if absent), the ``puncs`` key is deleted, and the file is
    rewritten in place.
    """
    for filepath in glob.glob(os.path.join(fixtures_dir, '*.json')):
        with open(filepath, 'r') as f:
            try:
                data = json.load(f)
            # Narrowed from ``except Exception``, which also hid real bugs;
            # matches the sibling migration script's handling.
            except json.JSONDecodeError as e:
                print("Failed to parse", filepath, e)
                continue
        changed = False
        for suite in data:
            # Guard against an explicit JSON null database, not just a
            # missing key (``.get("database", {})`` still returned None).
            db = suite.get("database") or {}
            puncs = db.get("puncs", [])
            if puncs:
                db.setdefault("types", []).extend(puncs)
                del db["puncs"]
                changed = True
        if changed:
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2)
            print("Reverted puncs to types in", filepath)


if __name__ == "__main__":
    revert_puncs_to_types()

View File

@ -1,43 +0,0 @@
const fs = require('fs');
const path = require('path');

// Convert each suite's legacy database.schemas map into punc-type entries
// on database.puncs, then remove the schemas map and rewrite the file.
function updateFile(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8');
  let data;
  try {
    data = JSON.parse(raw);
  } catch (e) {
    console.error("Failed to parse " + filePath, e);
    return;
  }
  let changed = false;
  for (const suite of data) {
    const db = suite.database;
    if (!db || !db.schemas) continue;
    if (!db.puncs) db.puncs = [];
    for (const [id, schema] of Object.entries(db.schemas)) {
      // One punc per schema id, carrying that schema under its own id.
      db.puncs.push({ name: id, schemas: { [id]: schema } });
    }
    delete db.schemas;
    changed = true;
  }
  if (changed) {
    fs.writeFileSync(filePath, JSON.stringify(data, null, 2));
    console.log("Updated " + filePath);
  }
}

const fixturesDir = 'fixtures';
for (const entry of fs.readdirSync(fixturesDir)) {
  if (entry.endsWith('.json')) updateFile(path.join(fixturesDir, entry));
}

View File

@ -1,33 +0,0 @@
import json
import os


def migrate_schema_map(fixtures_dir='fixtures'):
    """Convert each suite's legacy database "schemas" map into "types" entries.

    Every schema id in ``database["schemas"]`` becomes a type object
    ``{"name": id, "schemas": {id: schema}}`` appended to
    ``database["types"]``; the legacy map is then deleted and any changed
    file is rewritten in place.
    """
    for filename in os.listdir(fixtures_dir):
        if not filename.endswith('.json'):
            continue
        filepath = os.path.join(fixtures_dir, filename)
        try:
            with open(filepath, 'r') as f:
                data = json.load(f)
        # Also catch OSError: the original opened the file outside the try,
        # so one unreadable file aborted the whole run instead of being
        # skipped like a malformed one.
        except (OSError, json.JSONDecodeError):
            print("Failed to parse", filepath)
            continue
        changed = False
        for suite in data:
            # Tolerate an explicit JSON null database as well as a missing key.
            db = suite.get('database') or {}
            if 'schemas' in db:
                db.setdefault('types', [])
                for id_str, schema in db['schemas'].items():
                    db['types'].append({
                        'name': id_str,
                        'schemas': {id_str: schema},
                    })
                del db['schemas']
                changed = True
        if changed:
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2)
            print("Updated", filepath)


if __name__ == "__main__":
    migrate_schema_map()