Compare commits

..

16 Commits
1.0.1 ... 1.0.9

Author SHA1 Message Date
48e74815d3 version: 1.0.9 2025-04-14 18:08:45 -04:00
23235d4b9d -m switched to boon 2025-04-14 18:08:36 -04:00
67406c0b96 version: 1.0.8 2025-04-14 16:11:49 -04:00
28fff3be11 validation error fixes 2025-04-14 16:11:44 -04:00
70f3d30258 version: 1.0.7 2025-04-14 12:03:07 -04:00
406466454e excluding flows from jspg release 2025-04-14 12:03:01 -04:00
2a9d51fa77 version: 1.0.6 2025-04-14 11:24:22 -04:00
ae90137308 updated flows 2025-04-14 11:24:18 -04:00
d22a8669ef version: 1.0.5 2025-04-14 11:19:38 -04:00
b32c17a4f5 updated flow 2025-04-14 11:19:28 -04:00
79cce357e2 version: 1.0.4 2025-04-13 23:03:58 -04:00
512fa28b91 failed commit 2025-04-13 23:03:53 -04:00
a36120459b version: 1.0.3 2025-04-13 22:58:52 -04:00
19734a5b0d failed commit 2025-04-13 22:58:47 -04:00
d4aa2af6cf version: 1.0.2 2025-04-13 22:57:15 -04:00
c4c4796ab0 failed commit 2025-04-13 22:57:11 -04:00
7 changed files with 440 additions and 90 deletions

10
.editorconfig Normal file
View File

@ -0,0 +1,10 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[*.{json,toml,control,rs}]
charset = utf-8
indent_style = space
indent_size = 2

27
Cargo.lock generated
View File

@ -68,6 +68,12 @@ version = "1.0.97"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
[[package]]
name = "appendlist"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e149dc73cd30538307e7ffa2acd3d2221148eaeed4871f246657b1c3eaa1cbd2"
[[package]]
name = "async-trait"
version = "0.1.88"
@ -177,6 +183,26 @@ dependencies = [
"generic-array",
]
[[package]]
name = "boon"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baa187da765010b70370368c49f08244b1ae5cae1d5d33072f76c8cb7112fe3e"
dependencies = [
"ahash",
"appendlist",
"base64",
"fluent-uri",
"idna",
"once_cell",
"percent-encoding",
"regex",
"regex-syntax",
"serde",
"serde_json",
"url",
]
[[package]]
name = "borrow-or-share"
version = "0.2.2"
@ -1015,6 +1041,7 @@ dependencies = [
name = "jspg"
version = "0.1.0"
dependencies = [
"boon",
"jsonschema",
"lazy_static",
"pgrx",

View File

@ -9,6 +9,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
jsonschema = "0.29.1"
lazy_static = "1.5.0"
boon = "0.6.1"
[dev-dependencies]
pgrx-tests = "0.14.0"
@ -22,6 +23,7 @@ path = "src/bin/pgrx_embed.rs"
[features]
pg17 = ["pgrx/pg17", "pgrx-tests/pg17" ]
# Local feature flag used by `cargo pgrx test`
pg_test = []
[profile.dev]

59
flow
View File

@ -11,10 +11,24 @@ POSTGRES_VERSION="17"
POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
PACKAGE_NAME="jspg"
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"
# env: ensure GITEA_TOKEN is available (used by package/release targets).
# Falls back to reading the token from the cluster secret when the variable
# is not already set; exits with status 1 if neither source provides it.
env() {
  # Check if GITEA_TOKEN is set
  if [ -z "$GITEA_TOKEN" ]; then
    # If not set, try to get it from kubectl.
    # NOTE(review): assumes the current kubectl context can read the
    # 'gitea-git' secret in namespace 'cellular' -- confirm for CI runners.
    GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
    if [ -z "$GITEA_TOKEN" ]; then
      echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
      exit 1
    fi
    # Export so child processes invoked by package/release can use it.
    export GITEA_TOKEN
  fi
  echo -e "💰 ${GREEN}Environment variables set${RESET}"
}
pgrx-prepare() {
echo -e "${BLUE}Initializing pgrx...${RESET}"
# Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
@ -38,7 +52,7 @@ build() {
local version
version=$(get-version) || return 1
local package_dir="./package"
local tarball_name="${GITEA_REPOSITORY}-src-v${version}.tar.gz"
local tarball_name="${GITEA_REPOSITORY}.tar.gz"
local tarball_path="${package_dir}/${tarball_name}"
echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
@ -49,7 +63,7 @@ build() {
# Create the source tarball excluding specified patterns
echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
if tar --exclude='.git*' --exclude='./target' --exclude='./package' -czf "${tarball_path}" .; then
if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
else
echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
@ -62,15 +76,41 @@ install() {
version=$(get-version) || return 1
echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
# Run the pgrx install command
# It implicitly uses --release unless --debug is passed
# It finds pg_config or you can add flags like --pg-config if needed
if ! cargo pgrx install "$@"; then # Pass any extra args like --debug
if ! cargo pgrx install; then
echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
return 1
fi
echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
# Post-install modification to allow non-superuser usage
# Get the installation path dynamically using pg_config
local pg_sharedir
pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
if [ -z "$pg_sharedir" ]; then
echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
return 1
fi
local installed_control_path="${pg_sharedir}/extension/jspg.control"
# Modify the control file
if [ ! -f "$installed_control_path" ]; then
echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
return 1
fi
echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
# Use sed -i '' for macOS compatibility
if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
echo 'trusted = true' >> "$installed_control_path"; then
echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
else
echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
return 1
fi
}
test() {
@ -98,13 +138,14 @@ jspg-usage() {
jspg-flow() {
case "$1" in
env) env; return 0;;
prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
build) build; return 0;;
install) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
reinstall) base prepare; cargo-prepare; pgrx-prepare; install "$@"; return 0;;
install) install; return 0;;
reinstall) clean; install; return 0;;
test) test; return 0;;
package) package; return 0;;
release) release; return 0;;
package) env; package; return 0;;
release) env; release; return 0;;
clean) clean; return 0;;
*) return 1 ;;
esac

2
flows

Submodule flows updated: 2487aa6a25...db55335254

View File

@ -1,126 +1,396 @@
use pgrx::*;
use jsonschema::{Draft, Validator};
use serde_json::json;
use std::collections::HashMap;
use std::sync::RwLock;
use lazy_static::lazy_static;
use jsonschema;
pg_module_magic!();
// Global, thread-safe schema cache using the correct Validator type
lazy_static! {
static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
use serde_json::{json, Value};
use std::{collections::HashMap, sync::RwLock};
use boon::{Compiler, Schemas, ValidationError, SchemaIndex};
use lazy_static::lazy_static;
/// Process-local cache of compiled JSON schemas.
///
/// `schemas` is boon's store of compiled schemas; `id_to_index` maps the
/// caller-supplied schema id to the compiled schema's index in that store.
struct BoonCache {
    schemas: Schemas,
    id_to_index: HashMap<String, SchemaIndex>,
}
// Cache a schema explicitly with a provided ID
#[pg_extern(immutable, strict, parallel_safe)]
fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
match jsonschema::options()
.with_draft(Draft::Draft7)
.should_validate_formats(true)
.build(&schema.0)
{
Ok(compiled) => {
SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
true
},
Err(e) => {
notice!("Failed to cache schema '{}': {}", schema_id, e);
false
lazy_static! {
    // Global, thread-safe schema cache. Being a process-local static, its
    // contents are per server process and are not shared across backends.
    static ref SCHEMA_CACHE: RwLock<BoonCache> = RwLock::new(BoonCache {
        schemas: Schemas::new(),
        id_to_index: HashMap::new()
    });
}
/// Compile `schema` with boon and store it in the global cache under
/// `schema_id`.
///
/// Returns a JSONB object: `{"success": true, "schema_id": ..., "message": ...}`
/// on success, or `{"success": false, ...}` carrying an `error` string when the
/// resource cannot be added or the schema fails to compile.
/// Re-caching an existing id overwrites its entry in the id map.
#[pg_extern(strict)]
fn cache_schema(schema_id: &str, schema: JsonB) -> JsonB {
    // Hold the write lock for the whole operation so compile + insert are atomic.
    let mut guard = SCHEMA_CACHE.write().unwrap();

    // Register under a synthetic URN so ids never collide with real URLs.
    let resource_url = format!("urn:jspg:{}", schema_id);

    let mut compiler = Compiler::new();
    // Treat "format" keywords as assertions rather than annotations.
    compiler.enable_format_assertions();

    let document: Value = schema.0;
    if let Err(e) = compiler.add_resource(&resource_url, document) {
        return JsonB(json!({
            "success": false,
            "error": format!("Failed to add schema resource '{}': {}", schema_id, e)
        }));
    }

    match compiler.compile(&resource_url, &mut guard.schemas) {
        Ok(index) => {
            guard.id_to_index.insert(schema_id.to_string(), index);
            JsonB(json!({
                "success": true,
                "schema_id": schema_id,
                "message": "Schema cached successfully."
            }))
        }
        Err(e) => JsonB(json!({
            "success": false,
            "schema_id": schema_id,
            "error": format!("Schema compilation failed: {}", e)
        })),
    }
}
// Check if a schema is cached
#[pg_extern(immutable, strict, parallel_safe)]
fn schema_cached(schema_id: &str) -> bool {
SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
}
// Validate JSONB instance against a cached schema by ID
#[pg_extern(immutable, strict, parallel_safe)]
#[pg_extern(strict, parallel_safe)]
fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
let cache = SCHEMA_CACHE.read().unwrap();
let compiled_schema: &Validator = match cache.get(schema_id) {
Some(schema) => schema,
None => {
return JsonB(json!({
"valid": false,
"errors": [format!("Schema ID '{}' not cached", schema_id)]
}));
match cache.id_to_index.get(schema_id) {
None => JsonB(json!({
"success": false,
"errors": [{
"kind": "SchemaNotFound",
"message": format!("Schema with id '{}' not found in cache", schema_id)
}]
})),
Some(sch_index) => {
let instance_value: Value = instance.0;
match cache.schemas.validate(&instance_value, *sch_index) {
Ok(_) => JsonB(json!({ "success": true })),
Err(validation_error) => {
let error_details = format_boon_errors(&validation_error);
JsonB(json!({
"success": false,
"errors": [error_details]
}))
}
}
}
};
if compiled_schema.is_valid(&instance.0) {
JsonB(json!({ "valid": true }))
} else {
let errors: Vec<String> = compiled_schema
.iter_errors(&instance.0)
.map(|e| e.to_string())
.collect();
JsonB(json!({ "valid": false, "errors": errors }))
}
}
// Clear the entire schema cache explicitly
#[pg_extern(immutable, parallel_safe)]
fn clear_schema_cache() -> bool {
SCHEMA_CACHE.write().unwrap().clear();
true
fn format_boon_errors(error: &ValidationError) -> Value {
json!({
"instance_path": error.instance_location.to_string(),
"schema_path": error.schema_url.to_string(),
"kind": format!("{:?}", error.kind),
"message": format!("{}", error),
"causes": error
.causes
.iter()
.map(format_boon_errors)
.collect::<Vec<_>>()
})
}
/// Drop every cached schema, resetting the cache to its pristine state.
///
/// Returns `{"success": true, "message": "Schema cache cleared."}` as JSONB.
#[pg_extern(strict)]
fn clear_schema_cache() -> JsonB {
    let mut guard = SCHEMA_CACHE.write().unwrap();
    // Replace the compiled-schema store outright; the id map is cleared in place.
    guard.schemas = Schemas::new();
    guard.id_to_index.clear();
    JsonB(json!({
        "success": true,
        "message": "Schema cache cleared."
    }))
}
/// List the ids of all currently cached schemas.
///
/// Returns `{"cached_schema_ids": [...]}` as JSONB. Ids are sorted so the
/// output is deterministic: `HashMap` key iteration order is randomized per
/// process, which previously made repeated calls return the same set of ids
/// in differing array orders.
#[pg_extern(strict, parallel_safe)]
fn show_schema_cache() -> JsonB {
    let cache = SCHEMA_CACHE.read().unwrap();
    let mut ids: Vec<&String> = cache.id_to_index.keys().collect();
    ids.sort(); // stable, deterministic ordering for callers and tests
    JsonB(json!({
        "cached_schema_ids": ids
    }))
}
#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
use pgrx::*;
use pgrx::pg_test;
use super::*;
use serde_json::json;
fn jsonb(val: Value) -> JsonB {
JsonB(val)
}
fn setup_test() {
clear_schema_cache();
}
#[pg_test]
fn test_cache_and_validate_schema() {
assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
assert!(crate::schema_cached("test_schema"));
setup_test();
let schema_id = "my_schema";
let schema = json!({
"type": "object",
"properties": {
"name": { "type": "string" },
"age": { "type": "integer", "minimum": 0 }
},
"required": ["name", "age"]
});
let valid_instance = json!({ "name": "Alice", "age": 30 });
let invalid_instance_type = json!({ "name": "Bob", "age": -5 });
let invalid_instance_missing = json!({ "name": "Charlie" });
let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
assert_eq!(result_valid.0["valid"], true);
let cache_result = cache_schema(schema_id, jsonb(schema.clone()));
assert!(cache_result.0["success"].as_bool().unwrap());
let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
assert_eq!(result_invalid.0["valid"], false);
assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type"));
let valid_result = validate_schema(schema_id, jsonb(valid_instance));
assert!(valid_result.0["success"].as_bool().unwrap());
let invalid_result_type = validate_schema(schema_id, jsonb(invalid_instance_type));
assert!(!invalid_result_type.0["success"].as_bool().unwrap());
// --- Assertions for invalid_result_type ---
// Get top-level errors
let top_level_errors = invalid_result_type.0["errors"].as_array().expect("Top-level 'errors' should be an array");
assert_eq!(top_level_errors.len(), 1, "Should have exactly one top-level error for invalid type");
// Get the first (and only) top-level error
let top_level_error = top_level_errors.get(0).expect("Should get the first top-level error");
// Check top-level error kind
assert!(top_level_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Schema { url:")),
"Incorrect kind for top-level error. Expected 'Schema {{ url:'. Error: {:?}. All errors: {:?}", top_level_error, top_level_errors);
// Get the 'causes' array from the top-level error
let causes_age = top_level_error.get("causes").and_then(Value::as_array).expect("Top-level error 'causes' should be an array");
assert_eq!(causes_age.len(), 1, "Should have one cause for the age error");
// Get the actual age error from the 'causes' array
let age_error = causes_age.get(0).expect("Should have an error object in 'causes'");
assert_eq!(age_error.get("instance_path").and_then(Value::as_str), Some("/age"),
"Incorrect instance_path for age error. Error: {:?}. All errors: {:?}", age_error, top_level_errors);
assert!(age_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Minimum { got:")),
"Incorrect kind prefix for age error. Expected 'Minimum {{ got:'. Error: {:?}. All errors: {:?}", age_error, top_level_errors);
let expected_prefix = "at '/age': must be >=0";
assert!(age_error.get("message")
.and_then(Value::as_str)
.map_or(false, |m| m.starts_with(expected_prefix)),
"Incorrect message prefix for age error. Expected prefix '{}'. Error: {:?}. All errors: {:?}",
expected_prefix, age_error, top_level_errors);
let invalid_result_missing = validate_schema(schema_id, jsonb(invalid_instance_missing));
assert!(!invalid_result_missing.0["success"].as_bool().unwrap(), "Validation should fail for missing required field");
// --- Assertions for invalid_result_missing ---
// Get top-level errors
let top_level_errors_missing = invalid_result_missing.0["errors"].as_array().expect("Errors should be an array for missing field");
assert_eq!(top_level_errors_missing.len(), 1, "Should have one top-level error for missing field");
// Get the first (and only) top-level error
let top_error_missing = top_level_errors_missing.get(0).expect("Should get the first top-level missing field error");
// Check top-level error kind
assert!(top_error_missing.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Schema { url:")),
"Incorrect kind for missing field top-level error. Error: {:?}. All errors: {:?}", top_error_missing, top_level_errors_missing);
// Get the 'causes' array from the top-level error
let causes_missing = top_error_missing.get("causes").and_then(Value::as_array).expect("Causes should be an array for missing field error");
assert_eq!(causes_missing.len(), 1, "Should have one cause for missing field");
// Get the actual missing field error from the 'causes' array
let missing_error = causes_missing.get(0).expect("Should have missing field error object in 'causes'");
// Assertions on the specific missing field error
assert_eq!(missing_error.get("instance_path").and_then(Value::as_str), Some(""),
"Incorrect instance_path for missing age error: {:?}", missing_error);
assert!(missing_error.get("kind").and_then(Value::as_str).map_or(false, |k| k.starts_with("Required { want: [\"age\"]")),
"Incorrect kind for missing age error. Expected prefix 'Required {{ want: [\"age\"] }}'. Error: {:?}", missing_error);
}
#[pg_test]
fn test_schema_not_cached() {
let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
assert_eq!(result.0["valid"], false);
assert!(result.0["errors"][0].as_str().unwrap().contains("not cached"));
}
#[pg_test]
fn test_clear_schema_cache() {
crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
assert!(crate::schema_cached("clear_test"));
crate::clear_schema_cache();
assert!(!crate::schema_cached("clear_test"));
setup_test();
let instance = json!({ "foo": "bar" });
let result = validate_schema("non_existent_schema", jsonb(instance));
assert!(!result.0["success"].as_bool().unwrap());
let errors = result.0["errors"].as_array().unwrap();
assert_eq!(errors.len(), 1);
assert_eq!(errors[0]["kind"], json!("SchemaNotFound"));
assert!(errors[0]["message"].as_str().unwrap().contains("non_existent_schema"));
}
#[pg_test]
fn test_invalid_schema_cache() {
let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
assert!(!result);
assert!(!crate::schema_cached("bad_schema"));
setup_test();
let schema_id = "invalid_schema";
let invalid_schema_json = "{\"type\": \"string\" \"maxLength\": 5}";
let invalid_schema_value: Result<Value, _> = serde_json::from_str(invalid_schema_json);
assert!(invalid_schema_value.is_err(), "Test setup assumes invalid JSON string");
let schema_representing_invalid = json!({
"type": 123
});
let result = cache_schema(schema_id, jsonb(schema_representing_invalid.clone()));
assert!(!result.0["success"].as_bool().unwrap());
assert!(result.0["error"].as_str().unwrap().contains("Schema compilation failed"));
}
#[pg_test]
fn test_detailed_validation_errors() {
setup_test();
let schema_id = "detailed_schema";
let schema = json!({
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": { "type": "string" },
"city": { "type": "string", "maxLength": 10 }
},
"required": ["street", "city"]
}
},
"required": ["address"]
});
let invalid_instance = json!({
"address": {
"city": "San Francisco Bay Area"
}
});
assert!(cache_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
let result = validate_schema(schema_id, jsonb(invalid_instance));
assert!(!result.0["success"].as_bool().unwrap());
let errors = result.0["errors"].as_array().expect("Errors should be an array");
let top_error = errors.get(0).expect("Expected at least one top-level error object");
let causes = top_error.get("causes").and_then(Value::as_array).expect("Expected causes array");
let has_required_street_error = causes.iter().any(|e|
e.get("instance_path").and_then(Value::as_str) == Some("/address") && // Check path inside cause
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("Required { want:") && // Check kind prefix
e.get("kind").and_then(Value::as_str).unwrap_or("").contains("street") // Ensure 'street' is mentioned
);
assert!(has_required_street_error, "Missing required 'street' error within causes. Actual errors: {:?}", errors);
let has_maxlength_city_error = causes.iter().any(|e| // Check within causes
e.get("instance_path").and_then(Value::as_str) == Some("/address/city") &&
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("MaxLength { got:") // Check kind prefix
);
assert!(has_maxlength_city_error, "Missing maxLength 'city' error within causes. Actual errors: {:?}", errors);
}
#[pg_test]
fn test_oneof_validation_errors() {
setup_test();
let schema_id = "oneof_schema";
let schema = json!({
"type": "object",
"properties": {
"value": {
"oneOf": [
{ "type": "string", "minLength": 5 },
{ "type": "number", "minimum": 10 }
]
}
},
"required": ["value"]
});
assert!(cache_schema(schema_id, jsonb(schema.clone())).0["success"].as_bool().unwrap());
let invalid_instance = json!({ "value": "abc" });
let result = validate_schema(schema_id, jsonb(invalid_instance));
assert!(!result.0["success"].as_bool().unwrap());
let errors_val = result.0["errors"].as_array().expect("Errors should be an array");
let top_schema_error = errors_val.get(0).expect("Expected at least one top-level Schema error object");
let schema_error_causes = top_schema_error.get("causes").and_then(Value::as_array).expect("Expected causes array for Schema error");
let oneof_error = schema_error_causes.iter().find(|e| {
e.get("kind").and_then(Value::as_str) == Some("OneOf(None)") &&
e.get("instance_path").and_then(Value::as_str) == Some("/value")
}).expect("Could not find the OneOf(None) error for /value within Schema causes");
let oneof_causes = oneof_error.get("causes").and_then(Value::as_array)
.expect("Expected causes array for OneOf error");
let has_minlength_error = oneof_causes.iter().any(|e| // Check within OneOf causes
e.get("instance_path").and_then(Value::as_str) == Some("/value") &&
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("MinLength { got:") // Check kind prefix
);
assert!(has_minlength_error, "Missing MinLength error within OneOf causes. Actual errors: {:?}", errors_val);
let has_type_error = oneof_causes.iter().any(|e| // Check within OneOf causes
e.get("instance_path").and_then(Value::as_str) == Some("/value") &&
e.get("kind").and_then(Value::as_str).unwrap_or("").starts_with("Type { got: String, want: Types") // More specific kind check
);
assert!(has_type_error, "Missing Type error within OneOf causes. Actual errors: {:?}", errors_val);
}
#[pg_test]
fn test_clear_schema_cache() {
setup_test();
let schema_id = "schema_to_clear";
let schema = json!({ "type": "string" });
cache_schema(schema_id, jsonb(schema.clone()));
let show_result1 = show_schema_cache();
assert!(show_result1.0["cached_schema_ids"].as_array().unwrap().iter().any(|id| id.as_str() == Some(schema_id)));
let clear_result = clear_schema_cache();
assert!(clear_result.0["success"].as_bool().unwrap());
let show_result2 = show_schema_cache();
assert!(show_result2.0["cached_schema_ids"].as_array().unwrap().is_empty());
let instance = json!("test");
let validate_result = validate_schema(schema_id, jsonb(instance));
assert!(!validate_result.0["success"].as_bool().unwrap());
assert_eq!(validate_result.0["errors"].as_array().unwrap()[0]["kind"], json!("SchemaNotFound"));
}
#[pg_test]
fn test_show_schema_cache() {
setup_test();
let schema_id1 = "schema1";
let schema_id2 = "schema2";
let schema = json!({ "type": "boolean" });
cache_schema(schema_id1, jsonb(schema.clone()));
cache_schema(schema_id2, jsonb(schema.clone()));
let result = show_schema_cache();
let ids = result.0["cached_schema_ids"].as_array().unwrap();
assert_eq!(ids.len(), 2);
assert!(ids.contains(&json!(schema_id1)));
assert!(ids.contains(&json!(schema_id2)));
}
}
/// Hooks required by the pgrx `#[pg_test]` framework.
#[cfg(test)]
pub mod pg_test {
    /// Perform one-off initialization when the pg_test framework starts.
    pub fn setup(_options: Vec<&str>) {
        // No initialization currently required.
    }

    /// Return any postgresql.conf settings required for the tests.
    pub fn postgresql_conf_options() -> Vec<&'static str> {
        vec![]
    }
}
}

View File

@ -1 +1 @@
1.0.1
1.0.9