first commit of jspg extension
This commit is contained in:
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
/target
|
||||||
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
[submodule "flows"]
|
||||||
|
path = flows
|
||||||
|
url = git@gitea-ssh.thoughtpatterns.ai:cellular/flows.git
|
||||||
37
Cargo.toml
Normal file
37
Cargo.toml
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
[package]
name = "jspg"
version = "0.1.0"
edition = "2024"

[lib]
# cdylib: loadable PostgreSQL extension; lib: usable as a Rust dependency.
crate-type = ["cdylib", "lib"]

[[bin]]
name = "jspg"
path = "./src/bin/jspg.rs"

[features]
default = ["pg17"]
pg17 = ["pgrx/pg17", "pgrx-tests/pg17"]
pg_test = []

# NOTE: the original manifest declared `[dependencies]` twice (once empty,
# once populated). TOML forbids redefining a table and Cargo rejects the
# file, so the two sections are merged here.
[dependencies]
pgrx = "0.14.0"
serde = "1.0.219"
serde_json = "1.0.140"
jsonschema = { version = "0.29.1", default-features = false }

[dev-dependencies]
pgrx-tests = "0.14.0"

[profile.dev]
# pgrx requires unwind panics so errors can propagate into Postgres.
panic = "unwind"
lto = "thin"

[profile.release]
panic = "unwind"
opt-level = 3
lto = "fat"
codegen-units = 1
123
flow
Executable file
123
flow
Executable file
@ -0,0 +1,123 @@
|
|||||||
|
#!/bin/bash

# Shared flow helpers: colors/base commands and the release machinery.
source "flows/base"
source "flows/release"

# --- Configuration ---
# pg_config is tricky as it's not always just a command but needs PATH setup;
# it is checked separately in postgres-prepare below.
DEPENDENCIES=(cargo git icu4c pkg-config)
CARGO_DEPENDENCIES=(cargo-pgrx)
GITEA_ORGANIZATION="cellular"
GITEA_REPOSITORY="jspg"
PACKAGE_NAME="jspg"
|
# Verify cargo-based tooling is present, installing missing tools via
# `cargo install`; exits non-zero if an install fails.
cargo-prepare() {
    echo -e "${BLUE}Checking Cargo dependencies...${RESET}"
    for DEP in "${CARGO_DEPENDENCIES[@]}"; do
        # Guard-first: the common case is the tool already being on PATH.
        if command -v "${DEP}" &> /dev/null; then
            echo -e "✅ ${GREEN}${DEP}: installed${RESET}"
        else
            echo -e "${YELLOW}Attempting to install ${DEP} via cargo...${RESET}"
            if cargo install "${DEP}"; then
                echo -e "✅ ${GREEN}${DEP}: installed via cargo${RESET}"
            else
                echo -e "❌ ${RED}${DEP}: failed to install via cargo${RESET}"
                exit 1
            fi
        fi
    done
    echo -e "${GREEN}All Cargo dependencies met.${RESET}"
}
|
# Ensure PostgreSQL development headers (pg_config) are reachable on PATH.
postgres-prepare() {
    echo -e "${BLUE}Checking PostgreSQL dependencies...${RESET}"
    if command -v pg_config &> /dev/null; then
        echo -e "✅ ${GREEN}pg_config: installed${RESET}"
    else
        echo -e "❌ ${RED}pg_config: missing. Ensure PostgreSQL development headers are installed and pg_config is in your PATH.${RESET}"
        exit 1
    fi
    echo -e "${GREEN}PostgreSQL dependencies met.${RESET}"
}
|
# Build function (used by release flow).
# Compiles the extension in release mode and copies the shared library and
# SQL file into ./$PACKAGE_DIRECTORY/. Returns non-zero on any failure.
build() {
    local version
    version=$(get-version)
    echo -e "📦 ${CYAN}Building release v$version for $PACKAGE_NAME...${RESET}"

    # Build with cargo pgrx install --release
    if ! cargo pgrx install --release; then
        echo -e "❌ ${RED}Build failed during cargo pgrx install.${RESET}" >&2
        return 1
    fi

    # Create package directory
    mkdir -p "$PACKAGE_DIRECTORY"

    # Find and copy artifacts (adjust paths if needed).
    # Assuming standard output locations for pgrx.
    local pgrx_target_dir="target/release"
    local sql_file="${pgrx_target_dir}/${PACKAGE_NAME}.sql"

    # Locate the shared library. BUGFIX: `find` exits 0 even when nothing
    # matches, so chaining finds with `||` never tried the .dylib/.dll
    # patterns; probe each platform extension until output is non-empty.
    local so_file=""
    local pattern
    for pattern in "lib${PACKAGE_NAME}*.so" "${PACKAGE_NAME}*.dylib" "${PACKAGE_NAME}*.dll"; do
        so_file=$(find "${pgrx_target_dir}" -maxdepth 1 -name "$pattern" -print -quit)
        [ -n "$so_file" ] && break
    done

    if [ -z "$so_file" ] || [ ! -f "$so_file" ]; then
        echo -e "❌ ${RED}Could not find shared library (.so/.dylib/.dll) in ${pgrx_target_dir}${RESET}" >&2
        return 1
    fi
    if [ ! -f "$sql_file" ]; then
        echo -e "❌ ${RED}Could not find SQL file ($sql_file)${RESET}" >&2
        return 1
    fi

    echo -e " ${CYAN}Copying artifacts to $PACKAGE_DIRECTORY...${RESET}"
    cp "$so_file" "$PACKAGE_DIRECTORY/"
    cp "$sql_file" "$PACKAGE_DIRECTORY/"

    echo -e "✨ ${GREEN}Build v$version complete. Artifacts ready in ./$PACKAGE_DIRECTORY/${RESET}"
}
||||||
|
# Install for development (non-release); forwards extra args to cargo pgrx.
install() {
    echo -e "🔧 ${CYAN}Installing jspg extension (dev build)...${RESET}"
    cargo pgrx install "$@" # Pass any extra args like --debug
}
|
# Run pgrx integration tests; forwards extra args to cargo pgrx.
# NOTE(review): a function named `test` shadows the shell builtin `test`;
# sourced flow code that uses `test expr` (instead of `[ ... ]`) would call
# this function — confirm the sourced flows don't rely on the builtin.
test() {
    echo -e "🧪 ${CYAN}Running jspg tests...${RESET}"
    cargo pgrx test "$@"
}
|
||||||
|
# Remove pgrx build artifacts.
clean() {
    echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}"
    cargo pgrx clean
}
|
||||||
|
# Print the project-specific command reference (appended to base usage).
usage() {
    echo -e " install [opts] Build and install the extension locally (dev)."
    echo -e " test [opts] Run pgrx integration tests."
    echo -e " clean Remove pgrx build artifacts."
    echo -e " build Build release artifacts into ./package/ (called by release)."
    echo -e " tag Tag the current version (called by release)."
    echo -e " package Upload artifacts from ./package/ (called by release)."
    echo -e " release Perform a full release (increments patch, builds, tags, pushes, packages)."
}
|
||||||
|
# --- Command Dispatcher ---
case "$1" in
    prepare) base prepare; cargo-prepare; postgres-prepare;;
    install) install "${@:2}";;
    test)    test "${@:2}";;
    clean)   clean;;
    build)   build;;
    tag)     tag;;     # From release flow
    package) package;; # From release flow
    release) release;;
    *)       base "$@";; # Handles base update and unknown commands via base-usage
esac
1
flows
Submodule
1
flows
Submodule
Submodule flows added at 8431378080
125
src/lib.rs
Normal file
125
src/lib.rs
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
use pgrx::*;
|
||||||
|
use jsonschema::{JSONSchema, Draft};
|
||||||
|
use serde_json::{json, Value};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::RwLock;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
|
||||||
|
pg_module_magic!();
|
||||||
|
|
||||||
|
// Global, thread-safe schema cache
|
||||||
|
lazy_static! {
|
||||||
|
static ref SCHEMA_CACHE: RwLock<HashMap<String, JSONSchema>> = RwLock::new(HashMap::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cache a schema explicitly with a provided ID
|
||||||
|
#[pg_extern(immutable, strict, parallel_safe)]
|
||||||
|
fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
|
||||||
|
match JSONSchema::options()
|
||||||
|
.with_draft(Draft::Draft7)
|
||||||
|
.should_validate_formats(true)
|
||||||
|
.compile(&schema.0)
|
||||||
|
{
|
||||||
|
Ok(compiled) => {
|
||||||
|
SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
|
||||||
|
true
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
notice!("Failed to cache schema '{}': {}", schema_id, e);
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if a schema is cached
|
||||||
|
#[pg_extern(immutable, strict, parallel_safe)]
|
||||||
|
fn schema_cached(schema_id: &str) -> bool {
|
||||||
|
SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate JSONB instance against a cached schema by ID
|
||||||
|
#[pg_extern(immutable, strict, parallel_safe)]
|
||||||
|
fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
|
||||||
|
let cache = SCHEMA_CACHE.read().unwrap();
|
||||||
|
let compiled_schema = match cache.get(schema_id) {
|
||||||
|
Some(schema) => schema,
|
||||||
|
None => {
|
||||||
|
return JsonB(json!({
|
||||||
|
"valid": false,
|
||||||
|
"errors": [format!("Schema ID '{}' not cached", schema_id)]
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if compiled_schema.is_valid(&instance.0) {
|
||||||
|
JsonB(json!({ "valid": true }))
|
||||||
|
} else {
|
||||||
|
let errors: Vec<String> = compiled_schema
|
||||||
|
.iter_errors(&instance.0)
|
||||||
|
.map(|e| e.to_string())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
JsonB(json!({ "valid": false, "errors": errors }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear the entire schema cache explicitly
|
||||||
|
#[pg_extern(immutable, parallel_safe)]
|
||||||
|
fn clear_schema_cache() -> bool {
|
||||||
|
SCHEMA_CACHE.write().unwrap().clear();
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
    use pgrx::*;
    use serde_json::json;

    #[pg_test]
    fn test_cache_and_validate_schema() {
        // Cache a trivial object schema, then validate one conforming and
        // one non-conforming instance against it.
        assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
        assert!(crate::schema_cached("test_schema"));

        let ok_report = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
        assert_eq!(ok_report.0["valid"], true);

        let bad_report = crate::validate_schema("test_schema", JsonB(json!(42)));
        assert_eq!(bad_report.0["valid"], false);
        assert!(bad_report.0["errors"][0].as_str().unwrap().contains("not of type"));
    }

    #[pg_test]
    fn test_schema_not_cached() {
        // Validating against an ID that was never cached reports an error.
        let report = crate::validate_schema("unknown_schema", JsonB(json!({})));
        assert_eq!(report.0["valid"], false);
        assert!(report.0["errors"][0].as_str().unwrap().contains("not cached"));
    }

    #[pg_test]
    fn test_clear_schema_cache() {
        crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
        assert!(crate::schema_cached("clear_test"));

        crate::clear_schema_cache();
        assert!(!crate::schema_cached("clear_test"));
    }

    #[pg_test]
    fn test_invalid_schema_cache() {
        // A schema with an unknown `type` fails compilation and must never
        // enter the cache.
        let cached = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
        assert!(!cached);
        assert!(!crate::schema_cached("bad_schema"));
    }
}
|
||||||
|
// Hooks required by the pgrx test harness.
#[cfg(test)]
pub mod pg_test {
    // One-time setup before the test Postgres instance starts.
    pub fn setup(_options: Vec<&str>) {
        // Initialization if needed.
    }

    // Extra postgresql.conf settings for the test instance (none needed).
    pub fn postgresql_conf_options() -> Vec<&'static str> {
        vec![]
    }
}
Reference in New Issue
Block a user