install and build flow cleanup
.gitignore (vendored, 1 line changed)
@@ -1 +1,2 @@
/target
/package
flow (83 lines changed)
@@ -35,34 +35,42 @@ pgrx-prepare() {
}

build() {
    local version=$(get-version)
    echo -e "📦 ${CYAN}Preparing source package v$version for $PACKAGE_NAME...${RESET}"

    local version
    version=$(get-version) || return 1
    local package_dir="./package"
    local tarball_name="${GITEA_REPOSITORY}-src-v${version}.tar.gz"
    local tarball_path="${package_dir}/${tarball_name}"

    echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."

    # Clean previous package dir
    rm -rf "${package_dir}"
    mkdir -p "${package_dir}"

    # Copy necessary source files
    echo -e " ${CYAN}Copying source files to ${package_dir}${RESET}"
    cp -R src "${package_dir}/"
    cp Cargo.toml "${package_dir}/"
    cp Cargo.lock "${package_dir}/"
    cp jspg.control "${package_dir}/"

    # Verify files copied
    if [ ! -d "${package_dir}/src" ] || [ ! -f "${package_dir}/Cargo.toml" ] || [ ! -f "${package_dir}/Cargo.lock" ] || [ ! -f "${package_dir}/jspg.control" ]; then
        echo -e "❌ ${RED}Failed to copy all source files to ${package_dir}.${RESET}"
        exit 1
    fi

    # Create the source tarball excluding specified patterns
    echo -e " ${CYAN}Creating tarball: ${tarball_path}${RESET}"
    if tar --exclude='.git*' --exclude='./target' --exclude='./package' -czf "${tarball_path}" .; then
        echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
    else
        echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
        return 1
    fi

    echo -e "✨ ${GREEN}Source package prepared in ${package_dir}${RESET}"
}

install() {
    echo -e "🔧 ${CYAN}Installing jspg extension (dev build)...${RESET}"
    cargo pgrx install "$@" # Pass any extra args like --debug
    local version
    version=$(get-version) || return 1

    echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"

    # Run the pgrx install command
    # It implicitly uses --release unless --debug is passed
    # It finds pg_config or you can add flags like --pg-config if needed
    if ! cargo pgrx install "$@"; then # Pass any extra args like --debug
        echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
        return 1
    fi
    echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
}

test() {
@@ -75,45 +83,6 @@ clean() {
    cargo clean # Use standard cargo clean
}

# Override base package function to create and upload a source tarball
package() {
    local version
    version=$(get-version) || return 1
    local package_dir="./package"
    local tarball_name="${GITEA_REPOSITORY}-src-v${version}.tar.gz"
    local tarball_path="${package_dir}/${tarball_name}"

    echo -e "📦 ${CYAN}Creating source tarball ${tarball_name}...${RESET}"

    # Ensure the package directory exists and has content
    if [ ! -d "$package_dir" ] || [ -z "$(ls -A "$package_dir")" ]; then
        echo -e "❌ ${RED}Source files not found in $package_dir. Run 'flow build' first.${RESET}" >&2
        return 1
    fi

    # Create the tarball from the contents of the package directory
    if tar -czf "${tarball_path}" -C "${package_dir}" .; then
        echo -e "✨ ${GREEN}Created source tarball: ${tarball_path}${RESET}"
    else
        echo -e "❌ ${RED}Failed to create tarball.${RESET}" >&2
        return 1
    fi

    echo -e "📤 ${CYAN}Uploading ${tarball_name} to Gitea...${RESET}"
    if curl -X PUT \
        -H "Authorization: token $GITEA_TOKEN" \
        "$GITEA_API_URL/packages/$GITEA_ORGANIZATION/generic/$GITEA_REPOSITORY/$version/$tarball_name" \
        -H "Content-Type: application/gzip" \
        -T "${tarball_path}" \
        -f > /dev/null; then
        echo -e "✨ ${GREEN}Successfully uploaded ${tarball_name}${RESET}"
    else
        echo -e "❌ ${RED}Failed to upload ${tarball_name}${RESET}" >&2
        # Clean up tarball on failure? Maybe not, user might want it.
        return 1
    fi
}

jspg-usage() {
    echo -e " ${CYAN}JSPG Commands:${RESET}"
    echo -e " prepare Check OS, Cargo, and PGRX dependencies."
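With this change the script splits into a local dev path and a release path: install builds and installs the extension into the local PostgreSQL via cargo pgrx install, while build stages the sources into ./package and package tars them up and uploads the tarball to Gitea's generic package registry. A minimal usage sketch, assuming the script is executable as ./flow from the repository root, that subcommands dispatch to the shell functions of the same name, and that GITEA_TOKEN, GITEA_API_URL, GITEA_ORGANIZATION, and GITEA_REPOSITORY are already set (e.g. exported in the environment):

    # dev loop: compile and install the extension into the local PostgreSQL cluster
    ./flow install        # add --debug for a debug build, per the script's own comment

    # release path: stage sources into ./package, then tar and upload to Gitea
    ./flow build
    ./flow package

    # other helpers from the flow script
    ./flow test           # run the pgrx test suite
    ./flow clean          # cargo clean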
package/Cargo.lock (generated, 2943 lines changed)
File diff suppressed because it is too large.
package/Cargo.toml (deleted)
@@ -1,35 +0,0 @@
[package]
name = "jspg"
version = "0.1.0"
edition = "2021"

[dependencies]
pgrx = "0.14.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
jsonschema = "0.29.1"
lazy_static = "1.5.0"

[dev-dependencies]
pgrx-tests = "0.14.0"

[lib]
crate-type = ["cdylib", "lib"]

[[bin]]
name = "pgrx_embed_jspg"
path = "src/bin/pgrx_embed.rs"

[features]
pg17 = ["pgrx/pg17", "pgrx-tests/pg17"]
pg_test = []

[profile.dev]
panic = "unwind"
lto = "thin"

[profile.release]
panic = "unwind"
opt-level = 3
lto = "fat"
codegen-units = 1
package/jspg.control (deleted)
@@ -1,5 +0,0 @@
comment = 'jspg'
default_version = '@CARGO_VERSION@'
module_pathname = '$libdir/jspg'
relocatable = false
superuser = false
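The control file above is what CREATE EXTENSION reads once the artifacts are installed; cargo pgrx replaces the @CARGO_VERSION@ placeholder with the crate version from Cargo.toml at build time. A hypothetical smoke test after flow install, assuming a local database named testdb (the database name and psql invocation are illustrative only):

    # enable the extension and exercise the validator functions it exposes
    psql -d testdb -c "CREATE EXTENSION jspg;"
    psql -d testdb -c "SELECT cache_schema('person', '{\"type\": \"object\"}'::jsonb);"
    psql -d testdb -c "SELECT validate_schema('person', '{\"name\": \"Ada\"}'::jsonb);"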
package/src/bin/pgrx_embed.rs (deleted)
@@ -1 +0,0 @@
::pgrx::pgrx_embed!();
package/src/lib.rs (deleted)
@@ -1,126 +0,0 @@
use pgrx::*;
use jsonschema::{Draft, Validator};
use serde_json::json;
use std::collections::HashMap;
use std::sync::RwLock;
use lazy_static::lazy_static;
use jsonschema;

pg_module_magic!();

// Global, thread-safe schema cache using the correct Validator type
lazy_static! {
    static ref SCHEMA_CACHE: RwLock<HashMap<String, Validator>> = RwLock::new(HashMap::new());
}

// Cache a schema explicitly with a provided ID
#[pg_extern(immutable, strict, parallel_safe)]
fn cache_schema(schema_id: &str, schema: JsonB) -> bool {
    match jsonschema::options()
        .with_draft(Draft::Draft7)
        .should_validate_formats(true)
        .build(&schema.0)
    {
        Ok(compiled) => {
            SCHEMA_CACHE.write().unwrap().insert(schema_id.to_string(), compiled);
            true
        },
        Err(e) => {
            notice!("Failed to cache schema '{}': {}", schema_id, e);
            false
        }
    }
}

// Check if a schema is cached
#[pg_extern(immutable, strict, parallel_safe)]
fn schema_cached(schema_id: &str) -> bool {
    SCHEMA_CACHE.read().unwrap().contains_key(schema_id)
}

// Validate JSONB instance against a cached schema by ID
#[pg_extern(immutable, strict, parallel_safe)]
fn validate_schema(schema_id: &str, instance: JsonB) -> JsonB {
    let cache = SCHEMA_CACHE.read().unwrap();
    let compiled_schema: &Validator = match cache.get(schema_id) {
        Some(schema) => schema,
        None => {
            return JsonB(json!({
                "valid": false,
                "errors": [format!("Schema ID '{}' not cached", schema_id)]
            }));
        }
    };

    if compiled_schema.is_valid(&instance.0) {
        JsonB(json!({ "valid": true }))
    } else {
        let errors: Vec<String> = compiled_schema
            .iter_errors(&instance.0)
            .map(|e| e.to_string())
            .collect();

        JsonB(json!({ "valid": false, "errors": errors }))
    }
}

// Clear the entire schema cache explicitly
#[pg_extern(immutable, parallel_safe)]
fn clear_schema_cache() -> bool {
    SCHEMA_CACHE.write().unwrap().clear();
    true
}

#[pg_schema]
#[cfg(any(test, feature = "pg_test"))]
mod tests {
    use pgrx::*;
    use serde_json::json;

    #[pg_test]
    fn test_cache_and_validate_schema() {
        assert!(crate::cache_schema("test_schema", JsonB(json!({ "type": "object" }))));
        assert!(crate::schema_cached("test_schema"));

        let result_valid = crate::validate_schema("test_schema", JsonB(json!({ "foo": "bar" })));
        assert_eq!(result_valid.0["valid"], true);

        let result_invalid = crate::validate_schema("test_schema", JsonB(json!(42)));
        assert_eq!(result_invalid.0["valid"], false);
        assert!(result_invalid.0["errors"][0].as_str().unwrap().contains("not of type"));
    }

    #[pg_test]
    fn test_schema_not_cached() {
        let result = crate::validate_schema("unknown_schema", JsonB(json!({})));
        assert_eq!(result.0["valid"], false);
        assert!(result.0["errors"][0].as_str().unwrap().contains("not cached"));
    }

    #[pg_test]
    fn test_clear_schema_cache() {
        crate::cache_schema("clear_test", JsonB(json!({ "type": "object" })));
        assert!(crate::schema_cached("clear_test"));

        crate::clear_schema_cache();
        assert!(!crate::schema_cached("clear_test"));
    }

    #[pg_test]
    fn test_invalid_schema_cache() {
        let result = crate::cache_schema("bad_schema", JsonB(json!({ "type": "unknown_type" })));
        assert!(!result);
        assert!(!crate::schema_cached("bad_schema"));
    }
}

#[cfg(test)]
pub mod pg_test {
    pub fn setup(_options: Vec<&str>) {
        // Initialization if needed
    }

    pub fn postgresql_conf_options() -> Vec<&'static str> {
        vec![]
    }
}
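The #[pg_test] functions above are executed by the pgrx test harness inside a temporary PostgreSQL instance rather than as ordinary in-process unit tests; the pg_test module at the bottom of the file supplies per-run setup and postgresql.conf options. A sketch of the usual invocation, assuming pgrx 0.14 has been initialized for PostgreSQL 17 (matching the pg17 feature in Cargo.toml):

    # one-time toolchain setup: let pgrx download and manage a PostgreSQL 17 build
    cargo pgrx init --pg17 download

    # run the #[pg_test] suite against a throwaway PostgreSQL 17 instance
    cargo pgrx test pg17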