Compare commits
11 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 3b18901bda | |
| | b8c0e08068 | |
| | c734983a59 | |
| | 9b11f661bc | |
| | f3a733626e | |
| | 2bcdb8adbb | |
| | 3988308965 | |
| | b7f528d1f6 | |
| | 2febb292dc | |
| | d1831a28ec | |
| | c5834ac544 | |
flow (106 lines changed)
```diff
@@ -9,7 +9,7 @@ source ./flows/rust
 # Vars
 POSTGRES_VERSION="17"
 POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
-DEPENDENCIES=(cargo git icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
+DEPENDENCIES+=(icu4c pkg-config "postgresql@${POSTGRES_VERSION}")
 CARGO_DEPENDENCIES=(cargo-pgrx==0.14.0)
 GITEA_ORGANIZATION="cellular"
 GITEA_REPOSITORY="jspg"
@@ -20,133 +20,125 @@ env() {
         # If not set, try to get it from kubectl
         GITEA_TOKEN=$(kubectl get secret -n cellular gitea-git -o jsonpath='{.data.token}' | base64 --decode)
         if [ -z "$GITEA_TOKEN" ]; then
-            echo -e "❌ ${RED}GITEA_TOKEN is not set and couldn't be retrieved from kubectl${RESET}" >&2
-            exit 1
+            error "GITEA_TOKEN is not set and couldn't be retrieved from kubectl" >&2
+            return 2
         fi
         export GITEA_TOKEN
     fi
 
-    echo -e "💰 ${GREEN}Environment variables set${RESET}"
+    success "Environment variables set"
 }
 
 pgrx-prepare() {
-    echo -e "${BLUE}Initializing pgrx...${RESET}"
+    info "Initializing pgrx..."
     # Explicitly point to the postgresql@${POSTGRES_VERSION} pg_config, don't rely on 'which'
     local POSTGRES_CONFIG_PATH="/opt/homebrew/opt/postgresql@${POSTGRES_VERSION}/bin/pg_config"
 
     if [ ! -x "$POSTGRES_CONFIG_PATH" ]; then
-        echo -e "${RED}Error: pg_config not found or not executable at $POSTGRES_CONFIG_PATH.${RESET}"
-        echo -e "${YELLOW}Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew.${RESET}"
-        exit 1
+        error "pg_config not found or not executable at $POSTGRES_CONFIG_PATH."
+        warning "Ensure postgresql@${POSTGRES_VERSION} is installed correctly via Homebrew."
+        return 2
     fi
 
     if cargo pgrx init --pg"$POSTGRES_VERSION"="$POSTGRES_CONFIG_PATH"; then
-        echo -e "${GREEN}pgrx initialized successfully.${RESET}"
+        success "pgrx initialized successfully."
     else
-        echo -e "${RED}Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid.${RESET}"
-        exit 1
+        error "Failed to initialize pgrx. Check PostgreSQL development packages are installed and $POSTGRES_CONFIG_PATH is valid."
+        return 2
     fi
 }
 
 build() {
     local version
-    version=$(get-version) || return 1
+    version=$(get-version) || return $?
     local package_dir="./package"
     local tarball_name="${GITEA_REPOSITORY}.tar.gz"
     local tarball_path="${package_dir}/${tarball_name}"
 
-    echo -e "📦 Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
+    info "Creating source tarball v$version for ${GITEA_REPOSITORY} in $package_dir..."
 
     # Clean previous package dir
     rm -rf "${package_dir}"
     mkdir -p "${package_dir}"
 
     # Create the source tarball excluding specified patterns
-    echo -e "  ${CYAN}Creating tarball: ${tarball_path}${RESET}"
+    info "Creating tarball: ${tarball_path}"
     if tar --exclude='.git*' --exclude='./target' --exclude='./package' --exclude='./flows' --exclude='./flow' -czf "${tarball_path}" .; then
-        echo -e "✨ ${GREEN}Successfully created source tarball: ${tarball_path}${RESET}"
+        success "Successfully created source tarball: ${tarball_path}"
     else
-        echo -e "❌ ${RED}Failed to create source tarball.${RESET}" >&2
-        return 1
+        error "Failed to create source tarball."
+        return 2
     fi
 }
 
 install() {
     local version
-    version=$(get-version) || return 1
+    version=$(get-version) || return $? # Propagate error
 
-    echo -e "🔧 ${CYAN}Building and installing PGRX extension v$version into local PostgreSQL...${RESET}"
+    info "Building and installing PGRX extension v$version into local PostgreSQL..."
 
     # Run the pgrx install command
-    # It implicitly uses --release unless --debug is passed
-    # It finds pg_config or you can add flags like --pg-config if needed
     if ! cargo pgrx install; then
-        echo -e "❌ ${RED}cargo pgrx install command failed.${RESET}" >&2
-        return 1
+        error "cargo pgrx install command failed."
+        return 2
     fi
-    echo -e "✨ ${GREEN}PGRX extension v$version successfully built and installed.${RESET}"
+    success "PGRX extension v$version successfully built and installed."
 
     # Post-install modification to allow non-superuser usage
-    # Get the installation path dynamically using pg_config
     local pg_sharedir
     pg_sharedir=$("$POSTGRES_CONFIG_PATH" --sharedir)
-    if [ -z "$pg_sharedir" ]; then
-        echo -e "❌ ${RED}Failed to determine PostgreSQL shared directory using pg_config.${RESET}" >&2
-        return 1
+    local pg_config_status=$?
+    if [ $pg_config_status -ne 0 ] || [ -z "$pg_sharedir" ]; then
+        error "Failed to determine PostgreSQL shared directory using pg_config."
+        return 2
     fi
     local installed_control_path="${pg_sharedir}/extension/jspg.control"
 
     # Modify the control file
     if [ ! -f "$installed_control_path" ]; then
-        echo -e "❌ ${RED}Installed control file not found: '$installed_control_path'${RESET}" >&2
-        return 1
+        error "Installed control file not found: '$installed_control_path'"
+        return 2
     fi
 
-    echo -e "🔧 ${CYAN}Modifying control file for non-superuser access: ${installed_control_path}${RESET}"
+    info "Modifying control file for non-superuser access: ${installed_control_path}"
     # Use sed -i '' for macOS compatibility
     if sed -i '' '/^superuser = false/d' "$installed_control_path" && \
        echo 'trusted = true' >> "$installed_control_path"; then
-        echo -e "✨ ${GREEN}Control file modified successfully.${RESET}"
+        success "Control file modified successfully."
     else
-        echo -e "❌ ${RED}Failed to modify control file: ${installed_control_path}${RESET}" >&2
-        return 1
+        error "Failed to modify control file: ${installed_control_path}"
+        return 2
     fi
 }
 
 test() {
-    echo -e "🧪 ${CYAN}Running jspg tests...${RESET}"
-    cargo pgrx test "pg${POSTGRES_VERSION}" "$@"
+    info "Running jspg tests..."
+    cargo pgrx test "pg${POSTGRES_VERSION}" "$@" || return $?
 }
 
 clean() {
-    echo -e "🧹 ${CYAN}Cleaning build artifacts...${RESET}"
-    cargo clean # Use standard cargo clean
+    info "Cleaning build artifacts..."
+    cargo clean || return $?
 }
 
 jspg-usage() {
-    echo -e " ${CYAN}JSPG Commands:${RESET}"
-    echo -e "   prepare            Check OS, Cargo, and PGRX dependencies."
-    echo -e "   install [opts]     Run prepare, then build and install the extension locally."
-    echo -e "   reinstall [opts]   Run prepare, clean, then build and install the extension locally."
-    echo -e "   test [opts]        Run pgrx integration tests."
-    echo -e "   clean              Remove pgrx build artifacts."
-    echo -e "   build              Build release artifacts into ./package/ (called by release)."
-    echo -e "   tag                Tag the current version (called by release)."
-    echo -e "   package            Upload artifacts from ./package/ (called by release)."
-    echo -e "   release            Perform a full release (increments patch, builds, tags, pushes, packages)."
+    printf "prepare\tCheck OS, Cargo, and PGRX dependencies.\n"
+    printf "install\tBuild and install the extension locally (after prepare).\n"
+    printf "reinstall\tClean, build, and install the extension locally (after prepare).\n"
+    printf "test\t\tRun pgrx integration tests.\n"
+    printf "clean\t\tRemove pgrx build artifacts.\n"
 }
 
 jspg-flow() {
     case "$1" in
-        env) env; return 0;;
-        prepare) base prepare; cargo-prepare; pgrx-prepare; return 0;;
-        build) build; return 0;;
-        install) install; return 0;;
-        reinstall) clean; install; return 0;;
-        test) test; return 0;;
-        package) env; package; return 0;;
-        release) env; release; return 0;;
-        clean) clean; return 0;;
+        env) env; return $?;;
+        prepare) prepare && cargo-prepare && pgrx-prepare; return $?;;
+        build) build; return $?;;
+        install) install; return $?;;
+        reinstall) clean && install; return $?;;
+        test) test "${@:2}"; return $?;;
+        clean) clean; return $?;;
+        release) env; release; return $?;;
         *) return 1 ;;
     esac
 }
```
flows (2 lines changed)

Submodule flows updated: 9d758d581e...3e3954fb79
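Throughout the updated `flow` script, raw `echo -e` calls with color codes are replaced by `info`, `success`, `warning`, and `error` helpers. Those helpers are not shown in this compare; they presumably come from the `flows` submodule updated here. A minimal sketch of what such helpers might look like, offered purely as an assumption for readability:

```bash
# Hypothetical logging helpers -- the actual definitions live in the
# flows submodule and may differ (names, colors, and stderr behavior
# are assumptions based on how the flow script calls them).
RED=$'\033[0;31m'; GREEN=$'\033[0;32m'
YELLOW=$'\033[0;33m'; BLUE=$'\033[0;34m'; RESET=$'\033[0m'

info()    { echo -e "${BLUE}$*${RESET}"; }
success() { echo -e "✨ ${GREEN}$*${RESET}"; }
warning() { echo -e "${YELLOW}$*${RESET}"; }
error()   { echo -e "❌ ${RED}$*${RESET}" >&2; }  # report failures on stderr
```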
src/lib.rs (51 lines changed)
```diff
@@ -52,8 +52,8 @@ fn cache_json_schema(schema_id: &str, schema: JsonB) -> JsonB {
             // Collect leaf errors from the meta-schema validation failure
             let mut error_list = Vec::new();
             collect_leaf_errors(src, &mut error_list);
-            // Return the flat list directly
-            json!(error_list)
+            // Filter and deduplicate errors, returning as a single JSON Value (Array)
+            json!(filter_boon_errors(error_list))
         }
         _ => {
             // Keep existing handling for other compilation errors
@@ -97,7 +97,7 @@ fn validate_json_schema(schema_id: &str, instance: JsonB) -> JsonB {
             collect_leaf_errors(&validation_error, &mut error_list);
             JsonB(json!({
                 "success": false,
-                "error": error_list // Flat list of specific errors
+                "error": filter_boon_errors(error_list) // Filter and deduplicate errors
             }))
         }
     }
@@ -127,6 +127,51 @@ fn collect_leaf_errors(error: &ValidationError, errors_list: &mut Vec<Value>) {
     }
 }
 
+// Filters collected errors, removing structural noise and then deduplicating by instance_path
+fn filter_boon_errors(raw_errors: Vec<Value>) -> Vec<Value> {
+    use std::collections::HashMap;
+    use std::collections::hash_map::Entry;
+
+    // Define schema keywords that indicate structural paths, not instance paths
+    let structural_path_segments = [
+        "/allOf/", "/anyOf/", "/oneOf/",
+        "/if/", "/then/", "/else/",
+        "/not/"
+        // Note: "/properties/" and "/items/" are generally valid,
+        // but might appear spuriously in boon's paths for complex types.
+        // We exclude only the explicitly logical/combinatorial ones for now.
+    ];
+
+    // 1. Filter out errors with instance_paths containing structural segments
+    let plausible_errors: Vec<Value> = raw_errors.into_iter().filter(|error_value| {
+        if let Some(instance_path_value) = error_value.get("instance_path") {
+            if let Some(instance_path_str) = instance_path_value.as_str() {
+                // Keep if NONE of the structural segments are present
+                !structural_path_segments.iter().any(|&segment| instance_path_str.contains(segment))
+            } else {
+                false // Invalid instance_path type, filter out
+            }
+        } else {
+            false // No instance_path field, filter out
+        }
+    }).collect();
+
+    // 2. Deduplicate the remaining plausible errors by instance_path
+    let mut unique_errors: HashMap<String, Value> = HashMap::new();
+    for error_value in plausible_errors {
+        if let Some(instance_path_value) = error_value.get("instance_path") {
+            if let Some(instance_path_str) = instance_path_value.as_str() {
+                if let Entry::Vacant(entry) = unique_errors.entry(instance_path_str.to_string()) {
+                    entry.insert(error_value);
+                }
+            }
+        }
+    }
+
+    // Collect the unique errors
+    unique_errors.into_values().collect()
+}
+
 #[pg_extern(strict, parallel_safe)]
 fn json_schema_cached(schema_id: &str) -> bool {
     let cache = SCHEMA_CACHE.read().unwrap();
```
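To see what `filter_boon_errors` does in practice, consider a hypothetical error list with the two fields the function actually inspects, `instance_path` and `message`. This sketch assumes the function from the patch above is in scope; the input values are illustrative, not taken from the repository:

```rust
use serde_json::{json, Value};

// Hypothetical demonstration of the filter + dedup behavior.
fn demo_filter_boon_errors() {
    let raw: Vec<Value> = vec![
        // Dropped: instance_path contains the structural segment "/oneOf/".
        json!({ "instance_path": "/oneOf/0", "message": "want object" }),
        // Two root-path errors; only the first encountered survives,
        // because Entry::Vacant keeps the first insertion per path.
        json!({ "instance_path": "", "message": "missing properties 'string_prop'" }),
        json!({ "instance_path": "", "message": "missing properties 'number_prop'" }),
        // Kept: a distinct instance_path.
        json!({ "instance_path": "/string_prop", "message": "length must be <=5" }),
    ];
    let filtered = filter_boon_errors(raw);
    // Two unique instance_paths remain: "" and "/string_prop".
    assert_eq!(filtered.len(), 2);
}
```

Note that `HashMap::into_values()` yields the surviving errors in no particular order, which is why the updated tests below assert on membership and counts rather than on ordering.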
src/tests.rs (39 lines changed)
```diff
@@ -333,32 +333,47 @@ fn test_validate_json_schema_oneof_validation_errors() {
     // --- Test case 1: Fails string maxLength (in branch 0) AND missing number_prop (in branch 1) ---
     let invalid_string_instance = json!({ "string_prop": "toolongstring" });
     let result_invalid_string = validate_json_schema(schema_id, jsonb(invalid_string_instance));
-    // Expect 2 leaf errors: one for maxLength (branch 0), one for missing prop (branch 1)
-    // Check the first error message reported by boon (maxLength).
-    assert_failure_with_json!(result_invalid_string, 2, "length must be <=5", "Validation with invalid string length should have 2 leaf errors");
+    // Expect 2 leaf errors. Check count only with the macro.
+    assert_failure_with_json!(result_invalid_string, 2);
+    // Explicitly check that both expected errors are present, ignoring order
+    let errors_string = result_invalid_string.0["error"].as_array().expect("Expected error array for invalid string");
+    assert!(errors_string.iter().any(|e| e["instance_path"] == "/string_prop" && e["message"].as_str().unwrap().contains("length must be <=5")), "Missing maxLength error");
+    assert!(errors_string.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'number_prop'")), "Missing number_prop required error");
 
     // --- Test case 2: Fails number minimum (in branch 1) AND missing string_prop (in branch 0) ---
     let invalid_number_instance = json!({ "number_prop": 5 });
     let result_invalid_number = validate_json_schema(schema_id, jsonb(invalid_number_instance));
-    // Expect 2 leaf errors: one for minimum (branch 1), one for missing prop (branch 0)
-    // Check the first error message reported by boon (missing prop).
-    assert_failure_with_json!(result_invalid_number, 2, "missing properties 'string_prop'", "Validation with invalid number should have 2 leaf errors");
+    // Expect 2 leaf errors. Check count only with the macro.
+    assert_failure_with_json!(result_invalid_number, 2);
+    // Explicitly check that both expected errors are present, ignoring order
+    let errors_number = result_invalid_number.0["error"].as_array().expect("Expected error array for invalid number");
+    assert!(errors_number.iter().any(|e| e["instance_path"] == "/number_prop" && e["message"].as_str().unwrap().contains("must be >=10")), "Missing minimum error");
+    assert!(errors_number.iter().any(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("missing properties 'string_prop'")), "Missing string_prop required error");
 
     // --- Test case 3: Fails type check (not object) for both branches ---
     // Input: boolean, expected object for both branches
     let invalid_bool_instance = json!(true); // Not an object
     let result_invalid_bool = validate_json_schema(schema_id, jsonb(invalid_bool_instance));
-    // Expect 2 leaf errors, one "Type" error for each branch
-    // Check the first error reported by boon (want object).
-    assert_failure_with_json!(result_invalid_bool, 2, "want object", "Validation with invalid bool should have 2 leaf errors");
+    // Expect only 1 leaf error after filtering, as both original errors have instance_path ""
+    assert_failure_with_json!(result_invalid_bool, 1);
+    // Explicitly check that the single remaining error is the type error for the root instance path
+    let errors_bool = result_invalid_bool.0["error"].as_array().expect("Expected error array for invalid bool");
+    assert_eq!(errors_bool.iter().filter(|e| e["instance_path"] == "" && e["message"].as_str().unwrap().contains("want object")).count(), 1, "Expected one 'want object' error at root after filtering");
 
     // --- Test case 4: Fails missing required for both branches ---
     // Input: empty object, expected string_prop (branch 0) OR number_prop (branch 1)
     let invalid_empty_obj = json!({});
     let result_empty_obj = validate_json_schema(schema_id, jsonb(invalid_empty_obj));
-    // Expect 2 leaf errors: one required error for branch 0, one required error for branch 1
-    // Check the first error reported by boon (missing string_prop).
-    assert_failure_with_json!(result_empty_obj, 2, "missing properties 'string_prop'", "Validation with empty object should have 2 leaf errors");
+    // Expect only 1 leaf error after filtering, as both original errors have instance_path ""
+    assert_failure_with_json!(result_empty_obj, 1);
+    // Explicitly check that the single remaining error is one of the expected missing properties errors
+    let errors_empty = result_empty_obj.0["error"].as_array().expect("Expected error array for empty object");
+    assert_eq!(errors_empty.len(), 1, "Expected exactly one error after filtering empty object");
+    let the_error = &errors_empty[0];
+    assert_eq!(the_error["instance_path"], "", "Expected instance_path to be empty string");
+    let message = the_error["message"].as_str().unwrap();
+    assert!(message.contains("missing properties 'string_prop'") || message.contains("missing properties 'number_prop'"),
+        "Error message should indicate missing string_prop or number_prop, got: {}", message);
 }
 
 #[pg_test]
```
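These tests call an `assert_failure_with_json!` macro whose definition is outside this diff; the new two-argument form appears to check only that validation failed and that the leaf-error count matches. A plausible minimal reconstruction, offered as an assumption rather than the repository's actual helper:

```rust
// Hypothetical sketch of the test helper macro; the real definition lives
// elsewhere in the repository and may differ. It is inferred from the call
// sites above and from the JsonB shape produced by validate_json_schema:
// JsonB(json!({ "success": false, "error": [...] })).
macro_rules! assert_failure_with_json {
    ($result:expr, $expected_count:expr) => {{
        let value = &$result.0; // JsonB wraps a serde_json::Value
        assert_eq!(value["success"], false, "expected validation to fail");
        let errors = value["error"]
            .as_array()
            .expect("expected `error` to be a JSON array");
        assert_eq!(errors.len(), $expected_count, "unexpected leaf-error count");
    }};
}
```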