#!/usr/bin/env bash
# create_d7030_test.sh
#
# Create a realistic "d7030" database with tables, data, and many large
# objects (BLOBs) to stress-test large-object restore behavior.
#
# Requirements:
#   - run by a user with passwordless sudo to the `postgres` account
#   - a local PostgreSQL cluster with home dir /var/lib/pgsql
#     (server-side lo_import() reads files as the postgres OS user,
#     which is why the temp blobs are created there and chowned)

set -euo pipefail

readonly DB_NAME="d7030"
readonly NUM_DOCUMENTS=3000 # Number of documents with BLOBs (increased to stress test locks)
readonly NUM_IMAGES=2000    # Number of image records (increased to stress test locks)

echo "Creating database: $DB_NAME"

# Drop if exists (stderr suppressed deliberately: the database may not exist yet)
sudo -u postgres psql -c "DROP DATABASE IF EXISTS $DB_NAME;" 2>/dev/null || true

# Create database
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME;"

echo "Creating schema and tables..."

# Enable pgcrypto extension for gen_random_bytes
sudo -u postgres psql -d "$DB_NAME" -c "CREATE EXTENSION IF NOT EXISTS pgcrypto;"

# Create schema with realistic business tables.
# Quoted delimiter: the SQL below must reach psql literally, unexpanded.
sudo -u postgres psql -d "$DB_NAME" <<'EOF'
-- Create tables for a document management system
CREATE TABLE departments (
    dept_id SERIAL PRIMARY KEY,
    dept_name VARCHAR(100) NOT NULL,
    created_at TIMESTAMP DEFAULT NOW()
);

CREATE TABLE employees (
    emp_id SERIAL PRIMARY KEY,
    dept_id INTEGER REFERENCES departments(dept_id),
    first_name VARCHAR(50) NOT NULL,
    last_name VARCHAR(50) NOT NULL,
    email VARCHAR(100) UNIQUE,
    hire_date DATE DEFAULT CURRENT_DATE
);

CREATE TABLE document_types (
    type_id SERIAL PRIMARY KEY,
    type_name VARCHAR(50) NOT NULL,
    description TEXT
);

-- Table with large objects (BLOBs)
CREATE TABLE documents (
    doc_id SERIAL PRIMARY KEY,
    emp_id INTEGER REFERENCES employees(emp_id),
    type_id INTEGER REFERENCES document_types(type_id),
    title VARCHAR(255) NOT NULL,
    description TEXT,
    file_data OID,           -- Large object reference
    file_size INTEGER,
    mime_type VARCHAR(100),
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);

CREATE TABLE images (
    image_id SERIAL PRIMARY KEY,
    doc_id INTEGER REFERENCES documents(doc_id),
    image_name VARCHAR(255),
    image_data OID,          -- Large object reference
    thumbnail_data OID,      -- Another large object
    width INTEGER,
    height INTEGER,
    created_at TIMESTAMP DEFAULT NOW()
);

CREATE TABLE audit_log (
    log_id SERIAL PRIMARY KEY,
    table_name VARCHAR(50),
    record_id INTEGER,
    action VARCHAR(20),
    changed_by INTEGER,
    changed_at TIMESTAMP DEFAULT NOW(),
    details JSONB
);

-- Create indexes
CREATE INDEX idx_documents_emp ON documents(emp_id);
CREATE INDEX idx_documents_type ON documents(type_id);
CREATE INDEX idx_images_doc ON images(doc_id);
CREATE INDEX idx_audit_table ON audit_log(table_name, record_id);

-- Insert reference data
INSERT INTO departments (dept_name) VALUES
    ('Engineering'), ('Sales'), ('Marketing'), ('HR'), ('Finance');

INSERT INTO document_types (type_name, description) VALUES
    ('Contract', 'Legal contracts and agreements'),
    ('Invoice', 'Financial invoices and receipts'),
    ('Report', 'Business reports and analysis'),
    ('Manual', 'Technical manuals and guides'),
    ('Presentation', 'Presentation slides and materials');

-- Insert employees
INSERT INTO employees (dept_id, first_name, last_name, email)
SELECT
    (random() * 4 + 1)::INTEGER,
    'Employee_' || generate_series,
    'LastName_' || generate_series,
    'employee' || generate_series || '@d7030.com'
FROM generate_series(1, 50);
EOF

echo "Inserting documents with large objects (BLOBs)..."

# Create a temporary file with random data for importing, in the postgres home
# (server-side lo_import() runs as the postgres OS user and must be able to
# read it).
TEMP_FILE="/var/lib/pgsql/test_blob_data.bin"
sudo dd if=/dev/urandom of="$TEMP_FILE" bs=1024 count=50 2>/dev/null
sudo chown postgres:postgres "$TEMP_FILE"

# Create documents with actual large objects using lo_import.
# NOTE(review): the original heredoc body was lost in the source; this is a
# reconstruction from the schema and the surrounding script — verify column
# values against the original intent. Unquoted delimiter on purpose so
# $TEMP_FILE and $NUM_DOCUMENTS expand in the shell.
sudo -u postgres psql -d "$DB_NAME" <<EOF
INSERT INTO documents (emp_id, type_id, title, description, file_data, file_size, mime_type)
SELECT
    (random() * 49 + 1)::INTEGER,
    (random() * 4 + 1)::INTEGER,
    'Document_' || gs,
    'Test document #' || gs || ' with attached blob',
    lo_import('$TEMP_FILE'),
    51200,
    'application/octet-stream'
FROM generate_series(1, $NUM_DOCUMENTS) AS gs;
EOF

echo "Inserting images with large objects..."

# Random payloads for the full-size image and its thumbnail.
# NOTE(review): the TEMP_IMAGE creation line was lost in the source; the
# 25 KiB size is an assumption — only the 10 KiB thumbnail dd survived.
TEMP_IMAGE="/var/lib/pgsql/test_image_data.bin"
TEMP_THUMB="/var/lib/pgsql/test_thumb_data.bin"
sudo dd if=/dev/urandom of="$TEMP_IMAGE" bs=1024 count=25 2>/dev/null
sudo dd if=/dev/urandom of="$TEMP_THUMB" bs=1024 count=10 2>/dev/null
sudo chown postgres:postgres "$TEMP_IMAGE" "$TEMP_THUMB"

# Create images with multiple large objects per record.
# NOTE(review): reconstructed body (source was truncated here) — two
# lo_import() calls per row, one per OID column, matching the images schema.
sudo -u postgres psql -d "$DB_NAME" <<EOF
INSERT INTO images (doc_id, image_name, image_data, thumbnail_data, width, height)
SELECT
    (random() * ($NUM_DOCUMENTS - 1) + 1)::INTEGER,
    'image_' || gs || '.bin',
    lo_import('$TEMP_IMAGE'),
    lo_import('$TEMP_THUMB'),
    1920,
    1080
FROM generate_series(1, $NUM_IMAGES) AS gs;
EOF

# Clean up the staging files; the blob data now lives in pg_largeobject.
sudo rm -f -- "$TEMP_FILE" "$TEMP_IMAGE" "$TEMP_THUMB"

echo "Done: $DB_NAME populated with $NUM_DOCUMENTS documents and $NUM_IMAGES images."