#!/bin/bash
#
# C Nostr Relay - Backup Script
# Automated backup solution for an event-based configuration relay
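#
# Intended to run unattended, e.g. from cron. Illustrative entry only: the
# script path below is an example and should match your installation.
#   0 3 * * * /opt/c-relay/backup-relay.sh -s my-backup-bucket -e admin@example.com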

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Default configuration
RELAY_DIR="/opt/c-relay"
BACKUP_DIR="/backup/c-relay"
RETENTION_DAYS="30"
COMPRESS="true"
VERIFY="false"
REMOTE_BACKUP=""
S3_BUCKET=""
NOTIFICATION_EMAIL=""
LOG_FILE="/var/log/relay-backup.log"

# Functions
print_step() {
    echo -e "${BLUE}[STEP]${NC} $1"
    echo "$(date '+%Y-%m-%d %H:%M:%S') [STEP] $1" >> "$LOG_FILE"
}

print_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
    echo "$(date '+%Y-%m-%d %H:%M:%S') [SUCCESS] $1" >> "$LOG_FILE"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
    echo "$(date '+%Y-%m-%d %H:%M:%S') [WARNING] $1" >> "$LOG_FILE"
}

print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
    echo "$(date '+%Y-%m-%d %H:%M:%S') [ERROR] $1" >> "$LOG_FILE"
}

show_help() {
    echo "Usage: $0 [OPTIONS]"
    echo
    echo "Options:"
    echo "  -d, --relay-dir DIR     Relay directory (default: /opt/c-relay)"
    echo "  -b, --backup-dir DIR    Backup directory (default: /backup/c-relay)"
    echo "  -r, --retention DAYS    Retention period in days (default: 30)"
    echo "  -n, --no-compress       Don't compress backups"
    echo "  -s, --s3-bucket BUCKET  Upload to S3 bucket"
    echo "  -e, --email EMAIL       Send notification email"
    echo "  -v, --verify            Verify backup integrity"
    echo "  -h, --help              Show this help message"
    echo
    echo "Examples:"
    echo "  $0                                            # Basic backup"
    echo "  $0 -s my-backup-bucket -e admin@example.com"
    echo "  $0 -r 7 -n                                    # 7-day retention, no compression"
}

parse_args() {
    while [[ $# -gt 0 ]]; do
        case $1 in
            -d|--relay-dir)
                RELAY_DIR="$2"
                shift 2
                ;;
            -b|--backup-dir)
                BACKUP_DIR="$2"
                shift 2
                ;;
            -r|--retention)
                RETENTION_DAYS="$2"
                shift 2
                ;;
            -n|--no-compress)
                COMPRESS="false"
                shift
                ;;
            -s|--s3-bucket)
                S3_BUCKET="$2"
                shift 2
                ;;
            -e|--email)
                NOTIFICATION_EMAIL="$2"
                shift 2
                ;;
            -v|--verify)
                VERIFY="true"
                shift
                ;;
            -h|--help)
                show_help
                exit 0
                ;;
            *)
                print_error "Unknown option: $1"
                show_help
                exit 1
                ;;
        esac
    done
}

check_dependencies() {
    print_step "Checking dependencies..."

    # Check sqlite3
    if ! command -v sqlite3 &> /dev/null; then
        print_error "sqlite3 not found. Install with: apt install sqlite3"
        exit 1
    fi

    # Check compression tools
    if [[ "$COMPRESS" == "true" ]]; then
        if ! command -v gzip &> /dev/null; then
            print_error "gzip not found for compression"
            exit 1
        fi
    fi

    # Check S3 tools if needed
    if [[ -n "$S3_BUCKET" ]]; then
        if ! command -v aws &> /dev/null; then
            print_error "AWS CLI not found. Install with: apt install awscli"
            exit 1
        fi
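
        # The CLI binary alone is not enough: uploads also need credentials
        # (aws configure, AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY, or an
        # instance profile). Optional fail-fast check, left commented out as
        # an illustrative sketch:
        # if ! aws sts get-caller-identity &> /dev/null; then
        #     print_error "AWS CLI found but no usable credentials"
        #     exit 1
        # fi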
    fi

    print_success "Dependencies verified"
}

find_database() {
    print_step "Finding relay database..."

    # Look for .nrdb files in the relay directory (null-delimited so paths
    # with spaces or glob characters are handled safely)
    DB_FILES=()
    while IFS= read -r -d '' db; do
        DB_FILES+=("$db")
    done < <(find "$RELAY_DIR" -name "*.nrdb" -print0 2>/dev/null)

    if [[ ${#DB_FILES[@]} -eq 0 ]]; then
        print_error "No relay database files found in $RELAY_DIR"
        exit 1
    elif [[ ${#DB_FILES[@]} -gt 1 ]]; then
        print_warning "Multiple database files found:"
        printf '%s\n' "${DB_FILES[@]}"
        print_warning "Using the first one: ${DB_FILES[0]}"
    fi

    DB_FILE="${DB_FILES[0]}"
    DB_NAME=$(basename "$DB_FILE")

    print_success "Found database: $DB_FILE"
}

create_backup_directory() {
    print_step "Creating backup directory..."

    if [[ ! -d "$BACKUP_DIR" ]]; then
        mkdir -p "$BACKUP_DIR"
        chmod 700 "$BACKUP_DIR"
        print_success "Created backup directory: $BACKUP_DIR"
    else
        print_success "Using existing backup directory: $BACKUP_DIR"
    fi
}

perform_backup() {
    local timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_name="relay_backup_${timestamp}"
    local backup_file="$BACKUP_DIR/${backup_name}.nrdb"

    print_step "Creating database backup..."

    # Check that the database is readable
    if [[ ! -r "$DB_FILE" ]]; then
        print_error "Cannot read database file: $DB_FILE"
        exit 1
    fi

    # Get database size
    local db_size=$(du -h "$DB_FILE" | cut -f1)
    print_step "Database size: $db_size"

    # Create a SQLite backup using the .backup command (hot backup: produces a
    # consistent copy even while the relay is writing)
    if sqlite3 "$DB_FILE" ".backup '$backup_file'" 2>/dev/null; then
        print_success "Database backup created: $backup_file"
    else
        # Fall back to a plain file copy if .backup fails
        print_warning "SQLite backup failed, using file copy method"
        cp "$DB_FILE" "$backup_file"
        print_success "File copy backup created: $backup_file"
    fi

    # Verify the backup file exists
    if [[ ! -f "$backup_file" ]]; then
        print_error "Backup file was not created"
        exit 1
    fi

    # Check backup integrity
    if [[ "$VERIFY" == "true" ]]; then
        print_step "Verifying backup integrity..."
        if sqlite3 "$backup_file" "PRAGMA integrity_check;" | grep -q "ok"; then
            print_success "Backup integrity verified"
        else
            print_error "Backup integrity check failed"
            exit 1
        fi
    fi

    # Compress backup
    if [[ "$COMPRESS" == "true" ]]; then
        print_step "Compressing backup..."
        gzip "$backup_file"
        backup_file="${backup_file}.gz"
        print_success "Backup compressed: $backup_file"
    fi

    # Expose the backup path and name to the other functions
    BACKUP_FILE="$backup_file"
    BACKUP_NAME="$backup_name"
}

upload_to_s3() {
    if [[ -z "$S3_BUCKET" ]]; then
        return 0
    fi

    print_step "Uploading backup to S3..."

    local s3_path="s3://$S3_BUCKET/c-relay/$(date +%Y)/$(date +%m)/"

    if aws s3 cp "$BACKUP_FILE" "$s3_path" --storage-class STANDARD_IA; then
        print_success "Backup uploaded to S3: $s3_path"
    else
        print_error "Failed to upload backup to S3"
        return 1
    fi
}

cleanup_old_backups() {
    print_step "Cleaning up old backups..."

    local deleted_count=0

    # Clean local backups
    while IFS= read -r -d '' file; do
        rm "$file"
        deleted_count=$((deleted_count + 1))
    done < <(find "$BACKUP_DIR" -name "relay_backup_*.nrdb*" -mtime "+$RETENTION_DAYS" -print0 2>/dev/null)

    if [[ $deleted_count -gt 0 ]]; then
        print_success "Deleted $deleted_count old local backups"
    else
        print_success "No old local backups to delete"
    fi

    # Clean S3 backups if configured
    if [[ -n "$S3_BUCKET" ]]; then
        local cutoff_date=$(date -d "$RETENTION_DAYS days ago" +%Y-%m-%d)
        print_step "Cleaning S3 backups older than $cutoff_date..."

        # Note: this is a simplified approach. In production, prefer S3
        # lifecycle policies over deleting objects from this script.
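        # Illustrative lifecycle sketch (the rule ID and 30-day expiration are
        # examples, not values taken from this deployment):
        #   aws s3api put-bucket-lifecycle-configuration --bucket "$S3_BUCKET" \
        #     --lifecycle-configuration '{"Rules":[{"ID":"expire-relay-backups","Status":"Enabled","Filter":{"Prefix":"c-relay/"},"Expiration":{"Days":30}}]}'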
        aws s3 ls "s3://$S3_BUCKET/c-relay/" --recursive | \
            awk -v cutoff="$cutoff_date" '$1 < cutoff {print $4}' | \
            while read -r key; do
                aws s3 rm "s3://$S3_BUCKET/$key"
                print_step "Deleted S3 backup: $key"
            done
    fi
}

send_notification() {
    if [[ -z "$NOTIFICATION_EMAIL" ]]; then
        return 0
    fi

    print_step "Sending notification email..."

    local subject="C Nostr Relay Backup - $(date +%Y-%m-%d)"
    local backup_size=$(du -h "$BACKUP_FILE" | cut -f1)

    local message="Backup completed successfully.

Details:
- Date: $(date)
- Database: $DB_FILE
- Backup File: $BACKUP_FILE
- Backup Size: $backup_size
- Retention: $RETENTION_DAYS days
"

    if [[ -n "$S3_BUCKET" ]]; then
        message+="\n- S3 Bucket: $S3_BUCKET"
    fi

    # Try to send the email using the mail command
    if command -v mail &> /dev/null; then
        echo -e "$message" | mail -s "$subject" "$NOTIFICATION_EMAIL"
        print_success "Notification sent to $NOTIFICATION_EMAIL"
    else
        print_warning "Mail command not available, skipping notification"
    fi
}

show_backup_summary() {
    local backup_size=$(du -h "$BACKUP_FILE" | cut -f1)
    local backup_count=$(find "$BACKUP_DIR" -name "relay_backup_*.nrdb*" | wc -l)

    echo
    echo "🎉 Backup Completed Successfully!"
    echo
    echo "Backup Details:"
    echo "  Source DB:   $DB_FILE"
    echo "  Backup File: $BACKUP_FILE"
    echo "  Backup Size: $backup_size"
    echo "  Compressed:  $COMPRESS"
    echo "  Verified:    ${VERIFY:-false}"
    echo
    echo "Storage:"
    echo "  Local Backups: $backup_count files in $BACKUP_DIR"
    echo "  Retention:     $RETENTION_DAYS days"

    if [[ -n "$S3_BUCKET" ]]; then
        echo "  S3 Bucket:     $S3_BUCKET"
    fi

    echo
    echo "Management Commands:"
    echo "  List backups: find $BACKUP_DIR -name 'relay_backup_*'"
    echo "  Restore:      See examples/deployment/backup/restore-relay.sh"
    echo
}
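
# Manual restore sketch (assumes the default paths above; the dedicated
# restore-relay.sh script referenced in the summary is the preferred route):
#   1. Stop the relay so nothing writes to the database.
#   2. Decompress if needed:  gunzip relay_backup_YYYYMMDD_HHMMSS.nrdb.gz
#   3. Copy the file back over the original .nrdb in $RELAY_DIR.
#   4. Check it:              sqlite3 <file>.nrdb "PRAGMA integrity_check;"
#   5. Restart the relay.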

# Main execution
main() {
    echo
    echo "==============================================="
    echo "💾 C Nostr Relay - Database Backup"
    echo "==============================================="
    echo

    # Initialize log file
    mkdir -p "$(dirname "$LOG_FILE")"
    touch "$LOG_FILE"

    parse_args "$@"
    check_dependencies
    find_database
    create_backup_directory
    perform_backup
    upload_to_s3
    cleanup_old_backups
    send_notification
    show_backup_summary

    print_success "Backup process completed successfully!"
}

# Handle errors
trap 'print_error "Backup failed at line $LINENO"' ERR

# Run main function
main "$@"