Configure secure, encrypted backup storage using S3-compatible providers with automated retention policies, client-side GPG encryption, and monitoring scripts for reliable data protection.
Prerequisites
- Root or sudo access
- Internet connectivity for S3 provider
- S3-compatible storage account with API credentials
- At least 2GB free disk space for temporary files
- A working mail transfer agent (MTA) configured for outbound email, used by the alerting scripts
What this solves
Remote backup storage with S3-compatible services provides secure, scalable data protection for critical systems. This tutorial configures MinIO client and AWS CLI tools to work with any S3-compatible provider, implements both client-side GPG encryption and server-side encryption, and sets up automated retention policies with monitoring. You'll learn to create resilient backup workflows that protect against data loss, ransomware, and hardware failures.
Step-by-step installation
Update system packages
Start by updating your package manager to ensure you get the latest versions of all dependencies.
# Refresh package lists and apply pending upgrades before installing new tools.
sudo apt update && sudo apt upgrade -y
# curl/wget: downloads; gnupg2: encryption; cron: scheduling; rsync: transfers.
sudo apt install -y curl wget gnupg2 cron rsync
Install MinIO client
MinIO client provides S3-compatible command-line tools for object storage operations with any S3-compatible provider.
# Download the MinIO client (a single static binary).
curl -LO https://dl.min.io/client/mc/release/linux-amd64/mc
chmod +x mc
# Install system-wide so backup-user can run it too.
sudo mv mc /usr/local/bin/
# Confirm the binary runs.
mc --version
Install AWS CLI v2
AWS CLI provides advanced S3 operations and is compatible with most S3-compatible storage providers.
# Download and run the official AWS CLI v2 bundled installer.
curl -LO "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip"
unzip awscli-exe-linux-x86_64.zip
sudo ./aws/install
# Confirm the install before removing the installer files.
aws --version
rm -rf awscli-exe-linux-x86_64.zip aws/
Create backup user and directories
Create a dedicated user for backup operations with proper permissions and directory structure.
# System account (-r) with a login shell (needed for cron jobs), home at /opt/backup.
sudo useradd -r -s /bin/bash -d /opt/backup -m backup-user
sudo mkdir -p /opt/backup/{scripts,logs,temp,keys}
sudo chown -R backup-user:backup-user /opt/backup
# 750 on the tree; 700 on keys/ so only the owner can read key material.
sudo chmod 750 /opt/backup
sudo chmod 700 /opt/backup/keys
Generate GPG encryption keys
Create GPG keys for client-side encryption of backup data before uploading to remote storage.
# Generate a key pair unattended via a batch-mode here-document (the original
# command was truncated after "<"; this is the completed form). The email must
# match GPG_RECIPIENT used later by the backup scripts.
sudo -u backup-user gpg --batch --gen-key --pinentry-mode loopback <<'EOF'
Key-Type: RSA
Key-Length: 4096
Subkey-Type: RSA
Subkey-Length: 4096
Name-Real: Backup System
Name-Email: backup@example.com
Expire-Date: 0
%commit
EOF
Configure MinIO client for S3 provider
Set up MinIO client to connect to your S3-compatible storage provider. Replace credentials and endpoint with your provider's details.
# Register the provider endpoint and credentials under the alias "s3backup".
sudo -u backup-user mc alias set s3backup https://s3.amazonaws.com YOUR_ACCESS_KEY YOUR_SECRET_KEY
# NOTE(review): `mc admin info` only works against actual MinIO servers; most
# generic S3 providers (including AWS S3) will reject it — verify for yours.
sudo -u backup-user mc admin info s3backup
# Create the backup bucket (idempotent if it already exists on most providers).
sudo -u backup-user mc mb s3backup/my-backups --region us-east-1
Configure AWS CLI credentials
Set up AWS CLI with your S3-compatible provider credentials for advanced operations and lifecycle policies.
# Persist credentials and defaults into ~backup-user/.aws/ for later commands.
sudo -u backup-user aws configure set aws_access_key_id YOUR_ACCESS_KEY
sudo -u backup-user aws configure set aws_secret_access_key YOUR_SECRET_KEY
sudo -u backup-user aws configure set default.region us-east-1
sudo -u backup-user aws configure set default.output json
Enable server-side encryption
Configure server-side encryption for the S3 bucket to add an additional layer of security.
# Default every new object in the bucket to server-side encryption (AES-256).
sudo -u backup-user aws s3api put-bucket-encryption \
--bucket my-backups \
--server-side-encryption-configuration '{
"Rules": [{
"ApplyServerSideEncryptionByDefault": {
"SSEAlgorithm": "AES256"
}
}]
}'
Create backup lifecycle policy
Implement automated retention policies to manage storage costs and comply with data retention requirements.
{
"Rules": [{
"ID": "BackupRetentionRule",
"Status": "Enabled",
"Filter": {
"Prefix": "daily/"
},
"Transitions": [{
"Days": 30,
"StorageClass": "STANDARD_IA"
}, {
"Days": 90,
"StorageClass": "GLACIER"
}, {
"Days": 365,
"StorageClass": "DEEP_ARCHIVE"
}],
"Expiration": {
"Days": 2555
}
}, {
"ID": "WeeklyBackupRetention",
"Status": "Enabled",
"Filter": {
"Prefix": "weekly/"
},
"Transitions": [{
"Days": 60,
"StorageClass": "GLACIER"
}],
"Expiration": {
"Days": 3650
}
}]
}
Apply lifecycle policy to bucket
Apply the retention policy to automatically manage backup lifecycle and reduce storage costs.
# Apply the lifecycle JSON (save the policy above as /opt/backup/lifecycle-policy.json first).
sudo -u backup-user aws s3api put-bucket-lifecycle-configuration \
--bucket my-backups \
--lifecycle-configuration file:///opt/backup/lifecycle-policy.json
# Read it back to confirm the policy was accepted.
sudo -u backup-user aws s3api get-bucket-lifecycle-configuration \
--bucket my-backups
Create backup script with encryption
Develop a comprehensive backup script that handles GPG encryption, compression, and secure upload with error handling.
#!/bin/bash
# Daily backup: tar the source dirs, GPG-encrypt (symmetric AES-256), upload
# to S3, and verify the upload by size. Save as /opt/backup/scripts/backup.sh.
# (The bare section labels in the original listing were markdown headings;
# they are now comments so the script actually runs.)
set -euo pipefail

# --- Configuration ---
BACKUP_NAME="$(hostname)-$(date +%Y%m%d-%H%M%S)"
# Array keeps each path a single word even if it contains spaces.
SOURCE_DIRS=(/etc /home /var/log)
TEMP_DIR="/opt/backup/temp"
LOG_FILE="/opt/backup/logs/backup-$(date +%Y%m%d).log"
S3_BUCKET="my-backups"
# WARNING: a hardcoded passphrase is readable by anyone who can read this
# file — prefer loading it from a root-only file or the environment.
GPG_PASSPHRASE="YourSecureGPGPassphrase123!"

# --- Functions ---
# log MESSAGE — timestamped line to stdout and the daily log file.
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a "$LOG_FILE"
}

# Remove this run's temp artifacts; runs on both success and failure via trap.
cleanup() {
  rm -rf "$TEMP_DIR/$BACKUP_NAME"*
  log "Cleanup completed"
}
trap cleanup EXIT

# --- Start backup ---
log "Starting backup: $BACKUP_NAME"

# Create compressed archive
log "Creating compressed archive"
tar -czf "$TEMP_DIR/$BACKUP_NAME.tar.gz" "${SOURCE_DIRS[@]}" 2>>"$LOG_FILE" || {
  log "ERROR: Failed to create archive"
  exit 1
}

# Encrypt archive (passphrase fed via stdin/fd 0, never on the command line)
log "Encrypting archive with GPG"
echo "$GPG_PASSPHRASE" | gpg --batch --yes --passphrase-fd 0 \
  --cipher-algo AES256 --compress-algo 1 --s2k-mode 3 \
  --s2k-digest-algo SHA256 --s2k-count 65011712 \
  --symmetric --output "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" \
  "$TEMP_DIR/$BACKUP_NAME.tar.gz" || {
  log "ERROR: GPG encryption failed"
  exit 1
}

# Checksum of the encrypted artifact; verified again at restore time
sha256sum "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" > "$TEMP_DIR/$BACKUP_NAME.sha256"

# Upload encrypted backup to S3
log "Uploading encrypted backup to S3"
aws s3 cp "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" \
  "s3://$S3_BUCKET/daily/$BACKUP_NAME.tar.gz.gpg" \
  --storage-class STANDARD \
  --metadata "backup-date=$(date -Iseconds),hostname=$(hostname)" || {
  log "ERROR: S3 upload failed"
  exit 1
}

# Upload checksum alongside the backup
aws s3 cp "$TEMP_DIR/$BACKUP_NAME.sha256" \
  "s3://$S3_BUCKET/daily/$BACKUP_NAME.sha256" || {
  log "ERROR: Checksum upload failed"
  exit 1
}

# Verify upload: remote size (3rd column of `aws s3 ls`) must equal local size.
# Note: `stat -c%s` is GNU coreutils syntax (fine on Debian/Ubuntu).
REMOTE_SIZE=$(aws s3 ls "s3://$S3_BUCKET/daily/$BACKUP_NAME.tar.gz.gpg" | awk '{print $3}')
LOCAL_SIZE=$(stat -c%s "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg")
if [ "$REMOTE_SIZE" != "$LOCAL_SIZE" ]; then
  log "ERROR: Upload verification failed - size mismatch"
  exit 1
fi

log "Backup completed successfully: $BACKUP_NAME"
log "Encrypted size: $(du -h "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" | cut -f1)"
log "S3 location: s3://$S3_BUCKET/daily/$BACKUP_NAME.tar.gz.gpg"
Create restore script
Develop a restore script to decrypt and extract backups from S3 storage when needed.
#!/bin/bash
# Restore a named backup from S3: download, verify checksum, decrypt, extract.
# Save as /opt/backup/scripts/restore.sh. (Bare section labels from the
# original listing are now comments; the usage line lost its <backup-name>
# placeholder to markdown stripping — restored.)
set -euo pipefail

# --- Configuration ---
S3_BUCKET="my-backups"
RESTORE_DIR="/opt/backup/restore"
TEMP_DIR="/opt/backup/temp"
# WARNING: must match the passphrase used by backup.sh; avoid hardcoding in
# production — load from a root-only file instead.
GPG_PASSPHRASE="YourSecureGPGPassphrase123!"

if [ $# -ne 1 ]; then
  echo "Usage: $0 <backup-name>"
  echo "Example: $0 myserver-20241201-120000"
  exit 1
fi
BACKUP_NAME="$1"

# log MESSAGE — timestamped line to stdout.
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}

# Create restore and temp directories
mkdir -p "$RESTORE_DIR" "$TEMP_DIR"

# Download backup and checksum
log "Downloading backup: $BACKUP_NAME"
aws s3 cp "s3://$S3_BUCKET/daily/$BACKUP_NAME.tar.gz.gpg" "$TEMP_DIR/" || {
  log "ERROR: Failed to download backup"
  exit 1
}
aws s3 cp "s3://$S3_BUCKET/daily/$BACKUP_NAME.sha256" "$TEMP_DIR/" || {
  log "ERROR: Failed to download checksum"
  exit 1
}

# Verify integrity (sha256 file contains relative paths, so run from TEMP_DIR)
log "Verifying backup integrity"
cd "$TEMP_DIR"
if ! sha256sum -c "$BACKUP_NAME.sha256"; then
  log "ERROR: Checksum verification failed"
  exit 1
fi

# Decrypt backup (passphrase via stdin, never on the command line)
log "Decrypting backup"
echo "$GPG_PASSPHRASE" | gpg --batch --yes --passphrase-fd 0 \
  --decrypt "$BACKUP_NAME.tar.gz.gpg" > "$BACKUP_NAME.tar.gz" || {
  log "ERROR: Decryption failed"
  exit 1
}

# Extract backup into the restore directory
log "Extracting backup to $RESTORE_DIR"
tar -xzf "$BACKUP_NAME.tar.gz" -C "$RESTORE_DIR" || {
  log "ERROR: Extraction failed"
  exit 1
}

log "Restore completed successfully"
log "Files restored to: $RESTORE_DIR"
Set proper script permissions
Configure secure permissions for backup scripts to prevent unauthorized access while allowing execution.
sudo chown backup-user:backup-user /opt/backup/scripts/*.sh
sudo chmod 750 /opt/backup/scripts/*.sh
sudo chmod 644 /opt/backup/lifecycle-policy.json
Create monitoring script
Implement monitoring to track backup success, storage usage, and send alerts for failures.
#!/bin/bash
# Backup monitoring: checks backup freshness and storage usage, scans today's
# log for errors, and emails alerts. Save as /opt/backup/scripts/monitor.sh.
# (Bare section labels from the original listing are now comments.)
set -euo pipefail

S3_BUCKET="my-backups"
LOG_DIR="/opt/backup/logs"
ALERT_EMAIL="admin@example.com"
MAX_AGE_HOURS=25   # daily schedule: 24h plus 1h slack

# log MESSAGE — timestamped line to stdout.
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}

# send_alert SUBJECT MESSAGE — best-effort email; warns (does not fail) if
# the MTA is unavailable.
send_alert() {
  local subject="$1"
  local message="$2"
  printf '%s\n' "$message" | mail -s "$subject" "$ALERT_EMAIL" 2>/dev/null || {
    log "WARNING: Failed to send email alert"
  }
}

# --- Check last backup age ---
log "Checking backup freshness"
# Lexical sort works because object names start with a sortable date prefix;
# columns 1+2 of `aws s3 ls` are the object's date and time.
LAST_BACKUP=$(aws s3 ls "s3://$S3_BUCKET/daily/" --recursive | sort | tail -1 | awk '{print $1" "$2}')
if [ -z "$LAST_BACKUP" ]; then
  send_alert "Backup Alert: No backups found" "No backups found in S3 bucket: $S3_BUCKET"
  exit 1
fi
LAST_BACKUP_EPOCH=$(date -d "$LAST_BACKUP" +%s)
CURRENT_EPOCH=$(date +%s)
AGE_HOURS=$(( (CURRENT_EPOCH - LAST_BACKUP_EPOCH) / 3600 ))
if [ "$AGE_HOURS" -gt "$MAX_AGE_HOURS" ]; then
  send_alert "Backup Alert: Backup too old" "Last backup is $AGE_HOURS hours old (max: $MAX_AGE_HOURS)"
fi

# --- Check storage usage ---
TOTAL_SIZE=$(aws s3 ls "s3://$S3_BUCKET" --recursive --summarize | grep "Total Size:" | awk '{print $3}')
TOTAL_FILES=$(aws s3 ls "s3://$S3_BUCKET" --recursive --summarize | grep "Total Objects:" | awk '{print $3}')
log "Storage usage: $TOTAL_FILES files, $(numfmt --to=iec "$TOTAL_SIZE")"

# --- Check today's backup log for errors ---
TODAY_LOG="$LOG_DIR/backup-$(date +%Y%m%d).log"
if [ -f "$TODAY_LOG" ]; then
  # `grep -c` prints 0 on no match but exits non-zero; `|| true` keeps -e happy.
  ERROR_COUNT=$(grep -c "ERROR" "$TODAY_LOG" || true)
  if [ "$ERROR_COUNT" -gt 0 ]; then
    ERRORS=$(grep "ERROR" "$TODAY_LOG")
    # $'\n' inserts a real newline; the original passed a literal "\n" that
    # appeared verbatim in the email body.
    send_alert "Backup Alert: Errors detected" "Found $ERROR_COUNT errors in today's backup log:"$'\n'"$ERRORS"
  fi
fi

log "Monitoring completed - Age: ${AGE_HOURS}h, Files: $TOTAL_FILES"
Install mail utilities for alerts
Install mail utilities to enable email notifications for backup monitoring and alerts.
# mailutils provides the `mail` command used by the monitoring script.
sudo apt install -y mailutils
# NOTE(review): on Ubuntu, mailutils typically pulls in postfix rather than
# exim4 — this step assumes exim4 is the installed MTA; verify before running.
sudo dpkg-reconfigure exim4-config
Set up automated backup schedule
Configure cron jobs to run backups automatically with proper scheduling for daily, weekly, and monitoring tasks.
# Open backup-user's crontab in $EDITOR for editing.
sudo -u backup-user crontab -e
Add these lines to the crontab:
# Crontab entries for backup-user. The original listing lost asterisks to
# markdown stripping ("0 2 *", "0 /6", ...); all five cron fields
# (minute hour day-of-month month day-of-week) are restored below.
# Daily backup at 2 AM
0 2 * * * /opt/backup/scripts/backup.sh >> /opt/backup/logs/cron.log 2>&1
# Weekly backup on Sunday at 3 AM (rewrites the daily/ prefix to weekly/ on the fly)
0 3 * * 0 sed 's/daily\//weekly\//g' /opt/backup/scripts/backup.sh | bash >> /opt/backup/logs/cron-weekly.log 2>&1
# Monitor backups every 6 hours
0 */6 * * * /opt/backup/scripts/monitor.sh >> /opt/backup/logs/monitor.log 2>&1
# Clean old logs monthly (4 AM on the 1st)
0 4 1 * * find /opt/backup/logs -name "*.log" -mtime +30 -delete
Configure log rotation
Set up log rotation to prevent backup logs from consuming excessive disk space over time.
# Logrotate policy for /etc/logrotate.d/: rotate daily, keep 30 rotations,
# compress after one cycle, recreate files owned by backup-user.
/opt/backup/logs/*.log {
daily
rotate 30
compress
delaycompress
missingok
notifempty
create 640 backup-user backup-user
postrotate
# NOTE(review): these logs are written directly by the backup scripts, not
# rsyslog — this reload is likely unnecessary here; confirm before keeping.
/usr/bin/systemctl reload rsyslog > /dev/null 2>&1 || true
endscript
}
Configure client-side encryption options
Set up asymmetric encryption
For enhanced security, configure asymmetric GPG encryption using your public key for encryption and private key for decryption.
# Export public key for encryption (safe to distribute)
sudo -u backup-user gpg --armor --export backup@example.com > /opt/backup/keys/backup-public.asc
# Export private key for secure offline storage — the labels below were bare
# text in the original listing and would have been executed as commands.
sudo -u backup-user gpg --armor --export-secret-keys backup@example.com > /opt/backup/keys/backup-private.asc
# Set restrictive permissions on the private key (owner read/write only)
sudo chmod 600 /opt/backup/keys/backup-private.asc
Create advanced backup script with asymmetric encryption
Modify the backup script to use asymmetric encryption for better key management and security.
#!/bin/bash
# Asymmetric-encryption backup: tar is streamed straight into `gpg --encrypt`,
# so no plaintext archive ever touches disk and only the private-key holder
# can decrypt. (Bare section labels from the original listing are now comments.)
set -euo pipefail

# --- Configuration ---
BACKUP_NAME="$(hostname)-$(date +%Y%m%d-%H%M%S)"
SOURCE_DIRS=(/etc /home /var/log)
TEMP_DIR="/opt/backup/temp"
LOG_FILE="/opt/backup/logs/backup-$(date +%Y%m%d).log"
S3_BUCKET="my-backups"
GPG_RECIPIENT="backup@example.com"

# log MESSAGE — timestamped line to stdout and the daily log file.
log() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a "$LOG_FILE"
}

# Create and encrypt archive with the recipient's public key
log "Creating encrypted archive with public key"
tar -czf - "${SOURCE_DIRS[@]}" 2>>"$LOG_FILE" | \
  gpg --trust-model always --encrypt --recipient "$GPG_RECIPIENT" \
  --cipher-algo AES256 --compress-algo 2 \
  --output "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" || {
  log "ERROR: Encryption failed"
  exit 1
}

# Continue with upload process...
sha256sum "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" > "$TEMP_DIR/$BACKUP_NAME.sha256"
aws s3 cp "$TEMP_DIR/$BACKUP_NAME.tar.gz.gpg" \
  "s3://$S3_BUCKET/daily/$BACKUP_NAME.tar.gz.gpg" \
  --storage-class STANDARD
log "Asymmetric encrypted backup completed: $BACKUP_NAME"
Verify your setup
Test your backup system to ensure all components work correctly before relying on it for critical data.
# Test MinIO client connection
sudo -u backup-user mc ls s3backup/my-backups
# Test AWS CLI connection (labels below were bare text in the original
# listing and would have been executed as commands)
sudo -u backup-user aws s3 ls s3://my-backups
# Verify GPG encryption works (round-trip encrypt then decrypt)
echo "test" | sudo -u backup-user gpg --encrypt --recipient backup@example.com | sudo -u backup-user gpg --decrypt
# Run a test backup
sudo -u backup-user /opt/backup/scripts/backup.sh
# Verify backup was uploaded
sudo -u backup-user aws s3 ls s3://my-backups/daily/ --human-readable
# Test monitoring script
sudo -u backup-user /opt/backup/scripts/monitor.sh
# Verify cron is configured
sudo -u backup-user crontab -l
Common issues
| Symptom | Cause | Fix |
|---|---|---|
| GPG encryption fails | Missing passphrase or key | Verify GPG key exists: gpg --list-keys backup@example.com |
| S3 upload permission denied | Incorrect AWS credentials or bucket policy | Check credentials: aws sts get-caller-identity |
| Backup script fails silently | Missing execute permissions or dependencies | Check permissions: ls -la /opt/backup/scripts/ |
| Cron jobs not running | Cron service disabled or syntax error | Check cron status: systemctl status cron and logs |
| Email alerts not working | Mail service not configured | Test mail: echo "test" | mail -s "test" admin@example.com |
| Large backup timeouts | S3 multipart upload needed | Configure AWS CLI: aws configure set default.s3.multipart_threshold 64MB |
| Restore fails with wrong passphrase | GPG passphrase mismatch | Verify with test decryption before production use |
| Backup size unexpectedly large | Including temporary or cache files | Add exclusions to tar: --exclude='/tmp' --exclude='/var/cache' |
Next steps
- Configure backup encryption with GPG and rsync for secure automated backups
- Set up MySQL backup encryption and remote storage with rsync
- Setup S3-compatible disaster recovery with cross-region replication
- Configure backup monitoring with Prometheus and Grafana dashboards
- Implement backup encryption key rotation and secure management
Automated install script
Run this to automate the entire setup
#!/usr/bin/env bash
# Automated installer: MinIO client + AWS CLI v2 + backup user, GPG keys,
# and example scripts. Run as root.
set -euo pipefail
# Colors for output
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'
# Global variables
BACKUP_USER="backup-user"
BACKUP_HOME="/opt/backup"
TOTAL_STEPS=7
# Logging functions (colored, single-line; echo -e expands the color escapes)
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
# Cleanup on failure (ERR trap): roll back everything this installer creates.
# NOTE(review): this also removes the user and ALL of $BACKUP_HOME even if
# they existed before the run — destructive on pre-existing setups; confirm.
cleanup() {
log_error "Installation failed. Cleaning up..."
sudo userdel -r "$BACKUP_USER" 2>/dev/null || true
sudo rm -f /usr/local/bin/mc 2>/dev/null || true
sudo rm -rf /usr/local/aws-cli 2>/dev/null || true
sudo rm -f /usr/local/bin/aws 2>/dev/null || true
sudo rm -rf "$BACKUP_HOME" 2>/dev/null || true
exit 1
}
trap cleanup ERR
# Abort early unless running as root with curl available for downloads.
check_prerequisites() {
  if (( EUID != 0 )); then
    log_error "This script must be run as root or with sudo"
    exit 1
  fi
  command -v curl &> /dev/null || {
    log_error "curl is required but not installed"
    exit 1
  }
}
# Identify the distro from /etc/os-release and choose its package manager.
# Sets globals consumed by update_system: PKG_MGR, PKG_INSTALL, PKG_UPDATE.
detect_distro() {
  if [[ ! -f /etc/os-release ]]; then
    log_error "Cannot detect distribution. /etc/os-release not found."
    exit 1
  fi
  . /etc/os-release   # provides $ID among other variables
  case "$ID" in
    ubuntu|debian)
      PKG_MGR="apt"
      PKG_INSTALL="apt install -y"
      PKG_UPDATE="apt update && apt upgrade -y"
      ;;
    almalinux|rocky|centos|rhel|ol|fedora)
      PKG_MGR="dnf"
      PKG_INSTALL="dnf install -y"
      PKG_UPDATE="dnf update -y"
      ;;
    amzn)
      PKG_MGR="yum"
      PKG_INSTALL="yum install -y"
      PKG_UPDATE="yum update -y"
      ;;
    *)
      log_error "Unsupported distribution: $ID"
      exit 1
      ;;
  esac
  log_info "Detected distribution: $ID (Package manager: $PKG_MGR)"
}
# Update system packages and install base dependencies for the backup stack.
update_system() {
  echo "[1/$TOTAL_STEPS] Updating system packages..."
  # PKG_UPDATE may contain a shell operator ("apt update && apt upgrade -y");
  # bare word-splitting ($PKG_UPDATE) would pass "&&" to apt as a literal
  # argument, so the string must be evaluated as shell.
  eval "$PKG_UPDATE"
  echo "Installing base dependencies..."
  if [[ "$PKG_MGR" == "apt" ]]; then
    $PKG_INSTALL curl wget gnupg2 cron rsync unzip
  else
    # RHEL-family ships cron as "cronie"
    $PKG_INSTALL curl wget gnupg2 cronie rsync unzip
    # Ensure cronie is enabled on RHEL-based systems
    systemctl enable --now crond 2>/dev/null || true
  fi
  log_info "System packages updated successfully"
}
# Download the MinIO client into a temp dir and install it to /usr/local/bin.
install_minio_client() {
  echo "[2/$TOTAL_STEPS] Installing MinIO client..."
  local temp_dir
  # Split declaration from assignment so a mktemp failure is not masked by
  # `local` (which always returns 0).
  temp_dir=$(mktemp -d)
  cd "$temp_dir" || exit 1
  curl -LO https://dl.min.io/client/mc/release/linux-amd64/mc
  chmod 755 mc
  mv mc /usr/local/bin/
  # Verify installation
  if /usr/local/bin/mc --version &> /dev/null; then
    log_info "MinIO client installed successfully"
  else
    log_error "MinIO client installation failed"
    exit 1
  fi
  cd - > /dev/null
  rm -rf "$temp_dir"
}
# Download and install AWS CLI v2 via the official bundled installer.
install_aws_cli() {
  echo "[3/$TOTAL_STEPS] Installing AWS CLI v2..."
  local temp_dir
  # Split declaration from assignment so a mktemp failure is not masked by
  # `local` (which always returns 0).
  temp_dir=$(mktemp -d)
  cd "$temp_dir" || exit 1
  curl -LO "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip"
  unzip -q awscli-exe-linux-x86_64.zip
  ./aws/install
  # Verify installation
  if aws --version &> /dev/null; then
    log_info "AWS CLI v2 installed successfully"
  else
    log_error "AWS CLI v2 installation failed"
    exit 1
  fi
  cd - > /dev/null
  rm -rf "$temp_dir"
}
# Create the dedicated backup account (if missing) and its directory tree
# with restrictive permissions.
create_backup_user() {
  echo "[4/$TOTAL_STEPS] Creating backup user and directories..."
  # Skip user creation when the account already exists
  if id "$BACKUP_USER" &>/dev/null; then
    log_warn "User $BACKUP_USER already exists"
  else
    useradd -r -s /bin/bash -d "$BACKUP_HOME" -m "$BACKUP_USER"
    log_info "Created backup user: $BACKUP_USER"
  fi
  # Directory structure: 750 on the home, 700 on keys/, 755 on the rest
  mkdir -p "$BACKUP_HOME"/{scripts,logs,temp,keys}
  chown -R "$BACKUP_USER:$BACKUP_USER" "$BACKUP_HOME"
  chmod 750 "$BACKUP_HOME"
  chmod 700 "$BACKUP_HOME/keys"
  chmod 755 "$BACKUP_HOME"/{scripts,logs,temp}
  log_info "Backup directories created with proper permissions"
}
# Generate GPG encryption keys
# Creates a 4096-bit RSA key pair for the backup user via unattended batch
# mode, then exports the public key for reference.
generate_gpg_keys() {
echo "[5/$TOTAL_STEPS] Generating GPG encryption keys..."
local gpg_batch_file="$BACKUP_HOME/keys/gpg_batch"
# %no-protection: the secret key has NO passphrase — required for unattended
# cron use, but anyone who can read the keyring can decrypt backups.
cat > "$gpg_batch_file" << 'EOF'
%no-protection
Key-Type: RSA
Key-Length: 4096
Subkey-Type: RSA
Subkey-Length: 4096
Name-Real: Backup System
Name-Email: backup@localhost
Expire-Date: 0
%commit
EOF
chown "$BACKUP_USER:$BACKUP_USER" "$gpg_batch_file"
chmod 600 "$gpg_batch_file"
# Generate keys as backup user
sudo -u "$BACKUP_USER" gpg --batch --gen-key --pinentry-mode loopback "$gpg_batch_file"
# Export public key for reference
sudo -u "$BACKUP_USER" gpg --armor --export backup@localhost > "$BACKUP_HOME/keys/backup_public.asc"
chown "$BACKUP_USER:$BACKUP_USER" "$BACKUP_HOME/keys/backup_public.asc"
chmod 644 "$BACKUP_HOME/keys/backup_public.asc"
# Remove the batch file once the key exists
rm -f "$gpg_batch_file"
log_info "GPG encryption keys generated successfully"
}
# Create basic backup scripts
# Writes a template backup script (tar + asymmetric GPG encrypt + commented
# upload stubs) into $BACKUP_HOME/scripts for the admin to customize.
create_backup_scripts() {
echo "[6/$TOTAL_STEPS] Creating basic backup scripts..."
# Create a sample backup script (quoted 'EOF' → written verbatim, no expansion)
cat > "$BACKUP_HOME/scripts/backup_example.sh" << 'EOF'
#!/bin/bash
# Example backup script - customize as needed
set -euo pipefail
BACKUP_SOURCE="/path/to/backup"
BACKUP_NAME="$(hostname)-$(date +%Y%m%d_%H%M%S)"
TEMP_DIR="/opt/backup/temp"
LOG_FILE="/opt/backup/logs/backup_$(date +%Y%m%d).log"
log() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a "$LOG_FILE"
}
# Create encrypted backup
log "Starting backup: $BACKUP_NAME"
tar -czf "$TEMP_DIR/${BACKUP_NAME}.tar.gz" -C "$(dirname "$BACKUP_SOURCE")" "$(basename "$BACKUP_SOURCE")"
gpg --trust-model always --encrypt --recipient backup@localhost --output "$TEMP_DIR/${BACKUP_NAME}.tar.gz.gpg" "$TEMP_DIR/${BACKUP_NAME}.tar.gz"
# Upload to S3 (configure mc/aws first)
# mc cp "$TEMP_DIR/${BACKUP_NAME}.tar.gz.gpg" myminio/backups/
# aws s3 cp "$TEMP_DIR/${BACKUP_NAME}.tar.gz.gpg" s3://my-backup-bucket/
# Cleanup
rm -f "$TEMP_DIR/${BACKUP_NAME}.tar.gz" "$TEMP_DIR/${BACKUP_NAME}.tar.gz.gpg"
log "Backup completed: $BACKUP_NAME"
EOF
chown "$BACKUP_USER:$BACKUP_USER" "$BACKUP_HOME/scripts/backup_example.sh"
chmod 750 "$BACKUP_HOME/scripts/backup_example.sh"
log_info "Basic backup scripts created"
}
# Perform verification checks; returns 0 when all pass, 1 otherwise.
verify_installation() {
  echo "[7/$TOTAL_STEPS] Verifying installation..."
  local errors=0
  # NB: the original used ((errors++)), whose exit status is 1 when errors
  # is 0 — under `set -e` the first failed check would abort the script and
  # fire the destructive ERR-trap cleanup. Plain assignment avoids that.
  # Check MinIO client
  if ! /usr/local/bin/mc --version &> /dev/null; then
    log_error "MinIO client verification failed"
    errors=$((errors + 1))
  fi
  # Check AWS CLI
  if ! aws --version &> /dev/null; then
    log_error "AWS CLI verification failed"
    errors=$((errors + 1))
  fi
  # Check backup user
  if ! id "$BACKUP_USER" &>/dev/null; then
    log_error "Backup user verification failed"
    errors=$((errors + 1))
  fi
  # Check directories
  local dir
  for dir in scripts logs temp keys; do
    if [[ ! -d "$BACKUP_HOME/$dir" ]]; then
      log_error "Directory $BACKUP_HOME/$dir not found"
      errors=$((errors + 1))
    fi
  done
  # Check GPG keys
  if ! sudo -u "$BACKUP_USER" gpg --list-secret-keys backup@localhost &> /dev/null; then
    log_error "GPG key verification failed"
    errors=$((errors + 1))
  fi
  if [[ $errors -eq 0 ]]; then
    log_info "All verification checks passed"
    return 0
  else
    log_error "$errors verification checks failed"
    return 1
  fi
}
# Main installation function — runs every setup step in order, then verifies
# and prints follow-up instructions.
main() {
  log_info "Starting S3-compatible backup storage setup"
  local step
  for step in check_prerequisites detect_distro update_system \
    install_minio_client install_aws_cli create_backup_user \
    generate_gpg_keys create_backup_scripts; do
    "$step"
  done
  if ! verify_installation; then
    log_error "Installation completed with errors. Please review the output above."
    exit 1
  fi
  log_info "Installation completed successfully!"
  echo
  echo "Next steps:"
  echo "1. Configure MinIO client: sudo -u $BACKUP_USER mc alias set myminio https://your-s3-endpoint ACCESS_KEY SECRET_KEY"
  echo "2. Configure AWS CLI: sudo -u $BACKUP_USER aws configure"
  echo "3. Customize backup scripts in $BACKUP_HOME/scripts/"
  echo "4. Set up cron jobs for automated backups"
}
main "$@"
Review the script before running. Execute with: bash install.sh