#!/bin/bash
# Off-site Backup Storage System
# Enterprise-grade off-site backup with cloud integration and automated sync

# Import error handling library (provides log_*, register_cleanup/rollback,
# create_checkpoint, validate_prerequisites — assumed; defined in lib/).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/error_handling.sh"

# Configuration — filesystem layout used throughout this script.
readonly OFFSITE_CONFIG_DIR="/opt/migration/configs/offsite"
readonly RCLONE_CONFIG_DIR="/root/.config/rclone"
readonly BACKUP_SYNC_DIR="/opt/migration/offsite_sync"
readonly OFFSITE_LOG_DIR="/var/log/offsite_backup"

# Cloud providers configuration: name -> comma-separated key:value settings.
# NOTE(review): the aws_s3 bucket name embeds the year at script start time.
declare -A CLOUD_PROVIDERS=(
    ["aws_s3"]="enabled:true,bucket:homelab-backups-$(date +%Y),region:us-east-1,storage_class:GLACIER_IR"
    ["google_drive"]="enabled:true,folder:HomeLabBackups,service_account:true"
    ["backblaze_b2"]="enabled:true,bucket:homelab-backups,application_key_id:from_env"
    ["rsync_net"]="enabled:false,server:rsync.net,path:/backup/homelab"
)

# Backup sync policies: tier -> comma-separated key:value settings.
# The authoritative copy for the orchestrator is sync_policies.yml
# written by create_sync_policies.
declare -A SYNC_POLICIES=(
    ["critical"]="frequency:daily,retention:365d,encryption:required,compression:high"
    ["important"]="frequency:weekly,retention:90d,encryption:required,compression:medium"
    ["standard"]="frequency:monthly,retention:30d,encryption:optional,compression:low"
)
# Cleanup function — removes stale temp and lock files left behind by runs.
cleanup_offsite_backup() {
    log_info "Cleaning up off-site backup temporary files..."

    # Drop sync temp files older than two hours; missing files are fine.
    local stale_pattern
    for stale_pattern in "rclone_*.tmp" "offsite_*.tmp"; do
        find /tmp -name "$stale_pattern" -mmin +120 -delete 2>/dev/null || true
    done

    # Drop any leftover lock files.
    rm -f /tmp/offsite_backup_*.lock 2>/dev/null || true

    log_info "Off-site backup cleanup completed"
}
|
# Rollback function — stops in-flight syncs, then runs the normal cleanup.
rollback_offsite_backup() {
    log_info "Rolling back off-site backup configuration..."

    # Terminate any running sync processes; absence of matches is not an error.
    local proc_pattern
    for proc_pattern in "rclone.*sync" "offsite_backup"; do
        pkill -f "$proc_pattern" 2>/dev/null || true
    done

    cleanup_offsite_backup
    log_info "Off-site backup rollback completed"
}
|
# Function to setup off-site backup infrastructure: directory tree, tools,
# provider configs, and sync policies.
setup_offsite_infrastructure() {
    log_step "Setting up off-site backup infrastructure..."

    # Working tree: configs, rclone state, sync staging areas, logs.
    local required_dirs=(
        "$OFFSITE_CONFIG_DIR"
        "$RCLONE_CONFIG_DIR"
        "$BACKUP_SYNC_DIR"
        "$OFFSITE_LOG_DIR"
        "$BACKUP_SYNC_DIR/pending"
        "$BACKUP_SYNC_DIR/synced"
        "$BACKUP_SYNC_DIR/failed"
    )

    local d
    for d in "${required_dirs[@]}"; do
        mkdir -p "$d"
        chmod 750 "$d"   # owner/group only
    done

    install_backup_tools    # rclone + helper utilities
    setup_cloud_providers   # per-provider setup scripts and rclone template
    create_sync_policies    # tiered policy definitions (sync_policies.yml)

    log_success "Off-site backup infrastructure setup completed"
}
|
# Function to install backup tools.
# Installs rclone via its official installer, then a set of auxiliary
# utilities (encryption, backup engines, cloud CLIs), each only when its
# binary is not already on PATH.
# Returns: 1 if rclone cannot be installed; individual tool failures only warn.
install_backup_tools() {
    log_step "Installing off-site backup tools..."

    # Install rclone if not present.
    # -fsSL: fail on HTTP errors instead of piping an error page into bash.
    if ! command -v rclone >/dev/null 2>&1; then
        log_info "Installing rclone..."
        curl -fsSL https://rclone.org/install.sh | bash

        if command -v rclone >/dev/null 2>&1; then
            log_success "rclone installed successfully"
        else
            log_error "Failed to install rclone"
            return 1
        fi
    fi

    # Install additional backup utilities.
    local tools=("age" "restic" "duplicity" "gpg" "curl" "aws-cli" "google-cloud-sdk-gke-gcloud-auth-plugin")

    local tool probe
    for tool in "${tools[@]}"; do
        # Map the package name to the binary actually probed for; the old
        # "${tool%%-*}" heuristic checked the wrong binary for multi-dash
        # package names (e.g. it probed "google" and "aws").
        case "$tool" in
            "aws-cli") probe="aws" ;;
            "google-cloud-sdk-gke-gcloud-auth-plugin") probe="gke-gcloud-auth-plugin" ;;
            *) probe="$tool" ;;
        esac

        if ! command -v "$probe" >/dev/null 2>&1; then
            log_info "Installing $tool..."
            case "$tool" in
                "age")
                    # age encryption tool: static binary from GitHub releases.
                    curl -fsSL https://github.com/FiloSottile/age/releases/latest/download/age-linux-amd64.tar.gz | tar xz -C /tmp
                    sudo mv /tmp/age/age* /usr/local/bin/
                    ;;
                "restic")
                    # restic backup tool: resolve the latest tag, then fetch
                    # that exact version (the old code mixed "latest" URLs
                    # with a resolved version string).
                    local restic_version
                    restic_version=$(curl -fsSL https://api.github.com/repos/restic/restic/releases/latest | grep '"tag_name"' | cut -d'"' -f4)
                    curl -fsSL "https://github.com/restic/restic/releases/download/${restic_version}/restic_${restic_version#v}_linux_amd64.bz2" | bunzip2 > /tmp/restic
                    chmod +x /tmp/restic && sudo mv /tmp/restic /usr/local/bin/
                    ;;
                "aws-cli")
                    # AWS CLI v2 official bundle installer.
                    curl -fsSL "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "/tmp/awscliv2.zip"
                    unzip -q /tmp/awscliv2.zip -d /tmp && sudo /tmp/aws/install
                    ;;
                *)
                    # Everything else comes from the distro repositories;
                    # failure is non-fatal by design (best-effort install).
                    apt-get update && apt-get install -y "$tool" 2>/dev/null || log_warn "Could not install $tool"
                    ;;
            esac
        fi
    done

    log_success "Backup tools installation completed"
}
|
# Function to setup cloud provider configurations: writes an rclone remote
# template, then generates one setup script per provider.
setup_cloud_providers() {
    log_step "Setting up cloud provider configurations..."

    # Quoted delimiter: template is written verbatim, placeholders intact.
    cat > "$OFFSITE_CONFIG_DIR/rclone_template.conf" << 'EOF'
# Rclone Configuration Template for Off-site Backups
# Customize with actual credentials

[aws-s3-glacier]
type = s3
provider = AWS
access_key_id = YOUR_AWS_ACCESS_KEY
secret_access_key = YOUR_AWS_SECRET_KEY
region = us-east-1
storage_class = GLACIER_IR
server_side_encryption = AES256

[google-drive-backup]
type = drive
client_id = YOUR_GOOGLE_CLIENT_ID
client_secret = YOUR_GOOGLE_CLIENT_SECRET
token = YOUR_GOOGLE_TOKEN
root_folder_id = YOUR_BACKUP_FOLDER_ID

[backblaze-b2]
type = b2
account = YOUR_B2_ACCOUNT_ID
key = YOUR_B2_APPLICATION_KEY
hard_delete = true

[rsync-net]
type = sftp
host = rsync.net
user = YOUR_RSYNC_USERNAME
key_file = ~/.ssh/rsync_net_key
use_insecure_cipher = false
disable_hashcheck = false

[local-encrypted]
type = crypt
remote = /opt/migration/backups
filename_encryption = standard
directory_name_encryption = true
password = YOUR_ENCRYPTION_PASSWORD
password2 = YOUR_SALT_PASSWORD
EOF

    # Generate each provider's setup script in turn.
    local generator
    for generator in \
        create_aws_s3_config \
        create_google_drive_config \
        create_backblaze_config \
        create_encrypted_storage_config; do
        "$generator"
    done

    log_success "Cloud provider configurations created"
}
|
# Function to create AWS S3 configuration.
# Writes a helper script that provisions the backup bucket (default
# encryption, lifecycle transitions, versioning) and registers an rclone
# remote when credentials are present in the environment.
create_aws_s3_config() {
    log_info "Creating AWS S3 Glacier configuration..."

    # Quoted delimiter: the generated script expands its variables at its
    # own run time, not now.
    cat > "$OFFSITE_CONFIG_DIR/aws_s3_setup.sh" << 'EOF'
#!/bin/bash
# AWS S3 Glacier Setup Script

# Set AWS credentials (use environment variables or AWS CLI configure)
export AWS_ACCESS_KEY_ID="${AWS_ACCESS_KEY_ID:-}"
export AWS_SECRET_ACCESS_KEY="${AWS_SECRET_ACCESS_KEY:-}"
export AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"

# Create S3 bucket with proper configuration
BUCKET_NAME="homelab-backups-$(date +%Y)"

# Check if bucket exists
if ! aws s3 ls "s3://$BUCKET_NAME" 2>/dev/null; then
    echo "Creating S3 bucket: $BUCKET_NAME"
    aws s3 mb "s3://$BUCKET_NAME" --region "$AWS_DEFAULT_REGION"

    # Configure bucket for backup use
    aws s3api put-bucket-encryption \
        --bucket "$BUCKET_NAME" \
        --server-side-encryption-configuration '{
            "Rules": [{
                "ApplyServerSideEncryptionByDefault": {
                    "SSEAlgorithm": "AES256"
                }
            }]
        }'

    # Configure lifecycle policy for cost optimization.
    # Expiration is 2555 days (~7 years). JSON cannot carry inline
    # comments, so the note lives here rather than inside the literal.
    aws s3api put-bucket-lifecycle-configuration \
        --bucket "$BUCKET_NAME" \
        --lifecycle-configuration '{
            "Rules": [{
                "ID": "BackupRetentionPolicy",
                "Status": "Enabled",
                "Filter": {"Prefix": "homelab/"},
                "Transitions": [
                    {
                        "Days": 30,
                        "StorageClass": "STANDARD_IA"
                    },
                    {
                        "Days": 90,
                        "StorageClass": "GLACIER"
                    },
                    {
                        "Days": 365,
                        "StorageClass": "DEEP_ARCHIVE"
                    }
                ],
                "Expiration": {
                    "Days": 2555
                }
            }]
        }'

    # Enable versioning for backup protection
    aws s3api put-bucket-versioning \
        --bucket "$BUCKET_NAME" \
        --versioning-configuration Status=Enabled

    echo "S3 bucket $BUCKET_NAME configured successfully"
else
    echo "S3 bucket $BUCKET_NAME already exists"
fi

# Configure rclone for S3
if [[ -n "$AWS_ACCESS_KEY_ID" ]] && [[ -n "$AWS_SECRET_ACCESS_KEY" ]]; then
    rclone config create aws-s3-glacier s3 \
        provider=AWS \
        access_key_id="$AWS_ACCESS_KEY_ID" \
        secret_access_key="$AWS_SECRET_ACCESS_KEY" \
        region="$AWS_DEFAULT_REGION" \
        storage_class=GLACIER_IR \
        server_side_encryption=AES256

    echo "Rclone AWS S3 configuration completed"
else
    echo "AWS credentials not set - manual configuration required"
fi
EOF

    chmod +x "$OFFSITE_CONFIG_DIR/aws_s3_setup.sh"
    log_success "AWS S3 configuration script created"
}
|
# Function to create Google Drive configuration.
# Writes a helper script that configures an rclone remote from a Google
# service-account key and pre-creates the backup folder hierarchy.
create_google_drive_config() {
    log_info "Creating Google Drive configuration..."

    cat > "$OFFSITE_CONFIG_DIR/google_drive_setup.sh" << 'EOF'
#!/bin/bash
# Google Drive Setup Script

echo "Setting up Google Drive for off-site backups..."

# Create service account key directory
mkdir -p ~/.config/gcloud/

# Note: Service account JSON key should be placed at:
# ($HOME, not a quoted "~" — tilde does not expand inside quotes, which
# previously made the -f test below always fail.)
SERVICE_ACCOUNT_KEY="$HOME/.config/gcloud/service-account-key.json"

if [[ -f "$SERVICE_ACCOUNT_KEY" ]]; then
    echo "Configuring rclone for Google Drive with service account..."

    rclone config create google-drive-backup drive \
        service_account_file="$SERVICE_ACCOUNT_KEY" \
        team_drive="" \
        root_folder_id=""

    # Test connection
    if rclone lsd google-drive-backup: >/dev/null 2>&1; then
        echo "Google Drive configuration successful"

        # Create backup folder structure
        rclone mkdir google-drive-backup:HomeLabBackups/daily
        rclone mkdir google-drive-backup:HomeLabBackups/weekly
        rclone mkdir google-drive-backup:HomeLabBackups/monthly
        rclone mkdir google-drive-backup:HomeLabBackups/critical

        echo "Google Drive backup folders created"
    else
        echo "Google Drive configuration failed - check service account key"
    fi
else
    echo "Google Drive service account key not found at $SERVICE_ACCOUNT_KEY"
    echo "Please obtain a service account key from Google Cloud Console"
    echo "and place it at $SERVICE_ACCOUNT_KEY"
fi
EOF

    chmod +x "$OFFSITE_CONFIG_DIR/google_drive_setup.sh"
    log_success "Google Drive configuration script created"
}
|
# Function to create Backblaze B2 configuration.
# Writes a helper script that wires rclone up to B2 from env credentials
# and pre-creates the bucket/folder layout.
create_backblaze_config() {
    log_info "Creating Backblaze B2 configuration..."

    local setup_script="$OFFSITE_CONFIG_DIR/backblaze_setup.sh"

    cat > "$setup_script" << 'EOF'
#!/bin/bash
# Backblaze B2 Setup Script

echo "Setting up Backblaze B2 for off-site backups..."

# B2 credentials should be set as environment variables
B2_ACCOUNT_ID="${B2_ACCOUNT_ID:-}"
B2_APPLICATION_KEY="${B2_APPLICATION_KEY:-}"
BUCKET_NAME="homelab-backups-$(date +%Y)"

if [[ -n "$B2_ACCOUNT_ID" ]] && [[ -n "$B2_APPLICATION_KEY" ]]; then
    echo "Configuring rclone for Backblaze B2..."

    rclone config create backblaze-b2 b2 \
        account="$B2_ACCOUNT_ID" \
        key="$B2_APPLICATION_KEY" \
        hard_delete=true

    # Test connection and create bucket
    if rclone lsd backblaze-b2: >/dev/null 2>&1; then
        echo "Backblaze B2 configuration successful"

        # Create bucket if it doesn't exist
        if ! rclone lsd "backblaze-b2:" | grep -q "$BUCKET_NAME"; then
            rclone mkdir "backblaze-b2:$BUCKET_NAME"
            echo "Created bucket: $BUCKET_NAME"
        fi

        # Create folder structure
        rclone mkdir "backblaze-b2:$BUCKET_NAME/daily"
        rclone mkdir "backblaze-b2:$BUCKET_NAME/weekly"
        rclone mkdir "backblaze-b2:$BUCKET_NAME/monthly"
        rclone mkdir "backblaze-b2:$BUCKET_NAME/critical"

        echo "Backblaze B2 backup folders created"
    else
        echo "Backblaze B2 configuration failed - check credentials"
    fi
else
    echo "Backblaze B2 credentials not set"
    echo "Please set B2_ACCOUNT_ID and B2_APPLICATION_KEY environment variables"
fi
EOF

    chmod +x "$setup_script"
    log_success "Backblaze B2 configuration script created"
}
|
# Function to create encrypted storage configuration.
# Generates age/openssl key material (once) and writes the encrypted-backup
# wrapper script used by the sync orchestrator.
create_encrypted_storage_config() {
    log_info "Creating encrypted storage configuration..."

    # Key material lives under a root-only secrets directory.
    local encryption_dir="/opt/migration/secrets/offsite"
    mkdir -p "$encryption_dir"
    chmod 700 "$encryption_dir"

    # Generate keys only on first run; never overwrite existing material.
    if [[ ! -f "$encryption_dir/offsite_encryption_key" ]]; then
        # Primary age identity (file contains the public-key comment line
        # that the wrapper script below parses).
        age-keygen > "$encryption_dir/offsite_encryption_key"
        chmod 600 "$encryption_dir/offsite_encryption_key"

        # Random passwords for secondary encryption layers.
        local tier
        for tier in primary secondary; do
            openssl rand -base64 32 > "$encryption_dir/backup_password_$tier"
        done
        chmod 600 "$encryption_dir"/backup_password_*

        log_success "Encryption keys generated"
    fi

    local wrapper="$OFFSITE_CONFIG_DIR/encrypted_backup.sh"

    # Quoted delimiter: wrapper expands its variables at its own run time.
    cat > "$wrapper" << 'EOF'
#!/bin/bash
# Encrypted Backup Wrapper

set -euo pipefail

ENCRYPTION_KEY="/opt/migration/secrets/offsite/offsite_encryption_key"
BACKUP_SOURCE="${1:-/opt/migration/backups}"
BACKUP_DESTINATION="${2:-/opt/migration/encrypted_backups}"
BACKUP_NAME="${3:-backup_$(date +%Y%m%d_%H%M%S)}"

if [[ ! -f "$ENCRYPTION_KEY" ]]; then
    echo "Error: Encryption key not found: $ENCRYPTION_KEY"
    exit 1
fi

echo "Creating encrypted backup: $BACKUP_NAME"

# Create encrypted archive
mkdir -p "$BACKUP_DESTINATION"

# Use age for encryption with compression
tar -czf - -C "$BACKUP_SOURCE" . | \
    age -r "$(cat "$ENCRYPTION_KEY" | grep public | cut -d' ' -f4)" \
    > "$BACKUP_DESTINATION/${BACKUP_NAME}.tar.gz.age"

# Verify the encrypted file
if age -d -i "$ENCRYPTION_KEY" "$BACKUP_DESTINATION/${BACKUP_NAME}.tar.gz.age" | tar -tzf - >/dev/null 2>&1; then
    echo "Encrypted backup verified successfully"

    # Generate checksum
    sha256sum "$BACKUP_DESTINATION/${BACKUP_NAME}.tar.gz.age" > "$BACKUP_DESTINATION/${BACKUP_NAME}.sha256"

    echo "Backup created: $BACKUP_DESTINATION/${BACKUP_NAME}.tar.gz.age"
    echo "Size: $(du -h "$BACKUP_DESTINATION/${BACKUP_NAME}.tar.gz.age" | cut -f1)"
else
    echo "Error: Encrypted backup verification failed"
    rm -f "$BACKUP_DESTINATION/${BACKUP_NAME}.tar.gz.age"
    exit 1
fi
EOF

    chmod +x "$wrapper"
    log_success "Encrypted storage configuration created"
}
|
# Function to create sync policies.
# Writes the YAML policy file the orchestrator reads with yq: per-tier
# frequency/retention/encryption, destinations, categories, and schedules.
create_sync_policies() {
    log_step "Creating backup sync policies..."

    local policies_file="$OFFSITE_CONFIG_DIR/sync_policies.yml"

    cat > "$policies_file" << 'EOF'
# Off-site Backup Sync Policies
# Defines how different types of backups are synced to off-site storage

version: "1.0"

policies:
  critical:
    description: "Critical system backups - highest priority"
    frequency: "daily"
    retention: "365d"
    encryption: "required"
    compression: "high"
    verification: "mandatory"
    destinations:
      primary: "aws-s3-glacier"
      secondary: "backblaze-b2"
      tertiary: "google-drive-backup"
    notification:
      on_success: false
      on_failure: true
      on_delay: true

  important:
    description: "Important application data"
    frequency: "weekly"
    retention: "90d"
    encryption: "required"
    compression: "medium"
    verification: "recommended"
    destinations:
      primary: "backblaze-b2"
      secondary: "aws-s3-glacier"
    notification:
      on_success: false
      on_failure: true
      on_delay: false

  standard:
    description: "Standard backups and archives"
    frequency: "monthly"
    retention: "30d"
    encryption: "optional"
    compression: "low"
    verification: "basic"
    destinations:
      primary: "google-drive-backup"
    notification:
      on_success: false
      on_failure: true
      on_delay: false

backup_categories:
  critical:
    - "postgres_dumps"
    - "docker_configs"
    - "ssl_certificates"
    - "secrets_backup"
    - "system_configurations"

  important:
    - "application_data"
    - "user_uploads"
    - "media_metadata"
    - "home_automation_configs"
    - "monitoring_data"

  standard:
    - "log_archives"
    - "temporary_backups"
    - "documentation"
    - "development_data"

sync_schedule:
  critical: "0 2 * * *" # Daily at 2 AM
  important: "0 3 * * 0" # Weekly on Sunday at 3 AM
  standard: "0 4 1 * *" # Monthly on 1st at 4 AM

monitoring:
  enabled: true
  metrics_endpoint: "http://localhost:9999/offsite-metrics"
  alert_thresholds:
    sync_delay_hours: 25
    failure_count: 3
    storage_usage_percent: 85
EOF

    log_success "Sync policies configuration created"
}
|
# Function to setup automated sync.
# Writes the sync orchestrator script and installs the systemd units that
# schedule it. Fixes over the previous version: the generated script
# contained literal "$(unknown)" command substitutions (a nonexistent
# command) in the encryption path, and the temp filename it expected did
# not match what encrypted_backup.sh actually writes (<name>.tar.gz.age).
setup_automated_sync() {
    log_step "Setting up automated off-site backup sync..."

    # Ensure the destination directory exists before writing the script.
    mkdir -p /opt/migration/scripts

    # Create main sync orchestrator (quoted delimiter: everything below is
    # written verbatim and expanded when the orchestrator itself runs).
    cat > "/opt/migration/scripts/offsite_sync_orchestrator.sh" << 'EOF'
#!/bin/bash
# Off-site Backup Sync Orchestrator

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/error_handling.sh"

OFFSITE_CONFIG_DIR="/opt/migration/configs/offsite"
SYNC_POLICIES="$OFFSITE_CONFIG_DIR/sync_policies.yml"
LOCAL_BACKUP_DIR="/opt/migration/backups"
SYNC_LOG_DIR="/var/log/offsite_backup"

# Create sync session log; FD 6 is dedicated to it for the whole run.
SYNC_SESSION_ID="sync_$(date +%Y%m%d_%H%M%S)_$$"
SYNC_LOG="$SYNC_LOG_DIR/${SYNC_SESSION_ID}.log"
mkdir -p "$SYNC_LOG_DIR"

exec 6> "$SYNC_LOG"
log_info "Starting off-site sync session: $SYNC_SESSION_ID" >&6

# Function to sync category based on policy: finds recent backup files for
# the category and pushes them to every configured destination.
sync_category() {
    local category=$1
    local policy_config
    policy_config=$(yq eval ".policies.$category" "$SYNC_POLICIES")

    if [[ "$policy_config" == "null" ]]; then
        log_error "Policy not found for category: $category" >&6
        return 1
    fi

    local destinations encryption_required compression_level
    destinations=$(yq eval ".policies.$category.destinations | keys" "$SYNC_POLICIES" | grep -v "^#")
    encryption_required=$(yq eval ".policies.$category.encryption" "$SYNC_POLICIES")
    compression_level=$(yq eval ".policies.$category.compression" "$SYNC_POLICIES")

    log_info "Syncing category: $category" >&6
    log_info "Destinations: $destinations" >&6

    # Get backup files for this category (modified within the last day).
    local backup_files=()
    local category_items
    category_items=$(yq eval ".backup_categories.$category[]" "$SYNC_POLICIES")

    local item match
    while IFS= read -r item; do
        [[ -n "$item" ]] || continue
        # Append one path per line so filenames with spaces stay intact
        # (the old unquoted array append word-split them).
        while IFS= read -r match; do
            [[ -n "$match" ]] && backup_files+=("$match")
        done < <(find "$LOCAL_BACKUP_DIR" -name "*$item*" -type f -mtime -1)
    done <<< "$category_items"

    if [[ ${#backup_files[@]} -eq 0 ]]; then
        log_warn "No backup files found for category: $category" >&6
        return 0
    fi

    log_info "Found ${#backup_files[@]} backup files for $category" >&6

    # Sync to each destination in priority order.
    local dest_priority destination dest_name backup_file
    while IFS= read -r dest_priority; do
        destination=$(echo "$dest_priority" | cut -d: -f1 | xargs)

        if [[ "$destination" != "primary" ]] && [[ "$destination" != "secondary" ]] && [[ "$destination" != "tertiary" ]]; then
            continue
        fi

        dest_name=$(yq eval ".policies.$category.destinations.$destination" "$SYNC_POLICIES")

        log_info "Syncing to $destination ($dest_name)" >&6

        for backup_file in "${backup_files[@]}"; do
            sync_file_to_destination "$backup_file" "$dest_name" "$category" "$encryption_required"
        done
    done <<< "$destinations"

    log_success "Category $category sync completed" >&6
}

# Function to sync individual file to destination: optionally encrypts,
# copies with retries, verifies, and cleans up the temporary ciphertext.
sync_file_to_destination() {
    local file_path=$1
    local destination=$2
    local category=$3
    local encryption_required=$4

    local filename
    filename=$(basename "$file_path")
    local dest_path="$category/$(date +%Y/%m)"

    log_info "Syncing: $filename -> $destination:$dest_path" >&6

    # Encrypt file if required.
    local sync_file="$file_path"
    if [[ "$encryption_required" == "required" ]]; then
        # encrypted_backup.sh <src_dir> <dst_dir> <name> writes
        # <dst_dir>/<name>.tar.gz.age — the temp path must match that.
        local encrypted_file="/tmp/${filename}.tar.gz.age"

        if "$OFFSITE_CONFIG_DIR/encrypted_backup.sh" "$(dirname "$file_path")" "/tmp" "$filename"; then
            sync_file="$encrypted_file"
            log_info "File encrypted for sync: $filename" >&6
        else
            log_error "Failed to encrypt file: $filename" >&6
            return 1
        fi
    fi

    # Perform sync with retry logic.
    local sync_attempts=3
    local sync_success=false

    local attempt
    for ((attempt=1; attempt<=sync_attempts; attempt++)); do
        log_info "Sync attempt $attempt/$sync_attempts for $filename" >&6

        if rclone copy "$sync_file" "$destination:$dest_path" --progress --stats-one-line 2>&6; then
            sync_success=true
            break
        else
            log_warn "Sync attempt $attempt failed for $filename" >&6
            sleep $((attempt * 10)) # Linear backoff: 10s, 20s, 30s
        fi
    done

    # Cleanup encrypted temporary file.
    if [[ "$sync_file" != "$file_path" ]]; then
        rm -f "$sync_file"
    fi

    if [[ "$sync_success" == true ]]; then
        log_success "Successfully synced: $filename" >&6

        # Verify sync if required.
        verify_sync "$destination" "$dest_path/$filename" "$file_path"
    else
        log_error "Failed to sync after $sync_attempts attempts: $filename" >&6
        return 1
    fi
}

# Function to verify sync: cheap integrity check comparing byte counts.
verify_sync() {
    local destination=$1
    local remote_path=$2
    local local_file=$3

    local remote_size local_size
    remote_size=$(rclone size "$destination:$remote_path" --json 2>/dev/null | jq -r '.bytes // 0')
    local_size=$(stat -c%s "$local_file" 2>/dev/null || echo "0")

    if [[ "$remote_size" == "$local_size" ]] && [[ "$remote_size" != "0" ]]; then
        log_info "Sync verification passed: $remote_path" >&6
        return 0
    else
        log_error "Sync verification failed: $remote_path (remote: $remote_size, local: $local_size)" >&6
        return 1
    fi
}

# Main sync execution.
main() {
    local target=${1:-"all"}

    log_info "Off-site backup sync started for: $target" >&6

    case "$target" in
        "critical"|"important"|"standard")
            sync_category "$target"
            ;;
        "all")
            sync_category "critical"
            sync_category "important"
            sync_category "standard"
            ;;
        *)
            log_error "Unknown sync category: $target" >&6
            exit 1
            ;;
    esac

    log_success "Off-site backup sync completed: $target" >&6
    exec 6>&-
}

# Execute main function
main "$@"
EOF

    chmod +x "/opt/migration/scripts/offsite_sync_orchestrator.sh"

    # Create systemd services for automated sync.
    create_sync_systemd_services

    log_success "Automated sync setup completed"
}
|
# Function to create systemd services for sync scheduling.
# Installs oneshot service + timer pairs for the critical (daily 02:00)
# and important (weekly Sun 03:00) tiers, matching sync_policies.yml.
create_sync_systemd_services() {
    log_info "Creating systemd services for sync scheduling..."

    # Critical backup sync service
    cat > "/tmp/offsite-sync-critical.service" << 'EOF'
[Unit]
Description=Off-site Critical Backup Sync
After=network-online.target
Wants=network-online.target

[Service]
Type=oneshot
ExecStart=/opt/migration/scripts/offsite_sync_orchestrator.sh critical
User=root
StandardOutput=journal
StandardError=journal
EOF

    cat > "/tmp/offsite-sync-critical.timer" << 'EOF'
[Unit]
Description=Run critical backup sync daily
Requires=offsite-sync-critical.service

[Timer]
# 02:00 daily, matching the "0 2 * * *" schedule in sync_policies.yml
# (plain "daily" would fire at midnight instead).
OnCalendar=*-*-* 02:00:00
RandomizedDelaySec=1800
Persistent=true

[Install]
WantedBy=timers.target
EOF

    # Important backup sync service
    cat > "/tmp/offsite-sync-important.service" << 'EOF'
[Unit]
Description=Off-site Important Backup Sync
After=network-online.target
Wants=network-online.target

[Service]
Type=oneshot
ExecStart=/opt/migration/scripts/offsite_sync_orchestrator.sh important
User=root
StandardOutput=journal
StandardError=journal
EOF

    cat > "/tmp/offsite-sync-important.timer" << 'EOF'
[Unit]
Description=Run important backup sync weekly
Requires=offsite-sync-important.service

[Timer]
OnCalendar=Sun 03:00
RandomizedDelaySec=1800
Persistent=true

[Install]
WantedBy=timers.target
EOF

    # Install systemd services
    sudo mv /tmp/offsite-sync-*.service /etc/systemd/system/
    sudo mv /tmp/offsite-sync-*.timer /etc/systemd/system/

    sudo systemctl daemon-reload
    sudo systemctl enable offsite-sync-critical.timer
    sudo systemctl enable offsite-sync-important.timer
    sudo systemctl start offsite-sync-critical.timer
    sudo systemctl start offsite-sync-important.timer

    log_success "Systemd services created and enabled"
}
|
# Main execution function — dispatches on the requested action
# (setup | sync | test | help).
main() {
    local action=${1:-setup}

    # Hook cleanup/rollback into the error-handling library.
    register_cleanup cleanup_offsite_backup
    register_rollback rollback_offsite_backup

    case "$action" in
        "setup")
            log_step "Setting up off-site backup storage system..."

            # Required binaries must exist before we do anything.
            validate_prerequisites curl tar age rclone

            # Build the infrastructure, checkpointing each phase.
            setup_offsite_infrastructure
            create_checkpoint "offsite_infrastructure_setup"

            setup_automated_sync
            create_checkpoint "automated_sync_setup"

            log_success "✅ Off-site backup storage system setup completed!"
            log_info "📁 Configuration: $OFFSITE_CONFIG_DIR"
            log_info "🔄 Sync orchestrator: /opt/migration/scripts/offsite_sync_orchestrator.sh"
            log_info "⚡ Manual sync: /opt/migration/scripts/offsite_sync_orchestrator.sh [critical|important|standard|all]"
            log_info "🗂️ Logs: $OFFSITE_LOG_DIR"

            echo ""
            log_info "Next steps:"
            echo "  1. Configure cloud provider credentials"
            echo "  2. Run setup scripts in $OFFSITE_CONFIG_DIR/"
            echo "  3. Test sync: /opt/migration/scripts/offsite_sync_orchestrator.sh critical"
            ;;

        "sync")
            # Delegate to the orchestrator with the requested tier.
            local sync_target=${2:-all}
            /opt/migration/scripts/offsite_sync_orchestrator.sh "$sync_target"
            ;;

        "test")
            log_info "Testing off-site backup connectivity..."
            rclone listremotes
            echo "Available remotes configured"
            ;;

        "help"|*)
            cat << EOF
Off-site Backup Storage System

Usage: $0 <action> [options]

Actions:
  setup - Setup off-site backup infrastructure
  sync - Run sync [critical|important|standard|all]
  test - Test connectivity to configured remotes
  help - Show this help

Examples:
  $0 setup
  $0 sync critical
  $0 test
EOF
            ;;
    esac
}
|
# Execute main function with all command-line arguments.
main "$@"