db_archive/process.sh
#!/bin/bash
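# Abort on unset variables and failed commands/pipelines so a partial
# dump is never archived and uploaded
set -euo pipefail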
# Configuration
DB_USER=""
DB_PASSWORD=""
DB_HOST=""
DB_PORT=3306
WEB_SERVER="https://"
UPLOAD_KEY="your_secure_upload_key"
DB_IDENTIFIERS=("Classic" "Kunark" "Velious" "Luclin" "Planes")
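# (each identifier maps to a MySQL database named "content-<identifier>", lowercased)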
BACKUP_DIR="./db_backup"
UPLOAD_URL="${WEB_SERVER}/upload_handler.php"
SCHEMA_URL="https://raw.githubusercontent.com/EQEmu/Server/master/common/database_schema.h"
SCHEMA_FILE="./database_schema.h"
# Create the backup directory if it doesn't exist
mkdir -p "${BACKUP_DIR}"
# Download the latest database_schema.h file (abort if the download fails)
curl -fsSL -o "${SCHEMA_FILE}" "${SCHEMA_URL}" || { echo "Failed to download ${SCHEMA_URL}" >&2; exit 1; }
# Extract tables from the schema file
extract_tables() {
    local section="$1"
    sed -n "/Get${section}Tables()/,/}/p" "${SCHEMA_FILE}" | grep -Eo '"[a-zA-Z0-9_]+" ?,' | tr -d '",' | sort -u
}
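# (In database_schema.h, Get<Section>Tables() returns a brace-initialized list
# of quoted table names; the sed range grabs that function body and grep pulls
# out each quoted name, e.g. "doors", "items", ...)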
# Dump every table in a list from one database into a single SQL file
dump_tables() {
    local db_identifier="$1"
    local tables="$2"
    local group_name="$3"
    local lower_db_identifier="${db_identifier,,}"
    local dump_file="${BACKUP_DIR}/${group_name}-${lower_db_identifier}_$(date +%F).sql"
    : > "${dump_file}"  # truncate first so reruns on the same day don't append duplicates
    for table in ${tables}; do  # intentional word splitting: one table name per word
        mysqldump -u"${DB_USER}" -p"${DB_PASSWORD}" -h"${DB_HOST}" --port="${DB_PORT}" "content-${lower_db_identifier}" "${table}" >> "${dump_file}"
    done
}
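# Example: dump_tables "Classic" "${CONTENT_TABLES}" "content" writes every
# content table from the content-classic database into
# ./db_backup/content-classic_<YYYY-MM-DD>.sql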
# Read the schema version stored in a database's db_version table
get_db_version() {
    local db_identifier="$1"
    local lower_db_identifier="${db_identifier,,}"
    local version_query="SELECT version FROM db_version LIMIT 1;"
    mysql -u"${DB_USER}" -p"${DB_PASSWORD}" -h"${DB_HOST}" --port="${DB_PORT}" -D "content-${lower_db_identifier}" -se "${version_query}"
}
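# Assumes each content-* database carries a db_version table with a single
# version row; the value is attached to the upload below.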
# Build the table lists for each dump group
CONTENT_TABLES=$(extract_tables "Content")
VERSION_TABLES=$(extract_tables "Version")
# Dump each group of tables from each database
for db_identifier in "${DB_IDENTIFIERS[@]}"; do
    timestamp=$(date +%Y%m%d%H%M)
    dump_tables "${db_identifier}" "${CONTENT_TABLES}" "content"
    dump_tables "${db_identifier}" "${VERSION_TABLES}" "version"
    # Get the database version
    db_version=$(get_db_version "${db_identifier}")
    # Compress the dumps
    TAR_FILE="${BACKUP_DIR}/pa-${db_identifier,,}-${timestamp}.tar.gz"
    tar -czf "${TAR_FILE}" -C "${BACKUP_DIR}" "content-${db_identifier,,}_$(date +%F).sql" "version-${db_identifier,,}_$(date +%F).sql"
    # Upload the compressed archive to the web server (-f makes an HTTP error abort the run)
    curl -fsS -F "file=@${TAR_FILE}" -F "db_identifier=${db_identifier}" -F "db_version=${db_version}" -F "key=${UPLOAD_KEY}" "${UPLOAD_URL}"
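    # upload_handler.php is assumed to accept these multipart fields
    # (file, db_identifier, db_version, key) and to validate the key server-side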
    # Clean up the dump files after a successful upload
    rm -f "${BACKUP_DIR}"/*.sql
    rm -f "${TAR_FILE}"
done
# Clean up old backups (optional)
find "${BACKUP_DIR}" -type f -name "*.sql" -mtime +7 -delete
find "${BACKUP_DIR}" -type f -name "*.tar.gz" -mtime +7 -delete
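# Typical usage (example path/schedule): run daily from cron, e.g.
#   0 3 * * * /path/to/db_archive/process.sh
# (note: a literal % in a crontab line must be escaped as \%, so keep the
# date formats inside the script rather than on the cron command line)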