Initial commit after history deletion

lapatatedouce
2024-09-03 02:24:27 +02:00
commit 9f96c67463
6 changed files with 423 additions and 0 deletions


@@ -0,0 +1,29 @@
#!/bin/bash
# This script creates a tar.gz backup of the /opt/AdGuardHome directory, cron jobs, and system logs.
# The backup is stored in the './backups/' directory.
#
# To automate backups, you can add this script to your crontab.
# For example, to run this backup on the 1st of every month, you would add the following line to your crontab:
# 0 0 1 * * /path/to/this/script.sh
# (Remember to replace '/path/to/this/script.sh' with the actual path to this script)
# You can edit your crontab with the command: crontab -e
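# As an alternative (untested sketch; adjust the path), the entry can be appended non-interactively:
#   (crontab -l 2>/dev/null; echo "0 0 1 * * /path/to/this/script.sh") | crontab -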
# Get the current date in a suitable format for the filename
current_date=$(date +"%Y-%m-%d")
# Create the backup filename
backup_filename="adguardhome_backup_${current_date}.tar.gz"
# Default backup location
backup_location="./backups/"
# Create the 'adguardhome' directory if it doesn't exist
mkdir -p "$backup_location/adguardhome"
# Create the tar.gz archive
tar -czvf "$backup_location/adguardhome/$backup_filename" /opt/AdGuardHome /var/spool/cron/crontabs/* /var/log
# Print a success message
echo "Backup created successfully in $backup_location/adguardhome/$backup_filename"
# Delete backups older than 90 days
find "$backup_location/adguardhome" -mtime +90 -type f -delete


@@ -0,0 +1,140 @@
#!/bin/bash
# This script performs an incremental backup of the specified source directory,
# generates CRC32 checksums for the files in the source and backup directories,
# and manages the deletion of old backups. The CRC32 checksum files are stored
# in a dedicated subdirectory within the backup directory. You can also run the
# script with the 'verification' argument to check the integrity of the latest backup.
#
# NOTE: This script is not tested and is intended for debugging purposes. Use with caution.
# Configuration
SOURCE_DIR="/path/to/your/source_directory" # Replace with the path to your source directory
BACKUP_DIR="/path/to/your/backup_directory" # Replace with the path to your backup directory
LOG_FILE="/path/to/your/log_file" # Replace with the path to your log file
RETENTION_PERIOD=60 # Number of days to retain deleted/modified files (2 months)
EMAIL="your_email@example.com" # Replace with your email address
CRC_RETENTION_PERIOD=60 # Number of days to retain CRC checksum files
# Directory for CRC32 checksums within the backup directory
CRC_DIR="$BACKUP_DIR/.crc_checksums"
mkdir -p "$CRC_DIR" # Create the checksum directory if it does not exist
# Files for CRC32 checksums
SOURCE_CRC_FILE="$CRC_DIR/source_crc_checksums.txt"
BACKUP_CRC_FILE="$CRC_DIR/backup_crc_checksums.txt"
# Temporary files for CRC32 checksums of the current backup
CURRENT_BACKUP_CRC_FILE="$CRC_DIR/current_backup_crc_checksums.txt"
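# NOTE: this script relies on the 'crc32' utility (e.g. from the libarchive-zip-perl
# package on Debian/Ubuntu) and on a 'mail' command (e.g. from mailutils) being installed.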
# Function to send an email in case of error
send_error_email() {
    SUBJECT="Backup Error on $(hostname)"
    MESSAGE="An error occurred during the backup on $(hostname) at $(date +'%Y-%m-%d %H:%M:%S'). Please check the log file at $LOG_FILE for details."
    echo "$MESSAGE" | mail -s "$SUBJECT" "$EMAIL"
}
# Function to log actions to the log file and send an email in case of error
log_action() {
    echo "$(date +'%Y-%m-%d %H:%M:%S') - $1" >> "$LOG_FILE"
    if [[ "$1" == "ERROR"* ]]; then
        send_error_email
    fi
}
# Check if the source and backup directories exist
if [ ! -d "$SOURCE_DIR" ]; then
    log_action "ERROR: The source directory '$SOURCE_DIR' does not exist."
    exit 1
fi
if [ ! -d "$BACKUP_DIR" ]; then
    log_action "The backup directory '$BACKUP_DIR' does not exist. Creating directory."
    mkdir -p "$BACKUP_DIR"
    if [ $? -ne 0 ]; then
        log_action "ERROR: Unable to create the backup directory '$BACKUP_DIR'."
        exit 1
    fi
fi
# Function to generate CRC32 checksums for the source and backup directories
generate_crc_checksums() {
    # Generate "checksum  relative/path" lines for the source files, sorted by path so
    # that the source and backup lists can be compared with diff
    (cd "$SOURCE_DIR" && find . -type f -exec sh -c 'printf "%s  %s\n" "$(crc32 "$1")" "$1"' _ {} \; | sort -k2) > "$SOURCE_CRC_FILE"
    if [ $? -ne 0 ]; then
        log_action "ERROR: Unable to generate CRC32 checksums for the source directory."
        exit 1
    fi
    # Same for the backup directory, skipping the checksum and 'deleted/' subdirectories
    (cd "$BACKUP_DIR" && find . -type f ! -path "./.crc_checksums/*" ! -path "./deleted/*" -exec sh -c 'printf "%s  %s\n" "$(crc32 "$1")" "$1"' _ {} \; | sort -k2) > "$CURRENT_BACKUP_CRC_FILE"
    if [ $? -ne 0 ]; then
        log_action "ERROR: Unable to generate CRC32 checksums for the backup directory."
        exit 1
    fi
}
# Function to verify the integrity of the latest backup by comparing the checksum lists
verify_integrity() {
    if [ ! -f "$SOURCE_CRC_FILE" ] || [ ! -f "$CURRENT_BACKUP_CRC_FILE" ]; then
        log_action "ERROR: Checksum files do not exist. Run the script without arguments to generate them."
        exit 1
    fi
    # Compare the checksums and store differences
    diff "$SOURCE_CRC_FILE" "$CURRENT_BACKUP_CRC_FILE" > /tmp/diff_output.txt
    if [ $? -eq 0 ]; then
        echo "File integrity verification completed successfully."
        log_action "Backup integrity has been successfully verified."
    else
        log_action "ERROR: Backup integrity check failed. Differences found between source and backup checksums."
        # Print the differing lines (checksum and file path)
        while IFS= read -r line; do
            echo "$line"
        done < /tmp/diff_output.txt
        exit 1
    fi
}
# Function to clean up old CRC32 checksum files
cleanup_old_crc_files() {
    find "$CRC_DIR" -type f -mtime +$CRC_RETENTION_PERIOD -exec rm -f {} \;
    if [ $? -ne 0 ]; then
        log_action "ERROR: Unable to delete old CRC32 checksum files."
        exit 1
    fi
}
# If the 'verification' argument is provided, only perform the verification
if [ "$1" == "verification" ]; then
verify_integrity
exit 0
fi
# Incremental backup with error handling
# This section performs the incremental backup of the source directory to the backup directory
# Deleted files are moved to a dated subdirectory. The checksum and 'deleted/' directories
# are excluded so that --delete does not remove them from the backup destination.
rsync -av --delete --backup --backup-dir="$BACKUP_DIR/deleted/$(date +'%Y-%m-%d')" --exclude ".crc_checksums/" --exclude "deleted/" "$SOURCE_DIR/" "$BACKUP_DIR/"
if [ $? -ne 0 ]; then
    log_action "ERROR: The incremental backup failed."
    exit 1
fi
# Generate CRC32 checksums for the source and backup directories
generate_crc_checksums
# Verify the integrity of the latest backup
verify_integrity
# Delete old backups of deleted/modified files (dated subdirectories under 'deleted/')
if [ -d "$BACKUP_DIR/deleted" ]; then
    find "$BACKUP_DIR/deleted" -mindepth 1 -maxdepth 1 -type d -mtime +$RETENTION_PERIOD -exec rm -rf {} +
    if [ $? -ne 0 ]; then
        log_action "ERROR: Unable to delete old backups."
        exit 1
    fi
fi
# Clean up old CRC32 checksum files
cleanup_old_crc_files
log_action "Incremental backup completed successfully."


@@ -0,0 +1,32 @@
#!/bin/bash
# This script initiates restoration requests for objects listed in a specified file.
# It requires the bucket name, file name, and the number of days for the restoration.
# Example usage:
# ./restore-glacier-objects.sh my-bucket object-list.txt 3
# (Initiates restoration for objects listed in 'object-list.txt' in 'my-bucket' for 3 days)
# Check the number of arguments
if [[ $# -ne 3 ]]; then
    echo "Usage: $0 <bucket_name> <file_name> <number_of_days>"
    exit 1
fi
# Retrieve arguments
BUCKET="$1"
FILE="$2"
DAYS="$3"
# Check if the file exists
if [[ ! -f "$FILE" ]]; then
    echo "Error: The file '$FILE' does not exist."
    exit 1
fi
# Iterate through the file and initiate restoration requests
while IFS= read -r KEY; do
    aws s3api restore-object --restore-request "Days=$DAYS" --bucket "$BUCKET" --key "$KEY"
done < "$FILE"
echo "Restoration requests initiated for objects listed in '$FILE'."


@@ -0,0 +1,52 @@
#!/bin/bash
# This script lists objects stored in the Glacier storage class within an S3 bucket.
# It optionally filters the list based on a specified directory within the bucket.
# The output is saved to a text file with a sanitized name based on the bucket and directory.
# Example usage:
# ./list-glacier-objects.sh my-bucket my-directory
# (Lists objects in the 'my-directory' directory within the 'my-bucket' bucket)
# ./list-glacier-objects.sh my-bucket
# (Lists all objects in the 'my-bucket' bucket)
# Check the number of arguments
if [[ $# -lt 1 || $# -gt 2 ]]; then
    echo "Usage: $0 <bucket_name> [directory_name]"
    exit 1
fi
# Retrieve arguments
BUCKET="$1"
DIRECTORY="$2"
# Function to sanitize names for use in the output file name
sanitize_name() {
    echo "$1" | tr -dc '[:alnum:]._-'
}
# Build the output file name
SANITIZED_BUCKET=$(sanitize_name "$BUCKET")
if [[ -n "$DIRECTORY" ]]; then
    SANITIZED_DIRECTORY=$(sanitize_name "$DIRECTORY")
    OUTPUT_FILE="${SANITIZED_BUCKET}_${SANITIZED_DIRECTORY}_glacier-restore.txt"
else
    OUTPUT_FILE="${SANITIZED_BUCKET}_glacier-restore.txt"
fi
# Build the AWS CLI command as an array, so bucket and prefix values are passed safely without eval
COMMAND=(aws s3api list-objects-v2 --bucket "$BUCKET" --query "Contents[?StorageClass=='GLACIER']" --output text)
# Add the prefix if a directory is specified
if [[ -n "$DIRECTORY" ]]; then
    COMMAND+=(--prefix "$DIRECTORY")
fi
# Run the command and keep only the object keys (second column of the text output)
"${COMMAND[@]}" | awk '{print $2}' > "$OUTPUT_FILE"
# Confirmation message
echo "Glacier object list saved to $OUTPUT_FILE"