Backup a MariaDB database

What this script does:

This script is meant to be run from a cron job. It backs up a MariaDB database and produces a .sql.gz file.
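
For reference, the crontab entry could look like the sketch below (the working directory matches the commented-out cd in the script; the schedule and log file are placeholders):

# run the backup every day at 02:30
30 2 * * * cd /myPath/if_I_Use_In/Crontab && ./backup_mariadb.sh >> /var/log/backup_mariadb.log 2>&1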

The .sql.gz file is then uploaded to an Azure storage account, using the credentials set by set_variable_and_creds in variables_and_creds.sh.

Finally, a message is posted to a Teams channel with the name and size of the uploaded .sql.gz file.
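
Since the notification is just a POST to a Teams Incoming Webhook, it can be tested on its own with something like this (using the webhook URL from variables_and_creds.sh):

curl -X POST -H "Content-Type: application/json" \
     -d '{"text": "test message from backup_mariadb.sh"}' \
     "https://*****.webhook.office.com/webhookb2/*****/IncomingWebhook/123**9"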

The code:

Contents of variables_and_creds.sh

function set_variable_and_creds(){

    DATE=$(date +%d-%m-%Y)

    # Azure storage account that receives the backups
    STORAGE_ACCOUNT_NAME="..."
    CONTAINER_NAME="...."
    ACCES_KEYS="123456789.....=="

    # MariaDB connection settings
    HOST="localhost"
    PORT="3306"
    USERNAME="USERNAME"
    PASSWORD="PASSWORD"
    DATABASE="DATABASE_NAME"

    # Teams Incoming Webhook URL
    TEAMS_WEBOOK="https://*****.webhook.office.com/webhookb2/*****/IncomingWebhook/123**9"
}
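
If needed, the storage account key can be retrieved with the Azure CLI (a sketch; MY_RESOURCE_GROUP is a placeholder for the resource group that holds the storage account):

az storage account keys list --account-name "${STORAGE_ACCOUNT_NAME}" --resource-group MY_RESOURCE_GROUP --query "[0].value" -o tsv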

Contents of backup_mariadb.sh

#!/bin/bash


# SET DEBUG=true for extra logs
DEBUG=false
#DEBUG=true

source variables_and_creds.sh # provides the set_variable_and_creds function
# When running from cron, cd into the script's directory first:
#cd /myPath/if_I_Use_In/Crontab

function log_error(){
    return_code=$?
    message_erreur=$1
    log_date=$(date +"%Y-%m-%d %H:%M:%S")
    # Colors
    color_in_red='\033[0;31m'
    no_color='\033[0m' # reset the color

    if [[ $return_code -ne 0 ]]; then
        echo -e "${color_in_red}$log_date : Error on command [ $message_erreur ]${no_color}"
        exit 1
    fi
}
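
# Note: log_error reads $?, i.e. the exit code of the command executed right
# before the call, so it must be invoked immediately after the command it
# checks, for example:
#   some_command
#   log_error "description of some_command"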

function create_mariadb_dump_and_gzip(){

    echo "Start Backups"
    # Dump the database and compress it to ./backup/<DATABASE>_<DATE>.sql.gz
    mkdir -p ./backup
    mysqldump -v -h "$HOST" --port="$PORT" -u "$USERNAME" -p"$PASSWORD" --opt --single-transaction --skip-lock-tables "${DATABASE}" | gzip > ./backup/"${DATABASE}"_"${DATE}".sql.gz
}


function upload_file_to_blob(){
    local file_name=$1

    echo -e "\t upload : ${file_name}"

    if [[ $DEBUG == "true" ]]; then
        az storage blob upload --account-name "${STORAGE_ACCOUNT_NAME}" --account-key "${ACCES_KEYS}" --container-name "${CONTAINER_NAME}" --file "${file_name}" --name "${file_name}"
    else
        az storage blob upload --account-name "${STORAGE_ACCOUNT_NAME}" --account-key "${ACCES_KEYS}" --container-name "${CONTAINER_NAME}" --file "${file_name}" --name "${file_name}" > /dev/null
    fi

    log_error "az storage blob upload --account-name ${STORAGE_ACCOUNT_NAME} --account-key **** --container-name ${CONTAINER_NAME} --file ${file_name} --name ${file_name}"
}

function clean_local_file(){
    local name_file=$1
    echo -e "\t Clean file : ${name_file}"
    rm -f "./${name_file}"
    log_error "rm -f ./${name_file}"
}

function message_teams(){
    # Post to the Teams channel CRON via its Incoming Webhook
    local url_webhook=$TEAMS_WEBOOK
    local message=$1
    # Wrap the message in triple backticks so Teams renders it as a code block
    local payload='{"text": "```'"$message"'```"}'
    curl -X POST -H "Content-Type: application/json" -d "$payload" "$url_webhook"
}

function check_upload_backup(){
    # List the blobs modified since midnight today (name and size)
    local date=$(date +"%Y-%m-%d")
    az storage blob list --account-name "${STORAGE_ACCOUNT_NAME}" --account-key "${ACCES_KEYS}" --container-name "${CONTAINER_NAME}" --output table --query "[?properties.lastModified >= '${date}T00:00:00Z'] | [].{Name:name, Length:properties.contentLength}"
}

function main(){

    set_variable_and_creds # set the global variables used by the functions below

    echo -e "\tStart :${DATE}"

    create_mariadb_dump_and_gzip

    upload_file_to_blob "backup/${DATABASE}_${DATE}.sql.gz"
    clean_local_file "backup/${DATABASE}_${DATE}.sql.gz"

    ls_storage=$(check_upload_backup)
    message_teams "${ls_storage}" # send the list of today's uploaded backups to Teams

} # END main

main
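
To restore one of these backups, something like the sketch below should work: it reuses the variables from variables_and_creds.sh (set DATE to the date of the backup to restore) and the blob name used by upload_file_to_blob.

source variables_and_creds.sh && set_variable_and_creds
az storage blob download --account-name "${STORAGE_ACCOUNT_NAME}" --account-key "${ACCES_KEYS}" --container-name "${CONTAINER_NAME}" --name "backup/${DATABASE}_${DATE}.sql.gz" --file "./${DATABASE}_${DATE}.sql.gz"
gunzip -c "./${DATABASE}_${DATE}.sql.gz" | mysql -h "$HOST" --port="$PORT" -u "$USERNAME" -p"$PASSWORD" "$DATABASE"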