Introduction: use a Kubernetes CronJob to schedule MySQL backups, moving the cron task that used to live on the host into k8s. This avoids compatibility differences between host systems and cuts down the time spent troubleshooting when a scheduled job fails to run.

The YAML manifests are as follows.

Full backup

/opt/app-ai/deepwise/data/scripts - directory holding the backup scripts

/opt/app-ai/deepwise/data/mysql-data - MySQL data directory on the host

/opt/app-ai/deepwise/data/mysql-backup - directory the backups are written to

apiVersion: batch/v1beta1
kind: CronJob
metadata:
  namespace: deepwise
  name:  mysql-fulldump
spec:
  jobTemplate:
    spec:
      completions: 1
      template:
        spec:
          restartPolicy: Never
          volumes:
            - name: mysql-script
              hostPath:
                path: /opt/app-ai/deepwise/data/scripts
            - name: mysql-backup
              hostPath:
                path: /opt/app-ai/deepwise/data/mysql-backup
            - name: local-time
              hostPath:
                path: /etc/localtime
            - name: mysql-data
              hostPath:
                path: /opt/app-ai/deepwise/data/mysql-data
          containers:
            - name: mysqldump-container
              image: percona/percona-xtrabackup:2.4
              volumeMounts:
                - name: mysql-script
                  mountPath: /opt/app-ai/deepwise/data/scripts
                - name: local-time
                  mountPath: /etc/localtime
                - name: mysql-backup
                  mountPath: /opt/app-ai/deepwise/data/mysql-backup
                - name: mysql-data
                  mountPath: /var/lib/mysql

              command:
                - "sh"
                - "/opt/app-ai/deepwise/data/scripts/backup.sh"
  schedule: "0 * * * *"  # 这里为了测试1小时跑一次  正常时每周日跑1次

The corresponding shell script (backup.sh):

#!/bin/bash
#version = v20220115pro_Drwise
backup_path="/opt/app-ai/deepwise/data/mysql-backup"
data_dir="/opt/app-ai/deepwise/data/mysql-data"
DATE=$(date +%Y-%m-%d_%H-%M-%S)

backup(){
        if [ ! -z "${data_dir}" ];then
            mkdir -p ${backup_path}
            # record the start time, then append the innobackupex output to the same log
            date >> ${backup_path}/${DATE}.log
            echo "innobackupex --user=root --password=password --host=mysql-headless ${backup_path} >> ${backup_path}/${DATE}.log 2>&1"
            innobackupex --user=root --password=password --host=mysql-headless ${backup_path} >> ${backup_path}/${DATE}.log 2>&1
            #nohup innobackupex --user=root --password=password --host=mysql-headless ${backup_path} >> ${backup_path}/${DATE}.log 2>&1 &
            # record the end time
            date >> ${backup_path}/${DATE}.log
        fi
}
clean(){
        # drop backups older than 14 days; -mindepth 1 keeps the backup root itself
        find ${backup_path}/ -mindepth 1 -mtime +14 -exec rm -rf {} \;
}
backup
clean
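
For completeness, restoring from one of these full backups follows the standard XtraBackup 2.4 prepare/copy-back flow. A minimal sketch, run on the host (or a container with the same mounts) with MySQL stopped and its datadir emptied first; <FULL_BACKUP_DIR> is a placeholder for the timestamped directory innobackupex created under the backup path:

# Prepare (apply the redo log) the chosen full backup
innobackupex --apply-log /opt/app-ai/deepwise/data/mysql-backup/<FULL_BACKUP_DIR>

# Copy the prepared files back into the datadir and fix ownership
innobackupex --copy-back /opt/app-ai/deepwise/data/mysql-backup/<FULL_BACKUP_DIR>
chown -R mysql:mysql /opt/app-ai/deepwise/data/mysql-data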

Incremental backup

The incremental backups use the Sunday full backup as their base.

    apiVersion: batch/v1beta1
    kind: CronJob
    metadata:
      namespace: deepwise
      name:  mysql-incdump
    spec:
      jobTemplate:
        spec:
          completions: 1
          template:
            spec:
              restartPolicy: Never
              volumes:
                - name: mysql-script
                  hostPath:
                    path: /opt/app-ai/deepwise/data/scripts
                - name: mysql-backup
                  hostPath:
                    path: /opt/app-ai/deepwise/data/mysql-backup
                - name: local-time
                  hostPath:
                    path: /etc/localtime
                - name: mysql-data
                  hostPath:
                    path: /opt/app-ai/deepwise/data/mysql-data
              containers:
                - name: mysqldump-container
                  image: percona/percona-xtrabackup:2.4
                  volumeMounts:
                    - name: mysql-script
                      mountPath: /opt/app-ai/deepwise/data/scripts
                    - name: local-time
                      mountPath: /etc/localtime
                    - name: mysql-backup
                      mountPath: /opt/app-ai/deepwise/data/mysql-backup
                    - name: mysql-data
                      mountPath: /var/lib/mysql

                  command:
                    - "sh"
                    - "/opt/app-ai/deepwise/data/scripts/backup_inc.sh"
      schedule: "0 0 * * *"

The corresponding shell script (backup_inc.sh):

#!/bin/bash
#version = v202010630_Drwise
backup_path="/opt/app-ai/deepwise/data/mysql-backup"
data_dir="/opt/app-ai/deepwise/data/mysql-data"
# yesterday's backup directory, if any, becomes the incremental base
STANDARD=$(ls ${backup_path} | grep "$(date -d 'yesterday' '+%Y-%m-%d')" | grep -v log | awk '{print $NF}')
#STANDARD_Sunday=$(ls -l --time-style '+%A' ${backup_path} | grep -E "Monday" | grep -v 'log' | awk '{print $NF}')
# otherwise fall back to the directory written on Sunday (the weekly full backup)
STANDARD_Sunday=$(ls -l --time-style '+%A' ${backup_path} | grep -E "Sunday" | grep -v 'log' | awk '{print $NF}')
DATE=$(date +%Y-%m-%d_%H-%M-%S)
echo STANDARD $STANDARD
echo STANDARD_Sunday $STANDARD_Sunday
backup(){
        if [ ! -z "${STANDARD}" ];then
            echo "STANDARD"
            date > ${backup_path}/${DATE}.log 2>&1
            echo "innobackupex --user=root --password=password --host=mysql-headless --incremental ${backup_path} --incremental-basedir=${backup_path}/${STANDARD} >> ${backup_path}/${DATE}.log 2>&1"
            innobackupex --user=root --password=password --host=mysql-headless --incremental ${backup_path} --incremental-basedir=${backup_path}/${STANDARD} >> ${backup_path}/${DATE}.log 2>&1
        else
            echo "STANDARD_Sunday"
            date > ${backup_path}/${DATE}.log 2>&1
            echo "innobackupex --user=root --password=password --host=mysql-headless --incremental ${backup_path} --incremental-basedir=${backup_path}/${STANDARD_Sunday} >> ${backup_path}/${DATE}.log 2>&1"
            innobackupex --user=root --password=password --host=mysql-headless --incremental ${backup_path} --incremental-basedir=${backup_path}/${STANDARD_Sunday} >> ${backup_path}/${DATE}.log 2>&1
        fi
}
backup
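
Restoring under this scheme means preparing the Sunday full backup first and then replaying the incrementals in order. A sketch using the 2.4 innobackupex syntax, assuming a single incremental directory; <SUNDAY_FULL_DIR> and <INC_DIR> are placeholders for the timestamped directories under the backup path:

BASE=/opt/app-ai/deepwise/data/mysql-backup/<SUNDAY_FULL_DIR>
INC=/opt/app-ai/deepwise/data/mysql-backup/<INC_DIR>

# Prepare the base, keeping it open for further increments
innobackupex --apply-log --redo-only "$BASE"

# Apply each incremental against the base, oldest first
# (omit --redo-only when applying the last incremental)
innobackupex --apply-log --redo-only "$BASE" --incremental-dir="$INC"

# Final prepare, then copy back into an empty datadir with MySQL stopped
innobackupex --apply-log "$BASE"
innobackupex --copy-back "$BASE"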