备份是我们运维人员最基本的日常工作,做好备份是稳定运维的一个重要环节。下面分享两个使用过的简单备份脚本:
1)网站数据备份
将网站数据/var/www/vhosts/www.hqsb.com和/var/www/vhosts/www.huanqiu.com分别备份到:
/Data/code-backup/www.hqsb.com和/Data/code-backup/www.huanqiu.com下。
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
|
[root@huanqiu_web5 code-backup]
# cat web_code_backup.sh
#!/bin/bash
# web_code_backup.sh
# Back up each website document root under ${SOURCE_ROOT} into
# ${BACKUP_ROOT}/<site>/<site>_YYYYmmdd_HHMMSS.tar.gz, then prune
# archives older than ${RETENTION_DAYS} days.
set -u

BACKUP_ROOT="/Data/code-backup"
SOURCE_ROOT="/var/www/vhosts"
RETENTION_DAYS=7
# Sites to back up; add a name here to cover a new vhost.
SITES="www.huanqiu.com www.hqsb.com"

for site in ${SITES}; do
    dest_dir="${BACKUP_ROOT}/${site}"
    # Timestamp taken per site so each archive name reflects its own start time.
    timestamp=$(date +%Y%m%d_%H%M%S)

    # Make sure the target directory exists before tar writes into it.
    mkdir -p "${dest_dir}"

    # Archive the whole vhost tree (-z gzip, -c create, -v verbose).
    /bin/tar -zvcf "${dest_dir}/${site}_${timestamp}.tar.gz" "${SOURCE_ROOT}/${site}"

    # Remove backup files older than the retention window.
    find "${dest_dir}" -type f -mtime +"${RETENTION_DAYS}" -exec rm -f {} \;
done
[root@huanqiu_web5 ~]
# crontab -l
#每天凌晨5点备份网站数据
0 5 * * *
/bin/bash
-x
/Data/code-backup/web_code_backup
.sh >
/dev/null
2>&1
备份后的效果如下:
[root@huanqiu_web5 ~]
# ls /Data/code-backup/www.huanqiu.com/
www.huanqiu.com_20170322_174328.
tar
.gz
[root@xqsj_web5 ~]
# ls /Data/code-backup/www.hqsb.com/
www.hqsb.com_20170322_174409.
tar
.gz
|
2)数据库备份(自动删除10天前的备份文件)
数据库服务使用的是阿里云的mysql,远程进行定时的全量备份,备份到本地,以防万一。mysql数据库远程备份的数据最好打包压缩,以减少传输和存储开销。
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
|
[root@huanqiuPC
crontab
]
# pwd
/Data/Mysql_Bakup/crontab
[root@huanqiuPC
crontab
]
# cat backup_db_wangshibo.sh
#!/bin/bash
# backup_db_wangshibo.sh
# Remote full dump of the MySQL database ${DBNAME}, gzip-compressed into
# ${BACKUP_DIR}; backups older than 10 days are removed. A lock file
# prevents overlapping runs from cron.
MYSQL="/usr/bin/mysql"
MYSQLDUMP="/usr/bin/mysqldump"
BACKUP_DIR="/Data/Mysql_Bakup"
#DB_SOCKET="/var/lib/mysql/mysql.sock"
DB_hostname="110.120.11.9"
DBNAME="wangshibo"
DB_USER="db_wangshibo"
# SECURITY NOTE(review): plaintext password in the script (and visible in
# `ps` via -p on the command line); prefer a ~/.my.cnf or --defaults-extra-file.
DB_PASS="mhxzk3rfzh"
TIME=$(date +%Y%m%d%H%M%S)
LOCK_FILE="${BACKUP_DIR}/lock_file.tmp"
# BUG FIX: the log previously went to /Data/Mysql_Backup (typo for
# Mysql_Bakup), a directory that does not exist; keep it under BACKUP_DIR.
BKUP_LOG="${BACKUP_DIR}/${TIME}_bkup.log"

## Refuse to run while a previous instance is still active.
if [[ -f ${LOCK_FILE} ]]; then
    exit 255
else
    echo $$ > "${LOCK_FILE}"
fi
# Always release the lock on exit, so a failed dump cannot wedge the cron job.
trap '/bin/rm -f "${LOCK_FILE}"' EXIT

## dump databases ##
echo "${TIME}" >> "${BKUP_LOG}"
echo "=======Start Bakup============" >> "${BKUP_LOG}"
#${MYSQLDUMP} -h ${DB_hostname} -u${DB_USER} -p${DB_PASS} --databases ${DBNAME} | gzip -9 > ${BACKUP_DIR}/${TIME}.${DBNAME}.gz
${MYSQLDUMP} -h "${DB_hostname}" -u"${DB_USER}" -p"${DB_PASS}" --databases "${DBNAME}" \
    | gzip -9 > "${BACKUP_DIR}/${TIME}.${DBNAME}.gz"
echo "=======Finished Bakup============" >> "${BKUP_LOG}"

## delete backups older than 10 days ##
# BUG FIX: the old `rm -f ${BACKUP_DIR}/${DEL_BAK}*.gz` only matched files
# dated exactly 10 days ago, so any day the job skipped leaked archives
# forever. Prune everything older than 10 days by mtime instead.
find "${BACKUP_DIR}" -maxdepth 1 -type f -name "*.${DBNAME}.gz" -mtime +10 -exec rm -f {} \;
|
定时进行备份
1
2
|
[root@huanqiuPC Mysql_Bakup]
# crontab -l
10 0,6,12,18 * * *
/bin/bash
/Data/Mysql_Bakup/crontab/backup_db_wangshibo
.sh >
/dev/null
2>&1
|
脚本执行后的备份效果如下
1
2
3
|
[root@huanqiuPC
crontab
]
# cd /Data/Mysql_Bakup
[root@huanqiuPC Mysql_Bakup]
# ls
20161202061001.wangshibo.gz
|
同步线上数据库到beta环境数据库(覆盖beta数据库):
将上面定时备份的数据包拷贝到beta机器上,然后解压,登陆mysql,source命令进行手动覆盖。
--------------------------------------------------------------------再看一例-----------------------------------------------------------------------
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
|
[root@backup online_bak]
# cat rsync.sh (脚本中的同步:限速3M,保留最近一个月的备份)
#!/bin/bash
# rsync.sh
# Pull daily backups from the application hosts over rsync (bandwidth
# capped at 3 MB/s), pack each day's copy into YYYYmmdd.tar.gz, and keep
# only the most recent ${KEEP} archives in each backup directory.

RSYNC="/usr/bin/rsync"
SSH_OPTS="ssh -p22222"
BWLIMIT=3072          # rsync --bwlimit is in KB/s => 3 MB/s
KEEP=30               # number of daily archives to retain per directory
TODAY=$(date +%Y%m%d)

# sync_and_pack <remote_src> <local_dir>
# rsync the remote tree into <local_dir>/<TODAY>, tar+gzip that snapshot,
# then remove the unpacked copy.
sync_and_pack() {
    local remote_src=$1
    local local_dir=$2
    mkdir -p "${local_dir}"
    cd "${local_dir}" || return 1
    ${RSYNC} -e "${SSH_OPTS}" -avpgolr --bwlimit=${BWLIMIT} \
        "${remote_src}" "${local_dir}/${TODAY}"
    /bin/tar -zvcf "${TODAY}.tar.gz" "${TODAY}"
    rm -rf "${TODAY}"
}

# prune_old <local_dir>
# Delete all but the newest ${KEEP} date-stamped archives.
# BUG FIX: the original `ls -l | awk '{print $9}' | grep 2017` hardcoded
# the year (rotation silently stops in 2018), and `sed -n "1,$I p"` broke
# whenever fewer than ${KEEP} archives existed (negative range). Match the
# YYYYmmdd.tar.gz pattern and sort by name instead — date-stamped names
# sort chronologically, and `head -n -KEEP` is a no-op on short lists.
prune_old() {
    local local_dir=$1
    cd "${local_dir}" || return 1
    ls -1 | grep -E '^[0-9]{8}\.tar\.gz$' | sort | head -n -"${KEEP}" | xargs -r rm -f
}

# ehr data backup----------------------------------------------------------
sync_and_pack "192.168.34.27:/data/tomcat7/webapps" "/data/bak/online_bak/192.168.34.27/tomcat_data"
prune_old "/data/bak/online_bak/192.168.34.27/tomcat_data"

# zp data backup----------------------------------------------------------
sync_and_pack "192.168.34.33:/data/tomcat8/webapps" "/data/bak/online_bak/192.168.34.33/tomcat_data"
prune_old "/data/bak/online_bak/192.168.34.33/tomcat_data"

sync_and_pack "192.168.34.33:/home/zrx_hr/upload" "/data/bak/online_bak/192.168.34.33/upload"
prune_old "/data/bak/online_bak/192.168.34.33/upload"

# zabbix mysql backup----------------------------------------------------------
MYSQL_BAK_DIR="/data/bak/online_bak/192.168.16.21/mysql_data"
# -p so a pre-existing directory is not an error (the original bare mkdir
# failed on every run after the first of the day).
/bin/mkdir -p "${MYSQL_BAK_DIR}/${TODAY}"
# SECURITY NOTE(review): root DB password passed on the command line is
# visible in `ps`; prefer --defaults-extra-file or ~/.my.cnf.
/data/mysql/bin/mysqldump -hlocalhost -uroot -pBKJK-@@@-12345 --databases zabbix \
    > "${MYSQL_BAK_DIR}/${TODAY}/zabbix.sql"
cd "${MYSQL_BAK_DIR}" || exit 1
/bin/tar -zvcf "${TODAY}.tar.gz" "${TODAY}"
rm -rf "${TODAY}"
prune_old "${MYSQL_BAK_DIR}"
[root@backup online_bak]
# pwd
/data/bak/online_bak
[root@backup online_bak]
# ls
192.168.16.21
rsync
.sh
192.168.34.27 192.168.34.33
[root@backup online_bak]
# ll
total 10K
drwxr-xr-x 3 root root 23 Aug 19 17:47 192.168.16.21
drwxr-xr-x 4 root root 41 Aug 19 18:30 192.168.34.27
drwxr-xr-x 4 root root 37 Aug 19 18:17 192.168.34.33
-rwxr-xr-x 1 root root 6.3K Aug 19 19:20
rsync
.sh
[root@backup online_bak]
# ll 192.168.16.21/
total 4.0K
drwxr-xr-x 2 root root 28 Aug 19 19:43 mysql_data
[root@backup online_bak]
# ll 192.168.16.21/mysql_data/
total 1.5G
-rw-r--r-- 1 root root 1.5G Aug 19 19:43 20170819.
tar
.gz
[root@backup online_bak]
# ll 192.168.34.27
total 4.0K
drwxr-xr-x 2 root root 4.0K Aug 19 19:26 tomcat_data
[root@backup online_bak]
# ll 192.168.34.27/tomcat_data/
total 3.9G
......
-rw-r--r-- 1 root root 140M Aug 19 11:06 20170818.
tar
.gz
-rw-r--r-- 1 root root 140M Aug 19 19:26 20170819.
tar
.gz
[root@backup online_bak]
# ll 192.168.34.33
total 8.0K
drwxr-xr-x 2 root root 4.0K Aug 19 19:26 tomcat_data
drwxr-xr-x 2 root root 28 Aug 19 19:30 upload
[root@backup online_bak]
# crontab -l
# online backup
0 2 * * *
/bin/bash
-x
/data/bak/online_bak/rsync
.sh >
/dev/null
2>&1
|
---------------------------------------------------------------------------------------------------
1
2
3
4
5
6
7
8
9
10
11
|
取一个目录下,按照文件/目录的修改时间来排序,取最后一次修改的文件
[work@qd-
op
-comm01 xcspam]$
ls
bin xcspam-20170802145542 xcspam-20170807204545 xcspam-20170814115753 xcspam-20170818115806 xcspam-20170824162641 xcspam-20170831173616
xcspam xcspam-20170802194447 xcspam-20170808163425 xcspam-20170815191150 xcspam-20170821122949 xcspam-20170824165020 xcspam-20170831191347
xcspam-20170731154018 xcspam-20170803113809 xcspam-20170808195340 xcspam-20170815210032 xcspam-20170821153300 xcspam-20170829100941 xcspam-20170904105109
xcspam-20170801190647 xcspam-20170807150022 xcspam-20170809103648 xcspam-20170816141022 xcspam-20170822173600 xcspam-20170831135623 xcspam-20170911120519
xcspam-20170802142921 xcspam-20170807164137 xcspam-20170809111246 xcspam-20170816190704 xcspam-20170823101913 xcspam-20170831160115 xcspam-20170911195802
[work@qd-
op
-comm01 xcspam]$
ls
-rtd xcspam* |
tail
-1
xcspam-20170911195802
[work@qd-
op
-comm01 xcspam]$
ls
-rtd xcspam* |
tail
-2|
head
-1
//
这是倒数第二个被修改的文件
|
***************当你发现自己的才华撑不起野心时,就请安静下来学习吧***************