nginx日志按天自动分割备份

#!/bin/bash
# Daily nginx log backup, intended to run from cron shortly after midnight:
# copies yesterday's entries from access.log / error.log into dated backup
# files, then prunes backups older than 60 days.
#
# NOTE: the two logs use different timestamp formats:
#   access.log: 14.215.176.8 - - [29/Nov/2019:14:30:16 +0800] "GET / HTTP/1.1" 200 ...
#   error.log:  2019/11/29 14:47:21 [error] 18122#0: *12 open() "/data/www/11/asdf" failed ...

# Yesterday's date, once per log format (prefer $(...) over backticks).
access_day=$(date -d "yesterday" +%d/%b/%Y)   # access.log style: 29/Nov/2019
error_day=$(date -d "yesterday" +%Y/%m/%d)    # error.log style:  2019/11/29
current_date=$(date -d "yesterday" +%Y%m%d)   # backup filename:  20191129

file_path="/usr/local/nginx/logs/la/"
access_file_path="${file_path}access.log"
error_file_path="${file_path}error.log"
backup_access_filename="${file_path}backup/access_${current_date}.log"
backup_error_filename="${file_path}backup/error_${current_date}.log"

# Ensure the backup directory exists (the redirections below create the files).
mkdir -p "${file_path}backup"

# access.log: field 4 is "[29/Nov/2019:14:30:16", so yesterday's lines are
# exactly those where field 4 starts with "[<yesterday>:".  This selects the
# same lines as the original lexical range compare, but passes the date via
# -v instead of splicing a shell variable into the awk program, and reads the
# file directly instead of the useless "cat | awk".
awk -v d="$access_day" 'index($4, "[" d ":") == 1' "$access_file_path" \
  > "$backup_access_filename"

# error.log: field 1 is the bare date "2019/11/29", so a plain equality test
# selects yesterday's lines (the original >=/<= range compare was equivalent
# but misleading, since $1 never carries a time component).
awk -v d="$error_day" '$1 == d' "$error_file_path" \
  > "$backup_error_filename"

# Prune backups older than 60 days.  The original used curly smart quotes
# around *.log (“*.log”), so -name never matched real files and nothing was
# ever deleted; plain ASCII quotes fix that.  -f (not -rf) is enough for files.
find "${file_path}backup/" -type f -mtime +60 -name "*.log" -exec rm -f -- {} +


然后用crontab加入定时任务就行了。脚本按“昨天”取日期,所以应安排在每天凌晨刚过零点时执行,例如:
1 0 * * * /bin/bash /usr/local/nginx/logs/nginx_log_split.sh

 

 

发表评论