清理日志shell脚本
来源:原创
时间:2020-11-05
作者:脚本小站
分类:SHELL
Elasticsearch 索引定时清理脚本:
#!/bin/bash
####################################
#                                  #
#  Periodically delete Elasticsearch indices whose names end in a
#  YYYY.MM.DD date suffix older than 32 days.
#                                  #
####################################
#set -x
export MANY_DAYS_AGO=$(date "+%Y-%m-%d" -d '32 day ago')
export TEMP_FILE=/tmp/indices.txt
export ES_IP=192.168.199.39

curl -XGET "http://${ES_IP}:9200/_cat/indices" > "$TEMP_FILE"

# Index names are whitespace-free, so word-splitting the list is safe here.
for var in $(sort -r "$TEMP_FILE" | awk '{print $3}')
do
    log_time=${var:0-10:10}   # last 10 chars of the index name, e.g. 2018.10.24
    # Skip empty strings (quoted so [ -z ] works even when empty).
    if [ -z "$log_time" ]; then
        continue
    fi
    log_time=${log_time//./-} # 2018-10-24
    # Skip anything that is not a pure date (contains letters).
    # -q: test silently instead of printing the matched line.
    if grep -q '[[:alpha:]]' <<< "$log_time"; then
        echo $var "Non date format, not clear."
        continue
    fi
    # Positive difference => the index date is older than the cutoff.
    difference=$(($(date +%s -d "$MANY_DAYS_AGO") - $(date +%s -d "$log_time")))
    if [ "$difference" -gt 0 ]; then
        #echo $var
        # NOTE: the original used [ -n $var ] which is ALWAYS true when
        # unquoted; quoting makes the non-empty check actually work.
        if [ -n "$var" ]; then
            curl -XDELETE "http://${ES_IP}:9200/${var}"
        fi
    fi
done
清理非空日志:
#!/bin/bash
# Truncate every non-empty console.log under the release directories.
# Iterate the glob directly instead of parsing `ls` output, which
# breaks on whitespace and is redundant when the shell can expand
# the pattern itself.
for var in /data/application/releases/ebiz-clm-syn-1.0.0-SNAPSHOT-*/logs/console.log
do
    # -s: file exists and has a size greater than zero.
    if [ -s "$var" ]; then
        cat /dev/null > "$var"
    fi
done
清理两天之前的日志:
# Delete regular files older than 2 days under the current directory.
# -f: don't fail on already-gone files; --: stop option parsing so
# filenames starting with '-' can't be read as rm options;
# {} +: batch filenames into few rm invocations instead of one per file.
find ./ -type f -mtime +2 -exec rm -f -- {} +
定时任务:
# Purge logs older than 10 days, staggered in the small hours so the
# three find scans do not run concurrently.
15 2 * * * find /data/applogs/clmweb/app -type f -mtime +10 -exec rm -f -- {} \;
25 2 * * * find /data/applogs/clmweb/sys -type f -mtime +10 -exec rm -f -- {} \;
35 2 * * * find /data/applogs/tomcat -type f -mtime +10 -exec rm -f -- {} \;
定时清理日志:
#!/bin/bash
# Truncate the application console log when /dev/vda3 usage exceeds 70%.
export LOG_FILE=/data/application/current/logs/console.log
# Exact field match on the device column: the original
# `grep /dev/vda3` would also match e.g. /dev/vda31.
export PERCENT=$(df -h | awk '$1 == "/dev/vda3" {print $5}' | awk -F '%' '{print $1}')

# Guard against an empty PERCENT (device not mounted), which would
# make the numeric comparison fail.
if [ -n "$PERCENT" ] && [ "$PERCENT" -gt 70 ]; then
    if [ -f "$LOG_FILE" ]; then
        cat /dev/null > "$LOG_FILE"
    fi
fi
清理docker日志:
#!/bin/bash
# Truncate docker container json logs whose container directory has
# grown past ~1 GiB, and record each truncation in /tmp/clear.log.
export DIR=/var/lib/docker/containers

for var in "$DIR"/*
do
    [ -d "$var" ] || continue
    log_dir_name=$(basename "$var")
    big_log_file="$DIR/$log_dir_name/${log_dir_name}-json.log"
    # Numeric size in MiB. The original `du -sh | grep G` was a fragile
    # text match that would also hit any line merely containing a "G".
    size=$(du -sm "$var" | awk '{print $1}')
    if [ "$size" -gt 1024 ] && [ -f "$big_log_file" ]; then
        cat /dev/null > "$big_log_file"
        echo "cat /dev/null > $big_log_file" >> /tmp/clear.log
    fi
done
清理特定文件:
#!/bin/bash
# Truncate today's known-large application logs when /dev/sda4 usage
# exceeds 50%.
export CURRENT_DATE=$(date +%Y-%m-%d)
# Exact field match: plain `grep /dev/sda4` would also match /dev/sda41.
export PERCENT=$(df -h | awk '$1 == "/dev/sda4" {print $5}' | awk -F '%' '{print $1}')

files=(
    /data/applogs-pre/ebiz-cxj-venus/app.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-cxj-venus/app_error.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-cxj-spm-service/app.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-cxj-spm-service/app_error.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-promotion2-fed-venus-service/venus.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-psi-venus-service/app.${CURRENT_DATE}.log
    /data/applogs-pre/framework-service-mail-impl/app.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-mdse-center-venus-service/venus.tracer.${CURRENT_DATE}.log
    /data/applogs-pre/ebiz-mdse-center-venus-service/app.${CURRENT_DATE}.log
    /data/applogs/ebiz-new-mdse-center-venus-service/venus.${CURRENT_DATE}.log
    /data/applogs/ebiz-oms-service/venus.${CURRENT_DATE}.log
)

# Empty-PERCENT guard avoids a [ -gt ] error if the device isn't mounted.
if [ -n "$PERCENT" ] && [ "$PERCENT" -gt 50 ]; then
    # Quoted "${files[@]}" keeps each path a single word.
    for var in "${files[@]}"
    do
        echo "$var"
        if [ -f "$var" ]; then
            cat /dev/null > "$var"
        fi
    done
fi
清理cdh日志:
#!/bin/bash
# Truncate rotated CDH component logs (yarn/kudu/hdfs/zookeeper/hive)
# when the root filesystem passes 78% usage, then append a timestamp
# to the local run log.
export HOSTNAME=$(hostname)
export DIRS=(
    "/var/log/hadoop-yarn/hadoop-cmf-yarn-NODEMANAGER-${HOSTNAME}.xxx.com.log.out.*"
    "/var/log/hadoop-yarn/hadoop-cmf-yarn-RESOURCEMANAGER-${HOSTNAME}.xxx.com.log.out.*"
    "/var/log/kudu/kudu-master.${HOSTNAME}.xxx.com.invalid-user.log.INFO.*"
    "/var/log/kudu/kudu-master.${HOSTNAME}.xxx.com.invalid-user.log.WARNING.*"
    "/var/log/kudu/kudu-tserver.${HOSTNAME}.xxx.com.invalid-user.log.WARNING.*"
    "/var/log/hadoop-hdfs/hdfs-audit.log.*"
    "/var/log/hadoop-hdfs/hadoop-cmf-hdfs-NAMENODE-${HOSTNAME}.xxx.com.log.out.*"
    "/var/log/hadoop-hdfs/hadoop-cmf-hdfs-DATANODE-${HOSTNAME}.xxx.com.log.out.*"
    "/var/log/zookeeper/zookeeper-cmf-zookeeper-SERVER-${HOSTNAME}.xxx.com.log.*"
    "/var/log/hive/hadoop-cmf-hive-HIVEMETASTORE-${HOSTNAME}.xxx.com.log.out.*"
    "/var/log/hive/hadoop-cmf-hive-HIVESERVER2-${HOSTNAME}.xxx.com.log.out.*"
)
# Exact field match on the device column (grep would match substrings).
export PERCENT=$(df -h | awk '$1 == "/dev/mapper/centos-root" {print $5}' | awk -F '%' '{print $1}')

if [ -n "$PERCENT" ] && [ "$PERCENT" -gt 78 ]; then
    for pattern in "${DIRS[@]}"
    do
        # Let the shell expand the glob directly instead of `ls $var`,
        # which re-splits ls output. Unquoted on purpose.
        for file in $pattern
        do
            # Filters out the literal pattern left by an unmatched glob.
            [ -f "$file" ] && cat /dev/null > "$file"
        done
    done
fi
echo $(date "+%Y-%m-%d %T") >> /root/bin/clear.log
清理不规则日志:
#!/bin/bash
# Delete rotated/archived logs older than 50 days in every directory
# under /trust/logs once /dev/vda1 usage passes 69%.
export LOG_PATH="/trust/logs"
# BUG FIX: each pattern must be quoted, both here and when passed to
# `find -name`. Unquoted, patterns like *.tmp and access_log.* are
# glob-expanded against the script's working directory before find
# ever sees them.
export FILES=(
    #'gc.vgc'
    #'api_client.log'
    'access_log.*'
    'info.*.log.gz'
    'error.*.log.gz'
    'api_client.*.gz'
    '*.tmp'
)
# Exact field match on the device column (grep would match substrings).
export PERCENT=$(df -h | awk '$1 == "/dev/vda1" {print $5}' | awk -F '%' '{print $1}')

if [ -n "$PERCENT" ] && [ "$PERCENT" -gt 69 ]; then
    # Trailing / makes the glob match directories only.
    for subdir in "$LOG_PATH"/*/
    do
        [ -d "$subdir" ] || continue
        for pattern in "${FILES[@]}"
        do
            #echo "$subdir/$pattern"
            find "$subdir" -name "$pattern" -type f -mtime +50 -exec rm -f -- {} +
        done
    done
fi
清理xxxx-xx日期目录的数据:
#!/bin/bash
# Archive (tar.gz) and remove month-named data directories (YYYY-M)
# older than 120 days under each test-data subdirectory.
export DATADIR=/opt/nfs/autotest/autotest-log-pvc-097b9b66-37ed-4c4e-b288-759d8b956a62/test-data
export MANY_DAYS_AGO=$(date "+%Y-%m-%d" -d '120 day ago')

# Fail fast if the NFS mount is missing instead of running in the
# wrong directory.
cd "$DATADIR" || exit 1

for dir in */
do
    dir=${dir%/}
    for subpath in "$dir"/*
    do
        subdir=$(basename "$subpath")
        if [ -d "$subpath" ]; then
            # ${subdir} looks like 2022-1; append -01 to make a full
            # date the shell `date` can parse.
            difference=$(($(date +%s -d "$MANY_DAYS_AGO") - $(date +%s -d "${subdir}-01")))
            # echo $difference
            if [ "$difference" -gt 0 ]; then
                # -C replaces the original cd/tar/cd dance; archive
                # lands next to the directory it replaces.
                tar -zcf "$dir/$subdir.tar.gz" -C "$dir" "$subdir"
                # ${dir:?} aborts instead of expanding `rm -rf /...`
                # if dir is somehow empty.
                rm -rf -- "${dir:?}/$subdir"
            fi
        else
            echo "$subdir"
        fi
    done
done
清理docker日志脚本:
#!/bin/bash
# Truncate the json log of any docker container whose directory
# exceeds 1 GiB.
set -x
export LOGPATH=/var/lib/docker/containers

# Iterate the glob directly instead of parsing `ls` output.
for dir in "$LOGPATH"/*/
do
    var=$(basename "$dir")
    size=$(du -sm "$LOGPATH/$var" | awk '{print $1}')
    if [ "$size" -gt 1024 ]; then
        json_log="$LOGPATH/$var/$var-json.log"
        # Guard: a container dir can be big without a json log present.
        if [ -f "$json_log" ]; then
            du -sh "$json_log"
            cat /dev/null > "$json_log"
        fi
    fi
done
清理日期格式的目录:
#!/bin/bash
# Remove entries under /root/confluence/files whose names end in a
# YYYY-MM-DD date older than 15 days.
export FILEDIR=/root/confluence/files
export MANY_DAYS_AGO=$(date "+%Y-%m-%d" -d '15 day ago')

for path in "$FILEDIR"/*
do
    var=$(basename "$path")
    dirCreateTime=${var:0-10:10}   # last 10 chars, e.g. 2020-11-05
    if [ -z "$dirCreateTime" ]; then
        continue
    fi
    # Names containing letters are not dates — skip them (negated test).
    if ! grep -q '[[:alpha:]]' <<< "$dirCreateTime"; then
        #echo $(date +%s -d "$dirCreateTime")
        # Positive difference => entry is older than the cutoff.
        difference=$(($(date +%s -d "$MANY_DAYS_AGO") - $(date +%s -d "$dirCreateTime")))
        if [ "$difference" -gt 0 ]; then
            # BUG FIX: the original `[ -n $var ]` is always true when
            # unquoted; quoting makes the non-empty check real.
            if [ -n "$var" ]; then
                # ${FILEDIR:?} aborts rather than expanding to `rm -rf /$var`
                # if FILEDIR were ever empty.
                rm -rf -- "${FILEDIR:?}/$var"
            fi
        fi
    fi
done