注:实测插入后只有6百多万的数据,且无法彻底删除所有 key(原因见下:key 拼接有歧义,产生重复 key)。
1. batch_sadd.sh(生成1亿个redis命令)
#!/bin/bash
#
# Generate 100 million (100 x 1,000,000) redis SET commands into $file,
# one command per line, e.g. "set ypb1:1 1".
#
# Usage: bash batch_sadd.sh [outfile]   (default: E:/test.txt)
#
# BUGFIX: the original key "ypb$i$j" is ambiguous -- e.g. i=1,j=11 and
# i=11,j=1 both concatenate to "ypb111" -- so far fewer than 100M distinct
# keys were created. A ':' separator makes every key unique.
file=${1:-E:/test.txt}
# Redirect the whole loop once ('>' truncates any previous run) instead of
# re-opening the file with '>>' for each of the 100M echo calls.
for i in {1..100}; do
  for j in {1..1000000}; do
    echo "set ypb$i:$j $j"
  done
done > "$file"
2. 20.sh 将上一步骤生成的redis命令文件解析为aof文件。 (bash 20.sh test.txt > data.txt)
#!/bin/bash
#
# Convert a file of redis commands into AOF / RESP protocol on stdout.
# Protocol: every line ends with "\r\n"; each command is encoded as
#   *<number of arguments>\r\n
# then, for each argument,
#   $<argument length>\r\n<argument>\r\n
#
# Usage: bash 20.sh commands.txt > data.txt

# Encode every whitespace-separated command read from file $1 to stdout.
encode_aof() {
  local line args arg
  # read -r keeps backslashes literal; the "|| [ -n ... ]" clause also
  # processes a final line that has no trailing newline.
  while read -r line || [ -n "$line" ]; do
    # Split the command into its arguments (intentional word splitting,
    # as in the original; arguments must not contain globs/whitespace).
    args=($line)
    printf '*%d\r\n' "${#args[@]}"
    for arg in "${args[@]}"; do
      # BUGFIX: the original `printf "\${#X}..."` escaped the dollar sign
      # and emitted the literal text "${#X}" instead of "$<length>",
      # producing invalid RESP. Emit "$" + byte length of the argument.
      printf '$%d\r\n%s\r\n' "${#arg}" "$arg"
    done
  done < "$1"
}

if [ $# -ge 1 ]; then
  file=$1
  if [ ! -f "$file" ]; then
    # BUGFIX: the original printed the error (to stdout, polluting the
    # generated data) and then fell through into the encoding loop.
    echo "$file not exist" >&2
    exit 1
  fi
  encode_aof "$file"
fi
3. batch_exec.sh (执行data.txt文件,测试耗时)
#!/bin/bash
#
# Bulk-load a pre-built AOF/RESP file into redis using redis-cli pipe
# mode, and report how long the load took.
#
# Usage: bash batch_exec.sh data.txt
file=$1
if [ ! -f "$file" ]; then
  echo "file path not exist" >&2
  exit 1
fi
# Start time: human-readable stamp plus epoch seconds. `date +%s` is read
# directly instead of the original's lossy round-trip of re-parsing the
# formatted string with `date -d`.
timer_start=$(date "+%Y-%m-%d %H:%M:%S")
timestamp_start=$(date +%s)
# Feed the file straight to redis-cli (no useless `cat |`); --pipe streams
# the raw RESP protocol for mass insertion.
redis-cli --pipe < "$file"
# End time.
timer_end=$(date "+%Y-%m-%d %H:%M:%S")
timestamp_end=$(date +%s)
# Elapsed seconds via shell arithmetic instead of the external `expr`.
timestamp_diff=$((timestamp_end - timestamp_start))
echo "timer_start: " "$timer_start"
echo "timer_end: " "$timer_end"
echo "timestamp_diff: ""$timestamp_diff"s
4. batch_delete.sh (批量删除数据,测试耗时)
#!/bin/bash
#
# Bulk-delete all keys with the "ypb" prefix and report elapsed time.
# --scan streams keys cursor-by-cursor (non-blocking, unlike KEYS) and
# DEL is issued in batches to bound each command's size.
timer_start=$(date "+%Y-%m-%d %H:%M:%S")
timestamp_start=$(date +%s)
# BUGFIX: -n 2000 batches by argument count (the intended batch size; -L
# counts input *lines*), and -r skips the DEL entirely when the scan
# matches nothing -- otherwise redis-cli is invoked with zero keys, which
# is a redis error. '>' (not '>>') is enough for /dev/null.
redis-cli --scan --pattern "ypb*" | xargs -r -n 2000 redis-cli -n 0 -p 6379 del > /dev/null
timer_end=$(date "+%Y-%m-%d %H:%M:%S")
timestamp_end=$(date +%s)
timestamp_diff=$((timestamp_end - timestamp_start))
echo "timer_start: " "$timer_start"
echo "timer_end: " "$timer_end"
echo "timestamp_diff: ""$timestamp_diff"s