github项目下载地址:https://github.com/echowei/DeepTraffic
百度网盘下载地址:链接:https://pan.baidu.com/s/1Nf-GNXAGWpKtGxW93bCOdw
提取码:mub1
前两步使用powershell运行,powershell是基于windows的类似Linux的shell命令,在徽标键搜索框中直接搜索powershell。
有个别小伙伴在使用powershell的时候无法成功,这里做简要补充,看看是否有用
1,powershell最好使用管理员运行
2,笔者运行环境为win10,运行时报了一个错误
无法加载文件 ******.ps1,因为在此系统中禁止执行脚本。有关详细信息,请参阅 "get-help about_signing"
解决方案:在管理员权限下的powershell,输入命令Set-ExecutionPolicy RemoteSigned
(1)1_Pcap2Session.ps1
# 1_Pcap2Session.ps1
# Split every .pcap under data\ into per-session / per-flow files with SplitCap,
# then remove zero-byte outputs and de-duplicate the results with finddupe.
foreach ($pcap in Get-ChildItem data/*.pcap)
{
    # All protocol layers, one output file per session.
    0_Tool\SplitCap_2-1\SplitCap -p 50000 -b 50000 -r $pcap.FullName -o 2_Session\AllLayers\$($pcap.BaseName)-ALL
    Get-ChildItem 2_Session\AllLayers\$($pcap.BaseName)-ALL | Where-Object { $_.Length -eq 0 } | Remove-Item

    # Application layer (L7) only, grouped per flow.
    0_Tool\SplitCap_2-1\SplitCap -p 50000 -b 50000 -r $pcap.FullName -s flow -o 2_Session\L7\$($pcap.BaseName)-L7 -y L7
    Get-ChildItem 2_Session\L7\$($pcap.BaseName)-L7 | Where-Object { $_.Length -eq 0 } | Remove-Item
}
# Delete duplicate files across each output tree.
0_Tool\finddupe -del 2_Session\AllLayers
0_Tool\finddupe -del 2_Session\L7
(2)2_ProcessSession.ps1
# 2_ProcessSession.ps1
# Filter the session folders, split each class into Train/Test sets (random 10%
# test), then trim or zero-pad every file to exactly $TRIMED_FILE_LEN bytes
# (784 = 28*28, one byte per pixel of the later PNG/MNIST stage).
$SESSIONS_COUNT_LIMIT_MIN = 0
$SESSIONS_COUNT_LIMIT_MAX = 60000
$TRIMED_FILE_LEN = 784
$SOURCE_SESSION_DIR = "2_Session\AllLayers"
echo "If Sessions more than $SESSIONS_COUNT_LIMIT_MAX we only select the largest $SESSIONS_COUNT_LIMIT_MAX."
echo "Finally Selected Sessions:"
$dirs = Get-ChildItem $SOURCE_SESSION_DIR -Directory
foreach ($d in $dirs)
{
    $files = Get-ChildItem $d.FullName
    $count = $files.Count
    if ($count -gt $SESSIONS_COUNT_LIMIT_MIN)
    {
        echo "$($d.Name) $count"
        if ($count -gt $SESSIONS_COUNT_LIMIT_MAX)
        {
            # Too many sessions: keep only the largest files.
            $files = $files | Sort-Object Length -Descending | Select-Object -First $SESSIONS_COUNT_LIMIT_MAX
            $count = $SESSIONS_COUNT_LIMIT_MAX
        }
        $files = $files | Resolve-Path
        # A random 10% of the files become the test set; the rest train.
        $test = $files | Get-Random -Count ([int]($count / 10))
        $train = $files | Where-Object { $_ -notin $test }
        $path_test = "3_ProcessedSession\FilteredSession\Test\$($d.Name)"
        $path_train = "3_ProcessedSession\FilteredSession\Train\$($d.Name)"
        New-Item -Path $path_test -ItemType Directory -Force
        New-Item -Path $path_train -ItemType Directory -Force
        Copy-Item $test -Destination $path_test
        Copy-Item $train -Destination $path_train
    }
}
echo "All files will be trimed to $TRIMED_FILE_LEN length and if it's even shorter we'll fill the end with 0x00..."
$paths = @(('3_ProcessedSession\FilteredSession\Train', '3_ProcessedSession\TrimedSession\Train'), ('3_ProcessedSession\FilteredSession\Test', '3_ProcessedSession\TrimedSession\Test'))
foreach ($p in $paths)
{
    foreach ($d in Get-ChildItem $p[0] -Directory)
    {
        New-Item -Path "$($p[1])\$($d.Name)" -ItemType Directory -Force
        foreach ($f in Get-ChildItem $d.FullName)
        {
            $content = [System.IO.File]::ReadAllBytes($f.FullName)
            $len = $f.Length - $TRIMED_FILE_LEN
            if ($len -gt 0)
            {
                # Longer than target: keep only the first $TRIMED_FILE_LEN bytes.
                $content = $content[0..($TRIMED_FILE_LEN - 1)]
            }
            elseif ($len -lt 0)
            {
                # Shorter than target: pad the tail with 0x00 bytes.
                # BUGFIX: was "$content = $content += $padding" — a redundant
                # double assignment; "+=" already rebinds $content.
                $padding = [Byte[]] (,0x00 * ([math]::Abs($len)))
                $content += $padding
            }
            # NOTE(review): "-Encoding Byte" only exists in Windows PowerShell 5.x;
            # PowerShell 7+ would need "-AsByteStream" instead.
            Set-Content -Value $content -Encoding Byte -Path "$($p[1])\$($d.Name)\$($f.Name)"
        }
    }
}
后两步运行的是Python文件。
(3)3_Session2png.py
import numpy
from PIL import Image
import binascii
import errno
import os
PNG_SIZE = 28  # output images are PNG_SIZE pixels wide (28x28, MNIST-like)

def getMatrixfrom_pcap(filename, width):
    """Read *filename* as raw bytes and return a 2-D uint8 matrix.

    The byte stream is cut into rows of *width* bytes; trailing bytes that
    do not fill a complete row are discarded.
    """
    with open(filename, 'rb') as f:
        content = f.read()
    # numpy.frombuffer decodes the bytes directly in C -- equivalent to the
    # old binascii.hexlify / int(..., 16) round-trip, which rebuilt every
    # byte through a Python-level loop.  .copy() keeps the array writable,
    # matching the original return value.
    fh = numpy.frombuffer(content, dtype=numpy.uint8).copy()
    rn = len(fh) // width
    fh = fh[:rn * width].reshape(-1, width)
    return fh
def mkdir_p(path):
    """Create *path* with any missing parents; no error if it already exists."""
    try:
        os.makedirs(path)
    except OSError as exc:
        # Re-raise unless the directory is already present (mkdir -p semantics).
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
# Convert every trimmed session file into a PNG_SIZE-wide grayscale PNG.
# Class sub-directories are renumbered to their enumeration index (0, 1, ...),
# which later becomes the MNIST label in 4_Png2Mnist.py.
paths = [['3_ProcessedSession\TrimedSession\Train', '4_Png\Train'],['3_ProcessedSession\TrimedSession\Test', '4_Png\Test']]
for p in paths:
    for i, d in enumerate(os.listdir(p[0])):
        # Output directory is named by class index, not by the original dir name.
        dir_full = os.path.join(p[1], str(i))
        mkdir_p(dir_full)
        for f in os.listdir(os.path.join(p[0], d)):
            bin_full = os.path.join(p[0], d, f)
            # Each 784-byte file becomes one 28x28 grayscale image.
            im = Image.fromarray(getMatrixfrom_pcap(bin_full,PNG_SIZE))
            png_full = os.path.join(dir_full, os.path.splitext(f)[0]+'.png')
            im.save(png_full)
(4)4_Png2Mnist.py
import os
import errno
from PIL import Image
from array import *
from random import shuffle
def mkdir_p(path):
    """Behave like ``mkdir -p``: create *path* (and parents), tolerate existing."""
    try:
        os.makedirs(path)
    except OSError as exc:
        already_there = exc.errno == errno.EEXIST and os.path.isdir(path)
        if not already_there:
            raise
# Load from and save to
mkdir_p('5_Mnist')
# NOTE(review): only the Test split is converted here; the Train split
# ('4_Png\Train') would need its own entry to build a full MNIST pair.
Names = [['4_Png\Test','5_Mnist\\test']]
for name in Names:
    data_image = array('B')
    data_label = array('B')
    # Collect every PNG; its class sub-directory name is the numeric label.
    FileList = []
    for dirname in os.listdir(name[0]):
        path = os.path.join(name[0], dirname)
        for filename in os.listdir(path):
            if filename.endswith(".png"):
                FileList.append(os.path.join(name[0], dirname, filename))
    shuffle(FileList)  # useful for further segmenting a validation set
    for filename in FileList:
        print (filename)
        print(filename.split('\\')[2])
        # Path component index 2 is the class directory, e.g. 4_Png\Test\3\f.png -> 3.
        label = int(filename.split('\\')[2])
        print("标签:")
        print(label)
        Im = Image.open(filename)
        pixel = Im.load()
        width, height = Im.size
        # NOTE(review): PIL pixel access is [x, y], so pixel[y, x] stores the
        # transposed image; harmless for the square 28x28 PNGs produced upstream,
        # but confirm before feeding non-square images through here.
        for x in range(0,width):
            for y in range(0,height):
                data_image.append(pixel[y,x])
        data_label.append(label)  # labels start (one unsigned byte each)
    # IDX label-file header: magic 0x00000801, then the 32-bit big-endian count.
    # (Replaces the old hex-string shuffle with direct byte extraction.)
    count = len(FileList)
    header = array('B')
    header.extend([0,0,8,1])
    header.append((count >> 24) & 0xFF)
    header.append((count >> 16) & 0xFF)
    header.append((count >> 8) & 0xFF)
    header.append(count & 0xFF)
    data_label = header + data_label
    # Image-file header additionally carries the 32-bit row and column counts.
    # BUGFIX: each dimension is written as a single byte below, so the real
    # limit is 255 — the old "<= 256" test let width/height == 256 through and
    # crashed with OverflowError instead of raising the intended ValueError.
    if max([width, height]) <= 255:
        header.extend([0,0,0,width,0,0,0,height])
    else:
        raise ValueError('Image exceeds maximum size: 256x256 pixels')
    header[3] = 3  # Changing MSB for image data (0x00000803)
    data_image = header + data_image
    output_file = open(name[1]+'-images-idx3-ubyte', 'wb')
    data_image.tofile(output_file)
    output_file.close()
    output_file = open(name[1]+'-labels-idx1-ubyte', 'wb')
    data_label.tofile(output_file)
    output_file.close()
# gzip resulting files (relies on an external "gzip" command being on PATH)
for name in Names:
    os.system('gzip '+name[1]+'-images-idx3-ubyte')
    os.system('gzip '+name[1]+'-labels-idx1-ubyte')