BUUCTF 蜘蛛侠呀 (Spider-Man)

Extracting the archive gives a packet capture containing a large number of ICMP packets.

The tails of the ICMP packets carry hex strings that begin with $$START$$, and many of them are duplicates. We only need to extract these trailing strings from the packets where icmp.type == 0 (the ones highlighted in the screenshot above), convert the hex strings back into the corresponding bytes, and de-duplicate them.

This can be done with the following Python script:

import pyshark
import binascii


def process_pcap():
    # Open out.pcap with pyshark's FileCapture and apply a display filter
    # so that only ICMP packets with icmp.type == 0 (echo replies) are captured
    packets = pyshark.FileCapture('out.pcap', display_filter="icmp.type==0")
    res = []
    # Open out.txt for writing, using UTF-8 encoding
    with open('out.txt', 'w', encoding='utf-8') as f:
        # Iterate over every captured packet
        for each in packets:
            try:
                # Convert the hex string in the packet's data field (each.icmp.data)
                # to bytes, then decode those bytes as UTF-8
                data = binascii.unhexlify(each.icmp.data).decode('utf-8')
                # Only handle strings we have not seen before
                if data not in res:
                    # Write the string to out.txt
                    f.write(data)
                    # Remember the string so duplicates are skipped
                    res.append(data)
            # Skip packets whose payload cannot be unhexlified or decoded
            except (binascii.Error, UnicodeDecodeError):
                pass
    # Close the capture object
    packets.close()
    print('done')


if __name__ == '__main__':
    process_pcap()

Remove the start and end marker lines from the beginning and end of out.txt, and strip the $$START$$ prefix from the head of each remaining line.
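If you prefer to script that cleanup, here is a minimal sketch (it assumes out.txt from the first script contains one extracted chunk per line, with the first and last lines being the start/end marker lines; the output file name is just a placeholder):

with open('out.txt', 'r', encoding='utf-8') as f:
    lines = f.read().splitlines()

lines = lines[1:-1]  # drop the first and last lines (the start/end markers)
# Strip the $$START$$ prefix from the beginning of each remaining line
lines = [line[len('$$START$$'):] if line.startswith('$$START$$') else line for line in lines]

with open('cleaned_out.txt', 'w', encoding='utf-8') as f:
    f.write('\n'.join(lines))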

Then copy the contents into CyberChef.

Alternatively, use either of the two Python scripts below, which write processed_out.txt directly; copy its contents into CyberChef.

This approach has two advantages: it produces the final result directly, and because the capture is large (processing takes roughly two minutes), the startup message and progress bar make for a much better user experience.

import os
import pyshark
import binascii
from tqdm import tqdm
import time


def process_pcap_sync():
    res = []
    print("Counting packets...")
    start_time = time.perf_counter()
    last_print_time = start_time
    current_time = time.perf_counter()
    elapsed_time = current_time - start_time
    print(f"\rCounting packets, elapsed: {elapsed_time:.3f}s", end='')
    try:
        packet_capture = pyshark.FileCapture('out.pcap', display_filter="icmp.type==0")
        total_packets = len(list(packet_capture))
        for packet in packet_capture:
            current_time = time.perf_counter()
            if current_time - last_print_time >= 0.1:
                elapsed_time = current_time - start_time
                print(f"\rCounting packets, elapsed: {elapsed_time:.3f}s", end='')
                last_print_time = current_time
        packet_capture.close()
        end_time = time.perf_counter()
        print(f"\rCounting packets took: {end_time - start_time:.3f}s")
        print("Capturing and processing packets...")
        # Re-create the packet_capture object because the previous one was closed
        packet_capture = pyshark.FileCapture('out.pcap', display_filter="icmp.type==0")
        progress_bar = tqdm(total=total_packets)
        for packet in packet_capture:
            try:
                data = binascii.unhexlify(packet.icmp.data).decode('utf-8')
                if data not in res:
                    res.append(data)
            except (binascii.Error, UnicodeDecodeError) as e:
                print(f"Error while decoding packet data: {e}")
            progress_bar.update(1)
        progress_bar.close()
        packet_capture.close()
    except Exception as e:
        print(f"Error while processing the pcap file: {e}")

    if not res:
        print("No valid data extracted; check the display filter or the pcap contents")
        return

    # Drop the first and last entries (the start/end marker lines)
    new_res = res[1:-1]
    new_content = []
    for line in new_res:
        # Strip the leading $$START$$ marker from each line
        if line.startswith('$$START$$'):
            line = line.replace('$$START$$', '', 1)
        line = line.rstrip('\n')
        new_content.append(line)

    output_file = 'processed_out.txt'
    with open(output_file, 'w', encoding='utf-8') as f_out:
        for line in new_content:
            f_out.write(line + '\n')
    print('done')


if __name__ == '__main__':
    try:
        process_pcap_sync()
    except Exception as e:
        print(f"Error while running the main program: {e}")
The second of the two scripts additionally records each matching packet's metadata in a SQLite database (packets.db):

import pyshark
import time
import sqlite3
from tqdm import tqdm

def get_ip_info(packet):
    try:
        source_ip = packet.ip.src
        destination_ip = packet.ip.dst
    except AttributeError:
        source_ip = 'N/A'
        destination_ip = 'N/A'
    return source_ip, destination_ip


def process_pcap_sync():
    res = []
    unique_data = set()
    print("Counting packets...")
    start_time = time.perf_counter()
    try:
        conn = sqlite3.connect('packets.db')
        cursor = conn.cursor()
        cursor.execute('''CREATE TABLE IF NOT EXISTS packets
                          (id INTEGER PRIMARY KEY AUTOINCREMENT,
                           timestamp REAL,
                           source_ip TEXT,
                           destination_ip TEXT,
                           protocol TEXT,
                           data TEXT)''')
        packet_count = 0
        capture = pyshark.FileCapture('out.pcap')
        data_to_insert = []
        for packet in capture:
            if hasattr(packet, 'icmp') and int(packet.icmp.type) == 0:
                packet_count += 1
                timestamp = float(packet.sniff_time.timestamp())
                source_ip, destination_ip = get_ip_info(packet)
                protocol = packet.transport_layer
                data = 'N/A'
                if hasattr(packet, 'icmp'):
                    data_hex = packet.icmp.data.binary_value
                    if data_hex.startswith(b'$$START$$'):
                        try:
                            data = data_hex.decode('utf-8')
                            if data not in unique_data:
                                unique_data.add(data)
                                res.append(data)
                                data_to_insert.append((timestamp, source_ip, destination_ip, protocol, data))
                        except UnicodeDecodeError:
                            pass
        total_packets = packet_count
        cursor.executemany('INSERT INTO packets (timestamp, source_ip, destination_ip, protocol, data) VALUES (?,?,?,?,?)',
                           data_to_insert)
        conn.commit()
        end_time = time.perf_counter()
        print(f"\rCounting packets took: {end_time - start_time:.3f}s")
        print("Capturing and processing packets...")
        progress_bar = tqdm(total=total_packets)
        for packet in capture:
            if hasattr(packet, 'icmp') and int(packet.icmp.type) == 0:
                if hasattr(packet, 'icmp'):
                    data_hex = packet.icmp.data.binary_value
                    if data_hex.startswith(b'$$START$$'):
                        try:
                            data = data_hex.decode('utf-8')
                            if data not in unique_data:
                                unique_data.add(data)
                                res.append(data)
                        except UnicodeDecodeError:
                            pass
                progress_bar.update(1)
        progress_bar.close()
        conn.close()
    except Exception as e:
        print(f"Error while processing the pcap file: {e}")

    if not res:
        print("No valid data extracted; check the filter conditions or the pcap contents")
        return

    # Drop the first and last entries (the start/end marker lines)
    new_res = res[1:-1]
    new_content = []
    for line in new_res:
        if line.startswith('$$START$$'):
            line = line.replace('$$START$$', '', 1)
        line = line.rstrip('\n')
        new_content.append(line)

    output_file = 'processed_out.txt'
    with open(output_file, 'w', encoding='utf-8') as f_out:
        for line in new_content:
            f_out.write(line + '\n')
    print('done')


if __name__ == '__main__':
    try:
        process_pcap_sync()
    except Exception as e:
        print(f"Error while running the main program: {e}")

pyshark performs a deep dissection of every packet, which is resource-intensive and slow, so the Python scripts above are inefficient: each takes more than four minutes to run.

So I dropped pyshark for processing the capture and switched to the more powerful and flexible Scapy module. Scapy is a powerful interactive packet-manipulation program that can send, sniff, dissect, and forge network packets. Compared with pyshark it offers far more flexibility for building, modifying, and sending packets, and as expected it is noticeably faster. The code is below:

from scapy.all import *
import binascii
from tqdm import tqdm
import time
import sqlite3


def get_ip_info(packet):
    if packet.haslayer(IP):
        return packet[IP].src, packet[IP].dst
    return 'N/A', 'N/A'


def process_pcap_sync():
    res = []
    unique_data = set()
    print("Counting packets...")
    start_time = time.perf_counter()
    try:
        conn = sqlite3.connect('packets.db')
        cursor = conn.cursor()
        cursor.execute('''CREATE TABLE IF NOT EXISTS packets
                          (id INTEGER PRIMARY KEY AUTOINCREMENT,
                           timestamp REAL,
                           source_ip TEXT,
                           destination_ip TEXT,
                           protocol TEXT,
                           data TEXT)''')
        packet_count = 0
        packets = rdpcap('out.pcap')
        data_to_insert = []
        for packet in packets:
            if packet.haslayer(ICMP) and packet[ICMP].type == 0:
                packet_count += 1
                timestamp = float(packet.time)
                source_ip, destination_ip = get_ip_info(packet)
                protocol = packet.name
                data = 'N/A'
                if packet.haslayer(ICMP):
                    data_hex = packet[ICMP].load
                    if data_hex.startswith(b'$$START$$'):
                        try:
                            data = data_hex.decode('utf-8')
                            if data not in unique_data:
                                unique_data.add(data)
                                res.append(data)
                                data_to_insert.append((timestamp, source_ip, destination_ip, protocol, data))
                        except UnicodeDecodeError:
                            pass
        total_packets = packet_count
        cursor.executemany('INSERT INTO packets (timestamp, source_ip, destination_ip, protocol, data) VALUES (?,?,?,?,?)',
                           data_to_insert)
        conn.commit()
        end_time = time.perf_counter()
        print(f"\rCounting packets took: {end_time - start_time:.3f}s")
        print("Capturing and processing packets...")
        progress_bar = tqdm(total=total_packets)
        for packet in packets:
            if packet.haslayer(ICMP) and packet[ICMP].type == 0:
                if packet.haslayer(ICMP):
                    data_hex = packet[ICMP].load
                    if data_hex.startswith(b'$$START$$'):
                        try:
                            data = data_hex.decode('utf-8')
                            if data not in unique_data:
                                unique_data.add(data)
                                res.append(data)
                        except UnicodeDecodeError:
                            pass
                progress_bar.update(1)
        progress_bar.close()
        conn.close()
    except Exception as e:
        print(f"Error while processing the pcap file: {e}")

    if not res:
        print("No valid data extracted; check the filter conditions or the pcap contents")
        return

    # Drop the first and last entries (the start/end marker lines)
    new_res = res[1:-1]
    new_content = []
    for line in new_res:
        if line.startswith('$$START$$'):
            line = line.replace('$$START$$', '', 1)
        line = line.rstrip('\n')
        new_content.append(line)

    output_file = 'processed_out.txt'
    with open(output_file, 'w', encoding='utf-8') as f_out:
        for line in new_content:
            f_out.write(line + '\n')
    print('done')


if __name__ == '__main__':
    try:
        process_pcap_sync()
    except Exception as e:
        print(f"Error while running the main program: {e}")

Reading the capture now takes only a few tens of seconds.

Then I found an even better library: dpkt, a lightweight packet-parsing library that is very efficient. Because it is lightweight and works close to the raw bytes, it parses packets quickly with little processing time or resource overhead, which keeps performance acceptable even when handling large numbers of packets (for example, captures from a high-speed network).

It finishes the job in a few seconds; the script is as follows:

import dpkt
import socket
import time
from tqdm import tqdm
import sqlite3


def get_ip_info(eth):
    if isinstance(eth.data, dpkt.ip.IP):
        ip = eth.data
        return socket.inet_ntoa(ip.src), socket.inet_ntoa(ip.dst)
    return 'N/A', 'N/A'


def process_pcap_sync():
    res = []
    unique_data = set()
    print("Counting packets...")
    start_time = time.perf_counter()
    try:
        conn = sqlite3.connect('packets.db')
        cursor = conn.cursor()
        cursor.execute('''CREATE TABLE IF NOT EXISTS packets
                          (id INTEGER PRIMARY KEY AUTOINCREMENT,
                           timestamp REAL,
                           source_ip TEXT,
                           destination_ip TEXT,
                           protocol TEXT,
                           data TEXT)''')
        packet_count = 0
        packet_info = []
        with open('out.pcap', 'rb') as f:
            pcap = dpkt.pcap.Reader(f)
            data_to_insert = []
            for ts, buf in pcap:
                eth = dpkt.ethernet.Ethernet(buf)
                if isinstance(eth.data, dpkt.ip.IP) and isinstance(eth.data.data, dpkt.icmp.ICMP) and eth.data.data.type == 0:
                    packet_count += 1
                    timestamp = ts
                    source_ip, destination_ip = get_ip_info(eth)
                    protocol = 'ICMP'
                    packet_info.append((timestamp, source_ip, destination_ip, protocol))
                    data = 'N/A'
                    if eth.data.data.data:
                        icmp_data = eth.data.data.data
                        if isinstance(icmp_data, dpkt.icmp.ICMP.Echo):
                            try:
                                data_hex = bytes(icmp_data)
                            except TypeError as e:
                                print(f"TypeError when converting ICMP.Echo to bytes: {e}")
                                continue
                        else:
                            data_hex = icmp_data
                        # print(f"Before conversion, data_hex: {data_hex}, type: {type(data_hex)}")
                        # if isinstance(data_hex, str):
                        #     data_hex = bytes.fromhex(data_hex)
                        # print(f"After conversion, data_hex: {data_hex}, type: {type(data_hex)}")
                        if isinstance(data_hex, bytes):
                            if data_hex.startswith(b'$$START$$'):
                                try:
                                    data = data_hex.decode('utf-8')
                                    print(f"Decoded data: {data}")
                                    if data not in unique_data:
                                        unique_data.add(data)
                                        res.append(data)
                                        data_to_insert.append((timestamp, source_ip, destination_ip, protocol, data))
                                except UnicodeDecodeError as e:
                                    print(f"UnicodeDecodeError: {e} while decoding {data_hex}")
        total_packets = packet_count
        cursor.executemany('INSERT INTO packets (timestamp, source_ip, destination_ip, protocol, data) VALUES (?,?,?,?,?)',
                           data_to_insert)
        conn.commit()
        end_time = time.perf_counter()
        print(f"\rCounting packets took: {end_time - start_time:.3f}s")
        print("Capturing and processing packets...")
        progress_bar = tqdm(total=total_packets)
        with open('out.pcap', 'rb') as f:
            pcap = dpkt.pcap.Reader(f)
            index = 0
            for ts, buf in pcap:
                eth = dpkt.ethernet.Ethernet(buf)
                if isinstance(eth.data, dpkt.ip.IP) and isinstance(eth.data.data, dpkt.icmp.ICMP) and eth.data.data.type == 0:
                    timestamp, source_ip, destination_ip, protocol = packet_info[index]
                    index += 1
                    if eth.data.data.data:
                        icmp_data = eth.data.data.data
                        if isinstance(icmp_data, dpkt.icmp.ICMP.Echo):
                            try:
                                data_hex = bytes(icmp_data)
                            except TypeError as e:
                                print(f"TypeError when converting ICMP.Echo to bytes: {e}")
                                continue
                        else:
                            data_hex = icmp_data
                        # if isinstance(data_hex, str):
                        #     data_hex = bytes.fromhex(data_hex)
                        # print(f"Data hex (raw): {data_hex.hex()}")
                        if isinstance(data_hex, bytes):
                            start_marker = b'$$START$$'
                            start_index = data_hex.find(start_marker)
                            if start_index != -1:
                                data_hex = data_hex[start_index:]
                                try:
                                    data = data_hex.decode('utf-8')
                                    # print(f"Decoded data: {data}")
                                    if data not in unique_data:
                                        unique_data.add(data)
                                        res.append(data)
                                except UnicodeDecodeError as e:
                                    print(f"UnicodeDecodeError: {e} while decoding {data_hex}")
                    progress_bar.update(1)
        progress_bar.close()
        conn.close()
    except Exception as e:
        print(f"Error while processing the pcap file: {e}")

    if not res:
        print("No valid data extracted; check the filter conditions or the pcap contents")
        return

    # Drop the first and last entries (the start/end marker lines)
    new_res = res[1:-1]
    new_content = []
    for line in new_res:
        if line.startswith('$$START$$'):
            line = line.replace('$$START$$', '', 1)
        line = line.rstrip('\n')
        new_content.append(line)

    output_file = 'processed_out.txt'
    with open(output_file, 'w', encoding='utf-8') as f_out:
        for line in new_content:
            f_out.write(line + '\n')
    print('done')


if __name__ == '__main__':
    try:
        process_pcap_sync()
    except Exception as e:
        print(f"Error while running the main program: {e}")

The tricky part while debugging this script was how to strip the leading bytes from the raw data_hex.

Before "$$START$$" there are the bytes \x0cn\x01\x00 (the ICMP Echo identifier and sequence fields that bytes() on the Echo object includes). This means "$$START$$" is not at the absolute start of the byte string; there is other data before it.

start_marker = b'$$START$$'
start_index = data_hex.find(start_marker)
if start_index != -1:
    data_hex = data_hex[start_index:]

Handling a prefix of unknown length when a marker is present

If you don't know how long the leading bytes are, but you do know that some marker after them (for example $$START$$) is the real start of the data, you can first locate the marker and then slice the prefix away.

Here the find method locates the marker b'$$START$$' inside the byte string; if it is found (find does not return -1), the slice data_hex[start_index:] produces a new byte string running from the marker position to the end, which effectively removes the leading bytes.

• The scripts above keep all of their debug print statements, so feel free to play with them and experiment. Since the code was getting rather long, here is a tidied-up version:
import dpkt
import socket
import time
from tqdm import tqdm
import sqlite3


def get_ip_info(eth):
    if isinstance(eth.data, dpkt.ip.IP):
        ip = eth.data
        return socket.inet_ntoa(ip.src), socket.inet_ntoa(ip.dst)
    return 'N/A', 'N/A'


def process_icmp_data(icmp_data):
    res = []
    if isinstance(icmp_data, dpkt.icmp.ICMP.Echo):
        try:
            data_hex = bytes(icmp_data)
        except TypeError as e:
            print(f"TypeError when converting ICMP.Echo to bytes: {e}")
            return res
    else:
        data_hex = icmp_data
    if isinstance(data_hex, bytes):
        start_marker = b'$$START$$'
        start_index = data_hex.find(start_marker)
        if start_index != -1:
            data_hex = data_hex[start_index:]
            try:
                data = data_hex.decode('utf-8')
                res.append(data)
            except UnicodeDecodeError as e:
                print(f"UnicodeDecodeError: {e} while decoding {data_hex}")
    return res


def process_pcap_sync():
    print("Counting packets...")
    start_time = time.perf_counter()
    try:
        conn = sqlite3.connect('packets.db')
        cursor = conn.cursor()
        cursor.execute('''CREATE TABLE IF NOT EXISTS packets
                          (id INTEGER PRIMARY KEY AUTOINCREMENT,
                           timestamp REAL,
                           source_ip TEXT,
                           destination_ip TEXT,
                           protocol TEXT,
                           data TEXT)''')
        packet_count = 0
        packet_info = []
        data_to_insert = []
        unique_data = set()
        res = []  # keeps the extracted strings in packet order (a bare set would lose it)
        with open('out.pcap', 'rb') as f:
            pcap = dpkt.pcap.Reader(f)
            for ts, buf in pcap:
                eth = dpkt.ethernet.Ethernet(buf)
                if isinstance(eth.data, dpkt.ip.IP) and isinstance(eth.data.data, dpkt.icmp.ICMP) and eth.data.data.type == 0:
                    packet_count += 1
                    timestamp = ts
                    source_ip, destination_ip = get_ip_info(eth)
                    protocol = 'ICMP'
                    packet_info.append((timestamp, source_ip, destination_ip, protocol))
                    if eth.data.data.data:
                        icmp_res = process_icmp_data(eth.data.data.data)
                        for data in icmp_res:
                            if data not in unique_data:
                                unique_data.add(data)
                                res.append(data)
                                data_to_insert.append((timestamp, source_ip, destination_ip, protocol, data))
        total_packets = packet_count
        cursor.executemany('INSERT INTO packets (timestamp, source_ip, destination_ip, protocol, data) VALUES (?,?,?,?,?)',
                           data_to_insert)
        conn.commit()
        end_time = time.perf_counter()
        print(f"\rCounting packets took: {end_time - start_time:.3f}s")
        print("Capturing and processing packets...")
        progress_bar = tqdm(total=total_packets)
        with open('out.pcap', 'rb') as f:
            pcap = dpkt.pcap.Reader(f)
            index = 0
            for ts, buf in pcap:
                eth = dpkt.ethernet.Ethernet(buf)
                if isinstance(eth.data, dpkt.ip.IP) and isinstance(eth.data.data, dpkt.icmp.ICMP) and eth.data.data.type == 0:
                    timestamp, source_ip, destination_ip, protocol = packet_info[index]
                    index += 1
                    if eth.data.data.data:
                        icmp_res = process_icmp_data(eth.data.data.data)
                        for data in icmp_res:
                            if data not in unique_data:
                                unique_data.add(data)
                                res.append(data)
                    progress_bar.update(1)
        progress_bar.close()
        conn.close()
        # Write the de-duplicated strings out in their original packet order
        new_content = []
        for line in res:
            if line.startswith('$$START$$'):
                line = line.replace('$$START$$', '', 1)
            line = line.rstrip('\n')
            new_content.append(line)
        output_file = 'processed_out.txt'
        with open(output_file, 'w', encoding='utf-8') as f_out:
            for line in new_content:
                f_out.write(line + '\n')
        print('done')
    except Exception as e:
        print(f"Error while processing the pcap file: {e}")


if __name__ == '__main__':
    try:
        process_pcap_sync()
    except Exception as e:
        print(f"Error while running the main program: {e}")

CyberChef recognizes the data as a ZIP archive. Click the save icon, save it as a .zip file, and extract it to get flag.gif.

Copy the GIF into Kali and run the following command:

identify -format "%T" flag.gif
 

identify prints each frame's delay time (%T is the delay in hundredths of a second), and these delays carry the hidden message:

2050502050502050205020202050202020205050205020502050205050505050202050502020205020505050205020206666

Strip the trailing 6666, then replace every 20 with 0 and every 50 with 1:

205050205050205020502020205020202020505020502050205020505050505020205050202020502050505020502020
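For reference, this strip-and-substitute step can also be done in a few lines of plain Python (a quick sketch using the identify output above; the Qt tool below achieves the same thing interactively):

delays = "2050502050502050205020202050202020205050205020502050205050505050202050502020205020505050205020206666"
# Walk the string two characters at a time: 20 -> 0, 50 -> 1; the trailing 66 pairs are dropped.
pairs = [delays[i:i + 2] for i in range(0, len(delays), 2)]
bits = ''.join('0' if p == '20' else '1' for p in pairs if p in ('20', '50'))
print(bits)  # 011011010100010000110101010111110011000101110100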

I also wrote a small find-and-replace program with Python and Qt to do the substitution; the source is below:

import sys
from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QLabel, QLineEdit, QPushButton, QTextEdit
from PyQt5.QtGui import QTextCursor


class TextReplaceTool(QWidget):
    def __init__(self):
        super().__init__()

        self.init_ui()

    def init_ui(self):
        # "Find" input box and label
        self.find_label = QLabel('Find:')
        self.find_input = QLineEdit()

        # "Replace" input box and label
        self.replace_label = QLabel('Replace with:')
        self.replace_input = QLineEdit()

        # Find button
        self.find_button = QPushButton('Find')
        self.find_button.clicked.connect(self.find_text)

        # Replace button
        self.replace_button = QPushButton('Replace')
        self.replace_button.clicked.connect(self.replace_text)

        # Text editing area
        self.text_edit = QTextEdit()

        # Layout
        hbox1 = QHBoxLayout()
        hbox1.addWidget(self.find_label)
        hbox1.addWidget(self.find_input)

        hbox2 = QHBoxLayout()
        hbox2.addWidget(self.replace_label)
        hbox2.addWidget(self.replace_input)

        hbox3 = QHBoxLayout()
        hbox3.addWidget(self.find_button)
        hbox3.addWidget(self.replace_button)

        vbox = QVBoxLayout()
        vbox.addLayout(hbox1)
        vbox.addLayout(hbox2)
        vbox.addLayout(hbox3)
        vbox.addWidget(self.text_edit)

        self.setLayout(vbox)

        self.setWindowTitle('Text Find & Replace Tool')
        self.show()

    def find_text(self):
        find_str = self.find_input.text()
        text = self.text_edit.toPlainText()
        start_index = text.find(find_str)
        if start_index != -1:
            # Move the cursor to the first match (PyQt5 uses QTextCursor for cursor operations)
            self.text_edit.moveCursor(QTextCursor.Start)
            cursor = self.text_edit.textCursor()
            cursor.setPosition(start_index)
            self.text_edit.setTextCursor(cursor)

    def replace_text(self):
        find_str = self.find_input.text()
        replace_str = self.replace_input.text()
        text = self.text_edit.toPlainText()
        new_text = text.replace(find_str, replace_str)
        self.text_edit.setPlainText(new_text)


if __name__ == '__main__':
    app = QApplication(sys.argv)
    ex = TextReplaceTool()
    sys.exit(app.exec_())

Run the GUI and perform the two replacements (20 → 0, then 50 → 1) to get the result:

011011010100010000110101010111110011000101110100

Back in CyberChef:

First apply From Binary (binary → string), then MD5.
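The same two steps can be reproduced in Python as a sanity check (a small sketch; hashlib is the standard-library MD5 module):

import hashlib

bits = "011011010100010000110101010111110011000101110100"
# "From Binary": group the bit string into 8-bit bytes and decode them as ASCII.
text = bytes(int(bits[i:i + 8], 2) for i in range(0, len(bits), 8)).decode()
print(text)                                    # the decoded string
print(hashlib.md5(text.encode()).hexdigest())  # its MD5 digest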

The result is f0f1003afe4ae8ce4aa8e8487a8ab3b6

flag{f0f1003afe4ae8ce4aa8e8487a8ab3b6}
