python之bt种子,dht网络共享热门资源

11 篇文章 0 订阅
2 篇文章 0 订阅

最近研究了dht网络,使用python写了个爬虫程序,另外用php做了个搜索网站,今天又把sphinx加上了,这样就是一个简单的bt种子搜索引擎了哈,网址:http://bt.dianfenxiang.com



H5小游戏网站:http://app.ih5games.com

另外也弄了一个微信公众号,不时发布一些热门影视剧:

微信公众号:tutuyouya


下面这个是加入共享网络,并获取种子hash码的Python:

#encoding: utf-8
import socket
from hashlib import sha1
from random import randint
from struct import unpack, pack
from socket import inet_aton, inet_ntoa
from bisect import bisect_left
from threading import Timer
from time import sleep
import MySQLdb

from bencode import bencode, bdecode

# Well-known public DHT routers used to bootstrap into the network.
BOOTSTRAP_NODES = [
    ("router.bittorrent.com", 6881),
    ("dht.transmissionbt.com", 6881),
    ("router.utorrent.com", 6881)
] 
TID_LENGTH = 8        # bytes of random transaction id ("t") in KRPC queries
KRPC_TIMEOUT = 10     # seconds between re-join checks (see Client.timeout)
REBORN_TIME = 5 * 60  # seconds between node-id resets (see Client.reborn)
K = 8                 # Kademlia "k": bucket capacity and neighbor-list size

def entropy(length):
    """Return *length* random bytes as a byte string.

    The parameter was renamed from ``bytes`` (it shadowed the builtin);
    every caller in this file passes it positionally.
    """
    # join() avoids the quadratic cost of repeated string concatenation.
    return "".join(chr(randint(0, 255)) for _ in range(length))

def random_id():
    """Return a random 20-byte node id (SHA-1 digest of 20 random bytes)."""
    return sha1(entropy(20)).digest()

def decode_nodes(nodes):
    """Unpack a compact "nodes" value into a list of (nid, ip, port) tuples.

    Each entry is 26 bytes: a 20-byte node id, a 4-byte packed IPv4
    address, and a 2-byte big-endian port.  Input whose length is not a
    multiple of 26 yields an empty list.
    """
    if len(nodes) % 26 != 0:
        return []
    return [
        (
            nodes[offset:offset + 20],
            inet_ntoa(nodes[offset + 20:offset + 24]),
            unpack("!H", nodes[offset + 24:offset + 26])[0],
        )
        for offset in range(0, len(nodes), 26)
    ]

def encode_nodes(nodes):
    """Pack KNode-like objects into the compact 26-bytes-per-node format."""
    return "".join(
        "%s%s%s" % (peer.nid, inet_aton(peer.ip), pack("!H", peer.port))
        for peer in nodes
    )

def intify(hstr):
    # Interpret a 20-byte id string as a big-endian integer, used for the
    # Kademlia XOR distance metric.  Python 2 only: relies on
    # str.encode('hex') and the long() builtin.
    return long(hstr.encode('hex'), 16)    

def timer(t, f):
    """Schedule callable *f* to run once after *t* seconds (non-blocking)."""
    one_shot = Timer(t, f)
    one_shot.start()


class BucketFull(Exception):
    """Raised by KBucket.append when a bucket already holds K nodes;
    KTable.append catches it to decide whether to split the bucket."""
    pass


class KRPC(object):
    """Minimal KRPC (BitTorrent DHT RPC over UDP) message plumbing.

    Subclasses must set ``self.port`` before calling ``__init__`` and must
    provide the handler methods referenced in ``types``/``actions``
    (``find_node_handler``, ``ping_received``, ...).
    """

    def __init__(self):
        # Dispatch on the message "y" field: "r" = reply, "q" = query.
        self.types = {
            "r": self.response_received,
            "q": self.query_received
        }
        # Dispatch on the query name (the "q" field of incoming queries).
        self.actions = {
            "ping": self.ping_received,
            "find_node": self.find_node_received,
            "get_peers": self.get_peers_received,
            "announce_peer": self.announce_peer_received,
        }

        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.bind(("0.0.0.0", self.port))

    def response_received(self, msg, address):
        # Every reply is treated as a find_node reply: this crawler only
        # ever sends find_node queries.
        self.find_node_handler(msg)

    def query_received(self, msg, address):
        try:
            self.actions[msg["q"]](msg, address)
        except KeyError:
            # Unknown query type or malformed message: ignore it.
            pass

    def send_krpc(self, msg, address):
        """Bencode and send *msg* to *address*, best effort."""
        try:
            self.socket.sendto(bencode(msg), address)
        except Exception:
            # Was a bare ``except:``; narrowed so KeyboardInterrupt and
            # SystemExit are no longer swallowed.  Send failures are
            # deliberately ignored (fire-and-forget UDP).
            pass


class Client(KRPC):
    """Active side of the crawler: walks the DHT by issuing find_node
    queries to every node it hears about."""

    def __init__(self, table):
        self.table = table

        # Background timers: periodic re-join check and identity rotation.
        timer(KRPC_TIMEOUT, self.timeout)
        timer(REBORN_TIME, self.reborn)
        KRPC.__init__(self)

    def find_node(self, address, nid=None):
        """Send a find_node query to *address*, posing as a close neighbor
        of *nid* (or as ourselves when *nid* is not given)."""
        if nid:
            sender_id = self.get_neighbor(nid)
        else:
            sender_id = self.table.nid
        query = {
            "t": entropy(TID_LENGTH),
            "y": "q",
            "q": "find_node",
            "a": {"id": sender_id, "target": random_id()}
        }
        self.send_krpc(query, address)

    def find_node_handler(self, msg):
        """Query every well-formed node found in a find_node reply."""
        try:
            compact = msg["r"]["nodes"]
        except KeyError:
            return
        for nid, ip, port in decode_nodes(compact):
            if len(nid) != 20 or nid == self.table.nid:
                continue
            self.find_node((ip, port), nid)

    def joinDHT(self):
        # Seed the crawl from the public bootstrap routers.
        for address in BOOTSTRAP_NODES:
            self.find_node(address)

    def timeout(self):
        # Still near-empty routing table: bootstrap again, then re-arm.
        if len(self.table.buckets) < 2:
            self.joinDHT()
        timer(KRPC_TIMEOUT, self.timeout)

    def reborn(self):
        # Periodically adopt a fresh node id and forget the routing table.
        self.table.nid = random_id()
        self.table.buckets = [KBucket(0, 2 ** 160)]
        timer(REBORN_TIME, self.reborn)

    def start(self):
        """Join the DHT, then process incoming datagrams forever."""
        self.joinDHT()

        while True:
            try:
                data, address = self.socket.recvfrom(65536)
                decoded = bdecode(data)
                self.types[decoded["y"]](decoded, address)
            except Exception:
                # Malformed packets and handler errors are dropped silently.
                pass

    def get_neighbor(self, target):
        # Forge an id sharing the first 10 bytes (80 bits) with *target*.
        return target[:10] + random_id()[10:]


class Server(Client):
    """Passive side: answers incoming DHT queries so other nodes keep
    talking to us, and harvests info-hashes from get_peers/announce_peer
    traffic via ``master.log``."""

    def __init__(self, master, table, port):
        self.table = table
        self.master = master   # sink object with a .log(infohash) method
        self.port = port       # UDP port; bound by KRPC.__init__
        Client.__init__(self, table)

    def _reply(self, tid, rid, nodes=None):
        # Build the KRPC response envelope; shared by all four handlers
        # (they previously each rebuilt the same dict inline).
        r = {"id": rid}
        if nodes is not None:
            r["nodes"] = nodes
        return {"t": tid, "y": "r", "r": r}

    def ping_received(self, msg, address):
        try:
            nid = msg["a"]["id"]
            self.send_krpc(self._reply(msg["t"], self.get_neighbor(nid)), address)
            self.find_node(address, nid)
        except KeyError:
            pass

    def find_node_received(self, msg, address):
        try:
            target = msg["a"]["target"]
            neighbors = self.table.get_neighbors(target)

            nid = msg["a"]["id"]
            reply = self._reply(msg["t"], self.get_neighbor(target),
                                encode_nodes(neighbors))
            self.table.append(KNode(nid, *address))
            self.send_krpc(reply, address)
            self.find_node(address, nid)
        except KeyError:
            pass

    def get_peers_received(self, msg, address):
        try:
            infohash = msg["a"]["info_hash"]
            neighbors = self.table.get_neighbors(infohash)

            nid = msg["a"]["id"]
            # NOTE(review): BEP 5 also expects a "token" in get_peers
            # replies; it was omitted here originally, so strict peers may
            # never announce back to us -- confirm before changing.
            reply = self._reply(msg["t"], self.get_neighbor(infohash),
                                encode_nodes(neighbors))
            self.table.append(KNode(nid, *address))
            self.send_krpc(reply, address)
            self.master.log(infohash)   # harvest the info-hash
            self.find_node(address, nid)
        except KeyError:
            pass

    def announce_peer_received(self, msg, address):
        try:
            infohash = msg["a"]["info_hash"]
            nid = msg["a"]["id"]

            reply = self._reply(msg["t"], self.get_neighbor(infohash))
            self.table.append(KNode(nid, *address))
            self.send_krpc(reply, address)
            self.master.log(infohash)   # harvest the info-hash
            self.find_node(address, nid)
        except KeyError:
            pass

class KTable(object):
    """Kademlia-style routing table: a sorted list of KBuckets jointly
    covering the id space [0, 2**160)."""

    def __init__(self, nid):
        self.nid = nid                         # our own 20-byte node id
        self.buckets = [ KBucket(0, 2**160) ]  # one bucket spans everything

    def append(self, node):
        """Insert *node*; when its bucket is full, split only if that
        bucket covers our own id (standard Kademlia rule)."""
        index = self.bucket_index(node.nid)
        try:
            bucket = self.buckets[index]
            bucket.append(node)
        except IndexError:
            return
        except BucketFull:
            if not bucket.in_range(self.nid):
                return
            self.split_bucket(index)
            self.append(node)

    def get_neighbors(self, target):
        """Return up to K known nodes closest (XOR metric) to the
        20-byte *target* id."""
        nodes = []
        if len(self.buckets) == 0: return nodes
        if len(target) != 20: return nodes

        index = self.bucket_index(target)
        try:
            # BUG FIX: the original bound ``nodes`` directly to the
            # bucket's internal list, so the extend() calls below silently
            # appended neighbor-bucket nodes into that bucket.  Copy it.
            nodes = list(self.buckets[index].nodes)
            # lo/hi instead of min/max: the originals shadowed builtins.
            lo = index - 1
            hi = index + 1

            # Widen the window over adjacent buckets until K candidates.
            while len(nodes) < K and ((lo >= 0) or (hi < len(self.buckets))):
                if lo >= 0:
                    nodes.extend(self.buckets[lo].nodes)

                if hi < len(self.buckets):
                    nodes.extend(self.buckets[hi].nodes)

                lo -= 1
                hi += 1

            num = intify(target)
            # Python 2 cmp-style sort by XOR distance to the target.
            nodes.sort(lambda a, b, num=num: cmp(num^intify(a.nid), num^intify(b.nid)))
            return nodes[:K]
        except IndexError:
            return nodes

    def bucket_index(self, target):
        # Relies on KBucket.__lt__ comparing a bucket's upper bound
        # against a plain integer id.
        return bisect_left(self.buckets, intify(target))

    def split_bucket(self, index):
        """Split bucket *index* at its midpoint and redistribute nodes."""
        old = self.buckets[index]
        point = old.max - (old.max - old.min)/2
        new = KBucket(point, old.max)
        old.max = point
        self.buckets.insert(index + 1, new)
        # Iterate a copy: nodes are removed from ``old`` while iterating.
        for node in old.nodes[:]:
            if new.in_range(node.nid):
                new.append(node)
                old.remove(node)

    def __iter__(self):
        for bucket in self.buckets:
            yield bucket


class KBucket(object):
    """One routing-table bucket: holds up to K nodes whose ids fall in
    the half-open range [min, max)."""

    __slots__ = ("min", "max", "nodes")

    def __init__(self, min, max):
        self.min = min
        self.max = max
        self.nodes = []

    def append(self, node):
        """Add *node*; a known node is refreshed (moved to the tail), and
        BucketFull is raised when the bucket is at capacity."""
        if node in self:
            # Already known: move to the most-recently-seen end.
            self.remove(node)
            self.nodes.append(node)
        elif len(self) < K:
            self.nodes.append(node)
        else:
            raise BucketFull

    def remove(self, node):
        self.nodes.remove(node)

    def in_range(self, target):
        """True when the 20-byte *target* id lies inside this bucket."""
        return self.min <= intify(target) < self.max

    def __len__(self):
        return len(self.nodes)

    def __contains__(self, node):
        return node in self.nodes

    def __iter__(self):
        return iter(self.nodes)

    def __lt__(self, target):
        # Lets bisect compare a bucket against a plain integer id.
        return self.max <= target


class KNode(object):
    """A remote DHT node: 20-byte id plus its UDP endpoint."""

    __slots__ = ("nid", "ip", "port")

    def __init__(self, nid, ip, port):
        self.nid = nid
        self.ip = ip
        self.port = port

    def __eq__(self, other):
        # Nodes are identified purely by id; ip/port may change over time.
        return self.nid == other.nid

    def __ne__(self, other):
        # Python 2 does not derive != from ==; without this, "a != b"
        # silently compared object identity.
        return not self.__eq__(other)

    def __repr__(self):
        return "KNode(%r, %r, %r)" % (self.nid, self.ip, self.port)



#using example
#using example
class Master(object):
    """Receives harvested info-hashes: inserts each into MySQL and appends
    its hex form to a log file."""

    def __init__(self, f):
        self.f = f   # open, writable log file handle

    def log(self, infohash):
        """Record one 20-byte infohash (best effort; DB errors are printed)."""
        try:
            # NOTE(review): port should be an int (MySQL default 3306);
            # port='' will be rejected by MySQLdb -- confirm configuration.
            conn = MySQLdb.connect(host='127.0.0.1', user='', passwd='', port='', charset="UTF8")
            try:
                cur = conn.cursor()
                conn.select_db('dht')
                hash_hex = infohash.encode("hex")
                # Parameterized query: the infohash comes from untrusted
                # network peers, so never splice it into the SQL string.
                cur.execute("insert into hash_info(hash,name) values(%s,%s)",
                            (hash_hex, ""))
                conn.commit()
            finally:
                conn.close()   # the original leaked the connection per call
        except MySQLdb.Error as e:
            print('mysql error %d:%s' % (e.args[0], e.args[1]))
        self.f.write(infohash.encode("hex") + "\n")
        self.f.flush()
# Script entry: bind the crawler and run until interrupted.
try:
    f = open("infohash.log", "a")
    # NOTE(review): the original passed port 80010, which exceeds the
    # 16-bit UDP port range (max 65535) and makes socket.bind() raise,
    # so the crawler could never start.  8001 is assumed to be the
    # intended port -- confirm.  (An unused second Master(f) was removed.)
    s = Server(Master(f), KTable(random_id()), 8001)
    s.start()
except KeyboardInterrupt:
    s.socket.close()
    f.close()

下面是根据hash下载种子并解析的Python程序:

# _*_ coding: utf-8 _*_
'''
Torrent fetcher: downloads .torrent files for harvested info-hashes from
the https://zoink.it torrent cache and writes parsed metadata into MySQL
(see getTorrents / getAllTorrents below).
'''

import urllib,urllib2,os,MySQLdb,gzip,base64,time
from io import BytesIO
from btdht import Parser
import sys
# HACK: sys.setdefaultencoding is deleted after startup; reload() re-exposes
# it so implicit str<->unicode conversions use UTF-8 (Python 2 only).
reload(sys)   
sys.setdefaultencoding('utf8')

def save(filename, content):
    """Write *content* (a byte string) to *filename*, printing any IOError.

    Uses a with-block so the handle is closed even when write() fails;
    the original leaked the file object on a failed write.
    """
    try:
        with open(filename, 'wb') as out:
            out.write(content)
    except IOError as e:
        print(e)

            

def getTorrents(info_hash):
    """Download <INFO_HASH>.torrent from the zoink.it cache into torrents/.

    The cache serves gzip-compressed bodies, so the payload is
    decompressed before saving.  Returns True on success, False on any
    download/decompress failure.
    """
    url = "https://zoink.it/torrent/%s.torrent" % info_hash.upper()
    #url="http://torrage.com/torrent/%s.torrent"%info_hash.upper()

    try:
        response = urllib2.urlopen(url, timeout=30)
        try:
            compressed = BytesIO(response.read())
        finally:
            response.close()   # the original never closed the response
        gz = gzip.GzipFile(fileobj=compressed)
        try:
            raw_data = gz.read()
        finally:
            gz.close()
        save("torrents/" + info_hash + ".torrent", raw_data)
    except IOError as e:
        print(e)
        return False
    return True

def getAllTorrents(table):
    """For up to 30 rows of *table* whose info column is empty: download
    the torrent, parse it, and write name/info/files metadata back into
    hash_info.  Rows whose torrent cannot be downloaded are deleted.
    Prints the number of torrents processed."""

    # Bound before the try: the original only assigned i inside it, so a
    # failed connect made the final print raise NameError.
    i = 0
    try:
        conn = MySQLdb.connect(host='127.0.0.1', user='', passwd='', port='', charset="UTF8")
        cur = conn.cursor()
        conn.select_db('dht')
        # NOTE(review): *table* is spliced into the SQL; callers only pass
        # the constant "hash_info" today -- keep it away from external input.
        sql = "select * from " + table + " where info = '' limit 30"
        count = cur.execute(sql)
        print("there are %s rows in table" % count)
        result = cur.fetchall()
        for r in result:
            time.sleep(1)   # throttle requests against the torrent cache

            state = getTorrents(r[0])
            if state:
                # Torrent file downloaded; count and parse it.
                i = i + 1

                try:
                    parser = Parser.Parser("torrents/" + r[0] + ".torrent")
                except Exception:
                    print('bt file error')
                    # BUG FIX: the original fell through here and kept
                    # using a stale (or undefined) parser from a previous
                    # iteration.
                    continue

                name = parser.getName()
                encoding = parser.getEncoding()
                comment = parser.getComments()
                info = parser.getInfo()
                info = base64.encodestring(str(info))
                # 1 = single-file torrent, 2 = multi-file torrent
                isFiles = parser.getJeiGou()
                if isFiles == 2:
                    files = parser.getFilesList()
                else:
                    files = parser.getOneFileList()
                # Creation date embedded in the torrent, if any.
                createDate = parser.getCreateDate()
                try:
                    date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                    # The two branches of the original differed only in the
                    # extra name.decode('utf-8') when no encoding was set.
                    if isinstance(name, unicode):
                        name = name.encode('utf-8')
                        info = info.encode('utf-8')
                        filesStr = files[0].encode('utf-8')
                        lengthsStr = files[1].encode('utf-8')
                        createDatesStr = createDate.encode('utf-8')
                    else:
                        filesStr = files[0]
                        lengthsStr = files[1]
                        createDatesStr = createDate
                    if not encoding:
                        name = name.decode('utf-8')
                    # Parameterized update: names/paths come from untrusted
                    # torrent files; splicing them into the SQL (as the
                    # original did) broke on any quote character and was
                    # injectable.
                    cur.execute(
                        "update hash_info set hash_info.name=%s,info=%s,"
                        "create_time=%s,files=%s,lengths=%s,create_date=%s "
                        "where hash_info.hash=%s",
                        (name, info, date, filesStr, lengthsStr,
                         createDatesStr, r[0]))
                    conn.commit()
                except Exception as e:
                    print(e)
            else:
                try:
                    # Torrent download failed: drop the row.
                    cur.execute("delete from hash_info where hash_info.hash=%s",
                                (r[0],))
                    conn.commit()
                except Exception:
                    print('error')

        cur.close()
        conn.close()
    except MySQLdb.Error as e:
        print('mysql error %d:%s' % (e.args[0], e.args[1]))

    print('the torrent files :' + str(i))
# Process one batch of up to 30 pending hashes when run as a script.
if __name__=="__main__":

    getAllTorrents("hash_info")
# Example of fetching a single torrent by hash (kept as inert string):
'''
    info_hash="5302C30A88347F10E1F0A5BF334A8AC85D545AC0"
    getTorrents(info_hash)
'''


  • 1
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
爬虫(Web Crawler)是一种自动化程序,用于从互联网上收集信息。其主要功能是访问网页、提取数据并存储,以便后续分析或展示。爬虫通常由搜索引擎、数据挖掘工具、监测系统等应用于网络数据抓取的场景。 爬虫的工作流程包括以下几个关键步骤: URL收集: 爬虫从一个或多个初始URL开始,递归或迭代地发现新的URL,构建一个URL队列。这些URL可以通过链接分析、站点地图、搜索引擎等方式获取。 请求网页: 爬虫使用HTTP或其他协议向目标URL发起请求,获取网页的HTML内容。这通常通过HTTP请求库实现,如Python中的Requests库。 解析内容: 爬虫对获取的HTML进行解析,提取有用的信息。常用的解析工具有正则表达式、XPath、Beautiful Soup等。这些工具帮助爬虫定位和提取目标数据,如文本、图片、链接等。 数据存储: 爬虫将提取的数据存储到数据库、文件或其他存储介质中,以备后续分析或展示。常用的存储形式包括关系型数据库、NoSQL数据库、JSON文件等。 遵守规则: 为避免对网站造成过大负担或触发反爬虫机制,爬虫需要遵守网站的robots.txt协议,限制访问频率和深度,并模拟人类访问行为,如设置User-Agent。 反爬虫应对: 由于爬虫的存在,一些网站采取了反爬虫措施,如验证码、IP封锁等。爬虫工程师需要设计相应的策略来应对这些挑战。 爬虫在各个领域都有广泛的应用,包括搜索引擎索引、数据挖掘、价格监测、新闻聚合等。然而,使用爬虫需要遵守法律和伦理规范,尊重网站的使用政策,并确保对被访问网站的服务器负责。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值