import requests
from bs4 import BeautifulSoup
from concurrent.futures import ThreadPoolExecutor
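import threading

# A single module-level lock keeps appends from different worker threads
# from interleaving in the shared output file.
write_lock = threading.Lock()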
def check_website(url, output_file):
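    """Fetch url and, if it responds with HTTP 200, append its <title> to output_file."""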
    try:
        response = requests.get(url, timeout=2)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # soup.title (or its .string) can be None on pages without a usable <title>.
            title = soup.title.string.strip() if soup.title and soup.title.string else 'N/A'
            with write_lock:
                with open(output_file, 'a') as file:
                    file.write(f"Website: {url}\nTitle: {title}\n\n")
    except requests.exceptions.RequestException as e:
        print(f"Error checking website {url}: {e}")
def main(input_file, output_file):
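    """Read URLs from input_file and check them concurrently with a thread pool."""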
    with open(input_file, 'r') as file:
        websites = file.readlines()
    with ThreadPoolExecutor(max_workers=10) as executor:
        futures = []
        for website in websites:
            website = website.strip()
            if not website:
                continue  # skip blank lines
            future = executor.submit(check_website, website, output_file)
            futures.append(future)
        # Wait for all tasks to complete.
        for future in futures:
            future.result()
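
# Expected input: one URL per line in url.txt, including the http:// or https://
# scheme. Titles of sites that respond with HTTP 200 are appended to youxiao.txt.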
if __name__ == '__main__':
    input_file = 'url.txt'
    output_file = 'youxiao.txt'
    main(input_file, output_file)