1. Explanation
The server handles each client connection in a separate process (multiprocessing).
In Python, a child process gets a copy of the parent's resources, whereas threads share variables.
Because of that copy, the accepted socket has to be closed twice: when the child process is created, the socket's file descriptor (fd) is duplicated into it, so both the parent and the child hold a copy.
As long as either copy of the fd is still open, the operating system treats the connection as in use, so closing only one of them does not end it (a minimal sketch of this double close follows below).
The server is written as a class.
__init__ holds the global setup: create the socket, bind it, and start listening.
run_forever runs an infinite loop that keeps accepting connections and dispatching the serving code.
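The sketch below illustrates only the double-close point, assuming the fork start method that multiprocessing uses on Linux; the names demo_double_close and handle, and port 8080, are illustrative and are not part of the server in section 2.

import socket
import multiprocessing


def handle(conn):
    # Runs in the child process, which holds a copy of the accepted socket's fd
    conn.recv(1024)
    body = b"ok"
    conn.send(b"HTTP/1.1 200 OK\r\nContent-Length:%d\r\n\r\n" % len(body))
    conn.send(body)
    conn.close()  # first close: the child's copy of the fd


def demo_double_close():
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(("", 8080))
    listener.listen(128)

    conn, addr = listener.accept()
    p = multiprocessing.Process(target=handle, args=(conn,))
    p.start()
    # Second close: the parent's copy. Without it the kernel still counts the
    # parent as a user of the connection, and the client never sees it close.
    conn.close()
    p.join()
    listener.close()


if __name__ == "__main__":
    demo_double_close()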
2. Code
import socket
import re
import multiprocessing


class WSGIServer(object):
    def __init__(self):
        # 1. Create the listening socket
        self.tcp_server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.tcp_server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # 2. Bind the socket
        self.tcp_server_socket.bind(("", 7890))
        # 3. Start listening
        self.tcp_server_socket.listen(128)

    def service_clients(self, new_socket):
        """Return data to this client."""
        # 1. Receive the HTTP request sent by the browser, e.g.
        # GET / HTTP/1.1
        # ...
        request = new_socket.recv(1024).decode("utf-8")
        request_lines = request.splitlines()
        ret = re.match(r"[^/]+(/[^ ]*)", request_lines[0])
        file_name = ""
        if ret:
            file_name = ret.group(1)
            if file_name == "/":
                file_name = "/index.html"

        http_header = ""
        http_body = ""
        try:
            f = open("./html" + file_name, "rb")
        except OSError:
            http_body = "<h1>Sorry not found</h1>".encode("utf-8")
            http_header = "HTTP/1.1 404 NOT FOUND\r\n"
            http_header += "Content-Length:%d\r\n\r\n" % len(http_body)
        else:
            http_body = f.read()
            f.close()
            http_header = "HTTP/1.1 200 OK\r\n"
            http_header += "Content-Length:%d\r\n\r\n" % len(http_body)

        new_socket.send(http_header.encode("utf-8"))
        new_socket.send(http_body)
        # Close the child's copy of the connection
        new_socket.close()

    def run_forever(self):
        """Overall control loop."""
        while True:
            # 1. Wait for a client to connect
            new_socket, client_addr = self.tcp_server_socket.accept()
            # 2. Serve this client in a separate process
            p = multiprocessing.Process(target=self.service_clients, args=(new_socket,))
            p.start()
            # Close the parent's copy of the connection; the child keeps its own fd
            new_socket.close()
        # Close the listening socket (never reached because of the loop above)
        self.tcp_server_socket.close()


def main():
    wsgi_server = WSGIServer()
    wsgi_server.run_forever()


if __name__ == '__main__':
    main()
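To try the server, put a page at ./html/index.html (relative to where the server is started), run this file, and request the page. The snippet below is a small check using only the standard library and assumes the server is already running; port 7890 matches the bind() call above.

import urllib.request

# Fetch the default page from the running server on port 7890
with urllib.request.urlopen("http://127.0.0.1:7890/") as resp:
    print(resp.status)                  # 200 if ./html/index.html was found
    print(resp.read().decode("utf-8"))  # assumes index.html is UTF-8 text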