简介:实际的压测场景中经常遇到使用多个用户登录、多个用户点击、购买等情况,这个时候就需要使用参数化。
准备工作:相关攻略 - Python第三方库:伪数据生成 - Faker
数据库快速创建10个用户源码:
import pymysql
from faker import Faker
import random
import time
class DataBaseHandle(object):
    """Thin wrapper around a pymysql connection for executing write statements."""

    def __init__(self, host, user, password, database, port):
        # Open the connection once; it is reused by every insert_db() call.
        self.db = pymysql.connect(host=host, user=user, password=password,
                                  database=database, port=port)

    def insert_db(self, sql):
        """Execute one write statement, committing on success.

        Bug fix: the original bare ``except:`` also swallowed
        KeyboardInterrupt/SystemExit; narrowed to ``Exception`` so only
        execution errors trigger the rollback.
        """
        self.cursor = self.db.cursor()
        try:
            self.cursor.execute(sql)
            self.db.commit()
        except Exception:
            # Undo the partial transaction so the connection stays usable.
            self.db.rollback()
        finally:
            self.cursor.close()
def create_data(loop=10):
    """Generate *loop* fake users; each is a dict with "username" and "mobile"."""
    fake = Faker()
    generated = []
    for _ in range(loop):
        surname = fake.last_name()   # family name
        given = fake.first_name()    # given name
        generated.append({
            "username": surname + given,
            # Base prefix + current epoch seconds + random offset keeps the
            # value in an 11-digit, phone-number-like range.
            "mobile": int(12000000000) + int(time.time()) + int(random.random() * 1000000000),
        })
    return generated
if __name__ == '__main__':
    # Fill in your actual database connection details below.
    db_handle = DataBaseHandle('数据库host', 'root', '123456', 'testdata', 3306)
    for user in create_data(10):
        username = user["username"]
        mobile = user["mobile"]
        # NOTE(review): string-built SQL is acceptable here because the values
        # come from Faker, but use parameterized queries for untrusted input.
        sql = f'INSERT INTO userinfo VALUES ("{username}", "{mobile}");'
        db_handle.insert_db(sql)
        print(f"add username: {username} - mobile:{mobile} success")
执行后如图:
数据库新增数据结果:
用户名和手机号参数化的登录案例压测:
步骤:
1、查询出数据库用户表的10个用户
2、将用户放入队列
3、在登录性能压测脚本中使用该队列数据
数据库查询源码:相关攻略 - Python:封装pymysql模块操作mysql
db_operation.py
import pymysql
class MysqlHelper(object):
    """Small pymysql helper that opens a fresh connection per query."""

    # Class-level default; replaced with a live connection in connect().
    conn = None

    def __init__(self, host, username, password, db, charset='utf8', port=3306):
        self.host = host
        self.username = username
        self.password = password
        self.db = db
        self.charset = charset
        self.port = port

    def connect(self):
        """Open the connection and a cursor for a single query."""
        self.conn = pymysql.connect(host=self.host,
                                    port=self.port,
                                    user=self.username,
                                    password=self.password,
                                    db=self.db,
                                    charset=self.charset)
        self.cursor = self.conn.cursor()

    def close(self):
        """Release the cursor and the connection."""
        self.cursor.close()
        self.conn.close()

    def get_one(self, sql, params=()):
        """Return the first row of *sql*, or None on error / no rows."""
        result = None
        try:
            self.connect()
            self.cursor.execute(sql, params)
            result = self.cursor.fetchone()
            self.close()
        except Exception as e:
            print(e)
        return result

    def get_all(self, sql, params=()):
        """Return all rows of *sql* as a tuple of tuples (empty tuple on error)."""
        list_data = ()
        try:
            self.connect()
            self.cursor.execute(sql, params)
            list_data = self.cursor.fetchall()
            self.close()
        except Exception as e:
            print(e)
        return list_data

    def delete_one(self, sql, params=()):
        """Execute a modifying statement (e.g. DELETE) and commit it.

        Bug fix: the original never committed, so the change was rolled
        back when the connection closed and nothing was actually deleted.
        """
        result = None
        try:
            self.connect()
            self.cursor.execute(sql, params)
            result = self.cursor.fetchone()
            self.conn.commit()  # persist the change (missing in the original)
            self.close()
        except Exception as e:
            print(e)
        return result
if __name__ == '__main__':
    my_db = MysqlHelper('数据库host', 'root', '123456', 'testdata', port=3306)
    # Fetch every prepared user from the table.
    sql = 'SELECT username, mobile FROM `userinfo`;'
    rows = my_db.get_all(sql)
    print("result:{}".format(rows))
客户端源码:
from locust import HttpUser, task
import logging
from db_operation import MysqlHelper
import queue
class HelloWorldUser(HttpUser):
    """Locust user that logs in with credentials pulled from a shared queue."""

    # Class-level queue: all simulated users draw from the same pool.
    que = queue.Queue()

    def on_start(self):
        # Bug fix: the MySQL port was 3360 here but 3306 everywhere else
        # in this article, so the queue was never filled.
        my_db = MysqlHelper('数据库host', 'root', '123456', 'testdata', port=3306)
        # Load the 10 prepared users into the queue.
        sql = 'SELECT username, mobile FROM `userinfo`;'
        result = my_db.get_all(sql)
        for i in result:
            self.que.put(i)

    @task
    def login(self):
        # Blocks when the queue is empty — intended for case 1
        # (stop after the 10 unique users are consumed).
        user_info = self.que.get()
        username, mobile = user_info
        data = {"username": username, "mobile": mobile, "password": "123456"}
        self.client.post("/login", json=data)
        # Uncomment to re-enqueue the credentials and loop over them (case 2).
        # user_info = (username, mobile)
        # self.que.put(user_info)
if __name__ == '__main__':
    import os
    # Bug fix: locust's -f flag takes the script file name; "my_locust"
    # without the .py extension is not found.
    os.system("locust -f my_locust.py")
服务端源码:
from sanic import Sanic
from sanic import response
import datetime
import uuid
app = Sanic(__name__)
# One token shared by every successful login for the lifetime of the process.
key = str(uuid.uuid4())


@app.post('/login')
def login(request):
    """Fake login endpoint: accepts any user whose password is 123456."""
    time = str(datetime.datetime.now())[:-7]  # trim microseconds
    data = request.json
    print("data:", data)
    if data["password"] == "123456":
        message = {"login time": time, "username": data["username"], "mobile": data["mobile"], "token": key}
        print("mes:", message)
        return response.json(message)
    # Bug fix: the original fell through and returned None (an HTTP 500)
    # on a wrong password; return an explicit 401 instead.
    return response.json({"login time": time, "error": "invalid password"}, status=401)


if __name__ == "__main__":
    app.run(host="127.0.0.1", port=3031, auto_reload=True)
案例1:只参数化执行10个用户,运行完成则堵塞等待。
locust-web执行结果:队列中只有10个元素,元素取完后队列会堵塞等待,因此最终只运行10个。
服务端运行结果:只接收并响应10个用户。
图片
案例2:取消客户端注释,将登录的用户重新加入队列。参数化执行10个用户,循环使用这10个用户。
locust-web执行结果:
服务端运行结果:循环接收并响应这10个用户。
微信公众号:玩转测试开发
欢迎关注,共同进步,谢谢!