python
路遥车慢
这个作者很懒,什么都没留下…
展开
-
selenium
数据驱动测试实例# -*- coding:utf-8 -*-from selenium import webdriverclass Login(): def userLogin(self,driver): driver =webdriver.Firefox() driver.implicitly_wait(10) driver.get("htt原创 2017-05-13 09:05:41 · 196 阅读 · 0 评论 -
selenium键盘操作
# coding=utf-8from selenium import webdriverfrom selenium.webdriver.common.keys import Keysdriver = webdriver.Firefox()driver.get('http://www.baidu.com')# 输入框输入内容driver.find_element_by_xpath('//*[@原创 2017-05-29 09:21:58 · 520 阅读 · 0 评论 -
编写web测试用例
untitled├── runtest.py└── test_case ├── __init__.py ├── test_baidu.py └── test_youdao.pytest_baidu.py# -*- coding:utf-8 -*-from selenium import webdriverimport unittestimport timeclass原创 2017-05-09 22:17:01 · 500 阅读 · 0 评论 -
selenium switch_to.frame
# -*- coding:utf-8 -*-from selenium import webdriverclass Login(): def userLogin(self,driver): driver =webdriver.Firefox() driver.implicitly_wait(10) driver.get("http://www原创 2017-05-09 22:20:15 · 1822 阅读 · 0 评论 -
selenium截取当前窗口,并保存为图片
# -*- coding:utf-8 -*-from selenium import webdriverfrom time import sleepdriver = webdriver.Firefox()driver.get('http://www.baidu.com')driver.find_element_by_id('kw').send_keys('selenium')driver.fi原创 2017-05-29 13:12:22 · 3799 阅读 · 0 评论 -
selenium通过add_cookie()添加cookie
selenium通过add_cookie()添加cookie,再次访问网站时,服务器直接读取浏览器cookie第一次登陆 手动输入用户名密码# -*- coding:utf-8 -*-from selenium import webdriverfrom time import sleepdriver = webdriver.Firefox()driver.get('http://www.bai原创 2017-05-29 14:30:32 · 26661 阅读 · 1 评论 -
BeautifulSoup
# -*- coding:utf-8 -*-# pip install beautifulsoup4 安装from bs4 import BeautifulSoupimport rehtml_doc = """<html><head><title>The Dormouse's story</title></head><body><p class="title"><b>The Dormouse原创 2017-06-16 11:07:09 · 239 阅读 · 0 评论 -
pymysql基本用法
#!/usr/bin/env python# --coding = utf-8# Author Allen Leeimport pymysql#创建链接对象conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='123', db='Allen')#创建游标cursor = conn.cur原创 2017-06-09 13:59:03 · 4029 阅读 · 0 评论 -
flask+mysql+highcharts监控内存
agent.py# -*- coding:utf-8 -*-import timeimport pymysqlconn = pymysql.connect(host='127.0.0.1',user='root',password='osyunwei',db='memory')conn.autocommit(True)cur = conn.cursor()def getMem():原创 2017-06-09 17:19:35 · 1995 阅读 · 0 评论 -
rabbitmq基础
product.pyimport pikaconnection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))channel = connection.channel()channel.queue_declare(queue="hello")channel.basic_publish(exchange="",rout原创 2017-10-31 13:02:35 · 363 阅读 · 0 评论 -
Pycharm用鼠标滚轮控制字体大小
Pycharm用鼠标滚轮控制字体大小的一、pycharm字体放大的设置File —> setting —> Keymap —>在搜寻框中输入:increase —> Increase Font Size(双击) —> 在弹出的对话框中选择Add Mouse Shortcut 在弹出的对话框中同时按住ctrl键和鼠标滚轮向上滑。二、Py原创 2017-10-23 19:10:12 · 11905 阅读 · 3 评论 -
Python3 urllib库爬虫 基础
基础add_header()添加报头url="http://blog.csdn.net/yudiyanwang/article/details/78322039"req = urllib.request.Request(url)req.add_header("User-Agent","Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:56.0) Gecko/2原创 2017-10-25 18:02:09 · 323 阅读 · 0 评论 -
用scrapy获取代理ip地址
items.py -*- coding: utf-8 -*-# Define here the models for your scraped items## See documentation in:# http://doc.scrapy.org/en/latest/topics/items.htmlimport scrapyclass GetproxyItem(scrapy.Item):原创 2017-05-28 21:05:27 · 5660 阅读 · 0 评论 -
爬一个漫画
# -*- coding:utf-8 -*-from selenium import webdriverfrom Mylog import MyLog as mylogimport osimport sysclass GetCartoon(object): def __init__(self): self.startUrl = u'http://www.1kkk.com原创 2017-05-28 09:34:16 · 621 阅读 · 0 评论 -
设置代理服务器
# -*- coding:utf-8 -*-''' 在网上找一些代理IP 代理服务器设置 proxy_addr:代理服务器地址 url:要爬取网页地址'''import urllib.requestdef use_proxy(proxy_addr,url): proxy = urllib.request.ProxyHandler({'http':proxy原创 2017-05-13 09:11:55 · 982 阅读 · 0 评论 -
使用add_header()添加报头
使用add_header()方法添加报头信息 Request对象名.add_header(字段名,字段值) 使用data=urllib.request.urlopen(req).read()打开对应网址并读取网页内容,并赋值给dataurl= 'http://www.baidu.com'req = urllib.request.Request(url)req.add_heade原创 2017-05-13 09:25:58 · 22511 阅读 · 0 评论 -
使用DebugLog
”’ 开启DebugLog 调试log日志 ”’httphd = urllib.request.HTTPHandler(debuglevel=1)httpshd = urllib.request.HTTPSHandler(debuglevel=1)opener = urllib.request.build_opener(httphd,httpshd)urllib.request.i原创 2017-05-13 09:32:57 · 839 阅读 · 0 评论 -
Cookiejar处理cookie
import urllib.parseimport http.cookiejarurl='https://passport.csdn.net/account/login?from=http://my.csdn.net/my/mycsdn'postdata = urllib.parse.urlencode( { 'username':'yourname', '原创 2017-05-13 17:00:56 · 2713 阅读 · 2 评论 -
爬取京东图书列表页图片
import re,osimport urllib.requestfrom time import sleepurl='https://list.jd.com/list.html?cat=1713,3287,3797''''<img width="200" height="200" data-img="1" data-lazy-img="done" title="" src="//img13原创 2017-05-13 18:50:29 · 565 阅读 · 0 评论 -
scrapy常用命令
scrapy startproject myfirst //创建一个爬虫项目scrapy startproject --logfile="/tmp/scrapy.log" myfirstscrapy fetch http://www.baidu.com //fetch用例显示爬虫爬取的过程scrapy runspider first.py //可以使用runspider直接运行该爬虫文件,而不依原创 2017-05-15 16:46:17 · 895 阅读 · 0 评论 -
scrapy简单示例
# -*- coding: utf-8 -*-import scrapyfrom scrapy.linkextractors import LinkExtractorfrom scrapy.spiders import CrawlSpider, Rulefrom myfirst.items import MyfirstItemclass WocaoSpider(CrawlSpider):原创 2017-05-15 16:47:54 · 469 阅读 · 0 评论 -
scrapy当当当当 连衣裙分类
scrapy startproject dangdangscrapy genspider -t basic cao dangdang.com# -*- coding: utf-8 -*-# Define here the models for your scraped items## See documentation in:# http://doc.scrapy.org/en/latest原创 2017-05-15 21:23:26 · 278 阅读 · 0 评论 -
python 将数据写入Excel
将数据写入表格import xlwtif __name__ == "__main__": book = xlwt.Workbook(encoding='utf8',style_compression=0) sheet = book.add_sheet('dede') sheet.write(0,0,'hello world') sheet.write(1,1,u'中文原创 2017-05-27 13:00:17 · 524 阅读 · 0 评论 -
python 将数据写入Excel
将数据写入表格import xlwtif __name__ == "__main__": book = xlwt.Workbook(encoding='utf8',style_compression=0) sheet = book.add_sheet('dede') sheet.write(0,0,'hello world') sheet.write(1,1,u'中文原创 2017-05-27 13:01:37 · 490 阅读 · 0 评论 -
python 日志处理 logging模块
日志处理# -*- coding:utf-8 -*-import loggingimport getpassimport datetimeclass MyLog(object): def __init__(self): self.user = getpass.getuser() self.logger = logging.getLogger(self.u原创 2017-05-27 14:25:29 · 269 阅读 · 0 评论 -
pymysql 基本操作
In [2]: conn = pymysql.connect(host="127.0.0.1",user="root",passwd="osyunwei")In [3]: conn.query("create database pymysql")Out[3]: 1In [5]: conn = pymysql.connect(host="127.0.0.1",user="root",passwd=原创 2017-10-26 14:22:32 · 1117 阅读 · 0 评论