1、直接方式：用 SQLAlchemy 引擎 + pandas.read_sql 直接查询成 DataFrame
import pymysql
import pandas as pd
from sqlalchemy import create_engine
def GetQueryByDF(sql, user="user", passwd="passwd", host="hostname",
                 port="port", dbname="dbname"):
    """Run *sql* against a MySQL database and return the result as a DataFrame.

    Parameters
    ----------
    sql : str
        The SQL statement to execute.
    user, passwd, host, port, dbname : str
        Connection parameters. Defaults are the original hard-coded
        placeholders, kept for backward compatibility — pass real
        credentials instead of editing the function body.

    Returns
    -------
    pandas.DataFrame
        The query result set.
    """
    connect_info = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(
        user, passwd, host, port, dbname)
    engine = create_engine(connect_info)
    try:
        return pd.read_sql(sql, engine)
    finally:
        # fix: dispose the engine so pooled connections are released
        # instead of leaking one engine per call
        engine.dispose()
# Example: fetch TV/documentary rows joined with their crew names; GROUP BY
# movie_num collapses the crew join into one group_concat'ed column.
# NOTE(review): the returned DataFrame is discarded — presumably run in an
# interactive session; assign the result to a variable to keep it.
GetQueryByDF("select a.movie_num,a.title,a.language,a.area,a.director,a.type,a.tv_describe,(case a.vtype_sub when '0' then '电视剧' when '1' then '纪录片' end) as type,group_concat(b.crew_name SEPARATOR ',') as crew from hx_online_tv a join hx_crew_images_formal b on a.movie_num = b.movie_num GROUP BY b.movie_num;")
2、间接方式：用 pymysql 游标取数，再手动构造 DataFrame
import pymysql
import pandas as pd
# Connect with raw pymysql and build the DataFrame by hand.
# NOTE(review): host/user/password/db are placeholders — fill in real values.
conn = pymysql.connect(host='p', user='w', password='c', port=3306,
                       db='a')
cursor = conn.cursor()
# One row per movie_num; group_concat folds the crew join into one column.
hot = "select a.movie_num,a.movie_title,a.language,a.area,a.director,a.movie_type,a.movie_describe,a.duration,(case a.vtype_sub when '0' then '电影' when '1' then '纪录片' end),group_concat(b.crew_name SEPARATOR ',') from hx_online_movies a join hx_crew_images_formal b on a.movie_num = b.movie_num GROUP BY b.movie_num ;"
# hot="select * from hx_media_scrapy_info_201912 limit 5"
cursor.execute(hot)
results = cursor.fetchall()
# fix: a plain SELECT needs no commit(); close the cursor and connection
# instead of leaking them for the rest of the process
cursor.close()
conn.close()
movies_info = list(results)
df = pd.DataFrame(movies_info,
                  columns=['movie_num', 'movie_title', 'language', 'area',
                           'director', 'movie_type', 'movie_describe',
                           'duration', 'type', 'crew_name'])
def combine(row):
    """Concatenate the descriptive fields of *row* into one space-separated string."""
    fields = ('movie_title', 'language', 'area', 'director',
              'crew_name', 'movie_describe', 'type', 'movie_type')
    return " ".join(row[name] for name in fields)
# Merge every descriptive column into one text field per row.
# fix: pass combine directly — `lambda x: combine(x)` was a redundant wrapper.
df['Combined_Data'] = df.apply(combine, axis=1)
3、保存为 txt 格式（DataFrame.to_csv）
# Dump one column to a text file, no index column, header row included.
# NOTE(review): `datas` is defined elsewhere — presumably a DataFrame with a
# 'querytext' column; with a single column the sep='\t' has no visible effect.
datas['querytext'].to_csv('excel2txt.txt', sep='\t', index=False)
4、或者用 numpy 数组形式保存（np.savetxt）
import numpy as np
# Save the de-duplicated column values as plain strings, one per line.
# NOTE(review): assumes `datas` (defined elsewhere) has a 'querys' column.
np.savetxt("querys11.txt",datas['querys'].unique(),fmt = '%s')
5、用 open 逐行写入的保存形式
# Write one ranking line per query id: "<score> qid:<rank> # <aid>".
# NOTE(review): opens in append mode ("a") — rerunning duplicates lines;
# assumes `datas` (defined elsewhere) has 'queryid', 'socre' and 'aid'
# columns (column name 'socre' is a typo in the data, kept as-is) and that
# `ids_` is an iterable of query ids with at least one matching row each.
with open('bbb.txt', "a") as file:
    for index, query_id in enumerate(ids_):  # fix: no need to wrap in list()
        # fix: filter datas once per id instead of twice
        matched = datas[datas['queryid'] == query_id]
        score = list(matched['socre'])[0]
        aid = list(matched['aid'])[0]
        file.write(str(score) + " " + "qid:" + str(index + 1)
                   + " # " + str(aid) + "\n")