Python: transferring CSV files from an SFTP root directory to local and shared folders, then loading the CSV into a database with SQLAlchemy
The code is commented throughout, so there is not much extra explanation here. A simpler bulk-load alternative using pandas.to_sql is sketched after the script.
from smb.SMBConnection import SMBConnection
import paramiko
import os
import socket
import datetime
import pymssql  # driver behind the mssql+pymssql SQLAlchemy URL
import pandas as pd
from sqlalchemy import create_engine, MetaData, Table
def sftp_download(host, port, username, password, local, remote,
                  timeout=15, banner_timeout=15, auth_timeout=15):
    """Download a single file, or every file under a remote directory, via SFTP."""
    sf = paramiko.Transport((host, port))
    sf.banner_timeout = banner_timeout
    sf.auth_timeout = auth_timeout
    sf.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(sf)
    sftp.get_channel().settimeout(timeout)
    try:
        if os.path.isdir(local):
            # local is a directory: pull every file found under the remote path
            for f in sftp.listdir(remote):
                sftp.get(os.path.join(remote, f), os.path.join(local, f))
        else:
            # local is a file path: pull a single remote file
            sftp.get(remote, local)
    except Exception as e:
        print('download exception:', e)
    finally:
        sf.close()
if __name__ == '__main__':
    StartTime = datetime.datetime.now()

    # --- Step 1: download every CSV from the SFTP root to the local folder ---
    host_address = 'f*****'
    host = socket.gethostbyname(host_address)
    port = 22
    username = 'SFTP****t'
    password = 'n****'
    local = '/data/sftp_data'
    remote = '/'
    sftp_download(host, port, username, password, local, remote)

    # --- Step 2: copy each downloaded file to the SMB shared folder ---
    file_path = '/data/sftp_data/'
    filenames = os.listdir(file_path)
    for i in filenames:
        with open(file_path + i, 'rb') as localFile:
            smb_conn = SMBConnection("s000***", "IJ12yr***", "LOL***", "w01***",
                                     use_ntlm_v2=True, domain="pharmatechs")
            assert smb_conn.connect("10.9***", 139)
            smb_conn.listShares(timeout=30)
            smb_conn.storeFile('ITData-MDMBackup', "UKG/" + i, localFile)
            smb_conn.close()

    # --- Step 3: load the newest CSV into the ODS database via SQLAlchemy ---
    ODS_InsertTime = str(datetime.datetime.now())
    # pick the latest file by the numeric timestamp embedded in the file name
    DataSource = sorted(filenames, key=lambda x: int(x[32:39]))[-1]
    data = pd.read_csv(file_path + DataSource, encoding='ISO-8859-1')
    data = data.fillna('')
    df = data.values.tolist()

    host_db = '10.111****'
    user_db = 'bi_***n'
    password_db = 'OO****t'
    port_db = '2***1'
    database = 'ODS'
    engine_str = ('mssql+pymssql://' + user_db + ':' + password_db +
                  '@' + host_db + ':' + port_db + '/' + database)
    engine = create_engine(engine_str, pool_size=10, max_overflow=20)
    metadata = MetaData(engine)                        # SQLAlchemy 1.x reflection style
    user_table = Table('ODS_UKG_Employee', metadata, autoload=True)
    conn = engine.connect()
    conn.execute('TRUNCATE TABLE ODS_UKG_Employee')    # full reload: empty the target table first
    try:
        for k in df:
            conn.execute(user_table.insert(),
                         EmployeeNumber=k[0], EmployeeName=k[1], CompanyCode=k[2],
                         OrgLevel1=k[3], OrgLevel2=k[4], OrgLevel3=k[5], OrgLevel4=k[6],
                         EmailAddress=k[7], JobAlternate=k[8], EmploymentStatus=k[9],
                         EmployeeNumberSupervisor=k[10], LastHireDate=k[11],
                         TerminationDate=k[12], EecUDField04=k[13], EecUDField02=k[14],
                         ManagementClass=k[15],
                         DataSource=DataSource, ODS_InsertTime=ODS_InsertTime)
        EndTime = datetime.datetime.now()
        RunTime = (EndTime - StartTime).seconds
        # write a success row to the ETL log table
        user_table_info = Table('ODS_ETL_PyLog', metadata, autoload=True)
        ins = user_table_info.insert()
        conn.execute(ins, TableName='ODS_UKG_Employee', ScriptName='file_trans.py',
                     StartTime=StartTime, EndTime=EndTime, RunTime=RunTime,
                     RunStatus='Success')
    except Exception as e:
        EndTime = datetime.datetime.now()
        RunTime = (EndTime - StartTime).seconds
        # write a failure row, including the error message, to the ETL log table
        user_table_info = Table('ODS_ETL_PyLog', metadata, autoload=True)
        ins = user_table_info.insert()
        conn.execute(ins, TableName='ODS_UKG_Employee', ScriptName='file_trans.py',
                     StartTime=StartTime, EndTime=EndTime, RunTime=RunTime,
                     RunStatus='Failure', ErrorMessage=str(e))
    finally:
        conn.close()
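The row-by-row insert above is easy to follow but can be slow for larger extracts. Below is a minimal alternative sketch that bulk-loads the same CSV with pandas.to_sql, written in the same SQLAlchemy 1.x style as the script. The connection URL, file name, column list, and chunksize are placeholders that assume the CSV columns line up one-to-one with the ODS_UKG_Employee columns listed above; treat it as a sketch, not a tested drop-in replacement.

# Alternative bulk load with pandas.to_sql (sketch; placeholder credentials and file name)
import datetime
import pandas as pd
from sqlalchemy import create_engine

engine = create_engine('mssql+pymssql://user:password@host:port/ODS')   # placeholder URL

DataSource = 'latest_extract.csv'                                       # placeholder file name
data = pd.read_csv('/data/sftp_data/' + DataSource, encoding='ISO-8859-1').fillna('')
# assumed to match the target table's column order, as in the row-by-row version
data.columns = ['EmployeeNumber', 'EmployeeName', 'CompanyCode', 'OrgLevel1', 'OrgLevel2',
                'OrgLevel3', 'OrgLevel4', 'EmailAddress', 'JobAlternate', 'EmploymentStatus',
                'EmployeeNumberSupervisor', 'LastHireDate', 'TerminationDate',
                'EecUDField04', 'EecUDField02', 'ManagementClass']
data['DataSource'] = DataSource
data['ODS_InsertTime'] = str(datetime.datetime.now())

with engine.begin() as conn:                         # one transaction for truncate + load
    conn.execute('TRUNCATE TABLE ODS_UKG_Employee')
    # append into the emptied table; chunksize batches the generated INSERT statements
    data.to_sql('ODS_UKG_Employee', conn, if_exists='append', index=False, chunksize=1000)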