自1984年以来,DTN IQ Feed一直提供精确覆盖农业、能源和金融市场的流式数据服务和交易解决方案。DTN IQ Feed在高端分析软件、流式报价服务和定制的Web内容方面,是交付数据源领域公认的行业领导者。DTN IQ Feed 为股票、期权、期货、大宗商品和固定收益证券提供实时新闻线、历史数据以及更多对时间敏感的报价数据。
下面的代码实现了如何获取IQ Feed支持品种的历史数据,不过前提是已经订阅IQ Feed数据服务并且启动了终端。
# Example: pull one year of 60-second bars for AAPL from a running IQFeed terminal.
from iqfeed import historicData
import datetime

# Historical window we want to request.
rangeBegin = datetime.datetime(2014, 10, 1)
rangeFinish = datetime.datetime(2015, 10, 1)

# 60-second bars, served by the locally running feed.
feed = historicData(rangeBegin, rangeFinish, 60)

ticker = "AAPL"
tickerFrame = feed.download_symbol(ticker)
import datetime
import socket
import os.path
import pandas as pd
"""
IQ DTN Feed Historical Symbol Download.
Downloads the symbol data in CSV format and stores it in a local directroy.
If we already have the symbol data downloaded, it will not hit IQ DTN Feed again,
it will simple use the local data.
To flush the local CSV file, simply delete the directory.
Constructor enables to specify a start and end date for the symbol data as well
as the frequency. Great for making sure data is consistent.
class historicData:
    """
    IQ DTN Feed historical symbol downloader.

    Requests bar data for a symbol over a fixed date range from a locally
    running IQFeed terminal (historical lookup port 9100) and caches the
    result as a CSV file on disk.  If a CSV for the same symbol, timeframe
    and date range already exists, it is reused instead of hitting IQFeed
    again; delete the download directory to flush the cache.
    """

    def __init__(self, startDate, endDate, timeFrame=60,
                 downloadDir="E:/Simon/MarketData/"):
        """
        Parameters:
        startDate   - datetime.datetime, start of the requested range
        endDate     - datetime.datetime, end of the requested range
        timeFrame   - bar length in seconds (default 60)
        downloadDir - local cache directory for downloaded CSV files;
                      kept configurable so it can live outside source control
        """
        # IQFeed expects timestamps formatted as "CCYYMMDD HHmmSS".
        self.startDate = startDate.strftime("%Y%m%d %H%M%S")
        self.endDate = endDate.strftime("%Y%m%d %H%M%S")
        self.timeFrame = str(timeFrame)
        self.downloadDir = downloadDir
        self.host = "127.0.0.1"  # IQFeed terminal runs on localhost
        self.port = 9100         # Historical data socket port

    def read_historical_data_socket(self, sock, recv_buffer=4096):
        """
        Read the response from the socket in a buffered fashion,
        receiving up to recv_buffer bytes at a time.

        Parameters:
        sock        - The socket object
        recv_buffer - Amount in bytes to receive per read

        Returns the accumulated payload with IQFeed's trailing
        end-of-message marker stripped.
        """
        buffer = ""
        while True:
            data = sock.recv(recv_buffer)
            if not data:
                # Peer closed the connection before the end marker arrived;
                # bail out instead of looping forever on empty reads.
                break
            buffer += data.decode('utf-8')
            # IQFeed terminates every historical response with !ENDMSG!.
            if "!ENDMSG!" in buffer:
                break
        # Remove the end-of-message marker and its surrounding delimiters.
        if "!ENDMSG!" in buffer:
            buffer = buffer[:-12]
        return buffer

    def download_symbol(self, symbol):
        """
        Return a pandas DataFrame of historical bars for *symbol*.

        Downloads from IQFeed on a cache miss; on a cache hit the local
        CSV is read back without touching the network.
        """
        # Cache file is keyed on symbol, timeframe and date range so a
        # different request never reuses stale data.
        fileName = "{0}{1}-{2}-{3}-{4}.csv".format(
            self.downloadDir, symbol, self.timeFrame,
            self.startDate, self.endDate)
        if not os.path.isfile(fileName):
            # HIT,[symbol],[bars in seconds],[begin: CCYYMMDD HHmmSS],
            # [end: CCYYMMDD HHmmSS],[max points],[begin time filter: HHmmSS],
            # [end time filter: HHmmSS],[old or new: 0 or 1]
            # NOTE: the timeframe must not be quoted — the original
            # "'{1}'" sent literal quotes, producing an invalid request.
            message = "HIT,{0},{1},{2},{3},,093000,160000,1\n".format(
                symbol, self.timeFrame, self.startDate, self.endDate)
            # Open a streaming socket to the IQFeed server locally.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock.connect((self.host, self.port))
                sock.sendall(message.encode('utf-8'))
                data = self.read_historical_data_socket(sock)
            finally:
                # Original used `sock.close` (no call), leaking the socket.
                sock.close()
            # Remove all carriage returns and the line-ending comma
            # delimiter from each record.
            data = "".join(data.split("\r"))
            data = data.replace(",\n", "\n")[:-1]
            # Ensure the cache directory exists, then persist the stream.
            os.makedirs(self.downloadDir, exist_ok=True)
            with open(fileName, "w") as f:
                f.write(data)
        return pd.read_csv(
            fileName, header=0, index_col=0, parse_dates=True,
            names=['datetime', 'open', 'low', 'high', 'close', 'volume', 'oi'])