What is a MATLAB wrapper? matlab_wrapper.py

#!/usr/bin/python
# -*- coding: utf-8 -*-

import matlab.engine
import datetime

from gradientdescent import gd
import models.arima_matlab
import models.narxnet_matlab
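
The two models.* modules imported above are not included in the post; matlab_wrapper.py treats each of them as a thin wrapper that hands the training matrix to a MATLAB routine through matlab.engine and returns the forecast, then stacks the NARX-net and ARIMA forecasts further down. A minimal sketch of what models/arima_matlab.py might look like, assuming a user-written MATLAB function arima_predict.m (the MATLAB function name and signature are assumptions and are not shown in the original):

# Hypothetical sketch of models/arima_matlab.py -- not part of matlab_wrapper.py.
# It assumes a MATLAB function arima_predict.m on the engine's search path with the
# signature yhat = arima_predict(data, steps, season), returning an N x 1 column
# vector, since the caller indexes the result as result[i][0].
def predict(eng, data_matlab, predict_step, season):
    # eng.<name>(...) invokes the MATLAB function of that name via the MATLAB Engine API
    return eng.arima_predict(data_matlab, predict_step, season, nargout=1)

models/narxnet_matlab.py would presumably follow the same pattern with a different MATLAB function behind it.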

def hour_predict_up(cellid, turnid, cur, eng, predict_start):
    # Predict the next 24 hours of uplink traffic for one cell and store the result.
    predict_step = 24
    season = 24.0
    rows = get_omc_data_up(cellid, cur, predict_start)
    last_data_time = rows[-1][0]
    rawdata = [r[1:] for r in rows]          # drop the datatime column
    if len(rawdata) >= 48:                   # need at least 48 hours of history
        a = stack_train(eng, rawdata, 24, season)
        actual_predict_step = predict_step if predict_step < len(rawdata) - 2 else len(rawdata) - 2
        [y, x] = stack_predict(eng, rawdata, actual_predict_step, season, a)
        predict_start_day = last_data_time + datetime.timedelta(hours=1)
        for i in xrange(len(y)):
            datatime = predict_start_day + datetime.timedelta(hours=i)
            sub = []
            for s in x[i]:
                sub.append(s)
            cur.execute("insert into predict_result_hour_up (cellid, turn_id, datatime, predict_data, sub_predict_data) values ('{0}', '{1}', '{2}', {3}, '{4}')".format(cellid, turnid, datatime, y[i], ",".join([str(b) for b in sub])))
    else:
        print "--------------filtered:" + cellid + "----" + str(len(rows)) + "-----------------"

def hour_predict_down(cellid, turnid, cur, eng, predict_start):
    # Predict the next 24 hours of downlink traffic for one cell and store the result.
    predict_step = 24
    season = 24.0
    rows = get_omc_data_down(cellid, cur, predict_start)
    last_data_time = rows[-1][0]
    rawdata = [r[1:] for r in rows]          # drop the datatime column
    if len(rawdata) >= 48:                   # need at least 48 hours of history
        a = stack_train(eng, rawdata, 24, season)
        actual_predict_step = predict_step if predict_step < len(rawdata) - 2 else len(rawdata) - 2
        [y, x] = stack_predict(eng, rawdata, actual_predict_step, season, a)
        predict_start_day = last_data_time + datetime.timedelta(hours=1)
        for i in xrange(len(y)):
            datatime = predict_start_day + datetime.timedelta(hours=i)
            sub = []
            for s in x[i]:
                sub.append(s)
            cur.execute("insert into predict_result_hour_down (cellid, turn_id, datatime, predict_data, sub_predict_data) values ('{0}', '{1}', '{2}', {3}, '{4}')".format(cellid, turnid, datatime, y[i], ",".join([str(b) for b in sub])))
    else:
        print "--------------filtered:" + cellid + "----" + str(len(rows)) + "-----------------"

def day_predict_up(cellid, turnid, cur, eng):
    # Predict the next 30 days of uplink traffic for one cell and store the result.
    predict_step = 30
    season = 7.0
    rows = get_omc_day_data_up(cellid, cur)
    last_data_time = rows[-1][0]
    rawdata = [r[1:] for r in rows]          # drop the datatime column
    if (len(rawdata) - [r[-1] for r in rawdata].count(0)) >= 70:   # need at least 70 days with non-zero traffic
        a = stack_train(eng, rawdata, 24, season)
        actual_predict_step = predict_step if predict_step < len(rawdata) - 2 else len(rawdata) - 2
        [y, x] = stack_predict(eng, rawdata, actual_predict_step, season, a)
        predict_start_day = last_data_time + datetime.timedelta(days=1)
        for i in xrange(len(y)):
            datatime = predict_start_day + datetime.timedelta(days=i)
            sub = []
            for s in x[i]:
                sub.append(s)
            cur.execute("insert into predict_result_day_up_sixcities (cgi, turn_id, datatime, predict_data, sub_predict_data) values ('{0}', '{1}', '{2}', {3}, '{4}')".format(cellid, turnid, datatime, y[i], ",".join([str(b) for b in sub])))
    else:
        print "--------------filtered:" + cellid + "----" + str(len(rows)) + "-----------------"

def day_predict_down(cellid, turnid, cur, eng):
    # Predict the next 30 days of downlink traffic for one cell and store the result.
    predict_step = 30
    season = 7.0
    rows = get_omc_day_data_down(cellid, cur)
    last_data_time = rows[-1][0]
    rawdata = [r[1:] for r in rows]          # drop the datatime column
    if (len(rawdata) - [r[-1] for r in rawdata].count(0)) >= 70:   # need at least 70 days with non-zero traffic
        a = stack_train(eng, rawdata, 24, season)
        actual_predict_step = predict_step if predict_step < len(rawdata) - 2 else len(rawdata) - 2
        [y, x] = stack_predict(eng, rawdata, actual_predict_step, season, a)
        predict_start_day = last_data_time + datetime.timedelta(days=1)
        for i in xrange(len(y)):
            datatime = predict_start_day + datetime.timedelta(days=i)
            sub = []
            for s in x[i]:
                sub.append(s)
            cur.execute("insert into predict_result_day_down_sixcities (cgi, turn_id, datatime, predict_data, sub_predict_data) values ('{0}', '{1}', '{2}', {3}, '{4}')".format(cellid, turnid, datatime, y[i], ",".join([str(b) for b in sub])))
    else:
        print "--------------filtered:" + cellid + "----" + str(len(rows)) + "-----------------"

def get_omc_data_up(cellid, cur, predict_start):
    cur.execute('''select datatime, date_part('hour', datatime), date_part('DOW', datatime), prb_up, prb_down, wireless_use_ratio, wireless_conn_ratio, rrc_conn_ratio,
        rrc_conn_maxcount, rrc_conn_averagecount, rrc_conn_valid_maxcount, rrc_conn_valid_averagecount, up_data from omc_data
        where cellid='{0}' and prb_up is not null and datatime < '{1}' order by datatime
        '''.format(cellid, predict_start))
    rows = cur.fetchall()
    return rows

def get_omc_data_down(cellid, cur, predict_start):
    cur.execute('''select datatime, date_part('hour', datatime), date_part('DOW', datatime), prb_up, prb_down, wireless_use_ratio, wireless_conn_ratio, rrc_conn_ratio,
        rrc_conn_maxcount, rrc_conn_averagecount, rrc_conn_valid_maxcount, rrc_conn_valid_averagecount, down_data from omc_data
        where cellid='{0}' and prb_up is not null and datatime < '{1}' order by datatime
        '''.format(cellid, predict_start))
    rows = cur.fetchall()
    return rows

def get_omc_day_data_up(cellid, cur):
    cur.execute('''select * from (
        select datatime, date_part('day', datatime), date_part('DOW', datatime), prb_up, prb_down, wireless_use_ratio, wireless_conn_ratio, rrc_conn_ratio,
            rrc_conn_maxcount, rrc_conn_averagecount, rrc_conn_valid_maxcount, rrc_conn_valid_averagecount, up_data from omc_upday_sixcities_buffer
        where cgi='{0}' and datatime < '2018-12-01'
        union all
        select omc.datatime, date_part('day', omc.datatime), date_part('DOW', omc.datatime), omc.prb_up, omc.prb_down, omc.wireless_use_ratio, omc.wireless_conn_ratio, omc.rrc_conn_ratio,
            omc.rrc_conn_maxcount, omc.rrc_conn_averagecount, omc.rrc_conn_valid_maxcount, omc.rrc_conn_valid_averagecount, omc.up_data from omc_upday_sixcities omc left join
            omc_upday_sixcities_buffer buf on omc.cgi = buf.cgi and omc.datatime = buf.datatime
        where buf.datatime is null and omc.cgi='{0}' and omc.datatime < '2018-12-01' ) t order by datatime
        '''.format(cellid))
    rows = cur.fetchall()
    return rows

def get_omc_day_data_down(cellid, cur):
    cur.execute('''
        select * from (
        select datatime, date_part('day', datatime), date_part('DOW', datatime), prb_up, prb_down, wireless_use_ratio, wireless_conn_ratio, rrc_conn_ratio,
            rrc_conn_maxcount, rrc_conn_averagecount, rrc_conn_valid_maxcount, rrc_conn_valid_averagecount, down_data from omc_downday_sixcities_buffer
        where cgi='{0}' and datatime < '2018-12-01'
        union all
        select omc.datatime, date_part('day', omc.datatime), date_part('DOW', omc.datatime), omc.prb_up, omc.prb_down, omc.wireless_use_ratio, omc.wireless_conn_ratio, omc.rrc_conn_ratio,
            omc.rrc_conn_maxcount, omc.rrc_conn_averagecount, omc.rrc_conn_valid_maxcount, omc.rrc_conn_valid_averagecount, omc.down_data from omc_downday_sixcities omc left join
            omc_downday_sixcities_buffer buf on omc.cgi = buf.cgi and omc.datatime = buf.datatime
        where buf.datatime is null and omc.cgi='{0}' and omc.datatime < '2018-12-01' ) t order by datatime
        '''.format(cellid))
    rows = cur.fetchall()
    return rows

def get_omc_day_data(cellid, cur):
    cur.execute('''
        select t1.data_date, date_part('day', t1.data_date), prb_up, prb_down, wireless_use_ratio, wireless_conn_ratio, rrc_conn_ratio,
            rrc_conn_maxcount, rrc_conn_averagecount, rrc_conn_valid_maxcount, rrc_conn_valid_averagecount, up_data, down_data from (
            select date_trunc('day', datatime) as data_date, prb_up, prb_down, wireless_use_ratio, wireless_conn_ratio, rrc_conn_ratio,
                rrc_conn_maxcount, rrc_conn_averagecount, rrc_conn_valid_maxcount, rrc_conn_valid_averagecount, down_data from (
                select row_number() over (partition by date_trunc('day', datatime) order by down_data desc) rn, * from omc_data_release where eci = {0} and prb_up is not null
            ) t_down where rn = 1
        ) t1 inner join (
            select data_date, up_data from (
                select row_number() over (partition by date_trunc('day', datatime) order by up_data desc) rn, up_data, date_trunc('day', datatime) as data_date from omc_data_release where eci = {0} and prb_up is not null
            ) t_up where rn = 1) t2
        on t1.data_date = t2.data_date order by t1.data_date
        '''.format(cellid))
    rows = cur.fetchall()
    return rows

def stack_train(eng, rows, predict_step, season):
    # Training window: drop the first row and hold out the last predict_step rows.
    up_train_data = rows[1:-1*predict_step]
    up_train_data_matlab = matlab.double(up_train_data)
    naxnet_result = models.narxnet_matlab.predict(eng, up_train_data_matlab, predict_step)
    arima_result = models.arima_matlab.predict(eng, up_train_data_matlab, float(predict_step), season)
    union_result = []
    for i in range(len(naxnet_result)):
        union_result.append([naxnet_result[i][0], arima_result[i][0]])
    x = []
    for data_index in range(len(union_result)):
        item = []
        for model_index in range(len(union_result[0])):
            # clamp negative forecasts to zero
            data = 0 if union_result[data_index][model_index] < 0 else union_result[data_index][model_index]
            item.append(data)
        x.append(item)
    # actual traffic of the held-out window (index 11 is the last column, the traffic value)
    y = [r[11] for r in rows[-1*predict_step:]]
    a = gd(x, y)
    return a
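
gradientdescent.gd is not shown in the post either. Judging from how its return value is used in stack_predict below (y = a * narx + (1 - a) * arima), it apparently fits a single blend weight a on the held-out window. A minimal sketch under that assumption:

# Hypothetical sketch of gradientdescent.gd -- the real module is not in the post.
# x is a list of [narx, arima] prediction pairs, y the actual values; the weight a is
# fitted by gradient descent on the mean squared error of a*narx + (1-a)*arima.
def gd(x, y, lr=0.01, epochs=1000):
    a = 0.5                                    # start from an even blend
    n = float(len(x))
    for _ in range(epochs):
        grad = 0.0
        for (narx, arima), target in zip(x, y):
            pred = a * narx + (1 - a) * arima
            grad += 2.0 * (pred - target) * (narx - arima) / n
        a -= lr * grad
    return max(0.0, min(1.0, a))               # clamp the blend weight to [0, 1]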

def stack_predict(eng, rows, predict_step, season, a):
    train_data = rows
    data_matlab = matlab.double(train_data)
    naxnet_result = models.narxnet_matlab.predict(eng, data_matlab, predict_step)
    arima_result = models.arima_matlab.predict(eng, data_matlab, float(predict_step), season)
    union_result = []
    for i in range(len(naxnet_result)):
        union_result.append([naxnet_result[i][0], arima_result[i][0]])
    x = []
    for data_index in range(len(union_result)):
        item = []
        for model_index in range(len(union_result[0])):
            # clamp negative forecasts to zero
            data = 0 if union_result[data_index][model_index] < 0 else union_result[data_index][model_index]
            item.append(data)
        x.append(item)
    # blend the two models with the weight learned in stack_train
    y = []
    for index in range(len(x)):
        y.append(x[index][0] * a + (1-a) * x[index][1])
    return [y, x]
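
The post does not show a driver for this module. A minimal sketch of how the functions above might be wired together; the connection string, cell id, turn id and cut-off date are assumptions, and psycopg2 is assumed as the driver because the queries use PostgreSQL's date_part and date_trunc:

# Hypothetical driver -- everything marked "assumed" below is not from the original post.
import datetime

import matlab.engine
import psycopg2

import matlab_wrapper

if __name__ == '__main__':
    eng = matlab.engine.start_matlab()              # one shared MATLAB session
    conn = psycopg2.connect("dbname=omc user=omc")  # assumed connection parameters
    cur = conn.cursor()
    predict_start = datetime.datetime(2018, 12, 1)  # assumed cut-off date
    for cellid in ["assumed-cell-id"]:              # assumed list of cells
        matlab_wrapper.hour_predict_up(cellid, 1, cur, eng, predict_start)
        matlab_wrapper.hour_predict_down(cellid, 1, cur, eng, predict_start)
    conn.commit()
    eng.quit()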
