from sklearn.neural_network import MLPRegressor
from sklearn.externals import joblib
from sklearn import preprocessing
import numpy as np
import sys
# --- Train an MLP regressor on tab-separated data and persist it. ---
# Usage: script.py <n_input_cols> <n_output_cols> <data.tsv>
inputn = int(sys.argv[1])   # number of input (feature) columns, e.g. 3
outputn = int(sys.argv[2])  # number of output (target) columns, e.g. 1
inputcol = tuple(range(0, inputn, 1))
print(inputcol)
X = np.loadtxt(open(sys.argv[3], "r"), delimiter="\t", usecols=inputcol)
print(X)
outputcol = tuple(range(inputn, inputn + outputn, 1))
print(outputcol)
y = np.loadtxt(open(sys.argv[3], "r"), delimiter="\t", usecols=outputcol)
print(y)
# Standardize features before training.
# NOTE(review): the fitted scaler is NOT persisted alongside the model, so any
# later consumer must re-fit its own scaler — confirm this is intended.
sc = preprocessing.StandardScaler()
X_scaled = sc.fit_transform(X)
clf = MLPRegressor(activation='logistic', solver='sgd', learning_rate='adaptive')
clf.fit(X_scaled, y)
print(clf.get_params(True))
filename = 'nbrbp.pkl'
joblib.dump(clf, filename, compress=9)
# --- Verify accuracy (验证准确性) ---
from sklearn.externals import joblib
from sklearn import preprocessing
import numpy as np
import sys
def maxidx(listfoo):
    """Return the index of the first occurrence of the largest value in *listfoo*.

    Raises ValueError if *listfoo* is empty (via max()).
    """
    vmax = max(listfoo)
    for idx, val in enumerate(listfoo):
        if val == vmax:
            return idx
# Reload the persisted model and the data set for accuracy checking.
filename = 'nbrbp.pkl'
clf2 = joblib.load(filename)

inputn = int(sys.argv[1])
outputn = int(sys.argv[2])
inputcol = tuple(range(inputn))
outputcol = tuple(range(inputn, inputn + outputn))

X = np.loadtxt(open(sys.argv[3], "r"), delimiter="\t", usecols=inputcol)
y = np.loadtxt(open(sys.argv[3], "r"), delimiter="\t", usecols=outputcol)

# NOTE(review): the scaler is re-fit on this data instead of reusing the
# training-time scaler (which was never saved) — confirm this is intended.
sc = preprocessing.StandardScaler()
X_scaled = sc.fit_transform(X)

# Hit counters — presumably accumulated further below; verify against the
# remainder of the script.
hitmos1 = 0
hitmos2 = 0
hitmos3 = 0
maxidxv = 0
hit = 0
allcount = 0
# Compare model predictions against ground truth, row by row.
# flag is 1 when prediction and target fall on the same side of 100
# (i.e. (pred-100) and (truth-100) have the same sign), else 0.
for i in range(0, X.shape[0]):
    left = clf2.predict(X_scaled[i].reshape(1, -1)).tolist()
    right = y[i].tolist()
    tmpf = (float(left[0]) - 100) * (float(right) - 100)
    if tmpf > 0:
        flag = 1
    else:
        flag = 0
    # Emit: prediction <TAB> truth <TAB> flag
    print(str(left[0]) + '\t' + str(right) + '\t' + str(flag))