The First Danku Algorithm Competition: Predicting Parliamentary Election Results

GitHub code

Introduction

The competition task itself is very simple: given two features, longitude and latitude, predict the winning party, Democrat or Republican. A data point looks like [047606200, 122332100, 0], where the first two values are the features and the last is the winning party. The competition mandates the simplest kind of neural network; within that framework you tune the number of layers, the number of neurons per layer, the neuron weights, and the layer biases.
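The coordinates appear to be latitude and longitude multiplied by 10^6 so that they fit into the contract's integer types (this scaling is my own inference from the data, not something the organizers state); a quick check in Python:

point = [47606200, 122332100, 0]   # the sample point from the problem statement
lat, lon, label = point
print(lat / 1e6, lon / 1e6)        # 47.6062 122.3321, a plausible latitude/longitude pair
                                   # (the real dataset stores western longitudes as negative values)
print("Democrat" if label == 0 else "Republican")   # label 0 = Democrat, 1 = Republican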

The real purpose, though, is to explore whether Ethereum smart contracts can serve as a platform for algorithm competitions.

A more detailed description: https://github.com/doublespending/danku/tree/master/competition

Competition workflow

Populus is a Python-based smart contract development framework that lets Python programs interact with smart contracts. It is used here mainly because most ML libraries are written in Python.

Environment setup

  1. Set up the Populus framework
  2. pip install -r requirements.txt
populus==2.2.0
tensorflow==1.4.1
pytest!=3.3.*,>=2.7.2
pandas==0.22.0
matplotlib==2.1.2
web3==3.16.5

Fetching the data

  1. Running a local geth light node (--light mode) is enough; syncing a full node is far too slow. Also open the local RPC port 8545 so that the program below can interact with the smart contract via Web3;
  2. The program below connects to the local RPC port and, using the ABI, calls functions on the contract 0x9A0991fc223dFFE420e08f15b88a593a3b8D44B8 to download the dataset and visualize it. Note that fetching the data may time out; the workaround (raising the socket timeout) is included in the code;
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from dutils.dataset import Dataset
from dutils.neural_network import NeuralNetwork
from web3 import Web3, HTTPProvider, IPCProvider
from matplotlib import pyplot as plt
import numpy as np

# Set the HTTP timeout to 180 s, otherwise the request will time out
import socket
timeout = 180
socket.setdefaulttimeout(timeout)

w_scale = 1000 # Scale up weights by 1000x
b_scale = 1000 # Scale up biases by 1000x

def scale_packed_data(data, scale):
    # Scale data and convert it to an integer
    return list(map(lambda x: int(x*scale), data))

print("Connecting to geth...\n")
web3 = Web3(HTTPProvider('http://localhost:8545'))
# web3 = Web3(HTTPProvider('http://93.85.92.250:8545'))
# Use web3.isConnected() to check whether we are connected to geth's RPC port 8545
if web3.isConnected():
    print("connected!\n")
else:
    print("not connected!\n")
    sys.exit(1)

print("Connected to the geth node!\n")

abi = [{"constant":True,"inputs":[],"name":"init1_block_height","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"init2","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":False,"inputs":[{"name":"submission_index","type":"uint256"}],"name":"evaluate_model","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"submission_index","type":"uint256"},{"name":"data","type":"int256[3][]"}],"name":"model_accuracy","outputs":[{"name":"","type":"int256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"get_training_index","outputs":[{"name":"","type":"uint256[16]"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"evaluation_stage_block_size","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"uint256"},{"name":"","type":"uint256"}],"name":"test_data","outputs":[{"name":"","type":"int256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"get_testing_index","outputs":[{"name":"","type":"uint256[4]"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_test_data_groups","type":"int256[]"},{"name":"_test_data_group_nonces","type":"int256"}],"name":"reveal_test_data","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"paymentAddress","type":"address"},{"name":"num_neurons_input_layer","type":"uint256"},{"name":"num_neurons_output_layer","type":"uint256"},{"name":"num_neurons_hidden_layer","type":"uint256[]"},{"name":"weights","type":"int256[]"},{"name":"biases","type":"int256[]"}],"name":"get_submission_id","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"best_submission_index","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"use_test_data","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"_train_data_groups","type":"int256[]"},{"name":"_train_data_group_nonces","type":"int256"}],"name":"init3","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"l_nn","type":"uint256[]"},{"name":"input_layer","type":"int256[]"},{"name":"hidden_layers","type":"int256[]"},{"name":"output_layer","type":"int256[]"},{"name":"weights","type":"int256[]"},{"name":"biases","type":"int256[]"}],"name":"forward_pass2","outputs":[{"name":"","type":"int256[]"}],"payable":False,"stateMutability":"pure","type":"function"},{"constant":False,"inputs":[{"name":"_hashed_data_groups","type":"bytes32[20]"},{"name":"accuracy_criteria","type":"int256"},{"name":"organizer_refund_address","type":"address"}],"name":"init1","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"organizer","outputs":[{"name":"","type":"address"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"init_level","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"co
nstant":True,"inputs":[{"name":"","type":"uint256"}],"name":"testing_partition","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"get_train_data_length","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"best_submission_accuracy","outputs":[{"name":"","type":"int256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"finalize_contract","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"contract_terminated","outputs":[{"name":"","type":"bool"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"init3_block_height","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"get_submission_queue_length","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[{"name":"payment_address","type":"address"},{"name":"num_neurons_input_layer","type":"uint256"},{"name":"num_neurons_output_layer","type":"uint256"},{"name":"num_neurons_hidden_layer","type":"uint256[]"},{"name":"weights","type":"int256[]"},{"name":"biases","type":"int256[]"}],"name":"submit_model","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[{"name":"","type":"uint256"}],"name":"training_partition","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"reveal_test_data_groups_block_size","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":False,"inputs":[],"name":"cancel_contract","outputs":[],"payable":False,"stateMutability":"nonpayable","type":"function"},{"constant":True,"inputs":[],"name":"model_accuracy_criteria","outputs":[{"name":"","type":"int256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[{"name":"","type":"uint256"},{"name":"","type":"uint256"}],"name":"train_data","outputs":[{"name":"","type":"int256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"get_test_data_length","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"constant":True,"inputs":[],"name":"submission_stage_block_size","outputs":[{"name":"","type":"uint256"}],"payable":False,"stateMutability":"view","type":"function"},{"inputs":[],"payable":False,"stateMutability":"nonpayable","type":"constructor"},{"payable":True,"stateMutability":"payable","type":"fallback"}]

contract_tx = "0x9A0991fc223dFFE420e08f15b88a593a3b8D44B8"

# Get contract instance
danku = web3.eth.contract(abi, contract_tx)

print("Downloading training data from the contract...\n")
# Get training data
contract_train_data_length = danku.call().get_train_data_length()
print("train_data_length:", contract_train_data_length)
contract_train_data = []
# row i of the data
for i in range(contract_train_data_length):
    for j in range(3):
        # two feature dimensions plus one label
        contract_train_data.append(danku.call().train_data(i,j))
ds = Dataset()
# dps: data point size
ds.dps = 3
# unpack the flat data into rows of dps values
contract_train_data = ds.unpack_data(contract_train_data)
print("Download finished!\n")
print("Contract training data:\n" + str(contract_train_data) + "\n")

# Visualize the training data
print("Visualizing training data...\n")
scatter_x = np.array(list(map(lambda x: x[1:2][0], contract_train_data)))
scatter_y = np.array(list(map(lambda x: x[:1][0], contract_train_data)))
group = np.array(list(map(lambda x: x[2:3][0], contract_train_data)))
cdict = {0: "blue", 1: "red"}

names = []
names.append("Democrat")
names.append("Republican")

fig, ax = plt.subplots()
for g in np.unique(group):
    ix = np.where(group == g)
    ax.scatter(scatter_x[ix], scatter_y[ix], c = cdict[g], label = names[g], s = 4)
ax.legend()
plt.title("Training data points")
plt.show()
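For reference, Dataset.unpack_data from dutils appears to just regroup the flat list of integers into rows of ds.dps values; an equivalent sketch, assuming that behaviour:

flat = [44066415, -98675133, 1, 36418966, -84201074, 1]   # flat ints as returned by train_data(i, j)
dps = 3
rows = [tuple(flat[i:i + dps]) for i in range(0, len(flat), dps)]
print(rows)   # [(44066415, -98675133, 1), (36418966, -84201074, 1)]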

The downloaded data:

[(44066415, -98675133, 1), (36418966, -84201074, 1), (46521481, -97416257, 1), (39311151, -102742290, 1), (35221092, -85209082, 1), (39988674, -86563204, 1), (41546441, -103710045, 1), (32444907, -88193305, 0), (35836335, -86033176, 1), (42836424, -99747046, 1), (31477095, -98630919, 1), (41062763, -84716982, 1), (38176762, -85521751, 0), (31770473, -81620830, 0), (35918960, -92127207, 1), (32482828, -104287257, 1), (34294469, -94095969, 1), (42879975, -124070945, 1), (41613034, -90606284, 0), (36623330, -93940707, 1), (39158846, -87723392, 1), (43907616, -96653301, 1), (38360691, -91477511, 1), (34023386, -98687748, 1), (37175980, -92820353, 1), (34665780, -83709564, 1), (35722628, -87212698, 1), (45708402, -101154581, 1), (40990039, -109704889, 1), (44425040, -88111252, 1), (43848835, -90468791, 1), (32510383, -86813834, 1), (41112122, -102421494, 1), (36965149, -86013864, 1), (39638988, -92465810, 1), (38314373, -89050259, 1), (46084584, -118906079, 1), (33537730, -81599891, 1), (40772874, -82532057, 1), (32681539, -85565954, 1), (40169839, -80595763, 1), (34908133, -94089381, 1), (37195924, -76524805, 0), (44387188, -68074964, 0), (38085837, -120176042, 1), (43088397, -73591652, 1), (40023673, -87475513, 1), (44318900, -70477506, 1), (30901690, -89820725, 1), (30111843, -94190147, 1), (32337632, -92672165, 1), (47145874, -101159363, 1), (42586029, -76801671, 1), (42042732, -97829855, 1), (39121515, -88157258, 1), (40905947, -76859662, 1), (41903068, -89540365, 1), (40712066, -85468798, 1), (28350458, -98568760, 1), (38327408, -90155869, 1), (32118710, -106648973, 0), (46768695, -98565136, 1), (46007415, -96359342, 1), (41040988, -86254272, 1), (36311201, -78922920, 1), (46419838, -120231713, 1), (46418659, -102562280, 1), (36384473, -82266921, 1), (33900651, -92131777, 1), (32028823, -82516246, 1), (40780751, -73977182, 0), (34633251, -87418659, 1), (40056400, -84025444, 1), (41048947, -75608580, 1), (35781848, -80203546, 1), (42459711, -96457068, 1), (28422970, -99754260, 0), (48509680, -110966631, 1), (35780292, -89026456, 1), (34306936, -92398709, 1), (44662822, -70153049, 0), (33611010, -83602459, 1), (38121657, -105147660, 1), (44472352, -83959844, 1), (31534162, -94639022, 1), (38192224, -108415514, 1), (37167752, -84116614, 1), (44343261, -84944639, 1), (35285746, -110288704, 1), (31212878, -83412143, 1), (45145011, -123523136, 1), (38864632, -80212410, 1), (41583475, -85337737, 1), (44341602, -89526303, 0), (39393753, -88348819, 1), (32989503, -83876108, 1), (32771459, -93406102, 1), (36369783, -103505541, 1), (42059677, -94040876, 1), (38390344, -120490831, 1), (37638307, -106597775, 1), (33616723, -100254782, 1), (39778816, -92075059, 1), (37444623, -86400828, 1), (35803869, -87259140, 1), (36864233, -87409139, 1), (35274844, -90798946, 1), (38742077, -92962870, 1), (40402532, -78255764, 1), (35786732, -87787992, 1), (44827274, -73513225, 0), (37761293, -106981852, 1), (38440898, -78874173, 0), (40155779, -92233840, 1), (30711984, -102676783, 1), (38620028, -89569004, 1), (30468691, -87703035, 1), (44025217, -91770781, 1), (41244701, -98182952, 1), (41672687, -93572173, 0), (48360516, -108893920, 1), (30219829, -98358613, 1), (36690661, -93344687, 1), (43492815, -73421536, 1), (34929800, -84988536, 1), (41175557, -86554326, 1), (40536073, -95321135, 1), (40495710, -81216271, 1), (44772503, -69247539, 0), (35057695, -77431386, 1), (32366002, -93101086, 1), (40579830, -75579884, 0), (39408888, -81207860, 1), (47761025, -118729143, 1), (46984699, -95099350, 1), (32490777, 
-92159354, 1), (39138003, -79481555, 1), (36033601, -93704255, 1), (48067730, -123080362, 1), (44946121, -94075720, 1), (42803973, -84295949, 1), (44601118, -107788860, 1), (34488104, -99158545, 1), (38377411, -121444429, 0), (36690445, -97557809, 1), (40338699, -80810216, 1), (45098685, -100879214, 0), (46564002, -95082796, 1), (42785695, -110186599, 1), (39161343, -80049182, 1), (48940067, -111849008, 1), (36937670, -82622624, 1), (36052510, -79107692, 0), (34558293, -101803862, 1), (40801867, -85791823, 1), (34524376, -102784569, 1), (29357005, -99762039, 1), (46300349, -123456145, 1), (37270438, -107093153, 1), (46074658, -85733841, 1), (41811929, -87687320, 0), (41912702, -94898698, 1), (35843919, -90180600, 1), (40478118, -94423294, 1), (33745660, -102320586, 1), (36978223, -90081609, 1), (29279536, -89373855, 1), (35096272, -94349565, 1), (33047275, -102428171, 1), (34947259, -95720708, 1), (43415126, -96146555, 1), (31864331, -101493750, 1), (33151992, -91559968, 1), (39433804, -82366742, 1), (37968468, -80669597, 1), (30141958, -84552965, 1), (37193593, -92078527, 1), (44271785, -112306237, 1), (45325565, -111868085, 1), (45297105, -121233727, 1), (40314012, -85666794, 1), (37236073, -86270740, 1), (38021292, -84745488, 1), (34732322, -79829665, 0), (39073820, -83385303, 1), (33563367, -96000216, 1), (34262252, -78625479, 1), (36347807, -89818059, 1), (41838005, -111917217, 1), (36496258, -95702010, 1), (36077612, -83299826, 1), (38286819, -85732062, 1), (32254152, -84489818, 1), (46112755, -103520928, 1), (36520167, -82857862, 1), (38177464, -107727360, 0), (36817672, -88304440, 1), (34862630, -84248612, 1), (42727153, -87675979, 1), (34387337, -81082923, 0), (32782264, -95443370, 1), (43908687, -107530662, 1), (38958134, -84984085, 1), (42788579, -103299462, 1), (48465282, -99645681, 1), (46221840, -89240577, 1), (39519386, -122292996, 1), (42296842, -71533229, 0), (39296522, -80774491, 1), (47857915, -122092025, 0), (44430096, -73026735, 0), (38797733, -91048471, 1), (41549414, -80967673, 1), (28427276, -96667313, 1), (36694745, -77535517, 0), (43653701, -90841723, 1), (40628112, -105569245, 0), (34749594, -98167941, 1), (41649999, -92065181, 1), (29823383, -94475364, 1), (36044098, -82003691, 1), (33431043, -93876485, 1), (45258882, -95141945, 1), (32301179, -94742539, 1), (38032735, -85366055, 1), (32571552, -95057746, 1), (32118831, -82724555, 1), (38988409, -107231438, 0), (36241614, -88860371, 1), (40176417, -101687688, 1), (31953707, -99605724, 1), (35423045, -83263368, 1), (43406406, -93748603, 1), (32907328, -85077535, 1), (32901627, -84212644, 1), (36455346, -76280026, 1), (39839405, -74945624, 0), (43428355, -93261042, 1), (43002000, -83692507, 0), (43130459, -94855389, 1), (41994014, -95778514, 1), (48270833, -112311973, 1), (41979407, -100557354, 1), (39981457, -95092325, 1), (40715072, -87730743, 1), (39236906, -87674977, 1), (41871766, -71558518, 0), (32411237, -95289903, 1), (39283031, -82397708, 1), (40104094, -77472794, 1), (36008302, -105023886, 0), (33077086, -99958978, 1), (31417011, -101542300, 1), (34966095, -102313090, 1), (40515895, -85295619, 1), (44369593, -97652194, 1), (30537773, -98333113, 1), (32383145, -82557135, 1), (36124661, -77825049, 1), (36442431, -78557098, 1), (43041606, -78057252, 1), (39142737, -122243739, 1), (31216132, -87461537, 1), (47151357, -110222384, 1), (37716443, -84299699, 1), (36661010, -90850785, 1), (37858464, -91426693, 1), (37848391, -107680381, 0), (33857142, -94374800, 1), (40222408, -77941975, 1), (36254905, -92688068, 
1), (40180418, -78829021, 1), (45978288, -108194508, 1), (39466564, -91332846, 1), (32606213, -91759778, 1), (36632573, -94342802, 1), (40406448, -93584314, 1), (35520244, -84825054, 1), (36660281, -80913605, 1), (32306026, -101949963, 1), (31198914, -81332211, 1), (41610180, -71175183, 0), (38041293, -81064784, 1), (31569243, -103006747, 1), (35927901, -114972061, 0), (43203195, -91944055, 1), (39224095, -77878788, 1), (34981449, -84735233, 1), (30186506, -90880484, 1), (38169920, -91968002, 1), (35203213, -91606367, 1), (47722245, -121369352, 0), (35237428, -96248595, 1), (34670187, -91224126, 1), (33087138, -83224920, 0), (41204236, -83683700, 1), (39874071, -94153677, 1), (32743795, -101432354, 1), (35081997, -98087971, 1), (39440956, -84575746, 1), (48750618, -116540670, 1), (35785097, -88041426, 1), (43714955, -98644922, 1), (31760361, -84614914, 0), (40370660, -82481346, 1), (37866825, -122253582, 0), (41667733, -103102335, 1), (36379479, -79366733, 1), (46838747, -117644337, 0), (34577224, -91463319, 1), (34467520, -87647762, 1), (46897589, -119686316, 1), (39251510, -84914370, 1), (45989733, -93620235, 1), (38475369, -81880455, 1), (36603844, -89650005, 1), (40504545, -114412635, 1), (39544391, -81245804, 1), (48250130, -117353111, 1), (34079836, -91227608, 1), (44500341, -93382574, 1), (35044339, -106672872, 0), (45871169, -95237474, 1), (40742182, -89134822, 1), (38959176, -91924180, 1), (39116467, -82553489, 1), (42639932, -85004661, 1), (45801586, -122520347, 0), (32873466, -81875482, 1), (37434913, -88246148, 1), (39285204, -80385344, 1), (38961601, -82345709, 1), (47095793, -94021032, 1), (34603933, -96420055, 1), (34092391, -102829924, 1), (46821648, -95391968, 1), (41604786, -72871945, 0), (31920835, -91922802, 1), (31755315, -103155748, 1), (33611119, -84068288, 0), (32900767, -85822787, 1), (31808458, -87316372, 1), (36455368, -97187383, 1), (31026276, -96533031, 1), (46125267, -123905174, 0), (39376153, -90293302, 1), (32308289, -100404514, 1), (39987910, -84350107, 1), (43117255, -79018415, 1), (28997960, -82016139, 1), (40317430, -98904418, 1), (43026984, -73055156, 0), (39538568, -79190791, 1), (39313258, -86756423, 1), (36429663, -90692099, 1), (39241181, -85564278, 1), (36080707, -80024400, 0), (43674582, -96791340, 1), (33786594, -118298662, 0), (40042115, -116974803, 1), (35097862, -82988872, 1), (34723793, -96962983, 1), (40118361, -86964894, 1), (31961337, -91660042, 1), (39953262, -82211784, 1), (44898516, -92923010, 0), (31914948, -92669269, 1), (45173284, -122259861, 0), (30572825, -90439399, 1), (36982951, -94187929, 1), (44242705, -92425643, 1), (28421814, -98071902, 1), (45996345, -112088845, 1), (46475341, -117537763, 1), (42533987, -99702813, 1), (37274585, -119540624, 1), (37637433, -84976020, 1), (38697289, -87091762, 1), (35737757, -81616600, 1), (34591790, -83352534, 1), (34441802, -80603298, 1), (33610018, -101821292, 1), (46849592, -101972303, 1), (33698302, -81864218, 1), (45504288, -100311701, 1), (36445330, -91054035, 1), (34897753, -77774970, 1), (36337083, -102916844, 1), (40726466, -86989485, 1), (43691966, -101601609, 1), (40890132, -97597278, 1), (35151590, -94966276, 1), (34964812, -100270689, 1), (35709894, -98810990, 1), (35521229, -81418729, 1), (38866297, -84517548, 1), (31574779, -85536957, 1), (39632997, -86607678, 1), (41394526, -75824239, 0), (42869816, -85969722, 1)]

Data visualization:
[Figure: scatter plot of the training data points; Democrat in blue, Republican in red]

Training the model

I first used the dutils package provided by the organizers to train the neural network and output its parameters. I trained with 5-fold cross-validation and picked the configuration with the smallest generalization error.

import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from dutils.dataset import Dataset
from dutils.neural_network import NeuralNetwork
from matplotlib import pyplot as plt
import numpy as np
from numpy import array as narray

###
from sklearn.model_selection import KFold
###

w_scale = 1000 # Scale up weights by 1000x
b_scale = 1000 # Scale up biases by 1000x

def scale_packed_data(data, scale):
    # Scale data and convert it to an integer
    return list(map(lambda x: int(x*scale), data))



all_data = [(44066415, -98675133, 1), (36418966, -84201074, 1), (46521481, -97416257, 1), (39311151, -102742290, 1), (35221092, -85209082, 1), (39988674, -86563204, 1), (41546441, -103710045, 1), (32444907, -88193305, 0), (35836335, -86033176, 1), (42836424, -99747046, 1), (31477095, -98630919, 1), (41062763, -84716982, 1), (38176762, -85521751, 0), (31770473, -81620830, 0), (35918960, -92127207, 1), (32482828, -104287257, 1), (34294469, -94095969, 1), (42879975, -124070945, 1), (41613034, -90606284, 0), (36623330, -93940707, 1), (39158846, -87723392, 1), (43907616, -96653301, 1), (38360691, -91477511, 1), (34023386, -98687748, 1), (37175980, -92820353, 1), (34665780, -83709564, 1), (35722628, -87212698, 1), (45708402, -101154581, 1), (40990039, -109704889, 1), (44425040, -88111252, 1), (43848835, -90468791, 1), (32510383, -86813834, 1), (41112122, -102421494, 1), (36965149, -86013864, 1), (39638988, -92465810, 1), (38314373, -89050259, 1), (46084584, -118906079, 1), (33537730, -81599891, 1), (40772874, -82532057, 1), (32681539, -85565954, 1), (40169839, -80595763, 1), (34908133, -94089381, 1), (37195924, -76524805, 0), (44387188, -68074964, 0), (38085837, -120176042, 1), (43088397, -73591652, 1), (40023673, -87475513, 1), (44318900, -70477506, 1), (30901690, -89820725, 1), (30111843, -94190147, 1), (32337632, -92672165, 1), (47145874, -101159363, 1), (42586029, -76801671, 1), (42042732, -97829855, 1), (39121515, -88157258, 1), (40905947, -76859662, 1), (41903068, -89540365, 1), (40712066, -85468798, 1), (28350458, -98568760, 1), (38327408, -90155869, 1), (32118710, -106648973, 0), (46768695, -98565136, 1), (46007415, -96359342, 1), (41040988, -86254272, 1), (36311201, -78922920, 1), (46419838, -120231713, 1), (46418659, -102562280, 1), (36384473, -82266921, 1), (33900651, -92131777, 1), (32028823, -82516246, 1), (40780751, -73977182, 0), (34633251, -87418659, 1), (40056400, -84025444, 1), (41048947, -75608580, 1), (35781848, -80203546, 1), (42459711, -96457068, 1), (28422970, -99754260, 0), (48509680, -110966631, 1), (35780292, -89026456, 1), (34306936, -92398709, 1), (44662822, -70153049, 0), (33611010, -83602459, 1), (38121657, -105147660, 1), (44472352, -83959844, 1), (31534162, -94639022, 1), (38192224, -108415514, 1), (37167752, -84116614, 1), (44343261, -84944639, 1), (35285746, -110288704, 1), (31212878, -83412143, 1), (45145011, -123523136, 1), (38864632, -80212410, 1), (41583475, -85337737, 1), (44341602, -89526303, 0), (39393753, -88348819, 1), (32989503, -83876108, 1), (32771459, -93406102, 1), (36369783, -103505541, 1), (42059677, -94040876, 1), (38390344, -120490831, 1), (37638307, -106597775, 1), (33616723, -100254782, 1), (39778816, -92075059, 1), (37444623, -86400828, 1), (35803869, -87259140, 1), (36864233, -87409139, 1), (35274844, -90798946, 1), (38742077, -92962870, 1), (40402532, -78255764, 1), (35786732, -87787992, 1), (44827274, -73513225, 0), (37761293, -106981852, 1), (38440898, -78874173, 0), (40155779, -92233840, 1), (30711984, -102676783, 1), (38620028, -89569004, 1), (30468691, -87703035, 1), (44025217, -91770781, 1), (41244701, -98182952, 1), (41672687, -93572173, 0), (48360516, -108893920, 1), (30219829, -98358613, 1), (36690661, -93344687, 1), (43492815, -73421536, 1), (34929800, -84988536, 1), (41175557, -86554326, 1), (40536073, -95321135, 1), (40495710, -81216271, 1), (44772503, -69247539, 0), (35057695, -77431386, 1), (32366002, -93101086, 1), (40579830, -75579884, 0), (39408888, -81207860, 1), (47761025, -118729143, 1), (46984699, -95099350, 1), 
(32490777, -92159354, 1), (39138003, -79481555, 1), (36033601, -93704255, 1), (48067730, -123080362, 1), (44946121, -94075720, 1), (42803973, -84295949, 1), (44601118, -107788860, 1), (34488104, -99158545, 1), (38377411, -121444429, 0), (36690445, -97557809, 1), (40338699, -80810216, 1), (45098685, -100879214, 0), (46564002, -95082796, 1), (42785695, -110186599, 1), (39161343, -80049182, 1), (48940067, -111849008, 1), (36937670, -82622624, 1), (36052510, -79107692, 0), (34558293, -101803862, 1), (40801867, -85791823, 1), (34524376, -102784569, 1), (29357005, -99762039, 1), (46300349, -123456145, 1), (37270438, -107093153, 1), (46074658, -85733841, 1), (41811929, -87687320, 0), (41912702, -94898698, 1), (35843919, -90180600, 1), (40478118, -94423294, 1), (33745660, -102320586, 1), (36978223, -90081609, 1), (29279536, -89373855, 1), (35096272, -94349565, 1), (33047275, -102428171, 1), (34947259, -95720708, 1), (43415126, -96146555, 1), (31864331, -101493750, 1), (33151992, -91559968, 1), (39433804, -82366742, 1), (37968468, -80669597, 1), (30141958, -84552965, 1), (37193593, -92078527, 1), (44271785, -112306237, 1), (45325565, -111868085, 1), (45297105, -121233727, 1), (40314012, -85666794, 1), (37236073, -86270740, 1), (38021292, -84745488, 1), (34732322, -79829665, 0), (39073820, -83385303, 1), (33563367, -96000216, 1), (34262252, -78625479, 1), (36347807, -89818059, 1), (41838005, -111917217, 1), (36496258, -95702010, 1), (36077612, -83299826, 1), (38286819, -85732062, 1), (32254152, -84489818, 1), (46112755, -103520928, 1), (36520167, -82857862, 1), (38177464, -107727360, 0), (36817672, -88304440, 1), (34862630, -84248612, 1), (42727153, -87675979, 1), (34387337, -81082923, 0), (32782264, -95443370, 1), (43908687, -107530662, 1), (38958134, -84984085, 1), (42788579, -103299462, 1), (48465282, -99645681, 1), (46221840, -89240577, 1), (39519386, -122292996, 1), (42296842, -71533229, 0), (39296522, -80774491, 1), (47857915, -122092025, 0), (44430096, -73026735, 0), (38797733, -91048471, 1), (41549414, -80967673, 1), (28427276, -96667313, 1), (36694745, -77535517, 0), (43653701, -90841723, 1), (40628112, -105569245, 0), (34749594, -98167941, 1), (41649999, -92065181, 1), (29823383, -94475364, 1), (36044098, -82003691, 1), (33431043, -93876485, 1), (45258882, -95141945, 1), (32301179, -94742539, 1), (38032735, -85366055, 1), (32571552, -95057746, 1), (32118831, -82724555, 1), (38988409, -107231438, 0), (36241614, -88860371, 1), (40176417, -101687688, 1), (31953707, -99605724, 1), (35423045, -83263368, 1), (43406406, -93748603, 1), (32907328, -85077535, 1), (32901627, -84212644, 1), (36455346, -76280026, 1), (39839405, -74945624, 0), (43428355, -93261042, 1), (43002000, -83692507, 0), (43130459, -94855389, 1), (41994014, -95778514, 1), (48270833, -112311973, 1), (41979407, -100557354, 1), (39981457, -95092325, 1), (40715072, -87730743, 1), (39236906, -87674977, 1), (41871766, -71558518, 0), (32411237, -95289903, 1), (39283031, -82397708, 1), (40104094, -77472794, 1), (36008302, -105023886, 0), (33077086, -99958978, 1), (31417011, -101542300, 1), (34966095, -102313090, 1), (40515895, -85295619, 1), (44369593, -97652194, 1), (30537773, -98333113, 1), (32383145, -82557135, 1), (36124661, -77825049, 1), (36442431, -78557098, 1), (43041606, -78057252, 1), (39142737, -122243739, 1), (31216132, -87461537, 1), (47151357, -110222384, 1), (37716443, -84299699, 1), (36661010, -90850785, 1), (37858464, -91426693, 1), (37848391, -107680381, 0), (33857142, -94374800, 1), (40222408, -77941975, 1), (36254905, 
-92688068, 1), (40180418, -78829021, 1), (45978288, -108194508, 1), (39466564, -91332846, 1), (32606213, -91759778, 1), (36632573, -94342802, 1), (40406448, -93584314, 1), (35520244, -84825054, 1), (36660281, -80913605, 1), (32306026, -101949963, 1), (31198914, -81332211, 1), (41610180, -71175183, 0), (38041293, -81064784, 1), (31569243, -103006747, 1), (35927901, -114972061, 0), (43203195, -91944055, 1), (39224095, -77878788, 1), (34981449, -84735233, 1), (30186506, -90880484, 1), (38169920, -91968002, 1), (35203213, -91606367, 1), (47722245, -121369352, 0), (35237428, -96248595, 1), (34670187, -91224126, 1), (33087138, -83224920, 0), (41204236, -83683700, 1), (39874071, -94153677, 1), (32743795, -101432354, 1), (35081997, -98087971, 1), (39440956, -84575746, 1), (48750618, -116540670, 1), (35785097, -88041426, 1), (43714955, -98644922, 1), (31760361, -84614914, 0), (40370660, -82481346, 1), (37866825, -122253582, 0), (41667733, -103102335, 1), (36379479, -79366733, 1), (46838747, -117644337, 0), (34577224, -91463319, 1), (34467520, -87647762, 1), (46897589, -119686316, 1), (39251510, -84914370, 1), (45989733, -93620235, 1), (38475369, -81880455, 1), (36603844, -89650005, 1), (40504545, -114412635, 1), (39544391, -81245804, 1), (48250130, -117353111, 1), (34079836, -91227608, 1), (44500341, -93382574, 1), (35044339, -106672872, 0), (45871169, -95237474, 1), (40742182, -89134822, 1), (38959176, -91924180, 1), (39116467, -82553489, 1), (42639932, -85004661, 1), (45801586, -122520347, 0), (32873466, -81875482, 1), (37434913, -88246148, 1), (39285204, -80385344, 1), (38961601, -82345709, 1), (47095793, -94021032, 1), (34603933, -96420055, 1), (34092391, -102829924, 1), (46821648, -95391968, 1), (41604786, -72871945, 0), (31920835, -91922802, 1), (31755315, -103155748, 1), (33611119, -84068288, 0), (32900767, -85822787, 1), (31808458, -87316372, 1), (36455368, -97187383, 1), (31026276, -96533031, 1), (46125267, -123905174, 0), (39376153, -90293302, 1), (32308289, -100404514, 1), (39987910, -84350107, 1), (43117255, -79018415, 1), (28997960, -82016139, 1), (40317430, -98904418, 1), (43026984, -73055156, 0), (39538568, -79190791, 1), (39313258, -86756423, 1), (36429663, -90692099, 1), (39241181, -85564278, 1), (36080707, -80024400, 0), (43674582, -96791340, 1), (33786594, -118298662, 0), (40042115, -116974803, 1), (35097862, -82988872, 1), (34723793, -96962983, 1), (40118361, -86964894, 1), (31961337, -91660042, 1), (39953262, -82211784, 1), (44898516, -92923010, 0), (31914948, -92669269, 1), (45173284, -122259861, 0), (30572825, -90439399, 1), (36982951, -94187929, 1), (44242705, -92425643, 1), (28421814, -98071902, 1), (45996345, -112088845, 1), (46475341, -117537763, 1), (42533987, -99702813, 1), (37274585, -119540624, 1), (37637433, -84976020, 1), (38697289, -87091762, 1), (35737757, -81616600, 1), (34591790, -83352534, 1), (34441802, -80603298, 1), (33610018, -101821292, 1), (46849592, -101972303, 1), (33698302, -81864218, 1), (45504288, -100311701, 1), (36445330, -91054035, 1), (34897753, -77774970, 1), (36337083, -102916844, 1), (40726466, -86989485, 1), (43691966, -101601609, 1), (40890132, -97597278, 1), (35151590, -94966276, 1), (34964812, -100270689, 1), (35709894, -98810990, 1), (35521229, -81418729, 1), (38866297, -84517548, 1), (31574779, -85536957, 1), (39632997, -86607678, 1), (41394526, -75824239, 0), (42869816, -85969722, 1)]

# contract_train_data = all_data[:320]
# contract_train_data_length = len(contract_train_data)
# print("Contract train data length:", contract_train_data_length)
# contract_test_data = all_data[-80:]
# contract_test_data_length = len(contract_test_data)
# print("Contract test data length:", contract_test_data_length)

# print("Download finished!\n")


pos_data = []
neg_data = []

for data in all_data:
    if data[2] == 1:
        pos_data.append(data)
    elif data[2] == 0:
        neg_data.append(data)
    else:
        print("Warning:", data)


# Oversample the under-represented class (label 0) to balance the dataset
more_neg_data = neg_data * 6

all_data = pos_data + more_neg_data

# Truncate to 650 points (25 * 26)
all_data = all_data[:650]



print("data num:", len(all_data))
# Train a neural network with the data
il_nn = 2 # 2 input neurons
hl_nn = [60, 60] # 2 hidden layers with 60 neurons each
ol_nn = 2 # 2 output neurons for binary classification

###
scores = 0
n = 5
kf = KFold(n_splits=n, shuffle=True, random_state=2018)
print("Now we are training with %d-fold cross validation...\n" % n)
for tr_ind, te_ind in kf.split(all_data):
    contract_train_data = np.array(all_data)[tr_ind]
    contract_train_data_length = len(contract_train_data)
    print("Contract train data length:", contract_train_data_length)
    contract_test_data = np.array(all_data)[te_ind]
    #contract_test_data = np.array(all_data)[tr_ind]
    contract_test_data_length = len(contract_test_data)
    print("Contract test data length:", contract_test_data_length)

    # Train a neural network with contract data
    print("Training a neural network with the following:\n\
        configuration: " +str(il_nn) + " x " + str(hl_nn) + " x " + str(ol_nn) + "\n\
        total iteration: 100000\n\
        learning rate: 0.002")
    nn = NeuralNetwork(il_nn, hl_nn, ol_nn, 0.002, 100000, 52, 10000)
    nn.load_train_data(nn.binary_2_one_hot(contract_train_data))
    nn.load_test_data(nn.binary_2_one_hot(contract_test_data))
    nn.init_network()
    nn.train()
    print("Neural network trained!")
    score = nn.test()
    scores += float(score)
    print("This fold score:%s\n" % str(score))
print("Final score:%f" % (scores/n))
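Before using any of the submission options below, the trained floating-point weights and biases have to be converted to integers, because the contract stores int256 values and divides by int_precision = 10000 in its forward pass (note that the sample scripts set w_scale = b_scale = 1000; see pitfall 7). A minimal sketch using the scale_packed_data helper defined above, with hypothetical flat parameter lists exported from the trained network:

int_precision = 10000   # must match int_precision in the contract

# Hypothetical flat parameter lists (replace with the real exported values)
trained_weights = [0.5, -1.25, 0.75, 0.125]
trained_biases = [0.25, -0.5]

weights_int = scale_packed_data(trained_weights, int_precision)
biases_int = scale_packed_data(trained_biases, int_precision)
print(weights_int)   # [5000, -12500, 7500, 1250]
print(biases_int)    # [2500, -5000]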

Submitting a solution

Option 1: MyEtherWallet

  1. Enter the contract address and the ABI
    [screenshot]
  2. Choose the function and fill in its arguments. Note: if an array [1, 2, 3] is a parameter, do not enter "[1, 2, 3]"; enter "1, 2, 3" instead (see the snippet after this list).
    [screenshot]
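For example, a quick way to turn a Python list into the comma-separated form that MyEtherWallet expects:

weights_int = [5000, -12500, 7500, 1250]
print(", ".join(map(str, weights_int)))   # -> 5000, -12500, 7500, 1250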

Option 2: Remix and MetaMask

  1. Compile and run the smart contract code locally with Remix; the code is here
  2. Call the contract function
    [screenshot]
  3. Take the binary form of the transaction (the input field in the screenshot)
    [screenshot]
  4. Use MetaMask to send the input data to the target contract (or send it programmatically, as sketched after this list)
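As an alternative to both wallet-based options, the same web3 connection used to download the data can also send the submission, since web3 3.x exposes state-changing calls through transact() in the same style as the call() reads above. A rough sketch, assuming an unlocked account on the local geth node and the scaled integer parameters from the training step (the layer sizes, weights_int and biases_int are placeholders for your own model):

account = web3.eth.accounts[0]   # an unlocked account on the local geth node

tx_hash = danku.transact({"from": account, "gas": 4000000}).submit_model(
    account,       # payment_address for the reward
    2,             # num_neurons_input_layer
    2,             # num_neurons_output_layer
    [60, 60],      # num_neurons_hidden_layer
    weights_int,   # flat list of scaled integer weights
    biases_int)    # one scaled integer bias per non-input layer
print("submit_model transaction:", tx_hash)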

Contract analysis

A brief outline of the flow:

  1. The organizer submits hashes of the data groups (an off-chain sketch of this hashing follows the list)
  2. The contract runs and randomly selects which hashed data groups will form the training set
  3. The organizer reveals the corresponding training data
  4. Participants download the data and train their models
  5. Participants submit their models
  6. The organizer reveals the test data
  7. The reward is paid out according to accuracy
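The commitment in step 1 can be reproduced off-chain to check that a revealed data group matches its hash. A rough Python sketch of the contract's sha_data_group (reproduced further down), assuming Solidity's sha256 over an int256[] packs every element as a 32-byte big-endian two's-complement word:

import hashlib

def sha_data_group(data_group, nonce):
    # data_group: one partition of 25 data points flattened to 75 ints; nonce: a single int
    packed = b"".join((v % 2**256).to_bytes(32, "big") for v in data_group + [nonce])
    return hashlib.sha256(packed).hexdigest()

# Example with a dummy 25-point group (75 zeros) and nonce 42
print(sha_data_group([0] * 75, 42))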

Pitfalls

  1. As soon as the neural network has many parameters, the transaction becomes very large and is hard to get onto the Ethereum network (a rough size estimate follows this list)
  2. Danku's neural network is odd: weights are per connection as usual, but each layer shares a single bias (biases[layer_i]) instead of each neuron having its own
  3. Danku's network has no softmax (outputs are not mapped into [0, 1]); with a scaling factor of 10,000, outputs only land in the 0-10,000 and 10,000-20,000 ranges, so the provided neural network library is essentially unusable here
  4. I suspect the provided neural network library itself has bugs
  5. The accuracy calculation uses a rather peculiar algorithm and is not very reasonable
  6. The way the ReLU function is used is also strange
  7. The provided sample code scales by 1,000, while the contract uses 10,000
  8. The contract does not validate that the biases array has a sensible shape
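To put pitfall 1 in numbers, here is a rough estimate of the parameter payload for the [2, 60, 60, 2] network trained above, assuming every weight and bias is ABI-encoded as a 32-byte word:

l_nn = [2, 60, 60, 2]
num_weights = sum(a * b for a, b in zip(l_nn, l_nn[1:]))   # 2*60 + 60*60 + 60*2 = 3840
num_biases = len(l_nn) - 1                                 # 3
print(num_weights, "weights,", num_biases, "biases")
print("~", (num_weights + num_biases) * 32, "bytes of parameters")   # roughly 123 KB of calldata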

Workaround

Looking at the data, I found that the classes are extremely imbalanced, so I built a simple neural network of my own that outputs only a single label.
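Because the final score is computed by the contract's own integer forward pass, it is worth replicating that pass off-chain before submitting. A rough Python sketch of forward_pass2 from the contract below (integer weights, one bias per layer transition, ReLU on every layer including the output, truncating division by int_precision = 10000); the names here mirror the contract and are not from any official library:

INT_PRECISION = 10000   # matches int_precision in the contract

def trunc_div(a, b):
    # Solidity's integer division truncates toward zero; Python's // floors
    q = abs(a) // abs(b)
    return q if (a >= 0) == (b >= 0) else -q

def relu(x):
    return x if x > 0 else 0

def forward_pass(data_point, weights, biases, l_nn):
    # l_nn: neurons per layer, e.g. [2, 60, 60, 2]
    # weights: flat list, iterating previous-layer neurons fastest
    # biases: one integer bias per layer transition (len(l_nn) - 1 values)
    prev_layer = list(data_point[:l_nn[0]])
    w_i = 0
    for layer_i in range(len(l_nn) - 1):
        current_layer = []
        for _ in range(l_nn[layer_i + 1]):
            total = 0
            for prev_val in prev_layer:
                total += prev_val * weights[w_i]
                w_i += 1
            total += biases[layer_i]
            current_layer.append(relu(trunc_div(total, INT_PRECISION)))
        prev_layer = current_layer
    return prev_layer   # output layer; take the index of the largest value as the predicted label

# Tiny example: a 2 x 2 network with no hidden layer
print(forward_pass([44066415, -98675133], [1, 1, 1, -1], [0], [2, 2]))   # -> [0, 14274]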

Contract

pragma solidity ^0.4.19;
// Danku contract version 0.0.1
// Data points are x, y, and z

contract Danku_demo {
  function Danku_demo() public {
    // Neural Network Structure:
    //
    // (asserted) input layer x number of neurons
    // (optional) hidden layers x number of neurons
    // (asserted) output layer x number of neurons
  }
  struct Submission {
      address payment_address;
      // Define the number of neurons each layer has.
      uint num_neurons_input_layer;
      uint num_neurons_output_layer;
      // There can be multiple hidden layers.
      uint[] num_neurons_hidden_layer;
      // Weights indexes are the following:
      // weights[l_i x l_n_i x pl_n_i]
      // Also number of layers in weights is layers.length-1
      int256[] weights;
      int256[] biases;
  }
  struct NeuralLayer {
    int256[] neurons;
    int256[] errors;
    string layer_type;
  }

  address public organizer;
  // Keep track of the best model
  uint public best_submission_index;
  // Keep track of best model accuracy
  int256 public best_submission_accuracy = 0;
  // The model accuracy criteria
  int256 public model_accuracy_criteria;
  // Use test data if provided
  bool public use_test_data = false;
  // Each partition is 5% of the total dataset size
  uint constant partition_size = 25;
  // Data points are made up of x and y coordinates and the prediction
  uint constant datapoint_size = 3;
  uint constant prediction_size = 1;
  // Max number of data groups
  // Change this to your data group size
  uint16 constant max_num_data_groups = 500;
  // Training partition size
  uint16 constant training_data_group_size = 400;
  // Testing partition size
  uint16 constant testing_data_group_size = max_num_data_groups - training_data_group_size;
  // Dataset is divided into data groups.
  // Every data group includes a nonce.
  // Look at sha_data_group() for more detail about hashing a data group
  bytes32[max_num_data_groups/partition_size] hashed_data_groups;
  // Nonces are revealed together with data groups
  uint[max_num_data_groups/partition_size] data_group_nonces;
  // + 1 for prediction
  // Each data point has 3 values in total (x, y, and the prediction)
  int256[datapoint_size][] public train_data;
  int256[datapoint_size][] public test_data;
  bytes32 partition_seed;
  // Deadline for submitting solutions in terms of block size
  uint public submission_stage_block_size = 241920; // 6 weeks timeframe
  // Deadline for revealing the testing dataset
  uint public reveal_test_data_groups_block_size = 17280; // 3 days timeframe
  // Deadline for evaluating the submissions
  uint public evaluation_stage_block_size = 40320; // 7 days timeframe
  uint public init1_block_height;
  uint public init3_block_height;
  uint public init_level = 0;
  // Training partition is 16 data groups (80%)
  // Testing partition is 4 data groups (20%)
  uint[training_data_group_size/partition_size] public training_partition;
  uint[testing_data_group_size/partition_size] public testing_partition;
  uint256 train_dg_revealed = 0;
  uint256 test_dg_revealed = 0;
  Submission[] submission_queue;
  bool public contract_terminated = false;
  // Integer precision for calculating float values for weights and biases
  int constant int_precision = 10000;

  // Takes in array of hashed data points of the entire dataset,
  // submission and evaluation times
  function init1(bytes32[max_num_data_groups/partition_size] _hashed_data_groups, int accuracy_criteria, address organizer_refund_address) external {
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Make sure it's called in order
    assert(init_level == 0);
    organizer = organizer_refund_address;
    init_level = 1;
    init1_block_height = block.number;

    // Make sure there are in total 20 hashed data groups
    assert(_hashed_data_groups.length == max_num_data_groups/partition_size);
    hashed_data_groups = _hashed_data_groups;
    // Accuracy criteria example: 85.9% => 8,590
    // 100 % => 10,000
    assert(accuracy_criteria > 0);
    model_accuracy_criteria = accuracy_criteria;
  }

  function init2() external {
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Only allow calling it once, in order
    assert(init_level == 1);
    // Make sure it's being called within 20 blocks on init1()
    // to minimize organizer influence on random index selection
    if (block.number <= init1_block_height+20 && block.number > init1_block_height) {
      // TODO: Also make sure it's being called 1 block after init1()
      // Randomly select indexes
      uint[] memory index_array = new uint[](max_num_data_groups/partition_size);
      for (uint i = 0; i < max_num_data_groups/partition_size; i++) {
        index_array[i] = i;
      }
      randomly_select_index(index_array);
      init_level = 2;
    } else {
      // Cancel the contract if init2() hasn't been called within 20
      // blocks of init1()
      cancel_contract();
    }
  }

  function init3(int256[] _train_data_groups, int256 _train_data_group_nonces) external {
    // Pass a single data group at a time
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Only allow calling once, in order
    assert(init_level == 2);
    // Verify data group and nonce lengths
    assert((_train_data_groups.length/partition_size)/datapoint_size == 1);
    // Verify data group hashes
    // Order of revealed training data group must be the same with training partitions
    // Otherwise hash verification will fail
    assert(sha_data_group(_train_data_groups, _train_data_group_nonces) ==
      hashed_data_groups[training_partition[train_dg_revealed]]);
    train_dg_revealed += 1;
    // Assign training data after verifying the corresponding hash
    unpack_data_groups(_train_data_groups, true);
    if (train_dg_revealed == (training_data_group_size/partition_size)) {
      init_level = 3;
      init3_block_height = block.number;
    }
  }

  function get_training_index() public view returns(uint[training_data_group_size/partition_size]) {
    return training_partition;
  }

  function get_testing_index() public view returns(uint[testing_data_group_size/partition_size]) {
    return testing_partition;
  }

  function get_submission_queue_length() public view returns(uint) {
    return submission_queue.length;
  }

  function submit_model(
    // Public function for users to submit a solution
    address payment_address,
    uint num_neurons_input_layer,
    uint num_neurons_output_layer,
    uint[] num_neurons_hidden_layer,
    int[] weights,
    int256[] biases) public {
      // Make sure contract is not terminated
      assert(contract_terminated == false);
      // Make sure it's not the initialization stage anymore
      assert(init_level == 3);
      // Make sure it's still within the submission stage
      assert(block.number < init3_block_height + submission_stage_block_size);
      // Make sure that num of neurons in the input & output layer matches
      // the problem description
      assert(num_neurons_input_layer == datapoint_size - prediction_size);
      // Because we can encode binary output in two different ways, we check
      // for both of them
      assert(num_neurons_output_layer == prediction_size || num_neurons_output_layer == (prediction_size+1));
      // Make sure that the number of weights match network structure
      assert(valid_weights(weights, num_neurons_input_layer, num_neurons_output_layer, num_neurons_hidden_layer));
      // Add solution to submission queue
      submission_queue.push(Submission(
        payment_address,
        num_neurons_input_layer,
        num_neurons_output_layer,
        num_neurons_hidden_layer,
        weights,
        biases));
  }

  function get_submission_id(
    // Public function that returns the submission index ID
    address paymentAddress,
    uint num_neurons_input_layer,
    uint num_neurons_output_layer,
    uint[] num_neurons_hidden_layer,
    int[] weights,
    int256[] biases) public view returns (uint) {
      // Iterate over submission queue to get submission index ID
      for (uint i = 0; i < submission_queue.length; i++) {
        if (submission_queue[i].payment_address != paymentAddress) {
          continue;
        }
        if (submission_queue[i].num_neurons_input_layer != num_neurons_input_layer) {
          continue;
        }
        if (submission_queue[i].num_neurons_output_layer != num_neurons_output_layer) {
          continue;
        }
        for (uint j = 0; j < num_neurons_hidden_layer.length; j++) {
            if (submission_queue[i].num_neurons_hidden_layer[j] != num_neurons_hidden_layer[j]) {
              continue;
            }
        }
        for (uint k = 0; k < weights.length; k++) {
            if (submission_queue[i].weights[k] != weights[k]) {
              continue;
            }
        }
        for (uint l = 0; l < biases.length; l++) {
          if (submission_queue[i].biases[l] != biases[l]) {
            continue;
          }
        }
        // If everything matches, return the submission index
        return i;
      }
      // If submission is not in the queue, just throw an exception
      require(false);
  }

    function reveal_test_data(int256[] _test_data_groups, int256 _test_data_group_nonces) external {
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Make sure it's not the initialization stage anymore
    assert(init_level == 3);
    // Make sure it's revealed after the submission stage
    assert(block.number >= init3_block_height + submission_stage_block_size);
    // Make sure it's revealed within the reveal stage
    assert(block.number < init3_block_height + submission_stage_block_size + reveal_test_data_groups_block_size);
    // Verify data group and nonce lengths
    assert((_test_data_groups.length/partition_size)/datapoint_size == 1);
    // Verify data group hashes
    assert(sha_data_group(_test_data_groups, _test_data_group_nonces) ==
      hashed_data_groups[testing_partition[test_dg_revealed]]);
    test_dg_revealed += 1;
    // Assign testing data after verifying the corresponding hash
    unpack_data_groups(_test_data_groups, false);
    // Use test data for evaluation
    use_test_data = true;
  }

  function evaluate_model(uint submission_index) public {
    // TODO: Make sure that if there's two same submission w/ same weights
    // and biases, the first one submitted should get the reward.
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Make sure it's not the initialization stage anymore
    assert(init_level == 3);
    // Make sure it's evaluated after the reveal stage
    assert(block.number >= init3_block_height + submission_stage_block_size + reveal_test_data_groups_block_size);
    // Make sure it's evaluated within the evaluation stage
    assert(block.number < init3_block_height + submission_stage_block_size + reveal_test_data_groups_block_size + evaluation_stage_block_size);
    // Evaluates a submitted model & keeps track of the best model
    int256 submission_accuracy = 0;
    if (use_test_data == true) {
      submission_accuracy = model_accuracy(submission_index, test_data);
    } else {
      submission_accuracy = model_accuracy(submission_index, train_data);
    }

    // Keep track of the most accurate model
    if (submission_accuracy > best_submission_accuracy) {
      best_submission_index = submission_index;
      best_submission_accuracy = submission_accuracy;
    }
  }

  function cancel_contract() public {
    // Make sure contract is not already terminated
    assert(contract_terminated == false);
    // Contract can only be cancelled if initialization has failed.
    assert(init_level < 3);
    // Refund remaining balance to organizer
    organizer.transfer(this.balance);
    // Terminate contract
    contract_terminated = true;
  }

  function finalize_contract() public {
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Make sure it's not the initialization stage anymore
    assert(init_level == 3);
    // Make sure the contract is finalized after the evaluation stage
    assert(block.number >= init3_block_height + submission_stage_block_size + reveal_test_data_groups_block_size + evaluation_stage_block_size);
    // Get the best submission to compare it against the criteria
    Submission memory best_submission = submission_queue[best_submission_index];
    // If best submission passes criteria, payout to the submitter
    if (best_submission_accuracy >= model_accuracy_criteria) {
      best_submission.payment_address.transfer(this.balance);
    // If the best submission fails the criteria, refund the balance back to the organizer
    } else {
      organizer.transfer(this.balance);
    }
    contract_terminated = true;
  }

  function model_accuracy(uint submission_index, int256[datapoint_size][] data) public constant returns (int256){
    // Make sure contract is not terminated
    assert(contract_terminated == false);
    // Make sure it's not the initialization stage anymore
    assert(init_level == 3);
    // Leave function public for offline error calculation
    // Gets the sum error for the model
    Submission memory sub = submission_queue[submission_index];
    int256 true_prediction = 0;
    int256 false_prediction = 0;
    bool one_hot; // one-hot encoding if prediction size is 1 but model output size is 2
    int[] memory prediction;
    int[] memory ground_truth;
    if ((prediction_size + 1) == sub.num_neurons_output_layer) {
      one_hot = true;
      prediction = new int[](sub.num_neurons_output_layer);
      ground_truth = new int[](sub.num_neurons_output_layer);
    } else {
      one_hot = false;
      prediction = new int[](prediction_size);
      ground_truth = new int[](prediction_size);
    }
    for (uint i = 0; i < data.length; i++) {
      // Get ground truth
      for (uint j = datapoint_size-prediction_size; j < data[i].length; j++) {
        uint d_index = j - datapoint_size + prediction_size;
        // Only get prediction values
        if (one_hot == true) {
          if (data[i][j] == 0) {
            ground_truth[d_index] = 1;
            ground_truth[d_index + 1] = 0;
          } else if (data[i][j] == 1) {
            ground_truth[d_index] = 0;
            ground_truth[d_index + 1] = 1;
          } else {
            // One-hot encoding for more than 2 classes is not supported
            require(false);
          }
        } else {
          ground_truth[d_index] = data[i][j];
        }
      }
      // Get prediction
      prediction = get_prediction(sub, data[i]);
      // Get error for the output layer
      for (uint k = 0; k < ground_truth.length; k++) {
        if (ground_truth[k] == prediction[k]) {
          true_prediction += 1;
        } else {
          false_prediction += 1;
        }
      }
    }
    // We multiply by int_precision to get up to x decimal points of precision while
    // calculating the accuracy
    return (true_prediction * int_precision) / (true_prediction + false_prediction);
  }

  function get_train_data_length() public view returns(uint256) {
    return train_data.length;
  }

  function get_test_data_length() public view returns(uint256) {
    return test_data.length;
  }

  function round_up_division(int256 dividend, int256 divisor) private pure returns(int256) {
    // A special trick since Solidity normally rounds down
    return (dividend + divisor -1) / divisor;
  }

  function not_in_train_partition(uint[training_data_group_size/partition_size] partition, uint number) private pure returns (bool) {
    for (uint i = 0; i < partition.length; i++) {
      if (number == partition[i]) {
        return false;
      }
    }
    return true;
  }

  function randomly_select_index(uint[] array) private {
    uint t_index = 0;
    uint array_length = array.length;
    uint block_i = 0;
    // Randomly select training indexes
    while(t_index < training_partition.length) {
      uint random_index = uint(sha256(block.blockhash(block.number-block_i))) % array_length;
      training_partition[t_index] = array[random_index];
      array[random_index] = array[array_length-1];
      array_length--;
      block_i++;
      t_index++;
    }
    t_index = 0;
    while(t_index < testing_partition.length) {
      testing_partition[t_index] = array[array_length-1];
      array_length--;
      t_index++;
    }
  }

  function valid_weights(int[] weights, uint num_neurons_input_layer, uint num_neurons_output_layer, uint[] num_neurons_hidden_layer) private pure returns (bool) {
    // make sure the number of weights match the network structure
    // get number of weights based on network structure
    uint ns_total = 0;
    uint wa_total = 0;
    uint number_of_layers = 2 + num_neurons_hidden_layer.length;

    if (number_of_layers == 2) {
      ns_total = num_neurons_input_layer * num_neurons_output_layer;
    } else {
      for(uint i = 0; i < num_neurons_hidden_layer.length; i++) {
        // Get weights between first hidden layer and input layer
        if (i==0){
          ns_total += num_neurons_input_layer * num_neurons_hidden_layer[i];
        // Get weights between hidden layers
        } else {
          ns_total += num_neurons_hidden_layer[i-1] * num_neurons_hidden_layer[i];
        }
      }
      // Get weights between last hidden layer and output layer
      ns_total += num_neurons_hidden_layer[num_neurons_hidden_layer.length-1] * num_neurons_output_layer;
    }
    // get number of weights in the weights array
    wa_total = weights.length;

    return ns_total == wa_total;
  }

    function unpack_data_groups(int256[] _data_groups, bool is_train_data) private {
    int256[datapoint_size][] memory merged_data_group = new int256[datapoint_size][](_data_groups.length/datapoint_size);

    for (uint i = 0; i < _data_groups.length/datapoint_size; i++) {
      for (uint j = 0; j < datapoint_size; j++) {
        merged_data_group[i][j] = _data_groups[i*datapoint_size + j];
      }
    }
    if (is_train_data == true) {
      // Assign training data
      for (uint k = 0; k < merged_data_group.length; k++) {
        train_data.push(merged_data_group[k]);
      }
    } else {
      // Assign testing data
      for (uint l = 0; l < merged_data_group.length; l++) {
        test_data.push(merged_data_group[l]);
      }
    }
  }

    function sha_data_group(int256[] data_group, int256 data_group_nonce) private pure returns (bytes32) {
      // Extract the relevant data points for the given data group index
      // We concat all data groups and add the nonce to the end of the array
      // and get the sha256 for the array
      uint index_tracker = 0;
      uint256 total_size = datapoint_size * partition_size;
      /* uint256 start_index = data_group_index * total_size;
      uint256 iter_limit = start_index + total_size; */
      int256[] memory all_data_points = new int256[](total_size+1);

      for (uint256 i = 0; i < total_size; i++) {
        all_data_points[index_tracker] = data_group[i];
        index_tracker += 1;
      }
      // Add nonce to the whole array
      all_data_points[index_tracker] = data_group_nonce;
      // Return sha256 on all data points + nonce
      return sha256(all_data_points);
    }

  function relu_activation(int256 x) private pure returns (int256) {
    if (x < 0) {
      return 0;
    } else {
      return x;
    }
  }

  function get_layer(uint nn) private pure returns (int256[]) {
    int256[] memory input_layer = new int256[](nn);
    return input_layer;
  }

  function get_hidden_layers(uint[] l_nn) private pure returns (int256[]) {
    uint total_nn = 0;
    // Skip first and last layer since they're not hidden layers
    for (uint i = 1; i < l_nn.length-1; i++) {
      total_nn += l_nn[i];
    }
    int256[] memory hidden_layers = new int256[](total_nn);
    return hidden_layers;
  }

  function access_hidden_layer(int256[] hls, uint[] l_nn, uint index) private pure returns (int256[]) {
    // TODO: Bug is here, doesn't work for between last hidden and output layer
    // Returns the hidden layer from the hidden layers array
    int256[] memory hidden_layer = new int256[](l_nn[index+1]);
    uint hidden_layer_index = 0;
    uint start = 0;
    uint end = 0;
    for (uint i = 0; i < index; i++) {
      start += l_nn[i+1];
    }
    for (uint j = 0; j < (index + 1); j++) {
      end += l_nn[j+1];
    }
    for (uint h_i = start; h_i < end; h_i++) {
      hidden_layer[hidden_layer_index] = hls[h_i];
      hidden_layer_index += 1;
    }
    return hidden_layer;
  }

  function get_prediction(Submission sub, int[datapoint_size] data_point) private pure returns(int256[]) {
    uint[] memory l_nn = new uint[](sub.num_neurons_hidden_layer.length + 2);
    l_nn[0] = sub.num_neurons_input_layer;
    for (uint i = 0; i < sub.num_neurons_hidden_layer.length; i++) {
      l_nn[i+1] = sub.num_neurons_hidden_layer[i];
    }
    l_nn[sub.num_neurons_hidden_layer.length+1] = sub.num_neurons_output_layer;
    return forward_pass(data_point, sub.weights, sub.biases, l_nn);
  }

  function forward_pass(int[datapoint_size] data_point, int256[] weights, int256[] biases, uint[] l_nn) private pure returns (int256[]) {
    // Initialize neuron arrays
    int256[] memory input_layer = get_layer(l_nn[0]);
    int256[] memory hidden_layers = get_hidden_layers(l_nn);
    int256[] memory output_layer = get_layer(l_nn[l_nn.length-1]);

    // load inputs from input layer
    for (uint input_i = 0; input_i < l_nn[0]; input_i++) {
      input_layer[input_i] = data_point[input_i];
    }
    return forward_pass2(l_nn, input_layer, hidden_layers, output_layer, weights, biases);
  }

  function forward_pass2(uint[] l_nn, int256[] input_layer, int256[] hidden_layers, int256[] output_layer, int256[] weights, int256[] biases) public pure returns (int256[]) {
    // index_counter[0] is weight index
    // index_counter[1] is hidden_layer_index
    uint[] memory index_counter = new uint[](2);
    for (uint layer_i = 0; layer_i < (l_nn.length-1); layer_i++) {
      int256[] memory current_layer;
      int256[] memory prev_layer;
      // If between input and first hidden layer
      if (hidden_layers.length != 0) {
        if (layer_i == 0) {
          current_layer = access_hidden_layer(hidden_layers, l_nn, layer_i);
          prev_layer = input_layer;
        // If between output and last hidden layer
        } else if (layer_i == (l_nn.length-2)) {
          current_layer = output_layer;
          prev_layer = access_hidden_layer(hidden_layers, l_nn, (layer_i-1));
        // If between hidden layers
        } else {
          current_layer = access_hidden_layer(hidden_layers, l_nn, layer_i);
          prev_layer = access_hidden_layer(hidden_layers, l_nn, layer_i-1);
        }
      } else {
        current_layer = output_layer;
        prev_layer = input_layer;
      }
      for (uint layer_neuron_i = 0; layer_neuron_i < current_layer.length; layer_neuron_i++) {
        int total = 0;
        for (uint prev_layer_neuron_i = 0; prev_layer_neuron_i < prev_layer.length; prev_layer_neuron_i++) {
          total += prev_layer[prev_layer_neuron_i] * weights[index_counter[0]];
          index_counter[0]++;
        }
        total += biases[layer_i];
        total = total / int_precision; // Divide by int_precision to scale down
        // If between output and last hidden layer
        if (layer_i == (l_nn.length-2)) {
            output_layer[layer_neuron_i] = relu_activation(total);
        } else {
            hidden_layers[index_counter[1]] = relu_activation(total);
        }
        index_counter[1]++;
      }
    }
    return output_layer;
  }

  // Fallback function for sending ether to this contract
  function () public payable {}
}