# Raw drive-end (DE) vibration signals from the CWRU bearing dataset, set A.
# One .mat file per condition: Normal, ball faults (B007/014/021), inner-race
# faults (IR007/014/021) and outer-race faults at 6 o'clock (OR007/014/021@6).
# Each value is the float column vector stored under the given MATLAB key.
_DATA_DIR = "C:/Users/tyh18/Desktop/data/A/"
_DATA_FILES = [
    ("1_Normal_1.mat", "X098_DE_time"),
    ("2_B007_1.mat", "X119_DE_time"),
    ("3_B014_1.mat", "X223_DE_time"),
    ("4_B021_1.mat", "X186_DE_time"),
    ("5_IR007_1.mat", "X106_DE_time"),
    ("6_IR014_1.mat", "X170_DE_time"),
    ("7_IR021_1.mat", "X210_DE_time"),
    ("8_OR007@6_1.mat", "X131_DE_time"),
    ("9_OR014@6_1.mat", "X198_DE_time"),
    ("10_OR021@6_1.mat", "X235_DE_time"),
]
# Keep the individual data1..data10 names: the rest of the script uses them.
(data1, data2, data3, data4, data5,
 data6, data7, data8, data9, data10) = [
    scio.loadmat(_DATA_DIR + fname)[key] for fname, key in _DATA_FILES
]
def ber_train(data, windowlen, buchang, limit=659):
    """Cut overlapping training windows out of a 1-D signal.

    Slides a window of length ``windowlen`` over ``data`` with stride
    ``buchang`` and collects at most ``limit + 1`` windows (window indices
    0..limit), i.e. 660 training windows per class with the default.

    Parameters
    ----------
    data : sequence / array of samples (CWRU signals are (N, 1) arrays)
    windowlen : int, samples per window
    buchang : int, stride ("step") between window start positions
    limit : int, highest window index to keep (default 659, the original
        hard-coded cap)

    Returns
    -------
    list of windows, each ``data[start:start + windowlen]``.
    """
    # Original iterated every offset and tested i % buchang == 0; stepping
    # the range directly visits only the window start positions.
    # NOTE(review): the original range(p - windowlen) excludes the very last
    # full window (start == p - windowlen); kept for compatibility.
    q = len(data) - windowlen
    windows = []
    for k, start in enumerate(range(0, q, buchang)):
        if k > limit:
            break
        windows.append(data[start:start + windowlen])
    return windows
def data_train(data, windowlen, buchang, biaoqian):
    """Build a labelled, normalized training matrix for one fault class.

    Windows the signal with :func:`ber_train`, normalizes each window to
    zero mean and unit maximum absolute value, and appends the class label
    ``biaoqian`` as a final column.

    Returns
    -------
    np.ndarray of shape (n_windows, windowlen + 1); the last column is the
    label, the rest the normalized signal.
    """
    windows = np.asarray(ber_train(data, windowlen, buchang), dtype=float)
    # loadmat signals are (N, 1) columns, so windows is (x, windowlen, 1);
    # flatten the trailing singleton axis into the feature dimension.
    c = windows.reshape(windows.shape[0], -1)
    # Per-row normalization, vectorized (original looped over rows):
    # subtract the row mean, then scale by the row's max absolute value.
    # NOTE(review): a perfectly constant window would divide by zero here,
    # exactly as in the original — confirm inputs never are.
    c = c - c.mean(axis=1, keepdims=True)
    c = c / np.abs(c).max(axis=1, keepdims=True)
    labels = np.zeros((c.shape[0], 1)) + biaoqian
    return np.hstack([c, labels])
def ber_test(data, windowlen, buchang, limit=58):
    """Cut windows out of a 1-D signal for the test split.

    Identical to :func:`ber_train` except for the default cap: at most
    ``limit + 1`` windows (indices 0..limit), i.e. 59 test windows per
    class with the default — matching the val batch size of 590 downstream.

    Parameters
    ----------
    data : sequence / array of samples
    windowlen : int, samples per window
    buchang : int, stride between window start positions
    limit : int, highest window index to keep (default 58)

    Returns
    -------
    list of windows, each ``data[start:start + windowlen]``.
    """
    # Step the range by the stride instead of scanning every offset.
    q = len(data) - windowlen
    windows = []
    for k, start in enumerate(range(0, q, buchang)):
        if k > limit:
            break
        windows.append(data[start:start + windowlen])
    return windows
def data_test(data, windowlen, buchang, biaoqian):
    """Build a labelled, normalized test matrix for one fault class.

    Same pipeline as :func:`data_train` but windows via :func:`ber_test`
    (which keeps only 59 windows by default): zero-mean / max-abs
    normalization per window, label ``biaoqian`` appended as last column.

    Returns
    -------
    np.ndarray of shape (n_windows, windowlen + 1); the last column is the
    label, the rest the normalized signal.
    """
    windows = np.asarray(ber_test(data, windowlen, buchang), dtype=float)
    # (x, windowlen, 1) from the (N, 1) loadmat columns -> (x, windowlen).
    c = windows.reshape(windows.shape[0], -1)
    # Vectorized per-row normalization (original used a Python loop):
    # remove the row mean, scale to unit maximum absolute value.
    # NOTE(review): divides by zero for a constant window, as the original did.
    c = c - c.mean(axis=1, keepdims=True)
    c = c / np.abs(c).max(axis=1, keepdims=True)
    labels = np.zeros((c.shape[0], 1)) + biaoqian
    return np.hstack([c, labels])
# Labelled training windows for the ten classes; window length 2048.
# Strides (731 for Normal, 181/182 for the faults) are tuned so each
# class yields the same number of training windows.
_train_specs = [
    (data1, 731, 1), (data2, 181, 2), (data3, 181, 3), (data4, 182, 4),
    (data5, 182, 5), (data6, 181, 6), (data7, 181, 7), (data8, 182, 8),
    (data9, 182, 9), (data10, 182, 10),
]
(yb1_train, yb2_train, yb3_train, yb4_train, yb5_train,
 yb6_train, yb7_train, yb8_train, yb9_train, yb10_train) = [
    data_train(src, 2048, step, label) for src, step, label in _train_specs
]
# Labelled test windows: stride == window length (2048), i.e. windows
# do not overlap; labels 1..10 follow the data1..data10 order.
(yb1_test, yb2_test, yb3_test, yb4_test, yb5_test,
 yb6_test, yb7_test, yb8_test, yb9_test, yb10_test) = [
    data_test(src, 2048, 2048, label)
    for label, src in enumerate(
        (data1, data2, data3, data4, data5,
         data6, data7, data8, data9, data10),
        start=1,
    )
]
# Stack the per-class matrices into single train / validation matrices.
L_train = np.vstack([yb1_train, yb2_train, yb3_train, yb4_train, yb5_train,
                     yb6_train, yb7_train, yb8_train, yb9_train, yb10_train])
L_test = np.vstack([yb1_test, yb2_test, yb3_test, yb4_test, yb5_test,
                    yb6_test, yb7_test, yb8_test, yb9_test, yb10_test])

# Columns 0..2047 are the signal, column 2048 the class label.
# expand_dims adds a channel axis: (N, 1, 2048) for Conv1d-style models.
X_train = np.expand_dims(L_train[:, 0:2048].astype(float), axis=1)
Y_train = L_train[:, 2048]
X_test = np.expand_dims(L_test[:, 0:2048].astype(float), axis=1)
Y_test = L_test[:, 2048]

# Fit the label encoder once on the training labels and REUSE it for the
# test labels. The original fit a second, independent encoder on the test
# set, which yields a different label -> index mapping whenever the two
# splits don't contain the exact same label values.
encoder = LabelEncoder()
Y_train = encoder.fit_transform(Y_train.ravel())
Y_test = encoder.transform(Y_test.ravel())

X_train, Y_train = torch.FloatTensor(X_train), torch.LongTensor(Y_train)
X_test, Y_test = torch.FloatTensor(X_test), torch.LongTensor(Y_test)
train_dataset = torch.utils.data.TensorDataset(X_train, Y_train)
val_dataset = torch.utils.data.TensorDataset(X_test, Y_test)

train_loader = DataLoader(train_dataset, batch_size=330, shuffle=True)
# Shuffling the validation set serves no purpose; keep evaluation order stable.
val_loader = DataLoader(val_dataset, batch_size=590, shuffle=False)

print(len(val_dataset))
print(len(train_dataset))