# Magpie描述符预测材料性能(案例) — predicting material properties with Magpie descriptors (worked example)
# (代码自用 — code for personal use)
import pandas as pd

# Load the preprocessed dataset.  Expects a 'Formula' column holding chemical
# formula strings (e.g. "Fe2O3") plus the target property.
# NOTE(review): absolute path — confirm it matches the runtime environment.
df = pd.read_excel("/processed Data.xlsx")

from matminer.featurizers.conversions import StrToComposition
from matminer.featurizers.base import MultipleFeaturizer
from matminer.featurizers import composition as cf

# Parse each formula string into a pymatgen Composition; this adds a
# 'composition' column that the featurizers below consume.
# (Fixed: the original line ended in a stray ``` left over from a
# notebook/markdown paste, which made the script a syntax error.)
str_to_comp = StrToComposition()
df3 = str_to_comp.featurize_dataframe(df, col_id='Formula')
# Composition-based feature set (Magpie-style descriptors).
feature_calculators = MultipleFeaturizer([
    cf.Stoichiometry(),                        # p-norms of the composition vector
    cf.ElementProperty.from_preset("magpie"),  # Magpie elemental-property statistics
    cf.ValenceOrbital(props=['avg']),          # average valence-orbital occupancies
    cf.IonProperty(fast=True),                 # ionic-character features
    cf.BandCenter(),
    cf.AtomicOrbitals(),
])
feature_labels = feature_calculators.feature_labels()

# ignore_errors=True leaves NaN in any row whose featurization failed
# instead of raising, so failures must be cleaned up afterwards.
df4 = feature_calculators.featurize_dataframe(df3, col_id='composition', ignore_errors=True)

# Keep only the numeric columns (describe() summarizes numeric data), then
# drop rows containing NaN — without this, rows that failed featurization
# would crash the regressor fit later on.
column = df4.describe().columns
df4 = df4[column].dropna()

# Target is taken as the first numeric column; features are columns 1..149.
# NOTE(review): hard-coded 1:150 slice — confirm it covers the intended
# feature set for this particular spreadsheet layout.
P = df4.iloc[:, 1:150]   # feature matrix
Q = df4.iloc[:, 0]       # target property
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import VarianceThreshold  # NOTE: unused; the zero-variance drop below is done manually

# NOTE: the original code standardized P here with fit_transform on the FULL
# dataset before the train/test split — a leakage pattern, and redundant:
# Pearson correlation and the zero-variance check are invariant to per-column
# affine scaling, and the data is re-standardized (train-fit only) after the
# split.  The pre-split scaling has therefore been removed; downstream
# results are unchanged.

# Drop one feature from every pair with |Pearson r| > 0.8 to reduce
# multicollinearity.  (Selecting on the full data is itself mild leakage;
# acceptable for this exploratory workflow — TODO confirm.)
correlated_features = set()
correlation_matrix = P.corr()
for i in range(len(correlation_matrix.columns)):
    for j in range(i):
        if abs(correlation_matrix.iloc[i, j]) > 0.8:
            correlated_features.add(correlation_matrix.columns[i])
P = P.drop(labels=correlated_features, axis=1)

# Remove constant (zero-variance) features — they carry no information.
df_var = P.var()
var_0 = df_var.index[df_var.values == 0]
P = P.drop(var_0, axis=1)
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Hold out 10% of the samples for testing; the fixed seed keeps the split
# repeatable across runs.
X_train, X_test, Y_train, Y_test = train_test_split(
    P, Q, test_size=0.1, random_state=1
)

# Standardize features: fit statistics on the training split only, then
# apply that same transform to the hold-out set (no test-set leakage).
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
import numpy as np
from sklearn.model_selection import RandomizedSearchCV
from sklearn.ensemble import GradientBoostingRegressor

# Hyperparameter search space for gradient-boosted regression.
n_estimators = [int(x) for x in np.linspace(start=100, stop=1200, num=12)]
learning_rate = [0.1, 0.01, 0.001]
max_depth = [int(x) for x in np.linspace(1, 30, num=10)]
random_state = 1
loss = ['squared_error', 'absolute_error', 'huber', 'quantile']
random_grid = {
    'n_estimators': n_estimators,
    'learning_rate': learning_rate,
    'max_depth': max_depth,
    'loss': loss,
}
print(random_grid)

# Fix: `random_state = 1` was defined but never used (it had been commented
# out of the grid — where a bare scalar would not be a valid entry anyway).
# Pass it to both the estimator and the search so runs are reproducible.
gbr = GradientBoostingRegressor(random_state=random_state)
gbr_random = RandomizedSearchCV(
    estimator=gbr,
    param_distributions=random_grid,
    scoring='neg_mean_squared_error',
    n_iter=10,
    cv=5,
    verbose=2,
    random_state=random_state,
    n_jobs=-1,  # parallelize the CV fits across all cores
)
gbr_random.fit(X_train, Y_train)
# Fix: a bare `gbr_random.best_params_` expression only displays in a
# notebook; print it so a plain script reports the chosen hyperparameters.
print(gbr_random.best_params_)
Y_pred = gbr_random.predict(X_test)

import matplotlib.pyplot as plt
import numpy as np

# Parity plot: predicted vs. measured values with a y = x reference line.
# Fix: axis limits are derived from the data instead of the original
# hard-coded [21500, 24500] range, so the plot works for any target scale.
lo = min(np.min(Y_test), np.min(Y_pred))
hi = max(np.max(Y_test), np.max(Y_pred))
plt.scatter(Y_test, Y_pred)
plt.plot([lo, hi], [lo, hi])
plt.xlabel("Measured")
plt.ylabel("Predicted")
plt.show()

from math import sqrt
from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score

# Fix: the original computed MAE/MSE but never printed them and imported
# sqrt without using it — report MAE, RMSE, and R^2 together.
MAE = mean_absolute_error(Y_test, Y_pred)
MSE = mean_squared_error(Y_test, Y_pred)
print("MAE:", MAE)
print("RMSE:", sqrt(MSE))
print("r2 score:", r2_score(Y_test, Y_pred))