import numpy as np
import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping
# Build a synthetic binary-classification dataset: 20,000 random 2-D points
# in [0, 1)^2, labeled 1 when the coordinate sum exceeds 1.5, else 0.
X = np.random.rand(20000, 2)
y = (X.sum(axis=1) > 1.5).astype(int)
# Hand the data to TensorFlow as float32 tensors.
X = tf.convert_to_tensor(X, dtype=tf.float32)
y = tf.convert_to_tensor(y, dtype=tf.float32)
# Logistic regression expressed in Keras: a single sigmoid unit over the
# two input features.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(1, input_shape=(2,), activation='sigmoid'))

# Binary cross-entropy loss; Adam optimizer (adaptive learning rate,
# fairly insensitive to hyper-parameter choice).
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Stop training once the training loss has improved by less than 1e-3
# for 10 consecutive epochs.
early_stopping = EarlyStopping(monitor='loss', patience=10, min_delta=0.001)

# Train for at most 500 epochs; EarlyStopping usually ends it sooner.
history = model.fit(X, y, epochs=500, batch_size=64, callbacks=[early_stopping])
# Report the best metrics observed over the whole training run.
best_accuracy = max(history.history['accuracy'])
best_loss = min(history.history['loss'])
print("Training accuracy:", best_accuracy)
print("Training loss:", best_loss)
# Interactive prediction loop: read a 2-D sample from stdin, then print the
# predicted class (probability thresholded at 0.5) and the raw sigmoid output.
# Fixes vs. original: the loop body was unindented (a SyntaxError as pasted),
# and a non-numeric entry crashed the whole program instead of re-prompting.
THRESHOLD = 0.5
while True:
    x1 = input("x1: ")
    x2 = input("x2: ")
    try:
        new_samples = np.array([[float(x1), float(x2)]], dtype=np.float32)
    except ValueError:
        # Bad input: tell the user and re-prompt rather than crashing.
        print("Please enter numeric values for x1 and x2.")
        continue
    predictions = model.predict(new_samples)
    predicted_labels = (predictions >= THRESHOLD).astype(int)
    print("Predicted labels for new samples:", predicted_labels)
    print("Predictions for new samples:", predictions)
# TensorFlow logistic regression - simple example
# (original blog post published 2023-12-18 17:30:00)