# Character-level "translation" demo: overfits a two-layer LSTM so that one
# hard-coded English sentence maps onto one Chinese sentence, token by token.
import os

# Force CPU execution; must be set before TensorFlow initializes any devices.
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.keras import models, layers, optimizers, losses, metrics, utils, activations
from tensorflow.keras.models import Sequential

x_str = 'Where are you from ?'
y_str = '你来自哪里'
x_list = x_str.split(' ')   # source tokens: English words
y_list = list(y_str)        # target tokens: Chinese characters

# 1. Build the shared vocabulary (deduplicated union of source and target tokens).
word_list = list(set(x_list + y_list))
# 2. Token -> integer index.
word_to_num = {w: i for i, w in enumerate(word_list)}
# 3. Encode source and target as index sequences.
x_data = [word_to_num[c] for c in x_list]
y_data = [word_to_num[c] for c in y_list]

# NOTE(review): the reshapes below only succeed because both sequences happen
# to contain exactly 5 tokens; unequal lengths would need padding, not max().
seq_length = max(len(x_data), len(y_data))

# 4. One-hot encode, then add the batch axis: shape (1, seq_length, n_classes).
n_classes = len(word_list)
x = np.reshape(utils.to_categorical(x_data, num_classes=n_classes),
               (-1, seq_length, n_classes))
y = np.reshape(utils.to_categorical(y_data, num_classes=n_classes),
               (-1, seq_length, n_classes))

# Model: two stacked LSTMs returning full sequences, then a per-timestep
# softmax classifier over the vocabulary.
model = Sequential()
model.add(layers.LSTM(units=n_classes, input_shape=(seq_length, n_classes),
                      return_sequences=True))
model.add(layers.LSTM(n_classes, return_sequences=True))
model.add(layers.TimeDistributed(layers.Dense(n_classes)))
model.add(layers.Activation('softmax'))
model.summary()

# FIX: the Keras `compile` API expects `metrics` as a list; the original
# passed the bare callable.
model.compile(optimizer=optimizers.Adam(0.1),
              loss=losses.categorical_crossentropy,
              metrics=[metrics.categorical_accuracy])

# Train on the single (x, y) pair and plot the loss curve.
train_log = model.fit(x, y, epochs=100)
plt.plot(train_log.history['loss'])
plt.show()

# Predict on the training input and decode each timestep's argmax index
# back into a vocabulary token.
predict = model.predict(x)
for pred in predict:
    print("x_str: ", x_str)
    pred_index = np.argmax(pred, axis=1)
    char = [word_list[j] for j in pred_index]
    print("h_str:", ''.join(char))
tensorflow2.x中英文翻译
最新推荐文章于 2023-12-05 20:12:11 发布