# TensorFlow 2.x English-to-Chinese translation demo (character-level sequence model with stacked LSTMs)

import os
os.environ['CUDA_VISIBLE_DEVICES']='-1'
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.keras import models, layers, optimizers, losses, metrics, utils, activations
from tensorflow.keras.models import Sequential

# --- Data preparation: shared vocabulary + one-hot encoding of both sentences ---
x_str = 'Where are you from ?'
y_str = '你来自哪里'

source_tokens = x_str.split(' ')   # English side: whitespace-separated tokens
target_tokens = list(y_str)        # Chinese side: individual characters

# 1. Deduplicated joint vocabulary (ordering is arbitrary because of set()).
word_list = list(set(source_tokens + target_tokens))
n_classes = len(word_list)

# 2. Token -> integer index lookup table.
word_to_num = {token: idx for idx, token in enumerate(word_list)}

# 3. Integer-encode both sequences.
x_data = [word_to_num[tok] for tok in source_tokens]
y_data = [word_to_num[tok] for tok in target_tokens]
seq_length = max(len(x_data), len(y_data))

# 4. One-hot encode and prepend a batch dimension: (1, seq_length, n_classes).
# NOTE(review): the reshape assumes both sequences are exactly seq_length steps
# long (true here: 5 tokens vs 5 characters); unequal lengths would raise.
x = np.reshape(utils.to_categorical(x_data, num_classes=n_classes),
               (-1, seq_length, n_classes))
y = np.reshape(utils.to_categorical(y_data, num_classes=n_classes),
               (-1, seq_length, n_classes))

# --- Model: two stacked LSTMs followed by a per-timestep softmax classifier ---
model = Sequential([
    layers.LSTM(n_classes, input_shape=(seq_length, n_classes), return_sequences=True),
    layers.LSTM(n_classes, return_sequences=True),
    # TimeDistributed applies the same Dense projection at every timestep.
    layers.TimeDistributed(layers.Dense(n_classes)),
    layers.Activation('softmax'),
])
model.summary()

# --- Compile & train ---
# FIX: Keras `compile()` expects `metrics` to be a list/tuple of metrics;
# the original passed the bare function, which TF2 rejects.
model.compile(
    optimizer=optimizers.Adam(0.1),  # deliberately large LR for this tiny toy problem
    loss=losses.categorical_crossentropy,
    metrics=[metrics.categorical_accuracy],
)
# Train; fit() returns a History object whose .history dict holds per-epoch stats.
train_log = model.fit(x, y, epochs=100)
# Plot the training-loss curve.
plt.plot(train_log.history['loss'])
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
# --- Inference: greedy decode by taking the argmax class at every timestep ---
predictions = model.predict(x)
# predictions has one (seq_length, n_classes) probability matrix per batch item.
for timestep_probs in predictions:
    print("x_str: ", x_str)
    # Index of the highest-probability class at each timestep.
    best_indices = np.argmax(timestep_probs, axis=1)
    decoded = ''.join(word_list[idx] for idx in best_indices)
    print("h_str:", decoded)
  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值