Kaggle_tweet_emotion_bert

import pandas as pd
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
import os
import tqdm
import tensorflow as tf
print(tf.__version__)
import re
from tensorflow import keras
from sklearn.model_selection import train_test_split
from nltk.corpus import stopwords
import nltk
import tensorflow_hub as hub
import tokenization  # tokenization.py from the google-research/bert repo
2.0.0
# Load data
path_home = r"D:\pro\tianchi\kaggle_tweet_emotion"
data_train = pd.read_csv(os.path.join(path_home,"train.csv"),encoding="utf-8")
data_test = pd.read_csv(os.path.join(path_home,"test.csv"),encoding="utf-8")
data_submit = pd.read_csv(os.path.join(path_home,"sample_submission.csv"),encoding="utf-8")
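# Quick sanity check of what was loaded: the train set should carry the columns
# id / keyword / location / text / target, and the test set the same minus target.
print(data_train.shape, data_test.shape)
print(data_train.columns.tolist())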
# data_train_copy = data_train.copy()
# data_train_copy = data_train_copy.fillna("nan")
# ag = data_train_copy.groupby("keyword").agg({"text":np.size,"target":np.mean}).rename(
#     columns={"text":"count","target":"disaster_probability"})
# ag = ag.sort_values("disaster_probability",ascending=False)
# Data cleaning
stopwords_english = stopwords.words("english")  # loaded here but not applied below
# print(stopwords_english)
def cleanword(s):
    """Lower-case a tweet and strip URLs, mentions, digits, mojibake, and punctuation."""
    s = s.lower()
    s = re.sub(r"http\S*", " ", s)   # URLs
    s = re.sub(r"@\S*", " ", s)      # @mentions
    s = re.sub(r"\d+", " ", s)       # digit runs
    s = re.sub(r"\x89\S*", " ", s)   # mojibake runs left over from bad encodings (e.g. \x89Û_)
    for ch in "\n,?.[]!:-#|();=></":
        s = s.replace(ch, " ")       # punctuation
    # collapse consecutive spaces and trim
    s = " ".join(s.split())
    return s
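# Illustration on a made-up tweet: URL, @mention, digits, and punctuation are all stripped.
print(cleanword("13,000 people receive #wildfires evacuation orders http://t.co/abc @user"))
# -> 'people receive wildfires evacuation orders'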
data_test['text'] = data_test['text'].apply(cleanword)
data_train['text'] = data_train['text'].apply(cleanword)
# Load a locally saved BERT model through TF Hub

# Use a model downloaded from TF Hub and saved locally: https://hub.tensorflow.google.cn
"""
1:找到需要的包的下载网址,
2:修改网址:把tfhub.dev替换为storage.googleapis.com/tfhub-modules,并附加.tar.gz作为后缀,下载压缩包
引用博客:https://zhuanlan.zhihu.com/p/64069911
"""
path_bert = r"D:\pro\model\bert_en_uncased_L-12_H-768_A-12-2"
spec = hub.load(path_bert)
bert_layer = hub.KerasLayer(spec,trainable=True)
vocab_file = bert_layer.resolved_object.vocab_file.asset_path.numpy()
do_lower_case = bert_layer.resolved_object.do_lower_case.numpy()
tokenizer = tokenization.FullTokenizer(vocab_file, do_lower_case)  # build the WordPiece tokenizer
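# Quick tokenizer check. WordPiece splits out-of-vocabulary words into '##' subword
# pieces (exact splits depend on the vocab file shipped with the module).
print(tokenizer.tokenize("wildfires evacuation"))
print(tokenizer.convert_tokens_to_ids(["[CLS]", "[SEP]"]))  # [101, 102] in the standard uncased vocab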
def bert_encode(texts, tokenizer, max_length=20):
    """Convert raw texts into the (token ids, masks, segment ids) triple BERT expects."""
    all_tokens = []
    all_masks = []
    all_segments = []

    for text in texts:
        text = tokenizer.tokenize(text)               # split into WordPiece tokens
        text = text[:max_length - 2]                  # leave room for [CLS] and [SEP]
        input_sequence = ["[CLS]"] + text + ["[SEP]"]
        pad_len = max_length - len(input_sequence)
        tokens = tokenizer.convert_tokens_to_ids(input_sequence)
        tokens += [0] * pad_len                       # pad token ids with 0
        pad_masks = [1] * len(input_sequence) + [0] * pad_len   # 1 = real token, 0 = padding
        segment_ids = [0] * max_length                # single sentence -> all segment 0

        all_tokens.append(tokens)
        all_masks.append(pad_masks)
        all_segments.append(segment_ids)

    return np.array(all_tokens), np.array(all_masks), np.array(all_segments)
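# Shape check on a single sentence: each returned array is (num_texts, max_length).
ids, masks, segs = bert_encode(["forest fire near la ronge"], tokenizer, max_length=10)
print(ids.shape, masks.shape, segs.shape)  # (1, 10) (1, 10) (1, 10)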
        
    
max_seq_length = 20
def build_model(bert_layer,max_len):
    input_word_ids = keras.layers.Input(shape=(max_len,), dtype=tf.int32, name="input_word_ids")
    input_mask = keras.layers.Input(shape=(max_len,), dtype=tf.int32, name="input_mask")
    segment_ids = keras.layers.Input(shape=(max_len,), dtype=tf.int32, name="segment_ids")

    # bert_layer returns (pooled_output, sequence_output); take the [CLS] position vector
    _, sequence_output = bert_layer([input_word_ids, input_mask, segment_ids])
    clf_output = sequence_output[:, 0, :]
    out = keras.layers.Dense(1, activation='sigmoid')(clf_output)

    model = keras.models.Model(inputs=[input_word_ids, input_mask, segment_ids], outputs=out)
    model.compile(keras.optimizers.Adam(learning_rate=2e-6), loss='binary_crossentropy', metrics=['accuracy'])
    
    return model
    
model = build_model(bert_layer,max_seq_length)
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_word_ids (InputLayer)     [(None, 20)]         0                                            
__________________________________________________________________________________________________
input_mask (InputLayer)         [(None, 20)]         0                                            
__________________________________________________________________________________________________
segment_ids (InputLayer)        [(None, 20)]         0                                            
__________________________________________________________________________________________________
keras_layer_1 (KerasLayer)      [(None, 768), (None, 109482241   input_word_ids[0][0]             
                                                                 input_mask[0][0]                 
                                                                 segment_ids[0][0]                
__________________________________________________________________________________________________
tf_op_layer_strided_slice (Tens [(None, 768)]        0           keras_layer_1[0][1]              
__________________________________________________________________________________________________
dense (Dense)                   (None, 1)            769         tf_op_layer_strided_slice[0][0]  
==================================================================================================
Total params: 109,483,010
Trainable params: 109,483,009
Non-trainable params: 1
__________________________________________________________________________________________________
# Smoke test on the first 10 rows only
x_train_input = bert_encode(data_train["text"].values.tolist()[:10], tokenizer, 20)
y_input = np.array(data_train['target'].tolist()[:10])
x_test_input = bert_encode(data_test["text"].values.tolist()[:10], tokenizer, 20)
x_train_input[2]  # segment ids: all zeros for single-sentence inputs
array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
       [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])
history = model.fit(
    x_train_input,
    y_input,
    epochs=1,
    batch_size=2,
)
Train on 10 samples
 2/10 [=====>........................] - ETA: 59s


---------------------------------------------------------------------------
NotFoundError                             Traceback (most recent call last)
<ipython-input-14-63795db02d11> in <module>
      3     y_input,
      4     epochs=1,
----> 5     batch_size=2,
      6 )

...

NotFoundError:  [_Derived_]No gradient defined for op: StatefulPartitionedCall
	 [[{{node Func/_4}}]]
	 [[PartitionedCall/gradients/StatefulPartitionedCall_grad/PartitionedCall/gradients/StatefulPartitionedCall_grad/SymbolicGradient]] [Op:__inference_distributed_function_45886]

Function call stack:
distributed_function
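This crash is a known limitation of TensorFlow 2.0.0 rather than a bug in the code above: a model restored from a SavedModel is wrapped in StatefulPartitionedCall ops, and TF 2.0.0 registers no gradient for them, so fine-tuning a hub.KerasLayer with trainable=True fails even though the forward pass works. Two common workarounds (a sketch, not verified in this environment):

# Option 1: upgrade TensorFlow, where gradients through SavedModel calls are supported
#   pip install "tensorflow>=2.1"
# Option 2: freeze BERT and train only the classification head
bert_layer = hub.KerasLayer(path_bert, trainable=False)
model = build_model(bert_layer, max_seq_length)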
# Inference still works (no gradients needed); the Dense head is untrained,
# so the probabilities below hover around 0.5.
result = model.predict(x_test_input)
print(result)
[[0.50043786]
 [0.6113935 ]
 [0.54540795]
 [0.5461257 ]
 [0.623365  ]
 [0.5049557 ]
 [0.53970295]
 [0.44961035]
 [0.5201147 ]
 [0.55124974]]
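# Had training succeeded, the last step would fill in the sample submission loaded earlier.
# Sketch only: it assumes predictions for the full test set, not the 10-row smoke test above.
preds = (model.predict(x_test_input)[:, 0] > 0.5).astype(int)
data_submit["target"] = preds  # lengths must match the full test set
data_submit.to_csv(os.path.join(path_home, "submission.csv"), index=False)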
