# Data processing section
# Load the PTB training corpus and build the vocabulary.
# NOTE: the original used curly “smart” quotes (a SyntaxError) and had lost
# its indentation; both are repaired here.
assert 'ptb.train.txt' in os.listdir("./data/ptb")
with open('./data/ptb/ptb.train.txt', 'r', encoding='utf-8') as f:
    lines = f.readlines()
# One sentence per line; split on whitespace into tokens.
raw_dataset = [st.split() for st in lines]

# Count token frequencies, then keep only tokens appearing at least 5 times.
counter = collections.Counter([tk for st in raw_dataset for tk in st])
counter = dict(filter(lambda x: x[1] >= 5, counter.items()))

# Index <-> token mappings over the filtered vocabulary.
id2token = [tk for tk, _ in counter.items()]
token2id = {tk: idx for idx, tk in enumerate(id2token)}

# Map sentences to token ids, dropping out-of-vocabulary tokens.
# Membership is tested against the dict (O(1)) instead of the list
# `id2token` (O(n) per token, O(n*V) overall) — same keys, same result.
dataset = [[token2id[tk] for tk in st if tk in token2id] for st in raw_dataset]
num_token = sum(len(st) for st in dataset)
def discard(idx):
    """Return True if the token with id `idx` should be dropped.

    Word2vec subsampling (Mikolov et al.): a token w is discarded with
    probability max(0, 1 - sqrt(t / f(w))) where t = 1e-4 and
    f(w) = count(w) / num_token is its relative corpus frequency.
    `1e-4 / count * num_token` below is exactly t / f(w).

    Relies on module-level `counter`, `id2token`, `num_token` and `random`,
    `math` being in scope.
    """
    return random.uniform(0, 1) < 1 - math.sqrt(
        1e-4 / counter[id2token[idx]] * num_token)


# Subsample the corpus: randomly drop frequent tokens from every sentence.
subdataset = [[tk for tk in st if not discard(tk)] for st in dataset]
def compare(tk):
return ‘# %s: before=%d, after=%d’ % (tk
,sum(st.count(token2id[tk]) for st i