问题描述
Traceback (most recent call last):
File "/mnt/lab/XXX/code/promptist/./diff_prompter/ppo_prompter_first_text.py", line 304, in <module>
main(args)
File "/mnt/lab/XXX/code/promptist/./diff_prompter/ppo_prompter_first_text.py", line 284, in main
model = trlx.train(
File "/mnt/lab/XXX/code/promptist/trlx/trlx/trlx.py", line 47, in train
model: AcceleratePPOModel = get_model(config.model.model_type)(config)
File "/mnt/lab/XXX/code/promptist/trlx/trlx/model/accelerate_ppo_model.py", line 75, in __init__
self.load_ckpt(config.model.ckpt_path)
File "/mnt/lab/XXX/code/promptist/trlx/trlx/model/accelerate_ppo_model.py", line 78, in load_ckpt
self.model.module.load_state_dict(torch.load(os.path.join(ckpt_path, "pytorch_model.bin")))
File "/mnt/lab/XXX/anaconda3/envs/promptist/lib/python3.9/site-packages/torch/nn/modules/module.py", line 2041, in load_state_dict
raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for GPTHydraHeadWithValueModel:
Unexpected key(s) in state_dict: "gpt.transformer.h.0.attn.bias",
"gpt.transformer.h.0.attn.masked_bias", "gpt.transformer.h.1.attn.bias",
"gpt.transformer.h.1.attn.masked_bias", "gpt.transformer.h.2.attn.bias",
"gpt.transformer.h.2.attn.masked_bias", "gpt.transformer.h.3.attn.bias",
"gpt.transformer.h.3.attn.masked_bias", "gpt.transformer.h.4.attn.bias",
"gpt.transformer.h.4.attn.masked_bias", "gpt.transformer.h.5.attn.bias",
"gpt.transformer.h.5.attn.masked_bias", "gpt.transformer.h.6.attn.bias",
"gpt.transformer.h.6.attn.masked_bias", "gpt.transformer.h.7.attn.bias",
"gpt.transformer.h.7.attn.masked_bias", "gpt.transformer.h.8.attn.bias",
"gpt.transformer.h.8.attn.masked_bias", "gpt.transformer.h.9.attn.bias",
"gpt.transformer.h.9.attn.masked_bias", "gpt.transformer.h.10.attn.bias",
"gpt.transformer.h.10.attn.masked_bias", "gpt.transformer.h.11.attn.bias",
"gpt.transformer.h.11.attn.masked_bias", "value_model.transformer.h.0.attn.bias",
"value_model.transformer.h.0.attn.masked_bias", "value_model.transformer.h.1.attn.bias",
"value_model.transformer.h.1.attn.masked_bias", "value_model.transformer.h.2.attn.bias",
"value_model.transformer.h.2.attn.masked_bias", "value_model.transformer.h.3.attn.bias",
"value_model.transformer.h.3.attn.masked_bias", "value_model.transformer.h.4.attn.bias",
"value_model.transformer.h.4.attn.masked_bias", "value_model.transformer.h.5.attn.bias",
"value_model.transformer.h.5.attn.masked_bias", "value_model.transformer.h.6.attn.bias",
"value_model.transformer.h.6.attn.masked_bias", "value_model.transformer.h.7.attn.bias",
"value_model.transformer.h.7.attn.masked_bias", "value_model.transformer.h.8.attn.bias",
"value_model.transformer.h.8.attn.masked_bias", "value_model.transformer.h.9.attn.bias",
"value_model.transformer.h.9.attn.masked_bias", "value_model.transformer.h.10.attn.bias",
"value_model.transformer.h.10.attn.masked_bias", "value_model.transformer.h.11.attn.bias",
"value_model.transformer.h.11.attn.masked_bias", "frozen_head.h.0.attn.bias",
"frozen_head.h.0.attn.masked_bias", "frozen_head.h.1.attn.bias",
"frozen_head.h.1.attn.masked_bias".
问题原因
期间未对代码进行任何修改,只是将 transformers 从 4.25.1 升级到了 4.39.3。
原因分析:较新版本的 transformers 不再把 GPT-2 注意力层中的 `attn.bias` 和 `attn.masked_bias` 作为持久化缓冲区保存到 state_dict 中,而旧版本保存的 checkpoint 里仍包含这些键。因此升级后用默认的严格模式(strict=True)调用 `load_state_dict` 时,就会报 "Unexpected key(s) in state_dict" 错误。
问题解决
方案一:将 transformers 从 4.39.3 降级回 4.25.1:
pip install transformers==4.25.1
方案二(无需降级):加载时忽略多余的键,即改用 `load_state_dict(state_dict, strict=False)`;或在加载前手动删除 state_dict 中所有以 `attn.bias` / `attn.masked_bias` 结尾的键。