What does this MindSpore error mean: RuntimeError: _Map_base::at

Problem description:

[Module]

mindspore 1.6

[Steps & Observed Problem]

1. I am training a neural network with MindSpore (msp); the operator involved is a custom act_fun.

2. The first batch of images trains without error, but training the second batch raises:

RuntimeError: _Map_base::at

[Screenshots]

Operator:

Network: (the marked line fails once the second batch of images enters the network for training)

[Log] (optional; log content or attachment)

RuntimeError                              Traceback (most recent call last)

~\AppData\Local\Temp/ipykernel_2524/1719793408.py in <module>

     14     os.mkdir(save_path)

     15 n_epoch = 20

---> 16 train(n_epoch,ckpoint,False) # call the training function

     17

     18 eval()

~\AppData\Local\Temp/ipykernel_2524/1719793408.py in train(epoch, ckpoint_cb, sink_mode)

      1 # define the training function

      2 def train(epoch, ckpoint_cb, sink_mode):

----> 3     model.train(epoch,ds_train,callbacks=[ckpoint_cb, LossMonitor(125)], dataset_sink_mode=sink_mode) # print the training loss every 125 steps

      4

      5 # define the evaluation function

F:\miniconda\envs\python39\lib\site-packages\mindspore\train\model.py in train(self, epoch, train_dataset, callbacks, dataset_sink_mode, sink_size)

    720         _device_number_check(self._parallel_mode, self._device_number)

    721

--> 722         self._train(epoch,

    723                     train_dataset,

    724                     callbacks=callbacks,

F:\miniconda\envs\python39\lib\site-packages\mindspore\train\model.py in _train(self, epoch, train_dataset, callbacks, dataset_sink_mode, sink_size)

    496             self._check_reuse_dataset(train_dataset)

    497             if not dataset_sink_mode:

--> 498                 self._train_process(epoch, train_dataset, list_callback, cb_params)

    499             elif context.get_context("device_target") == "CPU":

    500                 logger.warning("The CPU cannot support dataset sink mode currently."

F:\miniconda\envs\python39\lib\site-packages\mindspore\train\model.py in _train_process(self, epoch, train_dataset, list_callback, cb_params)

    624                 cb_params.train_dataset_element = next_element

    625                 list_callback.step_begin(run_context)

--> 626                 outputs = self._train_network(*next_element)

    627                 cb_params.net_outputs = outputs

    628                 if self._loss_scale_manager and self._loss_scale_manager.get_drop_overflow_update():

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    431             except Exception as err:

    432                 _pynative_executor.clear_res()

--> 433                 raise err

    434

    435         if _pynative_executor.is_top_cell():

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    428         with self.CellGuard():

    429             try:

--> 430                 output = self.run_construct(cast_inputs, kwargs)

    431             except Exception as err:

    432                 _pynative_executor.clear_res()

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in run_construct(self, cast_inputs, kwargs)

    350             output = self._hook_construct(*cast_inputs)

    351         else:

--> 352             output = self.construct(*cast_inputs, **kwargs)

    353         return output

    354

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\wrap\cell_wrapper.py in construct(self, *inputs)

    351

    352     def construct(self, *inputs):

--> 353         loss = self.network(*inputs)

    354         sens = F.fill(loss.dtype, loss.shape, self.sens)

    355         grads = self.grad(self.network, self.weights)(*inputs, sens)

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    431             except Exception as err:

    432                 _pynative_executor.clear_res()

--> 433                 raise err

    434

    435         if _pynative_executor.is_top_cell():

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    428         with self.CellGuard():

    429             try:

--> 430                 output = self.run_construct(cast_inputs, kwargs)

    431             except Exception as err:

    432                 _pynative_executor.clear_res()

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in run_construct(self, cast_inputs, kwargs)

    350             output = self._hook_construct(*cast_inputs)

    351         else:

--> 352             output = self.construct(*cast_inputs, **kwargs)

    353         return output

    354

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\wrap\cell_wrapper.py in construct(self, data, label)

    108

    109     def construct(self, data, label):

--> 110         out = self._backbone(data)

    111         return self._loss_fn(out, label)

    112

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    431             except Exception as err:

    432                 _pynative_executor.clear_res()

--> 433                 raise err

    434

    435         if _pynative_executor.is_top_cell():

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    428         with self.CellGuard():

    429             try:

--> 430                 output = self.run_construct(cast_inputs, kwargs)

    431             except Exception as err:

    432                 _pynative_executor.clear_res()

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in run_construct(self, cast_inputs, kwargs)

    350             output = self._hook_construct(*cast_inputs)

    351         else:

--> 352             output = self.construct(*cast_inputs, **kwargs)

    353         return output

    354

~\AppData\Local\Temp/ipykernel_2524/872694673.py in construct(self, x, time_window)

     37         x = self.avgpool_op(c1_spike)

     38         print("?")

---> 39         c2_mem, c2_spike = mem_update(self.conv2, x, c2_mem, c2_spike, self.lateral2)

     40         print("!!")

     41

~\AppData\Local\Temp/ipykernel_2524/3497521393.py in mem_update(ops, x, mem, spike, lateral)

     25         mem += lateral(spike)

     26     print(mem)

---> 27     spike = grad_all(act_fun)(mem)

     28     return mem, spike[0]

F:\miniconda\envs\python39\lib\site-packages\mindspore\ops\composite\base.py in after_grad(*args, **kwargs)

    392             else:

    393                 def after_grad(*args, **kwargs):

--> 394                     return grad_(fn)(*args, **kwargs)

    395

    396         self.grad_fn = after_grad

F:\miniconda\envs\python39\lib\site-packages\mindspore\common\api.py in wrapper(*arg, **kwargs)

     76     @wraps(fn)

     77     def wrapper(*arg, **kwargs):

---> 78         results = fn(*arg, **kwargs)

     79

     80         def _convert_data(data):

F:\miniconda\envs\python39\lib\site-packages\mindspore\ops\composite\base.py in after_grad(*args, **kwargs)

    379                 if _pynative_executor.check_graph(fn, *args, **kwargs):

    380                     print("Another grad step is running")

--> 381                 self._pynative_forward_run(grad_, args, kwargs, fn)

    382                 _pynative_executor.grad(grad_, fn, weights, *args, **kwargs)

    383                 out = _pynative_executor(fn, *args, **kwargs)

F:\miniconda\envs\python39\lib\site-packages\mindspore\ops\composite\base.py in _pynative_forward_run(self, grad, args, kwargs, fn)

    354             if not _pynative_executor.check_run(grad, fn, *args, **new_kwargs):

    355                 fn.set_grad()

--> 356                 fn(*args, **new_kwargs)

    357                 fn.set_grad(False)

    358

F:\miniconda\envs\python39\lib\site-packages\mindspore\nn\cell.py in __call__(self, *inputs, **kwargs)

    438         if isinstance(output, Parameter):

    439             output = output.data

--> 440         _pynative_executor.end_graph(self, output, *inputs, **kwargs)

    441         return output

    442

F:\miniconda\envs\python39\lib\site-packages\mindspore\common\api.py in end_graph(self, obj, output, *args, **kwargs)

    380

    381     def end_graph(self, obj, output, *args, **kwargs):

--> 382         self._executor.end_graph(obj, output, *args, *(kwargs.values()))

    383

    384     def check_graph(self, obj, *args, **kwargs):

RuntimeError: _Map_base::at

Answer:

The message _Map_base::at comes from MindSpore's C++ backend: it is the text of the out-of-range exception thrown when an internal std::map lookup fails, so by itself it says little about the root cause. This kind of construction, where GradOperation is applied to a function inside the network (the grad_all(act_fun)(mem) call in mem_update), needs to run in PyNative mode: context.set_context(mode=context.PYNATIVE_MODE)
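A minimal sketch of the fix, assuming the rest of the training script stays unchanged. The mode must be set before the network Cells are built and model.train() is called; get_all=True is an assumption inferred from the spike[0] indexing in mem_update:

from mindspore import context
from mindspore.ops import composite as C

# Switch the whole script to PyNative mode before constructing the network
# and calling model.train().
context.set_context(mode=context.PYNATIVE_MODE)

# Gradient operator used as grad_all(act_fun)(mem) inside mem_update.
# get_all=True (assumed) makes it return a tuple of gradients with respect
# to all inputs, which matches the spike[0] indexing in the question's code.
grad_all = C.GradOperation(get_all=True)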
