You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
当前不支持获取bfloat16的浮点数信息, 会报错 (Getting floating-point info for bfloat16 is currently unsupported — `np.finfo` raises a ValueError because NumPy does not treat the third-party bfloat16 type as an inexact dtype)
Traceback (most recent call last):
File "/home/opsdev/zqh/1836_full.py", line 88, in &lt;module&gt;
inference(args)
File "/home/opsdev/zqh/1836_full.py", line 67, in inference
outputs = model.generate(
File "/root/miniconda3/envs/ci3.9/lib/python3.9/contextlib.py", line 79, in inner
return func(*args, **kwds)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/generation/utils.py", line 2025, in generate
result = self._sample(
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/generation/utils.py", line 3038, in _sample
outputs = self(**model_inputs, return_dict=True)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/core/nn/modules/module.py", line 390, in _wrapped_call_impl
return self.forward(*args, **kwargs)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/models/qwen2/modeling_qwen2.py", line 765, in forward
outputs = self.model(
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/core/nn/modules/module.py", line 390, in _wrapped_call_impl
return self.forward(*args, **kwargs)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/models/qwen2/modeling_qwen2.py", line 565, in forward
causal_mask = self._update_causal_mask(
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/models/qwen2/modeling_qwen2.py", line 663, in _update_causal_mask
min_dtype = float(ops.finfo(dtype).min)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/core/ops/other.py", line 669, in finfo
return np.finfo(mindspore.dtype_to_nptype(dtype))
File "/home/opsdev/.local/lib/python3.9/site-packages/numpy/core/getlimits.py", line 398, in new
raise ValueError("data type %r not inexact" % (dtype))
ValueError: data type <class 'bfloat16'> not inexact
The text was updated successfully, but these errors were encountered:
当前不支持获取bfloat16的浮点数信息, 会报错 (Getting floating-point info for bfloat16 is currently unsupported — `np.finfo` raises a ValueError because NumPy does not treat the third-party bfloat16 type as an inexact dtype)
Traceback (most recent call last):
File "/home/opsdev/zqh/1836_full.py", line 88, in &lt;module&gt;
inference(args)
File "/home/opsdev/zqh/1836_full.py", line 67, in inference
outputs = model.generate(
File "/root/miniconda3/envs/ci3.9/lib/python3.9/contextlib.py", line 79, in inner
return func(*args, **kwds)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/generation/utils.py", line 2025, in generate
result = self._sample(
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/generation/utils.py", line 3038, in _sample
outputs = self(**model_inputs, return_dict=True)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/core/nn/modules/module.py", line 390, in _wrapped_call_impl
return self.forward(*args, **kwargs)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/models/qwen2/modeling_qwen2.py", line 765, in forward
outputs = self.model(
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/core/nn/modules/module.py", line 390, in _wrapped_call_impl
return self.forward(*args, **kwargs)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/models/qwen2/modeling_qwen2.py", line 565, in forward
causal_mask = self._update_causal_mask(
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/transformers/models/qwen2/modeling_qwen2.py", line 663, in _update_causal_mask
min_dtype = float(ops.finfo(dtype).min)
File "/home/opsdev/.local/lib/python3.9/site-packages/mindnlp/core/ops/other.py", line 669, in finfo
return np.finfo(mindspore.dtype_to_nptype(dtype))
File "/home/opsdev/.local/lib/python3.9/site-packages/numpy/core/getlimits.py", line 398, in new
raise ValueError("data type %r not inexact" % (dtype))
ValueError: data type <class 'bfloat16'> not inexact
The text was updated successfully, but these errors were encountered: