You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
I tested your LayoutLMv3 training code and didn't get any memory issue, but with this new model (LiLT) I get the following error:
/databricks/python/lib/python3.8/site-packages/transformers/models/lilt/modeling_lilt.py in forward(self, input_ids, bbox, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, labels, output_attentions, output_hidden_states, return_dict)
1028 return_dict = return_dict if return_dict is not None else self.config.use_return_dict
1029
-> 1030 outputs = self.lilt(
1031 input_ids,
1032 bbox=bbox,
/databricks/python/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
1188 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1189 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1190 return forward_call(*input, **kwargs)
1191 # Do not call functions when jit is used
1192 full_backward_hooks, non_full_backward_hooks = [], []
/databricks/python/lib/python3.8/site-packages/transformers/models/lilt/modeling_lilt.py in forward(self, input_ids, bbox, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, output_attentions, output_hidden_states, return_dict)
816 )
817
--> 818 layout_embedding_output = self.layout_embeddings(bbox=bbox, position_ids=position_ids)
819
820 encoder_outputs = self.encoder(
/databricks/python/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
1188 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1189 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1190 return forward_call(*input, **kwargs)
1191 # Do not call functions when jit is used
1192 full_backward_hooks, non_full_backward_hooks = [], []
/databricks/python/lib/python3.8/site-packages/transformers/models/lilt/modeling_lilt.py in forward(self, bbox, position_ids)
179 dim=-1,
180 )
--> 181 spatial_position_embeddings = self.box_linear_embeddings(spatial_position_embeddings)
182 box_position_embeddings = self.box_position_embeddings(position_ids)
183
/databricks/python/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
1188 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
1189 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1190 return forward_call(*input, **kwargs)
1191 # Do not call functions when jit is used
1192 full_backward_hooks, non_full_backward_hooks = [], []
/databricks/python/lib/python3.8/site-packages/torch/nn/modules/linear.py in forward(self, input)
112
113 def forward(self, input: Tensor) -> Tensor:
--> 114 return F.linear(input, self.weight, self.bias)
115
116 def extra_repr(self) -> str:
RuntimeError: CUDA error: CUBLAS_STATUS_NOT_INITIALIZED when calling `cublasCreate(handle)`
The text was updated successfully, but these errors were encountered:
I tested your LayoutLMv3 training code and didn't get any memory issue, but with this new model (LiLT) I get the following error:
The text was updated successfully, but these errors were encountered: