---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
Cell In[1], line 98
96 if __name__ == '__main__':
97 print("Downloading pretrained weights and starting fine-tuning...")
---> 98 train()
99 print("\nEvaluating fine-tuned model...")
100 test()
Cell In[1], line 67, in train()
64 labels = labels.to(device)
66 # Forward pass
---> 67 outputs = model(images).logits
68 loss = criterion(outputs, labels)
70 # Backward and optimize
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:1266, in SwinForImageClassification.forward(self, pixel_values, head_mask, labels, output_attentions, output_hidden_states, interpolate_pos_encoding, return_dict)
1258 r"""
1259 labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
1260 Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
1261 config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
1262 `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
1263 """
1264 return_dict = return_dict if return_dict is not None else self.config.use_return_dict
-> 1266 outputs = self.swin(
1267 pixel_values,
1268 head_mask=head_mask,
1269 output_attentions=output_attentions,
1270 output_hidden_states=output_hidden_states,
1271 interpolate_pos_encoding=interpolate_pos_encoding,
1272 return_dict=return_dict,
1273 )
1275 pooled_output = outputs[1]
1277 logits = self.classifier(pooled_output)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:1062, in SwinModel.forward(self, pixel_values, bool_masked_pos, head_mask, output_attentions, output_hidden_states, interpolate_pos_encoding, return_dict)
1056 head_mask = self.get_head_mask(head_mask, len(self.config.depths))
1058 embedding_output, input_dimensions = self.embeddings(
1059 pixel_values, bool_masked_pos=bool_masked_pos, interpolate_pos_encoding=interpolate_pos_encoding
1060 )
-> 1062 encoder_outputs = self.encoder(
1063 embedding_output,
1064 input_dimensions,
1065 head_mask=head_mask,
1066 output_attentions=output_attentions,
1067 output_hidden_states=output_hidden_states,
1068 return_dict=return_dict,
1069 )
1071 sequence_output = encoder_outputs[0]
1072 sequence_output = self.layernorm(sequence_output)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:881, in SwinEncoder.forward(self, hidden_states, input_dimensions, head_mask, output_attentions, output_hidden_states, output_hidden_states_before_downsampling, always_partition, return_dict)
872 layer_outputs = self._gradient_checkpointing_func(
873 layer_module.__call__,
874 hidden_states,
(...) 878 always_partition,
879 )
880 else:
--> 881 layer_outputs = layer_module(
882 hidden_states, input_dimensions, layer_head_mask, output_attentions, always_partition
883 )
885 hidden_states = layer_outputs[0]
886 hidden_states_before_downsampling = layer_outputs[1]
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:801, in SwinStage.forward(self, hidden_states, input_dimensions, head_mask, output_attentions, always_partition)
798 for i, layer_module in enumerate(self.blocks):
799 layer_head_mask = head_mask[i] if head_mask is not None else None
--> 801 layer_outputs = layer_module(
802 hidden_states, input_dimensions, layer_head_mask, output_attentions, always_partition
803 )
805 hidden_states = layer_outputs[0]
807 hidden_states_before_downsampling = hidden_states
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:731, in SwinLayer.forward(self, hidden_states, input_dimensions, head_mask, output_attentions, always_partition)
726 hidden_states_windows = hidden_states_windows.view(-1, self.window_size * self.window_size, channels)
727 attn_mask = self.get_attn_mask(
728 height_pad, width_pad, dtype=hidden_states.dtype, device=hidden_states_windows.device
729 )
--> 731 attention_outputs = self.attention(
732 hidden_states_windows, attn_mask, head_mask, output_attentions=output_attentions
733 )
735 attention_output = attention_outputs[0]
737 attention_windows = attention_output.view(-1, self.window_size, self.window_size, channels)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:604, in SwinAttention.forward(self, hidden_states, attention_mask, head_mask, output_attentions)
597 def forward(
598 self,
599 hidden_states: torch.Tensor,
(...) 602 output_attentions: Optional[bool] = False,
603 ) -> Tuple[torch.Tensor]:
--> 604 self_outputs = self.self(hidden_states, attention_mask, head_mask, output_attentions)
605 attention_output = self.output(self_outputs[0], hidden_states)
606 outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1739, in Module._wrapped_call_impl(self, *args, **kwargs)
1737 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1738 else:
-> 1739 return self._call_impl(*args, **kwargs)
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\torch\nn\modules\module.py:1750, in Module._call_impl(self, *args, **kwargs)
1745 # If we don't have any hooks, we want to skip the rest of the logic in
1746 # this function, and just call forward.
1747 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1748 or _global_backward_pre_hooks or _global_backward_hooks
1749 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1750 return forward_call(*args, **kwargs)
1752 result = None
1753 called_always_called_hooks = set()
File f:\Desktop\School_Stuff\Programming\AI\.venv\Lib\site-packages\transformers\models\swin\modeling_swin.py:527, in SwinSelfAttention.forward(self, hidden_states, attention_mask, head_mask, output_attentions)
522 relative_position_bias = relative_position_bias.view(
523 self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1
524 )
526 relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()
--> 527 attention_scores = attention_scores + relative_position_bias.unsqueeze(0)
529 if attention_mask is not None:
530 # Apply the attention mask is (precomputed for all layers in SwinModel forward() function)
531 mask_shape = attention_mask.shape[0]
RuntimeError: The size of tensor a (16) must match the size of tensor b (49) at non-singleton dimension 3
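What the mismatch means: the relative position bias added at modeling_swin.py:527 covers 49 positions because the checkpoint's bias table is built for 7x7 attention windows, while the attention scores only cover 16 (4x4) positions per window. That happens when the images reaching the model are smaller than the resolution the window-7 checkpoint was pretrained at (224x224), so a later stage's feature map drops below 7x7 and the layer clips its window size. The paste does not show the data pipeline or the checkpoint name, but a likely fix, assuming a torchvision-style transform and a 224-resolution Swin checkpoint (both assumptions, names below are illustrative), is to resize the inputs to the size the model expects:

# Minimal sketch of a possible fix, not the original script: the checkpoint
# name, class count, and transform are assumptions for illustration.
from torchvision import transforms
from transformers import AutoImageProcessor, SwinForImageClassification

checkpoint = "microsoft/swin-tiny-patch4-window7-224"   # assumed window-7 checkpoint
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = SwinForImageClassification.from_pretrained(
    checkpoint,
    num_labels=10,                 # set to the dataset's class count
    ignore_mismatched_sizes=True,  # reinitialize the classification head
)

# Resize to the pretraining resolution so every stage's feature map holds at
# least one full 7x7 window; otherwise a clipped window (4x4 = 16 tokens)
# no longer lines up with the 7x7 = 49-entry relative position bias.
size = processor.size["height"] if isinstance(processor.size, dict) else processor.size
train_transform = transforms.Compose([
    transforms.Resize((size, size)),
    transforms.ToTensor(),
    transforms.Normalize(mean=processor.image_mean, std=processor.image_std),
])

If the dataset really is low-resolution (4x4 windows are consistent with un-resized 32x32 images such as CIFAR-10), the alternative is to use a Swin config or checkpoint whose image_size and window_size match that resolution instead of upscaling every image.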