Skip to content

Commit

Permalink
Hot fix: rename `self.position_embeddings` to `self.position_embedding` (#33958)
Browse files Browse the repository at this point in the history
  • Loading branch information
ArthurZucker committed Oct 7, 2024
1 parent 333ec0a commit be96843
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions src/transformers/models/siglip/modeling_siglip.py
Original file line number Diff line number Diff line change
Expand Up @@ -279,13 +279,13 @@ def interpolate_pos_encoding(self, embeddings: torch.Tensor, height: int, width:
"""

num_patches = embeddings.shape[1]
num_positions = self.position_embeddings.shape[1]
num_positions = self.position_embedding.shape[1]

# always interpolate when tracing to ensure the exported model works for dynamic input shapes
if not torch.jit.is_tracing() and num_patches == num_positions and height == width:
return self.position_embeddings
return self.position_embedding

patch_pos_embed = self.position_embeddings
patch_pos_embed = self.position_embedding

dim = embeddings.shape[-1]

Expand Down

0 comments on commit be96843

Please sign in to comment.