Skip to content

Commit

Permalink
feat: bilinear interpolation of attention heads when dimension does not match, useful for segformer G
Browse files Browse the repository at this point in the history
  • Loading branch information
beniz committed Jun 13, 2022
1 parent 1327aa1 commit eed9494
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 158 deletions.
21 changes: 19 additions & 2 deletions models/modules/attn_network.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
from torch import nn
import torch.nn.functional as F
import warnings


class BaseGenerator_attn(nn.Module):
def compute_outputs(self, input, attentions, images):
    """Blend attention masks with the generated images and the input.

    The first ``nb_mask_attn - nb_mask_input`` attention heads weight the
    generated ``images``; the remaining heads weight the original ``input``.
    When an attention head is produced at a different spatial resolution
    than its target (e.g. with a segformer backbone), it is bilinearly
    upsampled so the elementwise product is well defined.

    Parameters
    ----------
    input : 4D tensor (N, C, H, W) — presumably the generator input; TODO confirm
    attentions : sequence of ``self.nb_mask_attn`` attention tensors
    images : sequence of generated image tensors

    Returns
    -------
    (images, attentions, outputs) — inputs passed through unchanged plus the
    list of per-head weighted outputs.
    """

    def _match_attention(attention, target):
        # Resize an attention head to the target's spatial size if needed.
        if attention.shape == target.shape:
            return attention
        warnings.warn("Bilinear interpolation of attention heads")
        # mode must be given explicitly: F.interpolate defaults to
        # 'nearest', which would contradict the warning above.
        return F.interpolate(
            attention,
            size=(target.shape[2], target.shape[3]),
            mode="bilinear",
            align_corners=False,
        )

    outputs = []

    for i in range(self.nb_mask_attn - self.nb_mask_input):
        outputs.append(images[i] * _match_attention(attentions[i], images[i]))

    for i in range(self.nb_mask_attn - self.nb_mask_input, self.nb_mask_attn):
        # Fixed: original compared input.shape against the attention
        # *tensor* (not its .shape), which raises on truth-testing the
        # resulting elementwise bool tensor.
        outputs.append(input * _match_attention(attentions[i], input))

    return images, attentions, outputs

Expand Down
156 changes: 0 additions & 156 deletions scripts/gen_sliding_images.py

This file was deleted.

0 comments on commit eed9494

Please sign in to comment.