Skip to content

Commit

Permalink
Merge pull request #695 from eriklindernoren/feature/mish_activation_function
Browse files Browse the repository at this point in the history

Add support for the mish activation function
  • Loading branch information
Flova authored May 27, 2021
2 parents 5a64a3d + 2290756 commit 1c03ebe
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 1 deletion.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "PyTorchYolo"
version = "1.3.2"
version = "1.4.0"
readme = "README.md"
repository = "https://github.com/eriklindernoren/PyTorch-YOLOv3"
description = "Minimal PyTorch implementation of YOLO"
Expand Down
10 changes: 10 additions & 0 deletions pytorchyolo/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ def create_modules(module_defs):
nn.BatchNorm2d(filters, momentum=0.9, eps=1e-5))
if module_def["activation"] == "leaky":
modules.add_module(f"leaky_{module_i}", nn.LeakyReLU(0.1))
if module_def["activation"] == "mish":
modules.add_module(f"mish_{module_i}", Mish())

elif module_def["type"] == "maxpool":
kernel_size = int(module_def["size"])
Expand Down Expand Up @@ -111,6 +113,14 @@ def forward(self, x):
x = F.interpolate(x, scale_factor=self.scale_factor, mode=self.mode)
return x

class Mish(nn.Module):
    """Mish activation function: ``x * tanh(softplus(x))``.

    Reference implementation and paper:
    https://github.com/digantamisra98/Mish
    """

    def forward(self, x):
        # softplus(x) = ln(1 + exp(x)) is a smooth approximation of ReLU;
        # gating it through tanh yields the smooth, non-monotonic Mish curve.
        gate = torch.tanh(F.softplus(x))
        return x * gate

class YOLOLayer(nn.Module):
"""Detection layer"""
Expand Down

0 comments on commit 1c03ebe

Please sign in to comment.