Unverified 提交 6bd5e8bc authored 作者: Glenn Jocher's avatar Glenn Jocher 提交者: GitHub

nn.SiLU() export support (#1713)

上级 c923fbff
...@@ -15,7 +15,7 @@ import torch.nn as nn ...@@ -15,7 +15,7 @@ import torch.nn as nn
import models import models
from models.experimental import attempt_load from models.experimental import attempt_load
from utils.activations import Hardswish from utils.activations import Hardswish, SiLU
from utils.general import set_logging, check_img_size from utils.general import set_logging, check_img_size
if __name__ == '__main__': if __name__ == '__main__':
...@@ -43,9 +43,12 @@ if __name__ == '__main__': ...@@ -43,9 +43,12 @@ if __name__ == '__main__':
# Update model: replace modules with export-friendly equivalents before tracing.
for k, m in model.named_modules():
    m._non_persistent_buffers_set = set()  # pytorch 1.6.0 compatibility
    if isinstance(m, models.common.Conv):  # assign export-friendly activations
        # nn.Hardswish / nn.SiLU are not supported by older ONNX/CoreML
        # exporters, so swap in the hand-written equivalents that use only
        # primitive ops (sigmoid, mul, relu6).
        if isinstance(m.act, nn.Hardswish):
            m.act = Hardswish()
        elif isinstance(m.act, nn.SiLU):
            m.act = SiLU()
    # elif isinstance(m, models.yolo.Detect):
    #     m.forward = m.forward_export  # assign forward (optional)
model.model[-1].export = True  # set Detect() layer export=True
y = model(img)  # dry run to initialize shapes before export
......
...@@ -5,8 +5,8 @@ import torch.nn as nn ...@@ -5,8 +5,8 @@ import torch.nn as nn
import torch.nn.functional as F import torch.nn.functional as F
# SiLU https://arxiv.org/pdf/1905.02244.pdf ----------------------------------------------------------------------------
class SiLU(nn.Module):  # export-friendly version of nn.SiLU()
    """SiLU (a.k.a. Swish) activation, x * sigmoid(x), built from primitive
    ops so ONNX/CoreML exporters that lack nn.SiLU support can trace it."""

    @staticmethod
    def forward(x):
        # Stateless: identical math to nn.SiLU() but expressed as mul+sigmoid.
        return x * torch.sigmoid(x)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论