Implement all missing docstrings (#5298)
Co-authored-by: snyk-bot <snyk-bot@snyk.io>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
parent e7f0658744
commit 7fd5dcbd86

26 changed files with 649 additions and 79 deletions
@@ -88,6 +88,7 @@ class DWConv(Conv):
     """Depth-wise convolution."""

     def __init__(self, c1, c2, k=1, s=1, d=1, act=True):  # ch_in, ch_out, kernel, stride, dilation, activation
+        """Initialize Depth-wise convolution with given parameters."""
         super().__init__(c1, c2, k, s, g=math.gcd(c1, c2), d=d, act=act)

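A quick usage sketch of DWConv (not part of the diff; the ultralytics.nn.modules.conv import path and the size-preserving autopad behavior of the parent Conv are assumptions):

import torch
from ultralytics.nn.modules.conv import DWConv  # assumed import path

# groups = math.gcd(c1, c2), so with c1=16, c2=32 each group reads one input channel
dw = DWConv(16, 32, k=3, s=1)
x = torch.randn(1, 16, 64, 64)
print(dw(x).shape)  # torch.Size([1, 32, 64, 64]); stride 1 keeps the spatial size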
@@ -95,6 +96,7 @@ class DWConvTranspose2d(nn.ConvTranspose2d):
     """Depth-wise transpose convolution."""

     def __init__(self, c1, c2, k=1, s=1, p1=0, p2=0):  # ch_in, ch_out, kernel, stride, padding, padding_out
+        """Initialize DWConvTranspose2d class with given parameters."""
         super().__init__(c1, c2, k, s, p1, p2, groups=math.gcd(c1, c2))

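The transpose variant is typically used for learned upsampling; a sketch under the same import-path assumption:

import torch
from ultralytics.nn.modules.conv import DWConvTranspose2d  # assumed import path

# k=2, s=2 with zero padding doubles each spatial dim: (H - 1) * s - 2 * p1 + k
up = DWConvTranspose2d(32, 32, k=2, s=2)
x = torch.randn(1, 32, 40, 40)
print(up(x).shape)  # torch.Size([1, 32, 80, 80])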
@@ -121,12 +123,18 @@ class ConvTranspose(nn.Module):
 class Focus(nn.Module):
     """Focus wh information into c-space."""

-    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):  # ch_in, ch_out, kernel, stride, padding, groups
+    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):
+        """Initializes Focus object with user defined channel, convolution, padding, group and activation values."""
         super().__init__()
         self.conv = Conv(c1 * 4, c2, k, s, p, g, act=act)
         # self.contract = Contract(gain=2)

-    def forward(self, x):  # x(b,c,w,h) -> y(b,4c,w/2,h/2)
+    def forward(self, x):
+        """
+        Applies convolution to concatenated tensor and returns the output.
+
+        Input shape is (b,c,w,h) and output shape is (b,4c,w/2,h/2).
+        """
         return self.conv(torch.cat((x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]), 1))
         # return self.conv(self.contract(x))
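The slicing in forward is a lossless space-to-depth rearrangement: every 2x2 spatial patch is split across four channel groups before the convolution sees it. A standalone sketch of just that step:

import torch

x = torch.randn(1, 3, 8, 8)  # (b, c, w, h)
patches = torch.cat(
    (x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]), 1
)
print(patches.shape)  # torch.Size([1, 12, 4, 4]) -> (b, 4c, w/2, h/2)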
@@ -134,7 +142,10 @@ class Focus(nn.Module):
 class GhostConv(nn.Module):
     """Ghost Convolution https://github.com/huawei-noah/ghostnet."""

-    def __init__(self, c1, c2, k=1, s=1, g=1, act=True):  # ch_in, ch_out, kernel, stride, groups
+    def __init__(self, c1, c2, k=1, s=1, g=1, act=True):
+        """Initializes the GhostConv object with input channels, output channels, kernel size, stride, groups and
+        activation.
+        """
         super().__init__()
         c_ = c2 // 2  # hidden channels
         self.cv1 = Conv(c1, c_, k, s, None, g, act=act)
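The hunk is cut off at cv1, but the ghost idea it sets up is simple: produce half the output channels with a regular convolution and derive the other half from them with a cheap depth-wise convolution. A minimal self-contained sketch of that pattern (it mirrors, but is not, the repo's Conv-based implementation):

import torch
import torch.nn as nn

class TinyGhostConv(nn.Module):
    """Half the outputs from a dense conv, half from a cheap depth-wise conv."""

    def __init__(self, c1, c2, k=1, s=1):
        super().__init__()
        c_ = c2 // 2  # hidden channels, as in the hunk
        self.primary = nn.Conv2d(c1, c_, k, s, k // 2, bias=False)
        self.cheap = nn.Conv2d(c_, c_, 5, 1, 2, groups=c_, bias=False)  # depth-wise

    def forward(self, x):
        y = self.primary(x)
        return torch.cat((y, self.cheap(y)), 1)

print(TinyGhostConv(16, 32)(torch.randn(1, 16, 20, 20)).shape)  # [1, 32, 20, 20]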
@@ -280,7 +291,8 @@ class SpatialAttention(nn.Module):
 class CBAM(nn.Module):
     """Convolutional Block Attention Module."""

-    def __init__(self, c1, kernel_size=7):  # ch_in, kernels
+    def __init__(self, c1, kernel_size=7):
+        """Initialize CBAM with given input channel (c1) and kernel size."""
         super().__init__()
         self.channel_attention = ChannelAttention(c1)
         self.spatial_attention = SpatialAttention(kernel_size)
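CBAM chains the two attention submodules, reweighting features channel-wise and then spatially while preserving the tensor shape. A usage sketch under the same import-path assumption:

import torch
from ultralytics.nn.modules.conv import CBAM  # assumed import path

cbam = CBAM(64, kernel_size=7)
x = torch.randn(1, 64, 32, 32)
print(cbam(x).shape)  # torch.Size([1, 64, 32, 32]); same shape, reweighted features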